commit d2bbeb79d2a8de6425195a70b6ce25111902c38f Author: Relintai Date: Tue May 23 19:06:25 2023 +0200 Initial commit. Added https://github.com/touilleMan/godot-python b9757da859a4d as a base, but without the submodule. diff --git a/.bumpversion.cfg b/.bumpversion.cfg new file mode 100644 index 0000000..71d3a76 --- /dev/null +++ b/.bumpversion.cfg @@ -0,0 +1,9 @@ +[bumpversion] +current_version = 0.9.0 +commit = True +tag = True + +[bumpversion:file:pythonscript/cffi_bindings/mod_godot.inc.py] +search = __version__ = '{current_version}' +replace = __version__ = '{new_version}' + diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000..47d5c94 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,251 @@ +name: CI build +on: + push: + branches: + - master + pull_request: + branches: + - master + + +# Global Settings +env: + PYTHON_VERSION: "3.7" + GODOT_BINARY_VERSION: "3.2.3" + + +jobs: + + + static-checks: + name: '๐Ÿ“Š Static checks' + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@f1d3225b5376a0791fdee5a0e8eac5289355e43a # pin@v2 + with: + submodules: true + - name: Set up Python + uses: actions/setup-python@0291cefc54fa79cd1986aee8fa5ecb89ad4defea # pin@v2 + with: + python-version: ${{ env.PYTHON_VERSION }} + - name: Bootstrap + run: | + set -eux + python --version + pip install pre-commit + - name: Pre-commit hooks check + run: | + pre-commit run --all-files --show-diff-on-failure + + +################################################################################# + + + linux-build: + name: '๐Ÿง Linux build' + runs-on: ubuntu-latest + env: + CC: clang + PLATFORM: 'x11-64' + steps: + - name: 'Checkout' + uses: actions/checkout@f1d3225b5376a0791fdee5a0e8eac5289355e43a # pin@v2 + with: + submodules: true + - name: 'Set up Python' + uses: actions/setup-python@0291cefc54fa79cd1986aee8fa5ecb89ad4defea # pin@v2 + with: + python-version: ${{ env.PYTHON_VERSION }} + - name: 'Setup venv' + run: | + set -eux + ${{ env.CC }} --version + python --version + pip install -U pip + pip install -r requirements.txt + # Configuration for scons + echo 'godot_binary = "${{ env.GODOT_BINARY_VERSION }}"' >> custom.py + echo 'platform = "${{ env.PLATFORM }}"' >> custom.py + echo 'CC = "${{ env.CC }}"' >> custom.py + - name: 'Build project' + run: | + set -eux + scons build -j2 + - name: 'Start xvfb' + run: | + /usr/bin/Xvfb :99 -screen 0 1024x768x24 > /dev/null 2>&1 & + echo ">>> Started xvfb" + - name: 'Run tests' + run: | + set -eux + scons tests headless=true + env: + DISPLAY: ':99.0' + - name: 'Generate artifact archive' + run: | + set -eux + scons release + - name: 'Export release artifact' + uses: actions/upload-artifact@11830c9f4d30053679cb8904e3b3ce1b8c00bf40 # pin@v2 + with: + name: ${{ env.PLATFORM }}-release + path: 'build/godot-python-*.tar.bz2' + + +################################################################################# + + + windows-build: + name: '๐Ÿ Windows build' + runs-on: windows-latest + strategy: + matrix: + include: + - PLATFORM: 'windows-64' + PYTHON_ARCH: 'x64' + VS_ARCH: 'amd64' + - PLATFORM: 'windows-32' + PYTHON_ARCH: 'x86' + VS_ARCH: 'x86' + steps: + - name: 'Checkout' + uses: actions/checkout@f1d3225b5376a0791fdee5a0e8eac5289355e43a # pin@v2 + with: + submodules: true + - name: 'Set up Python' + uses: actions/setup-python@0291cefc54fa79cd1986aee8fa5ecb89ad4defea # pin@v2 + with: + python-version: ${{ env.PYTHON_VERSION }} + architecture: ${{ matrix.PYTHON_ARCH }} + - name: 'Setup venv' 
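+        # The `echo ... >> custom.py` lines in the run step below feed the SCons
+        # configuration: SConstruct loads custom.py via `Variables("custom.py")`,
+        # so each echoed line persists a build option just as if it had been
+        # passed on the scons command line.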
+ shell: bash + run: | + set -eux + python --version + python -m pip install --user -U pip + python -m pip install --user -r requirements.txt + # Configuration for scons + echo 'godot_binary = "${{ env.GODOT_BINARY_VERSION }}"' >> custom.py + echo 'platform = "${{ matrix.PLATFORM }}"' >> custom.py + echo 'MSVC_USE_SCRIPT = True' >> custom.py + echo 'TARGET_ARCH = "${{ matrix.VS_ARCH }}"' >> custom.py + echo 'CC = "cl.exe"' >> custom.py + - name: 'Build project' + shell: bash + run: | + set -eux + scons build -j2 + - name: 'Install Mesa3D OpenGL' + shell: bash + run: | + set -eux + # Azure pipelines doesn't provide a GPU with an OpenGL driver, + # hence we use Mesa3D as software OpenGL driver + pushd build/${{ matrix.PLATFORM }}/platforms/ + if [ "${{ matrix.PLATFORM }}" = "windows-64" ] + then + curl https://downloads.fdossena.com/Projects/Mesa3D/Builds/MesaForWindows-x64-20.0.7.7z -o mesa.7z + else + curl https://downloads.fdossena.com/Projects/Mesa3D/Builds/MesaForWindows-20.0.7.7z -o mesa.7z + fi + # opengl32.dll must be extracted in the same directory than Godot binary + 7z.exe x mesa.7z + ls -lh opengl32.dll # Sanity check + popd + - name: 'Run tests' + shell: bash + run: | + set -eux + scons tests + - name: 'Generate artifact archive' + shell: bash + run: | + scons release + - name: 'Export release artifact' + uses: actions/upload-artifact@11830c9f4d30053679cb8904e3b3ce1b8c00bf40 # pin@v2 + with: + name: ${{ matrix.PLATFORM }}-release + path: 'build/godot-python-*.zip' + + +################################################################################# + + + macos-build: + name: '๐ŸŽ macOS build' + runs-on: macos-latest + env: + CC: clang + PLATFORM: 'osx-64' + steps: + - name: 'Checkout' + uses: actions/checkout@f1d3225b5376a0791fdee5a0e8eac5289355e43a # pin@v2 + with: + submodules: true + - name: 'Set up Python' + uses: actions/setup-python@0291cefc54fa79cd1986aee8fa5ecb89ad4defea # pin@v2 + with: + python-version: ${{ env.PYTHON_VERSION }} + - name: 'Setup venv' + run: | + set -eux + ${{ env.CC }} --version + python --version + brew update + brew install zlib openssl + brew install --cask xquartz + pip install -U pip + pip install -r requirements.txt + # Configuration for scons + echo 'godot_binary = "${{ env.GODOT_BINARY_VERSION }}"' >> custom.py + echo 'platform = "${{ env.PLATFORM }}"' >> custom.py + echo 'CC = "${{ env.CC }}"' >> custom.py + - name: 'Build project' + run: | + set -eux + scons build -j2 + - name: 'Run tests' + run: | + set -eux + scons tests + - name: 'Generate artifact archive' + run: | + set -eux + scons release + - name: 'Export release artifact' + uses: actions/upload-artifact@11830c9f4d30053679cb8904e3b3ce1b8c00bf40 # pin@v2 + with: + name: ${{ env.PLATFORM }}-release + path: 'build/godot-python-*.tar.bz2' + + +################################################################################# + + + publish-release: + name: 'Publish ${{ matrix.PLATFORM }} release' + if: startsWith(github.ref, 'refs/tags/v') + runs-on: ubuntu-latest + needs: + - linux-build + - windows-build + - macos-build + strategy: + matrix: + include: + - PLATFORM: x11-64 + - PLATFORM: windows-64 + - PLATFORM: windows-32 + - PLATFORM: osx-64 + steps: + - uses: actions/download-artifact@0ede0875b5db9a2824878bbbbe3d758a75eb8268 # pin@v2 + name: ${{ matrix.PLATFORM }}-release + - name: 'Upload release' + uses: svenstaro/upload-release-action@483c1e56f95e88835747b1c7c60581215016cbf2 # pin@v2 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + tag: ${{ github.ref }} + file: 
godot-python-*.* + file_glob: true + overwrite: true diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e4e090a --- /dev/null +++ b/.gitignore @@ -0,0 +1,34 @@ +# Python stuff +/venv* +__pycache__ +*.pyc +.mypy_cache + +# IDE stuff +.vs +.vscode +.idea + +# mac os thumbs files +.DS_Store + +# Godot import folders +.import +.cache + +# Godot runtime logs +logs + +# Scons build artefact +.sconsign.dblite + +# scons stuff +/custom.py + +# Build directory +/build/ + +# Lazy generated symlinks on build +/examples/*/addons +/tests/*/addons +/tests/*/lib diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..400646f --- /dev/null +++ b/.gitmodules @@ -0,0 +1,4 @@ +[submodule "godot_headers"] + path = godot_headers + url = https://github.com/godotengine/godot_headers.git + branch = 3.3 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..4a060f2 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,26 @@ +repos: +- repo: https://github.com/ambv/black + rev: 19.3b0 + hooks: + - id: black + types: [file] # override `types: [python]` + files: (\.py$|^SConstruct$|/SConscript$) + exclude: (^tests/_lib_vendors|^(tests|examples)/lib) # Ignore 3rd party stuff + args: + - "--line-length=100" + language_version: python3 +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.2.3 + hooks: + - id: mixed-line-ending + exclude: (^tests/_lib_vendors|^(tests|examples)/lib) # Ignore 3rd party stuff + - id: trailing-whitespace + exclude: (^tests/_lib_vendors|^(tests|examples)/lib) # Ignore 3rd party stuff +- repo: local + hooks: + - id: git_actions_pin + name: "Gitub actions pin 3rd party repos" + entry: python ./misc/pin_github_actions.py check + language: python + language_version: python3 + files: ^.github/ diff --git a/AUTHORS.rst b/AUTHORS.rst new file mode 100644 index 0000000..814e0f3 --- /dev/null +++ b/AUTHORS.rst @@ -0,0 +1,23 @@ +======= +Credits +======= + +Development Lead +---------------- + +* Emmanuel Leblond `@touilleMan `_ + +Godot Python logo +----------------- + +* `@Pinswell `_ + +Contributors +------------ + +* Rฤƒzvan Cosmin Rฤƒdulescu `@razvanc-r `_ +* Max Hilbrunner `@mhilbrunner `_ +* Chris Ridenour `@cridenour `_ +* Gary Oberbrunner `@garyo `_ +* Paolo Barresi `@paolobb4 `_ +* Colin Kinloch `@ColinKinloch `_ diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..2a9683d --- /dev/null +++ b/LICENSE @@ -0,0 +1,25 @@ +Godot Python Copyright (c) 2016 by Emmanuel Leblond. +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+
+Godot Python Logo (C) Pinswell
+Distributed under the terms of the Creative Commons Attribution License
+version 3.0 (CC-BY 3.0)
+https://creativecommons.org/licenses/by/3.0/legalcode.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..8e48906
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,31 @@
+.PHONY: all build clean test example
+
+
+BASEDIR = $(shell pwd)
+BACKEND ?= cpython
+PLATFORM ?= x11-64
+
+EXTRA_OPTS ?=
+
+SCONS_BIN ?= scons
+SCONS_CMD = $(SCONS_BIN) backend=$(BACKEND) platform=$(PLATFORM) $(EXTRA_OPTS)
+
+# Add `LIBGL_ALWAYS_SOFTWARE=1` if your computer struggles with OpenGL 3...
+
+all: build
+
+
+build:
+	$(SCONS_CMD)
+
+
+clean:
+	$(SCONS_CMD) -c
+
+
+test:
+	$(SCONS_CMD) test
+
+
+example:
+	$(SCONS_CMD) example
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..db35286
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,403 @@
+.. image:: https://github.com/touilleMan/godot-python/actions/workflows/build.yml/badge.svg
+   :target: https://github.com/touilleMan/godot-python/actions
+   :alt: Github action tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+   :target: https://github.com/ambv/black
+   :alt: Code style: black
+
+
+================================================
+Godot Python, because you want Python on Godot!
+================================================
+
+
+🚧🚨 **Heavy refactoring in progress** 🚨🚧
+
+The project is under heavy refactoring to support Godot 4 (which is totally
+incompatible with the current codebase).
+
+Development is done on the `godot4-meson branch <https://github.com/touilleMan/godot-python/tree/godot4-meson>`_
+until things start getting usable.
+
+
+.. image:: https://github.com/touilleMan/godot-python/raw/master/misc/godot_python.svg
+   :width: 200px
+   :align: right
+
+The goal of this project is to provide Python language support as a scripting
+module for the `Godot <https://godotengine.org>`_ game engine.
+
+
+Quickstart
+==========
+
+In order of simplicity:
+
+- Download the project directly from within Godot with the asset library tab.
+- Download it from the `asset library website `_.
+- Finally, you can also head to the project `release page <https://github.com/touilleMan/godot-python/releases>`_ if you only want to download the build for one specific platform.
+
+
+.. image:: https://github.com/touilleMan/godot-python/raw/master/misc/showcase.png
+   :align: center
+
+
+API
+===
+
+Example:
+
+.. code-block:: python
+
+    # Explicit is better than implicit
+    from godot import exposed, export, Vector2, Node2D, ResourceLoader
+
+    WEAPON_RES = ResourceLoader.load("res://weapon.tscn")
+    SPEED = Vector2(10, 10)
+
+    @exposed
+    class Player(Node2D):
+        """
+        This is the file's main class which will be made available to Godot. This
+        class must inherit from `godot.Node` or any of its children (e.g.
+        `godot.KinematicBody`).
+
+        Because Godot scripts only accept file paths, you can't have two `exposed` classes in the same file.
+        """
+        # An exposed class can define some attributes with export() to achieve
+        # a goal similar to GDScript's `export` keyword
+        name = export(str)
+
+        # Properties can be exported as well
+        @export(int)
+        @property
+        def age(self):
+            return self._age
+
+        @age.setter
+        def age(self, value):
+            self._age = value
+
+        # All methods are exposed to Godot
+        def talk(self, msg):
+            print(f"I'm saying {msg}")
+
+        def _ready(self):
+            # Don't confuse `__init__` with Godot's `_ready`!
+            self.weapon = WEAPON_RES.instance()
+            self._age = 42
+            # Of course you can access properties & methods defined in the parent
+            name = self.get_name()
+            print(f"{name} position x={self.position.x}, y={self.position.y}")
+
+        def _process(self, delta):
+            self.position += SPEED * delta
+
+        ...
+
+
+    class Helper:
+        """
+        Other classes are considered helpers and cannot be called from outside
+        Python. However they can be imported from another python module.
+        """
+        ...
+
+
+Building
+========
+
+To build the project from source, first check out the repo or download the
+latest tarball.
+
+Godot-Python requires Python >= 3.7 and a C compiler.
+
+
+Godot GDNative header
+---------------------
+
+The Godot GDNative headers are provided as a git submodule:
+
+.. code-block:: bash
+
+    $ git submodule init
+    $ git submodule update
+
+Alternatively, you can get them `from github <https://github.com/godotengine/godot_headers>`_.
+
+
+Linux
+-----
+
+On a fresh Ubuntu install, you will need to install these:
+
+.. code-block:: bash
+
+    $ apt install python3 python3-pip python3-venv build-essential
+
+On top of that, building the CPython interpreter requires the development
+headers of its `extension modules `_
+(for instance, if you lack the sqlite dev headers, your Godot-Python build
+won't contain the sqlite3 python module).
+
+The simplest way is to uncomment the main deb-src in `/etc/apt/sources.list`:
+
+.. code-block:: bash
+
+    deb-src http://archive.ubuntu.com/ubuntu/ artful main
+
+and instruct apt to install the needed packages:
+
+.. code-block:: bash
+
+    $ apt update
+    $ apt build-dep python3.6
+
+See the `Python Developer's Guide <https://devguide.python.org/>`_
+for instructions on additional platforms.
+
+
+MacOS
+-----
+
+On MacOS, you will need XCode and its command line tools installed.
+
+.. code-block:: bash
+
+    $ xcode-select --install
+
+If you are using CPython as your backend, you will need these dependencies.
+To install them with Homebrew:
+
+.. code-block:: bash
+
+    $ brew install python3 openssl zlib
+
+You will also need virtualenv for your python.
+
+
+Windows
+-------
+
+Install Visual Studio and Python 3, then submit a PR to improve this paragraph ;-)
+
+
+Create the virtual env
+----------------------
+
+The Godot-Python build system is heavily based on Python (mainly SCons, Cython
+and Jinja2). Hence we have to create a Python virtual env to install all those
+dependencies without clashing with your global Python configuration.
+
+.. code-block:: bash
+
+    $ cd <path to your godot-python checkout>
+    godot-python$ python3 -m venv venv
+
+Now you need to activate the virtual env; this is something you should do
+every time you want to use it.
+
+For Linux/MacOS:
+
+.. code-block:: bash
+
+    godot-python$ . ./venv/bin/activate
+
+For Windows:
+
+.. code-block:: bash
+
+    godot-python$ ./venv/bin/activate.bat
+
+
+Finally we can install dependencies:
+
+.. code-block:: bash
+
+    godot-python(venv)$ pip install -r requirements.txt
+
+
+Running the build
+-----------------
+
+For Linux:
+
+.. code-block:: bash
+
+    godot-python(venv)$ scons platform=x11-64 release
+
+For Windows:
+
+.. code-block:: bash
+
+    godot-python(venv)$ scons platform=windows-64 release
+
+For MacOS:
+
+.. code-block:: bash
+
+    godot-python(venv)$ scons platform=osx-64 CC=clang release
+
+Valid platforms are `x11-64`, `x11-32`, `windows-64`, `windows-32` and `osx-64`.
+Check the CI badge above to see the current status of your platform.
+
+This command will check out the CPython repo, move to a pinned commit and
+build CPython from source.
+
+It will then generate ``pythonscript/godot/bindings.pyx`` (the Godot API
+bindings) from GDNative's ``api.json`` and compile it.
+This part is long and really memory demanding, so be patient ;-)
+When hacking godot-python you can heavily speed up this step by passing
+``bindings_generate_sample=true`` to scons in order to build only a small
+subset of the bindings.
+
+Eventually the rest of the source will be compiled and a zip build archive
+will be available in the build directory.
+
+
+Testing your build
+------------------
+
+.. code-block:: bash
+
+    godot-python(venv)$ scons platform=<platform> test
+
+This will run the pytest suites defined in `tests/bindings` inside the Godot
+environment. If not already present, a precompiled Godot binary (defined in
+SConstruct and the platform-specific SConscript files) will be downloaded,
+and the correct library path for the GDNative wrapper will be set.
+
+
+Running the example project
+---------------------------
+
+.. code-block:: bash
+
+    godot-python(venv)$ scons platform=<platform> example
+
+This will run the converted pong example in `examples/pong` inside the Godot
+environment. If not already present, a precompiled Godot binary (defined in
+SConstruct) will be downloaded, and the correct library path for the GDNative
+wrapper will be set.
+
+
+Using a local Godot version
+---------------------------
+
+If you have a pre-existing version of Godot, you can instruct the build script
+to use its static library and binary for building and tests.
+
+.. code-block:: bash
+
+    godot-python(venv)$ scons platform=x11-64 godot_binary=../godot/bin/godot.x11.opt.64
+
+
+Additional build options
+------------------------
+
+You can check out all the build options `in this file <https://github.com/touilleMan/godot-python/blob/master/SConstruct>`_.
+
+
+FAQ
+===
+
+**How can I export my project?**
+
+Currently, godot-python does not support automatic export, which means that the python environment is not copied to the release when using Godot's export menu. A release can be created manually:
+
+First, export the project in .zip format.
+
+Second, extract the .zip in a directory. For the sake of example, let's say the directory is called :code:`godotpythonproject`.
+
+Third, copy the correct Python environment into this folder (if it hasn't been automatically included in the export). Inside your project folder, you will need to find :code:`/addons/pythonscript/x11-64`, replacing "x11-64" with the correct target system you are deploying to. Copy the entire folder for your system, placing it at the same relative position, e.g. :code:`godotpythonproject/addons/pythonscript/x11-64` if your unzipped directory was "godotpythonproject". Legally speaking you should also copy LICENSE.txt from the pythonscript folder. (The lazy option at this point is to simply copy the entire addons folder from your project to your unzipped directory.)
+
+Fourth, place a Godot release into the directory. The Godot export menu has probably downloaded an appropriate release already, or you can go to Editor -> Manage Export Templates inside Godot to download fresh ones. These are stored in a location which depends on your operating system.
For example, on Windows they may be found at :code:`%APPDATA%\Godot\templates\<version>`; on Linux or OSX it is :code:`~/.godot/templates/`. Copy the file matching your export. (It may matter whether you selected "Export With Debug" when creating the .zip file; choose the debug or release version accordingly.)
+
+Running the Godot release should now properly execute your release. However, if you were developing on a different Python environment (say, the one held in the osx-64 folder) than the one you include with the release (for example the windows-64 folder), and you make any alterations to that environment, such as installing Python packages, these will not carry over; take care to produce a suitable Python environment for the target platform.
+
+See also `this issue `_.
+
+**How can I use Python packages in my project?**
+
+In essence, godot-python installs a python interpreter inside your project which can then be distributed as part of the final game. Python packages you want to use need to be installed for that interpreter and of course included in the final release. This can be accomplished by using pip to install packages; however, pip is not provided, so it must be installed too.
+
+First, locate the correct python interpreter. This will be inside your project at :code:`addons\pythonscript\windows-64\python.exe` for 64-bit Windows, :code:`addons/pythonscript/osx-64/bin/python3` for OSX, etc. Then install pip by running:
+
+.. code-block::
+
+    addons\pythonscript\windows-64\python.exe -m ensurepip
+
+(substituting the correct python for your system). Any other method of installing pip at this location is fine too, and this only needs to be done once. Afterward, any desired packages can be installed by running:
+
+.. code-block::
+
+    addons\pythonscript\windows-64\python.exe -m pip install numpy
+
+again substituting the correct python executable, and replacing numpy with whatever packages you desire. The package can now be imported in your Python code as normal.
+
+Note that this will only install packages onto the target platform (here, windows-64), so when exporting the project to a different platform, care must be taken to provide all the necessary libraries.
+
+**How can I debug my project with PyCharm?**
+
+This can be done using "Attach to Local Process", but first you have to change the Godot binary filename to include :code:`python`, for example :code:`Godot_v3.0.2-stable_win64.exe` to :code:`python_Godot_v3.0.2-stable_win64.exe`.
+For a more detailed guide and explanation see this `external blog post `_.
+
+**How can I autoload a python script without attaching it to a Node?**
+
+In your :code:`project.godot` file, add the following section::
+
+    [autoload]
+    autoloadpy="*res://autoload.py"
+
+In addition to the usual::
+
+    [gdnative]
+    singletons=[ "res://pythonscript.gdnlib" ]
+
+You can use any name for the python file and for the autoload name
+:code:`autoloadpy`.
+
+Then :code:`autoload.py` can expose a Node::
+
+    from godot import exposed, export
+    from godot.bindings import *
+
+    @exposed
+    class autoload(Node):
+
+        def hi(self, to):
+            return 'Hello %s from Python!' % to
+
+which can then be called from your gdscript code as an attribute of
+the :code:`autoloadpy` singleton (use the name defined in your :code:`project.godot`)::
+
+    print(autoloadpy.hi('root'))
+
+**How can I efficiently access PoolArrays?**
+
+:code:`PoolIntArray`, :code:`PoolFloatArray`, :code:`PoolVector3Array`
+and the other pool arrays can't be accessed directly because they must
+be locked in memory first.
Use the :code:`arr.raw_access()` context +manager to lock it:: + + arr = PoolIntArray() # create the array + arr.resize(10000) + + with arr.raw_access() as ptr: + for i in range(10000): + ptr[i] = i # this is fast + + # read access: + with arr.raw_access() as ptr: + for i in range(10000): + assert ptr[i] == i # so is this + +Keep in mind great performances comes with great responsabilities: there is no +boundary check so you may end up with memory corruption if you don't take care ;-) + +See the `godot-python issue `_. diff --git a/SConstruct b/SConstruct new file mode 100644 index 0000000..69859bd --- /dev/null +++ b/SConstruct @@ -0,0 +1,234 @@ +import os +import re +import shutil +from datetime import datetime +from SCons.Platform.virtualenv import ImportVirtualenv +from SCons.Errors import UserError + + +EnsurePythonVersion(3, 7) +EnsureSConsVersion(3, 0) + + +def extract_version(): + # Hold my beer... + gl = {} + exec(open("pythonscript/godot/_version.py").read(), gl) + return gl["__version__"] + + +def godot_binary_converter(val, env): + file = File(val) + if file.exists(): + # Note here `env["godot_binary_download_version"]` is not defined, this is ok given + # this variable shouldn't be needed if Godot doesn't have to be downloaded + return file + # Provided value is version information with format ..[-] + match = re.match(r"^([0-9]+)\.([0-9]+)\.([0-9]+)(?:-(\w+))?$", val) + if match: + major, minor, patch, extra = match.groups() + else: + raise UserError( + f"`{val}` is neither an existing file nor a valid ..[-] Godot version format" + ) + env["godot_binary_download_version"] = (major, minor, patch, extra or "stable") + # `godot_binary` is set to None to indicate it should be downloaded + return None + + +vars = Variables("custom.py") +vars.Add( + EnumVariable( + "platform", + "Target platform", + "", + allowed_values=("x11-64", "x11-32", "windows-64", "windows-32", "osx-64"), + ) +) +vars.Add("pytest_args", "Pytest arguments passed to tests functions", "") +vars.Add( + "godot_args", "Additional arguments passed to godot binary when running tests&examples", "" +) +vars.Add("release_suffix", "Suffix to add to the release archive", extract_version()) +vars.Add( + "godot_binary", + "Path to Godot binary or version of Godot to use", + default="3.2.2", + converter=godot_binary_converter, +) +vars.Add("godot_headers", "Path to Godot GDnative headers", "") +vars.Add("debugger", "Run test with a debugger", "") +vars.Add(BoolVariable("debug", "Compile with debug symbols", False)) +vars.Add(BoolVariable("headless", "Run tests in headless mode", False)) +vars.Add(BoolVariable("compressed_stdlib", "Compress Python std lib as a zip to save space", True)) +vars.Add( + BoolVariable( + "bindings_generate_sample", + "Generate only a subset of the bindings (faster build time)", + False, + ) +) +vars.Add("CC", "C compiler") +vars.Add("CFLAGS", "Custom flags for the C compiler") +vars.Add("LINK", "linker") +vars.Add("LINKFLAGS", "Custom flags for the linker") +vars.Add("CPYTHON_CFLAGS", "Custom flags for the C compiler used to compile CPython") +vars.Add("CPYTHON_LINKFLAGS", "Custom flags for the linker used to compile CPython") +vars.Add("OPENSSL_PATH", "Path to the root of openssl installation to link CPython against") +vars.Add( + "MSVC_VERSION", + "MSVC version to use (Windows only) -- version num X.Y. 
Default: highest installed.", +) +vars.Add( + BoolVariable( + "MSVC_USE_SCRIPT", + ( + "Set to True to let SCons find compiler (with MSVC_VERSION and TARGET_ARCH), " + "False to use cmd.exe env (MSVC_VERSION and TARGET_ARCH will be ignored), " + "or vcvarsXY.bat script name to use." + ), + True, + ) +) + + +# Set Visual Studio arch according to platform target +vanilla_vars_update = vars.Update + + +def _patched_vars_update(env, args=None): + vanilla_vars_update(env, args=None) + if env["platform"] == "windows-64": + env["TARGET_ARCH"] = "x86_64" + elif env["platform"] == "windows-32": + env["TARGET_ARCH"] = "x86" + + +vars.Update = _patched_vars_update + + +env = Environment( + variables=vars, + tools=["default", "cython", "symlink", "virtual_target", "download"], + ENV=os.environ, + # ENV = {'PATH' : os.environ['PATH']}, +) + + +# Detect compiler +env["CC_IS_MSVC"] = env.get("CC") in ("cl", "cl.exe") +env["CC_IS_GCC"] = "gcc" in env.get("CC") +env["CC_IS_CLANG"] = "clang" in env.get("CC") + + +Help(vars.GenerateHelpText(env)) +# if env["HOST_OS"] == "win32": +# # Fix ImportVirtualenv raising error if PATH make reference to other drives +# from SCons.Platform import virtualenv +# vanilla_IsInVirtualenv = virtualenv.IsInVirtualenv +# def patched_IsInVirtualenv(path): +# try: +# return vanilla_IsInVirtualenv(path) +# except ValueError: +# return False +# virtualenv.IsInVirtualenv = patched_IsInVirtualenv +# ImportVirtualenv(env) + + +if env["godot_headers"]: + env["godot_headers"] = Dir(env["godot_headers"]) +else: + env["godot_headers"] = Dir("godot_headers") +env.AppendUnique(CPPPATH=["$godot_headers"]) +# TODO: not sure why, but CPPPATH scan result for cython modules change between +# first and subsequent runs of scons (module is considered to no longer depend +# on godot_headers on subsequent run, so the build redone) +SetOption("implicit_cache", 1) + + +### Save my eyes plz ### + +env["ENV"]["TERM"] = os.environ.get("TERM", "") +if env["CC_IS_CLANG"]: + env.Append(CCFLAGS=["-fcolor-diagnostics"]) +if env["CC_IS_GCC"]: + env.Append(CCFLAGS=["-fdiagnostics-color=always"]) + + +### Default compile flags ### + +if not env["CC_IS_MSVC"]: + if env["debug"]: + env.Append(CFLAGS=["-g", "-ggdb"]) + env.Append(LINKFLAGS=["-g", "-ggdb"]) + else: + env.Append(CFLAGS=["-O2"]) +else: + if env["debug"]: + env.Append(CFLAGS=["/DEBUG:FULL"]) + env.Append(LINKFLAGS=["/DEBUG:FULL"]) + else: + env.Append(CFLAGS=["/WX", "/W2"]) + + +env["DIST_ROOT"] = Dir(f"build/dist") +env["DIST_PLATFORM"] = Dir(f"{env['DIST_ROOT']}/addons/pythonscript/{env['platform']}") +VariantDir(f"build/{env['platform']}/platforms", f"platforms") +VariantDir(f"build/{env['platform']}/pythonscript", "pythonscript") + + +### Load sub scons scripts ### + + +Export(env=env) +SConscript( + [ + f"build/{env['platform']}/platforms/SConscript", # Must be kept first + f"build/{env['platform']}/pythonscript/SConscript", + "tests/SConscript", + "examples/SConscript", + ] +) + + +### Define default target ### + + +env.Default(env["DIST_ROOT"]) +env.Alias("build", env["DIST_ROOT"]) + + +### Static files added to dist ### + + +env.VanillaInstallAs( + target="$DIST_ROOT/pythonscript.gdnlib", source="#/misc/release_pythonscript.gdnlib" +) +env.VanillaInstallAs( + target="$DIST_ROOT/addons/pythonscript/LICENSE.txt", source="#/misc/release_LICENSE.txt" +) +env.Command(target="$DIST_ROOT/addons/pythonscript/.gdignore", source=None, action=Touch("$TARGET")) +# SCons install on directory doesn't check for file changes +for item in 
env.Glob("addons/pythonscript_repl/*"): + env.VanillaInstall(target="$DIST_ROOT/addons/pythonscript_repl", source=item) + + +### Release archive ### + + +def generate_release(target, source, env): + for suffix, format in [(".zip", "zip"), (".tar.bz2", "bztar")]: + if target[0].name.endswith(suffix): + base_name = target[0].abspath[: -len(suffix)] + break + shutil.make_archive(base_name, format, root_dir=source[0].abspath) + + +# Zip format doesn't support symlinks that are needed for Linux&macOS +if env["platform"].startswith("windows"): + release_target = "build/godot-python-${release_suffix}-${platform}.zip" +else: + release_target = "build/godot-python-${release_suffix}-${platform}.tar.bz2" +release = env.Command(release_target, env["DIST_ROOT"], generate_release) +env.Alias("release", release) +env.AlwaysBuild("release") diff --git a/addons/pythonscript_repl/hack_regular.tres b/addons/pythonscript_repl/hack_regular.tres new file mode 100644 index 0000000..19f0738 --- /dev/null +++ b/addons/pythonscript_repl/hack_regular.tres @@ -0,0 +1,7 @@ +[gd_resource type="DynamicFont" load_steps=2 format=2] + +[ext_resource path="res://addons/pythonscript_repl/hack_regular.ttf" type="DynamicFontData" id=1] + +[resource] +size = 14 +font_data = ExtResource( 1 ) diff --git a/addons/pythonscript_repl/hack_regular.ttf b/addons/pythonscript_repl/hack_regular.ttf new file mode 100644 index 0000000..92a90cb Binary files /dev/null and b/addons/pythonscript_repl/hack_regular.ttf differ diff --git a/addons/pythonscript_repl/input_box.py b/addons/pythonscript_repl/input_box.py new file mode 100644 index 0000000..0747427 --- /dev/null +++ b/addons/pythonscript_repl/input_box.py @@ -0,0 +1,16 @@ +from godot import exposed, InputEventKey, KEY_UP, KEY_DOWN, LineEdit + + +@exposed(tool=True) +class InputBox(LineEdit): + def _enter_tree(self): + self.repl_node = self.get_parent().get_parent() + + def _gui_input(self, event): + if isinstance(event, InputEventKey) and event.pressed: + if event.scancode == KEY_UP: + self.repl_node.up_pressed() + self.accept_event() + elif event.scancode == KEY_DOWN: + self.repl_node.down_pressed() + self.accept_event() diff --git a/addons/pythonscript_repl/plugin.cfg b/addons/pythonscript_repl/plugin.cfg new file mode 100644 index 0000000..757944d --- /dev/null +++ b/addons/pythonscript_repl/plugin.cfg @@ -0,0 +1,7 @@ +[plugin] + +name="pythonscript_repl" +description="" +author="godot-python" +version="0.1" +script="plugin.py" diff --git a/addons/pythonscript_repl/plugin.py b/addons/pythonscript_repl/plugin.py new file mode 100644 index 0000000..6d73a3c --- /dev/null +++ b/addons/pythonscript_repl/plugin.py @@ -0,0 +1,19 @@ +from godot import exposed, EditorPlugin, ProjectSettings, ResourceLoader + + +BASE_RES = str(ProjectSettings.localize_path(__file__)).rsplit("/", 1)[0] +PYTHON_REPL_RES = ResourceLoader.load(f"{BASE_RES}/python_repl.tscn") + + +@exposed(tool=True) +class plugin(EditorPlugin): + def _enter_tree(self): + # Initialization of the plugin goes here + self.repl = PYTHON_REPL_RES.instance() + self.repl_button = self.add_control_to_bottom_panel(self.repl, "Python REPL") + + def _exit_tree(self): + # Clean-up of the plugin goes here + self.remove_control_from_bottom_panel(self.repl) + self.repl.queue_free() + self.repl = None diff --git a/addons/pythonscript_repl/python_repl.py b/addons/pythonscript_repl/python_repl.py new file mode 100644 index 0000000..ae13e03 --- /dev/null +++ b/addons/pythonscript_repl/python_repl.py @@ -0,0 +1,241 @@ +import sys +import ctypes +from 
code import InteractiveConsole
+from collections import deque
+from threading import Thread, Lock, Event
+from queue import SimpleQueue
+
+from _godot import StdoutStderrCaptureToGodot, StdinCapture
+from godot import exposed, export, ResourceLoader, VBoxContainer
+
+from .plugin import BASE_RES
+
+
+FONT = ResourceLoader.load(f"{BASE_RES}/hack_regular.tres")
+
+
+class StdoutStderrCaptureToBufferAndPassthrough(StdoutStderrCaptureToGodot):
+    def __init__(self):
+        super().__init__()
+        self._buffer = ""
+
+    def _write(self, buff):
+        # _write is always executed with _lock taken
+        super()._write(buff)
+        self._buffer += buff
+
+    def read_buffer(self):
+        with self._lock:
+            buffer = self._buffer
+            self._buffer = ""
+            return buffer
+
+
+class StdinCaptureToBuffer(StdinCapture):
+    def __init__(self):
+        super().__init__()
+        self._lock = Lock()
+        self._has_data = Event()
+        self._buffer = ""
+        self._closed = False
+
+    def _read(self, size=-1):
+        if self._closed:
+            raise EOFError
+
+        if size < 0 or size > len(self._buffer):
+            data = self._buffer
+            self._buffer = ""
+        else:
+            data = self._buffer[:size]
+            self._buffer = self._buffer[size:]
+
+        if not self._buffer:
+            self._has_data.clear()
+
+        return data
+
+    def read(self, size=-1):
+        while True:
+            self._has_data.wait()
+            with self._lock:
+                # Check if a concurrent read has already processed the data
+                if not self._has_data.is_set():
+                    continue
+
+                return self._read(size)
+
+    def readline(self, size=-1):
+        while True:
+            self._has_data.wait()
+            with self._lock:
+                # Check if a concurrent read has already processed the data
+                if not self._has_data.is_set():
+                    continue
+
+                if size < 0:
+                    size = len(self._buffer)
+                try:
+                    size = min(size, self._buffer.index("\n") + 1)
+                except ValueError:
+                    # \n not in self._buffer
+                    pass
+                return self._read(size)
+
+    def write(self, buffer):
+        if not buffer:
+            return
+        with self._lock:
+            self._has_data.set()
+            self._buffer += buffer
+
+    def close(self):
+        self._closed = True
+        # Ensure read is woken up so it can raise EOFError
+        self._has_data.set()
+
+
+class InteractiveConsoleInREPL(InteractiveConsole):
+    def __init__(self, repl_write, repl_read):
+        super().__init__(locals={"__name__": "__console__", "__doc__": None})
+        # Default write/raw_input rely on stderr/stdin, overwrite them
+        # to only talk with the REPL
+        self.write = repl_write
+        # Note overwriting `InteractiveConsole.raw_input` doesn't prevent
+        # user code from directly calling `input` (for instance when typing
+        # `help()`, which makes use of a pager).
+        self.repl_read = repl_read
+        self.thread = None
+
+    def raw_input(self, prompt):
+        data = self.repl_read()
+        # Print the command line in the output box; this is needed given
+        # we have a separate input box that is cleared each time
+        # the user hits enter (unlike a regular terminal where input and
+        # output are mixed together and enter only jumps to the next line)
+        self.write(f"{prompt}{data}")
+        return data
+
+    def start_in_thread(self):
+        assert not self.thread
+        self.thread = Thread(target=self.interact)
+        self.thread.start()
+
+    def send_keyboard_interrupt(self):
+        # Inject an exception into the thread running the interpreter.
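+        # (`PyThreadState_SetAsyncExc` is a raw CPython C-API call: it takes a
+        # thread id plus an exception class and schedules that exception to be
+        # raised asynchronously in the target thread.)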
+ # This is not 100% perfect given the thread checks for exception only + # when it is actually running Python code so we cannot interrupt native + # code (for instance calling `time.sleep` cannot be interrupted) + ctypes.pythonapi.PyThreadState_SetAsyncExc( + self.thread.ident, ctypes.py_object(KeyboardInterrupt) + ) + + +@exposed(tool=True) +class PythonREPL(VBoxContainer): + __STREAMS_CAPTURE_INSTALLED = False + + def _enter_tree(self): + self.__plugin_instantiated = False + self.history = [] + self.selected_history = 0 + self.output_box = self.get_node("OutputBox") + self.output_box.add_font_override("normal_font", FONT) + self.output_box.add_font_override("mono_font", FONT) + self.run_button = self.get_node("FooterContainer/RunButton") + self.run_button.connect("pressed", self, "execute") + self.clear_button = self.get_node("HeaderContainer/ClearButton") + self.clear_button.connect("pressed", self, "clear") + self.interrupt_button = self.get_node("HeaderContainer/KeyboardInterruptButton") + self.interrupt_button.connect("pressed", self, "send_keyboard_interrupt") + self.input_box = self.get_node("FooterContainer/InputBox") + self.input_box.connect("text_entered", self, "execute") + + # Hijack stdout/stderr/stdin streams + self.stdout_stderr_capture = StdoutStderrCaptureToBufferAndPassthrough() + self.stdin_capture = StdinCaptureToBuffer() + # Only overwrite streams if the scene has been created by the + # pythonscript_repl plugin. This avoid concurrent streams patching + # when the scene is opened from the editor (typically when we want + # to edit the repl GUI) + # TODO: find a way to differentiate plugin instantiated from other + # instantiations instead of relying on "first instantiated is plugin" + if not PythonREPL.__STREAMS_CAPTURE_INSTALLED: + PythonREPL.__STREAMS_CAPTURE_INSTALLED = True + self.__plugin_instantiated = True + self.stdout_stderr_capture.install() + self.stdin_capture.install() + + # Finally start the Python interpreter, it must be running it in own + # thread given it does blocking reads on stdin + self.interpreter = InteractiveConsoleInREPL( + repl_write=self.write, repl_read=self.stdin_capture.read + ) + self.interpreter.start_in_thread() + + def _exit_tree(self): + # Closing our custom stdin stream should make `InteractiveConsole.interact` + # return, hence finishing the interpreter thread + self.stdin_capture.close() + self.interpreter.thread.join() + + # Our custom stream capture must be removed before this node is destroyed, + # otherwise segfault will occur on next print ! 
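+        # (Presumably the installed captures forward writes to Godot objects tied
+        # to this node, which no longer exist once the node is destroyed.)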
+ if self.__plugin_instantiated: + PythonREPL.__STREAMS_CAPTURE_INSTALLED = False + self.stdout_stderr_capture.remove() + self.stdin_capture.remove() + + def write(self, buffer): + for line in buffer.splitlines(): + self.output_box.push_mono() + self.output_box.add_text(line) + self.output_box.newline() + self.output_box.pop() + + def _process(self, delta): + if not hasattr(self, "stdout_stderr_capture"): + return + # Display new lines + self.write(self.stdout_stderr_capture.read_buffer()) + + def remove_last_line(self): + self.output_box.remove_line(self.output_box.get_line_count() - 2) + self.output_box.scroll_to_line(self.output_box.get_line_count() - 1) + + def execute(self, *args, **kwargs): + string = str(self.input_box.get_text()) + # Avoid adding multiple repeated entries to the command history + if not (len(self.history) > 0 and self.history[-1] == string): + self.history.append(string) + self.selected_history = 0 + self.input_box.clear() + # Send the line into stdin and let the interpret do the rest + self.stdin_capture.write(string + "\n") + + def up_pressed(self): + if len(self.history) >= abs(self.selected_history - 1): + self.selected_history -= 1 + self.input_box.clear() + val = str(self.history[self.selected_history]) + self.input_box.set_text(val) + self.input_box.set_cursor_position(len(val)) + self.input_box.grab_focus() + + def down_pressed(self): + if self.selected_history + 1 == 0: + self.selected_history += 1 + self.input_box.clear() + elif self.selected_history + 1 < 0: + self.selected_history += 1 + self.input_box.clear() + val = str(self.history[self.selected_history]) + self.input_box.set_text(val) + self.input_box.set_cursor_position(len(val)) + self.input_box.grab_focus() + + def clear(self): + self.output_box.clear() + + def send_keyboard_interrupt(self): + self.interpreter.send_keyboard_interrupt() diff --git a/addons/pythonscript_repl/python_repl.tscn b/addons/pythonscript_repl/python_repl.tscn new file mode 100644 index 0000000..9e442fa --- /dev/null +++ b/addons/pythonscript_repl/python_repl.tscn @@ -0,0 +1,66 @@ +[gd_scene load_steps=4 format=2] + +[ext_resource path="res://addons/pythonscript_repl/python_repl.py" type="Script" id=1] +[ext_resource path="res://addons/pythonscript_repl/hack_regular.tres" type="DynamicFont" id=2] +[ext_resource path="res://addons/pythonscript_repl/input_box.py" type="Script" id=3] + +[node name="Python REPL" type="VBoxContainer"] +margin_right = 580.0 +margin_bottom = 234.0 +script = ExtResource( 1 ) +__meta__ = { +"_edit_use_anchors_": false +} + +[node name="HeaderContainer" type="HBoxContainer" parent="."] +margin_right = 580.0 +margin_bottom = 20.0 + +[node name="Label" type="Label" parent="HeaderContainer"] +margin_top = 3.0 +margin_right = 459.0 +margin_bottom = 17.0 +size_flags_horizontal = 3 +text = "Python REPL:" + +[node name="KeyboardInterruptButton" type="Button" parent="HeaderContainer"] +margin_left = 463.0 +margin_right = 532.0 +margin_bottom = 20.0 +text = "Interrupt" + +[node name="ClearButton" type="Button" parent="HeaderContainer"] +margin_left = 536.0 +margin_right = 580.0 +margin_bottom = 20.0 +text = "Clear" + +[node name="OutputBox" type="RichTextLabel" parent="."] +margin_top = 24.0 +margin_right = 580.0 +margin_bottom = 206.0 +rect_min_size = Vector2( 0, 180 ) +focus_mode = 2 +size_flags_horizontal = 3 +size_flags_vertical = 3 +custom_fonts/mono_font = ExtResource( 2 ) +custom_fonts/normal_font = ExtResource( 2 ) +scroll_following = true +selection_enabled = true + +[node name="FooterContainer" 
type="HBoxContainer" parent="."] +margin_top = 210.0 +margin_right = 580.0 +margin_bottom = 234.0 + +[node name="InputBox" type="LineEdit" parent="FooterContainer"] +margin_right = 540.0 +margin_bottom = 24.0 +size_flags_horizontal = 3 +script = ExtResource( 3 ) + +[node name="RunButton" type="Button" parent="FooterContainer"] +margin_left = 544.0 +margin_right = 580.0 +margin_bottom = 24.0 +text = "Run" diff --git a/docs/io.rst b/docs/io.rst new file mode 100644 index 0000000..ed75210 --- /dev/null +++ b/docs/io.rst @@ -0,0 +1,20 @@ +IO model +======== + +Python is supposed to interact with outside world (i.e. everything +outside the interpretor) through numerous ways: + +- ``open`` and ``input`` builtin functions. +- ``os`` module (e.g. ``os.open`` function) +- ``stdout``, ``stderr`` and ``stdin`` files descriptors +- ``__import__`` & co +- ``ctypes`` & micropython's ``ffi`` libraries +- ... + +However those functions are no longer relevant when python is embedded +into Godot. They can even be dangerous when opening a Godot application to +modding given a 3rd party python code has suddently full access to the computer ! + +Hence, those functions needs to be adapted to Godot: +- ``ctype``, ``ffi`` and ``open`` disabled +- ``stdout``, ``stderr`` and ``stdin`` redirected to Godot editor's console diff --git a/docs/memory.rst b/docs/memory.rst new file mode 100644 index 0000000..d2cfac1 --- /dev/null +++ b/docs/memory.rst @@ -0,0 +1,74 @@ +Object conversion model +======================= + + +Base object types +----------------- + +Godot Variant +- standalone: bool, int, real +- pointer to builtin type (e.g. ``Matrix32``, ``AABB``, etc.) +- pointer to generic ``Object`` + +Python mp_obj_t +- standalone: bool, small int, real (depend of implementation), qstr +- pointer to generic struct (must have ``mp_obj_base_t base`` as first attribute) + +.. note: + Variant and mp_obj_t instances are only used by copy, no memory management + needed on themselves. + +Naming conventions: +- GST: Godot STandalone +- GPB: Godot Pointer Builtin +- GPO: Godot Pointer Object +- PST: Python STandalone +- PPB: Python Pointer Binding (proxy to Godot data) +- PPE: Python Pointer Exposed (defined with `@exposed` decorator) +- PPI: Python Pointer Internal + + +Variant memory management +------------------------- + +For GPO, Variant contains a raw pointer on the Object and (not necessary) a +reference on the Object. +- If a reference is present, ref counting is at work. +- If not, user need to do manual memory management by calling ``free`` method. + +For GPB, there is 3 possibilities: +- No allocated memory for data (e.g. ``Rect2``), so nothing is done. +- Data is stored in a memory pool (e.g. ``Dictionary``), so data's destructor + is called which make use of ref counting to know what to do. +- Classic C++ allocation for data (e.g. ``Matrix3``) so regular ``delete`` + is called on it. + + +Conversions implicating a standalone +------------------------------------ + +Standalone doesn't need garbage collection and doesn't hold reference on +other objects. Hence conversion is trivial. + + +Conversion Godot -> Python +-------------------------- + +Each GPB has a corresponding PPB, acting like a proxy from within the +Python interpreter. + +GPO binding is done dynamically with the ``DynamicBinder`` using Godot +introspection (i.e. ``ObjectTypeDB``). + +It is possible in the future that to create static proxy for core GPO and rely +on dynamic method as a fall-back for unknown classes (i.g. added by 3rd party). 
+ + +Conversion Python -> Godot +-------------------------- + +PPB -> GPB described earlier. + +PPI objects cannot be converted back to Godot. + +PPE instance are exposed as ``PyInstance`` (class exposed as ``PyScript``). diff --git a/examples/SConscript b/examples/SConscript new file mode 100644 index 0000000..948f8b9 --- /dev/null +++ b/examples/SConscript @@ -0,0 +1,10 @@ +Import("env") + +for test in ["pong", "pong_multiplayer"]: + dist_symlink = env.Symlink(f"{test}/addons", "$DIST_ROOT/addons") + target = env.Command( + test, ["$godot_binary", dist_symlink], "${SOURCE.abspath} ${godot_args} --path ${TARGET}" + ) + env.AlwaysBuild(target) + +env.Alias("example", "pong") diff --git a/examples/pong/ball.gd b/examples/pong/ball.gd new file mode 100644 index 0000000..38ccb02 --- /dev/null +++ b/examples/pong/ball.gd @@ -0,0 +1,60 @@ + +extends Area2D + +const DEFAULT_SPEED=220 + +var direction = Vector2(1,0) +var ball_speed = DEFAULT_SPEED +var stopped=false + + + +onready var screen_size = get_viewport_rect().size + +func _reset_ball(for_left): + + position = screen_size / 2 + if (for_left): + direction = Vector2(-1,0) + else: + direction = Vector2( 1,0) + + ball_speed = DEFAULT_SPEED + +func stop(): + stopped=true + +func _process(delta): + + # ball will move normally for both players + # even if it's sightly out of sync between them + # so each player sees the motion as smooth and not jerky + + if (not stopped): + translate( direction * ball_speed * delta ) + + # check screen bounds to make ball bounce + + if ((position.y < 0 and direction.y < 0) or (position.y > screen_size.y and direction.y > 0)): + direction.y = -direction.y + + if (position.x < 0 or position.x > screen_size.x): + var for_left = position.x > 0 + get_parent().update_score(for_left) + _reset_ball(for_left) + +sync func bounce(left,random): + + #using sync because both players can make it bounce + if (left): + direction.x = abs(direction.x) + else: + direction.x = -abs(direction.x) + + ball_speed *= 1.1 + direction.y = random*2.0 - 1 + direction = direction.normalized() + +func _ready(): + set_process(true) + diff --git a/examples/pong/ball.png b/examples/pong/ball.png new file mode 100644 index 0000000..c0f6da4 Binary files /dev/null and b/examples/pong/ball.png differ diff --git a/examples/pong/ball.png.import b/examples/pong/ball.png.import new file mode 100644 index 0000000..b06d44c --- /dev/null +++ b/examples/pong/ball.png.import @@ -0,0 +1,34 @@ +[remap] + +importer="texture" +type="StreamTexture" +path="res://.import/ball.png-9a4ca347acb7532f6ae347744a6b04f7.stex" +metadata={ +"vram_texture": false +} + +[deps] + +source_file="res://ball.png" +dest_files=[ "res://.import/ball.png-9a4ca347acb7532f6ae347744a6b04f7.stex" ] + +[params] + +compress/mode=0 +compress/lossy_quality=0.7 +compress/hdr_mode=0 +compress/bptc_ldr=0 +compress/normal_map=0 +flags/repeat=0 +flags/filter=true +flags/mipmaps=false +flags/anisotropic=false +flags/srgb=2 +process/fix_alpha_border=true +process/premult_alpha=true +process/HDR_as_SRGB=false +process/invert_color=false +stream=false +size_limit=0 +detect_3d=true +svg/scale=1.0 diff --git a/examples/pong/ball.py b/examples/pong/ball.py new file mode 100644 index 0000000..b400ae3 --- /dev/null +++ b/examples/pong/ball.py @@ -0,0 +1,53 @@ +from godot import exposed, Vector2, Area2D + + +DEFAULT_SPEED = 220 + + +@exposed +class Ball(Area2D): + def _reset_ball(self, for_left): + self.position = self.screen_size / 2 + if for_left: + self.direction = Vector2(-1, 0) + else: + self.direction 
= Vector2(1, 0) + self.ball_speed = DEFAULT_SPEED + + def stop(self): + self.stopped = True + + def _process(self, delta): + # ball will move normally for both players + # even if it's sightly out of sync between them + # so each player sees the motion as smooth and not jerky + if not self.stopped: + self.translate(self.direction * self.ball_speed * delta) + + # check screen bounds to make ball bounce + if (self.position.y < 0 and self.direction.y < 0) or ( + self.position.y > self.screen_size.y and self.direction.y > 0 + ): + self.direction.y = -self.direction.y + + if self.position.x < 0 or self.position.x > self.screen_size.x: + for_left = self.position.x > 0 + self.get_parent().update_score(for_left) + self._reset_ball(for_left) + + def bounce(self, left, random): + # using sync because both players can make it bounce + if left: + self.direction.x = abs(self.direction.x) + else: + self.direction.x = -abs(self.direction.x) + self.ball_speed *= 1.1 + self.direction.y = random * 2.0 - 1 + self.direction = self.direction.normalized() + + def _ready(self): + self.direction = Vector2(1, 0) + self.ball_speed = DEFAULT_SPEED + self.stopped = False + self.screen_size = self.get_viewport_rect().size + self.set_process(True) # REMOVE ME diff --git a/examples/pong/ball.tscn b/examples/pong/ball.tscn new file mode 100644 index 0000000..1248022 --- /dev/null +++ b/examples/pong/ball.tscn @@ -0,0 +1,33 @@ +[gd_scene load_steps=4 format=2] + +[ext_resource path="res://ball.py" type="Script" id=1] +[ext_resource path="res://ball.png" type="Texture" id=2] + +[sub_resource type="CircleShape2D" id=1] + +custom_solver_bias = 0.0 +radius = 5.11969 + +[node name="ball" type="Area2D"] + +input_pickable = true +shapes/0/shape = SubResource( 1 ) +shapes/0/transform = Transform2D( 1, 0, 0, 1, 0, 0 ) +shapes/0/trigger = false +gravity_vec = Vector2( 0, 1 ) +gravity = 98.0 +linear_damp = 0.1 +angular_damp = 1.0 +script = ExtResource( 1 ) + +[node name="sprite" type="Sprite" parent="."] + +texture = ExtResource( 2 ) + +[node name="shape" type="CollisionShape2D" parent="."] + +shape = SubResource( 1 ) +trigger = false +_update_shape_index = 0 + + diff --git a/examples/pong/icon.png b/examples/pong/icon.png new file mode 100644 index 0000000..eab34de Binary files /dev/null and b/examples/pong/icon.png differ diff --git a/examples/pong/icon.png.import b/examples/pong/icon.png.import new file mode 100644 index 0000000..2125c6b --- /dev/null +++ b/examples/pong/icon.png.import @@ -0,0 +1,34 @@ +[remap] + +importer="texture" +type="StreamTexture" +path="res://.import/icon.png-487276ed1e3a0c39cad0279d744ee560.stex" +metadata={ +"vram_texture": false +} + +[deps] + +source_file="res://icon.png" +dest_files=[ "res://.import/icon.png-487276ed1e3a0c39cad0279d744ee560.stex" ] + +[params] + +compress/mode=0 +compress/lossy_quality=0.7 +compress/hdr_mode=0 +compress/bptc_ldr=0 +compress/normal_map=0 +flags/repeat=0 +flags/filter=true +flags/mipmaps=false +flags/anisotropic=false +flags/srgb=2 +process/fix_alpha_border=true +process/premult_alpha=true +process/HDR_as_SRGB=false +process/invert_color=false +stream=false +size_limit=0 +detect_3d=true +svg/scale=1.0 diff --git a/examples/pong/paddle.gd b/examples/pong/paddle.gd new file mode 100644 index 0000000..c44e72a --- /dev/null +++ b/examples/pong/paddle.gd @@ -0,0 +1,36 @@ +extends Area2D + +export var left=false + +const MOTION_SPEED=150 + +var motion = 0 +var can_move = true +var action_prefix = '' + +onready var screen_size = get_viewport_rect().size + +func 
_process(delta): + + #is the master of the paddle + motion = 0 + if (Input.is_action_pressed(action_prefix + "_move_up")): + motion -= 1 + elif (Input.is_action_pressed(action_prefix + "_move_down")): + motion += 1 + + motion*=MOTION_SPEED + if can_move: + translate( Vector2(0,motion*delta) ) + + # set screen limits + if (position.y < 0 ): + position.y = 0 + elif (position.y > screen_size.y): + position.y = screen_size.y + +func _ready(): + set_process(true) + +func _on_paddle_area_enter( area ): + area.bounce(left, randf()) #random for new direction generated on each peer diff --git a/examples/pong/paddle.png b/examples/pong/paddle.png new file mode 100644 index 0000000..e23491e Binary files /dev/null and b/examples/pong/paddle.png differ diff --git a/examples/pong/paddle.png.import b/examples/pong/paddle.png.import new file mode 100644 index 0000000..8e50aa3 --- /dev/null +++ b/examples/pong/paddle.png.import @@ -0,0 +1,34 @@ +[remap] + +importer="texture" +type="StreamTexture" +path="res://.import/paddle.png-0e798fb0912613386507c9904d5cc01a.stex" +metadata={ +"vram_texture": false +} + +[deps] + +source_file="res://paddle.png" +dest_files=[ "res://.import/paddle.png-0e798fb0912613386507c9904d5cc01a.stex" ] + +[params] + +compress/mode=0 +compress/lossy_quality=0.7 +compress/hdr_mode=0 +compress/bptc_ldr=0 +compress/normal_map=0 +flags/repeat=0 +flags/filter=true +flags/mipmaps=false +flags/anisotropic=false +flags/srgb=2 +process/fix_alpha_border=true +process/premult_alpha=true +process/HDR_as_SRGB=false +process/invert_color=false +stream=false +size_limit=0 +detect_3d=true +svg/scale=1.0 diff --git a/examples/pong/paddle.py b/examples/pong/paddle.py new file mode 100644 index 0000000..2cbdfcf --- /dev/null +++ b/examples/pong/paddle.py @@ -0,0 +1,41 @@ +from random import random + +from godot import exposed, export, Vector2, GDString, Area2D, Input + + +MOTION_SPEED = 150 + + +@exposed +class Paddle(Area2D): + + left = export(bool, default=False) + action_prefix = export(str, default="") + can_move = export(bool, default=False) + + def _ready(self): + self.motion = 0 + self.can_move = True + self.screen_size = self.get_viewport_rect().size + self.set_process(True) + + def _process(self, delta): + motion = 0 + if Input.is_action_pressed(self.action_prefix + GDString("_move_up")): + motion -= 1 + elif Input.is_action_pressed(self.action_prefix + GDString("_move_down")): + motion += 1 + + motion *= MOTION_SPEED + if self.can_move: + self.translate(Vector2(0, motion * delta)) + + # set screen limits + if self.position.y < 0: + self.position.y = 0 + elif self.position.y > self.screen_size.y: + self.position.y = self.screen_size.y + + def _on_paddle_area_enter(self, area): + # random for new direction generated on each peer + area.bounce(self.left, random()) diff --git a/examples/pong/paddle.tscn b/examples/pong/paddle.tscn new file mode 100644 index 0000000..8d63dea --- /dev/null +++ b/examples/pong/paddle.tscn @@ -0,0 +1,37 @@ +[gd_scene load_steps=4 format=2] + +[ext_resource path="res://paddle.py" type="Script" id=1] +[ext_resource path="res://paddle.png" type="Texture" id=2] + +[sub_resource type="CapsuleShape2D" id=1] + +custom_solver_bias = 0.0 +radius = 4.78568 +height = 23.6064 + +[node name="paddle" type="Area2D"] + +input_pickable = true +shapes/0/shape = SubResource( 1 ) +shapes/0/transform = Transform2D( 1, 0, 0, 1, 0, 0 ) +shapes/0/trigger = false +gravity_vec = Vector2( 0, 1 ) +gravity = 98.0 +linear_damp = 0.1 +angular_damp = 1.0 +script = ExtResource( 1 ) +left = false + 
+[node name="sprite" type="Sprite" parent="."] + +texture = ExtResource( 2 ) + +[node name="shape" type="CollisionShape2D" parent="."] + +shape = SubResource( 1 ) +trigger = false +_update_shape_index = 0 + +[connection signal="area_entered" from="." to="." method="_on_paddle_area_enter"] + + diff --git a/examples/pong/pong.gd b/examples/pong/pong.gd new file mode 100644 index 0000000..38e639a --- /dev/null +++ b/examples/pong/pong.gd @@ -0,0 +1,42 @@ + +extends Node2D + +const SCORE_TO_WIN = 2 + +var score_left = 0 +var score_right = 0 + +signal game_finished() + +func update_score(add_to_left): + if (add_to_left): + + score_left+=1 + get_node("score_left").set_text( str(score_left) ) + else: + + score_right+=1 + get_node("score_right").set_text( str(score_right) ) + + var game_ended = false + + if (score_left==SCORE_TO_WIN): + get_node("winner_left").show() + game_ended=true + elif (score_right==SCORE_TO_WIN): + get_node("winner_right").show() + game_ended=true + + if (game_ended): + get_node("ball").stop() + get_node("player1").can_move=false + get_node("player2").can_move=false + +func _ready(): + + #let each paddle know which one is left, too + get_node("player1").left=true + get_node("player2").left=false + get_node("player1").action_prefix = 'p1' + get_node("player2").action_prefix = 'p2' + diff --git a/examples/pong/pong.py b/examples/pong/pong.py new file mode 100644 index 0000000..0bfb05b --- /dev/null +++ b/examples/pong/pong.py @@ -0,0 +1,41 @@ +from godot import exposed, signal, export, Node2D + + +SCORE_TO_WIN = 2 + + +@exposed +class Pong(Node2D): + game_finished = signal() + + def _ready(self): + self.score_left = 0 + self.score_right = 0 + # let each paddle know which one is left, too + p1 = self.get_node("player1") + p2 = self.get_node("player2") + p1.left = True + p2.left = False + p1.action_prefix = "p1" + p2.action_prefix = "p2" + + def update_score(self, add_to_left): + if add_to_left: + self.score_left += 1 + self.get_node("score_left").set_text(str(self.score_left)) + else: + self.score_right += 1 + self.get_node("score_right").set_text(str(self.score_right)) + + game_ended = False + if self.score_left == SCORE_TO_WIN: + self.get_node("winner_left").show() + game_ended = True + elif self.score_right == SCORE_TO_WIN: + self.get_node("winner_right").show() + game_ended = True + + if game_ended: + self.get_node("ball").stop() + self.get_node("player1").can_move = False + self.get_node("player2").can_move = False diff --git a/examples/pong/pong.tscn b/examples/pong/pong.tscn new file mode 100644 index 0000000..9736122 --- /dev/null +++ b/examples/pong/pong.tscn @@ -0,0 +1,69 @@ +[gd_scene load_steps=5 format=2] + +[ext_resource path="res://pong.py" type="Script" id=1] +[ext_resource path="res://separator.png" type="Texture" id=2] +[ext_resource path="res://paddle.tscn" type="PackedScene" id=3] +[ext_resource path="res://ball.tscn" type="PackedScene" id=5] + +[node name="pong" type="Node2D"] +script = ExtResource( 1 ) + +[node name="separator" type="Sprite" parent="."] +position = Vector2( 512.309, 298.233 ) +scale = Vector2( 1.04883, 1.4884 ) +texture = ExtResource( 2 ) + +[node name="player1" parent="." instance=ExtResource( 3 )] +position = Vector2( 19.9447, 267.036 ) + +[node name="sprite" parent="player1" index="0"] +modulate = Color( 1, 0, 0.960938, 1 ) + +[node name="player2" parent="." 
instance=ExtResource( 3 )] +position = Vector2( 995.015, 244.876 ) + +[node name="sprite" parent="player2" index="0"] +modulate = Color( 0, 0.929688, 1, 1 ) + +[node name="ball" parent="." instance=ExtResource( 5 )] +position = Vector2( 513.02, 248.2 ) + +[node name="score_left" type="Label" parent="."] +margin_left = 96.0 +margin_top = 57.0 +margin_right = 104.0 +margin_bottom = 71.0 +size_flags_vertical = 0 +text = "0" +align = 1 + +[node name="score_right" type="Label" parent="."] +margin_left = 907.0 +margin_top = 62.0 +margin_right = 915.0 +margin_bottom = 76.0 +size_flags_vertical = 0 +text = "0" +align = 1 + +[node name="winner_left" type="Label" parent="."] +visible = false +margin_left = 60.0 +margin_top = 33.0 +margin_right = 137.0 +margin_bottom = 47.0 +size_flags_vertical = 0 +text = "The Winner!" + +[node name="winner_right" type="Label" parent="."] +visible = false +margin_left = 872.0 +margin_top = 41.0 +margin_right = 949.0 +margin_bottom = 55.0 +size_flags_vertical = 0 +text = "The Winner!" + +[editable path="player1"] + +[editable path="player2"] diff --git a/examples/pong/project.godot b/examples/pong/project.godot new file mode 100644 index 0000000..0f736ff --- /dev/null +++ b/examples/pong/project.godot @@ -0,0 +1,67 @@ +; Engine configuration file. +; It's best edited using the editor UI and not directly, +; since the parameters that go here are not all obvious. +; +; Format: +; [section] ; section goes between [] +; param=value ; assign values to parameters + +config_version=4 + +_global_script_classes=[ ] +_global_script_class_icons={ + +} + +[application] + +run/main_scene="res://pong.tscn" +name="Pong" +main_scene="res://pong.tscn" +disable_stdout=true +icon="res://icon.png" + +[display] + +width=640 +height=400 +stretch_2d=true + +[editor_plugins] + +enabled=PoolStringArray( "pythonscript_repl" ) + +[gdnative] + +singletons=[ "res://pythonscript.gdnlib" ] + +[input] + +p1_move_up={ +"deadzone": 0.5, +"events": [ Object(InputEventKey,"resource_local_to_scene":false,"resource_name":"","device":0,"alt":false,"shift":false,"control":false,"meta":false,"command":false,"pressed":false,"scancode":16777237,"unicode":0,"echo":false,"script":null) + ] +} +p1_move_down={ +"deadzone": 0.5, +"events": [ Object(InputEventKey,"resource_local_to_scene":false,"resource_name":"","device":0,"alt":false,"shift":false,"control":false,"meta":false,"command":false,"pressed":false,"scancode":16777238,"unicode":0,"echo":false,"script":null) + ] +} +p2_move_up={ +"deadzone": 0.5, +"events": [ Object(InputEventKey,"resource_local_to_scene":false,"resource_name":"","device":0,"alt":false,"shift":false,"control":false,"meta":false,"command":false,"pressed":false,"scancode":16777232,"unicode":0,"echo":false,"script":null) + ] +} +p2_move_down={ +"deadzone": 0.5, +"events": [ Object(InputEventKey,"resource_local_to_scene":false,"resource_name":"","device":0,"alt":false,"shift":false,"control":false,"meta":false,"command":false,"pressed":false,"scancode":16777234,"unicode":0,"echo":false,"script":null) + ] +} + +[memory] + +multithread/thread_rid_pool_prealloc=60 + +[render] + +default_clear_color=Color( 0, 0, 0, 1 ) diff --git a/examples/pong/pythonscript.gdnlib b/examples/pong/pythonscript.gdnlib new file mode 100644 index 0000000..1510867 --- /dev/null +++ b/examples/pong/pythonscript.gdnlib @@ -0,0 +1,23 @@ +[general] + +singleton=true +load_once=true +symbol_prefix="godot_" + +[entry] + +X11.64="res://addons/pythonscript/x11-64/libpythonscript.so" 
+X11.32="res://addons/pythonscript/x11-32/libpythonscript.so" +Server.64="res://addons/pythonscript/x11-64/libpythonscript.so" +Windows.64="res://addons/pythonscript/windows-64/pythonscript.dll" +Windows.32="res://addons/pythonscript/windows-32/pythonscript.dll" +OSX.64="res://addons/pythonscript/osx-64/libpythonscript.dylib" + +[dependencies] + +X11.64=[] +X11.32=[] +Server.64=[] +Windows.64=[] +Windows.32=[] +OSX.64=[] diff --git a/examples/pong/separator.png b/examples/pong/separator.png new file mode 100644 index 0000000..56874a5 Binary files /dev/null and b/examples/pong/separator.png differ diff --git a/examples/pong/separator.png.import b/examples/pong/separator.png.import new file mode 100644 index 0000000..32091d9 --- /dev/null +++ b/examples/pong/separator.png.import @@ -0,0 +1,34 @@ +[remap] + +importer="texture" +type="StreamTexture" +path="res://.import/separator.png-f981c8489b9148e2e1dc63398273da74.stex" +metadata={ +"vram_texture": false +} + +[deps] + +source_file="res://separator.png" +dest_files=[ "res://.import/separator.png-f981c8489b9148e2e1dc63398273da74.stex" ] + +[params] + +compress/mode=0 +compress/lossy_quality=0.7 +compress/hdr_mode=0 +compress/bptc_ldr=0 +compress/normal_map=0 +flags/repeat=0 +flags/filter=true +flags/mipmaps=false +flags/anisotropic=false +flags/srgb=2 +process/fix_alpha_border=true +process/premult_alpha=false +process/HDR_as_SRGB=false +process/invert_color=false +stream=false +size_limit=0 +detect_3d=true +svg/scale=1.0 diff --git a/examples/pong_multiplayer/ball.gd b/examples/pong_multiplayer/ball.gd new file mode 100644 index 0000000..09ee31a --- /dev/null +++ b/examples/pong_multiplayer/ball.gd @@ -0,0 +1,73 @@ + +extends Area2D + +const DEFAULT_SPEED=80 + +var direction = Vector2(1,0) +var ball_speed = DEFAULT_SPEED +var stopped=false + + + +onready var screen_size = get_viewport_rect().size + +sync func _reset_ball(for_left): + + position = screen_size / 2 + if (for_left): + direction = Vector2(-1,0) + else: + direction = Vector2( 1,0) + + ball_speed = DEFAULT_SPEED + +sync func stop(): + stopped=true + +func _process(delta): + + # ball will move normally for both players + # even if it's slightly out of sync between them + # so each player sees the motion as smooth and not jerky + + if (not stopped): + translate( direction * ball_speed * delta ) + + # check screen bounds to make the ball bounce + + if ((position.y < 0 and direction.y < 0) or (position.y > screen_size.y and direction.y > 0)): + direction.y = -direction.y + + if (is_network_master()): + # only the master will decide when the ball is out on the left side (its own side) + # this makes the game playable even if latency is high and ball is going fast + # otherwise ball might be out on the other player's screen but not this one + + if (position.x < 0 ): + get_parent().rpc("update_score",false) + rpc("_reset_ball",false) + else: + # only the slave will decide when the ball is out on the right side (its own side) + # this makes the game playable even if latency is high and ball is going fast + # otherwise ball might be out on the other player's screen but not this one + + if (position.x > screen_size.x): + get_parent().rpc("update_score",true) + rpc("_reset_ball",true) + + +sync func bounce(left,random): + + #using sync because both players can make it bounce + if (left): + direction.x = abs(direction.x) + else: + direction.x = -abs(direction.x) + + ball_speed *= 1.1 + direction.y = random*2.0 - 1 + direction = direction.normalized() + +func _ready(): + set_process(true) + diff --git 
a/examples/pong_multiplayer/ball.png b/examples/pong_multiplayer/ball.png new file mode 100644 index 0000000..c0f6da4 Binary files /dev/null and b/examples/pong_multiplayer/ball.png differ diff --git a/examples/pong_multiplayer/ball.png.import b/examples/pong_multiplayer/ball.png.import new file mode 100644 index 0000000..cd051d1 --- /dev/null +++ b/examples/pong_multiplayer/ball.png.import @@ -0,0 +1,24 @@ +[remap] + +importer="texture" +type="StreamTexture" +path="res://.import/ball.png-9a4ca347acb7532f6ae347744a6b04f7.stex" + +[params] + +compress/mode=0 +compress/lossy_quality=0.7 +compress/hdr_mode=0 +compress/normal_map=0 +flags/repeat=0 +flags/filter=true +flags/mipmaps=false +flags/anisotropic=false +flags/srgb=2 +process/fix_alpha_border=true +process/premult_alpha=true +process/HDR_as_SRGB=false +stream=false +size_limit=0 +detect_3d=true +svg/scale=1.0 diff --git a/examples/pong_multiplayer/ball.py b/examples/pong_multiplayer/ball.py new file mode 100644 index 0000000..61945c4 --- /dev/null +++ b/examples/pong_multiplayer/ball.py @@ -0,0 +1,65 @@ +from godot import exposed, rpcsync, Area2D, Vector2 + + +DEFAULT_SPEED = 80 + + +@exposed +class Ball(Area2D): + @rpcsync + def _reset_ball(self, for_left): + print("RESET BALL", for_left) + self.position = self.screen_size / 2 + if for_left: + self.direction = Vector2(-1, 0) + else: + self.direction = Vector2(1, 0) + self.ball_speed = DEFAULT_SPEED + + @rpcsync + def stop(self): + self.stopped = True + + def _process(self, delta): + # ball will move normally for both players + # even if it's slightly out of sync between them + # so each player sees the motion as smooth and not jerky + if not self.stopped: + self.translate(self.direction * self.ball_speed * delta) + # check screen bounds to make the ball bounce + if (self.position.y < 0 and self.direction.y < 0) or ( + self.position.y > self.screen_size.y and self.direction.y > 0 + ): + self.direction.y = -self.direction.y + if self.is_network_master(): + # only the master will decide when the ball is out on the left side (its own side) + # this makes the game playable even if latency is high and ball is going fast + # otherwise ball might be out on the other player's screen but not this one + if self.position.x < 0: + self.get_parent().rpc("update_score", False) + self.rpc("_reset_ball", False) + else: + # only the slave will decide when the ball is out on the right side (its own side) + # this makes the game playable even if latency is high and ball is going fast + # otherwise ball might be out on the other player's screen but not this one + if self.position.x > self.screen_size.x: + self.get_parent().rpc("update_score", True) + self.rpc("_reset_ball", True) + + @rpcsync + def bounce(self, left, random): + # using sync because both players can make it bounce + if left: + self.direction.x = abs(self.direction.x) + else: + self.direction.x = -abs(self.direction.x) + self.ball_speed *= 1.1 + self.direction.y = random * 2.0 - 1 + self.direction = self.direction.normalized() + + def _ready(self): + self.direction = Vector2(1, 0) + self.ball_speed = DEFAULT_SPEED + self.stopped = False + self.screen_size = self.get_viewport_rect().size + self.set_process(True) diff --git a/examples/pong_multiplayer/ball.tscn b/examples/pong_multiplayer/ball.tscn new file mode 100644 index 0000000..1248022 --- /dev/null +++ b/examples/pong_multiplayer/ball.tscn @@ -0,0 +1,33 @@ +[gd_scene load_steps=4 format=2] + +[ext_resource path="res://ball.py" type="Script" id=1] +[ext_resource 
path="res://ball.png" type="Texture" id=2] + +[sub_resource type="CircleShape2D" id=1] + +custom_solver_bias = 0.0 +radius = 5.11969 + +[node name="ball" type="Area2D"] + +input_pickable = true +shapes/0/shape = SubResource( 1 ) +shapes/0/transform = Transform2D( 1, 0, 0, 1, 0, 0 ) +shapes/0/trigger = false +gravity_vec = Vector2( 0, 1 ) +gravity = 98.0 +linear_damp = 0.1 +angular_damp = 1.0 +script = ExtResource( 1 ) + +[node name="sprite" type="Sprite" parent="."] + +texture = ExtResource( 2 ) + +[node name="shape" type="CollisionShape2D" parent="."] + +shape = SubResource( 1 ) +trigger = false +_update_shape_index = 0 + + diff --git a/examples/pong_multiplayer/icon.png b/examples/pong_multiplayer/icon.png new file mode 100644 index 0000000..eab34de Binary files /dev/null and b/examples/pong_multiplayer/icon.png differ diff --git a/examples/pong_multiplayer/icon.png.import b/examples/pong_multiplayer/icon.png.import new file mode 100644 index 0000000..d01c747 --- /dev/null +++ b/examples/pong_multiplayer/icon.png.import @@ -0,0 +1,24 @@ +[remap] + +importer="texture" +type="StreamTexture" +path="res://.import/icon.png-487276ed1e3a0c39cad0279d744ee560.stex" + +[params] + +compress/mode=0 +compress/lossy_quality=0.7 +compress/hdr_mode=0 +compress/normal_map=0 +flags/repeat=0 +flags/filter=true +flags/mipmaps=false +flags/anisotropic=false +flags/srgb=2 +process/fix_alpha_border=true +process/premult_alpha=true +process/HDR_as_SRGB=false +stream=false +size_limit=0 +detect_3d=true +svg/scale=1.0 diff --git a/examples/pong_multiplayer/lobby.gd b/examples/pong_multiplayer/lobby.gd new file mode 100644 index 0000000..99822e6 --- /dev/null +++ b/examples/pong_multiplayer/lobby.gd @@ -0,0 +1,106 @@ + +extends Control + +const DEFAULT_PORT = 8910 # some random number, pick your port properly + +#### Network callbacks from SceneTree #### + +# callback from SceneTree +func _player_connected(id): + #someone connected, start the game! 
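Since the lobby ships only as lobby.gd, here is a hedged sketch of how the same callback could look from godot-python (a hypothetical port, not part of the commit; it only uses API the Python examples above already use, plus the ResourceLoader singleton and the Object.CONNECT_DEFERRED constant from the generated bindings):

    from godot import exposed, Array, Control, Object, ResourceLoader

    @exposed
    class Lobby(Control):
        def _player_connected(self, id):
            # a peer connected: swap the lobby out for the game scene
            pong = ResourceLoader.load("res://pong.tscn").instance()
            # deferred connect, so the scene can be freed safely from the callback
            pong.connect("game_finished", self, "_end_game", Array(), Object.CONNECT_DEFERRED)
            self.get_tree().get_root().add_child(pong)
            self.hide()

The GDScript version of the same callback continues below.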
+ var pong = load("res://pong.tscn").instance() + pong.connect("game_finished",self,"_end_game",[],CONNECT_DEFERRED) # connect deferred so we can safely erase it from the callback + + get_tree().get_root().add_child(pong) + hide() + +func _player_disconnected(id): + + if (get_tree().is_network_server()): + _end_game("Client disconnected") + else: + _end_game("Server disconnected") + +# callback from SceneTree, only for clients (not server) +func _connected_ok(): + # will not use this one + pass + +# callback from SceneTree, only for clients (not server) +func _connected_fail(): + + _set_status("Couldn't connect",false) + + get_tree().set_network_peer(null) #remove peer + + get_node("panel/join").set_disabled(false) + get_node("panel/host").set_disabled(false) + +func _server_disconnected(): + _end_game("Server disconnected") + +##### Game creation functions ###### + +func _end_game(with_error=""): + if (has_node("/root/pong")): + #erase pong scene + get_node("/root/pong").free() # erase immediately, otherwise network might show errors (this is why we connected deferred above) + show() + + get_tree().set_network_peer(null) #remove peer + + get_node("panel/join").set_disabled(false) + get_node("panel/host").set_disabled(false) + + _set_status(with_error,false) + +func _set_status(text,isok): + #simple way to show status + if (isok): + get_node("panel/status_ok").set_text(text) + get_node("panel/status_fail").set_text("") + else: + get_node("panel/status_ok").set_text("") + get_node("panel/status_fail").set_text(text) + +func _on_host_pressed(): + + var host = NetworkedMultiplayerENet.new() + host.set_compression_mode(NetworkedMultiplayerENet.COMPRESS_RANGE_CODER) + var err = host.create_server(DEFAULT_PORT,1) # max: 1 peer, since it's a 2 players game + if (err!=OK): + #is another server running? 
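For comparison, a hedged Python sketch of the same ENet host setup (a hypothetical helper, not part of the commit; NetworkedMultiplayerENet and its COMPRESS_RANGE_CODER constant are exactly the ones used in this script, and Error comes from the generated bindings shown later in this commit; as a Reference subclass, the peer can be constructed directly from Python):

    from godot import Error, NetworkedMultiplayerENet

    DEFAULT_PORT = 8910

    def host_game(tree, max_clients=1):  # max one peer: pong is a two-player game
        peer = NetworkedMultiplayerENet()
        peer.set_compression_mode(NetworkedMultiplayerENet.COMPRESS_RANGE_CODER)
        if peer.create_server(DEFAULT_PORT, max_clients) != Error.OK:
            return False  # the port is already bound, most likely by another server
        tree.set_network_peer(peer)
        return True

The GDScript failure branch resumes below.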
+ _set_status("Can't host, address in use.",false) + return + + get_tree().set_network_peer(host) + get_node("panel/join").set_disabled(true) + get_node("panel/host").set_disabled(true) + _set_status("Waiting for player..",true) + +func _on_join_pressed(): + + var ip = get_node("panel/address").get_text() + if (not ip.is_valid_ip_address()): + _set_status("IP address is invalid",false) + return + + var host = NetworkedMultiplayerENet.new() + host.set_compression_mode(NetworkedMultiplayerENet.COMPRESS_RANGE_CODER) + host.create_client(ip,DEFAULT_PORT) + get_tree().set_network_peer(host) + + _set_status("Connecting..",true) + + + +### INITIALIZER #### + +func _ready(): + # connect all the callbacks related to networking + get_tree().connect("network_peer_connected",self,"_player_connected") + get_tree().connect("network_peer_disconnected",self,"_player_disconnected") + get_tree().connect("connected_to_server",self,"_connected_ok") + get_tree().connect("connection_failed",self,"_connected_fail") + get_tree().connect("server_disconnected",self,"_server_disconnected") + diff --git a/examples/pong_multiplayer/lobby.tscn b/examples/pong_multiplayer/lobby.tscn new file mode 100644 index 0000000..3128daf --- /dev/null +++ b/examples/pong_multiplayer/lobby.tscn @@ -0,0 +1,184 @@ +[gd_scene load_steps=2 format=2] + +[ext_resource path="res://lobby.gd" type="Script" id=1] + +[node name="lobby" type="Control"] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 0 +size_flags_horizontal = 1 +size_flags_vertical = 1 +script = ExtResource( 1 ) + +[node name="title" type="Label" parent="."] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +margin_left = 214.0 +margin_top = 7.0 +margin_right = 321.0 +margin_bottom = 21.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 2 +size_flags_horizontal = 1 +size_flags_vertical = 0 +text = "Multiplayer Pong" +align = 1 +valign = 1 +percent_visible = 1.0 +lines_skipped = 0 +max_lines_visible = -1 + +[node name="panel" type="Panel" parent="."] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 0 +size_flags_horizontal = 1 +size_flags_vertical = 1 + +[node name="address_label" type="Label" parent="panel"] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +margin_left = 162.0 +margin_top = 54.0 +margin_right = 214.0 +margin_bottom = 68.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 2 +size_flags_horizontal = 1 +size_flags_vertical = 0 +text = "Address" +percent_visible = 1.0 +lines_skipped = 0 +max_lines_visible = -1 + +[node name="address" type="LineEdit" parent="panel"] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +margin_left = 163.0 +margin_top = 74.0 +margin_right = 242.0 +margin_bottom = 98.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 0 +size_flags_horizontal = 1 +size_flags_vertical = 1 +text = "127.0.0.1" +expand_to_len = false +focus_mode = 2 +placeholder_alpha = 0.6 +caret_blink = false +caret_blink_speed = 0.65 + +[node name="host" type="Button" parent="panel"] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +margin_left = 105.0 +margin_top = 107.0 +margin_right = 147.0 +margin_bottom = 127.0 +rect_pivot_offset = 
Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 0 +size_flags_horizontal = 1 +size_flags_vertical = 1 +toggle_mode = false +enabled_focus_mode = 2 +shortcut = null +group = null +text = "Host" +flat = false + +[node name="join" type="Button" parent="panel"] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +margin_left = 109.0 +margin_top = 79.0 +margin_right = 144.0 +margin_bottom = 99.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 0 +size_flags_horizontal = 1 +size_flags_vertical = 1 +toggle_mode = false +enabled_focus_mode = 2 +shortcut = null +group = null +text = "Join" +flat = false + +[node name="status_ok" type="Label" parent="panel"] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +margin_left = 265.0 +margin_top = 43.0 +margin_right = 303.0 +margin_bottom = 57.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 2 +size_flags_horizontal = 1 +size_flags_vertical = 0 +custom_colors/font_color = Color( 0, 1, 0.015625, 1 ) +align = 1 +percent_visible = 1.0 +lines_skipped = 0 +max_lines_visible = -1 + +[node name="status_fail" type="Label" parent="panel"] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +margin_left = 261.0 +margin_top = 78.0 +margin_right = 295.0 +margin_bottom = 92.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 2 +size_flags_horizontal = 1 +size_flags_vertical = 0 +custom_colors/font_color = Color( 1, 0, 0, 1 ) +align = 1 +percent_visible = 1.0 +lines_skipped = 0 +max_lines_visible = -1 + +[connection signal="pressed" from="panel/host" to="." method="_on_host_pressed"] + +[connection signal="pressed" from="panel/join" to="." method="_on_join_pressed"] + + diff --git a/examples/pong_multiplayer/paddle.gd b/examples/pong_multiplayer/paddle.gd new file mode 100644 index 0000000..84bfb40 --- /dev/null +++ b/examples/pong_multiplayer/paddle.gd @@ -0,0 +1,63 @@ +extends Area2D + +export var left=false + +const MOTION_SPEED=150 + +var motion = 0 +var you_hidden=false + +onready var screen_size = get_viewport_rect().size + +#synchronize position and speed to the other peers +slave func set_pos_and_motion(p_pos,p_motion): + position = p_pos + motion=p_motion + +func _hide_you_label(): + you_hidden=true + get_node("you").hide() + +func _process(delta): + + #is the master of the paddle + if (is_network_master()): + + motion = 0 + if (Input.is_action_pressed("move_up")): + motion -= 1 + elif (Input.is_action_pressed("move_down")): + motion += 1 + + if (not you_hidden and motion!=0): + _hide_you_label() + + + motion*=MOTION_SPEED + + #using unreliable to make sure position is updated as fast as possible, even if one of the calls is dropped + rpc_unreliable("set_pos_and_motion",position,motion) + + else: + if (not you_hidden): + _hide_you_label() + + + translate( Vector2(0,motion*delta) ) + + # set screen limits + + if (position.y < 0 ): + position.y = 0 + elif (position.y > screen_size.y): + position.y = screen_size.y + + + +func _ready(): + set_process(true) + +func _on_paddle_area_enter( area ): + + if (is_network_master()): + area.rpc("bounce",left,randf()) #random for new direction generated on each peer diff --git a/examples/pong_multiplayer/paddle.png b/examples/pong_multiplayer/paddle.png new file mode 100644 index 0000000..e23491e Binary files /dev/null and b/examples/pong_multiplayer/paddle.png differ diff --git a/examples/pong_multiplayer/paddle.png.import 
b/examples/pong_multiplayer/paddle.png.import new file mode 100644 index 0000000..fdedcc3 --- /dev/null +++ b/examples/pong_multiplayer/paddle.png.import @@ -0,0 +1,24 @@ +[remap] + +importer="texture" +type="StreamTexture" +path="res://.import/paddle.png-0e798fb0912613386507c9904d5cc01a.stex" + +[params] + +compress/mode=0 +compress/lossy_quality=0.7 +compress/hdr_mode=0 +compress/normal_map=0 +flags/repeat=0 +flags/filter=true +flags/mipmaps=false +flags/anisotropic=false +flags/srgb=2 +process/fix_alpha_border=true +process/premult_alpha=true +process/HDR_as_SRGB=false +stream=false +size_limit=0 +detect_3d=true +svg/scale=1.0 diff --git a/examples/pong_multiplayer/paddle.tscn b/examples/pong_multiplayer/paddle.tscn new file mode 100644 index 0000000..1f03164 --- /dev/null +++ b/examples/pong_multiplayer/paddle.tscn @@ -0,0 +1,52 @@ +[gd_scene load_steps=4 format=2] + +[ext_resource path="res://paddle.gd" type="Script" id=1] +[ext_resource path="res://paddle.png" type="Texture" id=2] + +[sub_resource type="CapsuleShape2D" id=1] + +custom_solver_bias = 0.0 +radius = 4.78568 +height = 23.6064 + +[node name="paddle" type="Area2D"] + +input_pickable = true +shapes/0/shape = SubResource( 1 ) +shapes/0/transform = Transform2D( 1, 0, 0, 1, 0, 0 ) +shapes/0/trigger = false +gravity_vec = Vector2( 0, 1 ) +gravity = 98.0 +linear_damp = 0.1 +angular_damp = 1.0 +script = ExtResource( 1 ) +left = false + +[node name="sprite" type="Sprite" parent="."] + +texture = ExtResource( 2 ) + +[node name="shape" type="CollisionShape2D" parent="."] + +shape = SubResource( 1 ) +trigger = false +_update_shape_index = 0 + +[node name="you" type="Label" parent="."] + +margin_left = -12.0 +margin_top = 21.0 +margin_right = 11.0 +margin_bottom = 35.0 +rect_clip_content = false +mouse_filter = 2 +size_flags_vertical = 0 +text = "You" +align = 1 +percent_visible = 1.0 +lines_skipped = 0 +max_lines_visible = -1 + +[connection signal="area_entered" from="." to="." method="_on_paddle_area_enter"] + + diff --git a/examples/pong_multiplayer/pong.gd b/examples/pong_multiplayer/pong.gd new file mode 100644 index 0000000..d54e9e7 --- /dev/null +++ b/examples/pong_multiplayer/pong.gd @@ -0,0 +1,53 @@ + +extends Node2D + +const SCORE_TO_WIN=10 + +var score_left = 0 +var score_right = 0 + +signal game_finished() + +sync func update_score(add_to_left): + if (add_to_left): + + score_left+=1 + get_node("score_left").set_text( str(score_left) ) + else: + + score_right+=1 + get_node("score_right").set_text( str(score_right) ) + + var game_ended = false + + if (score_left==SCORE_TO_WIN): + get_node("winner_left").show() + game_ended=true + elif (score_right==SCORE_TO_WIN): + get_node("winner_right").show() + game_ended=true + + if (game_ended): + get_node("exit_game").show() + get_node("ball").rpc("stop") + +func _on_exit_game_pressed(): + emit_signal("game_finished") + +func _ready(): + + # by default, all nodes in server inherit from master + # while all nodes in clients inherit from slave + + if (get_tree().is_network_server()): + #set to not control player 2. 
since it's master like everything else +# get_node("player2").set_network_mode(NETWORK_MODE_SLAVE) + get_node("player2").set_network_master(2, true) + else: + #set to control player 2, since it's slave like everything else +# get_node("player2").set_network_mode(NETWORK_MODE_MASTER) + get_node("player2").set_network_master(get_tree().get_network_unique_id(), true) + + #let each paddle know which one is left, too + get_node("player1").left=true + get_node("player2").left=false + diff --git a/examples/pong_multiplayer/pong.tscn b/examples/pong_multiplayer/pong.tscn new file mode 100644 index 0000000..ee12688 --- /dev/null +++ b/examples/pong_multiplayer/pong.tscn @@ -0,0 +1,172 @@ +[gd_scene load_steps=6 format=2] + +[ext_resource path="res://pong.gd" type="Script" id=1] +[ext_resource path="res://separator.png" type="Texture" id=2] +[ext_resource path="res://paddle.tscn" type="PackedScene" id=3] +[ext_resource path="res://ball.tscn" type="PackedScene" id=4] +[ext_resource path="res://ball.gd" type="Script" id=5] + +[node name="pong" type="Node2D"] + +script = ExtResource( 1 ) + +[node name="separator" type="Sprite" parent="."] + +position = Vector2( 512.309, 298.233 ) +scale = Vector2( 1.04883, 1.4884 ) +texture = ExtResource( 2 ) + +[node name="player1" parent="." instance=ExtResource( 3 )] + +position = Vector2( 19.9447, 267.036 ) +audio_bus_override = false +audio_bus_name = "Master" + +[node name="sprite" parent="player1"] + +modulate = Color( 1, 0, 0.960938, 1 ) + +[node name="you" parent="player1"] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +rect_pivot_offset = Vector2( 0, 0 ) +size_flags_horizontal = 1 + +[node name="player2" parent="." instance=ExtResource( 3 )] + +position = Vector2( 995.015, 244.876 ) +audio_bus_override = false +audio_bus_name = "Master" + +[node name="sprite" parent="player2"] + +modulate = Color( 0, 0.929688, 1, 1 ) + +[node name="you" parent="player2"] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +rect_pivot_offset = Vector2( 0, 0 ) +size_flags_horizontal = 1 + +[node name="ball" parent="." instance=ExtResource( 4 )] + +position = Vector2( 513.02, 248.2 ) +audio_bus_override = false +audio_bus_name = "Master" +script = ExtResource( 5 ) + +[node name="score_left" type="Label" parent="."] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +margin_left = 96.0 +margin_top = 57.0 +margin_right = 104.0 +margin_bottom = 71.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 2 +size_flags_horizontal = 1 +size_flags_vertical = 0 +text = "0" +align = 1 +percent_visible = 1.0 +lines_skipped = 0 +max_lines_visible = -1 + +[node name="score_right" type="Label" parent="."] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +margin_left = 907.0 +margin_top = 62.0 +margin_right = 915.0 +margin_bottom = 76.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 2 +size_flags_horizontal = 1 +size_flags_vertical = 0 +text = "0" +align = 1 +percent_visible = 1.0 +lines_skipped = 0 +max_lines_visible = -1 + +[node name="winner_left" type="Label" parent="."] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +margin_left = 60.0 +margin_top = 33.0 +margin_right = 137.0 +margin_bottom = 47.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 2 +size_flags_horizontal = 1 +size_flags_vertical = 0 +text = "The Winner!" 
+percent_visible = 1.0 +lines_skipped = 0 +max_lines_visible = -1 + +[node name="winner_right" type="Label" parent="."] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +margin_left = 872.0 +margin_top = 41.0 +margin_right = 949.0 +margin_bottom = 55.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 2 +size_flags_horizontal = 1 +size_flags_vertical = 0 +text = "The Winner!" +percent_visible = 1.0 +lines_skipped = 0 +max_lines_visible = -1 + +[node name="exit_game" type="Button" parent="."] + +anchor_left = 0.0 +anchor_top = 0.0 +anchor_right = 0.0 +anchor_bottom = 0.0 +margin_left = 412.0 +margin_top = 20.0 +margin_right = 489.0 +margin_bottom = 40.0 +rect_pivot_offset = Vector2( 0, 0 ) +rect_clip_content = false +mouse_filter = 0 +size_flags_horizontal = 1 +size_flags_vertical = 1 +toggle_mode = false +enabled_focus_mode = 2 +shortcut = null +group = null +text = "Exit Game" +flat = false + +[connection signal="pressed" from="exit_game" to="." method="_on_exit_game_pressed"] + + +[editable path="player1"] +[editable path="player2"] diff --git a/examples/pong_multiplayer/project.godot b/examples/pong_multiplayer/project.godot new file mode 100644 index 0000000..d53766a --- /dev/null +++ b/examples/pong_multiplayer/project.godot @@ -0,0 +1,41 @@ +; Engine configuration file. +; It's best edited using the editor UI and not directly, +; since the parameters that go here are not all obvious. +; +; Format: +; [section] ; section goes between [] +; param=value ; assign values to parameters + +config_version=3 + +[application] + +run/main_scene="res://lobby.tscn" +name="Pong Multiplayer" +main_scene="res://lobby.tscn" +icon="res://icon.png" + +[display] + +width=640 +height=400 +stretch_2d=true + +[gdnative] + +singletons=[ ] + +[input] + +move_up=[ Object(InputEventKey,"resource_local_to_scene":false,"resource_name":"","device":0,"alt":false,"shift":false,"control":false,"meta":false,"command":false,"pressed":false,"scancode":16777232,"unicode":0,"echo":false,"script":null) + ] +move_down=[ Object(InputEventKey,"resource_local_to_scene":false,"resource_name":"","device":0,"alt":false,"shift":false,"control":false,"meta":false,"command":false,"pressed":false,"scancode":16777234,"unicode":0,"echo":false,"script":null) + ] + +[memory] + +multithread/thread_rid_pool_prealloc=60 + +[render] + +default_clear_color=Color( 0, 0, 0, 1 ) diff --git a/examples/pong_multiplayer/pythonscript.gdnlib b/examples/pong_multiplayer/pythonscript.gdnlib new file mode 100644 index 0000000..1510867 --- /dev/null +++ b/examples/pong_multiplayer/pythonscript.gdnlib @@ -0,0 +1,23 @@ +[general] + +singleton=true +load_once=true +symbol_prefix="godot_" + +[entry] + +X11.64="res://addons/pythonscript/x11-64/libpythonscript.so" +X11.32="res://addons/pythonscript/x11-32/libpythonscript.so" +Server.64="res://addons/pythonscript/x11-64/libpythonscript.so" +Windows.64="res://addons/pythonscript/windows-64/pythonscript.dll" +Windows.32="res://addons/pythonscript/windows-32/pythonscript.dll" +OSX.64="res://addons/pythonscript/osx-64/libpythonscript.dylib" + +[dependencies] + +X11.64=[] +X11.32=[] +Server.64=[] +Windows.64=[] +Windows.32=[] +OSX.64=[] diff --git a/examples/pong_multiplayer/separator.png b/examples/pong_multiplayer/separator.png new file mode 100644 index 0000000..56874a5 Binary files /dev/null and b/examples/pong_multiplayer/separator.png differ diff --git a/examples/pong_multiplayer/separator.png.import b/examples/pong_multiplayer/separator.png.import 
new file mode 100644 index 0000000..5c19c68 --- /dev/null +++ b/examples/pong_multiplayer/separator.png.import @@ -0,0 +1,24 @@ +[remap] + +importer="texture" +type="StreamTexture" +path="res://.import/separator.png-f981c8489b9148e2e1dc63398273da74.stex" + +[params] + +compress/mode=0 +compress/lossy_quality=0.7 +compress/hdr_mode=0 +compress/normal_map=0 +flags/repeat=0 +flags/filter=true +flags/mipmaps=false +flags/anisotropic=false +flags/srgb=2 +process/fix_alpha_border=true +process/premult_alpha=true +process/HDR_as_SRGB=false +stream=false +size_limit=0 +detect_3d=true +svg/scale=1.0 diff --git a/generation/bindings_templates/bindings.tmpl.pxd b/generation/bindings_templates/bindings.tmpl.pxd new file mode 100644 index 0000000..c0323c0 --- /dev/null +++ b/generation/bindings_templates/bindings.tmpl.pxd @@ -0,0 +1,13 @@ +# /!\ Autogenerated code, modifications will be lost /!\ +# see `generation/generate_bindings.py` + +from godot._hazmat.gdnative_api_struct cimport * +from godot._hazmat.gdapi cimport pythonscript_gdapi10 as gdapi10 +from godot.builtins cimport * + +{% from 'class.tmpl.pxd' import render_class_pxd -%} +{%- for cls in classes %} +{{ render_class_pxd(cls) }} +{%- endfor %} + +cdef void _initialize_bindings() diff --git a/generation/bindings_templates/bindings.tmpl.pyi b/generation/bindings_templates/bindings.tmpl.pyi new file mode 100644 index 0000000..8efa4b5 --- /dev/null +++ b/generation/bindings_templates/bindings.tmpl.pyi @@ -0,0 +1,164 @@ +# /!\ Autogenerated code, modifications will be lost /!\ +# see `generation/generate_bindings.py` + +# Imports needed for typing +# (Note PEP484 state that import without as and * are not exposed by the stub file) +from typing import Any, Union +from enum import IntFlag +from godot.builtins import ( + AABB, + Array, + Basis, + Color, + Dictionary, + NodePath, + Plane, + Quat, + Rect2, + RID, + Transform2D, + Transform, + Vector2, + Vector3, + PoolByteArray, + PoolIntArray, + PoolRealArray, + PoolStringArray, + PoolVector2Array, + PoolVector3Array, + PoolColorArray, + GDString, +) + + +class Error(IntFlag): + OK: int + FAILED: int + ERR_UNAVAILABLE: int + ERR_UNCONFIGURED: int + ERR_UNAUTHORIZED: int + ERR_PARAMETER_RANGE_ERROR: int + ERR_OUT_OF_MEMORY: int + ERR_FILE_NOT_FOUND: int + ERR_FILE_BAD_DRIVE: int + ERR_FILE_BAD_PATH: int + ERR_FILE_NO_PERMISSION: int + ERR_FILE_ALREADY_IN_USE: int + ERR_FILE_CANT_OPEN: int + ERR_FILE_CANT_WRITE: int + ERR_FILE_CANT_READ: int + ERR_FILE_UNRECOGNIZED: int + ERR_FILE_CORRUPT: int + ERR_FILE_MISSING_DEPENDENCIES: int + ERR_FILE_EOF: int + ERR_CANT_OPEN: int + ERR_CANT_CREATE: int + ERR_QUERY_FAILED: int + ERR_ALREADY_IN_USE: int + ERR_LOCKED: int + ERR_TIMEOUT: int + ERR_CANT_CONNECT: int + ERR_CANT_RESOLVE: int + ERR_CONNECTION_ERROR: int + ERR_CANT_ACQUIRE_RESOURCE: int + ERR_CANT_FORK: int + ERR_INVALID_DATA: int + ERR_INVALID_PARAMETER: int + ERR_ALREADY_EXISTS: int + ERR_DOES_NOT_EXIST: int + ERR_DATABASE_CANT_READ: int + ERR_DATABASE_CANT_WRITE: int + ERR_COMPILATION_FAILED: int + ERR_METHOD_NOT_FOUND: int + ERR_LINK_FAILED: int + ERR_SCRIPT_FAILED: int + ERR_CYCLIC_LINK: int + ERR_INVALID_DECLARATION: int + ERR_DUPLICATE_SYMBOL: int + ERR_PARSE_ERROR: int + ERR_BUSY: int + ERR_SKIP: int + ERR_HELP: int + ERR_BUG: int + ERR_PRINTER_ON_FIRE: int + + +class VariantType(IntFlag): + NIL: int + BOOL: int + INT: int + REAL: int + STRING: int + VECTOR2: int + RECT2: int + VECTOR3: int + TRANSFORM2D: int + PLANE: int + QUAT: int + AABB: int + BASIS: int + TRANSFORM: int + COLOR: int + 
NODE_PATH: int + RID: int + OBJECT: int + DICTIONARY: int + ARRAY: int + POOL_BYTE_ARRAY: int + POOL_INT_ARRAY: int + POOL_REAL_ARRAY: int + POOL_STRING_ARRAY: int + POOL_VECTOR2_ARRAY: int + POOL_VECTOR3_ARRAY: int + POOL_COLOR_ARRAY: int + + +class VariantOperator(IntFlag): + EQUAL: int + NOT_EQUAL: int + LESS: int + LESS_EQUAL: int + GREATER: int + GREATER_EQUAL: int + ADD: int + SUBTRACT: int + MULTIPLY: int + DIVIDE: int + NEGATE: int + POSITIVE: int + MODULE: int + STRING_CONCAT: int + SHIFT_LEFT: int + SHIFT_RIGHT: int + BIT_AND: int + BIT_OR: int + BIT_XOR: int + BIT_NEGATE: int + AND: int + OR: int + XOR: int + NOT: int + IN: int + MAX: int + + +### Classes ### + +{% from 'class.tmpl.pyi' import render_class, render_class_gdapi_ptrs_init -%} +{%- for cls in classes %} +{{ render_class(cls) }} +{%- endfor %} + +### Global constants ### + +{% for key, value in constants.items() %} +{{key}}: int +{% endfor %} + +### Singletons ### + +{% for cls in classes %} +{% if cls.singleton %} +{{ cls.singleton }}: {{ cls.name }} +{% endif %} +{% endfor %} diff --git a/generation/bindings_templates/bindings.tmpl.pyx b/generation/bindings_templates/bindings.tmpl.pyx new file mode 100644 index 0000000..027d193 --- /dev/null +++ b/generation/bindings_templates/bindings.tmpl.pyx @@ -0,0 +1,191 @@ +# /!\ Autogenerated code, modifications will be lost /!\ +# see `generation/generate_bindings.py` + +from godot._hazmat.gdnative_api_struct cimport * +from godot._hazmat.gdapi cimport pythonscript_gdapi10 as gdapi10 +from godot._hazmat.conversion cimport * +from godot.builtins cimport * + +from enum import IntFlag + + +__ERR_MSG_BINDING_NOT_AVAILABLE = "No Godot binding available" + + +class Error(IntFlag): + OK = godot_error.GODOT_OK + FAILED = godot_error.GODOT_FAILED + ERR_UNAVAILABLE = godot_error.GODOT_ERR_UNAVAILABLE + ERR_UNCONFIGURED = godot_error.GODOT_ERR_UNCONFIGURED + ERR_UNAUTHORIZED = godot_error.GODOT_ERR_UNAUTHORIZED + ERR_PARAMETER_RANGE_ERROR = godot_error.GODOT_ERR_PARAMETER_RANGE_ERROR + ERR_OUT_OF_MEMORY = godot_error.GODOT_ERR_OUT_OF_MEMORY + ERR_FILE_NOT_FOUND = godot_error.GODOT_ERR_FILE_NOT_FOUND + ERR_FILE_BAD_DRIVE = godot_error.GODOT_ERR_FILE_BAD_DRIVE + ERR_FILE_BAD_PATH = godot_error.GODOT_ERR_FILE_BAD_PATH + ERR_FILE_NO_PERMISSION = godot_error.GODOT_ERR_FILE_NO_PERMISSION + ERR_FILE_ALREADY_IN_USE = godot_error.GODOT_ERR_FILE_ALREADY_IN_USE + ERR_FILE_CANT_OPEN = godot_error.GODOT_ERR_FILE_CANT_OPEN + ERR_FILE_CANT_WRITE = godot_error.GODOT_ERR_FILE_CANT_WRITE + ERR_FILE_CANT_READ = godot_error.GODOT_ERR_FILE_CANT_READ + ERR_FILE_UNRECOGNIZED = godot_error.GODOT_ERR_FILE_UNRECOGNIZED + ERR_FILE_CORRUPT = godot_error.GODOT_ERR_FILE_CORRUPT + ERR_FILE_MISSING_DEPENDENCIES = godot_error.GODOT_ERR_FILE_MISSING_DEPENDENCIES + ERR_FILE_EOF = godot_error.GODOT_ERR_FILE_EOF + ERR_CANT_OPEN = godot_error.GODOT_ERR_CANT_OPEN + ERR_CANT_CREATE = godot_error.GODOT_ERR_CANT_CREATE + ERR_QUERY_FAILED = godot_error.GODOT_ERR_QUERY_FAILED + ERR_ALREADY_IN_USE = godot_error.GODOT_ERR_ALREADY_IN_USE + ERR_LOCKED = godot_error.GODOT_ERR_LOCKED + ERR_TIMEOUT = godot_error.GODOT_ERR_TIMEOUT + ERR_CANT_CONNECT = godot_error.GODOT_ERR_CANT_CONNECT + ERR_CANT_RESOLVE = godot_error.GODOT_ERR_CANT_RESOLVE + ERR_CONNECTION_ERROR = godot_error.GODOT_ERR_CONNECTION_ERROR + ERR_CANT_ACQUIRE_RESOURCE = godot_error.GODOT_ERR_CANT_ACQUIRE_RESOURCE + ERR_CANT_FORK = godot_error.GODOT_ERR_CANT_FORK + ERR_INVALID_DATA = godot_error.GODOT_ERR_INVALID_DATA + ERR_INVALID_PARAMETER = 
godot_error.GODOT_ERR_INVALID_PARAMETER + ERR_ALREADY_EXISTS = godot_error.GODOT_ERR_ALREADY_EXISTS + ERR_DOES_NOT_EXIST = godot_error.GODOT_ERR_DOES_NOT_EXIST + ERR_DATABASE_CANT_READ = godot_error.GODOT_ERR_DATABASE_CANT_READ + ERR_DATABASE_CANT_WRITE = godot_error.GODOT_ERR_DATABASE_CANT_WRITE + ERR_COMPILATION_FAILED = godot_error.GODOT_ERR_COMPILATION_FAILED + ERR_METHOD_NOT_FOUND = godot_error.GODOT_ERR_METHOD_NOT_FOUND + ERR_LINK_FAILED = godot_error.GODOT_ERR_LINK_FAILED + ERR_SCRIPT_FAILED = godot_error.GODOT_ERR_SCRIPT_FAILED + ERR_CYCLIC_LINK = godot_error.GODOT_ERR_CYCLIC_LINK + ERR_INVALID_DECLARATION = godot_error.GODOT_ERR_INVALID_DECLARATION + ERR_DUPLICATE_SYMBOL = godot_error.GODOT_ERR_DUPLICATE_SYMBOL + ERR_PARSE_ERROR = godot_error.GODOT_ERR_PARSE_ERROR + ERR_BUSY = godot_error.GODOT_ERR_BUSY + ERR_SKIP = godot_error.GODOT_ERR_SKIP + ERR_HELP = godot_error.GODOT_ERR_HELP + ERR_BUG = godot_error.GODOT_ERR_BUG + ERR_PRINTER_ON_FIRE = godot_error.GODOT_ERR_PRINTER_ON_FIRE + + +class VariantType(IntFlag): + NIL = godot_variant_type.GODOT_VARIANT_TYPE_NIL + BOOL = godot_variant_type.GODOT_VARIANT_TYPE_BOOL + INT = godot_variant_type.GODOT_VARIANT_TYPE_INT + REAL = godot_variant_type.GODOT_VARIANT_TYPE_REAL + STRING = godot_variant_type.GODOT_VARIANT_TYPE_STRING + VECTOR2 = godot_variant_type.GODOT_VARIANT_TYPE_VECTOR2 + RECT2 = godot_variant_type.GODOT_VARIANT_TYPE_RECT2 + VECTOR3 = godot_variant_type.GODOT_VARIANT_TYPE_VECTOR3 + TRANSFORM2D = godot_variant_type.GODOT_VARIANT_TYPE_TRANSFORM2D + PLANE = godot_variant_type.GODOT_VARIANT_TYPE_PLANE + QUAT = godot_variant_type.GODOT_VARIANT_TYPE_QUAT + AABB = godot_variant_type.GODOT_VARIANT_TYPE_AABB + BASIS = godot_variant_type.GODOT_VARIANT_TYPE_BASIS + TRANSFORM = godot_variant_type.GODOT_VARIANT_TYPE_TRANSFORM + COLOR = godot_variant_type.GODOT_VARIANT_TYPE_COLOR + NODE_PATH = godot_variant_type.GODOT_VARIANT_TYPE_NODE_PATH + RID = godot_variant_type.GODOT_VARIANT_TYPE_RID + OBJECT = godot_variant_type.GODOT_VARIANT_TYPE_OBJECT + DICTIONARY = godot_variant_type.GODOT_VARIANT_TYPE_DICTIONARY + ARRAY = godot_variant_type.GODOT_VARIANT_TYPE_ARRAY + POOL_BYTE_ARRAY = godot_variant_type.GODOT_VARIANT_TYPE_POOL_BYTE_ARRAY + POOL_INT_ARRAY = godot_variant_type.GODOT_VARIANT_TYPE_POOL_INT_ARRAY + POOL_REAL_ARRAY = godot_variant_type.GODOT_VARIANT_TYPE_POOL_REAL_ARRAY + POOL_STRING_ARRAY = godot_variant_type.GODOT_VARIANT_TYPE_POOL_STRING_ARRAY + POOL_VECTOR2_ARRAY = godot_variant_type.GODOT_VARIANT_TYPE_POOL_VECTOR2_ARRAY + POOL_VECTOR3_ARRAY = godot_variant_type.GODOT_VARIANT_TYPE_POOL_VECTOR3_ARRAY + POOL_COLOR_ARRAY = godot_variant_type.GODOT_VARIANT_TYPE_POOL_COLOR_ARRAY + + +class VariantOperator(IntFlag): + EQUAL = godot_variant_operator.GODOT_VARIANT_OP_EQUAL + NOT_EQUAL = godot_variant_operator.GODOT_VARIANT_OP_NOT_EQUAL + LESS = godot_variant_operator.GODOT_VARIANT_OP_LESS + LESS_EQUAL = godot_variant_operator.GODOT_VARIANT_OP_LESS_EQUAL + GREATER = godot_variant_operator.GODOT_VARIANT_OP_GREATER + GREATER_EQUAL = godot_variant_operator.GODOT_VARIANT_OP_GREATER_EQUAL + ADD = godot_variant_operator.GODOT_VARIANT_OP_ADD + SUBTRACT = godot_variant_operator.GODOT_VARIANT_OP_SUBTRACT + MULTIPLY = godot_variant_operator.GODOT_VARIANT_OP_MULTIPLY + DIVIDE = godot_variant_operator.GODOT_VARIANT_OP_DIVIDE + NEGATE = godot_variant_operator.GODOT_VARIANT_OP_NEGATE + POSITIVE = godot_variant_operator.GODOT_VARIANT_OP_POSITIVE + MODULE = godot_variant_operator.GODOT_VARIANT_OP_MODULE + STRING_CONCAT = 
godot_variant_operator.GODOT_VARIANT_OP_STRING_CONCAT + SHIFT_LEFT = godot_variant_operator.GODOT_VARIANT_OP_SHIFT_LEFT + SHIFT_RIGHT = godot_variant_operator.GODOT_VARIANT_OP_SHIFT_RIGHT + BIT_AND = godot_variant_operator.GODOT_VARIANT_OP_BIT_AND + BIT_OR = godot_variant_operator.GODOT_VARIANT_OP_BIT_OR + BIT_XOR = godot_variant_operator.GODOT_VARIANT_OP_BIT_XOR + BIT_NEGATE = godot_variant_operator.GODOT_VARIANT_OP_BIT_NEGATE + AND = godot_variant_operator.GODOT_VARIANT_OP_AND + OR = godot_variant_operator.GODOT_VARIANT_OP_OR + XOR = godot_variant_operator.GODOT_VARIANT_OP_XOR + NOT = godot_variant_operator.GODOT_VARIANT_OP_NOT + IN = godot_variant_operator.GODOT_VARIANT_OP_IN + MAX = godot_variant_operator.GODOT_VARIANT_OP_MAX + + +### Classes ### + +{% from 'class.tmpl.pyx' import render_class, render_class_gdapi_ptrs_init -%} +{%- for cls in classes %} +{{ render_class(cls) }} +{%- endfor %} + +### Global constants ### + +{% for key, value in constants.items() %} +{{key}} = {{value}} +{% endfor %} + +### Classes & singletons needed for Pythonscript bootstrap ### + +# Godot classes & singletons are not all available when loading Pythonscript. +# Hence greedy loading is done only for items needed for Pythonscript +# bootstrap. +# The remaining loading will be achieved when loading the first python script +# (where at this point Godot should have finished its initialization). + +{% set early_needed_bindings = ["_OS", "_ProjectSettings"] %} +cdef godot_object *_ptr +{% for cls in classes %} +{% if cls.name in early_needed_bindings %} +{{ render_class_gdapi_ptrs_init(cls) }} +{% if cls.singleton %} +_ptr = gdapi10.godot_global_get_singleton("{{ cls.singleton }}") +if _ptr != NULL: + {{ cls.singleton }} = {{ cls.name }}.from_ptr(_ptr) +else: + print("ERROR: cannot load singleton `{{ cls.singleton }}` required for Pythonscript init") +{% endif %} +{% endif %} +{% endfor %} + +### Remaining bindings late initialization ### + +cdef bint _bindings_initialized = False + +{% for cls in classes %} +{% if cls.name not in early_needed_bindings %} +{% if cls.singleton %} +{{ cls.singleton }} = {{ cls.name }}.from_ptr(NULL) +{% endif %} +{% endif %} +{% endfor %} + +cdef void _initialize_bindings(): + global _bindings_initialized + if _bindings_initialized: + return + +{%- for cls in classes %} +{%- if cls.name not in early_needed_bindings %} + {{ render_class_gdapi_ptrs_init(cls) | indent }} +{%- if cls.singleton %} + global {{ cls.singleton }} + (<{{ cls["name"] }}>{{ cls.singleton }})._gd_ptr = gdapi10.godot_global_get_singleton("{{ cls.singleton }}") + if (<{{ cls["name"] }}>{{ cls.singleton }})._gd_ptr == NULL: + print('Cannot retrieve singleton {{ cls.singleton }}') +{%- endif %} +{%- endif %} +{%- endfor %} + + _bindings_initialized = True diff --git a/generation/bindings_templates/class.tmpl.pxd b/generation/bindings_templates/class.tmpl.pxd new file mode 100644 index 0000000..71305de --- /dev/null +++ b/generation/bindings_templates/class.tmpl.pxd @@ -0,0 +1,19 @@ +{% from 'method.tmpl.pyx' import get_method_bind_register_name, render_method_signature %} + +{% macro render_class_pxd(cls) %} + +cdef class {{ cls.name }}({{ cls.base_class }}): +{% if not cls.base_class %} + cdef godot_object *_gd_ptr + + @staticmethod + cdef inline Object cast_from_variant(const godot_variant *p_gdvar) + + @staticmethod + cdef inline Object cast_from_ptr(godot_object *ptr) + +{% endif %} + @staticmethod + cdef {{ cls.name }} from_ptr(godot_object *_ptr) + +{% endmacro %} diff --git 
a/generation/bindings_templates/class.tmpl.pyi b/generation/bindings_templates/class.tmpl.pyi new file mode 100644 index 0000000..140e4e5 --- /dev/null +++ b/generation/bindings_templates/class.tmpl.pyi @@ -0,0 +1,77 @@ +{# TODO: Handle signals #} +{% macro render_class(cls) %} + +class {{ cls.name }}({{ cls.base_class }}): +{% if not cls.base_class %} + def free(self) -> None: ... + def __init__(self): ... + def __repr__(self) -> str: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __getattr__(self, name: str) -> Any: ... + def __setattr__(self, name: str, value: Any): ... + def call(self, name: str, *args) -> Any: ... + +{% endif %} + +{% if not cls.singleton and cls.instantiable %} + +{% if cls.is_reference %} + def __init__(self): ... +{% else %} + @staticmethod + def new() -> {{ cls.name }}: ... +{% endif %} + +{% if cls.name == "Reference" %} + @classmethod + def new(cls) -> Reference: ... +{% endif %} + +{% endif %} +{% if cls.constants | length %} + # Constants +{% endif %} +{% for key, value in cls.constants.items() %} + {{ key }}: int +{% endfor %} +{% if cls.enums | length %} + # Enums +{% endif %} +{% for enum in cls.enums %} + class {{ enum.name }}(IntFlag): +{% for key, value in enum.values.items() %} + {{ key }}: int +{% endfor %} +{% endfor %} + +{% if cls.methods | length %} + # Methods +{% endif %} +{# TODO: Use typing for params&return #} +{% for method in cls.methods %} +{% if method.name != "free" %} + def {{ method.name }}(self, +{%- for arg in method.arguments %} +{{ arg.name }}: {{ arg.type.py_type }} +{%- if arg.has_default_value %} +={{ arg.default_value }} +{%- endif %} +, +{%- endfor %} +) -> {{ method.return_type.py_type }}: ... +{% endif %} +{% endfor %} + +{% if cls.properties | length %} + # Properties +{% endif %} +{% for prop in cls.properties %} + {{ prop.name }}: {{ prop.type.py_type }} +{% endfor %} + +{% if not cls.constants and not cls.enums and not cls.methods and not cls.properties %} + pass +{% endif %} + +{% endmacro %} diff --git a/generation/bindings_templates/class.tmpl.pyx b/generation/bindings_templates/class.tmpl.pyx new file mode 100644 index 0000000..de39705 --- /dev/null +++ b/generation/bindings_templates/class.tmpl.pyx @@ -0,0 +1,280 @@ +{% from 'method.tmpl.pyx' import render_method, get_method_bind_register_name %} + + +{% macro render_class_gdapi_ptrs_init(cls) %} + +{% if not cls.singleton %} +global __{{ cls.name }}_constructor +__{{ cls.name }}_constructor = gdapi10.godot_get_class_constructor("{{ cls.name }}") +{% endif %} + +{% for method in cls.methods %} +global {{ get_method_bind_register_name(cls, method) }} +{{ get_method_bind_register_name(cls, method) }} = gdapi10.godot_method_bind_get_method("{{ cls.bind_register_name }}", "{{ method.name }}") +{% endfor %} + +{% endmacro %} + + +{# TODO: Handle signals #} +{% macro render_class(cls) %} + +{% if not cls.base_class %} +from cpython.object cimport PyObject_GenericGetAttr, PyObject_GenericSetAttr +{% endif %} + +{% if not cls.singleton %} +cdef godot_class_constructor __{{ cls.name }}_constructor = NULL +{% endif %} + +{% for method in cls.methods %} +cdef godot_method_bind *{{ get_method_bind_register_name(cls, method) }} = NULL +{% endfor %} + +cdef class {{ cls.name }}({{ cls.base_class }}): +{% if not cls.base_class %} + # free is virtual but this is not marked in api.json :'( + def free(self): + with nogil: + gdapi10.godot_object_destroy(self._gd_ptr) + + def __init__(self): + raise RuntimeError( + f"Use `new()` 
method to instantiate non-refcounted Godot object (and don't forget to free it!)" + ) + + def __repr__(self): + return f"<{type(self).__name__} wrapper on 0x{self._gd_ptr:x}>" + + @staticmethod + cdef inline Object cast_from_variant(const godot_variant *p_gdvar): + cdef godot_object *ptr = gdapi10.godot_variant_as_object(p_gdvar) + # Retrieve class + cdef GDString classname = GDString.__new__(GDString) + with nogil: + gdapi10.godot_method_bind_ptrcall( + __methbind__Object__get_class, + ptr, + NULL, + &classname._gd_data + ) + return globals()[str(classname)]._from_ptr(ptr) + + @staticmethod + cdef inline Object cast_from_ptr(godot_object *ptr): + # Retrieve class + cdef GDString classname = GDString.__new__(GDString) + with nogil: + gdapi10.godot_method_bind_ptrcall( + __methbind__Object__get_class, + ptr, + NULL, + &classname._gd_data + ) + return globals()[str(classname)]._from_ptr(ptr) + + def __eq__(self, other): + try: + return self._gd_ptr == (<{{ cls.name }}>other)._gd_ptr + except TypeError: + return False + + def __ne__(self, other): + try: + return self._gd_ptr != (<{{ cls.name }}>other)._gd_ptr + except TypeError: + return True + + def __getattr__(self, name): + cdef GDString gdname = GDString(name) + cdef GDString gdnamefield = GDString("name") + + # If a script is attached to the object, we expose its methods here + if not hasattr(type(self), '__exposed_python_class'): + if self.has_method(name): + + def _call(*args): + return {{ cls.name }}.callv(self, gdname, Array(args)) + + return _call + # from functools import partial + # return partial(self.call, gdname) + + elif any(x for x in self.get_property_list() if x[gdnamefield] == gdname): + # TODO: Godot currently lacks a `has_property` method + return self.get(gdname) + + raise AttributeError( + f"`{type(self).__name__}` object has no attribute `{name}`" + ) + + def __setattr__(self, name, value): + cdef GDString gdname = GDString(name) + cdef GDString gdnamefield = GDString("name") + + if hasattr(type(self), '__exposed_python_class'): + PyObject_GenericSetAttr(self, name, value) + return + + # Not an exposed Python class: try to find the property on the + # Godot object itself (or on its attached script if it has one) + else: + if any(x for x in self.get_property_list() if x[gdnamefield] == gdname): + # TODO: Godot currently lacks a `has_property` method + self.set(name, value) + return + + raise AttributeError( + f"`{type(self).__name__}` object has no attribute `{name}`" + ) + + def call(self, name, *args): + return self.callv(name, Array(args)) + +{% endif %} + +{% if not cls.singleton and cls.instantiable %} + +{% if cls.is_reference %} + def __init__(self): + if __{{ cls.name }}_constructor == NULL: + raise NotImplementedError(__ERR_MSG_BINDING_NOT_AVAILABLE) + cdef godot_bool __ret + with nogil: + self._gd_ptr = __{{ cls["name"] }}_constructor() + + if self._gd_ptr is NULL: + raise MemoryError + + gdapi10.godot_method_bind_ptrcall( + __methbind__Reference__init_ref, + self._gd_ptr, + NULL, + &__ret + ) +{% else %} + @staticmethod + def new(): + if __{{ cls.name }}_constructor == NULL: + raise NotImplementedError(__ERR_MSG_BINDING_NOT_AVAILABLE) + # Call to __new__ bypasses __init__ constructor + cdef {{ cls.name }} wrapper = {{ cls.name }}.__new__({{ cls.name }}) + with nogil: + wrapper._gd_ptr = __{{ cls.name }}_constructor() + if wrapper._gd_ptr is NULL: + raise MemoryError + return wrapper +{% endif %} + +{% if cls.name == "Reference" %} + @classmethod + def new(cls): + raise RuntimeError(f"Refcounted Godot object must be created with `{ 
cls.__name__ }()`") + + def __dealloc__(self): + cdef godot_bool __ret + if self._gd_ptr == NULL: + return + with nogil: + gdapi10.godot_method_bind_ptrcall( + __methbind__Reference__unreference, + self._gd_ptr, + NULL, + &__ret + ) + if __ret: + gdapi10.godot_object_destroy(self._gd_ptr) +{% endif %} + +{% endif %} + + @staticmethod + cdef {{ cls.name }} from_ptr(godot_object *_ptr): + # Call to __new__ bypasses __init__ constructor + cdef {{ cls.name }} wrapper = {{ cls.name }}.__new__({{ cls.name }}) + wrapper._gd_ptr = _ptr +{% if cls.is_reference %} + # Note we steal the reference from the caller given we + # don't call `Reference.reference` here +{% endif %} + return wrapper + +{% if not cls.singleton and cls.instantiable %} + @classmethod + def _new(cls): + cdef godot_object* ptr = __{{ cls.name }}_constructor() + if ptr is NULL: + raise MemoryError + return ptr +{% endif %} + + @staticmethod + def _from_ptr(ptr): + # Call to __new__ bypasses __init__ constructor + cdef {{ cls.name }} wrapper = {{ cls.name }}.__new__({{ cls.name }}) + # /!\ doing `ptr` would return the address of + # the PyObject instead of casting it value ! + wrapper._gd_ptr = ptr +{% if cls.is_reference %} + # Note we steal the reference from the caller given we + # don't call `Reference.reference` here +{% endif %} + return wrapper + +{% if cls.constants | length %} + # Constants +{% endif %} +{% for key, value in cls.constants.items() %} + {{ key }} = {{ value }} +{% endfor %} +{% if cls.enums | length %} + # Enums +{% endif %} +{% for enum in cls.enums %} + {{ enum.name }} = IntFlag("{{ enum.name }}", { +{% for key, value in enum.values.items() %} + "{{ key }}": {{ value }}, +{% endfor %} + }) +{% endfor %} + +{% if cls.methods | length %} + # Methods +{% endif %} +{# TODO: Use typing for params&return #} +{% for method in cls.methods %} +{% if method.name != "free" %} + {{ render_method(cls, method) | indent }} +{% endif %} +{% endfor %} +{% if cls.properties | length %} + # Properties +{% endif %} +{# +TODO: some properties has / in there name +TODO: some properties pass a parameter to the setter/getter +TODO: see PinJoint.params/bias for a good example +#} +{% for prop in cls.properties %} + + @property + def {{ prop.name }}(self): +{% if prop.is_supported %} + return self.{{ prop.getter }}({% if prop.index is not none %}{{ prop.index }}{% endif %}) +{% else %} + raise NotImplementedError("{{prop.unsupported_reason}}") +{% endif %} + +{% if prop.setter %} + @{{ prop.name }}.setter + def {{ prop.name }}(self, val): +{% if prop.is_supported %} + self.{{ prop.setter }}({% if prop.index is not none %}{{ prop.index }},{% endif %}val) +{% else %} + raise NotImplementedError("{{prop.unsupported_reason}}") +{% endif %} +{% endif %} + +{% endfor %} + +{% endmacro %} diff --git a/generation/bindings_templates/method.tmpl.pyx b/generation/bindings_templates/method.tmpl.pyx new file mode 100644 index 0000000..c069325 --- /dev/null +++ b/generation/bindings_templates/method.tmpl.pyx @@ -0,0 +1,166 @@ +{% macro get_method_bind_register_name(cls, method) -%} +__methbind__{{ cls.name }}__{{ method.name }} +{%- endmacro %} + + +{% macro render_method_c_signature(method) %} +{{ method.return_type.c_type }} {{ method.name }}(self, +{%- for arg in method.arguments %} + {{ arg.type.c_type }} {{ arg.name }}, +{%- endfor %} +) +{%- endmacro %} + + +{% macro render_method_signature(method) %} +{{ method.name }}(self, +{%- for arg in method.arguments %} +{%- if arg.type.c_type in ("godot_string", "godot_node_path") %} + object {{ 
arg.name }} +{%- else %} + {{ arg.type.cy_type }} {{ arg.name }} +{#- `not None` is only for Python arguments so no need for base type #} +{#- if default value is NULL, None should be allowed #} +{%- if not arg.type.is_base_type and not (arg.has_default_value and arg.default_value == "None") %} + not None +{%- endif %} +{%- endif %} +{%- if arg.has_default_value %} +={{ arg.default_value }} +{%- endif %} +, +{%- endfor %} +) +{%- endmacro %} + + +{% macro _render_method_return(method, retval="__ret") %} +{% if method.return_type.c_type == "void" %} +return +{% elif method.return_type.is_object %} +if {{ retval }} == NULL: + return None +else: + return Object.cast_from_ptr({{ retval }}) +{% elif method.return_type.c_type == "godot_variant" %} +try: + return godot_variant_to_pyobj(&{{ retval }}) +finally: + with nogil: + gdapi10.godot_variant_destroy(&{{ retval }}) +{% elif method.return_type.is_enum %} +return {{ method.return_type.py_type }}({{ retval }}) +{% else %} +return {{ retval }} +{% endif %} +{%- endmacro %} + + +{% macro _render_method_cook_args(method, argsval="__args") %} +{% if (method.arguments | length ) != 0 %} +cdef const void *{{ argsval }}[{{ method.arguments | length }}] +{% endif %} +{% for arg in method.arguments %} +{% set i = loop.index - 1 %} +# {{ arg.type.c_type }} {{ arg.name }} +{% if arg.type.c_type == "godot_string" %} +cdef GDString __gdstr_{{ arg.name }} = ensure_is_gdstring({{ arg.name }}) +{{ argsval }}[{{ i }}] = (&__gdstr_{{ arg.name }}._gd_data) +{% elif arg.type.c_type == "godot_node_path" %} +cdef NodePath __nodepath_{{ arg.name }} = ensure_is_nodepath({{ arg.name }}) +{{ argsval }}[{{ i }}] = (&__nodepath_{{ arg.name }}._gd_data) +{% elif arg.type.is_object %} +{%- if arg.has_default_value and arg.default_value == "None" %} +{{ argsval }}[{{ i }}] = {{ arg.name }}._gd_ptr if {{ arg.name }} is not None else NULL +{%- else %} +{{ argsval }}[{{ i }}] = {{ arg.name }}._gd_ptr +{%- endif %} +{% elif arg.type.c_type == "godot_variant" %} +cdef godot_variant __var_{{ arg.name }} +pyobj_to_godot_variant({{ arg.name }}, &__var_{{ arg.name }}) +{{ argsval }}[{{ i }}] = (&__var_{{ arg.name }}) +{% elif arg.type.is_builtin %} +{{ argsval }}[{{ i }}] = (&{{ arg.name }}._gd_data) +{% elif arg.type.c_type == "godot_real" %} +# ptrcall does not work with single precision floats, so we must convert to a double +cdef double {{ arg.name }}_d = {{ arg.name }}; +{{ argsval }}[{{ i }}] = &{{ arg.name }}_d +{% else %} +{{ argsval }}[{{ i }}] = &{{ arg.name }} +{% endif %} +{% endfor %} +{%- endmacro %} + + +{% macro _render_method_destroy_args(method) %} +{% for arg in method.arguments %} +{% set i = loop.index - 1 %} +{% if arg.type.c_type == "godot_variant" %} +with nogil: + gdapi10.godot_variant_destroy(&__var_{{ arg.name }}) +{% endif %} +{% endfor %} +{%- endmacro %} + + +{% macro _render_method_call(cls, method, argsval="__args", retval="__ret") %} +{% if method.return_type.c_type == "void" %} +{% set retval_as_arg = "NULL" %} +{% elif method.return_type.is_object %} +# It's important to initialize this pointer to null given +# in case of Reference, Godot will try to decrease the +# refcount if the pointer is valid ! 
+# (see https://github.com/godotengine/godot/issues/35609) +cdef godot_object *{{ retval }} = NULL +{% set retval_as_arg = "&{}".format(retval) %} +{% elif method.return_type.c_type == "godot_variant" %} +cdef godot_variant {{ retval }} +{% set retval_as_arg = "&{}".format(retval) %} +{% elif method.return_type.is_builtin %} +{% set cy_type = method.return_type.cy_type %} +cdef {{ cy_type }} {{ retval }} = {{ cy_type }}.__new__({{ cy_type }}) +{% set retval_as_arg = "&{}._gd_data".format(retval) %} +{% elif method.return_type.c_type == "godot_real" %} +# ptrcall does not work with single precision floats, so we must convert to a double +cdef double {{ retval }} +{% set retval_as_arg = "&{}".format(retval) %} +{% else %} +cdef {{ method.return_type.c_type }} {{ retval }} +{% set retval_as_arg = "&{}".format(retval) %} +{% endif %} +if {{ get_method_bind_register_name(cls, method) }} == NULL: + raise NotImplementedError(__ERR_MSG_BINDING_NOT_AVAILABLE) +with nogil: + gdapi10.godot_method_bind_ptrcall( + {{ get_method_bind_register_name(cls, method) }}, + self._gd_ptr, + {% if (method.arguments | length ) != 0 %} + {{ argsval }}, + {%else %} + NULL, + {% endif %} + {{ retval_as_arg }} + ) +{%- endmacro %} + + +{% macro render_method(cls, method) %} +# {{ render_method_c_signature(method) }} +def {{ render_method_signature(method) }}: +{% if method.is_virtual %} + cdef Array args = Array() +{% for arg in method.arguments %} + args.append({{ arg.name }}) +{% endfor %} + return Object.callv(self, "{{ method.name }}", args) +{% else %} +{% if method.is_supported %} + {{ _render_method_cook_args(method) | indent }} + {{ _render_method_call(cls, method) | indent }} + {{ _render_method_destroy_args(method) | indent }} + {{ _render_method_return(method) | indent }} +{% else %} + raise NotImplementedError("{{method.unsupported_reason}}") +{% endif %} +{% endif %} +{% endmacro %} diff --git a/generation/builtins_templates/aabb.tmpl.pxi b/generation/builtins_templates/aabb.tmpl.pxi new file mode 100644 index 0000000..5108bd7 --- /dev/null +++ b/generation/builtins_templates/aabb.tmpl.pxi @@ -0,0 +1,75 @@ +{%- block pxd_header -%} +{%- endblock -%} +{%- block pyx_header -%} +{%- endblock -%} + +@cython.final +cdef class AABB: +{% block cdef_attributes %} + cdef godot_aabb _gd_data +{% endblock %} + +{% block python_defs %} + def __init__(self, Vector3 pos not None=Vector3(), Vector3 size not None=Vector3()): + {{ force_mark_rendered("godot_aabb_new" )}} + gdapi10.godot_aabb_new(&self._gd_data, &pos._gd_data, &size._gd_data) + + def __repr__(self): + return f"" + + @property + def position(AABB self) -> Vector3: + cdef Vector3 ret = Vector3.__new__(Vector3) + {{ force_mark_rendered("godot_aabb_get_position" )}} + ret._gd_data = gdapi10.godot_aabb_get_position(&self._gd_data) + return ret + + @position.setter + def position(AABB self, Vector3 val not None) -> None: + {{ force_mark_rendered("godot_aabb_set_position" )}} + gdapi10.godot_aabb_set_position(&self._gd_data, &val._gd_data) + + @property + def size(AABB self) -> Vector3: + cdef Vector3 ret = Vector3.__new__(Vector3) + {{ force_mark_rendered("godot_aabb_get_size" )}} + ret._gd_data = gdapi10.godot_aabb_get_size(&self._gd_data) + return ret + + @size.setter + def size(AABB self, Vector3 val not None) -> None: + {{ force_mark_rendered("godot_aabb_set_size" )}} + gdapi10.godot_aabb_set_size(&self._gd_data, &val._gd_data) + + @property + def end(AABB self) -> Vector3: + cdef godot_vector3 position = gdapi10.godot_aabb_get_position(&self._gd_data) + 
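+        # (`end` is not stored anywhere: it is recomputed as `position + size`
+        # right below.) A hedged usage sketch of this property, assuming the
+        # generated Python bindings:
+        #     bb = AABB(Vector3(1, 1, 1), Vector3(2, 2, 2))
+        #     assert bb.end == Vector3(3, 3, 3)   # 1 + 2 on each axis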
cdef godot_vector3 size = gdapi10.godot_aabb_get_size(&self._gd_data) + cdef Vector3 ret = Vector3.__new__(Vector3) + ret._gd_data = gdapi10.godot_vector3_operator_add(&position, &size) + return ret + + {{ render_operator_eq() | indent }} + {{ render_operator_ne() | indent }} + {{ render_method("as_string") | indent }} + {{ render_method("get_area") | indent }} + {{ render_method("has_no_area") | indent }} + {{ render_method("has_no_surface") | indent }} + {{ render_method("intersects") | indent }} + {{ render_method("encloses") | indent }} + {{ render_method("merge") | indent }} + {{ render_method("intersection") | indent }} + {{ render_method("intersects_plane") | indent }} + {{ render_method("intersects_segment") | indent }} + {{ render_method("has_point") | indent }} + {{ render_method("get_support") | indent }} + {{ render_method("get_longest_axis") | indent }} + {{ render_method("get_longest_axis_index") | indent }} + {{ render_method("get_longest_axis_size") | indent }} + {{ render_method("get_shortest_axis") | indent }} + {{ render_method("get_shortest_axis_index") | indent }} + {{ render_method("get_shortest_axis_size") | indent }} + {{ render_method("expand") | indent }} + {{ render_method("grow") | indent }} + {{ render_method("get_endpoint") | indent }} +{% endblock %} diff --git a/generation/builtins_templates/array.tmpl.pxi b/generation/builtins_templates/array.tmpl.pxi new file mode 100644 index 0000000..c47627a --- /dev/null +++ b/generation/builtins_templates/array.tmpl.pxi @@ -0,0 +1,249 @@ +{%- block pxd_header %} +{% endblock -%} +{%- block pyx_header %} +{% endblock -%} + +{# TODO: conversion from pool arrays is not supported #} +{{ force_mark_rendered("godot_array_new_pool_byte_array") }} +{{ force_mark_rendered("godot_array_new_pool_color_array") }} +{{ force_mark_rendered("godot_array_new_pool_int_array") }} +{{ force_mark_rendered("godot_array_new_pool_real_array") }} +{{ force_mark_rendered("godot_array_new_pool_string_array") }} +{{ force_mark_rendered("godot_array_new_pool_vector2_array") }} +{{ force_mark_rendered("godot_array_new_pool_vector3_array") }} +{# We can't do const in Python #} +{{ force_mark_rendered("godot_array_operator_index_const") }} + +@cython.final +cdef class Array: +{% block cdef_attributes %} + cdef godot_array _gd_data + + @staticmethod + cdef inline Array new() + + @staticmethod + cdef inline Array from_ptr(const godot_array *_ptr) + + cdef inline Array operator_getslice(self, godot_int start, godot_int stop, godot_int step) + cdef inline bint operator_equal(self, Array other) + cdef inline Array operator_add(self, Array items) + cdef inline operator_iadd(self, Array items) +{% endblock %} + +{% block python_defs %} + def __init__(self, iterable=None): + {{ force_mark_rendered("godot_array_new") }} + {{ force_mark_rendered("godot_array_duplicate") }} + if not iterable: + gdapi10.godot_array_new(&self._gd_data) + elif isinstance(iterable, Array): + self._gd_data = gdapi11.godot_array_duplicate(&(iterable)._gd_data, False) + # TODO: handle Pool*Array + else: + gdapi10.godot_array_new(&self._gd_data) + for x in iterable: + self.append(x) + + @staticmethod + cdef inline Array new(): + # Call to __new__ bypasses __init__ constructor + cdef Array ret = Array.__new__(Array) + gdapi10.godot_array_new(&ret._gd_data) + return ret + + @staticmethod + cdef inline Array from_ptr(const godot_array *_ptr): + # Call to __new__ bypasses __init__ constructor + cdef Array ret = Array.__new__(Array) + # `godot_array` is a cheap structure pointing on a 
refcounted vector
+        # of variants. Despite what its name suggests, `godot_array_new_copy`
+        # only increments the refcount of the underlying structure.
+        {{ force_mark_rendered("godot_array_new_copy") }}
+        gdapi10.godot_array_new_copy(&ret._gd_data, _ptr)
+        return ret
+
+    def __dealloc__(self):
+        # /!\ if `__init__` is skipped, `_gd_data` must be initialized by
+        # hand, otherwise we will get a segfault here
+        {{ force_mark_rendered("godot_array_destroy") }}
+        gdapi10.godot_array_destroy(&self._gd_data)
+
+    def __repr__(self):
+        return f"<{type(self).__name__}([{', '.join([repr(x) for x in self])}])>"
+
+    # Operators
+
+    cdef inline Array operator_getslice(self, godot_int start, godot_int stop, godot_int step):
+        {{ force_mark_rendered("godot_array_slice") }}
+        cdef Array ret = Array.__new__(Array)
+        ret._gd_data = gdapi12.godot_array_slice(&self._gd_data, start, stop, step, False)
+        return ret
+
+    def __getitem__(self, index):
+        {{ force_mark_rendered("godot_array_operator_index") }}
+        cdef godot_int size = self.size()
+        cdef godot_int start
+        cdef godot_int stop
+        cdef godot_int step
+
+        if isinstance(index, slice):
+            step = index.step if index.step is not None else 1
+            if step == 0:
+                raise ValueError("slice step cannot be zero")
+            elif step > 0:
+                start = index.start if index.start is not None else 0
+                stop = index.stop if index.stop is not None else size
+            else:
+                start = index.start if index.start is not None else size
+                stop = index.stop if index.stop is not None else -size - 1
+            return Array.operator_getslice(self, start, stop, step)
+
+        if index < 0:
+            index = index + size
+        if index < 0 or index >= size:
+            raise IndexError("list index out of range")
+
+        cdef godot_variant *p_ret = gdapi10.godot_array_operator_index(&self._gd_data, index)
+        return godot_variant_to_pyobj(p_ret)
+
+    # TODO: support slice
+    def __setitem__(self, godot_int index, object value):
+        cdef godot_int size = self.size()
+        index = size + index if index < 0 else index
+        if index < 0 or index >= size:
+            raise IndexError("list index out of range")
+
+        cdef godot_variant *p_ret = gdapi10.godot_array_operator_index(&self._gd_data, index)
+        gdapi10.godot_variant_destroy(p_ret)
+        pyobj_to_godot_variant(value, p_ret)
+
+    # TODO: support slice
+    def __delitem__(self, godot_int index):
+        cdef godot_int size = self.size()
+        index = size + index if index < 0 else index
+        if index < 0 or index >= size:
+            raise IndexError("list index out of range")
+
+        gdapi10.godot_array_remove(&self._gd_data, index)
+
+    def __iter__(self):
+        # TODO: should mutation during iteration raise an exception ?
+        cdef int i
+        for i in range(self.size()):
+            yield self.get(i)
+
+    def __copy__(self):
+        return self.duplicate(False)
+
+    def __deepcopy__(self):
+        return self.duplicate(True)
+
+    cdef inline bint operator_equal(self, Array other):
+        # TODO: `godot_array_operator_equal` is missing from the gdapi, submit a PR ?
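+        # Element-wise comparison fallback: compare sizes first, then each
+        # variant pairwise. A pure-Python analogue of the loop below (an
+        # illustrative sketch only; `variants_equal` stands in for
+        # `godot_variant_operator_equal`):
+        #     def arrays_equal(a, b):
+        #         if len(a) != len(b):
+        #             return False
+        #         return all(variants_equal(x, y) for x, y in zip(a, b))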
+ cdef godot_int size = self.size() + if size != other.size(): + return False + cdef int i + for i in range(size): + if not gdapi10.godot_variant_operator_equal( + gdapi10.godot_array_operator_index(&self._gd_data, i), + gdapi10.godot_array_operator_index(&other._gd_data, i) + ): + return False + return True + + def __eq__(self, other): + try: + return Array.operator_equal(self, other) + except TypeError: + return False + + def __ne__(self, other): + try: + return not Array.operator_equal(self, other) + except TypeError: + return True + + cdef inline operator_iadd(self, Array items): + cdef godot_int self_size = self.size() + cdef godot_int items_size = items.size() + gdapi10.godot_array_resize(&self._gd_data, self_size + items_size) + cdef int i + for i in range(items_size): + Array.set(self, self_size + i, items.get(i)) + + # TODO: support __iadd__ for other types than Array ? + def __iadd__(self, items not None): + try: + Array.operator_iadd(self, items) + except TypeError: + for x in items: + self.append(x) + return self + + cdef inline Array operator_add(self, Array items): + cdef godot_int self_size = self.size() + cdef godot_int items_size = items.size() + cdef Array ret = Array.new() + gdapi10.godot_array_resize(&ret._gd_data, self_size + items_size) + cdef int i + for i in range(self_size): + Array.set(ret, i, self.get(i)) + for i in range(items_size): + Array.set(ret, self_size + i, items.get(i)) + return ret + + # TODO: support __add__ for other types than Array ? + def __add__(self, items not None): + try: + return Array.operator_add(self, items) + except TypeError: + ret = Array.duplicate(self, False) + for x in items: + ret.append(x) + return ret + + {{ render_method("size", py_name="__len__") | indent }} + {{ render_method("hash", py_name="__hash__") | indent }} + {{ render_method("has", py_name="__contains__") | indent }} + + {{ render_method("hash") | indent }} + {{ render_method("size") | indent }} + {{ render_method("duplicate") | indent }} + {{ render_method("get") | indent }} + {{ render_method("set") | indent }} + {{ render_method("append") | indent }} + {{ render_method("clear") | indent }} + {{ render_method("empty") | indent }} + {{ render_method("count") | indent }} + {{ render_method("erase") | indent }} + {{ render_method("front") | indent }} + {{ render_method("back") | indent }} + {{ render_method("find") | indent }} + {{ render_method("find_last") | indent }} + {{ render_method("insert") | indent }} + {{ render_method("invert") | indent }} + {{ render_method("pop_back") | indent }} + {{ render_method("pop_front") | indent }} + {{ render_method("push_back") | indent }} + {{ render_method("push_front") | indent }} + {{ render_method("remove") | indent }} + {{ render_method("resize") | indent }} + {{ render_method("rfind") | indent }} + {{ render_method("sort") | indent }} + {#- TODO: opaque object as param is not supported #} + {{- force_mark_rendered("godot_array_sort_custom") }} + {#- {{ render_method("sort_custom") | indent }} #} + {{ render_method("bsearch") | indent }} + {#- TODO: opaque object as param is not supported #} + {{- force_mark_rendered("godot_array_bsearch_custom") }} + {#- {{ render_method("bsearch_custom") | indent }} #} + {{ render_method("max") | indent }} + {{ render_method("min") | indent }} + {{ render_method("shuffle") | indent }} +{% endblock %} + +{%- block python_consts %} +{% endblock %} diff --git a/generation/builtins_templates/basis.tmpl.pxi b/generation/builtins_templates/basis.tmpl.pxi new file mode 100644 index 
0000000..055e72e
--- /dev/null
+++ b/generation/builtins_templates/basis.tmpl.pxi
@@ -0,0 +1,135 @@
+{%- block pxd_header -%}
+{%- endblock -%}
+{%- block pyx_header -%}
+
+cdef inline Basis Basis_multiply_vector(Basis self, Basis b):
+    cdef Basis ret = Basis.__new__(Basis)
+    {{ force_mark_rendered("godot_basis_operator_multiply_vector") }}
+    ret._gd_data = gdapi10.godot_basis_operator_multiply_vector(&self._gd_data, &b._gd_data)
+    return ret
+
+cdef inline Basis Basis_multiply_scalar(Basis self, godot_real b):
+    cdef Basis ret = Basis.__new__(Basis)
+    {{ force_mark_rendered("godot_basis_operator_multiply_scalar") }}
+    ret._gd_data = gdapi10.godot_basis_operator_multiply_scalar(&self._gd_data, b)
+    return ret
+
+{%- endblock %}
+
+@cython.final
+cdef class Basis:
+{% block cdef_attributes %}
+    cdef godot_basis _gd_data
+{% endblock %}
+
+{% block python_defs %}
+    def __init__(self, Vector3 x not None=Vector3.RIGHT, Vector3 y not None=Vector3.UP, Vector3 z not None=Vector3.BACK):
+        {{ force_mark_rendered("godot_basis_new") }} {# We always use the `with_rows` version #}
+        {{ force_mark_rendered("godot_basis_new_with_rows") }}
+        gdapi10.godot_basis_new_with_rows(&self._gd_data, &(x)._gd_data, &(y)._gd_data, &(z)._gd_data)
+
+    @staticmethod
+    def from_euler(from_):
+        cdef Basis ret = Basis.__new__(Basis)
+        try:
+            {{ force_mark_rendered("godot_basis_new_with_euler") }}
+            gdapi10.godot_basis_new_with_euler(&ret._gd_data, &(<Vector3?>from_)._gd_data)
+            return ret
+        except TypeError:
+            pass
+        try:
+            {{ force_mark_rendered("godot_basis_new_with_euler_quat") }}
+            gdapi10.godot_basis_new_with_euler_quat(&ret._gd_data, &(<Quat?>from_)._gd_data)
+            return ret
+        except TypeError:
+            raise TypeError('`from_` must be Quat or Vector3')
+
+    @staticmethod
+    def from_axis_angle(Vector3 axis not None, phi):
+        cdef Basis ret = Basis.__new__(Basis)
+        {{ force_mark_rendered("godot_basis_new_with_axis_and_angle") }}
+        gdapi10.godot_basis_new_with_axis_and_angle(&ret._gd_data, &axis._gd_data, phi)
+        return ret
+
+    def __repr__(self):
+        return f"<Basis({self.as_string()})>"
+
+    @property
+    def x(Basis self) -> Vector3:
+        cdef Vector3 ret = Vector3.__new__(Vector3)
+        {{ force_mark_rendered("godot_basis_get_axis") }}
+        ret._gd_data = gdapi10.godot_basis_get_axis(&self._gd_data, 0)
+        return ret
+
+    @x.setter
+    def x(Basis self, Vector3 val not None) -> None:
+        {{ force_mark_rendered("godot_basis_set_axis") }}
+        gdapi10.godot_basis_set_axis(&self._gd_data, 0, &val._gd_data)
+
+    @property
+    def y(Basis self) -> Vector3:
+        cdef Vector3 ret = Vector3.__new__(Vector3)
+        {{ force_mark_rendered("godot_basis_get_axis") }}
+        ret._gd_data = gdapi10.godot_basis_get_axis(&self._gd_data, 1)
+        return ret
+
+    @y.setter
+    def y(Basis self, Vector3 val not None) -> None:
+        {{ force_mark_rendered("godot_basis_set_axis") }}
+        gdapi10.godot_basis_set_axis(&self._gd_data, 1, &val._gd_data)
+
+    @property
+    def z(Basis self) -> Vector3:
+        cdef Vector3 ret = Vector3.__new__(Vector3)
+        {{ force_mark_rendered("godot_basis_get_axis") }}
+        ret._gd_data = gdapi10.godot_basis_get_axis(&self._gd_data, 2)
+        return ret
+
+    @z.setter
+    def z(Basis self, Vector3 val not None) -> None:
+        {{ force_mark_rendered("godot_basis_set_axis") }}
+        gdapi10.godot_basis_set_axis(&self._gd_data, 2, &val._gd_data)
+
+    {{ render_operator_eq() | indent }}
+    {{ render_operator_ne() | indent }}
+
+    {{ render_method("operator_add", py_name="__add__") | indent }}
+    {{ render_method("operator_subtract", py_name="__sub__") | indent }}
+
+    def __mul__(Basis self, val):
+        cdef Basis _val
+
+        try:
+            _val = val
+
+        except
TypeError: + return Basis_multiply_scalar(self, val) + + else: + return Basis_multiply_vector(self, _val) + + {{ render_method("as_string") | indent }} + {{ render_method("inverse") | indent }} + {{ render_method("transposed") | indent }} + {{ render_method("orthonormalized") | indent }} + {{ render_method("determinant") | indent }} + {{ render_method("rotated") | indent }} + {{ render_method("scaled") | indent }} + {{ render_method("get_scale") | indent }} + {{ render_method("get_euler") | indent }} + {{ render_method("get_quat") | indent }} + {{ render_method("set_quat") | indent }} + {{ render_method("set_axis_angle_scale") | indent }} + {{ render_method("set_euler_scale") | indent }} + {{ render_method("set_quat_scale") | indent }} + {{ render_method("tdotx") | indent }} + {{ render_method("tdoty") | indent }} + {{ render_method("tdotz") | indent }} + {{ render_method("xform") | indent }} + {{ render_method("xform_inv") | indent }} + {{ render_method("get_orthogonal_index") | indent }} + {{ render_method("get_elements") | indent }} + {{ render_method("get_row") | indent }} + {{ render_method("set_row") | indent }} + {{ render_method("slerp") | indent }} +{% endblock %} diff --git a/generation/builtins_templates/builtins.tmpl.pxd b/generation/builtins_templates/builtins.tmpl.pxd new file mode 100644 index 0000000..43ddc11 --- /dev/null +++ b/generation/builtins_templates/builtins.tmpl.pxd @@ -0,0 +1,46 @@ +# /!\ Autogenerated code, modifications will be lost /!\ +# see `generation/generate_builtins.py` + +cimport cython + +from godot._hazmat.gdnative_api_struct cimport * +from godot.pool_arrays cimport ( + PoolIntArray, + PoolRealArray, + PoolByteArray, + PoolVector2Array, + PoolVector3Array, + PoolColorArray, + PoolStringArray, +) + +{% set render_target = "rid" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "vector3" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "vector2" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "aabb" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "basis" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "color" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "gdstring" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "rect2" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "transform2d" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "plane" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "quat" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "transform" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "node_path" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "dictionary" %} +{% include 'render.tmpl.pxd' with context %} +{% set render_target = "array" %} +{% include 'render.tmpl.pxd' with context %} diff --git a/generation/builtins_templates/builtins.tmpl.pyi b/generation/builtins_templates/builtins.tmpl.pyi new file mode 100644 index 0000000..49c36d6 --- /dev/null +++ b/generation/builtins_templates/builtins.tmpl.pyi @@ -0,0 +1,35 @@ +# /!\ Autogenerated code, modifications will be lost /!\ +# see `generation/generate_builtins.py` + +from typing import Union + +{% set render_target = "rid" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "vector3" %} +{% include 'render.tmpl.pyi' with context %} +{% set 
render_target = "vector2" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "aabb" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "basis" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "color" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "gdstring" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "rect2" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "transform2d" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "plane" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "quat" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "transform" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "node_path" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "dictionary" %} +{% include 'render.tmpl.pyi' with context %} +{% set render_target = "array" %} +{% include 'render.tmpl.pyi' with context %} diff --git a/generation/builtins_templates/builtins.tmpl.pyx b/generation/builtins_templates/builtins.tmpl.pyx new file mode 100644 index 0000000..0399776 --- /dev/null +++ b/generation/builtins_templates/builtins.tmpl.pyx @@ -0,0 +1,54 @@ +# /!\ Autogenerated code, modifications will be lost /!\ +# see `generation/generate_builtins.py` + +from typing import Union + +cimport cython + +from godot._hazmat.gdnative_api_struct cimport * +from godot._hazmat.gdapi cimport ( + pythonscript_gdapi10 as gdapi10, + pythonscript_gdapi11 as gdapi11, + pythonscript_gdapi12 as gdapi12, +) +from godot._hazmat.conversion cimport * +from godot.pool_arrays cimport ( + PoolIntArray, + PoolRealArray, + PoolByteArray, + PoolVector2Array, + PoolVector3Array, + PoolColorArray, + PoolStringArray, +) + +{% set render_target = "rid" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "vector3" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "vector2" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "aabb" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "basis" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "color" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "gdstring" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "rect2" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "transform2d" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "plane" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "quat" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "transform" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "node_path" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "dictionary" %} +{% include 'render.tmpl.pyx' with context %} +{% set render_target = "array" %} +{% include 'render.tmpl.pyx' with context %} diff --git a/generation/builtins_templates/color.tmpl.pxi b/generation/builtins_templates/color.tmpl.pxi new file mode 100644 index 0000000..ba30514 --- /dev/null +++ b/generation/builtins_templates/color.tmpl.pxi @@ -0,0 +1,244 @@ +{%- block pxd_header %} +{% endblock -%} +{%- block pyx_header %} +from libc.stdint cimport uint8_t +{% endblock -%} + + +@cython.final +cdef class Color: +{% block cdef_attributes %} + cdef godot_color _gd_data +{% endblock %} + 
+{% block python_defs %} + def __init__(self, godot_real r=0, godot_real g=0, godot_real b=0, a=None): + if a is None: + {{ force_mark_rendered("godot_color_new_rgb")}} + gdapi10.godot_color_new_rgb(&self._gd_data, r, g, b) + else: + {{ force_mark_rendered("godot_color_new_rgba")}} + gdapi10.godot_color_new_rgba(&self._gd_data, r, g, b, a) + + def __repr__(self): + return f"" + + @staticmethod + def from_resource(Resource resource not None): + # Call to __new__ bypasses __init__ constructor + cdef RID ret = RID.__new__(RID) + gdapi10.godot_rid_new_with_resource(&ret._gd_data, resource._gd_ptr) + return ret + + @property + def r8(Color self): + return int(self.r * 256) + + @r8.setter + def r8(Color self, uint8_t val): + self.r = (float(val) / 256) + + @property + def g8(Color self): + return int(self.g * 256) + + @g8.setter + def g8(Color self, uint8_t val): + self.g = (float(val) / 256) + + @property + def b8(Color self): + return int(self.b * 256) + + @b8.setter + def b8(Color self, uint8_t val): + self.b = (float(val) / 256) + + @property + def a8(Color self): + return int(self.a * 256) + + @a8.setter + def a8(Color self, uint8_t val): + self.a = (float(val) / 256) + + {{ render_property("r", getter="get_r", setter="set_r") | indent }} + {{ render_property("g", getter="get_g", setter="set_g") | indent }} + {{ render_property("b", getter="get_b", setter="set_b") | indent }} + {{ render_property("a", getter="get_a", setter="set_a") | indent }} + + {{ render_property("h", getter="get_h") | indent }} + {{ render_property("s", getter="get_s") | indent }} + {{ render_property("v", getter="get_v") | indent }} + + {{ render_operator_eq() | indent }} + {{ render_operator_ne() | indent }} + {{ render_operator_lt() | indent }} + + {{ render_method("as_string") | indent }} + {{ render_method("to_rgba32") | indent }} + {{ render_method("to_abgr32") | indent }} + {{ render_method("to_abgr64") | indent }} + {{ render_method("to_argb64") | indent }} + {{ render_method("to_rgba64") | indent }} + {{ render_method("to_argb32") | indent }} + {{ render_method("gray") | indent }} + {{ render_method("inverted") | indent }} + {{ render_method("contrasted") | indent }} + {{ render_method("linear_interpolate") | indent }} + {{ render_method("blend") | indent }} + {{ render_method("darkened") | indent }} + {{ render_method("from_hsv") | indent }} + {{ render_method("lightened") | indent }} + {{ render_method("to_html") | indent }} + +{% endblock %} + +{%- block python_consts %} + # TODO: gdapi should expose those constants to us + GRAY = Color(0.75, 0.75, 0.75) + ALICEBLUE = Color(0.94, 0.97, 1) + ANTIQUEWHITE = Color(0.98, 0.92, 0.84) + AQUA = Color(0, 1, 1) + AQUAMARINE = Color(0.5, 1, 0.83) + AZURE = Color(0.94, 1, 1) + BEIGE = Color(0.96, 0.96, 0.86) + BISQUE = Color(1, 0.89, 0.77) + BLACK = Color(0, 0, 0) + BLANCHEDALMOND = Color(1, 0.92, 0.8) + BLUE = Color(0, 0, 1) + BLUEVIOLET = Color(0.54, 0.17, 0.89) + BROWN = Color(0.65, 0.16, 0.16) + BURLYWOOD = Color(0.87, 0.72, 0.53) + CADETBLUE = Color(0.37, 0.62, 0.63) + CHARTREUSE = Color(0.5, 1, 0) + CHOCOLATE = Color(0.82, 0.41, 0.12) + CORAL = Color(1, 0.5, 0.31) + CORNFLOWER = Color(0.39, 0.58, 0.93) + CORNSILK = Color(1, 0.97, 0.86) + CRIMSON = Color(0.86, 0.08, 0.24) + CYAN = Color(0, 1, 1) + DARKBLUE = Color(0, 0, 0.55) + DARKCYAN = Color(0, 0.55, 0.55) + DARKGOLDENROD = Color(0.72, 0.53, 0.04) + DARKGRAY = Color(0.66, 0.66, 0.66) + DARKGREEN = Color(0, 0.39, 0) + DARKKHAKI = Color(0.74, 0.72, 0.42) + DARKMAGENTA = Color(0.55, 0, 0.55) + DARKOLIVEGREEN = 
Color(0.33, 0.42, 0.18) + DARKORANGE = Color(1, 0.55, 0) + DARKORCHID = Color(0.6, 0.2, 0.8) + DARKRED = Color(0.55, 0, 0) + DARKSALMON = Color(0.91, 0.59, 0.48) + DARKSEAGREEN = Color(0.56, 0.74, 0.56) + DARKSLATEBLUE = Color(0.28, 0.24, 0.55) + DARKSLATEGRAY = Color(0.18, 0.31, 0.31) + DARKTURQUOISE = Color(0, 0.81, 0.82) + DARKVIOLET = Color(0.58, 0, 0.83) + DEEPPINK = Color(1, 0.08, 0.58) + DEEPSKYBLUE = Color(0, 0.75, 1) + DIMGRAY = Color(0.41, 0.41, 0.41) + DODGERBLUE = Color(0.12, 0.56, 1) + FIREBRICK = Color(0.7, 0.13, 0.13) + FLORALWHITE = Color(1, 0.98, 0.94) + FORESTGREEN = Color(0.13, 0.55, 0.13) + FUCHSIA = Color(1, 0, 1) + GAINSBORO = Color(0.86, 0.86, 0.86) + GHOSTWHITE = Color(0.97, 0.97, 1) + GOLD = Color(1, 0.84, 0) + GOLDENROD = Color(0.85, 0.65, 0.13) + GREEN = Color(0, 1, 0) + GREENYELLOW = Color(0.68, 1, 0.18) + HONEYDEW = Color(0.94, 1, 0.94) + HOTPINK = Color(1, 0.41, 0.71) + INDIANRED = Color(0.8, 0.36, 0.36) + INDIGO = Color(0.29, 0, 0.51) + IVORY = Color(1, 1, 0.94) + KHAKI = Color(0.94, 0.9, 0.55) + LAVENDER = Color(0.9, 0.9, 0.98) + LAVENDERBLUSH = Color(1, 0.94, 0.96) + LAWNGREEN = Color(0.49, 0.99, 0) + LEMONCHIFFON = Color(1, 0.98, 0.8) + LIGHTBLUE = Color(0.68, 0.85, 0.9) + LIGHTCORAL = Color(0.94, 0.5, 0.5) + LIGHTCYAN = Color(0.88, 1, 1) + LIGHTGOLDENROD = Color(0.98, 0.98, 0.82) + LIGHTGRAY = Color(0.83, 0.83, 0.83) + LIGHTGREEN = Color(0.56, 0.93, 0.56) + LIGHTPINK = Color(1, 0.71, 0.76) + LIGHTSALMON = Color(1, 0.63, 0.48) + LIGHTSEAGREEN = Color(0.13, 0.7, 0.67) + LIGHTSKYBLUE = Color(0.53, 0.81, 0.98) + LIGHTSLATEGRAY = Color(0.47, 0.53, 0.6) + LIGHTSTEELBLUE = Color(0.69, 0.77, 0.87) + LIGHTYELLOW = Color(1, 1, 0.88) + LIME = Color(0, 1, 0) + LIMEGREEN = Color(0.2, 0.8, 0.2) + LINEN = Color(0.98, 0.94, 0.9) + MAGENTA = Color(1, 0, 1) + MAROON = Color(0.69, 0.19, 0.38) + MEDIUMAQUAMARINE = Color(0.4, 0.8, 0.67) + MEDIUMBLUE = Color(0, 0, 0.8) + MEDIUMORCHID = Color(0.73, 0.33, 0.83) + MEDIUMPURPLE = Color(0.58, 0.44, 0.86) + MEDIUMSEAGREEN = Color(0.24, 0.7, 0.44) + MEDIUMSLATEBLUE = Color(0.48, 0.41, 0.93) + MEDIUMSPRINGGREEN = Color(0, 0.98, 0.6) + MEDIUMTURQUOISE = Color(0.28, 0.82, 0.8) + MEDIUMVIOLETRED = Color(0.78, 0.08, 0.52) + MIDNIGHTBLUE = Color(0.1, 0.1, 0.44) + MINTCREAM = Color(0.96, 1, 0.98) + MISTYROSE = Color(1, 0.89, 0.88) + MOCCASIN = Color(1, 0.89, 0.71) + NAVAJOWHITE = Color(1, 0.87, 0.68) + NAVYBLUE = Color(0, 0, 0.5) + OLDLACE = Color(0.99, 0.96, 0.9) + OLIVE = Color(0.5, 0.5, 0) + OLIVEDRAB = Color(0.42, 0.56, 0.14) + ORANGE = Color(1, 0.65, 0) + ORANGERED = Color(1, 0.27, 0) + ORCHID = Color(0.85, 0.44, 0.84) + PALEGOLDENROD = Color(0.93, 0.91, 0.67) + PALEGREEN = Color(0.6, 0.98, 0.6) + PALETURQUOISE = Color(0.69, 0.93, 0.93) + PALEVIOLETRED = Color(0.86, 0.44, 0.58) + PAPAYAWHIP = Color(1, 0.94, 0.84) + PEACHPUFF = Color(1, 0.85, 0.73) + PERU = Color(0.8, 0.52, 0.25) + PINK = Color(1, 0.75, 0.8) + PLUM = Color(0.87, 0.63, 0.87) + POWDERBLUE = Color(0.69, 0.88, 0.9) + PURPLE = Color(0.63, 0.13, 0.94) + REBECCAPURPLE = Color(0.4, 0.2, 0.6) + RED = Color(1, 0, 0) + ROSYBROWN = Color(0.74, 0.56, 0.56) + ROYALBLUE = Color(0.25, 0.41, 0.88) + SADDLEBROWN = Color(0.55, 0.27, 0.07) + SALMON = Color(0.98, 0.5, 0.45) + SANDYBROWN = Color(0.96, 0.64, 0.38) + SEAGREEN = Color(0.18, 0.55, 0.34) + SEASHELL = Color(1, 0.96, 0.93) + SIENNA = Color(0.63, 0.32, 0.18) + SILVER = Color(0.75, 0.75, 0.75) + SKYBLUE = Color(0.53, 0.81, 0.92) + SLATEBLUE = Color(0.42, 0.35, 0.8) + SLATEGRAY = Color(0.44, 0.5, 0.56) + SNOW = Color(1, 0.98, 0.98) + 
SPRINGGREEN = Color(0, 1, 0.5) + STEELBLUE = Color(0.27, 0.51, 0.71) + TAN = Color(0.82, 0.71, 0.55) + TEAL = Color(0, 0.5, 0.5) + THISTLE = Color(0.85, 0.75, 0.85) + TOMATO = Color(1, 0.39, 0.28) + TURQUOISE = Color(0.25, 0.88, 0.82) + VIOLET = Color(0.93, 0.51, 0.93) + WEBGRAY = Color(0.5, 0.5, 0.5) + WEBGREEN = Color(0, 0.5, 0) + WEBMAROON = Color(0.5, 0, 0) + WEBPURPLE = Color(0.5, 0, 0.5) + WHEAT = Color(0.96, 0.87, 0.7) + WHITE = Color(1, 1, 1) + WHITESMOKE = Color(0.96, 0.96, 0.96) + YELLOW = Color(1, 1, 0) + YELLOWGREEN = Color(0.6, 0.8, 0.2) +{% endblock %} diff --git a/generation/builtins_templates/dictionary.tmpl.pxi b/generation/builtins_templates/dictionary.tmpl.pxi new file mode 100644 index 0000000..6be0272 --- /dev/null +++ b/generation/builtins_templates/dictionary.tmpl.pxi @@ -0,0 +1,210 @@ +{%- block pxd_header %} +{% endblock -%} +{%- block pyx_header %} +{% endblock -%} + +{# We can't do const in Python #} +{{ force_mark_rendered("godot_dictionary_operator_index_const") }} + +@cython.final +cdef class Dictionary: +{% block cdef_attributes %} + cdef godot_dictionary _gd_data + + @staticmethod + cdef inline Dictionary new() + + @staticmethod + cdef inline Dictionary from_ptr(const godot_dictionary *_ptr) + + cdef inline operator_update(self, Dictionary items) + cdef inline bint operator_equal(self, Dictionary other) +{% endblock %} + +{% block python_defs %} + def __init__(self, iterable=None): + {{ force_mark_rendered("godot_dictionary_new") }} + if not iterable: + gdapi10.godot_dictionary_new(&self._gd_data) + elif isinstance(iterable, Dictionary): + self._gd_data = gdapi12.godot_dictionary_duplicate(&(iterable)._gd_data, False) + # TODO: handle Pool*Array + elif isinstance(iterable, dict): + gdapi10.godot_dictionary_new(&self._gd_data) + for k, v in iterable.items(): + self[k] = v + else: + gdapi10.godot_dictionary_new(&self._gd_data) + try: + for k, v in iterable: + self[k] = v + except ValueError as exc: + raise ValueError("dictionary update sequence element has length 1; 2 is required") + + def __dealloc__(self): + {{ force_mark_rendered("godot_dictionary_destroy") }} + # /!\ if `__init__` is skipped, `_gd_data` must be initialized by + # hand otherwise we will get a segfault here + gdapi10.godot_dictionary_destroy(&self._gd_data) + + @staticmethod + cdef inline Dictionary new(): + # Call to __new__ bypasses __init__ constructor + cdef Dictionary ret = Dictionary.__new__(Dictionary) + gdapi10.godot_dictionary_new(&ret._gd_data) + return ret + + @staticmethod + cdef inline Dictionary from_ptr(const godot_dictionary *_ptr): + # Call to __new__ bypasses __init__ constructor + cdef Dictionary ret = Dictionary.__new__(Dictionary) + # `godot_dictionary` is a cheap structure pointing on a refcounted hashmap + # of variants. Unlike it name could let think, `godot_dictionary_new_copy` + # only increment the refcount of the underlying structure. 
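+        # In other words, `from_ptr` *shares* the underlying storage, while
+        # `Dictionary(other)` duplicates it. A hedged sketch of the Python-side
+        # consequence, assuming the generated bindings:
+        #     d1 = Dictionary({"hp": 10})
+        #     d2 = Dictionary(d1)   # duplicate: independent top-level storage
+        #     d2["hp"] = 99
+        #     assert d1["hp"] == 10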
+ {{ force_mark_rendered("godot_dictionary_new_copy") }} + gdapi10.godot_dictionary_new_copy(&ret._gd_data, _ptr) + return ret + + def __repr__(self): + repr_dict = {} + for k, v in self.items(): + if isinstance(k, GDString): + k = str(k) + if isinstance(v, GDString): + v = str(v) + repr_dict[k] = v + return f"" + + def __getitem__(self, object key): + {{ force_mark_rendered("godot_dictionary_operator_index") }} + cdef godot_variant var_key + if not pyobj_to_godot_variant(key, &var_key): + raise TypeError(f"Cannot convert `{key!r}` to Godot Variant") + cdef godot_variant *p_var_ret = gdapi10.godot_dictionary_operator_index(&self._gd_data, &var_key) + gdapi10.godot_variant_destroy(&var_key) + if p_var_ret == NULL: + raise KeyError(key) + else: + return godot_variant_to_pyobj(p_var_ret) + + {{ render_method("set", py_name="__setitem__") | indent }} + + def __delitem__(self, object key): + {{ force_mark_rendered("godot_dictionary_erase_with_return") }} + cdef godot_variant var_key + if not pyobj_to_godot_variant(key, &var_key): + raise TypeError(f"Cannot convert `{key!r}` to Godot Variant") + cdef godot_bool ret = gdapi11.godot_dictionary_erase_with_return(&self._gd_data, &var_key) + gdapi10.godot_variant_destroy(&var_key) + if not ret: + raise KeyError(key) + + def __iter__(self): + {{ force_mark_rendered("godot_dictionary_next") }} + cdef godot_variant *p_key = NULL + # TODO: mid iteration mutation should throw exception ? + while True: + p_key = gdapi10.godot_dictionary_next(&self._gd_data, p_key) + if p_key == NULL: + return + yield godot_variant_to_pyobj(p_key) + + def __copy__(self): + return self.duplicate(False) + + def __deepcopy__(self): + return self.duplicate(True) + + def get(self, object key, object default=None): + {{ force_mark_rendered("godot_dictionary_get") }} + {{ force_mark_rendered("godot_dictionary_get_with_default") }} + cdef godot_variant var_key + pyobj_to_godot_variant(key, &var_key) + cdef godot_variant var_ret + cdef godot_variant var_default + if default is not None: + pyobj_to_godot_variant(default, &var_default) + var_ret = gdapi11.godot_dictionary_get_with_default(&self._gd_data, &var_key, &var_default) + gdapi10.godot_variant_destroy(&var_default) + else: + var_ret = gdapi10.godot_dictionary_get(&self._gd_data, &var_key) + gdapi10.godot_variant_destroy(&var_key) + cdef object ret = godot_variant_to_pyobj(&var_ret) + gdapi10.godot_variant_destroy(&var_ret) + return ret + + cdef inline operator_update(self, Dictionary items): + cdef godot_variant *p_value + cdef godot_variant *p_key = NULL + while True: + p_key = gdapi10.godot_dictionary_next(&items._gd_data, p_key) + if p_key == NULL: + break + p_value = gdapi10.godot_dictionary_operator_index(&items._gd_data, p_key) + gdapi10.godot_dictionary_set(&self._gd_data, p_key, p_value) + return self + + def update(self, other): + cdef object k + cdef object v + if isinstance(other, Dictionary): + Dictionary.operator_update(self, other) + elif isinstance(other, dict): + for k, v in other.items(): + self[k] = v + else: + raise TypeError("other must be godot.Dictionary or dict") + + def items(self): + cdef godot_variant *p_key = NULL + cdef godot_variant *p_value + # TODO: mid iteration mutation should throw exception ? 
+ while True: + p_key = gdapi10.godot_dictionary_next(&self._gd_data, p_key) + if p_key == NULL: + return + p_value = gdapi10.godot_dictionary_operator_index(&self._gd_data, p_key) + yield godot_variant_to_pyobj(p_key), godot_variant_to_pyobj(p_value) + + cdef inline bint operator_equal(self, Dictionary other): + if other is None: + return False + cdef godot_int size = self.size() + if size != other.size(): + return False + # TODO: gdnative should provide a function to do that + return dict(self) == dict(other) + + def __eq__(self, other): + {# see https://github.com/godotengine/godot/issues/27615 #} + {{ force_mark_rendered("godot_dictionary_operator_equal") }} + try: + return Dictionary.operator_equal(self, other) + except TypeError: + return False + + def __ne__(self, other): + try: + return not Dictionary.operator_equal(self, other) + except TypeError: + return True + + {{ render_method("size", py_name="__len__") | indent }} + {{ render_method("hash", py_name="__hash__") | indent }} + {{ render_method("has", py_name="__contains__") | indent }} + + {{ render_method("duplicate") | indent }} + {{ render_method("size") | indent }} + {{ render_method("empty") | indent }} + {{ render_method("clear") | indent }} + {{ render_method("has") | indent }} + {{ render_method("has_all") | indent }} + {{ render_method("erase") | indent }} + {{ render_method("hash") | indent }} + {{ render_method("keys") | indent }} + {{ render_method("values") | indent }} + {{ render_method("to_json") | indent }} +{% endblock %} + +{%- block python_consts %} +{% endblock %} diff --git a/generation/builtins_templates/gdstring.tmpl.pxi b/generation/builtins_templates/gdstring.tmpl.pxi new file mode 100644 index 0000000..573fbd3 --- /dev/null +++ b/generation/builtins_templates/gdstring.tmpl.pxi @@ -0,0 +1,255 @@ +{%- block pxd_header %} +{% endblock -%} +{%- block pyx_header %} +from libc.stdint cimport int8_t +{% endblock -%} + +{# godot_char_string is not really a bultin type...#} +{{ force_mark_rendered("godot_char_string_destroy") }} +{{ force_mark_rendered("godot_char_string_get_data") }} +{{ force_mark_rendered("godot_char_string_length") }} +{# Those methods are present in gdnative_api.json but not in the Godot documentation... 
#} +{{ force_mark_rendered("godot_string_ascii") }} +{{ force_mark_rendered("godot_string_ascii_extended") }} +{{ force_mark_rendered("godot_string_begins_with_char_array") }} +{{ force_mark_rendered("godot_string_c_escape_multiline") }} +{{ force_mark_rendered("godot_string_camelcase_to_underscore") }} +{{ force_mark_rendered("godot_string_camelcase_to_underscore_lowercased") }} +{{ force_mark_rendered("godot_string_char_lowercase") }} +{{ force_mark_rendered("godot_string_char_to_double") }} +{{ force_mark_rendered("godot_string_char_to_int") }} +{{ force_mark_rendered("godot_string_char_to_int64_with_len") }} +{{ force_mark_rendered("godot_string_char_to_int_with_len") }} +{{ force_mark_rendered("godot_string_char_uppercase") }} +{{ force_mark_rendered("godot_string_chars_to_utf8") }} +{{ force_mark_rendered("godot_string_chars_to_utf8_with_len") }} +{{ force_mark_rendered("godot_string_chr") }} +{{ force_mark_rendered("godot_string_find_from") }} +{{ force_mark_rendered("godot_string_findmk") }} +{{ force_mark_rendered("godot_string_findmk_from") }} +{{ force_mark_rendered("godot_string_findmk_from_in_place") }} +{{ force_mark_rendered("godot_string_findn_from") }} +{{ force_mark_rendered("godot_string_format_with_custom_placeholder") }} +{{ force_mark_rendered("godot_string_get_slice") }} +{{ force_mark_rendered("godot_string_get_slice_count") }} +{{ force_mark_rendered("godot_string_get_slicec") }} +{{ force_mark_rendered("godot_string_hash64") }} +{{ force_mark_rendered("godot_string_hash_chars") }} +{{ force_mark_rendered("godot_string_hash_chars_with_len") }} +{{ force_mark_rendered("godot_string_hash_utf8_chars") }} +{{ force_mark_rendered("godot_string_hash_utf8_chars_with_len") }} +{{ force_mark_rendered("godot_string_hex_encode_buffer") }} +{{ force_mark_rendered("godot_string_hex_to_int64") }} +{{ force_mark_rendered("godot_string_hex_to_int64_with_prefix") }} +{{ force_mark_rendered("godot_string_hex_to_int_without_prefix") }} +{{ force_mark_rendered("godot_string_is_numeric") }} +{{ force_mark_rendered("godot_string_is_resource_file") }} +{{ force_mark_rendered("godot_string_lpad") }} +{{ force_mark_rendered("godot_string_lpad_with_custom_character") }} +{{ force_mark_rendered("godot_string_md5") }} +{{ force_mark_rendered("godot_string_name_destroy") }} +{{ force_mark_rendered("godot_string_name_get_data_unique_pointer") }} +{{ force_mark_rendered("godot_string_name_get_hash") }} +{{ force_mark_rendered("godot_string_name_get_name") }} +{{ force_mark_rendered("godot_string_name_new") }} +{{ force_mark_rendered("godot_string_name_new_data") }} +{{ force_mark_rendered("godot_string_name_operator_equal") }} +{{ force_mark_rendered("godot_string_name_operator_less") }} +{{ force_mark_rendered("godot_string_naturalnocasecmp_to") }} +{{ force_mark_rendered("godot_string_num") }} +{{ force_mark_rendered("godot_string_num_int64") }} +{{ force_mark_rendered("godot_string_num_int64_capitalized") }} +{{ force_mark_rendered("godot_string_num_real") }} +{{ force_mark_rendered("godot_string_num_scientific") }} +{{ force_mark_rendered("godot_string_num_with_decimals") }} +{{ force_mark_rendered("godot_string_operator_index") }} +{{ force_mark_rendered("godot_string_operator_index_const") }} +{{ force_mark_rendered("godot_string_parse_utf8") }} +{{ force_mark_rendered("godot_string_parse_utf8_with_len") }} +{{ force_mark_rendered("godot_string_path_to") }} +{{ force_mark_rendered("godot_string_path_to_file") }} +{{ force_mark_rendered("godot_string_replace_first") }} +{{ 
force_mark_rendered("godot_string_rfind_from") }} +{{ force_mark_rendered("godot_string_rfindn_from") }} +{{ force_mark_rendered("godot_string_rpad") }} +{{ force_mark_rendered("godot_string_rpad_with_custom_character") }} +{{ force_mark_rendered("godot_string_simplify_path") }} +{{ force_mark_rendered("godot_string_split_allow_empty") }} +{{ force_mark_rendered("godot_string_split_floats_allows_empty") }} +{{ force_mark_rendered("godot_string_split_floats_mk") }} +{{ force_mark_rendered("godot_string_split_floats_mk_allows_empty") }} +{{ force_mark_rendered("godot_string_split_ints") }} +{{ force_mark_rendered("godot_string_split_ints_allows_empty") }} +{{ force_mark_rendered("godot_string_split_ints_mk") }} +{{ force_mark_rendered("godot_string_split_ints_mk_allows_empty") }} +{{ force_mark_rendered("godot_string_split_spaces") }} +{{ force_mark_rendered("godot_string_sprintf") }} +{{ force_mark_rendered("godot_string_to_double") }} +{{ force_mark_rendered("godot_string_to_int64") }} +{{ force_mark_rendered("godot_string_unicode_char_to_double") }} +{{ force_mark_rendered("godot_string_utf8") }} +{{ force_mark_rendered("godot_string_wchar_to_int") }} +{{ force_mark_rendered("godot_string_wide_str") }} +{{ force_mark_rendered("godot_string_word_wrap") }} +{{ force_mark_rendered("godot_string_xml_escape_with_quotes") }} + +@cython.final +cdef class GDString: +{% block cdef_attributes %} + cdef godot_string _gd_data + + @staticmethod + cdef inline GDString new() + + @staticmethod + cdef inline GDString new_with_wide_string(wchar_t *content, int size) + + @staticmethod + cdef inline GDString from_ptr(const godot_string *_ptr) +{% endblock %} + +{% block python_defs %} + def __init__(self, str pystr=None): + if not pystr: + {{ force_mark_rendered("godot_string_new" )}} + gdapi10.godot_string_new(&self._gd_data) + else: + pyobj_to_godot_string(pystr, &self._gd_data) + + @staticmethod + cdef inline GDString new(): + # Call to __new__ bypasses __init__ constructor + cdef GDString ret = GDString.__new__(GDString) + gdapi10.godot_string_new(&ret._gd_data) + return ret + + @staticmethod + cdef inline GDString new_with_wide_string(wchar_t *content, int size): + {{ force_mark_rendered("godot_string_new_with_wide_string") }} + # Call to __new__ bypasses __init__ constructor + cdef GDString ret = GDString.__new__(GDString) + gdapi10.godot_string_new_with_wide_string(&ret._gd_data, content, size) + return ret + + @staticmethod + cdef inline GDString from_ptr(const godot_string *_ptr): + # Call to __new__ bypasses __init__ constructor + cdef GDString ret = GDString.__new__(GDString) + # `godot_string` is a cheap structure pointing on a refcounted buffer. + # Unlike it name could let think, `godot_string_new_copy` only + # increments the refcount of the underlying structure. 
+ {{ force_mark_rendered("godot_string_new_copy") }} + gdapi10.godot_string_new_copy(&ret._gd_data, _ptr) + return ret + + def __dealloc__(GDString self): + # /!\ if `__init__` is skipped, `_gd_data` must be initialized by + # hand otherwise we will get a segfault here + {{ force_mark_rendered("godot_string_destroy" )}} + gdapi10.godot_string_destroy(&self._gd_data) + + def __repr__(GDString self): + return f"" + + def __str__(GDString self): + return godot_string_to_pyobj(&self._gd_data) + + {{ render_operator_eq() | indent }} + {{ render_operator_ne() | indent }} + {{ render_operator_lt() | indent }} + + {{ render_method("hash", py_name="__hash__") | indent }} + {{ render_method("operator_plus", py_name="__add__") | indent }} + + {{ render_method("begins_with") | indent }} + {{ render_method("bigrams") | indent }} + {{ render_method("c_escape") | indent }} + {{ render_method("c_unescape") | indent }} + {{ render_method("capitalize") | indent }} + {{ render_method("casecmp_to") | indent }} + {{ render_method("count") | indent }} + {{ render_method("countn") | indent }} + {{ render_method("dedent") | indent }} + {{ render_method("empty") | indent }} + {{ render_method("ends_with") | indent }} + {{ render_method("erase") | indent }} + {{ render_method("find") | indent }} + {{ render_method("find_last") | indent }} + {{ render_method("findn") | indent }} + {{ render_method("format") | indent }} + {{ render_method("get_base_dir") | indent }} + {{ render_method("get_basename") | indent }} + {{ render_method("get_extension") | indent }} + {{ render_method("get_file") | indent }} + {{ render_method("hash") | indent }} + {{ render_method("hex_to_int") | indent }} + {{ render_method("http_escape") | indent }} + {{ render_method("http_unescape") | indent }} + + @staticmethod + def humanize_size(size_t size): + {{ force_mark_rendered("godot_string_humanize_size") }} + cdef GDString __ret = GDString.__new__(GDString) + __ret._gd_data = gdapi10.godot_string_humanize_size(size) + return __ret + + {{ render_method("insert") | indent }} + {{ render_method("is_abs_path") | indent }} + {{ render_method("is_rel_path") | indent }} + {{ render_method("is_subsequence_of") | indent }} + {{ render_method("is_subsequence_ofi") | indent }} + {#- {{ render_method("is_valid_filename") | indent }} # TODO: Missing from binding ! #} + {{ render_method("is_valid_float") | indent }} + {{ render_method("is_valid_hex_number") | indent }} + {{ render_method("is_valid_html_color") | indent }} + {{ render_method("is_valid_identifier") | indent }} + {{ render_method("is_valid_integer") | indent }} + {{ render_method("is_valid_ip_address") | indent }} + {{ render_method("json_escape") | indent }} + {{ render_method("left") | indent }} + {{ render_method("length") | indent }} + {#- {{ render_method("lstrip") | indent }} # TODO: Missing from binding ! #} + {{ render_method("match") | indent }} + {{ render_method("matchn") | indent }} + {{ render_method("md5_buffer") | indent }} + {{ render_method("md5_text") | indent }} + {{ render_method("nocasecmp_to") | indent }} + {{ render_method("ord_at") | indent }} + {{ render_method("pad_decimals") | indent }} + {{ render_method("pad_zeros") | indent }} + {{ render_method("percent_decode") | indent }} + {{ render_method("percent_encode") | indent }} + {{ render_method("plus_file") | indent }} + {#- {{ render_method("repeat") | indent }} # TODO: Missing from binding ! 
#} + {{ render_method("replace") | indent }} + {{ render_method("replacen") | indent }} + {{ render_method("rfind") | indent }} + {{ render_method("rfindn") | indent }} + {{ render_method("right") | indent }} + {{ render_method("rsplit") | indent }} + {{ render_method("rstrip") | indent }} + {#- {{ render_method("sha1_buffer") | indent }} # TODO: Missing from binding ! #} + {#- {{ render_method("sha1_text") | indent }} # TODO: Missing from binding ! #} + {{ render_method("sha256_buffer") | indent }} + {{ render_method("sha256_text") | indent }} + {{ render_method("similarity") | indent }} + {{ render_method("split") | indent }} + {{ render_method("split_floats") | indent }} + {{ render_method("strip_edges") | indent }} + {{ render_method("strip_escapes") | indent }} + {{ render_method("substr") | indent }} + {#- {{ render_method("to_ascii") | indent }} # TODO: Missing from binding ! #} + {{ render_method("to_float") | indent }} + {{ render_method("to_int") | indent }} + {{ render_method("to_lower") | indent }} + {{ render_method("to_upper") | indent }} + {#- {{ render_method("to_utf8") | indent }} # TODO: Missing from binding ! #} + {{ render_method("trim_prefix") | indent }} + {{ render_method("trim_suffix") | indent }} + {{ render_method("xml_escape") | indent }} + {{ render_method("xml_unescape") | indent }} + +{% endblock %} + +{%- block python_consts %} +{% endblock -%} diff --git a/generation/builtins_templates/node_path.tmpl.pxi b/generation/builtins_templates/node_path.tmpl.pxi new file mode 100644 index 0000000..ce863af --- /dev/null +++ b/generation/builtins_templates/node_path.tmpl.pxi @@ -0,0 +1,55 @@ +{%- block pxd_header %} +{% endblock -%} +{%- block pyx_header %} +{% endblock -%} + +{{ force_mark_rendered("godot_node_path_new_copy") }} {# NodePath is const, why does this exists in the first place ? 
#}
+
+@cython.final
+cdef class NodePath:
+{% block cdef_attributes %}
+    cdef godot_node_path _gd_data
+{% endblock %}
+
+{% block python_defs %}
+    def __init__(self, from_):
+        {{ force_mark_rendered("godot_node_path_new") }}
+        cdef godot_string gd_from
+        try:
+            gdapi10.godot_node_path_new(&self._gd_data, &(<GDString?>from_)._gd_data)
+        except TypeError:
+            if not isinstance(from_, str):
+                raise TypeError("`from_` must be str or GDString")
+            pyobj_to_godot_string(from_, &gd_from)
+            gdapi10.godot_node_path_new(&self._gd_data, &gd_from)
+            gdapi10.godot_string_destroy(&gd_from)
+
+    def __dealloc__(NodePath self):
+        {{ force_mark_rendered("godot_node_path_destroy") }}
+        # /!\ if `__init__` is skipped, `_gd_data` must be initialized by
+        # hand, otherwise we will get a segfault here
+        gdapi10.godot_node_path_destroy(&self._gd_data)
+
+    def __repr__(NodePath self):
+        return f"<NodePath({str(self)!r})>"
+
+    def __str__(NodePath self):
+        return str(self.as_string())
+
+    {{ render_operator_eq() | indent }}
+    {{ render_operator_ne() | indent }}
+
+    {{ render_method("destroy") | indent }}
+    {{ render_method("as_string") | indent }}
+    {{ render_method("is_absolute") | indent }}
+    {{ render_method("get_name_count") | indent }}
+    {{ render_method("get_name") | indent }}
+    {{ render_method("get_subname_count") | indent }}
+    {{ render_method("get_subname") | indent }}
+    {{ render_method("get_concatenated_subnames") | indent }}
+    {{ render_method("is_empty") | indent }}
+    {{ render_method("get_as_property_path") | indent }}
+{% endblock %}
+
+{%- block python_consts %}
+{% endblock %}
diff --git a/generation/builtins_templates/plane.tmpl.pxi b/generation/builtins_templates/plane.tmpl.pxi
new file mode 100644
index 0000000..d0a06ed
--- /dev/null
+++ b/generation/builtins_templates/plane.tmpl.pxi
@@ -0,0 +1,89 @@
+{%- block pxd_header %}
+{% endblock -%}
+{%- block pyx_header %}
+{% endblock -%}
+
+
+@cython.final
+cdef class Plane:
+{% block cdef_attributes %}
+    cdef godot_plane _gd_data
+{% endblock %}
+
+{% block python_defs %}
+    def __init__(self, godot_real a, godot_real b, godot_real c, godot_real d):
+        {{ force_mark_rendered("godot_plane_new_with_reals") }}
+        gdapi10.godot_plane_new_with_reals(&self._gd_data, a, b, c, d)
+
+    @staticmethod
+    def from_vectors(Vector3 v1 not None, Vector3 v2 not None, Vector3 v3 not None):
+        cdef Plane ret = Plane.__new__(Plane)
+        {{ force_mark_rendered("godot_plane_new_with_vectors") }}
+        gdapi10.godot_plane_new_with_vectors(&ret._gd_data, &v1._gd_data, &v2._gd_data, &v3._gd_data)
+        return ret
+
+    @staticmethod
+    def from_normal(Vector3 normal not None, godot_real d):
+        cdef Plane ret = Plane.__new__(Plane)
+        {{ force_mark_rendered("godot_plane_new_with_normal") }}
+        gdapi10.godot_plane_new_with_normal(&ret._gd_data, &normal._gd_data, d)
+        return ret
+
+    def __repr__(Plane self):
+        return f"<Plane({self.as_string()})>"
+
+    {{ render_operator_eq() | indent }}
+    {{ render_operator_ne() | indent }}
+
+    {{ render_method("operator_neg", py_name="__neg__") | indent }}
+
+    def __pos__(Plane self):
+        return self
+
+    {{ render_property("normal", getter="get_normal", setter="set_normal") | indent }}
+    {{ render_property("d", getter="get_d", setter="set_d") | indent }}
+
+    {{ render_method("as_string") | indent }}
+    {{ render_method("normalized") | indent }}
+    {{ render_method("center") | indent }}
+    {{ render_method("get_any_point") | indent }}
+    {{ render_method("is_point_over") | indent }}
+    {{ render_method("distance_to") | indent }}
+    {{ render_method("has_point") | indent }}
+    {{ render_method("project") | indent }}
+
+    def
diff --git a/generation/builtins_templates/quat.tmpl.pxi b/generation/builtins_templates/quat.tmpl.pxi
new file mode 100644
index 0000000..238b76e
--- /dev/null
+++ b/generation/builtins_templates/quat.tmpl.pxi
@@ -0,0 +1,86 @@
+{%- block pxd_header %}
+{% endblock -%}
+{%- block pyx_header %}
+{% endblock -%}
+
+
+@cython.final
+cdef class Quat:
+{% block cdef_attributes %}
+    cdef godot_quat _gd_data
+{% endblock %}
+
+{% block python_defs %}
+    def __init__(self, x=0, y=0, z=0, w=0):
+        {{ force_mark_rendered("godot_quat_new") }}
+        gdapi10.godot_quat_new(&self._gd_data, x, y, z, w)
+
+    @staticmethod
+    def from_axis_angle(Vector3 axis not None, godot_real angle):
+        # Call to __new__ bypasses __init__ constructor
+        cdef Quat ret = Quat.__new__(Quat)
+        {{ force_mark_rendered("godot_quat_new_with_axis_angle") }}
+        gdapi10.godot_quat_new_with_axis_angle(&ret._gd_data, &axis._gd_data, angle)
+        return ret
+
+    @staticmethod
+    def from_basis(Basis basis not None):
+        # Call to __new__ bypasses __init__ constructor
+        cdef Quat ret = Quat.__new__(Quat)
+        {{ force_mark_rendered("godot_quat_new_with_basis") }}
+        gdapi11.godot_quat_new_with_basis(&ret._gd_data, &basis._gd_data)
+        return ret
+
+    @staticmethod
+    def from_euler(Vector3 euler not None):
+        # Call to __new__ bypasses __init__ constructor
+        cdef Quat ret = Quat.__new__(Quat)
+        {{ force_mark_rendered("godot_quat_new_with_euler") }}
+        gdapi11.godot_quat_new_with_euler(&ret._gd_data, &euler._gd_data)
+        return ret
+
+    def __repr__(Quat self):
+        return f"<Quat({self.as_string()})>"
+
+    {{ render_operator_eq() | indent }}
+    {{ render_operator_ne() | indent }}
+
+    {{ render_method("operator_neg", py_name="__neg__") | indent }}
+
+    def __pos__(Quat self):
+        return self
+
+    {{ render_method("operator_add", py_name="__add__") | indent }}
+    {{ render_method("operator_subtract", py_name="__sub__") | indent }}
+    {{ render_method("operator_multiply", py_name="__mul__") | indent }}
+
+    def __truediv__(Quat self, godot_real val):
+        if val == 0:
+            raise ZeroDivisionError
+        cdef Quat ret = Quat.__new__(Quat)
+        {{ force_mark_rendered("godot_quat_operator_divide") }}
+        ret._gd_data = gdapi10.godot_quat_operator_divide(&self._gd_data, val)
+        return ret
+
+    {{ render_property("x", getter="get_x", setter="set_x") | indent }}
+    {{ render_property("y", getter="get_y", setter="set_y") | indent }}
+    {{ render_property("z", getter="get_z", setter="set_z") | indent }}
+    {{ render_property("w", getter="get_w", setter="set_w") | indent }}
+
+    {{ render_method("as_string") | indent }}
+    {{ render_method("length") | indent }}
+    {{ render_method("length_squared") | indent }}
+    {{ render_method("normalized") | indent }}
+    {{ render_method("is_normalized") | indent }}
+    {{ render_method("inverse") | indent }}
+    {{ render_method("dot") | indent }}
+    {{ render_method("xform") | indent }}
+    {{ render_method("slerp") | indent }}
+    {{ render_method("slerpni") | indent }}
+    {{ render_method("cubic_slerp") | indent }}
+    {{ render_method("set_axis_angle") | indent }}
+{% endblock %}
+
+{%- block python_consts %}
+    IDENTITY = Quat(0, 0, 0, 1)
+{% endblock %}
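A short usage sketch (not part of the diff), showing the alternate constructors and the explicit zero-division guard above:

```python
import math
from godot import Quat, Vector3

q = Quat.from_axis_angle(Vector3(0, 1, 0), math.pi / 2)
assert q.is_normalized()
try:
    q / 0.0  # __truediv__ raises instead of crashing in C
except ZeroDivisionError:
    pass
```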
render_property("y", getter="get_y", setter="set_y") | indent }} + {{ render_property("z", getter="get_z", setter="set_z") | indent }} + {{ render_property("w", getter="get_w", setter="set_w") | indent }} + + {{ render_method("as_string") | indent }} + {{ render_method("length") | indent }} + {{ render_method("length_squared") | indent }} + {{ render_method("normalized") | indent }} + {{ render_method("is_normalized") | indent }} + {{ render_method("inverse") | indent }} + {{ render_method("dot") | indent }} + {{ render_method("xform") | indent }} + {{ render_method("slerp") | indent }} + {{ render_method("slerpni") | indent }} + {{ render_method("cubic_slerp") | indent }} + {{ render_method("set_axis_angle") | indent }} +{% endblock %} + +{%- block python_consts %} + IDENTITY = Quat(0, 0, 0, 1) +{% endblock %} diff --git a/generation/builtins_templates/rect2.tmpl.pxi b/generation/builtins_templates/rect2.tmpl.pxi new file mode 100644 index 0000000..4d2edf0 --- /dev/null +++ b/generation/builtins_templates/rect2.tmpl.pxi @@ -0,0 +1,58 @@ +{%- block pxd_header %} +{% endblock -%} +{%- block pyx_header %} +{% endblock -%} + + +@cython.final +cdef class Rect2: +{% block cdef_attributes %} + cdef godot_rect2 _gd_data +{% endblock %} + +{% block python_defs %} + def __init__(self, godot_real x=0.0, godot_real y=0.0, godot_real width=0.0, godot_real height=0.0): + {{ force_mark_rendered("godot_rect2_new") }} + gdapi10.godot_rect2_new(&self._gd_data, x, y, width, height) + + @staticmethod + def from_pos_size(Vector2 position not None, Vector2 size not None): + {{ force_mark_rendered("godot_rect2_new_with_position_and_size") }} + cdef Rect2 ret = Rect2.__new__(Rect2) + gdapi10.godot_rect2_new_with_position_and_size(&ret._gd_data, &position._gd_data, &size._gd_data) + return ret + + def __repr__(Rect2 self): + return f"" + + {{ render_operator_eq() | indent }} + {{ render_operator_ne() | indent }} + + {{ render_property("size", getter="get_size", setter="set_size") | indent }} + {{ render_property("position", getter="get_position", setter="set_position") | indent }} + + @property + def end(Rect2 self) -> Vector2: + cdef godot_vector2 position = gdapi10.godot_rect2_get_position(&self._gd_data) + cdef godot_vector2 size = gdapi10.godot_rect2_get_size(&self._gd_data) + cdef Vector2 ret = Vector2.__new__(Vector2) + ret._gd_data = gdapi10.godot_vector2_operator_add(&position, &size) + return ret + + {{ render_method("as_string") | indent }} + {{ render_method("get_area") | indent }} + {{ render_method("intersects") | indent }} + {{ render_method("encloses") | indent }} + {{ render_method("has_no_area") | indent }} + {{ render_method("clip") | indent }} + {{ render_method("merge") | indent }} + {{ render_method("has_point") | indent }} + {{ render_method("grow") | indent }} + {{ render_method("grow_individual") | indent }} + {{ render_method("grow_margin") | indent }} + {{ render_method("abs") | indent }} + {{ render_method("expand") | indent }} +{% endblock %} + +{%- block python_consts %} +{% endblock %} diff --git a/generation/builtins_templates/render.tmpl.pxd b/generation/builtins_templates/render.tmpl.pxd new file mode 100644 index 0000000..4a00a96 --- /dev/null +++ b/generation/builtins_templates/render.tmpl.pxd @@ -0,0 +1,20 @@ +{#- `render_target` must be defined by calling context -#} +{% set get_target_method_spec = get_target_method_spec_factory(render_target) %} + +{#- Define rendering macros -#} + +{% macro render_method(method_name, py_name=None, default_args={}) %}{% endmacro %} +{% macro 
diff --git a/generation/builtins_templates/render.tmpl.pyi b/generation/builtins_templates/render.tmpl.pyi
new file mode 100644
index 0000000..9080bda
--- /dev/null
+++ b/generation/builtins_templates/render.tmpl.pyi
@@ -0,0 +1,44 @@
+{#- `render_target` must be defined by calling context -#}
+{% set get_target_method_spec = get_target_method_spec_factory(render_target) %}
+
+{#- Define rendering macros -#}
+
+{% macro render_method(method_name, py_name=None, default_args={}) %}
+{% set spec = get_target_method_spec(method_name) %}
+def {{ py_name or spec.py_name }}(self{%- if spec.args[1:] -%},{%- endif -%}
+{%- for arg in spec.args[1:] %}
+ {{ arg.name }}: {{ arg.type.py_type }}
+,
+{%- endfor -%}
+) -> {{ spec.return_type.py_type }}: ...
+{% endmacro %}
+
+{% macro render_operator_eq() %}
+def __eq__(self, other) -> bool: ...
+{% endmacro %}
+
+{% macro render_operator_ne() %}
+def __ne__(self, other) -> bool: ...
+{% endmacro %}
+
+{% macro render_operator_lt() %}
+def __lt__(self, other) -> bool: ...
+{% endmacro %}
+
+{% macro render_property(py_name, getter, setter=None) %}
+{{ py_name }}: {{ get_target_method_spec(getter).return_type.py_type }}
+{% endmacro %}
+
+{#- Overwrite blocks to be ignored -#}
+
+{% block python_defs %}
+    pass
+{% endblock %}
+{% block pxd_header %}{% endblock %}
+{% block pyx_header %}{% endblock %}
+{% block python_consts %}{% endblock %}
+{% block cdef_attributes %}{% endblock %}
+
+{#- Now the template will be generated with the context -#}
+
+{% extends render_target_to_template(render_target) %}
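Modulo whitespace, the stub macros above should produce entries of this shape for the Rect2 target (an approximation; argument names come from gdnative_api.json, e.g. `p_b` cooked to `b`):

```python
def merge(self, b: Rect2) -> Rect2: ...
def __eq__(self, other) -> bool: ...
size: Vector2
```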
diff --git a/generation/builtins_templates/render.tmpl.pyx b/generation/builtins_templates/render.tmpl.pyx
new file mode 100644
index 0000000..55ab3a0
--- /dev/null
+++ b/generation/builtins_templates/render.tmpl.pyx
@@ -0,0 +1,120 @@
+{#- `render_target` must be defined by calling context -#}
+{% set get_target_method_spec = get_target_method_spec_factory(render_target) %}
+
+{#- Define rendering macros -#}
+
+{% macro render_method(method_name, py_name=None, default_args={}) %}
+{% set spec = get_target_method_spec(method_name) %}
+{% set args_without_self = spec.args[1:] %}
+def {{ py_name or spec.py_name }}({{ spec.klass.cy_type }} self{%- if args_without_self -%},{%- endif -%}
+{%- for arg in args_without_self %}
+ {{ arg.cy_type }} {{ arg.name }}
+{%- if not arg.is_base_type and not arg.is_variant %}
+ not None
+{%- endif -%}
+,
+{%- endfor -%}
+) -> {{ spec.return_type.py_type }}:
+{% for arg in args_without_self %}
+{% if arg.is_variant %}
+    cdef godot_variant __var_{{ arg.name }}
+    if not pyobj_to_godot_variant({{ arg.name }}, &__var_{{ arg.name }}):
+{% for initialized_arg in args_without_self %}
+{% if initialized_arg.name == arg.name %}
+{% break %}
+{% endif %}
+{% if initialized_arg.is_variant %}
+        gdapi10.godot_variant_destroy(&__var_{{ initialized_arg.name }})
+{% endif %}
+{% endfor %}
+        raise TypeError(f"Cannot convert `{ {{ arg.name }} !r}` to Godot Variant")
+{% endif %}
+{% endfor %}
+{% if spec.return_type.is_variant %}
+    cdef godot_variant __var_ret = (
+{%- elif spec.return_type.is_builtin %}
+    cdef {{ spec.return_type.cy_type }} __ret = {{ spec.return_type.cy_type }}.__new__({{ spec.return_type.cy_type }})
+    __ret._gd_data = (
+{%- elif spec.return_type.is_object %}
+    cdef {{ spec.return_type.cy_type }} __ret = {{ spec.return_type.cy_type }}.__new__({{ spec.return_type.cy_type }})
+    __ret._gd_ptr = (
+{%- elif not spec.return_type.is_void %}
+    cdef {{ spec.return_type.cy_type }} __ret = (
+{%- else %}
+    (
+{%- endif %}
+{{ spec.gdapi }}.{{ spec.c_name }}(&self._gd_data,
+{%- for arg in args_without_self %}
+{%- if arg.is_variant %}
+ &__var_{{ arg.name }},
+{%- elif arg.is_builtin %}
+{%- if arg.is_ptr %}
+ &{{ arg.name }}._gd_data,
+{%- else %}
+ {{ arg.name }}._gd_data,
+{%- endif %}
+{%- elif arg.is_object %}
+ {{ arg.name }}._gd_ptr,
+{%- else %}
+ {{ arg.name }},
+{%- endif %}
+{% endfor %}
+))
+{% for arg in args_without_self %}
+{% if arg.is_variant %}
+    gdapi10.godot_variant_destroy(&__var_{{ arg.name }})
+{% endif %}
+{% endfor %}
+{% if spec.return_type.is_variant %}
+    cdef object __ret = godot_variant_to_pyobj(&__var_ret)
+    gdapi10.godot_variant_destroy(&__var_ret)
+    return __ret
+{% elif not spec.return_type.is_void %}
+    return __ret
+{% endif %}
+{% endmacro %}
+
+{% macro render_operator_eq() %}
+{% set spec = get_target_method_spec("operator_equal") %}
+def __eq__({{ spec.klass.cy_type }} self, other):
+    try:
+        return {{ spec.gdapi }}.{{ spec.c_name }}(&self._gd_data, &(<{{ spec.klass.cy_type }}?>other)._gd_data)
+    except TypeError:
+        return False
+{% endmacro %}
+
+{% macro render_operator_ne() %}
+{% set spec = get_target_method_spec("operator_equal") %}
+def __ne__({{ spec.klass.cy_type }} self, other):
+    try:
+        return not {{ spec.gdapi }}.{{ spec.c_name }}(&self._gd_data, &(<{{ spec.klass.cy_type }}?>other)._gd_data)
+    except TypeError:
+        return True
+{% endmacro %}
+
+{% macro render_operator_lt() %}
+{% set spec = get_target_method_spec("operator_less") %}
+def __lt__({{ spec.klass.cy_type }} self, other):
+    try:
+        return {{ spec.gdapi }}.{{ spec.c_name }}(&self._gd_data, &(<{{ spec.klass.cy_type }}?>other)._gd_data)
+    except TypeError:
+        return False
+{% endmacro %}
+
+{% macro render_property(py_name, getter, setter=None) %}
+@property
+{{ render_method(getter, py_name=py_name) }}
+{% if setter %}
+@{{ py_name }}.setter
+{{ render_method(setter, py_name=py_name) }}
+{% endif %}
+{% endmacro %}
+
+{#- Overwrite blocks to be ignored -#}
+
+{% block pxd_header %}{% endblock %}
+{% block cdef_attributes %}{% endblock %}
+
+{#- Now the template will be generated with the context -#}
+
+{% extends render_target_to_template(render_target) %}
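To make the macro above easier to follow, this is roughly what `render_method("merge")` expands to for the Rect2 target (an approximation assuming the `godot_rect2_merge` spec: builtin return type, one const-pointer builtin argument; actual whitespace differs):

```python
def merge(Rect2 self, Rect2 b not None) -> Rect2:
    cdef Rect2 __ret = Rect2.__new__(Rect2)
    __ret._gd_data = (
        gdapi10.godot_rect2_merge(&self._gd_data, &b._gd_data)
    )
    return __ret
```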
diff --git a/generation/builtins_templates/rid.tmpl.pxi b/generation/builtins_templates/rid.tmpl.pxi
new file mode 100644
index 0000000..ac2345e
--- /dev/null
+++ b/generation/builtins_templates/rid.tmpl.pxi
@@ -0,0 +1,44 @@
+{%- block pxd_header %}
+{% endblock -%}
+{%- block pyx_header %}
+from godot.bindings cimport Resource
+{% endblock -%}
+
+
+@cython.final
+cdef class RID:
+{% block cdef_attributes %}
+    cdef godot_rid _gd_data
+{% endblock %}
+
+{% block python_defs %}
+    def __init__(self, Resource from_=None):
+        if from_ is not None:
+            {{ force_mark_rendered("godot_rid_new_with_resource") }}
+            gdapi10.godot_rid_new_with_resource(
+                &self._gd_data,
+                from_._gd_ptr
+            )
+        else:
+            {{ force_mark_rendered("godot_rid_new") }}
+            gdapi10.godot_rid_new(&self._gd_data)
+
+    def __repr__(RID self):
+        return f"<RID(id={self.get_id()})>"
+
+    @staticmethod
+    def from_resource(Resource resource not None):
+        # Call to __new__ bypasses __init__ constructor
+        cdef RID ret = RID.__new__(RID)
+        gdapi10.godot_rid_new_with_resource(&ret._gd_data, resource._gd_ptr)
+        return ret
+
+    {{ render_operator_eq() | indent }}
+    {{ render_operator_ne() | indent }}
+    {{ render_operator_lt() | indent }}
+    {{ render_method("get_id") | indent }}
+
+{% endblock %}
+
+{%- block python_consts %}
+{% endblock -%}
diff --git a/generation/builtins_templates/transform.tmpl.pxi b/generation/builtins_templates/transform.tmpl.pxi
new file mode 100644
index 0000000..eefc0a4
--- /dev/null
+++ b/generation/builtins_templates/transform.tmpl.pxi
@@ -0,0 +1,74 @@
+{%- block pxd_header %}
+{% endblock -%}
+{%- block pyx_header %}
+{% endblock -%}
+
+
+@cython.final
+cdef class Transform:
+{% block cdef_attributes %}
+    cdef godot_transform _gd_data
+{% endblock %}
+
+{% block python_defs %}
+    def __init__(self, x_axis=None, y_axis=None, z_axis=None, origin=None):
+        if x_axis is None and y_axis is None and z_axis is None and origin is None:
+            {{ force_mark_rendered("godot_transform_new_identity") }}
+            gdapi10.godot_transform_new_identity(&self._gd_data)
+        else:
+            {{ force_mark_rendered("godot_transform_new_with_axis_origin") }}
+            gdapi10.godot_transform_new_with_axis_origin(
+                &self._gd_data,
+                &(<Vector3?>x_axis)._gd_data,
+                &(<Vector3?>y_axis)._gd_data,
+                &(<Vector3?>z_axis)._gd_data,
+                &(<Vector3?>origin)._gd_data,
+            )
+
+    @staticmethod
+    def from_basis_origin(Basis basis not None, Vector3 origin not None):
+        cdef Transform ret = Transform.__new__(Transform)
+        {{ force_mark_rendered("godot_transform_new") }}
+        gdapi10.godot_transform_new(&ret._gd_data, &basis._gd_data, &origin._gd_data)
+        return ret
+
+    @staticmethod
+    def from_quat(Quat quat not None):
+        cdef Transform ret = Transform.__new__(Transform)
+        {{ force_mark_rendered("godot_transform_new_with_quat") }}
+        gdapi11.godot_transform_new_with_quat(&ret._gd_data, &quat._gd_data)
+        return ret
+
+    def __repr__(Transform self):
+        return f"<Transform({self.as_string()})>"
+
+    {{ render_operator_eq() | indent }}
+    {{ render_operator_ne() | indent }}
+
+    {{ render_method("operator_multiply", py_name="__mul__") | indent }}
+
+    {{ render_property("basis", getter="get_basis", setter="set_basis") | indent }}
+    {{ render_property("origin", getter="get_origin", setter="set_origin") | indent }}
+
+    {{ render_method("as_string") | indent }}
+    {{ render_method("inverse") | indent }}
+    {{ render_method("affine_inverse") | indent }}
+    {{ render_method("orthonormalized") | indent }}
+    {{ render_method("rotated") | indent }}
+    {{ render_method("scaled") | indent }}
+    {{ render_method("translated") | indent }}
+    {{ render_method("looking_at") | indent }}
+    {{ render_method("xform_plane") | indent }}
+    {{ render_method("xform_inv_plane") | indent }}
+    {{ render_method("xform_vector3") | indent }}
+    {{ render_method("xform_inv_vector3") | indent }}
+    {{ render_method("xform_aabb") | indent }}
+    {{ render_method("xform_inv_aabb") | indent }}
+{% endblock %}
+
+{%- block python_consts %}
+    IDENTITY = Transform(Vector3(1, 0, 0), Vector3(0, 1, 0), Vector3(0, 0, 1), Vector3(0, 0, 0))
+    FLIP_X = Transform(Vector3(-1, 0, 0), Vector3(0, 1, 0), Vector3(0, 0, 1), Vector3(0, 0, 0))
+    FLIP_Y = Transform(Vector3(1, 0, 0), Vector3(0, -1, 0), Vector3(0, 0, 1), Vector3(0, 0, 0))
+    FLIP_Z = Transform(Vector3(1, 0, 0), Vector3(0, 1, 0), Vector3(0, 0, -1), Vector3(0, 0, 0))
+{% endblock %}
diff --git a/generation/builtins_templates/transform2d.tmpl.pxi b/generation/builtins_templates/transform2d.tmpl.pxi
new file mode 100644
index 0000000..97c3a71
--- /dev/null
+++ b/generation/builtins_templates/transform2d.tmpl.pxi
@@ -0,0 +1,97 @@
+{%- block pxd_header %}
+{% endblock -%}
+{%- block pyx_header %}
+{% endblock -%}
+
+
+@cython.final
+cdef class Transform2D:
+{% block cdef_attributes %}
+    cdef godot_transform2d _gd_data
+{% endblock %}
+
+{% block python_defs %}
+    def __init__(self, x_axis=None, y_axis=None, origin=None):
+        if x_axis is None and y_axis is None and origin is None:
+            {{ force_mark_rendered("godot_transform2d_new_identity") }}
+            gdapi10.godot_transform2d_new_identity(&self._gd_data)
+        else:
+            {{ force_mark_rendered("godot_transform2d_new_axis_origin") }}
+            gdapi10.godot_transform2d_new_axis_origin(
+                &self._gd_data,
+                &(<Vector2?>x_axis)._gd_data,
+                &(<Vector2?>y_axis)._gd_data,
+                &(<Vector2?>origin)._gd_data,
+            )
+
+    @staticmethod
+    def from_rot_pos(godot_real rot, Vector2 pos not None):
+        cdef Transform2D ret = Transform2D.__new__(Transform2D)
+        {{ force_mark_rendered("godot_transform2d_new") }}
+        gdapi10.godot_transform2d_new(&ret._gd_data, rot, &pos._gd_data)
+        return ret
+
+    def __repr__(Transform2D self):
+        return f"<Transform2D({self.as_string()})>"
+
+    {{ render_operator_eq() | indent }}
+    {{ render_operator_ne() | indent }}
+
+    {{ render_method("operator_multiply", py_name="__mul__") | indent }}
+
+    # TODO: add axis properties once gdnative is updated
+    {{ render_property("origin", getter="get_origin") | indent }}
+
+    {{ render_method("as_string") | indent }}
+    {{ render_method("inverse") | indent }}
+    {{ render_method("affine_inverse") | indent }}
+    {{ render_method("get_rotation") | indent }}
+    {{ render_method("get_scale") | indent }}
+    {{ render_method("orthonormalized") | indent }}
+    {{ render_method("rotated") | indent }}
+    {{ render_method("scaled") | indent }}
+    {{ render_method("translated") | indent }}
+
+    def xform(Transform2D self, v):
+        cdef Vector2 ret_v2
+        cdef Rect2 ret_r2
+        try:
+            ret_v2 = Vector2.__new__(Vector2)
+            {{ force_mark_rendered("godot_transform2d_xform_vector2") }}
+            ret_v2._gd_data = gdapi10.godot_transform2d_xform_vector2(&self._gd_data, &(<Vector2?>v)._gd_data)
+            return ret_v2
+        except TypeError:
+            pass
+        try:
+            ret_r2 = Rect2.__new__(Rect2)
+            {{ force_mark_rendered("godot_transform2d_xform_rect2") }}
+            ret_r2._gd_data = gdapi10.godot_transform2d_xform_rect2(&self._gd_data, &(<Rect2?>v)._gd_data)
+            return ret_r2
+        except TypeError:
+            raise TypeError("`v` must be Vector2 or Rect2")
+
+    def xform_inv(Transform2D self, v):
+        cdef Vector2 ret_v2
+        cdef Rect2 ret_r2
+        try:
+            ret_v2 = Vector2.__new__(Vector2)
+            {{ force_mark_rendered("godot_transform2d_xform_inv_vector2") }}
+            ret_v2._gd_data = gdapi10.godot_transform2d_xform_inv_vector2(&self._gd_data, &(<Vector2?>v)._gd_data)
+            return ret_v2
+        except TypeError:
+            pass
+        try:
+            ret_r2 = Rect2.__new__(Rect2)
+            {{ force_mark_rendered("godot_transform2d_xform_inv_rect2") }}
+            ret_r2._gd_data = gdapi10.godot_transform2d_xform_inv_rect2(&self._gd_data, &(<Rect2?>v)._gd_data)
+            return ret_r2
+        except TypeError:
+            raise TypeError("`v` must be Vector2 or Rect2")
+
+    {{ render_method("basis_xform_vector2", py_name="basis_xform") | indent }}
+    {{ render_method("basis_xform_inv_vector2", py_name="basis_xform_inv") | indent }}
+    {{ render_method("interpolate_with") | indent }}
+{% endblock %}
+
+{%- block python_consts %}
+{% endblock %}
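Illustrative usage (not part of the diff) of the runtime dispatch in `xform` above:

```python
from godot import Transform2D, Vector2, Rect2

xf = Transform2D.from_rot_pos(0.0, Vector2(10, 0))  # pure translation
assert xf.xform(Vector2(1, 2)) == Vector2(11, 2)
assert isinstance(xf.xform(Rect2(0, 0, 1, 1)), Rect2)
try:
    xf.xform("nope")
except TypeError:
    pass  # anything other than Vector2/Rect2 is rejected
```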
diff --git a/generation/builtins_templates/vector2.tmpl.pxi b/generation/builtins_templates/vector2.tmpl.pxi
new file mode 100644
index 0000000..2704676
--- /dev/null
+++ b/generation/builtins_templates/vector2.tmpl.pxi
@@ -0,0 +1,123 @@
+{%- block pxd_header %}
+{% endblock -%}
+{%- block pyx_header %}
+import math
+
+cdef inline Vector2 Vector2_multiply_vector(Vector2 self, Vector2 b):
+    cdef Vector2 ret = Vector2.__new__(Vector2)
+    {{ force_mark_rendered("godot_vector2_operator_multiply_vector") }}
+    ret._gd_data = gdapi10.godot_vector2_operator_multiply_vector(&self._gd_data, &b._gd_data)
+    return ret
+
+cdef inline Vector2 Vector2_multiply_scalar(Vector2 self, godot_real b):
+    cdef Vector2 ret = Vector2.__new__(Vector2)
+    {{ force_mark_rendered("godot_vector2_operator_multiply_scalar") }}
+    ret._gd_data = gdapi10.godot_vector2_operator_multiply_scalar(&self._gd_data, b)
+    return ret
+
+cdef inline Vector2 Vector2_divide_vector(Vector2 self, Vector2 b):
+    cdef Vector2 ret = Vector2.__new__(Vector2)
+    {{ force_mark_rendered("godot_vector2_operator_divide_vector") }}
+    ret._gd_data = gdapi10.godot_vector2_operator_divide_vector(&self._gd_data, &b._gd_data)
+    return ret
+
+cdef inline Vector2 Vector2_divide_scalar(Vector2 self, godot_real b):
+    cdef Vector2 ret = Vector2.__new__(Vector2)
+    {{ force_mark_rendered("godot_vector2_operator_divide_scalar") }}
+    ret._gd_data = gdapi10.godot_vector2_operator_divide_scalar(&self._gd_data, b)
+    return ret
+{% endblock -%}
+
+
+@cython.final
+cdef class Vector2:
+{% block cdef_attributes %}
+    cdef godot_vector2 _gd_data
+{% endblock %}
+
+{% block python_defs %}
+    def __init__(self, godot_real x=0.0, godot_real y=0.0):
+        {{ force_mark_rendered("godot_vector2_new") }}
+        gdapi10.godot_vector2_new(&self._gd_data, x, y)
+
+    def __repr__(Vector2 self):
+        return f"<Vector2(x={self.x}, y={self.y})>"
+
+    {{ render_operator_eq() | indent }}
+    {{ render_operator_ne() | indent }}
+    {{ render_operator_lt() | indent }}
+
+    {{ render_method("operator_neg", py_name="__neg__") | indent }}
+
+    def __pos__(Vector2 self):
+        return self
+
+    {{ render_method("operator_add", py_name="__add__") | indent }}
+    {{ render_method("operator_subtract", py_name="__sub__") | indent }}
+
+    def __mul__(Vector2 self, val):
+        cdef Vector2 _val
+        try:
+            _val = <Vector2?>val
+        except TypeError:
+            return Vector2_multiply_scalar(self, val)
+        else:
+            return Vector2_multiply_vector(self, _val)
+
+    def __truediv__(Vector2 self, val):
+        cdef Vector2 _val
+        try:
+            _val = <Vector2?>val
+        except TypeError:
+            if val == 0:
+                raise ZeroDivisionError()
+            return Vector2_divide_scalar(self, val)
+        else:
+            if _val.x == 0 or _val.y == 0:
+                raise ZeroDivisionError()
+            return Vector2_divide_vector(self, _val)
+
+    {{ render_property("x", "get_x", "set_x") | indent }}
+    {{ render_property("y", "get_y", "set_y") | indent }}
+    {{ render_property("width", "get_x", "set_x") | indent }}
+    {{ render_property("height", "get_y", "set_y") | indent }}
+
+    {{ render_method("as_string") | indent }}
+    {{ render_method("normalized") | indent }}
+    {{ render_method("length") | indent }}
+    {{ render_method("angle") | indent }}
+    {{ render_method("length_squared") | indent }}
+    {{ render_method("is_normalized") | indent }}
+    {{ render_method("distance_to") | indent }}
+    {{ render_method("distance_squared_to") | indent }}
+    {{ render_method("angle_to") | indent }}
+    {{ render_method("angle_to_point") | indent }}
+    {{ render_method("linear_interpolate") | indent }}
+    {{ render_method("cubic_interpolate") | indent }}
+    {{ render_method("move_toward") | indent }}
+    {{ render_method("direction_to") | indent }}
+    {{ render_method("rotated") | indent }}
+    {{ render_method("tangent") | indent }}
+    {{ render_method("floor") | indent }}
+    {{ render_method("snapped") | indent }}
+    {{ render_method("aspect") | indent }}
+    {{ render_method("dot") | indent }}
+    {{ render_method("slide") | indent }}
+    {{ render_method("bounce") | indent }}
+    {{ render_method("reflect") | indent }}
+    {{ render_method("abs") | indent }}
+    {{ render_method("clamped") | indent }}
+{% endblock %}
+
+{%- block python_consts %}
+    AXIS_X = 0
+    AXIS_Y = 1
+
+    ZERO = Vector2(0, 0)
+    ONE = Vector2(1, 1)
+    INF = Vector2(math.inf, math.inf)
+    LEFT = Vector2(-1, 0)
+    RIGHT = Vector2(1, 0)
+    UP = Vector2(0, -1)
+    DOWN = Vector2(0, 1)
+{% endblock %}
diff --git a/generation/builtins_templates/vector3.tmpl.pxi b/generation/builtins_templates/vector3.tmpl.pxi
new file mode 100644
index 0000000..9c0047c
--- /dev/null
+++ b/generation/builtins_templates/vector3.tmpl.pxi
@@ -0,0 +1,160 @@
+{%- block pxd_header %}
+{% endblock -%}
+{%- block pyx_header %}
+from godot._hazmat.gdnative_api_struct cimport godot_vector3_axis
+
+import math
+from enum import IntEnum
+
+
+cdef inline Vector3 Vector3_multiply_vector(Vector3 self, Vector3 b):
+    cdef Vector3 ret = Vector3.__new__(Vector3)
+    {{ force_mark_rendered("godot_vector3_operator_multiply_vector") }}
+    ret._gd_data = gdapi10.godot_vector3_operator_multiply_vector(&self._gd_data, &b._gd_data)
+    return ret
+
+cdef inline Vector3 Vector3_multiply_scalar(Vector3 self, godot_real b):
+    cdef Vector3 ret = Vector3.__new__(Vector3)
+    {{ force_mark_rendered("godot_vector3_operator_multiply_scalar") }}
+    ret._gd_data = gdapi10.godot_vector3_operator_multiply_scalar(&self._gd_data, b)
+    return ret
+
+cdef inline Vector3 Vector3_divide_vector(Vector3 self, Vector3 b):
+    cdef Vector3 ret = Vector3.__new__(Vector3)
+    {{ force_mark_rendered("godot_vector3_operator_divide_vector") }}
+    ret._gd_data = gdapi10.godot_vector3_operator_divide_vector(&self._gd_data, &b._gd_data)
+    return ret
+
+cdef inline Vector3 Vector3_divide_scalar(Vector3 self, godot_real b):
+    cdef Vector3 ret = Vector3.__new__(Vector3)
+    {{ force_mark_rendered("godot_vector3_operator_divide_scalar") }}
+    ret._gd_data = gdapi10.godot_vector3_operator_divide_scalar(&self._gd_data, b)
+    return ret
+
+{% endblock -%}
+
+
+@cython.final
+cdef class Vector3:
+{% block cdef_attributes %}
+    cdef godot_vector3 _gd_data
+{% endblock %}
+
+{% block python_defs %}
+    def __init__(self, godot_real x=0.0, godot_real y=0.0, godot_real z=0.0):
+        {{ force_mark_rendered("godot_vector3_new") }}
+        gdapi10.godot_vector3_new(&self._gd_data, x, y, z)
+
+    def __repr__(self):
+        return f"<Vector3(x={self.x}, y={self.y}, z={self.z})>"
+
+    @property
+    def x(self) -> godot_real:
+        {{ force_mark_rendered("godot_vector3_get_axis") }}
+        return gdapi10.godot_vector3_get_axis(&self._gd_data, godot_vector3_axis.GODOT_VECTOR3_AXIS_X)
+
+    @x.setter
+    def x(self, godot_real val) -> None:
+        {{ force_mark_rendered("godot_vector3_set_axis") }}
+        gdapi10.godot_vector3_set_axis(&self._gd_data, godot_vector3_axis.GODOT_VECTOR3_AXIS_X, val)
+
+    @property
+    def y(self) -> godot_real:
+        {{ force_mark_rendered("godot_vector3_get_axis") }}
+        return gdapi10.godot_vector3_get_axis(&self._gd_data, godot_vector3_axis.GODOT_VECTOR3_AXIS_Y)
+
+    @y.setter
+    def y(self, godot_real val) -> None:
+        {{ force_mark_rendered("godot_vector3_set_axis") }}
+        gdapi10.godot_vector3_set_axis(&self._gd_data, godot_vector3_axis.GODOT_VECTOR3_AXIS_Y, val)
+
+    @property
+    def z(self) -> godot_real:
+        {{ force_mark_rendered("godot_vector3_get_axis") }}
+        return gdapi10.godot_vector3_get_axis(&self._gd_data, godot_vector3_axis.GODOT_VECTOR3_AXIS_Z)
+
+    @z.setter
+    def z(self, godot_real val) -> None:
+        {{ force_mark_rendered("godot_vector3_set_axis") }}
+        gdapi10.godot_vector3_set_axis(&self._gd_data, godot_vector3_axis.GODOT_VECTOR3_AXIS_Z, val)
+
+    {{ render_operator_eq() | indent }}
+    {{ render_operator_ne() | indent }}
+    {{ render_operator_lt() | indent }}
+
+    {{ render_method("operator_neg", py_name="__neg__") | indent }}
+
+    def __pos__(Vector3 self):
+        return self
+
+    {{ render_method("operator_add", py_name="__add__") | indent }}
+    {{ render_method("operator_subtract", py_name="__sub__") | indent }}
+
+    def __mul__(Vector3 self, val):
+        cdef Vector3 _val
+        try:
+            _val = <Vector3?>val
+        except TypeError:
+            return Vector3_multiply_scalar(self, val)
+        else:
+            return Vector3_multiply_vector(self, _val)
+
+    def __truediv__(Vector3 self, val):
+        cdef Vector3 _val
+        try:
+            _val = <Vector3?>val
+        except TypeError:
+            if val == 0:
+                raise ZeroDivisionError()
+            return Vector3_divide_scalar(self, val)
+        else:
+            if _val.x == 0 or _val.y == 0 or _val.z == 0:
+                raise ZeroDivisionError()
+            return Vector3_divide_vector(self, _val)
+
+    {{ render_method("as_string") | indent }}
+    {{ render_method("min_axis") | indent }}
+    {{ render_method("max_axis") | indent }}
+    {{ render_method("length") | indent }}
+    {{ render_method("length_squared") | indent }}
+    {{ render_method("is_normalized") | indent }}
+    {{ render_method("normalized") | indent }}
+    {{ render_method("inverse") | indent }}
+    {{ render_method("snapped") | indent }}
+    {{ render_method("rotated") | indent }}
+    {{ render_method("linear_interpolate") | indent }}
+    {{ render_method("cubic_interpolate") | indent }}
+    {{ render_method("move_toward") | indent }}
+    {{ render_method("direction_to") | indent }}
+    {{ render_method("dot") | indent }}
+    {{ render_method("cross") | indent }}
+    {{ render_method("outer") | indent }}
+    {{ render_method("to_diagonal_matrix") | indent }}
+    {{ render_method("abs") | indent }}
+    {{ render_method("floor") | indent }}
+    {{ render_method("ceil") | indent }}
+    {{ render_method("distance_to") | indent }}
+    {{ render_method("distance_squared_to") | indent }}
+    {{ render_method("angle_to") | indent }}
+    {{ render_method("slide") | indent }}
+    {{ render_method("bounce") | indent }}
+    {{ render_method("reflect") | indent }}
+{% endblock %}
+
+{%- block python_consts %}
+    AXIS = IntEnum("AXIS", {
+        "X": godot_vector3_axis.GODOT_VECTOR3_AXIS_X,
+        "Y": godot_vector3_axis.GODOT_VECTOR3_AXIS_Y,
+        "Z": godot_vector3_axis.GODOT_VECTOR3_AXIS_Z,
+    })
+
+    ZERO = Vector3(0, 0, 0)  # Zero vector.
+    ONE = Vector3(1, 1, 1)  # One vector.
+    INF = Vector3(math.inf, math.inf, math.inf)  # Infinite vector.
+    LEFT = Vector3(-1, 0, 0)  # Left unit vector.
+    RIGHT = Vector3(1, 0, 0)  # Right unit vector.
+    UP = Vector3(0, 1, 0)  # Up unit vector.
+    DOWN = Vector3(0, -1, 0)  # Down unit vector.
+    FORWARD = Vector3(0, 0, -1)  # Forward unit vector.
+    BACK = Vector3(0, 0, 1)  # Back unit vector.
+{% endblock %} diff --git a/generation/generate_bindings.py b/generation/generate_bindings.py new file mode 100644 index 0000000..247685c --- /dev/null +++ b/generation/generate_bindings.py @@ -0,0 +1,613 @@ +import os +import argparse +import json +import re +from warnings import warn +from keyword import iskeyword +from collections import defaultdict +from jinja2 import Environment, FileSystemLoader +from dataclasses import dataclass, replace +from typing import Optional, Dict, List, Tuple + +from type_specs import TypeSpec, ALL_TYPES_EXCEPT_OBJECTS + + +BASEDIR = os.path.dirname(__file__) +env = Environment( + loader=FileSystemLoader(f"{BASEDIR}/bindings_templates"), trim_blocks=True, lstrip_blocks=True +) + + +@dataclass +class PropertyInfo: + name: str + type: TypeSpec + getter: str + setter: str + index: Optional[int] + + # If using feature we don't support yet + unsupported_reason: Optional[str] = None + + @property + def is_supported(self) -> bool: + return self.unsupported_reason is None + + +@dataclass +class ArgumentInfo: + name: str + type: TypeSpec + default_value: Optional[str] + + @property + def has_default_value(self): + return self.default_value is not None + + +@dataclass +class SignalInfo: + name: str + arguments: List[ArgumentInfo] + + # If using feature we don't support yet + unsupported_reason: Optional[str] = None + + @property + def is_supported(self) -> bool: + return self.unsupported_reason is None + + +@dataclass +class MethodInfo: + name: str + return_type: TypeSpec + is_editor: bool + is_noscript: bool + is_const: bool + is_reverse: bool + is_virtual: bool + has_varargs: bool + is_from_script: bool + arguments: List[ArgumentInfo] + + # If using feature we don't support yet + unsupported_reason: Optional[str] = None + + @property + def is_supported(self) -> bool: + return self.unsupported_reason is None + + +@dataclass +class EnumInfo: + name: str + values: Dict[str, int] + + +@dataclass +class ClassInfo: + # Cleaned up name (mainly ensure singleton classes have a leading underscore) + name: str + # Name as provided in api.json (needed to use GDNative's ClassDB) + bind_register_name: str + # Parent class name (also cleaned up) + base_class: str + singleton: Optional[str] + instantiable: bool + is_reference: bool + constants: Dict[str, int] + properties: List[PropertyInfo] + signals: List[SignalInfo] + methods: List[MethodInfo] + enums: List[EnumInfo] + + +TYPES = {t.gdapi_type: t for t in ALL_TYPES_EXCEPT_OBJECTS} + + +# Basically provide enough to run the tests and the pong demo +SAMPLE_CLASSES = { + "Object", + "_ProjectSettings", + "_Input", + "_InputMap", + "MainLoop", + "SceneTree", + "Node", + "CanvasItem", + "Node2D", + "Reference", + "Resource", + "OpenSimplexNoise", + "CollisionObject2D", + "Area2D", + "ARVRInterface", + "ARVRInterfaceGDNative", + "Environment", + "Viewport", + "Script", + "PluginScript", + "GDScript", + "Control", + "Label", + # "_ClassDB", + # "_Engine", + # "_Geometry", + # "_JSON", + "_OS", + "_ResourceLoader", + # "_ResourceSaver", + # "_VisualScriptEditor", + "SurfaceTool", + "Mesh", + "ArrayMesh", + "Spatial", + "VisualInstance", + "GeometryInstance", + "MeshInstance", + # For REPL editor plugin + "GlobalConstants", + "EditorPlugin", + "PackedScene", + "BaseButton", + "Button", + "ToolButton", + "Panel", + "Container", + "BoxContainer", + "VBoxContainer", + "HBoxContainer", + "RichTextLabel", + "LineEdit", + "Font", + "BitmapFont", + "DynamicFont", + "DynamicFontData", + # Input event & friends stuff + "InputEvent", + 
"InputEventAction", + "InputEventJoypadButton", + "InputEventJoypadMotion", + "InputEventMIDI", + "InputEventScreenDrag", + "InputEventScreenTouch", + "InputEventWithModifiers", + "InputEventGesture", + "InputEventMagnifyGesture", + "InputEventPanGesture", + "InputEventKey", + "InputEventMouse", + "InputEventMouseButton", + "InputEventMouseMotion", +} + +SUPPORTED_TYPES = { + "void", + "godot_bool", + "godot_int", + "godot_real", + "godot_string", + "godot_variant", + "godot_object", + "godot_aabb", + "godot_array", + "godot_basis", + "godot_color", + "godot_dictionary", + "godot_node_path", + "godot_plane", + "godot_quat", + "godot_rect2", + "godot_rid", + "godot_transform", + "godot_transform2d", + "godot_vector2", + "godot_vector3", + "godot_pool_byte_array", + "godot_pool_int_array", + "godot_pool_real_array", + "godot_pool_string_array", + "godot_pool_vector2_array", + "godot_pool_vector3_array", + "godot_pool_color_array", +} + + +def pre_cook_patch_stuff(raw_data): + for klass in raw_data: + # see https://github.com/godotengine/godot/pull/40386 + if klass["name"] == "Reference": + klass["is_reference"] = True + for prop in klass["properties"]: + prop["name"] = prop["name"].replace("/", "_") + # see https://github.com/godotengine/godot/pull/40383 + if prop["type"] == "17/17:RichTextEffect": + prop["type"] = "Array" + for meth in klass["methods"]: + if meth["is_noscript"]: + warn( + f"`{klass['name']}.{meth['name']}` has `is_noscript=True`" + " (should never be the case...)" + ) + if meth["is_from_script"]: + warn( + f"`{klass['name']}.{meth['name']}` has `is_from_script=True`" + " (should never be the case...)" + ) + + +def post_cook_patch_stuff(classes): + for klass in classes: + # See https://github.com/godotengine/godot/issues/34254 + if klass.name == "_OS": + for meth in klass.methods: + if meth.name in ( + "get_static_memory_usage", + "get_static_memory_peak_usage", + "get_dynamic_memory_usage", + ): + meth.return_type.c_type = "uint64_t" + + +def strip_unsupported_stuff(classes): + supported_classes = {k.name for k in classes} + + def _is_supported_type(specs): + if specs.is_builtin: + return specs.c_type in SUPPORTED_TYPES + elif specs.is_object: + return specs.cy_type in supported_classes + else: + return True + + for klass in classes: + for meth in klass.methods: + unsupported_reason = None + # TODO: handle default param values + # TODO: handle those flags + if meth.is_editor: + unsupported_reason = "attribute `is_editor=True` not supported" + if meth.is_reverse: + unsupported_reason = "attribute `is_reverse=True` not supported" + if meth.has_varargs: + unsupported_reason = "attribute `has_varargs=True` not supported" + if not _is_supported_type(meth.return_type): + unsupported_reason = f"return type {meth.return_type} not supported" + bad_arg = next( + (arg for arg in meth.arguments if not _is_supported_type(arg.type)), None + ) + if bad_arg: + unsupported_reason = f"argument type {bad_arg} not supported" + + if unsupported_reason: + warn(f"Ignoring `{klass.name}.{meth.name}` ({unsupported_reason})") + meth.unsupported_reason = unsupported_reason + + for prop in klass.properties: + if not _is_supported_type(prop.type): + unsupported_reason = f"property type {prop.type} not supported" + warn(f"Ignoring property `{klass.name}.{prop.name}` ({unsupported_reason})") + prop.unsupported_reason = unsupported_reason + + for signal in klass.signals: + bad_arg = next( + (arg for arg in signal.arguments if not _is_supported_type(arg.type)), None + ) + if bad_arg: + unsupported_reason = 
f"argument type {bad_arg} not supported" + warn(f"Ignoring signal `{klass.name}.{signal.name}` ({unsupported_reason})") + signal.unsupported_reason = unsupported_reason + + +def strip_sample_stuff(classes): + def _is_supported(type): + return not type.is_object or type.cy_type in SAMPLE_CLASSES + + classes2 = [klass for klass in classes if klass.name in SAMPLE_CLASSES] + for klass in classes2: + klass.methods = [ + meth + for meth in klass.methods + if all(_is_supported(arg.type) for arg in meth.arguments) + and _is_supported(meth.return_type) + ] + klass.signals = [ + signal + for signal in klass.signals + if all(_is_supported(arg.type) for arg in signal.arguments) + ] + klass.properties = [prop for prop in klass.properties if _is_supported(prop.type)] + + classes[:] = classes2 + + +def camel_to_snake(name): + s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) + return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower() + + +def build_class_renames(data): + renames = {"": ""} + for item in data: + old_name = item["name"] + # In api.json, some singletons have underscore and others don't ( + # e.g. ARVRServer vs _OS). But to access them with `get_singleton_object` + # we always need the name without underscore... + if item["singleton"] and not old_name.startswith("_"): + new_name = f"_{old_name}" + else: + new_name = old_name + renames[old_name] = new_name + return renames + + +def cook_data(data): + classes = [] + constants = {} + + class_renames = build_class_renames(data) + + def _cook_type(type_): + try: + return TYPES[type_] + except KeyError: + if type_.startswith("enum."): + # typically somethin like ``enum.AnimationTree::AnimationProcessMode`` + pcls, ecls = re.match(r"enum.(\w+)::(\w+)", type_).groups() + return TypeSpec( + gdapi_type=type_, + c_type="godot_int", + cy_type="godot_int", + py_type=f"{class_renames[pcls]}.{ecls}", + is_base_type=True, + is_stack_only=True, + is_enum=True, + ) + + # TODO: improve handling of resources + if "," in type_: + return TypeSpec( + gdapi_type=type_, + c_type="godot_object", + cy_type="Resource", + py_type=f"Union[{','.join([class_renames[x] for x in type_.split(',')])}]", + is_object=True, + ) + else: + return TypeSpec( + gdapi_type=type_, + c_type="godot_object", + cy_type=class_renames[type_], + is_object=True, + ) + + def _cook_name(name): + if iskeyword(name) or name in ("char", "bool", "int", "float", "short", "type"): + return f"{name}_" + else: + return name + + def _cook_default_value(type, value, has_default_value): + if not has_default_value: + return None + # Mostly ad-hoc stuff given default values format in api.json is broken + if type in ("godot_bool", "godot_int", "godot_real", "godot_variant"): + if value == "Null": + return "None" + else: + return value + elif type == "godot_string": + return f'"{value}"' + elif type == "godot_object" and value in ("[Object:null]", "Null"): + return "None" + elif type == "godot_dictionary" and value == "{}": + return "Dictionary()" + elif type == "godot_vector2": + return f"Vector2{value}" + elif type == "godot_rect2": + return f"Rect2{value}" + elif type == "godot_vector3": + return f"Vector3{value}" + elif type == "godot_transform" and value == "1, 0, 0, 0, 1, 0, 0, 0, 1 - 0, 0, 0": + return ( + "Transform(Vector3(1, 0, 0), Vector3(0, 1, 0), Vector3(0, 0, 1), Vector3(0, 0, 0))" + ) + elif type == "godot_transform2d" and value == "((1, 0), (0, 1), (0, 0))": + return "Transform2D(Vector2(1, 0), Vector2(0, 1), Vector2(0, 0))" + elif value == "[RID]": + return "RID()" + elif type == "godot_color": + 
return f"Color({value})" + elif type == "godot_pool_color_array" and value == "[PoolColorArray]": + return "PoolColorArray()" + elif type == "godot_array" and value == "[]": + return f"Array()" + elif type == "godot_pool_vector2_array" and value == "[]": + return f"PoolVector2Array()" + elif type == "godot_pool_vector3_array" and value == "[]": + return f"PoolVector3Array()" + elif type == "godot_pool_int_array" and value == "[]": + return f"PoolIntArray()" + elif type == "godot_pool_real_array" and value == "[]": + return f"PoolRealArray()" + elif type == "godot_pool_string_array" and value == "[]": + return f"PoolStringArray()" + elif value == "Null": + return "None" + else: + warn(f"Unknown default arg value: type=`{type}`, value=`{value}`") + return "None" + + for cls_data in data: + if cls_data["name"] == "GlobalConstants": + constants = cls_data["constants"] + continue + + cls_info = { + "bind_register_name": cls_data["name"], + "name": class_renames[cls_data["name"]], + "base_class": class_renames[cls_data["base_class"]], + "instantiable": cls_data["instanciable"], + "is_reference": cls_data["is_reference"], + "constants": cls_data["constants"], + "properties": [], + "signals": [], + "methods": [], + "enums": [], + } + + if cls_data["singleton"]: + # Strip the leading underscore + cls_info["singleton"] = cls_info["name"][1:] + else: + cls_info["singleton"] = None + + for prop_data in cls_data["properties"]: + cls_info["properties"].append( + PropertyInfo( + name=_cook_name(prop_data["name"]), + type=_cook_type(prop_data["type"]), + getter=prop_data["getter"], + setter=prop_data["setter"], + index=prop_data["index"] if prop_data["index"] != -1 else None, + ) + ) + + for signal_data in cls_data["signals"]: + args_info = [ + ArgumentInfo( + name=_cook_name(arg_data["name"]), + type=_cook_type(arg_data["type"]), + default_value=None, + ) + for arg_data in signal_data["arguments"] + ] + if any(arg_data["default_value"] != "" for arg_data in signal_data["arguments"]): + warn( + f"{cls_info['name']}.{signal_data['name']}: default value are not supported for signals" + ) + cls_info["signals"].append( + SignalInfo(name=_cook_name(signal_data["name"]), arguments=args_info) + ) + + for meth_data in cls_data["methods"]: + args_info = [ + ArgumentInfo( + name=_cook_name(arg_data["name"]), + type=_cook_type(arg_data["type"]), + default_value=_cook_default_value( + _cook_type(arg_data["type"]).c_type, + arg_data["default_value"], + arg_data["has_default_value"], + ), + ) + for arg_data in meth_data["arguments"] + ] + meth_info = { + "name": _cook_name(meth_data["name"]), + "return_type": _cook_type(meth_data["return_type"]), + "is_editor": meth_data["is_editor"], + "is_noscript": meth_data["is_noscript"], + "is_const": meth_data["is_const"], + "is_reverse": meth_data["is_reverse"], + "is_virtual": meth_data["is_virtual"], + "has_varargs": meth_data["has_varargs"], + "is_from_script": meth_data["is_from_script"], + "arguments": args_info, + } + cls_info["methods"].append(MethodInfo(**meth_info)) + + for enum_data in cls_data["enums"]: + cls_info["enums"].append( + EnumInfo(name=_cook_name(enum_data["name"]), values=enum_data["values"]) + ) + + classes.append(ClassInfo(**cls_info)) + + # Order classes by inheritance + inheritances = defaultdict(list) + for klass in classes: + inheritances[klass.base_class].append(klass) + sorted_classes = [*inheritances[""]] + todo_base_classes = [*inheritances[""]] + while todo_base_classes: + base_class = todo_base_classes.pop() + children_classes = 
inheritances[base_class.name] + todo_base_classes += children_classes + sorted_classes += children_classes + + return sorted_classes, constants + + +def load_bindings_specs_from_api_json( + api_json: dict, sample: bool +) -> Tuple[List[ClassInfo], Dict[str, int]]: + pre_cook_patch_stuff(api_json) + classes, constants = cook_data(api_json) + if sample: + strip_sample_stuff(classes) + strip_unsupported_stuff(classes) + post_cook_patch_stuff(classes) + return classes, constants + + +def generate_bindings( + no_suffix_output_path: str, classes_specs: List[ClassInfo], constants_specs: Dict[str, int] +): + pyx_output_path = f"{no_suffix_output_path}.pyx" + print(f"Generating {pyx_output_path}") + template = env.get_template("bindings.tmpl.pyx") + out = template.render(classes=classes_specs, constants=constants_specs) + with open(pyx_output_path, "w") as fd: + fd.write(out) + + pyi_output_path = f"{no_suffix_output_path}.pyi" + print(f"Generating {pyi_output_path}") + template = env.get_template("bindings.tmpl.pyi") + out = template.render(classes=classes_specs, constants=constants_specs) + with open(pyi_output_path, "w") as fd: + fd.write(out) + + pxd_output_path = f"{no_suffix_output_path}.pxd" + print(f"Generating {pxd_output_path}") + template = env.get_template("bindings.tmpl.pxd") + out = template.render(classes=classes_specs, constants=constants_specs) + with open(pxd_output_path, "w") as fd: + fd.write(out) + + +if __name__ == "__main__": + + def _parse_output(val): + suffix = ".pyx" + if not val.endswith(suffix): + raise argparse.ArgumentTypeError(f"Must have a `{suffix}` suffix") + return val[: -len(suffix)] + + parser = argparse.ArgumentParser(description="Generate godot api bindings bindings files") + parser.add_argument( + "--input", + "-i", + required=True, + metavar="API_PATH", + type=argparse.FileType("r", encoding="utf8"), + help="Path to Godot api.json file", + ) + parser.add_argument( + "--output", + "-o", + required=True, + metavar="BINDINGS_PYX", + type=_parse_output, + help="Path to store the generated bindings.pyx (also used to determine .pxd/.pyi output path)", + ) + parser.add_argument( + "--sample", + action="store_true", + help="Generate a subset of the bindings (faster to build, useful for dev)", + ) + args = parser.parse_args() + api_json = json.load(args.input) + classes_specs, constants_specs = load_bindings_specs_from_api_json(api_json, args.sample) + generate_bindings(args.output, classes_specs, constants_specs) diff --git a/generation/generate_builtins.py b/generation/generate_builtins.py new file mode 100644 index 0000000..1ba247a --- /dev/null +++ b/generation/generate_builtins.py @@ -0,0 +1,400 @@ +import os +import argparse +import json +import re +from warnings import warn +from functools import partial +from keyword import iskeyword +from dataclasses import dataclass, replace +from collections import defaultdict +from itertools import product +from jinja2 import Environment, FileSystemLoader, StrictUndefined +from typing import List, Set + +from type_specs import ( + TypeSpec, + ALL_TYPES_EXCEPT_OBJECTS, + TYPE_RID, + TYPE_VECTOR3, + TYPE_VECTOR2, + TYPE_AABB, + TYPE_BASIS, + TYPE_COLOR, + TYPE_STRING, + TYPE_RECT2, + TYPE_TRANSFORM2D, + TYPE_PLANE, + TYPE_QUAT, + TYPE_TRANSFORM, + TYPE_NODEPATH, + TYPE_DICTIONARY, + TYPE_ARRAY, +) + + +# TODO: after all, it may not be a great idea to share TypeSpec between builtin and binding scripts... 
+ + +# Bonus types +TYPES_SIZED_INT = [ + TypeSpec( + gdapi_type=f"{signed}int{size}_t", + c_type=f"{signed}int{size}_t", + cy_type=f"{signed}int{size}_t", + py_type="int", + is_base_type=True, + is_stack_only=True, + ) + for signed, size in product(["u", ""], [8, 32, 64]) +] +ALL_TYPES = [ + *ALL_TYPES_EXCEPT_OBJECTS, + *TYPES_SIZED_INT, + TypeSpec( + gdapi_type="godot_object", + c_type="godot_object", + cy_type="object", + py_type="Object", + is_object=True, + ), + TypeSpec( + gdapi_type="int", + c_type="int", + cy_type="int", + py_type="int", + is_base_type=True, + is_stack_only=True, + ), + TypeSpec( + gdapi_type="size_t", + c_type="size_t", + cy_type="size_t", + py_type="int", + is_base_type=True, + is_stack_only=True, + ), + # /!\ godot_real is a C float (note py_type is still `float` given that's how Python call all floating point numbers) + TypeSpec( + gdapi_type="double", + c_type="double", + cy_type="double", + py_type="float", + is_base_type=True, + is_stack_only=True, + ), + TypeSpec( + gdapi_type="wchar_t", + c_type="wchar_t", + cy_type="wchar_t", + is_base_type=True, + is_stack_only=True, + ), + TypeSpec( + gdapi_type="char", c_type="char", cy_type="char", is_base_type=True, is_stack_only=True + ), + TypeSpec( + gdapi_type="schar", + c_type="schar", + cy_type="signed char", + is_base_type=True, + is_stack_only=True, + ), + TypeSpec( + gdapi_type="godot_char_string", + c_type="godot_char_string", + cy_type="godot_char_string", + py_type="str", + is_builtin=True, + ), + TypeSpec( + gdapi_type="godot_string_name", + c_type="godot_string_name", + cy_type="godot_string_name", + py_type="str", + is_builtin=True, + ), + TypeSpec( + gdapi_type="bool", + c_type="bool", + cy_type="bool", + py_type="bool", + is_base_type=True, + is_stack_only=True, + ), +] +C_NAME_TO_TYPE_SPEC = {s.c_type: s for s in ALL_TYPES} +BUILTINS_TYPES = [s for s in ALL_TYPES if s.is_builtin] + + +TARGET_TO_TYPE_SPEC = { + "rid": TYPE_RID, + "vector3": TYPE_VECTOR3, + "vector2": TYPE_VECTOR2, + "aabb": TYPE_AABB, + "basis": TYPE_BASIS, + "color": TYPE_COLOR, + "gdstring": TYPE_STRING, + "rect2": TYPE_RECT2, + "transform2d": TYPE_TRANSFORM2D, + "plane": TYPE_PLANE, + "quat": TYPE_QUAT, + "transform": TYPE_TRANSFORM, + "node_path": TYPE_NODEPATH, + "dictionary": TYPE_DICTIONARY, + "array": TYPE_ARRAY, +} + + +@dataclass +class ArgumentSpec: + name: str + type: TypeSpec + is_ptr: bool + is_const: bool + + def __getattr__(self, key): + return getattr(self.type, key) + + +@dataclass +class BuiltinMethodSpec: + # Builtin type this method apply on (e.g. Vector2) + klass: TypeSpec + # Name of the function in the GDNative C API + c_name: str + # Basically gd_name without the `godot__` prefix + py_name: str + return_type: TypeSpec + args: List[ArgumentSpec] + gdapi: str + + +def cook_name(name): + return f"{name}_" if iskeyword(name) else name + + +BASEDIR = os.path.dirname(__file__) +env = Environment( + loader=FileSystemLoader(f"{BASEDIR}/builtins_templates"), + trim_blocks=True, + lstrip_blocks=False, + extensions=["jinja2.ext.loopcontrols"], + undefined=StrictUndefined, +) +env.filters["merge"] = lambda x, **kwargs: {**x, **kwargs} + + +def load_builtin_method_spec(func: dict, gdapi: str) -> BuiltinMethodSpec: + c_name = func["name"] + assert c_name.startswith("godot_"), func + for builtin_type in BUILTINS_TYPES: + prefix = f"{builtin_type.c_type}_" + if c_name.startswith(prefix): + py_name = c_name[len(prefix) :] + break + else: + # This function is not part of a builtin class (e.g. 
godot_print), we can ignore it
+        return
+
+    def _cook_type(raw_type):
+        # Hack type detection, might need to be improved with api evolutions
+        match = re.match(r"^(const\W+|)([a-zA-Z_0-9]+)(\W*\*|)$", raw_type.strip())
+        if not match:
+            raise RuntimeError(f"Unsupported type `{raw_type}` in function `{c_name}`")
+        is_const = bool(match.group(1))
+        c_type = match.group(2)
+        is_ptr = bool(match.group(3))
+
+        for type_spec in ALL_TYPES:
+            if c_type == type_spec.c_type:
+                break
+        else:
+            raise RuntimeError(f"Unsupported type `{raw_type}` in function `{c_name}`")
+
+        return is_const, is_ptr, type_spec
+
+    args = []
+    for arg_type, arg_name in func["arguments"]:
+        if arg_name.startswith("p_"):
+            arg_name = arg_name[2:]
+        arg_name = cook_name(arg_name)
+        arg_is_const, arg_is_ptr, arg_type_spec = _cook_type(arg_type)
+        args.append(
+            ArgumentSpec(
+                name=arg_name, type=arg_type_spec, is_ptr=arg_is_ptr, is_const=arg_is_const
+            )
+        )
+
+    ret_is_const, ret_is_ptr, ret_type_spec = _cook_type(func["return_type"])
+    return_type = ArgumentSpec(
+        name="", type=ret_type_spec, is_ptr=ret_is_ptr, is_const=ret_is_const
+    )
+
+    return BuiltinMethodSpec(
+        klass=builtin_type,
+        c_name=c_name,
+        py_name=py_name,
+        return_type=return_type,
+        args=args,
+        gdapi=gdapi,
+    )
+
+
+def pre_cook_patch_stuff(gdnative_api):
+    revision = gdnative_api["core"]
+    while revision:
+        for func in revision["api"]:
+            # `signed char` is used in some string methods to return comparison
+            # information (see `godot_string_casecmp_to`).
+            # The two-word type messes with our (poor) type parsing.
+            if func["return_type"] == "signed char":
+                func["return_type"] = "int8_t"
+        revision = revision["next"]
+
+
+def load_builtins_specs_from_gdnative_api_json(gdnative_api: dict) -> List[BuiltinMethodSpec]:
+    pre_cook_patch_stuff(gdnative_api)
+    revision = gdnative_api["core"]
+    specs = []
+    while revision:
+        revision_gdapi = f"gdapi{revision['version']['major']}{revision['version']['minor']}"
+        for func in revision["api"]:
+            assert func["name"] not in specs
+            # Ignore godot pool (generated by another script)
+            if func["name"].startswith("godot_pool_") or func["name"].startswith("godot_variant_"):
+                continue
+            spec = load_builtin_method_spec(func, gdapi=revision_gdapi)
+            if spec:
+                specs.append(spec)
+        revision = revision["next"]
+
+    return specs
+
+
+def generate_builtins(
+    no_suffix_output_path: str, methods_specs: List[BuiltinMethodSpec]
+) -> Set[str]:
+    methods_c_name_to_spec = {s.c_name: s for s in methods_specs}
+
+    # Track the methods used in the templates to enforce they are in sync with the gdnative_api.json
+    rendered_methods = set()
+
+    def _mark_rendered(method_c_name):
+        rendered_methods.add(method_c_name)
+        return ""  # Return empty string to not output anything when used in a template
+
+    def _render_target_to_template(render_target):
+        assert isinstance(render_target, str)
+        return f"{render_target}.tmpl.pxi"
+
+    def _get_builtin_method_spec(method_c_name):
+        assert isinstance(method_c_name, str)
+        try:
+            _mark_rendered(method_c_name)
+            return methods_c_name_to_spec[method_c_name]
+        except KeyError:
+            raise RuntimeError(f"Unknown method `{method_c_name}`")
+
+    def _get_type_spec(py_type):
+        assert isinstance(py_type, str)
+        try:
+            return next(t for t in ALL_TYPES if t.py_type == py_type)
+        except StopIteration:
+            raise RuntimeError(f"Unknown type `{py_type}`")
+
+    def _get_target_method_spec_factory(render_target):
+        assert isinstance(render_target, str)
+        try:
+            type_spec = TARGET_TO_TYPE_SPEC[render_target]
+        except KeyError:
+            raise RuntimeError(f"Unknown target `{render_target}`")
+
+        def _get_target_method_spec(method_py_name):
+            return _get_builtin_method_spec(f"{type_spec.c_type}_{method_py_name}")
+
+        return _get_target_method_spec
+
+    context = {
+        "render_target_to_template": _render_target_to_template,
+        "get_builtin_method_spec": _get_builtin_method_spec,
+        "get_type_spec": _get_type_spec,
+        "get_target_method_spec_factory": _get_target_method_spec_factory,
+        "force_mark_rendered": _mark_rendered,
+    }
+
+    template = env.get_template("builtins.tmpl.pyx")
+    pyx_output_path = f"{no_suffix_output_path}.pyx"
+    print(f"Generating {pyx_output_path}")
+    out = template.render(**context)
+    with open(pyx_output_path, "w") as fd:
+        fd.write(out)
+
+    pyi_output_path = f"{no_suffix_output_path}.pyi"
+    print(f"Generating {pyi_output_path}")
+    template = env.get_template("builtins.tmpl.pyi")
+    out = template.render(**context)
+    with open(pyi_output_path, "w") as fd:
+        fd.write(out)
+
+    pxd_output_path = f"{no_suffix_output_path}.pxd"
+    print(f"Generating {pxd_output_path}")
+    template = env.get_template("builtins.tmpl.pxd")
+    out = template.render(**context)
+    with open(pxd_output_path, "w") as fd:
+        fd.write(out)
+
+    return rendered_methods
+
+
+def ensure_all_methods_has_been_rendered(
+    methods_specs: List[BuiltinMethodSpec], rendered_methods: Set[str]
+):
+    all_methods = {s.c_name for s in methods_specs}
+
+    unknown_rendered_methods = rendered_methods - all_methods
+    for method in sorted(unknown_rendered_methods):
+        print(f"ERROR: `{method}` is used in the templates but not present in gdnative_api.json")
+
+    not_rendered_methods = all_methods - rendered_methods
+
+    for method in sorted(not_rendered_methods):
+        print(f"ERROR: `{method}` is listed in gdnative_api.json but not used in the templates")
+
+    return not unknown_rendered_methods and not not_rendered_methods
+
+
+if __name__ == "__main__":
+
+    def _parse_output(val):
+        suffix = ".pyx"
+        if not val.endswith(suffix):
+            raise argparse.ArgumentTypeError(f"Must have a `{suffix}` suffix")
+        return val[: -len(suffix)]
+
+    parser = argparse.ArgumentParser(
+        description="Generate godot builtins bindings files (except pool arrays)"
+    )
+    parser.add_argument(
+        "--input",
+        "-i",
+        required=True,
+        metavar="GDNATIVE_API_PATH",
+        type=argparse.FileType("r", encoding="utf8"),
+        help="Path to Godot gdnative_api.json file",
+    )
+    parser.add_argument(
+        "--output",
+        "-o",
+        required=True,
+        metavar="BUILTINS_PYX",
+        type=_parse_output,
+        help="Path to store the generated builtins.pyx (also used to determine .pxd/.pyi output path)",
+    )
+    args = parser.parse_args()
+    gdnative_api_json = json.load(args.input)
+    methods_specs = load_builtins_specs_from_gdnative_api_json(gdnative_api_json)
+    rendered_methods = generate_builtins(args.output, methods_specs)
+    if not ensure_all_methods_has_been_rendered(methods_specs, rendered_methods):
+        raise SystemExit(
+            "Generated builtins are not in line with the provided gdnative_api.json :'("
+        )
+# around them: they are needed by pycparser, but should end up in the pxd
+# as `from libc.stdint cimport uint8_t` instead of being inside the `cdef extern`
+# block describing the whole header.
+STDLIB_INCLUDES = {
+    "stdbool.h": ["bool"],
+    "stdint.h": [
+        "uint8_t",
+        "int8_t",
+        "uint16_t",
+        "int16_t",
+        "uint32_t",
+        "int32_t",
+        "uint64_t",
+        "int64_t",
+    ],
+    "wchar.h": ["wchar_t", "size_t"],
+}
+STDLIB_TYPES = {t for m in STDLIB_INCLUDES.values() for t in m}
+
+
+class CCCP:
+    """
+    CCCP: the Cheap&Coarse C Preprocessor
+
+    pycparser needs to be fed preprocessed C code, but we don't want to use
+    a real preprocessor:
+    - different OSes ship different preprocessors (msvc vs clang vs gcc)
+    - we can control which #include to follow given we don't care about the stdlib ones
+    - we can easily tweak the behavior of #ifdef parts to ignore platform specificities
+
+    In the end, remember that we are not compiling a C program, but creating a
+    .pxd file that will (in conjunction with a .pyx) be used to generate a .c
+    file that will include the godot api headers. So there is no need to handle
+    platform-specific details (or even opaque structure sizes!) here: they will
+    be ignored by Cython and left to the final C compilation.
+    """
+
+    def __init__(
+        self, include_dirs: List[str], forced_defined_vars: Dict[str, str], debug: bool = False
+    ):
+        self.source = []
+        self.source_cursor = 0
+        self.forced_defined_vars = forced_defined_vars.keys()
+        self.defined_vars = {**forced_defined_vars}
+        self.include_dirs = [Path(p) for p in include_dirs]
+        self.ignored_includes = set()
+        self.debug = debug
+
+    @staticmethod
+    def source_to_lines(src: str) -> List[str]:
+        # First remove all comments
+        src = re.sub(r"(//.*$)", "", src, flags=re.MULTILINE)
+        src = re.sub(r"/\*.*?\*/", "", src, flags=re.DOTALL)
+
+        # Split lines, taking care of backslash continuations
+        lines = []
+        multi_lines = ""
+        for line in src.splitlines():
+            line = line.rstrip()
+            if line.endswith("\\"):
+                multi_lines += line[:-1]
+                continue
+            lines.append(multi_lines + line)
+            multi_lines = ""
+
+        return lines
+
+    def debug_explain(self, msg):
+        if self.debug:
+            print(msg)
+
+    def error_occurred(self, msg):
+        extract = "\n".join(self.source[max(0, self.source_cursor - 5) : self.source_cursor + 5])
+        raise RuntimeError(f"{msg}\n\nOccurred around:\n{extract}")
+
+    def handle_include(self, line):
+        match_include = re.match(r"^\s*#\s*include\s+[<\"]([a-zA-Z0-9_./]+)[>\"]$", line)
+        if not match_include:
+            return None
+        include_name = match_include.group(1)
+        if include_name in STDLIB_INCLUDES:
+            self.debug_explain(f"INCLUDE IGNORED {include_name}")
+            self.source.pop(self.source_cursor)
+            return 0
+        for include_dir in self.include_dirs:
+            include_path = include_dir / include_name
+            try:
+                included_source = include_path.read_text()
+                # Remove the #include line and replace it by the included source
+                self.source = (
+                    self.source[: self.source_cursor]
+                    + self.source_to_lines(included_source)
+                    + self.source[self.source_cursor + 1 :]
+                )
+                self.debug_explain(f"INCLUDE {include_name}")
+                return 0
+            except FileNotFoundError:
+                pass
+        self.error_occurred(f"Cannot resolve include `{line}`")
+
+    def handle_define(self, line):
+        match_define = re.match(r"^\s*#\s*define\s+([a-zA-Z0-9_]+)(\s+|$)", line)
+        if not match_define:
+            return None
+        define_name = match_define.group(1)
+        define_value = line[len(match_define.group(0)) :]
+        if define_name not in self.forced_defined_vars:
+            self.defined_vars[define_name] = self.expand_macros(define_value)
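+            # e.g. a (hypothetical) header line `#define GODOT_TRUE 1` is
+            # recorded here as defined_vars["GODOT_TRUE"] = "1", with any
+            # already-known define expanded inside the value first.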
+ self.debug_explain(f"DEF {define_name}={define_value}") + else: + self.debug_explain(f"DEF IGNORED {define_name}={define_value}") + self.source.pop(self.source_cursor) + return 0 + + def handle_define_macro(self, line): + match_define_macro = re.match(r"^\s*#\s*define\s+([a-zA-Z0-9_]+)\(", line) + if not match_define_macro: + return None + define_name = match_define_macro.group(1) + define_value = line[len(match_define_macro.group(0)) :] + # Macro are not supported, this is ok given they are not used + # (but some are defined) in the gdnative headers. + # As a sanity measure, we make sure the code generated if the macro + # is used will cause the C parser to crash. + self.defined_vars[define_name] = f"#error unsuported macro {define_name}" + self.debug_explain(f"DEF MACRO {define_name}=__UNSUPORTED__") + self.source.pop(self.source_cursor) + return 0 + + def handle_undef(self, line): + match_undefine = re.match(r"^\s*#\s*undef\s+([a-zA-Z0-9_]+)$", line) + if not match_undefine: + return None + define_name = match_undefine.group(1) + if define_name not in self.forced_defined_vars: + self.defined_vars.pop(define_name) + self.debug_explain(f"UNDEF {define_name}") + else: + self.debug_explain(f"UNDEF INGNORED {define_name}") + self.source.pop(self.source_cursor) + return 0 + + def handle_if(self, line): + # Replace ifdef/ifndef by generic if to simplify parsing + line = re.sub(r"^\s*#\s*ifdef\s+([a-zA-Z0-9_]+)$", r"#if defined(\1)", line) + line = re.sub(r"^\s*#\s*ifndef\s+([a-zA-Z0-9_]+)$", r"#if !defined(\1)", line) + + match_if = re.match(r"^\s*#\s*if\s+", line) + if not match_if: + return None + + def _eval_if_condition(condition): + # Turn condition into Python code and eval it \o/ + expr = condition.replace("||", " or ") + expr = expr.replace("&&", " and ") + expr = expr.replace("!", " not ") + expr = re.sub(r"defined\(([a-zA-Z0-9_]+)\)", r"defined('\1')", expr) + try: + return eval( + expr, {"defined": lambda key: key in self.defined_vars}, self.defined_vars + ) + except Exception as exc: + self.error_occurred( + f"Error {exc} while evaluating `{expr}` (generated from `{condition}`)" + ) + + def _keep_until_next_condition(offset): + nested_count = 0 + kept_body = [] + while True: + try: + line = self.source[self.source_cursor + offset] + except IndexError: + self.error_occurred("Reach end of file without #endif") + if re.match(r"^\s*#\s*(if|ifdef|ifndef)(\s+|$)", line): + # Nested #if + nested_count += 1 + else_match = re.match(r"^\s*#\s*(else$|elif\s+)", line) + if else_match: + if nested_count == 0: + condition_type = else_match.group(1).strip() + condition = line[len(else_match.group(1)) :] + return kept_body, condition_type, condition, offset + 1 + if re.match(r"^\s*#\s*endif$", line): + if nested_count == 0: + return kept_body, "endif", "", offset + 1 + else: + nested_count -= 1 + offset += 1 + kept_body.append(line) + + def _retreive_kept_body(condition, offset): + if _eval_if_condition(condition): + kept_body, condition_type, condition, offset = _keep_until_next_condition(offset) + # Skip other else/elif body parts until the matching endif + while condition_type != "endif": + _, condition_type, _, offset = _keep_until_next_condition(offset) + return kept_body, offset + else: + # Ignore the if body part + _, condition_type, condition, offset = _keep_until_next_condition(offset) + if condition_type == "elif": + return _retreive_kept_body(condition, offset) + elif condition_type == "else": + return _retreive_kept_body("True", offset) + else: # endif + return [], offset + + 
+        if_condition = line[len(match_if.group()) :]
+        body, offset = _retrieve_kept_body(if_condition, offset=1)
+
+        if_starts = self.source_cursor
+        if_ends = self.source_cursor + offset
+        self.source[if_starts:if_ends] = body
+
+        self.debug_explain(f"IF ({line}) ==> {if_starts} {if_ends}")
+
+        return 0  # 0 is not equivalent to None!
+
+    def handle_unknown(self, line):
+        match_unknown = re.match(r"^\s*#", line)
+        if not match_unknown:
+            return None
+        self.error_occurred(f"Unknown preprocessor command `{line}`")
+
+    def expand_macros(self, line):
+        # Cheap optimization to discard most of the lines, given regex search is CPU-heavy
+        if not line or all(key not in line for key in self.defined_vars.keys()):
+            return line
+        expanded_line = line
+        # Recursive expansion given a macro can reference another one
+        while True:
+            for key, value in self.defined_vars.items():
+                expanded_line = re.sub(
+                    f"(^|[^a-zA-Z0-9_]){key}([^a-zA-Z0-9_]|$)",
+                    f"\\g<1>{value}\\g<2>",
+                    expanded_line,
+                )
+            if expanded_line == line:
+                break
+            line = expanded_line
+        return line
+
+    def parse(self, src: str) -> str:
+        self.source = self.source_to_lines(src)
+
+        cpp_handlers = (
+            self.handle_define,
+            self.handle_define_macro,
+            self.handle_if,
+            self.handle_include,
+            self.handle_undef,
+            self.handle_unknown,
+        )
+        while True:
+            try:
+                source_line = self.source[self.source_cursor]
+            except IndexError:
+                # Parsing is done
+                break
+
+            for cpp_handler in cpp_handlers:
+                eaten_lines = cpp_handler(source_line)
+                if eaten_lines is not None:
+                    self.source_cursor += eaten_lines
+                    break
+            else:
+                # Not a preprocessor line
+                self.source[self.source_cursor] = self.expand_macros(source_line)
+                self.source_cursor += 1
+
+        return "\n".join(self.source)
+
+
+class PatchedAutoPxd(AutoPxd):
+    def visit_TypeDecl(self, node):
+        # Ignore types from the stdlib (they will be provided by the
+        # `from libc.stdint cimport uint8_t` syntax)
+        if node.declname in STDLIB_TYPES:
+            return
+        else:
+            return super().visit_TypeDecl(node)
+
+    def visit_ArrayDecl(self, node):
+        # autopxd doesn't support arrays with an expression as size, but in:
+        #     typedef struct {uint8_t _dont_touch_that[GODOT_VECTOR3_SIZE];} godot_vector3;
+        # `GODOT_VECTOR3_SIZE` gets resolved as `sizeof(void*)` :(
+        if node.type.declname == "_dont_touch_that":
+            # Of course the 0 size is wrong, but it's not an issue given
+            # we don't touch this array in Cython code (hence the name ^^)
+            node.dim = c_ast.Constant(type="int", value="0")
+        return super().visit_ArrayDecl(node)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Convert gdnative_api_struct.gen.h into a Cython .pxd"
+    )
+    parser.add_argument(
+        "--input",
+        "-i",
+        required=True,
+        metavar="GODOT_HEADERS_PATH",
+        help="Path to Godot GDNative headers",
+    )
+    parser.add_argument(
+        "--output",
+        "-o",
+        required=True,
+        type=argparse.FileType("w", encoding="utf8"),
+        metavar="GDNATIVE_API_STRUCT_PXD",
+        help="Path to store the generated gdnative_api_struct.pxd file",
+    )
+    args = parser.parse_args()
+
+    # Step 1: preprocessing
+    header_name = "gdnative_api_struct.gen.h"
+    with open(f"{args.input}/{header_name}", "r") as fd:
+        source = fd.read()
+
+    # Greetings, comrade!
+    cccp = CCCP(
+        include_dirs=[args.input],
+        forced_defined_vars={"GDAPI": "", "GDN_EXPORT": "", "GDCALLINGCONV": ""},
+    )
+
+    preprocessed = ""
+    # pycparser requires every symbol to be defined, hence we provide a dummy
+    # definition of the needed stdlib types.
+    # Note those definitions will then be detected and ignored by PatchedAutoPxd.
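+    # For "uint8_t" this emits the dummy line `typedef int uint8_t;`: wrong for
+    # real compilation, but enough for pycparser, and PatchedAutoPxd filters it
+    # back out of the generated pxd.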
+ for stdtype in STDLIB_TYPES: + preprocessed += f"typedef int {stdtype};\n" + preprocessed += cccp.parse(source) + + with open("output.preprocessed.c", "w") as fd: + fd.write(preprocessed) + + # Step 2: C parsing + parser = CParser() + ast = parser.parse(preprocessed) + + # Step 3: .pxd generation + p = PatchedAutoPxd(header_name) + p.visit(ast) + + pxd_cdef = p.lines() + # Remove the cdef part given we want to add the `nogil` option and + # we also want to add the `godot_method_flags` C inline code + assert pxd_cdef[0].startswith("cdef extern from") + pxd_cdef_body = "\n".join(pxd_cdef[1:]) + + pxd = f"""\ +# /!\\ Autogenerated code, modifications will be lost /!\\ +# see `generation/generate_gdnative_api_struct.py` + + +from libc.stddef cimport wchar_t, size_t +from libc.stdint cimport {', '.join(STDLIB_INCLUDES['stdint.h'])} + +cdef extern from "{header_name}" nogil: + + \"\"\" + typedef enum {{ + GODOT_METHOD_FLAG_NORMAL = 1, + GODOT_METHOD_FLAG_EDITOR = 2, + GODOT_METHOD_FLAG_NOSCRIPT = 4, + GODOT_METHOD_FLAG_CONST = 8, + GODOT_METHOD_FLAG_REVERSE = 16, + GODOT_METHOD_FLAG_VIRTUAL = 32, + GODOT_METHOD_FLAG_FROM_SCRIPT = 64, + GODOT_METHOD_FLAG_VARARG = 128, + GODOT_METHOD_FLAGS_DEFAULT = GODOT_METHOD_FLAG_NORMAL + }} godot_method_flags; + \"\"\" + + ctypedef enum godot_method_flags: + GODOT_METHOD_FLAG_NORMAL = 1 + GODOT_METHOD_FLAG_EDITOR = 2 + GODOT_METHOD_FLAG_NOSCRIPT = 4 + GODOT_METHOD_FLAG_CONST = 8 + GODOT_METHOD_FLAG_REVERSE = 16 # used for events + GODOT_METHOD_FLAG_VIRTUAL = 32 + GODOT_METHOD_FLAG_FROM_SCRIPT = 64 + GODOT_METHOD_FLAG_VARARG = 128 + GODOT_METHOD_FLAGS_DEFAULT = 1 # METHOD_FLAG_NORMAL + + ctypedef bint bool + +{pxd_cdef_body} +""" + args.output.write(pxd) diff --git a/generation/generate_pool_arrays.py b/generation/generate_pool_arrays.py new file mode 100644 index 0000000..6dc7672 --- /dev/null +++ b/generation/generate_pool_arrays.py @@ -0,0 +1,105 @@ +import os +import argparse +import json +import re +from keyword import iskeyword +from collections import defaultdict +from jinja2 import Environment, FileSystemLoader + + +BASEDIR = os.path.dirname(__file__) +env = Environment( + loader=FileSystemLoader(f"{BASEDIR}/pool_arrays_templates"), + trim_blocks=True, + lstrip_blocks=True, +) + + +class TypeItem: + def __init__(self, **kwargs): + self.__dict__.update(**kwargs) + + +TYPES = [ + # Base types + TypeItem( + gd_pool=f"godot_pool_int_array", + py_pool=f"PoolIntArray", + gd_value=f"godot_int", + py_value=f"godot_int", + is_base_type=True, + is_stack_only=True, + ), + TypeItem( + gd_pool=f"godot_pool_real_array", + py_pool=f"PoolRealArray", + gd_value=f"godot_real", + py_value=f"godot_real", + is_base_type=True, + is_stack_only=True, + ), + TypeItem( + gd_pool="godot_pool_byte_array", + py_pool="PoolByteArray", + gd_value="uint8_t", + py_value="uint8_t", + is_base_type=True, + is_stack_only=True, + ), + # Stack only builtin types + TypeItem( + gd_pool=f"godot_pool_vector2_array", + py_pool=f"PoolVector2Array", + gd_value=f"godot_vector2", + py_value=f"Vector2", + is_base_type=False, + is_stack_only=True, + ), + TypeItem( + gd_pool=f"godot_pool_vector3_array", + py_pool=f"PoolVector3Array", + gd_value=f"godot_vector3", + py_value=f"Vector3", + is_base_type=False, + is_stack_only=True, + ), + TypeItem( + gd_pool=f"godot_pool_color_array", + py_pool=f"PoolColorArray", + gd_value=f"godot_color", + py_value=f"Color", + is_base_type=False, + is_stack_only=True, + ), + # Stack&heap builtin types + TypeItem( + gd_pool="godot_pool_string_array", + 
py_pool="PoolStringArray", + gd_value="godot_string", + py_value="GDString", + is_base_type=False, + is_stack_only=False, + ), +] + + +def generate_pool_array(output_path): + template = env.get_template("pool_arrays.tmpl.pyx") + out = template.render(types=TYPES) + with open(output_path, "w") as fd: + fd.write(out) + + pxd_output_path = output_path.rsplit(".", 1)[0] + ".pxd" + template = env.get_template("pool_arrays.tmpl.pxd") + out = template.render(types=TYPES) + with open(pxd_output_path, "w") as fd: + fd.write(out) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Generate godot pool_x_array builtins bindings files" + ) + parser.add_argument("--output", "-o", default=None) + args = parser.parse_args() + generate_pool_array(args.output or f"pool_arrays.pyx") diff --git a/generation/pool_arrays_templates/pool_arrays.tmpl.pxd b/generation/pool_arrays_templates/pool_arrays.tmpl.pxd new file mode 100644 index 0000000..d883899 --- /dev/null +++ b/generation/pool_arrays_templates/pool_arrays.tmpl.pxd @@ -0,0 +1,32 @@ +# /!\ Autogenerated code, modifications will be lost /!\ +# see `generation/generate_pool_arrays.py` + +cimport cython + +from godot._hazmat.gdapi cimport ( + pythonscript_gdapi10 as gdapi10, + pythonscript_gdapi11 as gdapi11, + pythonscript_gdapi12 as gdapi12, +) +from godot._hazmat.gdnative_api_struct cimport ( +{% for t in types %} + {{ t.gd_value }}, + {{ t.gd_pool }}, + {{ t.gd_pool }}_write_access, + {{ t.gd_pool }}_read_access, +{% endfor %} +) +from godot.builtins cimport ( + Array, +{% for t in types %} +{% if not t.is_base_type %} + {{ t.py_value }}, +{% endif %} +{% endfor %} +) + + +{% from 'pool_x_array.tmpl.pxd' import render_pool_array_pxd %} +{% for t in types %} +{{ render_pool_array_pxd(t) }} +{% endfor %} diff --git a/generation/pool_arrays_templates/pool_arrays.tmpl.pyx b/generation/pool_arrays_templates/pool_arrays.tmpl.pyx new file mode 100644 index 0000000..f0f78ac --- /dev/null +++ b/generation/pool_arrays_templates/pool_arrays.tmpl.pyx @@ -0,0 +1,35 @@ +# /!\ Autogenerated code, modifications will be lost /!\ +# see `generation/generate_pool_arrays.py` + +cimport cython +from libc.stdint cimport uintptr_t + +from godot._hazmat.gdapi cimport ( + pythonscript_gdapi10 as gdapi10, + pythonscript_gdapi11 as gdapi11, + pythonscript_gdapi12 as gdapi12, +) +from godot._hazmat.gdnative_api_struct cimport ( +{% for t in types %} + {{ t.gd_value }}, + {{ t.gd_pool }}, + {{ t.gd_pool }}_write_access, + {{ t.gd_pool }}_read_access, +{% endfor %} +) +from godot.builtins cimport ( + Array, +{% for t in types %} +{% if not t.is_base_type %} + {{ t.py_value }}, +{% endif %} +{% endfor %} +) + +from contextlib import contextmanager + + +{% from 'pool_x_array.tmpl.pyx' import render_pool_array_pyx %} +{% for t in types %} +{{ render_pool_array_pyx(t) }} +{% endfor %} diff --git a/generation/pool_arrays_templates/pool_x_array.tmpl.pxd b/generation/pool_arrays_templates/pool_x_array.tmpl.pxd new file mode 100644 index 0000000..fe24773 --- /dev/null +++ b/generation/pool_arrays_templates/pool_x_array.tmpl.pxd @@ -0,0 +1,33 @@ +{% macro render_pool_array_pxd(t) %} +@cython.final +cdef class {{ t.py_pool }}: + cdef {{ t.gd_pool }} _gd_data + + @staticmethod + cdef inline {{ t.py_pool }} new() + + @staticmethod + cdef inline {{ t.py_pool }} new_with_array(Array other) + + # Operators + + cdef inline bint operator_equal(self, {{ t.py_pool }} other) + cdef inline {{ t.py_value }} operator_getitem(self, godot_int index) + cdef inline {{ t.py_pool 
}} operator_getslice(self, godot_int start, godot_int stop, godot_int step)
+
+    # Methods
+
+    cpdef inline {{ t.py_pool }} copy(self)
+    cpdef inline void append(self, {{ t.py_value }} data)
+    cdef inline void append_array(self, {{ t.py_pool }} array)
+    cpdef inline void invert(self)
+    cpdef inline void push_back(self, {{ t.py_value }} data)
+    cpdef inline void resize(self, godot_int size)
+    cdef inline godot_int size(self)
+
+
+@cython.final
+cdef class {{ t.py_pool }}WriteAccess:
+    cdef {{ t.gd_value }} *_gd_ptr
+
+{% endmacro %}
diff --git a/generation/pool_arrays_templates/pool_x_array.tmpl.pyx b/generation/pool_arrays_templates/pool_x_array.tmpl.pyx
new file mode 100644
index 0000000..617c0ec
--- /dev/null
+++ b/generation/pool_arrays_templates/pool_x_array.tmpl.pyx
@@ -0,0 +1,326 @@
+{% macro gd_to_py(type, src, dst) %}
+{% if type['gd_value'] == type['py_value'] %}
+{{ dst }} = {{ src }}
+{% else %}
+{{ dst }} = godot_string_to_pyobj(&{{ src }})
+gdapi10.godot_string_destroy(&{{ src }})
+{% endif %}
+{% endmacro %}
+
+{% macro py_to_gd(target) %}
+{% endmacro %}
+
+{% macro render_pool_array_pyx(t) %}
+@cython.final
+cdef class {{ t.py_pool }}:
+
+    def __init__(self, other=None):
+        cdef {{ t.py_pool }} other_as_pool_array
+        cdef Array other_as_array
+        if other is None:
+            gdapi10.{{ t.gd_pool }}_new(&self._gd_data)
+        else:
+            try:
+                other_as_pool_array = <{{ t.py_pool }}?>other
+                gdapi10.{{ t.gd_pool }}_new_copy(&self._gd_data, &other_as_pool_array._gd_data)
+            except TypeError:
+                try:
+                    other_as_array = other
+                    gdapi10.{{ t.gd_pool }}_new_with_array(&self._gd_data, &other_as_array._gd_data)
+                except TypeError:
+                    gdapi10.{{ t.gd_pool }}_new(&self._gd_data)
+                    for item in other:
+{% if t.is_base_type %}
+                        {{ t.py_pool }}.append(self, item)
+{% else %}
+                        {{ t.py_pool }}.append(self, (<{{ t.py_value }}?>item))
+{% endif %}
+
+    def __dealloc__(self):
+        # /!\ if `__init__` is skipped, `_gd_data` must be initialized by
+        # hand, otherwise we will get a segfault here
+        gdapi10.{{ t.gd_pool }}_destroy(&self._gd_data)
+
+    @staticmethod
+    cdef inline {{ t.py_pool }} new():
+        # Call to __new__ bypasses __init__ constructor
+        cdef {{ t.py_pool }} ret = {{ t.py_pool }}.__new__({{ t.py_pool }})
+        gdapi10.{{ t.gd_pool }}_new(&ret._gd_data)
+        return ret
+
+    @staticmethod
+    cdef inline {{ t.py_pool }} new_with_array(Array other):
+        # Call to __new__ bypasses __init__ constructor
+        cdef {{ t.py_pool }} ret = {{ t.py_pool }}.__new__({{ t.py_pool }})
+        gdapi10.{{ t.gd_pool }}_new_with_array(&ret._gd_data, &other._gd_data)
+        return ret
+
+    def __repr__(self):
+        return f"<{{ t.py_pool }}([{', '.join(repr(x) for x in self)}])>"
+
+    # Operators
+
+    def __getitem__(self, index):
+        cdef godot_int size = self.size()
+        cdef godot_int start
+        cdef godot_int stop
+        cdef godot_int step
+        if isinstance(index, slice):
+            step = index.step if index.step is not None else 1
+            if step == 0:
+                raise ValueError("slice step cannot be zero")
+            elif step > 0:
+                start = index.start if index.start is not None else 0
+                stop = index.stop if index.stop is not None else size
+            else:
+                start = index.start if index.start is not None else size
+                stop = index.stop if index.stop is not None else -size - 1
+            return self.operator_getslice(
+                start,
+                stop,
+                step,
+            )
+        else:
+            if index < 0:
+                index = index + size
+            if index < 0 or index >= size:
+                raise IndexError("list index out of range")
+            return self.operator_getitem(index)
+
+    cdef inline {{ t.py_value }} operator_getitem(self, godot_int index):
+{% if t.is_base_type %}
+        return gdapi10.{{ t.gd_pool
}}_get(&self._gd_data, index) +{% else %} + cdef {{ t.py_value }} ret = {{ t.py_value }}.__new__({{ t.py_value }}) + ret._gd_data = gdapi10.{{ t.gd_pool }}_get(&self._gd_data, index) + return ret +{% endif %} + + cdef inline {{ t.py_pool }} operator_getslice(self, godot_int start, godot_int stop, godot_int step): + cdef {{ t.py_pool }} ret = {{ t.py_pool }}.new() + cdef godot_int size = self.size() + + if start > size - 1: + start = size - 1 + elif start < 0: + start += size + if start < 0: + start = 0 + + if stop > size: + stop = size + elif stop < -size: + stop = -1 + elif stop < 0: + stop += size + + if step > 0: + if start >= stop: + return ret + items = 1 + (stop - start - 1) // step + if items <= 0: + return ret + else: + if start <= stop: + return ret + items = 1 + (stop - start + 1) // step + if items <= 0: + return ret + + ret.resize(items) + cdef {{ t.gd_pool }}_read_access *src_access = gdapi10.{{ t.gd_pool }}_read( + &self._gd_data + ) + cdef {{ t.gd_pool }}_write_access *dst_access = gdapi10.{{ t.gd_pool }}_write( + &ret._gd_data + ) + cdef const {{ t.gd_value }} *src_ptr = gdapi10.{{ t.gd_pool }}_read_access_ptr(src_access) + cdef {{ t.gd_value }} *dst_ptr = gdapi10.{{ t.gd_pool }}_write_access_ptr(dst_access) + cdef godot_int i + for i in range(items): +{% if t.is_stack_only %} + dst_ptr[i] = src_ptr[i * step + start] +{% else %} + gdapi10.{{ t.gd_value }}_destroy(&dst_ptr[i]) + gdapi10.{{ t.gd_value }}_new_copy(&dst_ptr[i], &src_ptr[i * step + start]) +{% endif %} + gdapi10.{{ t.gd_pool }}_read_access_destroy(src_access) + gdapi10.{{ t.gd_pool }}_write_access_destroy(dst_access) + + return ret + + # TODO: support slice + def __setitem__(self, godot_int index, {{ t.py_value }} value): + cdef godot_int size + size = self.size() + if index < 0: + index += size + if index < 0 or index >= size: + raise IndexError("list index out of range") +{% if t.is_base_type %} + gdapi10.{{ t.gd_pool }}_set(&self._gd_data, index, value) +{% else %} + gdapi10.{{ t.gd_pool }}_set(&self._gd_data, index, &value._gd_data) +{% endif %} + + # TODO: support slice + def __delitem__(self, godot_int index): + cdef godot_int size + size = self.size() + if index < 0: + index += size + if index < 0 or index >= size: + raise IndexError("list index out of range") + gdapi10.{{ t.gd_pool }}_remove(&self._gd_data, index) + + def __len__(self): + return self.size() + + def __iter__(self): + # TODO: mid iteration mutation should throw exception ? + cdef int i + {% if not t.is_base_type %} + cdef {{ t.py_value }} item + {% endif %} + for i in range(self.size()): +{% if t.is_base_type %} + yield gdapi10.{{ t.gd_pool }}_get(&self._gd_data, i) +{% else %} + item = {{ t.py_value }}.__new__({{ t.py_value }}) + item._gd_data = gdapi10.{{ t.gd_pool }}_get(&self._gd_data, i) + yield item +{% endif %} + + def __copy__(self): + return self.copy() + + def __eq__(self, other): + try: + return {{ t.py_pool }}.operator_equal(self, other) + except TypeError: + return False + + def __ne__(self, other): + try: + return not {{ t.py_pool }}.operator_equal(self, other) + except TypeError: + return True + + def __iadd__(self, {{ t.py_pool }} items not None): + self.append_array(items) + return self + + def __add__(self, {{ t.py_pool }} items not None): + cdef {{ t.py_pool }} ret = {{ t.py_pool }}.copy(self) + ret.append_array(items) + return ret + + cdef inline bint operator_equal(self, {{ t.py_pool }} other): + if other is None: + return False + # TODO `godot_array_operator_equal` is missing in gdapi, submit a PR ? 
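+        # Sketch of the fallback used below: compare sizes first, then walk
+        # both pools through the read-access API and bail out on the first
+        # differing element.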
+        cdef godot_int size = self.size()
+        if size != other.size():
+            return False
+
+        cdef {{ t.gd_pool }}_read_access *a_access = gdapi10.{{ t.gd_pool }}_read(
+            &self._gd_data
+        )
+        cdef {{ t.gd_pool }}_read_access *b_access = gdapi10.{{ t.gd_pool }}_read(
+            &other._gd_data
+        )
+        cdef const {{ t.gd_value }} *a_ptr = gdapi10.{{ t.gd_pool }}_read_access_ptr(a_access)
+        cdef const {{ t.gd_value }} *b_ptr = gdapi10.{{ t.gd_pool }}_read_access_ptr(b_access)
+        cdef godot_int i
+        cdef bint ret = True
+        for i in range(size):
+{% if t.is_base_type %}
+            if a_ptr[i] != b_ptr[i]:
+{% else %}
+            if not gdapi10.{{ t.gd_value }}_operator_equal(&a_ptr[i], &b_ptr[i]):
+{% endif %}
+                ret = False
+                break
+        gdapi10.{{ t.gd_pool }}_read_access_destroy(a_access)
+        gdapi10.{{ t.gd_pool }}_read_access_destroy(b_access)
+        return ret
+
+    # Methods
+
+    cpdef inline {{ t.py_pool }} copy(self):
+        # Call to __new__ bypasses __init__ constructor
+        cdef {{ t.py_pool }} ret = {{ t.py_pool }}.__new__({{ t.py_pool }})
+        gdapi10.{{ t.gd_pool }}_new_copy(&ret._gd_data, &self._gd_data)
+        return ret
+
+    cpdef inline void append(self, {{ t.py_value }} data):
+{% if t.is_base_type %}
+        gdapi10.{{ t.gd_pool }}_append(&self._gd_data, data)
+{% else %}
+        gdapi10.{{ t.gd_pool }}_append(&self._gd_data, &data._gd_data)
+{% endif %}
+
+    cdef inline void append_array(self, {{ t.py_pool }} array):
+        gdapi10.{{ t.gd_pool }}_append_array(&self._gd_data, &array._gd_data)
+
+    cpdef inline void invert(self):
+        gdapi10.{{ t.gd_pool }}_invert(&self._gd_data)
+
+    cpdef inline void push_back(self, {{ t.py_value }} data):
+{% if t.is_base_type %}
+        gdapi10.{{ t.gd_pool }}_push_back(&self._gd_data, data)
+{% else %}
+        gdapi10.{{ t.gd_pool }}_push_back(&self._gd_data, &data._gd_data)
+{% endif %}
+
+    cpdef inline void resize(self, godot_int size):
+        gdapi10.{{ t.gd_pool }}_resize(&self._gd_data, size)
+
+    cdef inline godot_int size(self):
+        return gdapi10.{{ t.gd_pool }}_size(&self._gd_data)
+
+    # Raw access
+
+    @contextmanager
+    def raw_access(self):
+        cdef {{ t.gd_pool }}_write_access *access = gdapi10.{{ t.gd_pool }}_write(
+            &self._gd_data
+        )
+        cdef {{ t.py_pool }}WriteAccess pyaccess = {{ t.py_pool }}WriteAccess.__new__({{ t.py_pool }}WriteAccess)
+        pyaccess._gd_ptr = gdapi10.{{ t.gd_pool }}_write_access_ptr(access)
+        try:
+            yield pyaccess
+
+        finally:
+            gdapi10.{{ t.gd_pool }}_write_access_destroy(access)
+
+
+@cython.final
+cdef class {{ t.py_pool }}WriteAccess:
+
+    def get_address(self):
+        # Expose the raw pointer as an integer (a bare C pointer cannot be
+        # returned to Python as-is)
+        return <uintptr_t>self._gd_ptr
+
+    def __getitem__(self, int idx):
+{% if t.is_base_type %}
+        return self._gd_ptr[idx]
+{% else %}
+        cdef {{ t.py_value }} ret = {{ t.py_value }}.__new__({{ t.py_value }})
+{% if t.is_stack_only %}
+        ret._gd_data = self._gd_ptr[idx]
+{% else %}
+        gdapi10.{{ t.gd_value }}_new_copy(&ret._gd_data, &self._gd_ptr[idx])
+{% endif %}
+        return ret
+{% endif %}
+
+    def __setitem__(self, int idx, {{ t.py_value }} val):
+{% if t.is_base_type %}
+        self._gd_ptr[idx] = val
+{% elif t.is_stack_only %}
+        self._gd_ptr[idx] = val._gd_data
+{% else %}
+        gdapi10.{{ t.gd_value }}_new_copy(&self._gd_ptr[idx], &val._gd_data)
+{% endif %}
+
+{% endmacro %}
diff --git a/generation/type_specs.py b/generation/type_specs.py
new file mode 100644
index 0000000..22ae841
--- /dev/null
+++ b/generation/type_specs.py
@@ -0,0 +1,265 @@
+# Describe all base types (i.e.
scalar such as int and Godot builtins) + +from dataclasses import dataclass + + +@dataclass +class TypeSpec: + # Type used within Godot api.json + gdapi_type: str + # Type used when calling C api functions + c_type: str + # Type used in Cython, basically similar to c_type for scalars&enums + # and to py_type for Godot objects&builtins + cy_type: str + # TODO: typing should be divided between argument and return (e.g. `Union[str, NodePath]` vs `NodePath`) + # Type used for PEP 484 Python typing + py_type: str = "" + # Type is a Godot object (i.e. defined in api.json) + is_object: bool = False + # Type is a Godot builtin (e.g. Vector2) + is_builtin: bool = False + # Type is a scalar (e.g. int, float) or void + is_base_type: bool = False + # Type doesn't use the heap (hence no need for freeing it) + is_stack_only: bool = False + # Type is an enum (e.g. godot_error, Camera::KeepAspect) + is_enum: bool = False + + @property + def is_void(self) -> bool: + return self.c_type == "void" + + @property + def is_variant(self) -> bool: + return self.c_type == "godot_variant" + + def __post_init__(self): + self.py_type = self.py_type or self.cy_type + if self.is_object: + assert not self.is_builtin + assert not self.is_base_type + assert not self.is_stack_only + if self.is_builtin: + assert not self.is_base_type + + +# Base types +TYPE_VOID = TypeSpec( + gdapi_type="void", c_type="void", cy_type="None", is_base_type=True, is_stack_only=True +) +TYPE_BOOL = TypeSpec( + gdapi_type="bool", + c_type="godot_bool", + cy_type="bint", + py_type="bool", + is_base_type=True, + is_stack_only=True, +) +TYPE_INT = TypeSpec( + gdapi_type="int", c_type="godot_int", cy_type="int", is_base_type=True, is_stack_only=True +) +TYPE_FLOAT = TypeSpec( + gdapi_type="float", c_type="godot_real", cy_type="float", is_base_type=True, is_stack_only=True +) +TYPE_ERROR = TypeSpec( + gdapi_type="enum.Error", + c_type="godot_error", + cy_type="godot_error", + py_type="Error", + is_base_type=True, + is_stack_only=True, + is_enum=True, +) +TYPE_VECTOR3_AXIS = TypeSpec( + gdapi_type="enum.Vector3::Axis", + c_type="godot_vector3_axis", + cy_type="godot_vector3_axis", + py_type="Vector3.Axis", + is_base_type=True, + is_stack_only=True, + is_enum=True, +) +TYPE_VARIANT_TYPE = TypeSpec( + gdapi_type="enum.Variant::Type", + c_type="godot_variant_type", + cy_type="godot_variant_type", + py_type="VariantType", + is_base_type=True, + is_stack_only=True, + is_enum=True, +) +TYPE_VARIANT_OPERATOR = TypeSpec( + gdapi_type="enum.Variant::Operator", + c_type="godot_variant_operator", + cy_type="godot_variant_operator", + py_type="VariantOperator", + is_base_type=True, + is_stack_only=True, + is_enum=True, +) + +# Stack&heap types +TYPE_VARIANT = TypeSpec( + gdapi_type="Variant", c_type="godot_variant", cy_type="object", is_builtin=True +) +TYPE_STRING = TypeSpec( + gdapi_type="String", + c_type="godot_string", + cy_type="GDString", + py_type="Union[str, GDString]", + is_builtin=True, +) + +# Stack only types +TYPE_AABB = TypeSpec( + gdapi_type="AABB", c_type="godot_aabb", cy_type="AABB", is_builtin=True, is_stack_only=True +) +TYPE_ARRAY = TypeSpec( + gdapi_type="Array", c_type="godot_array", cy_type="Array", is_builtin=True, is_stack_only=True +) +TYPE_BASIS = TypeSpec( + gdapi_type="Basis", c_type="godot_basis", cy_type="Basis", is_builtin=True, is_stack_only=True +) +TYPE_COLOR = TypeSpec( + gdapi_type="Color", c_type="godot_color", cy_type="Color", is_builtin=True, is_stack_only=True +) +TYPE_DICTIONARY = TypeSpec( + gdapi_type="Dictionary", + 
c_type="godot_dictionary", + cy_type="Dictionary", + is_builtin=True, + is_stack_only=True, +) +TYPE_NODEPATH = TypeSpec( + gdapi_type="NodePath", + c_type="godot_node_path", + cy_type="NodePath", + py_type="Union[str, NodePath]", + is_builtin=True, + is_stack_only=True, +) +TYPE_PLANE = TypeSpec( + gdapi_type="Plane", c_type="godot_plane", cy_type="Plane", is_builtin=True, is_stack_only=True +) +TYPE_QUAT = TypeSpec( + gdapi_type="Quat", c_type="godot_quat", cy_type="Quat", is_builtin=True, is_stack_only=True +) +TYPE_RECT2 = TypeSpec( + gdapi_type="Rect2", c_type="godot_rect2", cy_type="Rect2", is_builtin=True, is_stack_only=True +) +TYPE_RID = TypeSpec( + gdapi_type="RID", c_type="godot_rid", cy_type="RID", is_builtin=True, is_stack_only=True +) +TYPE_TRANSFORM = TypeSpec( + gdapi_type="Transform", + c_type="godot_transform", + cy_type="Transform", + is_builtin=True, + is_stack_only=True, +) +TYPE_TRANSFORM2D = TypeSpec( + gdapi_type="Transform2D", + c_type="godot_transform2d", + cy_type="Transform2D", + is_builtin=True, + is_stack_only=True, +) +TYPE_VECTOR2 = TypeSpec( + gdapi_type="Vector2", + c_type="godot_vector2", + cy_type="Vector2", + is_builtin=True, + is_stack_only=True, +) +TYPE_VECTOR3 = TypeSpec( + gdapi_type="Vector3", + c_type="godot_vector3", + cy_type="Vector3", + is_builtin=True, + is_stack_only=True, +) +TYPE_POOLBYTEARRAY = TypeSpec( + gdapi_type="PoolByteArray", + c_type="godot_pool_byte_array", + cy_type="PoolByteArray", + is_builtin=True, + is_stack_only=True, +) +TYPE_POOLINTARRAY = TypeSpec( + gdapi_type="PoolIntArray", + c_type="godot_pool_int_array", + cy_type="PoolIntArray", + is_builtin=True, + is_stack_only=True, +) +TYPE_POOLREALARRAY = TypeSpec( + gdapi_type="PoolRealArray", + c_type="godot_pool_real_array", + cy_type="PoolRealArray", + is_builtin=True, + is_stack_only=True, +) +TYPE_POOLSTRINGARRAY = TypeSpec( + gdapi_type="PoolStringArray", + c_type="godot_pool_string_array", + cy_type="PoolStringArray", + is_builtin=True, + is_stack_only=True, +) +TYPE_POOLVECTOR2ARRAY = TypeSpec( + gdapi_type="PoolVector2Array", + c_type="godot_pool_vector2_array", + cy_type="PoolVector2Array", + is_builtin=True, + is_stack_only=True, +) +TYPE_POOLVECTOR3ARRAY = TypeSpec( + gdapi_type="PoolVector3Array", + c_type="godot_pool_vector3_array", + cy_type="PoolVector3Array", + is_builtin=True, + is_stack_only=True, +) +TYPE_POOLCOLORARRAY = TypeSpec( + gdapi_type="PoolColorArray", + c_type="godot_pool_color_array", + cy_type="PoolColorArray", + is_builtin=True, + is_stack_only=True, +) + + +ALL_TYPES_EXCEPT_OBJECTS = [ + TYPE_VOID, + TYPE_BOOL, + TYPE_INT, + TYPE_FLOAT, + TYPE_ERROR, + TYPE_VECTOR3_AXIS, + TYPE_VARIANT_TYPE, + TYPE_VARIANT_OPERATOR, + TYPE_VARIANT, + TYPE_STRING, + TYPE_AABB, + TYPE_ARRAY, + TYPE_BASIS, + TYPE_COLOR, + TYPE_DICTIONARY, + TYPE_NODEPATH, + TYPE_PLANE, + TYPE_QUAT, + TYPE_RECT2, + TYPE_RID, + TYPE_TRANSFORM, + TYPE_TRANSFORM2D, + TYPE_VECTOR2, + TYPE_VECTOR3, + TYPE_POOLBYTEARRAY, + TYPE_POOLINTARRAY, + TYPE_POOLREALARRAY, + TYPE_POOLSTRINGARRAY, + TYPE_POOLVECTOR2ARRAY, + TYPE_POOLVECTOR3ARRAY, + TYPE_POOLCOLORARRAY, +] diff --git a/misc/godot_python.png b/misc/godot_python.png new file mode 100644 index 0000000..6c80600 Binary files /dev/null and b/misc/godot_python.png differ diff --git a/misc/godot_python.svg b/misc/godot_python.svg new file mode 100644 index 0000000..cf3f0c6 --- /dev/null +++ b/misc/godot_python.svg @@ -0,0 +1,156 @@ + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+
+
diff --git a/misc/pin_github_actions.py b/misc/pin_github_actions.py
new file mode 100644
index 0000000..d424a65
--- /dev/null
+++ b/misc/pin_github_actions.py
@@ -0,0 +1,83 @@
+#! /usr/bin/env python
+#
+# see: https://julienrenaux.fr/2019/12/20/github-actions-security-risk/
+# TL;DR: Using GitHub actions with branch names or tags is unsafe. Use a commit hash instead.
+
+
+import re
+import sys
+import json
+import argparse
+from pathlib import Path
+from functools import lru_cache
+from urllib.request import urlopen
+
+
+GITHUB_CONF_DIR = Path(".").joinpath("../.github").resolve()
+REPO_REGEX = r"(?P<repo>[\w\-_]+/[\w\-_]+)"
+SHA_REGEX = r"(?P<sha>[a-fA-F0-9]{40})"
+TAG_REGEX = r"(?P<tag>[\w\-_]+)"
+PIN_REGEX = r"(?P<pin>[\w\-_]+)"
+USES_REGEX = re.compile(
+    rf"uses\W*:\W*{REPO_REGEX}@({SHA_REGEX}|{TAG_REGEX})(\W*#\W*pin@{PIN_REGEX})?", re.MULTILINE
+)
+
+
+def get_files(paths):
+    for path in paths:
+        if path.is_dir():
+            yield from path.rglob("*.yml")
+        elif path.is_file():
+            yield path
+
+
+@lru_cache(maxsize=None)
+def resolve_tag(repo, tag):
+    url = f"https://api.github.com/repos/{repo}/git/ref/tags/{tag}"
+    with urlopen(url) as f:
+        data = json.loads(f.read())
+    return data["object"]["sha"]
+
+
+def add_pin(paths):
+    for file in get_files(paths):
+        txt = file.read_text()
+        overwrite_needed = False
+        # Start from the end so that we can use match.start/end to do in-place modifications
+        for match in reversed(list(USES_REGEX.finditer(txt))):
+            repo = match.group("repo")
+            tag = match.group("tag")
+            if tag is not None:
+                sha = resolve_tag(repo, tag)
+                print(f"Pinning github action {file}: {repo}@{tag} => {sha}")
+                txt = txt[: match.start()] + f"uses: {repo}@{sha} # pin@{tag}" + txt[match.end() :]
+                overwrite_needed = True
+        if overwrite_needed:
+            file.write_text(txt)
+    return 0
+
+
+def check_pin(paths):
+    ret = 0
+    for file in get_files(paths):
+        for match in USES_REGEX.finditer(file.read_text()):
+            repo = match.group("repo")
+            tag = match.group("tag")
+            if tag is not None:
+                print(f"Unpinned github action {file}: {repo}@{tag}")
+                ret = 1
+    return ret
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument("cmd", choices=["check", "add"])
+    parser.add_argument(
+        "files", nargs="*", type=Path, default=[Path(__file__).parent.joinpath("../.github/").resolve()]
+    )
+
+    args = parser.parse_args()
+    if args.cmd == "check":
+        sys.exit(check_pin(args.files))
+    else:
+        sys.exit(add_pin(args.files))
diff --git a/misc/release_LICENSE.txt b/misc/release_LICENSE.txt
new file mode 100644
index 0000000..a1a7ae4
--- /dev/null
+++ b/misc/release_LICENSE.txt
@@ -0,0 +1,305 @@
++---------------------------------------------------------------------------+
+|                               Godot Python                                |
++---------------------------------------------------------------------------+
+
+Copyright (c) 2016 by Emmanuel Leblond.
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + +Godot Python Logo (C) Pinswell +Distributed under the terms of the Creative Commons Attribution License +version 3.0 (CC-BY 3.0) +https://creativecommons.org/licenses/by/3.0/legalcode. + + ++---------------------------------------------------------------------------+ +| CPython | ++---------------------------------------------------------------------------+ + + +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. 
This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All Rights +Reserved" are retained in Python alone or in any derivative version prepared by +Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + ++---------------------------------------------------------------------------+ +| Afterword | ++---------------------------------------------------------------------------+ + + + + +... + + + + +You didn't read a thing, didn't you ? 
¯\(ツ)/¯
diff --git a/misc/release_README.txt b/misc/release_README.txt
new file mode 100644
index 0000000..458f4e9
--- /dev/null
+++ b/misc/release_README.txt
@@ -0,0 +1,61 @@
+ ________ .___ __ __________ __ .__
+ / _____/ ____ __| _/_____/ |_ \______ \___.__._/ |_| |__ ____ ____
+/ \ ___ / _ \ / __ |/ _ \ __\ | ___< | |\ __\ | \ / _ \ / \
+\ \_\ ( <_> ) /_/ ( <_> ) | | | \___ | | | | Y ( <_> ) | \
+ \______ /\____/\____ |\____/|__| |____| / ____| |__| |___| /\____/|___| /
+ \/ \/ \/ \/ \/
+ v{version} ({date})
+
+
+Introduction
+------------
+
+This is a beta version of the Python module for Godot.
+
+You are likely to encounter bugs and catastrophic crashes; if so, please
+report them to https://github.com/touilleMan/godot-python/issues.
+
+
+Working features
+----------------
+
+All Godot core features are expected to work fine:
+- builtins (e.g. Vector2)
+- Object classes (e.g. Node)
+- signals
+- variable export
+- RPC synchronisation
+
+On top of that, mixing GDScript and Python code inside a project should work fine.
+
+
+Using Pip
+---------
+
+Pip must be installed first with `ensurepip`:
+
+On Windows:
+```
+$ /windows-64/python.exe -m ensurepip # Only need to do that once
+$ /windows-64/python.exe -m pip install whatever
+```
+
+On Linux/macOS:
+```
+$ /x11-64/bin/python3 -m ensurepip # Only need to do that once
+$ /x11-64/bin/python3 -m pip install whatever
+```
+
+Note you must use `python -m pip` to invoke pip (using the `pip` command
+directly will likely fail in a cryptic manner).
+
+
+Not-so-well-tested features
+---------------------------
+
+Exporting the project hasn't been tested at all (however exporting for Linux
+should be pretty simple and may work out of the box...).
+
+
+Have fun ;-)
+
+ - touilleMan
diff --git a/misc/release_pythonscript.gdnlib b/misc/release_pythonscript.gdnlib
new file mode 100644
index 0000000..1510867
--- /dev/null
+++ b/misc/release_pythonscript.gdnlib
@@ -0,0 +1,23 @@
+[general]
+
+singleton=true
+load_once=true
+symbol_prefix="godot_"
+
+[entry]
+
+X11.64="res://addons/pythonscript/x11-64/libpythonscript.so"
+X11.32="res://addons/pythonscript/x11-32/libpythonscript.so"
+Server.64="res://addons/pythonscript/x11-64/libpythonscript.so"
+Windows.64="res://addons/pythonscript/windows-64/pythonscript.dll"
+Windows.32="res://addons/pythonscript/windows-32/pythonscript.dll"
+OSX.64="res://addons/pythonscript/osx-64/libpythonscript.dylib"
+
+[dependencies]
+
+X11.64=[]
+X11.32=[]
+Server.64=[]
+Windows.64=[]
+Windows.32=[]
+OSX.64=[]
diff --git a/misc/showcase.png b/misc/showcase.png
new file mode 100644
index 0000000..ca40031
Binary files /dev/null and b/misc/showcase.png differ
diff --git a/platforms/SConscript b/platforms/SConscript
new file mode 100644
index 0000000..638bf9f
--- /dev/null
+++ b/platforms/SConscript
@@ -0,0 +1,108 @@
+import os
+import re
+from uuid import uuid4
+from io import BytesIO
+from zipfile import ZipFile
+from urllib.request import urlopen, HTTPError
+from SCons.Errors import UserError
+
+
+Import("env")
+
+
+def resolve_godot_download_url(major, minor, patch, extra, platform):
+    version = f"{major}.{minor}.{patch}" if patch != 0 else f"{major}.{minor}"
+    if extra == "stable":
+        return f"https://downloads.tuxfamily.org/godotengine/{version}/Godot_v{version}-{extra}_{platform}.zip"
+    else:
+        return f"https://downloads.tuxfamily.org/godotengine/{version}/{extra}/Godot_v{version}-{extra}_{platform}.zip"
+
+
+def resolve_godot_binary_name(major, minor, patch, extra, platform):
+    version = f"{major}.{minor}.{patch}" if patch != 0 else f"{major}.{minor}"
else f"{major}.{minor}" + return f"Godot_v{version}-{extra}_{platform}" + + +SConscript([f"{env['platform']}/SConscript"]) +# Platform-dependant variables +assert "bits" in env +assert "godot_binary_download_platform" in env +assert "cpython_build" in env +assert "cpython_build_dir" in env +assert "DIST_SITE_PACKAGES" in env + + +# Cython modules need to link against libpython.so +env.AppendUnique(CYTHON_COMPILE_DEPS=[env["cpython_build"]]) + + +### Install CPython build into dist ### + + +# Installing cpython build into dist cannot be simply done by a +# `env.InstallAs("$DIST_PLATFORM", cypthon_build)` rule given it would +# conflict with the rules that install libpythonscript&godot modules. +# To solve this we represent the installation of the build by a virtual target. +cpython_build_install_marker = env.File("cpython_build_installed_in_dist.marker") +env.VirtualTargetCommand( + marker=cpython_build_install_marker, + condition=lambda env: os.path.exists(env.Dir("$DIST_PLATFORM").abspath), + source=env["cpython_build"], # Note we don't use `cpython_build_dir` ! + action=[Delete("$DIST_PLATFORM"), Copy("$DIST_PLATFORM", env["cpython_build_dir"])], +) + + +# Replace default Install command to always depend on cpython build install +env.VanillaInstall = env.Install +env.VanillaInstallAs = env.InstallAs + + +def install(env, target, source): + out = env.VanillaInstall(target, source) + env.Depends(out, cpython_build_install_marker) + return out + + +def install_as(env, target, source): + out = env.VanillaInstallAs(target, source) + env.Depends(out, cpython_build_install_marker) + return out + + +env.AddMethod(install, "Install") +env.AddMethod(install_as, "InstallAs") + + +### Godot binary (to run tests) ### + + +if not env["godot_binary"]: + godot_download_url = resolve_godot_download_url( + *env["godot_binary_download_version"], env["godot_binary_download_platform"] + ) + godot_binary_name = resolve_godot_binary_name( + *env["godot_binary_download_version"], env["godot_binary_download_platform"] + ) + env["godot_binary"] = File(godot_binary_name) + godot_binary_zip_path = env.get("godot_binary_download_zip_path", godot_binary_name) + + def download_and_extract(target, source, env): + try: + with urlopen(godot_download_url) as rep: + zipfile = ZipFile(BytesIO(rep.read())) + except HTTPError as exc: + # It seems SCons swallows HTTPError, so we have to wrap it + raise UserError(exc) from exc + if godot_binary_zip_path not in zipfile.namelist(): + raise UserError(f"Archive doesn't contain {godot_binary_zip_path}") + with open(target[0].abspath, "wb") as fd: + fd.write(zipfile.open(godot_binary_zip_path).read()) + if env["HOST_OS"] != "win32": + os.chmod(target[0].abspath, 0o755) + + env.Command( + env["godot_binary"], + None, + Action(download_and_extract, f"Download&extract {godot_download_url}"), + ) + env.NoClean(env["godot_binary"]) diff --git a/platforms/osx-64/SConscript b/platforms/osx-64/SConscript new file mode 100644 index 0000000..2103a85 --- /dev/null +++ b/platforms/osx-64/SConscript @@ -0,0 +1,134 @@ +import zstandard +import tarfile +import json +import shutil +import subprocess +from pathlib import Path + + +Import("env") + + +cpython_build = Dir("cpython_build") + + +env["bits"] = "64" +env["godot_binary_download_platform"] = "osx.64" +env["godot_binary_download_zip_path"] = "Godot.app/Contents/MacOS/Godot" +env["cpython_build"] = cpython_build +env["cpython_build_dir"] = cpython_build +env["DIST_SITE_PACKAGES"] = Dir(f"{env['DIST_PLATFORM']}/lib/python3.8/site-packages") + + 
+### Build config for pythonscript ### + + +env.AppendUnique(CFLAGS=["-m64"]) +env.AppendUnique(LINKFLAGS=["-m64"]) +# Cannot use CPPPATH&LIBPATH here given headers are within `cpython_build` target, +# so Scons consider the headers are a missing target +env.AppendUnique(CFLAGS=[f"-I{cpython_build.abspath}/include/python3.8/"]) +env.AppendUnique(LINKFLAGS=[f"-L{cpython_build.abspath}/lib"]) + + +### Fetch Python prebuild ### + + +CPYTHON_PREBUILD_URL = "https://github.com/indygreg/python-build-standalone/releases/download/20200823/cpython-3.8.5-x86_64-apple-darwin-pgo-20200823T2228.tar.zst" +cpython_prebuild_archive = env.Download( + target=File(CPYTHON_PREBUILD_URL.rsplit("/", 1)[1]), url=CPYTHON_PREBUILD_URL +) +env.NoClean(cpython_prebuild_archive) + + +### Extract prebuild ### + + +def extract_cpython_prebuild(target, source, env): + archive_path = source[0].abspath + target_path = target[0].abspath + with open(archive_path, "rb") as fh: + dctx = zstandard.ZstdDecompressor() + with dctx.stream_reader(fh) as reader: + with tarfile.open(mode="r|", fileobj=reader) as tf: + tf.extractall(target_path) + + +cpython_prebuild_src = env.Command( + Dir("cpython_prebuild"), cpython_prebuild_archive, extract_cpython_prebuild +) +env.NoClean(cpython_prebuild_src) + + +### Generate custom build from the prebuild ### + + +def generate_cpython_build(target, source, env): + build = Path(target[0].abspath) + prebuild = Path(source[0].abspath) / "python" + + conf = json.loads((prebuild / "PYTHON.json").read_text()) + assert conf["version"] == "5" + assert conf["libpython_link_mode"] == "shared" + assert conf["target_triple"] == "x86_64-apple-darwin" + + shutil.copytree(str(prebuild / "install"), str(build), symlinks=True) + shutil.copytree(str(prebuild / "licenses"), str(build / "licenses"), symlinks=True) + + shutil.rmtree(str(build / "share")) + + # Remove static library stuff + config = conf["python_stdlib_platform_config"] + assert config.startswith("install/lib/") + config = build / config[len("install/") :] + assert config.exists() + shutil.rmtree(str(config)) + + # Patch binaries to load libpython3.x.dylib with a relative path + # Lib paths are hardcoded into the executable, and if the lib is not found at the path, then it craps out. + # Unfortunately compiling python will hardcode the absolute path of libpython.dylib into the executable, + # so if you move it around it will break. + # the solution here is to modify the executable and make sure the lib path is not an absolute path, + # but an path relative to @loader_path, which is a special symbol that points to the executable. 
+ # See: http://joaoventura.net/blog/2016/embeddable-python-osx-from-src/ + # and https://stackoverflow.com/questions/7880454/python-executable-not-finding-libpython-shared-library + prebuild_shared_lib_path = conf["build_info"]["core"]["shared_lib"] + path, _ = prebuild_shared_lib_path.rsplit("/", 1) + assert path == "install/lib" # Make sure libpython.so is on lib folder + binary = build / "bin/python3.8" + assert binary.is_file() + dylib = build / "lib/libpython3.8.dylib" + cmd = f"install_name_tool -id @rpath/{dylib.name} {dylib}" + subprocess.run(cmd.split(), check=True) + + stdlib_path = build / "lib/python3.8" + + # Remove tests lib (pretty big and basically useless) + shutil.rmtree(str(stdlib_path / "test")) + + # Also remove __pycache__ & .pyc stuff + for pycache in stdlib_path.glob("**/__pycache__"): + shutil.rmtree(str(pycache)) + + # Make sure site-packages is empty to avoid including pip (ensurepip should be used instead) + shutil.rmtree(str(stdlib_path / "site-packages")) + + # Zip the stdlib to save plenty of space \o/ + if env["compressed_stdlib"]: + tmp_stdlib_path = build / "lib/tmp_python3.8" + shutil.move(str(stdlib_path), str(tmp_stdlib_path)) + stdlib_path.mkdir() + shutil.move(str(tmp_stdlib_path / "lib-dynload"), str(stdlib_path / "lib-dynload")) + shutil.make_archive( + base_name=build / "lib/python38", format="zip", root_dir=str(tmp_stdlib_path) + ) + shutil.rmtree(str(tmp_stdlib_path)) + # Oddly enough, os.py must be present (even if empty !) otherwise + # Python failed to find it home... + (stdlib_path / "os.py").touch() + + (stdlib_path / "site-packages").mkdir() + + +env.Command(cpython_build, cpython_prebuild_src, generate_cpython_build) +env.NoClean(cpython_build) diff --git a/platforms/windows-32/SConscript b/platforms/windows-32/SConscript new file mode 100644 index 0000000..5ebc743 --- /dev/null +++ b/platforms/windows-32/SConscript @@ -0,0 +1,104 @@ +import zstandard +import tarfile +import json +import shutil +import subprocess +from pathlib import Path + + +Import("env") + + +cpython_build = Dir("cpython_build") + + +env["bits"] = "32" +env["godot_binary_download_platform"] = "win32.exe" +env["cpython_build"] = cpython_build +env["cpython_build_dir"] = cpython_build +env["DIST_SITE_PACKAGES"] = Dir(f"{env['DIST_PLATFORM']}/Lib/site-packages") + + +### Build config for pythonscript ### + +# Cannot use CPPPATH&LIBPATH here given headers are within `cpython_build` target, +# so Scons consider the headers are a missing target +env.AppendUnique(CFLAGS=[f"-I{cpython_build.abspath}/include"]) +env.AppendUnique(LINKFLAGS=[f"/LIBPATH:{cpython_build.abspath}/libs"]) +env.AppendUnique(CYTHON_COMPILE_DEPS=[cpython_build]) + + +### Fetch Python prebuild ### + + +CPYTHON_PREBUILD_URL = "https://github.com/indygreg/python-build-standalone/releases/download/20200830/cpython-3.8.5-i686-pc-windows-msvc-shared-pgo-20200830T2311.tar.zst" +cpython_prebuild_archive = env.Download( + target=File(CPYTHON_PREBUILD_URL.rsplit("/", 1)[1]), url=CPYTHON_PREBUILD_URL +) +env.NoClean(cpython_prebuild_archive) + + +### Extract prebuild ### + + +def extract_cpython_prebuild(target, source, env): + archive_path = source[0].abspath + target_path = target[0].abspath + with open(archive_path, "rb") as fh: + dctx = zstandard.ZstdDecompressor() + with dctx.stream_reader(fh) as reader: + with tarfile.open(mode="r|", fileobj=reader) as tf: + tf.extractall(target_path) + + +cpython_prebuild_src = env.Command( + Dir("cpython_prebuild"), cpython_prebuild_archive, extract_cpython_prebuild +) 
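+# Every platform uses this same streaming zstandard + tarfile recipe; as a
+# standalone sketch outside SCons (paths are placeholders):
+#
+#     import tarfile
+#     import zstandard
+#
+#     with open("cpython-prebuild.tar.zst", "rb") as fh:
+#         dctx = zstandard.ZstdDecompressor()
+#         with dctx.stream_reader(fh) as reader:
+#             with tarfile.open(mode="r|", fileobj=reader) as tf:
+#                 tf.extractall("cpython_prebuild")
+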
+env.NoClean(cpython_prebuild_src) + + +### Generate custom build from the prebuild ### + + +def generate_cpython_build(target, source, env): + build = Path(target[0].abspath) + prebuild = Path(source[0].abspath) / "python" + + conf = json.loads((prebuild / "PYTHON.json").read_text()) + assert conf["version"] == "5" + assert conf["libpython_link_mode"] == "shared" + assert conf["target_triple"] == "i686-pc-windows-msvc" + + shutil.copytree(str(prebuild / "install"), str(build), symlinks=True) + shutil.copytree(str(prebuild / "licenses"), str(build / "licenses"), symlinks=True) + + stdlib_path = build / "Lib" + + # Remove tests lib (pretty big and basically useless) + shutil.rmtree(str(stdlib_path / "test")) + + # Remove .pdb debug symbols + for pdbfile in (build / "DLLs").glob("*.pdb"): + pdbfile.unlink() + + # Also remove __pycache__ & .pyc stuff + for pycache in stdlib_path.glob("**/__pycache__"): + shutil.rmtree(str(pycache)) + + # Make sure site-packages is empty to avoid including pip (ensurepip should be used instead) + shutil.rmtree(str(stdlib_path / "site-packages")) + + # Zip the stdlib to save plenty of space \o/ + if env["compressed_stdlib"]: + shutil.make_archive(base_name=build / "python38", format="zip", root_dir=str(stdlib_path)) + shutil.rmtree(str(stdlib_path)) + stdlib_path.mkdir() + # Oddly enough, os.py must be present (even if empty !) otherwise + # Python failed to find it home... + (stdlib_path / "os.py").touch() + + (stdlib_path / "site-packages").mkdir() + + +env.Command(cpython_build, cpython_prebuild_src, generate_cpython_build) +env.NoClean(cpython_build) diff --git a/platforms/windows-64/SConscript b/platforms/windows-64/SConscript new file mode 100644 index 0000000..db11486 --- /dev/null +++ b/platforms/windows-64/SConscript @@ -0,0 +1,104 @@ +import zstandard +import tarfile +import json +import shutil +import subprocess +from pathlib import Path + + +Import("env") + + +cpython_build = Dir("cpython_build") + + +env["bits"] = "64" +env["godot_binary_download_platform"] = "win64.exe" +env["cpython_build"] = cpython_build +env["cpython_build_dir"] = cpython_build +env["DIST_SITE_PACKAGES"] = Dir(f"{env['DIST_PLATFORM']}/Lib/site-packages") + + +### Build config for pythonscript ### + +# Cannot use CPPPATH&LIBPATH here given headers are within `cpython_build` target, +# so Scons consider the headers are a missing target +env.AppendUnique(CFLAGS=[f"-I{cpython_build.abspath}/include"]) +env.AppendUnique(LINKFLAGS=[f"/LIBPATH:{cpython_build.abspath}/libs"]) +env.AppendUnique(CYTHON_COMPILE_DEPS=[cpython_build]) + + +### Fetch Python prebuild ### + + +CPYTHON_PREBUILD_URL = "https://github.com/indygreg/python-build-standalone/releases/download/20200830/cpython-3.8.5-x86_64-pc-windows-msvc-shared-pgo-20200830T2254.tar.zst" +cpython_prebuild_archive = env.Download( + target=File(CPYTHON_PREBUILD_URL.rsplit("/", 1)[1]), url=CPYTHON_PREBUILD_URL +) +env.NoClean(cpython_prebuild_archive) + + +### Extract prebuild ### + + +def extract_cpython_prebuild(target, source, env): + archive_path = source[0].abspath + target_path = target[0].abspath + with open(archive_path, "rb") as fh: + dctx = zstandard.ZstdDecompressor() + with dctx.stream_reader(fh) as reader: + with tarfile.open(mode="r|", fileobj=reader) as tf: + tf.extractall(target_path) + + +cpython_prebuild_src = env.Command( + Dir("cpython_prebuild"), cpython_prebuild_archive, extract_cpython_prebuild +) +env.NoClean(cpython_prebuild_src) + + +### Generate custom build from the prebuild ### + + +def 
generate_cpython_build(target, source, env): + build = Path(target[0].abspath) + prebuild = Path(source[0].abspath) / "python" + + conf = json.loads((prebuild / "PYTHON.json").read_text()) + assert conf["version"] == "5" + assert conf["libpython_link_mode"] == "shared" + assert conf["target_triple"] == "x86_64-pc-windows-msvc" + + shutil.copytree(str(prebuild / "install"), str(build), symlinks=True) + shutil.copytree(str(prebuild / "licenses"), str(build / "licenses"), symlinks=True) + + stdlib_path = build / "Lib" + + # Remove tests lib (pretty big and basically useless) + shutil.rmtree(str(stdlib_path / "test")) + + # Remove .pdb debug symbols + for pdbfile in (build / "DLLs").glob("*.pdb"): + pdbfile.unlink() + + # Also remove __pycache__ & .pyc stuff + for pycache in stdlib_path.glob("**/__pycache__"): + shutil.rmtree(str(pycache)) + + # Make sure site-packages is empty to avoid including pip (ensurepip should be used instead) + shutil.rmtree(str(stdlib_path / "site-packages")) + + # Zip the stdlib to save plenty of space \o/ + if env["compressed_stdlib"]: + shutil.make_archive(base_name=build / "python38", format="zip", root_dir=str(stdlib_path)) + shutil.rmtree(str(stdlib_path)) + stdlib_path.mkdir() + # Oddly enough, os.py must be present (even if empty !) otherwise + # Python failed to find it home... + (stdlib_path / "os.py").touch() + + (stdlib_path / "site-packages").mkdir() + + +env.Command(cpython_build, cpython_prebuild_src, generate_cpython_build) +env.NoClean(cpython_build) diff --git a/platforms/x11-32/SConscript b/platforms/x11-32/SConscript new file mode 100644 index 0000000..10b83cc --- /dev/null +++ b/platforms/x11-32/SConscript @@ -0,0 +1,2 @@ +# Python-standalone-build doesn't support linux 32bits platform +assert False, "platform x11-32 is not supported yet :'-(" diff --git a/platforms/x11-64/SConscript b/platforms/x11-64/SConscript new file mode 100644 index 0000000..31323c9 --- /dev/null +++ b/platforms/x11-64/SConscript @@ -0,0 +1,120 @@ +import zstandard +import tarfile +import json +import shutil +import subprocess +from pathlib import Path + + +Import("env") + + +cpython_build = Dir("cpython_build") + + +env["bits"] = "64" +if env["headless"]: + env["godot_binary_download_platform"] = "linux_headless.64" +else: + env["godot_binary_download_platform"] = "x11.64" +env["cpython_build"] = cpython_build +env["cpython_build_dir"] = cpython_build +env["DIST_SITE_PACKAGES"] = Dir(f"{env['DIST_PLATFORM']}/lib/python3.8/site-packages") + + +### Build config for pythonscript ### + + +env.AppendUnique(CFLAGS=["-m64"]) +env.AppendUnique(LINKFLAGS=["-m64"]) +# Cannot use CPPPATH&LIBPATH here given headers are within `cpython_build` target, +# so Scons consider the headers are a missing target +env.AppendUnique(CFLAGS=[f"-I{cpython_build.abspath}/include/python3.8/"]) +env.AppendUnique(LINKFLAGS=[f"-L{cpython_build.abspath}/lib"]) +env.AppendUnique(CYTHON_COMPILE_DEPS=[cpython_build]) + + +### Fetch Python prebuild ### + + +CPYTHON_PREBUILD_URL = "https://github.com/indygreg/python-build-standalone/releases/download/20200822/cpython-3.8.5-x86_64-unknown-linux-gnu-pgo-20200823T0036.tar.zst" +cpython_prebuild_archive = env.Download( + target=File(CPYTHON_PREBUILD_URL.rsplit("/", 1)[1]), url=CPYTHON_PREBUILD_URL +) +env.NoClean(cpython_prebuild_archive) + + +### Extract prebuild ### + + +def extract_cpython_prebuild(target, source, env): + archive_path = source[0].abspath + target_path = target[0].abspath + with open(archive_path, "rb") as fh: + dctx = 
zstandard.ZstdDecompressor() + with dctx.stream_reader(fh) as reader: + with tarfile.open(mode="r|", fileobj=reader) as tf: + tf.extractall(target_path) + + +cpython_prebuild_src = env.Command( + Dir("cpython_prebuild"), cpython_prebuild_archive, extract_cpython_prebuild +) +env.NoClean(cpython_prebuild_src) + + +### Generate custom build from the prebuild ### + + +def generate_cpython_build(target, source, env): + build = Path(target[0].abspath) + prebuild = Path(source[0].abspath) / "python" + + conf = json.loads((prebuild / "PYTHON.json").read_text()) + assert conf["version"] == "5" + assert conf["libpython_link_mode"] == "shared" + assert conf["target_triple"] == "x86_64-unknown-linux-gnu" + + shutil.copytree(str(prebuild / "install"), str(build), symlinks=True) + shutil.copytree(str(prebuild / "licenses"), str(build / "licenses"), symlinks=True) + + shutil.rmtree(str(build / "share")) + + # Remove static library stuff + config = conf["python_stdlib_platform_config"] + assert config.startswith("install/lib/") + config = build / config[len("install/") :] + assert config.exists() + shutil.rmtree(str(config)) + + stdlib_path = build / "lib/python3.8" + + # Remove tests lib (pretty big and basically useless) + shutil.rmtree(str(stdlib_path / "test")) + + # Also remove __pycache__ & .pyc stuff + for pycache in stdlib_path.glob("**/__pycache__"): + shutil.rmtree(str(pycache)) + + # Make sure site-packages is empty to avoid including pip (ensurepip should be used instead) + shutil.rmtree(str(stdlib_path / "site-packages")) + + # Zip the stdlib to save plenty of space \o/ + if env["compressed_stdlib"]: + tmp_stdlib_path = build / "lib/tmp_python3.8" + shutil.move(str(stdlib_path), str(tmp_stdlib_path)) + stdlib_path.mkdir() + shutil.move(str(tmp_stdlib_path / "lib-dynload"), str(stdlib_path / "lib-dynload")) + shutil.make_archive( + base_name=build / "lib/python38", format="zip", root_dir=str(tmp_stdlib_path) + ) + shutil.rmtree(str(tmp_stdlib_path)) + # Oddly enough, os.py must be present (even if empty !) otherwise + # Python failed to find it home... 
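+        # (CPython locates its home at startup by probing for `os.py` as a
+        # landmark, hence the empty placeholder below; the actual stdlib
+        # modules are then imported from the `python38.zip` archive created
+        # above, via zipimport.)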
+ (stdlib_path / "os.py").touch() + + (stdlib_path / "site-packages").mkdir() + + +env.Command(cpython_build, cpython_prebuild_src, generate_cpython_build) +env.NoClean(cpython_build) diff --git a/pythonscript/SConscript b/pythonscript/SConscript new file mode 100644 index 0000000..00783e2 --- /dev/null +++ b/pythonscript/SConscript @@ -0,0 +1,48 @@ +Import("env") + +c_env = env.Clone() +if env["platform"].startswith("windows"): + c_env.AppendUnique(LIBS=["python38"]) + +elif env["platform"].startswith("osx"): + c_env.AppendUnique(LIBS=["python3.8"]) + # if we don't give the lib a proper install_name, macos won't be able to find it, + # and will link the cython modules with a relative path + c_env.AppendUnique( + LINKFLAGS=["-Wl,-rpath,'@loader_path/lib'", "-install_name", "@rpath/libpythonscript.dylib"] + ) + c_env.AppendUnique(CFLAGS=["-Werror-implicit-function-declaration"]) + +else: # x11 + c_env.AppendUnique(LIBS=["python3.8"]) + c_env.AppendUnique(LINKFLAGS=["-Wl,-rpath,'$$ORIGIN/lib'"]) + c_env.AppendUnique(CFLAGS=["-Werror-implicit-function-declaration"]) +c_env.Depends("pythonscript.c", env["cpython_build"]) + + +libpythonscript, *libpythonscript_extra = c_env.SharedLibrary("pythonscript", ["pythonscript.c"]) +env.Install("$DIST_PLATFORM", [libpythonscript, *libpythonscript_extra]) + + +# Cython modules depend on libpythonscript +env.AppendUnique(LIBPATH=[Dir(".")]) +env.AppendUnique(CYTHON_COMPILE_DEPS=[libpythonscript]) + + +SConscript(["godot/SConscript"]) + + +# `_godot_api.h` is only for internal use between _godot and pythonscript +# libraries, hence no need to provide it as part of the release +*mods, _ = env.CythonModule( + ["_godot", "_godot_api.h"], + [ + "_godot.pyx", + "_godot_editor.pxi", + "_godot_instance.pxi", + "_godot_profiling.pxi", + "_godot_script.pxi", + "_godot_io.pxi", + ], +) +env.Install("$DIST_SITE_PACKAGES", mods) diff --git a/pythonscript/_godot.pyx b/pythonscript/_godot.pyx new file mode 100644 index 0000000..43f1589 --- /dev/null +++ b/pythonscript/_godot.pyx @@ -0,0 +1,74 @@ +# `_godot` module contains all the callbacks needed by Godot's Pluginscript +# system to expose Python as a language to Godot (see pythonscript.c for +# more on this). +# Hence there is no point of importing this module from Python given it +# only expose C functions. +# Beside this module depend on the `godot.hazmat` module so it would be a bad +# idea to make the `godot` module depend on it... +include "_godot_editor.pxi" +include "_godot_profiling.pxi" +include "_godot_script.pxi" +include "_godot_instance.pxi" +include "_godot_io.pxi" + +from godot._hazmat.gdnative_api_struct cimport ( + godot_gdnative_init_options, + godot_pluginscript_language_data, +) +from godot._hazmat.internal cimport set_pythonscript_verbose, get_pythonscript_verbose +from godot.builtins cimport GDString + +import sys + +# OS and ProjectSettings are singletons exposed as global python objects, +# hence there are not available from a cimport +from godot.bindings import OS, ProjectSettings +from godot._version import __version__ as pythonscript_version + + +def _setup_config_entry(name, default_value): + gdname = GDString(name) + if not ProjectSettings.has_setting(gdname): + ProjectSettings.set_setting(gdname, default_value) + ProjectSettings.set_initial_value(gdname, default_value) + # TODO: `set_builtin_order` is not exposed by gdnative... but is it useful ? 
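+    # e.g. `_setup_config_entry("python_script/verbose", False)` registers the
+    # setting (with its default) on first use, then returns whatever value the
+    # project currently configures.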
+ return ProjectSettings.get_setting(gdname) + + +cdef api godot_pluginscript_language_data *pythonscript_init() with gil: + # Pass argv arguments + sys.argv = ["godot"] + [str(x) for x in OS.get_cmdline_args()] + + # Update PYTHONPATH according to configuration + pythonpath = str(_setup_config_entry("python_script/path", "res://;res://lib")) + for p in pythonpath.split(";"): + p = ProjectSettings.globalize_path(GDString(p)) + sys.path.insert(0, str(p)) + + # Redirect stdout/stderr to have it in the Godot editor console + if _setup_config_entry("python_script/io_streams_capture", True): + # Note we don't have to remove the stream capture in `pythonscript_finish` given + # Godot print API is available until after the Python interpreter is teardown + install_io_streams_capture() + + # Enable verbose output from pythonscript framework + if _setup_config_entry("python_script/verbose", False): + set_pythonscript_verbose(True) + + # Finally proudly print banner ;-) + if _setup_config_entry("python_script/print_startup_info", True): + cooked_sys_version = '.'.join(map(str, sys.version_info)) + print(f"Pythonscript {pythonscript_version} (CPython {cooked_sys_version})") + + if get_pythonscript_verbose(): + print(f"PYTHONPATH: {sys.path}") + + return NULL + + +cdef api void pythonscript_finish(godot_pluginscript_language_data *data) with gil: + # /!\ When this function is called, the Python interpreter is fully operational + # and might be running user-created threads doing concurrent stuff. + # That will continue until `godot_gdnative_terminate` is called (which is + # responsible for the actual teardown of the interpreter). + pass diff --git a/pythonscript/_godot_editor.pxi b/pythonscript/_godot_editor.pxi new file mode 100644 index 0000000..8ae115d --- /dev/null +++ b/pythonscript/_godot_editor.pxi @@ -0,0 +1,256 @@ +# cython: c_string_type=unicode, c_string_encoding=utf8 + +from libc.stddef cimport wchar_t + +from godot._hazmat.gdnative_api_struct cimport ( + godot_pluginscript_language_data, + godot_string, + godot_bool, + godot_array, + godot_pool_string_array, + godot_object, + godot_variant, + godot_error, + godot_dictionary +) +from godot._hazmat.gdapi cimport pythonscript_gdapi10 as gdapi10 +from godot._hazmat.conversion cimport ( + godot_string_to_pyobj, + pyobj_to_godot_string, + godot_variant_to_pyobj, +) + + +cdef api godot_string pythonscript_get_template_source_code( + godot_pluginscript_language_data *p_data, + const godot_string *p_class_name, + const godot_string *p_base_class_name +) with gil: + cdef str class_name + if p_class_name == NULL: + class_name = "MyExportedCls" + else: + class_name = godot_string_to_pyobj(p_class_name) + cdef str base_class_name = godot_string_to_pyobj(p_base_class_name) + cdef str src = f"""from godot import exposed, export +from godot import * + + +@exposed +class {class_name}({base_class_name}): + + # member variables here, example: + a = export(int) + b = export(str, default='foo') + + def _ready(self): + \"\"\" + Called every time the node is added to the scene. + Initialization here. 
+        \"\"\"
+        pass
+"""
+    cdef godot_string ret
+    pyobj_to_godot_string(src, &ret)
+    return ret
+
+
+cdef api godot_bool pythonscript_validate(
+    godot_pluginscript_language_data *p_data,
+    const godot_string *p_script,
+    int *r_line_error,
+    int *r_col_error,
+    godot_string *r_test_error,
+    const godot_string *p_path,
+    godot_pool_string_array *r_functions
+) with gil:
+    return True
+
+
+cdef api int pythonscript_find_function(
+    godot_pluginscript_language_data *p_data,
+    const godot_string *p_function,
+    const godot_string *p_code
+) with gil:
+    return 0
+
+
+cdef api godot_string pythonscript_make_function(
+    godot_pluginscript_language_data *p_data,
+    const godot_string *p_class,
+    const godot_string *p_name,
+    const godot_pool_string_array *p_args
+) with gil:
+    cdef str name = godot_string_to_pyobj(p_name)
+
+    # TODO: replace this with PoolStringArray binding once implemented
+    cdef int i
+    cdef godot_string gdarg
+    cdef list args_names = []
+    for i in range(gdapi10.godot_pool_string_array_size(p_args)):
+        gdarg = gdapi10.godot_pool_string_array_get(p_args, i)
+        arg = godot_string_to_pyobj(&gdarg)
+        gdapi10.godot_string_destroy(&gdarg)
+        args_names.append(arg.split(":", 1)[0])
+
+    cdef str src = f"""\
+def {name}(self, { ','.join(args_names) }):
+    pass
+"""
+    cdef godot_string ret
+    pyobj_to_godot_string(src, &ret)
+    return ret
+
+
+cdef api godot_error pythonscript_complete_code(
+    godot_pluginscript_language_data *p_data,
+    const godot_string *p_code,
+    const godot_string *p_base_path,
+    godot_object *p_owner,
+    godot_array *r_options,
+    godot_bool *r_force,
+    godot_string *r_call_hint
+) with gil:
+    return godot_error.GODOT_OK
+
+
+cdef api void pythonscript_auto_indent_code(
+    godot_pluginscript_language_data *p_data,
+    godot_string *p_code,
+    int p_from_line,
+    int p_to_line
+) with gil:
+    # TODO: use black for this job
+# try:
+#     import autopep8
+# except ImportError:
+#     print(
+#         "[Pythonscript] Auto indent requires module `autopep8`, "
+#         "install it with `pip install autopep8`"
+#     )
+# pycode = godot_string_to_pyobj(code).splitlines()
+# before = "\n".join(pycode[:from_line])
+# to_fix = "\n".join(pycode[from_line:to_line])
+# after = "\n".join(pycode[to_line:])
+# fixed = autopep8.fix_code(to_fix)
+# final_code = "\n".join((before, fixed, after))
+# # TODO: modify code instead of replacing it when binding on godot_string
+# # operation is available
+# lib.godot_string_destroy(code)
+# lib.godot_string_new_unicode_data(code, final_code, len(final_code))
    pass
+
+
+__global_constants = {}
+
+
+cdef api void pythonscript_add_global_constant(
+    godot_pluginscript_language_data *p_data,
+    const godot_string *p_variable,
+    const godot_variant *p_value
+) with gil:
+    # Godot adds global constants very early (first as an empty variant
+    # placeholder before any script is loaded, then as a proper loaded script).
+    # So it's possible this function gets called before `pythonscript_script_init`
+    # (which is supposed to do the lazy `_initialize_bindings`).
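+    # Hence the defensive `_initialize_bindings()` below: it is expected to be
+    # a no-op once the bindings have already been initialized.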
+    _initialize_bindings()
+    name = godot_string_to_pyobj(p_variable)
+    value = godot_variant_to_pyobj(p_value)
+    __global_constants[name] = value
+
+
+cdef api godot_string pythonscript_debug_get_error(
+    godot_pluginscript_language_data *p_data
+) with gil:
+    cdef godot_string ret
+    pyobj_to_godot_string("Nothing", &ret)
+    return ret
+
+
+cdef api int pythonscript_debug_get_stack_level_count(
+    godot_pluginscript_language_data *p_data
+) with gil:
+    return 1
+
+
+cdef api int pythonscript_debug_get_stack_level_line(
+    godot_pluginscript_language_data *p_data,
+    int p_level
+) with gil:
+    return 1
+
+
+cdef api godot_string pythonscript_debug_get_stack_level_function(
+    godot_pluginscript_language_data *p_data,
+    int p_level
+) with gil:
+    cdef godot_string ret
+    pyobj_to_godot_string("Nothing", &ret)
+    return ret
+
+
+cdef api godot_string pythonscript_debug_get_stack_level_source(
+    godot_pluginscript_language_data *p_data,
+    int p_level
+) with gil:
+    cdef godot_string ret
+    pyobj_to_godot_string("Nothing", &ret)
+    return ret
+
+
+cdef api void pythonscript_debug_get_stack_level_locals(
+    godot_pluginscript_language_data *p_data,
+    int p_level,
+    godot_pool_string_array *p_locals,
+    godot_array *p_values,
+    int p_max_subitems,
+    int p_max_depth
+) with gil:
+    pass
+
+
+cdef api void pythonscript_debug_get_stack_level_members(
+    godot_pluginscript_language_data *p_data,
+    int p_level,
+    godot_pool_string_array *p_members,
+    godot_array *p_values,
+    int p_max_subitems,
+    int p_max_depth
+) with gil:
+    pass
+
+
+cdef api void pythonscript_debug_get_globals(
+    godot_pluginscript_language_data *p_data,
+    godot_pool_string_array *p_locals,
+    godot_array *p_values,
+    int p_max_subitems,
+    int p_max_depth
+) with gil:
+    pass
+
+
+cdef api godot_string pythonscript_debug_parse_stack_level_expression(
+    godot_pluginscript_language_data *p_data,
+    int p_level,
+    const godot_string *p_expression,
+    int p_max_subitems,
+    int p_max_depth
+) with gil:
+    cdef godot_string ret
+    pyobj_to_godot_string("Nothing", &ret)
+    return ret
+
+
+cdef api void pythonscript_get_public_functions(
+    godot_pluginscript_language_data *p_data,
+    godot_array *r_functions
+) with gil:
+    pass
+
+
+cdef api void pythonscript_get_public_constants(
+    godot_pluginscript_language_data *p_data,
+    godot_dictionary *r_constants
+) with gil:
+    pass diff --git a/pythonscript/_godot_instance.pxi b/pythonscript/_godot_instance.pxi new file mode 100644 index 0000000..852c04d --- /dev/null +++ b/pythonscript/_godot_instance.pxi @@ -0,0 +1,181 @@ +# cython: c_string_type=unicode, c_string_encoding=utf8
+
+from libc.stddef cimport wchar_t
+from cpython cimport Py_INCREF, Py_DECREF, PyObject
+
+from godot._hazmat.gdnative_api_struct cimport (
+    godot_string,
+    godot_string_name,
+    godot_bool,
+    godot_array,
+    godot_pool_string_array,
+    godot_object,
+    godot_variant,
+    godot_variant_call_error,
+    godot_method_rpc_mode,
+    godot_pluginscript_script_data,
+    godot_pluginscript_instance_data,
+    godot_variant_call_error_error,
+    godot_variant_type,
+)
+from godot._hazmat.gdapi cimport pythonscript_gdapi10 as gdapi10
+from godot._hazmat.conversion cimport (
+    godot_string_to_pyobj,
+    godot_variant_to_pyobj,
+    pyobj_to_godot_variant,
+    godot_string_name_to_pyobj,
+)
+
+
+cdef api godot_pluginscript_instance_data* pythonscript_instance_init(
+    godot_pluginscript_script_data *p_data,
+    godot_object *p_owner
+) with gil:
+    cdef object instance = (<object>p_data)()
+    (<Object>instance)._gd_ptr = p_owner
+    Py_INCREF(instance)
+    return <PyObject*>instance
+
+
+cdef api void pythonscript_instance_finish(
+    godot_pluginscript_instance_data *p_data
+) with gil:
+    Py_DECREF(<object>p_data)
+
+
+cdef api godot_bool pythonscript_instance_set_prop(
+    godot_pluginscript_instance_data *p_data,
+    const godot_string *p_name,
+    const godot_variant *p_value
+) with gil:
+    cdef object instance = <object>p_data
+    cdef str key = godot_string_to_pyobj(p_name)
+
+    # Should look among properties added by the script and its parents,
+    # not Godot native properties that are handled by the caller
+    try:
+        field = instance.__exported[key]
+    except KeyError:
+        return False
+    if not isinstance(field, ExportedField):
+        return False
+
+    try:
+        setattr(instance, key, godot_variant_to_pyobj(p_value))
+        return True
+    except Exception:
+        traceback.print_exc()
+        return False
+
+
+cdef api godot_bool pythonscript_instance_get_prop(
+    godot_pluginscript_instance_data *p_data,
+    const godot_string *p_name,
+    godot_variant *r_ret
+) with gil:
+    cdef object instance = <object>p_data
+    cdef object ret
+    cdef object field
+    cdef str key = godot_string_to_pyobj(p_name)
+
+    # Should look among properties added by the script and its parents,
+    # not Godot native properties that are handled by the caller
+    try:
+        field = instance.__exported[key]
+    except KeyError:
+        return False
+
+    try:
+        if isinstance(field, ExportedField):
+            ret = getattr(instance, godot_string_to_pyobj(p_name))
+            pyobj_to_godot_variant(ret, r_ret)
+        elif isinstance(field, SignalField):
+            # TODO: Not sure how to create a Variant::Signal from GDNative
+            return False
+        else:
+            # TODO: Not sure how to create a Variant::Callable from GDNative
+            return False
+        return True
+
+    except Exception:
+        traceback.print_exc()
+        return False
+
+
+cdef api godot_variant pythonscript_instance_call_method(
+    godot_pluginscript_instance_data *p_data,
+    const godot_string_name *p_method,
+    const godot_variant **p_args,
+    int p_argcount,
+    godot_variant_call_error *r_error
+) with gil:
+    cdef godot_variant var_ret
+    cdef object instance = <object>p_data
+    cdef object fn
+    cdef str key = godot_string_name_to_pyobj(p_method)
+
+    # TODO: optimize this by caching godot_string_name -> method lookup
+    fn = instance.__exported.get(key)
+    if not callable(fn):
+        r_error.error = godot_variant_call_error_error.GODOT_CALL_ERROR_CALL_ERROR_INVALID_METHOD
+        gdapi10.godot_variant_new_nil(&var_ret)
+        return var_ret
+
+    cdef int i
+    cdef list pyargs
+    cdef object ret
+    try:
+        pyargs = [godot_variant_to_pyobj(p_args[i]) for i in range(p_argcount)]
+        ret = fn(instance, *pyargs)
+        r_error.error = godot_variant_call_error_error.GODOT_CALL_ERROR_CALL_OK
+        pyobj_to_godot_variant(ret, &var_ret)
+        return var_ret
+
+    except NotImplementedError:
+        r_error.error = godot_variant_call_error_error.GODOT_CALL_ERROR_CALL_ERROR_INVALID_METHOD
+
+    except TypeError:
+        traceback.print_exc()
+        # TODO: handle errors here
+        r_error.error = godot_variant_call_error_error.GODOT_CALL_ERROR_CALL_ERROR_INVALID_ARGUMENT
+        r_error.argument = 1
+        r_error.expected = godot_variant_type.GODOT_VARIANT_TYPE_NIL
+    except Exception:
+        traceback.print_exc()
+        r_error.error = godot_variant_call_error_error.GODOT_CALL_ERROR_CALL_ERROR_INVALID_METHOD
+
+    # TODO: also catch other exception types ?
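+    # The calling convention sketched above: report status through
+    # `r_error.error` (GODOT_CALL_ERROR_CALL_OK on success, one of the
+    # GODOT_CALL_ERROR_CALL_ERROR_* values otherwise) and always hand back a
+    # valid `godot_variant`; every failure path falls through to the nil
+    # variant below.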
+
+    # Something bad occurred, return a default None variant
+    gdapi10.godot_variant_new_nil(&var_ret)
+    return var_ret
+
+
+cdef api void pythonscript_instance_notification(
+    godot_pluginscript_instance_data *p_data,
+    int p_notification
+) with gil:
+    cdef object instance = <object>p_data
+    # Godot's notification should call all parent `_notification`
+    # methods (better not use `super()._notification` in those methods...)
+    # TODO: cache the methods to call ?
+    for parentcls in instance.__class__.__mro__:
+        try:
+            fn = parentcls.__exported["_notification"]
+        except (AttributeError, KeyError):
+            pass
+        else:
+            fn(instance, p_notification)
+
+
+# Useful ?
+
+# cdef api void pythonscript_instance_refcount_incremented(
+#     godot_pluginscript_instance_data *p_data
+# ) with gil:
+#     pass
+
+
+# cdef api bool pythonscript_instance_refcount_decremented(
+#     godot_pluginscript_instance_data *p_data
+# ) with gil:
+#     pass diff --git a/pythonscript/_godot_io.pxi b/pythonscript/_godot_io.pxi new file mode 100644 index 0000000..70befc6 --- /dev/null +++ b/pythonscript/_godot_io.pxi @@ -0,0 +1,129 @@ +import sys
+import builtins
+import traceback
+from io import TextIOBase
+from threading import Lock
+
+from godot._hazmat.conversion cimport (
+    godot_string_to_pyobj,
+    pyobj_to_godot_string,
+    godot_variant_to_pyobj,
+)
+from godot._hazmat.gdnative_api_struct cimport (
+    godot_string,
+    godot_string_name,
+    godot_bool,
+    godot_array,
+    godot_pool_string_array,
+    godot_object,
+    godot_variant,
+    godot_variant_call_error,
+    godot_method_rpc_mode,
+    godot_pluginscript_script_data,
+    godot_pluginscript_instance_data,
+    godot_variant_call_error_error,
+    godot_variant_type
+)
+
+
+cpdef inline void godot_print(str pystr):
+    cdef godot_string gdstr
+    pyobj_to_godot_string(pystr, &gdstr)
+    with nogil:
+        gdapi10.godot_print(&gdstr)
+        gdapi10.godot_string_destroy(&gdstr)
+
+
+class StdinCapture(TextIOBase):
+    def __init__(self):
+        self._enabled = False
+        self._old_stdin = None
+
+    def install(self):
+        if self._enabled:
+            raise RuntimeError("Already enabled !")
+
+        self._old_stdin = sys.stdin
+        sys.stdin = self
+        self._enabled = True
+
+    def remove(self):
+        if not self._enabled:
+            raise RuntimeError("Not enabled !")
+        sys.stdin = self._old_stdin
+        self._enabled = False
+
+
+class StdoutStderrCapture(TextIOBase):
+    def __init__(self):
+        self._enabled = False
+        self._old_stdout = None
+        self._old_stderr = None
+
+    def install(self):
+        if self._enabled:
+            raise RuntimeError("Already enabled !")
+
+        self._old_stderr = sys.stderr
+        sys.stderr = self
+        self._old_stdout = sys.stdout
+        sys.stdout = self
+        self._enabled = True
+
+        # Don't forget to flush the original streams if any (for instance Windows
+        # GUI apps without a console have sys.__stdout__/__stderr__ set to None)
+        if self._old_stdout is not None:
+            self._old_stdout.flush()
+        if self._old_stderr is not None:
+            self._old_stderr.flush()
+
+    def remove(self):
+        if not self._enabled:
+            raise RuntimeError("Not enabled !")
+        # # Sanity check, we shouldn't be mixing
+        # if sys.stderr is not self._stderr or sys.stdout is not self._stdout:
+        #     raise RuntimeError("sys.stderr/stdout has been patched behind our back !")
+        sys.stderr = self._old_stderr
+        sys.stdout = self._old_stdout
+        self._enabled = False
+
+
+class StdoutStderrCaptureToGodot(StdoutStderrCapture):
+
+    def __init__(self):
+        self.buffer = ""
+        self.callbacks = {}
+        self._enabled = False
+        self._old_stdout = None
+        self._old_stderr = None
+        self._lock = Lock()
+
+    def write(self, b):
+        with self._lock:
+            self.buffer += b
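+            # Line-buffered forwarding: everything up to the last newline goes
+            # to Godot's console right away (see the rsplit just below), the
+            # tail is kept until more data arrives or flush() is called.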
+ if "\n" in self.buffer: + to_print, self.buffer = self.buffer.rsplit("\n", 1) + self._write(to_print) + + def flush(self): + with self._lock: + if self.buffer: + self._write(self.buffer) + self.buffer = "" + + def _write(self, buff): + cdef godot_string gdstr + pyobj_to_godot_string(buff, &gdstr) + with nogil: + gdapi10.godot_print(&gdstr) + gdapi10.godot_string_destroy(&gdstr) + + +cdef _capture_io_streams = None + + +cdef install_io_streams_capture(): + global _capture_io_streams + assert _capture_io_streams is None + _capture_io_streams = StdoutStderrCaptureToGodot() + _capture_io_streams.install() diff --git a/pythonscript/_godot_profiling.pxi b/pythonscript/_godot_profiling.pxi new file mode 100644 index 0000000..04c0acf --- /dev/null +++ b/pythonscript/_godot_profiling.pxi @@ -0,0 +1,196 @@ +# cython: c_string_type=unicode, c_string_encoding=utf8 + +from godot._hazmat.gdnative_api_struct cimport ( + godot_pluginscript_language_data, + godot_pluginscript_profiling_data, +) +from godot._hazmat.gdapi cimport pythonscript_gdapi10 as gdapi10 + +import sys +from collections import defaultdict +from time import perf_counter + + +# TODO: should be greatly improved by using cdef struct and godot_string_name everywhere + + +class MethProfile: + __slots__ = ( + "call_count", + "self_time", + "total_time", + "cur_frame_call_count", + "cur_frame_self_time", + "cur_frame_total_time", + "last_frame_call_count", + "last_frame_self_time", + "last_frame_total_time", + ) + + def __init__(self): + self.call_count = 0 + self.self_time = 0 + self.total_time = 0 + self.cur_frame_call_count = 0 + self.cur_frame_self_time = 0 + self.cur_frame_total_time = 0 + self.last_frame_call_count = 0 + self.last_frame_self_time = 0 + self.last_frame_total_time = 0 + + +class FuncCallProfile: + __slots__ = ("signature", "start", "end", "out_of_func_time") + + def __init__(self, signature): + self.signature = signature + self.start = perf_counter() + self.end = None + # Time spend calling another function + self.out_of_func_time = 0 + + def add_out_of_func(self, time): + self.out_of_func_time += time + + def get_self_time(self): + return self.get_total_time() - self.out_of_func_time + + def done(self): + self.end = perf_counter() + + def get_total_time(self): + return self.end - self.start + + +class Profiler: + def __init__(self): + self.per_meth_profiling = defaultdict(MethProfile) + self._profile_stack = [] + + @property + def _per_thread_profile_stack(self): + return self._profile_stack + + # TODO: Make this thread safe + # Not sure if multithreading is supported by sys.setprofile anyway... 
+ # loc = threading.local() + # key = 'profile_stack_%s' % id(self) + # stack = getattr(loc, key, None) + # if not stack: + # stack = [] + # setattr(loc, key, stack) + # return stack + + def next_frame(self): + for meth_profile in self.per_meth_profiling.values(): + meth_profile.call_count = meth_profile.cur_frame_call_count + meth_profile.self_time = meth_profile.cur_frame_self_time + meth_profile.total_time = meth_profile.cur_frame_total_time + meth_profile.last_frame_call_count = meth_profile.cur_frame_call_count + meth_profile.last_frame_self_time = meth_profile.cur_frame_self_time + meth_profile.last_frame_total_time = meth_profile.cur_frame_total_time + meth_profile.cur_frame_call_count = 0 + meth_profile.cur_frame_self_time = 0 + meth_profile.cur_frame_total_time = 0 + + def get_profilefunc(self): + def profilefunc(frame, event, arg): + # TODO: improve this hack to avoid profiling builtins functions + if frame.f_code.co_filename.startswith("<"): + return + + if event in ("call", "c_call"): + # TODO generate signature ahead of time and store it into the object + signature = "{path}::{line}::{name}".format( + path=frame.f_code.co_filename, + line=frame.f_lineno, + name=frame.f_code.co_name, + ) + self.per_meth_profiling[signature].cur_frame_call_count += 1 + self._per_thread_profile_stack.append(FuncCallProfile(signature)) + else: + try: + callprof = self._per_thread_profile_stack.pop() + except IndexError: + # `pybind_profiling_start` has been called before the + # profiler was enable, so _per_thread_profile_stack lacks + # it representation + return + + callprof.done() + signature = callprof.signature + prof = self.per_meth_profiling[signature] + prof.cur_frame_total_time += callprof.get_total_time() + prof.cur_frame_self_time += callprof.get_self_time() + if self._per_thread_profile_stack: + self._per_thread_profile_stack[-1].add_out_of_func( + callprof.get_total_time() + ) + + return profilefunc + + +cdef object profiler = None + + +cdef api void pythonscript_profiling_start( + godot_pluginscript_language_data *p_data +) with gil: + global profiler + profiler = Profiler() + sys.setprofile(profiler.get_profilefunc()) + + +cdef api void pythonscript_profiling_stop( + godot_pluginscript_language_data *p_data +) with gil: + global profiler + profiler = None + sys.setprofile(None) + + +cdef api int pythonscript_profiling_get_accumulated_data( + godot_pluginscript_language_data *p_data, + godot_pluginscript_profiling_data *r_info, + int p_info_max +) with gil: + # Sort function to make sure we can display the most consuming ones + sorted_and_limited = sorted( + profiler.per_meth_profiling.items(), key=lambda x: -x[1].self_time + )[:p_info_max] + cdef int i + cdef object signature + cdef object profile + for i, (signature, profile) in enumerate(sorted_and_limited): + pyobj_to_godot_string_name(signature, &r_info[i].signature) + r_info[i].call_count = profile.call_count + r_info[i].total_time = int(profile.total_time * 1e6) + r_info[i].self_time = int(profile.self_time * 1e6) + return len(sorted_and_limited) + + +cdef api int pythonscript_profiling_get_frame_data( + godot_pluginscript_language_data *p_data, + godot_pluginscript_profiling_data *r_info, + int p_info_max +) with gil: + # Sort function to make sure we can display the most consuming ones + sorted_and_limited = sorted( + profiler.per_meth_profiling.items(), key=lambda x: -x[1].last_frame_self_time + )[:p_info_max] + cdef int i + cdef object signature + cdef object profile + for i, (signature, profile) in 
enumerate(sorted_and_limited): + pyobj_to_godot_string_name(signature, &r_info[i].signature) + r_info[i].call_count = profile.last_frame_call_count + r_info[i].total_time = int(profile.last_frame_total_time * 1e6) + r_info[i].self_time = int(profile.last_frame_self_time * 1e6) + return len(sorted_and_limited) + + +cdef api void pythonscript_profiling_frame( + godot_pluginscript_language_data *p_data +) with gil: + if profiler is not None: + profiler.next_frame() diff --git a/pythonscript/_godot_script.pxi b/pythonscript/_godot_script.pxi new file mode 100644 index 0000000..9a82e6e --- /dev/null +++ b/pythonscript/_godot_script.pxi @@ -0,0 +1,232 @@ +# cython: c_string_type=unicode, c_string_encoding=utf8 + +import importlib + +from cpython.ref cimport PyObject + +from godot._hazmat.gdnative_api_struct cimport ( + godot_pluginscript_language_data, + godot_string, + godot_bool, + godot_array, + godot_pool_string_array, + godot_object, + godot_variant, + godot_error, + godot_string_name, + godot_pluginscript_script_data, + godot_pluginscript_script_manifest, + GODOT_OK, + GODOT_ERR_UNAVAILABLE, + GODOT_ERR_FILE_BAD_PATH, + GODOT_ERR_PARSE_ERROR, + GODOT_METHOD_FLAG_FROM_SCRIPT, + GODOT_METHOD_RPC_MODE_DISABLED, +) +from godot._hazmat.gdapi cimport pythonscript_gdapi10 as gdapi10 +from godot._hazmat.conversion cimport ( + godot_string_to_pyobj, + pyobj_to_godot_string, + pyobj_to_godot_string_name, + pytype_to_godot_type, +) +from godot._hazmat.internal cimport ( + get_pythonscript_verbose, + get_exposed_class, + set_exposed_class, + destroy_exposed_class, +) +from godot.bindings cimport _initialize_bindings, Object +from godot.builtins cimport Array, Dictionary + +import inspect +import traceback + +from godot.tags import ExportedField, SignalField + + +cdef inline godot_pluginscript_script_manifest _build_empty_script_manifest(): + cdef godot_pluginscript_script_manifest manifest + manifest.data = NULL + gdapi10.godot_string_name_new_data(&manifest.name, "") + manifest.is_tool = False + gdapi10.godot_string_name_new_data(&manifest.base, "") + gdapi10.godot_dictionary_new(&manifest.member_lines) + gdapi10.godot_array_new(&manifest.methods) + gdapi10.godot_array_new(&manifest.signals) + gdapi10.godot_array_new(&manifest.properties) + return manifest + + +cdef Dictionary _build_signal_info(object signal): + cdef Dictionary methinfo = Dictionary() + methinfo["name"] = signal.name + # Dummy data, only name is important here + methinfo["args"] = Array() + methinfo["default_args"] = Array() + methinfo["return"] = None + methinfo["flags"] = GODOT_METHOD_FLAG_FROM_SCRIPT + return methinfo + + +cdef Dictionary _build_method_info(object meth, object methname): + cdef Dictionary methinfo = Dictionary() + spec = inspect.getfullargspec(meth) + methinfo["name"] = methname + # TODO: Handle classmethod/staticmethod + methinfo["args"] = Array(spec.args) + methinfo["default_args"] = Array() # TODO + # TODO: use annotation to determine return type ? 
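+    # (a plausible implementation: map `inspect.signature(meth).return_annotation`
+    # through `pytype_to_godot_type` when the annotation is present)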
+ methinfo["return"] = None + methinfo["flags"] = GODOT_METHOD_FLAG_FROM_SCRIPT + methinfo["rpc_mode"] = getattr( + meth, "__rpc", GODOT_METHOD_RPC_MODE_DISABLED + ) + return methinfo + + +cdef Dictionary _build_property_info(object prop): + cdef Dictionary propinfo = Dictionary() + propinfo["name"] = prop.name + propinfo["type"] = pytype_to_godot_type(prop.type) + propinfo["hint"] = prop.hint + propinfo["hint_string"] = prop.hint_string + propinfo["usage"] = prop.usage + propinfo["default_value"] = prop.default + propinfo["rset_mode"] = prop.rpc + return propinfo + +cdef inline object is_method(object meth): + if inspect.isfunction(meth): + return True + + if 'cython_function' in type(meth).__name__: + return True + + return False + +cdef godot_pluginscript_script_manifest _build_script_manifest(object cls): + cdef godot_pluginscript_script_manifest manifest + # No need to increase refcount here given `cls` is guaranteed to be kept + # until we call `destroy_exposed_class` + manifest.data = cls + pyobj_to_godot_string_name(cls.__name__, &manifest.name) + manifest.is_tool = cls.__tool + gdapi10.godot_dictionary_new(&manifest.member_lines) + + if cls.__bases__: + # Only one Godot parent class (checked at class definition time) + godot_parent_class = next( + (b for b in cls.__bases__ if issubclass(b, Object)) + ) + if not godot_parent_class.__dict__.get("__exposed_python_class"): + base = godot_parent_class.__name__ + else: + # Pluginscript wants us to return the parent as a path + base = f"res://{godot_parent_class.__module__.replace('.', '/')}.py" + pyobj_to_godot_string_name(base, &manifest.base) + + methods = Array() + signals = Array() + properties = Array() + for k, v in cls.__exported.items(): + if isinstance(v, ExportedField): + properties.append(_build_property_info(v)) + elif isinstance(v, SignalField): + signals.append(_build_signal_info(v)) + else: + assert is_method(v) + methods.append(_build_method_info(v, k)) + gdapi10.godot_array_new_copy(&manifest.methods, &methods._gd_data) + gdapi10.godot_array_new_copy(&manifest.signals, &signals._gd_data) + gdapi10.godot_array_new_copy(&manifest.properties, &properties._gd_data) + + return manifest + + +cdef api godot_pluginscript_script_manifest pythonscript_script_init( + godot_pluginscript_language_data *p_data, + const godot_string *p_path, + const godot_string *p_source, + godot_error *r_error +) with gil: + # Godot class&singleton are not all available at Pythonscript bootstrap. + # Hence we wait until the Pythonscript start being actually used (i.e. until + # the first Python script is loaded) before initializing the bindings. + _initialize_bindings() + + cdef object path = godot_string_to_pyobj(p_path) + if get_pythonscript_verbose(): + print(f"Loading python script from {path}") + + if not path.startswith("res://") or not path.rsplit(".", 1)[-1] in ( + "py", + "pyc", + "pyo", + "pyd", + ): + print( + f"Bad python script path `{path}`, must starts by `res://` and ends with `.py/pyc/pyo/pyd`" + ) + r_error[0] = GODOT_ERR_FILE_BAD_PATH + return _build_empty_script_manifest() + + # TODO: possible bug if res:// is not part of PYTHONPATH + # Remove `res://`, `.py` and replace / by . + modname = path[6:].rsplit(".", 1)[0].replace("/", ".") + + is_reload = modname in sys.modules + if is_reload: + # Reloading is done in two steps: remove the exported class, + # then do module reloading through importlib. 
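+        # i.e. conceptually:
+        #     destroy_exposed_class(get_exposed_class(modname))
+        #     importlib.reload(sys.modules[modname])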
+        cls = get_exposed_class(modname)
+
+        # If the module has no exported class, it has no real connection with
+        # Godot and doesn't need to be reloaded
+        if cls:
+            if get_pythonscript_verbose():
+                print(f"Reloading python script from {path} ({modname})")
+            destroy_exposed_class(cls)
+            importlib.reload(sys.modules[modname])
+
+    try:
+        importlib.import_module(modname)  # Force lazy loading of the module
+        cls = get_exposed_class(modname)
+
+    except BaseException:
+        # If we are here it could be because the file doesn't exist
+        # or (more likely) the file content is not valid python (or
+        # doesn't provide an exposed class)
+        print(
+            f"Got exception loading {path} ({modname}): {traceback.format_exc()}"
+        )
+        r_error[0] = GODOT_ERR_PARSE_ERROR
+        return _build_empty_script_manifest()
+
+    if cls is None:
+        print(
+            f"Cannot load {path} ({modname}) because it doesn't expose any class to Godot"
+        )
+        r_error[0] = GODOT_ERR_PARSE_ERROR
+        return _build_empty_script_manifest()
+
+    if is_reload:
+        # During reloading, Godot inits the new class before finishing the old
+        # one (so `pythonscript_script_finish` is going to be called after this
+        # function returns).
+        # Hence we must manually increase the refcount to prevent finish from
+        # removing the class.
+        # Apparently multiple PluginScript instances can exist at the same time for the same script.
+        set_exposed_class(cls)
+
+    r_error[0] = GODOT_OK
+    return _build_script_manifest(cls)
+
+
+cdef api void pythonscript_script_finish(
+    godot_pluginscript_script_data *p_data
+) with gil:
+    cdef object cls = <object>p_data
+    if get_pythonscript_verbose():
+        # Using print here will cause a crash on editor/game shutdown
+        sys.__stdout__.write(f"Destroying python script {cls.__name__}\n")
+    destroy_exposed_class(cls) diff --git a/pythonscript/godot/SConscript b/pythonscript/godot/SConscript new file mode 100644 index 0000000..8db5f21 --- /dev/null +++ b/pythonscript/godot/SConscript @@ -0,0 +1,100 @@ +Import("env")
+
+
+SConscript(["_hazmat/SConscript"])
+
+
+pxds = [
+    File(x)
+    for x in (
+        "__init__.py",  # Not really a .pxd but still needed
+        "bindings.pxd",
+        "builtins.pxd",
+        "hazmat.pxd",
+        "pool_arrays.pxd",
+    )
+]
+env.Install("$DIST_SITE_PACKAGES/godot", [File("_version.py"), File("globals.py"), *pxds])
+env.AppendUnique(CYTHON_DEPS=pxds)
+
+
+env.Install("$DIST_SITE_PACKAGES/godot", env.CythonModule("tags", "tags.pyx"))
+
+
+### Builtins ###
+
+
+# TODO: merge pool_arrays into builtins
+godot_pool_arrays_srcs = env.Command(
+    target=("pool_arrays.pyx", "pool_arrays.pxd"),
+    source=("#/generation/generate_pool_arrays.py",),
+    action="python ${SOURCE} --output ${TARGET}",
+)
+env.Depends(
+    godot_pool_arrays_srcs,
+    ["#/generation/generate_pool_arrays.py", env.Glob("#/generation/pool_arrays_templates/*")],
+)
+
+
+godot_builtins_srcs = env.Command(
+    target=("builtins.pyx", "builtins.pxd"),
+    source=("#/generation/generate_builtins.py", "${godot_headers}/gdnative_api.json"),
+    action="python ${SOURCES[0]} --input ${SOURCES[1]} --output ${TARGET}",
+)
+env.Depends(
+    godot_builtins_srcs,
+    ["#/generation/generate_builtins.py", env.Glob("#/generation/builtins_templates/*")],
+)
+# TODO: remove this once pool_array is merged into builtins
+env.Depends(godot_builtins_srcs, godot_pool_arrays_srcs)
+
+
+env.Install("$DIST_SITE_PACKAGES/godot", env.CythonModule("pool_arrays", "pool_arrays.pyx"))
+env.Install("$DIST_SITE_PACKAGES/godot", env.CythonModule("builtins", "builtins.pyx"))
+
+
+### Bindings ###
+
+
+# Bindings module is a special snowflake given its size
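+# (bindings.pyx is generated from the whole Godot `api.json`, so it compiles to
+# a very large C file; hence the dedicated environment below tweaking
+# optimization & strip flags, and the `bindings_generate_sample` mode that
+# generates a reduced subset, e.g. for faster dev builds)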
+bindings_env = env.Clone() +sample = env["bindings_generate_sample"] + +# dont strip on debug builds +if not sample and not env["debug"]: + if env["CC_IS_GCC"]: + bindings_env.AppendUnique(LINKFLAGS=["-Wl,--strip-all"]) + elif env["CC_IS_CLANG"]: + bindings_env.AppendUnique(LINKFLAGS=["-Wl,-s"]) + +if sample or env["debug"]: + # Disable optimization for faster dev builds and ease of debugging + if env["CC_IS_MSVC"]: + bindings_env.AppendUnique(CFLAGS=["/Od"]) + else: + bindings_env.AppendUnique(CFLAGS=["-O0"]) +else: + if env["CC_IS_GCC"]: + bindings_env.AppendUnique(CFLAGS=["-Os", "-Wno-misleading-indentation"]) + elif env["CC_IS_CLANG"]: + bindings_env.AppendUnique(CFLAGS=["-Os"]) + elif env["CC_IS_MSVC"]: + bindings_env.AppendUnique(CFLAGS=["/Os"]) + + +godot_bindings_srcs = bindings_env.Command( + target=("bindings.pyx", "bindings.pxd", "bindings.pyi"), + source=("#/generation/generate_bindings.py", "${godot_headers}/api.json"), + action=("python ${SOURCES[0]} ${opts} --input ${SOURCES[1]} --output ${TARGET} "), + opts="--sample" if sample else "", +) +bindings_env.Depends( + godot_bindings_srcs, + ["#/generation/generate_bindings.py", bindings_env.Glob("#/generation/bindings_templates/*")], +) + + +bindings_env.Install( + "$DIST_SITE_PACKAGES/godot", bindings_env.CythonModule("bindings", "bindings.pyx") +) +bindings_env.Install("$DIST_SITE_PACKAGES/godot", "bindings.pyi") diff --git a/pythonscript/godot/__init__.py b/pythonscript/godot/__init__.py new file mode 100644 index 0000000..c75620d --- /dev/null +++ b/pythonscript/godot/__init__.py @@ -0,0 +1,44 @@ +# Start with a sanity check to ensure the loading is done from Godot-Python +# (and not from a regular Python interpreter which would lead to a segfault). +# The idea is we should have the following loading order: +# godot binary -> pythonscript.so -> _godot.so -> godot/__init__.py +import sys + +if "_godot" not in sys.modules: + raise ImportError( + "Cannot initialize godot module given Godot GDNative API not available.\n" + "This is most likely because you are running code from a regular Python interpreter" + " (i.e. doing something like `python my_script.py`) while godot module is only available" + " to Python code loaded from Godot through Godot-Python plugin." 
+ ) +del sys + +from godot._version import __version__ +from godot.tags import ( + MethodRPCMode, + PropertyHint, + PropertyUsageFlag, + rpcdisabled, + rpcremote, + rpcmaster, + rpcpuppet, + rpcslave, + rpcremotesync, + rpcsync, + rpcmastersync, + rpcpuppetsync, + signal, + export, + exposed, +) +from godot.pool_arrays import ( + PoolIntArray, + PoolRealArray, + PoolByteArray, + PoolVector2Array, + PoolVector3Array, + PoolColorArray, + PoolStringArray, +) +from godot.builtins import * +from godot.bindings import * diff --git a/pythonscript/godot/_hazmat/SConscript b/pythonscript/godot/_hazmat/SConscript new file mode 100644 index 0000000..5e6efac --- /dev/null +++ b/pythonscript/godot/_hazmat/SConscript @@ -0,0 +1,33 @@ +Import("env") + + +pxds = [ + File(x) + for x in ( + "__init__.py", # Not really a .pxd but still needed + "conversion.pxd", + "internal.pxd", + "gdapi.pxd", + "gdnative_api_struct.pxd", + ) +] +env.AppendUnique(CYTHON_DEPS=pxds) +env.Install("$DIST_SITE_PACKAGES/godot/_hazmat", pxds) + + +### Generate godot api .h -> gdnative_api_struct.pxd ### + + +gdnative_api_struct_pxd = env.Command( + target="gdnative_api_struct.pxd", + source=["#/generation/generate_gdnative_api_struct.py", env["godot_headers"]], + action="python ${SOURCES[0]} --input ${SOURCES[1]} --output ${TARGET}", +) + + +### Cython modules ### + + +env.Install("$DIST_SITE_PACKAGES/godot/_hazmat", env.CythonModule("conversion", "conversion.pyx")) + +env.Install("$DIST_SITE_PACKAGES/godot/_hazmat", env.CythonModule("internal", "internal.pyx")) diff --git a/pythonscript/godot/_hazmat/__init__.py b/pythonscript/godot/_hazmat/__init__.py new file mode 100644 index 0000000..3db0f9d --- /dev/null +++ b/pythonscript/godot/_hazmat/__init__.py @@ -0,0 +1,3 @@ +# `_hazmat` package containing all stuff not to be exposed to the user. +# Note we don't reexport anything in the `__init__.pxd` (i.e. this file) +# to avoid subtle recursive dependency errors within cython. diff --git a/pythonscript/godot/_hazmat/conversion.pxd b/pythonscript/godot/_hazmat/conversion.pxd new file mode 100644 index 0000000..369b081 --- /dev/null +++ b/pythonscript/godot/_hazmat/conversion.pxd @@ -0,0 +1,136 @@ +from libc.stddef cimport wchar_t +from libc.stdio cimport printf + +from godot._hazmat.gdapi cimport pythonscript_gdapi10 as gdapi10 +from godot._hazmat.gdnative_api_struct cimport ( + godot_string, + godot_string_name, + godot_int, + godot_vector2, + godot_variant, + godot_variant_type, +) +from godot.builtins cimport GDString, NodePath + + +# Godot string are basically a vector of wchar_t, each wchar_t representing +# a single unicode character (i.e. there is no surrogates support). +# The sad part is wchar_t is not portable: it is 16bits long on Windows and +# 32bits long on Linux and MacOS... +# So we end up with a UCS2 encoding on Windows and UCS4 everywhere else :'( +IF UNAME_SYSNAME == "Windows": + # Specify endianess otherwise `encode` appends a BOM at the start of the converted string + DEF _STRING_ENCODING = "UTF-16-LE" + DEF _STRING_CODEPOINT_LENGTH = 2 +ELSE: + DEF _STRING_ENCODING = "UTF-32-LE" + DEF _STRING_CODEPOINT_LENGTH = 4 + + +cdef inline str godot_string_to_pyobj(const godot_string *p_gdstr): + # TODO: unicode&windows support is most likely broken... 
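+    # (to illustrate the UCS4 case: "abc".encode("UTF-32-LE") gives 12 bytes,
+    # b"a\x00\x00\x00b\x00\x00\x00c\x00\x00\x00", which is why the slice below
+    # reads `length * _STRING_CODEPOINT_LENGTH` bytes)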
+    cdef char *raw = <char*>gdapi10.godot_string_wide_str(p_gdstr)
+    cdef godot_int length = gdapi10.godot_string_length(p_gdstr)
+    return raw[:length * _STRING_CODEPOINT_LENGTH].decode(_STRING_ENCODING)
+
+    # cdef char *raw = <char*>gdapi10.godot_string_wide_str(p_gdstr)
+    # cdef godot_int length = gdapi10.godot_string_length(p_gdstr)
+    # printf("==========> godot_string_to_pyobj ")
+    # cdef int i
+    # for i in range(length):
+    #     printf("%c ", raw[i * 4]);
+    # printf("\n")
+    # cdef object ret = raw[:length * _STRING_CODEPOINT_LENGTH].decode(_STRING_ENCODING)
+    # print('==>ret: %r' % ret)
+    # return ret
+
+
+cdef inline void pyobj_to_godot_string(str pystr, godot_string *p_gdstr):
+    # TODO: unicode&windows support is most likely broken...
+    cdef bytes raw = pystr.encode(_STRING_ENCODING)
+    gdapi10.godot_string_new_with_wide_string(
+        p_gdstr, <wchar_t*>(<char*>raw), len(pystr)
+    )
+
+
+cdef inline str godot_string_name_to_pyobj(const godot_string_name *p_gdname):
+    cdef godot_string strname = gdapi10.godot_string_name_get_name(p_gdname)
+    cdef ret = godot_string_to_pyobj(&strname)
+    gdapi10.godot_string_destroy(&strname)
+    return ret
+
+
+cdef inline void pyobj_to_godot_string_name(str pystr, godot_string_name *p_gdname):
+    cdef godot_string strname
+    pyobj_to_godot_string(pystr, &strname)
+    gdapi10.godot_string_name_new(p_gdname, &strname)
+    gdapi10.godot_string_destroy(&strname)
+
+
+cdef object godot_variant_to_pyobj(const godot_variant *p_gdvar)
+cdef bint pyobj_to_godot_variant(object pyobj, godot_variant *p_var)
+
+cdef bint is_pytype_compatible_with_godot_variant(object pytype)
+cdef object godot_type_to_pytype(godot_variant_type gdtype)
+cdef godot_variant_type pytype_to_godot_type(object pytype)
+
+cdef GDString ensure_is_gdstring(object gdstring_or_pystr)
+cdef NodePath ensure_is_nodepath(object nodepath_or_pystr)
+
+
+# TODO: finish this...
+ +# cdef inline object cook_slice(slice slice_, godot_int size, godot_int *r_start, godot_int *r_stop, godot_int *r_step, godot_int *r_items): +# cdef godot_int start +# cdef godot_int stop +# cdef godot_int step + +# step = slice_.step if slice_.step is not None else 1 +# if step == 0: +# raise ValueError("range() arg 3 must not be zero") +# elif step > 0: +# start = slice_.start if slice_.start is not None else 0 +# stop = slice_.stop if slice_.stop is not None else size +# else: +# start = slice_.start if slice_.start is not None else size +# stop = slice_.stop if slice_.stop is not None else -size - 1 + +# r_start[0] = cook_slice_start(size, start) +# r_stop[0] = cook_slice_stop(size, stop) +# r_step[0] = step +# r_items[0] = cook_slice_get_items(size, start, stop, step) + +# return None + + +# cdef inline godot_int cook_slice_start(godot_int size, godot_int start): +# if start > size - 1: +# return size - 1 +# elif start < 0: +# start += size +# if start < 0: +# return 0 +# return start + + +# cdef inline godot_int cook_slice_stop(godot_int size, godot_int stop): +# if stop > size: +# return size +# elif stop < -size: +# return -1 +# elif stop < 0: +# stop += size +# return stop + + +# cdef inline godot_int cook_slice_get_items(godot_int size, godot_int start, godot_int stop, godot_int step): +# cdef godot_int items +# if step > 0: +# if start >= stop: +# return 0 +# items = 1 + (stop - start - 1) // step +# else: +# if start <= stop: +# return 0 +# items = 1 + (stop - start + 1) // step +# return items if items > 0 else 0 diff --git a/pythonscript/godot/_hazmat/conversion.pyx b/pythonscript/godot/_hazmat/conversion.pyx new file mode 100644 index 0000000..0b0a429 --- /dev/null +++ b/pythonscript/godot/_hazmat/conversion.pyx @@ -0,0 +1,434 @@ +from libc.stddef cimport wchar_t +from libc.stdio cimport printf + +from godot._hazmat.gdapi cimport pythonscript_gdapi10 as gdapi10 +from godot._hazmat.gdnative_api_struct cimport ( + godot_string, + godot_string_name, + godot_int, + godot_vector2, + godot_variant, + godot_variant_type, +) +from godot.bindings cimport Object +from godot.builtins cimport ( + Vector2, + Rect2, + Vector3, + Transform2D, + Plane, + Quat, + AABB, + Basis, + Transform, + Color, + NodePath, + RID, + Dictionary, + Array, + GDString, + PoolByteArray, + PoolIntArray, + PoolRealArray, + PoolStringArray, + PoolVector2Array, + PoolVector3Array, + PoolColorArray, +) + +from warnings import warn + + +GD_PY_TYPES = ( + (godot_variant_type.GODOT_VARIANT_TYPE_NIL, type(None)), + (godot_variant_type.GODOT_VARIANT_TYPE_BOOL, bool), + (godot_variant_type.GODOT_VARIANT_TYPE_INT, int), + (godot_variant_type.GODOT_VARIANT_TYPE_REAL, float), + (godot_variant_type.GODOT_VARIANT_TYPE_STRING, GDString), + (godot_variant_type.GODOT_VARIANT_TYPE_OBJECT, Object), + (godot_variant_type.GODOT_VARIANT_TYPE_VECTOR2, Vector2), + (godot_variant_type.GODOT_VARIANT_TYPE_RECT2, Rect2), + (godot_variant_type.GODOT_VARIANT_TYPE_VECTOR3, Vector3), + (godot_variant_type.GODOT_VARIANT_TYPE_TRANSFORM2D, Transform2D), + (godot_variant_type.GODOT_VARIANT_TYPE_PLANE, Plane), + (godot_variant_type.GODOT_VARIANT_TYPE_QUAT, Quat), + (godot_variant_type.GODOT_VARIANT_TYPE_AABB, AABB), + (godot_variant_type.GODOT_VARIANT_TYPE_BASIS, Basis), + (godot_variant_type.GODOT_VARIANT_TYPE_TRANSFORM, Transform), + (godot_variant_type.GODOT_VARIANT_TYPE_COLOR, Color), + (godot_variant_type.GODOT_VARIANT_TYPE_NODE_PATH, NodePath), + (godot_variant_type.GODOT_VARIANT_TYPE_RID, RID), + 
(godot_variant_type.GODOT_VARIANT_TYPE_DICTIONARY, Dictionary), + (godot_variant_type.GODOT_VARIANT_TYPE_ARRAY, Array), + ( + godot_variant_type.GODOT_VARIANT_TYPE_POOL_BYTE_ARRAY, + PoolByteArray, + ), + (godot_variant_type.GODOT_VARIANT_TYPE_POOL_INT_ARRAY, PoolIntArray), + ( + godot_variant_type.GODOT_VARIANT_TYPE_POOL_REAL_ARRAY, + PoolRealArray, + ), + (godot_variant_type.GODOT_VARIANT_TYPE_POOL_STRING_ARRAY, PoolStringArray), + ( + godot_variant_type.GODOT_VARIANT_TYPE_POOL_VECTOR2_ARRAY, + PoolVector2Array, + ), + ( + godot_variant_type.GODOT_VARIANT_TYPE_POOL_VECTOR3_ARRAY, + PoolVector3Array, + ), + ( + godot_variant_type.GODOT_VARIANT_TYPE_POOL_COLOR_ARRAY, + PoolColorArray, + ), +) + + +cdef bint is_pytype_compatible_with_godot_variant(object pytype): + return next((True for _, py in GD_PY_TYPES if py == pytype), issubclass(pytype, Object)) + + +cdef object godot_type_to_pytype(godot_variant_type gdtype): + cdef pytype = next((py for gd, py in GD_PY_TYPES if gd == gdtype), None) + if pytype is None: + warn(f"No Python equivalent for Godot type `{gdtype}`") + return None + + return pytype + + +cdef godot_variant_type pytype_to_godot_type(object pytype): + cdef gdtype = next((gd for gd, py in GD_PY_TYPES if py == pytype), None) + if gdtype is None: + if issubclass(pytype, Object): + return godot_variant_type.GODOT_VARIANT_TYPE_OBJECT + else: + warn(f"No Godot equivalent for Python type `{pytype}`") + return godot_variant_type.GODOT_VARIANT_TYPE_NIL + + return gdtype + + +cdef object godot_variant_to_pyobj(const godot_variant *p_gdvar): + cdef godot_variant_type gdtype = gdapi10.godot_variant_get_type(p_gdvar) + + if gdtype == godot_variant_type.GODOT_VARIANT_TYPE_NIL: + return None + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_BOOL: + return bool(gdapi10.godot_variant_as_bool(p_gdvar)) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_INT: + return int(gdapi10.godot_variant_as_int(p_gdvar)) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_REAL: + return float(gdapi10.godot_variant_as_real(p_gdvar)) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_STRING: + return _godot_variant_to_pyobj_string(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_VECTOR2: + return _godot_variant_to_pyobj_vector2(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_RECT2: + return _godot_variant_to_pyobj_rect2(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_VECTOR3: + return _godot_variant_to_pyobj_vector3(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_TRANSFORM2D: + return _godot_variant_to_pyobj_transform2d(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_PLANE: + return _godot_variant_to_pyobj_plane(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_QUAT: + return _godot_variant_to_pyobj_quat(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_AABB: + return _godot_variant_to_pyobj_aabb(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_BASIS: + return _godot_variant_to_pyobj_basis(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_TRANSFORM: + return _godot_variant_to_pyobj_transform(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_COLOR: + return _godot_variant_to_pyobj_color(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_NODE_PATH: + return _godot_variant_to_pyobj_node_path(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_RID: + return _godot_variant_to_pyobj_rid(p_gdvar) 
+ + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_OBJECT: + return _godot_variant_to_pyobj_object(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_DICTIONARY: + return _godot_variant_to_pyobj_dictionary(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_ARRAY: + return _godot_variant_to_pyobj_array(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_POOL_BYTE_ARRAY: + return _godot_variant_to_pyobj_pool_byte_array(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_POOL_INT_ARRAY: + return _godot_variant_to_pyobj_pool_int_array(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_POOL_REAL_ARRAY: + return _godot_variant_to_pyobj_pool_real_array(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_POOL_STRING_ARRAY: + return _godot_variant_to_pyobj_pool_string_array(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_POOL_VECTOR2_ARRAY: + return _godot_variant_to_pyobj_pool_vector2_array(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_POOL_VECTOR3_ARRAY: + return _godot_variant_to_pyobj_pool_vector3_array(p_gdvar) + + elif gdtype == godot_variant_type.GODOT_VARIANT_TYPE_POOL_COLOR_ARRAY: + return _godot_variant_to_pyobj_pool_color_array(p_gdvar) + + else: + warn(f"Unknown Variant type `{gdtype}` (this should never happen !)") + return None + + +cdef inline GDString _godot_variant_to_pyobj_string(const godot_variant *p_gdvar): + cdef GDString ret = GDString.__new__(GDString) + ret._gd_data = gdapi10.godot_variant_as_string(p_gdvar) + return ret + + +cdef inline Vector2 _godot_variant_to_pyobj_vector2(const godot_variant *p_gdvar): + cdef Vector2 ret = Vector2.__new__(Vector2) + ret._gd_data = gdapi10.godot_variant_as_vector2(p_gdvar) + return ret + + +cdef inline Rect2 _godot_variant_to_pyobj_rect2(const godot_variant *p_gdvar): + cdef Rect2 ret = Rect2.__new__(Rect2) + ret._gd_data = gdapi10.godot_variant_as_rect2(p_gdvar) + return ret + + +cdef inline Vector3 _godot_variant_to_pyobj_vector3(const godot_variant *p_gdvar): + cdef Vector3 ret = Vector3.__new__(Vector3) + ret._gd_data = gdapi10.godot_variant_as_vector3(p_gdvar) + return ret + + +cdef inline Transform2D _godot_variant_to_pyobj_transform2d(const godot_variant *p_gdvar): + cdef Transform2D ret = Transform2D.__new__(Transform2D) + ret._gd_data = gdapi10.godot_variant_as_transform2d(p_gdvar) + return ret + + +cdef inline Transform _godot_variant_to_pyobj_transform(const godot_variant *p_gdvar): + cdef Transform ret = Transform.__new__(Transform) + ret._gd_data = gdapi10.godot_variant_as_transform(p_gdvar) + return ret + + +cdef inline Plane _godot_variant_to_pyobj_plane(const godot_variant *p_gdvar): + cdef Plane ret = Plane.__new__(Plane) + ret._gd_data = gdapi10.godot_variant_as_plane(p_gdvar) + return ret + + +cdef inline Quat _godot_variant_to_pyobj_quat(const godot_variant *p_gdvar): + cdef Quat ret = Quat.__new__(Quat) + ret._gd_data = gdapi10.godot_variant_as_quat(p_gdvar) + return ret + + +cdef inline AABB _godot_variant_to_pyobj_aabb(const godot_variant *p_gdvar): + cdef AABB ret = AABB.__new__(AABB) + ret._gd_data = gdapi10.godot_variant_as_aabb(p_gdvar) + return ret + + +cdef inline Basis _godot_variant_to_pyobj_basis(const godot_variant *p_gdvar): + cdef Basis ret = Basis.__new__(Basis) + ret._gd_data = gdapi10.godot_variant_as_basis(p_gdvar) + return ret + + +cdef inline Color _godot_variant_to_pyobj_color(const godot_variant *p_gdvar): + cdef Color ret = Color.__new__(Color) + ret._gd_data = 
gdapi10.godot_variant_as_color(p_gdvar)
+    return ret
+
+
+cdef inline NodePath _godot_variant_to_pyobj_node_path(const godot_variant *p_gdvar):
+    cdef NodePath ret = NodePath.__new__(NodePath)
+    ret._gd_data = gdapi10.godot_variant_as_node_path(p_gdvar)
+    return ret
+
+
+cdef inline RID _godot_variant_to_pyobj_rid(const godot_variant *p_gdvar):
+    cdef RID ret = RID.__new__(RID)
+    ret._gd_data = gdapi10.godot_variant_as_rid(p_gdvar)
+    return ret
+
+
+cdef inline Object _godot_variant_to_pyobj_object(const godot_variant *p_gdvar):
+    # TODO: This conversion relies on Godot strings and a lookup into the
+    # bindings module; wouldn't it be better to create an `ObjectFromVariant`
+    # lazy class instead ?
+    return Object.cast_from_variant(p_gdvar)
+
+
+cdef inline Dictionary _godot_variant_to_pyobj_dictionary(const godot_variant *p_gdvar):
+    cdef Dictionary d = Dictionary.__new__(Dictionary)
+    d._gd_data = gdapi10.godot_variant_as_dictionary(p_gdvar)
+    return d
+
+
+cdef inline Array _godot_variant_to_pyobj_array(const godot_variant *p_gdvar):
+    cdef Array a = Array.__new__(Array)
+    a._gd_data = gdapi10.godot_variant_as_array(p_gdvar)
+    return a
+
+
+cdef inline PoolByteArray _godot_variant_to_pyobj_pool_byte_array(const godot_variant *p_gdvar):
+    cdef PoolByteArray a = PoolByteArray.__new__(PoolByteArray)
+    a._gd_data = gdapi10.godot_variant_as_pool_byte_array(p_gdvar)
+    return a
+
+
+cdef inline PoolIntArray _godot_variant_to_pyobj_pool_int_array(const godot_variant *p_gdvar):
+    cdef PoolIntArray a = PoolIntArray.__new__(PoolIntArray)
+    a._gd_data = gdapi10.godot_variant_as_pool_int_array(p_gdvar)
+    return a
+
+
+cdef inline PoolRealArray _godot_variant_to_pyobj_pool_real_array(const godot_variant *p_gdvar):
+    cdef PoolRealArray a = PoolRealArray.__new__(PoolRealArray)
+    a._gd_data = gdapi10.godot_variant_as_pool_real_array(p_gdvar)
+    return a
+
+
+cdef inline PoolStringArray _godot_variant_to_pyobj_pool_string_array(const godot_variant *p_gdvar):
+    cdef PoolStringArray a = PoolStringArray.__new__(PoolStringArray)
+    a._gd_data = gdapi10.godot_variant_as_pool_string_array(p_gdvar)
+    return a
+
+
+cdef inline PoolVector2Array _godot_variant_to_pyobj_pool_vector2_array(const godot_variant *p_gdvar):
+    cdef PoolVector2Array a = PoolVector2Array.__new__(PoolVector2Array)
+    a._gd_data = gdapi10.godot_variant_as_pool_vector2_array(p_gdvar)
+    return a
+
+
+cdef inline PoolVector3Array _godot_variant_to_pyobj_pool_vector3_array(const godot_variant *p_gdvar):
+    cdef PoolVector3Array a = PoolVector3Array.__new__(PoolVector3Array)
+    a._gd_data = gdapi10.godot_variant_as_pool_vector3_array(p_gdvar)
+    return a
+
+
+cdef inline PoolColorArray _godot_variant_to_pyobj_pool_color_array(const godot_variant *p_gdvar):
+    cdef PoolColorArray a = PoolColorArray.__new__(PoolColorArray)
+    a._gd_data = gdapi10.godot_variant_as_pool_color_array(p_gdvar)
+    return a
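+
+
+# Editor's illustration (commented sketch): the GD_PY_TYPES table above drives
+# both directions of the type mapping used below, e.g.:
+#
+#   godot_type_to_pytype(GODOT_VARIANT_TYPE_VECTOR2)  # -> Vector2
+#   pytype_to_godot_type(Vector2)                     # -> GODOT_VARIANT_TYPE_VECTOR2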
+
+
+cdef bint pyobj_to_godot_variant(object pyobj, godot_variant *p_var):
+    if pyobj is None:
+        gdapi10.godot_variant_new_nil(p_var)
+    elif isinstance(pyobj, bool):
+        gdapi10.godot_variant_new_bool(p_var, pyobj)
+    elif isinstance(pyobj, int):
+        gdapi10.godot_variant_new_int(p_var, pyobj)
+    elif isinstance(pyobj, float):
+        gdapi10.godot_variant_new_real(p_var, pyobj)
+    elif isinstance(pyobj, str):
+        _pyobj_to_godot_variant_convert_string(pyobj, p_var)
+    elif isinstance(pyobj, GDString):
+        gdapi10.godot_variant_new_string(p_var, &(<GDString>pyobj)._gd_data)
+    elif isinstance(pyobj, Vector2):
+        gdapi10.godot_variant_new_vector2(p_var, &(<Vector2>pyobj)._gd_data)
+    elif isinstance(pyobj, Vector3):
+        gdapi10.godot_variant_new_vector3(p_var, &(<Vector3>pyobj)._gd_data)
+    elif isinstance(pyobj, Plane):
+        gdapi10.godot_variant_new_plane(p_var, &(<Plane>pyobj)._gd_data)
+    elif isinstance(pyobj, Quat):
+        gdapi10.godot_variant_new_quat(p_var, &(<Quat>pyobj)._gd_data)
+    elif isinstance(pyobj, AABB):
+        gdapi10.godot_variant_new_aabb(p_var, &(<AABB>pyobj)._gd_data)
+    elif isinstance(pyobj, Basis):
+        gdapi10.godot_variant_new_basis(p_var, &(<Basis>pyobj)._gd_data)
+    elif isinstance(pyobj, Color):
+        gdapi10.godot_variant_new_color(p_var, &(<Color>pyobj)._gd_data)
+    elif isinstance(pyobj, NodePath):
+        gdapi10.godot_variant_new_node_path(p_var, &(<NodePath>pyobj)._gd_data)
+    elif isinstance(pyobj, RID):
+        gdapi10.godot_variant_new_rid(p_var, &(<RID>pyobj)._gd_data)
+    elif isinstance(pyobj, Rect2):
+        gdapi10.godot_variant_new_rect2(p_var, &(<Rect2>pyobj)._gd_data)
+    elif isinstance(pyobj, Transform2D):
+        gdapi10.godot_variant_new_transform2d(p_var, &(<Transform2D>pyobj)._gd_data)
+    elif isinstance(pyobj, Transform):
+        gdapi10.godot_variant_new_transform(p_var, &(<Transform>pyobj)._gd_data)
+    elif isinstance(pyobj, Dictionary):
+        gdapi10.godot_variant_new_dictionary(p_var, &(<Dictionary>pyobj)._gd_data)
+    elif isinstance(pyobj, Array):
+        gdapi10.godot_variant_new_array(p_var, &(<Array>pyobj)._gd_data)
+    elif isinstance(pyobj, PoolByteArray):
+        gdapi10.godot_variant_new_pool_byte_array(p_var, &(<PoolByteArray>pyobj)._gd_data)
+    elif isinstance(pyobj, PoolIntArray):
+        gdapi10.godot_variant_new_pool_int_array(p_var, &(<PoolIntArray>pyobj)._gd_data)
+    elif isinstance(pyobj, PoolRealArray):
+        gdapi10.godot_variant_new_pool_real_array(p_var, &(<PoolRealArray>pyobj)._gd_data)
+    elif isinstance(pyobj, PoolStringArray):
+        gdapi10.godot_variant_new_pool_string_array(p_var, &(<PoolStringArray>pyobj)._gd_data)
+    elif isinstance(pyobj, PoolVector2Array):
+        gdapi10.godot_variant_new_pool_vector2_array(p_var, &(<PoolVector2Array>pyobj)._gd_data)
+    elif isinstance(pyobj, PoolVector3Array):
+        gdapi10.godot_variant_new_pool_vector3_array(p_var, &(<PoolVector3Array>pyobj)._gd_data)
+    elif isinstance(pyobj, PoolColorArray):
+        gdapi10.godot_variant_new_pool_color_array(p_var, &(<PoolColorArray>pyobj)._gd_data)
+    elif isinstance(pyobj, Object):
+        gdapi10.godot_variant_new_object(p_var, (<Object>pyobj)._gd_ptr)
+    else:
+        warn(f"Cannot convert `{type(pyobj)}` to Godot's Variant")
+        gdapi10.godot_variant_new_nil(p_var)
+        return False
+    return True
+
+
+# Needed to define gdstr in its own scope
+cdef inline void _pyobj_to_godot_variant_convert_string(object pyobj, godot_variant *p_var):
+    cdef godot_string gdstr
+    pyobj_to_godot_string(pyobj, &gdstr)
+    try:
+        gdapi10.godot_variant_new_string(p_var, &gdstr)
+    finally:
+        gdapi10.godot_string_destroy(&gdstr)
+
+
+cdef GDString ensure_is_gdstring(object gdstring_or_pystr):
+    try:
+        return <GDString?>gdstring_or_pystr
+    except TypeError:
+        try:
+            return GDString(gdstring_or_pystr)
+        except TypeError:
+            raise TypeError(f"Invalid value {gdstring_or_pystr!r}, must be str or GDString")
+
+
+cdef NodePath ensure_is_nodepath(object nodepath_or_pystr):
+    try:
+        return <NodePath?>nodepath_or_pystr
+    except TypeError:
+        try:
+            return NodePath(nodepath_or_pystr)
+        except TypeError:
+            raise TypeError(f"Invalid value {nodepath_or_pystr!r}, must be str or NodePath")
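+
+
+# Editor's usage sketch (illustration only, Cython context assumed):
+#
+#   cdef godot_variant v
+#   pyobj_to_godot_variant(Vector2(1, 2), &v)           # Python -> Variant
+#   assert godot_variant_to_pyobj(&v) == Vector2(1, 2)  # Variant -> Python
+#   gdapi10.godot_variant_destroy(&v)                   # caller owns the variant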
diff --git a/pythonscript/godot/_hazmat/gdapi.pxd b/pythonscript/godot/_hazmat/gdapi.pxd
new file mode 100644
index 0000000..0bb063a
--- /dev/null
+++ b/pythonscript/godot/_hazmat/gdapi.pxd
@@ -0,0 +1,38 @@
+from godot._hazmat.gdnative_api_struct cimport (
+    godot_gdnative_core_api_struct,
+    godot_gdnative_core_1_1_api_struct,
+    godot_gdnative_core_1_2_api_struct,
+    godot_gdnative_ext_nativescript_api_struct,
+    godot_gdnative_ext_pluginscript_api_struct,
+    godot_gdnative_ext_android_api_struct,
+    godot_gdnative_ext_arvr_api_struct,
+)
+
+
+cdef extern from * nogil:
+    # Global variables defined in pythonscript.c
+    # It's just easier to inline the definitions here than to use a header file
+    # and have to tweak compile flags.
+    """
+    #include <gdnative_api_struct.gen.h>
+    #ifdef _WIN32
+    # define PYTHONSCRIPT_IMPORT __declspec(dllimport)
+    #else
+    # define PYTHONSCRIPT_IMPORT
+    #endif
+    PYTHONSCRIPT_IMPORT extern const godot_gdnative_core_api_struct *pythonscript_gdapi10;
+    PYTHONSCRIPT_IMPORT extern const godot_gdnative_core_1_1_api_struct *pythonscript_gdapi11;
+    PYTHONSCRIPT_IMPORT extern const godot_gdnative_core_1_2_api_struct *pythonscript_gdapi12;
+    PYTHONSCRIPT_IMPORT extern const godot_gdnative_ext_nativescript_api_struct *pythonscript_gdapi_ext_nativescript;
+    PYTHONSCRIPT_IMPORT extern const godot_gdnative_ext_pluginscript_api_struct *pythonscript_gdapi_ext_pluginscript;
+    PYTHONSCRIPT_IMPORT extern const godot_gdnative_ext_android_api_struct *pythonscript_gdapi_ext_android;
+    PYTHONSCRIPT_IMPORT extern const godot_gdnative_ext_arvr_api_struct *pythonscript_gdapi_ext_arvr;
+    """
+
+    cdef const godot_gdnative_core_api_struct *pythonscript_gdapi10
+    cdef const godot_gdnative_core_1_1_api_struct *pythonscript_gdapi11
+    cdef const godot_gdnative_core_1_2_api_struct *pythonscript_gdapi12
+    cdef const godot_gdnative_ext_nativescript_api_struct *pythonscript_gdapi_ext_nativescript
+    cdef const godot_gdnative_ext_pluginscript_api_struct *pythonscript_gdapi_ext_pluginscript
+    cdef const godot_gdnative_ext_android_api_struct *pythonscript_gdapi_ext_android
+    cdef const godot_gdnative_ext_arvr_api_struct *pythonscript_gdapi_ext_arvr
diff --git a/pythonscript/godot/_hazmat/internal.pxd b/pythonscript/godot/_hazmat/internal.pxd
new file mode 100644
index 0000000..fa67213
--- /dev/null
+++ b/pythonscript/godot/_hazmat/internal.pxd
@@ -0,0 +1,18 @@
+from godot.bindings cimport Object
+
+
+cdef bint __pythonscript_verbose
+
+
+cdef inline bint get_pythonscript_verbose():
+    return __pythonscript_verbose
+
+
+cdef inline void set_pythonscript_verbose(bint status):
+    global __pythonscript_verbose
+    __pythonscript_verbose = status
+
+
+cdef object get_exposed_class(str module_name)
+cdef void set_exposed_class(object cls)
+cdef void destroy_exposed_class(object cls)
diff --git a/pythonscript/godot/_hazmat/internal.pyx b/pythonscript/godot/_hazmat/internal.pyx
new file mode 100644
index 0000000..990dc66
--- /dev/null
+++ b/pythonscript/godot/_hazmat/internal.pyx
@@ -0,0 +1,86 @@
+import threading
+
+from godot.bindings cimport Object
+
+
+cdef bint __pythonscript_verbose = False
+
+
+cdef class ModExposedClass:
+    cdef object kls
+    cdef int refcount
+
+    def __init__(self, object kls):
+        self.kls = kls
+        self.refcount = 1
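+
+
+# Editor's illustration (commented sketch): the intended refcounting flow for
+# a module "m" exposing class A, across a script reload:
+#
+#   set_exposed_class(A)       # m -> ModExposedClass(A), refcount=1
+#   set_exposed_class(A2)      # reload: kls=A2, refcount=2 (init comes before finish)
+#   destroy_exposed_class(A)   # refcount=1, entry kept alive
+#   destroy_exposed_class(A2)  # refcount==1 branch: entry dropped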
+
+
+# /!\ Those containers are strictly private /!\
+# They contain class objects that are referenced from Godot without refcounting,
+# so dropping an item from them would likely cause a segfault !
+cdef dict __modules_with_exposed_class = {}
+cdef list __all_exposed_classes = []
+cdef object __exposed_classes_lock = threading.Lock()
+
+
+cdef object get_exposed_class(str module_name):
+    with __exposed_classes_lock:
+        try:
+            return (<ModExposedClass>__modules_with_exposed_class[module_name]).kls
+        except KeyError:
+            return None
+
+
+cdef void set_exposed_class(object cls):
+    cdef ModExposedClass mod
+    cdef str modname = cls.__module__
+
+    # Use a threadlock to avoid data races in case Godot loads/unloads scripts
+    # in multiple threads
+    with __exposed_classes_lock:
+
+        # We must keep track of reference counts for the module: when reloading
+        # a script, Godot calls `pythonscript_script_init` BEFORE
+        # `pythonscript_script_finish` (this happens because Godot can create
+        # multiple PluginScript instances for the same resource).
+        try:
+            mod = __modules_with_exposed_class[modname]
+        except KeyError:
+            __modules_with_exposed_class[modname] = ModExposedClass(cls)
+        else:
+            # Replace the old class, but bump the refcount so the entry
+            # survives the upcoming `pythonscript_script_finish` call.
+            mod.kls = cls
+            mod.refcount += 1
+
+        # Sometimes Godot fails to reload a script; when this happens we end up
+        # with a stale PyObject* for the class, which is then garbage collected
+        # by Python, so the next time a script is instantiated from Godot we end
+        # up with a segfault :(
+        # To avoid this we keep a reference to all the classes forever.
+        # TODO: This may be troublesome when running the Godot editor given the
+        # classes are reloaded each time they are modified, hence leading to a
+        # small memory leak...
+        __all_exposed_classes.append(cls)
+
+
+cdef void destroy_exposed_class(object cls):
+    cdef ModExposedClass mod
+    cdef str modname = cls.__module__
+
+    # Use a threadlock to avoid data races in case Godot loads/unloads scripts
+    # in multiple threads
+    with __exposed_classes_lock:
+
+        try:
+            mod = __modules_with_exposed_class[modname]
+        except KeyError:
+            print(f'Error: class module is already destroyed: {modname}')
+        else:
+            if mod.refcount == 1:
+                del __modules_with_exposed_class[modname]
+                # Not safe to ever get rid of all references...
+                # see: https://github.com/touilleMan/godot-python/issues/170
+                # and: https://github.com/godotengine/godot/issues/10946
+                # Sometimes script reloading craps out, leaving dangling references:
+                # __all_exposed_classes.remove(modname, cls)
+            else:
+                mod.refcount -= 1
diff --git a/pythonscript/godot/_version.py b/pythonscript/godot/_version.py
new file mode 100644
index 0000000..efee59b
--- /dev/null
+++ b/pythonscript/godot/_version.py
@@ -0,0 +1 @@
+__version__ = "0.50.0+dev"
diff --git a/pythonscript/godot/globals.py b/pythonscript/godot/globals.py
new file mode 100644
index 0000000..6d28681
--- /dev/null
+++ b/pythonscript/godot/globals.py
@@ -0,0 +1,12 @@
+from _godot import __global_constants
+
+
+def __getattr__(name):
+    try:
+        return __global_constants[name]
+    except KeyError:
+        raise AttributeError
+
+
+def __dir__():
+    return list(__global_constants.keys())
diff --git a/pythonscript/godot/hazmat.pxd b/pythonscript/godot/hazmat.pxd
new file mode 100644
index 0000000..c689903
--- /dev/null
+++ b/pythonscript/godot/hazmat.pxd
@@ -0,0 +1,19 @@
+# Public low-level APIs are exposed here
+
+from godot._hazmat cimport gdnative_api_struct
+# Re-expose Godot API with better names
+from godot._hazmat.gdapi cimport (
+    pythonscript_gdapi10 as gdapi10,
+    pythonscript_gdapi11 as gdapi11,
+    pythonscript_gdapi12 as gdapi12,
+    pythonscript_gdapi_ext_nativescript as gdapi_ext_nativescript,
+    pythonscript_gdapi_ext_pluginscript as gdapi_ext_pluginscript,
+    pythonscript_gdapi_ext_android as gdapi_ext_android,
+    pythonscript_gdapi_ext_arvr as gdapi_ext_arvr,
+)
+from godot._hazmat.conversion cimport (
+    godot_string_to_pyobj,
+    pyobj_to_godot_string,
+    godot_variant_to_pyobj,
+    pyobj_to_godot_variant,
+)
diff --git a/pythonscript/godot/tags.pyx b/pythonscript/godot/tags.pyx
new file mode 100644
index 0000000..cf9fead
--- /dev/null
+++ b/pythonscript/godot/tags.pyx
@@ -0,0 +1,367 @@
+import builtins
+import enum
+
+from godot._hazmat.gdnative_api_struct cimport (
+    godot_method_rpc_mode,
+    godot_property_usage_flags,
+    godot_property_hint,
+    godot_variant,
+)
+from godot._hazmat.gdapi cimport pythonscript_gdapi10 as gdapi10
+from godot._hazmat.conversion cimport (
+    is_pytype_compatible_with_godot_variant,
+    pyobj_to_godot_variant,
+    godot_variant_to_pyobj,
+)
+from godot._hazmat.internal cimport get_exposed_class, set_exposed_class
+from godot.builtins cimport Array, Dictionary, GDString
+from godot.bindings cimport Object, Resource
+
+
+# Make Godot enums accessible from Python at runtime
+
+
+class MethodRPCMode(enum.IntEnum):
+    DISABLED = godot_method_rpc_mode.GODOT_METHOD_RPC_MODE_DISABLED
+    REMOTE = godot_method_rpc_mode.GODOT_METHOD_RPC_MODE_REMOTE
+    MASTER = godot_method_rpc_mode.GODOT_METHOD_RPC_MODE_MASTER
+    PUPPET = godot_method_rpc_mode.GODOT_METHOD_RPC_MODE_PUPPET
+    SLAVE = godot_method_rpc_mode.GODOT_METHOD_RPC_MODE_SLAVE
+    REMOTESYNC = godot_method_rpc_mode.GODOT_METHOD_RPC_MODE_REMOTESYNC
+    SYNC = godot_method_rpc_mode.GODOT_METHOD_RPC_MODE_SYNC
+    MASTERSYNC = godot_method_rpc_mode.GODOT_METHOD_RPC_MODE_MASTERSYNC
+    PUPPETSYNC = godot_method_rpc_mode.GODOT_METHOD_RPC_MODE_PUPPETSYNC
+
+
+class PropertyHint(enum.IntEnum):
+    NONE = godot_property_hint.GODOT_PROPERTY_HINT_NONE
+    RANGE = godot_property_hint.GODOT_PROPERTY_HINT_RANGE
+    EXP_RANGE = godot_property_hint.GODOT_PROPERTY_HINT_EXP_RANGE
+    ENUM = godot_property_hint.GODOT_PROPERTY_HINT_ENUM
+    EXP_EASING = godot_property_hint.GODOT_PROPERTY_HINT_EXP_EASING
+    LENGTH = 
godot_property_hint.GODOT_PROPERTY_HINT_LENGTH + SPRITE_FRAME = godot_property_hint.GODOT_PROPERTY_HINT_SPRITE_FRAME + KEY_ACCEL = godot_property_hint.GODOT_PROPERTY_HINT_KEY_ACCEL + FLAGS = godot_property_hint.GODOT_PROPERTY_HINT_FLAGS + LAYERS_2D_RENDER = godot_property_hint.GODOT_PROPERTY_HINT_LAYERS_2D_RENDER + LAYERS_2D_PHYSICS = godot_property_hint.GODOT_PROPERTY_HINT_LAYERS_2D_PHYSICS + LAYERS_3D_RENDER = godot_property_hint.GODOT_PROPERTY_HINT_LAYERS_3D_RENDER + LAYERS_3D_PHYSICS = godot_property_hint.GODOT_PROPERTY_HINT_LAYERS_3D_PHYSICS + FILE = godot_property_hint.GODOT_PROPERTY_HINT_FILE + DIR = godot_property_hint.GODOT_PROPERTY_HINT_DIR + GLOBAL_FILE = godot_property_hint.GODOT_PROPERTY_HINT_GLOBAL_FILE + GLOBAL_DIR = godot_property_hint.GODOT_PROPERTY_HINT_GLOBAL_DIR + RESOURCE_TYPE = godot_property_hint.GODOT_PROPERTY_HINT_RESOURCE_TYPE + MULTILINE_TEXT = godot_property_hint.GODOT_PROPERTY_HINT_MULTILINE_TEXT + PLACEHOLDER_TEXT = godot_property_hint.GODOT_PROPERTY_HINT_PLACEHOLDER_TEXT + COLOR_NO_ALPHA = godot_property_hint.GODOT_PROPERTY_HINT_COLOR_NO_ALPHA + IMAGE_COMPRESS_LOSSY = godot_property_hint.GODOT_PROPERTY_HINT_IMAGE_COMPRESS_LOSSY + IMAGE_COMPRESS_LOSSLESS = godot_property_hint.GODOT_PROPERTY_HINT_IMAGE_COMPRESS_LOSSLESS + OBJECT_ID = godot_property_hint.GODOT_PROPERTY_HINT_OBJECT_ID + TYPE_STRING = godot_property_hint.GODOT_PROPERTY_HINT_TYPE_STRING + NODE_PATH_TO_EDITED_NODE = godot_property_hint.GODOT_PROPERTY_HINT_NODE_PATH_TO_EDITED_NODE + METHOD_OF_VARIANT_TYPE = godot_property_hint.GODOT_PROPERTY_HINT_METHOD_OF_VARIANT_TYPE + METHOD_OF_BASE_TYPE = godot_property_hint.GODOT_PROPERTY_HINT_METHOD_OF_BASE_TYPE + METHOD_OF_INSTANCE = godot_property_hint.GODOT_PROPERTY_HINT_METHOD_OF_INSTANCE + METHOD_OF_SCRIPT = godot_property_hint.GODOT_PROPERTY_HINT_METHOD_OF_SCRIPT + PROPERTY_OF_VARIANT_TYPE = godot_property_hint.GODOT_PROPERTY_HINT_PROPERTY_OF_VARIANT_TYPE + PROPERTY_OF_BASE_TYPE = godot_property_hint.GODOT_PROPERTY_HINT_PROPERTY_OF_BASE_TYPE + PROPERTY_OF_INSTANCE = godot_property_hint.GODOT_PROPERTY_HINT_PROPERTY_OF_INSTANCE + PROPERTY_OF_SCRIPT = godot_property_hint.GODOT_PROPERTY_HINT_PROPERTY_OF_SCRIPT + MAX = godot_property_hint.GODOT_PROPERTY_HINT_MAX + + +class PropertyUsageFlag(enum.IntFlag): + STORAGE = godot_property_usage_flags.GODOT_PROPERTY_USAGE_STORAGE + EDITOR = godot_property_usage_flags.GODOT_PROPERTY_USAGE_EDITOR + NETWORK = godot_property_usage_flags.GODOT_PROPERTY_USAGE_NETWORK + EDITOR_HELPER = godot_property_usage_flags.GODOT_PROPERTY_USAGE_EDITOR_HELPER + CHECKABLE = godot_property_usage_flags.GODOT_PROPERTY_USAGE_CHECKABLE + CHECKED = godot_property_usage_flags.GODOT_PROPERTY_USAGE_CHECKED + INTERNATIONALIZED = godot_property_usage_flags.GODOT_PROPERTY_USAGE_INTERNATIONALIZED + GROUP = godot_property_usage_flags.GODOT_PROPERTY_USAGE_GROUP + CATEGORY = godot_property_usage_flags.GODOT_PROPERTY_USAGE_CATEGORY + STORE_IF_NONZERO = godot_property_usage_flags.GODOT_PROPERTY_USAGE_STORE_IF_NONZERO + STORE_IF_NONONE = godot_property_usage_flags.GODOT_PROPERTY_USAGE_STORE_IF_NONONE + NO_INSTANCE_STATE = godot_property_usage_flags.GODOT_PROPERTY_USAGE_NO_INSTANCE_STATE + RESTART_IF_CHANGED = godot_property_usage_flags.GODOT_PROPERTY_USAGE_RESTART_IF_CHANGED + SCRIPT_VARIABLE = godot_property_usage_flags.GODOT_PROPERTY_USAGE_SCRIPT_VARIABLE + STORE_IF_NULL = godot_property_usage_flags.GODOT_PROPERTY_USAGE_STORE_IF_NULL + ANIMATE_AS_TRIGGER = godot_property_usage_flags.GODOT_PROPERTY_USAGE_ANIMATE_AS_TRIGGER + UPDATE_ALL_IF_MODIFIED = 
godot_property_usage_flags.GODOT_PROPERTY_USAGE_UPDATE_ALL_IF_MODIFIED
+    DEFAULT = godot_property_usage_flags.GODOT_PROPERTY_USAGE_DEFAULT
+    DEFAULT_INTL = godot_property_usage_flags.GODOT_PROPERTY_USAGE_DEFAULT_INTL
+    NOEDITOR = godot_property_usage_flags.GODOT_PROPERTY_USAGE_NOEDITOR
+
+
+# RPC modes are exposed so they can be used both as decorators and as values
+# to pass to ExportedField ;-)
+
+
+class RPCMode:
+    def __init__(self, mod, modname):
+        self.mod = mod
+        self.modname = modname
+
+    def __call__(self, decorated):
+        if isinstance(decorated, ExportedField):
+            decorated.rpc = self.mod
+        else:
+            # Use setattr to avoid name mangling of `__rpc` within this class
+            setattr(decorated, "__rpc", self.mod)
+        # Return the decorated object so this works as a decorator
+        return decorated
+
+    def __repr__(self):
+        return f"<{type(self).__name__}({self.modname!r})>"
+
+
+rpcdisabled = RPCMode(MethodRPCMode.DISABLED, "disabled")
+rpcremote = RPCMode(MethodRPCMode.REMOTE, "remote")
+rpcmaster = RPCMode(MethodRPCMode.MASTER, "master")
+rpcpuppet = RPCMode(MethodRPCMode.PUPPET, "puppet")
+rpcslave = RPCMode(MethodRPCMode.SLAVE, "slave")
+rpcremotesync = RPCMode(MethodRPCMode.REMOTESYNC, "remotesync")
+rpcsync = RPCMode(MethodRPCMode.SYNC, "sync")
+rpcmastersync = RPCMode(MethodRPCMode.MASTERSYNC, "mastersync")
+rpcpuppetsync = RPCMode(MethodRPCMode.PUPPETSYNC, "puppetsync")
+
+
+class SignalField:
+    def __init__(self, name):
+        self.name = name
+
+    def __repr__(self):
+        return f"<{type(self).__name__}({self.name!r})>"
+
+
+def signal(name: str=None):
+    # If the signal name is None, we will determine it later from the name of
+    # the class attribute holding it
+    if name is not None and not isinstance(name, str):
+        raise ValueError("`name` must be a str")
+    return SignalField(name)
+
+
+# TODO: this can be greatly improved to make it more pythonic
+
+
+class ExportedField:
+    def __init__(
+        self,
+        type,
+        default,
+        name,
+        hint,
+        usage,
+        hint_string,
+        rpc,
+    ):
+        self.property = None
+
+        type = GDString if type == str else type
+        type = Array if type == list else type
+        type = Dictionary if type == dict else type
+
+        if not is_pytype_compatible_with_godot_variant(type):
+            raise ValueError(f"{type!r} type value not compatible with Godot")
+
+        cdef godot_variant gd_default
+        if default is not None:
+            # Convert `default` to a Godot-compatible value (e.g. str -> GDString)
+            if not pyobj_to_godot_variant(default, &gd_default):
+                gdapi10.godot_variant_destroy(&gd_default)
+                raise ValueError(f"{default!r} default value not compatible with Godot")
+            default = godot_variant_to_pyobj(&gd_default)
+            gdapi10.godot_variant_destroy(&gd_default)
+
+            if not isinstance(default, type):
+                raise ValueError(f"{default!r} default value not compatible with {type!r} type")
+
+        if issubclass(type, Resource):
+            if hint not in (PropertyHint.NONE, PropertyHint.RESOURCE_TYPE) or hint_string not in ("", type.__name__):
+                raise ValueError("Resource type doesn't support hint/hint_string fields")
+            hint = PropertyHint.RESOURCE_TYPE
+            hint_string = type.__name__
+            type = Object
+
+        self.type = type
+        self.default = default
+        self.name = name
+        self.hint = hint
+        self.usage = usage
+        self.hint_string = hint_string
+        if isinstance(rpc, RPCMode):
+            self.rpc = rpc.mod
+        else:
+            self.rpc = rpc
+
+    def __repr__(self):
+        return f"<{type(self).__name__}(type={self.type!r}, default={self.default!r})>"
+
+    def _copy(self):
+        return ExportedField(
+            type=self.type,
+            default=self.default,
+            name=self.name,
+            hint=self.hint,
+            usage=self.usage,
+            hint_string=self.hint_string,
+            rpc=self.rpc,
+        )
+
+    def __call__(self, decorated):
+        if self.default is not None:
+            raise ValueError("export should not define a default attribute when used as a decorator")
+
+        # This object is used as a decorator
+        if not callable(decorated) and not isinstance(decorated, builtins.property):
+            raise ValueError("@export should decorate a function or property.")
+
+        updated = self._copy()
+
+        # It's possible `decorated` has already been passed through an rpc decorator
+        rpc = getattr(decorated, "__rpc", None)
+        if rpc:
+            updated.rpc = rpc
+        updated.property = decorated
+        return updated
+
+    def setter(self, setfunc):
+        if not self.property:
+            raise ValueError(
+                "Cannot use setter attribute before defining the getter !"
+            )
+
+        updated = self._copy()
+        updated.property = self.property.setter(setfunc)
+        return updated
+
+
+def export(
+    type,
+    default=None,
+    hint: PropertyHint=PropertyHint.NONE,
+    usage: PropertyUsageFlag=PropertyUsageFlag.DEFAULT,
+    hint_string: str="",
+    rpc: MethodRPCMode=MethodRPCMode.DISABLED
+    ):
+    """
+    Decorator used to mark a class attribute as being exported to Godot
+    (hence making it readable/writable from Godot)
+
+    usage::
+
+        @exposed
+        class CustomObject(godot.bindings.Object):
+            a = export(str)  # Expose attribute
+            b = export(int, default=42)
+
+            @export(int)  # Expose property
+            @property
+            def c(self):
+                return 42
+
+            @export(str)  # Expose method
+            def d(self):
+                return "foo"
+    """
+    return ExportedField(
+        type=type,
+        default=default,
+        name=None,
+        hint=hint,
+        usage=usage,
+        hint_string=hint_string,
+        rpc=rpc,
+    )
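+
+
+# Editor's illustration (commented sketch, not part of the module): an exported
+# read/write property combines `export` with the `setter` method above:
+#
+#   @exposed
+#   class MyNode(godot.bindings.Node):
+#       @export(int)
+#       @property
+#       def speed(self):
+#           return self._speed
+#
+#       @speed.setter
+#       def speed(self, value):
+#           self._speed = value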
+
+
+def exposed(cls=None, tool=False):
+    """
+    Decorator used to mark a class as being exposed to Godot (hence making
+    it available from other Godot languages and from the Godot IDE).
+    Given Godot identifies classes by their file paths, only a single
+    class can be marked with this decorator per file.
+
+    usage::
+
+        @exposed
+        class CustomObject(godot.bindings.Object):
+            pass
+    """
+    def wrapper(cls):
+        if not issubclass(cls, Object):
+            raise ValueError(
+                f"{cls!r} must inherit from a Godot (e.g. `godot.bindings.Node`) "
+                "class to be marked as @exposed"
+            )
+
+        existing_cls_for_module = get_exposed_class(cls.__module__)
+        if existing_cls_for_module:
+            raise ValueError(
+                "Only a single class can be marked as @exposed per module"
+                f" (already got {existing_cls_for_module!r})"
+            )
+
+        cls.__tool = tool
+        cls.__exposed_python_class = True
+        cls.__exported = {}
+
+        # Retrieve the parents' exported stuff
+        for b in cls.__bases__:
+            cls.__exported.update(getattr(b, "__exported", {}))
+
+        init_func_code = "def __init__(self):\n    pass\n"
+
+        # Collect exported stuff: attributes (marked with @export), properties,
+        # signals, and methods
+        for k, v in cls.__dict__.items():
+            if isinstance(v, ExportedField):
+                cls.__exported[k] = v
+                v.name = k  # hard to bind this earlier...
+                if v.property:
+                    # If export has been used to decorate a property, expose it
+                    # in the generated class
+                    setattr(cls, k, v.property)
+                else:
+                    # Otherwise, the value must be initialized as part of __init__
+                    if v.default is None or isinstance(v.default, (int, float, bool)):
+                        init_func_code += f"    self.{k} = {repr(v.default)}\n"
+                    else:
+                        init_func_code += f"    self.{k} = self.__exported['{k}'].default\n"
+            elif isinstance(v, SignalField):
+                v.name = v.name or k
+                cls.__exported[v.name] = v
+                setattr(cls, k, v)
+            elif callable(v):
+                cls.__exported[k] = v
+
+        # Overwrite the parent's __init__ to avoid creating a Godot object, given
+        # exposed scripts are always initialized with an existing Godot object.
+        # On top of that, we must initialize the attributes defined in the class
+        # and its parents.
+        g = {}
+        exec(init_func_code, g)
+        cls.__init__ = g["__init__"]
+        # Also overwrite the parent's `new`, otherwise we would return an instance
+        # of a Godot class without our script attached to it...
+        @classmethod
+        def new(cls):
+            raise NotImplementedError("Instantiating a Python script from Python is not implemented yet :'(")
+            # try:
+            #     ptr = cls._new()
+            # except AttributeError:
+            #     # It's also possible we try to instantiate a singleton, but a better
+            #     # message will be provided anyway if the user tries the provided hint
+            #     raise RuntimeError(f"Refcounted Godot object must be created with `{ cls.__name__ }()`")
+            # instance = cls._from_ptr(ptr)
+            # # TODO: We should generate a Resource instance containing the script
+            # # and attach it to the main class here.
+            # # instance.set_script(???)
+        cls.new = new
+
+        set_exposed_class(cls)
+        return cls
+
+    if cls is not None:
+        return wrapper(cls)
+
+    else:
+        return wrapper
diff --git a/pythonscript/pythonscript.c b/pythonscript/pythonscript.c
new file mode 100644
index 0000000..a49526b
--- /dev/null
+++ b/pythonscript/pythonscript.c
@@ -0,0 +1,268 @@
+/*
+ * This file gets compiled as a shared library that acts as the entry point
+ * to the pythonscript plugin.
+ * It should be loaded by Godot's GDNative system (see the `pythonscript.gdnlib`
+ * file in the example/test projects).
+ * As part of the loading, GDNative will call the `godot_gdnative_init`
+ * function, which will in turn initialize the CPython interpreter and then
+ * register Python as a new language using Godot's Pluginscript system.
+ */
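+/*
+ * Editor's illustration: a minimal `pythonscript.gdnlib` as referenced above
+ * might look like this (entry paths are hypothetical):
+ *
+ *   [general]
+ *   singleton=false
+ *   load_once=true
+ *
+ *   [entry]
+ *   X11.64="res://pythonscript/x11-64/libpythonscript.so"
+ *   Windows.64="res://pythonscript/windows-64/pythonscript.dll"
+ */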
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+
+#ifndef _WIN32
+#include <dlfcn.h>
+#endif
+#include <wchar.h>
+
+#include <gdnative_api_struct.gen.h>
+
+#include "_godot_api.h"
+
+
+static const char *PYTHONSCRIPT_RECOGNIZED_EXTENSIONS[] = { "py", "pyc", "pyo", "pyd", 0 };
+static const char *PYTHONSCRIPT_RESERVED_WORDS[] = {
+    "False",
+    "None",
+    "True",
+    "and",
+    "as",
+    "assert",
+    "break",
+    "class",
+    "continue",
+    "def",
+    "del",
+    "elif",
+    "else",
+    "except",
+    "finally",
+    "for",
+    "from",
+    "global",
+    "if",
+    "import",
+    "in",
+    "is",
+    "lambda",
+    "nonlocal",
+    "not",
+    "or",
+    "pass",
+    "raise",
+    "return",
+    "try",
+    "while",
+    "with",
+    "yield",
+    0
+};
+static const char *PYTHONSCRIPT_COMMENT_DELIMITERS[] = { "#", "\"\"\"\"\"\"", 0 };
+static const char *PYTHONSCRIPT_STRING_DELIMITERS[] = { "\" \"", "' '", 0 };
+static godot_pluginscript_language_desc desc;
+static PyThreadState *gilstate = NULL;
+
+
+/*
+ * Global variables exposing the Godot API to the godot.hazmat cython module.
+ * Hence they must be initialized before loading the `_godot`/`godot` modules
+ * (which both depend on `godot.hazmat`).
+ */
+#ifdef _WIN32
+# define PYTHONSCRIPT_EXPORT __declspec(dllexport)
+#else
+# define PYTHONSCRIPT_EXPORT
+#endif
+PYTHONSCRIPT_EXPORT const godot_gdnative_core_api_struct *pythonscript_gdapi10 = NULL;
+PYTHONSCRIPT_EXPORT const godot_gdnative_core_1_1_api_struct *pythonscript_gdapi11 = NULL;
+PYTHONSCRIPT_EXPORT const godot_gdnative_core_1_2_api_struct *pythonscript_gdapi12 = NULL;
+PYTHONSCRIPT_EXPORT const godot_gdnative_ext_nativescript_api_struct *pythonscript_gdapi_ext_nativescript = NULL;
+PYTHONSCRIPT_EXPORT const godot_gdnative_ext_pluginscript_api_struct *pythonscript_gdapi_ext_pluginscript = NULL;
+PYTHONSCRIPT_EXPORT const godot_gdnative_ext_android_api_struct *pythonscript_gdapi_ext_android = NULL;
+PYTHONSCRIPT_EXPORT const godot_gdnative_ext_arvr_api_struct *pythonscript_gdapi_ext_arvr = NULL;
+
+
+static void _register_gdapi(const godot_gdnative_init_options *options) {
+    pythonscript_gdapi10 = (const godot_gdnative_core_api_struct *)options->api_struct;
+    if (pythonscript_gdapi10->next) {
+        pythonscript_gdapi11 = (const godot_gdnative_core_1_1_api_struct *)pythonscript_gdapi10->next;
+        if (pythonscript_gdapi11->next) {
+            pythonscript_gdapi12 = (const godot_gdnative_core_1_2_api_struct *)pythonscript_gdapi11->next;
+        }
+    }
+
+    for (unsigned int i = 0; i < pythonscript_gdapi10->num_extensions; i++) {
+        const godot_gdnative_api_struct *ext = pythonscript_gdapi10->extensions[i];
+        switch (ext->type) {
+            case GDNATIVE_EXT_NATIVESCRIPT:
+                pythonscript_gdapi_ext_nativescript = (const godot_gdnative_ext_nativescript_api_struct *)ext;
+                break;
+            case GDNATIVE_EXT_PLUGINSCRIPT:
+                pythonscript_gdapi_ext_pluginscript = (const godot_gdnative_ext_pluginscript_api_struct *)ext;
+                break;
+            case GDNATIVE_EXT_ANDROID:
+                pythonscript_gdapi_ext_android = (const godot_gdnative_ext_android_api_struct *)ext;
+                break;
+            case GDNATIVE_EXT_ARVR:
+                pythonscript_gdapi_ext_arvr = (const godot_gdnative_ext_arvr_api_struct *)ext;
+                break;
+            default:
+                break;
+        }
+    }
+}
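+
+
+/*
+ * Editor's note (illustration): GDNative chains the newer core API structs
+ * through the `next` pointer walked above, conceptually:
+ *
+ *   core 1.0 (options->api_struct) --next--> core 1.1 --next--> core 1.2
+ *
+ * while the extensions (nativescript, pluginscript, ...) come as a flat array.
+ */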
+
+
+GDN_EXPORT void godot_gdnative_init(godot_gdnative_init_options *options) {
+    // Registering the api should be the very first thing to do !
+    _register_gdapi(options);
+
+    // Now those macros are usable
+
+    #define GD_PRINT(c_msg) { \
+        godot_string gd_msg; \
+        pythonscript_gdapi10->godot_string_new_with_wide_string( \
+            &gd_msg, c_msg, -1); \
+        pythonscript_gdapi10->godot_print(&gd_msg); \
+        pythonscript_gdapi10->godot_string_destroy(&gd_msg); \
+    }
+
+    #define GD_ERROR_PRINT(msg) { \
+        pythonscript_gdapi10->godot_print_error(msg, __func__, __FILE__, __LINE__); \
+    }
+
+    // Check for mandatory plugins
+
+    if (!pythonscript_gdapi10 || !pythonscript_gdapi11 || !pythonscript_gdapi12) {
+        GD_ERROR_PRINT("Godot-Python requires GDNative API >= v1.2");
+        return;
+    }
+    if (!pythonscript_gdapi_ext_pluginscript) {
+        GD_ERROR_PRINT("Pluginscript extension not available");
+        return;
+    }
+
+#ifndef _WIN32
+    // Make sure the shared library has all its symbols loaded
+    // (strange bug with libpython3.x.so otherwise...)
+    {
+        const wchar_t *wpath = pythonscript_gdapi10->godot_string_wide_str(
+            options->active_library_path
+        );
+        char path[300];
+        wcstombs(path, wpath, 300);
+        dlopen(path, RTLD_NOW | RTLD_GLOBAL);
+    }
+
+    const char *err = dlerror();
+    if (err) {
+        GD_ERROR_PRINT(err);
+        return;
+    }
+#endif
+
+    // Initialize the CPython interpreter
+
+    // Retrieve the library path and set pythonhome accordingly
+    {
+        static wchar_t pythonhome[300];
+        godot_string _pythonhome = pythonscript_gdapi10->godot_string_get_base_dir(
+            options->active_library_path
+        );
+        wcsncpy(pythonhome, pythonscript_gdapi10->godot_string_wide_str(&_pythonhome), 300);
+        pythonscript_gdapi10->godot_string_destroy(&_pythonhome);
+        Py_SetPythonHome(pythonhome);
+    }
+    // TODO: site.USER_SITE seems to point to an invalid location in ~/.local
+    // // Add current dir to PYTHONPATH
+    // wchar_t *path = Py_GetPath();
+    // int new_path_len = wcslen(path) + 3;
+    // wchar_t new_path[new_path_len * sizeof(wchar_t)];
+    // wcsncpy(new_path, L".:", new_path_len);
+    // wcsncpy(new_path + 2, path, new_path_len - 2);
+    // Py_SetPath(new_path);
+    // PyRun_SimpleString("import sys\nprint('PYTHON_PATH:', sys.path)\n");
+
+    Py_SetProgramName(L"godot");
+    // Initialize the interpreter, but skip the registration of signal handlers
+    Py_InitializeEx(0);
+    // PyEval_InitThreads acquires the GIL, so we must release it later.
+    // Since Python 3.7, PyEval_InitThreads is automatically called by
+    // Py_InitializeEx, but it's better to leave it here to be explicit:
+    // calling it again does nothing.
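+    // Editor's note (illustrative summary, not upstream text): the GIL
+    // lifecycle in this file is roughly:
+    //   Py_InitializeEx(0);               // interpreter up, GIL held
+    //   ...language registration below...
+    //   gilstate = PyEval_SaveThread();   // release the GIL for Godot
+    //   PyEval_RestoreThread(gilstate);   // re-acquired in godot_gdnative_terminate
+    //   Py_FinalizeEx();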
+ PyEval_InitThreads(); + int ret = import__godot(); + if (ret != 0){ + GD_ERROR_PRINT("Cannot load godot python module"); + return; + } + + desc.name = "Python"; + desc.type = "Python"; + desc.extension = "py"; + desc.recognized_extensions = PYTHONSCRIPT_RECOGNIZED_EXTENSIONS; + desc.init = pythonscript_init; + desc.finish = pythonscript_finish; + desc.reserved_words = PYTHONSCRIPT_RESERVED_WORDS; + desc.comment_delimiters = PYTHONSCRIPT_COMMENT_DELIMITERS; + desc.string_delimiters = PYTHONSCRIPT_STRING_DELIMITERS; + desc.has_named_classes = false; + desc.add_global_constant = pythonscript_add_global_constant; + + desc.script_desc.init = pythonscript_script_init; + desc.script_desc.finish = pythonscript_script_finish; + + desc.script_desc.instance_desc.init = pythonscript_instance_init; + desc.script_desc.instance_desc.finish = pythonscript_instance_finish; + desc.script_desc.instance_desc.set_prop = pythonscript_instance_set_prop; + desc.script_desc.instance_desc.get_prop = pythonscript_instance_get_prop; + desc.script_desc.instance_desc.call_method = pythonscript_instance_call_method; + desc.script_desc.instance_desc.notification = pythonscript_instance_notification; + desc.script_desc.instance_desc.refcount_incremented = NULL; + desc.script_desc.instance_desc.refcount_decremented = NULL; + + if (options->in_editor) { + + desc.get_template_source_code = pythonscript_get_template_source_code; + desc.validate = pythonscript_validate; + desc.find_function = pythonscript_find_function; + desc.make_function = pythonscript_make_function; + desc.complete_code = pythonscript_complete_code; + desc.auto_indent_code = pythonscript_auto_indent_code; + + desc.debug_get_error = pythonscript_debug_get_error; + desc.debug_get_stack_level_count = pythonscript_debug_get_stack_level_count; + desc.debug_get_stack_level_line = pythonscript_debug_get_stack_level_line; + desc.debug_get_stack_level_function = pythonscript_debug_get_stack_level_function; + desc.debug_get_stack_level_source = pythonscript_debug_get_stack_level_source; + desc.debug_get_stack_level_locals = pythonscript_debug_get_stack_level_locals; + desc.debug_get_stack_level_members = pythonscript_debug_get_stack_level_members; + desc.debug_get_globals = pythonscript_debug_get_globals; + desc.debug_parse_stack_level_expression = pythonscript_debug_parse_stack_level_expression; + + desc.profiling_start = pythonscript_profiling_start; + desc.profiling_stop = pythonscript_profiling_stop; + desc.profiling_get_accumulated_data = pythonscript_profiling_get_accumulated_data; + desc.profiling_get_frame_data = pythonscript_profiling_get_frame_data; + desc.profiling_frame = pythonscript_profiling_frame; + } + pythonscript_gdapi_ext_pluginscript->godot_pluginscript_register_language(&desc); + + // Release the Kraken... er I mean the GIL ! 
+ gilstate = PyEval_SaveThread(); +} + + +GDN_EXPORT void godot_gdnative_singleton() { +} + + +GDN_EXPORT void godot_gdnative_terminate() { + // Re-acquire the gil in order to finalize properly + PyEval_RestoreThread(gilstate); + + int ret = Py_FinalizeEx(); + if (ret != 0) { + GD_ERROR_PRINT("Cannot finalize python interpreter"); + } +} diff --git a/requirements.in b/requirements.in new file mode 100644 index 0000000..7c0cf05 --- /dev/null +++ b/requirements.in @@ -0,0 +1,8 @@ +pip-tools~=6.0.1 +black~=20.8b1 +scons~=3.1 +cython~=0.29.21 +autopxd2~=1.1.0 +pycparser~=2.20 +jinja2~=2.11.3 +zstandard~=0.15.2 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..7ebcabf --- /dev/null +++ b/requirements.txt @@ -0,0 +1,302 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +appdirs==1.4.4 \ + --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \ + --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 + # via black +autopxd2==1.1.0 \ + --hash=sha256:faa4b7eeb6b1d7217c46d3660640d4a72fb5c86d4559017c1f58a55d34332b2a + # via -r requirements.in +black==20.8b1 \ + --hash=sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea + # via -r requirements.in +click==7.1.2 \ + --hash=sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a \ + --hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc + # via + # autopxd2 + # black + # pip-tools +cython==0.29.23 \ + --hash=sha256:0c4b9f7e3aa004cf3f364e3e772f55fec5740485bafea99d1f13bdc9bbd8a545 \ + --hash=sha256:20402ef316393168909926ab21848aa6e08e39bed5003b657139774e66166cd0 \ + --hash=sha256:20cb50d9fede8029bdb50875458f07a27f909289aeed4cdb9c19544dd9a9bc45 \ + --hash=sha256:2365f3b5e6451b6bc6dcd262230656f4ade1d862ec2f6c22154deebef37c08b6 \ + --hash=sha256:266459c7e48fe3c6c492b297e4033e42d4c6863cc1a1ff7cc4034949fc574fa6 \ + --hash=sha256:282263628c5d601b313d5920f7b6d7e08c7fedbddacd080c4858aa04d86b6b4b \ + --hash=sha256:2a3bbce689a2fddb85aa66712d93875c99bf7f64ac82b1d149ecce522a7a4e0c \ + --hash=sha256:2af52d312e96b38ded38b34d06e22685c226b1b0e58278bd27209f5d2385d115 \ + --hash=sha256:355a6e768d91e21fbf477b61881bab64b7a2da386a166898997bccefd532cf5d \ + --hash=sha256:37ff66039e3d138ec968ee1d1e12441fa5fb4e6a9c5458bc3c3a232f01be4a7d \ + --hash=sha256:3b29224eb62309a10819d923dc6262f769e4f3facfee3cd06372c355e5b38b33 \ + --hash=sha256:3ef530f975e3a760e7282fce2a25f900fa63f96d17321b4aa5f5542eb9859cdf \ + --hash=sha256:41cd0dd2ff5d78466e73409db509887a84449b400074d4f217980cedbb18e4be \ + --hash=sha256:474c1a29ab43e29d990df279e2cf6aa96baa9208f5cd4bc76ac87ffcdf1e2945 \ + --hash=sha256:4858043ac5f96a8f0277cf63760bb39b9521c1f897678cf1d22423f3e758f4ed \ + --hash=sha256:4b0bcf2e06a9063fc78c3243ed4003228375d532ef13b9e5d7183be8f0a52cf5 \ + --hash=sha256:4b6824b58d4373224fc76ee8bee6b35c2d17c91a1ed0fa67b88440f63daebe50 \ + --hash=sha256:4d7c3b0882d8757c601eaf288fc0d321d5c7ac6c3afb8c42eddf9325a3419cf5 \ + --hash=sha256:519fccf526d26b377e1db22f22aa44889b28bc5833ec106588cb13557e8ba2da \ + --hash=sha256:58dc06871bfdb0592542d779714fe9f918e11ba20ac07757dd63b198bdc704fe \ + --hash=sha256:5a6792153b728a0240e55bbb5b643f4f7e45c76319e03abf15bf367471ea1d1a \ + --hash=sha256:5be3ae3189cf7d0e9bbeafb854496dc7030c6f6a5602d809435fab8223543a41 \ + --hash=sha256:625a16103770fd92b487b701fb0c07e5790b080f40fa11ce572a2d56d9e9fcca \ + 
--hash=sha256:6a0d31452f0245daacb14c979c77e093eb1a546c760816b5eed0047686baad8e \ + --hash=sha256:794e3df0b57e16bce7583ac909126f4cb381fe566adadb20484d89095855eedb \ + --hash=sha256:7b7a766726d207d7cd57aff0fcb4b35ce042d3cc88a421fcdb45eeb61a5b9d12 \ + --hash=sha256:7d6a33c8a11f05f698e215bfdb837f32c27f63c20f3af863557ed91c748dc2be \ + --hash=sha256:a8eed9c82e8fe07b8a8ffbd36018871a17458903fc25c9d015f37b54513a3efd \ + --hash=sha256:aa3bb0928fb2aa3a8828801eb8b29af2261c199f805ae835467489e2bdd00372 \ + --hash=sha256:b0699f0dc90181f2458fdb8170455e7798a309e18f41379eda7a2dc8c7aadee0 \ + --hash=sha256:c4b82461edbbcf90f19b319006345b77474a2d7514e1476d49a14bbd55d6b797 \ + --hash=sha256:ceccc03b633113ede1f14ad914a6db5c278ce108c8ddb308a5c01c1567d8a02a \ + --hash=sha256:ef21c51350462160456eb71df31b0869e5141e940f22c61c358bdb6e3ebc3388 \ + --hash=sha256:f4aca6bffb1c1c3c4ada3347d0b162a699c18a66e097ee08b63b3a35118fdfcc \ + --hash=sha256:ff885f18d169759b57f116d3956e45cd2b9cba989fde348bba091544c668dc11 + # via -r requirements.in +importlib-metadata==4.0.1 \ + --hash=sha256:8c501196e49fb9df5df43833bdb1e4328f64847763ec8a50703148b73784d581 \ + --hash=sha256:d7eb1dea6d6a6086f8be21784cc9e3bcfa55872b52309bc5fad53a8ea444465d + # via pep517 +jinja2==2.11.3 \ + --hash=sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419 \ + --hash=sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6 + # via -r requirements.in +markupsafe==1.1.1 \ + --hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \ + --hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \ + --hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \ + --hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \ + --hash=sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \ + --hash=sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f \ + --hash=sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39 \ + --hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \ + --hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b \ + --hash=sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014 \ + --hash=sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f \ + --hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \ + --hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \ + --hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \ + --hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \ + --hash=sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \ + --hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \ + --hash=sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \ + --hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \ + --hash=sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85 \ + --hash=sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1 \ + --hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \ + --hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \ + --hash=sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \ + --hash=sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850 \ + 
--hash=sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0 \ + --hash=sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \ + --hash=sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \ + --hash=sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb \ + --hash=sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \ + --hash=sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \ + --hash=sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \ + --hash=sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1 \ + --hash=sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2 \ + --hash=sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \ + --hash=sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \ + --hash=sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \ + --hash=sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7 \ + --hash=sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \ + --hash=sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8 \ + --hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \ + --hash=sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193 \ + --hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \ + --hash=sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b \ + --hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \ + --hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \ + --hash=sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5 \ + --hash=sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c \ + --hash=sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032 \ + --hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \ + --hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be \ + --hash=sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621 + # via jinja2 +mypy-extensions==0.4.3 \ + --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ + --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 + # via black +pathspec==0.8.1 \ + --hash=sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd \ + --hash=sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d + # via black +pep517==0.10.0 \ + --hash=sha256:ac59f3f6b9726a49e15a649474539442cf76e0697e39df4869d25e68e880931b \ + --hash=sha256:eba39d201ef937584ad3343df3581069085bacc95454c80188291d5b3ac7a249 + # via pip-tools +pip-tools==6.0.1 \ + --hash=sha256:3b0c7b95e8d3dfb011bb42cb38f356fcf5d0630480462b59c4d0a112b8d90281 \ + --hash=sha256:50ec26df7710557ab574f19f7511830294999e6121b42b87473b48cb9984d788 + # via -r requirements.in +pycparser==2.20 \ + --hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \ + --hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705 + # via + # -r requirements.in + # autopxd2 +regex==2021.4.4 \ + --hash=sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5 \ + --hash=sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79 \ + 
--hash=sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31 \ + --hash=sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500 \ + --hash=sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11 \ + --hash=sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14 \ + --hash=sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3 \ + --hash=sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439 \ + --hash=sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c \ + --hash=sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82 \ + --hash=sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711 \ + --hash=sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093 \ + --hash=sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a \ + --hash=sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb \ + --hash=sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8 \ + --hash=sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17 \ + --hash=sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000 \ + --hash=sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d \ + --hash=sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480 \ + --hash=sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc \ + --hash=sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0 \ + --hash=sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9 \ + --hash=sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765 \ + --hash=sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e \ + --hash=sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a \ + --hash=sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07 \ + --hash=sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f \ + --hash=sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac \ + --hash=sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7 \ + --hash=sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed \ + --hash=sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968 \ + --hash=sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7 \ + --hash=sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2 \ + --hash=sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4 \ + --hash=sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87 \ + --hash=sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8 \ + --hash=sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10 \ + --hash=sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29 \ + --hash=sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605 \ + --hash=sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6 \ + --hash=sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042 + # via black +scons==3.1.2 \ + --hash=sha256:0f860678cd96fc943ff2294389b0f33cbe51080801591497bc652e72237f0176 \ + --hash=sha256:8aaa483c303efeb678e6f7c776c8444a482f8ddc3ad891f8b6cdd35264da9a1f + # via -r requirements.in +six==1.15.0 \ + --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ + 
--hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced + # via autopxd2 +toml==0.10.2 \ + --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ + --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f + # via + # black + # pep517 +typed-ast==1.4.3 \ + --hash=sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace \ + --hash=sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff \ + --hash=sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266 \ + --hash=sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528 \ + --hash=sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6 \ + --hash=sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808 \ + --hash=sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4 \ + --hash=sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363 \ + --hash=sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341 \ + --hash=sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04 \ + --hash=sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41 \ + --hash=sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e \ + --hash=sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3 \ + --hash=sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899 \ + --hash=sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805 \ + --hash=sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c \ + --hash=sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c \ + --hash=sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39 \ + --hash=sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a \ + --hash=sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3 \ + --hash=sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7 \ + --hash=sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f \ + --hash=sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075 \ + --hash=sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0 \ + --hash=sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40 \ + --hash=sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428 \ + --hash=sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927 \ + --hash=sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3 \ + --hash=sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f \ + --hash=sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65 + # via black +typing-extensions==3.7.4.3 \ + --hash=sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918 \ + --hash=sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c \ + --hash=sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f + # via + # black + # importlib-metadata +zipp==3.4.1 \ + --hash=sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76 \ + --hash=sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098 + # via + # importlib-metadata + # pep517 +zstandard==0.15.2 \ + --hash=sha256:1c5ef399f81204fbd9f0df3debf80389fd8aa9660fe1746d37c80b0d45f809e9 \ + --hash=sha256:1faefe33e3d6870a4dce637bcb41f7abb46a1872a595ecc7b034016081c37543 \ + 
--hash=sha256:1fb23b1754ce834a3a1a1e148cc2faad76eeadf9d889efe5e8199d3fb839d3c6 \ + --hash=sha256:22f127ff5da052ffba73af146d7d61db874f5edb468b36c9cb0b857316a21b3d \ + --hash=sha256:2353b61f249a5fc243aae3caa1207c80c7e6919a58b1f9992758fa496f61f839 \ + --hash=sha256:24cdcc6f297f7c978a40fb7706877ad33d8e28acc1786992a52199502d6da2a4 \ + --hash=sha256:31e35790434da54c106f05fa93ab4d0fab2798a6350e8a73928ec602e8505836 \ + --hash=sha256:3547ff4eee7175d944a865bbdf5529b0969c253e8a148c287f0668fe4eb9c935 \ + --hash=sha256:378ac053c0cfc74d115cbb6ee181540f3e793c7cca8ed8cd3893e338af9e942c \ + --hash=sha256:3e1cd2db25117c5b7c7e86a17cde6104a93719a9df7cb099d7498e4c1d13ee5c \ + --hash=sha256:3fe469a887f6142cc108e44c7f42c036e43620ebaf500747be2317c9f4615d4f \ + --hash=sha256:4800ab8ec94cbf1ed09c2b4686288750cab0642cb4d6fba2a56db66b923aeb92 \ + --hash=sha256:52de08355fd5cfb3ef4533891092bb96229d43c2069703d4aff04fdbedf9c92f \ + --hash=sha256:5752f44795b943c99be367fee5edf3122a1690b0d1ecd1bd5ec94c7fd2c39c94 \ + --hash=sha256:5d53f02aeb8fdd48b88bc80bece82542d084fb1a7ba03bf241fd53b63aee4f22 \ + --hash=sha256:69b7a5720b8dfab9005a43c7ddb2e3ccacbb9a2442908ae4ed49dd51ab19698a \ + --hash=sha256:6cc162b5b6e3c40b223163a9ea86cd332bd352ddadb5fd142fc0706e5e4eaaff \ + --hash=sha256:6f5d0330bc992b1e267a1b69fbdbb5ebe8c3a6af107d67e14c7a5b1ede2c5945 \ + --hash=sha256:6ffadd48e6fe85f27ca3ca10cfd3ef3d0f933bef7316870285ffeb58d791ca9c \ + --hash=sha256:72a011678c654df8323aa7b687e3147749034fdbe994d346f139ab9702b59cea \ + --hash=sha256:77d26452676f471223571efd73131fd4a626622c7960458aab2763e025836fc5 \ + --hash=sha256:7a88cc773ffe55992ff7259a8df5fb3570168d7138c69aadba40142d0e5ce39a \ + --hash=sha256:7b16bd74ae7bfbaca407a127e11058b287a4267caad13bd41305a5e630472549 \ + --hash=sha256:855d95ec78b6f0ff66e076d5461bf12d09d8e8f7e2b3fc9de7236d1464fd730e \ + --hash=sha256:8baf7991547441458325ca8fafeae79ef1501cb4354022724f3edd62279c5b2b \ + --hash=sha256:8fb77dd152054c6685639d855693579a92f276b38b8003be5942de31d241ebfb \ + --hash=sha256:92d49cc3b49372cfea2d42f43a2c16a98a32a6bc2f42abcde121132dbfc2f023 \ + --hash=sha256:94d0de65e37f5677165725f1fc7fb1616b9542d42a9832a9a0bdcba0ed68b63b \ + --hash=sha256:9867206093d7283d7de01bd2bf60389eb4d19b67306a0a763d1a8a4dbe2fb7c3 \ + --hash=sha256:9ee3c992b93e26c2ae827404a626138588e30bdabaaf7aa3aa25082a4e718790 \ + --hash=sha256:a4f8af277bb527fa3d56b216bda4da931b36b2d3fe416b6fc1744072b2c1dbd9 \ + --hash=sha256:ab9f19460dfa4c5dd25431b75bee28b5f018bf43476858d64b1aa1046196a2a0 \ + --hash=sha256:ac43c1821ba81e9344d818c5feed574a17f51fca27976ff7d022645c378fbbf5 \ + --hash=sha256:af5a011609206e390b44847da32463437505bf55fd8985e7a91c52d9da338d4b \ + --hash=sha256:b0975748bb6ec55b6d0f6665313c2cf7af6f536221dccd5879b967d76f6e7899 \ + --hash=sha256:b4963dad6cf28bfe0b61c3265d1c74a26a7605df3445bfcd3ba25de012330b2d \ + --hash=sha256:b7d3a484ace91ed827aa2ef3b44895e2ec106031012f14d28bd11a55f24fa734 \ + --hash=sha256:bd3c478a4a574f412efc58ba7e09ab4cd83484c545746a01601636e87e3dbf23 \ + --hash=sha256:c9e2dcb7f851f020232b991c226c5678dc07090256e929e45a89538d82f71d2e \ + --hash=sha256:d25c8eeb4720da41e7afbc404891e3a945b8bb6d5230e4c53d23ac4f4f9fc52c \ + --hash=sha256:dc8c03d0c5c10c200441ffb4cce46d869d9e5c4ef007f55856751dc288a2dffd \ + --hash=sha256:ec58e84d625553d191a23d5988a19c3ebfed519fff2a8b844223e3f074152163 \ + --hash=sha256:eda0719b29792f0fea04a853377cfff934660cb6cd72a0a0eeba7a1f0df4a16e \ + --hash=sha256:edde82ce3007a64e8434ccaf1b53271da4f255224d77b880b59e7d6d73df90c8 \ + 
--hash=sha256:f36722144bc0a5068934e51dca5a38a5b4daac1be84f4423244277e4baf24e7a \ + --hash=sha256:f8bb00ced04a8feff05989996db47906673ed45b11d86ad5ce892b5741e5f9dd \ + --hash=sha256:f98fc5750aac2d63d482909184aac72a979bfd123b112ec53fd365104ea15b1c \ + --hash=sha256:ff5b75f94101beaa373f1511319580a010f6e03458ee51b1a386d7de5331440a + # via -r requirements.in + +# The following packages are considered to be unsafe in a requirements file: +pip==21.1 \ + --hash=sha256:a810bf07c3723a28621c29abe8e34429fa082c337f89aea9a795865416b66d3e \ + --hash=sha256:ea9f2668484893e90149fd5a6124e04651ffedd67203a8aaf030d31406b937a4 + # via pip-tools diff --git a/site_scons/site_tools/cython.py b/site_scons/site_tools/cython.py new file mode 100644 index 0000000..eeb316b --- /dev/null +++ b/site_scons/site_tools/cython.py @@ -0,0 +1,159 @@ +import re +from itertools import takewhile +from SCons.Script import Builder, SharedLibrary +from SCons.Util import CLVar, is_List +from SCons.Errors import UserError + + +### Cython to C ### + + +def _cython_to_c_emitter(target, source, env): + if not source: + source = [] + elif not is_List(source): + source = [source] + # Consider we always depend on all .pxd files + source += env["CYTHON_DEPS"] + + # Add .html target if cython is in annotate mode + if "-a" in env["CYTHON_FLAGS"] or "--annotate" in env["CYTHON_FLAGS"]: + pyx = next(x for x in target if x.name.endswith(".pyx")) + base_name = pyx.get_path().rsplit(".")[0] + return [target[0], f"{base_name}.html"], source + else: + return target, source + + +CythonToCBuilder = Builder( + action="cython $CYTHON_FLAGS $SOURCE -o $TARGET", + suffix=".c", + src_suffix=".pyx", + emitter=_cython_to_c_emitter, +) + + +### C compilation to .so ### + + +def _get_hops_to_site_packages(target): + *parts, _ = target.abspath.split("/") + # Modules installed in `site-packages` come from `pythonscript` folder + return len(list(takewhile(lambda part: part != "pythonscript", reversed(parts)))) + + +def _get_relative_path_to_libpython(env, target): + hops_to_site_packages = _get_hops_to_site_packages(target) + # site_packages is in `/lib/python3.7/site-packages/` + # and libpython in `/lib/libpython3.so` + hops_to_libpython_dir = hops_to_site_packages + 2 + return "/".join([".."] * hops_to_libpython_dir) + + +def _get_relative_path_to_libpythonscript(env, target): + hops_to_site_packages = _get_hops_to_site_packages(target) + # site_packages is in `/lib/python3.7/site-packages/` + # and libpythonscript in `/libpythonscript.so` + hops_to_libpython_dir = hops_to_site_packages + 3 + return "/".join([".."] * hops_to_libpython_dir) + + +def CythonCompile(env, target, source): + env.Depends(source, env["cpython_build"]) + + # C code generated by Cython is not *that* clean + if not env["CC_IS_MSVC"]: + cflags = ["-Wno-unused", *env["CFLAGS"]] + else: + cflags = env["CFLAGS"] + + # Python native module must have .pyd suffix on windows and .so on POSIX (even on macOS) + if env["platform"].startswith("windows"): + ret = env.SharedLibrary( + target=target, + source=source, + LIBPREFIX="", + SHLIBSUFFIX=".pyd", + CFLAGS=cflags, + LIBS=["python38", "pythonscript"], + # LIBS=[*env["CYTHON_LIBS"], *env["LIBS"]], + # LIBPATH=[*env['CYTHON_LIBPATH'], *env['LIBPATH']] + ) + else: # x11 / macos + # Cyton modules depend on libpython.so and libpythonscript.so + # given they won't be available in the default OS lib path we + # must provide their path to the linker + loader_token = "@loader_path" if env["platform"].startswith("osx") else "$$ORIGIN" + libpython_path = 
_get_relative_path_to_libpython(env, env.File(target))
+        libpythonscript_path = _get_relative_path_to_libpythonscript(env, env.File(target))
+        linkflags = [
+            f"-Wl,-rpath,'{loader_token}/{libpython_path}'",
+            f"-Wl,-rpath,'{loader_token}/{libpythonscript_path}'",
+        ]
+        # TODO: use scons `env.LoadableModule` for better macOS support?
+        ret = env.SharedLibrary(
+            target=target,
+            source=source,
+            LIBPREFIX="",
+            SHLIBSUFFIX=".so",
+            CFLAGS=cflags,
+            LINKFLAGS=[*linkflags, *env["LINKFLAGS"]],
+            LIBS=["python3.8", "pythonscript"],
+            # LIBS=[*env["CYTHON_LIBS"], *env["LIBS"]],
+            # LIBPATH=[*env['CYTHON_LIBPATH'], *env['LIBPATH']]
+        )
+
+    env.Depends(ret, env["CYTHON_COMPILE_DEPS"])
+    return ret
+
+
+### Direct Cython to .so ###
+
+
+def CythonModule(env, target, source=None):
+    """Shortcut builder: turn `mod.pyx` into `mod.c`, then compile it into a
+    Python native module (e.g. `env.CythonModule("mymod")`)."""
+    if not target:
+        target = []
+    elif not is_List(target):
+        target = [target]
+
+    if not source:
+        source = []
+    elif not is_List(source):
+        source = [source]
+
+    # mod_target is passed to the compile builder
+    mod_target, *other_targets = target
+
+    if not source:
+        source.append(f"{mod_target}.pyx")
+
+    pyx_mod, *too_much_mods = [x for x in source if str(x).endswith(".pyx")]
+    if too_much_mods:
+        raise UserError(
+            f"Must have exactly one .pyx file in sources (got `{[pyx_mod, *too_much_mods]}`)"
+        )
+    # Useful to do `xxx.gen.pyx` ==> `xxx` (str() so this also works on File nodes)
+    c_mod = str(pyx_mod).split(".", 1)[0] + ".c"
+    CythonToCBuilder(env, target=[c_mod, *other_targets], source=source)
+
+    c_compile_target = CythonCompile(env, target=mod_target, source=[c_mod])
+
+    return [*c_compile_target, *other_targets]
+
+
+### Scons tool hooks ###
+
+
+def generate(env):
+    """Add Builders and construction variables for Cython to an Environment."""
+
+    env["CYTHON_FLAGS"] = CLVar("--fast-fail -3")
+    env["CYTHON_DEPS"] = []
+    env["CYTHON_COMPILE_DEPS"] = []
+
+    env.Append(BUILDERS={"CythonToC": CythonToCBuilder})
+    env.AddMethod(CythonCompile, "CythonCompile")
+    env.AddMethod(CythonModule, "CythonModule")
+
+
+def exists(env):
+    return env.Detect("cython")
diff --git a/site_scons/site_tools/download.py b/site_scons/site_tools/download.py
new file mode 100644
index 0000000..1d23c07
--- /dev/null
+++ b/site_scons/site_tools/download.py
@@ -0,0 +1,27 @@
+from SCons.Util import is_List
+from SCons.Action import Action
+from urllib.request import urlopen
+
+
+def Download(env, target, url):
+    def _do_download(target, source, env):
+        if not target:
+            target = []
+        elif not is_List(target):
+            target = [target]
+        with urlopen(url) as infd:
+            with open(target[0].abspath, "bw") as outfd:
+                outfd.write(infd.read())
+
+    return env.Command(target, None, Action(_do_download, f"Download {url}"))
+
+
+### Scons tool hooks ###
+
+
+def generate(env):
+    env.AddMethod(Download, "Download")
+
+
+def exists(env):
+    return True
diff --git a/site_scons/site_tools/symlink.py b/site_scons/site_tools/symlink.py
new file mode 100644
index 0000000..9299ee0
--- /dev/null
+++ b/site_scons/site_tools/symlink.py
@@ -0,0 +1,103 @@
+import os
+from SCons.Script import Builder, COMMAND_LINE_TARGETS
+from SCons.Util import is_List
+from SCons.Errors import UserError
+
+
+def SymlinkAction(target, source, env):
+    target = target if is_List(target) else [target]
+    source = source if is_List(source) else [source]
+
+    if len(target) != 1 or len(source) != 1:
+        raise UserError("Symlink only takes a single target and source")
+
+    abs_src = os.path.abspath(str(source[0]))
+    abs_trg = os.path.abspath(str(target[0]))
+
+    if not os.path.isdir(abs_src):
+        raise UserError("Only folder symlinks are allowed due to a Windows limitation")
+
+    try:
+        os.unlink(abs_trg)
+    except Exception:
+        pass
+
+    if env["HOST_OS"] == "win32":
+        try:
+            import _winapi
+
+            _winapi.CreateJunction(abs_src, abs_trg)
+        except Exception as e:
+            raise UserError(
+                f"Can't do an NTFS junction as symlink fallback ({abs_src} -> {abs_trg})"
+            ) from e
+
+    else:
+        try:
+            os.symlink(abs_src, abs_trg)
+        except Exception as e:
+            raise UserError(f"Can't create symlink ({abs_src} -> {abs_trg})") from e
+
+
+def SymlinkBuilder(env, target, source, action=SymlinkAction):
+    results = env.Command(target, source, action)
+    if env["HOST_OS"] == "win32":
+        abs_trg = os.path.abspath(str(target[0]))
+
+        def _rm(env, target, source):
+            # assert len(target) == 1
+            try:
+                os.unlink(abs_trg)
+            except FileNotFoundError:
+                pass
+            except Exception as e:
+                raise UserError(f"Can't remove NTFS junction {abs_trg}") from e
+
+        env.CustomClean(
+            target,
+            # RemoveSymlink
+            env.Action(_rm, f"Removing symlink {abs_trg}"),
+        )
+    return results
+
+
+def CustomClean(env, targets, action):
+    # Inspired by https://github.com/SCons/scons/wiki/CustomCleanActions
+
+    if not env.GetOption("clean"):
+        return
+
+    # normalize targets to absolute paths
+    targets = [env.Entry(target).abspath for target in env.Flatten(targets)]
+    launchdir = env.GetLaunchDir()
+    topdir = env.Dir("#").abspath
+    cl_targets = COMMAND_LINE_TARGETS
+
+    if not cl_targets:
+        cl_targets.append(".")
+
+    for cl_target in cl_targets:
+        if cl_target.startswith("#"):
+            # `[1:]` drops the leading `#` so the rest can be joined to topdir
+            full_target = os.path.join(topdir, cl_target[1:])
+        else:
+            full_target = os.path.join(launchdir, cl_target)
+        full_target = os.path.normpath(full_target)
+        for target in targets:
+            if target.startswith(full_target):
+                env.Execute(action)
+                return
+
+
+### Scons tool hooks ###
+
+
+def generate(env):
+    """
+    Scons doesn't provide cross-platform symlink out of the box due to Windows...
+ """ + env.AddMethod(CustomClean, "CustomClean") + env.Append(BUILDERS={"Symlink": SymlinkBuilder}) + + +def exists(env): + return True diff --git a/site_scons/site_tools/virtual_target.py b/site_scons/site_tools/virtual_target.py new file mode 100644 index 0000000..018e719 --- /dev/null +++ b/site_scons/site_tools/virtual_target.py @@ -0,0 +1,49 @@ +import os +from uuid import uuid4 +from SCons.Node.FS import File +from SCons.Action import Action +from SCons.Defaults import Delete +from SCons.Util import is_List +from SCons.Errors import UserError + + +def install_marker(target): + with open(target.abspath, "w") as fd: + fd.write( + "Dummy file to represent the completion of a virtual action.\n" + "Modifying or removing this file will force rebuild.\n" + "\n" + f"Unique hash: {uuid4().hex}\n" + ) + + +def virtual_target_command(env, marker, condition, source, action): + if not isinstance(marker, File): + raise UserError("`marker` must be a File") + + if not condition(env) and os.path.exists(marker.abspath): + # Condition has changed in our back, force rebuild + env.Execute(Delete(marker)) + + return env.Command( + marker, + source, + [ + *(action if is_List(action) else [action]), + Action( + lambda target, source, env: install_marker(target[0]), + "Write $TARGET to mark task complete", + ), + ], + ) + + +### Scons tool hooks ### + + +def generate(env): + env.AddMethod(virtual_target_command, "VirtualTargetCommand") + + +def exists(env): + return True diff --git a/tests/SConscript b/tests/SConscript new file mode 100644 index 0000000..c79fb64 --- /dev/null +++ b/tests/SConscript @@ -0,0 +1,35 @@ +Import("env") + + +if env["debugger"]: + cmd_prefx = f"{env['debugger']} -ex r --args " +else: + cmd_prefx = "" + + +if env["pytest_args"]: + cmd_suffix = " ".join(f"--pytest={arg}" for arg in env["pytest_args"].split()) +else: + cmd_suffix = "" + + +if env["headless"]: + cmd_suffix += " --no-window " + + +# TODO: fix&reenable work_with_gdscript test... +for test in ["bindings", "helloworld", "threading", "global_constants"]: + dist_symlink = env.Symlink(f"{test}/addons", "$DIST_ROOT/addons") + dist_symlink = env.Symlink(f"{test}/lib", "_lib_vendors") + target = env.Command( + test, + ["$godot_binary", dist_symlink], + cmd_prefx + "${SOURCE.abspath} ${godot_args} --path ${TARGET} " + cmd_suffix, + ) + env.Depends(target, env["DIST_ROOT"]) + env.AlwaysBuild(target) + + +SConscript(["python_binary/SConscript"]) +env.Alias("test", "bindings") +env.Alias("test-threading", "threading") diff --git a/tests/_lib_vendors/.gdignore b/tests/_lib_vendors/.gdignore new file mode 100644 index 0000000..e69de29 diff --git a/tests/_lib_vendors/_pytest/__init__.py b/tests/_lib_vendors/_pytest/__init__.py new file mode 100644 index 0000000..be20d3d --- /dev/null +++ b/tests/_lib_vendors/_pytest/__init__.py @@ -0,0 +1,2 @@ +# +__version__ = '3.0.5' diff --git a/tests/_lib_vendors/_pytest/_argcomplete.py b/tests/_lib_vendors/_pytest/_argcomplete.py new file mode 100644 index 0000000..3ab679d --- /dev/null +++ b/tests/_lib_vendors/_pytest/_argcomplete.py @@ -0,0 +1,102 @@ + +"""allow bash-completion for argparse with argcomplete if installed +needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail +to find the magic string, so _ARGCOMPLETE env. var is never set, and +this does not need special code. + +argcomplete does not support python 2.5 (although the changes for that +are minor). + +Function try_argcomplete(parser) should be called directly before +the call to ArgumentParser.parse_args(). 
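+
+A minimal sketch of that call order (using this module's own names):
+
+    import argparse
+    from _argcomplete import try_argcomplete, filescompleter
+
+    parser = argparse.ArgumentParser()
+    try_argcomplete(parser)   # no-op when argcomplete is not installed
+    args = parser.parse_args()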
+ +The filescompleter is what you normally would use on the positional +arguments specification, in order to get "dirname/" after "dirn" +instead of the default "dirname ": + + optparser.add_argument(Config._file_or_dir, nargs='*' + ).completer=filescompleter + +Other, application specific, completers should go in the file +doing the add_argument calls as they need to be specified as .completer +attributes as well. (If argcomplete is not installed, the function the +attribute points to will not be used). + +SPEEDUP +======= +The generic argcomplete script for bash-completion +(/etc/bash_completion.d/python-argcomplete.sh ) +uses a python program to determine startup script generated by pip. +You can speed up completion somewhat by changing this script to include + # PYTHON_ARGCOMPLETE_OK +so the the python-argcomplete-check-easy-install-script does not +need to be called to find the entry point of the code and see if that is +marked with PYTHON_ARGCOMPLETE_OK + +INSTALL/DEBUGGING +================= +To include this support in another application that has setup.py generated +scripts: +- add the line: + # PYTHON_ARGCOMPLETE_OK + near the top of the main python entry point +- include in the file calling parse_args(): + from _argcomplete import try_argcomplete, filescompleter + , call try_argcomplete just before parse_args(), and optionally add + filescompleter to the positional arguments' add_argument() +If things do not work right away: +- switch on argcomplete debugging with (also helpful when doing custom + completers): + export _ARC_DEBUG=1 +- run: + python-argcomplete-check-easy-install-script $(which appname) + echo $? + will echo 0 if the magic line has been found, 1 if not +- sometimes it helps to find early on errors using: + _ARGCOMPLETE=1 _ARC_DEBUG=1 appname + which should throw a KeyError: 'COMPLINE' (which is properly set by the + global argcomplete script). +""" + +import sys +import os +from glob import glob + +class FastFilesCompleter: + 'Fast file completer class' + def __init__(self, directories=True): + self.directories = directories + + def __call__(self, prefix, **kwargs): + """only called on non option completions""" + if os.path.sep in prefix[1:]: # + prefix_dir = len(os.path.dirname(prefix) + os.path.sep) + else: + prefix_dir = 0 + completion = [] + globbed = [] + if '*' not in prefix and '?' 
not in prefix: + if prefix[-1] == os.path.sep: # we are on unix, otherwise no bash + globbed.extend(glob(prefix + '.*')) + prefix += '*' + globbed.extend(glob(prefix)) + for x in sorted(globbed): + if os.path.isdir(x): + x += '/' + # append stripping the prefix (like bash, not like compgen) + completion.append(x[prefix_dir:]) + return completion + + +if os.environ.get('_ARGCOMPLETE'): + try: + import argcomplete.completers + except ImportError: + sys.exit(-1) + filescompleter = FastFilesCompleter() + + def try_argcomplete(parser): + argcomplete.autocomplete(parser) +else: + def try_argcomplete(parser): pass + filescompleter = None diff --git a/tests/_lib_vendors/_pytest/_code/__init__.py b/tests/_lib_vendors/_pytest/_code/__init__.py new file mode 100644 index 0000000..3463c11 --- /dev/null +++ b/tests/_lib_vendors/_pytest/_code/__init__.py @@ -0,0 +1,9 @@ +""" python inspection/code generation API """ +from .code import Code # noqa +from .code import ExceptionInfo # noqa +from .code import Frame # noqa +from .code import Traceback # noqa +from .code import getrawcode # noqa +from .source import Source # noqa +from .source import compile_ as compile # noqa +from .source import getfslineno # noqa diff --git a/tests/_lib_vendors/_pytest/_code/_py2traceback.py b/tests/_lib_vendors/_pytest/_code/_py2traceback.py new file mode 100644 index 0000000..a830d98 --- /dev/null +++ b/tests/_lib_vendors/_pytest/_code/_py2traceback.py @@ -0,0 +1,81 @@ +# copied from python-2.7.3's traceback.py +# CHANGES: +# - some_str is replaced, trying to create unicode strings +# +import types + +def format_exception_only(etype, value): + """Format the exception part of a traceback. + + The arguments are the exception type and value such as given by + sys.last_type and sys.last_value. The return value is a list of + strings, each ending in a newline. + + Normally, the list contains a single string; however, for + SyntaxError exceptions, it contains several lines that (when + printed) display detailed information about where the syntax + error occurred. + + The message indicating which exception occurred is always the last + string in the list. + + """ + + # An instance should not have a meaningful value parameter, but + # sometimes does, particularly for string exceptions, such as + # >>> raise string1, string2 # deprecated + # + # Clear these out first because issubtype(string1, SyntaxError) + # would throw another exception and mask the original problem. + if (isinstance(etype, BaseException) or + isinstance(etype, types.InstanceType) or + etype is None or type(etype) is str): + return [_format_final_exc_line(etype, value)] + + stype = etype.__name__ + + if not issubclass(etype, SyntaxError): + return [_format_final_exc_line(stype, value)] + + # It was a syntax error; show exactly where the problem was found. 
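+    # For illustration, a failed compile("def f(:", "x.py", "exec") is
+    # rendered along these lines (exact spacing may differ):
+    #    File "x.py", line 1
+    #      def f(:
+    #            ^
+    #  SyntaxError: invalid syntax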
+ lines = [] + try: + msg, (filename, lineno, offset, badline) = value.args + except Exception: + pass + else: + filename = filename or "" + lines.append(' File "%s", line %d\n' % (filename, lineno)) + if badline is not None: + if isinstance(badline, bytes): # python 2 only + badline = badline.decode('utf-8', 'replace') + lines.append(u' %s\n' % badline.strip()) + if offset is not None: + caretspace = badline.rstrip('\n')[:offset].lstrip() + # non-space whitespace (likes tabs) must be kept for alignment + caretspace = ((c.isspace() and c or ' ') for c in caretspace) + # only three spaces to account for offset1 == pos 0 + lines.append(' %s^\n' % ''.join(caretspace)) + value = msg + + lines.append(_format_final_exc_line(stype, value)) + return lines + +def _format_final_exc_line(etype, value): + """Return a list of a single line -- normal case for format_exception_only""" + valuestr = _some_str(value) + if value is None or not valuestr: + line = "%s\n" % etype + else: + line = "%s: %s\n" % (etype, valuestr) + return line + +def _some_str(value): + try: + return unicode(value) + except Exception: + try: + return str(value) + except Exception: + pass + return '' % type(value).__name__ diff --git a/tests/_lib_vendors/_pytest/_code/code.py b/tests/_lib_vendors/_pytest/_code/code.py new file mode 100644 index 0000000..616d5c4 --- /dev/null +++ b/tests/_lib_vendors/_pytest/_code/code.py @@ -0,0 +1,861 @@ +import sys +from inspect import CO_VARARGS, CO_VARKEYWORDS +import re +from weakref import ref + +import py +builtin_repr = repr + +reprlib = py.builtin._tryimport('repr', 'reprlib') + +if sys.version_info[0] >= 3: + from traceback import format_exception_only +else: + from ._py2traceback import format_exception_only + + +class Code(object): + """ wrapper around Python code objects """ + def __init__(self, rawcode): + if not hasattr(rawcode, "co_filename"): + rawcode = getrawcode(rawcode) + try: + self.filename = rawcode.co_filename + self.firstlineno = rawcode.co_firstlineno - 1 + self.name = rawcode.co_name + except AttributeError: + raise TypeError("not a code object: %r" %(rawcode,)) + self.raw = rawcode + + def __eq__(self, other): + return self.raw == other.raw + + __hash__ = None + + def __ne__(self, other): + return not self == other + + @property + def path(self): + """ return a path object pointing to source code (note that it + might not point to an actually existing file). """ + try: + p = py.path.local(self.raw.co_filename) + # maybe don't try this checking + if not p.check(): + raise OSError("py.path check failed.") + except OSError: + # XXX maybe try harder like the weird logic + # in the standard lib [linecache.updatecache] does? 
+ p = self.raw.co_filename + + return p + + @property + def fullsource(self): + """ return a _pytest._code.Source object for the full source file of the code + """ + from _pytest._code import source + full, _ = source.findsource(self.raw) + return full + + def source(self): + """ return a _pytest._code.Source object for the code object's source only + """ + # return source only for that part of code + import _pytest._code + return _pytest._code.Source(self.raw) + + def getargs(self, var=False): + """ return a tuple with the argument names for the code object + + if 'var' is set True also return the names of the variable and + keyword arguments when present + """ + # handfull shortcut for getting args + raw = self.raw + argcount = raw.co_argcount + if var: + argcount += raw.co_flags & CO_VARARGS + argcount += raw.co_flags & CO_VARKEYWORDS + return raw.co_varnames[:argcount] + +class Frame(object): + """Wrapper around a Python frame holding f_locals and f_globals + in which expressions can be evaluated.""" + + def __init__(self, frame): + self.lineno = frame.f_lineno - 1 + self.f_globals = frame.f_globals + self.f_locals = frame.f_locals + self.raw = frame + self.code = Code(frame.f_code) + + @property + def statement(self): + """ statement this frame is at """ + import _pytest._code + if self.code.fullsource is None: + return _pytest._code.Source("") + return self.code.fullsource.getstatement(self.lineno) + + def eval(self, code, **vars): + """ evaluate 'code' in the frame + + 'vars' are optional additional local variables + + returns the result of the evaluation + """ + f_locals = self.f_locals.copy() + f_locals.update(vars) + return eval(code, self.f_globals, f_locals) + + def exec_(self, code, **vars): + """ exec 'code' in the frame + + 'vars' are optiona; additional local variables + """ + f_locals = self.f_locals.copy() + f_locals.update(vars) + py.builtin.exec_(code, self.f_globals, f_locals ) + + def repr(self, object): + """ return a 'safe' (non-recursive, one-line) string repr for 'object' + """ + return py.io.saferepr(object) + + def is_true(self, object): + return object + + def getargs(self, var=False): + """ return a list of tuples (name, value) for all arguments + + if 'var' is set True also include the variable and keyword + arguments when present + """ + retval = [] + for arg in self.code.getargs(var): + try: + retval.append((arg, self.f_locals[arg])) + except KeyError: + pass # this can occur when using Psyco + return retval + +class TracebackEntry(object): + """ a single entry in a traceback """ + + _repr_style = None + exprinfo = None + + def __init__(self, rawentry, excinfo=None): + self._excinfo = excinfo + self._rawentry = rawentry + self.lineno = rawentry.tb_lineno - 1 + + def set_repr_style(self, mode): + assert mode in ("short", "long") + self._repr_style = mode + + @property + def frame(self): + import _pytest._code + return _pytest._code.Frame(self._rawentry.tb_frame) + + @property + def relline(self): + return self.lineno - self.frame.code.firstlineno + + def __repr__(self): + return "" %(self.frame.code.path, self.lineno+1) + + @property + def statement(self): + """ _pytest._code.Source object for the current statement """ + source = self.frame.code.fullsource + return source.getstatement(self.lineno) + + @property + def path(self): + """ path to the source code """ + return self.frame.code.path + + def getlocals(self): + return self.frame.f_locals + locals = property(getlocals, None, None, "locals of underlaying frame") + + def getfirstlinesource(self): + # on 
Jython this firstlineno can be -1 apparently + return max(self.frame.code.firstlineno, 0) + + def getsource(self, astcache=None): + """ return failing source code. """ + # we use the passed in astcache to not reparse asttrees + # within exception info printing + from _pytest._code.source import getstatementrange_ast + source = self.frame.code.fullsource + if source is None: + return None + key = astnode = None + if astcache is not None: + key = self.frame.code.path + if key is not None: + astnode = astcache.get(key, None) + start = self.getfirstlinesource() + try: + astnode, _, end = getstatementrange_ast(self.lineno, source, + astnode=astnode) + except SyntaxError: + end = self.lineno + 1 + else: + if key is not None: + astcache[key] = astnode + return source[start:end] + + source = property(getsource) + + def ishidden(self): + """ return True if the current frame has a var __tracebackhide__ + resolving to True + + If __tracebackhide__ is a callable, it gets called with the + ExceptionInfo instance and can decide whether to hide the traceback. + + mostly for internal use + """ + try: + tbh = self.frame.f_locals['__tracebackhide__'] + except KeyError: + try: + tbh = self.frame.f_globals['__tracebackhide__'] + except KeyError: + return False + + if py.builtin.callable(tbh): + return tbh(None if self._excinfo is None else self._excinfo()) + else: + return tbh + + def __str__(self): + try: + fn = str(self.path) + except py.error.Error: + fn = '???' + name = self.frame.code.name + try: + line = str(self.statement).lstrip() + except KeyboardInterrupt: + raise + except: + line = "???" + return " File %r:%d in %s\n %s\n" %(fn, self.lineno+1, name, line) + + def name(self): + return self.frame.code.raw.co_name + name = property(name, None, None, "co_name of underlaying code") + +class Traceback(list): + """ Traceback objects encapsulate and offer higher level + access to Traceback entries. + """ + Entry = TracebackEntry + def __init__(self, tb, excinfo=None): + """ initialize from given python traceback object and ExceptionInfo """ + self._excinfo = excinfo + if hasattr(tb, 'tb_next'): + def f(cur): + while cur is not None: + yield self.Entry(cur, excinfo=excinfo) + cur = cur.tb_next + list.__init__(self, f(tb)) + else: + list.__init__(self, tb) + + def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None): + """ return a Traceback instance wrapping part of this Traceback + + by provding any combination of path, lineno and firstlineno, the + first frame to start the to-be-returned traceback is determined + + this allows cutting the first part of a Traceback instance e.g. 
+ for formatting reasons (removing some uninteresting bits that deal + with handling of the exception/traceback) + """ + for x in self: + code = x.frame.code + codepath = code.path + if ((path is None or codepath == path) and + (excludepath is None or not hasattr(codepath, 'relto') or + not codepath.relto(excludepath)) and + (lineno is None or x.lineno == lineno) and + (firstlineno is None or x.frame.code.firstlineno == firstlineno)): + return Traceback(x._rawentry, self._excinfo) + return self + + def __getitem__(self, key): + val = super(Traceback, self).__getitem__(key) + if isinstance(key, type(slice(0))): + val = self.__class__(val) + return val + + def filter(self, fn=lambda x: not x.ishidden()): + """ return a Traceback instance with certain items removed + + fn is a function that gets a single argument, a TracebackEntry + instance, and should return True when the item should be added + to the Traceback, False when not + + by default this removes all the TracebackEntries which are hidden + (see ishidden() above) + """ + return Traceback(filter(fn, self), self._excinfo) + + def getcrashentry(self): + """ return last non-hidden traceback entry that lead + to the exception of a traceback. + """ + for i in range(-1, -len(self)-1, -1): + entry = self[i] + if not entry.ishidden(): + return entry + return self[-1] + + def recursionindex(self): + """ return the index of the frame/TracebackEntry where recursion + originates if appropriate, None if no recursion occurred + """ + cache = {} + for i, entry in enumerate(self): + # id for the code.raw is needed to work around + # the strange metaprogramming in the decorator lib from pypi + # which generates code objects that have hash/value equality + #XXX needs a test + key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno + #print "checking for recursion at", key + l = cache.setdefault(key, []) + if l: + f = entry.frame + loc = f.f_locals + for otherloc in l: + if f.is_true(f.eval(co_equal, + __recursioncache_locals_1=loc, + __recursioncache_locals_2=otherloc)): + return i + l.append(entry.frame.f_locals) + return None + + +co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2', + '?', 'eval') + +class ExceptionInfo(object): + """ wraps sys.exc_info() objects and offers + help for navigating the traceback. 
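+
+        A sketch of typical use (assumes an exception is currently being
+        handled, otherwise sys.exc_info() is empty):
+
+            try:
+                1 / 0
+            except ZeroDivisionError:
+                excinfo = ExceptionInfo()
+                assert excinfo.typename == "ZeroDivisionError"
+                assert excinfo.errisinstance(ZeroDivisionError)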
+ """ + _striptext = '' + def __init__(self, tup=None, exprinfo=None): + import _pytest._code + if tup is None: + tup = sys.exc_info() + if exprinfo is None and isinstance(tup[1], AssertionError): + exprinfo = getattr(tup[1], 'msg', None) + if exprinfo is None: + exprinfo = py._builtin._totext(tup[1]) + if exprinfo and exprinfo.startswith('assert '): + self._striptext = 'AssertionError: ' + self._excinfo = tup + #: the exception class + self.type = tup[0] + #: the exception instance + self.value = tup[1] + #: the exception raw traceback + self.tb = tup[2] + #: the exception type name + self.typename = self.type.__name__ + #: the exception traceback (_pytest._code.Traceback instance) + self.traceback = _pytest._code.Traceback(self.tb, excinfo=ref(self)) + + def __repr__(self): + return "" % (self.typename, len(self.traceback)) + + def exconly(self, tryshort=False): + """ return the exception as a string + + when 'tryshort' resolves to True, and the exception is a + _pytest._code._AssertionError, only the actual exception part of + the exception representation is returned (so 'AssertionError: ' is + removed from the beginning) + """ + lines = format_exception_only(self.type, self.value) + text = ''.join(lines) + text = text.rstrip() + if tryshort: + if text.startswith(self._striptext): + text = text[len(self._striptext):] + return text + + def errisinstance(self, exc): + """ return True if the exception is an instance of exc """ + return isinstance(self.value, exc) + + def _getreprcrash(self): + exconly = self.exconly(tryshort=True) + entry = self.traceback.getcrashentry() + path, lineno = entry.frame.code.raw.co_filename, entry.lineno + return ReprFileLocation(path, lineno+1, exconly) + + def getrepr(self, showlocals=False, style="long", + abspath=False, tbfilter=True, funcargs=False): + """ return str()able representation of this exception info. + showlocals: show locals per traceback entry + style: long|short|no|native traceback style + tbfilter: hide entries (where __tracebackhide__ is true) + + in case of style==native, tbfilter and showlocals is ignored. + """ + if style == 'native': + return ReprExceptionInfo(ReprTracebackNative( + py.std.traceback.format_exception( + self.type, + self.value, + self.traceback[0]._rawentry, + )), self._getreprcrash()) + + fmt = FormattedExcinfo(showlocals=showlocals, style=style, + abspath=abspath, tbfilter=tbfilter, funcargs=funcargs) + return fmt.repr_excinfo(self) + + def __str__(self): + entry = self.traceback[-1] + loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly()) + return str(loc) + + def __unicode__(self): + entry = self.traceback[-1] + loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly()) + return unicode(loc) + + def match(self, regexp): + """ + Match the regular expression 'regexp' on the string representation of + the exception. If it matches then True is returned (so that it is + possible to write 'assert excinfo.match()'). If it doesn't match an + AssertionError is raised. + """ + __tracebackhide__ = True + if not re.search(regexp, str(self.value)): + assert 0, "Pattern '{0!s}' not found in '{1!s}'".format( + regexp, self.value) + return True + + +class FormattedExcinfo(object): + """ presenting information about failing Functions and Generators. 
""" + # for traceback entries + flow_marker = ">" + fail_marker = "E" + + def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False): + self.showlocals = showlocals + self.style = style + self.tbfilter = tbfilter + self.funcargs = funcargs + self.abspath = abspath + self.astcache = {} + + def _getindent(self, source): + # figure out indent for given source + try: + s = str(source.getstatement(len(source)-1)) + except KeyboardInterrupt: + raise + except: + try: + s = str(source[-1]) + except KeyboardInterrupt: + raise + except: + return 0 + return 4 + (len(s) - len(s.lstrip())) + + def _getentrysource(self, entry): + source = entry.getsource(self.astcache) + if source is not None: + source = source.deindent() + return source + + def _saferepr(self, obj): + return py.io.saferepr(obj) + + def repr_args(self, entry): + if self.funcargs: + args = [] + for argname, argvalue in entry.frame.getargs(var=True): + args.append((argname, self._saferepr(argvalue))) + return ReprFuncArgs(args) + + def get_source(self, source, line_index=-1, excinfo=None, short=False): + """ return formatted and marked up source lines. """ + import _pytest._code + lines = [] + if source is None or line_index >= len(source.lines): + source = _pytest._code.Source("???") + line_index = 0 + if line_index < 0: + line_index += len(source) + space_prefix = " " + if short: + lines.append(space_prefix + source.lines[line_index].strip()) + else: + for line in source.lines[:line_index]: + lines.append(space_prefix + line) + lines.append(self.flow_marker + " " + source.lines[line_index]) + for line in source.lines[line_index+1:]: + lines.append(space_prefix + line) + if excinfo is not None: + indent = 4 if short else self._getindent(source) + lines.extend(self.get_exconly(excinfo, indent=indent, markall=True)) + return lines + + def get_exconly(self, excinfo, indent=4, markall=False): + lines = [] + indent = " " * indent + # get the real exception information out + exlines = excinfo.exconly(tryshort=True).split('\n') + failindent = self.fail_marker + indent[1:] + for line in exlines: + lines.append(failindent + line) + if not markall: + failindent = indent + return lines + + def repr_locals(self, locals): + if self.showlocals: + lines = [] + keys = [loc for loc in locals if loc[0] != "@"] + keys.sort() + for name in keys: + value = locals[name] + if name == '__builtins__': + lines.append("__builtins__ = ") + else: + # This formatting could all be handled by the + # _repr() function, which is only reprlib.Repr in + # disguise, so is very configurable. 
+ str_repr = self._saferepr(value) + #if len(str_repr) < 70 or not isinstance(value, + # (list, tuple, dict)): + lines.append("%-10s = %s" %(name, str_repr)) + #else: + # self._line("%-10s =\\" % (name,)) + # # XXX + # py.std.pprint.pprint(value, stream=self.excinfowriter) + return ReprLocals(lines) + + def repr_traceback_entry(self, entry, excinfo=None): + import _pytest._code + source = self._getentrysource(entry) + if source is None: + source = _pytest._code.Source("???") + line_index = 0 + else: + # entry.getfirstlinesource() can be -1, should be 0 on jython + line_index = entry.lineno - max(entry.getfirstlinesource(), 0) + + lines = [] + style = entry._repr_style + if style is None: + style = self.style + if style in ("short", "long"): + short = style == "short" + reprargs = self.repr_args(entry) if not short else None + s = self.get_source(source, line_index, excinfo, short=short) + lines.extend(s) + if short: + message = "in %s" %(entry.name) + else: + message = excinfo and excinfo.typename or "" + path = self._makepath(entry.path) + filelocrepr = ReprFileLocation(path, entry.lineno+1, message) + localsrepr = None + if not short: + localsrepr = self.repr_locals(entry.locals) + return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style) + if excinfo: + lines.extend(self.get_exconly(excinfo, indent=4)) + return ReprEntry(lines, None, None, None, style) + + def _makepath(self, path): + if not self.abspath: + try: + np = py.path.local().bestrelpath(path) + except OSError: + return path + if len(np) < len(str(path)): + path = np + return path + + def repr_traceback(self, excinfo): + traceback = excinfo.traceback + if self.tbfilter: + traceback = traceback.filter() + recursionindex = None + if is_recursion_error(excinfo): + recursionindex = traceback.recursionindex() + last = traceback[-1] + entries = [] + extraline = None + for index, entry in enumerate(traceback): + einfo = (last == entry) and excinfo or None + reprentry = self.repr_traceback_entry(entry, einfo) + entries.append(reprentry) + if index == recursionindex: + extraline = "!!! 
Recursion detected (same locals & position)" + break + return ReprTraceback(entries, extraline, style=self.style) + + + def repr_excinfo(self, excinfo): + if sys.version_info[0] < 3: + reprtraceback = self.repr_traceback(excinfo) + reprcrash = excinfo._getreprcrash() + + return ReprExceptionInfo(reprtraceback, reprcrash) + else: + repr_chain = [] + e = excinfo.value + descr = None + while e is not None: + if excinfo: + reprtraceback = self.repr_traceback(excinfo) + reprcrash = excinfo._getreprcrash() + else: + # fallback to native repr if the exception doesn't have a traceback: + # ExceptionInfo objects require a full traceback to work + reprtraceback = ReprTracebackNative(py.std.traceback.format_exception(type(e), e, None)) + reprcrash = None + + repr_chain += [(reprtraceback, reprcrash, descr)] + if e.__cause__ is not None: + e = e.__cause__ + excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None + descr = 'The above exception was the direct cause of the following exception:' + elif e.__context__ is not None: + e = e.__context__ + excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None + descr = 'During handling of the above exception, another exception occurred:' + else: + e = None + repr_chain.reverse() + return ExceptionChainRepr(repr_chain) + + +class TerminalRepr(object): + def __str__(self): + s = self.__unicode__() + if sys.version_info[0] < 3: + s = s.encode('utf-8') + return s + + def __unicode__(self): + # FYI this is called from pytest-xdist's serialization of exception + # information. + io = py.io.TextIO() + tw = py.io.TerminalWriter(file=io) + self.toterminal(tw) + return io.getvalue().strip() + + def __repr__(self): + return "<%s instance at %0x>" %(self.__class__, id(self)) + + +class ExceptionRepr(TerminalRepr): + def __init__(self): + self.sections = [] + + def addsection(self, name, content, sep="-"): + self.sections.append((name, content, sep)) + + def toterminal(self, tw): + for name, content, sep in self.sections: + tw.sep(sep, name) + tw.line(content) + + +class ExceptionChainRepr(ExceptionRepr): + def __init__(self, chain): + super(ExceptionChainRepr, self).__init__() + self.chain = chain + # reprcrash and reprtraceback of the outermost (the newest) exception + # in the chain + self.reprtraceback = chain[-1][0] + self.reprcrash = chain[-1][1] + + def toterminal(self, tw): + for element in self.chain: + element[0].toterminal(tw) + if element[2] is not None: + tw.line("") + tw.line(element[2], yellow=True) + super(ExceptionChainRepr, self).toterminal(tw) + + +class ReprExceptionInfo(ExceptionRepr): + def __init__(self, reprtraceback, reprcrash): + super(ReprExceptionInfo, self).__init__() + self.reprtraceback = reprtraceback + self.reprcrash = reprcrash + + def toterminal(self, tw): + self.reprtraceback.toterminal(tw) + super(ReprExceptionInfo, self).toterminal(tw) + +class ReprTraceback(TerminalRepr): + entrysep = "_ " + + def __init__(self, reprentries, extraline, style): + self.reprentries = reprentries + self.extraline = extraline + self.style = style + + def toterminal(self, tw): + # the entries might have different styles + for i, entry in enumerate(self.reprentries): + if entry.style == "long": + tw.line("") + entry.toterminal(tw) + if i < len(self.reprentries) - 1: + next_entry = self.reprentries[i+1] + if entry.style == "long" or \ + entry.style == "short" and next_entry.style == "long": + tw.sep(self.entrysep) + + if self.extraline: + tw.line(self.extraline) + +class 
ReprTracebackNative(ReprTraceback): + def __init__(self, tblines): + self.style = "native" + self.reprentries = [ReprEntryNative(tblines)] + self.extraline = None + +class ReprEntryNative(TerminalRepr): + style = "native" + + def __init__(self, tblines): + self.lines = tblines + + def toterminal(self, tw): + tw.write("".join(self.lines)) + +class ReprEntry(TerminalRepr): + localssep = "_ " + + def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style): + self.lines = lines + self.reprfuncargs = reprfuncargs + self.reprlocals = reprlocals + self.reprfileloc = filelocrepr + self.style = style + + def toterminal(self, tw): + if self.style == "short": + self.reprfileloc.toterminal(tw) + for line in self.lines: + red = line.startswith("E ") + tw.line(line, bold=True, red=red) + #tw.line("") + return + if self.reprfuncargs: + self.reprfuncargs.toterminal(tw) + for line in self.lines: + red = line.startswith("E ") + tw.line(line, bold=True, red=red) + if self.reprlocals: + #tw.sep(self.localssep, "Locals") + tw.line("") + self.reprlocals.toterminal(tw) + if self.reprfileloc: + if self.lines: + tw.line("") + self.reprfileloc.toterminal(tw) + + def __str__(self): + return "%s\n%s\n%s" % ("\n".join(self.lines), + self.reprlocals, + self.reprfileloc) + +class ReprFileLocation(TerminalRepr): + def __init__(self, path, lineno, message): + self.path = str(path) + self.lineno = lineno + self.message = message + + def toterminal(self, tw): + # filename and lineno output for each entry, + # using an output format that most editors unterstand + msg = self.message + i = msg.find("\n") + if i != -1: + msg = msg[:i] + tw.write(self.path, bold=True, red=True) + tw.line(":%s: %s" % (self.lineno, msg)) + +class ReprLocals(TerminalRepr): + def __init__(self, lines): + self.lines = lines + + def toterminal(self, tw): + for line in self.lines: + tw.line(line) + +class ReprFuncArgs(TerminalRepr): + def __init__(self, args): + self.args = args + + def toterminal(self, tw): + if self.args: + linesofar = "" + for name, value in self.args: + ns = "%s = %s" %(name, value) + if len(ns) + len(linesofar) + 2 > tw.fullwidth: + if linesofar: + tw.line(linesofar) + linesofar = ns + else: + if linesofar: + linesofar += ", " + ns + else: + linesofar = ns + if linesofar: + tw.line(linesofar) + tw.line("") + + +def getrawcode(obj, trycall=True): + """ return code object for given function. 
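+
+    A sketch of the common case (functions expose their code object directly):
+
+        def f():
+            pass
+
+        assert getrawcode(f) is f.__code__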
""" + try: + return obj.__code__ + except AttributeError: + obj = getattr(obj, 'im_func', obj) + obj = getattr(obj, 'func_code', obj) + obj = getattr(obj, 'f_code', obj) + obj = getattr(obj, '__code__', obj) + if trycall and not hasattr(obj, 'co_firstlineno'): + if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj): + x = getrawcode(obj.__call__, trycall=False) + if hasattr(x, 'co_firstlineno'): + return x + return obj + + +if sys.version_info[:2] >= (3, 5): # RecursionError introduced in 3.5 + def is_recursion_error(excinfo): + return excinfo.errisinstance(RecursionError) # noqa +else: + def is_recursion_error(excinfo): + if not excinfo.errisinstance(RuntimeError): + return False + try: + return "maximum recursion depth exceeded" in str(excinfo.value) + except UnicodeError: + return False diff --git a/tests/_lib_vendors/_pytest/_code/source.py b/tests/_lib_vendors/_pytest/_code/source.py new file mode 100644 index 0000000..fcec0f5 --- /dev/null +++ b/tests/_lib_vendors/_pytest/_code/source.py @@ -0,0 +1,414 @@ +from __future__ import generators + +from bisect import bisect_right +import sys +import inspect, tokenize +import py +cpy_compile = compile + +try: + import _ast + from _ast import PyCF_ONLY_AST as _AST_FLAG +except ImportError: + _AST_FLAG = 0 + _ast = None + + +class Source(object): + """ a immutable object holding a source code fragment, + possibly deindenting it. + """ + _compilecounter = 0 + def __init__(self, *parts, **kwargs): + self.lines = lines = [] + de = kwargs.get('deindent', True) + rstrip = kwargs.get('rstrip', True) + for part in parts: + if not part: + partlines = [] + if isinstance(part, Source): + partlines = part.lines + elif isinstance(part, (tuple, list)): + partlines = [x.rstrip("\n") for x in part] + elif isinstance(part, py.builtin._basestring): + partlines = part.split('\n') + if rstrip: + while partlines: + if partlines[-1].strip(): + break + partlines.pop() + else: + partlines = getsource(part, deindent=de).lines + if de: + partlines = deindent(partlines) + lines.extend(partlines) + + def __eq__(self, other): + try: + return self.lines == other.lines + except AttributeError: + if isinstance(other, str): + return str(self) == other + return False + + __hash__ = None + + def __getitem__(self, key): + if isinstance(key, int): + return self.lines[key] + else: + if key.step not in (None, 1): + raise IndexError("cannot slice a Source with a step") + newsource = Source() + newsource.lines = self.lines[key.start:key.stop] + return newsource + + def __len__(self): + return len(self.lines) + + def strip(self): + """ return new source object with trailing + and leading blank lines removed. + """ + start, end = 0, len(self) + while start < end and not self.lines[start].strip(): + start += 1 + while end > start and not self.lines[end-1].strip(): + end -= 1 + source = Source() + source.lines[:] = self.lines[start:end] + return source + + def putaround(self, before='', after='', indent=' ' * 4): + """ return a copy of the source object with + 'before' and 'after' wrapped around it. + """ + before = Source(before) + after = Source(after) + newsource = Source() + lines = [ (indent + line) for line in self.lines] + newsource.lines = before.lines + lines + after.lines + return newsource + + def indent(self, indent=' ' * 4): + """ return a copy of the source object with + all lines indented by the given indent-string. 
+ """ + newsource = Source() + newsource.lines = [(indent+line) for line in self.lines] + return newsource + + def getstatement(self, lineno, assertion=False): + """ return Source statement which contains the + given linenumber (counted from 0). + """ + start, end = self.getstatementrange(lineno, assertion) + return self[start:end] + + def getstatementrange(self, lineno, assertion=False): + """ return (start, end) tuple which spans the minimal + statement region which containing the given lineno. + """ + if not (0 <= lineno < len(self)): + raise IndexError("lineno out of range") + ast, start, end = getstatementrange_ast(lineno, self) + return start, end + + def deindent(self, offset=None): + """ return a new source object deindented by offset. + If offset is None then guess an indentation offset from + the first non-blank line. Subsequent lines which have a + lower indentation offset will be copied verbatim as + they are assumed to be part of multilines. + """ + # XXX maybe use the tokenizer to properly handle multiline + # strings etc.pp? + newsource = Source() + newsource.lines[:] = deindent(self.lines, offset) + return newsource + + def isparseable(self, deindent=True): + """ return True if source is parseable, heuristically + deindenting it by default. + """ + try: + import parser + except ImportError: + syntax_checker = lambda x: compile(x, 'asd', 'exec') + else: + syntax_checker = parser.suite + + if deindent: + source = str(self.deindent()) + else: + source = str(self) + try: + #compile(source+'\n', "x", "exec") + syntax_checker(source+'\n') + except KeyboardInterrupt: + raise + except Exception: + return False + else: + return True + + def __str__(self): + return "\n".join(self.lines) + + def compile(self, filename=None, mode='exec', + flag=generators.compiler_flag, + dont_inherit=0, _genframe=None): + """ return compiled code object. if filename is None + invent an artificial filename which displays + the source/line position of the caller frame. + """ + if not filename or py.path.local(filename).check(file=0): + if _genframe is None: + _genframe = sys._getframe(1) # the caller + fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno + base = "<%d-codegen " % self._compilecounter + self.__class__._compilecounter += 1 + if not filename: + filename = base + '%s:%d>' % (fn, lineno) + else: + filename = base + '%r %s:%d>' % (filename, fn, lineno) + source = "\n".join(self.lines) + '\n' + try: + co = cpy_compile(source, filename, mode, flag) + except SyntaxError: + ex = sys.exc_info()[1] + # re-represent syntax errors from parsing python strings + msglines = self.lines[:ex.lineno] + if ex.offset: + msglines.append(" "*ex.offset + '^') + msglines.append("(code was compiled probably from here: %s)" % filename) + newex = SyntaxError('\n'.join(msglines)) + newex.offset = ex.offset + newex.lineno = ex.lineno + newex.text = ex.text + raise newex + else: + if flag & _AST_FLAG: + return co + lines = [(x + "\n") for x in self.lines] + py.std.linecache.cache[filename] = (1, None, lines, filename) + return co + +# +# public API shortcut functions +# + +def compile_(source, filename=None, mode='exec', flags= + generators.compiler_flag, dont_inherit=0): + """ compile the given source to a raw code object, + and maintain an internal cache which allows later + retrieval of the source code for the code object + and any recursively created code objects. + """ + if _ast is not None and isinstance(source, _ast.AST): + # XXX should Source support having AST? 
+ return cpy_compile(source, filename, mode, flags, dont_inherit) + _genframe = sys._getframe(1) # the caller + s = Source(source) + co = s.compile(filename, mode, flags, _genframe=_genframe) + return co + + +def getfslineno(obj): + """ Return source location (path, lineno) for the given object. + If the source cannot be determined return ("", -1) + """ + import _pytest._code + try: + code = _pytest._code.Code(obj) + except TypeError: + try: + fn = (py.std.inspect.getsourcefile(obj) or + py.std.inspect.getfile(obj)) + except TypeError: + return "", -1 + + fspath = fn and py.path.local(fn) or None + lineno = -1 + if fspath: + try: + _, lineno = findsource(obj) + except IOError: + pass + else: + fspath = code.path + lineno = code.firstlineno + assert isinstance(lineno, int) + return fspath, lineno + +# +# helper functions +# + +def findsource(obj): + try: + sourcelines, lineno = py.std.inspect.findsource(obj) + except py.builtin._sysex: + raise + except: + return None, -1 + source = Source() + source.lines = [line.rstrip() for line in sourcelines] + return source, lineno + + +def getsource(obj, **kwargs): + import _pytest._code + obj = _pytest._code.getrawcode(obj) + try: + strsrc = inspect.getsource(obj) + except IndentationError: + strsrc = "\"Buggy python version consider upgrading, cannot get source\"" + assert isinstance(strsrc, str) + return Source(strsrc, **kwargs) + + +def deindent(lines, offset=None): + if offset is None: + for line in lines: + line = line.expandtabs() + s = line.lstrip() + if s: + offset = len(line)-len(s) + break + else: + offset = 0 + if offset == 0: + return list(lines) + newlines = [] + + def readline_generator(lines): + for line in lines: + yield line + '\n' + while True: + yield '' + + it = readline_generator(lines) + + try: + for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)): + if sline > len(lines): + break # End of input reached + if sline > len(newlines): + line = lines[sline - 1].expandtabs() + if line.lstrip() and line[:offset].isspace(): + line = line[offset:] # Deindent + newlines.append(line) + + for i in range(sline, eline): + # Don't deindent continuing lines of + # multiline tokens (i.e. multiline strings) + newlines.append(lines[i]) + except (IndentationError, tokenize.TokenError): + pass + # Add any lines we didn't see. E.g. if an exception was raised. 
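# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the vendored diff): behaviour of the
# deindent() helper above. The indent offset is guessed from the first
# non-blank line; lines indented less than that are copied verbatim, as the
# Source.deindent() docstring describes.
from _pytest._code.source import deindent

print(deindent(["    a = 1", "    b = 2"]))    # -> ['a = 1', 'b = 2']
print(deindent(["    a = 1", "unindented"]))   # -> ['a = 1', 'unindented']
# ---------------------------------------------------------------------------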
+ newlines.extend(lines[len(newlines):]) + return newlines + + +def get_statement_startend2(lineno, node): + import ast + # flatten all statements and except handlers into one lineno-list + # AST's line numbers start indexing at 1 + l = [] + for x in ast.walk(node): + if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler): + l.append(x.lineno - 1) + for name in "finalbody", "orelse": + val = getattr(x, name, None) + if val: + # treat the finally/orelse part as its own statement + l.append(val[0].lineno - 1 - 1) + l.sort() + insert_index = bisect_right(l, lineno) + start = l[insert_index - 1] + if insert_index >= len(l): + end = None + else: + end = l[insert_index] + return start, end + + +def getstatementrange_ast(lineno, source, assertion=False, astnode=None): + if astnode is None: + content = str(source) + if sys.version_info < (2,7): + content += "\n" + try: + astnode = compile(content, "source", "exec", 1024) # 1024 for AST + except ValueError: + start, end = getstatementrange_old(lineno, source, assertion) + return None, start, end + start, end = get_statement_startend2(lineno, astnode) + # we need to correct the end: + # - ast-parsing strips comments + # - there might be empty lines + # - we might have lesser indented code blocks at the end + if end is None: + end = len(source.lines) + + if end > start + 1: + # make sure we don't span differently indented code blocks + # by using the BlockFinder helper used which inspect.getsource() uses itself + block_finder = inspect.BlockFinder() + # if we start with an indented line, put blockfinder to "started" mode + block_finder.started = source.lines[start][0].isspace() + it = ((x + "\n") for x in source.lines[start:end]) + try: + for tok in tokenize.generate_tokens(lambda: next(it)): + block_finder.tokeneater(*tok) + except (inspect.EndOfBlock, IndentationError): + end = block_finder.last + start + except Exception: + pass + + # the end might still point to a comment or empty line, correct it + while end: + line = source.lines[end - 1].lstrip() + if line.startswith("#") or not line: + end -= 1 + else: + break + return astnode, start, end + + +def getstatementrange_old(lineno, source, assertion=False): + """ return (start, end) tuple which spans the minimal + statement region which containing the given lineno. + raise an IndexError if no such statementrange can be found. + """ + # XXX this logic is only used on python2.4 and below + # 1. find the start of the statement + from codeop import compile_command + for start in range(lineno, -1, -1): + if assertion: + line = source.lines[start] + # the following lines are not fully tested, change with care + if 'super' in line and 'self' in line and '__init__' in line: + raise IndexError("likely a subclass") + if "assert" not in line and "raise" not in line: + continue + trylines = source.lines[start:lineno+1] + # quick hack to prepare parsing an indented line with + # compile_command() (which errors on "return" outside defs) + trylines.insert(0, 'def xxx():') + trysource = '\n '.join(trylines) + # ^ space here + try: + compile_command(trysource) + except (SyntaxError, OverflowError, ValueError): + continue + + # 2. 
find the end of the statement + for end in range(lineno+1, len(source)+1): + trysource = source[start:end] + if trysource.isparseable(): + return start, end + raise SyntaxError("no valid source range around line %d " % (lineno,)) + + diff --git a/tests/_lib_vendors/_pytest/_pluggy.py b/tests/_lib_vendors/_pytest/_pluggy.py new file mode 100644 index 0000000..87d32cf --- /dev/null +++ b/tests/_lib_vendors/_pytest/_pluggy.py @@ -0,0 +1,11 @@ +""" +imports symbols from vendored "pluggy" if available, otherwise +falls back to importing "pluggy" from the default namespace. +""" + +try: + from _pytest.vendored_packages.pluggy import * # noqa + from _pytest.vendored_packages.pluggy import __version__ # noqa +except ImportError: + from pluggy import * # noqa + from pluggy import __version__ # noqa diff --git a/tests/_lib_vendors/_pytest/assertion/__init__.py b/tests/_lib_vendors/_pytest/assertion/__init__.py new file mode 100644 index 0000000..3f14a7a --- /dev/null +++ b/tests/_lib_vendors/_pytest/assertion/__init__.py @@ -0,0 +1,164 @@ +""" +support for presenting detailed information in failing assertions. +""" +import py +import os +import sys + +from _pytest.assertion import util +from _pytest.assertion import rewrite + + +def pytest_addoption(parser): + group = parser.getgroup("debugconfig") + group.addoption('--assert', + action="store", + dest="assertmode", + choices=("rewrite", "plain",), + default="rewrite", + metavar="MODE", + help="""Control assertion debugging tools. 'plain' + performs no assertion debugging. 'rewrite' + (the default) rewrites assert statements in + test modules on import to provide assert + expression information.""") + + +def pytest_namespace(): + return {'register_assert_rewrite': register_assert_rewrite} + + +def register_assert_rewrite(*names): + """Register one or more module names to be rewritten on import. + + This function will make sure that this module or all modules inside + the package will get their assert statements rewritten. + Thus you should make sure to call this before the module is + actually imported, usually in your __init__.py if you are a plugin + using a package. + + :raise TypeError: if the given module names are not strings. + """ + for name in names: + if not isinstance(name, str): + msg = 'expected module names as *args, got {0} instead' + raise TypeError(msg.format(repr(names))) + for hook in sys.meta_path: + if isinstance(hook, rewrite.AssertionRewritingHook): + importhook = hook + break + else: + importhook = DummyRewriteHook() + importhook.mark_rewrite(*names) + + +class DummyRewriteHook(object): + """A no-op import hook for when rewriting is disabled.""" + + def mark_rewrite(self, *names): + pass + + +class AssertionState: + """State for the assertion plugin.""" + + def __init__(self, config, mode): + self.mode = mode + self.trace = config.trace.root.get("assertion") + self.hook = None + + +def install_importhook(config): + """Try to install the rewrite hook, raise SystemError if it fails.""" + # Both Jython and CPython 2.6.0 have AST bugs that make the + # assertion rewriting hook malfunction. 
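# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the vendored diff): how a plugin would use
# register_assert_rewrite() above. "myplugin.helpers" is a hypothetical name;
# the call must happen before the helper module is first imported, otherwise
# the hook can only warn (see _warn_already_imported further below).
import pytest

pytest.register_assert_rewrite("myplugin.helpers")
from myplugin import helpers  # asserts in helpers now get rewritten on import
# ---------------------------------------------------------------------------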
+ if (sys.platform.startswith('java') or + sys.version_info[:3] == (2, 6, 0)): + raise SystemError('rewrite not supported') + + config._assertstate = AssertionState(config, 'rewrite') + config._assertstate.hook = hook = rewrite.AssertionRewritingHook(config) + sys.meta_path.insert(0, hook) + config._assertstate.trace('installed rewrite import hook') + + def undo(): + hook = config._assertstate.hook + if hook is not None and hook in sys.meta_path: + sys.meta_path.remove(hook) + + config.add_cleanup(undo) + return hook + + +def pytest_collection(session): + # this hook is only called when test modules are collected + # so for example not in the master process of pytest-xdist + # (which does not collect test modules) + assertstate = getattr(session.config, '_assertstate', None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(session) + + +def _running_on_ci(): + """Check if we're currently running on a CI system.""" + env_vars = ['CI', 'BUILD_NUMBER'] + return any(var in os.environ for var in env_vars) + + +def pytest_runtest_setup(item): + """Setup the pytest_assertrepr_compare hook + + The newinterpret and rewrite modules will use util._reprcompare if + it exists to use custom reporting via the + pytest_assertrepr_compare hook. This sets up this custom + comparison for the test. + """ + def callbinrepr(op, left, right): + """Call the pytest_assertrepr_compare hook and prepare the result + + This uses the first result from the hook and then ensures the + following: + * Overly verbose explanations are dropped unless -vv was used or + running on a CI. + * Embedded newlines are escaped to help util.format_explanation() + later. + * If the rewrite mode is used embedded %-characters are replaced + to protect later % formatting. + + The result can be formatted by util.format_explanation() for + pretty printing. 
+ """ + hook_result = item.ihook.pytest_assertrepr_compare( + config=item.config, op=op, left=left, right=right) + for new_expl in hook_result: + if new_expl: + if (sum(len(p) for p in new_expl[1:]) > 80*8 and + item.config.option.verbose < 2 and + not _running_on_ci()): + show_max = 10 + truncated_lines = len(new_expl) - show_max + new_expl[show_max:] = [py.builtin._totext( + 'Detailed information truncated (%d more lines)' + ', use "-vv" to show' % truncated_lines)] + new_expl = [line.replace("\n", "\\n") for line in new_expl] + res = py.builtin._totext("\n~").join(new_expl) + if item.config.getvalue("assertmode") == "rewrite": + res = res.replace("%", "%%") + return res + util._reprcompare = callbinrepr + + +def pytest_runtest_teardown(item): + util._reprcompare = None + + +def pytest_sessionfinish(session): + assertstate = getattr(session.config, '_assertstate', None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(None) + + +# Expose this plugin's implementation for the pytest_assertrepr_compare hook +pytest_assertrepr_compare = util.assertrepr_compare diff --git a/tests/_lib_vendors/_pytest/assertion/rewrite.py b/tests/_lib_vendors/_pytest/assertion/rewrite.py new file mode 100644 index 0000000..abf5b49 --- /dev/null +++ b/tests/_lib_vendors/_pytest/assertion/rewrite.py @@ -0,0 +1,945 @@ +"""Rewrite assertion AST to produce nice error messages""" + +import ast +import _ast +import errno +import itertools +import imp +import marshal +import os +import re +import struct +import sys +import types +from fnmatch import fnmatch + +import py +from _pytest.assertion import util + + +# pytest caches rewritten pycs in __pycache__. +if hasattr(imp, "get_tag"): + PYTEST_TAG = imp.get_tag() + "-PYTEST" +else: + if hasattr(sys, "pypy_version_info"): + impl = "pypy" + elif sys.platform == "java": + impl = "jython" + else: + impl = "cpython" + ver = sys.version_info + PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1]) + del ver, impl + +PYC_EXT = ".py" + (__debug__ and "c" or "o") +PYC_TAIL = "." + PYTEST_TAG + PYC_EXT + +REWRITE_NEWLINES = sys.version_info[:2] != (2, 7) and sys.version_info < (3, 2) +ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3 + +if sys.version_info >= (3,5): + ast_Call = ast.Call +else: + ast_Call = lambda a,b,c: ast.Call(a, b, c, None, None) + + +class AssertionRewritingHook(object): + """PEP302 Import hook which rewrites asserts.""" + + def __init__(self, config): + self.config = config + self.fnpats = config.getini("python_files") + self.session = None + self.modules = {} + self._rewritten_names = set() + self._register_with_pkg_resources() + self._must_rewrite = set() + + def set_session(self, session): + self.session = session + + def find_module(self, name, path=None): + state = self.config._assertstate + state.trace("find_module called for: %s" % name) + names = name.rsplit(".", 1) + lastname = names[-1] + pth = None + if path is not None: + # Starting with Python 3.3, path is a _NamespacePath(), which + # causes problems if not converted to list. + path = list(path) + if len(path) == 1: + pth = path[0] + if pth is None: + try: + fd, fn, desc = imp.find_module(lastname, path) + except ImportError: + return None + if fd is not None: + fd.close() + tp = desc[2] + if tp == imp.PY_COMPILED: + if hasattr(imp, "source_from_cache"): + try: + fn = imp.source_from_cache(fn) + except ValueError: + # Python 3 doesn't like orphaned but still-importable + # .pyc files. 
+ fn = fn[:-1] + else: + fn = fn[:-1] + elif tp != imp.PY_SOURCE: + # Don't know what this is. + return None + else: + fn = os.path.join(pth, name.rpartition(".")[2] + ".py") + + fn_pypath = py.path.local(fn) + if not self._should_rewrite(name, fn_pypath, state): + return None + + self._rewritten_names.add(name) + + # The requested module looks like a test file, so rewrite it. This is + # the most magical part of the process: load the source, rewrite the + # asserts, and load the rewritten source. We also cache the rewritten + # module code in a special pyc. We must be aware of the possibility of + # concurrent pytest processes rewriting and loading pycs. To avoid + # tricky race conditions, we maintain the following invariant: The + # cached pyc is always a complete, valid pyc. Operations on it must be + # atomic. POSIX's atomic rename comes in handy. + write = not sys.dont_write_bytecode + cache_dir = os.path.join(fn_pypath.dirname, "__pycache__") + if write: + try: + os.mkdir(cache_dir) + except OSError: + e = sys.exc_info()[1].errno + if e == errno.EEXIST: + # Either the __pycache__ directory already exists (the + # common case) or it's blocked by a non-dir node. In the + # latter case, we'll ignore it in _write_pyc. + pass + elif e in [errno.ENOENT, errno.ENOTDIR]: + # One of the path components was not a directory, likely + # because we're in a zip file. + write = False + elif e in [errno.EACCES, errno.EROFS, errno.EPERM]: + state.trace("read only directory: %r" % fn_pypath.dirname) + write = False + else: + raise + cache_name = fn_pypath.basename[:-3] + PYC_TAIL + pyc = os.path.join(cache_dir, cache_name) + # Notice that even if we're in a read-only directory, I'm going + # to check for a cached pyc. This may not be optimal... + co = _read_pyc(fn_pypath, pyc, state.trace) + if co is None: + state.trace("rewriting %r" % (fn,)) + source_stat, co = _rewrite_test(self.config, fn_pypath) + if co is None: + # Probably a SyntaxError in the test. + return None + if write: + _make_rewritten_pyc(state, source_stat, pyc, co) + else: + state.trace("found cached rewritten pyc for %r" % (fn,)) + self.modules[name] = co, pyc + return self + + def _should_rewrite(self, name, fn_pypath, state): + # always rewrite conftest files + fn = str(fn_pypath) + if fn_pypath.basename == 'conftest.py': + state.trace("rewriting conftest file: %r" % (fn,)) + return True + + if self.session is not None: + if self.session.isinitpath(fn): + state.trace("matched test file (was specified on cmdline): %r" % + (fn,)) + return True + + # modules not passed explicitly on the command line are only + # rewritten if they match the naming convention for test files + for pat in self.fnpats: + # use fnmatch instead of fn_pypath.fnmatch because the + # latter might trigger an import to fnmatch.fnmatch + # internally, which would cause this method to be + # called recursively + if fnmatch(fn_pypath.basename, pat): + state.trace("matched test file %r" % (fn,)) + return True + + for marked in self._must_rewrite: + if name.startswith(marked): + state.trace("matched marked file %r (from %r)" % (name, marked)) + return True + + return False + + def mark_rewrite(self, *names): + """Mark import names as needing to be re-written. + + The named module or package as well as any nested modules will + be re-written on import. 
+ """ + already_imported = set(names).intersection(set(sys.modules)) + if already_imported: + for name in already_imported: + if name not in self._rewritten_names: + self._warn_already_imported(name) + self._must_rewrite.update(names) + + def _warn_already_imported(self, name): + self.config.warn( + 'P1', + 'Module already imported so can not be re-written: %s' % name) + + def load_module(self, name): + # If there is an existing module object named 'fullname' in + # sys.modules, the loader must use that existing module. (Otherwise, + # the reload() builtin will not work correctly.) + if name in sys.modules: + return sys.modules[name] + + co, pyc = self.modules.pop(name) + # I wish I could just call imp.load_compiled here, but __file__ has to + # be set properly. In Python 3.2+, this all would be handled correctly + # by load_compiled. + mod = sys.modules[name] = imp.new_module(name) + try: + mod.__file__ = co.co_filename + # Normally, this attribute is 3.2+. + mod.__cached__ = pyc + mod.__loader__ = self + py.builtin.exec_(co, mod.__dict__) + except: + del sys.modules[name] + raise + return sys.modules[name] + + + + def is_package(self, name): + try: + fd, fn, desc = imp.find_module(name) + except ImportError: + return False + if fd is not None: + fd.close() + tp = desc[2] + return tp == imp.PKG_DIRECTORY + + @classmethod + def _register_with_pkg_resources(cls): + """ + Ensure package resources can be loaded from this loader. May be called + multiple times, as the operation is idempotent. + """ + try: + import pkg_resources + # access an attribute in case a deferred importer is present + pkg_resources.__name__ + except ImportError: + return + + # Since pytest tests are always located in the file system, the + # DefaultProvider is appropriate. + pkg_resources.register_loader_type(cls, pkg_resources.DefaultProvider) + + def get_data(self, pathname): + """Optional PEP302 get_data API. + """ + with open(pathname, 'rb') as f: + return f.read() + + +def _write_pyc(state, co, source_stat, pyc): + # Technically, we don't have to have the same pyc format as + # (C)Python, since these "pycs" should never be seen by builtin + # import. However, there's little reason deviate, and I hope + # sometime to be able to use imp.load_compiled to load them. (See + # the comment in load_module above.) + try: + fp = open(pyc, "wb") + except IOError: + err = sys.exc_info()[1].errno + state.trace("error writing pyc file at %s: errno=%s" %(pyc, err)) + # we ignore any failure to write the cache file + # there are many reasons, permission-denied, __pycache__ being a + # file etc. 
+ return False + try: + fp.write(imp.get_magic()) + mtime = int(source_stat.mtime) + size = source_stat.size & 0xFFFFFFFF + fp.write(struct.pack("<ll", mtime, size)) + marshal.dump(co, fp) + finally: + fp.close() + return True + + +unary_map = { + ast.Not: "not %s", + ast.Invert: "~%s", + ast.USub: "-%s", + ast.UAdd: "+%s" +} + +binop_map = { + ast.BitOr: "|", + ast.BitXor: "^", + ast.BitAnd: "&", + ast.LShift: "<<", + ast.RShift: ">>", + ast.Add: "+", + ast.Sub: "-", + ast.Mult: "*", + ast.Div: "/", + ast.FloorDiv: "//", + ast.Mod: "%%", # escaped for string formatting + ast.Eq: "==", + ast.NotEq: "!=", + ast.Lt: "<", + ast.LtE: "<=", + ast.Gt: ">", + ast.GtE: ">=", + ast.Pow: "**", + ast.Is: "is", + ast.IsNot: "is not", + ast.In: "in", + ast.NotIn: "not in" +} +# Python 3.5+ compatibility +try: + binop_map[ast.MatMult] = "@" +except AttributeError: + pass + +# Python 3.4+ compatibility +if hasattr(ast, "NameConstant"): + _NameConstant = ast.NameConstant +else: + def _NameConstant(c): + return ast.Name(str(c), ast.Load()) + + +def set_location(node, lineno, col_offset): + """Set node location information recursively.""" + def _fix(node, lineno, col_offset): + if "lineno" in node._attributes: + node.lineno = lineno + if "col_offset" in node._attributes: + node.col_offset = col_offset + for child in ast.iter_child_nodes(node): + _fix(child, lineno, col_offset) + _fix(node, lineno, col_offset) + return node + + +class AssertionRewriter(ast.NodeVisitor): + """Assertion rewriting implementation. + + The main entrypoint is to call .run() with an ast.Module instance, + this will then find all the assert statements and re-write them to + provide intermediate values and a detailed assertion error. See + http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html + for an overview of how this works. + + The entry point here is .run() which will iterate over all the + statements in an ast.Module and for each ast.Assert statement it + finds call .visit() with it. Then .visit_Assert() takes over and + is responsible for creating new ast statements to replace the + original assert statement: it re-writes the test of an assertion + to provide intermediate values and replace it with an if statement + which raises an assertion error with a detailed explanation in + case the expression is false. + + For this .visit_Assert() uses the visitor pattern to visit all the + AST nodes of the ast.Assert.test field, each visit call returning + an AST node and the corresponding explanation string. During this + state is kept in several instance attributes: + + :statements: All the AST statements which will replace the assert + statement. + + :variables: This is populated by .variable() with each variable + used by the statements so that they can all be set to None at + the end of the statements. + + :variable_counter: Counter to create new unique variables needed + by statements. Variables are created using .variable() and + have the form of "@py_assert0". + + :on_failure: The AST statements which will be executed if the + assertion test fails. This is the code which will construct + the failure message and raises the AssertionError. + + :explanation_specifiers: A dict filled by .explanation_param() + with %-formatting placeholders and their corresponding + expressions to use in the building of an assertion message. + This is used by .pop_format_context() to build a message. + + :stack: A stack of the explanation_specifiers dicts maintained by + .push_format_context() and .pop_format_context() which allows + to build another %-formatted string while already building one. + + This state is reset on every new assert statement visited and used + by the other visitors.
+ + """ + + def __init__(self, module_path, config): + super(AssertionRewriter, self).__init__() + self.module_path = module_path + self.config = config + + def run(self, mod): + """Find all assert statements in *mod* and rewrite them.""" + if not mod.body: + # Nothing to do. + return + # Insert some special imports at the top of the module but after any + # docstrings and __future__ imports. + aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"), + ast.alias("_pytest.assertion.rewrite", "@pytest_ar")] + expect_docstring = True + pos = 0 + lineno = 0 + for item in mod.body: + if (expect_docstring and isinstance(item, ast.Expr) and + isinstance(item.value, ast.Str)): + doc = item.value.s + if "PYTEST_DONT_REWRITE" in doc: + # The module has disabled assertion rewriting. + return + lineno += len(doc) - 1 + expect_docstring = False + elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or + item.module != "__future__"): + lineno = item.lineno + break + pos += 1 + imports = [ast.Import([alias], lineno=lineno, col_offset=0) + for alias in aliases] + mod.body[pos:pos] = imports + # Collect asserts. + nodes = [mod] + while nodes: + node = nodes.pop() + for name, field in ast.iter_fields(node): + if isinstance(field, list): + new = [] + for i, child in enumerate(field): + if isinstance(child, ast.Assert): + # Transform assert. + new.extend(self.visit(child)) + else: + new.append(child) + if isinstance(child, ast.AST): + nodes.append(child) + setattr(node, name, new) + elif (isinstance(field, ast.AST) and + # Don't recurse into expressions as they can't contain + # asserts. + not isinstance(field, ast.expr)): + nodes.append(field) + + def variable(self): + """Get a new variable.""" + # Use a character invalid in python identifiers to avoid clashing. + name = "@py_assert" + str(next(self.variable_counter)) + self.variables.append(name) + return name + + def assign(self, expr): + """Give *expr* a name.""" + name = self.variable() + self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr)) + return ast.Name(name, ast.Load()) + + def display(self, expr): + """Call py.io.saferepr on the expression.""" + return self.helper("saferepr", expr) + + def helper(self, name, *args): + """Call a helper in this module.""" + py_name = ast.Name("@pytest_ar", ast.Load()) + attr = ast.Attribute(py_name, "_" + name, ast.Load()) + return ast_Call(attr, list(args), []) + + def builtin(self, name): + """Return the builtin called *name*.""" + builtin_name = ast.Name("@py_builtins", ast.Load()) + return ast.Attribute(builtin_name, name, ast.Load()) + + def explanation_param(self, expr): + """Return a new named %-formatting placeholder for expr. + + This creates a %-formatting placeholder for expr in the + current formatting context, e.g. ``%(py0)s``. The placeholder + and expr are placed in the current format context so that it + can be used on the next call to .pop_format_context(). + + """ + specifier = "py" + str(next(self.variable_counter)) + self.explanation_specifiers[specifier] = expr + return "%(" + specifier + ")s" + + def push_format_context(self): + """Create a new formatting context. + + The format context is used for when an explanation wants to + have a variable value formatted in the assertion message. In + this case the value required can be added using + .explanation_param(). Finally .pop_format_context() is used + to format a string of %-formatted values as added by + .explanation_param(). 
+ + """ + self.explanation_specifiers = {} + self.stack.append(self.explanation_specifiers) + + def pop_format_context(self, expl_expr): + """Format the %-formatted string with current format context. + + The expl_expr should be an ast.Str instance constructed from + the %-placeholders created by .explanation_param(). This will + add the required code to format said string to .on_failure and + return the ast.Name instance of the formatted string. + + """ + current = self.stack.pop() + if self.stack: + self.explanation_specifiers = self.stack[-1] + keys = [ast.Str(key) for key in current.keys()] + format_dict = ast.Dict(keys, list(current.values())) + form = ast.BinOp(expl_expr, ast.Mod(), format_dict) + name = "@py_format" + str(next(self.variable_counter)) + self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form)) + return ast.Name(name, ast.Load()) + + def generic_visit(self, node): + """Handle expressions we don't have custom code for.""" + assert isinstance(node, ast.expr) + res = self.assign(node) + return res, self.explanation_param(self.display(res)) + + def visit_Assert(self, assert_): + """Return the AST statements to replace the ast.Assert instance. + + This re-writes the test of an assertion to provide + intermediate values and replace it with an if statement which + raises an assertion error with a detailed explanation in case + the expression is false. + + """ + if isinstance(assert_.test, ast.Tuple) and self.config is not None: + fslocation = (self.module_path, assert_.lineno) + self.config.warn('R1', 'assertion is always true, perhaps ' + 'remove parentheses?', fslocation=fslocation) + self.statements = [] + self.variables = [] + self.variable_counter = itertools.count() + self.stack = [] + self.on_failure = [] + self.push_format_context() + # Rewrite assert into a bunch of statements. + top_condition, explanation = self.visit(assert_.test) + # Create failure message. + body = self.on_failure + negation = ast.UnaryOp(ast.Not(), top_condition) + self.statements.append(ast.If(negation, body, [])) + if assert_.msg: + assertmsg = self.helper('format_assertmsg', assert_.msg) + explanation = "\n>assert " + explanation + else: + assertmsg = ast.Str("") + explanation = "assert " + explanation + template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation)) + msg = self.pop_format_context(template) + fmt = self.helper("format_explanation", msg) + err_name = ast.Name("AssertionError", ast.Load()) + exc = ast_Call(err_name, [fmt], []) + if sys.version_info[0] >= 3: + raise_ = ast.Raise(exc, None) + else: + raise_ = ast.Raise(exc, None, None) + body.append(raise_) + # Clear temporary variables by setting them to None. + if self.variables: + variables = [ast.Name(name, ast.Store()) + for name in self.variables] + clear = ast.Assign(variables, _NameConstant(None)) + self.statements.append(clear) + # Fix line numbers. + for stmt in self.statements: + set_location(stmt, assert_.lineno, assert_.col_offset) + return self.statements + + def visit_Name(self, name): + # Display the repr of the name if it's a local variable or + # _should_repr_global_name() thinks it's acceptable. 
+ locs = ast_Call(self.builtin("locals"), [], []) + inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs]) + dorepr = self.helper("should_repr_global_name", name) + test = ast.BoolOp(ast.Or(), [inlocs, dorepr]) + expr = ast.IfExp(test, self.display(name), ast.Str(name.id)) + return name, self.explanation_param(expr) + + def visit_BoolOp(self, boolop): + res_var = self.variable() + expl_list = self.assign(ast.List([], ast.Load())) + app = ast.Attribute(expl_list, "append", ast.Load()) + is_or = int(isinstance(boolop.op, ast.Or)) + body = save = self.statements + fail_save = self.on_failure + levels = len(boolop.values) - 1 + self.push_format_context() + # Process each operand, short-circuting if needed. + for i, v in enumerate(boolop.values): + if i: + fail_inner = [] + # cond is set in a prior loop iteration below + self.on_failure.append(ast.If(cond, fail_inner, [])) # noqa + self.on_failure = fail_inner + self.push_format_context() + res, expl = self.visit(v) + body.append(ast.Assign([ast.Name(res_var, ast.Store())], res)) + expl_format = self.pop_format_context(ast.Str(expl)) + call = ast_Call(app, [expl_format], []) + self.on_failure.append(ast.Expr(call)) + if i < levels: + cond = res + if is_or: + cond = ast.UnaryOp(ast.Not(), cond) + inner = [] + self.statements.append(ast.If(cond, inner, [])) + self.statements = body = inner + self.statements = save + self.on_failure = fail_save + expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or)) + expl = self.pop_format_context(expl_template) + return ast.Name(res_var, ast.Load()), self.explanation_param(expl) + + def visit_UnaryOp(self, unary): + pattern = unary_map[unary.op.__class__] + operand_res, operand_expl = self.visit(unary.operand) + res = self.assign(ast.UnaryOp(unary.op, operand_res)) + return res, pattern % (operand_expl,) + + def visit_BinOp(self, binop): + symbol = binop_map[binop.op.__class__] + left_expr, left_expl = self.visit(binop.left) + right_expr, right_expl = self.visit(binop.right) + explanation = "(%s %s %s)" % (left_expl, symbol, right_expl) + res = self.assign(ast.BinOp(left_expr, binop.op, right_expr)) + return res, explanation + + def visit_Call_35(self, call): + """ + visit `ast.Call` nodes on Python3.5 and after + """ + new_func, func_expl = self.visit(call.func) + arg_expls = [] + new_args = [] + new_kwargs = [] + for arg in call.args: + res, expl = self.visit(arg) + arg_expls.append(expl) + new_args.append(res) + for keyword in call.keywords: + res, expl = self.visit(keyword.value) + new_kwargs.append(ast.keyword(keyword.arg, res)) + if keyword.arg: + arg_expls.append(keyword.arg + "=" + expl) + else: ## **args have `arg` keywords with an .arg of None + arg_expls.append("**" + expl) + + expl = "%s(%s)" % (func_expl, ', '.join(arg_expls)) + new_call = ast.Call(new_func, new_args, new_kwargs) + res = self.assign(new_call) + res_expl = self.explanation_param(self.display(res)) + outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl) + return res, outer_expl + + def visit_Starred(self, starred): + # From Python 3.5, a Starred node can appear in a function call + res, expl = self.visit(starred.value) + return starred, '*' + expl + + def visit_Call_legacy(self, call): + """ + visit `ast.Call nodes on 3.4 and below` + """ + new_func, func_expl = self.visit(call.func) + arg_expls = [] + new_args = [] + new_kwargs = [] + new_star = new_kwarg = None + for arg in call.args: + res, expl = self.visit(arg) + new_args.append(res) + arg_expls.append(expl) + for keyword in call.keywords: + res, expl 
= self.visit(keyword.value) + new_kwargs.append(ast.keyword(keyword.arg, res)) + arg_expls.append(keyword.arg + "=" + expl) + if call.starargs: + new_star, expl = self.visit(call.starargs) + arg_expls.append("*" + expl) + if call.kwargs: + new_kwarg, expl = self.visit(call.kwargs) + arg_expls.append("**" + expl) + expl = "%s(%s)" % (func_expl, ', '.join(arg_expls)) + new_call = ast.Call(new_func, new_args, new_kwargs, + new_star, new_kwarg) + res = self.assign(new_call) + res_expl = self.explanation_param(self.display(res)) + outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl) + return res, outer_expl + + # ast.Call signature changed on 3.5, + # conditionally change which methods is named + # visit_Call depending on Python version + if sys.version_info >= (3, 5): + visit_Call = visit_Call_35 + else: + visit_Call = visit_Call_legacy + + + def visit_Attribute(self, attr): + if not isinstance(attr.ctx, ast.Load): + return self.generic_visit(attr) + value, value_expl = self.visit(attr.value) + res = self.assign(ast.Attribute(value, attr.attr, ast.Load())) + res_expl = self.explanation_param(self.display(res)) + pat = "%s\n{%s = %s.%s\n}" + expl = pat % (res_expl, res_expl, value_expl, attr.attr) + return res, expl + + def visit_Compare(self, comp): + self.push_format_context() + left_res, left_expl = self.visit(comp.left) + if isinstance(comp.left, (_ast.Compare, _ast.BoolOp)): + left_expl = "({0})".format(left_expl) + res_variables = [self.variable() for i in range(len(comp.ops))] + load_names = [ast.Name(v, ast.Load()) for v in res_variables] + store_names = [ast.Name(v, ast.Store()) for v in res_variables] + it = zip(range(len(comp.ops)), comp.ops, comp.comparators) + expls = [] + syms = [] + results = [left_res] + for i, op, next_operand in it: + next_res, next_expl = self.visit(next_operand) + if isinstance(next_operand, (_ast.Compare, _ast.BoolOp)): + next_expl = "({0})".format(next_expl) + results.append(next_res) + sym = binop_map[op.__class__] + syms.append(ast.Str(sym)) + expl = "%s %s %s" % (left_expl, sym, next_expl) + expls.append(ast.Str(expl)) + res_expr = ast.Compare(left_res, [op], [next_res]) + self.statements.append(ast.Assign([store_names[i]], res_expr)) + left_res, left_expl = next_res, next_expl + # Use pytest.assertion.util._reprcompare if that's available. + expl_call = self.helper("call_reprcompare", + ast.Tuple(syms, ast.Load()), + ast.Tuple(load_names, ast.Load()), + ast.Tuple(expls, ast.Load()), + ast.Tuple(results, ast.Load())) + if len(comp.ops) > 1: + res = ast.BoolOp(ast.And(), load_names) + else: + res = load_names[0] + return res, self.explanation_param(self.pop_format_context(expl_call)) diff --git a/tests/_lib_vendors/_pytest/assertion/util.py b/tests/_lib_vendors/_pytest/assertion/util.py new file mode 100644 index 0000000..4a0a4e4 --- /dev/null +++ b/tests/_lib_vendors/_pytest/assertion/util.py @@ -0,0 +1,300 @@ +"""Utilities for assertion debugging""" +import pprint + +import _pytest._code +import py +try: + from collections import Sequence +except ImportError: + Sequence = list + +BuiltinAssertionError = py.builtin.builtins.AssertionError +u = py.builtin._totext + +# The _reprcompare attribute on the util module is used by the new assertion +# interpretation code and assertion rewriter to detect this plugin was +# loaded and in turn call the hooks defined here as part of the +# DebugInterpreter. 
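# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the vendored diff): the shape
# visit_Compare above gives a chained comparison. Every pairwise result is
# computed and stored so it can be reported, which (unlike plain Python
# chaining) means later pairs are evaluated even when an earlier pair is
# already False.
def rewritten_chain(a, b, c):
    py_assert0 = a == b   # first pair
    py_assert1 = b == c   # second pair reuses the middle operand
    return py_assert0 and py_assert1

assert rewritten_chain(1, 1, 1)
assert not rewritten_chain(1, 1, 2)
# ---------------------------------------------------------------------------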
+_reprcompare = None + + +# the re-encoding is needed for python2 repr +# with non-ascii characters (see issue 877 and 1379) +def ecu(s): + try: + return u(s, 'utf-8', 'replace') + except TypeError: + return s + + +def format_explanation(explanation): + """This formats an explanation + + Normally all embedded newlines are escaped, however there are + three exceptions: \n{, \n} and \n~. The first two are intended + cover nested explanations, see function and attribute explanations + for examples (.visit_Call(), visit_Attribute()). The last one is + for when one explanation needs to span multiple lines, e.g. when + displaying diffs. + """ + explanation = ecu(explanation) + lines = _split_explanation(explanation) + result = _format_lines(lines) + return u('\n').join(result) + + +def _split_explanation(explanation): + """Return a list of individual lines in the explanation + + This will return a list of lines split on '\n{', '\n}' and '\n~'. + Any other newlines will be escaped and appear in the line as the + literal '\n' characters. + """ + raw_lines = (explanation or u('')).split('\n') + lines = [raw_lines[0]] + for l in raw_lines[1:]: + if l and l[0] in ['{', '}', '~', '>']: + lines.append(l) + else: + lines[-1] += '\\n' + l + return lines + + +def _format_lines(lines): + """Format the individual lines + + This will replace the '{', '}' and '~' characters of our mini + formatting language with the proper 'where ...', 'and ...' and ' + + ...' text, taking care of indentation along the way. + + Return a list of formatted lines. + """ + result = lines[:1] + stack = [0] + stackcnt = [0] + for line in lines[1:]: + if line.startswith('{'): + if stackcnt[-1]: + s = u('and ') + else: + s = u('where ') + stack.append(len(result)) + stackcnt[-1] += 1 + stackcnt.append(0) + result.append(u(' +') + u(' ')*(len(stack)-1) + s + line[1:]) + elif line.startswith('}'): + stack.pop() + stackcnt.pop() + result[stack[-1]] += line[1:] + else: + assert line[0] in ['~', '>'] + stack[-1] += 1 + indent = len(stack) if line.startswith('~') else len(stack) - 1 + result.append(u(' ')*indent + line[1:]) + assert len(stack) == 1 + return result + + +# Provide basestring in python3 +try: + basestring = basestring +except NameError: + basestring = str + + +def assertrepr_compare(config, op, left, right): + """Return specialised explanations for some operators/operands""" + width = 80 - 15 - len(op) - 2 # 15 chars indentation, 1 space around op + left_repr = py.io.saferepr(left, maxsize=int(width//2)) + right_repr = py.io.saferepr(right, maxsize=width-len(left_repr)) + + summary = u('%s %s %s') % (ecu(left_repr), op, ecu(right_repr)) + + issequence = lambda x: (isinstance(x, (list, tuple, Sequence)) and + not isinstance(x, basestring)) + istext = lambda x: isinstance(x, basestring) + isdict = lambda x: isinstance(x, dict) + isset = lambda x: isinstance(x, (set, frozenset)) + + def isiterable(obj): + try: + iter(obj) + return not istext(obj) + except TypeError: + return False + + verbose = config.getoption('verbose') + explanation = None + try: + if op == '==': + if istext(left) and istext(right): + explanation = _diff_text(left, right, verbose) + else: + if issequence(left) and issequence(right): + explanation = _compare_eq_sequence(left, right, verbose) + elif isset(left) and isset(right): + explanation = _compare_eq_set(left, right, verbose) + elif isdict(left) and isdict(right): + explanation = _compare_eq_dict(left, right, verbose) + if isiterable(left) and isiterable(right): + expl = _compare_eq_iterable(left, right, 
verbose) + if explanation is not None: + explanation.extend(expl) + else: + explanation = expl + elif op == 'not in': + if istext(left) and istext(right): + explanation = _notin_text(left, right, verbose) + except Exception: + explanation = [ + u('(pytest_assertion plugin: representation of details failed. ' + 'Probably an object has a faulty __repr__.)'), + u(_pytest._code.ExceptionInfo())] + + if not explanation: + return None + + return [summary] + explanation + + +def _diff_text(left, right, verbose=False): + """Return the explanation for the diff between text or bytes + + Unless --verbose is used this will skip leading and trailing + characters which are identical to keep the diff minimal. + + If the input are bytes they will be safely converted to text. + """ + from difflib import ndiff + explanation = [] + if isinstance(left, py.builtin.bytes): + left = u(repr(left)[1:-1]).replace(r'\n', '\n') + if isinstance(right, py.builtin.bytes): + right = u(repr(right)[1:-1]).replace(r'\n', '\n') + if not verbose: + i = 0 # just in case left or right has zero length + for i in range(min(len(left), len(right))): + if left[i] != right[i]: + break + if i > 42: + i -= 10 # Provide some context + explanation = [u('Skipping %s identical leading ' + 'characters in diff, use -v to show') % i] + left = left[i:] + right = right[i:] + if len(left) == len(right): + for i in range(len(left)): + if left[-i] != right[-i]: + break + if i > 42: + i -= 10 # Provide some context + explanation += [u('Skipping %s identical trailing ' + 'characters in diff, use -v to show') % i] + left = left[:-i] + right = right[:-i] + keepends = True + explanation += [line.strip('\n') + for line in ndiff(left.splitlines(keepends), + right.splitlines(keepends))] + return explanation + + +def _compare_eq_iterable(left, right, verbose=False): + if not verbose: + return [u('Use -v to get the full diff')] + # dynamic import to speedup pytest + import difflib + + try: + left_formatting = pprint.pformat(left).splitlines() + right_formatting = pprint.pformat(right).splitlines() + explanation = [u('Full diff:')] + except Exception: + # hack: PrettyPrinter.pformat() in python 2 fails when formatting items that can't be sorted(), ie, calling + # sorted() on a list would raise. See issue #718. + # As a workaround, the full diff is generated by using the repr() string of each item of each container. 
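# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the vendored diff): the explanation list
# assertrepr_compare() above builds for a failing sequence comparison.
# DummyConfig is a hypothetical stand-in for the real pytest config object;
# only getoption('verbose') is consulted on this code path.
from _pytest.assertion.util import assertrepr_compare

class DummyConfig:
    def getoption(self, name):
        return 0   # verbose off

expl = assertrepr_compare(DummyConfig(), '==', [1, 2, 3], [1, 2, 4])
# Roughly: ['[1, 2, 3] == [1, 2, 4]',
#           'At index 2 diff: 3 != 4',
#           'Use -v to get the full diff']
print('\n'.join(expl))
# ---------------------------------------------------------------------------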
+ left_formatting = sorted(repr(x) for x in left) + right_formatting = sorted(repr(x) for x in right) + explanation = [u('Full diff (fallback to calling repr on each item):')] + explanation.extend(line.strip() for line in difflib.ndiff(left_formatting, right_formatting)) + return explanation + + +def _compare_eq_sequence(left, right, verbose=False): + explanation = [] + for i in range(min(len(left), len(right))): + if left[i] != right[i]: + explanation += [u('At index %s diff: %r != %r') + % (i, left[i], right[i])] + break + if len(left) > len(right): + explanation += [u('Left contains more items, first extra item: %s') + % py.io.saferepr(left[len(right)],)] + elif len(left) < len(right): + explanation += [ + u('Right contains more items, first extra item: %s') % + py.io.saferepr(right[len(left)],)] + return explanation + + +def _compare_eq_set(left, right, verbose=False): + explanation = [] + diff_left = left - right + diff_right = right - left + if diff_left: + explanation.append(u('Extra items in the left set:')) + for item in diff_left: + explanation.append(py.io.saferepr(item)) + if diff_right: + explanation.append(u('Extra items in the right set:')) + for item in diff_right: + explanation.append(py.io.saferepr(item)) + return explanation + + +def _compare_eq_dict(left, right, verbose=False): + explanation = [] + common = set(left).intersection(set(right)) + same = dict((k, left[k]) for k in common if left[k] == right[k]) + if same and not verbose: + explanation += [u('Omitting %s identical items, use -v to show') % + len(same)] + elif same: + explanation += [u('Common items:')] + explanation += pprint.pformat(same).splitlines() + diff = set(k for k in common if left[k] != right[k]) + if diff: + explanation += [u('Differing items:')] + for k in diff: + explanation += [py.io.saferepr({k: left[k]}) + ' != ' + + py.io.saferepr({k: right[k]})] + extra_left = set(left) - set(right) + if extra_left: + explanation.append(u('Left contains more items:')) + explanation.extend(pprint.pformat( + dict((k, left[k]) for k in extra_left)).splitlines()) + extra_right = set(right) - set(left) + if extra_right: + explanation.append(u('Right contains more items:')) + explanation.extend(pprint.pformat( + dict((k, right[k]) for k in extra_right)).splitlines()) + return explanation + + +def _notin_text(term, text, verbose=False): + index = text.find(term) + head = text[:index] + tail = text[index+len(term):] + correct_text = head + tail + diff = _diff_text(correct_text, text, verbose) + newdiff = [u('%s is contained here:') % py.io.saferepr(term, maxsize=42)] + for line in diff: + if line.startswith(u('Skipping')): + continue + if line.startswith(u('- ')): + continue + if line.startswith(u('+ ')): + newdiff.append(u(' ') + line[2:]) + else: + newdiff.append(line) + return newdiff diff --git a/tests/_lib_vendors/_pytest/cacheprovider.py b/tests/_lib_vendors/_pytest/cacheprovider.py new file mode 100644 index 0000000..0657001 --- /dev/null +++ b/tests/_lib_vendors/_pytest/cacheprovider.py @@ -0,0 +1,245 @@ +""" +merged implementation of the cache provider + +the name cache was not choosen to ensure pluggy automatically +ignores the external pytest-cache +""" + +import py +import pytest +import json +from os.path import sep as _sep, altsep as _altsep + + +class Cache(object): + def __init__(self, config): + self.config = config + self._cachedir = config.rootdir.join(".cache") + self.trace = config.trace.root.get("cache") + if config.getvalue("cacheclear"): + self.trace("clearing cachedir") + if 
self._cachedir.check(): + self._cachedir.remove() + self._cachedir.mkdir() + + def makedir(self, name): + """ return a directory path object with the given name. If the + directory does not yet exist, it will be created. You can use it + to manage files likes e. g. store/retrieve database + dumps across test sessions. + + :param name: must be a string not containing a ``/`` separator. + Make sure the name contains your plugin or application + identifiers to prevent clashes with other cache users. + """ + if _sep in name or _altsep is not None and _altsep in name: + raise ValueError("name is not allowed to contain path separators") + return self._cachedir.ensure_dir("d", name) + + def _getvaluepath(self, key): + return self._cachedir.join('v', *key.split('/')) + + def get(self, key, default): + """ return cached value for the given key. If no value + was yet cached or the value cannot be read, the specified + default is returned. + + :param key: must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param default: must be provided in case of a cache-miss or + invalid cache values. + + """ + path = self._getvaluepath(key) + if path.check(): + try: + with path.open("r") as f: + return json.load(f) + except ValueError: + self.trace("cache-invalid at %s" % (path,)) + return default + + def set(self, key, value): + """ save value for the given key. + + :param key: must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param value: must be of any combination of basic + python types, including nested types + like e. g. lists of dictionaries. + """ + path = self._getvaluepath(key) + try: + path.dirpath().ensure_dir() + except (py.error.EEXIST, py.error.EACCES): + self.config.warn( + code='I9', message='could not create cache path %s' % (path,) + ) + return + try: + f = path.open('w') + except py.error.ENOTDIR: + self.config.warn( + code='I9', message='cache could not write path %s' % (path,)) + else: + with f: + self.trace("cache-write %s: %r" % (key, value,)) + json.dump(value, f, indent=2, sort_keys=True) + + +class LFPlugin: + """ Plugin which implements the --lf (run last-failing) option """ + def __init__(self, config): + self.config = config + active_keys = 'lf', 'failedfirst' + self.active = any(config.getvalue(key) for key in active_keys) + if self.active: + self.lastfailed = config.cache.get("cache/lastfailed", {}) + else: + self.lastfailed = {} + + def pytest_report_header(self): + if self.active: + if not self.lastfailed: + mode = "run all (no recorded failures)" + else: + mode = "rerun last %d failures%s" % ( + len(self.lastfailed), + " first" if self.config.getvalue("failedfirst") else "") + return "run-last-failure: %s" % mode + + def pytest_runtest_logreport(self, report): + if report.failed and "xfail" not in report.keywords: + self.lastfailed[report.nodeid] = True + elif not report.failed: + if report.when == "call": + self.lastfailed.pop(report.nodeid, None) + + def pytest_collectreport(self, report): + passed = report.outcome in ('passed', 'skipped') + if passed: + if report.nodeid in self.lastfailed: + self.lastfailed.pop(report.nodeid) + self.lastfailed.update( + (item.nodeid, True) + for item in report.result) + else: + self.lastfailed[report.nodeid] = True + + def pytest_collection_modifyitems(self, session, config, items): + if self.active and self.lastfailed: + previously_failed = [] + previously_passed = [] + for item in items: + if item.nodeid in self.lastfailed: + 
previously_failed.append(item) + else: + previously_passed.append(item) + if not previously_failed and previously_passed: + # running a subset of all tests with recorded failures outside + # of the set of tests currently executing + pass + elif self.config.getvalue("failedfirst"): + items[:] = previously_failed + previously_passed + else: + items[:] = previously_failed + config.hook.pytest_deselected(items=previously_passed) + + def pytest_sessionfinish(self, session): + config = self.config + if config.getvalue("cacheshow") or hasattr(config, "slaveinput"): + return + prev_failed = config.cache.get("cache/lastfailed", None) is not None + if (session.testscollected and prev_failed) or self.lastfailed: + config.cache.set("cache/lastfailed", self.lastfailed) + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group.addoption( + '--lf', '--last-failed', action='store_true', dest="lf", + help="rerun only the tests that failed " + "at the last run (or all if none failed)") + group.addoption( + '--ff', '--failed-first', action='store_true', dest="failedfirst", + help="run all tests but run the last failures first. " + "This may re-order tests and thus lead to " + "repeated fixture setup/teardown") + group.addoption( + '--cache-show', action='store_true', dest="cacheshow", + help="show cache contents, don't perform collection or tests") + group.addoption( + '--cache-clear', action='store_true', dest="cacheclear", + help="remove all cache contents at start of test run.") + + +def pytest_cmdline_main(config): + if config.option.cacheshow: + from _pytest.main import wrap_session + return wrap_session(config, cacheshow) + + + +@pytest.hookimpl(tryfirst=True) +def pytest_configure(config): + config.cache = Cache(config) + config.pluginmanager.register(LFPlugin(config), "lfplugin") + + +@pytest.fixture +def cache(request): + """ + Return a cache object that can persist state between testing sessions. + + cache.get(key, default) + cache.set(key, value) + + Keys must be a ``/`` separated value, where the first part is usually the + name of your plugin or application to avoid clashes with other cache users. + + Values can be any object handled by the json stdlib module. 
+ """ + return request.config.cache + + +def pytest_report_header(config): + if config.option.verbose: + relpath = py.path.local().bestrelpath(config.cache._cachedir) + return "cachedir: %s" % relpath + + +def cacheshow(config, session): + from pprint import pprint + tw = py.io.TerminalWriter() + tw.line("cachedir: " + str(config.cache._cachedir)) + if not config.cache._cachedir.check(): + tw.line("cache is empty") + return 0 + dummy = object() + basedir = config.cache._cachedir + vdir = basedir.join("v") + tw.sep("-", "cache values") + for valpath in vdir.visit(lambda x: x.isfile()): + key = valpath.relto(vdir).replace(valpath.sep, "/") + val = config.cache.get(key, dummy) + if val is dummy: + tw.line("%s contains unreadable content, " + "will be ignored" % key) + else: + tw.line("%s contains:" % key) + stream = py.io.TextIO() + pprint(val, stream=stream) + for line in stream.getvalue().splitlines(): + tw.line(" " + line) + + ddir = basedir.join("d") + if ddir.isdir() and ddir.listdir(): + tw.sep("-", "cache directories") + for p in basedir.join("d").visit(): + #if p.check(dir=1): + # print("%s/" % p.relto(basedir)) + if p.isfile(): + key = p.relto(basedir) + tw.line("%s is a file of length %d" % ( + key, p.size())) + return 0 diff --git a/tests/_lib_vendors/_pytest/capture.py b/tests/_lib_vendors/_pytest/capture.py new file mode 100644 index 0000000..eea81ca --- /dev/null +++ b/tests/_lib_vendors/_pytest/capture.py @@ -0,0 +1,491 @@ +""" +per-test stdout/stderr capturing mechanism. + +""" +from __future__ import with_statement + +import contextlib +import sys +import os +from tempfile import TemporaryFile + +import py +import pytest + +from py.io import TextIO +unicode = py.builtin.text + +patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'} + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group._addoption( + '--capture', action="store", + default="fd" if hasattr(os, "dup") else "sys", + metavar="method", choices=['fd', 'sys', 'no'], + help="per-test capturing method: one of fd|sys|no.") + group._addoption( + '-s', action="store_const", const="no", dest="capture", + help="shortcut for --capture=no.") + + +@pytest.hookimpl(hookwrapper=True) +def pytest_load_initial_conftests(early_config, parser, args): + _readline_workaround() + ns = early_config.known_args_namespace + pluginmanager = early_config.pluginmanager + capman = CaptureManager(ns.capture) + pluginmanager.register(capman, "capturemanager") + + # make sure that capturemanager is properly reset at final shutdown + early_config.add_cleanup(capman.reset_capturings) + + # make sure logging does not raise exceptions at the end + def silence_logging_at_shutdown(): + if "logging" in sys.modules: + sys.modules["logging"].raiseExceptions = False + early_config.add_cleanup(silence_logging_at_shutdown) + + # finally trigger conftest loading but while capturing (issue93) + capman.init_capturings() + outcome = yield + out, err = capman.suspendcapture() + if outcome.excinfo is not None: + sys.stdout.write(out) + sys.stderr.write(err) + + +class CaptureManager: + def __init__(self, method): + self._method = method + + def _getcapture(self, method): + if method == "fd": + return MultiCapture(out=True, err=True, Capture=FDCapture) + elif method == "sys": + return MultiCapture(out=True, err=True, Capture=SysCapture) + elif method == "no": + return MultiCapture(out=False, err=False, in_=False) + else: + raise ValueError("unknown capturing method: %r" % method) + + def init_capturings(self): + assert not hasattr(self, 
"_capturing") + self._capturing = self._getcapture(self._method) + self._capturing.start_capturing() + + def reset_capturings(self): + cap = self.__dict__.pop("_capturing", None) + if cap is not None: + cap.pop_outerr_to_orig() + cap.stop_capturing() + + def resumecapture(self): + self._capturing.resume_capturing() + + def suspendcapture(self, in_=False): + self.deactivate_funcargs() + cap = getattr(self, "_capturing", None) + if cap is not None: + try: + outerr = cap.readouterr() + finally: + cap.suspend_capturing(in_=in_) + return outerr + + def activate_funcargs(self, pyfuncitem): + capfuncarg = pyfuncitem.__dict__.pop("_capfuncarg", None) + if capfuncarg is not None: + capfuncarg._start() + self._capfuncarg = capfuncarg + + def deactivate_funcargs(self): + capfuncarg = self.__dict__.pop("_capfuncarg", None) + if capfuncarg is not None: + capfuncarg.close() + + @pytest.hookimpl(hookwrapper=True) + def pytest_make_collect_report(self, collector): + if isinstance(collector, pytest.File): + self.resumecapture() + outcome = yield + out, err = self.suspendcapture() + rep = outcome.get_result() + if out: + rep.sections.append(("Captured stdout", out)) + if err: + rep.sections.append(("Captured stderr", err)) + else: + yield + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_setup(self, item): + self.resumecapture() + yield + self.suspendcapture_item(item, "setup") + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_call(self, item): + self.resumecapture() + self.activate_funcargs(item) + yield + #self.deactivate_funcargs() called from suspendcapture() + self.suspendcapture_item(item, "call") + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_teardown(self, item): + self.resumecapture() + yield + self.suspendcapture_item(item, "teardown") + + @pytest.hookimpl(tryfirst=True) + def pytest_keyboard_interrupt(self, excinfo): + self.reset_capturings() + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(self, excinfo): + self.reset_capturings() + + def suspendcapture_item(self, item, when, in_=False): + out, err = self.suspendcapture(in_=in_) + item.add_report_section(when, "stdout", out) + item.add_report_section(when, "stderr", err) + + +error_capsysfderror = "cannot use capsys and capfd at the same time" + + +@pytest.fixture +def capsys(request): + """Enable capturing of writes to sys.stdout/sys.stderr and make + captured output available via ``capsys.readouterr()`` method calls + which return a ``(out, err)`` tuple. + """ + if "capfd" in request.fixturenames: + raise request.raiseerror(error_capsysfderror) + request.node._capfuncarg = c = CaptureFixture(SysCapture, request) + return c + +@pytest.fixture +def capfd(request): + """Enable capturing of writes to file descriptors 1 and 2 and make + captured output available via ``capfd.readouterr()`` method calls + which return a ``(out, err)`` tuple. 
+ """ + if "capsys" in request.fixturenames: + request.raiseerror(error_capsysfderror) + if not hasattr(os, 'dup'): + pytest.skip("capfd funcarg needs os.dup") + request.node._capfuncarg = c = CaptureFixture(FDCapture, request) + return c + + +class CaptureFixture: + def __init__(self, captureclass, request): + self.captureclass = captureclass + self.request = request + + def _start(self): + self._capture = MultiCapture(out=True, err=True, in_=False, + Capture=self.captureclass) + self._capture.start_capturing() + + def close(self): + cap = self.__dict__.pop("_capture", None) + if cap is not None: + self._outerr = cap.pop_outerr_to_orig() + cap.stop_capturing() + + def readouterr(self): + try: + return self._capture.readouterr() + except AttributeError: + return self._outerr + + @contextlib.contextmanager + def disabled(self): + capmanager = self.request.config.pluginmanager.getplugin('capturemanager') + capmanager.suspendcapture_item(self.request.node, "call", in_=True) + try: + yield + finally: + capmanager.resumecapture() + + +def safe_text_dupfile(f, mode, default_encoding="UTF8"): + """ return a open text file object that's a duplicate of f on the + FD-level if possible. + """ + encoding = getattr(f, "encoding", None) + try: + fd = f.fileno() + except Exception: + if "b" not in getattr(f, "mode", "") and hasattr(f, "encoding"): + # we seem to have a text stream, let's just use it + return f + else: + newfd = os.dup(fd) + if "b" not in mode: + mode += "b" + f = os.fdopen(newfd, mode, 0) # no buffering + return EncodedFile(f, encoding or default_encoding) + + +class EncodedFile(object): + errors = "strict" # possibly needed by py3 code (issue555) + def __init__(self, buffer, encoding): + self.buffer = buffer + self.encoding = encoding + + def write(self, obj): + if isinstance(obj, unicode): + obj = obj.encode(self.encoding, "replace") + self.buffer.write(obj) + + def writelines(self, linelist): + data = ''.join(linelist) + self.write(data) + + def __getattr__(self, name): + return getattr(object.__getattribute__(self, "buffer"), name) + + +class MultiCapture(object): + out = err = in_ = None + + def __init__(self, out=True, err=True, in_=True, Capture=None): + if in_: + self.in_ = Capture(0) + if out: + self.out = Capture(1) + if err: + self.err = Capture(2) + + def start_capturing(self): + if self.in_: + self.in_.start() + if self.out: + self.out.start() + if self.err: + self.err.start() + + def pop_outerr_to_orig(self): + """ pop current snapshot out/err capture and flush to orig streams. """ + out, err = self.readouterr() + if out: + self.out.writeorg(out) + if err: + self.err.writeorg(err) + return out, err + + def suspend_capturing(self, in_=False): + if self.out: + self.out.suspend() + if self.err: + self.err.suspend() + if in_ and self.in_: + self.in_.suspend() + self._in_suspended = True + + def resume_capturing(self): + if self.out: + self.out.resume() + if self.err: + self.err.resume() + if hasattr(self, "_in_suspended"): + self.in_.resume() + del self._in_suspended + + def stop_capturing(self): + """ stop capturing and reset capturing streams """ + if hasattr(self, '_reset'): + raise ValueError("was already stopped") + self._reset = True + if self.out: + self.out.done() + if self.err: + self.err.done() + if self.in_: + self.in_.done() + + def readouterr(self): + """ return snapshot unicode value of stdout/stderr capturings. 
""" + return (self.out.snap() if self.out is not None else "", + self.err.snap() if self.err is not None else "") + +class NoCapture: + __init__ = start = done = suspend = resume = lambda *args: None + +class FDCapture: + """ Capture IO to/from a given os-level filedescriptor. """ + + def __init__(self, targetfd, tmpfile=None): + self.targetfd = targetfd + try: + self.targetfd_save = os.dup(self.targetfd) + except OSError: + self.start = lambda: None + self.done = lambda: None + else: + if targetfd == 0: + assert not tmpfile, "cannot set tmpfile with stdin" + tmpfile = open(os.devnull, "r") + self.syscapture = SysCapture(targetfd) + else: + if tmpfile is None: + f = TemporaryFile() + with f: + tmpfile = safe_text_dupfile(f, mode="wb+") + if targetfd in patchsysdict: + self.syscapture = SysCapture(targetfd, tmpfile) + else: + self.syscapture = NoCapture() + self.tmpfile = tmpfile + self.tmpfile_fd = tmpfile.fileno() + + def __repr__(self): + return "" % (self.targetfd, self.targetfd_save) + + def start(self): + """ Start capturing on targetfd using memorized tmpfile. """ + try: + os.fstat(self.targetfd_save) + except (AttributeError, OSError): + raise ValueError("saved filedescriptor not valid anymore") + os.dup2(self.tmpfile_fd, self.targetfd) + self.syscapture.start() + + def snap(self): + f = self.tmpfile + f.seek(0) + res = f.read() + if res: + enc = getattr(f, "encoding", None) + if enc and isinstance(res, bytes): + res = py.builtin._totext(res, enc, "replace") + f.truncate(0) + f.seek(0) + return res + return '' + + def done(self): + """ stop capturing, restore streams, return original capture file, + seeked to position zero. """ + targetfd_save = self.__dict__.pop("targetfd_save") + os.dup2(targetfd_save, self.targetfd) + os.close(targetfd_save) + self.syscapture.done() + self.tmpfile.close() + + def suspend(self): + self.syscapture.suspend() + os.dup2(self.targetfd_save, self.targetfd) + + def resume(self): + self.syscapture.resume() + os.dup2(self.tmpfile_fd, self.targetfd) + + def writeorg(self, data): + """ write to original file descriptor. """ + if py.builtin._istext(data): + data = data.encode("utf8") # XXX use encoding of original stream + os.write(self.targetfd_save, data) + + +class SysCapture: + def __init__(self, fd, tmpfile=None): + name = patchsysdict[fd] + self._old = getattr(sys, name) + self.name = name + if tmpfile is None: + if name == "stdin": + tmpfile = DontReadFromInput() + else: + tmpfile = TextIO() + self.tmpfile = tmpfile + + def start(self): + setattr(sys, self.name, self.tmpfile) + + def snap(self): + f = self.tmpfile + res = f.getvalue() + f.truncate(0) + f.seek(0) + return res + + def done(self): + setattr(sys, self.name, self._old) + del self._old + self.tmpfile.close() + + def suspend(self): + setattr(sys, self.name, self._old) + + def resume(self): + setattr(sys, self.name, self.tmpfile) + + def writeorg(self, data): + self._old.write(data) + self._old.flush() + + +class DontReadFromInput: + """Temporary stub class. Ideally when stdin is accessed, the + capturing should be turned off, with possibly all data captured + so far sent to the screen. This should be configurable, though, + because in automated test runs it is better to crash than + hang indefinitely. 
+ """ + + encoding = None + + def read(self, *args): + raise IOError("reading from stdin while output is captured") + readline = read + readlines = read + __iter__ = read + + def fileno(self): + raise ValueError("redirected Stdin is pseudofile, has no fileno()") + + def isatty(self): + return False + + def close(self): + pass + + @property + def buffer(self): + if sys.version_info >= (3,0): + return self + else: + raise AttributeError('redirected stdin has no attribute buffer') + + +def _readline_workaround(): + """ + Ensure readline is imported so that it attaches to the correct stdio + handles on Windows. + + Pdb uses readline support where available--when not running from the Python + prompt, the readline module is not imported until running the pdb REPL. If + running pytest with the --pdb option this means the readline module is not + imported until after I/O capture has been started. + + This is a problem for pyreadline, which is often used to implement readline + support on Windows, as it does not attach to the correct handles for stdout + and/or stdin if they have been redirected by the FDCapture mechanism. This + workaround ensures that readline is imported before I/O capture is setup so + that it can attach to the actual stdin/out for the console. + + See https://github.com/pytest-dev/pytest/pull/1281 + """ + + if not sys.platform.startswith('win32'): + return + try: + import readline # noqa + except ImportError: + pass diff --git a/tests/_lib_vendors/_pytest/compat.py b/tests/_lib_vendors/_pytest/compat.py new file mode 100644 index 0000000..51fc3bc --- /dev/null +++ b/tests/_lib_vendors/_pytest/compat.py @@ -0,0 +1,230 @@ +""" +python version compatibility code +""" +import sys +import inspect +import types +import re +import functools + +import py + +import _pytest + + + +try: + import enum +except ImportError: # pragma: no cover + # Only available in Python 3.4+ or as a backport + enum = None + +_PY3 = sys.version_info > (3, 0) +_PY2 = not _PY3 + + +NoneType = type(None) +NOTSET = object() + +if hasattr(inspect, 'signature'): + def _format_args(func): + return str(inspect.signature(func)) +else: + def _format_args(func): + return inspect.formatargspec(*inspect.getargspec(func)) + +isfunction = inspect.isfunction +isclass = inspect.isclass +# used to work around a python2 exception info leak +exc_clear = getattr(sys, 'exc_clear', lambda: None) +# The type of re.compile objects is not exposed in Python. 
+REGEX_TYPE = type(re.compile('')) + + +def is_generator(func): + try: + return _pytest._code.getrawcode(func).co_flags & 32 # generator function + except AttributeError: # builtin functions have no bytecode + # assume them to not be generators + return False + + +def getlocation(function, curdir): + import inspect + fn = py.path.local(inspect.getfile(function)) + lineno = py.builtin._getcode(function).co_firstlineno + if fn.relto(curdir): + fn = fn.relto(curdir) + return "%s:%d" %(fn, lineno+1) + + +def num_mock_patch_args(function): + """ return number of arguments used up by mock arguments (if any) """ + patchings = getattr(function, "patchings", None) + if not patchings: + return 0 + mock = sys.modules.get("mock", sys.modules.get("unittest.mock", None)) + if mock is not None: + return len([p for p in patchings + if not p.attribute_name and p.new is mock.DEFAULT]) + return len(patchings) + + +def getfuncargnames(function, startindex=None): + # XXX merge with main.py's varnames + #assert not isclass(function) + realfunction = function + while hasattr(realfunction, "__wrapped__"): + realfunction = realfunction.__wrapped__ + if startindex is None: + startindex = inspect.ismethod(function) and 1 or 0 + if realfunction != function: + startindex += num_mock_patch_args(function) + function = realfunction + if isinstance(function, functools.partial): + argnames = inspect.getargs(_pytest._code.getrawcode(function.func))[0] + partial = function + argnames = argnames[len(partial.args):] + if partial.keywords: + for kw in partial.keywords: + argnames.remove(kw) + else: + argnames = inspect.getargs(_pytest._code.getrawcode(function))[0] + defaults = getattr(function, 'func_defaults', + getattr(function, '__defaults__', None)) or () + numdefaults = len(defaults) + if numdefaults: + return tuple(argnames[startindex:-numdefaults]) + return tuple(argnames[startindex:]) + + + +if sys.version_info[:2] == (2, 6): + def isclass(object): + """ Return true if the object is a class. Overrides inspect.isclass for + python 2.6 because it will return True for objects which always return + something on __getattr__ calls (see #1035). + Backport of https://hg.python.org/cpython/rev/35bf8f7a8edc + """ + return isinstance(object, (type, types.ClassType)) + + +if _PY3: + import codecs + + STRING_TYPES = bytes, str + + def _escape_strings(val): + """If val is pure ascii, returns it as a str(). Otherwise, escapes + bytes objects into a sequence of escaped bytes: + + b'\xc3\xb4\xc5\xd6' -> u'\\xc3\\xb4\\xc5\\xd6' + + and escapes unicode objects into a sequence of escaped unicode + ids, e.g.: + + '4\\nV\\U00043efa\\x0eMXWB\\x1e\\u3028\\u15fd\\xcd\\U0007d944' + + note: + the obvious "v.decode('unicode-escape')" will return + valid utf-8 unicode if it finds them in bytes, but we + want to return escaped bytes for any byte, even if they match + a utf-8 string. + + """ + if isinstance(val, bytes): + if val: + # source: http://goo.gl/bGsnwC + encoded_bytes, _ = codecs.escape_encode(val) + return encoded_bytes.decode('ascii') + else: + # empty bytes crashes codecs.escape_encode (#1087) + return '' + else: + return val.encode('unicode_escape').decode('ascii') +else: + STRING_TYPES = bytes, str, unicode + + def _escape_strings(val): + """In py2 bytes and str are the same type, so return if it's a bytes + object, return it unchanged if it is a full ascii string, + otherwise escape it into its binary form. + + If it's a unicode string, change the unicode characters into + unicode escapes. 
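+
+        Illustrative examples (editorial addition; Python 2 semantics)::
+
+            _escape_strings(b'ok')     # -> 'ok'      (pure ascii, unchanged)
+            _escape_strings(b'\xff')   # -> '\\xff'   (escaped binary form)
+            _escape_strings(u'\u2603') # -> '\\u2603' (unicode escapes)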
+ + """ + if isinstance(val, bytes): + try: + return val.encode('ascii') + except UnicodeDecodeError: + return val.encode('string-escape') + else: + return val.encode('unicode-escape') + + +def get_real_func(obj): + """ gets the real function object of the (possibly) wrapped object by + functools.wraps or functools.partial. + """ + while hasattr(obj, "__wrapped__"): + obj = obj.__wrapped__ + if isinstance(obj, functools.partial): + obj = obj.func + return obj + + +def getfslineno(obj): + # xxx let decorators etc specify a sane ordering + obj = get_real_func(obj) + if hasattr(obj, 'place_as'): + obj = obj.place_as + fslineno = _pytest._code.getfslineno(obj) + assert isinstance(fslineno[1], int), obj + return fslineno + + +def getimfunc(func): + try: + return func.__func__ + except AttributeError: + try: + return func.im_func + except AttributeError: + return func + + +def safe_getattr(object, name, default): + """ Like getattr but return default upon any Exception. + + Attribute access can potentially fail for 'evil' Python objects. + See issue214 + """ + try: + return getattr(object, name, default) + except Exception: + return default + + +def _is_unittest_unexpected_success_a_failure(): + """Return if the test suite should fail if a @expectedFailure unittest test PASSES. + + From https://docs.python.org/3/library/unittest.html?highlight=unittest#unittest.TestResult.wasSuccessful: + Changed in version 3.4: Returns False if there were any + unexpectedSuccesses from tests marked with the expectedFailure() decorator. + """ + return sys.version_info >= (3, 4) + + +if _PY3: + def safe_str(v): + """returns v as string""" + return str(v) +else: + def safe_str(v): + """returns v as string, converting to ascii if necessary""" + try: + return str(v) + except UnicodeError: + errors = 'replace' + return v.encode('ascii', errors) diff --git a/tests/_lib_vendors/_pytest/config.py b/tests/_lib_vendors/_pytest/config.py new file mode 100644 index 0000000..fe386ed --- /dev/null +++ b/tests/_lib_vendors/_pytest/config.py @@ -0,0 +1,1340 @@ +""" command line options, ini-file and conftest.py processing. """ +import argparse +import shlex +import traceback +import types +import warnings + +import py +# DON't import pytest here because it causes import cycle troubles +import sys, os +import _pytest._code +import _pytest.hookspec # the extension point definitions +import _pytest.assertion +from _pytest._pluggy import PluginManager, HookimplMarker, HookspecMarker +from _pytest.compat import safe_str + +hookimpl = HookimplMarker("pytest") +hookspec = HookspecMarker("pytest") + +# pytest startup +# + + +class ConftestImportFailure(Exception): + def __init__(self, path, excinfo): + Exception.__init__(self, path, excinfo) + self.path = path + self.excinfo = excinfo + + def __str__(self): + etype, evalue, etb = self.excinfo + formatted = traceback.format_tb(etb) + # The level of the tracebacks we want to print is hand crafted :( + return repr(evalue) + '\n' + ''.join(formatted[2:]) + + +def main(args=None, plugins=None): + """ return exit code, after performing an in-process test run. + + :arg args: list of command line arguments. + + :arg plugins: list of plugin objects to be auto-registered during + initialization. 
+ """ + try: + try: + config = _prepareconfig(args, plugins) + except ConftestImportFailure as e: + tw = py.io.TerminalWriter(sys.stderr) + for line in traceback.format_exception(*e.excinfo): + tw.line(line.rstrip(), red=True) + tw.line("ERROR: could not load %s\n" % (e.path), red=True) + return 4 + else: + try: + config.pluginmanager.check_pending() + return config.hook.pytest_cmdline_main(config=config) + finally: + config._ensure_unconfigure() + except UsageError as e: + for msg in e.args: + sys.stderr.write("ERROR: %s\n" %(msg,)) + return 4 + +class cmdline: # compatibility namespace + main = staticmethod(main) + + +class UsageError(Exception): + """ error in pytest usage or invocation""" + + +def filename_arg(path, optname): + """ Argparse type validator for filename arguments. + + :path: path of filename + :optname: name of the option + """ + if os.path.isdir(path): + raise UsageError("{0} must be a filename, given: {1}".format(optname, path)) + return path + + +def directory_arg(path, optname): + """Argparse type validator for directory arguments. + + :path: path of directory + :optname: name of the option + """ + if not os.path.isdir(path): + raise UsageError("{0} must be a directory, given: {1}".format(optname, path)) + return path + + +_preinit = [] + +default_plugins = ( + "mark main terminal runner python fixtures debugging unittest capture skipping " + "tmpdir monkeypatch recwarn pastebin helpconfig nose assertion " + "junitxml resultlog doctest cacheprovider freeze_support " + "setuponly setupplan").split() + +builtin_plugins = set(default_plugins) +builtin_plugins.add("pytester") + + +def _preloadplugins(): + assert not _preinit + _preinit.append(get_config()) + +def get_config(): + if _preinit: + return _preinit.pop(0) + # subsequent calls to main will create a fresh instance + pluginmanager = PytestPluginManager() + config = Config(pluginmanager) + for spec in default_plugins: + pluginmanager.import_plugin(spec) + return config + +def get_plugin_manager(): + """ + Obtain a new instance of the + :py:class:`_pytest.config.PytestPluginManager`, with default plugins + already loaded. + + This function can be used by integration with other tools, like hooking + into pytest to run tests into an IDE. 
+ """ + return get_config().pluginmanager + +def _prepareconfig(args=None, plugins=None): + warning = None + if args is None: + args = sys.argv[1:] + elif isinstance(args, py.path.local): + args = [str(args)] + elif not isinstance(args, (tuple, list)): + if not isinstance(args, str): + raise ValueError("not a string or argument list: %r" % (args,)) + args = shlex.split(args, posix=sys.platform != "win32") + from _pytest import deprecated + warning = deprecated.MAIN_STR_ARGS + config = get_config() + pluginmanager = config.pluginmanager + try: + if plugins: + for plugin in plugins: + if isinstance(plugin, py.builtin._basestring): + pluginmanager.consider_pluginarg(plugin) + else: + pluginmanager.register(plugin) + if warning: + config.warn('C1', warning) + return pluginmanager.hook.pytest_cmdline_parse( + pluginmanager=pluginmanager, args=args) + except BaseException: + config._ensure_unconfigure() + raise + + +class PytestPluginManager(PluginManager): + """ + Overwrites :py:class:`pluggy.PluginManager` to add pytest-specific + functionality: + + * loading plugins from the command line, ``PYTEST_PLUGIN`` env variable and + ``pytest_plugins`` global variables found in plugins being loaded; + * ``conftest.py`` loading during start-up; + """ + def __init__(self): + super(PytestPluginManager, self).__init__("pytest", implprefix="pytest_") + self._conftest_plugins = set() + + # state related to local conftest plugins + self._path2confmods = {} + self._conftestpath2mod = {} + self._confcutdir = None + self._noconftest = False + self._duplicatepaths = set() + + self.add_hookspecs(_pytest.hookspec) + self.register(self) + if os.environ.get('PYTEST_DEBUG'): + err = sys.stderr + encoding = getattr(err, 'encoding', 'utf8') + try: + err = py.io.dupfile(err, encoding=encoding) + except Exception: + pass + self.trace.root.setwriter(err.write) + self.enable_tracing() + + # Config._consider_importhook will set a real object if required. + self.rewrite_hook = _pytest.assertion.DummyRewriteHook() + + def addhooks(self, module_or_class): + """ + .. deprecated:: 2.8 + + Use :py:meth:`pluggy.PluginManager.add_hookspecs` instead. 
+ """ + warning = dict(code="I2", + fslocation=_pytest._code.getfslineno(sys._getframe(1)), + nodeid=None, + message="use pluginmanager.add_hookspecs instead of " + "deprecated addhooks() method.") + self._warn(warning) + return self.add_hookspecs(module_or_class) + + def parse_hookimpl_opts(self, plugin, name): + # pytest hooks are always prefixed with pytest_ + # so we avoid accessing possibly non-readable attributes + # (see issue #1073) + if not name.startswith("pytest_"): + return + # ignore some historic special names which can not be hooks anyway + if name == "pytest_plugins" or name.startswith("pytest_funcarg__"): + return + + method = getattr(plugin, name) + opts = super(PytestPluginManager, self).parse_hookimpl_opts(plugin, name) + if opts is not None: + for name in ("tryfirst", "trylast", "optionalhook", "hookwrapper"): + opts.setdefault(name, hasattr(method, name)) + return opts + + def parse_hookspec_opts(self, module_or_class, name): + opts = super(PytestPluginManager, self).parse_hookspec_opts( + module_or_class, name) + if opts is None: + method = getattr(module_or_class, name) + if name.startswith("pytest_"): + opts = {"firstresult": hasattr(method, "firstresult"), + "historic": hasattr(method, "historic")} + return opts + + def _verify_hook(self, hook, hookmethod): + super(PytestPluginManager, self)._verify_hook(hook, hookmethod) + if "__multicall__" in hookmethod.argnames: + fslineno = _pytest._code.getfslineno(hookmethod.function) + warning = dict(code="I1", + fslocation=fslineno, + nodeid=None, + message="%r hook uses deprecated __multicall__ " + "argument" % (hook.name)) + self._warn(warning) + + def register(self, plugin, name=None): + ret = super(PytestPluginManager, self).register(plugin, name) + if ret: + self.hook.pytest_plugin_registered.call_historic( + kwargs=dict(plugin=plugin, manager=self)) + return ret + + def getplugin(self, name): + # support deprecated naming because plugins (xdist e.g.) use it + return self.get_plugin(name) + + def hasplugin(self, name): + """Return True if the plugin with the given name is registered.""" + return bool(self.get_plugin(name)) + + def pytest_configure(self, config): + # XXX now that the pluginmanager exposes hookimpl(tryfirst...) + # we should remove tryfirst/trylast as markers + config.addinivalue_line("markers", + "tryfirst: mark a hook implementation function such that the " + "plugin machinery will try to call it first/as early as possible.") + config.addinivalue_line("markers", + "trylast: mark a hook implementation function such that the " + "plugin machinery will try to call it last/as late as possible.") + + def _warn(self, message): + kwargs = message if isinstance(message, dict) else { + 'code': 'I1', + 'message': message, + 'fslocation': None, + 'nodeid': None, + } + self.hook.pytest_logwarning.call_historic(kwargs=kwargs) + + # + # internal API for local conftest plugin handling + # + def _set_initial_conftests(self, namespace): + """ load initial conftest files given a preparsed "namespace". + As conftest files may add their own command line options + which have arguments ('--my-opt somepath') we might get some + false positives. All builtin and 3rd party plugins will have + been loaded, however, so common options will not confuse our logic + here. 
+ """ + current = py.path.local() + self._confcutdir = current.join(namespace.confcutdir, abs=True) \ + if namespace.confcutdir else None + self._noconftest = namespace.noconftest + testpaths = namespace.file_or_dir + foundanchor = False + for path in testpaths: + path = str(path) + # remove node-id syntax + i = path.find("::") + if i != -1: + path = path[:i] + anchor = current.join(path, abs=1) + if exists(anchor): # we found some file object + self._try_load_conftest(anchor) + foundanchor = True + if not foundanchor: + self._try_load_conftest(current) + + def _try_load_conftest(self, anchor): + self._getconftestmodules(anchor) + # let's also consider test* subdirs + if anchor.check(dir=1): + for x in anchor.listdir("test*"): + if x.check(dir=1): + self._getconftestmodules(x) + + def _getconftestmodules(self, path): + if self._noconftest: + return [] + try: + return self._path2confmods[path] + except KeyError: + if path.isfile(): + clist = self._getconftestmodules(path.dirpath()) + else: + # XXX these days we may rather want to use config.rootdir + # and allow users to opt into looking into the rootdir parent + # directories instead of requiring to specify confcutdir + clist = [] + for parent in path.parts(): + if self._confcutdir and self._confcutdir.relto(parent): + continue + conftestpath = parent.join("conftest.py") + if conftestpath.isfile(): + mod = self._importconftest(conftestpath) + clist.append(mod) + + self._path2confmods[path] = clist + return clist + + def _rget_with_confmod(self, name, path): + modules = self._getconftestmodules(path) + for mod in reversed(modules): + try: + return mod, getattr(mod, name) + except AttributeError: + continue + raise KeyError(name) + + def _importconftest(self, conftestpath): + try: + return self._conftestpath2mod[conftestpath] + except KeyError: + pkgpath = conftestpath.pypkgpath() + if pkgpath is None: + _ensure_removed_sysmodule(conftestpath.purebasename) + try: + mod = conftestpath.pyimport() + except Exception: + raise ConftestImportFailure(conftestpath, sys.exc_info()) + + self._conftest_plugins.add(mod) + self._conftestpath2mod[conftestpath] = mod + dirpath = conftestpath.dirpath() + if dirpath in self._path2confmods: + for path, mods in self._path2confmods.items(): + if path and path.relto(dirpath) or path == dirpath: + assert mod not in mods + mods.append(mod) + self.trace("loaded conftestmodule %r" %(mod)) + self.consider_conftest(mod) + return mod + + # + # API for bootstrapping plugin loading + # + # + + def consider_preparse(self, args): + for opt1,opt2 in zip(args, args[1:]): + if opt1 == "-p": + self.consider_pluginarg(opt2) + + def consider_pluginarg(self, arg): + if arg.startswith("no:"): + name = arg[3:] + self.set_blocked(name) + if not name.startswith("pytest_"): + self.set_blocked("pytest_" + name) + else: + self.import_plugin(arg) + + def consider_conftest(self, conftestmodule): + if self.register(conftestmodule, name=conftestmodule.__file__): + self.consider_module(conftestmodule) + + def consider_env(self): + self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS")) + + def consider_module(self, mod): + plugins = getattr(mod, 'pytest_plugins', []) + if isinstance(plugins, str): + plugins = [plugins] + self.rewrite_hook.mark_rewrite(*plugins) + self._import_plugin_specs(plugins) + + def _import_plugin_specs(self, spec): + if spec: + if isinstance(spec, str): + spec = spec.split(",") + for import_spec in spec: + self.import_plugin(import_spec) + + def import_plugin(self, modname): + # most often modname refers to 
builtin modules, e.g. "pytester", + # "terminal" or "capture". Those plugins are registered under their + # basename for historic purposes but must be imported with the + # _pytest prefix. + assert isinstance(modname, str) + if self.get_plugin(modname) is not None: + return + if modname in builtin_plugins: + importspec = "_pytest." + modname + else: + importspec = modname + try: + __import__(importspec) + except ImportError as e: + new_exc = ImportError('Error importing plugin "%s": %s' % (modname, safe_str(e.args[0]))) + # copy over name and path attributes + for attr in ('name', 'path'): + if hasattr(e, attr): + setattr(new_exc, attr, getattr(e, attr)) + raise new_exc + except Exception as e: + import pytest + if not hasattr(pytest, 'skip') or not isinstance(e, pytest.skip.Exception): + raise + self._warn("skipped plugin %r: %s" %((modname, e.msg))) + else: + mod = sys.modules[importspec] + self.register(mod, modname) + self.consider_module(mod) + + +class Parser: + """ Parser for command line arguments and ini-file values. + + :ivar extra_info: dict of generic param -> value to display in case + there's an error processing the command line arguments. + """ + + def __init__(self, usage=None, processopt=None): + self._anonymous = OptionGroup("custom options", parser=self) + self._groups = [] + self._processopt = processopt + self._usage = usage + self._inidict = {} + self._ininames = [] + self.extra_info = {} + + def processoption(self, option): + if self._processopt: + if option.dest: + self._processopt(option) + + def getgroup(self, name, description="", after=None): + """ get (or create) a named option Group. + + :name: name of the option group. + :description: long description for --help output. + :after: name of other group, used for ordering --help output. + + The returned group object has an ``addoption`` method with the same + signature as :py:func:`parser.addoption + <_pytest.config.Parser.addoption>` but will be shown in the + respective group in the output of ``pytest. --help``. + """ + for group in self._groups: + if group.name == name: + return group + group = OptionGroup(name, description, parser=self) + i = 0 + for i, grp in enumerate(self._groups): + if grp.name == after: + break + self._groups.insert(i+1, group) + return group + + def addoption(self, *opts, **attrs): + """ register a command line option. + + :opts: option names, can be short or long options. + :attrs: same attributes which the ``add_option()`` function of the + `argparse library + `_ + accepts. + + After command line parsing options are available on the pytest config + object via ``config.option.NAME`` where ``NAME`` is usually set + by passing a ``dest`` attribute, for example + ``addoption("--long", dest="NAME", ...)``. 
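+
+        For example, in a ``conftest.py`` (editorial sketch; ``--runslow``
+        is a made-up option)::
+
+            def pytest_addoption(parser):
+                parser.addoption('--runslow', action='store_true',
+                                 help='also run slow tests')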
+ """ + self._anonymous.addoption(*opts, **attrs) + + def parse(self, args, namespace=None): + from _pytest._argcomplete import try_argcomplete + self.optparser = self._getparser() + try_argcomplete(self.optparser) + return self.optparser.parse_args([str(x) for x in args], namespace=namespace) + + def _getparser(self): + from _pytest._argcomplete import filescompleter + optparser = MyOptionParser(self, self.extra_info) + groups = self._groups + [self._anonymous] + for group in groups: + if group.options: + desc = group.description or group.name + arggroup = optparser.add_argument_group(desc) + for option in group.options: + n = option.names() + a = option.attrs() + arggroup.add_argument(*n, **a) + # bash like autocompletion for dirs (appending '/') + optparser.add_argument(FILE_OR_DIR, nargs='*').completer=filescompleter + return optparser + + def parse_setoption(self, args, option, namespace=None): + parsedoption = self.parse(args, namespace=namespace) + for name, value in parsedoption.__dict__.items(): + setattr(option, name, value) + return getattr(parsedoption, FILE_OR_DIR) + + def parse_known_args(self, args, namespace=None): + """parses and returns a namespace object with known arguments at this + point. + """ + return self.parse_known_and_unknown_args(args, namespace=namespace)[0] + + def parse_known_and_unknown_args(self, args, namespace=None): + """parses and returns a namespace object with known arguments, and + the remaining arguments unknown at this point. + """ + optparser = self._getparser() + args = [str(x) for x in args] + return optparser.parse_known_args(args, namespace=namespace) + + def addini(self, name, help, type=None, default=None): + """ register an ini-file option. + + :name: name of the ini-variable + :type: type of the variable, can be ``pathlist``, ``args``, ``linelist`` + or ``bool``. + :default: default value if no ini-file option exists but is queried. + + The value of ini-variables can be retrieved via a call to + :py:func:`config.getini(name) <_pytest.config.Config.getini>`. + """ + assert type in (None, "pathlist", "args", "linelist", "bool") + self._inidict[name] = (help, type, default) + self._ininames.append(name) + + +class ArgumentError(Exception): + """ + Raised if an Argument instance is created with invalid or + inconsistent arguments. + """ + + def __init__(self, msg, option): + self.msg = msg + self.option_id = str(option) + + def __str__(self): + if self.option_id: + return "option %s: %s" % (self.option_id, self.msg) + else: + return self.msg + + +class Argument: + """class that mimics the necessary behaviour of optparse.Option + + its currently a least effort implementation + and ignoring choices and integer prefixes + https://docs.python.org/3/library/optparse.html#optparse-standard-option-types + """ + _typ_map = { + 'int': int, + 'string': str, + 'float': float, + 'complex': complex, + } + + def __init__(self, *names, **attrs): + """store parms in private vars for use in add_argument""" + self._attrs = attrs + self._short_opts = [] + self._long_opts = [] + self.dest = attrs.get('dest') + if '%default' in (attrs.get('help') or ''): + warnings.warn( + 'pytest now uses argparse. "%default" should be' + ' changed to "%(default)s" ', + DeprecationWarning, + stacklevel=3) + try: + typ = attrs['type'] + except KeyError: + pass + else: + # this might raise a keyerror as well, don't want to catch that + if isinstance(typ, py.builtin._basestring): + if typ == 'choice': + warnings.warn( + 'type argument to addoption() is a string %r.' 
+ ' For parsearg this is optional and when supplied' + ' should be a type.' + ' (options: %s)' % (typ, names), + DeprecationWarning, + stacklevel=3) + # argparse expects a type here take it from + # the type of the first element + attrs['type'] = type(attrs['choices'][0]) + else: + warnings.warn( + 'type argument to addoption() is a string %r.' + ' For parsearg this should be a type.' + ' (options: %s)' % (typ, names), + DeprecationWarning, + stacklevel=3) + attrs['type'] = Argument._typ_map[typ] + # used in test_parseopt -> test_parse_defaultgetter + self.type = attrs['type'] + else: + self.type = typ + try: + # attribute existence is tested in Config._processopt + self.default = attrs['default'] + except KeyError: + pass + self._set_opt_strings(names) + if not self.dest: + if self._long_opts: + self.dest = self._long_opts[0][2:].replace('-', '_') + else: + try: + self.dest = self._short_opts[0][1:] + except IndexError: + raise ArgumentError( + 'need a long or short option', self) + + def names(self): + return self._short_opts + self._long_opts + + def attrs(self): + # update any attributes set by processopt + attrs = 'default dest help'.split() + if self.dest: + attrs.append(self.dest) + for attr in attrs: + try: + self._attrs[attr] = getattr(self, attr) + except AttributeError: + pass + if self._attrs.get('help'): + a = self._attrs['help'] + a = a.replace('%default', '%(default)s') + #a = a.replace('%prog', '%(prog)s') + self._attrs['help'] = a + return self._attrs + + def _set_opt_strings(self, opts): + """directly from optparse + + might not be necessary as this is passed to argparse later on""" + for opt in opts: + if len(opt) < 2: + raise ArgumentError( + "invalid option string %r: " + "must be at least two characters long" % opt, self) + elif len(opt) == 2: + if not (opt[0] == "-" and opt[1] != "-"): + raise ArgumentError( + "invalid short option string %r: " + "must be of the form -x, (x any non-dash char)" % opt, + self) + self._short_opts.append(opt) + else: + if not (opt[0:2] == "--" and opt[2] != "-"): + raise ArgumentError( + "invalid long option string %r: " + "must start with --, followed by non-dash" % opt, + self) + self._long_opts.append(opt) + + def __repr__(self): + args = [] + if self._short_opts: + args += ['_short_opts: ' + repr(self._short_opts)] + if self._long_opts: + args += ['_long_opts: ' + repr(self._long_opts)] + args += ['dest: ' + repr(self.dest)] + if hasattr(self, 'type'): + args += ['type: ' + repr(self.type)] + if hasattr(self, 'default'): + args += ['default: ' + repr(self.default)] + return 'Argument({0})'.format(', '.join(args)) + + +class OptionGroup: + def __init__(self, name, description="", parser=None): + self.name = name + self.description = description + self.options = [] + self.parser = parser + + def addoption(self, *optnames, **attrs): + """ add an option to this group. + + if a shortened version of a long option is specified it will + be suppressed in the help. 
addoption('--twowords', '--two-words') + results in help showing '--two-words' only, but --twowords gets + accepted **and** the automatic destination is in args.twowords + """ + conflict = set(optnames).intersection( + name for opt in self.options for name in opt.names()) + if conflict: + raise ValueError("option names %s already added" % conflict) + option = Argument(*optnames, **attrs) + self._addoption_instance(option, shortupper=False) + + def _addoption(self, *optnames, **attrs): + option = Argument(*optnames, **attrs) + self._addoption_instance(option, shortupper=True) + + def _addoption_instance(self, option, shortupper=False): + if not shortupper: + for opt in option._short_opts: + if opt[0] == '-' and opt[1].islower(): + raise ValueError("lowercase shortoptions reserved") + if self.parser: + self.parser.processoption(option) + self.options.append(option) + + +class MyOptionParser(argparse.ArgumentParser): + def __init__(self, parser, extra_info=None): + if not extra_info: + extra_info = {} + self._parser = parser + argparse.ArgumentParser.__init__(self, usage=parser._usage, + add_help=False, formatter_class=DropShorterLongHelpFormatter) + # extra_info is a dict of (param -> value) to display if there's + # an usage error to provide more contextual information to the user + self.extra_info = extra_info + + def parse_args(self, args=None, namespace=None): + """allow splitting of positional arguments""" + args, argv = self.parse_known_args(args, namespace) + if argv: + for arg in argv: + if arg and arg[0] == '-': + lines = ['unrecognized arguments: %s' % (' '.join(argv))] + for k, v in sorted(self.extra_info.items()): + lines.append(' %s: %s' % (k, v)) + self.error('\n'.join(lines)) + getattr(args, FILE_OR_DIR).extend(argv) + return args + + +class DropShorterLongHelpFormatter(argparse.HelpFormatter): + """shorten help for long options that differ only in extra hyphens + + - collapse **long** options that are the same except for extra hyphens + - special action attribute map_long_option allows surpressing additional + long options + - shortcut if there are only two options and one of them is a short one + - cache result on action object as this is called at least 2 times + """ + def _format_action_invocation(self, action): + orgstr = argparse.HelpFormatter._format_action_invocation(self, action) + if orgstr and orgstr[0] != '-': # only optional arguments + return orgstr + res = getattr(action, '_formatted_action_invocation', None) + if res: + return res + options = orgstr.split(', ') + if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2): + # a shortcut for '-h, --help' or '--abc', '-a' + action._formatted_action_invocation = orgstr + return orgstr + return_list = [] + option_map = getattr(action, 'map_long_option', {}) + if option_map is None: + option_map = {} + short_long = {} + for option in options: + if len(option) == 2 or option[2] == ' ': + continue + if not option.startswith('--'): + raise ArgumentError('long optional argument without "--": [%s]' + % (option), self) + xxoption = option[2:] + if xxoption.split()[0] not in option_map: + shortened = xxoption.replace('-', '') + if shortened not in short_long or \ + len(short_long[shortened]) < len(xxoption): + short_long[shortened] = xxoption + # now short_long has been filled out to the longest with dashes + # **and** we keep the right option ordering from add_argument + for option in options: # + if len(option) == 2 or option[2] == ' ': + return_list.append(option) + if option[2:] == 
short_long.get(option.replace('-', '')):
+                return_list.append(option.replace(' ', '=', 1))
+        action._formatted_action_invocation = ', '.join(return_list)
+        return action._formatted_action_invocation
+
+
+
+def _ensure_removed_sysmodule(modname):
+    try:
+        del sys.modules[modname]
+    except KeyError:
+        pass
+
+class CmdOptions(object):
+    """ holds cmdline options as attributes."""
+    def __init__(self, values=()):
+        self.__dict__.update(values)
+    def __repr__(self):
+        return "<CmdOptions %r>" %(self.__dict__,)
+    def copy(self):
+        return CmdOptions(self.__dict__)
+
+class Notset:
+    def __repr__(self):
+        return "<NOTSET>"
+
+
+notset = Notset()
+FILE_OR_DIR = 'file_or_dir'
+
+
+class Config(object):
+    """ access to configuration values, pluginmanager and plugin hooks. """
+
+    def __init__(self, pluginmanager):
+        #: access to command line option as attributes.
+        #: (deprecated), use :py:func:`getoption() <_pytest.config.Config.getoption>` instead
+        self.option = CmdOptions()
+        _a = FILE_OR_DIR
+        self._parser = Parser(
+            usage="%%(prog)s [options] [%s] [%s] [...]" % (_a, _a),
+            processopt=self._processopt,
+        )
+        #: a pluginmanager instance
+        self.pluginmanager = pluginmanager
+        self.trace = self.pluginmanager.trace.root.get("config")
+        self.hook = self.pluginmanager.hook
+        self._inicache = {}
+        self._opt2dest = {}
+        self._cleanup = []
+        self._warn = self.pluginmanager._warn
+        self.pluginmanager.register(self, "pytestconfig")
+        self._configured = False
+
+        def do_setns(dic):
+            import pytest
+            setns(pytest, dic)
+
+        self.hook.pytest_namespace.call_historic(do_setns, {})
+        self.hook.pytest_addoption.call_historic(kwargs=dict(parser=self._parser))
+
+    def add_cleanup(self, func):
+        """ Add a function to be called when the config object gets out of
+        use (usually coinciding with pytest_unconfigure)."""
+        self._cleanup.append(func)
+
+    def _do_configure(self):
+        assert not self._configured
+        self._configured = True
+        self.hook.pytest_configure.call_historic(kwargs=dict(config=self))
+
+    def _ensure_unconfigure(self):
+        if self._configured:
+            self._configured = False
+            self.hook.pytest_unconfigure(config=self)
+            self.hook.pytest_configure._call_history = []
+        while self._cleanup:
+            fin = self._cleanup.pop()
+            fin()
+
+    def warn(self, code, message, fslocation=None):
+        """ generate a warning for this test session. """
+        self.hook.pytest_logwarning.call_historic(kwargs=dict(
+            code=code, message=message,
+            fslocation=fslocation, nodeid=None))
+
+    def get_terminal_writer(self):
+        return self.pluginmanager.get_plugin("terminalreporter")._tw
+
+    def pytest_cmdline_parse(self, pluginmanager, args):
+        # REF1 assert self == pluginmanager.config, (self, pluginmanager.config)
+        self.parse(args)
+        return self
+
+    def notify_exception(self, excinfo, option=None):
+        if option and option.fulltrace:
+            style = "long"
+        else:
+            style = "native"
+        excrepr = excinfo.getrepr(funcargs=True,
+            showlocals=getattr(option, 'showlocals', False),
+            style=style,
+        )
+        res = self.hook.pytest_internalerror(excrepr=excrepr,
+                                             excinfo=excinfo)
+        if not py.builtin.any(res):
+            for line in str(excrepr).split("\n"):
+                sys.stderr.write("INTERNALERROR> %s\n" %line)
+                sys.stderr.flush()
+
+    def cwd_relative_nodeid(self, nodeid):
+        # nodeid's are relative to the rootpath, compute relative to cwd
+        if self.invocation_dir != self.rootdir:
+            fullpath = self.rootdir.join(nodeid)
+            nodeid = self.invocation_dir.bestrelpath(fullpath)
+        return nodeid
+
+    @classmethod
+    def fromdictargs(cls, option_dict, args):
+        """ constructor useable for subprocesses.
""" + config = get_config() + config.option.__dict__.update(option_dict) + config.parse(args, addopts=False) + for x in config.option.plugins: + config.pluginmanager.consider_pluginarg(x) + return config + + def _processopt(self, opt): + for name in opt._short_opts + opt._long_opts: + self._opt2dest[name] = opt.dest + + if hasattr(opt, 'default') and opt.dest: + if not hasattr(self.option, opt.dest): + setattr(self.option, opt.dest, opt.default) + + @hookimpl(trylast=True) + def pytest_load_initial_conftests(self, early_config): + self.pluginmanager._set_initial_conftests(early_config.known_args_namespace) + + def _initini(self, args): + ns, unknown_args = self._parser.parse_known_and_unknown_args(args, namespace=self.option.copy()) + r = determine_setup(ns.inifilename, ns.file_or_dir + unknown_args, warnfunc=self.warn) + self.rootdir, self.inifile, self.inicfg = r + self._parser.extra_info['rootdir'] = self.rootdir + self._parser.extra_info['inifile'] = self.inifile + self.invocation_dir = py.path.local() + self._parser.addini('addopts', 'extra command line options', 'args') + self._parser.addini('minversion', 'minimally required pytest version') + + def _consider_importhook(self, args, entrypoint_name): + """Install the PEP 302 import hook if using assertion re-writing. + + Needs to parse the --assert= option from the commandline + and find all the installed plugins to mark them for re-writing + by the importhook. + """ + ns, unknown_args = self._parser.parse_known_and_unknown_args(args) + mode = ns.assertmode + if mode == 'rewrite': + try: + hook = _pytest.assertion.install_importhook(self) + except SystemError: + mode = 'plain' + else: + import pkg_resources + self.pluginmanager.rewrite_hook = hook + for entrypoint in pkg_resources.iter_entry_points('pytest11'): + # 'RECORD' available for plugins installed normally (pip install) + # 'SOURCES.txt' available for plugins installed in dev mode (pip install -e) + # for installed plugins 'SOURCES.txt' returns an empty list, and vice-versa + # so it shouldn't be an issue + for metadata in ('RECORD', 'SOURCES.txt'): + for entry in entrypoint.dist._get_metadata(metadata): + fn = entry.split(',')[0] + is_simple_module = os.sep not in fn and fn.endswith('.py') + is_package = fn.count(os.sep) == 1 and fn.endswith('__init__.py') + if is_simple_module: + module_name, ext = os.path.splitext(fn) + hook.mark_rewrite(module_name) + elif is_package: + package_name = os.path.dirname(fn) + hook.mark_rewrite(package_name) + self._warn_about_missing_assertion(mode) + + def _warn_about_missing_assertion(self, mode): + try: + assert False + except AssertionError: + pass + else: + if mode == 'plain': + sys.stderr.write("WARNING: ASSERTIONS ARE NOT EXECUTED" + " and FAILING TESTS WILL PASS. 
Are you" + " using python -O?") + else: + sys.stderr.write("WARNING: assertions not in test modules or" + " plugins will be ignored" + " because assert statements are not executed " + "by the underlying Python interpreter " + "(are you using python -O?)\n") + + def _preparse(self, args, addopts=True): + self._initini(args) + if addopts: + args[:] = shlex.split(os.environ.get('PYTEST_ADDOPTS', '')) + args + args[:] = self.getini("addopts") + args + self._checkversion() + entrypoint_name = 'pytest11' + self._consider_importhook(args, entrypoint_name) + self.pluginmanager.consider_preparse(args) + self.pluginmanager.load_setuptools_entrypoints(entrypoint_name) + self.pluginmanager.consider_env() + self.known_args_namespace = ns = self._parser.parse_known_args(args, namespace=self.option.copy()) + confcutdir = self.known_args_namespace.confcutdir + if self.known_args_namespace.confcutdir is None and self.inifile: + confcutdir = py.path.local(self.inifile).dirname + self.known_args_namespace.confcutdir = confcutdir + try: + self.hook.pytest_load_initial_conftests(early_config=self, + args=args, parser=self._parser) + except ConftestImportFailure: + e = sys.exc_info()[1] + if ns.help or ns.version: + # we don't want to prevent --help/--version to work + # so just let is pass and print a warning at the end + self._warn("could not load initial conftests (%s)\n" % e.path) + else: + raise + + def _checkversion(self): + import pytest + minver = self.inicfg.get('minversion', None) + if minver: + ver = minver.split(".") + myver = pytest.__version__.split(".") + if myver < ver: + raise pytest.UsageError( + "%s:%d: requires pytest-%s, actual pytest-%s'" %( + self.inicfg.config.path, self.inicfg.lineof('minversion'), + minver, pytest.__version__)) + + def parse(self, args, addopts=True): + # parse given cmdline arguments into this config object. + assert not hasattr(self, 'args'), ( + "can only parse cmdline args at most once per Config object") + self._origargs = args + self.hook.pytest_addhooks.call_historic( + kwargs=dict(pluginmanager=self.pluginmanager)) + self._preparse(args, addopts=addopts) + # XXX deprecated hook: + self.hook.pytest_cmdline_preparse(config=self, args=args) + args = self._parser.parse_setoption(args, self.option, namespace=self.option) + if not args: + cwd = os.getcwd() + if cwd == self.rootdir: + args = self.getini('testpaths') + if not args: + args = [cwd] + self.args = args + + def addinivalue_line(self, name, line): + """ add a line to an ini-file option. The option must have been + declared but might not yet be set in which case the line becomes the + the first line in its value. """ + x = self.getini(name) + assert isinstance(x, list) + x.append(line) # modifies the cached list inline + + def getini(self, name): + """ return configuration value from an :ref:`ini file `. If the + specified name hasn't been registered through a prior + :py:func:`parser.addini ` + call (usually from a plugin), a ValueError is raised. 
""" + try: + return self._inicache[name] + except KeyError: + self._inicache[name] = val = self._getini(name) + return val + + def _getini(self, name): + try: + description, type, default = self._parser._inidict[name] + except KeyError: + raise ValueError("unknown configuration value: %r" %(name,)) + value = self._get_override_ini_value(name) + if value is None: + try: + value = self.inicfg[name] + except KeyError: + if default is not None: + return default + if type is None: + return '' + return [] + if type == "pathlist": + dp = py.path.local(self.inicfg.config.path).dirpath() + l = [] + for relpath in shlex.split(value): + l.append(dp.join(relpath, abs=True)) + return l + elif type == "args": + return shlex.split(value) + elif type == "linelist": + return [t for t in map(lambda x: x.strip(), value.split("\n")) if t] + elif type == "bool": + return bool(_strtobool(value.strip())) + else: + assert type is None + return value + + def _getconftest_pathlist(self, name, path): + try: + mod, relroots = self.pluginmanager._rget_with_confmod(name, path) + except KeyError: + return None + modpath = py.path.local(mod.__file__).dirpath() + l = [] + for relroot in relroots: + if not isinstance(relroot, py.path.local): + relroot = relroot.replace("/", py.path.local.sep) + relroot = modpath.join(relroot, abs=True) + l.append(relroot) + return l + + def _get_override_ini_value(self, name): + value = None + # override_ini is a list of list, to support both -o foo1=bar1 foo2=bar2 and + # and -o foo1=bar1 -o foo2=bar2 options + # always use the last item if multiple value set for same ini-name, + # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2 + if self.getoption("override_ini", None): + for ini_config_list in self.option.override_ini: + for ini_config in ini_config_list: + try: + (key, user_ini_value) = ini_config.split("=", 1) + except ValueError: + raise UsageError("-o/--override-ini expects option=value style.") + if key == name: + value = user_ini_value + return value + + def getoption(self, name, default=notset, skip=False): + """ return command line option value. + + :arg name: name of the option. You may also specify + the literal ``--OPT`` option instead of the "dest" option name. + :arg default: default value if no option of that name exists. + :arg skip: if True raise pytest.skip if option does not exists + or has a None value. + """ + name = self._opt2dest.get(name, name) + try: + val = getattr(self.option, name) + if val is None and skip: + raise AttributeError(name) + return val + except AttributeError: + if default is not notset: + return default + if skip: + import pytest + pytest.skip("no %r option found" %(name,)) + raise ValueError("no option named %r" % (name,)) + + def getvalue(self, name, path=None): + """ (deprecated, use getoption()) """ + return self.getoption(name) + + def getvalueorskip(self, name, path=None): + """ (deprecated, use getoption(skip=True)) """ + return self.getoption(name, skip=True) + +def exists(path, ignore=EnvironmentError): + try: + return path.check() + except ignore: + return False + +def getcfg(args, warnfunc=None): + """ + Search the list of arguments for a valid ini-file for pytest, + and return a tuple of (rootdir, inifile, cfg-dict). + + note: warnfunc is an optional function used to warn + about ini-files that use deprecated features. + This parameter should be removed when pytest + adopts standard deprecation warnings (#1804). 
+ """ + from _pytest.deprecated import SETUP_CFG_PYTEST + inibasenames = ["pytest.ini", "tox.ini", "setup.cfg"] + args = [x for x in args if not str(x).startswith("-")] + if not args: + args = [py.path.local()] + for arg in args: + arg = py.path.local(arg) + for base in arg.parts(reverse=True): + for inibasename in inibasenames: + p = base.join(inibasename) + if exists(p): + iniconfig = py.iniconfig.IniConfig(p) + if 'pytest' in iniconfig.sections: + if inibasename == 'setup.cfg' and warnfunc: + warnfunc('C1', SETUP_CFG_PYTEST) + return base, p, iniconfig['pytest'] + if inibasename == 'setup.cfg' and 'tool:pytest' in iniconfig.sections: + return base, p, iniconfig['tool:pytest'] + elif inibasename == "pytest.ini": + # allowed to be empty + return base, p, {} + return None, None, None + + +def get_common_ancestor(args): + # args are what we get after early command line parsing (usually + # strings, but can be py.path.local objects as well) + common_ancestor = None + for arg in args: + if str(arg)[0] == "-": + continue + p = py.path.local(arg) + if not p.exists(): + continue + if common_ancestor is None: + common_ancestor = p + else: + if p.relto(common_ancestor) or p == common_ancestor: + continue + elif common_ancestor.relto(p): + common_ancestor = p + else: + shared = p.common(common_ancestor) + if shared is not None: + common_ancestor = shared + if common_ancestor is None: + common_ancestor = py.path.local() + elif common_ancestor.isfile(): + common_ancestor = common_ancestor.dirpath() + return common_ancestor + + +def get_dirs_from_args(args): + return [d for d in (py.path.local(x) for x in args + if not str(x).startswith("-")) + if d.exists()] + + +def determine_setup(inifile, args, warnfunc=None): + dirs = get_dirs_from_args(args) + if inifile: + iniconfig = py.iniconfig.IniConfig(inifile) + try: + inicfg = iniconfig["pytest"] + except KeyError: + inicfg = None + rootdir = get_common_ancestor(dirs) + else: + ancestor = get_common_ancestor(dirs) + rootdir, inifile, inicfg = getcfg([ancestor], warnfunc=warnfunc) + if rootdir is None: + for rootdir in ancestor.parts(reverse=True): + if rootdir.join("setup.py").exists(): + break + else: + rootdir, inifile, inicfg = getcfg(dirs, warnfunc=warnfunc) + if rootdir is None: + rootdir = get_common_ancestor([py.path.local(), ancestor]) + is_fs_root = os.path.splitdrive(str(rootdir))[1] == os.sep + if is_fs_root: + rootdir = ancestor + return rootdir, inifile, inicfg or {} + + +def setns(obj, dic): + import pytest + for name, value in dic.items(): + if isinstance(value, dict): + mod = getattr(obj, name, None) + if mod is None: + modname = "pytest.%s" % name + mod = types.ModuleType(modname) + sys.modules[modname] = mod + mod.__all__ = [] + setattr(obj, name, mod) + obj.__all__.append(name) + setns(mod, value) + else: + setattr(obj, name, value) + obj.__all__.append(name) + #if obj != pytest: + # pytest.__all__.append(name) + setattr(pytest, name, value) + + +def create_terminal_writer(config, *args, **kwargs): + """Create a TerminalWriter instance configured according to the options + in the config object. Every code which requires a TerminalWriter object + and has access to a config object should use this function. + """ + tw = py.io.TerminalWriter(*args, **kwargs) + if config.option.color == 'yes': + tw.hasmarkup = True + if config.option.color == 'no': + tw.hasmarkup = False + return tw + + +def _strtobool(val): + """Convert a string representation of truth to true (1) or false (0). 
+ + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + + .. note:: copied from distutils.util + """ + val = val.lower() + if val in ('y', 'yes', 't', 'true', 'on', '1'): + return 1 + elif val in ('n', 'no', 'f', 'false', 'off', '0'): + return 0 + else: + raise ValueError("invalid truth value %r" % (val,)) diff --git a/tests/_lib_vendors/_pytest/debugging.py b/tests/_lib_vendors/_pytest/debugging.py new file mode 100644 index 0000000..d96170b --- /dev/null +++ b/tests/_lib_vendors/_pytest/debugging.py @@ -0,0 +1,124 @@ +""" interactive debugging with PDB, the Python Debugger. """ +from __future__ import absolute_import +import pdb +import sys + +import pytest + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group._addoption( + '--pdb', dest="usepdb", action="store_true", + help="start the interactive Python debugger on errors.") + group._addoption( + '--pdbcls', dest="usepdb_cls", metavar="modulename:classname", + help="start a custom interactive Python debugger on errors. " + "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb") + +def pytest_namespace(): + return {'set_trace': pytestPDB().set_trace} + +def pytest_configure(config): + if config.getvalue("usepdb") or config.getvalue("usepdb_cls"): + config.pluginmanager.register(PdbInvoke(), 'pdbinvoke') + if config.getvalue("usepdb_cls"): + modname, classname = config.getvalue("usepdb_cls").split(":") + __import__(modname) + pdb_cls = getattr(sys.modules[modname], classname) + else: + pdb_cls = pdb.Pdb + pytestPDB._pdb_cls = pdb_cls + + old = (pdb.set_trace, pytestPDB._pluginmanager) + + def fin(): + pdb.set_trace, pytestPDB._pluginmanager = old + pytestPDB._config = None + pytestPDB._pdb_cls = pdb.Pdb + + pdb.set_trace = pytest.set_trace + pytestPDB._pluginmanager = config.pluginmanager + pytestPDB._config = config + config._cleanup.append(fin) + +class pytestPDB: + """ Pseudo PDB that defers to the real pdb. """ + _pluginmanager = None + _config = None + _pdb_cls = pdb.Pdb + + def set_trace(self): + """ invoke PDB set_trace debugging, dropping any IO capturing. """ + import _pytest.config + frame = sys._getframe().f_back + if self._pluginmanager is not None: + capman = self._pluginmanager.getplugin("capturemanager") + if capman: + capman.suspendcapture(in_=True) + tw = _pytest.config.create_terminal_writer(self._config) + tw.line() + tw.sep(">", "PDB set_trace (IO-capturing turned off)") + self._pluginmanager.hook.pytest_enter_pdb(config=self._config) + self._pdb_cls().set_trace(frame) + + +class PdbInvoke: + def pytest_exception_interact(self, node, call, report): + capman = node.config.pluginmanager.getplugin("capturemanager") + if capman: + out, err = capman.suspendcapture(in_=True) + sys.stdout.write(out) + sys.stdout.write(err) + _enter_pdb(node, call.excinfo, report) + + def pytest_internalerror(self, excrepr, excinfo): + for line in str(excrepr).split("\n"): + sys.stderr.write("INTERNALERROR> %s\n" %line) + sys.stderr.flush() + tb = _postmortem_traceback(excinfo) + post_mortem(tb) + + +def _enter_pdb(node, excinfo, rep): + # XXX we re-use the TerminalReporter's terminalwriter + # because this seems to avoid some encoding related troubles + # for not completely clear reasons. 
+ tw = node.config.pluginmanager.getplugin("terminalreporter")._tw + tw.line() + tw.sep(">", "traceback") + rep.toterminal(tw) + tw.sep(">", "entering PDB") + tb = _postmortem_traceback(excinfo) + post_mortem(tb) + rep._pdbshown = True + return rep + + +def _postmortem_traceback(excinfo): + # A doctest.UnexpectedException is not useful for post_mortem. + # Use the underlying exception instead: + from doctest import UnexpectedException + if isinstance(excinfo.value, UnexpectedException): + return excinfo.value.exc_info[2] + else: + return excinfo._excinfo[2] + + +def _find_last_non_hidden_frame(stack): + i = max(0, len(stack) - 1) + while i and stack[i][0].f_locals.get("__tracebackhide__", False): + i -= 1 + return i + + +def post_mortem(t): + class Pdb(pytestPDB._pdb_cls): + def get_stack(self, f, t): + stack, i = pdb.Pdb.get_stack(self, f, t) + if f is None: + i = _find_last_non_hidden_frame(stack) + return stack, i + p = Pdb() + p.reset() + p.interaction(None, t) diff --git a/tests/_lib_vendors/_pytest/deprecated.py b/tests/_lib_vendors/_pytest/deprecated.py new file mode 100644 index 0000000..6edc475 --- /dev/null +++ b/tests/_lib_vendors/_pytest/deprecated.py @@ -0,0 +1,24 @@ +""" +This module contains deprecation messages and bits of code used elsewhere in the codebase +that is planned to be removed in the next pytest release. + +Keeping it in a central location makes it easy to track what is deprecated and should +be removed when the time comes. +""" + + +MAIN_STR_ARGS = 'passing a string to pytest.main() is deprecated, ' \ + 'pass a list of arguments instead.' + +YIELD_TESTS = 'yield tests are deprecated, and scheduled to be removed in pytest 4.0' + +FUNCARG_PREFIX = ( + '{name}: declaring fixtures using "pytest_funcarg__" prefix is deprecated ' + 'and scheduled to be removed in pytest 4.0. ' + 'Please remove the prefix and use the @pytest.fixture decorator instead.') + +SETUP_CFG_PYTEST = '[pytest] section in setup.cfg files is deprecated, use [tool:pytest] instead.' 
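+
+# Illustrative only: these constants are emitted through pytest's internal
+# warning machinery elsewhere in the codebase, along the lines of
+#     config.warn('C1', deprecated.FUNCARG_PREFIX.format(name=name))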
+ +GETFUNCARGVALUE = "use of getfuncargvalue is deprecated, use getfixturevalue" + +RESULT_LOG = '--result-log is deprecated and scheduled for removal in pytest 4.0' diff --git a/tests/_lib_vendors/_pytest/doctest.py b/tests/_lib_vendors/_pytest/doctest.py new file mode 100644 index 0000000..f4782dd --- /dev/null +++ b/tests/_lib_vendors/_pytest/doctest.py @@ -0,0 +1,331 @@ +""" discover and run doctests in modules and test files.""" +from __future__ import absolute_import + +import traceback + +import pytest +from _pytest._code.code import ExceptionInfo, ReprFileLocation, TerminalRepr +from _pytest.fixtures import FixtureRequest + + +DOCTEST_REPORT_CHOICE_NONE = 'none' +DOCTEST_REPORT_CHOICE_CDIFF = 'cdiff' +DOCTEST_REPORT_CHOICE_NDIFF = 'ndiff' +DOCTEST_REPORT_CHOICE_UDIFF = 'udiff' +DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = 'only_first_failure' + +DOCTEST_REPORT_CHOICES = ( + DOCTEST_REPORT_CHOICE_NONE, + DOCTEST_REPORT_CHOICE_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF, + DOCTEST_REPORT_CHOICE_UDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE, +) + +def pytest_addoption(parser): + parser.addini('doctest_optionflags', 'option flags for doctests', + type="args", default=["ELLIPSIS"]) + group = parser.getgroup("collect") + group.addoption("--doctest-modules", + action="store_true", default=False, + help="run doctests in all .py modules", + dest="doctestmodules") + group.addoption("--doctest-report", + type=str.lower, default="udiff", + help="choose another output format for diffs on doctest failure", + choices=DOCTEST_REPORT_CHOICES, + dest="doctestreport") + group.addoption("--doctest-glob", + action="append", default=[], metavar="pat", + help="doctests file matching pattern, default: test*.txt", + dest="doctestglob") + group.addoption("--doctest-ignore-import-errors", + action="store_true", default=False, + help="ignore doctest ImportErrors", + dest="doctest_ignore_import_errors") + + +def pytest_collect_file(path, parent): + config = parent.config + if path.ext == ".py": + if config.option.doctestmodules: + return DoctestModule(path, parent) + elif _is_doctest(config, path, parent): + return DoctestTextfile(path, parent) + + +def _is_doctest(config, path, parent): + if path.ext in ('.txt', '.rst') and parent.session.isinitpath(path): + return True + globs = config.getoption("doctestglob") or ['test*.txt'] + for glob in globs: + if path.check(fnmatch=glob): + return True + return False + + +class ReprFailDoctest(TerminalRepr): + + def __init__(self, reprlocation, lines): + self.reprlocation = reprlocation + self.lines = lines + + def toterminal(self, tw): + for line in self.lines: + tw.line(line) + self.reprlocation.toterminal(tw) + + +class DoctestItem(pytest.Item): + def __init__(self, name, parent, runner=None, dtest=None): + super(DoctestItem, self).__init__(name, parent) + self.runner = runner + self.dtest = dtest + self.obj = None + self.fixture_request = None + + def setup(self): + if self.dtest is not None: + self.fixture_request = _setup_fixtures(self) + globs = dict(getfixture=self.fixture_request.getfixturevalue) + for name, value in self.fixture_request.getfixturevalue('doctest_namespace').items(): + globs[name] = value + self.dtest.globs.update(globs) + + def runtest(self): + _check_all_skipped(self.dtest) + self.runner.run(self.dtest) + + def repr_failure(self, excinfo): + import doctest + if excinfo.errisinstance((doctest.DocTestFailure, + doctest.UnexpectedException)): + doctestfailure = excinfo.value + example = doctestfailure.example + test = doctestfailure.test + filename = 
test.filename + if test.lineno is None: + lineno = None + else: + lineno = test.lineno + example.lineno + 1 + message = excinfo.type.__name__ + reprlocation = ReprFileLocation(filename, lineno, message) + checker = _get_checker() + report_choice = _get_report_choice(self.config.getoption("doctestreport")) + if lineno is not None: + lines = doctestfailure.test.docstring.splitlines(False) + # add line numbers to the left of the error message + lines = ["%03d %s" % (i + test.lineno + 1, x) + for (i, x) in enumerate(lines)] + # trim docstring error lines to 10 + lines = lines[example.lineno - 9:example.lineno + 1] + else: + lines = ['EXAMPLE LOCATION UNKNOWN, not showing all tests of that example'] + indent = '>>>' + for line in example.source.splitlines(): + lines.append('??? %s %s' % (indent, line)) + indent = '...' + if excinfo.errisinstance(doctest.DocTestFailure): + lines += checker.output_difference(example, + doctestfailure.got, report_choice).split("\n") + else: + inner_excinfo = ExceptionInfo(excinfo.value.exc_info) + lines += ["UNEXPECTED EXCEPTION: %s" % + repr(inner_excinfo.value)] + lines += traceback.format_exception(*excinfo.value.exc_info) + return ReprFailDoctest(reprlocation, lines) + else: + return super(DoctestItem, self).repr_failure(excinfo) + + def reportinfo(self): + return self.fspath, None, "[doctest] %s" % self.name + + +def _get_flag_lookup(): + import doctest + return dict(DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1, + DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE, + NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE, + ELLIPSIS=doctest.ELLIPSIS, + IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL, + COMPARISON_FLAGS=doctest.COMPARISON_FLAGS, + ALLOW_UNICODE=_get_allow_unicode_flag(), + ALLOW_BYTES=_get_allow_bytes_flag(), + ) + + +def get_optionflags(parent): + optionflags_str = parent.config.getini("doctest_optionflags") + flag_lookup_table = _get_flag_lookup() + flag_acc = 0 + for flag in optionflags_str: + flag_acc |= flag_lookup_table[flag] + return flag_acc + + +class DoctestTextfile(pytest.Module): + obj = None + + def collect(self): + import doctest + + # inspired by doctest.testfile; ideally we would use it directly, + # but it doesn't support passing a custom checker + text = self.fspath.read() + filename = str(self.fspath) + name = self.fspath.basename + globs = {'__name__': '__main__'} + + + optionflags = get_optionflags(self) + runner = doctest.DebugRunner(verbose=0, optionflags=optionflags, + checker=_get_checker()) + + parser = doctest.DocTestParser() + test = parser.get_doctest(text, globs, name, filename, 0) + if test.examples: + yield DoctestItem(test.name, self, runner, test) + + +def _check_all_skipped(test): + """raises pytest.skip() if all examples in the given DocTest have the SKIP + option set. 
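+
+    For instance, a collected doctest whose examples are all marked with
+    ``# doctest: +SKIP`` is reported as skipped rather than passed.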
+ """ + import doctest + all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples) + if all_skipped: + pytest.skip('all tests skipped by +SKIP option') + + +class DoctestModule(pytest.Module): + def collect(self): + import doctest + if self.fspath.basename == "conftest.py": + module = self.config.pluginmanager._importconftest(self.fspath) + else: + try: + module = self.fspath.pyimport() + except ImportError: + if self.config.getvalue('doctest_ignore_import_errors'): + pytest.skip('unable to import module %r' % self.fspath) + else: + raise + # uses internal doctest module parsing mechanism + finder = doctest.DocTestFinder() + optionflags = get_optionflags(self) + runner = doctest.DebugRunner(verbose=0, optionflags=optionflags, + checker=_get_checker()) + for test in finder.find(module, module.__name__): + if test.examples: # skip empty doctests + yield DoctestItem(test.name, self, runner, test) + + +def _setup_fixtures(doctest_item): + """ + Used by DoctestTextfile and DoctestItem to setup fixture information. + """ + def func(): + pass + + doctest_item.funcargs = {} + fm = doctest_item.session._fixturemanager + doctest_item._fixtureinfo = fm.getfixtureinfo(node=doctest_item, func=func, + cls=None, funcargs=False) + fixture_request = FixtureRequest(doctest_item) + fixture_request._fillfixtures() + return fixture_request + + +def _get_checker(): + """ + Returns a doctest.OutputChecker subclass that takes in account the + ALLOW_UNICODE option to ignore u'' prefixes in strings and ALLOW_BYTES + to strip b'' prefixes. + Useful when the same doctest should run in Python 2 and Python 3. + + An inner class is used to avoid importing "doctest" at the module + level. + """ + if hasattr(_get_checker, 'LiteralsOutputChecker'): + return _get_checker.LiteralsOutputChecker() + + import doctest + import re + + class LiteralsOutputChecker(doctest.OutputChecker): + """ + Copied from doctest_nose_plugin.py from the nltk project: + https://github.com/nltk/nltk + + Further extended to also support byte literals. + """ + + _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE) + _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE) + + def check_output(self, want, got, optionflags): + res = doctest.OutputChecker.check_output(self, want, got, + optionflags) + if res: + return True + + allow_unicode = optionflags & _get_allow_unicode_flag() + allow_bytes = optionflags & _get_allow_bytes_flag() + if not allow_unicode and not allow_bytes: + return False + + else: # pragma: no cover + def remove_prefixes(regex, txt): + return re.sub(regex, r'\1\2', txt) + + if allow_unicode: + want = remove_prefixes(self._unicode_literal_re, want) + got = remove_prefixes(self._unicode_literal_re, got) + if allow_bytes: + want = remove_prefixes(self._bytes_literal_re, want) + got = remove_prefixes(self._bytes_literal_re, got) + res = doctest.OutputChecker.check_output(self, want, got, + optionflags) + return res + + _get_checker.LiteralsOutputChecker = LiteralsOutputChecker + return _get_checker.LiteralsOutputChecker() + + +def _get_allow_unicode_flag(): + """ + Registers and returns the ALLOW_UNICODE flag. + """ + import doctest + return doctest.register_optionflag('ALLOW_UNICODE') + + +def _get_allow_bytes_flag(): + """ + Registers and returns the ALLOW_BYTES flag. 
+ """ + import doctest + return doctest.register_optionflag('ALLOW_BYTES') + + +def _get_report_choice(key): + """ + This function returns the actual `doctest` module flag value, we want to do it as late as possible to avoid + importing `doctest` and all its dependencies when parsing options, as it adds overhead and breaks tests. + """ + import doctest + + return { + DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF, + DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE, + DOCTEST_REPORT_CHOICE_NONE: 0, + }[key] + +@pytest.fixture(scope='session') +def doctest_namespace(): + """ + Inject names into the doctest namespace. + """ + return dict() diff --git a/tests/_lib_vendors/_pytest/fixtures.py b/tests/_lib_vendors/_pytest/fixtures.py new file mode 100644 index 0000000..28bcd4d --- /dev/null +++ b/tests/_lib_vendors/_pytest/fixtures.py @@ -0,0 +1,1134 @@ +import sys + +from py._code.code import FormattedExcinfo + +import py +import pytest +import warnings + +import inspect +import _pytest +from _pytest._code.code import TerminalRepr +from _pytest.compat import ( + NOTSET, exc_clear, _format_args, + getfslineno, get_real_func, + is_generator, isclass, getimfunc, + getlocation, getfuncargnames, +) + +def pytest_sessionstart(session): + session._fixturemanager = FixtureManager(session) + + +scopename2class = {} + + +scope2props = dict(session=()) +scope2props["module"] = ("fspath", "module") +scope2props["class"] = scope2props["module"] + ("cls",) +scope2props["instance"] = scope2props["class"] + ("instance", ) +scope2props["function"] = scope2props["instance"] + ("function", "keywords") + +def scopeproperty(name=None, doc=None): + def decoratescope(func): + scopename = name or func.__name__ + + def provide(self): + if func.__name__ in scope2props[self.scope]: + return func(self) + raise AttributeError("%s not available in %s-scoped context" % ( + scopename, self.scope)) + + return property(provide, None, None, func.__doc__) + return decoratescope + + +def pytest_namespace(): + scopename2class.update({ + 'class': pytest.Class, + 'module': pytest.Module, + 'function': pytest.Item, + }) + return { + 'fixture': fixture, + 'yield_fixture': yield_fixture, + 'collect': {'_fillfuncargs': fillfixtures} + } + + +def get_scope_node(node, scope): + cls = scopename2class.get(scope) + if cls is None: + if scope == "session": + return node.session + raise ValueError("unknown scope") + return node.getparent(cls) + + +def add_funcarg_pseudo_fixture_def(collector, metafunc, fixturemanager): + # this function will transform all collected calls to a functions + # if they use direct funcargs (i.e. direct parametrization) + # because we want later test execution to be able to rely on + # an existing FixtureDef structure for all arguments. + # XXX we can probably avoid this algorithm if we modify CallSpec2 + # to directly care for creating the fixturedefs within its methods. 
+ if not metafunc._calls[0].funcargs: + return # this function call does not have direct parametrization + # collect funcargs of all callspecs into a list of values + arg2params = {} + arg2scope = {} + for callspec in metafunc._calls: + for argname, argvalue in callspec.funcargs.items(): + assert argname not in callspec.params + callspec.params[argname] = argvalue + arg2params_list = arg2params.setdefault(argname, []) + callspec.indices[argname] = len(arg2params_list) + arg2params_list.append(argvalue) + if argname not in arg2scope: + scopenum = callspec._arg2scopenum.get(argname, + scopenum_function) + arg2scope[argname] = scopes[scopenum] + callspec.funcargs.clear() + + # register artificial FixtureDef's so that later at test execution + # time we can rely on a proper FixtureDef to exist for fixture setup. + arg2fixturedefs = metafunc._arg2fixturedefs + for argname, valuelist in arg2params.items(): + # if we have a scope that is higher than function we need + # to make sure we only ever create an according fixturedef on + # a per-scope basis. We thus store and cache the fixturedef on the + # node related to the scope. + scope = arg2scope[argname] + node = None + if scope != "function": + node = get_scope_node(collector, scope) + if node is None: + assert scope == "class" and isinstance(collector, pytest.Module) + # use module-level collector for class-scope (for now) + node = collector + if node and argname in node._name2pseudofixturedef: + arg2fixturedefs[argname] = [node._name2pseudofixturedef[argname]] + else: + fixturedef = FixtureDef(fixturemanager, '', argname, + get_direct_param_fixture_func, + arg2scope[argname], + valuelist, False, False) + arg2fixturedefs[argname] = [fixturedef] + if node is not None: + node._name2pseudofixturedef[argname] = fixturedef + + + +def getfixturemarker(obj): + """ return fixturemarker or None if it doesn't exist or raised + exceptions.""" + try: + return getattr(obj, "_pytestfixturefunction", None) + except KeyboardInterrupt: + raise + except Exception: + # some objects raise errors like request (from flask import request) + # we don't expect them to be fixture functions + return None + + + +def get_parametrized_fixture_keys(item, scopenum): + """ return list of keys for all parametrized arguments which match + the specified scope. 
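+
+    As a sketch: for a module-scoped parametrized argument ``db`` with
+    parameter index 0, the generated key is ``("db", 0, item.fspath)``.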
""" + assert scopenum < scopenum_function # function + try: + cs = item.callspec + except AttributeError: + pass + else: + # cs.indictes.items() is random order of argnames but + # then again different functions (items) can change order of + # arguments so it doesn't matter much probably + for argname, param_index in cs.indices.items(): + if cs._arg2scopenum[argname] != scopenum: + continue + if scopenum == 0: # session + key = (argname, param_index) + elif scopenum == 1: # module + key = (argname, param_index, item.fspath) + elif scopenum == 2: # class + key = (argname, param_index, item.fspath, item.cls) + yield key + + +# algorithm for sorting on a per-parametrized resource setup basis +# it is called for scopenum==0 (session) first and performs sorting +# down to the lower scopes such as to minimize number of "high scope" +# setups and teardowns + +def reorder_items(items): + argkeys_cache = {} + for scopenum in range(0, scopenum_function): + argkeys_cache[scopenum] = d = {} + for item in items: + keys = set(get_parametrized_fixture_keys(item, scopenum)) + if keys: + d[item] = keys + return reorder_items_atscope(items, set(), argkeys_cache, 0) + +def reorder_items_atscope(items, ignore, argkeys_cache, scopenum): + if scopenum >= scopenum_function or len(items) < 3: + return items + items_done = [] + while 1: + items_before, items_same, items_other, newignore = \ + slice_items(items, ignore, argkeys_cache[scopenum]) + items_before = reorder_items_atscope( + items_before, ignore, argkeys_cache,scopenum+1) + if items_same is None: + # nothing to reorder in this scope + assert items_other is None + return items_done + items_before + items_done.extend(items_before) + items = items_same + items_other + ignore = newignore + + +def slice_items(items, ignore, scoped_argkeys_cache): + # we pick the first item which uses a fixture instance in the + # requested scope and which we haven't seen yet. We slice the input + # items list into a list of items_nomatch, items_same and + # items_other + if scoped_argkeys_cache: # do we need to do work at all? + it = iter(items) + # first find a slicing key + for i, item in enumerate(it): + argkeys = scoped_argkeys_cache.get(item) + if argkeys is not None: + argkeys = argkeys.difference(ignore) + if argkeys: # found a slicing key + slicing_argkey = argkeys.pop() + items_before = items[:i] + items_same = [item] + items_other = [] + # now slice the remainder of the list + for item in it: + argkeys = scoped_argkeys_cache.get(item) + if argkeys and slicing_argkey in argkeys and \ + slicing_argkey not in ignore: + items_same.append(item) + else: + items_other.append(item) + newignore = ignore.copy() + newignore.add(slicing_argkey) + return (items_before, items_same, items_other, newignore) + return items, None, None, None + + + +class FuncargnamesCompatAttr: + """ helper class so that Metafunc, Function and FixtureRequest + don't need to each define the "funcargnames" compatibility attribute. + """ + @property + def funcargnames(self): + """ alias attribute for ``fixturenames`` for pre-2.3 compatibility""" + return self.fixturenames + + +def fillfixtures(function): + """ fill missing funcargs for a test function. """ + try: + request = function._request + except AttributeError: + # XXX this special code path is only expected to execute + # with the oejskit plugin. It uses classes with funcargs + # and we thus have to work a bit to allow this. 
+ fm = function.session._fixturemanager + fi = fm.getfixtureinfo(function.parent, function.obj, None) + function._fixtureinfo = fi + request = function._request = FixtureRequest(function) + request._fillfixtures() + # prune out funcargs for jstests + newfuncargs = {} + for name in fi.argnames: + newfuncargs[name] = function.funcargs[name] + function.funcargs = newfuncargs + else: + request._fillfixtures() + + + +def get_direct_param_fixture_func(request): + return request.param + +class FuncFixtureInfo: + def __init__(self, argnames, names_closure, name2fixturedefs): + self.argnames = argnames + self.names_closure = names_closure + self.name2fixturedefs = name2fixturedefs + + +class FixtureRequest(FuncargnamesCompatAttr): + """ A request for a fixture from a test or fixture function. + + A request object gives access to the requesting test context + and has an optional ``param`` attribute in case + the fixture is parametrized indirectly. + """ + + def __init__(self, pyfuncitem): + self._pyfuncitem = pyfuncitem + #: fixture for which this request is being performed + self.fixturename = None + #: Scope string, one of "function", "class", "module", "session" + self.scope = "function" + self._fixture_values = {} # argname -> fixture value + self._fixture_defs = {} # argname -> FixtureDef + fixtureinfo = pyfuncitem._fixtureinfo + self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy() + self._arg2index = {} + self._fixturemanager = pyfuncitem.session._fixturemanager + + @property + def fixturenames(self): + # backward incompatible note: now a readonly property + return list(self._pyfuncitem._fixtureinfo.names_closure) + + @property + def node(self): + """ underlying collection node (depends on current request scope)""" + return self._getscopeitem(self.scope) + + + def _getnextfixturedef(self, argname): + fixturedefs = self._arg2fixturedefs.get(argname, None) + if fixturedefs is None: + # we arrive here because of a a dynamic call to + # getfixturevalue(argname) usage which was naturally + # not known at parsing/collection time + parentid = self._pyfuncitem.parent.nodeid + fixturedefs = self._fixturemanager.getfixturedefs(argname, parentid) + self._arg2fixturedefs[argname] = fixturedefs + # fixturedefs list is immutable so we maintain a decreasing index + index = self._arg2index.get(argname, 0) - 1 + if fixturedefs is None or (-index > len(fixturedefs)): + raise FixtureLookupError(argname, self) + self._arg2index[argname] = index + return fixturedefs[index] + + @property + def config(self): + """ the pytest config object associated with this request. """ + return self._pyfuncitem.config + + + @scopeproperty() + def function(self): + """ test function object if the request has a per-function scope. """ + return self._pyfuncitem.obj + + @scopeproperty("class") + def cls(self): + """ class (can be None) where the test function was collected. """ + clscol = self._pyfuncitem.getparent(pytest.Class) + if clscol: + return clscol.obj + + @property + def instance(self): + """ instance (can be None) on which test function was collected. """ + # unittest support hack, see _pytest.unittest.TestCaseFunction + try: + return self._pyfuncitem._testcase + except AttributeError: + function = getattr(self, "function", None) + if function is not None: + return py.builtin._getimself(function) + + @scopeproperty() + def module(self): + """ python module object where the test function was collected. 
""" + return self._pyfuncitem.getparent(pytest.Module).obj + + @scopeproperty() + def fspath(self): + """ the file system path of the test module which collected this test. """ + return self._pyfuncitem.fspath + + @property + def keywords(self): + """ keywords/markers dictionary for the underlying node. """ + return self.node.keywords + + @property + def session(self): + """ pytest session object. """ + return self._pyfuncitem.session + + def addfinalizer(self, finalizer): + """ add finalizer/teardown function to be called after the + last test within the requesting test context finished + execution. """ + # XXX usually this method is shadowed by fixturedef specific ones + self._addfinalizer(finalizer, scope=self.scope) + + def _addfinalizer(self, finalizer, scope): + colitem = self._getscopeitem(scope) + self._pyfuncitem.session._setupstate.addfinalizer( + finalizer=finalizer, colitem=colitem) + + def applymarker(self, marker): + """ Apply a marker to a single test function invocation. + This method is useful if you don't want to have a keyword/marker + on all function invocations. + + :arg marker: a :py:class:`_pytest.mark.MarkDecorator` object + created by a call to ``pytest.mark.NAME(...)``. + """ + try: + self.node.keywords[marker.markname] = marker + except AttributeError: + raise ValueError(marker) + + def raiseerror(self, msg): + """ raise a FixtureLookupError with the given message. """ + raise self._fixturemanager.FixtureLookupError(None, self, msg) + + def _fillfixtures(self): + item = self._pyfuncitem + fixturenames = getattr(item, "fixturenames", self.fixturenames) + for argname in fixturenames: + if argname not in item.funcargs: + item.funcargs[argname] = self.getfixturevalue(argname) + + def cached_setup(self, setup, teardown=None, scope="module", extrakey=None): + """ (deprecated) Return a testing resource managed by ``setup`` & + ``teardown`` calls. ``scope`` and ``extrakey`` determine when the + ``teardown`` function will be called so that subsequent calls to + ``setup`` would recreate the resource. With pytest-2.3 you often + do not need ``cached_setup()`` as you can directly declare a scope + on a fixture function and register a finalizer through + ``request.addfinalizer()``. + + :arg teardown: function receiving a previously setup resource. + :arg setup: a no-argument function creating a resource. + :arg scope: a string value out of ``function``, ``class``, ``module`` + or ``session`` indicating the caching lifecycle of the resource. + :arg extrakey: added to internal caching key of (funcargname, scope). + """ + if not hasattr(self.config, '_setupcache'): + self.config._setupcache = {} # XXX weakref? + cachekey = (self.fixturename, self._getscopeitem(scope), extrakey) + cache = self.config._setupcache + try: + val = cache[cachekey] + except KeyError: + self._check_scope(self.fixturename, self.scope, scope) + val = setup() + cache[cachekey] = val + if teardown is not None: + def finalizer(): + del cache[cachekey] + teardown(val) + self._addfinalizer(finalizer, scope=scope) + return val + + def getfixturevalue(self, argname): + """ Dynamically run a named fixture function. + + Declaring fixtures via function argument is recommended where possible. + But if you can only decide whether to use another fixture at test + setup time, you may use this function to retrieve it inside a fixture + or test function body. + """ + return self._get_active_fixturedef(argname).cached_result[0] + + def getfuncargvalue(self, argname): + """ Deprecated, use getfixturevalue. 
""" + from _pytest import deprecated + warnings.warn( + deprecated.GETFUNCARGVALUE, + DeprecationWarning) + return self.getfixturevalue(argname) + + def _get_active_fixturedef(self, argname): + try: + return self._fixture_defs[argname] + except KeyError: + try: + fixturedef = self._getnextfixturedef(argname) + except FixtureLookupError: + if argname == "request": + class PseudoFixtureDef: + cached_result = (self, [0], None) + scope = "function" + return PseudoFixtureDef + raise + # remove indent to prevent the python3 exception + # from leaking into the call + result = self._getfixturevalue(fixturedef) + self._fixture_values[argname] = result + self._fixture_defs[argname] = fixturedef + return fixturedef + + def _get_fixturestack(self): + current = self + l = [] + while 1: + fixturedef = getattr(current, "_fixturedef", None) + if fixturedef is None: + l.reverse() + return l + l.append(fixturedef) + current = current._parent_request + + def _getfixturevalue(self, fixturedef): + # prepare a subrequest object before calling fixture function + # (latter managed by fixturedef) + argname = fixturedef.argname + funcitem = self._pyfuncitem + scope = fixturedef.scope + try: + param = funcitem.callspec.getparam(argname) + except (AttributeError, ValueError): + param = NOTSET + param_index = 0 + if fixturedef.params is not None: + frame = inspect.stack()[3] + frameinfo = inspect.getframeinfo(frame[0]) + source_path = frameinfo.filename + source_lineno = frameinfo.lineno + source_path = py.path.local(source_path) + if source_path.relto(funcitem.config.rootdir): + source_path = source_path.relto(funcitem.config.rootdir) + msg = ( + "The requested fixture has no parameter defined for the " + "current test.\n\nRequested fixture '{0}' defined in:\n{1}" + "\n\nRequested here:\n{2}:{3}".format( + fixturedef.argname, + getlocation(fixturedef.func, funcitem.config.rootdir), + source_path, + source_lineno, + ) + ) + pytest.fail(msg) + else: + # indices might not be set if old-style metafunc.addcall() was used + param_index = funcitem.callspec.indices.get(argname, 0) + # if a parametrize invocation set a scope it will override + # the static scope defined with the fixture function + paramscopenum = funcitem.callspec._arg2scopenum.get(argname) + if paramscopenum is not None: + scope = scopes[paramscopenum] + + subrequest = SubRequest(self, scope, param, param_index, fixturedef) + + # check if a higher-level scoped fixture accesses a lower level one + subrequest._check_scope(argname, self.scope, scope) + + # clear sys.exc_info before invoking the fixture (python bug?) 
+        # if it's not explicitly cleared it will leak into the call
+        exc_clear()
+        try:
+            # call the fixture function
+            val = fixturedef.execute(request=subrequest)
+        finally:
+            # if fixture function failed it might have registered finalizers
+            self.session._setupstate.addfinalizer(fixturedef.finish,
+                                                  subrequest.node)
+        return val
+
+    def _check_scope(self, argname, invoking_scope, requested_scope):
+        if argname == "request":
+            return
+        if scopemismatch(invoking_scope, requested_scope):
+            # try to report something helpful
+            lines = self._factorytraceback()
+            pytest.fail("ScopeMismatch: You tried to access the %r scoped "
+                "fixture %r with a %r scoped request object, "
+                "involved factories\n%s" %(
+                (requested_scope, argname, invoking_scope, "\n".join(lines))),
+                pytrace=False)
+
+    def _factorytraceback(self):
+        lines = []
+        for fixturedef in self._get_fixturestack():
+            factory = fixturedef.func
+            fs, lineno = getfslineno(factory)
+            p = self._pyfuncitem.session.fspath.bestrelpath(fs)
+            args = _format_args(factory)
+            lines.append("%s:%d: def %s%s" %(
+                p, lineno, factory.__name__, args))
+        return lines
+
+    def _getscopeitem(self, scope):
+        if scope == "function":
+            # this might also be a non-function Item despite its attribute name
+            return self._pyfuncitem
+        node = get_scope_node(self._pyfuncitem, scope)
+        if node is None and scope == "class":
+            # fallback to function item itself
+            node = self._pyfuncitem
+        assert node
+        return node
+
+    def __repr__(self):
+        return "<FixtureRequest for %r>" %(self.node)
+
+
+class SubRequest(FixtureRequest):
+    """ a sub request for handling getting a fixture from a
+    test function/fixture. """
+    def __init__(self, request, scope, param, param_index, fixturedef):
+        self._parent_request = request
+        self.fixturename = fixturedef.argname
+        if param is not NOTSET:
+            self.param = param
+        self.param_index = param_index
+        self.scope = scope
+        self._fixturedef = fixturedef
+        self.addfinalizer = fixturedef.addfinalizer
+        self._pyfuncitem = request._pyfuncitem
+        self._fixture_values = request._fixture_values
+        self._fixture_defs = request._fixture_defs
+        self._arg2fixturedefs = request._arg2fixturedefs
+        self._arg2index = request._arg2index
+        self._fixturemanager = request._fixturemanager
+
+    def __repr__(self):
+        return "<SubRequest %r for %r>" % (self.fixturename, self._pyfuncitem)
+
+
+class ScopeMismatchError(Exception):
+    """ A fixture function tries to use a different fixture function
+    which has a lower scope (e.g. a Session one calls a function one)
+    """
+
+
+scopes = "session module class function".split()
+scopenum_function = scopes.index("function")
+
+
+def scopemismatch(currentscope, newscope):
+    return scopes.index(newscope) > scopes.index(currentscope)
+
+
+def scope2index(scope, descr, where=None):
+    """Look up the index of ``scope`` and raise a descriptive value error
+    if not defined.
+    """
+    try:
+        return scopes.index(scope)
+    except ValueError:
+        raise ValueError(
+            "{0} {1}has an unsupported scope value '{2}'".format(
+                descr, 'from {0} '.format(where) if where else '',
+                scope)
+        )
+
+
+class FixtureLookupError(LookupError):
+    """ could not return a requested Fixture (missing or invalid).
""" + def __init__(self, argname, request, msg=None): + self.argname = argname + self.request = request + self.fixturestack = request._get_fixturestack() + self.msg = msg + + def formatrepr(self): + tblines = [] + addline = tblines.append + stack = [self.request._pyfuncitem.obj] + stack.extend(map(lambda x: x.func, self.fixturestack)) + msg = self.msg + if msg is not None: + # the last fixture raise an error, let's present + # it at the requesting side + stack = stack[:-1] + for function in stack: + fspath, lineno = getfslineno(function) + try: + lines, _ = inspect.getsourcelines(get_real_func(function)) + except (IOError, IndexError, TypeError): + error_msg = "file %s, line %s: source code not available" + addline(error_msg % (fspath, lineno+1)) + else: + addline("file %s, line %s" % (fspath, lineno+1)) + for i, line in enumerate(lines): + line = line.rstrip() + addline(" " + line) + if line.lstrip().startswith('def'): + break + + if msg is None: + fm = self.request._fixturemanager + available = [] + parentid = self.request._pyfuncitem.parent.nodeid + for name, fixturedefs in fm._arg2fixturedefs.items(): + faclist = list(fm._matchfactories(fixturedefs, parentid)) + if faclist and name not in available: + available.append(name) + msg = "fixture %r not found" % (self.argname,) + msg += "\n available fixtures: %s" %(", ".join(sorted(available)),) + msg += "\n use 'pytest --fixtures [testpath]' for help on them." + + return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname) + + +class FixtureLookupErrorRepr(TerminalRepr): + def __init__(self, filename, firstlineno, tblines, errorstring, argname): + self.tblines = tblines + self.errorstring = errorstring + self.filename = filename + self.firstlineno = firstlineno + self.argname = argname + + def toterminal(self, tw): + # tw.line("FixtureLookupError: %s" %(self.argname), red=True) + for tbline in self.tblines: + tw.line(tbline.rstrip()) + lines = self.errorstring.split("\n") + if lines: + tw.line('{0} {1}'.format(FormattedExcinfo.fail_marker, + lines[0].strip()), red=True) + for line in lines[1:]: + tw.line('{0} {1}'.format(FormattedExcinfo.flow_marker, + line.strip()), red=True) + tw.line() + tw.line("%s:%d" % (self.filename, self.firstlineno+1)) + + +def fail_fixturefunc(fixturefunc, msg): + fs, lineno = getfslineno(fixturefunc) + location = "%s:%s" % (fs, lineno+1) + source = _pytest._code.Source(fixturefunc) + pytest.fail(msg + ":\n\n" + str(source.indent()) + "\n" + location, + pytrace=False) + +def call_fixture_func(fixturefunc, request, kwargs): + yieldctx = is_generator(fixturefunc) + if yieldctx: + it = fixturefunc(**kwargs) + res = next(it) + + def teardown(): + try: + next(it) + except StopIteration: + pass + else: + fail_fixturefunc(fixturefunc, + "yield_fixture function has more than one 'yield'") + + request.addfinalizer(teardown) + else: + res = fixturefunc(**kwargs) + return res + + +class FixtureDef: + """ A container for a factory definition. 
""" + def __init__(self, fixturemanager, baseid, argname, func, scope, params, + unittest=False, ids=None): + self._fixturemanager = fixturemanager + self.baseid = baseid or '' + self.has_location = baseid is not None + self.func = func + self.argname = argname + self.scope = scope + self.scopenum = scope2index( + scope or "function", + descr='fixture {0}'.format(func.__name__), + where=baseid + ) + self.params = params + startindex = unittest and 1 or None + self.argnames = getfuncargnames(func, startindex=startindex) + self.unittest = unittest + self.ids = ids + self._finalizer = [] + + def addfinalizer(self, finalizer): + self._finalizer.append(finalizer) + + def finish(self): + try: + while self._finalizer: + func = self._finalizer.pop() + func() + finally: + ihook = self._fixturemanager.session.ihook + ihook.pytest_fixture_post_finalizer(fixturedef=self) + # even if finalization fails, we invalidate + # the cached fixture value + if hasattr(self, "cached_result"): + del self.cached_result + + def execute(self, request): + # get required arguments and register our own finish() + # with their finalization + for argname in self.argnames: + fixturedef = request._get_active_fixturedef(argname) + if argname != "request": + fixturedef.addfinalizer(self.finish) + + my_cache_key = request.param_index + cached_result = getattr(self, "cached_result", None) + if cached_result is not None: + result, cache_key, err = cached_result + if my_cache_key == cache_key: + if err is not None: + py.builtin._reraise(*err) + else: + return result + # we have a previous but differently parametrized fixture instance + # so we need to tear it down before creating a new one + self.finish() + assert not hasattr(self, "cached_result") + + ihook = self._fixturemanager.session.ihook + return ihook.pytest_fixture_setup(fixturedef=self, request=request) + + def __repr__(self): + return ("" % + (self.argname, self.scope, self.baseid)) + +def pytest_fixture_setup(fixturedef, request): + """ Execution of fixture setup. """ + kwargs = {} + for argname in fixturedef.argnames: + fixdef = request._get_active_fixturedef(argname) + result, arg_cache_key, exc = fixdef.cached_result + request._check_scope(argname, request.scope, fixdef.scope) + kwargs[argname] = result + + fixturefunc = fixturedef.func + if fixturedef.unittest: + if request.instance is not None: + # bind the unbound method to the TestCase instance + fixturefunc = fixturedef.func.__get__(request.instance) + else: + # the fixture function needs to be bound to the actual + # request.instance so that code working with "fixturedef" behaves + # as expected. 
+ if request.instance is not None: + fixturefunc = getimfunc(fixturedef.func) + if fixturefunc != fixturedef.func: + fixturefunc = fixturefunc.__get__(request.instance) + my_cache_key = request.param_index + try: + result = call_fixture_func(fixturefunc, request, kwargs) + except Exception: + fixturedef.cached_result = (None, my_cache_key, sys.exc_info()) + raise + fixturedef.cached_result = (result, my_cache_key, None) + return result + + +class FixtureFunctionMarker: + def __init__(self, scope, params, autouse=False, ids=None, name=None): + self.scope = scope + self.params = params + self.autouse = autouse + self.ids = ids + self.name = name + + def __call__(self, function): + if isclass(function): + raise ValueError( + "class fixtures not supported (may be in the future)") + function._pytestfixturefunction = self + return function + + + +def fixture(scope="function", params=None, autouse=False, ids=None, name=None): + """ (return a) decorator to mark a fixture factory function. + + This decorator can be used (with or or without parameters) to define + a fixture function. The name of the fixture function can later be + referenced to cause its invocation ahead of running tests: test + modules or classes can use the pytest.mark.usefixtures(fixturename) + marker. Test functions can directly use fixture names as input + arguments in which case the fixture instance returned from the fixture + function will be injected. + + :arg scope: the scope for which this fixture is shared, one of + "function" (default), "class", "module" or "session". + + :arg params: an optional list of parameters which will cause multiple + invocations of the fixture function and all of the tests + using it. + + :arg autouse: if True, the fixture func is activated for all tests that + can see it. If False (the default) then an explicit + reference is needed to activate the fixture. + + :arg ids: list of string ids each corresponding to the params + so that they are part of the test id. If no ids are provided + they will be generated automatically from the params. + + :arg name: the name of the fixture. This defaults to the name of the + decorated function. If a fixture is used in the same module in + which it is defined, the function name of the fixture will be + shadowed by the function arg that requests the fixture; one way + to resolve this is to name the decorated function + ``fixture_`` and then use + ``@pytest.fixture(name='')``. + + Fixtures can optionally provide their values to test functions using a ``yield`` statement, + instead of ``return``. In this case, the code block after the ``yield`` statement is executed + as teardown code regardless of the test outcome. A fixture function must yield exactly once. + """ + if callable(scope) and params is None and autouse == False: + # direct decoration + return FixtureFunctionMarker( + "function", params, autouse, name=name)(scope) + if params is not None and not isinstance(params, (list, tuple)): + params = list(params) + return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name) + + +def yield_fixture(scope="function", params=None, autouse=False, ids=None, name=None): + """ (return a) decorator to mark a yield-fixture factory function. + + .. deprecated:: 3.0 + Use :py:func:`pytest.fixture` directly instead. 
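+
+    A sketch of the preferred spelling (``open_handle`` is hypothetical)::
+
+        @pytest.fixture
+        def resource():
+            handle = open_handle()   # setup
+            yield handle
+            handle.close()           # teardown runs after the yield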
+ """ + if callable(scope) and params is None and not autouse: + # direct decoration + return FixtureFunctionMarker( + "function", params, autouse, ids=ids, name=name)(scope) + else: + return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name) + + +defaultfuncargprefixmarker = fixture() + + +@fixture(scope="session") +def pytestconfig(request): + """ the pytest config object with access to command line opts.""" + return request.config + + +class FixtureManager: + """ + pytest fixtures definitions and information is stored and managed + from this class. + + During collection fm.parsefactories() is called multiple times to parse + fixture function definitions into FixtureDef objects and internal + data structures. + + During collection of test functions, metafunc-mechanics instantiate + a FuncFixtureInfo object which is cached per node/func-name. + This FuncFixtureInfo object is later retrieved by Function nodes + which themselves offer a fixturenames attribute. + + The FuncFixtureInfo object holds information about fixtures and FixtureDefs + relevant for a particular function. An initial list of fixtures is + assembled like this: + + - ini-defined usefixtures + - autouse-marked fixtures along the collection chain up from the function + - usefixtures markers at module/class/function level + - test function funcargs + + Subsequently the funcfixtureinfo.fixturenames attribute is computed + as the closure of the fixtures needed to setup the initial fixtures, + i. e. fixtures needed by fixture functions themselves are appended + to the fixturenames list. + + Upon the test-setup phases all fixturenames are instantiated, retrieved + by a lookup of their FuncFixtureInfo. + """ + + _argprefix = "pytest_funcarg__" + FixtureLookupError = FixtureLookupError + FixtureLookupErrorRepr = FixtureLookupErrorRepr + + def __init__(self, session): + self.session = session + self.config = session.config + self._arg2fixturedefs = {} + self._holderobjseen = set() + self._arg2finish = {} + self._nodeid_and_autousenames = [("", self.config.getini("usefixtures"))] + session.config.pluginmanager.register(self, "funcmanage") + + + def getfixtureinfo(self, node, func, cls, funcargs=True): + if funcargs and not hasattr(node, "nofuncargs"): + if cls is not None: + startindex = 1 + else: + startindex = None + argnames = getfuncargnames(func, startindex) + else: + argnames = () + usefixtures = getattr(func, "usefixtures", None) + initialnames = argnames + if usefixtures is not None: + initialnames = usefixtures.args + initialnames + fm = node.session._fixturemanager + names_closure, arg2fixturedefs = fm.getfixtureclosure(initialnames, + node) + return FuncFixtureInfo(argnames, names_closure, arg2fixturedefs) + + def pytest_plugin_registered(self, plugin): + nodeid = None + try: + p = py.path.local(plugin.__file__) + except AttributeError: + pass + else: + # construct the base nodeid which is later used to check + # what fixtures are visible for particular tests (as denoted + # by their test id) + if p.basename.startswith("conftest.py"): + nodeid = p.dirpath().relto(self.config.rootdir) + if p.sep != "/": + nodeid = nodeid.replace(p.sep, "/") + self.parsefactories(plugin, nodeid) + + def _getautousenames(self, nodeid): + """ return a tuple of fixture names to be used. 
""" + autousenames = [] + for baseid, basenames in self._nodeid_and_autousenames: + if nodeid.startswith(baseid): + if baseid: + i = len(baseid) + nextchar = nodeid[i:i+1] + if nextchar and nextchar not in ":/": + continue + autousenames.extend(basenames) + # make sure autousenames are sorted by scope, scopenum 0 is session + autousenames.sort( + key=lambda x: self._arg2fixturedefs[x][-1].scopenum) + return autousenames + + def getfixtureclosure(self, fixturenames, parentnode): + # collect the closure of all fixtures , starting with the given + # fixturenames as the initial set. As we have to visit all + # factory definitions anyway, we also return a arg2fixturedefs + # mapping so that the caller can reuse it and does not have + # to re-discover fixturedefs again for each fixturename + # (discovering matching fixtures for a given name/node is expensive) + + parentid = parentnode.nodeid + fixturenames_closure = self._getautousenames(parentid) + + def merge(otherlist): + for arg in otherlist: + if arg not in fixturenames_closure: + fixturenames_closure.append(arg) + + merge(fixturenames) + arg2fixturedefs = {} + lastlen = -1 + while lastlen != len(fixturenames_closure): + lastlen = len(fixturenames_closure) + for argname in fixturenames_closure: + if argname in arg2fixturedefs: + continue + fixturedefs = self.getfixturedefs(argname, parentid) + if fixturedefs: + arg2fixturedefs[argname] = fixturedefs + merge(fixturedefs[-1].argnames) + return fixturenames_closure, arg2fixturedefs + + def pytest_generate_tests(self, metafunc): + for argname in metafunc.fixturenames: + faclist = metafunc._arg2fixturedefs.get(argname) + if faclist: + fixturedef = faclist[-1] + if fixturedef.params is not None: + func_params = getattr(getattr(metafunc.function, 'parametrize', None), 'args', [[None]]) + # skip directly parametrized arguments + argnames = func_params[0] + if not isinstance(argnames, (tuple, list)): + argnames = [x.strip() for x in argnames.split(",") if x.strip()] + if argname not in func_params and argname not in argnames: + metafunc.parametrize(argname, fixturedef.params, + indirect=True, scope=fixturedef.scope, + ids=fixturedef.ids) + else: + continue # will raise FixtureLookupError at setup time + + def pytest_collection_modifyitems(self, items): + # separate parametrized setups + items[:] = reorder_items(items) + + def parsefactories(self, node_or_obj, nodeid=NOTSET, unittest=False): + if nodeid is not NOTSET: + holderobj = node_or_obj + else: + holderobj = node_or_obj.obj + nodeid = node_or_obj.nodeid + if holderobj in self._holderobjseen: + return + self._holderobjseen.add(holderobj) + autousenames = [] + for name in dir(holderobj): + obj = getattr(holderobj, name, None) + # fixture functions have a pytest_funcarg__ prefix (pre-2.3 style) + # or are "@pytest.fixture" marked + marker = getfixturemarker(obj) + if marker is None: + if not name.startswith(self._argprefix): + continue + if not callable(obj): + continue + marker = defaultfuncargprefixmarker + from _pytest import deprecated + self.config.warn('C1', deprecated.FUNCARG_PREFIX.format(name=name)) + name = name[len(self._argprefix):] + elif not isinstance(marker, FixtureFunctionMarker): + # magic globals with __getattr__ might have got us a wrong + # fixture attribute + continue + else: + if marker.name: + name = marker.name + msg = 'fixtures cannot have "pytest_funcarg__" prefix ' \ + 'and be decorated with @pytest.fixture:\n%s' % name + assert not name.startswith(self._argprefix), msg + + fixture_def = FixtureDef(self, nodeid, name, 
obj, + marker.scope, marker.params, + unittest=unittest, ids=marker.ids) + + faclist = self._arg2fixturedefs.setdefault(name, []) + if fixture_def.has_location: + faclist.append(fixture_def) + else: + # fixturedefs with no location are at the front + # so this inserts the current fixturedef after the + # existing fixturedefs from external plugins but + # before the fixturedefs provided in conftests. + i = len([f for f in faclist if not f.has_location]) + faclist.insert(i, fixture_def) + if marker.autouse: + autousenames.append(name) + + if autousenames: + self._nodeid_and_autousenames.append((nodeid or '', autousenames)) + + def getfixturedefs(self, argname, nodeid): + """ + Gets a list of fixtures which are applicable to the given node id. + + :param str argname: name of the fixture to search for + :param str nodeid: full node id of the requesting test. + :return: list[FixtureDef] + """ + try: + fixturedefs = self._arg2fixturedefs[argname] + except KeyError: + return None + else: + return tuple(self._matchfactories(fixturedefs, nodeid)) + + def _matchfactories(self, fixturedefs, nodeid): + for fixturedef in fixturedefs: + if nodeid.startswith(fixturedef.baseid): + yield fixturedef + diff --git a/tests/_lib_vendors/_pytest/freeze_support.py b/tests/_lib_vendors/_pytest/freeze_support.py new file mode 100644 index 0000000..f78ccd2 --- /dev/null +++ b/tests/_lib_vendors/_pytest/freeze_support.py @@ -0,0 +1,45 @@ +""" +Provides a function to report all internal modules for using freezing tools +pytest +""" + +def pytest_namespace(): + return {'freeze_includes': freeze_includes} + + +def freeze_includes(): + """ + Returns a list of module names used by py.test that should be + included by cx_freeze. + """ + import py + import _pytest + result = list(_iter_all_modules(py)) + result += list(_iter_all_modules(_pytest)) + return result + + +def _iter_all_modules(package, prefix=''): + """ + Iterates over the names of all modules that can be found in the given + package, recursively. + Example: + _iter_all_modules(_pytest) -> + ['_pytest.assertion.newinterpret', + '_pytest.capture', + '_pytest.core', + ... + ] + """ + import os + import pkgutil + if type(package) is not str: + path, prefix = package.__path__[0], package.__name__ + '.' + else: + path = package + for _, name, is_package in pkgutil.iter_modules([path]): + if is_package: + for m in _iter_all_modules(os.path.join(path, name), prefix=name + '.'): + yield prefix + m + else: + yield prefix + name \ No newline at end of file diff --git a/tests/_lib_vendors/_pytest/helpconfig.py b/tests/_lib_vendors/_pytest/helpconfig.py new file mode 100644 index 0000000..6e66b11 --- /dev/null +++ b/tests/_lib_vendors/_pytest/helpconfig.py @@ -0,0 +1,144 @@ +""" version info, help messages, tracing configuration. """ +import py +import pytest +import os, sys + +def pytest_addoption(parser): + group = parser.getgroup('debugconfig') + group.addoption('--version', action="store_true", + help="display pytest lib version and import information.") + group._addoption("-h", "--help", action="store_true", dest="help", + help="show help message and configuration info") + group._addoption('-p', action="append", dest="plugins", default = [], + metavar="name", + help="early-load given plugin (multi-allowed). " + "To avoid loading of plugins, use the `no:` prefix, e.g. 
" + "`no:doctest`.") + group.addoption('--traceconfig', '--trace-config', + action="store_true", default=False, + help="trace considerations of conftest.py files."), + group.addoption('--debug', + action="store_true", dest="debug", default=False, + help="store internal tracing debug information in 'pytestdebug.log'.") + group._addoption( + '-o', '--override-ini', nargs='*', dest="override_ini", + action="append", + help="override config option with option=value style, e.g. `-o xfail_strict=True`.") + + +@pytest.hookimpl(hookwrapper=True) +def pytest_cmdline_parse(): + outcome = yield + config = outcome.get_result() + if config.option.debug: + path = os.path.abspath("pytestdebug.log") + debugfile = open(path, 'w') + debugfile.write("versions pytest-%s, py-%s, " + "python-%s\ncwd=%s\nargs=%s\n\n" %( + pytest.__version__, py.__version__, + ".".join(map(str, sys.version_info)), + os.getcwd(), config._origargs)) + config.trace.root.setwriter(debugfile.write) + undo_tracing = config.pluginmanager.enable_tracing() + sys.stderr.write("writing pytestdebug information to %s\n" % path) + + def unset_tracing(): + debugfile.close() + sys.stderr.write("wrote pytestdebug information to %s\n" % + debugfile.name) + config.trace.root.setwriter(None) + undo_tracing() + + config.add_cleanup(unset_tracing) + +def pytest_cmdline_main(config): + if config.option.version: + p = py.path.local(pytest.__file__) + sys.stderr.write("This is pytest version %s, imported from %s\n" % + (pytest.__version__, p)) + plugininfo = getpluginversioninfo(config) + if plugininfo: + for line in plugininfo: + sys.stderr.write(line + "\n") + return 0 + elif config.option.help: + config._do_configure() + showhelp(config) + config._ensure_unconfigure() + return 0 + +def showhelp(config): + reporter = config.pluginmanager.get_plugin('terminalreporter') + tw = reporter._tw + tw.write(config._parser.optparser.format_help()) + tw.line() + tw.line() + tw.line("[pytest] ini-options in the first " + "pytest.ini|tox.ini|setup.cfg file found:") + tw.line() + + for name in config._parser._ininames: + help, type, default = config._parser._inidict[name] + if type is None: + type = "string" + spec = "%s (%s)" % (name, type) + line = " %-24s %s" %(spec, help) + tw.line(line[:tw.fullwidth]) + + tw.line() + tw.line("environment variables:") + vars = [ + ("PYTEST_ADDOPTS", "extra command line options"), + ("PYTEST_PLUGINS", "comma-separated plugins to load during startup"), + ("PYTEST_DEBUG", "set to enable debug tracing of pytest's internals") + ] + for name, help in vars: + tw.line(" %-24s %s" % (name, help)) + tw.line() + tw.line() + + tw.line("to see available markers type: pytest --markers") + tw.line("to see available fixtures type: pytest --fixtures") + tw.line("(shown according to specified file_or_dir or current dir " + "if not specified)") + + for warningreport in reporter.stats.get('warnings', []): + tw.line("warning : " + warningreport.message, red=True) + return + + +conftest_options = [ + ('pytest_plugins', 'list of plugin names to load'), +] + +def getpluginversioninfo(config): + lines = [] + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + lines.append("setuptools registered plugins:") + for plugin, dist in plugininfo: + loc = getattr(plugin, '__file__', repr(plugin)) + content = "%s-%s at %s" % (dist.project_name, dist.version, loc) + lines.append(" " + content) + return lines + +def pytest_report_header(config): + lines = [] + if config.option.debug or config.option.traceconfig: + lines.append("using: 
pytest-%s pylib-%s" % + (pytest.__version__,py.__version__)) + + verinfo = getpluginversioninfo(config) + if verinfo: + lines.extend(verinfo) + + if config.option.traceconfig: + lines.append("active plugins:") + items = config.pluginmanager.list_name_plugin() + for name, plugin in items: + if hasattr(plugin, '__file__'): + r = plugin.__file__ + else: + r = repr(plugin) + lines.append(" %-20s: %s" %(name, r)) + return lines diff --git a/tests/_lib_vendors/_pytest/hookspec.py b/tests/_lib_vendors/_pytest/hookspec.py new file mode 100644 index 0000000..b5f51ec --- /dev/null +++ b/tests/_lib_vendors/_pytest/hookspec.py @@ -0,0 +1,314 @@ +""" hook specifications for pytest plugins, invoked from main.py and builtin plugins. """ + +from _pytest._pluggy import HookspecMarker + +hookspec = HookspecMarker("pytest") + +# ------------------------------------------------------------------------- +# Initialization hooks called for every plugin +# ------------------------------------------------------------------------- + +@hookspec(historic=True) +def pytest_addhooks(pluginmanager): + """called at plugin registration time to allow adding new hooks via a call to + pluginmanager.add_hookspecs(module_or_class, prefix).""" + + +@hookspec(historic=True) +def pytest_namespace(): + """return dict of name->object to be made globally available in + the pytest namespace. This hook is called at plugin registration + time. + """ + +@hookspec(historic=True) +def pytest_plugin_registered(plugin, manager): + """ a new pytest plugin got registered. """ + + +@hookspec(historic=True) +def pytest_addoption(parser): + """register argparse-style options and ini-style config values, + called once at the beginning of a test run. + + .. note:: + + This function should be implemented only in plugins or ``conftest.py`` + files situated at the tests root directory due to how pytest + :ref:`discovers plugins during startup `. + + :arg parser: To add command line options, call + :py:func:`parser.addoption(...) <_pytest.config.Parser.addoption>`. + To add ini-file values call :py:func:`parser.addini(...) + <_pytest.config.Parser.addini>`. + + Options can later be accessed through the + :py:class:`config <_pytest.config.Config>` object, respectively: + + - :py:func:`config.getoption(name) <_pytest.config.Config.getoption>` to + retrieve the value of a command line option. + + - :py:func:`config.getini(name) <_pytest.config.Config.getini>` to retrieve + a value read from an ini-style file. + + The config object is passed around on many internal objects via the ``.config`` + attribute or can be retrieved as the ``pytestconfig`` fixture or accessed + via (deprecated) ``pytest.config``. + """ + +@hookspec(historic=True) +def pytest_configure(config): + """ called after command line options have been parsed + and all plugins and initial conftest files been loaded. + This hook is called for every plugin. + """ + +# ------------------------------------------------------------------------- +# Bootstrapping hooks called for plugins registered early enough: +# internal and 3rd party plugins as well as directly +# discoverable conftest.py local plugins. +# ------------------------------------------------------------------------- + +@hookspec(firstresult=True) +def pytest_cmdline_parse(pluginmanager, args): + """return initialized config object, parsing the specified args. """ + +def pytest_cmdline_preparse(config, args): + """(deprecated) modify command line arguments before option parsing. 
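+
+    A minimal conftest.py sketch (the hook mutates ``args`` in place)::
+
+        def pytest_cmdline_preparse(config, args):
+            args[:] = ["-v"] + args    # e.g. always run verbose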
""" + +@hookspec(firstresult=True) +def pytest_cmdline_main(config): + """ called for performing the main command line action. The default + implementation will invoke the configure hooks and runtest_mainloop. """ + +def pytest_load_initial_conftests(early_config, parser, args): + """ implements the loading of initial conftest files ahead + of command line option parsing. """ + + +# ------------------------------------------------------------------------- +# collection hooks +# ------------------------------------------------------------------------- + +@hookspec(firstresult=True) +def pytest_collection(session): + """ perform the collection protocol for the given session. """ + +def pytest_collection_modifyitems(session, config, items): + """ called after collection has been performed, may filter or re-order + the items in-place.""" + +def pytest_collection_finish(session): + """ called after collection has been performed and modified. """ + +@hookspec(firstresult=True) +def pytest_ignore_collect(path, config): + """ return True to prevent considering this path for collection. + This hook is consulted for all files and directories prior to calling + more specific hooks. + """ + +@hookspec(firstresult=True) +def pytest_collect_directory(path, parent): + """ called before traversing a directory for collection files. """ + +def pytest_collect_file(path, parent): + """ return collection Node or None for the given path. Any new node + needs to have the specified ``parent`` as a parent.""" + +# logging hooks for collection +def pytest_collectstart(collector): + """ collector starts collecting. """ + +def pytest_itemcollected(item): + """ we just collected a test item. """ + +def pytest_collectreport(report): + """ collector finished collecting. """ + +def pytest_deselected(items): + """ called for test items deselected by keyword. """ + +@hookspec(firstresult=True) +def pytest_make_collect_report(collector): + """ perform ``collector.collect()`` and return a CollectReport. """ + +# ------------------------------------------------------------------------- +# Python test function related hooks +# ------------------------------------------------------------------------- + +@hookspec(firstresult=True) +def pytest_pycollect_makemodule(path, parent): + """ return a Module collector or None for the given path. + This hook will be called for each matching test module path. + The pytest_collect_file hook needs to be used if you want to + create test modules for files that do not match as a test module. + """ + +@hookspec(firstresult=True) +def pytest_pycollect_makeitem(collector, name, obj): + """ return custom item/collector for a python object in a module, or None. """ + +@hookspec(firstresult=True) +def pytest_pyfunc_call(pyfuncitem): + """ call underlying test function. """ + +def pytest_generate_tests(metafunc): + """ generate (multiple) parametrized calls to a test function.""" + +@hookspec(firstresult=True) +def pytest_make_parametrize_id(config, val): + """Return a user-friendly string representation of the given ``val`` that will be used + by @pytest.mark.parametrize calls. Return None if the hook doesn't know about ``val``. + """ + +# ------------------------------------------------------------------------- +# generic runtest related hooks +# ------------------------------------------------------------------------- + +@hookspec(firstresult=True) +def pytest_runtestloop(session): + """ called for performing the main runtest loop + (after collection finished). 
""" + +def pytest_itemstart(item, node): + """ (deprecated, use pytest_runtest_logstart). """ + +@hookspec(firstresult=True) +def pytest_runtest_protocol(item, nextitem): + """ implements the runtest_setup/call/teardown protocol for + the given test item, including capturing exceptions and calling + reporting hooks. + + :arg item: test item for which the runtest protocol is performed. + + :arg nextitem: the scheduled-to-be-next test item (or None if this + is the end my friend). This argument is passed on to + :py:func:`pytest_runtest_teardown`. + + :return boolean: True if no further hook implementations should be invoked. + """ + +def pytest_runtest_logstart(nodeid, location): + """ signal the start of running a single test item. """ + +def pytest_runtest_setup(item): + """ called before ``pytest_runtest_call(item)``. """ + +def pytest_runtest_call(item): + """ called to execute the test ``item``. """ + +def pytest_runtest_teardown(item, nextitem): + """ called after ``pytest_runtest_call``. + + :arg nextitem: the scheduled-to-be-next test item (None if no further + test item is scheduled). This argument can be used to + perform exact teardowns, i.e. calling just enough finalizers + so that nextitem only needs to call setup-functions. + """ + +@hookspec(firstresult=True) +def pytest_runtest_makereport(item, call): + """ return a :py:class:`_pytest.runner.TestReport` object + for the given :py:class:`pytest.Item` and + :py:class:`_pytest.runner.CallInfo`. + """ + +def pytest_runtest_logreport(report): + """ process a test setup/call/teardown report relating to + the respective phase of executing a test. """ + +# ------------------------------------------------------------------------- +# Fixture related hooks +# ------------------------------------------------------------------------- + +@hookspec(firstresult=True) +def pytest_fixture_setup(fixturedef, request): + """ performs fixture setup execution. """ + +def pytest_fixture_post_finalizer(fixturedef): + """ called after fixture teardown, but before the cache is cleared so + the fixture result cache ``fixturedef.cached_result`` can + still be accessed.""" + +# ------------------------------------------------------------------------- +# test session related hooks +# ------------------------------------------------------------------------- + +def pytest_sessionstart(session): + """ before session.main() is called. """ + +def pytest_sessionfinish(session, exitstatus): + """ whole test run finishes. """ + +def pytest_unconfigure(config): + """ called before test process is exited. """ + + +# ------------------------------------------------------------------------- +# hooks for customising the assert methods +# ------------------------------------------------------------------------- + +def pytest_assertrepr_compare(config, op, left, right): + """return explanation for comparisons in failing assert expressions. + + Return None for no custom explanation, otherwise return a list + of strings. The strings will be joined by newlines but any newlines + *in* a string will be escaped. Note that all but the first line will + be indented sligthly, the intention is for the first line to be a summary. 
+    """
+
+# -------------------------------------------------------------------------
+# hooks for influencing reporting (invoked from _pytest_terminal)
+# -------------------------------------------------------------------------
+
+def pytest_report_header(config, startdir):
+    """ return a string to be displayed as header info for terminal reporting."""
+
+@hookspec(firstresult=True)
+def pytest_report_teststatus(report):
+    """ return result-category, shortletter and verbose word for reporting."""
+
+def pytest_terminal_summary(terminalreporter, exitstatus):
+    """ add additional section in terminal summary reporting. """
+
+
+@hookspec(historic=True)
+def pytest_logwarning(message, code, nodeid, fslocation):
+    """ process a warning specified by a message, a code string,
+    a nodeid and fslocation (both of which may be None
+    if the warning is not tied to a particular node/location)."""
+
+# -------------------------------------------------------------------------
+# doctest hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_doctest_prepare_content(content):
+    """ return processed content for a given doctest"""
+
+# -------------------------------------------------------------------------
+# error handling and internal debugging hooks
+# -------------------------------------------------------------------------
+
+def pytest_internalerror(excrepr, excinfo):
+    """ called for internal errors. """
+
+def pytest_keyboard_interrupt(excinfo):
+    """ called for keyboard interrupt. """
+
+def pytest_exception_interact(node, call, report):
+    """called when an exception was raised which can potentially be
+    interactively handled.
+
+    This hook is only called if an exception was raised
+    that is not an internal exception like ``skip.Exception``.
+    """
+
+def pytest_enter_pdb(config):
+    """ called upon pdb.set_trace(), can be used by plugins to take special
+    action just before the python debugger enters in interactive mode.
+
+    :arg config: pytest config object
+    :type config: _pytest.config.Config
+    """
diff --git a/tests/_lib_vendors/_pytest/junitxml.py b/tests/_lib_vendors/_pytest/junitxml.py
new file mode 100644
index 0000000..317382e
--- /dev/null
+++ b/tests/_lib_vendors/_pytest/junitxml.py
@@ -0,0 +1,413 @@
+"""
+    report test results in JUnit-XML format,
+    for use with Jenkins and build integration servers.
+
+
+Based on initial code from Ross Lawley.
+"""
+# Output conforms to https://github.com/jenkinsci/xunit-plugin/blob/master/
+# src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd
+
+import functools
+import py
+import os
+import re
+import sys
+import time
+import pytest
+from _pytest.config import filename_arg
+
+# Python 2.X and 3.X compatibility
+if sys.version_info[0] < 3:
+    from codecs import open
+else:
+    unichr = chr
+    unicode = str
+    long = int
+
+
+class Junit(py.xml.Namespace):
+    pass
+
+
+# We need to get the subset of the invalid unicode ranges according to
+# XML 1.0 which are valid in this python build. Hence we calculate
+# this dynamically instead of hardcoding it. 
The spec range of valid +# chars is: Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] +# | [#x10000-#x10FFFF] +_legal_chars = (0x09, 0x0A, 0x0d) +_legal_ranges = ( + (0x20, 0x7E), (0x80, 0xD7FF), (0xE000, 0xFFFD), (0x10000, 0x10FFFF), +) +_legal_xml_re = [ + unicode("%s-%s") % (unichr(low), unichr(high)) + for (low, high) in _legal_ranges if low < sys.maxunicode +] +_legal_xml_re = [unichr(x) for x in _legal_chars] + _legal_xml_re +illegal_xml_re = re.compile(unicode('[^%s]') % unicode('').join(_legal_xml_re)) +del _legal_chars +del _legal_ranges +del _legal_xml_re + +_py_ext_re = re.compile(r"\.py$") + + +def bin_xml_escape(arg): + def repl(matchobj): + i = ord(matchobj.group()) + if i <= 0xFF: + return unicode('#x%02X') % i + else: + return unicode('#x%04X') % i + + return py.xml.raw(illegal_xml_re.sub(repl, py.xml.escape(arg))) + + +class _NodeReporter(object): + def __init__(self, nodeid, xml): + + self.id = nodeid + self.xml = xml + self.add_stats = self.xml.add_stats + self.duration = 0 + self.properties = [] + self.nodes = [] + self.testcase = None + self.attrs = {} + + def append(self, node): + self.xml.add_stats(type(node).__name__) + self.nodes.append(node) + + def add_property(self, name, value): + self.properties.append((str(name), bin_xml_escape(value))) + + def make_properties_node(self): + """Return a Junit node containing custom properties, if any. + """ + if self.properties: + return Junit.properties([ + Junit.property(name=name, value=value) + for name, value in self.properties + ]) + return '' + + def record_testreport(self, testreport): + assert not self.testcase + names = mangle_test_address(testreport.nodeid) + classnames = names[:-1] + if self.xml.prefix: + classnames.insert(0, self.xml.prefix) + attrs = { + "classname": ".".join(classnames), + "name": bin_xml_escape(names[-1]), + "file": testreport.location[0], + } + if testreport.location[1] is not None: + attrs["line"] = testreport.location[1] + self.attrs = attrs + + def to_xml(self): + testcase = Junit.testcase(time=self.duration, **self.attrs) + testcase.append(self.make_properties_node()) + for node in self.nodes: + testcase.append(node) + return testcase + + def _add_simple(self, kind, message, data=None): + data = bin_xml_escape(data) + node = kind(data, message=message) + self.append(node) + + def _write_captured_output(self, report): + for capname in ('out', 'err'): + content = getattr(report, 'capstd' + capname) + if content: + tag = getattr(Junit, 'system-' + capname) + self.append(tag(bin_xml_escape(content))) + + def append_pass(self, report): + self.add_stats('passed') + self._write_captured_output(report) + + def append_failure(self, report): + # msg = str(report.longrepr.reprtraceback.extraline) + if hasattr(report, "wasxfail"): + self._add_simple( + Junit.skipped, + "xfail-marked test passes unexpectedly") + else: + if hasattr(report.longrepr, "reprcrash"): + message = report.longrepr.reprcrash.message + elif isinstance(report.longrepr, (unicode, str)): + message = report.longrepr + else: + message = str(report.longrepr) + message = bin_xml_escape(message) + fail = Junit.failure(message=message) + fail.append(bin_xml_escape(report.longrepr)) + self.append(fail) + self._write_captured_output(report) + + def append_collect_error(self, report): + # msg = str(report.longrepr.reprtraceback.extraline) + self.append(Junit.error(bin_xml_escape(report.longrepr), + message="collection failure")) + + def append_collect_skipped(self, report): + self._add_simple( + Junit.skipped, "collection skipped", 
report.longrepr) + + def append_error(self, report): + if getattr(report, 'when', None) == 'teardown': + msg = "test teardown failure" + else: + msg = "test setup failure" + self._add_simple( + Junit.error, msg, report.longrepr) + self._write_captured_output(report) + + def append_skipped(self, report): + if hasattr(report, "wasxfail"): + self._add_simple( + Junit.skipped, "expected test failure", report.wasxfail + ) + else: + filename, lineno, skipreason = report.longrepr + if skipreason.startswith("Skipped: "): + skipreason = bin_xml_escape(skipreason[9:]) + self.append( + Junit.skipped("%s:%s: %s" % (filename, lineno, skipreason), + type="pytest.skip", + message=skipreason)) + self._write_captured_output(report) + + def finalize(self): + data = self.to_xml().unicode(indent=0) + self.__dict__.clear() + self.to_xml = lambda: py.xml.raw(data) + + +@pytest.fixture +def record_xml_property(request): + """Add extra xml properties to the tag for the calling test. + The fixture is callable with ``(name, value)``, with value being automatically + xml-encoded. + """ + request.node.warn( + code='C3', + message='record_xml_property is an experimental feature', + ) + xml = getattr(request.config, "_xml", None) + if xml is not None: + node_reporter = xml.node_reporter(request.node.nodeid) + return node_reporter.add_property + else: + def add_property_noop(name, value): + pass + + return add_property_noop + + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting") + group.addoption( + '--junitxml', '--junit-xml', + action="store", + dest="xmlpath", + metavar="path", + type=functools.partial(filename_arg, optname="--junitxml"), + default=None, + help="create junit-xml style report file at given path.") + group.addoption( + '--junitprefix', '--junit-prefix', + action="store", + metavar="str", + default=None, + help="prepend prefix to classnames in junit-xml output") + + +def pytest_configure(config): + xmlpath = config.option.xmlpath + # prevent opening xmllog on slave nodes (xdist) + if xmlpath and not hasattr(config, 'slaveinput'): + config._xml = LogXML(xmlpath, config.option.junitprefix) + config.pluginmanager.register(config._xml) + + +def pytest_unconfigure(config): + xml = getattr(config, '_xml', None) + if xml: + del config._xml + config.pluginmanager.unregister(xml) + + +def mangle_test_address(address): + path, possible_open_bracket, params = address.partition('[') + names = path.split("::") + try: + names.remove('()') + except ValueError: + pass + # convert file path to dotted path + names[0] = names[0].replace("/", '.') + names[0] = _py_ext_re.sub("", names[0]) + # put any params back + names[-1] += possible_open_bracket + params + return names + + +class LogXML(object): + def __init__(self, logfile, prefix): + logfile = os.path.expanduser(os.path.expandvars(logfile)) + self.logfile = os.path.normpath(os.path.abspath(logfile)) + self.prefix = prefix + self.stats = dict.fromkeys([ + 'error', + 'passed', + 'failure', + 'skipped', + ], 0) + self.node_reporters = {} # nodeid -> _NodeReporter + self.node_reporters_ordered = [] + self.global_properties = [] + + def finalize(self, report): + nodeid = getattr(report, 'nodeid', report) + # local hack to handle xdist report order + slavenode = getattr(report, 'node', None) + reporter = self.node_reporters.pop((nodeid, slavenode)) + if reporter is not None: + reporter.finalize() + + def node_reporter(self, report): + nodeid = getattr(report, 'nodeid', report) + # local hack to handle xdist report order + slavenode = 
getattr(report, 'node', None)
+
+        key = nodeid, slavenode
+
+        if key in self.node_reporters:
+            # TODO: breaks for --dist=each
+            return self.node_reporters[key]
+
+        reporter = _NodeReporter(nodeid, self)
+
+        self.node_reporters[key] = reporter
+        self.node_reporters_ordered.append(reporter)
+
+        return reporter
+
+    def add_stats(self, key):
+        if key in self.stats:
+            self.stats[key] += 1
+
+    def _opentestcase(self, report):
+        reporter = self.node_reporter(report)
+        reporter.record_testreport(report)
+        return reporter
+
+    def pytest_runtest_logreport(self, report):
+        """handle a setup/call/teardown report, generating the appropriate
+        xml tags as necessary.
+
+        note: due to plugins like xdist, this hook may be called in interlaced
+        order with reports from other nodes. for example:
+
+        usual call order:
+            -> setup node1
+            -> call node1
+            -> teardown node1
+            -> setup node2
+            -> call node2
+            -> teardown node2
+
+        possible call order in xdist:
+            -> setup node1
+            -> call node1
+            -> setup node2
+            -> call node2
+            -> teardown node2
+            -> teardown node1
+        """
+        if report.passed:
+            if report.when == "call":  # ignore setup/teardown
+                reporter = self._opentestcase(report)
+                reporter.append_pass(report)
+        elif report.failed:
+            reporter = self._opentestcase(report)
+            if report.when == "call":
+                reporter.append_failure(report)
+            else:
+                reporter.append_error(report)
+        elif report.skipped:
+            reporter = self._opentestcase(report)
+            reporter.append_skipped(report)
+        self.update_testcase_duration(report)
+        if report.when == "teardown":
+            self.finalize(report)
+
+    def update_testcase_duration(self, report):
+        """accumulates total duration for nodeid from given report and updates
+        the Junit.testcase with the new total if already created.
+        """
+        reporter = self.node_reporter(report)
+        reporter.duration += getattr(report, 'duration', 0.0)
+
+    def pytest_collectreport(self, report):
+        if not report.passed:
+            reporter = self._opentestcase(report)
+            if report.failed:
+                reporter.append_collect_error(report)
+            else:
+                reporter.append_collect_skipped(report)
+
+    def pytest_internalerror(self, excrepr):
+        reporter = self.node_reporter('internal')
+        reporter.attrs.update(classname="pytest", name='internal')
+        reporter._add_simple(Junit.error, 'internal error', excrepr)
+
+    def pytest_sessionstart(self):
+        self.suite_start_time = time.time()
+
+    def pytest_sessionfinish(self):
+        dirname = os.path.dirname(os.path.abspath(self.logfile))
+        if not os.path.isdir(dirname):
+            os.makedirs(dirname)
+        logfile = open(self.logfile, 'w', encoding='utf-8')
+        suite_stop_time = time.time()
+        suite_time_delta = suite_stop_time - self.suite_start_time
+
+        numtests = self.stats['passed'] + self.stats['failure'] + self.stats['skipped'] + self.stats['error']
+
+        logfile.write('<?xml version="1.0" encoding="utf-8"?>')
+
+        logfile.write(Junit.testsuite(
+            self._get_global_properties_node(),
+            [x.to_xml() for x in self.node_reporters_ordered],
+            name="pytest",
+            errors=self.stats['error'],
+            failures=self.stats['failure'],
+            skips=self.stats['skipped'],
+            tests=numtests,
+            time="%.3f" % suite_time_delta, ).unicode(indent=0))
+        logfile.close()
+
+    def pytest_terminal_summary(self, terminalreporter):
+        terminalreporter.write_sep("-",
+                                   "generated xml file: %s" % (self.logfile))
+
+    def add_global_property(self, name, value):
+        self.global_properties.append((str(name), bin_xml_escape(value)))
+
+    def _get_global_properties_node(self):
+        """Return a Junit node containing custom properties, if any.
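+
+        For example, after ``add_global_property("hostname", "ci-01")`` the
+        resulting testsuite would contain (editor's sketch; names are
+        illustrative)::
+
+            <properties>
+                <property name="hostname" value="ci-01"/>
+            </properties>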
+ """ + if self.global_properties: + return Junit.properties( + [ + Junit.property(name=name, value=value) + for name, value in self.global_properties + ] + ) + return '' diff --git a/tests/_lib_vendors/_pytest/main.py b/tests/_lib_vendors/_pytest/main.py new file mode 100644 index 0000000..52876c1 --- /dev/null +++ b/tests/_lib_vendors/_pytest/main.py @@ -0,0 +1,762 @@ +""" core implementation of testing process: init, session, runtest loop. """ +import functools +import os +import sys + +import _pytest +import _pytest._code +import py +import pytest +try: + from collections import MutableMapping as MappingMixin +except ImportError: + from UserDict import DictMixin as MappingMixin + +from _pytest.config import directory_arg +from _pytest.runner import collect_one_node + +tracebackcutdir = py.path.local(_pytest.__file__).dirpath() + +# exitcodes for the command line +EXIT_OK = 0 +EXIT_TESTSFAILED = 1 +EXIT_INTERRUPTED = 2 +EXIT_INTERNALERROR = 3 +EXIT_USAGEERROR = 4 +EXIT_NOTESTSCOLLECTED = 5 + +def pytest_addoption(parser): + parser.addini("norecursedirs", "directory patterns to avoid for recursion", + type="args", default=['.*', 'build', 'dist', 'CVS', '_darcs', '{arch}', '*.egg']) + parser.addini("testpaths", "directories to search for tests when no files or directories are given in the command line.", + type="args", default=[]) + #parser.addini("dirpatterns", + # "patterns specifying possible locations of test files", + # type="linelist", default=["**/test_*.txt", + # "**/test_*.py", "**/*_test.py"] + #) + group = parser.getgroup("general", "running and selection options") + group._addoption('-x', '--exitfirst', action="store_const", + dest="maxfail", const=1, + help="exit instantly on first error or failed test."), + group._addoption('--maxfail', metavar="num", + action="store", type=int, dest="maxfail", default=0, + help="exit after first num failures or errors.") + group._addoption('--strict', action="store_true", + help="run pytest in strict mode, warnings become errors.") + group._addoption("-c", metavar="file", type=str, dest="inifilename", + help="load configuration from `file` instead of trying to locate one of the implicit configuration files.") + group._addoption("--continue-on-collection-errors", action="store_true", + default=False, dest="continue_on_collection_errors", + help="Force test execution even if collection errors occur.") + + group = parser.getgroup("collect", "collection") + group.addoption('--collectonly', '--collect-only', action="store_true", + help="only collect tests, don't execute them."), + group.addoption('--pyargs', action="store_true", + help="try to interpret all arguments as python packages.") + group.addoption("--ignore", action="append", metavar="path", + help="ignore path during collection (multi-allowed).") + # when changing this to --conf-cut-dir, config.py Conftest.setinitial + # needs upgrading as well + group.addoption('--confcutdir', dest="confcutdir", default=None, + metavar="dir", type=functools.partial(directory_arg, optname="--confcutdir"), + help="only load conftest.py's relative to specified dir.") + group.addoption('--noconftest', action="store_true", + dest="noconftest", default=False, + help="Don't load any conftest.py files.") + group.addoption('--keepduplicates', '--keep-duplicates', action="store_true", + dest="keepduplicates", default=False, + help="Keep duplicate tests.") + + group = parser.getgroup("debugconfig", + "test session debugging and configuration") + group.addoption('--basetemp', dest="basetemp", default=None, 
metavar="dir", + help="base temporary directory for this test run.") + + +def pytest_namespace(): + collect = dict(Item=Item, Collector=Collector, File=File, Session=Session) + return dict(collect=collect) + + +def pytest_configure(config): + pytest.config = config # compatibiltiy + + +def wrap_session(config, doit): + """Skeleton command line program""" + session = Session(config) + session.exitstatus = EXIT_OK + initstate = 0 + try: + try: + config._do_configure() + initstate = 1 + config.hook.pytest_sessionstart(session=session) + initstate = 2 + session.exitstatus = doit(config, session) or 0 + except pytest.UsageError: + raise + except KeyboardInterrupt: + excinfo = _pytest._code.ExceptionInfo() + if initstate < 2 and isinstance( + excinfo.value, pytest.exit.Exception): + sys.stderr.write('{0}: {1}\n'.format( + excinfo.typename, excinfo.value.msg)) + config.hook.pytest_keyboard_interrupt(excinfo=excinfo) + session.exitstatus = EXIT_INTERRUPTED + except: + excinfo = _pytest._code.ExceptionInfo() + config.notify_exception(excinfo, config.option) + session.exitstatus = EXIT_INTERNALERROR + if excinfo.errisinstance(SystemExit): + sys.stderr.write("mainloop: caught Spurious SystemExit!\n") + + finally: + excinfo = None # Explicitly break reference cycle. + session.startdir.chdir() + if initstate >= 2: + config.hook.pytest_sessionfinish( + session=session, + exitstatus=session.exitstatus) + config._ensure_unconfigure() + return session.exitstatus + +def pytest_cmdline_main(config): + return wrap_session(config, _main) + +def _main(config, session): + """ default command line protocol for initialization, session, + running tests and reporting. """ + config.hook.pytest_collection(session=session) + config.hook.pytest_runtestloop(session=session) + + if session.testsfailed: + return EXIT_TESTSFAILED + elif session.testscollected == 0: + return EXIT_NOTESTSCOLLECTED + +def pytest_collection(session): + return session.perform_collect() + +def pytest_runtestloop(session): + if (session.testsfailed and + not session.config.option.continue_on_collection_errors): + raise session.Interrupted( + "%d errors during collection" % session.testsfailed) + + if session.config.option.collectonly: + return True + + for i, item in enumerate(session.items): + nextitem = session.items[i+1] if i+1 < len(session.items) else None + item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) + if session.shouldstop: + raise session.Interrupted(session.shouldstop) + return True + +def pytest_ignore_collect(path, config): + p = path.dirpath() + ignore_paths = config._getconftest_pathlist("collect_ignore", path=p) + ignore_paths = ignore_paths or [] + excludeopt = config.getoption("ignore") + if excludeopt: + ignore_paths.extend([py.path.local(x) for x in excludeopt]) + + if path in ignore_paths: + return True + + # Skip duplicate paths. 
+    keepduplicates = config.getoption("keepduplicates")
+    duplicate_paths = config.pluginmanager._duplicatepaths
+    if not keepduplicates:
+        if path in duplicate_paths:
+            return True
+        else:
+            duplicate_paths.add(path)
+
+    return False
+
+
+class FSHookProxy:
+    def __init__(self, fspath, pm, remove_mods):
+        self.fspath = fspath
+        self.pm = pm
+        self.remove_mods = remove_mods
+
+    def __getattr__(self, name):
+        x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods)
+        self.__dict__[name] = x
+        return x

+def compatproperty(name):
+    def fget(self):
+        import warnings
+        warnings.warn("This usage is deprecated, please use pytest.{0} instead".format(name),
+                      PendingDeprecationWarning, stacklevel=2)
+        return getattr(pytest, name)
+
+    return property(fget)

+class NodeKeywords(MappingMixin):
+    def __init__(self, node):
+        self.node = node
+        self.parent = node.parent
+        self._markers = {node.name: True}
+
+    def __getitem__(self, key):
+        try:
+            return self._markers[key]
+        except KeyError:
+            if self.parent is None:
+                raise
+            return self.parent.keywords[key]
+
+    def __setitem__(self, key, value):
+        self._markers[key] = value
+
+    def __delitem__(self, key):
+        raise ValueError("cannot delete key in keywords dict")
+
+    def __iter__(self):
+        seen = set(self._markers)
+        if self.parent is not None:
+            seen.update(self.parent.keywords)
+        return iter(seen)
+
+    def __len__(self):
+        return len(self.__iter__())
+
+    def keys(self):
+        return list(self)
+
+    def __repr__(self):
+        return "<NodeKeywords for node %s>" % (self.node, )
+
+
+class Node(object):
+    """ base class for Collector and Item, the components of the test
+    collection tree.  Collector subclasses have children, Items are
+    terminal nodes."""
+
+    def __init__(self, name, parent=None, config=None, session=None):
+        #: a unique name within the scope of the parent node
+        self.name = name
+
+        #: the parent collector node.
+        self.parent = parent
+
+        #: the pytest config object
+        self.config = config or parent.config
+
+        #: the session this node is part of
+        self.session = session or parent.session
+
+        #: filesystem path where this node was collected from (can be None)
+        self.fspath = getattr(parent, 'fspath', None)
+
+        #: keywords/markers collected from all scopes
+        self.keywords = NodeKeywords(self)
+
+        #: allow adding of extra keywords to use for matching
+        self.extra_keyword_matches = set()
+
+        # used for storing artificial fixturedefs for direct parametrization
+        self._name2pseudofixturedef = {}
+
+    @property
+    def ihook(self):
+        """ fspath sensitive hook proxy used to call pytest hooks"""
+        return self.session.gethookproxy(self.fspath)
+
+    Module = compatproperty("Module")
+    Class = compatproperty("Class")
+    Instance = compatproperty("Instance")
+    Function = compatproperty("Function")
+    File = compatproperty("File")
+    Item = compatproperty("Item")
+
+    def _getcustomclass(self, name):
+        cls = getattr(self, name)
+        if cls != getattr(pytest, name):
+            py.log._apiwarn("2.0", "use of node.%s is deprecated, "
+                            "use pytest_pycollect_makeitem(...) to create custom "
+                            "collection nodes" % name)
+        return cls
+
+    def __repr__(self):
+        return "<%s %r>" %(self.__class__.__name__,
+                           getattr(self, 'name', None))
+
+    def warn(self, code, message):
+        """ generate a warning with the given code and message for this
+        item. 
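+
+        For example (editor's sketch, mirroring a warning pytest itself
+        emits)::
+
+            item.warn("C1", "cannot collect test class because it has an __init__ constructor")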
""" + assert isinstance(code, str) + fslocation = getattr(self, "location", None) + if fslocation is None: + fslocation = getattr(self, "fspath", None) + else: + fslocation = "%s:%s" % (fslocation[0], fslocation[1] + 1) + + self.ihook.pytest_logwarning.call_historic(kwargs=dict( + code=code, message=message, + nodeid=self.nodeid, fslocation=fslocation)) + + # methods for ordering nodes + @property + def nodeid(self): + """ a ::-separated string denoting its collection tree address. """ + try: + return self._nodeid + except AttributeError: + self._nodeid = x = self._makeid() + return x + + def _makeid(self): + return self.parent.nodeid + "::" + self.name + + def __hash__(self): + return hash(self.nodeid) + + def setup(self): + pass + + def teardown(self): + pass + + def _memoizedcall(self, attrname, function): + exattrname = "_ex_" + attrname + failure = getattr(self, exattrname, None) + if failure is not None: + py.builtin._reraise(failure[0], failure[1], failure[2]) + if hasattr(self, attrname): + return getattr(self, attrname) + try: + res = function() + except py.builtin._sysex: + raise + except: + failure = sys.exc_info() + setattr(self, exattrname, failure) + raise + setattr(self, attrname, res) + return res + + def listchain(self): + """ return list of all parent collectors up to self, + starting from root of collection tree. """ + chain = [] + item = self + while item is not None: + chain.append(item) + item = item.parent + chain.reverse() + return chain + + def add_marker(self, marker): + """ dynamically add a marker object to the node. + + ``marker`` can be a string or pytest.mark.* instance. + """ + from _pytest.mark import MarkDecorator + if isinstance(marker, py.builtin._basestring): + marker = MarkDecorator(marker) + elif not isinstance(marker, MarkDecorator): + raise ValueError("is not a string or pytest.mark.* Marker") + self.keywords[marker.name] = marker + + def get_marker(self, name): + """ get a marker object from this node or None if + the node doesn't have a marker with that name. """ + val = self.keywords.get(name, None) + if val is not None: + from _pytest.mark import MarkInfo, MarkDecorator + if isinstance(val, (MarkDecorator, MarkInfo)): + return val + + def listextrakeywords(self): + """ Return a set of all extra keywords in self and any parents.""" + extra_keywords = set() + item = self + for item in self.listchain(): + extra_keywords.update(item.extra_keyword_matches) + return extra_keywords + + def listnames(self): + return [x.name for x in self.listchain()] + + def addfinalizer(self, fin): + """ register a function to be called when this node is finalized. + + This method can only be called when this node is active + in a setup chain, for example during self.setup(). 
+ """ + self.session._setupstate.addfinalizer(fin, self) + + def getparent(self, cls): + """ get the next parent node (including ourself) + which is an instance of the given class""" + current = self + while current and not isinstance(current, cls): + current = current.parent + return current + + def _prunetraceback(self, excinfo): + pass + + def _repr_failure_py(self, excinfo, style=None): + fm = self.session._fixturemanager + if excinfo.errisinstance(fm.FixtureLookupError): + return excinfo.value.formatrepr() + tbfilter = True + if self.config.option.fulltrace: + style="long" + else: + tb = _pytest._code.Traceback([excinfo.traceback[-1]]) + self._prunetraceback(excinfo) + if len(excinfo.traceback) == 0: + excinfo.traceback = tb + tbfilter = False # prunetraceback already does it + if style == "auto": + style = "long" + # XXX should excinfo.getrepr record all data and toterminal() process it? + if style is None: + if self.config.option.tbstyle == "short": + style = "short" + else: + style = "long" + + try: + os.getcwd() + abspath = False + except OSError: + abspath = True + + return excinfo.getrepr(funcargs=True, abspath=abspath, + showlocals=self.config.option.showlocals, + style=style, tbfilter=tbfilter) + + repr_failure = _repr_failure_py + +class Collector(Node): + """ Collector instances create children through collect() + and thus iteratively build a tree. + """ + + class CollectError(Exception): + """ an error during collection, contains a custom message. """ + + def collect(self): + """ returns a list of children (items and collectors) + for this collection node. + """ + raise NotImplementedError("abstract") + + def repr_failure(self, excinfo): + """ represent a collection failure. """ + if excinfo.errisinstance(self.CollectError): + exc = excinfo.value + return str(exc.args[0]) + return self._repr_failure_py(excinfo, style="short") + + def _memocollect(self): + """ internal helper method to cache results of calling collect(). """ + return self._memoizedcall('_collected', lambda: list(self.collect())) + + def _prunetraceback(self, excinfo): + if hasattr(self, 'fspath'): + traceback = excinfo.traceback + ntraceback = traceback.cut(path=self.fspath) + if ntraceback == traceback: + ntraceback = ntraceback.cut(excludepath=tracebackcutdir) + excinfo.traceback = ntraceback.filter() + +class FSCollector(Collector): + def __init__(self, fspath, parent=None, config=None, session=None): + fspath = py.path.local(fspath) # xxx only for test_resultlog.py? + name = fspath.basename + if parent is not None: + rel = fspath.relto(parent.fspath) + if rel: + name = rel + name = name.replace(os.sep, "/") + super(FSCollector, self).__init__(name, parent, config, session) + self.fspath = fspath + + def _makeid(self): + relpath = self.fspath.relto(self.config.rootdir) + if os.sep != "/": + relpath = relpath.replace(os.sep, "/") + return relpath + +class File(FSCollector): + """ base class for collecting tests from a file. """ + +class Item(Node): + """ a basic test invocation item. Note that for a single function + there might be multiple test invocation items. 
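+
+    For example a single parametrized test function is collected as several
+    items (editor's sketch)::
+
+        @pytest.mark.parametrize("n", [1, 2])
+        def test_positive(n):      # collected as test_positive[1], test_positive[2]
+            assert n > 0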
+    """
+    nextitem = None
+
+    def __init__(self, name, parent=None, config=None, session=None):
+        super(Item, self).__init__(name, parent, config, session)
+        self._report_sections = []
+
+    def add_report_section(self, when, key, content):
+        if content:
+            self._report_sections.append((when, key, content))
+
+    def reportinfo(self):
+        return self.fspath, None, ""
+
+    @property
+    def location(self):
+        try:
+            return self._location
+        except AttributeError:
+            location = self.reportinfo()
+            # bestrelpath is a quite slow function
+            cache = self.config.__dict__.setdefault("_bestrelpathcache", {})
+            try:
+                fspath = cache[location[0]]
+            except KeyError:
+                fspath = self.session.fspath.bestrelpath(location[0])
+                cache[location[0]] = fspath
+            location = (fspath, location[1], str(location[2]))
+            self._location = location
+            return location

+class NoMatch(Exception):
+    """ raised if matching cannot locate matching names. """

+class Interrupted(KeyboardInterrupt):
+    """ signals an interrupted test run. """
+    __module__ = 'builtins'  # for py3

+class Session(FSCollector):
+    Interrupted = Interrupted
+
+    def __init__(self, config):
+        FSCollector.__init__(self, config.rootdir, parent=None,
+                             config=config, session=self)
+        self.testsfailed = 0
+        self.testscollected = 0
+        self.shouldstop = False
+        self.trace = config.trace.root.get("collection")
+        self._norecursepatterns = config.getini("norecursedirs")
+        self.startdir = py.path.local()
+        self.config.pluginmanager.register(self, name="session")
+
+    def _makeid(self):
+        return ""
+
+    @pytest.hookimpl(tryfirst=True)
+    def pytest_collectstart(self):
+        if self.shouldstop:
+            raise self.Interrupted(self.shouldstop)
+
+    @pytest.hookimpl(tryfirst=True)
+    def pytest_runtest_logreport(self, report):
+        if report.failed and not hasattr(report, 'wasxfail'):
+            self.testsfailed += 1
+            maxfail = self.config.getvalue("maxfail")
+            if maxfail and self.testsfailed >= maxfail:
+                self.shouldstop = "stopping after %d failures" % (
+                    self.testsfailed)
+    pytest_collectreport = pytest_runtest_logreport
+
+    def isinitpath(self, path):
+        return path in self._initialpaths
+
+    def gethookproxy(self, fspath):
+        # check if we have the common case of running
+        # hooks with all conftest.py files
+        pm = self.config.pluginmanager
+        my_conftestmodules = pm._getconftestmodules(fspath)
+        remove_mods = pm._conftest_plugins.difference(my_conftestmodules)
+        if remove_mods:
+            # one or more conftests are not in use at this fspath
+            proxy = FSHookProxy(fspath, pm, remove_mods)
+        else:
+            # all plugins are active for this fspath
+            proxy = self.config.hook
+        return proxy
+
+    def perform_collect(self, args=None, genitems=True):
+        hook = self.config.hook
+        try:
+            items = self._perform_collect(args, genitems)
+            hook.pytest_collection_modifyitems(session=self,
+                                               config=self.config, items=items)
+        finally:
+            hook.pytest_collection_finish(session=self)
+        self.testscollected = len(items)
+        return items
+
+    def _perform_collect(self, args, genitems):
+        if args is None:
+            args = self.config.args
+        self.trace("perform_collect", self, args)
+        self.trace.root.indent += 1
+        self._notfound = []
+        self._initialpaths = set()
+        self._initialparts = []
+        self.items = items = []
+        for arg in args:
+            parts = self._parsearg(arg)
+            self._initialparts.append(parts)
+            self._initialpaths.add(parts[0])
+        rep = collect_one_node(self)
+        self.ihook.pytest_collectreport(report=rep)
+        self.trace.root.indent -= 1
+        if self._notfound:
+            errors = []
+            for arg, exc in self._notfound:
+                line = "(no name %r in any of %r)" % 
(arg, exc.args[0]) + errors.append("not found: %s\n%s" % (arg, line)) + #XXX: test this + raise pytest.UsageError(*errors) + if not genitems: + return rep.result + else: + if rep.passed: + for node in rep.result: + self.items.extend(self.genitems(node)) + return items + + def collect(self): + for parts in self._initialparts: + arg = "::".join(map(str, parts)) + self.trace("processing argument", arg) + self.trace.root.indent += 1 + try: + for x in self._collect(arg): + yield x + except NoMatch: + # we are inside a make_report hook so + # we cannot directly pass through the exception + self._notfound.append((arg, sys.exc_info()[1])) + + self.trace.root.indent -= 1 + + def _collect(self, arg): + names = self._parsearg(arg) + path = names.pop(0) + if path.check(dir=1): + assert not names, "invalid arg %r" %(arg,) + for path in path.visit(fil=lambda x: x.check(file=1), + rec=self._recurse, bf=True, sort=True): + for x in self._collectfile(path): + yield x + else: + assert path.check(file=1) + for x in self.matchnodes(self._collectfile(path), names): + yield x + + def _collectfile(self, path): + ihook = self.gethookproxy(path) + if not self.isinitpath(path): + if ihook.pytest_ignore_collect(path=path, config=self.config): + return () + return ihook.pytest_collect_file(path=path, parent=self) + + def _recurse(self, path): + ihook = self.gethookproxy(path.dirpath()) + if ihook.pytest_ignore_collect(path=path, config=self.config): + return + for pat in self._norecursepatterns: + if path.check(fnmatch=pat): + return False + ihook = self.gethookproxy(path) + ihook.pytest_collect_directory(path=path, parent=self) + return True + + def _tryconvertpyarg(self, x): + """Convert a dotted module name to path. + + """ + import pkgutil + try: + loader = pkgutil.find_loader(x) + except ImportError: + return x + if loader is None: + return x + # This method is sometimes invoked when AssertionRewritingHook, which + # does not define a get_filename method, is already in place: + try: + path = loader.get_filename(x) + except AttributeError: + # Retrieve path from AssertionRewritingHook: + path = loader.modules[x][0].co_filename + if loader.is_package(x): + path = os.path.dirname(path) + return path + + def _parsearg(self, arg): + """ return (fspath, names) tuple after checking the file exists. 
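+
+        Accepted forms include, e.g. (editor's note)::
+
+            tests/test_foo.py
+            tests/test_foo.py::TestClass::test_method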
""" + parts = str(arg).split("::") + if self.config.option.pyargs: + parts[0] = self._tryconvertpyarg(parts[0]) + relpath = parts[0].replace("/", os.sep) + path = self.config.invocation_dir.join(relpath, abs=True) + if not path.check(): + if self.config.option.pyargs: + raise pytest.UsageError("file or package not found: " + arg + " (missing __init__.py?)") + else: + raise pytest.UsageError("file not found: " + arg) + parts[0] = path + return parts + + def matchnodes(self, matching, names): + self.trace("matchnodes", matching, names) + self.trace.root.indent += 1 + nodes = self._matchnodes(matching, names) + num = len(nodes) + self.trace("matchnodes finished -> ", num, "nodes") + self.trace.root.indent -= 1 + if num == 0: + raise NoMatch(matching, names[:1]) + return nodes + + def _matchnodes(self, matching, names): + if not matching or not names: + return matching + name = names[0] + assert name + nextnames = names[1:] + resultnodes = [] + for node in matching: + if isinstance(node, pytest.Item): + if not names: + resultnodes.append(node) + continue + assert isinstance(node, pytest.Collector) + rep = collect_one_node(node) + if rep.passed: + has_matched = False + for x in rep.result: + # TODO: remove parametrized workaround once collection structure contains parametrization + if x.name == name or x.name.split("[")[0] == name: + resultnodes.extend(self.matchnodes([x], nextnames)) + has_matched = True + # XXX accept IDs that don't have "()" for class instances + if not has_matched and len(rep.result) == 1 and x.name == "()": + nextnames.insert(0, name) + resultnodes.extend(self.matchnodes([x], nextnames)) + node.ihook.pytest_collectreport(report=rep) + return resultnodes + + def genitems(self, node): + self.trace("genitems", node) + if isinstance(node, pytest.Item): + node.ihook.pytest_itemcollected(item=node) + yield node + else: + assert isinstance(node, pytest.Collector) + rep = collect_one_node(node) + if rep.passed: + for subnode in rep.result: + for x in self.genitems(subnode): + yield x + node.ihook.pytest_collectreport(report=rep) diff --git a/tests/_lib_vendors/_pytest/mark.py b/tests/_lib_vendors/_pytest/mark.py new file mode 100644 index 0000000..357a604 --- /dev/null +++ b/tests/_lib_vendors/_pytest/mark.py @@ -0,0 +1,328 @@ +""" generic mechanism for marking and selecting python functions. """ +import inspect + + +class MarkerError(Exception): + + """Error in use of a pytest marker/attribute.""" + + +def pytest_namespace(): + return {'mark': MarkGenerator()} + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group._addoption( + '-k', + action="store", dest="keyword", default='', metavar="EXPRESSION", + help="only run tests which match the given substring expression. " + "An expression is a python evaluatable expression " + "where all names are substring-matched against test names " + "and their parent classes. Example: -k 'test_method or test_" + "other' matches all test functions and classes whose name " + "contains 'test_method' or 'test_other'. " + "Additionally keywords are matched to classes and functions " + "containing extra names in their 'extra_keyword_matches' set, " + "as well as functions which have names assigned directly to them." + ) + + group._addoption( + "-m", + action="store", dest="markexpr", default="", metavar="MARKEXPR", + help="only run tests matching given mark expression. " + "example: -m 'mark1 and not mark2'." 
+    )
+
+    group.addoption(
+        "--markers", action="store_true",
+        help="show markers (builtin, plugin and per-project ones)."
+    )
+
+    parser.addini("markers", "markers for test functions", 'linelist')
+
+
+def pytest_cmdline_main(config):
+    import _pytest.config
+    if config.option.markers:
+        config._do_configure()
+        tw = _pytest.config.create_terminal_writer(config)
+        for line in config.getini("markers"):
+            name, rest = line.split(":", 1)
+            tw.write("@pytest.mark.%s:" % name, bold=True)
+            tw.line(rest)
+            tw.line()
+        config._ensure_unconfigure()
+        return 0
+
+
+pytest_cmdline_main.tryfirst = True
+
+
+def pytest_collection_modifyitems(items, config):
+    keywordexpr = config.option.keyword.lstrip()
+    matchexpr = config.option.markexpr
+    if not keywordexpr and not matchexpr:
+        return
+    # pytest used to allow "-" for negating
+    # but today we just allow "-" at the beginning, use "not" instead
+    # we will probably remove "-" altogether soon
+    if keywordexpr.startswith("-"):
+        keywordexpr = "not " + keywordexpr[1:]
+    selectuntil = False
+    if keywordexpr[-1:] == ":":
+        selectuntil = True
+        keywordexpr = keywordexpr[:-1]
+
+    remaining = []
+    deselected = []
+    for colitem in items:
+        if keywordexpr and not matchkeyword(colitem, keywordexpr):
+            deselected.append(colitem)
+        else:
+            if selectuntil:
+                keywordexpr = None
+            if matchexpr:
+                if not matchmark(colitem, matchexpr):
+                    deselected.append(colitem)
+                    continue
+            remaining.append(colitem)
+
+    if deselected:
+        config.hook.pytest_deselected(items=deselected)
+        items[:] = remaining
+
+
+class MarkMapping:
+    """Provides a local mapping for markers where item access
+    resolves to True if the marker is present. """
+    def __init__(self, keywords):
+        mymarks = set()
+        for key, value in keywords.items():
+            if isinstance(value, MarkInfo) or isinstance(value, MarkDecorator):
+                mymarks.add(key)
+        self._mymarks = mymarks
+
+    def __getitem__(self, name):
+        return name in self._mymarks
+
+
+class KeywordMapping:
+    """Provides a local mapping for keywords.
+    Given a list of names, map any substring of one of these names to True.
+    """
+    def __init__(self, names):
+        self._names = names
+
+    def __getitem__(self, subname):
+        for name in self._names:
+            if subname in name:
+                return True
+        return False
+
+
+def matchmark(colitem, markexpr):
+    """Tries to match on any marker names, attached to the given colitem."""
+    return eval(markexpr, {}, MarkMapping(colitem.keywords))
+
+
+def matchkeyword(colitem, keywordexpr):
+    """Tries to match given keyword expression to given collector item.
+
+    Will match on the name of colitem, including the names of its parents.
+    Only matches names of items which are either a :class:`Class` or a
+    :class:`Function`.
+    Additionally, matches on names in the 'extra_keyword_matches' set of
+    any item, as well as names directly assigned to test functions.
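+
+    Example expressions as given on the command line (editor's sketch)::
+
+        pytest -k "http or quick"
+        pytest -k "not slow"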
+    """
+    mapped_names = set()
+
+    # Add the names of the current item and any parent items
+    import pytest
+    for item in colitem.listchain():
+        if not isinstance(item, pytest.Instance):
+            mapped_names.add(item.name)
+
+    # Add the names added as extra keywords to current or parent items
+    for name in colitem.listextrakeywords():
+        mapped_names.add(name)
+
+    # Add the names attached to the current function through direct assignment
+    if hasattr(colitem, 'function'):
+        for name in colitem.function.__dict__:
+            mapped_names.add(name)
+
+    mapping = KeywordMapping(mapped_names)
+    if " " not in keywordexpr:
+        # special case to allow for simple "-k pass" and "-k 1.3"
+        return mapping[keywordexpr]
+    elif keywordexpr.startswith("not ") and " " not in keywordexpr[4:]:
+        return not mapping[keywordexpr[4:]]
+    return eval(keywordexpr, {}, mapping)
+
+
+def pytest_configure(config):
+    import pytest
+    if config.option.strict:
+        pytest.mark._config = config
+
+
+class MarkGenerator:
+    """ Factory for :class:`MarkDecorator` objects - exposed as
+    a ``pytest.mark`` singleton instance.  Example::
+
+        import pytest
+        @pytest.mark.slowtest
+        def test_function():
+            pass
+
+    will set a 'slowtest' :class:`MarkInfo` object
+    on the ``test_function`` object. """
+
+    def __getattr__(self, name):
+        if name[0] == "_":
+            raise AttributeError("Marker name must NOT start with underscore")
+        if hasattr(self, '_config'):
+            self._check(name)
+        return MarkDecorator(name)
+
+    def _check(self, name):
+        try:
+            if name in self._markers:
+                return
+        except AttributeError:
+            pass
+        self._markers = l = set()
+        for line in self._config.getini("markers"):
+            beginning = line.split(":", 1)
+            x = beginning[0].split("(", 1)[0]
+            l.add(x)
+        if name not in self._markers:
+            raise AttributeError("%r not a registered marker" % (name,))

+def istestfunc(func):
+    return hasattr(func, "__call__") and \
+        getattr(func, "__name__", "") != "<lambda>"

+class MarkDecorator:
+    """ A decorator for test functions and test classes.  When applied
+    it will create :class:`MarkInfo` objects which may be
+    :ref:`retrieved by hooks as item keywords `.
+    MarkDecorator instances are often created like this::
+
+        mark1 = pytest.mark.NAME              # simple MarkDecorator
+        mark2 = pytest.mark.NAME(name1=value) # parametrized MarkDecorator
+
+    and can then be applied as decorators to test functions::
+
+        @mark2
+        def test_function():
+            pass
+
+    When a MarkDecorator instance is called it does the following:
+      1. If called with a single class as its only positional argument and no
+         additional keyword arguments, it attaches itself to the class so it
+         gets applied automatically to all test cases found in that class.
+      2. If called with a single function as its only positional argument and
+         no additional keyword arguments, it attaches a MarkInfo object to the
+         function, containing all the arguments already stored internally in
+         the MarkDecorator.
+      3. When called in any other case, it performs a 'fake construction' call,
+         i.e. it returns a new MarkDecorator instance with the original
+         MarkDecorator's content updated with the arguments passed to this
+         call.
+
+    Note: The rules above prevent MarkDecorator objects from storing only a
+    single function or class reference as their positional argument with no
+    additional keyword or positional arguments.
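+
+    For example (editor's sketch; the marker name is arbitrary)::
+
+        slow10 = pytest.mark.slow(timeout=10)   # rule 3: 'fake construction'
+
+        @slow10                                 # rule 2: attaches a MarkInfo
+        def test_long_running():
+            pass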
+
+    """
+    def __init__(self, name, args=None, kwargs=None):
+        self.name = name
+        self.args = args or ()
+        self.kwargs = kwargs or {}
+
+    @property
+    def markname(self):
+        return self.name  # for backward-compat (2.4.1 had this attr)
+
+    def __repr__(self):
+        d = self.__dict__.copy()
+        name = d.pop('name')
+        return "<MarkDecorator %r %r>" % (name, d)
+
+    def __call__(self, *args, **kwargs):
+        """ if passed a single callable argument: decorate it with mark info.
+            otherwise add *args/**kwargs in-place to mark information. """
+        if args and not kwargs:
+            func = args[0]
+            is_class = inspect.isclass(func)
+            if len(args) == 1 and (istestfunc(func) or is_class):
+                if is_class:
+                    if hasattr(func, 'pytestmark'):
+                        mark_list = func.pytestmark
+                        if not isinstance(mark_list, list):
+                            mark_list = [mark_list]
+                        # always work on a copy to avoid updating pytestmark
+                        # from a superclass by accident
+                        mark_list = mark_list + [self]
+                        func.pytestmark = mark_list
+                    else:
+                        func.pytestmark = [self]
+                else:
+                    holder = getattr(func, self.name, None)
+                    if holder is None:
+                        holder = MarkInfo(
+                            self.name, self.args, self.kwargs
+                        )
+                        setattr(func, self.name, holder)
+                    else:
+                        holder.add(self.args, self.kwargs)
+                return func
+        kw = self.kwargs.copy()
+        kw.update(kwargs)
+        args = self.args + args
+        return self.__class__(self.name, args=args, kwargs=kw)
+
+
+def extract_argvalue(maybe_marked_args):
+    # TODO: incorrect mark data, the old code wasn't able to collect lists
+    # individual parametrized argument sets can be wrapped in a series
+    # of markers in which case we unwrap the values and apply the mark
+    # at Function init
+    newmarks = {}
+    argval = maybe_marked_args
+    while isinstance(argval, MarkDecorator):
+        newmark = MarkDecorator(argval.markname,
+                                argval.args[:-1], argval.kwargs)
+        newmarks[newmark.markname] = newmark
+        argval = argval.args[-1]
+    return argval, newmarks
+
+
+class MarkInfo:
+    """ Marking object created by :class:`MarkDecorator` instances. """
+    def __init__(self, name, args, kwargs):
+        #: name of attribute
+        self.name = name
+        #: positional argument list, empty if none specified
+        self.args = args
+        #: keyword argument dictionary, empty if nothing specified
+        self.kwargs = kwargs.copy()
+        self._arglist = [(args, kwargs.copy())]
+
+    def __repr__(self):
+        return "<MarkInfo %r args=%r kwargs=%r>" % (
+            self.name, self.args, self.kwargs
+        )
+
+    def add(self, args, kwargs):
+        """ add a MarkInfo with the given args and kwargs. """
+        self._arglist.append((args, kwargs))
+        self.args += args
+        self.kwargs.update(kwargs)
+
+    def __iter__(self):
+        """ yield MarkInfo objects each relating to a marking-call. """
+        for args, kwargs in self._arglist:
+            yield MarkInfo(self.name, args, kwargs)
diff --git a/tests/_lib_vendors/_pytest/monkeypatch.py b/tests/_lib_vendors/_pytest/monkeypatch.py
new file mode 100644
index 0000000..852e72b
--- /dev/null
+++ b/tests/_lib_vendors/_pytest/monkeypatch.py
@@ -0,0 +1,258 @@
+""" monkeypatching and mocking functionality. 
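+
+An illustrative use of the fixture (editor's sketch)::
+
+    import os
+
+    def test_home(monkeypatch):
+        monkeypatch.setenv("HOME", "/tmp/pytest-home")
+        assert os.environ["HOME"] == "/tmp/pytest-home"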
""" + +import os, sys +import re + +from py.builtin import _basestring + +import pytest + +RE_IMPORT_ERROR_NAME = re.compile("^No module named (.*)$") + + +@pytest.fixture +def monkeypatch(request): + """The returned ``monkeypatch`` fixture provides these + helper methods to modify objects, dictionaries or os.environ:: + + monkeypatch.setattr(obj, name, value, raising=True) + monkeypatch.delattr(obj, name, raising=True) + monkeypatch.setitem(mapping, name, value) + monkeypatch.delitem(obj, name, raising=True) + monkeypatch.setenv(name, value, prepend=False) + monkeypatch.delenv(name, value, raising=True) + monkeypatch.syspath_prepend(path) + monkeypatch.chdir(path) + + All modifications will be undone after the requesting + test function or fixture has finished. The ``raising`` + parameter determines if a KeyError or AttributeError + will be raised if the set/deletion operation has no target. + """ + mpatch = MonkeyPatch() + request.addfinalizer(mpatch.undo) + return mpatch + + +def resolve(name): + # simplified from zope.dottedname + parts = name.split('.') + + used = parts.pop(0) + found = __import__(used) + for part in parts: + used += '.' + part + try: + found = getattr(found, part) + except AttributeError: + pass + else: + continue + # we use explicit un-nesting of the handling block in order + # to avoid nested exceptions on python 3 + try: + __import__(used) + except ImportError as ex: + # str is used for py2 vs py3 + expected = str(ex).split()[-1] + if expected == used: + raise + else: + raise ImportError( + 'import error in %s: %s' % (used, ex) + ) + found = annotated_getattr(found, part, used) + return found + + +def annotated_getattr(obj, name, ann): + try: + obj = getattr(obj, name) + except AttributeError: + raise AttributeError( + '%r object at %s has no attribute %r' % ( + type(obj).__name__, ann, name + ) + ) + return obj + + +def derive_importpath(import_path, raising): + if not isinstance(import_path, _basestring) or "." not in import_path: + raise TypeError("must be absolute import path string, not %r" % + (import_path,)) + module, attr = import_path.rsplit('.', 1) + target = resolve(module) + if raising: + annotated_getattr(target, attr, ann=module) + return attr, target + + +class Notset: + def __repr__(self): + return "" + + +notset = Notset() + + +class MonkeyPatch: + """ Object returned by the ``monkeypatch`` fixture keeping a record of setattr/item/env/syspath changes. + """ + + def __init__(self): + self._setattr = [] + self._setitem = [] + self._cwd = None + self._savesyspath = None + + def setattr(self, target, name, value=notset, raising=True): + """ Set attribute value on target, memorizing the old value. + By default raise AttributeError if the attribute did not exist. + + For convenience you can specify a string as ``target`` which + will be interpreted as a dotted import path, with the last part + being the attribute name. Example: + ``monkeypatch.setattr("os.getcwd", lambda x: "/")`` + would set the ``getcwd`` function of the ``os`` module. + + The ``raising`` value determines if the setattr should fail + if the attribute is not already present (defaults to True + which means it will raise). 
+ """ + __tracebackhide__ = True + import inspect + + if value is notset: + if not isinstance(target, _basestring): + raise TypeError("use setattr(target, name, value) or " + "setattr(target, value) with target being a dotted " + "import string") + value = name + name, target = derive_importpath(target, raising) + + oldval = getattr(target, name, notset) + if raising and oldval is notset: + raise AttributeError("%r has no attribute %r" % (target, name)) + + # avoid class descriptors like staticmethod/classmethod + if inspect.isclass(target): + oldval = target.__dict__.get(name, notset) + self._setattr.append((target, name, oldval)) + setattr(target, name, value) + + def delattr(self, target, name=notset, raising=True): + """ Delete attribute ``name`` from ``target``, by default raise + AttributeError it the attribute did not previously exist. + + If no ``name`` is specified and ``target`` is a string + it will be interpreted as a dotted import path with the + last part being the attribute name. + + If ``raising`` is set to False, no exception will be raised if the + attribute is missing. + """ + __tracebackhide__ = True + if name is notset: + if not isinstance(target, _basestring): + raise TypeError("use delattr(target, name) or " + "delattr(target) with target being a dotted " + "import string") + name, target = derive_importpath(target, raising) + + if not hasattr(target, name): + if raising: + raise AttributeError(name) + else: + self._setattr.append((target, name, getattr(target, name, notset))) + delattr(target, name) + + def setitem(self, dic, name, value): + """ Set dictionary entry ``name`` to value. """ + self._setitem.append((dic, name, dic.get(name, notset))) + dic[name] = value + + def delitem(self, dic, name, raising=True): + """ Delete ``name`` from dict. Raise KeyError if it doesn't exist. + + If ``raising`` is set to False, no exception will be raised if the + key is missing. + """ + if name not in dic: + if raising: + raise KeyError(name) + else: + self._setitem.append((dic, name, dic.get(name, notset))) + del dic[name] + + def setenv(self, name, value, prepend=None): + """ Set environment variable ``name`` to ``value``. If ``prepend`` + is a character, read the current environment variable value + and prepend the ``value`` adjoined with the ``prepend`` character.""" + value = str(value) + if prepend and name in os.environ: + value = value + prepend + os.environ[name] + self.setitem(os.environ, name, value) + + def delenv(self, name, raising=True): + """ Delete ``name`` from the environment. Raise KeyError it does not + exist. + + If ``raising`` is set to False, no exception will be raised if the + environment variable is missing. + """ + self.delitem(os.environ, name, raising=raising) + + def syspath_prepend(self, path): + """ Prepend ``path`` to ``sys.path`` list of import locations. """ + if self._savesyspath is None: + self._savesyspath = sys.path[:] + sys.path.insert(0, str(path)) + + def chdir(self, path): + """ Change the current working directory to the specified path. + Path can be a string or a py.path.local object. + """ + if self._cwd is None: + self._cwd = os.getcwd() + if hasattr(path, "chdir"): + path.chdir() + else: + os.chdir(path) + + def undo(self): + """ Undo previous changes. This call consumes the + undo stack. Calling it a second time has no effect unless + you do more monkeypatching after the undo call. + + There is generally no need to call `undo()`, since it is + called automatically during tear-down. 
+ + Note that the same `monkeypatch` fixture is used across a + single test function invocation. If `monkeypatch` is used both by + the test function itself and one of the test fixtures, + calling `undo()` will undo all of the changes made in + both functions. + """ + for obj, name, value in reversed(self._setattr): + if value is not notset: + setattr(obj, name, value) + else: + delattr(obj, name) + self._setattr[:] = [] + for dictionary, name, value in reversed(self._setitem): + if value is notset: + try: + del dictionary[name] + except KeyError: + pass # was already deleted, so we have the desired state + else: + dictionary[name] = value + self._setitem[:] = [] + if self._savesyspath is not None: + sys.path[:] = self._savesyspath + self._savesyspath = None + + if self._cwd is not None: + os.chdir(self._cwd) + self._cwd = None diff --git a/tests/_lib_vendors/_pytest/nose.py b/tests/_lib_vendors/_pytest/nose.py new file mode 100644 index 0000000..0387468 --- /dev/null +++ b/tests/_lib_vendors/_pytest/nose.py @@ -0,0 +1,71 @@ +""" run test suites written for nose. """ + +import sys + +import py +import pytest +from _pytest import unittest + + +def get_skip_exceptions(): + skip_classes = set() + for module_name in ('unittest', 'unittest2', 'nose'): + mod = sys.modules.get(module_name) + if hasattr(mod, 'SkipTest'): + skip_classes.add(mod.SkipTest) + return tuple(skip_classes) + + +def pytest_runtest_makereport(item, call): + if call.excinfo and call.excinfo.errisinstance(get_skip_exceptions()): + # let's substitute the excinfo with a pytest.skip one + call2 = call.__class__(lambda: + pytest.skip(str(call.excinfo.value)), call.when) + call.excinfo = call2.excinfo + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_setup(item): + if is_potential_nosetest(item): + if isinstance(item.parent, pytest.Generator): + gen = item.parent + if not hasattr(gen, '_nosegensetup'): + call_optional(gen.obj, 'setup') + if isinstance(gen.parent, pytest.Instance): + call_optional(gen.parent.obj, 'setup') + gen._nosegensetup = True + if not call_optional(item.obj, 'setup'): + # call module level setup if there is no object level one + call_optional(item.parent.obj, 'setup') + #XXX this implies we only call teardown when setup worked + item.session._setupstate.addfinalizer((lambda: teardown_nose(item)), item) + +def teardown_nose(item): + if is_potential_nosetest(item): + if not call_optional(item.obj, 'teardown'): + call_optional(item.parent.obj, 'teardown') + #if hasattr(item.parent, '_nosegensetup'): + # #call_optional(item._nosegensetup, 'teardown') + # del item.parent._nosegensetup + + +def pytest_make_collect_report(collector): + if isinstance(collector, pytest.Generator): + call_optional(collector.obj, 'setup') + + +def is_potential_nosetest(item): + # extra check needed since we do not do nose style setup/teardown + # on direct unittest style classes + return isinstance(item, pytest.Function) and \ + not isinstance(item, unittest.TestCaseFunction) + + +def call_optional(obj, name): + method = getattr(obj, name, None) + isfixture = hasattr(method, "_pytestfixturefunction") + if method is not None and not isfixture and py.builtin.callable(method): + # If there's any problems allow the exception to raise rather than + # silently ignoring them + method() + return True diff --git a/tests/_lib_vendors/_pytest/pastebin.py b/tests/_lib_vendors/_pytest/pastebin.py new file mode 100644 index 0000000..9f1cf90 --- /dev/null +++ b/tests/_lib_vendors/_pytest/pastebin.py @@ -0,0 +1,98 @@ +""" submit failure or test 
session information to a pastebin service. """ +import pytest +import sys +import tempfile + + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting") + group._addoption('--pastebin', metavar="mode", + action='store', dest="pastebin", default=None, + choices=['failed', 'all'], + help="send failed|all info to bpaste.net pastebin service.") + + +@pytest.hookimpl(trylast=True) +def pytest_configure(config): + import py + if config.option.pastebin == "all": + tr = config.pluginmanager.getplugin('terminalreporter') + # if no terminal reporter plugin is present, nothing we can do here; + # this can happen when this function executes in a slave node + # when using pytest-xdist, for example + if tr is not None: + # pastebin file will be utf-8 encoded binary file + config._pastebinfile = tempfile.TemporaryFile('w+b') + oldwrite = tr._tw.write + + def tee_write(s, **kwargs): + oldwrite(s, **kwargs) + if py.builtin._istext(s): + s = s.encode('utf-8') + config._pastebinfile.write(s) + + tr._tw.write = tee_write + + +def pytest_unconfigure(config): + if hasattr(config, '_pastebinfile'): + # get terminal contents and delete file + config._pastebinfile.seek(0) + sessionlog = config._pastebinfile.read() + config._pastebinfile.close() + del config._pastebinfile + # undo our patching in the terminal reporter + tr = config.pluginmanager.getplugin('terminalreporter') + del tr._tw.__dict__['write'] + # write summary + tr.write_sep("=", "Sending information to Paste Service") + pastebinurl = create_new_paste(sessionlog) + tr.write_line("pastebin session-log: %s\n" % pastebinurl) + + +def create_new_paste(contents): + """ + Creates a new paste using bpaste.net service. + + :contents: paste contents as utf-8 encoded bytes + :returns: url to the pasted contents + """ + import re + if sys.version_info < (3, 0): + from urllib import urlopen, urlencode + else: + from urllib.request import urlopen + from urllib.parse import urlencode + + params = { + 'code': contents, + 'lexer': 'python3' if sys.version_info[0] == 3 else 'python', + 'expiry': '1week', + } + url = 'https://bpaste.net' + response = urlopen(url, data=urlencode(params).encode('ascii')).read() + m = re.search(r'href="/raw/(\w+)"', response.decode('utf-8')) + if m: + return '%s/show/%s' % (url, m.group(1)) + else: + return 'bad response: ' + response + + +def pytest_terminal_summary(terminalreporter): + import _pytest.config + if terminalreporter.config.option.pastebin != "failed": + return + tr = terminalreporter + if 'failed' in tr.stats: + terminalreporter.write_sep("=", "Sending information to Paste Service") + for rep in terminalreporter.stats.get('failed'): + try: + msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc + except AttributeError: + msg = tr._getfailureheadline(rep) + tw = _pytest.config.create_terminal_writer(terminalreporter.config, stringio=True) + rep.toterminal(tw) + s = tw.stringio.getvalue() + assert len(s) + pastebinurl = create_new_paste(s) + tr.write_line("%s --> %s" %(msg, pastebinurl)) diff --git a/tests/_lib_vendors/_pytest/pytester.py b/tests/_lib_vendors/_pytest/pytester.py new file mode 100644 index 0000000..17ff529 --- /dev/null +++ b/tests/_lib_vendors/_pytest/pytester.py @@ -0,0 +1,1139 @@ +""" (disabled by default) support for testing pytest and pytest plugins. 
""" +import codecs +import gc +import os +import platform +import re +import subprocess +import sys +import time +import traceback +from fnmatch import fnmatch + +from py.builtin import print_ + +from _pytest._code import Source +import py +import pytest +from _pytest.main import Session, EXIT_OK +from _pytest.assertion.rewrite import AssertionRewritingHook + + +def pytest_addoption(parser): + # group = parser.getgroup("pytester", "pytester (self-tests) options") + parser.addoption('--lsof', + action="store_true", dest="lsof", default=False, + help=("run FD checks if lsof is available")) + + parser.addoption('--runpytest', default="inprocess", dest="runpytest", + choices=("inprocess", "subprocess", ), + help=("run pytest sub runs in tests using an 'inprocess' " + "or 'subprocess' (python -m main) method")) + + +def pytest_configure(config): + # This might be called multiple times. Only take the first. + global _pytest_fullpath + try: + _pytest_fullpath + except NameError: + _pytest_fullpath = os.path.abspath(pytest.__file__.rstrip("oc")) + _pytest_fullpath = _pytest_fullpath.replace("$py.class", ".py") + + if config.getvalue("lsof"): + checker = LsofFdLeakChecker() + if checker.matching_platform(): + config.pluginmanager.register(checker) + + +class LsofFdLeakChecker(object): + def get_open_files(self): + out = self._exec_lsof() + open_files = self._parse_lsof_output(out) + return open_files + + def _exec_lsof(self): + pid = os.getpid() + return py.process.cmdexec("lsof -Ffn0 -p %d" % pid) + + def _parse_lsof_output(self, out): + def isopen(line): + return line.startswith('f') and ("deleted" not in line and + 'mem' not in line and "txt" not in line and 'cwd' not in line) + + open_files = [] + + for line in out.split("\n"): + if isopen(line): + fields = line.split('\0') + fd = fields[0][1:] + filename = fields[1][1:] + if filename.startswith('/'): + open_files.append((fd, filename)) + + return open_files + + def matching_platform(self): + try: + py.process.cmdexec("lsof -v") + except (py.process.cmdexec.Error, UnicodeDecodeError): + # cmdexec may raise UnicodeDecodeError on Windows systems + # with locale other than english: + # https://bitbucket.org/pytest-dev/py/issues/66 + return False + else: + return True + + @pytest.hookimpl(hookwrapper=True, tryfirst=True) + def pytest_runtest_item(self, item): + lines1 = self.get_open_files() + yield + if hasattr(sys, "pypy_version_info"): + gc.collect() + lines2 = self.get_open_files() + + new_fds = set([t[0] for t in lines2]) - set([t[0] for t in lines1]) + leaked_files = [t for t in lines2 if t[0] in new_fds] + if leaked_files: + error = [] + error.append("***** %s FD leakage detected" % len(leaked_files)) + error.extend([str(f) for f in leaked_files]) + error.append("*** Before:") + error.extend([str(f) for f in lines1]) + error.append("*** After:") + error.extend([str(f) for f in lines2]) + error.append(error[0]) + error.append("*** function %s:%s: %s " % item.location) + pytest.fail("\n".join(error), pytrace=False) + + +# XXX copied from execnet's conftest.py - needs to be merged +winpymap = { + 'python2.7': r'C:\Python27\python.exe', + 'python2.6': r'C:\Python26\python.exe', + 'python3.1': r'C:\Python31\python.exe', + 'python3.2': r'C:\Python32\python.exe', + 'python3.3': r'C:\Python33\python.exe', + 'python3.4': r'C:\Python34\python.exe', + 'python3.5': r'C:\Python35\python.exe', +} + +def getexecutable(name, cache={}): + try: + return cache[name] + except KeyError: + executable = py.path.local.sysfind(name) + if executable: + import 
subprocess + popen = subprocess.Popen([str(executable), "--version"], + universal_newlines=True, stderr=subprocess.PIPE) + out, err = popen.communicate() + if name == "jython": + if not err or "2.5" not in err: + executable = None + if "2.5.2" in err: + executable = None # http://bugs.jython.org/issue1790 + elif popen.returncode != 0: + # Handle pyenv's 127. + executable = None + cache[name] = executable + return executable + +@pytest.fixture(params=['python2.6', 'python2.7', 'python3.3', "python3.4", + 'pypy', 'pypy3']) +def anypython(request): + name = request.param + executable = getexecutable(name) + if executable is None: + if sys.platform == "win32": + executable = winpymap.get(name, None) + if executable: + executable = py.path.local(executable) + if executable.check(): + return executable + pytest.skip("no suitable %s found" % (name,)) + return executable + +# used at least by pytest-xdist plugin +@pytest.fixture +def _pytest(request): + """ Return a helper which offers a gethookrecorder(hook) + method which returns a HookRecorder instance which helps + to make assertions about called hooks. + """ + return PytestArg(request) + +class PytestArg: + def __init__(self, request): + self.request = request + + def gethookrecorder(self, hook): + hookrecorder = HookRecorder(hook._pm) + self.request.addfinalizer(hookrecorder.finish_recording) + return hookrecorder + + +def get_public_names(l): + """Only return names from iterator l without a leading underscore.""" + return [x for x in l if x[0] != "_"] + + +class ParsedCall: + def __init__(self, name, kwargs): + self.__dict__.update(kwargs) + self._name = name + + def __repr__(self): + d = self.__dict__.copy() + del d['_name'] + return "" %(self._name, d) + + +class HookRecorder: + """Record all hooks called in a plugin manager. + + This wraps all the hook calls in the plugin manager, recording + each call before propagating the normal calls. 
+ + """ + + def __init__(self, pluginmanager): + self._pluginmanager = pluginmanager + self.calls = [] + + def before(hook_name, hook_impls, kwargs): + self.calls.append(ParsedCall(hook_name, kwargs)) + + def after(outcome, hook_name, hook_impls, kwargs): + pass + + self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after) + + def finish_recording(self): + self._undo_wrapping() + + def getcalls(self, names): + if isinstance(names, str): + names = names.split() + return [call for call in self.calls if call._name in names] + + def assert_contains(self, entries): + __tracebackhide__ = True + i = 0 + entries = list(entries) + backlocals = sys._getframe(1).f_locals + while entries: + name, check = entries.pop(0) + for ind, call in enumerate(self.calls[i:]): + if call._name == name: + print_("NAMEMATCH", name, call) + if eval(check, backlocals, call.__dict__): + print_("CHECKERMATCH", repr(check), "->", call) + else: + print_("NOCHECKERMATCH", repr(check), "-", call) + continue + i += ind + 1 + break + print_("NONAMEMATCH", name, "with", call) + else: + pytest.fail("could not find %r check %r" % (name, check)) + + def popcall(self, name): + __tracebackhide__ = True + for i, call in enumerate(self.calls): + if call._name == name: + del self.calls[i] + return call + lines = ["could not find call %r, in:" % (name,)] + lines.extend([" %s" % str(x) for x in self.calls]) + pytest.fail("\n".join(lines)) + + def getcall(self, name): + l = self.getcalls(name) + assert len(l) == 1, (name, l) + return l[0] + + # functionality for test reports + + def getreports(self, + names="pytest_runtest_logreport pytest_collectreport"): + return [x.report for x in self.getcalls(names)] + + def matchreport(self, inamepart="", + names="pytest_runtest_logreport pytest_collectreport", when=None): + """ return a testreport whose dotted import path matches """ + l = [] + for rep in self.getreports(names=names): + try: + if not when and rep.when != "call" and rep.passed: + # setup/teardown passing reports - let's ignore those + continue + except AttributeError: + pass + if when and getattr(rep, 'when', None) != when: + continue + if not inamepart or inamepart in rep.nodeid.split("::"): + l.append(rep) + if not l: + raise ValueError("could not find test report matching %r: " + "no test reports at all!" 
+                             % (inamepart,))
+        if len(l) > 1:
+            raise ValueError(
+                "found 2 or more testreports matching %r: %s" % (inamepart, l))
+        return l[0]
+
+    def getfailures(self,
+                    names='pytest_runtest_logreport pytest_collectreport'):
+        return [rep for rep in self.getreports(names) if rep.failed]
+
+    def getfailedcollections(self):
+        return self.getfailures('pytest_collectreport')
+
+    def listoutcomes(self):
+        passed = []
+        skipped = []
+        failed = []
+        for rep in self.getreports(
+                "pytest_collectreport pytest_runtest_logreport"):
+            if rep.passed:
+                if getattr(rep, "when", None) == "call":
+                    passed.append(rep)
+            elif rep.skipped:
+                skipped.append(rep)
+            elif rep.failed:
+                failed.append(rep)
+        return passed, skipped, failed
+
+    def countoutcomes(self):
+        return [len(x) for x in self.listoutcomes()]
+
+    def assertoutcome(self, passed=0, skipped=0, failed=0):
+        realpassed, realskipped, realfailed = self.listoutcomes()
+        assert passed == len(realpassed)
+        assert skipped == len(realskipped)
+        assert failed == len(realfailed)
+
+    def clear(self):
+        self.calls[:] = []
+
+
+@pytest.fixture
+def linecomp(request):
+    return LineComp()
+
+
+@pytest.fixture(name='LineMatcher')
+def LineMatcher_fixture(request):
+    return LineMatcher
+
+
+@pytest.fixture
+def testdir(request, tmpdir_factory):
+    return Testdir(request, tmpdir_factory)
+
+
+rex_outcome = re.compile("(\d+) ([\w-]+)")
+class RunResult:
+    """The result of running a command.
+
+    Attributes:
+
+    :ret: The return value.
+    :outlines: List of lines captured from stdout.
+    :errlines: List of lines captured from stderr.
+    :stdout: :py:class:`LineMatcher` of stdout, use ``stdout.str()`` to
+       reconstruct stdout or the commonly used
+       ``stdout.fnmatch_lines()`` method.
+    :stderr: :py:class:`LineMatcher` of stderr.
+    :duration: Duration in seconds.
+
+    """
+    def __init__(self, ret, outlines, errlines, duration):
+        self.ret = ret
+        self.outlines = outlines
+        self.errlines = errlines
+        self.stdout = LineMatcher(outlines)
+        self.stderr = LineMatcher(errlines)
+        self.duration = duration
+
+    def parseoutcomes(self):
+        """ Return a dictionary of outcomestring->num from parsing
+        the terminal output that the test process produced."""
+        for line in reversed(self.outlines):
+            if 'seconds' in line:
+                outcomes = rex_outcome.findall(line)
+                if outcomes:
+                    d = {}
+                    for num, cat in outcomes:
+                        d[cat] = int(num)
+                    return d
+
+    def assert_outcomes(self, passed=0, skipped=0, failed=0):
+        """ assert that the specified outcomes appear with the respective
+        numbers (0 means it didn't occur) in the text output from a test run."""
+        d = self.parseoutcomes()
+        assert passed == d.get("passed", 0)
+        assert skipped == d.get("skipped", 0)
+        assert failed == d.get("failed", 0)
+
+
+
+class Testdir:
+    """Temporary test directory with tools to test/run pytest itself.
+
+    This is based on the ``tmpdir`` fixture but provides a number of
+    methods which aid with testing pytest itself. Unless
+    :py:meth:`chdir` is used all methods will use :py:attr:`tmpdir` as
+    the current working directory.
+
+    Attributes:
+
+    :tmpdir: The :py:class:`py.path.local` instance of the temporary
+       directory.
+
+    :plugins: A list of plugins to use with :py:meth:`parseconfig` and
+       :py:meth:`runpytest`. Initially this is an empty list but
+       plugins can be added to the list. The type of items to add to
+       the list depends on the method which uses them so refer to them
+       for details.
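+
+    Illustrative use (an editor's sketch, not upstream text)::
+
+        def test_collects(testdir):
+            testdir.makepyfile("def test_one(): pass")
+            items, rec = testdir.inline_genitems()
+            assert len(items) == 1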
+ + """ + + def __init__(self, request, tmpdir_factory): + self.request = request + # XXX remove duplication with tmpdir plugin + basetmp = tmpdir_factory.ensuretemp("testdir") + name = request.function.__name__ + for i in range(100): + try: + tmpdir = basetmp.mkdir(name + str(i)) + except py.error.EEXIST: + continue + break + self.tmpdir = tmpdir + self.plugins = [] + self._savesyspath = (list(sys.path), list(sys.meta_path)) + self._savemodulekeys = set(sys.modules) + self.chdir() # always chdir + self.request.addfinalizer(self.finalize) + method = self.request.config.getoption("--runpytest") + if method == "inprocess": + self._runpytest_method = self.runpytest_inprocess + elif method == "subprocess": + self._runpytest_method = self.runpytest_subprocess + + def __repr__(self): + return "" % (self.tmpdir,) + + def finalize(self): + """Clean up global state artifacts. + + Some methods modify the global interpreter state and this + tries to clean this up. It does not remove the temporary + directory however so it can be looked at after the test run + has finished. + + """ + sys.path[:], sys.meta_path[:] = self._savesyspath + if hasattr(self, '_olddir'): + self._olddir.chdir() + self.delete_loaded_modules() + + def delete_loaded_modules(self): + """Delete modules that have been loaded during a test. + + This allows the interpreter to catch module changes in case + the module is re-imported. + """ + for name in set(sys.modules).difference(self._savemodulekeys): + # it seems zope.interfaces is keeping some state + # (used by twisted related tests) + if name != "zope.interface": + del sys.modules[name] + + def make_hook_recorder(self, pluginmanager): + """Create a new :py:class:`HookRecorder` for a PluginManager.""" + assert not hasattr(pluginmanager, "reprec") + pluginmanager.reprec = reprec = HookRecorder(pluginmanager) + self.request.addfinalizer(reprec.finish_recording) + return reprec + + def chdir(self): + """Cd into the temporary directory. + + This is done automatically upon instantiation. + + """ + old = self.tmpdir.chdir() + if not hasattr(self, '_olddir'): + self._olddir = old + + def _makefile(self, ext, args, kwargs): + items = list(kwargs.items()) + if args: + source = py.builtin._totext("\n").join( + map(py.builtin._totext, args)) + py.builtin._totext("\n") + basename = self.request.function.__name__ + items.insert(0, (basename, source)) + ret = None + for name, value in items: + p = self.tmpdir.join(name).new(ext=ext) + p.dirpath().ensure_dir() + source = Source(value) + + def my_totext(s, encoding="utf-8"): + if py.builtin._isbytes(s): + s = py.builtin._totext(s, encoding=encoding) + return s + + source_unicode = "\n".join([my_totext(line) for line in source.lines]) + source = py.builtin._totext(source_unicode) + content = source.strip().encode("utf-8") # + "\n" + #content = content.rstrip() + "\n" + p.write(content, "wb") + if ret is None: + ret = p + return ret + + def makefile(self, ext, *args, **kwargs): + """Create a new file in the testdir. + + ext: The extension the file should use, including the dot. + E.g. ".py". + + args: All args will be treated as strings and joined using + newlines. The result will be written as contents to the + file. The name of the file will be based on the test + function requesting this fixture. + E.g. "testdir.makefile('.txt', 'line1', 'line2')" + + kwargs: Each keyword is the name of a file, while the value of + it will be written as contents of the file. + E.g. 
"testdir.makefile('.ini', pytest='[pytest]\naddopts=-rs\n')" + + """ + return self._makefile(ext, args, kwargs) + + def makeconftest(self, source): + """Write a contest.py file with 'source' as contents.""" + return self.makepyfile(conftest=source) + + def makeini(self, source): + """Write a tox.ini file with 'source' as contents.""" + return self.makefile('.ini', tox=source) + + def getinicfg(self, source): + """Return the pytest section from the tox.ini config file.""" + p = self.makeini(source) + return py.iniconfig.IniConfig(p)['pytest'] + + def makepyfile(self, *args, **kwargs): + """Shortcut for .makefile() with a .py extension.""" + return self._makefile('.py', args, kwargs) + + def maketxtfile(self, *args, **kwargs): + """Shortcut for .makefile() with a .txt extension.""" + return self._makefile('.txt', args, kwargs) + + def syspathinsert(self, path=None): + """Prepend a directory to sys.path, defaults to :py:attr:`tmpdir`. + + This is undone automatically after the test. + """ + if path is None: + path = self.tmpdir + sys.path.insert(0, str(path)) + # a call to syspathinsert() usually means that the caller + # wants to import some dynamically created files. + # with python3 we thus invalidate import caches. + self._possibly_invalidate_import_caches() + + def _possibly_invalidate_import_caches(self): + # invalidate caches if we can (py33 and above) + try: + import importlib + except ImportError: + pass + else: + if hasattr(importlib, "invalidate_caches"): + importlib.invalidate_caches() + + def mkdir(self, name): + """Create a new (sub)directory.""" + return self.tmpdir.mkdir(name) + + def mkpydir(self, name): + """Create a new python package. + + This creates a (sub)direcotry with an empty ``__init__.py`` + file so that is recognised as a python package. + + """ + p = self.mkdir(name) + p.ensure("__init__.py") + return p + + Session = Session + def getnode(self, config, arg): + """Return the collection node of a file. + + :param config: :py:class:`_pytest.config.Config` instance, see + :py:meth:`parseconfig` and :py:meth:`parseconfigure` to + create the configuration. + + :param arg: A :py:class:`py.path.local` instance of the file. + + """ + session = Session(config) + assert '::' not in str(arg) + p = py.path.local(arg) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([str(p)], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK) + return res + + def getpathnode(self, path): + """Return the collection node of a file. + + This is like :py:meth:`getnode` but uses + :py:meth:`parseconfigure` to create the (configured) pytest + Config instance. + + :param path: A :py:class:`py.path.local` instance of the file. + + """ + config = self.parseconfigure(path) + session = Session(config) + x = session.fspath.bestrelpath(path) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([x], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK) + return res + + def genitems(self, colitems): + """Generate all test items from a collection node. + + This recurses into the collection node and returns a list of + all the test items contained within. + + """ + session = colitems[0].session + result = [] + for colitem in colitems: + result.extend(session.genitems(colitem)) + return result + + def runitem(self, source): + """Run the "test_func" Item. 
+ + The calling test instance (the class which contains the test + method) must provide a ``.getrunner()`` method which should + return a runner which can run the test protocol for a single + item, like e.g. :py:func:`_pytest.runner.runtestprotocol`. + + """ + # used from runner functional tests + item = self.getitem(source) + # the test class where we are called from wants to provide the runner + testclassinstance = self.request.instance + runner = testclassinstance.getrunner() + return runner(item) + + def inline_runsource(self, source, *cmdlineargs): + """Run a test module in process using ``pytest.main()``. + + This run writes "source" into a temporary file and runs + ``pytest.main()`` on it, returning a :py:class:`HookRecorder` + instance for the result. + + :param source: The source code of the test module. + + :param cmdlineargs: Any extra command line arguments to use. + + :return: :py:class:`HookRecorder` instance of the result. + + """ + p = self.makepyfile(source) + l = list(cmdlineargs) + [p] + return self.inline_run(*l) + + def inline_genitems(self, *args): + """Run ``pytest.main(['--collectonly'])`` in-process. + + Retuns a tuple of the collected items and a + :py:class:`HookRecorder` instance. + + This runs the :py:func:`pytest.main` function to run all of + pytest inside the test process itself like + :py:meth:`inline_run`. However the return value is a tuple of + the collection items and a :py:class:`HookRecorder` instance. + + """ + rec = self.inline_run("--collect-only", *args) + items = [x.item for x in rec.getcalls("pytest_itemcollected")] + return items, rec + + def inline_run(self, *args, **kwargs): + """Run ``pytest.main()`` in-process, returning a HookRecorder. + + This runs the :py:func:`pytest.main` function to run all of + pytest inside the test process itself. This means it can + return a :py:class:`HookRecorder` instance which gives more + detailed results from then run then can be done by matching + stdout/stderr from :py:meth:`runpytest`. + + :param args: Any command line arguments to pass to + :py:func:`pytest.main`. + + :param plugin: (keyword-only) Extra plugin instances the + ``pytest.main()`` instance should use. + + :return: A :py:class:`HookRecorder` instance. + """ + # When running py.test inline any plugins active in the main + # test process are already imported. So this disables the + # warning which will trigger to say they can no longer be + # re-written, which is fine as they are already re-written. 
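+        # (Editor's note, not upstream code: a typical call from a test is
+        #      reprec = testdir.inline_run("-q")
+        #      reprec.assertoutcome(passed=1)
+        #  where ``testdir`` is the fixture defined in this module.)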
+ orig_warn = AssertionRewritingHook._warn_already_imported + + def revert(): + AssertionRewritingHook._warn_already_imported = orig_warn + + self.request.addfinalizer(revert) + AssertionRewritingHook._warn_already_imported = lambda *a: None + + rec = [] + + class Collect: + def pytest_configure(x, config): + rec.append(self.make_hook_recorder(config.pluginmanager)) + + plugins = kwargs.get("plugins") or [] + plugins.append(Collect()) + ret = pytest.main(list(args), plugins=plugins) + self.delete_loaded_modules() + if len(rec) == 1: + reprec = rec.pop() + else: + class reprec: + pass + reprec.ret = ret + + # typically we reraise keyboard interrupts from the child run + # because it's our user requesting interruption of the testing + if ret == 2 and not kwargs.get("no_reraise_ctrlc"): + calls = reprec.getcalls("pytest_keyboard_interrupt") + if calls and calls[-1].excinfo.type == KeyboardInterrupt: + raise KeyboardInterrupt() + return reprec + + def runpytest_inprocess(self, *args, **kwargs): + """ Return result of running pytest in-process, providing a similar + interface to what self.runpytest() provides. """ + if kwargs.get("syspathinsert"): + self.syspathinsert() + now = time.time() + capture = py.io.StdCapture() + try: + try: + reprec = self.inline_run(*args, **kwargs) + except SystemExit as e: + + class reprec: + ret = e.args[0] + + except Exception: + traceback.print_exc() + + class reprec: + ret = 3 + finally: + out, err = capture.reset() + sys.stdout.write(out) + sys.stderr.write(err) + + res = RunResult(reprec.ret, + out.split("\n"), err.split("\n"), + time.time()-now) + res.reprec = reprec + return res + + def runpytest(self, *args, **kwargs): + """ Run pytest inline or in a subprocess, depending on the command line + option "--runpytest" and return a :py:class:`RunResult`. + + """ + args = self._ensure_basetemp(args) + return self._runpytest_method(*args, **kwargs) + + def _ensure_basetemp(self, args): + args = [str(x) for x in args] + for x in args: + if str(x).startswith('--basetemp'): + #print ("basedtemp exists: %s" %(args,)) + break + else: + args.append("--basetemp=%s" % self.tmpdir.dirpath('basetemp')) + #print ("added basetemp: %s" %(args,)) + return args + + def parseconfig(self, *args): + """Return a new pytest Config instance from given commandline args. + + This invokes the pytest bootstrapping code in _pytest.config + to create a new :py:class:`_pytest.core.PluginManager` and + call the pytest_cmdline_parse hook to create new + :py:class:`_pytest.config.Config` instance. + + If :py:attr:`plugins` has been populated they should be plugin + modules which will be registered with the PluginManager. + + """ + args = self._ensure_basetemp(args) + + import _pytest.config + config = _pytest.config._prepareconfig(args, self.plugins) + # we don't know what the test will do with this half-setup config + # object and thus we make sure it gets unconfigured properly in any + # case (otherwise capturing could still be active, for example) + self.request.addfinalizer(config._ensure_unconfigure) + return config + + def parseconfigure(self, *args): + """Return a new pytest configured Config instance. + + This returns a new :py:class:`_pytest.config.Config` instance + like :py:meth:`parseconfig`, but also calls the + pytest_configure hook. + + """ + config = self.parseconfig(*args) + config._do_configure() + self.request.addfinalizer(config._ensure_unconfigure) + return config + + def getitem(self, source, funcname="test_func"): + """Return the test item for a test function. 
+ + This writes the source to a python file and runs pytest's + collection on the resulting module, returning the test item + for the requested function name. + + :param source: The module source. + + :param funcname: The name of the test function for which the + Item must be returned. + + """ + items = self.getitems(source) + for item in items: + if item.name == funcname: + return item + assert 0, "%r item not found in module:\n%s\nitems: %s" %( + funcname, source, items) + + def getitems(self, source): + """Return all test items collected from the module. + + This writes the source to a python file and runs pytest's + collection on the resulting module, returning all test items + contained within. + + """ + modcol = self.getmodulecol(source) + return self.genitems([modcol]) + + def getmodulecol(self, source, configargs=(), withinit=False): + """Return the module collection node for ``source``. + + This writes ``source`` to a file using :py:meth:`makepyfile` + and then runs the pytest collection on it, returning the + collection node for the test module. + + :param source: The source code of the module to collect. + + :param configargs: Any extra arguments to pass to + :py:meth:`parseconfigure`. + + :param withinit: Whether to also write a ``__init__.py`` file + to the temporarly directory to ensure it is a package. + + """ + kw = {self.request.function.__name__: Source(source).strip()} + path = self.makepyfile(**kw) + if withinit: + self.makepyfile(__init__ = "#") + self.config = config = self.parseconfigure(path, *configargs) + node = self.getnode(config, path) + return node + + def collect_by_name(self, modcol, name): + """Return the collection node for name from the module collection. + + This will search a module collection node for a collection + node matching the given name. + + :param modcol: A module collection node, see + :py:meth:`getmodulecol`. + + :param name: The name of the node to return. + + """ + for colitem in modcol._memocollect(): + if colitem.name == name: + return colitem + + def popen(self, cmdargs, stdout, stderr, **kw): + """Invoke subprocess.Popen. + + This calls subprocess.Popen making sure the current working + directory is the PYTHONPATH. + + You probably want to use :py:meth:`run` instead. + + """ + env = os.environ.copy() + env['PYTHONPATH'] = os.pathsep.join(filter(None, [ + str(os.getcwd()), env.get('PYTHONPATH', '')])) + kw['env'] = env + return subprocess.Popen(cmdargs, + stdout=stdout, stderr=stderr, **kw) + + def run(self, *cmdargs): + """Run a command with arguments. + + Run a process using subprocess.Popen saving the stdout and + stderr. + + Returns a :py:class:`RunResult`. 
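+
+        A short illustrative call (editor's sketch)::
+
+            result = testdir.run(sys.executable, "-c", "print('hi')")
+            assert result.ret == 0
+            result.stdout.fnmatch_lines(["hi"])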
+ + """ + return self._run(*cmdargs) + + def _run(self, *cmdargs): + cmdargs = [str(x) for x in cmdargs] + p1 = self.tmpdir.join("stdout") + p2 = self.tmpdir.join("stderr") + print_("running:", ' '.join(cmdargs)) + print_(" in:", str(py.path.local())) + f1 = codecs.open(str(p1), "w", encoding="utf8") + f2 = codecs.open(str(p2), "w", encoding="utf8") + try: + now = time.time() + popen = self.popen(cmdargs, stdout=f1, stderr=f2, + close_fds=(sys.platform != "win32")) + ret = popen.wait() + finally: + f1.close() + f2.close() + f1 = codecs.open(str(p1), "r", encoding="utf8") + f2 = codecs.open(str(p2), "r", encoding="utf8") + try: + out = f1.read().splitlines() + err = f2.read().splitlines() + finally: + f1.close() + f2.close() + self._dump_lines(out, sys.stdout) + self._dump_lines(err, sys.stderr) + return RunResult(ret, out, err, time.time()-now) + + def _dump_lines(self, lines, fp): + try: + for line in lines: + py.builtin.print_(line, file=fp) + except UnicodeEncodeError: + print("couldn't print to %s because of encoding" % (fp,)) + + def _getpytestargs(self): + # we cannot use "(sys.executable,script)" + # because on windows the script is e.g. a pytest.exe + return (sys.executable, _pytest_fullpath,) # noqa + + def runpython(self, script): + """Run a python script using sys.executable as interpreter. + + Returns a :py:class:`RunResult`. + """ + return self.run(sys.executable, script) + + def runpython_c(self, command): + """Run python -c "command", return a :py:class:`RunResult`.""" + return self.run(sys.executable, "-c", command) + + def runpytest_subprocess(self, *args, **kwargs): + """Run pytest as a subprocess with given arguments. + + Any plugins added to the :py:attr:`plugins` list will added + using the ``-p`` command line option. Addtionally + ``--basetemp`` is used put any temporary files and directories + in a numbered directory prefixed with "runpytest-" so they do + not conflict with the normal numberd pytest location for + temporary files and directories. + + Returns a :py:class:`RunResult`. + + """ + p = py.path.local.make_numbered_dir(prefix="runpytest-", + keep=None, rootdir=self.tmpdir) + args = ('--basetemp=%s' % p, ) + args + #for x in args: + # if '--confcutdir' in str(x): + # break + #else: + # pass + # args = ('--confcutdir=.',) + args + plugins = [x for x in self.plugins if isinstance(x, str)] + if plugins: + args = ('-p', plugins[0]) + args + args = self._getpytestargs() + args + return self.run(*args) + + def spawn_pytest(self, string, expect_timeout=10.0): + """Run pytest using pexpect. + + This makes sure to use the right pytest and sets up the + temporary directory locations. + + The pexpect child is returned. + + """ + basetemp = self.tmpdir.mkdir("pexpect") + invoke = " ".join(map(str, self._getpytestargs())) + cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string) + return self.spawn(cmd, expect_timeout=expect_timeout) + + def spawn(self, cmd, expect_timeout=10.0): + """Run a command using pexpect. + + The pexpect child is returned. 
+ """ + pexpect = pytest.importorskip("pexpect", "3.0") + if hasattr(sys, 'pypy_version_info') and '64' in platform.machine(): + pytest.skip("pypy-64 bit not supported") + if sys.platform.startswith("freebsd"): + pytest.xfail("pexpect does not work reliably on freebsd") + logfile = self.tmpdir.join("spawn.out").open("wb") + child = pexpect.spawn(cmd, logfile=logfile) + self.request.addfinalizer(logfile.close) + child.timeout = expect_timeout + return child + +def getdecoded(out): + try: + return out.decode("utf-8") + except UnicodeDecodeError: + return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % ( + py.io.saferepr(out),) + + +class LineComp: + def __init__(self): + self.stringio = py.io.TextIO() + + def assert_contains_lines(self, lines2): + """ assert that lines2 are contained (linearly) in lines1. + return a list of extralines found. + """ + __tracebackhide__ = True + val = self.stringio.getvalue() + self.stringio.truncate(0) + self.stringio.seek(0) + lines1 = val.split("\n") + return LineMatcher(lines1).fnmatch_lines(lines2) + + +class LineMatcher: + """Flexible matching of text. + + This is a convenience class to test large texts like the output of + commands. + + The constructor takes a list of lines without their trailing + newlines, i.e. ``text.splitlines()``. + + """ + + def __init__(self, lines): + self.lines = lines + self._log_output = [] + + def str(self): + """Return the entire original text.""" + return "\n".join(self.lines) + + def _getlines(self, lines2): + if isinstance(lines2, str): + lines2 = Source(lines2) + if isinstance(lines2, Source): + lines2 = lines2.strip().lines + return lines2 + + def fnmatch_lines_random(self, lines2): + """Check lines exist in the output. + + The argument is a list of lines which have to occur in the + output, in any order. Each line can contain glob whildcards. + + """ + lines2 = self._getlines(lines2) + for line in lines2: + for x in self.lines: + if line == x or fnmatch(x, line): + self._log("matched: ", repr(line)) + break + else: + self._log("line %r not found in output" % line) + raise ValueError(self._log_text) + + def get_lines_after(self, fnline): + """Return all lines following the given line in the text. + + The given line can contain glob wildcards. + """ + for i, line in enumerate(self.lines): + if fnline == line or fnmatch(line, fnline): + return self.lines[i+1:] + raise ValueError("line %r not found in output" % fnline) + + def _log(self, *args): + self._log_output.append(' '.join((str(x) for x in args))) + + @property + def _log_text(self): + return '\n'.join(self._log_output) + + def fnmatch_lines(self, lines2): + """Search the text for matching lines. + + The argument is a list of lines which have to match and can + use glob wildcards. If they do not match an pytest.fail() is + called. The matches and non-matches are also printed on + stdout. 
+ + """ + lines2 = self._getlines(lines2) + lines1 = self.lines[:] + nextline = None + extralines = [] + __tracebackhide__ = True + for line in lines2: + nomatchprinted = False + while lines1: + nextline = lines1.pop(0) + if line == nextline: + self._log("exact match:", repr(line)) + break + elif fnmatch(nextline, line): + self._log("fnmatch:", repr(line)) + self._log(" with:", repr(nextline)) + break + else: + if not nomatchprinted: + self._log("nomatch:", repr(line)) + nomatchprinted = True + self._log(" and:", repr(nextline)) + extralines.append(nextline) + else: + self._log("remains unmatched: %r" % (line,)) + pytest.fail(self._log_text) diff --git a/tests/_lib_vendors/_pytest/python.py b/tests/_lib_vendors/_pytest/python.py new file mode 100644 index 0000000..e46f2f1 --- /dev/null +++ b/tests/_lib_vendors/_pytest/python.py @@ -0,0 +1,1578 @@ +""" Python test discovery, setup and run of test functions. """ + +import fnmatch +import inspect +import sys +import collections +import math +from itertools import count + +import py +import pytest +from _pytest.mark import MarkerError + + +import _pytest +import _pytest._pluggy as pluggy +from _pytest import fixtures +from _pytest.compat import ( + isclass, isfunction, is_generator, _escape_strings, + REGEX_TYPE, STRING_TYPES, NoneType, NOTSET, + get_real_func, getfslineno, safe_getattr, + getlocation, enum, +) + +cutdir1 = py.path.local(pluggy.__file__.rstrip("oc")) +cutdir2 = py.path.local(_pytest.__file__).dirpath() +cutdir3 = py.path.local(py.__file__).dirpath() + + +def filter_traceback(entry): + """Return True if a TracebackEntry instance should be removed from tracebacks: + * dynamically generated code (no code to show up for it); + * internal traceback from pytest or its internal libraries, py and pluggy. + """ + # entry.path might sometimes return a str object when the entry + # points to dynamically generated code + # see https://bitbucket.org/pytest-dev/py/issues/71 + raw_filename = entry.frame.code.raw.co_filename + is_generated = '<' in raw_filename and '>' in raw_filename + if is_generated: + return False + # entry.path might point to an inexisting file, in which case it will + # alsso return a str object. see #1133 + p = py.path.local(entry.path) + return p != cutdir1 and not p.relto(cutdir2) and not p.relto(cutdir3) + + + +def pyobj_property(name): + def get(self): + node = self.getparent(getattr(pytest, name)) + if node is not None: + return node.obj + doc = "python %s object this node was collected from (can be None)." 
% ( + name.lower(),) + return property(get, None, None, doc) + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group.addoption('--fixtures', '--funcargs', + action="store_true", dest="showfixtures", default=False, + help="show available fixtures, sorted by plugin appearance") + group.addoption( + '--fixtures-per-test', + action="store_true", + dest="show_fixtures_per_test", + default=False, + help="show fixtures per test", + ) + parser.addini("usefixtures", type="args", default=[], + help="list of default fixtures to be used with this project") + parser.addini("python_files", type="args", + default=['test_*.py', '*_test.py'], + help="glob-style file patterns for Python test module discovery") + parser.addini("python_classes", type="args", default=["Test",], + help="prefixes or glob names for Python test class discovery") + parser.addini("python_functions", type="args", default=["test",], + help="prefixes or glob names for Python test function and " + "method discovery") + + group.addoption("--import-mode", default="prepend", + choices=["prepend", "append"], dest="importmode", + help="prepend/append to sys.path when importing test modules, " + "default is to prepend.") + + +def pytest_cmdline_main(config): + if config.option.showfixtures: + showfixtures(config) + return 0 + if config.option.show_fixtures_per_test: + show_fixtures_per_test(config) + return 0 + + +def pytest_generate_tests(metafunc): + # those alternative spellings are common - raise a specific error to alert + # the user + alt_spellings = ['parameterize', 'parametrise', 'parameterise'] + for attr in alt_spellings: + if hasattr(metafunc.function, attr): + msg = "{0} has '{1}', spelling should be 'parametrize'" + raise MarkerError(msg.format(metafunc.function.__name__, attr)) + try: + markers = metafunc.function.parametrize + except AttributeError: + return + for marker in markers: + metafunc.parametrize(*marker.args, **marker.kwargs) + +def pytest_configure(config): + config.addinivalue_line("markers", + "parametrize(argnames, argvalues): call a test function multiple " + "times passing in different arguments in turn. argvalues generally " + "needs to be a list of values if argnames specifies only one name " + "or a list of tuples of values if argnames specifies multiple names. " + "Example: @parametrize('arg1', [1,2]) would lead to two calls of the " + "decorated test function, one with arg1=1 and another with arg1=2." + "see http://pytest.org/latest/parametrize.html for more info and " + "examples." + ) + config.addinivalue_line("markers", + "usefixtures(fixturename1, fixturename2, ...): mark tests as needing " + "all of the specified fixtures. 
see http://pytest.org/latest/fixture.html#usefixtures " + ) + +@pytest.hookimpl(trylast=True) +def pytest_namespace(): + raises.Exception = pytest.fail.Exception + return { + 'raises': raises, + 'approx': approx, + 'collect': { + 'Module': Module, + 'Class': Class, + 'Instance': Instance, + 'Function': Function, + 'Generator': Generator, + } + } + + +@pytest.hookimpl(trylast=True) +def pytest_pyfunc_call(pyfuncitem): + testfunction = pyfuncitem.obj + if pyfuncitem._isyieldedfunction(): + testfunction(*pyfuncitem._args) + else: + funcargs = pyfuncitem.funcargs + testargs = {} + for arg in pyfuncitem._fixtureinfo.argnames: + testargs[arg] = funcargs[arg] + testfunction(**testargs) + return True + +def pytest_collect_file(path, parent): + ext = path.ext + if ext == ".py": + if not parent.session.isinitpath(path): + for pat in parent.config.getini('python_files'): + if path.fnmatch(pat): + break + else: + return + ihook = parent.session.gethookproxy(path) + return ihook.pytest_pycollect_makemodule(path=path, parent=parent) + +def pytest_pycollect_makemodule(path, parent): + return Module(path, parent) + +@pytest.hookimpl(hookwrapper=True) +def pytest_pycollect_makeitem(collector, name, obj): + outcome = yield + res = outcome.get_result() + if res is not None: + raise StopIteration + # nothing was collected elsewhere, let's do it here + if isclass(obj): + if collector.istestclass(obj, name): + Class = collector._getcustomclass("Class") + outcome.force_result(Class(name, parent=collector)) + elif collector.istestfunction(obj, name): + # mock seems to store unbound methods (issue473), normalize it + obj = getattr(obj, "__func__", obj) + # We need to try and unwrap the function if it's a functools.partial + # or a funtools.wrapped. + # We musn't if it's been wrapped with mock.patch (python 2 only) + if not (isfunction(obj) or isfunction(get_real_func(obj))): + collector.warn(code="C2", message= + "cannot collect %r because it is not a function." + % name, ) + elif getattr(obj, "__test__", True): + if is_generator(obj): + res = Generator(name, parent=collector) + else: + res = list(collector._genfunctions(name, obj)) + outcome.force_result(res) + +def pytest_make_parametrize_id(config, val): + return None + + + +class PyobjContext(object): + module = pyobj_property("Module") + cls = pyobj_property("Class") + instance = pyobj_property("Instance") + +class PyobjMixin(PyobjContext): + def obj(): + def fget(self): + obj = getattr(self, '_obj', None) + if obj is None: + self._obj = obj = self._getobj() + return obj + + def fset(self, value): + self._obj = value + + return property(fget, fset, None, "underlying python object") + + obj = obj() + + def _getobj(self): + return getattr(self.parent.obj, self.name) + + def getmodpath(self, stopatmodule=True, includemodule=False): + """ return python path relative to the containing module. """ + chain = self.listchain() + chain.reverse() + parts = [] + for node in chain: + if isinstance(node, Instance): + continue + name = node.name + if isinstance(node, Module): + assert name.endswith(".py") + name = name[:-3] + if stopatmodule: + if includemodule: + parts.append(name) + break + parts.append(name) + parts.reverse() + s = ".".join(parts) + return s.replace(".[", "[") + + def _getfslineno(self): + return getfslineno(self.obj) + + def reportinfo(self): + # XXX caching? 
+ obj = self.obj + compat_co_firstlineno = getattr(obj, 'compat_co_firstlineno', None) + if isinstance(compat_co_firstlineno, int): + # nose compatibility + fspath = sys.modules[obj.__module__].__file__ + if fspath.endswith(".pyc"): + fspath = fspath[:-1] + lineno = compat_co_firstlineno + else: + fspath, lineno = getfslineno(obj) + modpath = self.getmodpath() + assert isinstance(lineno, int) + return fspath, lineno, modpath + +class PyCollector(PyobjMixin, pytest.Collector): + + def funcnamefilter(self, name): + return self._matches_prefix_or_glob_option('python_functions', name) + + def isnosetest(self, obj): + """ Look for the __test__ attribute, which is applied by the + @nose.tools.istest decorator + """ + # We explicitly check for "is True" here to not mistakenly treat + # classes with a custom __getattr__ returning something truthy (like a + # function) as test classes. + return safe_getattr(obj, '__test__', False) is True + + def classnamefilter(self, name): + return self._matches_prefix_or_glob_option('python_classes', name) + + def istestfunction(self, obj, name): + return ( + (self.funcnamefilter(name) or self.isnosetest(obj)) and + safe_getattr(obj, "__call__", False) and fixtures.getfixturemarker(obj) is None + ) + + def istestclass(self, obj, name): + return self.classnamefilter(name) or self.isnosetest(obj) + + def _matches_prefix_or_glob_option(self, option_name, name): + """ + checks if the given name matches the prefix or glob-pattern defined + in ini configuration. + """ + for option in self.config.getini(option_name): + if name.startswith(option): + return True + # check that name looks like a glob-string before calling fnmatch + # because this is called for every name in each collected module, + # and fnmatch is somewhat expensive to call + elif ('*' in option or '?' in option or '[' in option) and \ + fnmatch.fnmatch(name, option): + return True + return False + + def collect(self): + if not getattr(self.obj, "__test__", True): + return [] + + # NB. we avoid random getattrs and peek in the __dict__ instead + # (XXX originally introduced from a PyPy need, still true?) 
+ dicts = [getattr(self.obj, '__dict__', {})] + for basecls in inspect.getmro(self.obj.__class__): + dicts.append(basecls.__dict__) + seen = {} + l = [] + for dic in dicts: + for name, obj in list(dic.items()): + if name in seen: + continue + seen[name] = True + res = self.makeitem(name, obj) + if res is None: + continue + if not isinstance(res, list): + res = [res] + l.extend(res) + l.sort(key=lambda item: item.reportinfo()[:2]) + return l + + def makeitem(self, name, obj): + #assert self.ihook.fspath == self.fspath, self + return self.ihook.pytest_pycollect_makeitem( + collector=self, name=name, obj=obj) + + def _genfunctions(self, name, funcobj): + module = self.getparent(Module).obj + clscol = self.getparent(Class) + cls = clscol and clscol.obj or None + transfer_markers(funcobj, cls, module) + fm = self.session._fixturemanager + fixtureinfo = fm.getfixtureinfo(self, funcobj, cls) + metafunc = Metafunc(funcobj, fixtureinfo, self.config, + cls=cls, module=module) + methods = [] + if hasattr(module, "pytest_generate_tests"): + methods.append(module.pytest_generate_tests) + if hasattr(cls, "pytest_generate_tests"): + methods.append(cls().pytest_generate_tests) + if methods: + self.ihook.pytest_generate_tests.call_extra(methods, + dict(metafunc=metafunc)) + else: + self.ihook.pytest_generate_tests(metafunc=metafunc) + + Function = self._getcustomclass("Function") + if not metafunc._calls: + yield Function(name, parent=self, fixtureinfo=fixtureinfo) + else: + # add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs + fixtures.add_funcarg_pseudo_fixture_def(self, metafunc, fm) + + for callspec in metafunc._calls: + subname = "%s[%s]" % (name, callspec.id) + yield Function(name=subname, parent=self, + callspec=callspec, callobj=funcobj, + fixtureinfo=fixtureinfo, + keywords={callspec.id:True}, + originalname=name, + ) + + +def _marked(func, mark): + """ Returns True if :func: is already marked with :mark:, False otherwise. + This can happen if marker is applied to class and the test file is + invoked more than once. + """ + try: + func_mark = getattr(func, mark.name) + except AttributeError: + return False + return mark.args == func_mark.args and mark.kwargs == func_mark.kwargs + + +def transfer_markers(funcobj, cls, mod): + # XXX this should rather be code in the mark plugin or the mark + # plugin should merge with the python plugin. + for holder in (cls, mod): + try: + pytestmark = holder.pytestmark + except AttributeError: + continue + if isinstance(pytestmark, list): + for mark in pytestmark: + if not _marked(funcobj, mark): + mark(funcobj) + else: + if not _marked(funcobj, pytestmark): + pytestmark(funcobj) + +class Module(pytest.File, PyCollector): + """ Collector for test classes and functions. 
""" + def _getobj(self): + return self._memoizedcall('_obj', self._importtestmodule) + + def collect(self): + self.session._fixturemanager.parsefactories(self) + return super(Module, self).collect() + + def _importtestmodule(self): + # we assume we are only called once per module + importmode = self.config.getoption("--import-mode") + try: + mod = self.fspath.pyimport(ensuresyspath=importmode) + except SyntaxError: + raise self.CollectError( + _pytest._code.ExceptionInfo().getrepr(style="short")) + except self.fspath.ImportMismatchError: + e = sys.exc_info()[1] + raise self.CollectError( + "import file mismatch:\n" + "imported module %r has this __file__ attribute:\n" + " %s\n" + "which is not the same as the test file we want to collect:\n" + " %s\n" + "HINT: remove __pycache__ / .pyc files and/or use a " + "unique basename for your test file modules" + % e.args + ) + except ImportError: + from _pytest._code.code import ExceptionInfo + exc_info = ExceptionInfo() + if self.config.getoption('verbose') < 2: + exc_info.traceback = exc_info.traceback.filter(filter_traceback) + exc_repr = exc_info.getrepr(style='short') if exc_info.traceback else exc_info.exconly() + formatted_tb = py._builtin._totext(exc_repr) + raise self.CollectError( + "ImportError while importing test module '{fspath}'.\n" + "Hint: make sure your test modules/packages have valid Python names.\n" + "Traceback:\n" + "{traceback}".format(fspath=self.fspath, traceback=formatted_tb) + ) + except _pytest.runner.Skipped as e: + if e.allow_module_level: + raise + raise self.CollectError( + "Using pytest.skip outside of a test is not allowed. If you are " + "trying to decorate a test function, use the @pytest.mark.skip " + "or @pytest.mark.skipif decorators instead." + ) + self.config.pluginmanager.consider_module(mod) + return mod + + def setup(self): + setup_module = _get_xunit_setup_teardown(self.obj, "setUpModule") + if setup_module is None: + setup_module = _get_xunit_setup_teardown(self.obj, "setup_module") + if setup_module is not None: + setup_module() + + teardown_module = _get_xunit_setup_teardown(self.obj, 'tearDownModule') + if teardown_module is None: + teardown_module = _get_xunit_setup_teardown(self.obj, 'teardown_module') + if teardown_module is not None: + self.addfinalizer(teardown_module) + + +def _get_xunit_setup_teardown(holder, attr_name, param_obj=None): + """ + Return a callable to perform xunit-style setup or teardown if + the function exists in the ``holder`` object. + The ``param_obj`` parameter is the parameter which will be passed to the function + when the callable is called without arguments, defaults to the ``holder`` object. + Return ``None`` if a suitable callable is not found. + """ + param_obj = param_obj if param_obj is not None else holder + result = _get_xunit_func(holder, attr_name) + if result is not None: + arg_count = result.__code__.co_argcount + if inspect.ismethod(result): + arg_count -= 1 + if arg_count: + return lambda: result(param_obj) + else: + return result + + +def _get_xunit_func(obj, name): + """Return the attribute from the given object to be used as a setup/teardown + xunit-style function, but only if not marked as a fixture to + avoid calling it twice. + """ + meth = getattr(obj, name, None) + if fixtures.getfixturemarker(meth) is None: + return meth + + +class Class(PyCollector): + """ Collector for test methods. 
""" + def collect(self): + if hasinit(self.obj): + self.warn("C1", "cannot collect test class %r because it has a " + "__init__ constructor" % self.obj.__name__) + return [] + elif hasnew(self.obj): + self.warn("C1", "cannot collect test class %r because it has a " + "__new__ constructor" % self.obj.__name__) + return [] + return [self._getcustomclass("Instance")(name="()", parent=self)] + + def setup(self): + setup_class = _get_xunit_func(self.obj, 'setup_class') + if setup_class is not None: + setup_class = getattr(setup_class, 'im_func', setup_class) + setup_class = getattr(setup_class, '__func__', setup_class) + setup_class(self.obj) + + fin_class = getattr(self.obj, 'teardown_class', None) + if fin_class is not None: + fin_class = getattr(fin_class, 'im_func', fin_class) + fin_class = getattr(fin_class, '__func__', fin_class) + self.addfinalizer(lambda: fin_class(self.obj)) + +class Instance(PyCollector): + def _getobj(self): + return self.parent.obj() + + def collect(self): + self.session._fixturemanager.parsefactories(self) + return super(Instance, self).collect() + + def newinstance(self): + self.obj = self._getobj() + return self.obj + +class FunctionMixin(PyobjMixin): + """ mixin for the code common to Function and Generator. + """ + + def setup(self): + """ perform setup for this test function. """ + if hasattr(self, '_preservedparent'): + obj = self._preservedparent + elif isinstance(self.parent, Instance): + obj = self.parent.newinstance() + self.obj = self._getobj() + else: + obj = self.parent.obj + if inspect.ismethod(self.obj): + setup_name = 'setup_method' + teardown_name = 'teardown_method' + else: + setup_name = 'setup_function' + teardown_name = 'teardown_function' + setup_func_or_method = _get_xunit_setup_teardown(obj, setup_name, param_obj=self.obj) + if setup_func_or_method is not None: + setup_func_or_method() + teardown_func_or_method = _get_xunit_setup_teardown(obj, teardown_name, param_obj=self.obj) + if teardown_func_or_method is not None: + self.addfinalizer(teardown_func_or_method) + + def _prunetraceback(self, excinfo): + if hasattr(self, '_obj') and not self.config.option.fulltrace: + code = _pytest._code.Code(get_real_func(self.obj)) + path, firstlineno = code.path, code.firstlineno + traceback = excinfo.traceback + ntraceback = traceback.cut(path=path, firstlineno=firstlineno) + if ntraceback == traceback: + ntraceback = ntraceback.cut(path=path) + if ntraceback == traceback: + #ntraceback = ntraceback.cut(excludepath=cutdir2) + ntraceback = ntraceback.filter(filter_traceback) + if not ntraceback: + ntraceback = traceback + + excinfo.traceback = ntraceback.filter() + # issue364: mark all but first and last frames to + # only show a single-line message for each frame + if self.config.option.tbstyle == "auto": + if len(excinfo.traceback) > 2: + for entry in excinfo.traceback[1:-1]: + entry.set_repr_style('short') + + def _repr_failure_py(self, excinfo, style="long"): + if excinfo.errisinstance(pytest.fail.Exception): + if not excinfo.value.pytrace: + return py._builtin._totext(excinfo.value) + return super(FunctionMixin, self)._repr_failure_py(excinfo, + style=style) + + def repr_failure(self, excinfo, outerr=None): + assert outerr is None, "XXX outerr usage is deprecated" + style = self.config.option.tbstyle + if style == "auto": + style = "long" + return self._repr_failure_py(excinfo, style=style) + + +class Generator(FunctionMixin, PyCollector): + def collect(self): + # test generators are seen as collectors but they also + # invoke setup/teardown on 
popular request
+        # (induced by the common "test_*" naming shared with normal tests)
+        from _pytest import deprecated
+        self.session._setupstate.prepare(self)
+        # see FunctionMixin.setup and test_setupstate_is_preserved_134
+        self._preservedparent = self.parent.obj
+        l = []
+        seen = {}
+        for i, x in enumerate(self.obj()):
+            name, call, args = self.getcallargs(x)
+            if not callable(call):
+                raise TypeError("%r yielded non callable test %r" %(self.obj, call,))
+            if name is None:
+                name = "[%d]" % i
+            else:
+                name = "['%s']" % name
+            if name in seen:
+                raise ValueError("%r generated tests with non-unique name %r" %(self, name))
+            seen[name] = True
+            l.append(self.Function(name, self, args=args, callobj=call))
+        self.config.warn('C1', deprecated.YIELD_TESTS, fslocation=self.fspath)
+        return l
+
+    def getcallargs(self, obj):
+        if not isinstance(obj, (tuple, list)):
+            obj = (obj,)
+        # explicit naming
+        if isinstance(obj[0], py.builtin._basestring):
+            name = obj[0]
+            obj = obj[1:]
+        else:
+            name = None
+        call, args = obj[0], obj[1:]
+        return name, call, args
+
+
+def hasinit(obj):
+    init = getattr(obj, '__init__', None)
+    if init:
+        return init != object.__init__
+
+
+def hasnew(obj):
+    new = getattr(obj, '__new__', None)
+    if new:
+        return new != object.__new__
+
+
+class CallSpec2(object):
+    def __init__(self, metafunc):
+        self.metafunc = metafunc
+        self.funcargs = {}
+        self._idlist = []
+        self.params = {}
+        self._globalid = NOTSET
+        self._globalid_args = set()
+        self._globalparam = NOTSET
+        self._arg2scopenum = {}  # used for sorting parametrized resources
+        self.keywords = {}
+        self.indices = {}
+
+    def copy(self, metafunc):
+        cs = CallSpec2(self.metafunc)
+        cs.funcargs.update(self.funcargs)
+        cs.params.update(self.params)
+        cs.keywords.update(self.keywords)
+        cs.indices.update(self.indices)
+        cs._arg2scopenum.update(self._arg2scopenum)
+        cs._idlist = list(self._idlist)
+        cs._globalid = self._globalid
+        cs._globalid_args = self._globalid_args
+        cs._globalparam = self._globalparam
+        return cs
+
+    def _checkargnotcontained(self, arg):
+        if arg in self.params or arg in self.funcargs:
+            raise ValueError("duplicate %r" %(arg,))
+
+    def getparam(self, name):
+        try:
+            return self.params[name]
+        except KeyError:
+            if self._globalparam is NOTSET:
+                raise ValueError(name)
+            return self._globalparam
+
+    @property
+    def id(self):
+        return "-".join(map(str, filter(None, self._idlist)))
+
+    def setmulti(self, valtypes, argnames, valset, id, keywords, scopenum,
+                 param_index):
+        for arg,val in zip(argnames, valset):
+            self._checkargnotcontained(arg)
+            valtype_for_arg = valtypes[arg]
+            getattr(self, valtype_for_arg)[arg] = val
+            self.indices[arg] = param_index
+            self._arg2scopenum[arg] = scopenum
+        self._idlist.append(id)
+        self.keywords.update(keywords)
+
+    def setall(self, funcargs, id, param):
+        for x in funcargs:
+            self._checkargnotcontained(x)
+        self.funcargs.update(funcargs)
+        if id is not NOTSET:
+            self._idlist.append(id)
+        if param is not NOTSET:
+            assert self._globalparam is NOTSET
+            self._globalparam = param
+        for arg in funcargs:
+            self._arg2scopenum[arg] = fixtures.scopenum_function
+
+
+class Metafunc(fixtures.FuncargnamesCompatAttr):
+    """
+    Metafunc objects are passed to the ``pytest_generate_tests`` hook.
+    They help to inspect a test function and to generate tests according to
+    test configuration or values specified in the class or module where a
+    test function is defined.
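+
+    For example, a ``conftest.py`` hook can use the metafunc to parametrize
+    every test that asks for a (hypothetical) ``backend`` argument::
+
+        def pytest_generate_tests(metafunc):
+            if "backend" in metafunc.fixturenames:
+                metafunc.parametrize("backend", ["sqlite", "postgres"])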
+ """ + def __init__(self, function, fixtureinfo, config, cls=None, module=None): + #: access to the :class:`_pytest.config.Config` object for the test session + self.config = config + + #: the module object where the test function is defined in. + self.module = module + + #: underlying python test function + self.function = function + + #: set of fixture names required by the test function + self.fixturenames = fixtureinfo.names_closure + + #: class object where the test function is defined in or ``None``. + self.cls = cls + + self._calls = [] + self._ids = py.builtin.set() + self._arg2fixturedefs = fixtureinfo.name2fixturedefs + + def parametrize(self, argnames, argvalues, indirect=False, ids=None, + scope=None): + """ Add new invocations to the underlying test function using the list + of argvalues for the given argnames. Parametrization is performed + during the collection phase. If you need to setup expensive resources + see about setting indirect to do it rather at test setup time. + + :arg argnames: a comma-separated string denoting one or more argument + names, or a list/tuple of argument strings. + + :arg argvalues: The list of argvalues determines how often a + test is invoked with different argument values. If only one + argname was specified argvalues is a list of values. If N + argnames were specified, argvalues must be a list of N-tuples, + where each tuple-element specifies a value for its respective + argname. + + :arg indirect: The list of argnames or boolean. A list of arguments' + names (subset of argnames). If True the list contains all names from + the argnames. Each argvalue corresponding to an argname in this list will + be passed as request.param to its respective argname fixture + function so that it can perform more expensive setups during the + setup phase of a test rather than at collection time. + + :arg ids: list of string ids, or a callable. + If strings, each is corresponding to the argvalues so that they are + part of the test id. If None is given as id of specific test, the + automatically generated id for that argument will be used. + If callable, it should take one argument (a single argvalue) and return + a string or return None. If None, the automatically generated id for that + argument will be used. + If no ids are provided they will be generated automatically from + the argvalues. + + :arg scope: if specified it denotes the scope of the parameters. + The scope is used for grouping tests by parameter instances. + It will also override any fixture-function defined scope, allowing + to set a dynamic scope using test context or configuration. 
+ """ + from _pytest.fixtures import scope2index + from _pytest.mark import extract_argvalue + from py.io import saferepr + + unwrapped_argvalues = [] + newkeywords = [] + for maybe_marked_args in argvalues: + argval, newmarks = extract_argvalue(maybe_marked_args) + unwrapped_argvalues.append(argval) + newkeywords.append(newmarks) + argvalues = unwrapped_argvalues + + if not isinstance(argnames, (tuple, list)): + argnames = [x.strip() for x in argnames.split(",") if x.strip()] + if len(argnames) == 1: + argvalues = [(val,) for val in argvalues] + if not argvalues: + argvalues = [(NOTSET,) * len(argnames)] + # we passed a empty list to parameterize, skip that test + # + fs, lineno = getfslineno(self.function) + newmark = pytest.mark.skip( + reason="got empty parameter set %r, function %s at %s:%d" % ( + argnames, self.function.__name__, fs, lineno)) + newkeywords = [{newmark.markname: newmark}] + + if scope is None: + scope = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect) + + scopenum = scope2index( + scope, descr='call to {0}'.format(self.parametrize)) + valtypes = {} + for arg in argnames: + if arg not in self.fixturenames: + if isinstance(indirect, (tuple, list)): + name = 'fixture' if arg in indirect else 'argument' + else: + name = 'fixture' if indirect else 'argument' + raise ValueError( + "%r uses no %s %r" % ( + self.function, name, arg)) + + if indirect is True: + valtypes = dict.fromkeys(argnames, "params") + elif indirect is False: + valtypes = dict.fromkeys(argnames, "funcargs") + elif isinstance(indirect, (tuple, list)): + valtypes = dict.fromkeys(argnames, "funcargs") + for arg in indirect: + if arg not in argnames: + raise ValueError("indirect given to %r: fixture %r doesn't exist" % ( + self.function, arg)) + valtypes[arg] = "params" + idfn = None + if callable(ids): + idfn = ids + ids = None + if ids: + if len(ids) != len(argvalues): + raise ValueError('%d tests specified with %d ids' %( + len(argvalues), len(ids))) + for id_value in ids: + if id_value is not None and not isinstance(id_value, py.builtin._basestring): + msg = 'ids must be list of strings, found: %s (type: %s)' + raise ValueError(msg % (saferepr(id_value), type(id_value).__name__)) + ids = idmaker(argnames, argvalues, idfn, ids, self.config) + newcalls = [] + for callspec in self._calls or [CallSpec2(self)]: + elements = zip(ids, argvalues, newkeywords, count()) + for a_id, valset, keywords, param_index in elements: + assert len(valset) == len(argnames) + newcallspec = callspec.copy(self) + newcallspec.setmulti(valtypes, argnames, valset, a_id, + keywords, scopenum, param_index) + newcalls.append(newcallspec) + self._calls = newcalls + + def addcall(self, funcargs=None, id=NOTSET, param=NOTSET): + """ (deprecated, use parametrize) Add a new call to the underlying + test function during the collection phase of a test run. Note that + request.addcall() is called during the test collection phase prior and + independently to actual test execution. You should only use addcall() + if you need to specify multiple arguments of a test function. + + :arg funcargs: argument keyword dictionary used when invoking + the test function. + + :arg id: used for reporting and identification purposes. If you + don't supply an `id` an automatic unique id will be generated. + + :arg param: a parameter which will be exposed to a later fixture function + invocation through the ``request.param`` attribute. 
+ """ + assert funcargs is None or isinstance(funcargs, dict) + if funcargs is not None: + for name in funcargs: + if name not in self.fixturenames: + pytest.fail("funcarg %r not used in this function." % name) + else: + funcargs = {} + if id is None: + raise ValueError("id=None not allowed") + if id is NOTSET: + id = len(self._calls) + id = str(id) + if id in self._ids: + raise ValueError("duplicate id %r" % id) + self._ids.add(id) + + cs = CallSpec2(self) + cs.setall(funcargs, id, param) + self._calls.append(cs) + + +def _find_parametrized_scope(argnames, arg2fixturedefs, indirect): + """Find the most appropriate scope for a parametrized call based on its arguments. + + When there's at least one direct argument, always use "function" scope. + + When a test function is parametrized and all its arguments are indirect + (e.g. fixtures), return the most narrow scope based on the fixtures used. + + Related to issue #1832, based on code posted by @Kingdread. + """ + from _pytest.fixtures import scopes + indirect_as_list = isinstance(indirect, (list, tuple)) + all_arguments_are_fixtures = indirect is True or \ + indirect_as_list and len(indirect) == argnames + if all_arguments_are_fixtures: + fixturedefs = arg2fixturedefs or {} + used_scopes = [fixturedef[0].scope for name, fixturedef in fixturedefs.items()] + if used_scopes: + # Takes the most narrow scope from used fixtures + for scope in reversed(scopes): + if scope in used_scopes: + return scope + + return 'function' + + +def _idval(val, argname, idx, idfn, config=None): + if idfn: + try: + s = idfn(val) + if s: + return _escape_strings(s) + except Exception: + pass + + if config: + hook_id = config.hook.pytest_make_parametrize_id(config=config, val=val) + if hook_id: + return hook_id + + if isinstance(val, STRING_TYPES): + return _escape_strings(val) + elif isinstance(val, (float, int, bool, NoneType)): + return str(val) + elif isinstance(val, REGEX_TYPE): + return _escape_strings(val.pattern) + elif enum is not None and isinstance(val, enum.Enum): + return str(val) + elif isclass(val) and hasattr(val, '__name__'): + return val.__name__ + return str(argname)+str(idx) + +def _idvalset(idx, valset, argnames, idfn, ids, config=None): + if ids is None or (idx >= len(ids) or ids[idx] is None): + this_id = [_idval(val, argname, idx, idfn, config) + for val, argname in zip(valset, argnames)] + return "-".join(this_id) + else: + return _escape_strings(ids[idx]) + +def idmaker(argnames, argvalues, idfn=None, ids=None, config=None): + ids = [_idvalset(valindex, valset, argnames, idfn, ids, config) + for valindex, valset in enumerate(argvalues)] + if len(set(ids)) != len(ids): + # The ids are not unique + duplicates = [testid for testid in ids if ids.count(testid) > 1] + counters = collections.defaultdict(lambda: 0) + for index, testid in enumerate(ids): + if testid in duplicates: + ids[index] = testid + str(counters[testid]) + counters[testid] += 1 + return ids + + +def show_fixtures_per_test(config): + from _pytest.main import wrap_session + return wrap_session(config, _show_fixtures_per_test) + + +def _show_fixtures_per_test(config, session): + import _pytest.config + session.perform_collect() + curdir = py.path.local() + tw = _pytest.config.create_terminal_writer(config) + verbose = config.getvalue("verbose") + + def get_best_rel(func): + loc = getlocation(func, curdir) + return curdir.bestrelpath(loc) + + def write_fixture(fixture_def): + argname = fixture_def.argname + + if verbose <= 0 and argname.startswith("_"): + return + if verbose > 0: + 
bestrel = get_best_rel(fixture_def.func) + funcargspec = "{0} -- {1}".format(argname, bestrel) + else: + funcargspec = argname + tw.line(funcargspec, green=True) + + INDENT = ' {0}' + fixture_doc = fixture_def.func.__doc__ + + if fixture_doc: + for line in fixture_doc.strip().split('\n'): + tw.line(INDENT.format(line.strip())) + else: + tw.line(INDENT.format('no docstring available'), red=True) + + def write_item(item): + name2fixturedefs = item._fixtureinfo.name2fixturedefs + + if not name2fixturedefs: + # The given test item does not use any fixtures + return + bestrel = get_best_rel(item.function) + + tw.line() + tw.sep('-', 'fixtures used by {0}'.format(item.name)) + tw.sep('-', '({0})'.format(bestrel)) + for argname, fixture_defs in sorted(name2fixturedefs.items()): + assert fixture_defs is not None + if not fixture_defs: + continue + # The last fixture def item in the list is expected + # to be the one used by the test item + write_fixture(fixture_defs[-1]) + + for item in session.items: + write_item(item) + + +def showfixtures(config): + from _pytest.main import wrap_session + return wrap_session(config, _showfixtures_main) + +def _showfixtures_main(config, session): + import _pytest.config + session.perform_collect() + curdir = py.path.local() + tw = _pytest.config.create_terminal_writer(config) + verbose = config.getvalue("verbose") + + fm = session._fixturemanager + + available = [] + seen = set() + + for argname, fixturedefs in fm._arg2fixturedefs.items(): + assert fixturedefs is not None + if not fixturedefs: + continue + for fixturedef in fixturedefs: + loc = getlocation(fixturedef.func, curdir) + if (fixturedef.argname, loc) in seen: + continue + seen.add((fixturedef.argname, loc)) + available.append((len(fixturedef.baseid), + fixturedef.func.__module__, + curdir.bestrelpath(loc), + fixturedef.argname, fixturedef)) + + available.sort() + currentmodule = None + for baseid, module, bestrel, argname, fixturedef in available: + if currentmodule != module: + if not module.startswith("_pytest."): + tw.line() + tw.sep("-", "fixtures defined from %s" %(module,)) + currentmodule = module + if verbose <= 0 and argname[0] == "_": + continue + if verbose > 0: + funcargspec = "%s -- %s" %(argname, bestrel,) + else: + funcargspec = argname + tw.line(funcargspec, green=True) + loc = getlocation(fixturedef.func, curdir) + doc = fixturedef.func.__doc__ or "" + if doc: + for line in doc.strip().split("\n"): + tw.line(" " + line.strip()) + else: + tw.line(" %s: no docstring available" %(loc,), + red=True) + + +# builtin pytest.raises helper + +def raises(expected_exception, *args, **kwargs): + """ + Assert that a code block/function call raises ``expected_exception`` + and raise a failure exception otherwise. + + This helper produces a ``ExceptionInfo()`` object (see below). + + If using Python 2.5 or above, you may use this function as a + context manager:: + + >>> with raises(ZeroDivisionError): + ... 1/0 + + .. versionchanged:: 2.10 + + In the context manager form you may use the keyword argument + ``message`` to specify a custom failure message:: + + >>> with raises(ZeroDivisionError, message="Expecting ZeroDivisionError"): + ... pass + Traceback (most recent call last): + ... + Failed: Expecting ZeroDivisionError + + + .. note:: + + When using ``pytest.raises`` as a context manager, it's worthwhile to + note that normal context manager rules apply and that the exception + raised *must* be the final line in the scope of the context manager. 
+        Lines of code after that, within the scope of the context manager,
+        will not be executed.  For example::
+
+            >>> value = 15
+            >>> with raises(ValueError) as exc_info:
+            ...     if value > 10:
+            ...         raise ValueError("value must be <= 10")
+            ...     assert str(exc_info.value) == "value must be <= 10"  # this will not execute
+
+        Instead, the following approach must be taken (note the difference in
+        scope)::
+
+            >>> with raises(ValueError) as exc_info:
+            ...     if value > 10:
+            ...         raise ValueError("value must be <= 10")
+            ...
+            >>> assert str(exc_info.value) == "value must be <= 10"
+
+
+    Or you can specify a callable by passing a to-be-called lambda::
+
+        >>> raises(ZeroDivisionError, lambda: 1/0)
+        <ExceptionInfo ...>
+
+    or you can specify an arbitrary callable with arguments::
+
+        >>> def f(x): return 1/x
+        ...
+        >>> raises(ZeroDivisionError, f, 0)
+        <ExceptionInfo ...>
+        >>> raises(ZeroDivisionError, f, x=0)
+        <ExceptionInfo ...>
+
+    A third possibility is to use a string to be executed::
+
+        >>> raises(ZeroDivisionError, "f(0)")
+        <ExceptionInfo ...>
+
+    .. autoclass:: _pytest._code.ExceptionInfo
+        :members:
+
+    .. note::
+        Similar to caught exception objects in Python, explicitly clearing
+        local references to returned ``ExceptionInfo`` objects can
+        help the Python interpreter speed up its garbage collection.
+
+        Clearing those references breaks a reference cycle
+        (``ExceptionInfo`` --> caught exception --> frame stack raising
+        the exception --> current frame stack --> local variables -->
+        ``ExceptionInfo``) which makes Python keep all objects referenced
+        from that cycle (including all local variables in the current
+        frame) alive until the next cyclic garbage collection run.  See the
+        official Python ``try`` statement documentation for more detailed
+        information.
+
+    """
+    __tracebackhide__ = True
+    if expected_exception is AssertionError:
+        # we want to catch an AssertionError
+        # replace our subclass with the builtin one
+        # see https://github.com/pytest-dev/pytest/issues/176
+        from _pytest.assertion.util import BuiltinAssertionError \
+            as expected_exception
+    msg = ("exceptions must be old-style classes or"
+           " derived from BaseException, not %s")
+    if isinstance(expected_exception, tuple):
+        for exc in expected_exception:
+            if not isclass(exc):
+                raise TypeError(msg % type(exc))
+    elif not isclass(expected_exception):
+        raise TypeError(msg % type(expected_exception))
+
+    message = "DID NOT RAISE {0}".format(expected_exception)
+
+    if not args:
+        if "message" in kwargs:
+            message = kwargs.pop("message")
+        return RaisesContext(expected_exception, message)
+    elif isinstance(args[0], str):
+        code, = args
+        assert isinstance(code, str)
+        frame = sys._getframe(1)
+        loc = frame.f_locals.copy()
+        loc.update(kwargs)
+        #print "raises frame scope: %r" % frame.f_locals
+        try:
+            code = _pytest._code.Source(code).compile()
+            py.builtin.exec_(code, frame.f_globals, loc)
+            # XXX didn't mean f_globals == f_locals something special?
+            # this is destroyed here ...
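+            # e.g. raises(ZeroDivisionError, "1/0") compiles the string and
+            # executes it against the caller's globals and a copy of its
+            # locals, so names visible at the call site can be used.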
+ except expected_exception: + return _pytest._code.ExceptionInfo() + else: + func = args[0] + try: + func(*args[1:], **kwargs) + except expected_exception: + return _pytest._code.ExceptionInfo() + pytest.fail(message) + +class RaisesContext(object): + def __init__(self, expected_exception, message): + self.expected_exception = expected_exception + self.message = message + self.excinfo = None + + def __enter__(self): + self.excinfo = object.__new__(_pytest._code.ExceptionInfo) + return self.excinfo + + def __exit__(self, *tp): + __tracebackhide__ = True + if tp[0] is None: + pytest.fail(self.message) + if sys.version_info < (2, 7): + # py26: on __exit__() exc_value often does not contain the + # exception value. + # http://bugs.python.org/issue7853 + if not isinstance(tp[1], BaseException): + exc_type, value, traceback = tp + tp = exc_type, exc_type(value), traceback + self.excinfo.__init__(tp) + suppress_exception = issubclass(self.excinfo.type, self.expected_exception) + if sys.version_info[0] == 2 and suppress_exception: + sys.exc_clear() + return suppress_exception + + +# builtin pytest.approx helper + +class approx(object): + """ + Assert that two numbers (or two sets of numbers) are equal to each other + within some tolerance. + + Due to the `intricacies of floating-point arithmetic`__, numbers that we + would intuitively expect to be equal are not always so:: + + >>> 0.1 + 0.2 == 0.3 + False + + __ https://docs.python.org/3/tutorial/floatingpoint.html + + This problem is commonly encountered when writing tests, e.g. when making + sure that floating-point values are what you expect them to be. One way to + deal with this problem is to assert that two floating-point numbers are + equal to within some appropriate tolerance:: + + >>> abs((0.1 + 0.2) - 0.3) < 1e-6 + True + + However, comparisons like this are tedious to write and difficult to + understand. Furthermore, absolute comparisons like the one above are + usually discouraged because there's no tolerance that works well for all + situations. ``1e-6`` is good for numbers around ``1``, but too small for + very big numbers and too big for very small ones. It's better to express + the tolerance as a fraction of the expected value, but relative comparisons + like that are even more difficult to write correctly and concisely. + + The ``approx`` class performs floating-point comparisons using a syntax + that's as intuitive as possible:: + + >>> from pytest import approx + >>> 0.1 + 0.2 == approx(0.3) + True + + The same syntax also works on sequences of numbers:: + + >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6)) + True + + By default, ``approx`` considers numbers within a relative tolerance of + ``1e-6`` (i.e. one part in a million) of its expected value to be equal. + This treatment would lead to surprising results if the expected value was + ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``. + To handle this case less surprisingly, ``approx`` also considers numbers + within an absolute tolerance of ``1e-12`` of its expected value to be + equal. Infinite numbers are another special case. They are only + considered equal to themselves, regardless of the relative tolerance. 
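+
+    For instance::
+
+        >>> float('inf') == approx(float('inf'))
+        True
+        >>> 1e300 == approx(float('inf'))
+        False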
Both
+    the relative and absolute tolerances can be changed by passing arguments to
+    the ``approx`` constructor::
+
+        >>> 1.0001 == approx(1)
+        False
+        >>> 1.0001 == approx(1, rel=1e-3)
+        True
+        >>> 1.0001 == approx(1, abs=1e-3)
+        True
+
+    If you specify ``abs`` but not ``rel``, the comparison will not consider
+    the relative tolerance at all.  In other words, two numbers that are within
+    the default relative tolerance of ``1e-6`` will still be considered unequal
+    if they exceed the specified absolute tolerance.  If you specify both
+    ``abs`` and ``rel``, the numbers will be considered equal if either
+    tolerance is met::
+
+        >>> 1 + 1e-8 == approx(1)
+        True
+        >>> 1 + 1e-8 == approx(1, abs=1e-12)
+        False
+        >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12)
+        True
+
+    If you're thinking about using ``approx``, then you might want to know how
+    it compares to other good ways of comparing floating-point numbers.  All of
+    these algorithms are based on relative and absolute tolerances and should
+    agree for the most part, but they do have meaningful differences:
+
+    - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative
+      tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute
+      tolerance is met.  Because the relative tolerance is calculated w.r.t.
+      both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor
+      ``b`` is a "reference value").  You have to specify an absolute tolerance
+      if you want to compare to ``0.0`` because there is no tolerance by
+      default.  Only available in python>=3.5.  `More information...`__
+
+      __ https://docs.python.org/3/library/math.html#math.isclose
+
+    - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference
+      between ``a`` and ``b`` is less than the sum of the relative tolerance
+      w.r.t. ``b`` and the absolute tolerance.  Because the relative tolerance
+      is only calculated w.r.t. ``b``, this test is asymmetric and you can
+      think of ``b`` as the reference value.  Support for comparing sequences
+      is provided by ``numpy.allclose``.  `More information...`__
+
+      __ http://docs.scipy.org/doc/numpy-1.10.0/reference/generated/numpy.isclose.html
+
+    - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b``
+      are within an absolute tolerance of ``1e-7``.  No relative tolerance is
+      considered and the absolute tolerance cannot be changed, so this function
+      is not appropriate for very large or very small numbers.  Also, it's only
+      available in subclasses of ``unittest.TestCase`` and it's ugly because it
+      doesn't follow PEP8.  `More information...`__
+
+      __ https://docs.python.org/3/library/unittest.html#unittest.TestCase.assertAlmostEqual
+
+    - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative
+      tolerance is met w.r.t. ``b`` or if the absolute tolerance is met.
+      Because the relative tolerance is only calculated w.r.t. ``b``, this test
+      is asymmetric and you can think of ``b`` as the reference value.  In the
+      special case that you explicitly specify an absolute tolerance but not a
+      relative tolerance, only the absolute tolerance is considered.
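+
+    As noted above, comparisons against ``0.0`` fall back to the default
+    absolute tolerance of ``1e-12``::
+
+        >>> 1e-13 == approx(0.0)
+        True
+        >>> 1e-10 == approx(0.0)
+        False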
+ """ + + def __init__(self, expected, rel=None, abs=None): + self.expected = expected + self.abs = abs + self.rel = rel + + def __repr__(self): + return ', '.join(repr(x) for x in self.expected) + + def __eq__(self, actual): + from collections import Iterable + if not isinstance(actual, Iterable): + actual = [actual] + if len(actual) != len(self.expected): + return False + return all(a == x for a, x in zip(actual, self.expected)) + + __hash__ = None + + def __ne__(self, actual): + return not (actual == self) + + @property + def expected(self): + # Regardless of whether the user-specified expected value is a number + # or a sequence of numbers, return a list of ApproxNotIterable objects + # that can be compared against. + from collections import Iterable + approx_non_iter = lambda x: ApproxNonIterable(x, self.rel, self.abs) + if isinstance(self._expected, Iterable): + return [approx_non_iter(x) for x in self._expected] + else: + return [approx_non_iter(self._expected)] + + @expected.setter + def expected(self, expected): + self._expected = expected + + +class ApproxNonIterable(object): + """ + Perform approximate comparisons for single numbers only. + + In other words, the ``expected`` attribute for objects of this class must + be some sort of number. This is in contrast to the ``approx`` class, where + the ``expected`` attribute can either be a number of a sequence of numbers. + This class is responsible for making comparisons, while ``approx`` is + responsible for abstracting the difference between numbers and sequences of + numbers. Although this class can stand on its own, it's only meant to be + used within ``approx``. + """ + + def __init__(self, expected, rel=None, abs=None): + self.expected = expected + self.abs = abs + self.rel = rel + + def __repr__(self): + if isinstance(self.expected, complex): + return str(self.expected) + + # Infinities aren't compared using tolerances, so don't show a + # tolerance. + if math.isinf(self.expected): + return str(self.expected) + + # If a sensible tolerance can't be calculated, self.tolerance will + # raise a ValueError. In this case, display '???'. + try: + vetted_tolerance = '{:.1e}'.format(self.tolerance) + except ValueError: + vetted_tolerance = '???' + + if sys.version_info[0] == 2: + return '{0} +- {1}'.format(self.expected, vetted_tolerance) + else: + return u'{0} \u00b1 {1}'.format(self.expected, vetted_tolerance) + + def __eq__(self, actual): + # Short-circuit exact equality. + if actual == self.expected: + return True + + # Infinity shouldn't be approximately equal to anything but itself, but + # if there's a relative tolerance, it will be infinite and infinity + # will seem approximately equal to everything. The equal-to-itself + # case would have been short circuited above, so here we can just + # return false if the expected value is infinite. The abs() call is + # for compatibility with complex numbers. + if math.isinf(abs(self.expected)): + return False + + # Return true if the two numbers are within the tolerance. + return abs(self.expected - actual) <= self.tolerance + + __hash__ = None + + def __ne__(self, actual): + return not (actual == self) + + @property + def tolerance(self): + set_default = lambda x, default: x if x is not None else default + + # Figure out what the absolute tolerance should be. ``self.abs`` is + # either None or a value specified by the user. 
+        absolute_tolerance = set_default(self.abs, 1e-12)
+
+        if absolute_tolerance < 0:
+            raise ValueError("absolute tolerance can't be negative: {}".format(absolute_tolerance))
+        if math.isnan(absolute_tolerance):
+            raise ValueError("absolute tolerance can't be NaN.")
+
+        # If the user specified an absolute tolerance but not a relative one,
+        # just return the absolute tolerance.
+        if self.rel is None:
+            if self.abs is not None:
+                return absolute_tolerance
+
+        # Figure out what the relative tolerance should be.  ``self.rel`` is
+        # either None or a value specified by the user.  This is done after
+        # we've made sure the user didn't ask for an absolute tolerance only,
+        # because we don't want to raise errors about the relative tolerance if
+        # we aren't even going to use it.
+        relative_tolerance = set_default(self.rel, 1e-6) * abs(self.expected)
+
+        if relative_tolerance < 0:
+            raise ValueError("relative tolerance can't be negative: {}".format(relative_tolerance))
+        if math.isnan(relative_tolerance):
+            raise ValueError("relative tolerance can't be NaN.")
+
+        # Return the larger of the relative and absolute tolerances.
+        return max(relative_tolerance, absolute_tolerance)
+
+
+#
+# the basic pytest Function item
+#
+
+class Function(FunctionMixin, pytest.Item, fixtures.FuncargnamesCompatAttr):
+    """ a Function Item is responsible for setting up and executing a
+    Python test function.
+    """
+    _genid = None
+    def __init__(self, name, parent, args=None, config=None,
+                 callspec=None, callobj=NOTSET, keywords=None, session=None,
+                 fixtureinfo=None, originalname=None):
+        super(Function, self).__init__(name, parent, config=config,
+                                       session=session)
+        self._args = args
+        if callobj is not NOTSET:
+            self.obj = callobj
+
+        self.keywords.update(self.obj.__dict__)
+        if callspec:
+            self.callspec = callspec
+            self.keywords.update(callspec.keywords)
+        if keywords:
+            self.keywords.update(keywords)
+
+        if fixtureinfo is None:
+            fixtureinfo = self.session._fixturemanager.getfixtureinfo(
+                self.parent, self.obj, self.cls,
+                funcargs=not self._isyieldedfunction())
+        self._fixtureinfo = fixtureinfo
+        self.fixturenames = fixtureinfo.names_closure
+        self._initrequest()
+
+        #: original function name, without any decorations (for example
+        #: parametrization adds a ``"[...]"`` suffix to function names).
+        #:
+        #: .. versionadded:: 3.0
+        self.originalname = originalname
+
+    def _initrequest(self):
+        self.funcargs = {}
+        if self._isyieldedfunction():
+            assert not hasattr(self, "callspec"), (
+                "yielded functions (deprecated) cannot have funcargs")
+        else:
+            if hasattr(self, "callspec"):
+                callspec = self.callspec
+                assert not callspec.funcargs
+                self._genid = callspec.id
+                if hasattr(callspec, "param"):
+                    self.param = callspec.param
+        self._request = fixtures.FixtureRequest(self)
+
+    @property
+    def function(self):
+        "underlying python 'function' object"
+        return getattr(self.obj, 'im_func', self.obj)
+
+    def _getobj(self):
+        name = self.name
+        i = name.find("[")  # parametrization
+        if i != -1:
+            name = name[:i]
+        return getattr(self.parent.obj, name)
+
+    @property
+    def _pyfuncitem(self):
+        "(compatonly) for code expecting pytest-2.2 style request objects"
+        return self
+
+    def _isyieldedfunction(self):
+        return getattr(self, "_args", None) is not None
+
+    def runtest(self):
+        """ execute the underlying test function.
""" + self.ihook.pytest_pyfunc_call(pyfuncitem=self) + + def setup(self): + super(Function, self).setup() + fixtures.fillfixtures(self) diff --git a/tests/_lib_vendors/_pytest/recwarn.py b/tests/_lib_vendors/_pytest/recwarn.py new file mode 100644 index 0000000..87823bf --- /dev/null +++ b/tests/_lib_vendors/_pytest/recwarn.py @@ -0,0 +1,226 @@ +""" recording warnings during test function execution. """ + +import inspect + +import _pytest._code +import py +import sys +import warnings +import pytest + + +@pytest.yield_fixture +def recwarn(request): + """Return a WarningsRecorder instance that provides these methods: + + * ``pop(category=None)``: return last warning matching the category. + * ``clear()``: clear list of warnings + + See http://docs.python.org/library/warnings.html for information + on warning categories. + """ + wrec = WarningsRecorder() + with wrec: + warnings.simplefilter('default') + yield wrec + + +def pytest_namespace(): + return {'deprecated_call': deprecated_call, + 'warns': warns} + + +def deprecated_call(func=None, *args, **kwargs): + """ assert that calling ``func(*args, **kwargs)`` triggers a + ``DeprecationWarning`` or ``PendingDeprecationWarning``. + + This function can be used as a context manager:: + + >>> import warnings + >>> def api_call_v2(): + ... warnings.warn('use v3 of this api', DeprecationWarning) + ... return 200 + + >>> with deprecated_call(): + ... assert api_call_v2() == 200 + + Note: we cannot use WarningsRecorder here because it is still subject + to the mechanism that prevents warnings of the same type from being + triggered twice for the same module. See #1190. + """ + if not func: + return WarningsChecker(expected_warning=DeprecationWarning) + + categories = [] + + def warn_explicit(message, category, *args, **kwargs): + categories.append(category) + old_warn_explicit(message, category, *args, **kwargs) + + def warn(message, category=None, *args, **kwargs): + if isinstance(message, Warning): + categories.append(message.__class__) + else: + categories.append(category) + old_warn(message, category, *args, **kwargs) + + old_warn = warnings.warn + old_warn_explicit = warnings.warn_explicit + warnings.warn_explicit = warn_explicit + warnings.warn = warn + try: + ret = func(*args, **kwargs) + finally: + warnings.warn_explicit = old_warn_explicit + warnings.warn = old_warn + deprecation_categories = (DeprecationWarning, PendingDeprecationWarning) + if not any(issubclass(c, deprecation_categories) for c in categories): + __tracebackhide__ = True + raise AssertionError("%r did not produce DeprecationWarning" % (func,)) + return ret + + +def warns(expected_warning, *args, **kwargs): + """Assert that code raises a particular class of warning. + + Specifically, the input @expected_warning can be a warning class or + tuple of warning classes, and the code must return that warning + (if a single class) or one of those warnings (if a tuple). + + This helper produces a list of ``warnings.WarningMessage`` objects, + one for each warning raised. + + This function can be used as a context manager, or any of the other ways + ``pytest.raises`` can be used:: + + >>> with warns(RuntimeWarning): + ... 
warnings.warn("my warning", RuntimeWarning) + """ + wcheck = WarningsChecker(expected_warning) + if not args: + return wcheck + elif isinstance(args[0], str): + code, = args + assert isinstance(code, str) + frame = sys._getframe(1) + loc = frame.f_locals.copy() + loc.update(kwargs) + + with wcheck: + code = _pytest._code.Source(code).compile() + py.builtin.exec_(code, frame.f_globals, loc) + else: + func = args[0] + with wcheck: + return func(*args[1:], **kwargs) + + +class RecordedWarning(object): + def __init__(self, message, category, filename, lineno, file, line): + self.message = message + self.category = category + self.filename = filename + self.lineno = lineno + self.file = file + self.line = line + + +class WarningsRecorder(object): + """A context manager to record raised warnings. + + Adapted from `warnings.catch_warnings`. + """ + + def __init__(self, module=None): + self._module = sys.modules['warnings'] if module is None else module + self._entered = False + self._list = [] + + @property + def list(self): + """The list of recorded warnings.""" + return self._list + + def __getitem__(self, i): + """Get a recorded warning by index.""" + return self._list[i] + + def __iter__(self): + """Iterate through the recorded warnings.""" + return iter(self._list) + + def __len__(self): + """The number of recorded warnings.""" + return len(self._list) + + def pop(self, cls=Warning): + """Pop the first recorded warning, raise exception if not exists.""" + for i, w in enumerate(self._list): + if issubclass(w.category, cls): + return self._list.pop(i) + __tracebackhide__ = True + raise AssertionError("%r not found in warning list" % cls) + + def clear(self): + """Clear the list of recorded warnings.""" + self._list[:] = [] + + def __enter__(self): + if self._entered: + __tracebackhide__ = True + raise RuntimeError("Cannot enter %r twice" % self) + self._entered = True + self._filters = self._module.filters + self._module.filters = self._filters[:] + self._showwarning = self._module.showwarning + + def showwarning(message, category, filename, lineno, + file=None, line=None): + self._list.append(RecordedWarning( + message, category, filename, lineno, file, line)) + + # still perform old showwarning functionality + self._showwarning( + message, category, filename, lineno, file=file, line=line) + + self._module.showwarning = showwarning + + # allow the same warning to be raised more than once + + self._module.simplefilter('always') + return self + + def __exit__(self, *exc_info): + if not self._entered: + __tracebackhide__ = True + raise RuntimeError("Cannot exit %r without entering first" % self) + self._module.filters = self._filters + self._module.showwarning = self._showwarning + + +class WarningsChecker(WarningsRecorder): + def __init__(self, expected_warning=None, module=None): + super(WarningsChecker, self).__init__(module=module) + + msg = ("exceptions must be old-style classes or " + "derived from Warning, not %s") + if isinstance(expected_warning, tuple): + for exc in expected_warning: + if not inspect.isclass(exc): + raise TypeError(msg % type(exc)) + elif inspect.isclass(expected_warning): + expected_warning = (expected_warning,) + elif expected_warning is not None: + raise TypeError(msg % type(expected_warning)) + + self.expected_warning = expected_warning + + def __exit__(self, *exc_info): + super(WarningsChecker, self).__exit__(*exc_info) + + # only check if we're not currently handling an exception + if all(a is None for a in exc_info): + if self.expected_warning is not None: + if 
not any(r.category in self.expected_warning for r in self): + __tracebackhide__ = True + pytest.fail("DID NOT WARN") diff --git a/tests/_lib_vendors/_pytest/resultlog.py b/tests/_lib_vendors/_pytest/resultlog.py new file mode 100644 index 0000000..fc00259 --- /dev/null +++ b/tests/_lib_vendors/_pytest/resultlog.py @@ -0,0 +1,107 @@ +""" log machine-parseable test session result information in a plain +text file. +""" + +import py +import os + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting", "resultlog plugin options") + group.addoption('--resultlog', '--result-log', action="store", + metavar="path", default=None, + help="DEPRECATED path for machine-readable result log.") + +def pytest_configure(config): + resultlog = config.option.resultlog + # prevent opening resultlog on slave nodes (xdist) + if resultlog and not hasattr(config, 'slaveinput'): + dirname = os.path.dirname(os.path.abspath(resultlog)) + if not os.path.isdir(dirname): + os.makedirs(dirname) + logfile = open(resultlog, 'w', 1) # line buffered + config._resultlog = ResultLog(config, logfile) + config.pluginmanager.register(config._resultlog) + + from _pytest.deprecated import RESULT_LOG + config.warn('C1', RESULT_LOG) + +def pytest_unconfigure(config): + resultlog = getattr(config, '_resultlog', None) + if resultlog: + resultlog.logfile.close() + del config._resultlog + config.pluginmanager.unregister(resultlog) + +def generic_path(item): + chain = item.listchain() + gpath = [chain[0].name] + fspath = chain[0].fspath + fspart = False + for node in chain[1:]: + newfspath = node.fspath + if newfspath == fspath: + if fspart: + gpath.append(':') + fspart = False + else: + gpath.append('.') + else: + gpath.append('/') + fspart = True + name = node.name + if name[0] in '([': + gpath.pop() + gpath.append(name) + fspath = newfspath + return ''.join(gpath) + +class ResultLog(object): + def __init__(self, config, logfile): + self.config = config + self.logfile = logfile # preferably line buffered + + def write_log_entry(self, testpath, lettercode, longrepr): + py.builtin.print_("%s %s" % (lettercode, testpath), file=self.logfile) + for line in longrepr.splitlines(): + py.builtin.print_(" %s" % line, file=self.logfile) + + def log_outcome(self, report, lettercode, longrepr): + testpath = getattr(report, 'nodeid', None) + if testpath is None: + testpath = report.fspath + self.write_log_entry(testpath, lettercode, longrepr) + + def pytest_runtest_logreport(self, report): + if report.when != "call" and report.passed: + return + res = self.config.hook.pytest_report_teststatus(report=report) + code = res[1] + if code == 'x': + longrepr = str(report.longrepr) + elif code == 'X': + longrepr = '' + elif report.passed: + longrepr = "" + elif report.failed: + longrepr = str(report.longrepr) + elif report.skipped: + longrepr = str(report.longrepr[2]) + self.log_outcome(report, code, longrepr) + + def pytest_collectreport(self, report): + if not report.passed: + if report.failed: + code = "F" + longrepr = str(report.longrepr) + else: + assert report.skipped + code = "S" + longrepr = "%s:%d: %s" % report.longrepr + self.log_outcome(report, code, longrepr) + + def pytest_internalerror(self, excrepr): + reprcrash = getattr(excrepr, 'reprcrash', None) + path = getattr(reprcrash, "path", None) + if path is None: + path = "cwd:%s" % py.path.local() + self.write_log_entry(path, '!', str(excrepr)) diff --git a/tests/_lib_vendors/_pytest/runner.py b/tests/_lib_vendors/_pytest/runner.py new file mode 100644 index 0000000..eb29e73 
--- /dev/null +++ b/tests/_lib_vendors/_pytest/runner.py @@ -0,0 +1,578 @@ +""" basic collect and runtest protocol implementations """ +import bdb +import sys +from time import time + +import py +import pytest +from _pytest._code.code import TerminalRepr, ExceptionInfo + + +def pytest_namespace(): + return { + 'fail' : fail, + 'skip' : skip, + 'importorskip' : importorskip, + 'exit' : exit, + } + +# +# pytest plugin hooks + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting", "reporting", after="general") + group.addoption('--durations', + action="store", type=int, default=None, metavar="N", + help="show N slowest setup/test durations (N=0 for all)."), + +def pytest_terminal_summary(terminalreporter): + durations = terminalreporter.config.option.durations + if durations is None: + return + tr = terminalreporter + dlist = [] + for replist in tr.stats.values(): + for rep in replist: + if hasattr(rep, 'duration'): + dlist.append(rep) + if not dlist: + return + dlist.sort(key=lambda x: x.duration) + dlist.reverse() + if not durations: + tr.write_sep("=", "slowest test durations") + else: + tr.write_sep("=", "slowest %s test durations" % durations) + dlist = dlist[:durations] + + for rep in dlist: + nodeid = rep.nodeid.replace("::()::", "::") + tr.write_line("%02.2fs %-8s %s" % + (rep.duration, rep.when, nodeid)) + +def pytest_sessionstart(session): + session._setupstate = SetupState() +def pytest_sessionfinish(session): + session._setupstate.teardown_all() + +class NodeInfo: + def __init__(self, location): + self.location = location + +def pytest_runtest_protocol(item, nextitem): + item.ihook.pytest_runtest_logstart( + nodeid=item.nodeid, location=item.location, + ) + runtestprotocol(item, nextitem=nextitem) + return True + +def runtestprotocol(item, log=True, nextitem=None): + hasrequest = hasattr(item, "_request") + if hasrequest and not item._request: + item._initrequest() + rep = call_and_report(item, "setup", log) + reports = [rep] + if rep.passed: + if item.config.option.setupshow: + show_test_item(item) + if not item.config.option.setuponly: + reports.append(call_and_report(item, "call", log)) + reports.append(call_and_report(item, "teardown", log, + nextitem=nextitem)) + # after all teardown hooks have been called + # want funcargs and request info to go away + if hasrequest: + item._request = False + item.funcargs = None + return reports + +def show_test_item(item): + """Show test function, parameters and the fixtures of the test item.""" + tw = item.config.get_terminal_writer() + tw.line() + tw.write(' ' * 8) + tw.write(item._nodeid) + used_fixtures = sorted(item._fixtureinfo.name2fixturedefs.keys()) + if used_fixtures: + tw.write(' (fixtures used: {0})'.format(', '.join(used_fixtures))) + +def pytest_runtest_setup(item): + item.session._setupstate.prepare(item) + +def pytest_runtest_call(item): + try: + item.runtest() + except Exception: + # Store trace info to allow postmortem debugging + type, value, tb = sys.exc_info() + tb = tb.tb_next # Skip *this* frame + sys.last_type = type + sys.last_value = value + sys.last_traceback = tb + del tb # Get rid of it in this namespace + raise + +def pytest_runtest_teardown(item, nextitem): + item.session._setupstate.teardown_exact(item, nextitem) + +def pytest_report_teststatus(report): + if report.when in ("setup", "teardown"): + if report.failed: + # category, shortletter, verbose-word + return "error", "E", "ERROR" + elif report.skipped: + return "skipped", "s", "SKIPPED" + else: + return "", "", "" + + +# +# 
Implementation
+
+def call_and_report(item, when, log=True, **kwds):
+    call = call_runtest_hook(item, when, **kwds)
+    hook = item.ihook
+    report = hook.pytest_runtest_makereport(item=item, call=call)
+    if log:
+        hook.pytest_runtest_logreport(report=report)
+    if check_interactive_exception(call, report):
+        hook.pytest_exception_interact(node=item, call=call, report=report)
+    return report
+
+def check_interactive_exception(call, report):
+    return call.excinfo and not (
+                hasattr(report, "wasxfail") or
+                call.excinfo.errisinstance(skip.Exception) or
+                call.excinfo.errisinstance(bdb.BdbQuit))
+
+def call_runtest_hook(item, when, **kwds):
+    hookname = "pytest_runtest_" + when
+    ihook = getattr(item.ihook, hookname)
+    return CallInfo(lambda: ihook(item=item, **kwds), when=when)
+
+class CallInfo:
+    """ Result/Exception info of a function invocation. """
+    #: None or ExceptionInfo object.
+    excinfo = None
+    def __init__(self, func, when):
+        #: context of invocation: one of "setup", "call",
+        #: "teardown", "memocollect"
+        self.when = when
+        self.start = time()
+        try:
+            self.result = func()
+        except KeyboardInterrupt:
+            self.stop = time()
+            raise
+        except:
+            self.excinfo = ExceptionInfo()
+        self.stop = time()
+
+    def __repr__(self):
+        if self.excinfo:
+            status = "exception: %s" % str(self.excinfo.value)
+        else:
+            status = "result: %r" % (self.result,)
+        return "<CallInfo when=%r %s>" % (self.when, status)
+
+def getslaveinfoline(node):
+    try:
+        return node._slaveinfocache
+    except AttributeError:
+        d = node.slaveinfo
+        ver = "%s.%s.%s" % d['version_info'][:3]
+        node._slaveinfocache = s = "[%s] %s -- Python %s %s" % (
+            d['id'], d['sysplatform'], ver, d['executable'])
+        return s
+
+class BaseReport(object):
+
+    def __init__(self, **kw):
+        self.__dict__.update(kw)
+
+    def toterminal(self, out):
+        if hasattr(self, 'node'):
+            out.line(getslaveinfoline(self.node))
+
+        longrepr = self.longrepr
+        if longrepr is None:
+            return
+
+        if hasattr(longrepr, 'toterminal'):
+            longrepr.toterminal(out)
+        else:
+            try:
+                out.line(longrepr)
+            except UnicodeEncodeError:
+                out.line("<unprintable longrepr>")
+
+    def get_sections(self, prefix):
+        for name, content in self.sections:
+            if name.startswith(prefix):
+                yield prefix, content
+
+    @property
+    def longreprtext(self):
+        """
+        Read-only property that returns the full string representation
+        of ``longrepr``.
+
+        .. versionadded:: 3.0
+        """
+        tw = py.io.TerminalWriter(stringio=True)
+        tw.hasmarkup = False
+        self.toterminal(tw)
+        exc = tw.stringio.getvalue()
+        return exc.strip()
+
+    @property
+    def capstdout(self):
+        """Return captured text from stdout, if capturing is enabled
+
+        .. versionadded:: 3.0
+        """
+        return ''.join(content for (prefix, content) in self.get_sections('Captured stdout'))
+
+    @property
+    def capstderr(self):
+        """Return captured text from stderr, if capturing is enabled
+
+        .. versionadded:: 3.0
+        """
+        return ''.join(content for (prefix, content) in self.get_sections('Captured stderr'))
+
+    passed = property(lambda x: x.outcome == "passed")
+    failed = property(lambda x: x.outcome == "failed")
+    skipped = property(lambda x: x.outcome == "skipped")
+
+    @property
+    def fspath(self):
+        return self.nodeid.split("::")[0]
+
+def pytest_runtest_makereport(item, call):
+    when = call.when
+    duration = call.stop-call.start
+    keywords = dict([(x,1) for x in item.keywords])
+    excinfo = call.excinfo
+    sections = []
+    if not call.excinfo:
+        outcome = "passed"
+        longrepr = None
+    else:
+        if not isinstance(excinfo, ExceptionInfo):
+            outcome = "failed"
+            longrepr = excinfo
+        elif excinfo.errisinstance(pytest.skip.Exception):
+            outcome = "skipped"
+            r = excinfo._getreprcrash()
+            longrepr = (str(r.path), r.lineno, r.message)
+        else:
+            outcome = "failed"
+            if call.when == "call":
+                longrepr = item.repr_failure(excinfo)
+            else: # exception in setup or teardown
+                longrepr = item._repr_failure_py(excinfo,
+                                            style=item.config.option.tbstyle)
+    for rwhen, key, content in item._report_sections:
+        sections.append(("Captured %s %s" %(key, rwhen), content))
+    return TestReport(item.nodeid, item.location,
+                      keywords, outcome, longrepr, when,
+                      sections, duration)
+
+class TestReport(BaseReport):
+    """ Basic test report object (also used for setup and teardown calls if
+    they fail).
+    """
+    def __init__(self, nodeid, location, keywords, outcome,
+                 longrepr, when, sections=(), duration=0, **extra):
+        #: normalized collection node id
+        self.nodeid = nodeid
+
+        #: a (filesystempath, lineno, domaininfo) tuple indicating the
+        #: actual location of a test item - it might be different from the
+        #: collected one e.g. if a method is inherited from a different module.
+        self.location = location
+
+        #: a name -> value dictionary containing all keywords and
+        #: markers associated with a test invocation.
+        self.keywords = keywords
+
+        #: test outcome, always one of "passed", "failed", "skipped".
+        self.outcome = outcome
+
+        #: None or a failure representation.
+        self.longrepr = longrepr
+
+        #: one of 'setup', 'call', 'teardown' to indicate runtest phase.
+        self.when = when
+
+        #: list of pairs ``(str, str)`` of extra information which needs to
+        #: be marshallable. Used by pytest to add captured text
+        #: from ``stdout`` and ``stderr``, but may be used by other plugins
+        #: to add arbitrary information to reports.
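+        #: An illustrative entry would be
+        #: ``("Captured stdout call", "...captured output...")``.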
+        self.sections = list(sections)
+
+        #: time it took to run just the test
+        self.duration = duration
+
+        self.__dict__.update(extra)
+
+    def __repr__(self):
+        return "<TestReport %r when=%r outcome=%r>" % (
+            self.nodeid, self.when, self.outcome)
+
+class TeardownErrorReport(BaseReport):
+    outcome = "failed"
+    when = "teardown"
+    def __init__(self, longrepr, **extra):
+        self.longrepr = longrepr
+        self.sections = []
+        self.__dict__.update(extra)
+
+def pytest_make_collect_report(collector):
+    call = CallInfo(collector._memocollect, "memocollect")
+    longrepr = None
+    if not call.excinfo:
+        outcome = "passed"
+    else:
+        from _pytest import nose
+        skip_exceptions = (Skipped,) + nose.get_skip_exceptions()
+        if call.excinfo.errisinstance(skip_exceptions):
+            outcome = "skipped"
+            r = collector._repr_failure_py(call.excinfo, "line").reprcrash
+            longrepr = (str(r.path), r.lineno, r.message)
+        else:
+            outcome = "failed"
+            errorinfo = collector.repr_failure(call.excinfo)
+            if not hasattr(errorinfo, "toterminal"):
+                errorinfo = CollectErrorRepr(errorinfo)
+            longrepr = errorinfo
+    rep = CollectReport(collector.nodeid, outcome, longrepr,
+                        getattr(call, 'result', None))
+    rep.call = call  # see collect_one_node
+    return rep
+
+
+class CollectReport(BaseReport):
+    def __init__(self, nodeid, outcome, longrepr, result,
+                 sections=(), **extra):
+        self.nodeid = nodeid
+        self.outcome = outcome
+        self.longrepr = longrepr
+        self.result = result or []
+        self.sections = list(sections)
+        self.__dict__.update(extra)
+
+    @property
+    def location(self):
+        return (self.fspath, None, self.fspath)
+
+    def __repr__(self):
+        return "<CollectReport %r lenresult=%s outcome=%r>" % (
+            self.nodeid, len(self.result), self.outcome)
+
+class CollectErrorRepr(TerminalRepr):
+    def __init__(self, msg):
+        self.longrepr = msg
+    def toterminal(self, out):
+        out.line(self.longrepr, red=True)
+
+class SetupState(object):
+    """ shared state for setting up/tearing down test items or collectors. """
+    def __init__(self):
+        self.stack = []
+        self._finalizers = {}
+
+    def addfinalizer(self, finalizer, colitem):
+        """ attach a finalizer to the given colitem.
+        if colitem is None, this will add a finalizer that
+        is called at the end of teardown_all().
+        """
+        assert colitem and not isinstance(colitem, tuple)
+        assert py.builtin.callable(finalizer)
+        #assert colitem in self.stack  # some unit tests don't setup stack :/
+        self._finalizers.setdefault(colitem, []).append(finalizer)
+
+    def _pop_and_teardown(self):
+        colitem = self.stack.pop()
+        self._teardown_with_finalization(colitem)
+
+    def _callfinalizers(self, colitem):
+        finalizers = self._finalizers.pop(colitem, None)
+        exc = None
+        while finalizers:
+            fin = finalizers.pop()
+            try:
+                fin()
+            except Exception:
+                # XXX Only first exception will be seen by user,
+                # ideally all should be reported.
+ if exc is None: + exc = sys.exc_info() + if exc: + py.builtin._reraise(*exc) + + def _teardown_with_finalization(self, colitem): + self._callfinalizers(colitem) + if hasattr(colitem, "teardown"): + colitem.teardown() + for colitem in self._finalizers: + assert colitem is None or colitem in self.stack \ + or isinstance(colitem, tuple) + + def teardown_all(self): + while self.stack: + self._pop_and_teardown() + for key in list(self._finalizers): + self._teardown_with_finalization(key) + assert not self._finalizers + + def teardown_exact(self, item, nextitem): + needed_collectors = nextitem and nextitem.listchain() or [] + self._teardown_towards(needed_collectors) + + def _teardown_towards(self, needed_collectors): + while self.stack: + if self.stack == needed_collectors[:len(self.stack)]: + break + self._pop_and_teardown() + + def prepare(self, colitem): + """ setup objects along the collector chain to the test-method + and teardown previously setup objects.""" + needed_collectors = colitem.listchain() + self._teardown_towards(needed_collectors) + + # check if the last collection node has raised an error + for col in self.stack: + if hasattr(col, '_prepare_exc'): + py.builtin._reraise(*col._prepare_exc) + for col in needed_collectors[len(self.stack):]: + self.stack.append(col) + try: + col.setup() + except Exception: + col._prepare_exc = sys.exc_info() + raise + +def collect_one_node(collector): + ihook = collector.ihook + ihook.pytest_collectstart(collector=collector) + rep = ihook.pytest_make_collect_report(collector=collector) + call = rep.__dict__.pop("call", None) + if call and check_interactive_exception(call, rep): + ihook.pytest_exception_interact(node=collector, call=call, report=rep) + return rep + + +# ============================================================= +# Test OutcomeExceptions and helpers for creating them. + + +class OutcomeException(Exception): + """ OutcomeException and its subclass instances indicate and + contain info about test and collection outcomes. + """ + def __init__(self, msg=None, pytrace=True): + Exception.__init__(self, msg) + self.msg = msg + self.pytrace = pytrace + + def __repr__(self): + if self.msg: + val = self.msg + if isinstance(val, bytes): + val = py._builtin._totext(val, errors='replace') + return val + return "<%s instance>" %(self.__class__.__name__,) + __str__ = __repr__ + +class Skipped(OutcomeException): + # XXX hackish: on 3k we fake to live in the builtins + # in order to have Skipped exception printing shorter/nicer + __module__ = 'builtins' + + def __init__(self, msg=None, pytrace=True, allow_module_level=False): + OutcomeException.__init__(self, msg=msg, pytrace=pytrace) + self.allow_module_level = allow_module_level + + +class Failed(OutcomeException): + """ raised from an explicit call to pytest.fail() """ + __module__ = 'builtins' + + +class Exit(KeyboardInterrupt): + """ raised for immediate program exits (no tracebacks/summaries)""" + def __init__(self, msg="unknown reason"): + self.msg = msg + KeyboardInterrupt.__init__(self, msg) + +# exposed helper methods + +def exit(msg): + """ exit testing process as if KeyboardInterrupt was triggered. """ + __tracebackhide__ = True + raise Exit(msg) + + +exit.Exception = Exit + + +def skip(msg=""): + """ skip an executing test with the given message. Note: it's usually + better to use the pytest.mark.skipif marker to declare a test to be + skipped under certain conditions like mismatching platforms or + dependencies. See the pytest_skipping plugin for details. 
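+
+    A minimal illustration (hypothetical test code)::
+
+        import pytest
+
+        def test_feature():
+            if not feature_available:   # 'feature_available' is made up
+                pytest.skip("feature not available in this build")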
+ """ + __tracebackhide__ = True + raise Skipped(msg=msg) + + +skip.Exception = Skipped + + +def fail(msg="", pytrace=True): + """ explicitly fail an currently-executing test with the given Message. + + :arg pytrace: if false the msg represents the full failure information + and no python traceback will be reported. + """ + __tracebackhide__ = True + raise Failed(msg=msg, pytrace=pytrace) + + +fail.Exception = Failed + + +def importorskip(modname, minversion=None): + """ return imported module if it has at least "minversion" as its + __version__ attribute. If no minversion is specified the a skip + is only triggered if the module can not be imported. + """ + __tracebackhide__ = True + compile(modname, '', 'eval') # to catch syntaxerrors + should_skip = False + try: + __import__(modname) + except ImportError: + # Do not raise chained exception here(#1485) + should_skip = True + if should_skip: + raise Skipped("could not import %r" %(modname,), allow_module_level=True) + mod = sys.modules[modname] + if minversion is None: + return mod + verattr = getattr(mod, '__version__', None) + if minversion is not None: + try: + from pkg_resources import parse_version as pv + except ImportError: + raise Skipped("we have a required version for %r but can not import " + "pkg_resources to parse version strings." % (modname,), + allow_module_level=True) + if verattr is None or pv(verattr) < pv(minversion): + raise Skipped("module %r has __version__ %r, required is: %r" %( + modname, verattr, minversion), allow_module_level=True) + return mod + diff --git a/tests/_lib_vendors/_pytest/setuponly.py b/tests/_lib_vendors/_pytest/setuponly.py new file mode 100644 index 0000000..1752c57 --- /dev/null +++ b/tests/_lib_vendors/_pytest/setuponly.py @@ -0,0 +1,72 @@ +import pytest +import sys + + +def pytest_addoption(parser): + group = parser.getgroup("debugconfig") + group.addoption('--setuponly', '--setup-only', action="store_true", + help="only setup fixtures, do not execute tests.") + group.addoption('--setupshow', '--setup-show', action="store_true", + help="show setup of fixtures while executing tests.") + + +@pytest.hookimpl(hookwrapper=True) +def pytest_fixture_setup(fixturedef, request): + yield + config = request.config + if config.option.setupshow: + if hasattr(request, 'param'): + # Save the fixture parameter so ._show_fixture_action() can + # display it now and during the teardown (in .finish()). 
+ if fixturedef.ids: + if callable(fixturedef.ids): + fixturedef.cached_param = fixturedef.ids(request.param) + else: + fixturedef.cached_param = fixturedef.ids[ + request.param_index] + else: + fixturedef.cached_param = request.param + _show_fixture_action(fixturedef, 'SETUP') + + +def pytest_fixture_post_finalizer(fixturedef): + if hasattr(fixturedef, "cached_result"): + config = fixturedef._fixturemanager.config + if config.option.setupshow: + _show_fixture_action(fixturedef, 'TEARDOWN') + if hasattr(fixturedef, "cached_param"): + del fixturedef.cached_param + + +def _show_fixture_action(fixturedef, msg): + config = fixturedef._fixturemanager.config + capman = config.pluginmanager.getplugin('capturemanager') + if capman: + out, err = capman.suspendcapture() + + tw = config.get_terminal_writer() + tw.line() + tw.write(' ' * 2 * fixturedef.scopenum) + tw.write('{step} {scope} {fixture}'.format( + step=msg.ljust(8), # align the output to TEARDOWN + scope=fixturedef.scope[0].upper(), + fixture=fixturedef.argname)) + + if msg == 'SETUP': + deps = sorted(arg for arg in fixturedef.argnames if arg != 'request') + if deps: + tw.write(' (fixtures used: {0})'.format(', '.join(deps))) + + if hasattr(fixturedef, 'cached_param'): + tw.write('[{0}]'.format(fixturedef.cached_param)) + + if capman: + capman.resumecapture() + sys.stdout.write(out) + sys.stderr.write(err) + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config): + if config.option.setuponly: + config.option.setupshow = True diff --git a/tests/_lib_vendors/_pytest/setupplan.py b/tests/_lib_vendors/_pytest/setupplan.py new file mode 100644 index 0000000..f0853de --- /dev/null +++ b/tests/_lib_vendors/_pytest/setupplan.py @@ -0,0 +1,23 @@ +import pytest + + +def pytest_addoption(parser): + group = parser.getgroup("debugconfig") + group.addoption('--setupplan', '--setup-plan', action="store_true", + help="show what fixtures and tests would be executed but " + "don't execute anything.") + + +@pytest.hookimpl(tryfirst=True) +def pytest_fixture_setup(fixturedef, request): + # Will return a dummy fixture if the setuponly option is provided. + if request.config.option.setupplan: + fixturedef.cached_result = (None, None, None) + return fixturedef.cached_result + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config): + if config.option.setupplan: + config.option.setuponly = True + config.option.setupshow = True diff --git a/tests/_lib_vendors/_pytest/skipping.py b/tests/_lib_vendors/_pytest/skipping.py new file mode 100644 index 0000000..a8eaea9 --- /dev/null +++ b/tests/_lib_vendors/_pytest/skipping.py @@ -0,0 +1,375 @@ +""" support for skip/xfail functions and markers. """ +import os +import sys +import traceback + +import py +import pytest +from _pytest.mark import MarkInfo, MarkDecorator + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group.addoption('--runxfail', + action="store_true", dest="runxfail", default=False, + help="run tests even if they are marked xfail") + + parser.addini("xfail_strict", "default for the strict parameter of xfail " + "markers when not given explicitly (default: " + "False)", + default=False, + type="bool") + + +def pytest_configure(config): + if config.option.runxfail: + old = pytest.xfail + config._cleanup.append(lambda: setattr(pytest, "xfail", old)) + + def nop(*args, **kwargs): + pass + + nop.Exception = XFailed + setattr(pytest, "xfail", nop) + + config.addinivalue_line("markers", + "skip(reason=None): skip the given test function with an optional reason. 
" + "Example: skip(reason=\"no way of currently testing this\") skips the " + "test." + ) + config.addinivalue_line("markers", + "skipif(condition): skip the given test function if eval(condition) " + "results in a True value. Evaluation happens within the " + "module global context. Example: skipif('sys.platform == \"win32\"') " + "skips the test if we are on the win32 platform. see " + "http://pytest.org/latest/skipping.html" + ) + config.addinivalue_line("markers", + "xfail(condition, reason=None, run=True, raises=None, strict=False): " + "mark the the test function as an expected failure if eval(condition) " + "has a True value. Optionally specify a reason for better reporting " + "and run=False if you don't even want to execute the test function. " + "If only specific exception(s) are expected, you can list them in " + "raises, and if the test fails in other ways, it will be reported as " + "a true failure. See http://pytest.org/latest/skipping.html" + ) + + +def pytest_namespace(): + return dict(xfail=xfail) + + +class XFailed(pytest.fail.Exception): + """ raised from an explicit call to pytest.xfail() """ + + +def xfail(reason=""): + """ xfail an executing test or setup functions with the given reason.""" + __tracebackhide__ = True + raise XFailed(reason) + + +xfail.Exception = XFailed + + +class MarkEvaluator: + def __init__(self, item, name): + self.item = item + self.name = name + + @property + def holder(self): + return self.item.keywords.get(self.name) + + def __bool__(self): + return bool(self.holder) + __nonzero__ = __bool__ + + def wasvalid(self): + return not hasattr(self, 'exc') + + def invalidraise(self, exc): + raises = self.get('raises') + if not raises: + return + return not isinstance(exc, raises) + + def istrue(self): + try: + return self._istrue() + except Exception: + self.exc = sys.exc_info() + if isinstance(self.exc[1], SyntaxError): + msg = [" " * (self.exc[1].offset + 4) + "^",] + msg.append("SyntaxError: invalid syntax") + else: + msg = traceback.format_exception_only(*self.exc[:2]) + pytest.fail("Error evaluating %r expression\n" + " %s\n" + "%s" + %(self.name, self.expr, "\n".join(msg)), + pytrace=False) + + def _getglobals(self): + d = {'os': os, 'sys': sys, 'config': self.item.config} + d.update(self.item.obj.__globals__) + return d + + def _istrue(self): + if hasattr(self, 'result'): + return self.result + if self.holder: + d = self._getglobals() + if self.holder.args or 'condition' in self.holder.kwargs: + self.result = False + # "holder" might be a MarkInfo or a MarkDecorator; only + # MarkInfo keeps track of all parameters it received in an + # _arglist attribute + if hasattr(self.holder, '_arglist'): + arglist = self.holder._arglist + else: + arglist = [(self.holder.args, self.holder.kwargs)] + for args, kwargs in arglist: + if 'condition' in kwargs: + args = (kwargs['condition'],) + for expr in args: + self.expr = expr + if isinstance(expr, py.builtin._basestring): + result = cached_eval(self.item.config, expr, d) + else: + if "reason" not in kwargs: + # XXX better be checked at collection time + msg = "you need to specify reason=STRING " \ + "when using booleans as conditions." 
+                                pytest.fail(msg)
+                            result = bool(expr)
+                        if result:
+                            self.result = True
+                            self.reason = kwargs.get('reason', None)
+                            self.expr = expr
+                            return self.result
+            else:
+                self.result = True
+        return getattr(self, 'result', False)
+
+    def get(self, attr, default=None):
+        return self.holder.kwargs.get(attr, default)
+
+    def getexplanation(self):
+        expl = getattr(self, 'reason', None) or self.get('reason', None)
+        if not expl:
+            if not hasattr(self, 'expr'):
+                return ""
+            else:
+                return "condition: " + str(self.expr)
+        return expl
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_runtest_setup(item):
+    # Check if skip or skipif are specified as pytest marks
+
+    skipif_info = item.keywords.get('skipif')
+    if isinstance(skipif_info, (MarkInfo, MarkDecorator)):
+        eval_skipif = MarkEvaluator(item, 'skipif')
+        if eval_skipif.istrue():
+            item._evalskip = eval_skipif
+            pytest.skip(eval_skipif.getexplanation())
+
+    skip_info = item.keywords.get('skip')
+    if isinstance(skip_info, (MarkInfo, MarkDecorator)):
+        item._evalskip = True
+        if 'reason' in skip_info.kwargs:
+            pytest.skip(skip_info.kwargs['reason'])
+        elif skip_info.args:
+            pytest.skip(skip_info.args[0])
+        else:
+            pytest.skip("unconditional skip")
+
+    item._evalxfail = MarkEvaluator(item, 'xfail')
+    check_xfail_no_run(item)
+
+
+@pytest.mark.hookwrapper
+def pytest_pyfunc_call(pyfuncitem):
+    check_xfail_no_run(pyfuncitem)
+    outcome = yield
+    passed = outcome.excinfo is None
+    if passed:
+        check_strict_xfail(pyfuncitem)
+
+
+def check_xfail_no_run(item):
+    """check xfail(run=False)"""
+    if not item.config.option.runxfail:
+        evalxfail = item._evalxfail
+        if evalxfail.istrue():
+            if not evalxfail.get('run', True):
+                pytest.xfail("[NOTRUN] " + evalxfail.getexplanation())
+
+
+def check_strict_xfail(pyfuncitem):
+    """check xfail(strict=True) for the given PASSING test"""
+    evalxfail = pyfuncitem._evalxfail
+    if evalxfail.istrue():
+        strict_default = pyfuncitem.config.getini('xfail_strict')
+        is_strict_xfail = evalxfail.get('strict', strict_default)
+        if is_strict_xfail:
+            del pyfuncitem._evalxfail
+            explanation = evalxfail.getexplanation()
+            pytest.fail('[XPASS(strict)] ' + explanation, pytrace=False)
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_runtest_makereport(item, call):
+    outcome = yield
+    rep = outcome.get_result()
+    evalxfail = getattr(item, '_evalxfail', None)
+    evalskip = getattr(item, '_evalskip', None)
+    # unittest special case, see setting of _unexpectedsuccess
+    if hasattr(item, '_unexpectedsuccess') and rep.when == "call":
+        from _pytest.compat import _is_unittest_unexpected_success_a_failure
+        if item._unexpectedsuccess:
+            rep.longrepr = "Unexpected success: {0}".format(item._unexpectedsuccess)
+        else:
+            rep.longrepr = "Unexpected success"
+        if _is_unittest_unexpected_success_a_failure():
+            rep.outcome = "failed"
+        else:
+            rep.outcome = "passed"
+        rep.wasxfail = rep.longrepr
+    elif item.config.option.runxfail:
+        pass   # don't interfere
+    elif call.excinfo and call.excinfo.errisinstance(pytest.xfail.Exception):
+        rep.wasxfail = "reason: " + call.excinfo.value.msg
+        rep.outcome = "skipped"
+    elif evalxfail and not rep.skipped and evalxfail.wasvalid() and \
+            evalxfail.istrue():
+        if call.excinfo:
+            if evalxfail.invalidraise(call.excinfo.value):
+                rep.outcome = "failed"
+            else:
+                rep.outcome = "skipped"
+                rep.wasxfail = evalxfail.getexplanation()
+        elif call.when == "call":
+            strict_default = item.config.getini('xfail_strict')
+            is_strict_xfail = evalxfail.get('strict', strict_default)
+            explanation = evalxfail.getexplanation()
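+            # an xfail-marked test that actually passed ends up here: with
+            # strict=True it is turned into a failure just below, otherwise
+            # it is reported as xpassed via the ``wasxfail`` attribute
+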
if is_strict_xfail: + rep.outcome = "failed" + rep.longrepr = "[XPASS(strict)] {0}".format(explanation) + else: + rep.outcome = "passed" + rep.wasxfail = explanation + elif evalskip is not None and rep.skipped and type(rep.longrepr) is tuple: + # skipped by mark.skipif; change the location of the failure + # to point to the item definition, otherwise it will display + # the location of where the skip exception was raised within pytest + filename, line, reason = rep.longrepr + filename, line = item.location[:2] + rep.longrepr = filename, line, reason + +# called by terminalreporter progress reporting +def pytest_report_teststatus(report): + if hasattr(report, "wasxfail"): + if report.skipped: + return "xfailed", "x", "xfail" + elif report.passed: + return "xpassed", "X", ("XPASS", {'yellow': True}) + +# called by the terminalreporter instance/plugin +def pytest_terminal_summary(terminalreporter): + tr = terminalreporter + if not tr.reportchars: + #for name in "xfailed skipped failed xpassed": + # if not tr.stats.get(name, 0): + # tr.write_line("HINT: use '-r' option to see extra " + # "summary info about tests") + # break + return + + lines = [] + for char in tr.reportchars: + if char == "x": + show_xfailed(terminalreporter, lines) + elif char == "X": + show_xpassed(terminalreporter, lines) + elif char in "fF": + show_simple(terminalreporter, lines, 'failed', "FAIL %s") + elif char in "sS": + show_skipped(terminalreporter, lines) + elif char == "E": + show_simple(terminalreporter, lines, 'error', "ERROR %s") + elif char == 'p': + show_simple(terminalreporter, lines, 'passed', "PASSED %s") + + if lines: + tr._tw.sep("=", "short test summary info") + for line in lines: + tr._tw.line(line) + +def show_simple(terminalreporter, lines, stat, format): + failed = terminalreporter.stats.get(stat) + if failed: + for rep in failed: + pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid) + lines.append(format %(pos,)) + +def show_xfailed(terminalreporter, lines): + xfailed = terminalreporter.stats.get("xfailed") + if xfailed: + for rep in xfailed: + pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid) + reason = rep.wasxfail + lines.append("XFAIL %s" % (pos,)) + if reason: + lines.append(" " + str(reason)) + +def show_xpassed(terminalreporter, lines): + xpassed = terminalreporter.stats.get("xpassed") + if xpassed: + for rep in xpassed: + pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid) + reason = rep.wasxfail + lines.append("XPASS %s %s" %(pos, reason)) + +def cached_eval(config, expr, d): + if not hasattr(config, '_evalcache'): + config._evalcache = {} + try: + return config._evalcache[expr] + except KeyError: + import _pytest._code + exprcode = _pytest._code.compile(expr, mode="eval") + config._evalcache[expr] = x = eval(exprcode, d) + return x + + +def folded_skips(skipped): + d = {} + for event in skipped: + key = event.longrepr + assert len(key) == 3, (event, key) + d.setdefault(key, []).append(event) + l = [] + for key, events in d.items(): + l.append((len(events),) + key) + return l + +def show_skipped(terminalreporter, lines): + tr = terminalreporter + skipped = tr.stats.get('skipped', []) + if skipped: + #if not tr.hasopt('skipped'): + # tr.write_line( + # "%d skipped tests, specify -rs for more info" % + # len(skipped)) + # return + fskips = folded_skips(skipped) + if fskips: + #tr.write_sep("_", "skipped test summary") + for num, fspath, lineno, reason in fskips: + if reason.startswith("Skipped: "): + reason = reason[9:] + lines.append("SKIP [%d] %s:%d: %s" % + 
(num, fspath, lineno, reason)) diff --git a/tests/_lib_vendors/_pytest/terminal.py b/tests/_lib_vendors/_pytest/terminal.py new file mode 100644 index 0000000..16bf757 --- /dev/null +++ b/tests/_lib_vendors/_pytest/terminal.py @@ -0,0 +1,593 @@ +""" terminal reporting of the full testing process. + +This is a good source for looking at the various reporting hooks. +""" +from _pytest.main import EXIT_OK, EXIT_TESTSFAILED, EXIT_INTERRUPTED, \ + EXIT_USAGEERROR, EXIT_NOTESTSCOLLECTED +import pytest +import py +import sys +import time +import platform + +import _pytest._pluggy as pluggy + + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting", "reporting", after="general") + group._addoption('-v', '--verbose', action="count", + dest="verbose", default=0, help="increase verbosity."), + group._addoption('-q', '--quiet', action="count", + dest="quiet", default=0, help="decrease verbosity."), + group._addoption('-r', + action="store", dest="reportchars", default='', metavar="chars", + help="show extra test summary info as specified by chars (f)ailed, " + "(E)error, (s)skipped, (x)failed, (X)passed, " + "(p)passed, (P)passed with output, (a)all except pP. " + "The pytest warnings are displayed at all times except when " + "--disable-pytest-warnings is set") + group._addoption('--disable-pytest-warnings', default=False, + dest='disablepytestwarnings', action='store_true', + help='disable warnings summary, overrides -r w flag') + group._addoption('-l', '--showlocals', + action="store_true", dest="showlocals", default=False, + help="show locals in tracebacks (disabled by default).") + group._addoption('--tb', metavar="style", + action="store", dest="tbstyle", default='auto', + choices=['auto', 'long', 'short', 'no', 'line', 'native'], + help="traceback print mode (auto/long/short/line/native/no).") + group._addoption('--fulltrace', '--full-trace', + action="store_true", default=False, + help="don't cut any tracebacks (default is to cut).") + group._addoption('--color', metavar="color", + action="store", dest="color", default='auto', + choices=['yes', 'no', 'auto'], + help="color terminal output (yes/no/auto).") + +def pytest_configure(config): + config.option.verbose -= config.option.quiet + reporter = TerminalReporter(config, sys.stdout) + config.pluginmanager.register(reporter, 'terminalreporter') + if config.option.debug or config.option.traceconfig: + def mywriter(tags, args): + msg = " ".join(map(str, args)) + reporter.write_line("[traceconfig] " + msg) + config.trace.root.setprocessor("pytest:config", mywriter) + +def getreportopt(config): + reportopts = "" + reportchars = config.option.reportchars + if not config.option.disablepytestwarnings and 'w' not in reportchars: + reportchars += 'w' + elif config.option.disablepytestwarnings and 'w' in reportchars: + reportchars = reportchars.replace('w', '') + if reportchars: + for char in reportchars: + if char not in reportopts and char != 'a': + reportopts += char + elif char == 'a': + reportopts = 'fEsxXw' + return reportopts + +def pytest_report_teststatus(report): + if report.passed: + letter = "." 
+ elif report.skipped: + letter = "s" + elif report.failed: + letter = "F" + if report.when != "call": + letter = "f" + return report.outcome, letter, report.outcome.upper() + +class WarningReport: + def __init__(self, code, message, nodeid=None, fslocation=None): + self.code = code + self.message = message + self.nodeid = nodeid + self.fslocation = fslocation + + +class TerminalReporter: + def __init__(self, config, file=None): + import _pytest.config + self.config = config + self.verbosity = self.config.option.verbose + self.showheader = self.verbosity >= 0 + self.showfspath = self.verbosity >= 0 + self.showlongtestinfo = self.verbosity > 0 + self._numcollected = 0 + + self.stats = {} + self.startdir = py.path.local() + if file is None: + file = sys.stdout + self._tw = self.writer = _pytest.config.create_terminal_writer(config, + file) + self.currentfspath = None + self.reportchars = getreportopt(config) + self.hasmarkup = self._tw.hasmarkup + self.isatty = file.isatty() + + def hasopt(self, char): + char = {'xfailed': 'x', 'skipped': 's'}.get(char, char) + return char in self.reportchars + + def write_fspath_result(self, nodeid, res): + fspath = self.config.rootdir.join(nodeid.split("::")[0]) + if fspath != self.currentfspath: + self.currentfspath = fspath + fspath = self.startdir.bestrelpath(fspath) + self._tw.line() + self._tw.write(fspath + " ") + self._tw.write(res) + + def write_ensure_prefix(self, prefix, extra="", **kwargs): + if self.currentfspath != prefix: + self._tw.line() + self.currentfspath = prefix + self._tw.write(prefix) + if extra: + self._tw.write(extra, **kwargs) + self.currentfspath = -2 + + def ensure_newline(self): + if self.currentfspath: + self._tw.line() + self.currentfspath = None + + def write(self, content, **markup): + self._tw.write(content, **markup) + + def write_line(self, line, **markup): + if not py.builtin._istext(line): + line = py.builtin.text(line, errors="replace") + self.ensure_newline() + self._tw.line(line, **markup) + + def rewrite(self, line, **markup): + line = str(line) + self._tw.write("\r" + line, **markup) + + def write_sep(self, sep, title=None, **markup): + self.ensure_newline() + self._tw.sep(sep, title, **markup) + + def section(self, title, sep="=", **kw): + self._tw.sep(sep, title, **kw) + + def line(self, msg, **kw): + self._tw.line(msg, **kw) + + def pytest_internalerror(self, excrepr): + for line in py.builtin.text(excrepr).split("\n"): + self.write_line("INTERNALERROR> " + line) + return 1 + + def pytest_logwarning(self, code, fslocation, message, nodeid): + warnings = self.stats.setdefault("warnings", []) + if isinstance(fslocation, tuple): + fslocation = "%s:%d" % fslocation + warning = WarningReport(code=code, fslocation=fslocation, + message=message, nodeid=nodeid) + warnings.append(warning) + + def pytest_plugin_registered(self, plugin): + if self.config.option.traceconfig: + msg = "PLUGIN registered: %s" % (plugin,) + # XXX this event may happen during setup/teardown time + # which unfortunately captures our output here + # which garbles our output if we use self.write_line + self.write_line(msg) + + def pytest_deselected(self, items): + self.stats.setdefault('deselected', []).extend(items) + + def pytest_runtest_logstart(self, nodeid, location): + # ensure that the path is printed before the + # 1st test of a module starts running + if self.showlongtestinfo: + line = self._locationline(nodeid, *location) + self.write_ensure_prefix(line, "") + elif self.showfspath: + fsid = nodeid.split("::")[0] + 
self.write_fspath_result(fsid, "") + + def pytest_runtest_logreport(self, report): + rep = report + res = self.config.hook.pytest_report_teststatus(report=rep) + cat, letter, word = res + self.stats.setdefault(cat, []).append(rep) + self._tests_ran = True + if not letter and not word: + # probably passed setup/teardown + return + if self.verbosity <= 0: + if not hasattr(rep, 'node') and self.showfspath: + self.write_fspath_result(rep.nodeid, letter) + else: + self._tw.write(letter) + else: + if isinstance(word, tuple): + word, markup = word + else: + if rep.passed: + markup = {'green':True} + elif rep.failed: + markup = {'red':True} + elif rep.skipped: + markup = {'yellow':True} + line = self._locationline(rep.nodeid, *rep.location) + if not hasattr(rep, 'node'): + self.write_ensure_prefix(line, word, **markup) + #self._tw.write(word, **markup) + else: + self.ensure_newline() + if hasattr(rep, 'node'): + self._tw.write("[%s] " % rep.node.gateway.id) + self._tw.write(word, **markup) + self._tw.write(" " + line) + self.currentfspath = -2 + + def pytest_collection(self): + if not self.isatty and self.config.option.verbose >= 1: + self.write("collecting ... ", bold=True) + + def pytest_collectreport(self, report): + if report.failed: + self.stats.setdefault("error", []).append(report) + elif report.skipped: + self.stats.setdefault("skipped", []).append(report) + items = [x for x in report.result if isinstance(x, pytest.Item)] + self._numcollected += len(items) + if self.isatty: + #self.write_fspath_result(report.nodeid, 'E') + self.report_collect() + + def report_collect(self, final=False): + if self.config.option.verbose < 0: + return + + errors = len(self.stats.get('error', [])) + skipped = len(self.stats.get('skipped', [])) + if final: + line = "collected " + else: + line = "collecting " + line += str(self._numcollected) + " items" + if errors: + line += " / %d errors" % errors + if skipped: + line += " / %d skipped" % skipped + if self.isatty: + if final: + line += " \n" + self.rewrite(line, bold=True) + else: + self.write_line(line) + + def pytest_collection_modifyitems(self): + self.report_collect(True) + + @pytest.hookimpl(trylast=True) + def pytest_sessionstart(self, session): + self._sessionstarttime = time.time() + if not self.showheader: + return + self.write_sep("=", "test session starts", bold=True) + verinfo = platform.python_version() + msg = "platform %s -- Python %s" % (sys.platform, verinfo) + if hasattr(sys, 'pypy_version_info'): + verinfo = ".".join(map(str, sys.pypy_version_info[:3])) + msg += "[pypy-%s-%s]" % (verinfo, sys.pypy_version_info[3]) + msg += ", pytest-%s, py-%s, pluggy-%s" % ( + pytest.__version__, py.__version__, pluggy.__version__) + if self.verbosity > 0 or self.config.option.debug or \ + getattr(self.config.option, 'pastebin', None): + msg += " -- " + str(sys.executable) + self.write_line(msg) + lines = self.config.hook.pytest_report_header( + config=self.config, startdir=self.startdir) + lines.reverse() + for line in flatten(lines): + self.write_line(line) + + def pytest_report_header(self, config): + inifile = "" + if config.inifile: + inifile = config.rootdir.bestrelpath(config.inifile) + lines = ["rootdir: %s, inifile: %s" %(config.rootdir, inifile)] + + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + + lines.append( + "plugins: %s" % ", ".join(_plugin_nameversions(plugininfo))) + return lines + + def pytest_collection_finish(self, session): + if self.config.option.collectonly: + self._printcollecteditems(session.items) + if 
self.stats.get('failed'): + self._tw.sep("!", "collection failures") + for rep in self.stats.get('failed'): + rep.toterminal(self._tw) + return 1 + return 0 + if not self.showheader: + return + #for i, testarg in enumerate(self.config.args): + # self.write_line("test path %d: %s" %(i+1, testarg)) + + def _printcollecteditems(self, items): + # to print out items and their parent collectors + # we take care to leave out Instances aka () + # because later versions are going to get rid of them anyway + if self.config.option.verbose < 0: + if self.config.option.verbose < -1: + counts = {} + for item in items: + name = item.nodeid.split('::', 1)[0] + counts[name] = counts.get(name, 0) + 1 + for name, count in sorted(counts.items()): + self._tw.line("%s: %d" % (name, count)) + else: + for item in items: + nodeid = item.nodeid + nodeid = nodeid.replace("::()::", "::") + self._tw.line(nodeid) + return + stack = [] + indent = "" + for item in items: + needed_collectors = item.listchain()[1:] # strip root node + while stack: + if stack == needed_collectors[:len(stack)]: + break + stack.pop() + for col in needed_collectors[len(stack):]: + stack.append(col) + #if col.name == "()": + # continue + indent = (len(stack) - 1) * " " + self._tw.line("%s%s" % (indent, col)) + + @pytest.hookimpl(hookwrapper=True) + def pytest_sessionfinish(self, exitstatus): + outcome = yield + outcome.get_result() + self._tw.line("") + summary_exit_codes = ( + EXIT_OK, EXIT_TESTSFAILED, EXIT_INTERRUPTED, EXIT_USAGEERROR, + EXIT_NOTESTSCOLLECTED) + if exitstatus in summary_exit_codes: + self.config.hook.pytest_terminal_summary(terminalreporter=self, + exitstatus=exitstatus) + self.summary_errors() + self.summary_failures() + self.summary_warnings() + self.summary_passes() + if exitstatus == EXIT_INTERRUPTED: + self._report_keyboardinterrupt() + del self._keyboardinterrupt_memo + self.summary_deselected() + self.summary_stats() + + def pytest_keyboard_interrupt(self, excinfo): + self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True) + + def pytest_unconfigure(self): + if hasattr(self, '_keyboardinterrupt_memo'): + self._report_keyboardinterrupt() + + def _report_keyboardinterrupt(self): + excrepr = self._keyboardinterrupt_memo + msg = excrepr.reprcrash.message + self.write_sep("!", msg) + if "KeyboardInterrupt" in msg: + if self.config.option.fulltrace: + excrepr.toterminal(self._tw) + else: + self._tw.line("to show a full traceback on KeyboardInterrupt use --fulltrace", yellow=True) + excrepr.reprcrash.toterminal(self._tw) + + def _locationline(self, nodeid, fspath, lineno, domain): + def mkrel(nodeid): + line = self.config.cwd_relative_nodeid(nodeid) + if domain and line.endswith(domain): + line = line[:-len(domain)] + l = domain.split("[") + l[0] = l[0].replace('.', '::') # don't replace '.' in params + line += "[".join(l) + return line + # collect_fspath comes from testid which has a "/"-normalized path + + if fspath: + res = mkrel(nodeid).replace("::()", "") # parens-normalization + if nodeid.split("::")[0] != fspath.replace("\\", "/"): + res += " <- " + self.startdir.bestrelpath(fspath) + else: + res = "[location]" + return res + " " + + def _getfailureheadline(self, rep): + if hasattr(rep, 'location'): + fspath, lineno, domain = rep.location + return domain + else: + return "test session" # XXX? 
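+    # (for a test item ``location`` is ``(fspath, lineno, domain)``; the
+    # ``domain`` part, e.g. "TestClass.test_method", is what becomes the
+    # "_ ... _" headline in the failure summaries below)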
+ + def _getcrashline(self, rep): + try: + return str(rep.longrepr.reprcrash) + except AttributeError: + try: + return str(rep.longrepr)[:50] + except AttributeError: + return "" + + # + # summaries for sessionfinish + # + def getreports(self, name): + l = [] + for x in self.stats.get(name, []): + if not hasattr(x, '_pdbshown'): + l.append(x) + return l + + def summary_warnings(self): + if self.hasopt("w"): + warnings = self.stats.get("warnings") + if not warnings: + return + self.write_sep("=", "pytest-warning summary") + for w in warnings: + self._tw.line("W%s %s %s" % (w.code, + w.fslocation, w.message)) + + def summary_passes(self): + if self.config.option.tbstyle != "no": + if self.hasopt("P"): + reports = self.getreports('passed') + if not reports: + return + self.write_sep("=", "PASSES") + for rep in reports: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg) + self._outrep_summary(rep) + + def print_teardown_sections(self, rep): + for secname, content in rep.sections: + if 'teardown' in secname: + self._tw.sep('-', secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + + def summary_failures(self): + if self.config.option.tbstyle != "no": + reports = self.getreports('failed') + if not reports: + return + self.write_sep("=", "FAILURES") + for rep in reports: + if self.config.option.tbstyle == "line": + line = self._getcrashline(rep) + self.write_line(line) + else: + msg = self._getfailureheadline(rep) + markup = {'red': True, 'bold': True} + self.write_sep("_", msg, **markup) + self._outrep_summary(rep) + for report in self.getreports(''): + if report.nodeid == rep.nodeid and report.when == 'teardown': + self.print_teardown_sections(report) + + def summary_errors(self): + if self.config.option.tbstyle != "no": + reports = self.getreports('error') + if not reports: + return + self.write_sep("=", "ERRORS") + for rep in self.stats['error']: + msg = self._getfailureheadline(rep) + if not hasattr(rep, 'when'): + # collect + msg = "ERROR collecting " + msg + elif rep.when == "setup": + msg = "ERROR at setup of " + msg + elif rep.when == "teardown": + msg = "ERROR at teardown of " + msg + self.write_sep("_", msg) + self._outrep_summary(rep) + + def _outrep_summary(self, rep): + rep.toterminal(self._tw) + for secname, content in rep.sections: + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_stats(self): + session_duration = time.time() - self._sessionstarttime + (line, color) = build_summary_stats_line(self.stats) + msg = "%s in %.2f seconds" % (line, session_duration) + markup = {color: True, 'bold': True} + + if self.verbosity >= 0: + self.write_sep("=", msg, **markup) + if self.verbosity == -1: + self.write_line(msg, **markup) + + def summary_deselected(self): + if 'deselected' in self.stats: + self.write_sep("=", "%d tests deselected" % ( + len(self.stats['deselected'])), bold=True) + +def repr_pythonversion(v=None): + if v is None: + v = sys.version_info + try: + return "%s.%s.%s-%s-%s" % v + except (TypeError, ValueError): + return str(v) + +def flatten(l): + for x in l: + if isinstance(x, (list, tuple)): + for y in flatten(x): + yield y + else: + yield x + +def build_summary_stats_line(stats): + keys = ("failed passed skipped deselected " + "xfailed xpassed warnings error").split() + key_translation = {'warnings': 'pytest-warnings'} + unknown_key_seen = False + for key in stats.keys(): + if key not in keys: + if key: # setup/teardown reports have an empty key, 
ignore them + keys.append(key) + unknown_key_seen = True + parts = [] + for key in keys: + val = stats.get(key, None) + if val: + key_name = key_translation.get(key, key) + parts.append("%d %s" % (len(val), key_name)) + + if parts: + line = ", ".join(parts) + else: + line = "no tests ran" + + if 'failed' in stats or 'error' in stats: + color = 'red' + elif 'warnings' in stats or unknown_key_seen: + color = 'yellow' + elif 'passed' in stats: + color = 'green' + else: + color = 'yellow' + + return (line, color) + + +def _plugin_nameversions(plugininfo): + l = [] + for plugin, dist in plugininfo: + # gets us name and version! + name = '{dist.project_name}-{dist.version}'.format(dist=dist) + # questionable convenience, but it keeps things short + if name.startswith("pytest-"): + name = name[7:] + # we decided to print python package names + # they can have more than one plugin + if name not in l: + l.append(name) + return l diff --git a/tests/_lib_vendors/_pytest/tmpdir.py b/tests/_lib_vendors/_pytest/tmpdir.py new file mode 100644 index 0000000..28a6b06 --- /dev/null +++ b/tests/_lib_vendors/_pytest/tmpdir.py @@ -0,0 +1,124 @@ +""" support for providing temporary directories to test functions. """ +import re + +import pytest +import py +from _pytest.monkeypatch import MonkeyPatch + + +class TempdirFactory: + """Factory for temporary directories under the common base temp directory. + + The base directory can be configured using the ``--basetemp`` option. + """ + + def __init__(self, config): + self.config = config + self.trace = config.trace.get("tmpdir") + + def ensuretemp(self, string, dir=1): + """ (deprecated) return temporary directory path with + the given string as the trailing part. It is usually + better to use the 'tmpdir' function argument which + provides an empty unique-per-test-invocation directory + and is guaranteed to be empty. + """ + #py.log._apiwarn(">1.1", "use tmpdir function argument") + return self.getbasetemp().ensure(string, dir=dir) + + def mktemp(self, basename, numbered=True): + """Create a subdirectory of the base temporary directory and return it. + If ``numbered``, ensure the directory is unique by adding a number + prefix greater than any existing one. + """ + basetemp = self.getbasetemp() + if not numbered: + p = basetemp.mkdir(basename) + else: + p = py.path.local.make_numbered_dir(prefix=basename, + keep=0, rootdir=basetemp, lock_timeout=None) + self.trace("mktemp", p) + return p + + def getbasetemp(self): + """ return base temporary directory. """ + try: + return self._basetemp + except AttributeError: + basetemp = self.config.option.basetemp + if basetemp: + basetemp = py.path.local(basetemp) + if basetemp.check(): + basetemp.remove() + basetemp.mkdir() + else: + temproot = py.path.local.get_temproot() + user = get_user() + if user: + # use a sub-directory in the temproot to speed-up + # make_numbered_dir() call + rootdir = temproot.join('pytest-of-%s' % user) + else: + rootdir = temproot + rootdir.ensure(dir=1) + basetemp = py.path.local.make_numbered_dir(prefix='pytest-', + rootdir=rootdir) + self._basetemp = t = basetemp.realpath() + self.trace("new basetemp", t) + return t + + def finish(self): + self.trace("finish") + + +def get_user(): + """Return the current user name, or None if getuser() does not work + in the current environment (see #1010). 
+ """ + import getpass + try: + return getpass.getuser() + except (ImportError, KeyError): + return None + + +# backward compatibility +TempdirHandler = TempdirFactory + + +def pytest_configure(config): + """Create a TempdirFactory and attach it to the config object. + + This is to comply with existing plugins which expect the handler to be + available at pytest_configure time, but ideally should be moved entirely + to the tmpdir_factory session fixture. + """ + mp = MonkeyPatch() + t = TempdirFactory(config) + config._cleanup.extend([mp.undo, t.finish]) + mp.setattr(config, '_tmpdirhandler', t, raising=False) + mp.setattr(pytest, 'ensuretemp', t.ensuretemp, raising=False) + + +@pytest.fixture(scope='session') +def tmpdir_factory(request): + """Return a TempdirFactory instance for the test session. + """ + return request.config._tmpdirhandler + + +@pytest.fixture +def tmpdir(request, tmpdir_factory): + """Return a temporary directory path object + which is unique to each test function invocation, + created as a sub directory of the base temporary + directory. The returned object is a `py.path.local`_ + path object. + """ + name = request.node.name + name = re.sub("[\W]", "_", name) + MAXVAL = 30 + if len(name) > MAXVAL: + name = name[:MAXVAL] + x = tmpdir_factory.mktemp(name, numbered=True) + return x diff --git a/tests/_lib_vendors/_pytest/unittest.py b/tests/_lib_vendors/_pytest/unittest.py new file mode 100644 index 0000000..7322401 --- /dev/null +++ b/tests/_lib_vendors/_pytest/unittest.py @@ -0,0 +1,217 @@ +""" discovery and running of std-library "unittest" style tests. """ +from __future__ import absolute_import + +import sys +import traceback + +import pytest +# for transfering markers +import _pytest._code +from _pytest.python import transfer_markers +from _pytest.skipping import MarkEvaluator + + +def pytest_pycollect_makeitem(collector, name, obj): + # has unittest been imported and is obj a subclass of its TestCase? 
+ try: + if not issubclass(obj, sys.modules["unittest"].TestCase): + return + except Exception: + return + # yes, so let's collect it + return UnitTestCase(name, parent=collector) + + +class UnitTestCase(pytest.Class): + # marker for fixturemanger.getfixtureinfo() + # to declare that our children do not support funcargs + nofuncargs = True + + def setup(self): + cls = self.obj + if getattr(cls, '__unittest_skip__', False): + return # skipped + setup = getattr(cls, 'setUpClass', None) + if setup is not None: + setup() + teardown = getattr(cls, 'tearDownClass', None) + if teardown is not None: + self.addfinalizer(teardown) + super(UnitTestCase, self).setup() + + def collect(self): + from unittest import TestLoader + cls = self.obj + if not getattr(cls, "__test__", True): + return + self.session._fixturemanager.parsefactories(self, unittest=True) + loader = TestLoader() + module = self.getparent(pytest.Module).obj + foundsomething = False + for name in loader.getTestCaseNames(self.obj): + x = getattr(self.obj, name) + if not getattr(x, '__test__', True): + continue + funcobj = getattr(x, 'im_func', x) + transfer_markers(funcobj, cls, module) + yield TestCaseFunction(name, parent=self) + foundsomething = True + + if not foundsomething: + runtest = getattr(self.obj, 'runTest', None) + if runtest is not None: + ut = sys.modules.get("twisted.trial.unittest", None) + if ut is None or runtest != ut.TestCase.runTest: + yield TestCaseFunction('runTest', parent=self) + + + +class TestCaseFunction(pytest.Function): + _excinfo = None + + def setup(self): + self._testcase = self.parent.obj(self.name) + self._fix_unittest_skip_decorator() + self._obj = getattr(self._testcase, self.name) + if hasattr(self._testcase, 'setup_method'): + self._testcase.setup_method(self._obj) + if hasattr(self, "_request"): + self._request._fillfixtures() + + def _fix_unittest_skip_decorator(self): + """ + The @unittest.skip decorator calls functools.wraps(self._testcase) + The call to functools.wraps() fails unless self._testcase + has a __name__ attribute. This is usually automatically supplied + if the test is a function or method, but we need to add manually + here. + + See issue #1169 + """ + if sys.version_info[0] == 2: + setattr(self._testcase, "__name__", self.name) + + def teardown(self): + if hasattr(self._testcase, 'teardown_method'): + self._testcase.teardown_method(self._obj) + # Allow garbage collection on TestCase instance attributes. 
+ self._testcase = None + self._obj = None + + def startTest(self, testcase): + pass + + def _addexcinfo(self, rawexcinfo): + # unwrap potential exception info (see twisted trial support below) + rawexcinfo = getattr(rawexcinfo, '_rawexcinfo', rawexcinfo) + try: + excinfo = _pytest._code.ExceptionInfo(rawexcinfo) + except TypeError: + try: + try: + l = traceback.format_exception(*rawexcinfo) + l.insert(0, "NOTE: Incompatible Exception Representation, " + "displaying natively:\n\n") + pytest.fail("".join(l), pytrace=False) + except (pytest.fail.Exception, KeyboardInterrupt): + raise + except: + pytest.fail("ERROR: Unknown Incompatible Exception " + "representation:\n%r" %(rawexcinfo,), pytrace=False) + except KeyboardInterrupt: + raise + except pytest.fail.Exception: + excinfo = _pytest._code.ExceptionInfo() + self.__dict__.setdefault('_excinfo', []).append(excinfo) + + def addError(self, testcase, rawexcinfo): + self._addexcinfo(rawexcinfo) + def addFailure(self, testcase, rawexcinfo): + self._addexcinfo(rawexcinfo) + + def addSkip(self, testcase, reason): + try: + pytest.skip(reason) + except pytest.skip.Exception: + self._evalskip = MarkEvaluator(self, 'SkipTest') + self._evalskip.result = True + self._addexcinfo(sys.exc_info()) + + def addExpectedFailure(self, testcase, rawexcinfo, reason=""): + try: + pytest.xfail(str(reason)) + except pytest.xfail.Exception: + self._addexcinfo(sys.exc_info()) + + def addUnexpectedSuccess(self, testcase, reason=""): + self._unexpectedsuccess = reason + + def addSuccess(self, testcase): + pass + + def stopTest(self, testcase): + pass + + def runtest(self): + if self.config.pluginmanager.get_plugin("pdbinvoke") is None: + self._testcase(result=self) + else: + # disables tearDown and cleanups for post mortem debugging (see #1890) + self._testcase.debug() + + + def _prunetraceback(self, excinfo): + pytest.Function._prunetraceback(self, excinfo) + traceback = excinfo.traceback.filter( + lambda x:not x.frame.f_globals.get('__unittest')) + if traceback: + excinfo.traceback = traceback + +@pytest.hookimpl(tryfirst=True) +def pytest_runtest_makereport(item, call): + if isinstance(item, TestCaseFunction): + if item._excinfo: + call.excinfo = item._excinfo.pop(0) + try: + del call.result + except AttributeError: + pass + +# twisted trial support + +@pytest.hookimpl(hookwrapper=True) +def pytest_runtest_protocol(item): + if isinstance(item, TestCaseFunction) and \ + 'twisted.trial.unittest' in sys.modules: + ut = sys.modules['twisted.python.failure'] + Failure__init__ = ut.Failure.__init__ + check_testcase_implements_trial_reporter() + + def excstore(self, exc_value=None, exc_type=None, exc_tb=None, + captureVars=None): + if exc_value is None: + self._rawexcinfo = sys.exc_info() + else: + if exc_type is None: + exc_type = type(exc_value) + self._rawexcinfo = (exc_type, exc_value, exc_tb) + try: + Failure__init__(self, exc_value, exc_type, exc_tb, + captureVars=captureVars) + except TypeError: + Failure__init__(self, exc_value, exc_type, exc_tb) + + ut.Failure.__init__ = excstore + yield + ut.Failure.__init__ = Failure__init__ + else: + yield + + +def check_testcase_implements_trial_reporter(done=[]): + if done: + return + from zope.interface import classImplements + from twisted.trial.itrial import IReporter + classImplements(TestCaseFunction, IReporter) + done.append(1) diff --git a/tests/_lib_vendors/_pytest/vendored_packages/__init__.py b/tests/_lib_vendors/_pytest/vendored_packages/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/tests/_lib_vendors/_pytest/vendored_packages/pluggy.py b/tests/_lib_vendors/_pytest/vendored_packages/pluggy.py
new file mode 100644
index 0000000..9c13932
--- /dev/null
+++ b/tests/_lib_vendors/_pytest/vendored_packages/pluggy.py
@@ -0,0 +1,802 @@
+"""
+PluginManager, basic initialization and tracing.
+
+pluggy is the crystallized core of plugin management as used
+by some 150 plugins for pytest.
+
+Pluggy uses semantic versioning. Breaking changes are only foreseen for
+major releases (incremented X in "X.Y.Z"). If you want to use pluggy in
+your project you should thus use a dependency restriction like
+"pluggy>=0.1.0,<1.0" to avoid surprises.
+
+pluggy is concerned with hook specification, hook implementations and hook
+calling. For any given hook specification a hook call invokes up to N implementations.
+A hook implementation can influence its position and type of execution:
+if attributed "tryfirst" or "trylast" it will be executed as early or as
+late as possible. However, if attributed "hookwrapper" an implementation
+can wrap all calls to non-hookwrapper implementations. A hookwrapper
+can thus execute some code ahead of and after the execution of other hooks.
+
+Hook specification is done by way of a regular python function where
+both the function name and the names of all its arguments are significant.
+Each hook implementation function is verified against the original specification
+function, including the names of all its arguments. To allow for hook specifications
+to evolve over the lifetime of a project, hook implementations can
+accept fewer arguments. One can thus add new arguments and semantics to
+a hook specification by adding another argument, typically without breaking
+existing hook implementations.
+
+The chosen approach is meant to let a hook designer think carefully about
+which objects are needed by an extension writer. By contrast, subclass-based
+extension mechanisms often expose a lot more state and behaviour than needed,
+thus restricting future developments.
+
+Pluggy currently consists of functionality for:
+
+- a way to register new hook specifications. Without a hook
+  specification no hook calling can be performed.
+
+- a registry of plugins which contain hook implementation functions. It
+  is possible to register plugins for which a hook specification is not yet
+  known and validate all hooks when the system is in a more referentially
+  consistent state. Setting an "optionalhook" attribution on a hook
+  implementation will avoid a PluginValidationError if a specification
+  is missing. This allows optional integration between plugins.
+
+- a "hook" relay object from which you can launch 1:N calls to
+  registered hook implementation functions
+
+- a mechanism for ordering hook implementation functions
+
+- mechanisms for two different types of 1:N calls: "firstresult" for when
+  the call should stop when the first implementation returns a non-None result.
+  And the other (default) way of guaranteeing that all hook implementations
+  will be called and their non-None result collected.
+
+- mechanisms for "historic" extension points such that all newly
+  registered functions will receive all hook calls that happened
+  before their registration.
+
+- a mechanism for discovering plugin objects which are based on
+  setuptools-based entry points.
+
+- a simple tracing mechanism, including tracing of plugin calls and
+  their arguments.
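+
+A minimal usage sketch (the names here are illustrative, not part of this
+module)::
+
+    hookspec = HookspecMarker("myproject")
+    hookimpl = HookimplMarker("myproject")
+
+    class Spec:
+        @hookspec
+        def myhook(self, arg):
+            "specification: takes ``arg``, each impl may return a value"
+
+    class Plugin:
+        @hookimpl
+        def myhook(self, arg):
+            return arg + 1
+
+    pm = PluginManager("myproject")
+    pm.add_hookspecs(Spec)
+    pm.register(Plugin())
+    assert pm.hook.myhook(arg=41) == [42]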
+
+"""
+import sys
+import inspect
+
+__version__ = '0.4.0'
+
+__all__ = ["PluginManager", "PluginValidationError", "HookCallError",
+           "HookspecMarker", "HookimplMarker"]
+
+_py3 = sys.version_info > (3, 0)
+
+
+class HookspecMarker:
+    """ Decorator helper class for marking functions as hook specifications.
+
+    You can instantiate it with a project_name to get a decorator.
+    Calling PluginManager.add_hookspecs later will discover all marked functions
+    if the PluginManager uses the same project_name.
+    """
+
+    def __init__(self, project_name):
+        self.project_name = project_name
+
+    def __call__(self, function=None, firstresult=False, historic=False):
+        """ if passed a function, directly sets attributes on the function
+        which will make it discoverable to add_hookspecs(). If passed no
+        function, returns a decorator which can be applied to a function
+        later using the attributes supplied.
+
+        If firstresult is True the 1:N hook call (N being the number of registered
+        hook implementation functions) will stop at I<=N when the I'th function
+        returns a non-None result.
+
+        If historic is True calls to a hook will be memorized and replayed
+        on later registered plugins.
+
+        """
+        def setattr_hookspec_opts(func):
+            if historic and firstresult:
+                raise ValueError("cannot have a historic firstresult hook")
+            setattr(func, self.project_name + "_spec",
+                    dict(firstresult=firstresult, historic=historic))
+            return func
+
+        if function is not None:
+            return setattr_hookspec_opts(function)
+        else:
+            return setattr_hookspec_opts
+
+
+class HookimplMarker:
+    """ Decorator helper class for marking functions as hook implementations.
+
+    You can instantiate with a project_name to get a decorator.
+    Calling PluginManager.register later will discover all marked functions
+    if the PluginManager uses the same project_name.
+    """
+    def __init__(self, project_name):
+        self.project_name = project_name
+
+    def __call__(self, function=None, hookwrapper=False, optionalhook=False,
+                 tryfirst=False, trylast=False):
+
+        """ if passed a function, directly sets attributes on the function
+        which will make it discoverable to register(). If passed no function,
+        returns a decorator which can be applied to a function later using
+        the attributes supplied.
+
+        If optionalhook is True a missing matching hook specification will not result
+        in an error (by default it is an error if no matching spec is found).
+
+        If tryfirst is True this hook implementation will run as early as possible
+        in the chain of N hook implementations for a specification.
+
+        If trylast is True this hook implementation will run as late as possible
+        in the chain of N hook implementations.
+
+        If hookwrapper is True the hook implementation needs to execute exactly
+        one "yield". The code before the yield is run early before any non-hookwrapper
+        function is run. The code after the yield is run after all non-hookwrapper
+        functions have run. The yield receives an ``_CallOutcome`` object representing
+        the exception or result outcome of the inner calls (including other hookwrapper
+        calls).
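+
+        For example (illustrative only), a wrapper implementation would be
+        written as::
+
+            @hookimpl(hookwrapper=True)
+            def myhook(arg):
+                # runs before the non-hookwrapper implementations
+                outcome = yield
+                # outcome.get_result() returns or reraises the inner result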
+ + """ + def setattr_hookimpl_opts(func): + setattr(func, self.project_name + "_impl", + dict(hookwrapper=hookwrapper, optionalhook=optionalhook, + tryfirst=tryfirst, trylast=trylast)) + return func + + if function is None: + return setattr_hookimpl_opts + else: + return setattr_hookimpl_opts(function) + + +def normalize_hookimpl_opts(opts): + opts.setdefault("tryfirst", False) + opts.setdefault("trylast", False) + opts.setdefault("hookwrapper", False) + opts.setdefault("optionalhook", False) + + +class _TagTracer: + def __init__(self): + self._tag2proc = {} + self.writer = None + self.indent = 0 + + def get(self, name): + return _TagTracerSub(self, (name,)) + + def format_message(self, tags, args): + if isinstance(args[-1], dict): + extra = args[-1] + args = args[:-1] + else: + extra = {} + + content = " ".join(map(str, args)) + indent = " " * self.indent + + lines = [ + "%s%s [%s]\n" % (indent, content, ":".join(tags)) + ] + + for name, value in extra.items(): + lines.append("%s %s: %s\n" % (indent, name, value)) + return lines + + def processmessage(self, tags, args): + if self.writer is not None and args: + lines = self.format_message(tags, args) + self.writer(''.join(lines)) + try: + self._tag2proc[tags](tags, args) + except KeyError: + pass + + def setwriter(self, writer): + self.writer = writer + + def setprocessor(self, tags, processor): + if isinstance(tags, str): + tags = tuple(tags.split(":")) + else: + assert isinstance(tags, tuple) + self._tag2proc[tags] = processor + + +class _TagTracerSub: + def __init__(self, root, tags): + self.root = root + self.tags = tags + + def __call__(self, *args): + self.root.processmessage(self.tags, args) + + def setmyprocessor(self, processor): + self.root.setprocessor(self.tags, processor) + + def get(self, name): + return self.__class__(self.root, self.tags + (name,)) + + +def _raise_wrapfail(wrap_controller, msg): + co = wrap_controller.gi_code + raise RuntimeError("wrap_controller at %r %s:%d %s" % + (co.co_name, co.co_filename, co.co_firstlineno, msg)) + + +def _wrapped_call(wrap_controller, func): + """ Wrap calling to a function with a generator which needs to yield + exactly once. The yield point will trigger calling the wrapped function + and return its _CallOutcome to the yield point. The generator then needs + to finish (raise StopIteration) in order for the wrapped call to complete. + """ + try: + next(wrap_controller) # first yield + except StopIteration: + _raise_wrapfail(wrap_controller, "did not yield") + call_outcome = _CallOutcome(func) + try: + wrap_controller.send(call_outcome) + _raise_wrapfail(wrap_controller, "has second yield") + except StopIteration: + pass + return call_outcome.get_result() + + +class _CallOutcome: + """ Outcome of a function call, either an exception or a proper result. + Calling the ``get_result`` method will return the result or reraise + the exception raised when the function was called. 
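+    (Illustrative use: ``_CallOutcome(lambda: 1 / 0)`` stores the
+    ZeroDivisionError in ``excinfo``; ``get_result()`` would reraise it,
+    while ``force_result(x)`` would replace it with a proper result.)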
""" + excinfo = None + + def __init__(self, func): + try: + self.result = func() + except BaseException: + self.excinfo = sys.exc_info() + + def force_result(self, result): + self.result = result + self.excinfo = None + + def get_result(self): + if self.excinfo is None: + return self.result + else: + ex = self.excinfo + if _py3: + raise ex[1].with_traceback(ex[2]) + _reraise(*ex) # noqa + +if not _py3: + exec(""" +def _reraise(cls, val, tb): + raise cls, val, tb +""") + + +class _TracedHookExecution: + def __init__(self, pluginmanager, before, after): + self.pluginmanager = pluginmanager + self.before = before + self.after = after + self.oldcall = pluginmanager._inner_hookexec + assert not isinstance(self.oldcall, _TracedHookExecution) + self.pluginmanager._inner_hookexec = self + + def __call__(self, hook, hook_impls, kwargs): + self.before(hook.name, hook_impls, kwargs) + outcome = _CallOutcome(lambda: self.oldcall(hook, hook_impls, kwargs)) + self.after(outcome, hook.name, hook_impls, kwargs) + return outcome.get_result() + + def undo(self): + self.pluginmanager._inner_hookexec = self.oldcall + + +class PluginManager(object): + """ Core Pluginmanager class which manages registration + of plugin objects and 1:N hook calling. + + You can register new hooks by calling ``add_hookspec(module_or_class)``. + You can register plugin objects (which contain hooks) by calling + ``register(plugin)``. The Pluginmanager is initialized with a + prefix that is searched for in the names of the dict of registered + plugin objects. An optional excludefunc allows to blacklist names which + are not considered as hooks despite a matching prefix. + + For debugging purposes you can call ``enable_tracing()`` + which will subsequently send debug information to the trace helper. + """ + + def __init__(self, project_name, implprefix=None): + """ if implprefix is given implementation functions + will be recognized if their name matches the implprefix. """ + self.project_name = project_name + self._name2plugin = {} + self._plugin2hookcallers = {} + self._plugin_distinfo = [] + self.trace = _TagTracer().get("pluginmanage") + self.hook = _HookRelay(self.trace.root.get("hook")) + self._implprefix = implprefix + self._inner_hookexec = lambda hook, methods, kwargs: \ + _MultiCall(methods, kwargs, hook.spec_opts).execute() + + def _hookexec(self, hook, methods, kwargs): + # called from all hookcaller instances. + # enable_tracing will set its own wrapping function at self._inner_hookexec + return self._inner_hookexec(hook, methods, kwargs) + + def register(self, plugin, name=None): + """ Register a plugin and return its canonical name or None if the name + is blocked from registering. Raise a ValueError if the plugin is already + registered. 
""" + plugin_name = name or self.get_canonical_name(plugin) + + if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers: + if self._name2plugin.get(plugin_name, -1) is None: + return # blocked plugin, return None to indicate no registration + raise ValueError("Plugin already registered: %s=%s\n%s" % + (plugin_name, plugin, self._name2plugin)) + + # XXX if an error happens we should make sure no state has been + # changed at point of return + self._name2plugin[plugin_name] = plugin + + # register matching hook implementations of the plugin + self._plugin2hookcallers[plugin] = hookcallers = [] + for name in dir(plugin): + hookimpl_opts = self.parse_hookimpl_opts(plugin, name) + if hookimpl_opts is not None: + normalize_hookimpl_opts(hookimpl_opts) + method = getattr(plugin, name) + hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts) + hook = getattr(self.hook, name, None) + if hook is None: + hook = _HookCaller(name, self._hookexec) + setattr(self.hook, name, hook) + elif hook.has_spec(): + self._verify_hook(hook, hookimpl) + hook._maybe_apply_history(hookimpl) + hook._add_hookimpl(hookimpl) + hookcallers.append(hook) + return plugin_name + + def parse_hookimpl_opts(self, plugin, name): + method = getattr(plugin, name) + try: + res = getattr(method, self.project_name + "_impl", None) + except Exception: + res = {} + if res is not None and not isinstance(res, dict): + # false positive + res = None + elif res is None and self._implprefix and name.startswith(self._implprefix): + res = {} + return res + + def unregister(self, plugin=None, name=None): + """ unregister a plugin object and all its contained hook implementations + from internal data structures. """ + if name is None: + assert plugin is not None, "one of name or plugin needs to be specified" + name = self.get_name(plugin) + + if plugin is None: + plugin = self.get_plugin(name) + + # if self._name2plugin[name] == None registration was blocked: ignore + if self._name2plugin.get(name): + del self._name2plugin[name] + + for hookcaller in self._plugin2hookcallers.pop(plugin, []): + hookcaller._remove_plugin(plugin) + + return plugin + + def set_blocked(self, name): + """ block registrations of the given name, unregister if already registered. """ + self.unregister(name=name) + self._name2plugin[name] = None + + def is_blocked(self, name): + """ return True if the name blogs registering plugins of that name. """ + return name in self._name2plugin and self._name2plugin[name] is None + + def add_hookspecs(self, module_or_class): + """ add new hook specifications defined in the given module_or_class. + Functions are recognized if they have been decorated accordingly. 
""" + names = [] + for name in dir(module_or_class): + spec_opts = self.parse_hookspec_opts(module_or_class, name) + if spec_opts is not None: + hc = getattr(self.hook, name, None) + if hc is None: + hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts) + setattr(self.hook, name, hc) + else: + # plugins registered this hook without knowing the spec + hc.set_specification(module_or_class, spec_opts) + for hookfunction in (hc._wrappers + hc._nonwrappers): + self._verify_hook(hc, hookfunction) + names.append(name) + + if not names: + raise ValueError("did not find any %r hooks in %r" % + (self.project_name, module_or_class)) + + def parse_hookspec_opts(self, module_or_class, name): + method = getattr(module_or_class, name) + return getattr(method, self.project_name + "_spec", None) + + def get_plugins(self): + """ return the set of registered plugins. """ + return set(self._plugin2hookcallers) + + def is_registered(self, plugin): + """ Return True if the plugin is already registered. """ + return plugin in self._plugin2hookcallers + + def get_canonical_name(self, plugin): + """ Return canonical name for a plugin object. Note that a plugin + may be registered under a different name which was specified + by the caller of register(plugin, name). To obtain the name + of an registered plugin use ``get_name(plugin)`` instead.""" + return getattr(plugin, "__name__", None) or str(id(plugin)) + + def get_plugin(self, name): + """ Return a plugin or None for the given name. """ + return self._name2plugin.get(name) + + def has_plugin(self, name): + """ Return True if a plugin with the given name is registered. """ + return self.get_plugin(name) is not None + + def get_name(self, plugin): + """ Return name for registered plugin or None if not registered. """ + for name, val in self._name2plugin.items(): + if plugin == val: + return name + + def _verify_hook(self, hook, hookimpl): + if hook.is_historic() and hookimpl.hookwrapper: + raise PluginValidationError( + "Plugin %r\nhook %r\nhistoric incompatible to hookwrapper" % + (hookimpl.plugin_name, hook.name)) + + for arg in hookimpl.argnames: + if arg not in hook.argnames: + raise PluginValidationError( + "Plugin %r\nhook %r\nargument %r not available\n" + "plugin definition: %s\n" + "available hookargs: %s" % + (hookimpl.plugin_name, hook.name, arg, + _formatdef(hookimpl.function), ", ".join(hook.argnames))) + + def check_pending(self): + """ Verify that all hooks which have not been verified against + a hook specification are optional, otherwise raise PluginValidationError""" + for name in self.hook.__dict__: + if name[0] != "_": + hook = getattr(self.hook, name) + if not hook.has_spec(): + for hookimpl in (hook._wrappers + hook._nonwrappers): + if not hookimpl.optionalhook: + raise PluginValidationError( + "unknown hook %r in plugin %r" % + (name, hookimpl.plugin)) + + def load_setuptools_entrypoints(self, entrypoint_name): + """ Load modules from querying the specified setuptools entrypoint name. + Return the number of loaded plugins. """ + from pkg_resources import (iter_entry_points, DistributionNotFound, + VersionConflict) + for ep in iter_entry_points(entrypoint_name): + # is the plugin registered or blocked? + if self.get_plugin(ep.name) or self.is_blocked(ep.name): + continue + try: + plugin = ep.load() + except DistributionNotFound: + continue + except VersionConflict as e: + raise PluginValidationError( + "Plugin %r could not be loaded: %s!" 
% (ep.name, e)) + self.register(plugin, name=ep.name) + self._plugin_distinfo.append((plugin, ep.dist)) + return len(self._plugin_distinfo) + + def list_plugin_distinfo(self): + """ return list of distinfo/plugin tuples for all setuptools registered + plugins. """ + return list(self._plugin_distinfo) + + def list_name_plugin(self): + """ return list of name/plugin pairs. """ + return list(self._name2plugin.items()) + + def get_hookcallers(self, plugin): + """ get all hook callers for the specified plugin. """ + return self._plugin2hookcallers.get(plugin) + + def add_hookcall_monitoring(self, before, after): + """ add before/after tracing functions for all hooks + and return an undo function which, when called, + will remove the added tracers. + + ``before(hook_name, hook_impls, kwargs)`` will be called ahead + of all hook calls and receive a hookcaller instance, a list + of HookImpl instances and the keyword arguments for the hook call. + + ``after(outcome, hook_name, hook_impls, kwargs)`` receives the + same arguments as ``before`` but also a :py:class:`_CallOutcome`` object + which represents the result of the overall hook call. + """ + return _TracedHookExecution(self, before, after).undo + + def enable_tracing(self): + """ enable tracing of hook calls and return an undo function. """ + hooktrace = self.hook._trace + + def before(hook_name, methods, kwargs): + hooktrace.root.indent += 1 + hooktrace(hook_name, kwargs) + + def after(outcome, hook_name, methods, kwargs): + if outcome.excinfo is None: + hooktrace("finish", hook_name, "-->", outcome.result) + hooktrace.root.indent -= 1 + + return self.add_hookcall_monitoring(before, after) + + def subset_hook_caller(self, name, remove_plugins): + """ Return a new _HookCaller instance for the named method + which manages calls to all registered plugins except the + ones from remove_plugins. """ + orig = getattr(self.hook, name) + plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)] + if plugins_to_remove: + hc = _HookCaller(orig.name, orig._hookexec, orig._specmodule_or_class, + orig.spec_opts) + for hookimpl in (orig._wrappers + orig._nonwrappers): + plugin = hookimpl.plugin + if plugin not in plugins_to_remove: + hc._add_hookimpl(hookimpl) + # we also keep track of this hook caller so it + # gets properly removed on plugin unregistration + self._plugin2hookcallers.setdefault(plugin, []).append(hc) + return hc + return orig + + +class _MultiCall: + """ execute a call into multiple python functions/methods. """ + + # XXX note that the __multicall__ argument is supported only + # for pytest compatibility reasons. It was never officially + # supported there and is explicitely deprecated since 2.8 + # so we can remove it soon, allowing to avoid the below recursion + # in execute() and simplify/speed up the execute loop. 
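+    #
+    # Execution order sketch: execute() pops implementations LIFO from
+    # hook_impls and calls each one with only the arguments named in its
+    # signature. With a ``firstresult`` spec the loop returns the first
+    # non-None value; otherwise all non-None results are collected.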
+ + def __init__(self, hook_impls, kwargs, specopts={}): + self.hook_impls = hook_impls + self.kwargs = kwargs + self.kwargs["__multicall__"] = self + self.specopts = specopts + + def execute(self): + all_kwargs = self.kwargs + self.results = results = [] + firstresult = self.specopts.get("firstresult") + + while self.hook_impls: + hook_impl = self.hook_impls.pop() + try: + args = [all_kwargs[argname] for argname in hook_impl.argnames] + except KeyError: + for argname in hook_impl.argnames: + if argname not in all_kwargs: + raise HookCallError( + "hook call must provide argument %r" % (argname,)) + if hook_impl.hookwrapper: + return _wrapped_call(hook_impl.function(*args), self.execute) + res = hook_impl.function(*args) + if res is not None: + if firstresult: + return res + results.append(res) + + if not firstresult: + return results + + def __repr__(self): + status = "%d meths" % (len(self.hook_impls),) + if hasattr(self, "results"): + status = ("%d results, " % len(self.results)) + status + return "<_MultiCall %s, kwargs=%r>" % (status, self.kwargs) + + +def varnames(func, startindex=None): + """ return argument name tuple for a function, method, class or callable. + + In case of a class, its "__init__" method is considered. + For methods the "self" parameter is not included unless you are passing + an unbound method with Python3 (which has no supports for unbound methods) + """ + cache = getattr(func, "__dict__", {}) + try: + return cache["_varnames"] + except KeyError: + pass + if inspect.isclass(func): + try: + func = func.__init__ + except AttributeError: + return () + startindex = 1 + else: + if not inspect.isfunction(func) and not inspect.ismethod(func): + try: + func = getattr(func, '__call__', func) + except Exception: + return () + if startindex is None: + startindex = int(inspect.ismethod(func)) + + try: + rawcode = func.__code__ + except AttributeError: + return () + try: + x = rawcode.co_varnames[startindex:rawcode.co_argcount] + except AttributeError: + x = () + else: + defaults = func.__defaults__ + if defaults: + x = x[:-len(defaults)] + try: + cache["_varnames"] = x + except TypeError: + pass + return x + + +class _HookRelay: + """ hook holder object for performing 1:N hook calls where N is the number + of registered plugins. 
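+
+    Hook callers are plain attributes, so a hook call reads as (sketch)::
+
+        pm.hook.myhook(arg=1)   # calls every registered ``myhook``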
+ + """ + + def __init__(self, trace): + self._trace = trace + + +class _HookCaller(object): + def __init__(self, name, hook_execute, specmodule_or_class=None, spec_opts=None): + self.name = name + self._wrappers = [] + self._nonwrappers = [] + self._hookexec = hook_execute + if specmodule_or_class is not None: + assert spec_opts is not None + self.set_specification(specmodule_or_class, spec_opts) + + def has_spec(self): + return hasattr(self, "_specmodule_or_class") + + def set_specification(self, specmodule_or_class, spec_opts): + assert not self.has_spec() + self._specmodule_or_class = specmodule_or_class + specfunc = getattr(specmodule_or_class, self.name) + argnames = varnames(specfunc, startindex=inspect.isclass(specmodule_or_class)) + assert "self" not in argnames # sanity check + self.argnames = ["__multicall__"] + list(argnames) + self.spec_opts = spec_opts + if spec_opts.get("historic"): + self._call_history = [] + + def is_historic(self): + return hasattr(self, "_call_history") + + def _remove_plugin(self, plugin): + def remove(wrappers): + for i, method in enumerate(wrappers): + if method.plugin == plugin: + del wrappers[i] + return True + if remove(self._wrappers) is None: + if remove(self._nonwrappers) is None: + raise ValueError("plugin %r not found" % (plugin,)) + + def _add_hookimpl(self, hookimpl): + if hookimpl.hookwrapper: + methods = self._wrappers + else: + methods = self._nonwrappers + + if hookimpl.trylast: + methods.insert(0, hookimpl) + elif hookimpl.tryfirst: + methods.append(hookimpl) + else: + # find last non-tryfirst method + i = len(methods) - 1 + while i >= 0 and methods[i].tryfirst: + i -= 1 + methods.insert(i + 1, hookimpl) + + def __repr__(self): + return "<_HookCaller %r>" % (self.name,) + + def __call__(self, **kwargs): + assert not self.is_historic() + return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs) + + def call_historic(self, proc=None, kwargs=None): + self._call_history.append((kwargs or {}, proc)) + # historizing hooks don't return results + self._hookexec(self, self._nonwrappers + self._wrappers, kwargs) + + def call_extra(self, methods, kwargs): + """ Call the hook with some additional temporarily participating + methods using the specified kwargs as call parameters. """ + old = list(self._nonwrappers), list(self._wrappers) + for method in methods: + opts = dict(hookwrapper=False, trylast=False, tryfirst=False) + hookimpl = HookImpl(None, "", method, opts) + self._add_hookimpl(hookimpl) + try: + return self(**kwargs) + finally: + self._nonwrappers, self._wrappers = old + + def _maybe_apply_history(self, method): + if self.is_historic(): + for kwargs, proc in self._call_history: + res = self._hookexec(self, [method], kwargs) + if res and proc is not None: + proc(res[0]) + + +class HookImpl: + def __init__(self, plugin, plugin_name, function, hook_impl_opts): + self.function = function + self.argnames = varnames(self.function) + self.plugin = plugin + self.opts = hook_impl_opts + self.plugin_name = plugin_name + self.__dict__.update(hook_impl_opts) + + +class PluginValidationError(Exception): + """ plugin failed validation. """ + + +class HookCallError(Exception): + """ Hook was called wrongly. 
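+
+    Raised for example when a hook call omits an argument that an
+    implementation declares (sketch)::
+
+        pm.hook.myhook()   # hook call must provide argument 'arg'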
""" + + +if hasattr(inspect, 'signature'): + def _formatdef(func): + return "%s%s" % ( + func.__name__, + str(inspect.signature(func)) + ) +else: + def _formatdef(func): + return "%s%s" % ( + func.__name__, + inspect.formatargspec(*inspect.getargspec(func)) + ) diff --git a/tests/_lib_vendors/pkg_resources/__init__.py b/tests/_lib_vendors/pkg_resources/__init__.py new file mode 100644 index 0000000..049b8a5 --- /dev/null +++ b/tests/_lib_vendors/pkg_resources/__init__.py @@ -0,0 +1,3176 @@ +# coding: utf-8 +""" +Package resource API +-------------------- + +A resource is a logical file contained within a package, or a logical +subdirectory thereof. The package resource API expects resource names +to have their path parts separated with ``/``, *not* whatever the local +path separator is. Do not use os.path operations to manipulate resource +names being passed into the API. + +The package resource API is designed to work with normal filesystem packages, +.egg files, and unpacked .egg files. It can also work in a limited way with +.zip files and with custom PEP 302 loaders that support the ``get_data()`` +method. +""" + +from __future__ import absolute_import + +import sys +import os +import io +import time +import re +import types +import zipfile +import zipimport +import warnings +import stat +import functools +import pkgutil +import operator +import platform +import collections +import plistlib +import email.parser +import errno +import tempfile +import textwrap +import itertools +import inspect +from pkgutil import get_importer + +try: + import _imp +except ImportError: + # Python 3.2 compatibility + import imp as _imp + +from pkg_resources.extern import six +from pkg_resources.extern.six.moves import urllib, map, filter + +# capture these to bypass sandboxing +from os import utime +try: + from os import mkdir, rename, unlink + WRITE_SUPPORT = True +except ImportError: + # no write support, probably under GAE + WRITE_SUPPORT = False + +from os import open as os_open +from os.path import isdir, split + +try: + import importlib.machinery as importlib_machinery + # access attribute to force import under delayed import mechanisms. + importlib_machinery.__name__ +except ImportError: + importlib_machinery = None + +from . import py31compat +from pkg_resources.extern import appdirs +from pkg_resources.extern import packaging +__import__('pkg_resources.extern.packaging.version') +__import__('pkg_resources.extern.packaging.specifiers') +__import__('pkg_resources.extern.packaging.requirements') +__import__('pkg_resources.extern.packaging.markers') + + +if (3, 0) < sys.version_info < (3, 3): + raise RuntimeError("Python 3.3 or later is required") + +if six.PY2: + # Those builtin exceptions are only defined in Python 3 + PermissionError = None + NotADirectoryError = None + +# declare some globals that will be defined later to +# satisfy the linters. +require = None +working_set = None + + +class PEP440Warning(RuntimeWarning): + """ + Used when there is an issue with a version or specifier not complying with + PEP 440. 
+ """ + + +class _SetuptoolsVersionMixin(object): + def __hash__(self): + return super(_SetuptoolsVersionMixin, self).__hash__() + + def __lt__(self, other): + if isinstance(other, tuple): + return tuple(self) < other + else: + return super(_SetuptoolsVersionMixin, self).__lt__(other) + + def __le__(self, other): + if isinstance(other, tuple): + return tuple(self) <= other + else: + return super(_SetuptoolsVersionMixin, self).__le__(other) + + def __eq__(self, other): + if isinstance(other, tuple): + return tuple(self) == other + else: + return super(_SetuptoolsVersionMixin, self).__eq__(other) + + def __ge__(self, other): + if isinstance(other, tuple): + return tuple(self) >= other + else: + return super(_SetuptoolsVersionMixin, self).__ge__(other) + + def __gt__(self, other): + if isinstance(other, tuple): + return tuple(self) > other + else: + return super(_SetuptoolsVersionMixin, self).__gt__(other) + + def __ne__(self, other): + if isinstance(other, tuple): + return tuple(self) != other + else: + return super(_SetuptoolsVersionMixin, self).__ne__(other) + + def __getitem__(self, key): + return tuple(self)[key] + + def __iter__(self): + component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) + replace = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + }.get + + def _parse_version_parts(s): + for part in component_re.split(s): + part = replace(part, part) + if not part or part == '.': + continue + if part[:1] in '0123456789': + # pad for numeric comparison + yield part.zfill(8) + else: + yield '*' + part + + # ensure that alpha/beta/candidate are before final + yield '*final' + + def old_parse_version(s): + parts = [] + for part in _parse_version_parts(s.lower()): + if part.startswith('*'): + # remove '-' before a prerelease tag + if part < '*final': + while parts and parts[-1] == '*final-': + parts.pop() + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == '00000000': + parts.pop() + parts.append(part) + return tuple(parts) + + # Warn for use of this function + warnings.warn( + "You have iterated over the result of " + "pkg_resources.parse_version. This is a legacy behavior which is " + "inconsistent with the new version class introduced in setuptools " + "8.0. In most cases, conversion to a tuple is unnecessary. For " + "comparison of versions, sort the Version instances directly. 
If " + "you have another use case requiring the tuple, please file a " + "bug with the setuptools project describing that need.", + RuntimeWarning, + stacklevel=1, + ) + + for part in old_parse_version(str(self)): + yield part + + +class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version): + pass + + +class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin, + packaging.version.LegacyVersion): + pass + + +def parse_version(v): + try: + return SetuptoolsVersion(v) + except packaging.version.InvalidVersion: + return SetuptoolsLegacyVersion(v) + + +_state_vars = {} + + +def _declare_state(vartype, **kw): + globals().update(kw) + _state_vars.update(dict.fromkeys(kw, vartype)) + + +def __getstate__(): + state = {} + g = globals() + for k, v in _state_vars.items(): + state[k] = g['_sget_' + v](g[k]) + return state + + +def __setstate__(state): + g = globals() + for k, v in state.items(): + g['_sset_' + _state_vars[k]](k, g[k], v) + return state + + +def _sget_dict(val): + return val.copy() + + +def _sset_dict(key, ob, state): + ob.clear() + ob.update(state) + + +def _sget_object(val): + return val.__getstate__() + + +def _sset_object(key, ob, state): + ob.__setstate__(state) + + +_sget_none = _sset_none = lambda *args: None + + +def get_supported_platform(): + """Return this platform's maximum compatible version. + + distutils.util.get_platform() normally reports the minimum version + of Mac OS X that would be required to *use* extensions produced by + distutils. But what we want when checking compatibility is to know the + version of Mac OS X that we are *running*. To allow usage of packages that + explicitly require a newer version of Mac OS X, we must also know the + current version of the OS. + + If this condition occurs for any other platform with a version in its + platform strings, this function should be extended accordingly. 
+ """ + plat = get_build_platform() + m = macosVersionString.match(plat) + if m is not None and sys.platform == "darwin": + try: + plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) + except ValueError: + # not Mac OS X + pass + return plat + + +__all__ = [ + # Basic resource access and distribution/entry point discovery + 'require', 'run_script', 'get_provider', 'get_distribution', + 'load_entry_point', 'get_entry_map', 'get_entry_info', + 'iter_entry_points', + 'resource_string', 'resource_stream', 'resource_filename', + 'resource_listdir', 'resource_exists', 'resource_isdir', + + # Environmental control + 'declare_namespace', 'working_set', 'add_activation_listener', + 'find_distributions', 'set_extraction_path', 'cleanup_resources', + 'get_default_cache', + + # Primary implementation classes + 'Environment', 'WorkingSet', 'ResourceManager', + 'Distribution', 'Requirement', 'EntryPoint', + + # Exceptions + 'ResolutionError', 'VersionConflict', 'DistributionNotFound', + 'UnknownExtra', 'ExtractionError', + + # Warnings + 'PEP440Warning', + + # Parsing functions and string utilities + 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', + 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', + 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', + + # filesystem utilities + 'ensure_directory', 'normalize_path', + + # Distribution "precedence" constants + 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', + + # "Provider" interfaces, implementations, and registration/lookup APIs + 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', + 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', + 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', + 'register_finder', 'register_namespace_handler', 'register_loader_type', + 'fixup_namespace_packages', 'get_importer', + + # Deprecated/backward compatibility only + 'run_main', 'AvailableDistributions', +] + + +class ResolutionError(Exception): + """Abstract base for dependency resolution errors""" + + def __repr__(self): + return self.__class__.__name__ + repr(self.args) + + +class VersionConflict(ResolutionError): + """ + An already-installed version conflicts with the requested version. + + Should be initialized with the installed Distribution and the requested + Requirement. + """ + + _template = "{self.dist} is installed but {self.req} is required" + + @property + def dist(self): + return self.args[0] + + @property + def req(self): + return self.args[1] + + def report(self): + return self._template.format(**locals()) + + def with_context(self, required_by): + """ + If required_by is non-empty, return a version of self that is a + ContextualVersionConflict. + """ + if not required_by: + return self + args = self.args + (required_by,) + return ContextualVersionConflict(*args) + + +class ContextualVersionConflict(VersionConflict): + """ + A VersionConflict that accepts a third parameter, the set of the + requirements that required the installed Distribution. 
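+
+    Normally produced via ``VersionConflict.with_context`` (sketch)::
+
+        VersionConflict(dist, req).with_context({'myapp'}).report()
+        # -> "<dist> is installed but <req> is required by {'myapp'}"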
+ """ + + _template = VersionConflict._template + ' by {self.required_by}' + + @property + def required_by(self): + return self.args[2] + + +class DistributionNotFound(ResolutionError): + """A requested distribution was not found""" + + _template = ("The '{self.req}' distribution was not found " + "and is required by {self.requirers_str}") + + @property + def req(self): + return self.args[0] + + @property + def requirers(self): + return self.args[1] + + @property + def requirers_str(self): + if not self.requirers: + return 'the application' + return ', '.join(self.requirers) + + def report(self): + return self._template.format(**locals()) + + def __str__(self): + return self.report() + + +class UnknownExtra(ResolutionError): + """Distribution doesn't have an "extra feature" of the given name""" + + +_provider_factories = {} + +PY_MAJOR = sys.version[:3] +EGG_DIST = 3 +BINARY_DIST = 2 +SOURCE_DIST = 1 +CHECKOUT_DIST = 0 +DEVELOP_DIST = -1 + + +def register_loader_type(loader_type, provider_factory): + """Register `provider_factory` to make providers for `loader_type` + + `loader_type` is the type or class of a PEP 302 ``module.__loader__``, + and `provider_factory` is a function that, passed a *module* object, + returns an ``IResourceProvider`` for that module. + """ + _provider_factories[loader_type] = provider_factory + + +def get_provider(moduleOrReq): + """Return an IResourceProvider for the named module or requirement""" + if isinstance(moduleOrReq, Requirement): + return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] + try: + module = sys.modules[moduleOrReq] + except KeyError: + __import__(moduleOrReq) + module = sys.modules[moduleOrReq] + loader = getattr(module, '__loader__', None) + return _find_adapter(_provider_factories, loader)(module) + + +def _macosx_vers(_cache=[]): + if not _cache: + version = platform.mac_ver()[0] + # fallback for MacPorts + if version == '': + plist = '/System/Library/CoreServices/SystemVersion.plist' + if os.path.exists(plist): + if hasattr(plistlib, 'readPlist'): + plist_content = plistlib.readPlist(plist) + if 'ProductVersion' in plist_content: + version = plist_content['ProductVersion'] + + _cache.append(version.split('.')) + return _cache[0] + + +def _macosx_arch(machine): + return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) + + +def get_build_platform(): + """Return this platform's string for platform-specific distributions + + XXX Currently this is the same as ``distutils.util.get_platform()``, but it + needs some hacks for Linux and Mac OS X. + """ + try: + # Python 2.7 or >=3.2 + from sysconfig import get_platform + except ImportError: + from distutils.util import get_platform + + plat = get_platform() + if sys.platform == "darwin" and not plat.startswith('macosx-'): + try: + version = _macosx_vers() + machine = os.uname()[4].replace(" ", "_") + return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), + _macosx_arch(machine)) + except ValueError: + # if someone is running a non-Mac darwin system, this will fall + # through to the default implementation + pass + return plat + + +macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") +darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") +# XXX backward compat +get_platform = get_build_platform + + +def compatible_platforms(provided, required): + """Can code for the `provided` platform run on the `required` platform? + + Returns true if either platform is ``None``, or the platforms are equal. 
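+
+    For example (sketch)::
+
+        compatible_platforms('macosx-10.3-ppc', 'macosx-10.4-ppc')   # True
+        compatible_platforms('macosx-10.3-ppc', 'macosx-10.3-i386')  # False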
+ + XXX Needs compatibility checks for Linux and other unixy OSes. + """ + if provided is None or required is None or provided == required: + # easy case + return True + + # Mac OS X special cases + reqMac = macosVersionString.match(required) + if reqMac: + provMac = macosVersionString.match(provided) + + # is this a Mac package? + if not provMac: + # this is backwards compatibility for packages built before + # setuptools 0.6. All packages built after this point will + # use the new macosx designation. + provDarwin = darwinVersionString.match(provided) + if provDarwin: + dversion = int(provDarwin.group(1)) + macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) + if dversion == 7 and macosversion >= "10.3" or \ + dversion == 8 and macosversion >= "10.4": + return True + # egg isn't macosx or legacy darwin + return False + + # are they the same major version and machine type? + if provMac.group(1) != reqMac.group(1) or \ + provMac.group(3) != reqMac.group(3): + return False + + # is the required OS major update >= the provided one? + if int(provMac.group(2)) > int(reqMac.group(2)): + return False + + return True + + # XXX Linux and other platforms' special cases should go here + return False + + +def run_script(dist_spec, script_name): + """Locate distribution `dist_spec` and run its `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + require(dist_spec)[0].run_script(script_name, ns) + + +# backward compatibility +run_main = run_script + + +def get_distribution(dist): + """Return a current distribution object for a Requirement or string""" + if isinstance(dist, six.string_types): + dist = Requirement.parse(dist) + if isinstance(dist, Requirement): + dist = get_provider(dist) + if not isinstance(dist, Distribution): + raise TypeError("Expected string, Requirement, or Distribution", dist) + return dist + + +def load_entry_point(dist, group, name): + """Return `name` entry point of `group` for `dist` or raise ImportError""" + return get_distribution(dist).load_entry_point(group, name) + + +def get_entry_map(dist, group=None): + """Return the entry point map for `group`, or the full entry map""" + return get_distribution(dist).get_entry_map(group) + + +def get_entry_info(dist, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return get_distribution(dist).get_entry_info(group, name) + + +class IMetadataProvider: + def has_metadata(name): + """Does the package's distribution contain the named metadata?""" + + def get_metadata(name): + """The named metadata resource as a string""" + + def get_metadata_lines(name): + """Yield named metadata resource as list of non-blank non-comment lines + + Leading and trailing whitespace is stripped from each line, and lines + with ``#`` as the first non-blank character are omitted.""" + + def metadata_isdir(name): + """Is the named metadata a directory? 
(like ``os.path.isdir()``)""" + + def metadata_listdir(name): + """List of metadata names in the directory (like ``os.listdir()``)""" + + def run_script(script_name, namespace): + """Execute the named script in the supplied namespace dictionary""" + + +class IResourceProvider(IMetadataProvider): + """An object that provides access to package resources""" + + def get_resource_filename(manager, resource_name): + """Return a true filesystem path for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_stream(manager, resource_name): + """Return a readable file-like object for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_string(manager, resource_name): + """Return a string containing the contents of `resource_name` + + `manager` must be an ``IResourceManager``""" + + def has_resource(resource_name): + """Does the package contain the named resource?""" + + def resource_isdir(resource_name): + """Is the named resource a directory? (like ``os.path.isdir()``)""" + + def resource_listdir(resource_name): + """List of resource names in the directory (like ``os.listdir()``)""" + + +class WorkingSet(object): + """A collection of active distributions on sys.path (or a similar list)""" + + def __init__(self, entries=None): + """Create working set from list of path entries (default=sys.path)""" + self.entries = [] + self.entry_keys = {} + self.by_key = {} + self.callbacks = [] + + if entries is None: + entries = sys.path + + for entry in entries: + self.add_entry(entry) + + @classmethod + def _build_master(cls): + """ + Prepare the master working set. + """ + ws = cls() + try: + from __main__ import __requires__ + except ImportError: + # The main program does not list any requirements + return ws + + # ensure the requirements are met + try: + ws.require(__requires__) + except VersionConflict: + return cls._build_from_requirements(__requires__) + + return ws + + @classmethod + def _build_from_requirements(cls, req_spec): + """ + Build a working set from a requirement spec. Rewrites sys.path. + """ + # try it without defaults already on sys.path + # by starting with an empty path + ws = cls([]) + reqs = parse_requirements(req_spec) + dists = ws.resolve(reqs, Environment()) + for dist in dists: + ws.add(dist) + + # add any missing entries from sys.path + for entry in sys.path: + if entry not in ws.entries: + ws.add_entry(entry) + + # then copy back to sys.path + sys.path[:] = ws.entries + return ws + + def add_entry(self, entry): + """Add a path item to ``.entries``, finding any distributions on it + + ``find_distributions(entry, True)`` is used to find distributions + corresponding to the path entry, and they are added. `entry` is + always appended to ``.entries``, even if it is already present. + (This is because ``sys.path`` can contain the same value more than + once, and the ``.entries`` of the ``sys.path`` WorkingSet should always + equal ``sys.path``.) + """ + self.entry_keys.setdefault(entry, []) + self.entries.append(entry) + for dist in find_distributions(entry, True): + self.add(dist, entry, False) + + def __contains__(self, dist): + """True if `dist` is the active distribution for its project""" + return self.by_key.get(dist.key) == dist + + def find(self, req): + """Find a distribution matching requirement `req` + + If there is an active distribution for the requested project, this + returns it as long as it meets the version requirement specified by + `req`. 
But, if there is an active distribution for the project and it + does *not* meet the `req` requirement, ``VersionConflict`` is raised. + If there is no active distribution for the requested project, ``None`` + is returned. + """ + dist = self.by_key.get(req.key) + if dist is not None and dist not in req: + # XXX add more info + raise VersionConflict(dist, req) + return dist + + def iter_entry_points(self, group, name=None): + """Yield entry point objects from `group` matching `name` + + If `name` is None, yields all entry points in `group` from all + distributions in the working set, otherwise only ones matching + both `group` and `name` are yielded (in distribution order). + """ + for dist in self: + entries = dist.get_entry_map(group) + if name is None: + for ep in entries.values(): + yield ep + elif name in entries: + yield entries[name] + + def run_script(self, requires, script_name): + """Locate distribution for `requires` and run `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + self.require(requires)[0].run_script(script_name, ns) + + def __iter__(self): + """Yield distributions for non-duplicate projects in the working set + + The yield order is the order in which the items' path entries were + added to the working set. + """ + seen = {} + for item in self.entries: + if item not in self.entry_keys: + # workaround a cache issue + continue + + for key in self.entry_keys[item]: + if key not in seen: + seen[key] = 1 + yield self.by_key[key] + + def add(self, dist, entry=None, insert=True, replace=False): + """Add `dist` to working set, associated with `entry` + + If `entry` is unspecified, it defaults to the ``.location`` of `dist`. + On exit from this routine, `entry` is added to the end of the working + set's ``.entries`` (if it wasn't already present). + + `dist` is only added to the working set if it's for a project that + doesn't already have a distribution in the set, unless `replace=True`. + If it's added, any callbacks registered with the ``subscribe()`` method + will be called. + """ + if insert: + dist.insert_on(self.entries, entry, replace=replace) + + if entry is None: + entry = dist.location + keys = self.entry_keys.setdefault(entry, []) + keys2 = self.entry_keys.setdefault(dist.location, []) + if not replace and dist.key in self.by_key: + # ignore hidden distros + return + + self.by_key[dist.key] = dist + if dist.key not in keys: + keys.append(dist.key) + if dist.key not in keys2: + keys2.append(dist.key) + self._added_new(dist) + + def resolve(self, requirements, env=None, installer=None, + replace_conflicting=False, extras=None): + """List all distributions needed to (recursively) meet `requirements` + + `requirements` must be a sequence of ``Requirement`` objects. `env`, + if supplied, should be an ``Environment`` instance. If + not supplied, it defaults to all distributions available within any + entry or distribution in the working set. `installer`, if supplied, + will be invoked with each requirement that cannot be met by an + already-installed distribution; it should return a ``Distribution`` or + ``None``. + + Unless `replace_conflicting=True`, raises a VersionConflict exception if + any requirements are found on the path that have the correct name but + the wrong version. Otherwise, if an `installer` is supplied it will be + invoked to obtain the correct version of the requirement and activate + it. + + `extras` is a list of the extras to be used with these requirements. 
+ This is important because extra requirements may look like `my_req; + extra = "my_extra"`, which would otherwise be interpreted as a purely + optional requirement. Instead, we want to be able to assert that these + requirements are truly required. + """ + + # set up the stack + requirements = list(requirements)[::-1] + # set of processed requirements + processed = {} + # key -> dist + best = {} + to_activate = [] + + req_extras = _ReqExtras() + + # Mapping of requirement to set of distributions that required it; + # useful for reporting info about conflicts. + required_by = collections.defaultdict(set) + + while requirements: + # process dependencies breadth-first + req = requirements.pop(0) + if req in processed: + # Ignore cyclic or redundant dependencies + continue + + if not req_extras.markers_pass(req, extras): + continue + + dist = best.get(req.key) + if dist is None: + # Find the best distribution and add it to the map + dist = self.by_key.get(req.key) + if dist is None or (dist not in req and replace_conflicting): + ws = self + if env is None: + if dist is None: + env = Environment(self.entries) + else: + # Use an empty environment and workingset to avoid + # any further conflicts with the conflicting + # distribution + env = Environment([]) + ws = WorkingSet([]) + dist = best[req.key] = env.best_match( + req, ws, installer, + replace_conflicting=replace_conflicting + ) + if dist is None: + requirers = required_by.get(req, None) + raise DistributionNotFound(req, requirers) + to_activate.append(dist) + if dist not in req: + # Oops, the "best" so far conflicts with a dependency + dependent_req = required_by[req] + raise VersionConflict(dist, req).with_context(dependent_req) + + # push the new requirements onto the stack + new_requirements = dist.requires(req.extras)[::-1] + requirements.extend(new_requirements) + + # Register the new requirements needed by req + for new_requirement in new_requirements: + required_by[new_requirement].add(req.project_name) + req_extras[new_requirement] = req.extras + + processed[req] = True + + # return list of distros to activate + return to_activate + + def find_plugins(self, plugin_env, full_env=None, installer=None, + fallback=True): + """Find all activatable distributions in `plugin_env` + + Example usage:: + + distributions, errors = working_set.find_plugins( + Environment(plugin_dirlist) + ) + # add plugins+libs to sys.path + map(working_set.add, distributions) + # display errors + print('Could not load', errors) + + The `plugin_env` should be an ``Environment`` instance that contains + only distributions that are in the project's "plugin directory" or + directories. The `full_env`, if supplied, should be an ``Environment`` + contains all currently-available distributions. If `full_env` is not + supplied, one is created automatically from the ``WorkingSet`` this + method is called on, which will typically mean that every directory on + ``sys.path`` will be scanned for distributions. + + `installer` is a standard installer callback as used by the + ``resolve()`` method. The `fallback` flag indicates whether we should + attempt to resolve older versions of a plugin if the newest version + cannot be resolved. + + This method returns a 2-tuple: (`distributions`, `error_info`), where + `distributions` is a list of the distributions found in `plugin_env` + that were loadable, along with any other distributions that are needed + to resolve their dependencies. 
`error_info` is a dictionary mapping + unloadable plugin distributions to an exception instance describing the + error that occurred. Usually this will be a ``DistributionNotFound`` or + ``VersionConflict`` instance. + """ + + plugin_projects = list(plugin_env) + # scan project names in alphabetic order + plugin_projects.sort() + + error_info = {} + distributions = {} + + if full_env is None: + env = Environment(self.entries) + env += plugin_env + else: + env = full_env + plugin_env + + shadow_set = self.__class__([]) + # put all our entries in shadow_set + list(map(shadow_set.add, self)) + + for project_name in plugin_projects: + + for dist in plugin_env[project_name]: + + req = [dist.as_requirement()] + + try: + resolvees = shadow_set.resolve(req, env, installer) + + except ResolutionError as v: + # save error info + error_info[dist] = v + if fallback: + # try the next older version of project + continue + else: + # give up on this project, keep going + break + + else: + list(map(shadow_set.add, resolvees)) + distributions.update(dict.fromkeys(resolvees)) + + # success, no need to try any more versions of this project + break + + distributions = list(distributions) + distributions.sort() + + return distributions, error_info + + def require(self, *requirements): + """Ensure that distributions matching `requirements` are activated + + `requirements` must be a string or a (possibly-nested) sequence + thereof, specifying the distributions and versions required. The + return value is a sequence of the distributions that needed to be + activated to fulfill the requirements; all relevant distributions are + included, even if they were already activated in this working set. + """ + needed = self.resolve(parse_requirements(requirements)) + + for dist in needed: + self.add(dist) + + return needed + + def subscribe(self, callback, existing=True): + """Invoke `callback` for all distributions + + If `existing=True` (default), + call on all existing ones, as well. + """ + if callback in self.callbacks: + return + self.callbacks.append(callback) + if not existing: + return + for dist in self: + callback(dist) + + def _added_new(self, dist): + for callback in self.callbacks: + callback(dist) + + def __getstate__(self): + return ( + self.entries[:], self.entry_keys.copy(), self.by_key.copy(), + self.callbacks[:] + ) + + def __setstate__(self, e_k_b_c): + entries, keys, by_key, callbacks = e_k_b_c + self.entries = entries[:] + self.entry_keys = keys.copy() + self.by_key = by_key.copy() + self.callbacks = callbacks[:] + + +class _ReqExtras(dict): + """ + Map each requirement to the extras that demanded it. + """ + + def markers_pass(self, req, extras=None): + """ + Evaluate markers for req against each extra that + demanded it. + + Return False if the req has a marker and fails + evaluation. Otherwise, return True. + """ + extra_evals = ( + req.marker.evaluate({'extra': extra}) + for extra in self.get(req, ()) + (extras or (None,)) + ) + return not req.marker or any(extra_evals) + + +class Environment(object): + """Searchable snapshot of distributions on a search path""" + + def __init__(self, search_path=None, platform=get_supported_platform(), + python=PY_MAJOR): + """Snapshot distributions available on a search path + + Any distributions found on `search_path` are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. 
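+
+        A construction sketch (the ``plugins`` path is hypothetical)::
+
+            env = Environment(['plugins'])       # scan only ./plugins
+            env = Environment([], platform=None,
+                              python=None)       # accept everything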
+ + `platform` is an optional string specifying the name of the platform + that platform-specific distributions must be compatible with. If + unspecified, it defaults to the current platform. `python` is an + optional string naming the desired version of Python (e.g. ``'3.3'``); + it defaults to the current version. + + You may explicitly set `platform` (and/or `python`) to ``None`` if you + wish to map *all* distributions, not just those compatible with the + running platform or Python version. + """ + self._distmap = {} + self.platform = platform + self.python = python + self.scan(search_path) + + def can_add(self, dist): + """Is distribution `dist` acceptable for this environment? + + The distribution must match the platform and python version + requirements specified when this environment was created, or False + is returned. + """ + return (self.python is None or dist.py_version is None + or dist.py_version == self.python) \ + and compatible_platforms(dist.platform, self.platform) + + def remove(self, dist): + """Remove `dist` from the environment""" + self._distmap[dist.key].remove(dist) + + def scan(self, search_path=None): + """Scan `search_path` for distributions usable in this environment + + Any distributions found are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. Only distributions conforming to + the platform/python version defined at initialization are added. + """ + if search_path is None: + search_path = sys.path + + for item in search_path: + for dist in find_distributions(item): + self.add(dist) + + def __getitem__(self, project_name): + """Return a newest-to-oldest list of distributions for `project_name` + + Uses case-insensitive `project_name` comparison, assuming all the + project's distributions use their project's name converted to all + lowercase as their key. + + """ + distribution_key = project_name.lower() + return self._distmap.get(distribution_key, []) + + def add(self, dist): + """Add `dist` if we ``can_add()`` it and it has not already been added + """ + if self.can_add(dist) and dist.has_version(): + dists = self._distmap.setdefault(dist.key, []) + if dist not in dists: + dists.append(dist) + dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) + + def best_match(self, req, working_set, installer=None, replace_conflicting=False): + """Find distribution best matching `req` and usable on `working_set` + + This calls the ``find(req)`` method of the `working_set` to see if a + suitable distribution is already active. (This may raise + ``VersionConflict`` if an unsuitable version of the project is already + active in the specified `working_set`.) If a suitable distribution + isn't active, this method returns the newest distribution in the + environment that meets the ``Requirement`` in `req`. If no suitable + distribution is found, and `installer` is supplied, then the result of + calling the environment's ``obtain(req, installer)`` method will be + returned. + """ + try: + dist = working_set.find(req) + except VersionConflict: + if not replace_conflicting: + raise + dist = None + if dist is not None: + return dist + for dist in self[req.key]: + if dist in req: + return dist + # try to download/install + return self.obtain(req, installer) + + def obtain(self, requirement, installer=None): + """Obtain a distribution matching `requirement` (e.g. via download) + + Obtain a distro that matches requirement (e.g. via download). 
In the + base ``Environment`` class, this routine just returns + ``installer(requirement)``, unless `installer` is None, in which case + None is returned instead. This method is a hook that allows subclasses + to attempt other ways of obtaining a distribution before falling back + to the `installer` argument.""" + if installer is not None: + return installer(requirement) + + def __iter__(self): + """Yield the unique project names of the available distributions""" + for key in self._distmap.keys(): + if self[key]: + yield key + + def __iadd__(self, other): + """In-place addition of a distribution or environment""" + if isinstance(other, Distribution): + self.add(other) + elif isinstance(other, Environment): + for project in other: + for dist in other[project]: + self.add(dist) + else: + raise TypeError("Can't add %r to environment" % (other,)) + return self + + def __add__(self, other): + """Add an environment or distribution to an environment""" + new = self.__class__([], platform=None, python=None) + for env in self, other: + new += env + return new + + +# XXX backward compatibility +AvailableDistributions = Environment + + +class ExtractionError(RuntimeError): + """An error occurred extracting a resource + + The following attributes are available from instances of this exception: + + manager + The resource manager that raised this exception + + cache_path + The base directory for resource extraction + + original_error + The exception instance that caused extraction to fail + """ + + +class ResourceManager: + """Manage resource extraction and packages""" + extraction_path = None + + def __init__(self): + self.cached_files = {} + + def resource_exists(self, package_or_requirement, resource_name): + """Does the named resource exist?""" + return get_provider(package_or_requirement).has_resource(resource_name) + + def resource_isdir(self, package_or_requirement, resource_name): + """Is the named resource an existing directory?""" + return get_provider(package_or_requirement).resource_isdir( + resource_name + ) + + def resource_filename(self, package_or_requirement, resource_name): + """Return a true filesystem path for specified resource""" + return get_provider(package_or_requirement).get_resource_filename( + self, resource_name + ) + + def resource_stream(self, package_or_requirement, resource_name): + """Return a readable file-like object for specified resource""" + return get_provider(package_or_requirement).get_resource_stream( + self, resource_name + ) + + def resource_string(self, package_or_requirement, resource_name): + """Return specified resource as a string""" + return get_provider(package_or_requirement).get_resource_string( + self, resource_name + ) + + def resource_listdir(self, package_or_requirement, resource_name): + """List the contents of the named resource directory""" + return get_provider(package_or_requirement).resource_listdir( + resource_name + ) + + def extraction_error(self): + """Give an error message for problems extracting file(s)""" + + old_exc = sys.exc_info()[1] + cache_path = self.extraction_path or get_default_cache() + + tmpl = textwrap.dedent(""" + Can't extract file(s) to egg cache + + The following error occurred while trying to extract file(s) to the Python egg + cache: + + {old_exc} + + The Python egg cache directory is currently set to: + + {cache_path} + + Perhaps your account does not have write access to this directory? You can + change the cache directory by setting the PYTHON_EGG_CACHE environment + variable to point to an accessible directory. 
+ """).lstrip() + err = ExtractionError(tmpl.format(**locals())) + err.manager = self + err.cache_path = cache_path + err.original_error = old_exc + raise err + + def get_cache_path(self, archive_name, names=()): + """Return absolute location in cache for `archive_name` and `names` + + The parent directory of the resulting path will be created if it does + not already exist. `archive_name` should be the base filename of the + enclosing egg (which may not be the name of the enclosing zipfile!), + including its ".egg" extension. `names`, if provided, should be a + sequence of path name parts "under" the egg's extraction location. + + This method should only be called by resource providers that need to + obtain an extraction location, and only for names they intend to + extract, as it tracks the generated names for possible cleanup later. + """ + extract_path = self.extraction_path or get_default_cache() + target_path = os.path.join(extract_path, archive_name + '-tmp', *names) + try: + _bypass_ensure_directory(target_path) + except: + self.extraction_error() + + self._warn_unsafe_extraction_path(extract_path) + + self.cached_files[target_path] = 1 + return target_path + + @staticmethod + def _warn_unsafe_extraction_path(path): + """ + If the default extraction path is overridden and set to an insecure + location, such as /tmp, it opens up an opportunity for an attacker to + replace an extracted file with an unauthorized payload. Warn the user + if a known insecure location is used. + + See Distribute #375 for more details. + """ + if os.name == 'nt' and not path.startswith(os.environ['windir']): + # On Windows, permissions are generally restrictive by default + # and temp directories are not writable by other users, so + # bypass the warning. + return + mode = os.stat(path).st_mode + if mode & stat.S_IWOTH or mode & stat.S_IWGRP: + msg = ("%s is writable by group/others and vulnerable to attack " + "when " + "used with get_resource_filename. Consider a more secure " + "location (set with .set_extraction_path or the " + "PYTHON_EGG_CACHE environment variable)." % path) + warnings.warn(msg, UserWarning) + + def postprocess(self, tempname, filename): + """Perform any platform-specific postprocessing of `tempname` + + This is where Mac header rewrites should be done; other platforms don't + have anything special they should do. + + Resource providers should call this method ONLY after successfully + extracting a compressed resource. They must NOT call it on resources + that are already in the filesystem. + + `tempname` is the current (temporary) name of the file, and `filename` + is the name it will be renamed to by the caller after this routine + returns. + """ + + if os.name == 'posix': + # Make the resource executable + mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 + os.chmod(tempname, mode) + + def set_extraction_path(self, path): + """Set the base path where resources will be extracted to, if needed. + + If you do not call this routine before any extractions take place, the + path defaults to the return value of ``get_default_cache()``. (Which + is based on the ``PYTHON_EGG_CACHE`` environment variable, with various + platform-specific fallbacks. See that routine's documentation for more + details.) + + Resources are extracted to subdirectories of this path based upon + information given by the ``IResourceProvider``. You may set this to a + temporary directory, but then you must call ``cleanup_resources()`` to + delete the extracted files when done. 
There is no guarantee that + ``cleanup_resources()`` will be able to remove all extracted files. + + (Note: you may not change the extraction path for a given resource + manager once resources have been extracted, unless you first call + ``cleanup_resources()``.) + """ + if self.cached_files: + raise ValueError( + "Can't change extraction path, files already extracted" + ) + + self.extraction_path = path + + def cleanup_resources(self, force=False): + """ + Delete all extracted resource files and directories, returning a list + of the file and directory names that could not be successfully removed. + This function does not have any concurrency protection, so it should + generally only be called when the extraction path is a temporary + directory exclusive to a single process. This method is not + automatically called; you must call it explicitly or register it as an + ``atexit`` function if you wish to ensure cleanup of a temporary + directory used for extractions. + """ + # XXX + + +def get_default_cache(): + """ + Return the ``PYTHON_EGG_CACHE`` environment variable + or a platform-relevant user cache dir for an app + named "Python-Eggs". + """ + return ( + os.environ.get('PYTHON_EGG_CACHE') + or appdirs.user_cache_dir(appname='Python-Eggs') + ) + + +def safe_name(name): + """Convert an arbitrary string to a standard distribution name + + Any runs of non-alphanumeric/. characters are replaced with a single '-'. + """ + return re.sub('[^A-Za-z0-9.]+', '-', name) + + +def safe_version(version): + """ + Convert an arbitrary string to a standard version string + """ + try: + # normalize the version + return str(packaging.version.Version(version)) + except packaging.version.InvalidVersion: + version = version.replace(' ', '.') + return re.sub('[^A-Za-z0-9.]+', '-', version) + + +def safe_extra(extra): + """Convert an arbitrary string to a standard 'extra' name + + Any runs of non-alphanumeric characters are replaced with a single '_', + and the result is always lowercased. + """ + return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower() + + +def to_filename(name): + """Convert a project or version name to its filename-escaped form + + Any '-' characters are currently replaced with '_'. + """ + return name.replace('-', '_') + + +def invalid_marker(text): + """ + Validate text as a PEP 508 environment marker; return an exception + if invalid or False otherwise. + """ + try: + evaluate_marker(text) + except SyntaxError as e: + e.filename = None + e.lineno = None + return e + return False + + +def evaluate_marker(text, extra=None): + """ + Evaluate a PEP 508 environment marker. + Return a boolean indicating the marker result in this environment. + Raise SyntaxError if marker is invalid. + + This implementation uses the 'pyparsing' module. 
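+
+    For illustration, with an arbitrary (but valid) marker:
+
+    >>> evaluate_marker('python_version >= "1.0"')
+    True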
+ """ + try: + marker = packaging.markers.Marker(text) + return marker.evaluate() + except packaging.markers.InvalidMarker as e: + raise SyntaxError(e) + + +class NullProvider: + """Try to implement resources and metadata for arbitrary PEP 302 loaders""" + + egg_name = None + egg_info = None + loader = None + + def __init__(self, module): + self.loader = getattr(module, '__loader__', None) + self.module_path = os.path.dirname(getattr(module, '__file__', '')) + + def get_resource_filename(self, manager, resource_name): + return self._fn(self.module_path, resource_name) + + def get_resource_stream(self, manager, resource_name): + return io.BytesIO(self.get_resource_string(manager, resource_name)) + + def get_resource_string(self, manager, resource_name): + return self._get(self._fn(self.module_path, resource_name)) + + def has_resource(self, resource_name): + return self._has(self._fn(self.module_path, resource_name)) + + def has_metadata(self, name): + return self.egg_info and self._has(self._fn(self.egg_info, name)) + + def get_metadata(self, name): + if not self.egg_info: + return "" + value = self._get(self._fn(self.egg_info, name)) + return value.decode('utf-8') if six.PY3 else value + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + def resource_isdir(self, resource_name): + return self._isdir(self._fn(self.module_path, resource_name)) + + def metadata_isdir(self, name): + return self.egg_info and self._isdir(self._fn(self.egg_info, name)) + + def resource_listdir(self, resource_name): + return self._listdir(self._fn(self.module_path, resource_name)) + + def metadata_listdir(self, name): + if self.egg_info: + return self._listdir(self._fn(self.egg_info, name)) + return [] + + def run_script(self, script_name, namespace): + script = 'scripts/' + script_name + if not self.has_metadata(script): + raise ResolutionError("No script named %r" % script_name) + script_text = self.get_metadata(script).replace('\r\n', '\n') + script_text = script_text.replace('\r', '\n') + script_filename = self._fn(self.egg_info, script) + namespace['__file__'] = script_filename + if os.path.exists(script_filename): + source = open(script_filename).read() + code = compile(source, script_filename, 'exec') + exec(code, namespace, namespace) + else: + from linecache import cache + cache[script_filename] = ( + len(script_text), 0, script_text.split('\n'), script_filename + ) + script_code = compile(script_text, script_filename, 'exec') + exec(script_code, namespace, namespace) + + def _has(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _isdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _listdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _fn(self, base, resource_name): + if resource_name: + return os.path.join(base, *resource_name.split('/')) + return base + + def _get(self, path): + if hasattr(self.loader, 'get_data'): + return self.loader.get_data(path) + raise NotImplementedError( + "Can't perform this operation for loaders without 'get_data()'" + ) + + +register_loader_type(object, NullProvider) + + +class EggProvider(NullProvider): + """Provider based on a virtual filesystem""" + + def __init__(self, module): + NullProvider.__init__(self, module) + self._setup_prefix() + + def _setup_prefix(self): + # we assume here that our metadata may be nested inside a 
"basket" + # of multiple eggs; that's why we use module_path instead of .archive + path = self.module_path + old = None + while path != old: + if _is_egg_path(path): + self.egg_name = os.path.basename(path) + self.egg_info = os.path.join(path, 'EGG-INFO') + self.egg_root = path + break + old = path + path, base = os.path.split(path) + + +class DefaultProvider(EggProvider): + """Provides access to package resources in the filesystem""" + + def _has(self, path): + return os.path.exists(path) + + def _isdir(self, path): + return os.path.isdir(path) + + def _listdir(self, path): + return os.listdir(path) + + def get_resource_stream(self, manager, resource_name): + return open(self._fn(self.module_path, resource_name), 'rb') + + def _get(self, path): + with open(path, 'rb') as stream: + return stream.read() + + @classmethod + def _register(cls): + loader_cls = getattr(importlib_machinery, 'SourceFileLoader', + type(None)) + register_loader_type(loader_cls, cls) + + +DefaultProvider._register() + + +class EmptyProvider(NullProvider): + """Provider that returns nothing for all requests""" + + _isdir = _has = lambda self, path: False + _get = lambda self, path: '' + _listdir = lambda self, path: [] + module_path = None + + def __init__(self): + pass + + +empty_provider = EmptyProvider() + + +class ZipManifests(dict): + """ + zip manifest builder + """ + + @classmethod + def build(cls, path): + """ + Build a dictionary similar to the zipimport directory + caches, except instead of tuples, store ZipInfo objects. + + Use a platform-specific path separator (os.sep) for the path keys + for compatibility with pypy on Windows. + """ + with ContextualZipFile(path) as zfile: + items = ( + ( + name.replace('/', os.sep), + zfile.getinfo(name), + ) + for name in zfile.namelist() + ) + return dict(items) + + load = build + + +class MemoizedZipManifests(ZipManifests): + """ + Memoized zipfile manifests. + """ + manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') + + def load(self, path): + """ + Load a manifest at path or return a suitable manifest already loaded. + """ + path = os.path.normpath(path) + mtime = os.stat(path).st_mtime + + if path not in self or self[path].mtime != mtime: + manifest = self.build(path) + self[path] = self.manifest_mod(manifest, mtime) + + return self[path].manifest + + +class ContextualZipFile(zipfile.ZipFile): + """ + Supplement ZipFile class to support context manager for Python 2.6 + """ + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + def __new__(cls, *args, **kwargs): + """ + Construct a ZipFile or ContextualZipFile as appropriate + """ + if hasattr(zipfile.ZipFile, '__exit__'): + return zipfile.ZipFile(*args, **kwargs) + return super(ContextualZipFile, cls).__new__(cls) + + +class ZipProvider(EggProvider): + """Resource support for zips and eggs""" + + eagers = None + _zip_manifests = MemoizedZipManifests() + + def __init__(self, module): + EggProvider.__init__(self, module) + self.zip_pre = self.loader.archive + os.sep + + def _zipinfo_name(self, fspath): + # Convert a virtual filename (full path to file) into a zipfile subpath + # usable with the zipimport directory cache for our target archive + if fspath.startswith(self.zip_pre): + return fspath[len(self.zip_pre):] + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.zip_pre) + ) + + def _parts(self, zip_path): + # Convert a zipfile subpath into an egg-relative path part list. 
+ # pseudo-fs path + fspath = self.zip_pre + zip_path + if fspath.startswith(self.egg_root + os.sep): + return fspath[len(self.egg_root) + 1:].split(os.sep) + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.egg_root) + ) + + @property + def zipinfo(self): + return self._zip_manifests.load(self.loader.archive) + + def get_resource_filename(self, manager, resource_name): + if not self.egg_name: + raise NotImplementedError( + "resource_filename() only supported for .egg, not .zip" + ) + # no need to lock for extraction, since we use temp names + zip_path = self._resource_to_zip(resource_name) + eagers = self._get_eager_resources() + if '/'.join(self._parts(zip_path)) in eagers: + for name in eagers: + self._extract_resource(manager, self._eager_to_zip(name)) + return self._extract_resource(manager, zip_path) + + @staticmethod + def _get_date_and_size(zip_stat): + size = zip_stat.file_size + # ymdhms+wday, yday, dst + date_time = zip_stat.date_time + (0, 0, -1) + # 1980 offset already done + timestamp = time.mktime(date_time) + return timestamp, size + + def _extract_resource(self, manager, zip_path): + + if zip_path in self._index(): + for name in self._index()[zip_path]: + last = self._extract_resource( + manager, os.path.join(zip_path, name) + ) + # return the extracted directory name + return os.path.dirname(last) + + timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) + + if not WRITE_SUPPORT: + raise IOError('"os.rename" and "os.unlink" are not supported ' + 'on this platform') + try: + + real_path = manager.get_cache_path( + self.egg_name, self._parts(zip_path) + ) + + if self._is_current(real_path, zip_path): + return real_path + + outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) + os.write(outf, self.loader.get_data(zip_path)) + os.close(outf) + utime(tmpnam, (timestamp, timestamp)) + manager.postprocess(tmpnam, real_path) + + try: + rename(tmpnam, real_path) + + except os.error: + if os.path.isfile(real_path): + if self._is_current(real_path, zip_path): + # the file became current since it was checked above, + # so proceed. 
+ return real_path + # Windows, del old file and retry + elif os.name == 'nt': + unlink(real_path) + rename(tmpnam, real_path) + return real_path + raise + + except os.error: + # report a user-friendly error + manager.extraction_error() + + return real_path + + def _is_current(self, file_path, zip_path): + """ + Return True if the file_path is current for this zip_path + """ + timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) + if not os.path.isfile(file_path): + return False + stat = os.stat(file_path) + if stat.st_size != size or stat.st_mtime != timestamp: + return False + # check that the contents match + zip_contents = self.loader.get_data(zip_path) + with open(file_path, 'rb') as f: + file_contents = f.read() + return zip_contents == file_contents + + def _get_eager_resources(self): + if self.eagers is None: + eagers = [] + for name in ('native_libs.txt', 'eager_resources.txt'): + if self.has_metadata(name): + eagers.extend(self.get_metadata_lines(name)) + self.eagers = eagers + return self.eagers + + def _index(self): + try: + return self._dirindex + except AttributeError: + ind = {} + for path in self.zipinfo: + parts = path.split(os.sep) + while parts: + parent = os.sep.join(parts[:-1]) + if parent in ind: + ind[parent].append(parts[-1]) + break + else: + ind[parent] = [parts.pop()] + self._dirindex = ind + return ind + + def _has(self, fspath): + zip_path = self._zipinfo_name(fspath) + return zip_path in self.zipinfo or zip_path in self._index() + + def _isdir(self, fspath): + return self._zipinfo_name(fspath) in self._index() + + def _listdir(self, fspath): + return list(self._index().get(self._zipinfo_name(fspath), ())) + + def _eager_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.egg_root, resource_name)) + + def _resource_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.module_path, resource_name)) + + +register_loader_type(zipimport.zipimporter, ZipProvider) + + +class FileMetadata(EmptyProvider): + """Metadata handler for standalone PKG-INFO files + + Usage:: + + metadata = FileMetadata("/path/to/PKG-INFO") + + This provider rejects all data and metadata requests except for PKG-INFO, + which is treated as existing, and will be the contents of the file at + the provided location. 
+ """ + + def __init__(self, path): + self.path = path + + def has_metadata(self, name): + return name == 'PKG-INFO' and os.path.isfile(self.path) + + def get_metadata(self, name): + if name != 'PKG-INFO': + raise KeyError("No metadata except PKG-INFO is available") + + with io.open(self.path, encoding='utf-8', errors="replace") as f: + metadata = f.read() + self._warn_on_replacement(metadata) + return metadata + + def _warn_on_replacement(self, metadata): + # Python 2.6 and 3.2 compat for: replacement_char = '๏ฟฝ' + replacement_char = b'\xef\xbf\xbd'.decode('utf-8') + if replacement_char in metadata: + tmpl = "{self.path} could not be properly decoded in UTF-8" + msg = tmpl.format(**locals()) + warnings.warn(msg) + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + +class PathMetadata(DefaultProvider): + """Metadata provider for egg directories + + Usage:: + + # Development eggs: + + egg_info = "/path/to/PackageName.egg-info" + base_dir = os.path.dirname(egg_info) + metadata = PathMetadata(base_dir, egg_info) + dist_name = os.path.splitext(os.path.basename(egg_info))[0] + dist = Distribution(basedir, project_name=dist_name, metadata=metadata) + + # Unpacked egg directories: + + egg_path = "/path/to/PackageName-ver-pyver-etc.egg" + metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) + dist = Distribution.from_filename(egg_path, metadata=metadata) + """ + + def __init__(self, path, egg_info): + self.module_path = path + self.egg_info = egg_info + + +class EggMetadata(ZipProvider): + """Metadata provider for .egg files""" + + def __init__(self, importer): + """Create a metadata provider from a zipimporter""" + + self.zip_pre = importer.archive + os.sep + self.loader = importer + if importer.prefix: + self.module_path = os.path.join(importer.archive, importer.prefix) + else: + self.module_path = importer.archive + self._setup_prefix() + + +_declare_state('dict', _distribution_finders={}) + + +def register_finder(importer_type, distribution_finder): + """Register `distribution_finder` to find distributions in sys.path items + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `distribution_finder` is a callable that, passed a path + item and the importer instance, yields ``Distribution`` instances found on + that path item. See ``pkg_resources.find_on_path`` for an example.""" + _distribution_finders[importer_type] = distribution_finder + + +def find_distributions(path_item, only=False): + """Yield distributions accessible via `path_item`""" + importer = get_importer(path_item) + finder = _find_adapter(_distribution_finders, importer) + return finder(importer, path_item, only) + + +def find_eggs_in_zip(importer, path_item, only=False): + """ + Find eggs in zip files; possibly multiple nested eggs. 
+ """ + if importer.archive.endswith('.whl'): + # wheels are not supported with this finder + # they don't have PKG-INFO metadata, and won't ever contain eggs + return + metadata = EggMetadata(importer) + if metadata.has_metadata('PKG-INFO'): + yield Distribution.from_filename(path_item, metadata=metadata) + if only: + # don't yield nested distros + return + for subitem in metadata.resource_listdir('/'): + if _is_egg_path(subitem): + subpath = os.path.join(path_item, subitem) + for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath): + yield dist + elif subitem.lower().endswith('.dist-info'): + subpath = os.path.join(path_item, subitem) + submeta = EggMetadata(zipimport.zipimporter(subpath)) + submeta.egg_info = subpath + yield Distribution.from_location(path_item, subitem, submeta) + + + +register_finder(zipimport.zipimporter, find_eggs_in_zip) + + +def find_nothing(importer, path_item, only=False): + return () + + +register_finder(object, find_nothing) + + +def _by_version_descending(names): + """ + Given a list of filenames, return them in descending order + by version number. + + >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' + >>> _by_version_descending(names) + ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] + >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' + >>> _by_version_descending(names) + ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] + >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' + >>> _by_version_descending(names) + ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] + """ + def _by_version(name): + """ + Parse each component of the filename + """ + name, ext = os.path.splitext(name) + parts = itertools.chain(name.split('-'), [ext]) + return [packaging.version.parse(part) for part in parts] + + return sorted(names, key=_by_version, reverse=True) + + +def find_on_path(importer, path_item, only=False): + """Yield distributions accessible on a sys.path directory""" + path_item = _normalize_cached(path_item) + + if _is_unpacked_egg(path_item): + yield Distribution.from_filename( + path_item, metadata=PathMetadata( + path_item, os.path.join(path_item, 'EGG-INFO') + ) + ) + return + + entries = safe_listdir(path_item) + + # for performance, before sorting by version, + # screen entries for only those that will yield + # distributions + filtered = ( + entry + for entry in entries + if dist_factory(path_item, entry, only) + ) + + # scan for .egg and .egg-info in directory + path_item_entries = _by_version_descending(filtered) + for entry in path_item_entries: + fullpath = os.path.join(path_item, entry) + factory = dist_factory(path_item, entry, only) + for dist in factory(fullpath): + yield dist + + +def dist_factory(path_item, entry, only): + """ + Return a dist_factory for a path_item and entry + """ + lower = entry.lower() + is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info'))) + return ( + distributions_from_metadata + if is_meta else + find_distributions + if not only and _is_egg_path(entry) else + resolve_egg_link + if not only and lower.endswith('.egg-link') else + NoDists() + ) + + +class NoDists: + """ + >>> bool(NoDists()) + False + + >>> list(NoDists()('anything')) + [] + """ + def __bool__(self): + return False + if six.PY2: + __nonzero__ = __bool__ + + def __call__(self, fullpath): + return iter(()) + + +def safe_listdir(path): + """ + Attempt to list contents of path, but suppress some exceptions. 
+ """ + try: + return os.listdir(path) + except (PermissionError, NotADirectoryError): + pass + except OSError as e: + # Ignore the directory if does not exist, not a directory or + # permission denied + ignorable = ( + e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT) + # Python 2 on Windows needs to be handled this way :( + or getattr(e, "winerror", None) == 267 + ) + if not ignorable: + raise + return () + + +def distributions_from_metadata(path): + root = os.path.dirname(path) + if os.path.isdir(path): + if len(os.listdir(path)) == 0: + # empty metadata dir; skip + return + metadata = PathMetadata(root, path) + else: + metadata = FileMetadata(path) + entry = os.path.basename(path) + yield Distribution.from_location( + root, entry, metadata, precedence=DEVELOP_DIST, + ) + + +def non_empty_lines(path): + """ + Yield non-empty lines from file at path + """ + with open(path) as f: + for line in f: + line = line.strip() + if line: + yield line + + +def resolve_egg_link(path): + """ + Given a path to an .egg-link, resolve distributions + present in the referenced path. + """ + referenced_paths = non_empty_lines(path) + resolved_paths = ( + os.path.join(os.path.dirname(path), ref) + for ref in referenced_paths + ) + dist_groups = map(find_distributions, resolved_paths) + return next(dist_groups, ()) + + +register_finder(pkgutil.ImpImporter, find_on_path) + +if hasattr(importlib_machinery, 'FileFinder'): + register_finder(importlib_machinery.FileFinder, find_on_path) + +_declare_state('dict', _namespace_handlers={}) +_declare_state('dict', _namespace_packages={}) + + +def register_namespace_handler(importer_type, namespace_handler): + """Register `namespace_handler` to declare namespace packages + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `namespace_handler` is a callable like this:: + + def namespace_handler(importer, path_entry, moduleName, module): + # return a path_entry to use for child packages + + Namespace handlers are only called if the importer object has already + agreed that it can handle the relevant path item, and they should only + return a subpath if the module __path__ does not already contain an + equivalent subpath. For an example namespace handler, see + ``pkg_resources.file_ns_handler``. + """ + _namespace_handlers[importer_type] = namespace_handler + + +def _handle_ns(packageName, path_item): + """Ensure that named package includes a subpath of path_item (if needed)""" + + importer = get_importer(path_item) + if importer is None: + return None + loader = importer.find_module(packageName) + if loader is None: + return None + module = sys.modules.get(packageName) + if module is None: + module = sys.modules[packageName] = types.ModuleType(packageName) + module.__path__ = [] + _set_parent_ns(packageName) + elif not hasattr(module, '__path__'): + raise TypeError("Not a package:", packageName) + handler = _find_adapter(_namespace_handlers, importer) + subpath = handler(importer, path_item, packageName, module) + if subpath is not None: + path = module.__path__ + path.append(subpath) + loader.load_module(packageName) + _rebuild_mod_path(path, packageName, module) + return subpath + + +def _rebuild_mod_path(orig_path, package_name, module): + """ + Rebuild module.__path__ ensuring that all entries are ordered + corresponding to their sys.path order + """ + sys_path = [_normalize_cached(p) for p in sys.path] + + def safe_sys_path_index(entry): + """ + Workaround for #520 and #513. 
+ """ + try: + return sys_path.index(entry) + except ValueError: + return float('inf') + + def position_in_sys_path(path): + """ + Return the ordinal of the path based on its position in sys.path + """ + path_parts = path.split(os.sep) + module_parts = package_name.count('.') + 1 + parts = path_parts[:-module_parts] + return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) + + if not isinstance(orig_path, list): + # Is this behavior useful when module.__path__ is not a list? + return + + orig_path.sort(key=position_in_sys_path) + module.__path__[:] = [_normalize_cached(p) for p in orig_path] + + +def declare_namespace(packageName): + """Declare that package 'packageName' is a namespace package""" + + _imp.acquire_lock() + try: + if packageName in _namespace_packages: + return + + path, parent = sys.path, None + if '.' in packageName: + parent = '.'.join(packageName.split('.')[:-1]) + declare_namespace(parent) + if parent not in _namespace_packages: + __import__(parent) + try: + path = sys.modules[parent].__path__ + except AttributeError: + raise TypeError("Not a package:", parent) + + # Track what packages are namespaces, so when new path items are added, + # they can be updated + _namespace_packages.setdefault(parent, []).append(packageName) + _namespace_packages.setdefault(packageName, []) + + for path_item in path: + # Ensure all the parent's path items are reflected in the child, + # if they apply + _handle_ns(packageName, path_item) + + finally: + _imp.release_lock() + + +def fixup_namespace_packages(path_item, parent=None): + """Ensure that previously-declared namespace packages include path_item""" + _imp.acquire_lock() + try: + for package in _namespace_packages.get(parent, ()): + subpath = _handle_ns(package, path_item) + if subpath: + fixup_namespace_packages(subpath, package) + finally: + _imp.release_lock() + + +def file_ns_handler(importer, path_item, packageName, module): + """Compute an ns-package subpath for a filesystem or zipfile importer""" + + subpath = os.path.join(path_item, packageName.split('.')[-1]) + normalized = _normalize_cached(subpath) + for item in module.__path__: + if _normalize_cached(item) == normalized: + break + else: + # Only return the path if it's not already there + return subpath + + +register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) +register_namespace_handler(zipimport.zipimporter, file_ns_handler) + +if hasattr(importlib_machinery, 'FileFinder'): + register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) + + +def null_ns_handler(importer, path_item, packageName, module): + return None + + +register_namespace_handler(object, null_ns_handler) + + +def normalize_path(filename): + """Normalize a file/dir name for comparison purposes""" + return os.path.normcase(os.path.realpath(filename)) + + +def _normalize_cached(filename, _cache={}): + try: + return _cache[filename] + except KeyError: + _cache[filename] = result = normalize_path(filename) + return result + + +def _is_egg_path(path): + """ + Determine if given path appears to be an egg. + """ + return path.lower().endswith('.egg') + + +def _is_unpacked_egg(path): + """ + Determine if given path appears to be an unpacked egg. 
+ """ + return ( + _is_egg_path(path) and + os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')) + ) + + +def _set_parent_ns(packageName): + parts = packageName.split('.') + name = parts.pop() + if parts: + parent = '.'.join(parts) + setattr(sys.modules[parent], name, sys.modules[packageName]) + + +def yield_lines(strs): + """Yield non-empty/non-comment lines of a string or sequence""" + if isinstance(strs, six.string_types): + for s in strs.splitlines(): + s = s.strip() + # skip blank lines/comments + if s and not s.startswith('#'): + yield s + else: + for ss in strs: + for s in yield_lines(ss): + yield s + + +MODULE = re.compile(r"\w+(\.\w+)*$").match +EGG_NAME = re.compile( + r""" + (?P[^-]+) ( + -(?P[^-]+) ( + -py(?P[^-]+) ( + -(?P.+) + )? + )? + )? + """, + re.VERBOSE | re.IGNORECASE, +).match + + +class EntryPoint(object): + """Object representing an advertised importable object""" + + def __init__(self, name, module_name, attrs=(), extras=(), dist=None): + if not MODULE(module_name): + raise ValueError("Invalid module name", module_name) + self.name = name + self.module_name = module_name + self.attrs = tuple(attrs) + self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras + self.dist = dist + + def __str__(self): + s = "%s = %s" % (self.name, self.module_name) + if self.attrs: + s += ':' + '.'.join(self.attrs) + if self.extras: + s += ' [%s]' % ','.join(self.extras) + return s + + def __repr__(self): + return "EntryPoint.parse(%r)" % str(self) + + def load(self, require=True, *args, **kwargs): + """ + Require packages for this EntryPoint, then resolve it. + """ + if not require or args or kwargs: + warnings.warn( + "Parameters to load are deprecated. Call .resolve and " + ".require separately.", + DeprecationWarning, + stacklevel=2, + ) + if require: + self.require(*args, **kwargs) + return self.resolve() + + def resolve(self): + """ + Resolve the entry point from its module and attrs. + """ + module = __import__(self.module_name, fromlist=['__name__'], level=0) + try: + return functools.reduce(getattr, self.attrs, module) + except AttributeError as exc: + raise ImportError(str(exc)) + + def require(self, env=None, installer=None): + if self.extras and not self.dist: + raise UnknownExtra("Can't require() without a distribution", self) + + # Get the requirements for this entry point with all its extras and + # then resolve them. We have to pass `extras` along when resolving so + # that the working set knows what extras we want. Otherwise, for + # dist-info distributions, the working set will assume that the + # requirements for that extra are purely optional and skip over them. 
+        reqs = self.dist.requires(self.extras)
+        items = working_set.resolve(reqs, env, installer, extras=self.extras)
+        list(map(working_set.add, items))
+
+    pattern = re.compile(
+        r'\s*'
+        r'(?P<name>.+?)\s*'
+        r'=\s*'
+        r'(?P<module>[\w.]+)\s*'
+        r'(:\s*(?P<attr>[\w.]+))?\s*'
+        r'(?P<extras>\[.*\])?\s*$'
+    )
+
+    @classmethod
+    def parse(cls, src, dist=None):
+        """Parse a single entry point from string `src`
+
+        Entry point syntax follows the form::
+
+            name = some.module:some.attr [extra1, extra2]
+
+        The entry name and module name are required, but the ``:attrs`` and
+        ``[extras]`` parts are optional
+        """
+        m = cls.pattern.match(src)
+        if not m:
+            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
+            raise ValueError(msg, src)
+        res = m.groupdict()
+        extras = cls._parse_extras(res['extras'])
+        attrs = res['attr'].split('.') if res['attr'] else ()
+        return cls(res['name'], res['module'], attrs, extras, dist)
+
+    @classmethod
+    def _parse_extras(cls, extras_spec):
+        if not extras_spec:
+            return ()
+        req = Requirement.parse('x' + extras_spec)
+        if req.specs:
+            raise ValueError()
+        return req.extras
+
+    @classmethod
+    def parse_group(cls, group, lines, dist=None):
+        """Parse an entry point group"""
+        if not MODULE(group):
+            raise ValueError("Invalid group name", group)
+        this = {}
+        for line in yield_lines(lines):
+            ep = cls.parse(line, dist)
+            if ep.name in this:
+                raise ValueError("Duplicate entry point", group, ep.name)
+            this[ep.name] = ep
+        return this
+
+    @classmethod
+    def parse_map(cls, data, dist=None):
+        """Parse a map of entry point groups"""
+        if isinstance(data, dict):
+            data = data.items()
+        else:
+            data = split_sections(data)
+        maps = {}
+        for group, lines in data:
+            if group is None:
+                if not lines:
+                    continue
+                raise ValueError("Entry points must be listed in groups")
+            group = group.strip()
+            if group in maps:
+                raise ValueError("Duplicate group name", group)
+            maps[group] = cls.parse_group(group, lines, dist)
+        return maps
+
+
+def _remove_md5_fragment(location):
+    if not location:
+        return ''
+    parsed = urllib.parse.urlparse(location)
+    if parsed[-1].startswith('md5='):
+        return urllib.parse.urlunparse(parsed[:-1] + ('',))
+    return location
+
+
+def _version_from_file(lines):
+    """
+    Given an iterable of lines from a Metadata file, return
+    the value of the Version field, if present, or None otherwise.
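+
+    For example:
+
+    >>> _version_from_file(['Name: foo', 'Version: 1.2.3'])
+    '1.2.3'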
+ """ + is_version_line = lambda line: line.lower().startswith('version:') + version_lines = filter(is_version_line, lines) + line = next(iter(version_lines), '') + _, _, value = line.partition(':') + return safe_version(value.strip()) or None + + +class Distribution(object): + """Wrap an actual or potential sys.path entry w/metadata""" + PKG_INFO = 'PKG-INFO' + + def __init__(self, location=None, metadata=None, project_name=None, + version=None, py_version=PY_MAJOR, platform=None, + precedence=EGG_DIST): + self.project_name = safe_name(project_name or 'Unknown') + if version is not None: + self._version = safe_version(version) + self.py_version = py_version + self.platform = platform + self.location = location + self.precedence = precedence + self._provider = metadata or empty_provider + + @classmethod + def from_location(cls, location, basename, metadata=None, **kw): + project_name, version, py_version, platform = [None] * 4 + basename, ext = os.path.splitext(basename) + if ext.lower() in _distributionImpl: + cls = _distributionImpl[ext.lower()] + + match = EGG_NAME(basename) + if match: + project_name, version, py_version, platform = match.group( + 'name', 'ver', 'pyver', 'plat' + ) + return cls( + location, metadata, project_name=project_name, version=version, + py_version=py_version, platform=platform, **kw + )._reload_version() + + def _reload_version(self): + return self + + @property + def hashcmp(self): + return ( + self.parsed_version, + self.precedence, + self.key, + _remove_md5_fragment(self.location), + self.py_version or '', + self.platform or '', + ) + + def __hash__(self): + return hash(self.hashcmp) + + def __lt__(self, other): + return self.hashcmp < other.hashcmp + + def __le__(self, other): + return self.hashcmp <= other.hashcmp + + def __gt__(self, other): + return self.hashcmp > other.hashcmp + + def __ge__(self, other): + return self.hashcmp >= other.hashcmp + + def __eq__(self, other): + if not isinstance(other, self.__class__): + # It's not a Distribution, so they are not equal + return False + return self.hashcmp == other.hashcmp + + def __ne__(self, other): + return not self == other + + # These properties have to be lazy so that we don't have to load any + # metadata until/unless it's actually needed. (i.e., some distributions + # may not know their name or version without loading PKG-INFO) + + @property + def key(self): + try: + return self._key + except AttributeError: + self._key = key = self.project_name.lower() + return key + + @property + def parsed_version(self): + if not hasattr(self, "_parsed_version"): + self._parsed_version = parse_version(self.version) + + return self._parsed_version + + def _warn_legacy_version(self): + LV = packaging.version.LegacyVersion + is_legacy = isinstance(self._parsed_version, LV) + if not is_legacy: + return + + # While an empty version is technically a legacy version and + # is not a valid PEP 440 version, it's also unlikely to + # actually come from someone and instead it is more likely that + # it comes from setuptools attempting to parse a filename and + # including it in the list. So for that we'll gate this warning + # on if the version is anything at all or not. + if not self.version: + return + + tmpl = textwrap.dedent(""" + '{project_name} ({version})' is being parsed as a legacy, + non PEP 440, + version. You may find odd behavior and sort order. + In particular it will be sorted as less than 0.0. It + is recommended to migrate to PEP 440 compatible + versions. 
+ """).strip().replace('\n', ' ') + + warnings.warn(tmpl.format(**vars(self)), PEP440Warning) + + @property + def version(self): + try: + return self._version + except AttributeError: + version = _version_from_file(self._get_metadata(self.PKG_INFO)) + if version is None: + tmpl = "Missing 'Version:' header and/or %s file" + raise ValueError(tmpl % self.PKG_INFO, self) + return version + + @property + def _dep_map(self): + try: + return self.__dep_map + except AttributeError: + dm = self.__dep_map = {None: []} + for name in 'requires.txt', 'depends.txt': + for extra, reqs in split_sections(self._get_metadata(name)): + if extra: + if ':' in extra: + extra, marker = extra.split(':', 1) + if invalid_marker(marker): + # XXX warn + reqs = [] + elif not evaluate_marker(marker): + reqs = [] + extra = safe_extra(extra) or None + dm.setdefault(extra, []).extend(parse_requirements(reqs)) + return dm + + def requires(self, extras=()): + """List of Requirements needed for this distro if `extras` are used""" + dm = self._dep_map + deps = [] + deps.extend(dm.get(None, ())) + for ext in extras: + try: + deps.extend(dm[safe_extra(ext)]) + except KeyError: + raise UnknownExtra( + "%s has no such extra feature %r" % (self, ext) + ) + return deps + + def _get_metadata(self, name): + if self.has_metadata(name): + for line in self.get_metadata_lines(name): + yield line + + def activate(self, path=None, replace=False): + """Ensure distribution is importable on `path` (default=sys.path)""" + if path is None: + path = sys.path + self.insert_on(path, replace=replace) + if path is sys.path: + fixup_namespace_packages(self.location) + for pkg in self._get_metadata('namespace_packages.txt'): + if pkg in sys.modules: + declare_namespace(pkg) + + def egg_name(self): + """Return what this distribution's standard .egg filename should be""" + filename = "%s-%s-py%s" % ( + to_filename(self.project_name), to_filename(self.version), + self.py_version or PY_MAJOR + ) + + if self.platform: + filename += '-' + self.platform + return filename + + def __repr__(self): + if self.location: + return "%s (%s)" % (self, self.location) + else: + return str(self) + + def __str__(self): + try: + version = getattr(self, 'version', None) + except ValueError: + version = None + version = version or "[unknown version]" + return "%s %s" % (self.project_name, version) + + def __getattr__(self, attr): + """Delegate all unrecognized public attributes to .metadata provider""" + if attr.startswith('_'): + raise AttributeError(attr) + return getattr(self._provider, attr) + + @classmethod + def from_filename(cls, filename, metadata=None, **kw): + return cls.from_location( + _normalize_cached(filename), os.path.basename(filename), metadata, + **kw + ) + + def as_requirement(self): + """Return a ``Requirement`` that matches this distribution exactly""" + if isinstance(self.parsed_version, packaging.version.Version): + spec = "%s==%s" % (self.project_name, self.parsed_version) + else: + spec = "%s===%s" % (self.project_name, self.parsed_version) + + return Requirement.parse(spec) + + def load_entry_point(self, group, name): + """Return the `name` entry point of `group` or raise ImportError""" + ep = self.get_entry_info(group, name) + if ep is None: + raise ImportError("Entry point %r not found" % ((group, name),)) + return ep.load() + + def get_entry_map(self, group=None): + """Return the entry point map for `group`, or the full entry map""" + try: + ep_map = self._ep_map + except AttributeError: + ep_map = self._ep_map = EntryPoint.parse_map( + 
self._get_metadata('entry_points.txt'), self + ) + if group is not None: + return ep_map.get(group, {}) + return ep_map + + def get_entry_info(self, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return self.get_entry_map(group).get(name) + + def insert_on(self, path, loc=None, replace=False): + """Ensure self.location is on path + + If replace=False (default): + - If location is already in path anywhere, do nothing. + - Else: + - If it's an egg and its parent directory is on path, + insert just ahead of the parent. + - Else: add to the end of path. + If replace=True: + - If location is already on path anywhere (not eggs) + or higher priority than its parent (eggs) + do nothing. + - Else: + - If it's an egg and its parent directory is on path, + insert just ahead of the parent, + removing any lower-priority entries. + - Else: add it to the front of path. + """ + + loc = loc or self.location + if not loc: + return + + nloc = _normalize_cached(loc) + bdir = os.path.dirname(nloc) + npath = [(p and _normalize_cached(p) or p) for p in path] + + for p, item in enumerate(npath): + if item == nloc: + if replace: + break + else: + # don't modify path (even removing duplicates) if found and not replace + return + elif item == bdir and self.precedence == EGG_DIST: + # if it's an .egg, give it precedence over its directory + # UNLESS it's already been added to sys.path and replace=False + if (not replace) and nloc in npath[p:]: + return + if path is sys.path: + self.check_version_conflict() + path.insert(p, loc) + npath.insert(p, nloc) + break + else: + if path is sys.path: + self.check_version_conflict() + if replace: + path.insert(0, loc) + else: + path.append(loc) + return + + # p is the spot where we found or inserted loc; now remove duplicates + while True: + try: + np = npath.index(nloc, p + 1) + except ValueError: + break + else: + del npath[np], path[np] + # ha! + p = np + + return + + def check_version_conflict(self): + if self.key == 'setuptools': + # ignore the inevitable setuptools self-conflicts :( + return + + nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) + loc = normalize_path(self.location) + for modname in self._get_metadata('top_level.txt'): + if (modname not in sys.modules or modname in nsp + or modname in _namespace_packages): + continue + if modname in ('pkg_resources', 'setuptools', 'site'): + continue + fn = getattr(sys.modules[modname], '__file__', None) + if fn and (normalize_path(fn).startswith(loc) or + fn.startswith(self.location)): + continue + issue_warning( + "Module %s was already imported from %s, but %s is being added" + " to sys.path" % (modname, fn, self.location), + ) + + def has_version(self): + try: + self.version + except ValueError: + issue_warning("Unbuilt egg for " + repr(self)) + return False + return True + + def clone(self, **kw): + """Copy this distribution, substituting in any changed keyword args""" + names = 'project_name version py_version platform location precedence' + for attr in names.split(): + kw.setdefault(attr, getattr(self, attr, None)) + kw.setdefault('metadata', self._provider) + return self.__class__(**kw) + + @property + def extras(self): + return [dep for dep in self._dep_map if dep] + + +class EggInfoDistribution(Distribution): + def _reload_version(self): + """ + Packages installed by distutils (e.g. numpy or scipy), + which uses an old safe_version, and so + their version numbers can get mangled when + converted to filenames (e.g., 1.11.0.dev0+2329eae to + 1.11.0.dev0_2329eae). 
These distributions will not be + parsed properly + downstream by Distribution and safe_version, so + take an extra step and try to get the version number from + the metadata file itself instead of the filename. + """ + md_version = _version_from_file(self._get_metadata(self.PKG_INFO)) + if md_version: + self._version = md_version + return self + + +class DistInfoDistribution(Distribution): + """Wrap an actual or potential sys.path entry w/metadata, .dist-info style""" + PKG_INFO = 'METADATA' + EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") + + @property + def _parsed_pkg_info(self): + """Parse and cache metadata""" + try: + return self._pkg_info + except AttributeError: + metadata = self.get_metadata(self.PKG_INFO) + self._pkg_info = email.parser.Parser().parsestr(metadata) + return self._pkg_info + + @property + def _dep_map(self): + try: + return self.__dep_map + except AttributeError: + self.__dep_map = self._compute_dependencies() + return self.__dep_map + + def _compute_dependencies(self): + """Recompute this distribution's dependencies.""" + dm = self.__dep_map = {None: []} + + reqs = [] + # Including any condition expressions + for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: + reqs.extend(parse_requirements(req)) + + def reqs_for_extra(extra): + for req in reqs: + if not req.marker or req.marker.evaluate({'extra': extra}): + yield req + + common = frozenset(reqs_for_extra(None)) + dm[None].extend(common) + + for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: + s_extra = safe_extra(extra.strip()) + dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) + + return dm + + +_distributionImpl = { + '.egg': Distribution, + '.egg-info': EggInfoDistribution, + '.dist-info': DistInfoDistribution, + } + + +def issue_warning(*args, **kw): + level = 1 + g = globals() + try: + # find the first stack frame that is *not* code in + # the pkg_resources module, to use for the warning + while sys._getframe(level).f_globals is g: + level += 1 + except ValueError: + pass + warnings.warn(stacklevel=level + 1, *args, **kw) + + +class RequirementParseError(ValueError): + def __str__(self): + return ' '.join(self.args) + + +def parse_requirements(strs): + """Yield ``Requirement`` objects for each specification in `strs` + + `strs` must be a string, or a (possibly-nested) iterable thereof. + """ + # create a steppable iterator, so we can handle \-continuations + lines = iter(yield_lines(strs)) + + for line in lines: + # Drop comments -- a hash without a space may be in a URL. + if ' #' in line: + line = line[:line.find(' #')] + # If there is a line continuation, drop it, and append the next line. 
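+        # For example, the two lines "foo >=1.0, \" and "<2.0" are re-joined
+        # into the single requirement "foo >=1.0,<2.0" before parsing.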
+ if line.endswith('\\'): + line = line[:-2].strip() + line += next(lines) + yield Requirement(line) + + +class Requirement(packaging.requirements.Requirement): + def __init__(self, requirement_string): + """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" + try: + super(Requirement, self).__init__(requirement_string) + except packaging.requirements.InvalidRequirement as e: + raise RequirementParseError(str(e)) + self.unsafe_name = self.name + project_name = safe_name(self.name) + self.project_name, self.key = project_name, project_name.lower() + self.specs = [ + (spec.operator, spec.version) for spec in self.specifier] + self.extras = tuple(map(safe_extra, self.extras)) + self.hashCmp = ( + self.key, + self.specifier, + frozenset(self.extras), + str(self.marker) if self.marker else None, + ) + self.__hash = hash(self.hashCmp) + + def __eq__(self, other): + return ( + isinstance(other, Requirement) and + self.hashCmp == other.hashCmp + ) + + def __ne__(self, other): + return not self == other + + def __contains__(self, item): + if isinstance(item, Distribution): + if item.key != self.key: + return False + + item = item.version + + # Allow prereleases always in order to match the previous behavior of + # this method. In the future this should be smarter and follow PEP 440 + # more accurately. + return self.specifier.contains(item, prereleases=True) + + def __hash__(self): + return self.__hash + + def __repr__(self): return "Requirement.parse(%r)" % str(self) + + @staticmethod + def parse(s): + req, = parse_requirements(s) + return req + + +def _always_object(classes): + """ + Ensure object appears in the mro even + for old-style classes. + """ + if object not in classes: + return classes + (object,) + return classes + + +def _find_adapter(registry, ob): + """Return an adapter factory for `ob` from `registry`""" + types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob)))) + for t in types: + if t in registry: + return registry[t] + + +def ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + py31compat.makedirs(dirname, exist_ok=True) + + +def _bypass_ensure_directory(path): + """Sandbox-bypassing version of ensure_directory()""" + if not WRITE_SUPPORT: + raise IOError('"os.mkdir" not supported on this platform.') + dirname, filename = split(path) + if dirname and filename and not isdir(dirname): + _bypass_ensure_directory(dirname) + mkdir(dirname, 0o755) + + +def split_sections(s): + """Split a string or iterable thereof into (section, content) pairs + + Each ``section`` is a stripped version of the section header ("[section]") + and each ``content`` is a list of stripped lines excluding blank lines and + comment-only lines. If there are any such lines before the first section + header, they're returned in a first ``section`` of ``None``. 
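+
+    For example:
+
+    >>> list(split_sections(['before', '[one]', 'a', '[two]']))
+    [(None, ['before']), ('one', ['a']), ('two', [])]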
+ """ + section = None + content = [] + for line in yield_lines(s): + if line.startswith("["): + if line.endswith("]"): + if section or content: + yield section, content + section = line[1:-1].strip() + content = [] + else: + raise ValueError("Invalid section heading", line) + else: + content.append(line) + + # wrap up last segment + yield section, content + + +def _mkstemp(*args, **kw): + old_open = os.open + try: + # temporarily bypass sandboxing + os.open = os_open + return tempfile.mkstemp(*args, **kw) + finally: + # and then put it back + os.open = old_open + + +# Silence the PEP440Warning by default, so that end users don't get hit by it +# randomly just because they use pkg_resources. We want to append the rule +# because we want earlier uses of filterwarnings to take precedence over this +# one. +warnings.filterwarnings("ignore", category=PEP440Warning, append=True) + + +# from jaraco.functools 1.3 +def _call_aside(f, *args, **kwargs): + f(*args, **kwargs) + return f + + +@_call_aside +def _initialize(g=globals()): + "Set up global resource manager (deliberately not state-saved)" + manager = ResourceManager() + g['_manager'] = manager + g.update( + (name, getattr(manager, name)) + for name in dir(manager) + if not name.startswith('_') + ) + + +@_call_aside +def _initialize_master_working_set(): + """ + Prepare the master working set and make the ``require()`` + API available. + + This function has explicit effects on the global state + of pkg_resources. It is intended to be invoked once at + the initialization of this module. + + Invocation by other packages is unsupported and done + at their own risk. + """ + working_set = WorkingSet._build_master() + _declare_state('object', working_set=working_set) + + require = working_set.require + iter_entry_points = working_set.iter_entry_points + add_activation_listener = working_set.subscribe + run_script = working_set.run_script + # backward compatibility + run_main = run_script + # Activate all distributions already on sys.path with replace=False and + # ensure that all distributions added to the working set in the future + # (e.g. by calling ``require()``) will get activated as well, + # with higher priority (replace=True). + tuple( + dist.activate(replace=False) + for dist in working_set + ) + add_activation_listener(lambda dist: dist.activate(replace=True), existing=False) + working_set.entries = [] + # match order + list(map(working_set.add_entry, sys.path)) + globals().update(locals()) diff --git a/tests/_lib_vendors/pkg_resources/_vendor/__init__.py b/tests/_lib_vendors/pkg_resources/_vendor/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/_lib_vendors/pkg_resources/_vendor/appdirs.py b/tests/_lib_vendors/pkg_resources/_vendor/appdirs.py new file mode 100644 index 0000000..f4dba09 --- /dev/null +++ b/tests/_lib_vendors/pkg_resources/_vendor/appdirs.py @@ -0,0 +1,552 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2005-2010 ActiveState Software Inc. +# Copyright (c) 2013 Eddy Petriศ™or + +"""Utilities for determining application-specific dirs. + +See for details and usage. 
+""" +# Dev Notes: +# - MSDN on where to store app data files: +# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 +# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html +# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html + +__version_info__ = (1, 4, 0) +__version__ = '.'.join(map(str, __version_info__)) + + +import sys +import os + +PY3 = sys.version_info[0] == 3 + +if PY3: + unicode = str + +if sys.platform.startswith('java'): + import platform + os_name = platform.java_ver()[3][0] + if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. + system = 'win32' + elif os_name.startswith('Mac'): # "Mac OS X", etc. + system = 'darwin' + else: # "Linux", "SunOS", "FreeBSD", etc. + # Setting this to "linux2" is not ideal, but only Windows or Mac + # are actually checked for and the rest of the module expects + # *sys.platform* style strings. + system = 'linux2' +else: + system = sys.platform + + + +def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be ".". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + + for a discussion of issues. + + Typical user data directories are: + Mac OS X: ~/Library/Application Support/ + Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\\Application Data\\ + Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ + Win 7 (not roaming): C:\Users\\AppData\Local\\ + Win 7 (roaming): C:\Users\\AppData\Roaming\\ + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/". + """ + if system == "win32": + if appauthor is None: + appauthor = appname + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.normpath(_get_win_folder(const)) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): + """Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. 
+            it is the owning company name. This falls back to appname. You may
+            pass False to disable it.
+        "version" is an optional version path element to append to the
+            path. You might want to use this if you want multiple versions
+            of your app to be able to run independently. If used, this
+            would typically be "<major>.<minor>".
+            Only applied when appname is present.
+        "multipath" is an optional parameter only applicable to *nix
+            which indicates that the entire list of data dirs should be
+            returned. By default, the first item from XDG_DATA_DIRS is
+            returned, or '/usr/local/share/<AppName>',
+            if XDG_DATA_DIRS is not set
+
+    Typical site data directories are:
+        Mac OS X:   /Library/Application Support/<AppName>
+        Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
+        Win XP:     C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
+        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
+        Win 7:      C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.
+
+    For Unix, this is using the $XDG_DATA_DIRS[0] default.
+
+    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
+    """
+    if system == "win32":
+        if appauthor is None:
+            appauthor = appname
+        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
+        if appname:
+            if appauthor is not False:
+                path = os.path.join(path, appauthor, appname)
+            else:
+                path = os.path.join(path, appname)
+    elif system == 'darwin':
+        path = os.path.expanduser('/Library/Application Support')
+        if appname:
+            path = os.path.join(path, appname)
+    else:
+        # XDG default for $XDG_DATA_DIRS
+        # only first, if multipath is False
+        path = os.getenv('XDG_DATA_DIRS',
+                         os.pathsep.join(['/usr/local/share', '/usr/share']))
+        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
+        if appname:
+            if version:
+                appname = os.path.join(appname, version)
+            pathlist = [os.sep.join([x, appname]) for x in pathlist]
+
+        if multipath:
+            path = os.pathsep.join(pathlist)
+        else:
+            path = pathlist[0]
+        return path
+
+    if appname and version:
+        path = os.path.join(path, version)
+    return path
+
+
+def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
+    r"""Return full path to the user-specific config dir for this application.
+
+        "appname" is the name of application.
+            If None, just the system directory is returned.
+        "appauthor" (only used on Windows) is the name of the
+            appauthor or distributing body for this application. Typically
+            it is the owning company name. This falls back to appname. You may
+            pass False to disable it.
+        "version" is an optional version path element to append to the
+            path. You might want to use this if you want multiple versions
+            of your app to be able to run independently. If used, this
+            would typically be "<major>.<minor>".
+            Only applied when appname is present.
+        "roaming" (boolean, default False) can be set True to use the Windows
+            roaming appdata directory. That means that for users on a Windows
+            network setup for roaming profiles, this user data will be
+            sync'd on login. See
+            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+            for a discussion of issues.
+
+    Typical user config directories are:
+        Mac OS X:               same as user_data_dir
+        Unix:                   ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
+        Win *:                  same as user_data_dir
+
+    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
+    That means, by default "~/.config/<AppName>".
+    """
+    if system in ["win32", "darwin"]:
+        path = user_data_dir(appname, appauthor, None, roaming)
+    else:
+        path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
+        if appname:
+            path = os.path.join(path, appname)
+    if appname and version:
+        path = os.path.join(path, version)
+    return path
+
+
+def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
+    """Return full path to the user-shared config dir for this application.
+
+        "appname" is the name of the application.
+            If None, just the system directory is returned.
+        "appauthor" (only used on Windows) is the name of the
+            appauthor or distributing body for this application. Typically
+            it is the owning company name. This falls back to appname. You may
+            pass False to disable it.
+        "version" is an optional version path element to append to the
+            path. You might want to use this if you want multiple versions
+            of your app to be able to run independently. If used, this
+            would typically be "<major>.<minor>".
+            Only applied when appname is present.
+        "multipath" is an optional parameter only applicable to *nix
+            which indicates that the entire list of config dirs should be
+            returned. By default, the first item from XDG_CONFIG_DIRS is
+            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set.
+
+    Typical site config directories are:
+        Mac OS X:   same as site_data_dir
+        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
+                    $XDG_CONFIG_DIRS
+        Win *:      same as site_data_dir
+        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
+
+    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False.
+
+    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
+    """
+    if system in ["win32", "darwin"]:
+        path = site_data_dir(appname, appauthor)
+        if appname and version:
+            path = os.path.join(path, version)
+    else:
+        # XDG default for $XDG_CONFIG_DIRS
+        # only first, if multipath is False
+        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
+        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
+        if appname:
+            if version:
+                appname = os.path.join(appname, version)
+            pathlist = [os.sep.join([x, appname]) for x in pathlist]
+
+        if multipath:
+            path = os.pathsep.join(pathlist)
+        else:
+            path = pathlist[0]
+    return path
+
+
+def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
+    r"""Return full path to the user-specific cache dir for this application.
+
+        "appname" is the name of the application.
+            If None, just the system directory is returned.
+        "appauthor" (only used on Windows) is the name of the
+            appauthor or distributing body for this application. Typically
+            it is the owning company name. This falls back to appname. You may
+            pass False to disable it.
+        "version" is an optional version path element to append to the
+            path. You might want to use this if you want multiple versions
+            of your app to be able to run independently. If used, this
+            would typically be "<major>.<minor>".
+            Only applied when appname is present.
+        "opinion" (boolean) can be False to disable the appending of
+            "Cache" to the base app data dir for Windows. See
+            discussion below.
+
+    Typical user cache directories are:
+        Mac OS X:   ~/Library/Caches/<AppName>
+        Unix:       ~/.cache/<AppName> (XDG default)
+        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
+        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
+
+    On Windows the only suggestion in the MSDN docs is that local settings go in
+    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
+    app data dir (the default returned by `user_data_dir` above). Apps typically
+    put cache data somewhere *under* the given dir here. Some examples:
+        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
+        ...\Acme\SuperApp\Cache\1.0
+    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
+    This can be disabled with the `opinion=False` option.
+    """
+    if system == "win32":
+        if appauthor is None:
+            appauthor = appname
+        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
+        if appname:
+            if appauthor is not False:
+                path = os.path.join(path, appauthor, appname)
+            else:
+                path = os.path.join(path, appname)
+            if opinion:
+                path = os.path.join(path, "Cache")
+    elif system == 'darwin':
+        path = os.path.expanduser('~/Library/Caches')
+        if appname:
+            path = os.path.join(path, appname)
+    else:
+        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
+        if appname:
+            path = os.path.join(path, appname)
+    if appname and version:
+        path = os.path.join(path, version)
+    return path
+
+
+def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
+    r"""Return full path to the user-specific log dir for this application.
+
+        "appname" is the name of the application.
+            If None, just the system directory is returned.
+        "appauthor" (only used on Windows) is the name of the
+            appauthor or distributing body for this application. Typically
+            it is the owning company name. This falls back to appname. You may
+            pass False to disable it.
+        "version" is an optional version path element to append to the
+            path. You might want to use this if you want multiple versions
+            of your app to be able to run independently. If used, this
+            would typically be "<major>.<minor>".
+            Only applied when appname is present.
+        "opinion" (boolean) can be False to disable the appending of
+            "Logs" to the base app data dir for Windows, and "log" to the
+            base cache dir for Unix. See discussion below.
+
+    Typical user log directories are:
+        Mac OS X:   ~/Library/Logs/<AppName>
+        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
+        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
+        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
+
+    On Windows the only suggestion in the MSDN docs is that local settings
+    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
+    examples of what some windows apps use for a logs dir.)
+
+    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
+    value for Windows and appends "log" to the user cache dir for Unix.
+    This can be disabled with the `opinion=False` option.
+    """
+    if system == "darwin":
+        path = os.path.join(
+            os.path.expanduser('~/Library/Logs'),
+            appname)
+    elif system == "win32":
+        path = user_data_dir(appname, appauthor, version)
+        version = False
+        if opinion:
+            path = os.path.join(path, "Logs")
+    else:
+        path = user_cache_dir(appname, appauthor, version)
+        version = False
+        if opinion:
+            path = os.path.join(path, "log")
+    if appname and version:
+        path = os.path.join(path, version)
+    return path
+
+
+class AppDirs(object):
+    """Convenience wrapper for getting application dirs."""
+    def __init__(self, appname, appauthor=None, version=None, roaming=False,
+                 multipath=False):
+        self.appname = appname
+        self.appauthor = appauthor
+        self.version = version
+        self.roaming = roaming
+        self.multipath = multipath
+
+    @property
+    def user_data_dir(self):
+        return user_data_dir(self.appname, self.appauthor,
+                             version=self.version, roaming=self.roaming)
+
+    @property
+    def site_data_dir(self):
+        return site_data_dir(self.appname, self.appauthor,
+                             version=self.version, multipath=self.multipath)
+
+    @property
+    def user_config_dir(self):
+        return user_config_dir(self.appname, self.appauthor,
+                               version=self.version, roaming=self.roaming)
+
+    @property
+    def site_config_dir(self):
+        return site_config_dir(self.appname, self.appauthor,
+                               version=self.version, multipath=self.multipath)
+
+    @property
+    def user_cache_dir(self):
+        return user_cache_dir(self.appname, self.appauthor,
+                              version=self.version)
+
+    @property
+    def user_log_dir(self):
+        return user_log_dir(self.appname, self.appauthor,
+                            version=self.version)
+
+
+#---- internal support stuff
+
+def _get_win_folder_from_registry(csidl_name):
+    """This is a fallback technique at best. I'm not sure if using the
+    registry for this guarantees us the correct answer for all CSIDL_*
+    names.
+    """
+    import _winreg
+
+    shell_folder_name = {
+        "CSIDL_APPDATA": "AppData",
+        "CSIDL_COMMON_APPDATA": "Common AppData",
+        "CSIDL_LOCAL_APPDATA": "Local AppData",
+    }[csidl_name]
+
+    key = _winreg.OpenKey(
+        _winreg.HKEY_CURRENT_USER,
+        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
+    )
+    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
+    return dir
+
+
+def _get_win_folder_with_pywin32(csidl_name):
+    from win32com.shell import shellcon, shell
+    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
+    # Try to make this a unicode path because SHGetFolderPath does
+    # not return unicode strings when there is unicode data in the
+    # path.
+    try:
+        dir = unicode(dir)
+
+        # Downgrade to short path name if have highbit chars. See
+        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+        has_high_char = False
+        for c in dir:
+            if ord(c) > 255:
+                has_high_char = True
+                break
+        if has_high_char:
+            try:
+                import win32api
+                dir = win32api.GetShortPathName(dir)
+            except ImportError:
+                pass
+    except UnicodeError:
+        pass
+    return dir
+
+
+def _get_win_folder_with_ctypes(csidl_name):
+    import ctypes
+
+    csidl_const = {
+        "CSIDL_APPDATA": 26,
+        "CSIDL_COMMON_APPDATA": 35,
+        "CSIDL_LOCAL_APPDATA": 28,
+    }[csidl_name]
+
+    buf = ctypes.create_unicode_buffer(1024)
+    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+    # Downgrade to short path name if have highbit chars. See
+    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+    has_high_char = False
+    for c in buf:
+        if ord(c) > 255:
+            has_high_char = True
+            break
+    if has_high_char:
+        buf2 = ctypes.create_unicode_buffer(1024)
+        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+            buf = buf2
+
+    return buf.value
+
+def _get_win_folder_with_jna(csidl_name):
+    import array
+    from com.sun import jna
+    from com.sun.jna.platform import win32
+
+    buf_size = win32.WinDef.MAX_PATH * 2
+    buf = array.zeros('c', buf_size)
+    shell = win32.Shell32.INSTANCE
+    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
+    dir = jna.Native.toString(buf.tostring()).rstrip("\0")
+
+    # Downgrade to short path name if have highbit chars. See
+    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+    has_high_char = False
+    for c in dir:
+        if ord(c) > 255:
+            has_high_char = True
+            break
+    if has_high_char:
+        buf = array.zeros('c', buf_size)
+        kernel = win32.Kernel32.INSTANCE
+        if kernel.GetShortPathName(dir, buf, buf_size):
+            dir = jna.Native.toString(buf.tostring()).rstrip("\0")
+
+    return dir
+
+if system == "win32":
+    try:
+        import win32com.shell
+        _get_win_folder = _get_win_folder_with_pywin32
+    except ImportError:
+        try:
+            from ctypes import windll
+            _get_win_folder = _get_win_folder_with_ctypes
+        except ImportError:
+            try:
+                import com.sun.jna
+                _get_win_folder = _get_win_folder_with_jna
+            except ImportError:
+                _get_win_folder = _get_win_folder_from_registry
+
+
+#---- self test code
+
+if __name__ == "__main__":
+    appname = "MyApp"
+    appauthor = "MyCompany"
+
+    props = ("user_data_dir", "site_data_dir",
+             "user_config_dir", "site_config_dir",
+             "user_cache_dir", "user_log_dir")
+
+    print("-- app dirs (with optional 'version')")
+    dirs = AppDirs(appname, appauthor, version="1.0")
+    for prop in props:
+        print("%s: %s" % (prop, getattr(dirs, prop)))
+
+    print("\n-- app dirs (without optional 'version')")
+    dirs = AppDirs(appname, appauthor)
+    for prop in props:
+        print("%s: %s" % (prop, getattr(dirs, prop)))
+
+    print("\n-- app dirs (without optional 'appauthor')")
+    dirs = AppDirs(appname)
+    for prop in props:
+        print("%s: %s" % (prop, getattr(dirs, prop)))
+
+    print("\n-- app dirs (with disabled 'appauthor')")
+    dirs = AppDirs(appname, appauthor=False)
+    for prop in props:
+        print("%s: %s" % (prop, getattr(dirs, prop)))
diff --git a/tests/_lib_vendors/pkg_resources/_vendor/packaging/__about__.py b/tests/_lib_vendors/pkg_resources/_vendor/packaging/__about__.py
new file mode 100644
index 0000000..95d330e
--- /dev/null
+++ b/tests/_lib_vendors/pkg_resources/_vendor/packaging/__about__.py
@@ -0,0 +1,21 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "16.8" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD or Apache License, Version 2.0" +__copyright__ = "Copyright 2014-2016 %s" % __author__ diff --git a/tests/_lib_vendors/pkg_resources/_vendor/packaging/__init__.py b/tests/_lib_vendors/pkg_resources/_vendor/packaging/__init__.py new file mode 100644 index 0000000..5ee6220 --- /dev/null +++ b/tests/_lib_vendors/pkg_resources/_vendor/packaging/__init__.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +from .__about__ import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__ +) + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] diff --git a/tests/_lib_vendors/pkg_resources/_vendor/packaging/_compat.py b/tests/_lib_vendors/pkg_resources/_vendor/packaging/_compat.py new file mode 100644 index 0000000..210bb80 --- /dev/null +++ b/tests/_lib_vendors/pkg_resources/_vendor/packaging/_compat.py @@ -0,0 +1,30 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import sys + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +# flake8: noqa + +if PY3: + string_types = str, +else: + string_types = basestring, + + +def with_metaclass(meta, *bases): + """ + Create a base class with a metaclass. + """ + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) diff --git a/tests/_lib_vendors/pkg_resources/_vendor/packaging/_structures.py b/tests/_lib_vendors/pkg_resources/_vendor/packaging/_structures.py new file mode 100644 index 0000000..ccc2786 --- /dev/null +++ b/tests/_lib_vendors/pkg_resources/_vendor/packaging/_structures.py @@ -0,0 +1,68 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+from __future__ import absolute_import, division, print_function + + +class Infinity(object): + + def __repr__(self): + return "Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return False + + def __le__(self, other): + return False + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return True + + def __ge__(self, other): + return True + + def __neg__(self): + return NegativeInfinity + +Infinity = Infinity() + + +class NegativeInfinity(object): + + def __repr__(self): + return "-Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return True + + def __le__(self, other): + return True + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return False + + def __ge__(self, other): + return False + + def __neg__(self): + return Infinity + +NegativeInfinity = NegativeInfinity() diff --git a/tests/_lib_vendors/pkg_resources/_vendor/packaging/markers.py b/tests/_lib_vendors/pkg_resources/_vendor/packaging/markers.py new file mode 100644 index 0000000..892e578 --- /dev/null +++ b/tests/_lib_vendors/pkg_resources/_vendor/packaging/markers.py @@ -0,0 +1,301 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import operator +import os +import platform +import sys + +from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd +from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString +from pkg_resources.extern.pyparsing import Literal as L # noqa + +from ._compat import string_types +from .specifiers import Specifier, InvalidSpecifier + + +__all__ = [ + "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName", + "Marker", "default_environment", +] + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. 
+ """ + + +class Node(object): + + def __init__(self, value): + self.value = value + + def __str__(self): + return str(self.value) + + def __repr__(self): + return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) + + def serialize(self): + raise NotImplementedError + + +class Variable(Node): + + def serialize(self): + return str(self) + + +class Value(Node): + + def serialize(self): + return '"{0}"'.format(self) + + +class Op(Node): + + def serialize(self): + return str(self) + + +VARIABLE = ( + L("implementation_version") | + L("platform_python_implementation") | + L("implementation_name") | + L("python_full_version") | + L("platform_release") | + L("platform_version") | + L("platform_machine") | + L("platform_system") | + L("python_version") | + L("sys_platform") | + L("os_name") | + L("os.name") | # PEP-345 + L("sys.platform") | # PEP-345 + L("platform.version") | # PEP-345 + L("platform.machine") | # PEP-345 + L("platform.python_implementation") | # PEP-345 + L("python_implementation") | # undocumented setuptools legacy + L("extra") +) +ALIASES = { + 'os.name': 'os_name', + 'sys.platform': 'sys_platform', + 'platform.version': 'platform_version', + 'platform.machine': 'platform_machine', + 'platform.python_implementation': 'platform_python_implementation', + 'python_implementation': 'platform_python_implementation' +} +VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) + +VERSION_CMP = ( + L("===") | + L("==") | + L(">=") | + L("<=") | + L("!=") | + L("~=") | + L(">") | + L("<") +) + +MARKER_OP = VERSION_CMP | L("not in") | L("in") +MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) + +MARKER_VALUE = QuotedString("'") | QuotedString('"') +MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) + +BOOLOP = L("and") | L("or") + +MARKER_VAR = VARIABLE | MARKER_VALUE + +MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) + +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() + +MARKER_EXPR = Forward() +MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) + +MARKER = stringStart + MARKER_EXPR + stringEnd + + +def _coerce_parse_result(results): + if isinstance(results, ParseResults): + return [_coerce_parse_result(i) for i in results] + else: + return results + + +def _format_marker(marker, first=True): + assert isinstance(marker, (list, tuple, string_types)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. 
+    if (isinstance(marker, list) and len(marker) == 1 and
+            isinstance(marker[0], (list, tuple))):
+        return _format_marker(marker[0])
+
+    if isinstance(marker, list):
+        inner = (_format_marker(m, first=False) for m in marker)
+        if first:
+            return " ".join(inner)
+        else:
+            return "(" + " ".join(inner) + ")"
+    elif isinstance(marker, tuple):
+        return " ".join([m.serialize() for m in marker])
+    else:
+        return marker
+
+
+_operators = {
+    "in": lambda lhs, rhs: lhs in rhs,
+    "not in": lambda lhs, rhs: lhs not in rhs,
+    "<": operator.lt,
+    "<=": operator.le,
+    "==": operator.eq,
+    "!=": operator.ne,
+    ">=": operator.ge,
+    ">": operator.gt,
+}
+
+
+def _eval_op(lhs, op, rhs):
+    try:
+        spec = Specifier("".join([op.serialize(), rhs]))
+    except InvalidSpecifier:
+        pass
+    else:
+        return spec.contains(lhs)
+
+    oper = _operators.get(op.serialize())
+    if oper is None:
+        raise UndefinedComparison(
+            "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
+        )
+
+    return oper(lhs, rhs)
+
+
+_undefined = object()
+
+
+def _get_env(environment, name):
+    value = environment.get(name, _undefined)
+
+    if value is _undefined:
+        raise UndefinedEnvironmentName(
+            "{0!r} does not exist in evaluation environment.".format(name)
+        )
+
+    return value
+
+
+def _evaluate_markers(markers, environment):
+    groups = [[]]
+
+    for marker in markers:
+        assert isinstance(marker, (list, tuple, string_types))
+
+        if isinstance(marker, list):
+            groups[-1].append(_evaluate_markers(marker, environment))
+        elif isinstance(marker, tuple):
+            lhs, op, rhs = marker
+
+            if isinstance(lhs, Variable):
+                lhs_value = _get_env(environment, lhs.value)
+                rhs_value = rhs.value
+            else:
+                lhs_value = lhs.value
+                rhs_value = _get_env(environment, rhs.value)
+
+            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+        else:
+            assert marker in ["and", "or"]
+            if marker == "or":
+                groups.append([])
+
+    return any(all(item) for item in groups)
+
+
+def format_full_version(info):
+    version = '{0.major}.{0.minor}.{0.micro}'.format(info)
+    kind = info.releaselevel
+    if kind != 'final':
+        version += kind[0] + str(info.serial)
+    return version
+
+
+def default_environment():
+    if hasattr(sys, 'implementation'):
+        iver = format_full_version(sys.implementation.version)
+        implementation_name = sys.implementation.name
+    else:
+        iver = '0'
+        implementation_name = ''
+
+    return {
+        "implementation_name": implementation_name,
+        "implementation_version": iver,
+        "os_name": os.name,
+        "platform_machine": platform.machine(),
+        "platform_release": platform.release(),
+        "platform_system": platform.system(),
+        "platform_version": platform.version(),
+        "python_full_version": platform.python_version(),
+        "platform_python_implementation": platform.python_implementation(),
+        "python_version": platform.python_version()[:3],
+        "sys_platform": sys.platform,
+    }
+
+
+class Marker(object):
+
+    def __init__(self, marker):
+        try:
+            self._markers = _coerce_parse_result(MARKER.parseString(marker))
+        except ParseException as e:
+            err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
+                marker, marker[e.loc:e.loc + 8])
+            raise InvalidMarker(err_str)
+
+    def __str__(self):
+        return _format_marker(self._markers)
+
+    def __repr__(self):
+        return "<Marker({0!r})>".format(str(self))
+
+    def evaluate(self, environment=None):
+        """Evaluate a marker.
+
+        Return the boolean from evaluating the given marker against the
+        environment. environment is an optional argument to override all or
+        part of the determined environment.
+
+        The environment is determined from the current Python process.
+ """ + current_environment = default_environment() + if environment is not None: + current_environment.update(environment) + + return _evaluate_markers(self._markers, current_environment) diff --git a/tests/_lib_vendors/pkg_resources/_vendor/packaging/requirements.py b/tests/_lib_vendors/pkg_resources/_vendor/packaging/requirements.py new file mode 100644 index 0000000..0c8c4a3 --- /dev/null +++ b/tests/_lib_vendors/pkg_resources/_vendor/packaging/requirements.py @@ -0,0 +1,127 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import string +import re + +from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException +from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine +from pkg_resources.extern.pyparsing import Literal as L # noqa +from pkg_resources.extern.six.moves.urllib import parse as urlparse + +from .markers import MARKER_EXPR, Marker +from .specifiers import LegacySpecifier, Specifier, SpecifierSet + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +ALPHANUM = Word(string.ascii_letters + string.digits) + +LBRACKET = L("[").suppress() +RBRACKET = L("]").suppress() +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +COMMA = L(",").suppress() +SEMICOLON = L(";").suppress() +AT = L("@").suppress() + +PUNCTUATION = Word("-_.") +IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) +IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) + +NAME = IDENTIFIER("name") +EXTRA = IDENTIFIER + +URI = Regex(r'[^ ]+')("url") +URL = (AT + URI) + +EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") + +VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) + +VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), + joinString=",", adjacent=False)("_raw_spec") +_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '') + +VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) + +MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +MARKER_EXPR.setParseAction( + lambda s, l, t: Marker(s[t._original_start:t._original_end]) +) +MARKER_SEPERATOR = SEMICOLON +MARKER = MARKER_SEPERATOR + MARKER_EXPR + +VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +URL_AND_MARKER = URL + Optional(MARKER) + +NAMED_REQUIREMENT = \ + NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) + +REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd + + +class Requirement(object): + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? 
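+    # Editor's sketch (illustrative only, not part of the vendored source):
+    # parsing splits a PEP 508 string into the attributes described above,
+    # e.g. for Requirement('requests[security]>=2.8.1,==2.8.*; os_name == "posix"'):
+    #
+    #     req.name       -> 'requests'
+    #     req.extras     -> {'security'}
+    #     req.specifier  -> SpecifierSet for '==2.8.*,>=2.8.1'
+    #     req.marker     -> Marker for 'os_name == "posix"'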
+
+    def __init__(self, requirement_string):
+        try:
+            req = REQUIREMENT.parseString(requirement_string)
+        except ParseException as e:
+            raise InvalidRequirement(
+                "Invalid requirement, parse error at \"{0!r}\"".format(
+                    requirement_string[e.loc:e.loc + 8]))
+
+        self.name = req.name
+        if req.url:
+            parsed_url = urlparse.urlparse(req.url)
+            if not (parsed_url.scheme and parsed_url.netloc) or (
+                    not parsed_url.scheme and not parsed_url.netloc):
+                raise InvalidRequirement("Invalid URL given")
+            self.url = req.url
+        else:
+            self.url = None
+        self.extras = set(req.extras.asList() if req.extras else [])
+        self.specifier = SpecifierSet(req.specifier)
+        self.marker = req.marker if req.marker else None
+
+    def __str__(self):
+        parts = [self.name]
+
+        if self.extras:
+            parts.append("[{0}]".format(",".join(sorted(self.extras))))
+
+        if self.specifier:
+            parts.append(str(self.specifier))
+
+        if self.url:
+            parts.append("@ {0}".format(self.url))
+
+        if self.marker:
+            parts.append("; {0}".format(self.marker))
+
+        return "".join(parts)
+
+    def __repr__(self):
+        return "<Requirement({0!r})>".format(str(self))
diff --git a/tests/_lib_vendors/pkg_resources/_vendor/packaging/specifiers.py b/tests/_lib_vendors/pkg_resources/_vendor/packaging/specifiers.py
new file mode 100644
index 0000000..7f5a76c
--- /dev/null
+++ b/tests/_lib_vendors/pkg_resources/_vendor/packaging/specifiers.py
@@ -0,0 +1,774 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import abc
+import functools
+import itertools
+import re
+
+from ._compat import string_types, with_metaclass
+from .version import Version, LegacyVersion, parse
+
+
+class InvalidSpecifier(ValueError):
+    """
+    An invalid specifier was found, users should refer to PEP 440.
+    """
+
+
+class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
+
+    @abc.abstractmethod
+    def __str__(self):
+        """
+        Returns the str representation of this Specifier like object. This
+        should be representative of the Specifier itself.
+        """
+
+    @abc.abstractmethod
+    def __hash__(self):
+        """
+        Returns a hash value for this Specifier like object.
+        """
+
+    @abc.abstractmethod
+    def __eq__(self, other):
+        """
+        Returns a boolean representing whether or not the two Specifier like
+        objects are equal.
+        """
+
+    @abc.abstractmethod
+    def __ne__(self, other):
+        """
+        Returns a boolean representing whether or not the two Specifier like
+        objects are not equal.
+        """
+
+    @abc.abstractproperty
+    def prereleases(self):
+        """
+        Returns whether or not pre-releases as a whole are allowed by this
+        specifier.
+        """
+
+    @prereleases.setter
+    def prereleases(self, value):
+        """
+        Sets whether or not pre-releases as a whole are allowed by this
+        specifier.
+        """
+
+    @abc.abstractmethod
+    def contains(self, item, prereleases=None):
+        """
+        Determines if the given item is contained within this specifier.
+        """
+
+    @abc.abstractmethod
+    def filter(self, iterable, prereleases=None):
+        """
+        Takes an iterable of items and filters them so that only items which
+        are contained within this specifier are allowed in it.
+        """
+
+
+class _IndividualSpecifier(BaseSpecifier):
+
+    _operators = {}
+
+    def __init__(self, spec="", prereleases=None):
+        match = self._regex.search(spec)
+        if not match:
+            raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
+
+        self._spec = (
+            match.group("operator").strip(),
+            match.group("version").strip(),
+        )
+
+        # Store whether or not this Specifier should accept prereleases
+        self._prereleases = prereleases
+
+    def __repr__(self):
+        pre = (
+            ", prereleases={0!r}".format(self.prereleases)
+            if self._prereleases is not None
+            else ""
+        )
+
+        return "<{0}({1!r}{2})>".format(
+            self.__class__.__name__,
+            str(self),
+            pre,
+        )
+
+    def __str__(self):
+        return "{0}{1}".format(*self._spec)
+
+    def __hash__(self):
+        return hash(self._spec)
+
+    def __eq__(self, other):
+        if isinstance(other, string_types):
+            try:
+                other = self.__class__(other)
+            except InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+
+        return self._spec == other._spec
+
+    def __ne__(self, other):
+        if isinstance(other, string_types):
+            try:
+                other = self.__class__(other)
+            except InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+
+        return self._spec != other._spec
+
+    def _get_operator(self, op):
+        return getattr(self, "_compare_{0}".format(self._operators[op]))
+
+    def _coerce_version(self, version):
+        if not isinstance(version, (LegacyVersion, Version)):
+            version = parse(version)
+        return version
+
+    @property
+    def operator(self):
+        return self._spec[0]
+
+    @property
+    def version(self):
+        return self._spec[1]
+
+    @property
+    def prereleases(self):
+        return self._prereleases
+
+    @prereleases.setter
+    def prereleases(self, value):
+        self._prereleases = value
+
+    def __contains__(self, item):
+        return self.contains(item)
+
+    def contains(self, item, prereleases=None):
+        # Determine if prereleases are to be allowed or not.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # Normalize item to a Version or LegacyVersion, this allows us to have
+        # a shortcut for ``"2.0" in Specifier(">=2")``.
+        item = self._coerce_version(item)
+
+        # Determine if we should be supporting prereleases in this specifier
+        # or not, if we do not support prereleases then we can short circuit
+        # logic if this version is a prerelease.
+        if item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        return self._get_operator(self.operator)(item, self.version)
+
+    def filter(self, iterable, prereleases=None):
+        yielded = False
+        found_prereleases = []
+
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+
+        # Attempt to iterate over all the values in the iterable and if any of
+        # them match, yield them.
+        for version in iterable:
+            parsed_version = self._coerce_version(version)
+
+            if self.contains(parsed_version, **kw):
+                # If our version is a prerelease, and we were not set to allow
+                # prereleases, then we'll store it for later in case nothing
+                # else matches this specifier.
+                if (parsed_version.is_prerelease and not
+                        (prereleases or self.prereleases)):
+                    found_prereleases.append(version)
+                # Either this is not a prerelease, or we should have been
+                # accepting prereleases from the beginning.
+                else:
+                    yielded = True
+                    yield version
+
+        # Now that we've iterated over everything, determine if we've yielded
+        # any values, and if we have not and we have any prereleases stored up
+        # then we will go ahead and yield the prereleases.
+        if not yielded and found_prereleases:
+            for version in found_prereleases:
+                yield version
+
+
+class LegacySpecifier(_IndividualSpecifier):
+
+    _regex_str = (
+        r"""
+        (?P<operator>(==|!=|<=|>=|<|>))
+        \s*
+        (?P<version>
+            [^,;\s)]* # Since this is a "legacy" specifier, and the version
+                      # string can be just about anything, we match everything
+                      # except for whitespace, a semi-colon for marker support,
+                      # a closing paren since versions can be enclosed in
+                      # them, and a comma since it's a version separator.
+        )
+        """
+    )
+
+    _regex = re.compile(
+        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    _operators = {
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+    }
+
+    def _coerce_version(self, version):
+        if not isinstance(version, LegacyVersion):
+            version = LegacyVersion(str(version))
+        return version
+
+    def _compare_equal(self, prospective, spec):
+        return prospective == self._coerce_version(spec)
+
+    def _compare_not_equal(self, prospective, spec):
+        return prospective != self._coerce_version(spec)
+
+    def _compare_less_than_equal(self, prospective, spec):
+        return prospective <= self._coerce_version(spec)
+
+    def _compare_greater_than_equal(self, prospective, spec):
+        return prospective >= self._coerce_version(spec)
+
+    def _compare_less_than(self, prospective, spec):
+        return prospective < self._coerce_version(spec)
+
+    def _compare_greater_than(self, prospective, spec):
+        return prospective > self._coerce_version(spec)
+
+
+def _require_version_compare(fn):
+    @functools.wraps(fn)
+    def wrapped(self, prospective, spec):
+        if not isinstance(prospective, Version):
+            return False
+        return fn(self, prospective, spec)
+    return wrapped
+
+
+class Specifier(_IndividualSpecifier):
+
+    _regex_str = (
+        r"""
+        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+        (?P<version>
+            (?:
+                # The identity operators allow for an escape hatch that will
+                # do an exact string match of the version you wish to install.
+                # This will not be parsed by PEP 440 and we cannot determine
+                # any semantic meaning from it. This operator is discouraged
+                # but included entirely as an escape hatch.
+                (?<====)  # Only match for the identity operator
+                \s*
+                [^\s]*    # We just match everything, except for whitespace
+                          # since we are only testing for strict identity.
+            )
+            |
+            (?:
+                # The (non)equality operators allow for wild card and local
+                # versions to be specified so we have to define these two
+                # operators separately to enable that.
+                (?<===|!=)            # Only match for equals and not equals
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+
+                # You cannot use a wild card and a dev or local version
+                # together so group them with a | and make them optional.
+                (?:
+                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+                    |
+                    \.\*  # Wild card syntax of .*
+                )?
+            )
+            |
+            (?:
+                # The compatible operator requires at least two digits in the
+                # release segment.
+                (?<=~=)               # Only match for the compatible operator
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a sub set of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?<!==|!=|~=)         # We have special cases for these
+                                      # operators so we want to make sure they
+                                      # don't match here.
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+        )
+        """
+    )
+
+    _regex = re.compile(
+        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+    _operators = {
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    @_require_version_compare
+    def _compare_compatible(self, prospective, spec):
+        # Compatible releases have an equivalent combination of >= and ==. That
+        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+        # implement this in terms of the other specifiers instead of
+        # implementing it ourselves. The only thing we need to do is construct
+        # the other specifiers.
+
+        # We want everything but the last item in the version, but we want to
+        # ignore post and dev releases and we want to treat the pre-release as
+        # its own separate segment.
+        prefix = ".".join(
+            list(
+                itertools.takewhile(
+                    lambda x: (not x.startswith("post") and not
+                               x.startswith("dev")),
+                    _version_split(spec),
+                )
+            )[:-1]
+        )
+
+        # Add the prefix notation to the end of our string
+        prefix += ".*"
+
+        return (self._get_operator(">=")(prospective, spec) and
+                self._get_operator("==")(prospective, prefix))
+
+    @_require_version_compare
+    def _compare_equal(self, prospective, spec):
+        # We need special logic to handle prefix matching
+        if spec.endswith(".*"):
+            # In the case of prefix matching we want to ignore local segment.
+            prospective = Version(prospective.public)
+            # Split the spec out by dots, and pretend that there is an implicit
+            # dot in between a release segment and a pre-release segment.
+            spec = _version_split(spec[:-2])  # Remove the trailing .*
+
+            # Split the prospective version out by dots, and pretend that there
+            # is an implicit dot in between a release segment and a pre-release
+            # segment.
+            prospective = _version_split(str(prospective))
+
+            # Shorten the prospective version to be the same length as the spec
+            # so that we can determine if the specifier is a prefix of the
+            # prospective version or not.
+            prospective = prospective[:len(spec)]
+
+            # Pad out our two sides with zeros so that they both equal the same
+            # length.
+            spec, prospective = _pad_version(spec, prospective)
+        else:
+            # Convert our spec string into a Version
+            spec = Version(spec)
+
+            # If the specifier does not have a local segment, then we want to
+            # act as if the prospective version also does not have a local
+            # segment.
+            if not spec.local:
+                prospective = Version(prospective.public)
+
+        return prospective == spec
+
+    @_require_version_compare
+    def _compare_not_equal(self, prospective, spec):
+        return not self._compare_equal(prospective, spec)
+
+    @_require_version_compare
+    def _compare_less_than_equal(self, prospective, spec):
+        return prospective <= Version(spec)
+
+    @_require_version_compare
+    def _compare_greater_than_equal(self, prospective, spec):
+        return prospective >= Version(spec)
+
+    @_require_version_compare
+    def _compare_less_than(self, prospective, spec):
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec)
+
+        # Check to see if the prospective version is less than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself is a
+        # pre-release version, we do not accept pre-release versions for the
+        # version mentioned in the specifier (e.g. <3.1 should not match
+        # 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # less than the spec version *and* it's not a pre-release of the same
+        # version in the spec.
+        return True
+
+    @_require_version_compare
+    def _compare_greater_than(self, prospective, spec):
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself is a
+        # post-release version, we do not accept post-release versions for the
+        # version mentioned in the specifier (e.g. >3.1 should not match
+        # 3.0.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # greater than the spec version *and* it's not a pre-release of the
+        # same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective, spec):
+        return str(prospective).lower() == str(spec).lower()
+
+    @property
+    def prereleases(self):
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at all of our specifiers and determine if they are inclusive
+        # operators, and if they are if they are including an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "==="]:
+            # The == specifier can include a trailing .*, if it does we
+            # want to remove it before parsing.
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+
+            # Parse the version, and if it is a pre-release then this
+            # specifier allows pre-releases.
+            if parse(version).is_prerelease:
+                return True
+
+        return False
+
+    @prereleases.setter
+    def prereleases(self, value):
+        self._prereleases = value
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version):
+    result = []
+    for item in version.split("."):
+        match = _prefix_regex.search(item)
+        if match:
+            result.extend(match.groups())
+        else:
+            result.append(item)
+    return result
+
+
+def _pad_version(left, right):
+    left_split, right_split = [], []
+
+    # Get the release segment of our versions
+    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+    # Get the rest of our versions
+    left_split.append(left[len(left_split[0]):])
+    right_split.append(right[len(right_split[0]):])
+
+    # Insert our padding
+    left_split.insert(
+        1,
+        ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
+    )
+    right_split.insert(
+        1,
+        ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
+    )
+
+    return (
+        list(itertools.chain(*left_split)),
+        list(itertools.chain(*right_split)),
+    )
+
+
+class SpecifierSet(BaseSpecifier):
+
+    def __init__(self, specifiers="", prereleases=None):
+        # Split on , to break each individual specifier into its own item, and
+        # strip each item to remove leading/trailing whitespace.
+        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+        # Parse each individual specifier, attempting first to make it a
+        # Specifier and falling back to a LegacySpecifier.
+        parsed = set()
+        for specifier in specifiers:
+            try:
+                parsed.add(Specifier(specifier))
+            except InvalidSpecifier:
+                parsed.add(LegacySpecifier(specifier))
+
+        # Turn our parsed specifiers into a frozen set and save them for later.
+        self._specs = frozenset(parsed)
+
+        # Store our prereleases value so we can use it later to determine if
+        # we accept prereleases or not.
+        self._prereleases = prereleases
+
+    def __repr__(self):
+        pre = (
+            ", prereleases={0!r}".format(self.prereleases)
+            if self._prereleases is not None
+            else ""
+        )
+
+        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
+
+    def __str__(self):
+        return ",".join(sorted(str(s) for s in self._specs))
+
+    def __hash__(self):
+        return hash(self._specs)
+
+    def __and__(self, other):
+        if isinstance(other, string_types):
+            other = SpecifierSet(other)
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        specifier = SpecifierSet()
+        specifier._specs = frozenset(self._specs | other._specs)
+
+        if self._prereleases is None and other._prereleases is not None:
+            specifier._prereleases = other._prereleases
+        elif self._prereleases is not None and other._prereleases is None:
+            specifier._prereleases = self._prereleases
+        elif self._prereleases == other._prereleases:
+            specifier._prereleases = self._prereleases
+        else:
+            raise ValueError(
+                "Cannot combine SpecifierSets with True and False prerelease "
+                "overrides."
+ ) + + return specifier + + def __eq__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __ne__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs != other._specs + + def __len__(self): + return len(self._specs) + + def __iter__(self): + return iter(self._specs) + + @property + def prereleases(self): + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Ensure that our item is a Version or LegacyVersion instance. + if not isinstance(item, (LegacyVersion, Version)): + item = parse(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. + # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all( + s.contains(item, prereleases=prereleases) + for s in self._specs + ) + + def filter(self, iterable, prereleases=None): + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iterable + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases, and which will filter out LegacyVersion in general. 
+ else: + filtered = [] + found_prereleases = [] + + for item in iterable: + # Ensure that we some kind of Version class for this item. + if not isinstance(item, (LegacyVersion, Version)): + parsed_version = parse(item) + else: + parsed_version = item + + # Filter out any item which is parsed as a LegacyVersion + if isinstance(parsed_version, LegacyVersion): + continue + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return found_prereleases + + return filtered diff --git a/tests/_lib_vendors/pkg_resources/_vendor/packaging/utils.py b/tests/_lib_vendors/pkg_resources/_vendor/packaging/utils.py new file mode 100644 index 0000000..942387c --- /dev/null +++ b/tests/_lib_vendors/pkg_resources/_vendor/packaging/utils.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import re + + +_canonicalize_regex = re.compile(r"[-_.]+") + + +def canonicalize_name(name): + # This is taken from PEP 503. + return _canonicalize_regex.sub("-", name).lower() diff --git a/tests/_lib_vendors/pkg_resources/_vendor/packaging/version.py b/tests/_lib_vendors/pkg_resources/_vendor/packaging/version.py new file mode 100644 index 0000000..83b5ee8 --- /dev/null +++ b/tests/_lib_vendors/pkg_resources/_vendor/packaging/version.py @@ -0,0 +1,393 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import collections +import itertools +import re + +from ._structures import Infinity + + +__all__ = [ + "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" +] + + +_Version = collections.namedtuple( + "_Version", + ["epoch", "release", "dev", "pre", "post", "local"], +) + + +def parse(version): + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + """ + An invalid version was found, users should refer to PEP 440. 
+    """
+
+
+class _BaseVersion(object):
+
+    def __hash__(self):
+        return hash(self._key)
+
+    def __lt__(self, other):
+        return self._compare(other, lambda s, o: s < o)
+
+    def __le__(self, other):
+        return self._compare(other, lambda s, o: s <= o)
+
+    def __eq__(self, other):
+        return self._compare(other, lambda s, o: s == o)
+
+    def __ge__(self, other):
+        return self._compare(other, lambda s, o: s >= o)
+
+    def __gt__(self, other):
+        return self._compare(other, lambda s, o: s > o)
+
+    def __ne__(self, other):
+        return self._compare(other, lambda s, o: s != o)
+
+    def _compare(self, other, method):
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return method(self._key, other._key)
+
+
+class LegacyVersion(_BaseVersion):
+
+    def __init__(self, version):
+        self._version = str(version)
+        self._key = _legacy_cmpkey(self._version)
+
+    def __str__(self):
+        return self._version
+
+    def __repr__(self):
+        return "<LegacyVersion({0})>".format(repr(str(self)))
+
+    @property
+    def public(self):
+        return self._version
+
+    @property
+    def base_version(self):
+        return self._version
+
+    @property
+    def local(self):
+        return None
+
+    @property
+    def is_prerelease(self):
+        return False
+
+    @property
+    def is_postrelease(self):
+        return False
+
+
+_legacy_version_component_re = re.compile(
+    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
+)
+
+_legacy_version_replacement_map = {
+    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
+}
+
+
+def _parse_version_parts(s):
+    for part in _legacy_version_component_re.split(s):
+        part = _legacy_version_replacement_map.get(part, part)
+
+        if not part or part == ".":
+            continue
+
+        if part[:1] in "0123456789":
+            # pad for numeric comparison
+            yield part.zfill(8)
+        else:
+            yield "*" + part
+
+    # ensure that alpha/beta/candidate are before final
+    yield "*final"
+
+
+def _legacy_cmpkey(version):
+    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
+    # greater than or equal to 0. This will effectively sort the LegacyVersion,
+    # which uses the de facto standard originally implemented by setuptools,
+    # before all PEP 440 versions.
+    epoch = -1
+
+    # This scheme is taken from pkg_resources.parse_version of setuptools prior
+    # to its adoption of the packaging library.
+    parts = []
+    for part in _parse_version_parts(version.lower()):
+        if part.startswith("*"):
+            # remove "-" before a prerelease tag
+            if part < "*final":
+                while parts and parts[-1] == "*final-":
+                    parts.pop()
+
+            # remove trailing zeros from each series of numeric parts
+            while parts and parts[-1] == "00000000":
+                parts.pop()
+
+        parts.append(part)
+    parts = tuple(parts)
+
+    return epoch, parts
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+
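+# Editor's note: a minimal sketch of the third-party reuse mentioned above
+# (illustrative only, not part of the vendored source) -- anchor the pattern
+# yourself and compile it with the same flags:
+#
+#     _anchored = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$",
+#                            re.VERBOSE | re.IGNORECASE)
+#     assert _anchored.match("1!2.0.post1.dev3+ubuntu.1") is not None
+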
+class Version(_BaseVersion):
+
+    _regex = re.compile(
+        r"^\s*" + VERSION_PATTERN + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    def __init__(self, version):
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(
+                match.group("pre_l"),
+                match.group("pre_n"),
+            ),
+            post=_parse_letter_version(
+                match.group("post_l"),
+                match.group("post_n1") or match.group("post_n2"),
+            ),
+            dev=_parse_letter_version(
+                match.group("dev_l"),
+                match.group("dev_n"),
+            ),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self):
+        return "".format(repr(str(self)))
+
+    def __str__(self):
+        parts = []
+
+        # Epoch
+        if self._version.epoch != 0:
+            parts.append("{0}!".format(self._version.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self._version.release))
+
+        # Pre-release
+        if self._version.pre is not None:
+            parts.append("".join(str(x) for x in self._version.pre))
+
+        # Post-release
+        if self._version.post is not None:
+            parts.append(".post{0}".format(self._version.post[1]))
+
+        # Development release
+        if self._version.dev is not None:
+            parts.append(".dev{0}".format(self._version.dev[1]))
+
+        # Local version segment
+        if self._version.local is not None:
+            parts.append(
+                "+{0}".format(".".join(str(x) for x in self._version.local))
+            )
+
+        return "".join(parts)
+
+    @property
+    def public(self):
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self):
+        parts = []
+
+        # Epoch
+        if self._version.epoch != 0:
+            parts.append("{0}!".format(self._version.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self._version.release))
+
+        return "".join(parts)
+
+    @property
+    def local(self):
+        version_string = str(self)
+        if "+" in version_string:
+            return version_string.split("+", 1)[1]
+
+    @property
+    def is_prerelease(self):
+        return bool(self._version.dev or self._version.pre)
+
+    @property
+    def is_postrelease(self):
+        return bool(self._version.post)
+
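+# Illustrative ordering sketch (comment only): Version keys implement PEP 440
+# comparison via _cmpkey below, e.g.:
+#
+#     vs = ["1.0.post1", "1.0", "1.0a1", "1.0.dev0"]
+#     sorted(Version(v) for v in vs)
+#     # -> [<Version('1.0.dev0')>, <Version('1.0a1')>, <Version('1.0')>,
+#     #     <Version('1.0.post1')>]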
+
+def _parse_letter_version(letter, number):
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
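+# For illustration (comment only): spellings are normalized and implicit
+# numbers filled in, e.g.:
+#
+#     _parse_letter_version("alpha", None)  # -> ("a", 0)
+#     _parse_letter_version("rev", "5")     # -> ("post", 5)
+#     _parse_letter_version(None, "1")      # -> ("post", 1)  (implicit post, "1.0-1")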
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local):
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+
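+# For illustration (comment only): separators ".", "_" and "-" are treated as
+# equivalent, and purely numeric parts compare as integers:
+#
+#     _parse_local_version("ubuntu-1")      # -> ("ubuntu", 1)
+#     _parse_local_version("abc.1.twelve")  # -> ("abc", 1, "twelve")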
+
+def _cmpkey(epoch, release, pre, post, dev, local):
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all of the now
+    # leading zeros until we come to something non-zero, then re-reverse it
+    # back into the correct order, make it a tuple, and use that as our
+    # sorting key.
+    release = tuple(
+        reversed(list(
+            itertools.dropwhile(
+                lambda x: x == 0,
+                reversed(release),
+            )
+        ))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        pre = -Infinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        pre = Infinity
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        post = -Infinity
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        dev = Infinity
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        local = -Infinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP 440.
+        # - Alphanumeric segments sort before numeric segments
+        # - Alphanumeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        local = tuple(
+            (i, "") if isinstance(i, int) else (-Infinity, i)
+            for i in local
+        )
+
+    return epoch, release, pre, post, dev, local
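+
+
+# A worked instance of the trick above (comment only): for "1.0.dev0", pre and
+# post are None while dev is ("dev", 0), so pre becomes -Infinity and the key
+# sorts before that of "1.0a0" (pre == ("a", 0)); both sort before the final
+# release "1.0", whose pre is +Infinity.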
diff --git a/tests/_lib_vendors/pkg_resources/_vendor/pyparsing.py b/tests/_lib_vendors/pkg_resources/_vendor/pyparsing.py
new file mode 100644
index 0000000..a212243
--- /dev/null
+++ b/tests/_lib_vendors/pkg_resources/_vendor/pyparsing.py
@@ -0,0 +1,5696 @@
+# module pyparsing.py
+#
+# Copyright (c) 2003-2016  Paul T. McGuire
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__doc__ = \
+"""
+pyparsing module - Classes and methods to define and execute parsing grammars
+
+The pyparsing module is an alternative approach to creating and executing simple grammars,
+vs. the traditional lex/yacc approach, or the use of regular expressions.  With pyparsing, you
+don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
+provides a library of classes that you use to construct the grammar directly in Python.
+
+Here is a program to parse "Hello, World!" (or any greeting of the form
+C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements
+(L{'+'} operator gives L{And} expressions, strings are auto-converted to
+L{Literal} expressions)::
+
+    from pyparsing import Word, alphas
+
+    # define grammar of a greeting
+    greet = Word(alphas) + "," + Word(alphas) + "!"
+
+    hello = "Hello, World!"
+    print (hello, "->", greet.parseString(hello))
+
+The program outputs the following::
+
+    Hello, World! -> ['Hello', ',', 'World', '!']
+
+The Python representation of the grammar is quite readable, owing to the self-explanatory
+class names, and the use of '+', '|' and '^' operators.
+
+The L{ParseResults} object returned from L{ParserElement.parseString} can be accessed as a nested list, a dictionary, or an
+object with named attributes.
+
+The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
+ - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello  ,  World  !", etc.)
+ - quoted strings
+ - embedded comments
+"""
+
+__version__ = "2.1.10"
+__versionTime__ = "07 Oct 2016 01:31 UTC"
+__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
+
+import string
+from weakref import ref as wkref
+import copy
+import sys
+import warnings
+import re
+import sre_constants
+import collections
+import pprint
+import traceback
+import types
+from datetime import datetime
+
+try:
+    from _thread import RLock
+except ImportError:
+    from threading import RLock
+
+try:
+    from collections import OrderedDict as _OrderedDict
+except ImportError:
+    try:
+        from ordereddict import OrderedDict as _OrderedDict
+    except ImportError:
+        _OrderedDict = None
+
+#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )
+
+__all__ = [
+'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
+'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
+'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
+'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
+'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
+'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 
+'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore',
+'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
+'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
+'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
+'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
+'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
+'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
+'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', 
+'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
+'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
+'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass',
+'CloseMatch', 'tokenMap', 'pyparsing_common',
+]
+
+system_version = tuple(sys.version_info)[:3]
+PY_3 = system_version[0] == 3
+if PY_3:
+    _MAX_INT = sys.maxsize
+    basestring = str
+    unichr = chr
+    _ustr = str
+
+    # build list of single arg builtins, that can be used as parse actions
+    singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
+
+else:
+    _MAX_INT = sys.maxint
+    range = xrange
+
+    def _ustr(obj):
+        """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
+           str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
+           then < returns the unicode object | encodes it with the default encoding | ... >.
+        """
+        if isinstance(obj,unicode):
+            return obj
+
+        try:
+            # If this works, then _ustr(obj) has the same behaviour as str(obj), so
+            # it won't break any existing code.
+            return str(obj)
+
+        except UnicodeEncodeError:
+            # Else encode it
+            ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
+            xmlcharref = Regex('&#\d+;')
+            xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
+            return xmlcharref.transformString(ret)
+
+    # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
+    singleArgBuiltins = []
+    import __builtin__
+    for fname in "sum len sorted reversed list tuple set any all min max".split():
+        try:
+            singleArgBuiltins.append(getattr(__builtin__,fname))
+        except AttributeError:
+            continue
+            
+_generatorType = type((y for y in range(1)))
+ 
+def _xml_escape(data):
+    """Escape &, <, >, ", ', etc. in a string of data."""
+
+    # ampersand must be replaced first
+    from_symbols = '&><"\''
+    to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())
+    for from_,to_ in zip(from_symbols, to_symbols):
+        data = data.replace(from_, to_)
+    return data
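+
+# For illustration (comment only): the ampersand is replaced first so that
+# entities introduced by later replacements are not themselves re-escaped:
+#
+#     _xml_escape('a < b & "c"')  # -> 'a &lt; b &amp; &quot;c&quot;'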
+
+class _Constants(object):
+    pass
+
+alphas     = string.ascii_uppercase + string.ascii_lowercase
+nums       = "0123456789"
+hexnums    = nums + "ABCDEFabcdef"
+alphanums  = alphas + nums
+_bslash    = chr(92)
+printables = "".join(c for c in string.printable if c not in string.whitespace)
+
+class ParseBaseException(Exception):
+    """base exception class for all parsing runtime exceptions"""
+    # Performance tuning: we construct a *lot* of these, so keep this
+    # constructor as small and fast as possible
+    def __init__( self, pstr, loc=0, msg=None, elem=None ):
+        self.loc = loc
+        if msg is None:
+            self.msg = pstr
+            self.pstr = ""
+        else:
+            self.msg = msg
+            self.pstr = pstr
+        self.parserElement = elem
+        self.args = (pstr, loc, msg)
+
+    @classmethod
+    def _from_exception(cls, pe):
+        """
+        internal factory method to simplify creating one type of ParseException 
+        from another - avoids having __init__ signature conflicts among subclasses
+        """
+        return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
+
+    def __getattr__( self, aname ):
+        """supported attributes by name are:
+            - lineno - returns the line number of the exception text
+            - col - returns the column number of the exception text
+            - line - returns the line containing the exception text
+        """
+        if( aname == "lineno" ):
+            return lineno( self.loc, self.pstr )
+        elif( aname in ("col", "column") ):
+            return col( self.loc, self.pstr )
+        elif( aname == "line" ):
+            return line( self.loc, self.pstr )
+        else:
+            raise AttributeError(aname)
+
+    def __str__( self ):
+        return "%s (at char %d), (line:%d, col:%d)" % \
+                ( self.msg, self.loc, self.lineno, self.column )
+    def __repr__( self ):
+        return _ustr(self)
+    def markInputline( self, markerString = ">!<" ):
+        """Extracts the exception line from the input string, and marks
+           the location of the exception with a special symbol.
+        """
+        line_str = self.line
+        line_column = self.column - 1
+        if markerString:
+            line_str = "".join((line_str[:line_column],
+                                markerString, line_str[line_column:]))
+        return line_str.strip()
+    def __dir__(self):
+        return "lineno col line".split() + dir(type(self))
+
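+# Illustrative sketch (comment only) of marking a failed parse location:
+#
+#     try:
+#         Word(nums).parseString("abc")
+#     except ParseException as pe:
+#         print(pe.markInputline())  # -> '>!<abc'
+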
+class ParseException(ParseBaseException):
+    """
+    Exception thrown when a parse expression doesn't match the input string;
+    supported attributes by name are:
+     - lineno - returns the line number of the exception text
+     - col - returns the column number of the exception text
+     - line - returns the line containing the exception text
+        
+    Example::
+        try:
+            Word(nums).setName("integer").parseString("ABC")
+        except ParseException as pe:
+            print(pe)
+            print("column: {}".format(pe.col))
+            
+    prints::
+        Expected integer (at char 0), (line:1, col:1)
+        column: 1
+    """
+    pass
+
+class ParseFatalException(ParseBaseException):
+    """user-throwable exception thrown when inconsistent parse content
+       is found; stops all parsing immediately"""
+    pass
+
+class ParseSyntaxException(ParseFatalException):
+    """just like L{ParseFatalException}, but thrown internally when an
+       L{ErrorStop} ('-' operator) indicates that parsing is to stop 
+       immediately because an unbacktrackable syntax error has been found"""
+    pass
+
+#~ class ReparseException(ParseBaseException):
+    #~ """Experimental class - parse actions can raise this exception to cause
+       #~ pyparsing to reparse the input string:
+        #~ - with a modified input string, and/or
+        #~ - with a modified start location
+       #~ Set the values of the ReparseException in the constructor, and raise the
+       #~ exception in a parse action to cause pyparsing to use the new string/location.
+       #~ Setting the values as None causes no change to be made.
+       #~ """
+    #~ def __init_( self, newstring, restartLoc ):
+        #~ self.newParseText = newstring
+        #~ self.reparseLoc = restartLoc
+
+class RecursiveGrammarException(Exception):
+    """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive"""
+    def __init__( self, parseElementList ):
+        self.parseElementTrace = parseElementList
+
+    def __str__( self ):
+        return "RecursiveGrammarException: %s" % self.parseElementTrace
+
+class _ParseResultsWithOffset(object):
+    def __init__(self,p1,p2):
+        self.tup = (p1,p2)
+    def __getitem__(self,i):
+        return self.tup[i]
+    def __repr__(self):
+        return repr(self.tup[0])
+    def setOffset(self,i):
+        self.tup = (self.tup[0],i)
+
+class ParseResults(object):
+    """
+    Structured parse results, to provide multiple means of access to the parsed data:
+       - as a list (C{len(results)})
+       - by list index (C{results[0], results[1]}, etc.)
+       - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})
+
+    Example::
+        integer = Word(nums)
+        date_str = (integer.setResultsName("year") + '/' 
+                        + integer.setResultsName("month") + '/' 
+                        + integer.setResultsName("day"))
+        # equivalent form:
+        # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+        # parseString returns a ParseResults object
+        result = date_str.parseString("1999/12/31")
+
+        def test(s, fn=repr):
+            print("%s -> %s" % (s, fn(eval(s))))
+        test("list(result)")
+        test("result[0]")
+        test("result['month']")
+        test("result.day")
+        test("'month' in result")
+        test("'minutes' in result")
+        test("result.dump()", str)
+    prints::
+        list(result) -> ['1999', '/', '12', '/', '31']
+        result[0] -> '1999'
+        result['month'] -> '12'
+        result.day -> '31'
+        'month' in result -> True
+        'minutes' in result -> False
+        result.dump() -> ['1999', '/', '12', '/', '31']
+        - day: 31
+        - month: 12
+        - year: 1999
+    """
+    def __new__(cls, toklist=None, name=None, asList=True, modal=True ):
+        if isinstance(toklist, cls):
+            return toklist
+        retobj = object.__new__(cls)
+        retobj.__doinit = True
+        return retobj
+
+    # Performance tuning: we construct a *lot* of these, so keep this
+    # constructor as small and fast as possible
+    def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):
+        if self.__doinit:
+            self.__doinit = False
+            self.__name = None
+            self.__parent = None
+            self.__accumNames = {}
+            self.__asList = asList
+            self.__modal = modal
+            if toklist is None:
+                toklist = []
+            if isinstance(toklist, list):
+                self.__toklist = toklist[:]
+            elif isinstance(toklist, _generatorType):
+                self.__toklist = list(toklist)
+            else:
+                self.__toklist = [toklist]
+            self.__tokdict = dict()
+
+        if name is not None and name:
+            if not modal:
+                self.__accumNames[name] = 0
+            if isinstance(name,int):
+                name = _ustr(name) # will always return a str, but use _ustr for consistency
+            self.__name = name
+            if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):
+                if isinstance(toklist,basestring):
+                    toklist = [ toklist ]
+                if asList:
+                    if isinstance(toklist,ParseResults):
+                        self[name] = _ParseResultsWithOffset(toklist.copy(),0)
+                    else:
+                        self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
+                    self[name].__name = name
+                else:
+                    try:
+                        self[name] = toklist[0]
+                    except (KeyError,TypeError,IndexError):
+                        self[name] = toklist
+
+    def __getitem__( self, i ):
+        if isinstance( i, (int,slice) ):
+            return self.__toklist[i]
+        else:
+            if i not in self.__accumNames:
+                return self.__tokdict[i][-1][0]
+            else:
+                return ParseResults([ v[0] for v in self.__tokdict[i] ])
+
+    def __setitem__( self, k, v, isinstance=isinstance ):
+        if isinstance(v,_ParseResultsWithOffset):
+            self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
+            sub = v[0]
+        elif isinstance(k,(int,slice)):
+            self.__toklist[k] = v
+            sub = v
+        else:
+            self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
+            sub = v
+        if isinstance(sub,ParseResults):
+            sub.__parent = wkref(self)
+
+    def __delitem__( self, i ):
+        if isinstance(i,(int,slice)):
+            mylen = len( self.__toklist )
+            del self.__toklist[i]
+
+            # convert int to slice
+            if isinstance(i, int):
+                if i < 0:
+                    i += mylen
+                i = slice(i, i+1)
+            # get removed indices
+            removed = list(range(*i.indices(mylen)))
+            removed.reverse()
+            # fixup indices in token dictionary
+            for name,occurrences in self.__tokdict.items():
+                for j in removed:
+                    for k, (value, position) in enumerate(occurrences):
+                        occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
+        else:
+            del self.__tokdict[i]
+
+    def __contains__( self, k ):
+        return k in self.__tokdict
+
+    def __len__( self ): return len( self.__toklist )
+    def __bool__(self): return ( not not self.__toklist )
+    __nonzero__ = __bool__
+    def __iter__( self ): return iter( self.__toklist )
+    def __reversed__( self ): return iter( self.__toklist[::-1] )
+    def _iterkeys( self ):
+        if hasattr(self.__tokdict, "iterkeys"):
+            return self.__tokdict.iterkeys()
+        else:
+            return iter(self.__tokdict)
+
+    def _itervalues( self ):
+        return (self[k] for k in self._iterkeys())
+            
+    def _iteritems( self ):
+        return ((k, self[k]) for k in self._iterkeys())
+
+    if PY_3:
+        keys = _iterkeys       
+        """Returns an iterator of all named result keys (Python 3.x only)."""
+
+        values = _itervalues
+        """Returns an iterator of all named result values (Python 3.x only)."""
+
+        items = _iteritems
+        """Returns an iterator of all named result key-value tuples (Python 3.x only)."""
+
+    else:
+        iterkeys = _iterkeys
+        """Returns an iterator of all named result keys (Python 2.x only)."""
+
+        itervalues = _itervalues
+        """Returns an iterator of all named result values (Python 2.x only)."""
+
+        iteritems = _iteritems
+        """Returns an iterator of all named result key-value tuples (Python 2.x only)."""
+
+        def keys( self ):
+            """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.iterkeys())
+
+        def values( self ):
+            """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.itervalues())
+                
+        def items( self ):
+            """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
+            return list(self.iteritems())
+
+    def haskeys( self ):
+        """Since keys() returns an iterator, this method is helpful in bypassing
+           code that looks for the existence of any defined results names."""
+        return bool(self.__tokdict)
+        
+    def pop( self, *args, **kwargs):
+        """
+        Removes and returns item at specified index (default=C{last}).
+        Supports both C{list} and C{dict} semantics for C{pop()}. If passed no
+        argument or an integer argument, it will use C{list} semantics
+        and pop tokens from the list of parsed tokens. If passed a 
+        non-integer argument (most likely a string), it will use C{dict}
+        semantics and pop the corresponding value from any defined 
+        results names. A second default return value argument is 
+        supported, just as in C{dict.pop()}.
+
+        Example::
+            def remove_first(tokens):
+                tokens.pop(0)
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']
+
+            label = Word(alphas)
+            patt = label("LABEL") + OneOrMore(Word(nums))
+            print(patt.parseString("AAB 123 321").dump())
+
+            # Use pop() in a parse action to remove named result (note that corresponding value is not
+            # removed from list form of results)
+            def remove_LABEL(tokens):
+                tokens.pop("LABEL")
+                return tokens
+            patt.addParseAction(remove_LABEL)
+            print(patt.parseString("AAB 123 321").dump())
+        prints::
+            ['AAB', '123', '321']
+            - LABEL: AAB
+
+            ['AAB', '123', '321']
+        """
+        if not args:
+            args = [-1]
+        for k,v in kwargs.items():
+            if k == 'default':
+                args = (args[0], v)
+            else:
+                raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
+        if (isinstance(args[0], int) or 
+                        len(args) == 1 or 
+                        args[0] in self):
+            index = args[0]
+            ret = self[index]
+            del self[index]
+            return ret
+        else:
+            defaultvalue = args[1]
+            return defaultvalue
+
+    def get(self, key, defaultValue=None):
+        """
+        Returns named result matching the given key, or if there is no
+        such name, then returns the given C{defaultValue} or C{None} if no
+        C{defaultValue} is specified.
+
+        Similar to C{dict.get()}.
+        
+        Example::
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           
+
+            result = date_str.parseString("1999/12/31")
+            print(result.get("year")) # -> '1999'
+            print(result.get("hour", "not specified")) # -> 'not specified'
+            print(result.get("hour")) # -> None
+        """
+        if key in self:
+            return self[key]
+        else:
+            return defaultValue
+
+    def insert( self, index, insStr ):
+        """
+        Inserts new element at location index in the list of parsed tokens.
+        
+        Similar to C{list.insert()}.
+
+        Example::
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+            # use a parse action to insert the parse location in the front of the parsed results
+            def insert_locn(locn, tokens):
+                tokens.insert(0, locn)
+            print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
+        """
+        self.__toklist.insert(index, insStr)
+        # fixup indices in token dictionary
+        for name,occurrences in self.__tokdict.items():
+            for k, (value, position) in enumerate(occurrences):
+                occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
+
+    def append( self, item ):
+        """
+        Add single element to end of ParseResults list of elements.
+
+        Example::
+            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+            
+            # use a parse action to compute the sum of the parsed integers, and add it to the end
+            def append_sum(tokens):
+                tokens.append(sum(map(int, tokens)))
+            print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
+        """
+        self.__toklist.append(item)
+
+    def extend( self, itemseq ):
+        """
+        Add sequence of elements to end of ParseResults list of elements.
+
+        Example::
+            patt = OneOrMore(Word(alphas))
+            
+            # use a parse action to append the reverse of the matched strings, to make a palindrome
+            def make_palindrome(tokens):
+                tokens.extend(reversed([t[::-1] for t in tokens]))
+                return ''.join(tokens)
+            print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
+        """
+        if isinstance(itemseq, ParseResults):
+            self += itemseq
+        else:
+            self.__toklist.extend(itemseq)
+
+    def clear( self ):
+        """
+        Clear all elements and results names.
+        """
+        del self.__toklist[:]
+        self.__tokdict.clear()
+
+    def __getattr__( self, name ):
+        try:
+            return self[name]
+        except KeyError:
+            return ""
+
+    def __add__( self, other ):
+        ret = self.copy()
+        ret += other
+        return ret
+
+    def __iadd__( self, other ):
+        if other.__tokdict:
+            offset = len(self.__toklist)
+            addoffset = lambda a: offset if a<0 else a+offset
+            otheritems = other.__tokdict.items()
+            otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
+                                for (k,vlist) in otheritems for v in vlist]
+            for k,v in otherdictitems:
+                self[k] = v
+                if isinstance(v[0],ParseResults):
+                    v[0].__parent = wkref(self)
+            
+        self.__toklist += other.__toklist
+        self.__accumNames.update( other.__accumNames )
+        return self
+
+    def __radd__(self, other):
+        if isinstance(other,int) and other == 0:
+            # useful for merging many ParseResults using sum() builtin
+            return self.copy()
+        else:
+            # this may raise a TypeError - so be it
+            return other + self
+        
+    def __repr__( self ):
+        return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )
+
+    def __str__( self ):
+        return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
+
+    def _asStringList( self, sep='' ):
+        out = []
+        for item in self.__toklist:
+            if out and sep:
+                out.append(sep)
+            if isinstance( item, ParseResults ):
+                out += item._asStringList()
+            else:
+                out.append( _ustr(item) )
+        return out
+
+    def asList( self ):
+        """
+        Returns the parse results as a nested list of matching tokens, all converted to strings.
+
+        Example::
+            patt = OneOrMore(Word(alphas))
+            result = patt.parseString("sldkj lsdkj sldkj")
+            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
+            print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
+            
+            # Use asList() to create an actual list
+            result_list = result.asList()
+            print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
+        """
+        return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
+
+    def asDict( self ):
+        """
+        Returns the named parse results as a nested dictionary.
+
+        Example::
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+            
+            result = date_str.parseString('12/31/1999')
+            print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
+            
+            result_dict = result.asDict()
+            print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
+
+            # even though a ParseResults supports dict-like access, sometime you just need to have a dict
+            import json
+            print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
+            print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
+        """
+        if PY_3:
+            item_fn = self.items
+        else:
+            item_fn = self.iteritems
+            
+        def toItem(obj):
+            if isinstance(obj, ParseResults):
+                if obj.haskeys():
+                    return obj.asDict()
+                else:
+                    return [toItem(v) for v in obj]
+            else:
+                return obj
+                
+        return dict((k,toItem(v)) for k,v in item_fn())
+
+    def copy( self ):
+        """
+        Returns a new copy of a C{ParseResults} object.
+        """
+        ret = ParseResults( self.__toklist )
+        ret.__tokdict = self.__tokdict.copy()
+        ret.__parent = self.__parent
+        ret.__accumNames.update( self.__accumNames )
+        ret.__name = self.__name
+        return ret
+
+    def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
+        """
+        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
+        """
+        nl = "\n"
+        out = []
+        namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
+                                                            for v in vlist)
+        nextLevelIndent = indent + "  "
+
+        # collapse out indents if formatting is not desired
+        if not formatted:
+            indent = ""
+            nextLevelIndent = ""
+            nl = ""
+
+        selfTag = None
+        if doctag is not None:
+            selfTag = doctag
+        else:
+            if self.__name:
+                selfTag = self.__name
+
+        if not selfTag:
+            if namedItemsOnly:
+                return ""
+            else:
+                selfTag = "ITEM"
+
+        out += [ nl, indent, "<", selfTag, ">" ]
+
+        for i,res in enumerate(self.__toklist):
+            if isinstance(res,ParseResults):
+                if i in namedItems:
+                    out += [ res.asXML(namedItems[i],
+                                        namedItemsOnly and doctag is None,
+                                        nextLevelIndent,
+                                        formatted)]
+                else:
+                    out += [ res.asXML(None,
+                                        namedItemsOnly and doctag is None,
+                                        nextLevelIndent,
+                                        formatted)]
+            else:
+                # individual token, see if there is a name for it
+                resTag = None
+                if i in namedItems:
+                    resTag = namedItems[i]
+                if not resTag:
+                    if namedItemsOnly:
+                        continue
+                    else:
+                        resTag = "ITEM"
+                xmlBodyText = _xml_escape(_ustr(res))
+                out += [ nl, nextLevelIndent, "<", resTag, ">",
+                                                xmlBodyText,
+                                                "" ]
+
+        out += [ nl, indent, "" ]
+        return "".join(out)
+
+    def __lookup(self,sub):
+        for k,vlist in self.__tokdict.items():
+            for v,loc in vlist:
+                if sub is v:
+                    return k
+        return None
+
+    def getName(self):
+        """
+        Returns the results name for this token expression. Useful when several 
+        different expressions might match at a particular location.
+
+        Example::
+            integer = Word(nums)
+            ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
+            house_number_expr = Suppress('#') + Word(nums, alphanums)
+            user_data = (Group(house_number_expr)("house_number") 
+                        | Group(ssn_expr)("ssn")
+                        | Group(integer)("age"))
+            user_info = OneOrMore(user_data)
+            
+            result = user_info.parseString("22 111-22-3333 #221B")
+            for item in result:
+                print(item.getName(), ':', item[0])
+        prints::
+            age : 22
+            ssn : 111-22-3333
+            house_number : 221B
+        """
+        if self.__name:
+            return self.__name
+        elif self.__parent:
+            par = self.__parent()
+            if par:
+                return par.__lookup(self)
+            else:
+                return None
+        elif (len(self) == 1 and
+               len(self.__tokdict) == 1 and
+               next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
+            return next(iter(self.__tokdict.keys()))
+        else:
+            return None
+
+    def dump(self, indent='', depth=0, full=True):
+        """
+        Diagnostic method for listing out the contents of a C{ParseResults}.
+        Accepts an optional C{indent} argument so that this string can be embedded
+        in a nested display of other data.
+
+        Example::
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+            
+            result = date_str.parseString('12/31/1999')
+            print(result.dump())
+        prints::
+            ['12', '/', '31', '/', '1999']
+            - day: 1999
+            - month: 31
+            - year: 12
+        """
+        out = []
+        NL = '\n'
+        out.append( indent+_ustr(self.asList()) )
+        if full:
+            if self.haskeys():
+                items = sorted((str(k), v) for k,v in self.items())
+                for k,v in items:
+                    if out:
+                        out.append(NL)
+                    out.append( "%s%s- %s: " % (indent,('  '*depth), k) )
+                    if isinstance(v,ParseResults):
+                        if v:
+                            out.append( v.dump(indent,depth+1) )
+                        else:
+                            out.append(_ustr(v))
+                    else:
+                        out.append(repr(v))
+            elif any(isinstance(vv,ParseResults) for vv in self):
+                v = self
+                for i,vv in enumerate(v):
+                    if isinstance(vv,ParseResults):
+                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),vv.dump(indent,depth+1) ))
+                    else:
+                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),_ustr(vv)))
+            
+        return "".join(out)
+
+    def pprint(self, *args, **kwargs):
+        """
+        Pretty-printer for parsed results as a list, using the C{pprint} module.
+        Accepts additional positional or keyword args as defined for the 
+        C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})
+
+        Example::
+            ident = Word(alphas, alphanums)
+            num = Word(nums)
+            func = Forward()
+            term = ident | num | Group('(' + func + ')')
+            func <<= ident + Group(Optional(delimitedList(term)))
+            result = func.parseString("fna a,b,(fnb c,d,200),100")
+            result.pprint(width=40)
+        prints::
+            ['fna',
+             ['a',
+              'b',
+              ['(', 'fnb', ['c', 'd', '200'], ')'],
+              '100']]
+        """
+        pprint.pprint(self.asList(), *args, **kwargs)
+
+    # add support for pickle protocol
+    def __getstate__(self):
+        return ( self.__toklist,
+                 ( self.__tokdict.copy(),
+                   self.__parent is not None and self.__parent() or None,
+                   self.__accumNames,
+                   self.__name ) )
+
+    def __setstate__(self,state):
+        self.__toklist = state[0]
+        (self.__tokdict,
+         par,
+         inAccumNames,
+         self.__name) = state[1]
+        self.__accumNames = {}
+        self.__accumNames.update(inAccumNames)
+        if par is not None:
+            self.__parent = wkref(par)
+        else:
+            self.__parent = None
+
+    def __getnewargs__(self):
+        return self.__toklist, self.__name, self.__asList, self.__modal
+
+    def __dir__(self):
+        return (dir(type(self)) + list(self.keys()))
+
+collections.MutableMapping.register(ParseResults)
+
+def col (loc,strg):
+    """Returns current column within a string, counting newlines as line separators.
+   The first column is number 1.
+
+   Note: the default parsing behavior is to expand tabs in the input string
+   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
+   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+   consistent view of the parsed string, the parse location, and line and column
+   positions within the parsed string.
+   """
+    s = strg
+    return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
+
+def lineno(loc,strg):
+    """Returns current line number within a string, counting newlines as line separators.
+   The first line is number 1.
+
+   Note: the default parsing behavior is to expand tabs in the input string
+   before starting the parsing process.  See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
+   on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+   consistent view of the parsed string, the parse location, and line and column
+   positions within the parsed string.
+   """
+    return strg.count("\n",0,loc) + 1
+
+def line( loc, strg ):
+    """Returns the line of text containing loc within a string, counting newlines as line separators.
+       """
+    lastCR = strg.rfind("\n", 0, loc)
+    nextCR = strg.find("\n", loc)
+    if nextCR >= 0:
+        return strg[lastCR+1:nextCR]
+    else:
+        return strg[lastCR+1:]
+
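+# For illustration (comment only): with data = "abc\ndef" and loc = 5 (the
+# character "e"), lineno(5, data) == 2, col(5, data) == 2, and
+# line(5, data) == "def".
+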
+def _defaultStartDebugAction( instring, loc, expr ):
+    print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )))
+
+def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):
+    print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
+
+def _defaultExceptionDebugAction( instring, loc, expr, exc ):
+    print ("Exception raised:" + _ustr(exc))
+
+def nullDebugAction(*args):
+    """'Do-nothing' debug action, to suppress debugging output during parsing."""
+    pass
+
+# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
+#~ 'decorator to trim function calls to match the arity of the target'
+#~ def _trim_arity(func, maxargs=3):
+    #~ if func in singleArgBuiltins:
+        #~ return lambda s,l,t: func(t)
+    #~ limit = 0
+    #~ foundArity = False
+    #~ def wrapper(*args):
+        #~ nonlocal limit,foundArity
+        #~ while 1:
+            #~ try:
+                #~ ret = func(*args[limit:])
+                #~ foundArity = True
+                #~ return ret
+            #~ except TypeError:
+                #~ if limit == maxargs or foundArity:
+                    #~ raise
+                #~ limit += 1
+                #~ continue
+    #~ return wrapper
+
+# this version is Python 2.x-3.x cross-compatible
+'decorator to trim function calls to match the arity of the target'
+def _trim_arity(func, maxargs=2):
+    if func in singleArgBuiltins:
+        return lambda s,l,t: func(t)
+    limit = [0]
+    foundArity = [False]
+    
+    # traceback return data structure changed in Py3.5 - normalize back to plain tuples
+    if system_version[:2] >= (3,5):
+        def extract_stack(limit=0):
+            # special handling for Python 3.5.0 - extra deep call stack by 1
+            offset = -3 if system_version == (3,5,0) else -2
+            frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
+            return [(frame_summary.filename, frame_summary.lineno)]
+        def extract_tb(tb, limit=0):
+            frames = traceback.extract_tb(tb, limit=limit)
+            frame_summary = frames[-1]
+            return [(frame_summary.filename, frame_summary.lineno)]
+    else:
+        extract_stack = traceback.extract_stack
+        extract_tb = traceback.extract_tb
+    
+    # synthesize what would be returned by traceback.extract_stack at the call to 
+    # user's parse action 'func', so that we don't incur call penalty at parse time
+    
+    LINE_DIFF = 6
+    # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND 
+    # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!
+    this_line = extract_stack(limit=2)[-1]
+    pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)
+
+    def wrapper(*args):
+        while 1:
+            try:
+                ret = func(*args[limit[0]:])
+                foundArity[0] = True
+                return ret
+            except TypeError:
+                # re-raise TypeErrors if they did not come from our arity testing
+                if foundArity[0]:
+                    raise
+                else:
+                    try:
+                        tb = sys.exc_info()[-1]
+                        if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
+                            raise
+                    finally:
+                        del tb
+
+                if limit[0] <= maxargs:
+                    limit[0] += 1
+                    continue
+                raise
+
+    # copy func name to wrapper for sensible debug output
+    func_name = ""
+    try:
+        func_name = getattr(func, '__name__', 
+                            getattr(func, '__class__').__name__)
+    except Exception:
+        func_name = str(func)
+    wrapper.__name__ = func_name
+
+    return wrapper
+
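+# Illustrative sketch (comment only): _trim_arity lets parse actions declare
+# only the arguments they need - a one-argument action still works when called
+# with the full (s, loc, toks) signature:
+#
+#     def to_int(toks):
+#         return int(toks[0])
+#     wrapped = _trim_arity(to_int)
+#     wrapped("1999/12/31", 0, ["1999"])  # -> 1999
+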
+class ParserElement(object):
+    """Abstract base level parser element class."""
+    DEFAULT_WHITE_CHARS = " \n\t\r"
+    verbose_stacktrace = False
+
+    @staticmethod
+    def setDefaultWhitespaceChars( chars ):
+        r"""
+        Overrides the default whitespace chars
+
+        Example::
+            # default whitespace chars are space, <TAB> and newline
+            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
+            
+            # change to just treat newline as significant
+            ParserElement.setDefaultWhitespaceChars(" \t")
+            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
+        """
+        ParserElement.DEFAULT_WHITE_CHARS = chars
+
+    @staticmethod
+    def inlineLiteralsUsing(cls):
+        """
+        Set class to be used for inclusion of string literals into a parser.
+        
+        Example::
+            # default literal class used is Literal
+            integer = Word(nums)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+
+            # change to Suppress
+            ParserElement.inlineLiteralsUsing(Suppress)
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
+        """
+        ParserElement._literalStringClass = cls
+
+    def __init__( self, savelist=False ):
+        self.parseAction = list()
+        self.failAction = None
+        #~ self.name = "<expression>"  # don't define self.name, let subclasses try/except upcall
+        self.strRepr = None
+        self.resultsName = None
+        self.saveAsList = savelist
+        self.skipWhitespace = True
+        self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+        self.copyDefaultWhiteChars = True
+        self.mayReturnEmpty = False # used when checking for left-recursion
+        self.keepTabs = False
+        self.ignoreExprs = list()
+        self.debug = False
+        self.streamlined = False
+        self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
+        self.errmsg = ""
+        self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
+        self.debugActions = ( None, None, None ) #custom debug actions
+        self.re = None
+        self.callPreparse = True # used to avoid redundant calls to preParse
+        self.callDuringTry = False
+
+    def copy( self ):
+        """
+        Make a copy of this C{ParserElement}.  Useful for defining different parse actions
+        for the same parsing pattern, using copies of the original parse element.
+        
+        Example::
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K")
+            integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+            
+            print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
+        prints::
+            [5120, 100, 655360, 268435456]
+        Equivalent form of C{expr.copy()} is just C{expr()}::
+            integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+        """
+        cpy = copy.copy( self )
+        cpy.parseAction = self.parseAction[:]
+        cpy.ignoreExprs = self.ignoreExprs[:]
+        if self.copyDefaultWhiteChars:
+            cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+        return cpy
+
+    def setName( self, name ):
+        """
+        Define name for this expression, makes debugging and exception messages clearer.
+        
+        Example::
+            Word(nums).parseString("ABC")  # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
+            Word(nums).setName("integer").parseString("ABC")  # -> Exception: Expected integer (at char 0), (line:1, col:1)
+        """
+        self.name = name
+        self.errmsg = "Expected " + self.name
+        if hasattr(self,"exception"):
+            self.exception.msg = self.errmsg
+        return self
+
+    def setResultsName( self, name, listAllMatches=False ):
+        """
+        Define name for referencing matching tokens as a nested attribute
+        of the returned parse results.
+        NOTE: this returns a *copy* of the original C{ParserElement} object;
+        this is so that the client can define a basic element, such as an
+        integer, and reference it in multiple places with different names.
+
+        You can also set results names using the abbreviated syntax,
+        C{expr("name")} in place of C{expr.setResultsName("name")} - 
+        see L{I{__call__}<__call__>}.
+
+        Example::
+            date_str = (integer.setResultsName("year") + '/' 
+                        + integer.setResultsName("month") + '/' 
+                        + integer.setResultsName("day"))
+
+            # equivalent form:
+            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+        """
+        newself = self.copy()
+        if name.endswith("*"):
+            name = name[:-1]
+            listAllMatches=True
+        newself.resultsName = name
+        newself.modalResults = not listAllMatches
+        return newself
+
+    def setBreak(self,breakFlag = True):
+        """Method to invoke the Python pdb debugger when this element is
+           about to be parsed. Set C{breakFlag} to True to enable, False to
+           disable.
+        """
+        if breakFlag:
+            _parseMethod = self._parse
+            def breaker(instring, loc, doActions=True, callPreParse=True):
+                import pdb
+                pdb.set_trace()
+                return _parseMethod( instring, loc, doActions, callPreParse )
+            breaker._originalParseMethod = _parseMethod
+            self._parse = breaker
+        else:
+            if hasattr(self._parse,"_originalParseMethod"):
+                self._parse = self._parse._originalParseMethod
+        return self
+
+    def setParseAction( self, *fns, **kwargs ):
+        """
+        Define action to perform when successfully matching parse element definition.
+        Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
+        C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
+         - s   = the original string being parsed (see note below)
+         - loc = the location of the matching substring
+         - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object
+        If the functions in fns modify the tokens, they can return them as the return
+        value from fn, and the modified list of tokens will replace the original.
+        Otherwise, fn does not need to return any value.
+
+        Optional keyword arguments:
+         - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing
+
+        Note: the default parsing behavior is to expand tabs in the input string
+        before starting the parsing process.  See L{I{parseString}<parseString>} for more information
+        on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+        consistent view of the parsed string, the parse location, and line and column
+        positions within the parsed string.
+        
+        Example::
+            integer = Word(nums)
+            date_str = integer + '/' + integer + '/' + integer
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+            # use parse action to convert to ints at parse time
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            date_str = integer + '/' + integer + '/' + integer
+
+            # note that integer fields are now ints, not strings
+            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
+        """
+        self.parseAction = list(map(_trim_arity, list(fns)))
+        self.callDuringTry = kwargs.get("callDuringTry", False)
+        return self
+
+    def addParseAction( self, *fns, **kwargs ):
+        """
+        Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
+
+        See examples in L{I{copy}<copy>}.
+        """
+        self.parseAction += list(map(_trim_arity, list(fns)))
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def addCondition(self, *fns, **kwargs):
+        """Add a boolean predicate function to expression's list of parse actions. See 
+        L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction},
+        functions passed to C{addCondition} need to return boolean success/fail of the condition.
+
+        Optional keyword arguments:
+         - message = define a custom message to be used in the raised exception
+         - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
+         
+        Example::
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            year_int = integer.copy()
+            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
+            date_str = year_int + '/' + integer + '/' + integer
+
+            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
+        """
+        msg = kwargs.get("message", "failed user-defined condition")
+        exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
+        for fn in fns:
+            def pa(s,l,t,fn=fn):  # bind fn per iteration to avoid late-binding closure bug
+                if not bool(_trim_arity(fn)(s,l,t)):
+                    raise exc_type(s,l,msg)
+            self.parseAction.append(pa)
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def setFailAction( self, fn ):
+        """Define action to perform if parsing fails at this expression.
+           Fail action fn is a callable function that takes the arguments
+           C{fn(s,loc,expr,err)} where:
+            - s = string being parsed
+            - loc = location where expression match was attempted and failed
+            - expr = the parse expression that failed
+            - err = the exception thrown
+           The function returns no value.  It may throw C{L{ParseFatalException}}
+           if it is desired to stop parsing immediately."""
+        self.failAction = fn
+        return self
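+    # Example (sketch): a fail action receives (s, loc, expr, err); e.g.
+    #   expr.setFailAction(lambda s, loc, expr, err: err_log.append((loc, err)))
+    # where err_log is a hypothetical list used to collect failure locations.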
+
+    def _skipIgnorables( self, instring, loc ):
+        exprsFound = True
+        while exprsFound:
+            exprsFound = False
+            for e in self.ignoreExprs:
+                try:
+                    while 1:
+                        loc,dummy = e._parse( instring, loc )
+                        exprsFound = True
+                except ParseException:
+                    pass
+        return loc
+
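+    # Advance loc past any registered ignore expressions, then past skippable
+    # whitespace (per self.whiteChars), before the actual match is attempted.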
+    def preParse( self, instring, loc ):
+        if self.ignoreExprs:
+            loc = self._skipIgnorables( instring, loc )
+
+        if self.skipWhitespace:
+            wt = self.whiteChars
+            instrlen = len(instring)
+            while loc < instrlen and instring[loc] in wt:
+                loc += 1
+
+        return loc
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        return loc, []
+
+    def postParse( self, instring, loc, tokenlist ):
+        return tokenlist
+
+    #~ @profile
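+    # Core match routine: optionally pre-parse (skip ignorables/whitespace),
+    # run parseImpl, post-process the tokens, then apply any parse actions.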
+    def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
+        debugging = ( self.debug ) #and doActions )
+
+        if debugging or self.failAction:
+            #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
+            if (self.debugActions[0] ):
+                self.debugActions[0]( instring, loc, self )
+            if callPreParse and self.callPreparse:
+                preloc = self.preParse( instring, loc )
+            else:
+                preloc = loc
+            tokensStart = preloc
+            try:
+                try:
+                    loc,tokens = self.parseImpl( instring, preloc, doActions )
+                except IndexError:
+                    raise ParseException( instring, len(instring), self.errmsg, self )
+            except ParseBaseException as err:
+                #~ print ("Exception raised:", err)
+                if self.debugActions[2]:
+                    self.debugActions[2]( instring, tokensStart, self, err )
+                if self.failAction:
+                    self.failAction( instring, tokensStart, self, err )
+                raise
+        else:
+            if callPreParse and self.callPreparse:
+                preloc = self.preParse( instring, loc )
+            else:
+                preloc = loc
+            tokensStart = preloc
+            if self.mayIndexError or loc >= len(instring):
+                try:
+                    loc,tokens = self.parseImpl( instring, preloc, doActions )
+                except IndexError:
+                    raise ParseException( instring, len(instring), self.errmsg, self )
+            else:
+                loc,tokens = self.parseImpl( instring, preloc, doActions )
+
+        tokens = self.postParse( instring, loc, tokens )
+
+        retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
+        if self.parseAction and (doActions or self.callDuringTry):
+            if debugging:
+                try:
+                    for fn in self.parseAction:
+                        tokens = fn( instring, tokensStart, retTokens )
+                        if tokens is not None:
+                            retTokens = ParseResults( tokens,
+                                                      self.resultsName,
+                                                      asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
+                                                      modal=self.modalResults )
+                except ParseBaseException as err:
+                    #~ print "Exception raised in user parse action:", err
+                    if (self.debugActions[2] ):
+                        self.debugActions[2]( instring, tokensStart, self, err )
+                    raise
+            else:
+                for fn in self.parseAction:
+                    tokens = fn( instring, tokensStart, retTokens )
+                    if tokens is not None:
+                        retTokens = ParseResults( tokens,
+                                                  self.resultsName,
+                                                  asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
+                                                  modal=self.modalResults )
+
+        if debugging:
+            #~ print ("Matched",self,"->",retTokens.asList())
+            if (self.debugActions[1] ):
+                self.debugActions[1]( instring, tokensStart, loc, self, retTokens )
+
+        return loc, retTokens
+
+    def tryParse( self, instring, loc ):
+        try:
+            return self._parse( instring, loc, doActions=False )[0]
+        except ParseFatalException:
+            raise ParseException( instring, loc, self.errmsg, self)
+    
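+    # tryParse/canParseNext probe whether this expression matches at loc without
+    # running parse actions (doActions=False); canParseNext folds the result to a bool.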
+    def canParseNext(self, instring, loc):
+        try:
+            self.tryParse(instring, loc)
+        except (ParseException, IndexError):
+            return False
+        else:
+            return True
+
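+    # The cache helpers below keep their storage in a closure and expose
+    # get/set/clear as bound methods (types.MethodType), so the underlying
+    # dict is reached via the closure rather than per-call attribute lookups.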
+    class _UnboundedCache(object):
+        def __init__(self):
+            cache = {}
+            self.not_in_cache = not_in_cache = object()
+
+            def get(self, key):
+                return cache.get(key, not_in_cache)
+
+            def set(self, key, value):
+                cache[key] = value
+
+            def clear(self):
+                cache.clear()
+
+            self.get = types.MethodType(get, self)
+            self.set = types.MethodType(set, self)
+            self.clear = types.MethodType(clear, self)
+
+    if _OrderedDict is not None:
+        class _FifoCache(object):
+            def __init__(self, size):
+                self.not_in_cache = not_in_cache = object()
+
+                cache = _OrderedDict()
+
+                def get(self, key):
+                    return cache.get(key, not_in_cache)
+
+                def set(self, key, value):
+                    cache[key] = value
+                    if len(cache) > size:
+                        cache.popitem(False)
+
+                def clear(self):
+                    cache.clear()
+
+                self.get = types.MethodType(get, self)
+                self.set = types.MethodType(set, self)
+                self.clear = types.MethodType(clear, self)
+
+    else:
+        class _FifoCache(object):
+            def __init__(self, size):
+                self.not_in_cache = not_in_cache = object()
+
+                cache = {}
+                key_fifo = collections.deque([], size)
+
+                def get(self, key):
+                    return cache.get(key, not_in_cache)
+
+                def set(self, key, value):
+                    cache[key] = value
+                    if len(cache) > size:
+                        cache.pop(key_fifo.popleft(), None)
+                    key_fifo.append(key)
+
+                def clear(self):
+                    cache.clear()
+                    key_fifo.clear()
+
+                self.get = types.MethodType(get, self)
+                self.set = types.MethodType(set, self)
+                self.clear = types.MethodType(clear, self)
+
+    # argument cache for optimizing repeated calls when backtracking through recursive expressions
+    packrat_cache = {} # this is set later by enablePackrat(); this is here so that resetCache() doesn't fail
+    packrat_cache_lock = RLock()
+    packrat_cache_stats = [0, 0]
+
+    # this method gets repeatedly called during backtracking with the same arguments -
+    # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
+    def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
+        HIT, MISS = 0, 1
+        lookup = (self, instring, loc, callPreParse, doActions)
+        with ParserElement.packrat_cache_lock:
+            cache = ParserElement.packrat_cache
+            value = cache.get(lookup)
+            if value is cache.not_in_cache:
+                ParserElement.packrat_cache_stats[MISS] += 1
+                try:
+                    value = self._parseNoCache(instring, loc, doActions, callPreParse)
+                except ParseBaseException as pe:
+                    # cache a copy of the exception, without the traceback
+                    cache.set(lookup, pe.__class__(*pe.args))
+                    raise
+                else:
+                    cache.set(lookup, (value[0], value[1].copy()))
+                    return value
+            else:
+                ParserElement.packrat_cache_stats[HIT] += 1
+                if isinstance(value, Exception):
+                    raise value
+                return (value[0], value[1].copy())
+
+    _parse = _parseNoCache
+
+    @staticmethod
+    def resetCache():
+        ParserElement.packrat_cache.clear()
+        ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)
+
+    _packratEnabled = False
+    @staticmethod
+    def enablePackrat(cache_size_limit=128):
+        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
+           Repeated parse attempts at the same string location (which happens
+           often in many complex grammars) can immediately return a cached value,
+           instead of re-executing parsing/validating code.  Memoizing covers
+           both valid results and parsing exceptions.
+
+           Parameters:
+            - cache_size_limit - (default=C{128}) - if an integer value is provided,
+              it will limit the size of the packrat cache; if None is passed, then
+              the cache size will be unbounded; if 0 is passed, the cache will
+              be effectively disabled.
+            
+           This speedup may break existing programs that use parse actions that
+           have side-effects.  For this reason, packrat parsing is disabled when
+           you first import pyparsing.  To activate the packrat feature, your
+           program must call the class method C{ParserElement.enablePackrat()}.  If
+           your program uses C{psyco} to "compile as you go", you must call
+           C{enablePackrat} before calling C{psyco.full()}.  If you do not do this,
+           Python will crash.  For best results, call C{enablePackrat()} immediately
+           after importing pyparsing.
+           
+           Example::
+               import pyparsing
+               pyparsing.ParserElement.enablePackrat()
+        """
+        if not ParserElement._packratEnabled:
+            ParserElement._packratEnabled = True
+            if cache_size_limit is None:
+                ParserElement.packrat_cache = ParserElement._UnboundedCache()
+            else:
+                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
+            ParserElement._parse = ParserElement._parseCache
+
+    def parseString( self, instring, parseAll=False ):
+        """
+        Execute the parse expression with the given string.
+        This is the main interface to the client code, once the complete
+        expression has been built.
+
+        If you want the grammar to require that the entire input string be
+        successfully parsed, then set C{parseAll} to True (equivalent to ending
+        the grammar with C{L{StringEnd()}}).
+
+        Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
+        in order to report proper column numbers in parse actions.
+        If the input string contains tabs and
+        the grammar uses parse actions that use the C{loc} argument to index into the
+        string being parsed, you can ensure you have a consistent view of the input
+        string by:
+         - calling C{parseWithTabs} on your grammar before calling C{parseString}
+           (see L{I{parseWithTabs}<parseWithTabs>})
+         - defining your parse action using the full C{(s,loc,toks)} signature, and
+           referencing the input string using the parse action's C{s} argument
+         - explicitly expanding the tabs in your input string before calling
+           C{parseString}
+        
+        Example::
+            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
+            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
+        """
+        ParserElement.resetCache()
+        if not self.streamlined:
+            self.streamline()
+            #~ self.saveAsList = True
+        for e in self.ignoreExprs:
+            e.streamline()
+        if not self.keepTabs:
+            instring = instring.expandtabs()
+        try:
+            loc, tokens = self._parse( instring, 0 )
+            if parseAll:
+                loc = self.preParse( instring, loc )
+                se = Empty() + StringEnd()
+                se._parse( instring, loc )
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+        else:
+            return tokens
+
+    def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
+        """
+        Scan the input string for expression matches.  Each match will return the
+        matching tokens, start location, and end location.  May be called with optional
+        C{maxMatches} argument, to clip scanning after 'n' matches are found.  If
+        C{overlap} is specified, then overlapping matches will be reported.
+
+        Note that the start and end locations are reported relative to the string
+        being parsed.  See L{I{parseString}<parseString>} for more information on parsing
+        strings with embedded tabs.
+
+        Example::
+            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
+            print(source)
+            for tokens,start,end in Word(alphas).scanString(source):
+                print(' '*start + '^'*(end-start))
+                print(' '*start + tokens[0])
+        
+        prints::
+        
+            sldjf123lsdjjkf345sldkjf879lkjsfd987
+            ^^^^^
+            sldjf
+                    ^^^^^^^
+                    lsdjjkf
+                              ^^^^^^
+                              sldkjf
+                                       ^^^^^^
+                                       lkjsfd
+        """
+        if not self.streamlined:
+            self.streamline()
+        for e in self.ignoreExprs:
+            e.streamline()
+
+        if not self.keepTabs:
+            instring = _ustr(instring).expandtabs()
+        instrlen = len(instring)
+        loc = 0
+        preparseFn = self.preParse
+        parseFn = self._parse
+        ParserElement.resetCache()
+        matches = 0
+        try:
+            while loc <= instrlen and matches < maxMatches:
+                try:
+                    preloc = preparseFn( instring, loc )
+                    nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
+                except ParseException:
+                    loc = preloc+1
+                else:
+                    if nextLoc > loc:
+                        matches += 1
+                        yield tokens, preloc, nextLoc
+                        if overlap:
+                            nextloc = preparseFn( instring, loc )
+                            if nextloc > loc:
+                                loc = nextLoc
+                            else:
+                                loc += 1
+                        else:
+                            loc = nextLoc
+                    else:
+                        loc = preloc+1
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+
+    def transformString( self, instring ):
+        """
+        Extension to C{L{scanString}}, to modify matching text with modified tokens that may
+        be returned from a parse action.  To use C{transformString}, define a grammar and
+        attach a parse action to it that modifies the returned token list.
+        Invoking C{transformString()} on a target string will then scan for matches,
+        and replace the matched text patterns according to the logic in the parse
+        action.  C{transformString()} returns the resulting transformed string.
+        
+        Example::
+            wd = Word(alphas)
+            wd.setParseAction(lambda toks: toks[0].title())
+            
+            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
+        Prints::
+            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
+        """
+        out = []
+        lastE = 0
+        # force preservation of s, to minimize unwanted transformation of string, and to
+        # keep string locs straight between transformString and scanString
+        self.keepTabs = True
+        try:
+            for t,s,e in self.scanString( instring ):
+                out.append( instring[lastE:s] )
+                if t:
+                    if isinstance(t,ParseResults):
+                        out += t.asList()
+                    elif isinstance(t,list):
+                        out += t
+                    else:
+                        out.append(t)
+                lastE = e
+            out.append(instring[lastE:])
+            out = [o for o in out if o]
+            return "".join(map(_ustr,_flatten(out)))
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+
+    def searchString( self, instring, maxMatches=_MAX_INT ):
+        """
+        Another extension to C{L{scanString}}, simplifying the access to the tokens found
+        to match the given parse expression.  May be called with optional
+        C{maxMatches} argument, to clip searching after 'n' matches are found.
+        
+        Example::
+            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
+            cap_word = Word(alphas.upper(), alphas.lower())
+            
+            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
+        prints::
+            ['More', 'Iron', 'Lead', 'Gold', 'I']
+        """
+        try:
+            return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+
+    def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
+        """
+        Generator method to split a string using the given expression as a separator.
+        May be called with optional C{maxsplit} argument, to limit the number of splits;
+        and the optional C{includeSeparators} argument (default=C{False}), to indicate
+        whether the separating matched text should be included in the split results.
+        
+        Example::        
+            punc = oneOf(list(".,;:/-!?"))
+            print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
+        prints::
+            ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
+        """
+        splits = 0
+        last = 0
+        for t,s,e in self.scanString(instring, maxMatches=maxsplit):
+            yield instring[last:s]
+            if includeSeparators:
+                yield t[0]
+            last = e
+        yield instring[last:]
+
+    def __add__(self, other ):
+        """
+        Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement
+        converts them to L{Literal}s by default.
+        
+        Example::
+            greet = Word(alphas) + "," + Word(alphas) + "!"
+            hello = "Hello, World!"
+            print (hello, "->", greet.parseString(hello))
+        Prints::
+            Hello, World! -> ['Hello', ',', 'World', '!']
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return And( [ self, other ] )
+
+    def __radd__(self, other ):
+        """
+        Implementation of + operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other + self
+
+    def __sub__(self, other):
+        """
+        Implementation of - operator, returns C{L{And}} with error stop
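+
+        Example (sketch; C{condition} is a hypothetical expression)::
+            # once "if" matches, the rest is required - a failure after the
+            # '-' stops the parse instead of backtracking to other alternatives
+            if_stmt = Keyword("if") - condition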
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return And( [ self, And._ErrorStop(), other ] )
+
+    def __rsub__(self, other ):
+        """
+        Implementation of - operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other - self
+
+    def __mul__(self,other):
+        """
+        Implementation of * operator, allows use of C{expr * 3} in place of
+        C{expr + expr + expr}.  Expressions may also be multiplied by a 2-integer
+        tuple, similar to C{{min,max}} multipliers in regular expressions.  Tuples
+        may also include C{None} as in:
+         - C{expr*(n,None)} or C{expr*(n,)} is equivalent
+              to C{expr*n + L{ZeroOrMore}(expr)}
+              (read as "at least n instances of C{expr}")
+         - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
+              (read as "0 to n instances of C{expr}")
+         - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
+         - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}
+
+        Note that C{expr*(None,n)} does not raise an exception if
+        more than n exprs exist in the input stream; that is,
+        C{expr*(None,n)} does not enforce a maximum number of expr
+        occurrences.  If this behavior is desired, then write
+        C{expr*(None,n) + ~expr}
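+
+        Example (a brief sketch of the multiplier forms)::
+            integer = Word(nums)
+            integer*3         # same as integer + integer + integer
+            integer*(2,4)     # matches 2, 3, or 4 integers
+            integer*(1,None)  # same as OneOrMore(integer)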
+        """
+        if isinstance(other,int):
+            minElements, optElements = other,0
+        elif isinstance(other,tuple):
+            other = (other + (None, None))[:2]
+            if other[0] is None:
+                other = (0, other[1])
+            if isinstance(other[0],int) and other[1] is None:
+                if other[0] == 0:
+                    return ZeroOrMore(self)
+                if other[0] == 1:
+                    return OneOrMore(self)
+                else:
+                    return self*other[0] + ZeroOrMore(self)
+            elif isinstance(other[0],int) and isinstance(other[1],int):
+                minElements, optElements = other
+                optElements -= minElements
+            else:
+                raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects" % (type(other[0]),type(other[1])))
+        else:
+            raise TypeError("cannot multiply 'ParserElement' and '%s' objects" % type(other))
+
+        if minElements < 0:
+            raise ValueError("cannot multiply ParserElement by negative value")
+        if optElements < 0:
+            raise ValueError("second tuple value must be greater or equal to first tuple value")
+        if minElements == optElements == 0:
+            raise ValueError("cannot multiply ParserElement by 0 or (0,0)")
+
+        if (optElements):
+            def makeOptionalList(n):
+                if n>1:
+                    return Optional(self + makeOptionalList(n-1))
+                else:
+                    return Optional(self)
+            if minElements:
+                if minElements == 1:
+                    ret = self + makeOptionalList(optElements)
+                else:
+                    ret = And([self]*minElements) + makeOptionalList(optElements)
+            else:
+                ret = makeOptionalList(optElements)
+        else:
+            if minElements == 1:
+                ret = self
+            else:
+                ret = And([self]*minElements)
+        return ret
+
+    def __rmul__(self, other):
+        return self.__mul__(other)
+
+    def __or__(self, other ):
+        """
+        Implementation of | operator - returns C{L{MatchFirst}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return MatchFirst( [ self, other ] )
+
+    def __ror__(self, other ):
+        """
+        Implementation of | operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other | self
+
+    def __xor__(self, other ):
+        """
+        Implementation of ^ operator - returns C{L{Or}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return Or( [ self, other ] )
+
+    def __rxor__(self, other ):
+        """
+        Implementation of ^ operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other ^ self
+
+    def __and__(self, other ):
+        """
+        Implementation of & operator - returns C{L{Each}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return Each( [ self, other ] )
+
+    def __rand__(self, other ):
+        """
+        Implementation of & operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other & self
+
+    def __invert__( self ):
+        """
+        Implementation of ~ operator - returns C{L{NotAny}}
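+
+        Example (sketch)::
+            # match a word only when it is not the reserved word "end"
+            identifier = ~Keyword("end") + Word(alphas)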
+        """
+        return NotAny( self )
+
+    def __call__(self, name=None):
+        """
+        Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.
+        
+        If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
+        passed as C{True}.
+           
+        If C{name} is omitted, same as calling C{L{copy}}.
+
+        Example::
+            # these are equivalent
+            userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
+            userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")             
+        """
+        if name is not None:
+            return self.setResultsName(name)
+        else:
+            return self.copy()
+
+    def suppress( self ):
+        """
+        Suppresses the output of this C{ParserElement}; useful to keep punctuation from
+        cluttering up returned output.
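+
+        Example (sketch)::
+            integer = Word(nums)
+            slash = Literal('/').suppress()
+            date_str = integer + slash + integer + slash + integer
+            # date_str.parseString("1999/12/31") -> ['1999', '12', '31']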
+        """
+        return Suppress( self )
+
+    def leaveWhitespace( self ):
+        """
+        Disables the skipping of whitespace before matching the characters in the
+        C{ParserElement}'s defined pattern.  This is normally only used internally by
+        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
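+
+        Example (sketch)::
+            expr = Literal('a') + Literal('b').leaveWhitespace()
+            expr.parseString('ab')   # -> ['a', 'b']
+            expr.parseString('a b')  # -> Exception: Expected "b"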
+        """
+        self.skipWhitespace = False
+        return self
+
+    def setWhitespaceChars( self, chars ):
+        """
+        Overrides the default whitespace chars
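+
+        Example (sketch)::
+            # treat only spaces and tabs as skippable for this expression
+            word = Word(alphas).setWhitespaceChars(" \t")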
+        """
+        self.skipWhitespace = True
+        self.whiteChars = chars
+        self.copyDefaultWhiteChars = False
+        return self
+
+    def parseWithTabs( self ):
+        """
+        Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string.
+        Must be called before C{parseString} when the input grammar contains elements that
+        match C{<TAB>} characters.
+        """
+        self.keepTabs = True
+        return self
+
+    def ignore( self, other ):
+        """
+        Define expression to be ignored (e.g., comments) while doing pattern
+        matching; may be called repeatedly, to define multiple comment or other
+        ignorable patterns.
+        
+        Example::
+            patt = OneOrMore(Word(alphas))
+            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
+            
+            patt.ignore(cStyleComment)
+            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
+        """
+        if isinstance(other, basestring):
+            other = Suppress(other)
+
+        if isinstance( other, Suppress ):
+            if other not in self.ignoreExprs:
+                self.ignoreExprs.append(other)
+        else:
+            self.ignoreExprs.append( Suppress( other.copy() ) )
+        return self
+
+    def setDebugActions( self, startAction, successAction, exceptionAction ):
+        """
+        Enable display of debugging messages while doing pattern matching.
+        """
+        self.debugActions = (startAction or _defaultStartDebugAction,
+                             successAction or _defaultSuccessDebugAction,
+                             exceptionAction or _defaultExceptionDebugAction)
+        self.debug = True
+        return self
+
+    def setDebug( self, flag=True ):
+        """
+        Enable display of debugging messages while doing pattern matching.
+        Set C{flag} to True to enable, False to disable.
+
+        Example::
+            wd = Word(alphas).setName("alphaword")
+            integer = Word(nums).setName("numword")
+            term = wd | integer
+            
+            # turn on debugging for wd
+            wd.setDebug()
+
+            OneOrMore(term).parseString("abc 123 xyz 890")
+        
+        prints::
+            Match alphaword at loc 0(1,1)
+            Matched alphaword -> ['abc']
+            Match alphaword at loc 3(1,4)
+            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
+            Match alphaword at loc 7(1,8)
+            Matched alphaword -> ['xyz']
+            Match alphaword at loc 11(1,12)
+            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
+            Match alphaword at loc 15(1,16)
+            Exception raised:Expected alphaword (at char 15), (line:1, col:16)
+
+        The output shown is that produced by the default debug actions - custom debug actions can be
+        specified using L{setDebugActions}. Prior to attempting
+        to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"}
+        is shown. Then if the parse succeeds, a C{"Matched"} message is shown; if it fails, an C{"Exception raised"}
+        message is shown. Also note the use of L{setName} to assign a human-readable name to the expression,
+        which makes debugging and exception messages easier to understand - for instance, the default
+        name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}.
+        """
+        if flag:
+            self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
+        else:
+            self.debug = False
+        return self
+
+    def __str__( self ):
+        return self.name
+
+    def __repr__( self ):
+        return _ustr(self)
+
+    def streamline( self ):
+        self.streamlined = True
+        self.strRepr = None
+        return self
+
+    def checkRecursion( self, parseElementList ):
+        pass
+
+    def validate( self, validateTrace=[] ):
+        """
+        Check defined expressions for valid structure, check for infinite recursive definitions.
+        """
+        self.checkRecursion( [] )
+
+    def parseFile( self, file_or_filename, parseAll=False ):
+        """
+        Execute the parse expression on the given file or filename.
+        If a filename is specified (instead of a file object),
+        the entire file is opened, read, and closed before parsing.
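+
+        Example (sketch; C{expr} and 'data.txt' are hypothetical)::
+            results = expr.parseFile('data.txt', parseAll=True)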
+        """
+        try:
+            file_contents = file_or_filename.read()
+        except AttributeError:
+            with open(file_or_filename, "r") as f:
+                file_contents = f.read()
+        try:
+            return self.parseString(file_contents, parseAll)
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+
+    def __eq__(self,other):
+        if isinstance(other, ParserElement):
+            return self is other or vars(self) == vars(other)
+        elif isinstance(other, basestring):
+            return self.matches(other)
+        else:
+            return super(ParserElement,self)==other
+
+    def __ne__(self,other):
+        return not (self == other)
+
+    def __hash__(self):
+        return hash(id(self))
+
+    def __req__(self,other):
+        return self == other
+
+    def __rne__(self,other):
+        return not (self == other)
+
+    def matches(self, testString, parseAll=True):
+        """
+        Method for quick testing of a parser against a test string. Good for simple
+        inline micro-tests of sub-expressions while building up a larger parser.
+           
+        Parameters:
+         - testString - to test against this expression for a match
+         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+            
+        Example::
+            expr = Word(nums)
+            assert expr.matches("100")
+        """
+        try:
+            self.parseString(_ustr(testString), parseAll=parseAll)
+            return True
+        except ParseBaseException:
+            return False
+                
+    def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
+        """
+        Execute the parse expression on a series of test strings, showing each
+        test, the parsed results or where the parse failed. Quick and easy way to
+        run a parse expression against a list of sample strings.
+           
+        Parameters:
+         - tests - a list of separate test strings, or a multiline string of test strings
+         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests           
+         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test 
+              string; pass None to disable comment filtering
+         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
+              if False, only dump nested list
+         - printResults - (default=C{True}) prints test output to stdout
+         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing
+
+        Returns: a (success, results) tuple, where success indicates that all tests succeeded
+        (or all failed if C{failureTests} is True), and results is a list of
+        (test string, result) pairs, one per test
+        
+        Example::
+            number_expr = pyparsing_common.number.copy()
+
+            result = number_expr.runTests('''
+                # unsigned integer
+                100
+                # negative integer
+                -100
+                # float with scientific notation
+                6.02e23
+                # integer with scientific notation
+                1e-12
+                ''')
+            print("Success" if result[0] else "Failed!")
+
+            result = number_expr.runTests('''
+                # stray character
+                100Z
+                # missing leading digit before '.'
+                -.100
+                # too many '.'
+                3.14.159
+                ''', failureTests=True)
+            print("Success" if result[0] else "Failed!")
+        prints::
+            # unsigned integer
+            100
+            [100]
+
+            # negative integer
+            -100
+            [-100]
+
+            # float with scientific notation
+            6.02e23
+            [6.02e+23]
+
+            # integer with scientific notation
+            1e-12
+            [1e-12]
+
+            Success
+            
+            # stray character
+            100Z
+               ^
+            FAIL: Expected end of text (at char 3), (line:1, col:4)
+
+            # missing leading digit before '.'
+            -.100
+            ^
+            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
+
+            # too many '.'
+            3.14.159
+                ^
+            FAIL: Expected end of text (at char 4), (line:1, col:5)
+
+            Success
+
+        Each test string must be on a single line. If you want to test a string that spans multiple
+        lines, create a test like this::
+
+            expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines")
+        
+        (Note that this is a raw string literal, you must include the leading 'r'.)
+        """
+        if isinstance(tests, basestring):
+            tests = list(map(str.strip, tests.rstrip().splitlines()))
+        if isinstance(comment, basestring):
+            comment = Literal(comment)
+        allResults = []
+        comments = []
+        success = True
+        for t in tests:
+            if comment is not None and comment.matches(t, False) or comments and not t:
+                comments.append(t)
+                continue
+            if not t:
+                continue
+            out = ['\n'.join(comments), t]
+            comments = []
+            try:
+                t = t.replace(r'\n','\n')
+                result = self.parseString(t, parseAll=parseAll)
+                out.append(result.dump(full=fullDump))
+                success = success and not failureTests
+            except ParseBaseException as pe:
+                fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
+                if '\n' in t:
+                    out.append(line(pe.loc, t))
+                    out.append(' '*(col(pe.loc,t)-1) + '^' + fatal)
+                else:
+                    out.append(' '*pe.loc + '^' + fatal)
+                out.append("FAIL: " + str(pe))
+                success = success and failureTests
+                result = pe
+            except Exception as exc:
+                out.append("FAIL-EXCEPTION: " + str(exc))
+                success = success and failureTests
+                result = exc
+
+            if printResults:
+                if fullDump:
+                    out.append('')
+                print('\n'.join(out))
+
+            allResults.append((t, result))
+        
+        return success, allResults
+
+        
+class Token(ParserElement):
+    """
+    Abstract C{ParserElement} subclass, for defining atomic matching patterns.
+    """
+    def __init__( self ):
+        super(Token,self).__init__( savelist=False )
+
+
+class Empty(Token):
+    """
+    An empty token, will always match.
+    """
+    def __init__( self ):
+        super(Empty,self).__init__()
+        self.name = "Empty"
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+
+
+class NoMatch(Token):
+    """
+    A token that will never match.
+    """
+    def __init__( self ):
+        super(NoMatch,self).__init__()
+        self.name = "NoMatch"
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+        self.errmsg = "Unmatchable token"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Literal(Token):
+    """
+    Token to exactly match a specified string.
+    
+    Example::
+        Literal('blah').parseString('blah')  # -> ['blah']
+        Literal('blah').parseString('blahfooblah')  # -> ['blah']
+        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"
+    
+    For case-insensitive matching, use L{CaselessLiteral}.
+    
+    For keyword matching (force word break before and after the matched string),
+    use L{Keyword} or L{CaselessKeyword}.
+    """
+    def __init__( self, matchString ):
+        super(Literal,self).__init__()
+        self.match = matchString
+        self.matchLen = len(matchString)
+        try:
+            self.firstMatchChar = matchString[0]
+        except IndexError:
+            warnings.warn("null string passed to Literal; use Empty() instead",
+                            SyntaxWarning, stacklevel=2)
+            self.__class__ = Empty
+        self.name = '"%s"' % _ustr(self.match)
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = False
+        self.mayIndexError = False
+
+    # Performance tuning: this routine gets called a *lot*
+    # if this is a single character match string  and the first character matches,
+    # short-circuit as quickly as possible, and avoid calling startswith
+    #~ @profile
+    def parseImpl( self, instring, loc, doActions=True ):
+        if (instring[loc] == self.firstMatchChar and
+            (self.matchLen==1 or instring.startswith(self.match,loc)) ):
+            return loc+self.matchLen, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+_L = Literal
+ParserElement._literalStringClass = Literal
+
+class Keyword(Token):
+    """
+    Token to exactly match a specified string as a keyword, that is, it must be
+    immediately followed by a non-keyword character.  Compare with C{L{Literal}}:
+     - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
+     - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
+    Accepts two optional constructor arguments in addition to the keyword string:
+     - C{identChars} is a string of characters that would be valid identifier characters,
+          defaulting to all alphanumerics + "_" and "$"
+     - C{caseless} allows case-insensitive matching, default is C{False}.
+       
+    Example::
+        Keyword("start").parseString("start")  # -> ['start']
+        Keyword("start").parseString("starting")  # -> Exception
+
+    For case-insensitive matching, use L{CaselessKeyword}.
+    """
+    DEFAULT_KEYWORD_CHARS = alphanums+"_$"
+
+    def __init__( self, matchString, identChars=None, caseless=False ):
+        super(Keyword,self).__init__()
+        if identChars is None:
+            identChars = Keyword.DEFAULT_KEYWORD_CHARS
+        self.match = matchString
+        self.matchLen = len(matchString)
+        try:
+            self.firstMatchChar = matchString[0]
+        except IndexError:
+            warnings.warn("null string passed to Keyword; use Empty() instead",
+                            SyntaxWarning, stacklevel=2)
+        self.name = '"%s"' % self.match
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = False
+        self.mayIndexError = False
+        self.caseless = caseless
+        if caseless:
+            self.caselessmatch = matchString.upper()
+            identChars = identChars.upper()
+        self.identChars = set(identChars)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.caseless:
+            if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
+                 (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
+                 (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
+                return loc+self.matchLen, self.match
+        else:
+            if (instring[loc] == self.firstMatchChar and
+                (self.matchLen==1 or instring.startswith(self.match,loc)) and
+                (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
+                (loc == 0 or instring[loc-1] not in self.identChars) ):
+                return loc+self.matchLen, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+
+    def copy(self):
+        c = super(Keyword,self).copy()
+        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
+        return c
+
+    @staticmethod
+    def setDefaultKeywordChars( chars ):
+        """Overrides the default Keyword chars
+        """
+        Keyword.DEFAULT_KEYWORD_CHARS = chars
+
+class CaselessLiteral(Literal):
+    """
+    Token to match a specified string, ignoring case of letters.
+    Note: the matched results will always be in the case of the given
+    match string, NOT the case of the input text.
+
+    Example::
+        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
+        
+    (Contrast with example for L{CaselessKeyword}.)
+    """
+    def __init__( self, matchString ):
+        super(CaselessLiteral,self).__init__( matchString.upper() )
+        # Preserve the defining literal.
+        self.returnString = matchString
+        self.name = "'%s'" % self.returnString
+        self.errmsg = "Expected " + self.name
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if instring[ loc:loc+self.matchLen ].upper() == self.match:
+            return loc+self.matchLen, self.returnString
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class CaselessKeyword(Keyword):
+    """
+    Caseless version of L{Keyword}.
+
+    Example::
+        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
+        
+    (Contrast with example for L{CaselessLiteral}.)
+    """
+    def __init__( self, matchString, identChars=None ):
+        super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
+             (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ):
+            return loc+self.matchLen, self.match
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class CloseMatch(Token):
+    """
+    A variation on L{Literal} which matches "close" matches, that is, 
+    strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters:
+     - C{match_string} - string to be matched
+     - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match
+    
+    The results from a successful parse will contain the matched text from the input string and the following named results:
+     - C{mismatches} - a list of the positions within the match_string where mismatches were found
+     - C{original} - the original match_string used to compare against the input string
+    
+    If C{mismatches} is an empty list, then the match was an exact match.
+    
+    Example::
+        patt = CloseMatch("ATCATCGAATGGA")
+        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
+        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)
+
+        # exact match
+        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})
+
+        # close match allowing up to 2 mismatches
+        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
+        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
+    """
+    def __init__(self, match_string, maxMismatches=1):
+        super(CloseMatch,self).__init__()
+        self.name = match_string
+        self.match_string = match_string
+        self.maxMismatches = maxMismatches
+        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
+        self.mayIndexError = False
+        self.mayReturnEmpty = False
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        start = loc
+        instrlen = len(instring)
+        maxloc = start + len(self.match_string)
+
+        if maxloc <= instrlen:
+            match_string = self.match_string
+            match_stringloc = 0
+            mismatches = []
+            maxMismatches = self.maxMismatches
+
+            for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)):
+                src,mat = s_m
+                if src != mat:
+                    mismatches.append(match_stringloc)
+                    if len(mismatches) > maxMismatches:
+                        break
+            else:
+                loc = start + match_stringloc + 1
+                results = ParseResults([instring[start:loc]])
+                results['original'] = self.match_string
+                results['mismatches'] = mismatches
+                return loc, results
+
+        raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Word(Token):
+    """
+    Token for matching words composed of allowed character sets.
+    Defined with string containing all allowed initial characters,
+    an optional string containing allowed body characters (if omitted,
+    defaults to the initial character set), and an optional minimum,
+    maximum, and/or exact length.  The default value for C{min} is 1 (a
+    minimum value < 1 is not valid); the default values for C{max} and C{exact}
+    are 0, meaning no maximum or exact length restriction. An optional
+    C{excludeChars} parameter can list characters that might be found in 
+    the input C{bodyChars} string; useful to define a word of all printables
+    except for one or two characters, for instance.
+    
+    L{srange} is useful for defining custom character set strings for defining 
+    C{Word} expressions, using range notation from regular expression character sets.
+    
+    A common mistake is to use C{Word} to match a specific literal string, as in 
+    C{Word("Address")}. Remember that C{Word} uses the string argument to define
+    I{sets} of matchable characters. This expression would match "Add", "AAA",
+    "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'.
+    To match an exact literal string, use L{Literal} or L{Keyword}.
+
+    pyparsing includes helper strings for building Words:
+     - L{alphas}
+     - L{nums}
+     - L{alphanums}
+     - L{hexnums}
+     - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)
+     - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)
+     - L{printables} (any non-whitespace character)
+
+    Example::
+        # a word composed of digits
+        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
+        
+        # a word with a leading capital, and zero or more lowercase
+        capital_word = Word(alphas.upper(), alphas.lower())
+
+        # hostnames are alphanumeric, with leading alpha, and '-'
+        hostname = Word(alphas, alphanums+'-')
+        
+        # roman numeral (not a strict parser, accepts invalid mix of characters)
+        roman = Word("IVXLCDM")
+        
+        # any string of non-whitespace characters, except for ','
+        csv_value = Word(printables, excludeChars=",")
+    """
+    def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
+        super(Word,self).__init__()
+        if excludeChars:
+            initChars = ''.join(c for c in initChars if c not in excludeChars)
+            if bodyChars:
+                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
+        self.initCharsOrig = initChars
+        self.initChars = set(initChars)
+        if bodyChars :
+            self.bodyCharsOrig = bodyChars
+            self.bodyChars = set(bodyChars)
+        else:
+            self.bodyCharsOrig = initChars
+            self.bodyChars = set(initChars)
+
+        self.maxSpecified = max > 0
+
+        if min < 1:
+            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.asKeyword = asKeyword
+
+        if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
+            if self.bodyCharsOrig == self.initCharsOrig:
+                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
+            elif len(self.initCharsOrig) == 1:
+                self.reString = "%s[%s]*" % \
+                                      (re.escape(self.initCharsOrig),
+                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
+            else:
+                self.reString = "[%s][%s]*" % \
+                                      (_escapeRegexRangeChars(self.initCharsOrig),
+                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
+            if self.asKeyword:
+                self.reString = r"\b"+self.reString+r"\b"
+            try:
+                self.re = re.compile( self.reString )
+            except Exception:
+                self.re = None
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.re:
+            result = self.re.match(instring,loc)
+            if not result:
+                raise ParseException(instring, loc, self.errmsg, self)
+
+            loc = result.end()
+            return loc, result.group()
+
+        if not(instring[ loc ] in self.initChars):
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        start = loc
+        loc += 1
+        instrlen = len(instring)
+        bodychars = self.bodyChars
+        maxloc = start + self.maxLen
+        maxloc = min( maxloc, instrlen )
+        while loc < maxloc and instring[loc] in bodychars:
+            loc += 1
+
+        throwException = False
+        if loc - start < self.minLen:
+            throwException = True
+        if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
+            throwException = True
+        if self.asKeyword:
+            if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
+                throwException = True
+
+        if throwException:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+    def __str__( self ):
+        try:
+            return super(Word,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+
+            def charsAsStr(s):
+                if len(s)>4:
+                    return s[:4]+"..."
+                else:
+                    return s
+
+            if ( self.initCharsOrig != self.bodyCharsOrig ):
+                self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
+            else:
+                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
+
+        return self.strRepr
+
+
+class Regex(Token):
+    """
+    Token for matching strings that match a given regular expression.
+    Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
+    If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as 
+    named parse results.
+
+    Example::
+        realnum = Regex(r"[+-]?\d+\.\d*")
+        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
+        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
+        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
+    """
+    compiledREtype = type(re.compile("[A-Z]"))
+    def __init__( self, pattern, flags=0):
+        """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""
+        super(Regex,self).__init__()
+
+        if isinstance(pattern, basestring):
+            if not pattern:
+                warnings.warn("null string passed to Regex; use Empty() instead",
+                        SyntaxWarning, stacklevel=2)
+
+            self.pattern = pattern
+            self.flags = flags
+
+            try:
+                self.re = re.compile(self.pattern, self.flags)
+                self.reString = self.pattern
+            except sre_constants.error:
+                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
+                    SyntaxWarning, stacklevel=2)
+                raise
+
+        elif isinstance(pattern, Regex.compiledREtype):
+            self.re = pattern
+            self.pattern = \
+            self.reString = str(pattern)
+            self.flags = flags
+            
+        else:
+            raise ValueError("Regex may only be constructed with a string or a compiled RE object")
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        result = self.re.match(instring,loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        d = result.groupdict()
+        ret = ParseResults(result.group())
+        if d:
+            for k in d:
+                ret[k] = d[k]
+        return loc,ret
+
+    def __str__( self ):
+        try:
+            return super(Regex,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "Re:(%s)" % repr(self.pattern)
+
+        return self.strRepr
+
+
+class QuotedString(Token):
+    r"""
+    Token for matching strings that are delimited by quoting characters.
+    
+    Defined with the following parameters:
+        - quoteChar - string of one or more characters defining the quote delimiting string
+        - escChar - character to escape quotes, typically backslash (default=C{None})
+        - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
+        - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
+        - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
+        - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
+        - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})
+
+    Example::
+        qs = QuotedString('"')
+        print(qs.searchString('lsjdf "This is the quote" sldjf'))
+        complex_qs = QuotedString('{{', endQuoteChar='}}')
+        print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
+        sql_qs = QuotedString('"', escQuote='""')
+        print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
+    prints::
+        [['This is the quote']]
+        [['This is the "quote"']]
+        [['This is the quote with "embedded" quotes']]
+    """
+    def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True):
+        super(QuotedString,self).__init__()
+
+        # remove white space from quote chars - won't work anyway
+        quoteChar = quoteChar.strip()
+        if not quoteChar:
+            warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
+            raise SyntaxError()
+
+        if endQuoteChar is None:
+            endQuoteChar = quoteChar
+        else:
+            endQuoteChar = endQuoteChar.strip()
+            if not endQuoteChar:
+                warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
+                raise SyntaxError()
+
+        self.quoteChar = quoteChar
+        self.quoteCharLen = len(quoteChar)
+        self.firstQuoteChar = quoteChar[0]
+        self.endQuoteChar = endQuoteChar
+        self.endQuoteCharLen = len(endQuoteChar)
+        self.escChar = escChar
+        self.escQuote = escQuote
+        self.unquoteResults = unquoteResults
+        self.convertWhitespaceEscapes = convertWhitespaceEscapes
+
+        if multiline:
+            self.flags = re.MULTILINE | re.DOTALL
+            self.pattern = r'%s(?:[^%s%s]' % \
+                ( re.escape(self.quoteChar),
+                  _escapeRegexRangeChars(self.endQuoteChar[0]),
+                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+        else:
+            self.flags = 0
+            self.pattern = r'%s(?:[^%s\n\r%s]' % \
+                ( re.escape(self.quoteChar),
+                  _escapeRegexRangeChars(self.endQuoteChar[0]),
+                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+        if len(self.endQuoteChar) > 1:
+            self.pattern += (
+                '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
+                                               _escapeRegexRangeChars(self.endQuoteChar[i]))
+                                    for i in range(len(self.endQuoteChar)-1,0,-1)) + ')'
+                )
+        if escQuote:
+            self.pattern += (r'|(?:%s)' % re.escape(escQuote))
+        if escChar:
+            self.pattern += (r'|(?:%s.)' % re.escape(escChar))
+            self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
+        self.pattern += (r')*%s' % re.escape(self.endQuoteChar))
+
+        try:
+            self.re = re.compile(self.pattern, self.flags)
+            self.reString = self.pattern
+        except sre_constants.error:
+            warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
+                SyntaxWarning, stacklevel=2)
+            raise
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        ret = result.group()
+
+        if self.unquoteResults:
+
+            # strip off quotes
+            ret = ret[self.quoteCharLen:-self.endQuoteCharLen]
+
+            if isinstance(ret,basestring):
+                # replace escaped whitespace
+                if '\\' in ret and self.convertWhitespaceEscapes:
+                    ws_map = {
+                        r'\t' : '\t',
+                        r'\n' : '\n',
+                        r'\f' : '\f',
+                        r'\r' : '\r',
+                    }
+                    for wslit,wschar in ws_map.items():
+                        ret = ret.replace(wslit, wschar)
+
+                # replace escaped characters
+                if self.escChar:
+                    ret = re.sub(self.escCharReplacePattern,"\g<1>",ret)
+
+                # replace escaped quotes
+                if self.escQuote:
+                    ret = ret.replace(self.escQuote, self.endQuoteChar)
+
+        return loc, ret
+
+    def __str__( self ):
+        try:
+            return super(QuotedString,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)
+
+        return self.strRepr
+
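+# Illustrative sketch (assumed usage, not from the upstream docs): escChar lets
+# a quoted string contain its own delimiter via backslash escapes, which are
+# stripped from the unquoted result.
+#
+#   esc_qs = QuotedString('"', escChar='\\')
+#   print(esc_qs.parseString(r'"a \"quoted\" word"'))  # -> ['a "quoted" word']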
+
+class CharsNotIn(Token):
+    """
+    Token for matching words composed of characters I{not} in a given set (will
+    include whitespace in matched characters if not listed in the provided exclusion set - see example).
+    Defined with string containing all disallowed characters, and an optional
+    minimum, maximum, and/or exact length.  The default value for C{min} is 1 (a
+    minimum value < 1 is not valid); the default values for C{max} and C{exact}
+    are 0, meaning no maximum or exact length restriction.
+
+    Example::
+        # define a comma-separated-value as anything that is not a ','
+        csv_value = CharsNotIn(',')
+        print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
+    prints::
+        ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
+    """
+    def __init__( self, notChars, min=1, max=0, exact=0 ):
+        super(CharsNotIn,self).__init__()
+        self.skipWhitespace = False
+        self.notChars = notChars
+
+        if min < 1:
+            raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayReturnEmpty = ( self.minLen == 0 )
+        self.mayIndexError = False
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if instring[loc] in self.notChars:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        start = loc
+        loc += 1
+        notchars = self.notChars
+        maxlen = min( start+self.maxLen, len(instring) )
+        while loc < maxlen and \
+              (instring[loc] not in notchars):
+            loc += 1
+
+        if loc - start < self.minLen:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+    def __str__( self ):
+        try:
+            return super(CharsNotIn, self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            if len(self.notChars) > 4:
+                self.strRepr = "!W:(%s...)" % self.notChars[:4]
+            else:
+                self.strRepr = "!W:(%s)" % self.notChars
+
+        return self.strRepr
+
+class White(Token):
+    """
+    Special matching class for matching whitespace.  Normally, whitespace is ignored
+    by pyparsing grammars.  This class is included when some whitespace structures
+    are significant.  Define with a string containing the whitespace characters to be
+    matched; default is C{" \\t\\r\\n"}.  Also takes optional C{min}, C{max}, and C{exact} arguments,
+    as defined for the C{L{Word}} class.
+    """
+    whiteStrs = {
+        " " : "",
+        "\t": "",
+        "\n": "",
+        "\r": "",
+        "\f": "",
+        }
+    def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
+        super(White,self).__init__()
+        self.matchWhite = ws
+        self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) )
+        #~ self.leaveWhitespace()
+        self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite))
+        self.mayReturnEmpty = True
+        self.errmsg = "Expected " + self.name
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if not(instring[ loc ] in self.matchWhite):
+            raise ParseException(instring, loc, self.errmsg, self)
+        start = loc
+        loc += 1
+        maxloc = start + self.maxLen
+        maxloc = min( maxloc, len(instring) )
+        while loc < maxloc and instring[loc] in self.matchWhite:
+            loc += 1
+
+        if loc - start < self.minLen:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
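+# Illustrative sketch (assumed usage, not from the upstream docs): White makes
+# otherwise-skipped whitespace significant, e.g. a tab used as a field separator.
+#
+#   tab_sep = Word(alphas) + White("\t").suppress() + Word(alphas)
+#   print(tab_sep.parseString("abc\tdef"))  # -> ['abc', 'def']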
+
+class _PositionToken(Token):
+    def __init__( self ):
+        super(_PositionToken,self).__init__()
+        self.name=self.__class__.__name__
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+
+class GoToColumn(_PositionToken):
+    """
+    Token to advance to a specific column of input text; useful for tabular report scraping.
+    """
+    def __init__( self, colno ):
+        super(GoToColumn,self).__init__()
+        self.col = colno
+
+    def preParse( self, instring, loc ):
+        if col(loc,instring) != self.col:
+            instrlen = len(instring)
+            if self.ignoreExprs:
+                loc = self._skipIgnorables( instring, loc )
+            while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col :
+                loc += 1
+        return loc
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        thiscol = col( loc, instring )
+        if thiscol > self.col:
+            raise ParseException( instring, loc, "Text not in expected column", self )
+        newloc = loc + self.col - thiscol
+        ret = instring[ loc: newloc ]
+        return newloc, ret
+
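+# Illustrative sketch (assumed usage, not from the upstream docs): GoToColumn
+# jumps to a fixed column, e.g. for fixed-width report fields (hypothetical
+# layout with the numeric field starting at column 13).
+#
+#   row = Word(alphas) + GoToColumn(13).suppress() + Word(nums)
+#   print(row.parseString("widget      42"))  # -> ['widget', '42']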
+
+class LineStart(_PositionToken):
+    """
+    Matches if current position is at the beginning of a line within the parse string
+    
+    Example::
+    
+        test = '''\
+        AAA this line
+        AAA and this line
+          AAA but not this one
+        B AAA and definitely not this one
+        '''
+
+        for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
+            print(t)
+    
+    Prints::
+        ['AAA', ' this line']
+        ['AAA', ' and this line']    
+
+    """
+    def __init__( self ):
+        super(LineStart,self).__init__()
+        self.errmsg = "Expected start of line"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if col(loc, instring) == 1:
+            return loc, []
+        raise ParseException(instring, loc, self.errmsg, self)
+
+class LineEnd(_PositionToken):
+    """
+    Matches if current position is at the end of a line within the parse string
+    """
+    def __init__( self ):
+        super(LineEnd,self).__init__()
+        self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
+        self.errmsg = "Expected end of line"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if loc<len(instring):
+            if instring[loc] == "\n":
+                return loc+1, "\n"
+            else:
+                raise ParseException(instring, loc, self.errmsg, self)
+        elif loc == len(instring):
+            return loc+1, []
+        else:
+            raise ParseException(instring, loc, self.errmsg, self)
+
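+# Illustrative sketch (assumed usage, not from the upstream docs): LineEnd as an
+# explicit end-of-line marker when a value must be the last thing on its line.
+#
+#   line_expr = LineStart() + Word(alphas) + LineEnd().suppress()
+#   print(line_expr.parseString("hello\n"))  # -> ['hello']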
+class WordStart(_PositionToken):
+    """
+    Matches if the current position is at the beginning of a Word, and
+    is not preceded by any character in a given set of C{wordChars}
+    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
+    use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
+    the string being parsed, or at the beginning of a line.
+    """
+    def __init__(self, wordChars = printables):
+        super(WordStart,self).__init__()
+        self.wordChars = set(wordChars)
+        self.errmsg = "Not at the start of a word"
+
+    def parseImpl(self, instring, loc, doActions=True ):
+        if loc != 0:
+            if (instring[loc-1] in self.wordChars or
+                instring[loc] not in self.wordChars):
+                raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
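+# Illustrative sketch (assumed usage, not from the upstream docs): WordStart
+# rejects matches that begin in the middle of a word, like a regex \b boundary.
+#
+#   expr = WordStart(alphanums) + Literal("ball")
+#   print(expr.searchString("football ball"))  # -> [['ball']]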
+class WordEnd(_PositionToken):
+    """
+    Matches if the current position is at the end of a Word, and
+    is not followed by any character in a given set of C{wordChars}
+    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
+    use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
+    the string being parsed, or at the end of a line.
+    """
+    def __init__(self, wordChars = printables):
+        super(WordEnd,self).__init__()
+        self.wordChars = set(wordChars)
+        self.skipWhitespace = False
+        self.errmsg = "Not at the end of a word"
+
+    def parseImpl(self, instring, loc, doActions=True ):
+        instrlen = len(instring)
+        if instrlen>0 and loc<instrlen:
+            if (instring[loc] in self.wordChars or
+                instring[loc-1] not in self.wordChars):
+                raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+
+class Or(ParseExpression):
+    """
+    Requires that at least one C{ParseExpression} is found.
+    If two expressions match, the expression that matches the longest string will be used.
+    May be constructed using the C{'^'} operator.
+
+    Example::
+        # construct Or using '^' operator
+
+        number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
+        print(number.searchString("123 3.1416 789")) #  -> [['123'], ['3.1416'], ['789']]
+    """
+    def __init__( self, exprs, savelist = False ):
+        super(Or,self).__init__(exprs, savelist)
+        if self.exprs:
+            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+        else:
+            self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        maxExcLoc = -1
+        maxException = None
+        matches = []
+        for e in self.exprs:
+            try:
+                loc2 = e.tryParse( instring, loc )
+            except ParseException as err:
+                err.__traceback__ = None
+                if err.loc > maxExcLoc:
+                    maxException = err
+                    maxExcLoc = err.loc
+            except IndexError:
+                if len(instring) > maxExcLoc:
+                    maxException = ParseException(instring,len(instring),e.errmsg,self)
+                    maxExcLoc = len(instring)
+            else:
+                # save match among all matches, to retry longest to shortest
+                matches.append((loc2, e))
+
+        if matches:
+            matches.sort(key=lambda x: -x[0])
+            for _,e in matches:
+                try:
+                    return e._parse( instring, loc, doActions )
+                except ParseException as err:
+                    err.__traceback__ = None
+                    if err.loc > maxExcLoc:
+                        maxException = err
+                        maxExcLoc = err.loc
+
+        if maxException is not None:
+            maxException.msg = self.errmsg
+            raise maxException
+        else:
+            raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+
+    def __ixor__(self, other ):
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        return self.append( other ) #Or( [ self, other ] )
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+
+
+class MatchFirst(ParseExpression):
+    """
+    Requires that at least one C{ParseExpression} is found.
+    If two expressions match, the first one listed is the one that will match.
+    May be constructed using the C{'|'} operator.
+
+    Example::
+        # construct MatchFirst using '|' operator
+        
+        # watch the order of expressions to match
+        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
+        print(number.searchString("123 3.1416 789")) #  Fail! -> [['123'], ['3'], ['1416'], ['789']]
+
+        # put more selective expression first
+        number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
+        print(number.searchString("123 3.1416 789")) #  Better -> [['123'], ['3.1416'], ['789']]
+    """
+    def __init__( self, exprs, savelist = False ):
+        super(MatchFirst,self).__init__(exprs, savelist)
+        if self.exprs:
+            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+        else:
+            self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        maxExcLoc = -1
+        maxException = None
+        for e in self.exprs:
+            try:
+                ret = e._parse( instring, loc, doActions )
+                return ret
+            except ParseException as err:
+                if err.loc > maxExcLoc:
+                    maxException = err
+                    maxExcLoc = err.loc
+            except IndexError:
+                if len(instring) > maxExcLoc:
+                    maxException = ParseException(instring,len(instring),e.errmsg,self)
+                    maxExcLoc = len(instring)
+
+        # only got here if no expression matched, raise exception for match that made it the furthest
+        else:
+            if maxException is not None:
+                maxException.msg = self.errmsg
+                raise maxException
+            else:
+                raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+    def __ior__(self, other ):
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        return self.append( other ) #MatchFirst( [ self, other ] )
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+
+
+class Each(ParseExpression):
+    """
+    Requires all given C{ParseExpression}s to be found, but in any order.
+    Expressions may be separated by whitespace.
+    May be constructed using the C{'&'} operator.
+
+    Example::
+        color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
+        shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
+        integer = Word(nums)
+        shape_attr = "shape:" + shape_type("shape")
+        posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
+        color_attr = "color:" + color("color")
+        size_attr = "size:" + integer("size")
+
+        # use Each (using operator '&') to accept attributes in any order 
+        # (shape and posn are required, color and size are optional)
+        shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)
+
+        shape_spec.runTests('''
+            shape: SQUARE color: BLACK posn: 100, 120
+            shape: CIRCLE size: 50 color: BLUE posn: 50,80
+            color:GREEN size:20 shape:TRIANGLE posn:20,40
+            '''
+            )
+    prints::
+        shape: SQUARE color: BLACK posn: 100, 120
+        ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
+        - color: BLACK
+        - posn: ['100', ',', '120']
+          - x: 100
+          - y: 120
+        - shape: SQUARE
+
+
+        shape: CIRCLE size: 50 color: BLUE posn: 50,80
+        ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
+        - color: BLUE
+        - posn: ['50', ',', '80']
+          - x: 50
+          - y: 80
+        - shape: CIRCLE
+        - size: 50
+
+
+        color: GREEN size: 20 shape: TRIANGLE posn: 20,40
+        ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
+        - color: GREEN
+        - posn: ['20', ',', '40']
+          - x: 20
+          - y: 40
+        - shape: TRIANGLE
+        - size: 20
+    """
+    def __init__( self, exprs, savelist = True ):
+        super(Each,self).__init__(exprs, savelist)
+        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+        self.skipWhitespace = True
+        self.initExprGroups = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.initExprGroups:
+            self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))
+            opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
+            opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]
+            self.optionals = opt1 + opt2
+            self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
+            self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
+            self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
+            self.required += self.multirequired
+            self.initExprGroups = False
+        tmpLoc = loc
+        tmpReqd = self.required[:]
+        tmpOpt  = self.optionals[:]
+        matchOrder = []
+
+        keepMatching = True
+        while keepMatching:
+            tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
+            failed = []
+            for e in tmpExprs:
+                try:
+                    tmpLoc = e.tryParse( instring, tmpLoc )
+                except ParseException:
+                    failed.append(e)
+                else:
+                    matchOrder.append(self.opt1map.get(id(e),e))
+                    if e in tmpReqd:
+                        tmpReqd.remove(e)
+                    elif e in tmpOpt:
+                        tmpOpt.remove(e)
+            if len(failed) == len(tmpExprs):
+                keepMatching = False
+
+        if tmpReqd:
+            missing = ", ".join(_ustr(e) for e in tmpReqd)
+            raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )
+
+        # add any unmatched Optionals, in case they have default values defined
+        matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]
+
+        resultlist = []
+        for e in matchOrder:
+            loc,results = e._parse(instring,loc,doActions)
+            resultlist.append(results)
+
+        finalResults = sum(resultlist, ParseResults([]))
+        return loc, finalResults
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+
+
+class ParseElementEnhance(ParserElement):
+    """
+    Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.
+    """
+    def __init__( self, expr, savelist=False ):
+        super(ParseElementEnhance,self).__init__(savelist)
+        if isinstance( expr, basestring ):
+            if issubclass(ParserElement._literalStringClass, Token):
+                expr = ParserElement._literalStringClass(expr)
+            else:
+                expr = ParserElement._literalStringClass(Literal(expr))
+        self.expr = expr
+        self.strRepr = None
+        if expr is not None:
+            self.mayIndexError = expr.mayIndexError
+            self.mayReturnEmpty = expr.mayReturnEmpty
+            self.setWhitespaceChars( expr.whiteChars )
+            self.skipWhitespace = expr.skipWhitespace
+            self.saveAsList = expr.saveAsList
+            self.callPreparse = expr.callPreparse
+            self.ignoreExprs.extend(expr.ignoreExprs)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.expr is not None:
+            return self.expr._parse( instring, loc, doActions, callPreParse=False )
+        else:
+            raise ParseException("",loc,self.errmsg,self)
+
+    def leaveWhitespace( self ):
+        self.skipWhitespace = False
+        # copy the contained expression before recursing, guarding against a
+        # still-unset (None) expression
+        if self.expr is not None:
+            self.expr = self.expr.copy()
+            self.expr.leaveWhitespace()
+        return self
+
+    def ignore( self, other ):
+        if isinstance( other, Suppress ):
+            if other not in self.ignoreExprs:
+                super( ParseElementEnhance, self).ignore( other )
+                if self.expr is not None:
+                    self.expr.ignore( self.ignoreExprs[-1] )
+        else:
+            super( ParseElementEnhance, self).ignore( other )
+            if self.expr is not None:
+                self.expr.ignore( self.ignoreExprs[-1] )
+        return self
+
+    def streamline( self ):
+        super(ParseElementEnhance,self).streamline()
+        if self.expr is not None:
+            self.expr.streamline()
+        return self
+
+    def checkRecursion( self, parseElementList ):
+        if self in parseElementList:
+            raise RecursiveGrammarException( parseElementList+[self] )
+        subRecCheckList = parseElementList[:] + [ self ]
+        if self.expr is not None:
+            self.expr.checkRecursion( subRecCheckList )
+
+    def validate( self, validateTrace=[] ):
+        tmp = validateTrace[:]+[self]
+        if self.expr is not None:
+            self.expr.validate(tmp)
+        self.checkRecursion( [] )
+
+    def __str__( self ):
+        try:
+            return super(ParseElementEnhance,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None and self.expr is not None:
+            self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
+        return self.strRepr
+
+
+class FollowedBy(ParseElementEnhance):
+    """
+    Lookahead matching of the given parse expression.  C{FollowedBy}
+    does I{not} advance the parsing position within the input string, it only
+    verifies that the specified parse expression matches at the current
+    position.  C{FollowedBy} always returns a null token list.
+
+    Example::
+        # use FollowedBy to match a label only if it is followed by a ':'
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        
+        OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
+    prints::
+        [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
+    """
+    def __init__( self, expr ):
+        super(FollowedBy,self).__init__(expr)
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        self.expr.tryParse( instring, loc )
+        return loc, []
+
+
+class NotAny(ParseElementEnhance):
+    """
+    Lookahead to disallow matching with the given parse expression.  C{NotAny}
+    does I{not} advance the parsing position within the input string, it only
+    verifies that the specified parse expression does I{not} match at the current
+    position.  Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
+    always returns a null token list.  May be constructed using the '~' operator.
+
+    Example::
+        
+    """
+    def __init__( self, expr ):
+        super(NotAny,self).__init__(expr)
+        #~ self.leaveWhitespace()
+        self.skipWhitespace = False  # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
+        self.mayReturnEmpty = True
+        self.errmsg = "Found unwanted token, "+_ustr(self.expr)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.expr.canParseNext(instring, loc):
+            raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "~{" + _ustr(self.expr) + "}"
+
+        return self.strRepr
+
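+# Illustrative sketch (assumed usage, not from the upstream docs, whose Example
+# section above is empty): ~expr builds a NotAny, handy as a keyword guard.
+#
+#   keyword = Literal("if") | Literal("else")
+#   identifier = ~keyword + Word(alphas)
+#   print(identifier.parseString("count"))  # -> ['count']
+#   # identifier.parseString("if") raises ParseException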
+class _MultipleMatch(ParseElementEnhance):
+    def __init__( self, expr, stopOn=None):
+        super(_MultipleMatch, self).__init__(expr)
+        self.saveAsList = True
+        ender = stopOn
+        if isinstance(ender, basestring):
+            ender = ParserElement._literalStringClass(ender)
+        self.not_ender = ~ender if ender is not None else None
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        self_expr_parse = self.expr._parse
+        self_skip_ignorables = self._skipIgnorables
+        check_ender = self.not_ender is not None
+        if check_ender:
+            try_not_ender = self.not_ender.tryParse
+        
+        # must be at least one (but first see if we are the stopOn sentinel;
+        # if so, fail)
+        if check_ender:
+            try_not_ender(instring, loc)
+        loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
+        try:
+            hasIgnoreExprs = (not not self.ignoreExprs)
+            while 1:
+                if check_ender:
+                    try_not_ender(instring, loc)
+                if hasIgnoreExprs:
+                    preloc = self_skip_ignorables( instring, loc )
+                else:
+                    preloc = loc
+                loc, tmptokens = self_expr_parse( instring, preloc, doActions )
+                if tmptokens or tmptokens.haskeys():
+                    tokens += tmptokens
+        except (ParseException,IndexError):
+            pass
+
+        return loc, tokens
+        
+class OneOrMore(_MultipleMatch):
+    """
+    Repetition of one or more of the given expression.
+    
+    Parameters:
+     - expr - expression that must match one or more times
+     - stopOn - (default=C{None}) - expression for a terminating sentinel
+          (only required if the sentinel would ordinarily match the repetition 
+          expression)          
+
+    Example::
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+        text = "shape: SQUARE posn: upper left color: BLACK"
+        OneOrMore(attr_expr).parseString(text).pprint()  # Fail! read 'posn' as data instead of next label -> [['shape', 'SQUARE posn']]
+
+        # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
+        
+        # could also be written as
+        (attr_expr * (1,)).parseString(text).pprint()
+    """
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + _ustr(self.expr) + "}..."
+
+        return self.strRepr
+
+class ZeroOrMore(_MultipleMatch):
+    """
+    Optional repetition of zero or more of the given expression.
+    
+    Parameters:
+     - expr - expression that must match zero or more times
+     - stopOn - (default=C{None}) - expression for a terminating sentinel
+          (only required if the sentinel would ordinarily match the repetition 
+          expression)          
+
+    Example: similar to L{OneOrMore}
+    """
+    def __init__( self, expr, stopOn=None):
+        super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)
+        self.mayReturnEmpty = True
+        
+    def parseImpl( self, instring, loc, doActions=True ):
+        try:
+            return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
+        except (ParseException,IndexError):
+            return loc, []
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "[" + _ustr(self.expr) + "]..."
+
+        return self.strRepr
+
+class _NullToken(object):
+    def __bool__(self):
+        return False
+    __nonzero__ = __bool__
+    def __str__(self):
+        return ""
+
+_optionalNotMatched = _NullToken()
+class Optional(ParseElementEnhance):
+    """
+    Optional matching of the given expression.
+
+    Parameters:
+     - expr - expression that must match zero or more times
+     - default (optional) - value to be returned if the optional expression is not found.
+
+    Example::
+        # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
+        zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
+        zip.runTests('''
+            # traditional ZIP code
+            12345
+            
+            # ZIP+4 form
+            12101-0001
+            
+            # invalid ZIP
+            98765-
+            ''')
+    prints::
+        # traditional ZIP code
+        12345
+        ['12345']
+
+        # ZIP+4 form
+        12101-0001
+        ['12101-0001']
+
+        # invalid ZIP
+        98765-
+             ^
+        FAIL: Expected end of text (at char 5), (line:1, col:6)
+    """
+    def __init__( self, expr, default=_optionalNotMatched ):
+        super(Optional,self).__init__( expr, savelist=False )
+        self.saveAsList = self.expr.saveAsList
+        self.defaultValue = default
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        try:
+            loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
+        except (ParseException,IndexError):
+            if self.defaultValue is not _optionalNotMatched:
+                if self.expr.resultsName:
+                    tokens = ParseResults([ self.defaultValue ])
+                    tokens[self.expr.resultsName] = self.defaultValue
+                else:
+                    tokens = [ self.defaultValue ]
+            else:
+                tokens = []
+        return loc, tokens
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "[" + _ustr(self.expr) + "]"
+
+        return self.strRepr
+
+class SkipTo(ParseElementEnhance):
+    """
+    Token for skipping over all undefined text until the matched expression is found.
+
+    Parameters:
+     - expr - target expression marking the end of the data to be skipped
+     - include - (default=C{False}) if True, the target expression is also parsed 
+          (the skipped text and target expression are returned as a 2-element list).
+     - ignore - (default=C{None}) used to define grammars (typically quoted strings and 
+          comments) that might contain false matches to the target expression
+     - failOn - (default=C{None}) define expressions that are not allowed to be 
+          included in the skipped test; if found before the target expression is found, 
+          the SkipTo is not a match
+
+    Example::
+        report = '''
+            Outstanding Issues Report - 1 Jan 2000
+
+               # | Severity | Description                               |  Days Open
+            -----+----------+-------------------------------------------+-----------
+             101 | Critical | Intermittent system crash                 |          6
+              94 | Cosmetic | Spelling error on Login ('log|n')         |         14
+              79 | Minor    | System slow when running too many reports |         47
+            '''
+        integer = Word(nums)
+        SEP = Suppress('|')
+        # use SkipTo to simply match everything up until the next SEP
+        # - ignore quoted strings, so that a '|' character inside a quoted string does not match
+        # - parse action will call token.strip() for each matched token, i.e., the description body
+        string_data = SkipTo(SEP, ignore=quotedString)
+        string_data.setParseAction(tokenMap(str.strip))
+        ticket_expr = (integer("issue_num") + SEP 
+                      + string_data("sev") + SEP 
+                      + string_data("desc") + SEP 
+                      + integer("days_open"))
+        
+        for tkt in ticket_expr.searchString(report):
+            print(tkt.dump())
+    prints::
+        ['101', 'Critical', 'Intermittent system crash', '6']
+        - days_open: 6
+        - desc: Intermittent system crash
+        - issue_num: 101
+        - sev: Critical
+        ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
+        - days_open: 14
+        - desc: Spelling error on Login ('log|n')
+        - issue_num: 94
+        - sev: Cosmetic
+        ['79', 'Minor', 'System slow when running too many reports', '47']
+        - days_open: 47
+        - desc: System slow when running too many reports
+        - issue_num: 79
+        - sev: Minor
+    """
+    def __init__( self, other, include=False, ignore=None, failOn=None ):
+        super( SkipTo, self ).__init__( other )
+        self.ignoreExpr = ignore
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+        self.includeMatch = include
+        self.asList = False
+        if isinstance(failOn, basestring):
+            self.failOn = ParserElement._literalStringClass(failOn)
+        else:
+            self.failOn = failOn
+        self.errmsg = "No match found for "+_ustr(self.expr)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        startloc = loc
+        instrlen = len(instring)
+        expr = self.expr
+        expr_parse = self.expr._parse
+        self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
+        self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None
+        
+        tmploc = loc
+        while tmploc <= instrlen:
+            if self_failOn_canParseNext is not None:
+                # break if failOn expression matches
+                if self_failOn_canParseNext(instring, tmploc):
+                    break
+                    
+            if self_ignoreExpr_tryParse is not None:
+                # advance past ignore expressions
+                while 1:
+                    try:
+                        tmploc = self_ignoreExpr_tryParse(instring, tmploc)
+                    except ParseBaseException:
+                        break
+            
+            try:
+                expr_parse(instring, tmploc, doActions=False, callPreParse=False)
+            except (ParseException, IndexError):
+                # no match, advance loc in string
+                tmploc += 1
+            else:
+                # matched skipto expr, done
+                break
+
+        else:
+            # ran off the end of the input string without matching skipto expr, fail
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        # build up return values
+        loc = tmploc
+        skiptext = instring[startloc:loc]
+        skipresult = ParseResults(skiptext)
+        
+        if self.includeMatch:
+            loc, mat = expr_parse(instring,loc,doActions,callPreParse=False)
+            skipresult += mat
+
+        return loc, skipresult
+
+class Forward(ParseElementEnhance):
+    """
+    Forward declaration of an expression to be defined later -
+    used for recursive grammars, such as algebraic infix notation.
+    When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.
+
+    Note: take care when assigning to C{Forward} not to overlook precedence of operators.
+    Specifically, '|' has a lower precedence than '<<', so that::
+        fwdExpr << a | b | c
+    will actually be evaluated as::
+        (fwdExpr << a) | b | c
+    thereby leaving b and c out as parseable alternatives.  It is recommended that you
+    explicitly group the values inserted into the C{Forward}::
+        fwdExpr << (a | b | c)
+    Converting to use the '<<=' operator instead will avoid this problem.
+
+    See L{ParseResults.pprint} for an example of a recursive parser created using
+    C{Forward}.
+    """
+    def __init__( self, other=None ):
+        super(Forward,self).__init__( other, savelist=False )
+
+    def __lshift__( self, other ):
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass(other)
+        self.expr = other
+        self.strRepr = None
+        self.mayIndexError = self.expr.mayIndexError
+        self.mayReturnEmpty = self.expr.mayReturnEmpty
+        self.setWhitespaceChars( self.expr.whiteChars )
+        self.skipWhitespace = self.expr.skipWhitespace
+        self.saveAsList = self.expr.saveAsList
+        self.ignoreExprs.extend(self.expr.ignoreExprs)
+        return self
+        
+    def __ilshift__(self, other):
+        return self << other
+    
+    def leaveWhitespace( self ):
+        self.skipWhitespace = False
+        return self
+
+    def streamline( self ):
+        if not self.streamlined:
+            self.streamlined = True
+            if self.expr is not None:
+                self.expr.streamline()
+        return self
+
+    def validate( self, validateTrace=[] ):
+        if self not in validateTrace:
+            tmp = validateTrace[:]+[self]
+            if self.expr is not None:
+                self.expr.validate(tmp)
+        self.checkRecursion([])
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+        return self.__class__.__name__ + ": ..."
+
+        # stubbed out for now - creates awful memory and perf issues
+        self._revertClass = self.__class__
+        self.__class__ = _ForwardNoRecurse
+        try:
+            if self.expr is not None:
+                retString = _ustr(self.expr)
+            else:
+                retString = "None"
+        finally:
+            self.__class__ = self._revertClass
+        return self.__class__.__name__ + ": " + retString
+
+    def copy(self):
+        if self.expr is not None:
+            return super(Forward,self).copy()
+        else:
+            ret = Forward()
+            ret <<= self
+            return ret
+
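+# Illustrative sketch (assumed usage, not from the upstream docs): a recursive
+# grammar for nested additions, assigned with '<<=' as recommended above.
+#
+#   expr = Forward()
+#   atom = Word(nums) | Group(Suppress('(') + expr + Suppress(')'))
+#   expr <<= atom + ZeroOrMore('+' + atom)
+#   print(expr.parseString("(1+(2+3))+4"))  # -> [['1', '+', ['2', '+', '3']], '+', '4']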
+class _ForwardNoRecurse(Forward):
+    def __str__( self ):
+        return "..."
+
+class TokenConverter(ParseElementEnhance):
+    """
+    Abstract subclass of C{ParseElementEnhance}, for converting parsed results.
+    """
+    def __init__( self, expr, savelist=False ):
+        super(TokenConverter,self).__init__( expr )#, savelist )
+        self.saveAsList = False
+
+class Combine(TokenConverter):
+    """
+    Converter to concatenate all matching tokens to a single string.
+    By default, the matching patterns must also be contiguous in the input string;
+    this can be disabled by specifying C{'adjacent=False'} in the constructor.
+
+    Example::
+        real = Word(nums) + '.' + Word(nums)
+        print(real.parseString('3.1416')) # -> ['3', '.', '1416']
+        # will also erroneously match the following
+        print(real.parseString('3. 1416')) # -> ['3', '.', '1416']
+
+        real = Combine(Word(nums) + '.' + Word(nums))
+        print(real.parseString('3.1416')) # -> ['3.1416']
+        # no match when there are internal spaces
+        print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
+    """
+    def __init__( self, expr, joinString="", adjacent=True ):
+        super(Combine,self).__init__( expr )
+        # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
+        if adjacent:
+            self.leaveWhitespace()
+        self.adjacent = adjacent
+        self.skipWhitespace = True
+        self.joinString = joinString
+        self.callPreparse = True
+
+    def ignore( self, other ):
+        if self.adjacent:
+            ParserElement.ignore(self, other)
+        else:
+            super( Combine, self).ignore( other )
+        return self
+
+    def postParse( self, instring, loc, tokenlist ):
+        retToks = tokenlist.copy()
+        del retToks[:]
+        retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)
+
+        if self.resultsName and retToks.haskeys():
+            return [ retToks ]
+        else:
+            return retToks
+
+class Group(TokenConverter):
+    """
+    Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.
+
+    Example::
+        ident = Word(alphas)
+        num = Word(nums)
+        term = ident | num
+        func = ident + Optional(delimitedList(term))
+        print(func.parseString("fn a,b,100"))  # -> ['fn', 'a', 'b', '100']
+
+        func = ident + Group(Optional(delimitedList(term)))
+        print(func.parseString("fn a,b,100"))  # -> ['fn', ['a', 'b', '100']]
+    """
+    def __init__( self, expr ):
+        super(Group,self).__init__( expr )
+        self.saveAsList = True
+
+    def postParse( self, instring, loc, tokenlist ):
+        return [ tokenlist ]
+
+class Dict(TokenConverter):
+    """
+    Converter to return a repetitive expression as a list, but also as a dictionary.
+    Each element can also be referenced using the first token in the expression as its key.
+    Useful for tabular report scraping when the first column can be used as an item key.
+
+    Example::
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        
+        # print attributes as plain groups
+        print(OneOrMore(attr_expr).parseString(text).dump())
+        
+        # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
+        result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
+        print(result.dump())
+        
+        # access named fields as dict entries, or output as dict
+        print(result['shape'])        
+        print(result.asDict())
+    prints::
+        ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']
+
+        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+        - color: light blue
+        - posn: upper left
+        - shape: SQUARE
+        - texture: burlap
+        SQUARE
+        {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
+    See more examples at L{ParseResults} of accessing fields by results name.
+    """
+    def __init__( self, expr ):
+        super(Dict,self).__init__( expr )
+        self.saveAsList = True
+
+    def postParse( self, instring, loc, tokenlist ):
+        for i,tok in enumerate(tokenlist):
+            if len(tok) == 0:
+                continue
+            ikey = tok[0]
+            if isinstance(ikey,int):
+                ikey = _ustr(tok[0]).strip()
+            if len(tok)==1:
+                tokenlist[ikey] = _ParseResultsWithOffset("",i)
+            elif len(tok)==2 and not isinstance(tok[1],ParseResults):
+                tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)
+            else:
+                dictvalue = tok.copy() #ParseResults(i)
+                del dictvalue[0]
+                if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()):
+                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)
+                else:
+                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)
+
+        if self.resultsName:
+            return [ tokenlist ]
+        else:
+            return tokenlist
+
+
+class Suppress(TokenConverter):
+    """
+    Converter for ignoring the results of a parsed expression.
+
+    Example::
+        source = "a, b, c,d"
+        wd = Word(alphas)
+        wd_list1 = wd + ZeroOrMore(',' + wd)
+        print(wd_list1.parseString(source))
+
+        # often, delimiters that are useful during parsing are just in the
+        # way afterward - use Suppress to keep them out of the parsed output
+        wd_list2 = wd + ZeroOrMore(Suppress(',') + wd)
+        print(wd_list2.parseString(source))
+    prints::
+        ['a', ',', 'b', ',', 'c', ',', 'd']
+        ['a', 'b', 'c', 'd']
+    (See also L{delimitedList}.)
+    """
+    def postParse( self, instring, loc, tokenlist ):
+        return []
+
+    def suppress( self ):
+        return self
+
+
+class OnlyOnce(object):
+    """
+    Wrapper for parse actions, to ensure they are only called once.
+    """
+    def __init__(self, methodCall):
+        self.callable = _trim_arity(methodCall)
+        self.called = False
+    def __call__(self,s,l,t):
+        if not self.called:
+            results = self.callable(s,l,t)
+            self.called = True
+            return results
+        raise ParseException(s,l,"")
+    def reset(self):
+        self.called = False
+
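+# Illustrative sketch (assumed usage, not from the upstream docs): OnlyOnce
+# guards a parse action so it fires a single time until reset() is called.
+#
+#   def report(tokens):
+#       print("matched:", tokens)
+#   wd = Word(alphas).setParseAction(OnlyOnce(report))
+#   wd.parseString("aa")    # parse action runs once
+#   # wd.parseString("bb")  # second call raises ParseException until .reset()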
+def traceParseAction(f):
+    """
+    Decorator for debugging parse actions. 
+    
+    When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".}
+    When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.
+
+    Example::
+        wd = Word(alphas)
+
+        @traceParseAction
+        def remove_duplicate_chars(tokens):
+            return ''.join(sorted(set(''.join(tokens))))
+
+        wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
+        print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
+    prints::
+        >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
+        <<leaving remove_duplicate_chars (ret: 'dfjkls')
+        ['dfjkls']
+    """
+    f = _trim_arity(f)
+    def z(*paArgs):
+        thisFunc = f.__name__
+        s,l,t = paArgs[-3:]
+        if len(paArgs)>3:
+            thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
+        sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) )
+        try:
+            ret = f(*paArgs)
+        except Exception as exc:
+            sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )
+            raise
+        sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) )
+        return ret
+    try:
+        z.__name__ = f.__name__
+    except AttributeError:
+        pass
+    return z
+
+#
+# global helpers
+#
+def delimitedList( expr, delim=",", combine=False ):
+    """
+    Helper to define a delimited list of expressions - the delimiter defaults to ','.
+    By default, the list elements and delimiters can have intervening whitespace, and
+    comments, but this can be overridden by passing C{combine=True} in the constructor.
+    If C{combine} is set to C{True}, the matching tokens are returned as a single token
+    string, with the delimiters included; otherwise, the matching tokens are returned
+    as a list of tokens, with the delimiters suppressed.
+
+    Example::
+        delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
+        delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
+    """
+    dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."
+    if combine:
+        return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)
+    else:
+        return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)
+
+def countedArray( expr, intExpr=None ):
+    """
+    Helper to define a counted list of expressions.
+    This helper defines a pattern of the form::
+        integer expr expr expr...
+    where the leading integer tells how many expr expressions follow.
+    The matched tokens are returned as a list of expr tokens; the leading count token is suppressed.
+    
+    If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.
+
+    Example::
+        countedArray(Word(alphas)).parseString('2 ab cd ef')  # -> ['ab', 'cd']
+
+        # in this parser, the leading integer value is given in binary,
+        # '10' indicating that 2 values are in the array
+        binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
+        countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef')  # -> ['ab', 'cd']
+    """
+    arrayExpr = Forward()
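+    # countFieldParseAction re-binds arrayExpr each time a count is parsed,
+    # so the trailing expression expects exactly that many copies of expr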
+    def countFieldParseAction(s,l,t):
+        n = t[0]
+        arrayExpr << (n and Group(And([expr]*n)) or Group(empty))
+        return []
+    if intExpr is None:
+        intExpr = Word(nums).setParseAction(lambda t:int(t[0]))
+    else:
+        intExpr = intExpr.copy()
+    intExpr.setName("arrayLen")
+    intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
+    return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...')
+
+def _flatten(L):
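+    # recursively flatten nested lists: [1, [2, [3]]] -> [1, 2, 3]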
+    ret = []
+    for i in L:
+        if isinstance(i,list):
+            ret.extend(_flatten(i))
+        else:
+            ret.append(i)
+    return ret
+
+def matchPreviousLiteral(expr):
+    """
+    Helper to define an expression that is indirectly defined from
+    the tokens matched in a previous expression, that is, it looks
+    for a 'repeat' of a previous expression.  For example::
+        first = Word(nums)
+        second = matchPreviousLiteral(first)
+        matchExpr = first + ":" + second
+    will match C{"1:1"}, but not C{"1:2"}.  Because this matches a
+    previous literal, will also match the leading C{"1:1"} in C{"1:10"}.
+    If this is not desired, use C{matchPreviousExpr}.
+    Do I{not} use with packrat parsing enabled.
+    """
+    rep = Forward()
+    def copyTokenToRepeater(s,l,t):
+        if t:
+            if len(t) == 1:
+                rep << t[0]
+            else:
+                # flatten t tokens
+                tflat = _flatten(t.asList())
+                rep << And(Literal(tt) for tt in tflat)
+        else:
+            rep << Empty()
+    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+    rep.setName('(prev) ' + _ustr(expr))
+    return rep
+
+def matchPreviousExpr(expr):
+    """
+    Helper to define an expression that is indirectly defined from
+    the tokens matched in a previous expression, that is, it looks
+    for a 'repeat' of a previous expression.  For example::
+        first = Word(nums)
+        second = matchPreviousExpr(first)
+        matchExpr = first + ":" + second
+    will match C{"1:1"}, but not C{"1:2"}.  Because this matches by
+    expressions, will I{not} match the leading C{"1:1"} in C{"1:10"};
+    the expressions are evaluated first, and then compared, so
+    C{"1"} is compared with C{"10"}.
+    Do I{not} use with packrat parsing enabled.
+    """
+    rep = Forward()
+    e2 = expr.copy()
+    rep <<= e2
+    def copyTokenToRepeater(s,l,t):
+        matchTokens = _flatten(t.asList())
+        def mustMatchTheseTokens(s,l,t):
+            theseTokens = _flatten(t.asList())
+            if  theseTokens != matchTokens:
+                raise ParseException("",0,"")
+        rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )
+    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+    rep.setName('(prev) ' + _ustr(expr))
+    return rep
+
+def _escapeRegexRangeChars(s):
+    #~  escape these chars: ^-]
+    for c in r"\^-]":
+        s = s.replace(c,_bslash+c)
+    s = s.replace("\n",r"\n")
+    s = s.replace("\t",r"\t")
+    return _ustr(s)
+
+def oneOf( strs, caseless=False, useRegex=True ):
+    """
+    Helper to quickly define a set of alternative Literals, and makes sure to do
+    longest-first testing when there is a conflict, regardless of the input order,
+    but returns a C{L{MatchFirst}} for best performance.
+
+    Parameters:
+     - strs - a string of space-delimited literals, or a collection of string literals
+     - caseless - (default=C{False}) - treat all literals as caseless
+     - useRegex - (default=C{True}) - as an optimization, will generate a Regex
+          object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
+          if creating a C{Regex} raises an exception)
+
+    Example::
+        comp_oper = oneOf("< = > <= >= !=")
+        var = Word(alphas)
+        number = Word(nums)
+        term = var | number
+        comparison_expr = term + comp_oper + term
+        print(comparison_expr.searchString("B = 12  AA=23 B<=AA AA>12"))
+    prints::
+        [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
+    """
+    if caseless:
+        isequal = ( lambda a,b: a.upper() == b.upper() )
+        masks = ( lambda a,b: b.upper().startswith(a.upper()) )
+        parseElementClass = CaselessLiteral
+    else:
+        isequal = ( lambda a,b: a == b )
+        masks = ( lambda a,b: b.startswith(a) )
+        parseElementClass = Literal
+
+    symbols = []
+    if isinstance(strs,basestring):
+        symbols = strs.split()
+    elif isinstance(strs, collections.Iterable):
+        symbols = list(strs)
+    else:
+        warnings.warn("Invalid argument to oneOf, expected string or iterable",
+                SyntaxWarning, stacklevel=2)
+    if not symbols:
+        return NoMatch()
+
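+    # remove duplicate symbols, and reorder so that no symbol is hidden by
+    # an earlier symbol that is a prefix of it (longest-match-first)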
+    i = 0
+    while i < len(symbols)-1:
+        cur = symbols[i]
+        for j,other in enumerate(symbols[i+1:]):
+            if ( isequal(other, cur) ):
+                del symbols[i+j+1]
+                break
+            elif ( masks(cur, other) ):
+                del symbols[i+j+1]
+                symbols.insert(i,other)
+                cur = other
+                break
+        else:
+            i += 1
+
+    if not caseless and useRegex:
+        #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))
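+        # if every symbol is a single character, a compact character class
+        # [..] suffices; otherwise alternate the escaped symbols with '|'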
+        try:
+            if len(symbols)==len("".join(symbols)):
+                return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))
+            else:
+                return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))
+        except Exception:
+            warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
+                    SyntaxWarning, stacklevel=2)
+
+
+    # last resort, just use MatchFirst
+    return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))
+
+def dictOf( key, value ):
+    """
+    Helper to easily and clearly define a dictionary by specifying the respective patterns
+    for the key and value.  Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens
+    in the proper order.  The key pattern can include delimiting markers or punctuation,
+    as long as they are suppressed, thereby leaving the significant key text.  The value
+    pattern can include named results, so that the C{Dict} results can include named token
+    fields.
+
+    Example::
+        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        print(OneOrMore(attr_expr).parseString(text).dump())
+        
+        attr_label = label
+        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)
+
+        # similar to Dict, but simpler call format
+        result = dictOf(attr_label, attr_value).parseString(text)
+        print(result.dump())
+        print(result['shape'])
+        print(result.shape)  # object attribute access works too
+        print(result.asDict())
+    prints::
+        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+        - color: light blue
+        - posn: upper left
+        - shape: SQUARE
+        - texture: burlap
+        SQUARE
+        SQUARE
+        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
+    """
+    return Dict( ZeroOrMore( Group ( key + value ) ) )
+
+def originalTextFor(expr, asString=True):
+    """
+    Helper to return the original, untokenized text for a given expression.  Useful to
+    restore the parsed fields of an HTML start tag into the raw tag text itself, or to
+    revert separate tokens with intervening whitespace back to the original matching
+    input text. By default, returns a string containing the original parsed text.
+       
+    If the optional C{asString} argument is passed as C{False}, then the return value is a 
+    C{L{ParseResults}} containing any results names that were originally matched, and a 
+    single token containing the original matched text from the input string.  So if 
+    the expression passed to C{L{originalTextFor}} contains expressions with defined
+    results names, you must set C{asString} to C{False} if you want to preserve those
+    results name values.
+
+    Example::
+        src = "this is test  bold text  normal text "
+        for tag in ("b","i"):
+            opener,closer = makeHTMLTags(tag)
+            patt = originalTextFor(opener + SkipTo(closer) + closer)
+            print(patt.searchString(src)[0])
+    prints::
+        ['<b> bold <i>text</i> </b>']
+        ['<i>text</i>']
+    """
+    locMarker = Empty().setParseAction(lambda s,loc,t: loc)
+    endlocMarker = locMarker.copy()
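+    # don't pre-skip whitespace before recording the end location, so the
+    # extracted text does not include trailing whitespace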
+    endlocMarker.callPreparse = False
+    matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
+    if asString:
+        extractText = lambda s,l,t: s[t._original_start:t._original_end]
+    else:
+        def extractText(s,l,t):
+            t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
+    matchExpr.setParseAction(extractText)
+    matchExpr.ignoreExprs = expr.ignoreExprs
+    return matchExpr
+
+def ungroup(expr): 
+    """
+    Helper to undo pyparsing's default grouping of And expressions, even
+    if all but one are non-empty.
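+
+    Example (an illustrative sketch; the names used here are arbitrary)::
+        grouped = Group(Word(alphas) + Word(nums))
+        print(grouped.parseString("abc 123"))          # -> [['abc', '123']]
+        print(ungroup(grouped).parseString("abc 123")) # -> ['abc', '123']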
+    """
+    return TokenConverter(expr).setParseAction(lambda t:t[0])
+
+def locatedExpr(expr):
+    """
+    Helper to decorate a returned token with its starting and ending locations in the input string.
+    This helper adds the following results names:
+     - locn_start = location where matched expression begins
+     - locn_end = location where matched expression ends
+     - value = the actual parsed results
+
+    Be careful if the input text contains C{<TAB>} characters - you may want to call
+    C{L{ParserElement.parseWithTabs}}
+
+    Example::
+        wd = Word(alphas)
+        for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
+            print(match)
+    prints::
+        [[0, 'ljsdf', 5]]
+        [[8, 'lksdjjf', 15]]
+        [[18, 'lkkjj', 23]]
+    """
+    locator = Empty().setParseAction(lambda s,l,t: l)
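+    # the end locator must not skip leading whitespace, or the reported
+    # end position would extend past the matched expression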
+    return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))
+
+
+# convenience constants for positional expressions
+empty       = Empty().setName("empty")
+lineStart   = LineStart().setName("lineStart")
+lineEnd     = LineEnd().setName("lineEnd")
+stringStart = StringStart().setName("stringStart")
+stringEnd   = StringEnd().setName("stringEnd")
+
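+# internal mini-grammar used below by srange() to parse regex-style []
+# character-range specifications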
+_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
+_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
+_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
+_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE)
+_charRange = Group(_singleChar + Suppress("-") + _singleChar)
+_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
+
+def srange(s):
+    r"""
+    Helper to easily define string ranges for use in Word construction.  Borrows
+    syntax from regexp '[]' string range definitions::
+        srange("[0-9]")   -> "0123456789"
+        srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
+        srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
+    The input string must be enclosed in []'s, and the returned string is the expanded
+    character set joined into a single string.
+    The values enclosed in the []'s may be:
+     - a single character
+     - an escaped character with a leading backslash (such as C{\-} or C{\]})
+     - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) 
+         (C{\0x##} is also supported for backwards compatibility) 
+     - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
+     - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
+     - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
+    """
+    _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
+    try:
+        return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
+    except Exception:
+        return ""
+
+def matchOnlyAtCol(n):
+    """
+    Helper method for defining parse actions that require matching at a specific
+    column in the input text.
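+
+    Example (an illustrative sketch; the names used here are arbitrary, and
+    C{col} numbers columns from 1)::
+        data = "xxx  abc"
+        at_col6 = Word(alphas).setParseAction(matchOnlyAtCol(6))
+        print(at_col6.searchString(data))  # -> [['abc']] - only the word starting at column 6 matches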
+    """
+    def verifyCol(strg,locn,toks):
+        if col(locn,strg) != n:
+            raise ParseException(strg,locn,"matched token not at column %d" % n)
+    return verifyCol
+
+def replaceWith(replStr):
+    """
+    Helper method for common parse actions that simply return a literal value.  Especially
+    useful when used with C{L{transformString}()}.
+
+    Example::
+        num = Word(nums).setParseAction(lambda toks: int(toks[0]))
+        na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
+        term = na | num
+        
+        OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
+    """
+    return lambda s,l,t: [replStr]
+
+def removeQuotes(s,l,t):
+    """
+    Helper parse action for removing quotation marks from parsed quoted strings.
+
+    Example::
+        # by default, quotation marks are included in parsed results
+        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]
+
+        # use removeQuotes to strip quotation marks from parsed results
+        quotedString.setParseAction(removeQuotes)
+        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
+    """
+    return t[0][1:-1]
+
+def tokenMap(func, *args):
+    """
+    Helper to define a parse action by mapping a function to all elements of a ParseResults list. If any additional
+    args are passed, they are forwarded to the given function as additional arguments after
+    the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
+    parsed data to an integer using base 16.
+
+    Example (compare the last example to the one in L{ParserElement.transformString})::
+        hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
+        hex_ints.runTests('''
+            00 11 22 aa FF 0a 0d 1a
+            ''')
+        
+        upperword = Word(alphas).setParseAction(tokenMap(str.upper))
+        OneOrMore(upperword).runTests('''
+            my kingdom for a horse
+            ''')
+
+        wd = Word(alphas).setParseAction(tokenMap(str.title))
+        OneOrMore(wd).setParseAction(' '.join).runTests('''
+            now is the winter of our discontent made glorious summer by this sun of york
+            ''')
+    prints::
+        00 11 22 aa FF 0a 0d 1a
+        [0, 17, 34, 170, 255, 10, 13, 26]
+
+        my kingdom for a horse
+        ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']
+
+        now is the winter of our discontent made glorious summer by this sun of york
+        ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
+    """
+    def pa(s,l,t):
+        return [func(tokn, *args) for tokn in t]
+
+    try:
+        func_name = getattr(func, '__name__', 
+                            getattr(func, '__class__').__name__)
+    except Exception:
+        func_name = str(func)
+    pa.__name__ = func_name
+
+    return pa
+
+upcaseTokens = tokenMap(lambda t: _ustr(t).upper())
+"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}"""
+
+downcaseTokens = tokenMap(lambda t: _ustr(t).lower())
+"""(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}"""
+    
+def _makeTags(tagStr, xml):
+    """Internal helper to construct opening and closing tag expressions, given a tag name"""
+    if isinstance(tagStr,basestring):
+        resname = tagStr
+        tagStr = Keyword(tagStr, caseless=not xml)
+    else:
+        resname = tagStr.name
+
+    tagAttrName = Word(alphas,alphanums+"_-:")
+    if (xml):
+        tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
+        openTag = Suppress("<") + tagStr("tag") + \
+                Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
+                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+    else:
+        printablesLessRAbrack = "".join(c for c in printables if c not in ">")
+        tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
+        openTag = Suppress("<") + tagStr("tag") + \
+                Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
+                Optional( Suppress("=") + tagAttrValue ) ))) + \
+                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+    closeTag = Combine(_L("")
+
+    openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname)
+    closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)
+    openTag.tag = resname
+    closeTag.tag = resname
+    return openTag, closeTag
+
+def makeHTMLTags(tagStr):
+    """
+    Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches
+    tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.
+
+    Example::
+        text = 'More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page'
+        # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
+        a,a_end = makeHTMLTags("A")
+        link_expr = a + SkipTo(a_end)("link_text") + a_end
+        
+        for link in link_expr.searchString(text):
+            # attributes in the <A> tag (like "href" shown here) are also accessible as named results
+            print(link.link_text, '->', link.href)
+    prints::
+        pyparsing -> http://pyparsing.wikispaces.com
+    """
+    return _makeTags( tagStr, False )
+
+def makeXMLTags(tagStr):
+    """
+    Helper to construct opening and closing tag expressions for XML, given a tag name. Matches
+    tags only in the given upper/lower case.
+
+    Example: similar to L{makeHTMLTags}
+    """
+    return _makeTags( tagStr, True )
+
+def withAttribute(*args,**attrDict):
+    """
+    Helper to create a validating parse action to be used with start tags created
+    with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
+    with a required attribute value, to avoid false matches on common tags such as
+    C{<TD>} or C{<DIV>
}. + + Call C{withAttribute} with a series of attribute names and values. Specify the list + of filter attributes names and values as: + - keyword arguments, as in C{(align="right")}, or + - as an explicit dict with C{**} operator, when an attribute name is also a Python + reserved word, as in C{**{"class":"Customer", "align":"right"}} + - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) + For attribute names with a namespace prefix, you must use the second form. Attribute + names are matched insensitive to upper/lower case. + + If just testing for C{class} (with or without a namespace), use C{L{withClass}}. + + To verify that the attribute exists, but without specifying a value, pass + C{withAttribute.ANY_VALUE} as the value. + + Example:: + html = ''' +
+ Some text +
1 4 0 1 0
+
1,3 2,3 1,1
+
this has no type
+
+ + ''' + div,div_end = makeHTMLTags("div") + + # only match div tag having a type attribute with value "grid" + div_grid = div().setParseAction(withAttribute(type="grid")) + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + # construct a match with any div tag having a type attribute, regardless of the value + div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + if args: + attrs = args[:] + else: + attrs = attrDict.items() + attrs = [(k,v) for k,v in attrs] + def pa(s,l,tokens): + for attrName,attrValue in attrs: + if attrName not in tokens: + raise ParseException(s,l,"no matching attribute " + attrName) + if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: + raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % + (attrName, tokens[attrName], attrValue)) + return pa +withAttribute.ANY_VALUE = object() + +def withClass(classname, namespace=''): + """ + Simplified version of C{L{withAttribute}} when matching on a div class - made + difficult because C{class} is a reserved word in Python. + + Example:: + html = ''' +
+ Some text +
1 4 0 1 0
+
1,3 2,3 1,1
+
this <div> has no class
+
+ + ''' + div,div_end = makeHTMLTags("div") + div_grid = div().setParseAction(withClass("grid")) + + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + classattr = "%s:class" % namespace if namespace else "class" + return withAttribute(**{classattr : classname}) + +opAssoc = _Constants() +opAssoc.LEFT = object() +opAssoc.RIGHT = object() + +def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): + """ + Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary or + binary, left- or right-associative. Parse actions can also be attached + to operator expressions. The generated parser will also recognize the use + of parentheses to override operator precedences (see example below). + + Note: if you define a deep operator list, you may see performance issues + when using infixNotation. See L{ParserElement.enablePackrat} for a + mechanism to potentially improve your parser performance. + + Parameters: + - baseExpr - expression representing the most basic element for the nested + - opList - list of tuples, one for each operator precedence level in the + expression grammar; each tuple is of the form + (opExpr, numTerms, rightLeftAssoc, parseAction), where: + - opExpr is the pyparsing expression for the operator; + may also be a string, which will be converted to a Literal; + if numTerms is 3, opExpr is a tuple of two expressions, for the + two operators separating the 3 terms + - numTerms is the number of terms for this operator (must + be 1, 2, or 3) + - rightLeftAssoc is the indicator whether the operator is + right or left associative, using the pyparsing-defined + constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. 
+ - parseAction is the parse action to be associated with + expressions matching this operator expression (the + parse action tuple member may be omitted) + - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) + - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) + + Example:: + # simple example of four-function arithmetic with ints and variable names + integer = pyparsing_common.signed_integer + varname = pyparsing_common.identifier + + arith_expr = infixNotation(integer | varname, + [ + ('-', 1, opAssoc.RIGHT), + (oneOf('* /'), 2, opAssoc.LEFT), + (oneOf('+ -'), 2, opAssoc.LEFT), + ]) + + arith_expr.runTests(''' + 5+3*6 + (5+3)*6 + -2--11 + ''', fullDump=False) + prints:: + 5+3*6 + [[5, '+', [3, '*', 6]]] + + (5+3)*6 + [[[5, '+', 3], '*', 6]] + + -2--11 + [[['-', 2], '-', ['-', 11]]] + """ + ret = Forward() + lastExpr = baseExpr | ( lpar + ret + rpar ) + for i,operDef in enumerate(opList): + opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr + if arity == 3: + if opExpr is None or len(opExpr) != 2: + raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") + opExpr1, opExpr2 = opExpr + thisExpr = Forward().setName(termName) + if rightLeftAssoc == opAssoc.LEFT: + if arity == 1: + matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + else: + matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ + Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + elif rightLeftAssoc == opAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Optional): + opExpr = Optional(opExpr) + matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + else: + matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ + Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + else: + raise ValueError("operator must indicate right or left associativity") + if pa: + matchExpr.setParseAction( pa ) + thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) + lastExpr = thisExpr + ret <<= lastExpr + return ret + +operatorPrecedence = infixNotation +"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" + +dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") +quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| + Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") +unicodeString = 
Combine(_L('u') + quotedString.copy()).setName("unicode string literal") + +def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): + """ + Helper method for defining nested lists enclosed in opening and closing + delimiters ("(" and ")" are the default). + + Parameters: + - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression + - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression + - content - expression for items within the nested lists (default=C{None}) + - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) + + If an expression is not provided for the content argument, the nested + expression will capture all whitespace-delimited content between delimiters + as a list of separate values. + + Use the C{ignoreExpr} argument to define expressions that may contain + opening or closing characters that should not be treated as opening + or closing characters for nesting, such as quotedString or a comment + expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. + The default is L{quotedString}, but if no expressions are to be ignored, + then pass C{None} for this argument. + + Example:: + data_type = oneOf("void int short long char float double") + decl_data_type = Combine(data_type + Optional(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR,RPAR = map(Suppress, "()") + + code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(cStyleComment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.searchString(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + prints:: + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] + """ + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener,basestring) and isinstance(closer,basestring): + if len(opener) == 1 and len(closer)==1: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t:t[0].strip())) + else: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + raise ValueError("opening and closing arguments must be strings if no content expression is given") + ret = Forward() + if ignoreExpr is not None: + ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) + else: + ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) + 
ret.setName('nested %s%s expression' % (opener,closer)) + return ret + +def indentedBlock(blockStatementExpr, indentStack, indent=True): + """ + Helper method for defining space-delimited indentation blocks, such as + those used to define block statements in Python source code. + + Parameters: + - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single grammar + should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond the + the current level; set to False for block of left-most statements + (default=C{True}) + + A valid block must contain at least one C{blockStatement}. + + Example:: + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group( funcDecl + func_body ) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << ( funcDef | assignment | identifier ) + + module_body = OneOrMore(stmt) + + parseTree = module_body.parseString(data) + parseTree.pprint() + prints:: + [['def', + 'A', + ['(', 'z', ')'], + ':', + [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], + 'B', + ['def', + 'BB', + ['(', 'a', 'b', 'c', ')'], + ':', + [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], + 'C', + 'D', + ['def', + 'spam', + ['(', 'x', 'y', ')'], + ':', + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] + """ + def checkPeerIndent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseFatalException(s,l,"illegal nesting") + raise ParseException(s,l,"not a peer entry") + + def checkSubIndent(s,l,t): + curCol = col(l,s) + if curCol > indentStack[-1]: + indentStack.append( curCol ) + else: + raise ParseException(s,l,"not a subentry") + + def checkUnindent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): + raise ParseException(s,l,"not an unindent") + indentStack.pop() + + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') + PEER = Empty().setParseAction(checkPeerIndent).setName('') + UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') + if indent: + smExpr = Group( Optional(NL) + + #~ FollowedBy(blockStatementExpr) + + INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) + else: + smExpr = Group( Optional(NL) + + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr.setName('indented block') + +alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") +punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") + +anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& 
"\'')) +commonHTMLEntity = Regex('&(?P' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") +def replaceHTMLEntity(t): + """Helper parser action to replace common HTML entities with their special characters""" + return _htmlEntityMap.get(t.entity) + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") +"Comment of the form C{/* ... */}" + +htmlComment = Regex(r"").setName("HTML comment") +"Comment of the form C{}" + +restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") +dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") +"Comment of the form C{// ... (to end of line)}" + +cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") +"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" + +javaStyleComment = cppStyleComment +"Same as C{L{cppStyleComment}}" + +pythonStyleComment = Regex(r"#.*").setName("Python style comment") +"Comment of the form C{# ... (to end of line)}" + +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + + Optional( Word(" \t") + + ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") +commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") +"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. + This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" + +# some other useful expressions - using lower-case class name since we are really using this as a namespace +class pyparsing_common: + """ + Here are some common low-level expressions that may be useful in jump-starting parser development: + - numeric forms (L{integers}, L{reals}, L{scientific notation}) + - common L{programming identifiers} + - network addresses (L{MAC}, L{IPv4}, L{IPv6}) + - ISO8601 L{dates} and L{datetime} + - L{UUID} + - L{comma-separated list} + Parse actions: + - C{L{convertToInteger}} + - C{L{convertToFloat}} + - C{L{convertToDate}} + - C{L{convertToDatetime}} + - C{L{stripHTMLTags}} + - C{L{upcaseTokens}} + - C{L{downcaseTokens}} + + Example:: + pyparsing_common.number.runTests(''' + # any int or real number, returned as the appropriate type + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.fnumber.runTests(''' + # any int or real number, returned as float + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.hex_integer.runTests(''' + # hex numbers + 100 + FF + ''') + + pyparsing_common.fraction.runTests(''' + # fractions + 1/2 + -3/4 + ''') + + pyparsing_common.mixed_integer.runTests(''' + # mixed fractions + 1 + 1/2 + -3/4 + 1-3/4 + ''') + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(''' + # uuid + 12345678-1234-5678-1234-567812345678 + ''') + prints:: + # any int or real number, returned as the appropriate type + 100 + [100] + + -100 + [-100] + + +100 + [100] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # any int or real number, returned as float + 100 + [100.0] + + -100 + [-100.0] + + +100 + [100.0] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # hex numbers + 100 + [256] + + FF + [255] + + # fractions + 1/2 + [0.5] + + -3/4 + [-0.75] + + # mixed fractions + 1 + [1] + + 1/2 + [0.5] + 
+ -3/4 + [-0.75] + + 1-3/4 + [1.75] + + # uuid + 12345678-1234-5678-1234-567812345678 + [UUID('12345678-1234-5678-1234-567812345678')] + """ + + convertToInteger = tokenMap(int) + """ + Parse action for converting parsed integers to Python int + """ + + convertToFloat = tokenMap(float) + """ + Parse action for converting parsed numbers to Python float + """ + + integer = Word(nums).setName("integer").setParseAction(convertToInteger) + """expression that parses an unsigned integer, returns an int""" + + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) + """expression that parses a hexadecimal integer, returns an int""" + + signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) + """expression that parses an integer with optional leading sign, returns an int""" + + fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") + """fractional expression of an integer divided by an integer, returns a float""" + fraction.addParseAction(lambda t: t[0]/t[-1]) + + mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") + """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" + mixed_integer.addParseAction(sum) + + real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) + """expression that parses a floating point number and returns a float""" + + sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) + """expression that parses a floating point number with optional scientific notation and returns a float""" + + # streamlining this expression makes the docs nicer-looking + number = (sci_real | real | signed_integer).streamline() + """any numeric expression, returns the corresponding Python type""" + + fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) + """any int or real number, returned as float""" + + identifier = Word(alphas+'_', alphanums+'_').setName("identifier") + """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" + + ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") + "IPv4 address (C{0.0.0.0 - 255.255.255.255})" + + _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") + _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") + _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) + _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") + ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") + "IPv6 address (long, short, or mixed form)" + + mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") + "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" + + @staticmethod + def convertToDate(fmt="%Y-%m-%d"): + """ + Helper to create a parse action for converting parsed date string to Python datetime.date + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) + + Example:: + date_expr = pyparsing_common.iso8601_date.copy() + date_expr.setParseAction(pyparsing_common.convertToDate()) + print(date_expr.parseString("1999-12-31")) + prints:: + [datetime.date(1999, 12, 31)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt).date() + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + @staticmethod + def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): + """ + Helper to create a parse action for converting parsed datetime string to Python datetime.datetime + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) + + Example:: + dt_expr = pyparsing_common.iso8601_datetime.copy() + dt_expr.setParseAction(pyparsing_common.convertToDatetime()) + print(dt_expr.parseString("1999-12-31T23:59:59.999")) + prints:: + [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt) + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + iso8601_date = Regex(r'(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?').setName("ISO8601 date") + "ISO8601 date (C{yyyy-mm-dd})" + + iso8601_datetime = Regex(r'(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") + "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" + + uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") + "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" + + _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() + @staticmethod + def stripHTMLTags(s, l, tokens): + """ + Parse action to remove HTML tags from web page HTML source + + Example:: + # strip HTML links from normal text + text = 'More info at the
pyparsing wiki page' + td,td_end = makeHTMLTags("TD") + table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end + + print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' + """ + return pyparsing_common._html_stripper.transformString(tokens[0]) + + _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') + + Optional( White(" \t") ) ) ).streamline().setName("commaItem") + comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") + """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" + + upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) + """Parse action to convert tokens to upper case.""" + + downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) + """Parse action to convert tokens to lower case.""" + + +if __name__ == "__main__": + + selectToken = CaselessLiteral("select") + fromToken = CaselessLiteral("from") + + ident = Word(alphas, alphanums + "_$") + + columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + columnNameList = Group(delimitedList(columnName)).setName("columns") + columnSpec = ('*' | columnNameList) + + tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + tableNameList = Group(delimitedList(tableName)).setName("tables") + + simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") + + # demo runTests method, including embedded comments in test string + simpleSQL.runTests(""" + # '*' as column list and dotted table name + select * from SYS.XYZZY + + # caseless match on "SELECT", and casts back to "select" + SELECT * from XYZZY, ABC + + # list of column names, and mixed case SELECT keyword + Select AA,BB,CC from Sys.dual + + # multiple tables + Select A, B, C from Sys.dual, Table2 + + # invalid SELECT keyword - should fail + Xelect A, B, C from Sys.dual + + # incomplete command - should fail + Select + + # invalid column name - should fail + Select ^^^ frox Sys.dual + + """) + + pyparsing_common.number.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + # any int or real number, returned as float + pyparsing_common.fnumber.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + pyparsing_common.hex_integer.runTests(""" + 100 + FF + """) + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(""" + 12345678-1234-5678-1234-567812345678 + """) diff --git a/tests/_lib_vendors/pkg_resources/_vendor/six.py b/tests/_lib_vendors/pkg_resources/_vendor/six.py new file mode 100644 index 0000000..190c023 --- /dev/null +++ b/tests/_lib_vendors/pkg_resources/_vendor/six.py @@ -0,0 +1,868 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial 
portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.10.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. 
+ + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", 
"email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", 
"urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = 
_importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack 
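+
+    # Editor's sketch, not part of upstream six: what the byte/text helpers
+    # defined just above do on a Python 3 interpreter (all names are six's own):
+    #
+    #     b("abc")        # -> b'abc'   (text encoded via latin-1)
+    #     u("abc")        # -> 'abc'    (already text on Python 3)
+    #     unichr(0x20ac)  # -> '\u20ac' (plain chr() here)
+    #     int2byte(65)    # -> b'A'     (packs one int into a single byte)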
+ del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. 
+__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/tests/_lib_vendors/pkg_resources/extern/__init__.py b/tests/_lib_vendors/pkg_resources/extern/__init__.py new file mode 100644 index 0000000..b4156fe --- /dev/null +++ b/tests/_lib_vendors/pkg_resources/extern/__init__.py @@ -0,0 +1,73 @@ +import sys + + +class VendorImporter: + """ + A PEP 302 meta path importer for finding optionally-vendored + or otherwise naturally-installed packages from root_name. + """ + + def __init__(self, root_name, vendored_names=(), vendor_pkg=None): + self.root_name = root_name + self.vendored_names = set(vendored_names) + self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor') + + @property + def search_path(self): + """ + Search first the vendor package then as a natural package. + """ + yield self.vendor_pkg + '.' + yield '' + + def find_module(self, fullname, path=None): + """ + Return self when fullname starts with root_name and the + target module is one vendored through this importer. + """ + root, base, target = fullname.partition(self.root_name + '.') + if root: + return + if not any(map(target.startswith, self.vendored_names)): + return + return self + + def load_module(self, fullname): + """ + Iterate over the search path to locate and load fullname. + """ + root, base, target = fullname.partition(self.root_name + '.') + for prefix in self.search_path: + try: + extant = prefix + target + __import__(extant) + mod = sys.modules[extant] + sys.modules[fullname] = mod + # mysterious hack: + # Remove the reference to the extant package/module + # on later Python versions to cause relative imports + # in the vendor package to resolve the same modules + # as those going through this importer. + if sys.version_info > (3, 3): + del sys.modules[extant] + return mod + except ImportError: + pass + else: + raise ImportError( + "The '{target}' package is required; " + "normally this is bundled with this package so if you get " + "this warning, consult the packager of your " + "distribution.".format(**locals()) + ) + + def install(self): + """ + Install this importer into sys.meta_path if not already present. 
+ """ + if self not in sys.meta_path: + sys.meta_path.append(self) + + +names = 'packaging', 'pyparsing', 'six', 'appdirs' +VendorImporter(__name__, names).install() diff --git a/tests/_lib_vendors/pkg_resources/py31compat.py b/tests/_lib_vendors/pkg_resources/py31compat.py new file mode 100644 index 0000000..331a51b --- /dev/null +++ b/tests/_lib_vendors/pkg_resources/py31compat.py @@ -0,0 +1,22 @@ +import os +import errno +import sys + + +def _makedirs_31(path, exist_ok=False): + try: + os.makedirs(path) + except OSError as exc: + if not exist_ok or exc.errno != errno.EEXIST: + raise + + +# rely on compatibility behavior until mode considerations +# and exists_ok considerations are disentangled. +# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663 +needs_makedirs = ( + sys.version_info < (3, 2, 5) or + (3, 3) <= sys.version_info < (3, 3, 6) or + (3, 4) <= sys.version_info < (3, 4, 1) +) +makedirs = _makedirs_31 if needs_makedirs else os.makedirs diff --git a/tests/_lib_vendors/py/__init__.py b/tests/_lib_vendors/py/__init__.py new file mode 100644 index 0000000..bdb9aa2 --- /dev/null +++ b/tests/_lib_vendors/py/__init__.py @@ -0,0 +1,150 @@ +""" +py.test and pylib: rapid testing and development utils + +this module uses apipkg.py for lazy-loading sub modules +and classes. The initpkg-dictionary below specifies +name->value mappings where value can be another namespace +dictionary or an import path. + +(c) Holger Krekel and others, 2004-2014 +""" +__version__ = '1.4.31' + +from py import _apipkg + +# so that py.error.* instances are picklable +import sys +sys.modules['py.error'] = _apipkg.AliasModule("py.error", "py._error", 'error') + +_apipkg.initpkg(__name__, attr={'_apipkg': _apipkg}, exportdefs={ + # access to all standard lib modules + 'std': '._std:std', + # access to all posix errno's as classes + 'error': '._error:error', + + '_pydir' : '.__metainfo:pydir', + 'version': 'py:__version__', # backward compatibility + + # pytest-2.0 has a flat namespace, we use alias modules + # to keep old references compatible + 'test' : 'pytest', + 'test.collect' : 'pytest', + 'test.cmdline' : 'pytest', + + # hook into the top-level standard library + 'process' : { + '__doc__' : '._process:__doc__', + 'cmdexec' : '._process.cmdexec:cmdexec', + 'kill' : '._process.killproc:kill', + 'ForkedFunc' : '._process.forkedfunc:ForkedFunc', + }, + + 'apipkg' : { + 'initpkg' : '._apipkg:initpkg', + 'ApiModule' : '._apipkg:ApiModule', + }, + + 'iniconfig' : { + 'IniConfig' : '._iniconfig:IniConfig', + 'ParseError' : '._iniconfig:ParseError', + }, + + 'path' : { + '__doc__' : '._path:__doc__', + 'svnwc' : '._path.svnwc:SvnWCCommandPath', + 'svnurl' : '._path.svnurl:SvnCommandPath', + 'local' : '._path.local:LocalPath', + 'SvnAuth' : '._path.svnwc:SvnAuth', + }, + + # python inspection/code-generation API + 'code' : { + '__doc__' : '._code:__doc__', + 'compile' : '._code.source:compile_', + 'Source' : '._code.source:Source', + 'Code' : '._code.code:Code', + 'Frame' : '._code.code:Frame', + 'ExceptionInfo' : '._code.code:ExceptionInfo', + 'Traceback' : '._code.code:Traceback', + 'getfslineno' : '._code.source:getfslineno', + 'getrawcode' : '._code.code:getrawcode', + 'patch_builtins' : '._code.code:patch_builtins', + 'unpatch_builtins' : '._code.code:unpatch_builtins', + '_AssertionError' : '._code.assertion:AssertionError', + '_reinterpret_old' : '._code.assertion:reinterpret_old', + '_reinterpret' : '._code.assertion:reinterpret', + '_reprcompare' : '._code.assertion:_reprcompare', + 
'_format_explanation' : '._code.assertion:_format_explanation', + }, + + # backports and additions of builtins + 'builtin' : { + '__doc__' : '._builtin:__doc__', + 'enumerate' : '._builtin:enumerate', + 'reversed' : '._builtin:reversed', + 'sorted' : '._builtin:sorted', + 'any' : '._builtin:any', + 'all' : '._builtin:all', + 'set' : '._builtin:set', + 'frozenset' : '._builtin:frozenset', + 'BaseException' : '._builtin:BaseException', + 'GeneratorExit' : '._builtin:GeneratorExit', + '_sysex' : '._builtin:_sysex', + 'print_' : '._builtin:print_', + '_reraise' : '._builtin:_reraise', + '_tryimport' : '._builtin:_tryimport', + 'exec_' : '._builtin:exec_', + '_basestring' : '._builtin:_basestring', + '_totext' : '._builtin:_totext', + '_isbytes' : '._builtin:_isbytes', + '_istext' : '._builtin:_istext', + '_getimself' : '._builtin:_getimself', + '_getfuncdict' : '._builtin:_getfuncdict', + '_getcode' : '._builtin:_getcode', + 'builtins' : '._builtin:builtins', + 'execfile' : '._builtin:execfile', + 'callable' : '._builtin:callable', + 'bytes' : '._builtin:bytes', + 'text' : '._builtin:text', + }, + + # input-output helping + 'io' : { + '__doc__' : '._io:__doc__', + 'dupfile' : '._io.capture:dupfile', + 'TextIO' : '._io.capture:TextIO', + 'BytesIO' : '._io.capture:BytesIO', + 'FDCapture' : '._io.capture:FDCapture', + 'StdCapture' : '._io.capture:StdCapture', + 'StdCaptureFD' : '._io.capture:StdCaptureFD', + 'TerminalWriter' : '._io.terminalwriter:TerminalWriter', + 'ansi_print' : '._io.terminalwriter:ansi_print', + 'get_terminal_width' : '._io.terminalwriter:get_terminal_width', + 'saferepr' : '._io.saferepr:saferepr', + }, + + # small and mean xml/html generation + 'xml' : { + '__doc__' : '._xmlgen:__doc__', + 'html' : '._xmlgen:html', + 'Tag' : '._xmlgen:Tag', + 'raw' : '._xmlgen:raw', + 'Namespace' : '._xmlgen:Namespace', + 'escape' : '._xmlgen:escape', + }, + + 'log' : { + # logging API ('producers' and 'consumers' connected via keywords) + '__doc__' : '._log:__doc__', + '_apiwarn' : '._log.warning:_apiwarn', + 'Producer' : '._log.log:Producer', + 'setconsumer' : '._log.log:setconsumer', + '_setstate' : '._log.log:setstate', + '_getstate' : '._log.log:getstate', + 'Path' : '._log.log:Path', + 'STDOUT' : '._log.log:STDOUT', + 'STDERR' : '._log.log:STDERR', + 'Syslog' : '._log.log:Syslog', + }, + +}) diff --git a/tests/_lib_vendors/py/__metainfo.py b/tests/_lib_vendors/py/__metainfo.py new file mode 100644 index 0000000..12581eb --- /dev/null +++ b/tests/_lib_vendors/py/__metainfo.py @@ -0,0 +1,2 @@ +import py +pydir = py.path.local(py.__file__).dirpath() diff --git a/tests/_lib_vendors/py/_apipkg.py b/tests/_lib_vendors/py/_apipkg.py new file mode 100644 index 0000000..a73b8f6 --- /dev/null +++ b/tests/_lib_vendors/py/_apipkg.py @@ -0,0 +1,181 @@ +""" +apipkg: control the exported namespace of a python package. + +see http://pypi.python.org/pypi/apipkg + +(c) holger krekel, 2009 - MIT license +""" +import os +import sys +from types import ModuleType + +__version__ = '1.3.dev' + +def _py_abspath(path): + """ + special version of abspath + that will leave paths from jython jars alone + """ + if path.startswith('__pyclasspath__'): + + return path + else: + return os.path.abspath(path) + +def initpkg(pkgname, exportdefs, attr=dict()): + """ initialize given package from the export definitions. 
""" + oldmod = sys.modules.get(pkgname) + d = {} + f = getattr(oldmod, '__file__', None) + if f: + f = _py_abspath(f) + d['__file__'] = f + if hasattr(oldmod, '__version__'): + d['__version__'] = oldmod.__version__ + if hasattr(oldmod, '__loader__'): + d['__loader__'] = oldmod.__loader__ + if hasattr(oldmod, '__path__'): + d['__path__'] = [_py_abspath(p) for p in oldmod.__path__] + if '__doc__' not in exportdefs and getattr(oldmod, '__doc__', None): + d['__doc__'] = oldmod.__doc__ + d.update(attr) + if hasattr(oldmod, "__dict__"): + oldmod.__dict__.update(d) + mod = ApiModule(pkgname, exportdefs, implprefix=pkgname, attr=d) + sys.modules[pkgname] = mod + +def importobj(modpath, attrname): + module = __import__(modpath, None, None, ['__doc__']) + if not attrname: + return module + + retval = module + names = attrname.split(".") + for x in names: + retval = getattr(retval, x) + return retval + +class ApiModule(ModuleType): + def __docget(self): + try: + return self.__doc + except AttributeError: + if '__doc__' in self.__map__: + return self.__makeattr('__doc__') + def __docset(self, value): + self.__doc = value + __doc__ = property(__docget, __docset) + + def __init__(self, name, importspec, implprefix=None, attr=None): + self.__name__ = name + self.__all__ = [x for x in importspec if x != '__onfirstaccess__'] + self.__map__ = {} + self.__implprefix__ = implprefix or name + if attr: + for name, val in attr.items(): + # print "setting", self.__name__, name, val + setattr(self, name, val) + for name, importspec in importspec.items(): + if isinstance(importspec, dict): + subname = '%s.%s' % (self.__name__, name) + apimod = ApiModule(subname, importspec, implprefix) + sys.modules[subname] = apimod + setattr(self, name, apimod) + else: + parts = importspec.split(':') + modpath = parts.pop(0) + attrname = parts and parts[0] or "" + if modpath[0] == '.': + modpath = implprefix + modpath + + if not attrname: + subname = '%s.%s' % (self.__name__, name) + apimod = AliasModule(subname, modpath) + sys.modules[subname] = apimod + if '.' 
not in name:
+                        setattr(self, name, apimod)
+                else:
+                    self.__map__[name] = (modpath, attrname)
+
+    def __repr__(self):
+        l = []
+        if hasattr(self, '__version__'):
+            l.append("version=" + repr(self.__version__))
+        if hasattr(self, '__file__'):
+            l.append('from ' + repr(self.__file__))
+        if l:
+            return '<ApiModule %r %s>' % (self.__name__, " ".join(l))
+        return '<ApiModule %r>' % (self.__name__,)
+
+    def __makeattr(self, name):
+        """lazily compute value for name or raise AttributeError if unknown."""
+        # print "makeattr", self.__name__, name
+        target = None
+        if '__onfirstaccess__' in self.__map__:
+            target = self.__map__.pop('__onfirstaccess__')
+            importobj(*target)()
+        try:
+            modpath, attrname = self.__map__[name]
+        except KeyError:
+            if target is not None and name != '__onfirstaccess__':
+                # retry, onfirstaccess might have set attrs
+                return getattr(self, name)
+            raise AttributeError(name)
+        else:
+            result = importobj(modpath, attrname)
+            setattr(self, name, result)
+            try:
+                del self.__map__[name]
+            except KeyError:
+                pass  # in a recursive-import situation a double-del can happen
+            return result
+
+    __getattr__ = __makeattr
+
+    def __dict__(self):
+        # force all the content of the module to be loaded when __dict__ is read
+        dictdescr = ModuleType.__dict__['__dict__']
+        dict = dictdescr.__get__(self)
+        if dict is not None:
+            hasattr(self, 'some')
+            for name in self.__all__:
+                try:
+                    self.__makeattr(name)
+                except AttributeError:
+                    pass
+        return dict
+    __dict__ = property(__dict__)
+
+
+def AliasModule(modname, modpath, attrname=None):
+    mod = []
+
+    def getmod():
+        if not mod:
+            x = importobj(modpath, None)
+            if attrname is not None:
+                x = getattr(x, attrname)
+            mod.append(x)
+        return mod[0]
+
+    class AliasModule(ModuleType):
+
+        def __repr__(self):
+            x = modpath
+            if attrname:
+                x += "." + attrname
+            return '<AliasModule %r referring to %r>' % (modname, x)
+
+        def __getattribute__(self, name):
+            try:
+                return getattr(getmod(), name)
+            except ImportError:
+                return None
+
+        def __setattr__(self, name, value):
+            setattr(getmod(), name, value)
+
+        def __delattr__(self, name):
+            delattr(getmod(), name)
+
+    return AliasModule(str(modname))
diff --git a/tests/_lib_vendors/py/_builtin.py b/tests/_lib_vendors/py/_builtin.py
new file mode 100644
index 0000000..52ee9d7
--- /dev/null
+++ b/tests/_lib_vendors/py/_builtin.py
@@ -0,0 +1,248 @@
+import sys
+
+try:
+    reversed = reversed
+except NameError:
+    def reversed(sequence):
+        """reversed(sequence) -> reverse iterator over values of the sequence
+
+        Return a reverse iterator
+        """
+        if hasattr(sequence, '__reversed__'):
+            return sequence.__reversed__()
+        if not hasattr(sequence, '__getitem__'):
+            raise TypeError("argument to reversed() must be a sequence")
+        return reversed_iterator(sequence)
+
+    class reversed_iterator(object):
+
+        def __init__(self, seq):
+            self.seq = seq
+            self.remaining = len(seq)
+
+        def __iter__(self):
+            return self
+
+        def next(self):
+            i = self.remaining
+            if i > 0:
+                i -= 1
+                item = self.seq[i]
+                self.remaining = i
+                return item
+            raise StopIteration
+
+        def __length_hint__(self):
+            return self.remaining
+
+try:
+    any = any
+except NameError:
+    def any(iterable):
+        for x in iterable:
+            if x:
+                return True
+        return False
+
+try:
+    all = all
+except NameError:
+    def all(iterable):
+        for x in iterable:
+            if not x:
+                return False
+        return True
+
+try:
+    sorted = sorted
+except NameError:
+    builtin_cmp = cmp  # need to use cmp as keyword arg
+
+    def sorted(iterable, cmp=None, key=None, reverse=0):
+        use_cmp = None
+        if key is not None:
+            if cmp is None:
+                def use_cmp(x, y):
+                    return builtin_cmp(x[0], y[0])
+            else:
+                def use_cmp(x, y):
+                    return cmp(x[0], y[0])
+            l = [(key(element), element) for element in iterable]
+        else:
+            if cmp is not None:
+                use_cmp = cmp
+            l = list(iterable)
+        if use_cmp is not None:
+            l.sort(use_cmp)
+        else:
+            l.sort()
+        if reverse:
+            l.reverse()
+        if key is not None:
+            return [element for (_, element) in l]
+        return l
+
+try:
+    set, frozenset = set, frozenset
+except NameError:
+    from sets import set, frozenset
+
+# pass through
+enumerate = enumerate
+
+try:
+    BaseException = BaseException
+except NameError:
+    BaseException = Exception
+
+try:
+    GeneratorExit = GeneratorExit
+except NameError:
+    class GeneratorExit(Exception):
+        """ This exception is never raised, it is there to make it possible to
+        write code compatible with CPython 2.5 even in lower CPython
+        versions."""
+        pass
+    GeneratorExit.__module__ = 'exceptions'
+
+_sysex = (KeyboardInterrupt, SystemExit, MemoryError, GeneratorExit)
+
+try:
+    callable = callable
+except NameError:
+    def callable(obj):
+        return hasattr(obj, "__call__")
+
+if sys.version_info >= (3, 0):
+    exec ("print_ = print ; exec_=exec")
+    import builtins
+
+    # some backward compatibility helpers
+    _basestring = str
+    def _totext(obj, encoding=None, errors=None):
+        if isinstance(obj, bytes):
+            if errors is None:
+                obj = obj.decode(encoding)
+            else:
+                obj = obj.decode(encoding, errors)
+        elif not isinstance(obj, str):
+            obj = str(obj)
+        return obj
+
+    def _isbytes(x):
+        return isinstance(x, bytes)
+    def _istext(x):
+        return isinstance(x, str)
+
+    text = str
+    bytes = bytes
+
+
+    def _getimself(function):
+        return getattr(function, '__self__', None)
+
+    def _getfuncdict(function):
+        return getattr(function, "__dict__", None)
+
+    def _getcode(function):
+        return getattr(function, "__code__", 
None) + + def execfile(fn, globs=None, locs=None): + if globs is None: + back = sys._getframe(1) + globs = back.f_globals + locs = back.f_locals + del back + elif locs is None: + locs = globs + fp = open(fn, "r") + try: + source = fp.read() + finally: + fp.close() + co = compile(source, fn, "exec", dont_inherit=True) + exec_(co, globs, locs) + +else: + import __builtin__ as builtins + _totext = unicode + _basestring = basestring + text = unicode + bytes = str + execfile = execfile + callable = callable + def _isbytes(x): + return isinstance(x, str) + def _istext(x): + return isinstance(x, unicode) + + def _getimself(function): + return getattr(function, 'im_self', None) + + def _getfuncdict(function): + return getattr(function, "__dict__", None) + + def _getcode(function): + try: + return getattr(function, "__code__") + except AttributeError: + return getattr(function, "func_code", None) + + def print_(*args, **kwargs): + """ minimal backport of py3k print statement. """ + sep = ' ' + if 'sep' in kwargs: + sep = kwargs.pop('sep') + end = '\n' + if 'end' in kwargs: + end = kwargs.pop('end') + file = 'file' in kwargs and kwargs.pop('file') or sys.stdout + if kwargs: + args = ", ".join([str(x) for x in kwargs]) + raise TypeError("invalid keyword arguments: %s" % args) + at_start = True + for x in args: + if not at_start: + file.write(sep) + file.write(str(x)) + at_start = False + file.write(end) + + def exec_(obj, globals=None, locals=None): + """ minimal backport of py3k exec statement. """ + __tracebackhide__ = True + if globals is None: + frame = sys._getframe(1) + globals = frame.f_globals + if locals is None: + locals = frame.f_locals + elif locals is None: + locals = globals + exec2(obj, globals, locals) + +if sys.version_info >= (3, 0): + def _reraise(cls, val, tb): + __tracebackhide__ = True + assert hasattr(val, '__traceback__') + raise cls.with_traceback(val, tb) +else: + exec (""" +def _reraise(cls, val, tb): + __tracebackhide__ = True + raise cls, val, tb +def exec2(obj, globals, locals): + __tracebackhide__ = True + exec obj in globals, locals +""") + +def _tryimport(*names): + """ return the first successfully imported module. """ + assert names + for name in names: + try: + __import__(name) + except ImportError: + excinfo = sys.exc_info() + else: + return sys.modules[name] + _reraise(*excinfo) diff --git a/tests/_lib_vendors/py/_code/__init__.py b/tests/_lib_vendors/py/_code/__init__.py new file mode 100644 index 0000000..f15acf8 --- /dev/null +++ b/tests/_lib_vendors/py/_code/__init__.py @@ -0,0 +1 @@ +""" python inspection/code generation API """ diff --git a/tests/_lib_vendors/py/_code/_assertionnew.py b/tests/_lib_vendors/py/_code/_assertionnew.py new file mode 100644 index 0000000..afb1b31 --- /dev/null +++ b/tests/_lib_vendors/py/_code/_assertionnew.py @@ -0,0 +1,339 @@ +""" +Find intermediate evalutation results in assert statements through builtin AST. +This should replace _assertionold.py eventually. 
+""" + +import sys +import ast + +import py +from py._code.assertion import _format_explanation, BuiltinAssertionError + + +if sys.platform.startswith("java") and sys.version_info < (2, 5, 2): + # See http://bugs.jython.org/issue1497 + _exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict", + "ListComp", "GeneratorExp", "Yield", "Compare", "Call", + "Repr", "Num", "Str", "Attribute", "Subscript", "Name", + "List", "Tuple") + _stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign", + "AugAssign", "Print", "For", "While", "If", "With", "Raise", + "TryExcept", "TryFinally", "Assert", "Import", "ImportFrom", + "Exec", "Global", "Expr", "Pass", "Break", "Continue") + _expr_nodes = set(getattr(ast, name) for name in _exprs) + _stmt_nodes = set(getattr(ast, name) for name in _stmts) + def _is_ast_expr(node): + return node.__class__ in _expr_nodes + def _is_ast_stmt(node): + return node.__class__ in _stmt_nodes +else: + def _is_ast_expr(node): + return isinstance(node, ast.expr) + def _is_ast_stmt(node): + return isinstance(node, ast.stmt) + + +class Failure(Exception): + """Error found while interpreting AST.""" + + def __init__(self, explanation=""): + self.cause = sys.exc_info() + self.explanation = explanation + + +def interpret(source, frame, should_fail=False): + mod = ast.parse(source) + visitor = DebugInterpreter(frame) + try: + visitor.visit(mod) + except Failure: + failure = sys.exc_info()[1] + return getfailure(failure) + if should_fail: + return ("(assertion failed, but when it was re-run for " + "printing intermediate values, it did not fail. Suggestions: " + "compute assert expression before the assert or use --no-assert)") + +def run(offending_line, frame=None): + if frame is None: + frame = py.code.Frame(sys._getframe(1)) + return interpret(offending_line, frame) + +def getfailure(failure): + explanation = _format_explanation(failure.explanation) + value = failure.cause[1] + if str(value): + lines = explanation.splitlines() + if not lines: + lines.append("") + lines[0] += " << %s" % (value,) + explanation = "\n".join(lines) + text = "%s: %s" % (failure.cause[0].__name__, explanation) + if text.startswith("AssertionError: assert "): + text = text[16:] + return text + + +operator_map = { + ast.BitOr : "|", + ast.BitXor : "^", + ast.BitAnd : "&", + ast.LShift : "<<", + ast.RShift : ">>", + ast.Add : "+", + ast.Sub : "-", + ast.Mult : "*", + ast.Div : "/", + ast.FloorDiv : "//", + ast.Mod : "%", + ast.Eq : "==", + ast.NotEq : "!=", + ast.Lt : "<", + ast.LtE : "<=", + ast.Gt : ">", + ast.GtE : ">=", + ast.Pow : "**", + ast.Is : "is", + ast.IsNot : "is not", + ast.In : "in", + ast.NotIn : "not in" +} + +unary_map = { + ast.Not : "not %s", + ast.Invert : "~%s", + ast.USub : "-%s", + ast.UAdd : "+%s" +} + + +class DebugInterpreter(ast.NodeVisitor): + """Interpret AST nodes to gleam useful debugging information. """ + + def __init__(self, frame): + self.frame = frame + + def generic_visit(self, node): + # Fallback when we don't have a special implementation. 
+        if _is_ast_expr(node):
+            mod = ast.Expression(node)
+            co = self._compile(mod)
+            try:
+                result = self.frame.eval(co)
+            except Exception:
+                raise Failure()
+            explanation = self.frame.repr(result)
+            return explanation, result
+        elif _is_ast_stmt(node):
+            mod = ast.Module([node])
+            co = self._compile(mod, "exec")
+            try:
+                self.frame.exec_(co)
+            except Exception:
+                raise Failure()
+            return None, None
+        else:
+            raise AssertionError("can't handle %s" %(node,))
+
+    def _compile(self, source, mode="eval"):
+        return compile(source, "<assertion interpretation>", mode)
+
+    def visit_Expr(self, expr):
+        return self.visit(expr.value)
+
+    def visit_Module(self, mod):
+        for stmt in mod.body:
+            self.visit(stmt)
+
+    def visit_Name(self, name):
+        explanation, result = self.generic_visit(name)
+        # See if the name is local.
+        source = "%r in locals() is not globals()" % (name.id,)
+        co = self._compile(source)
+        try:
+            local = self.frame.eval(co)
+        except Exception:
+            # have to assume it isn't
+            local = False
+        if not local:
+            return name.id, result
+        return explanation, result
+
+    def visit_Compare(self, comp):
+        left = comp.left
+        left_explanation, left_result = self.visit(left)
+        for op, next_op in zip(comp.ops, comp.comparators):
+            next_explanation, next_result = self.visit(next_op)
+            op_symbol = operator_map[op.__class__]
+            explanation = "%s %s %s" % (left_explanation, op_symbol,
+                                        next_explanation)
+            source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
+            co = self._compile(source)
+            try:
+                result = self.frame.eval(co, __exprinfo_left=left_result,
+                                         __exprinfo_right=next_result)
+            except Exception:
+                raise Failure(explanation)
+            try:
+                if not result:
+                    break
+            except KeyboardInterrupt:
+                raise
+            except:
+                break
+            left_explanation, left_result = next_explanation, next_result
+
+        rcomp = py.code._reprcompare
+        if rcomp:
+            res = rcomp(op_symbol, left_result, next_result)
+            if res:
+                explanation = res
+        return explanation, result
+
+    def visit_BoolOp(self, boolop):
+        is_or = isinstance(boolop.op, ast.Or)
+        explanations = []
+        for operand in boolop.values:
+            explanation, result = self.visit(operand)
+            explanations.append(explanation)
+            if result == is_or:
+                break
+        name = is_or and " or " or " and "
+        explanation = "(" + name.join(explanations) + ")"
+        return explanation, result
+
+    def visit_UnaryOp(self, unary):
+        pattern = unary_map[unary.op.__class__]
+        operand_explanation, operand_result = self.visit(unary.operand)
+        explanation = pattern % (operand_explanation,)
+        co = self._compile(pattern % ("__exprinfo_expr",))
+        try:
+            result = self.frame.eval(co, __exprinfo_expr=operand_result)
+        except Exception:
+            raise Failure(explanation)
+        return explanation, result
+
+    def visit_BinOp(self, binop):
+        left_explanation, left_result = self.visit(binop.left)
+        right_explanation, right_result = self.visit(binop.right)
+        symbol = operator_map[binop.op.__class__]
+        explanation = "(%s %s %s)" % (left_explanation, symbol,
+                                      right_explanation)
+        source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
+        co = self._compile(source)
+        try:
+            result = self.frame.eval(co, __exprinfo_left=left_result,
+                                     __exprinfo_right=right_result)
+        except Exception:
+            raise Failure(explanation)
+        return explanation, result
+
+    def visit_Call(self, call):
+        func_explanation, func = self.visit(call.func)
+        arg_explanations = []
+        ns = {"__exprinfo_func" : func}
+        arguments = []
+        for arg in call.args:
+            arg_explanation, arg_result = self.visit(arg)
+            arg_name = "__exprinfo_%s" % (len(ns),)
+            ns[arg_name] = arg_result
+            arguments.append(arg_name)
+            
arg_explanations.append(arg_explanation) + for keyword in call.keywords: + arg_explanation, arg_result = self.visit(keyword.value) + arg_name = "__exprinfo_%s" % (len(ns),) + ns[arg_name] = arg_result + keyword_source = "%s=%%s" % (keyword.arg) + arguments.append(keyword_source % (arg_name,)) + arg_explanations.append(keyword_source % (arg_explanation,)) + if call.starargs: + arg_explanation, arg_result = self.visit(call.starargs) + arg_name = "__exprinfo_star" + ns[arg_name] = arg_result + arguments.append("*%s" % (arg_name,)) + arg_explanations.append("*%s" % (arg_explanation,)) + if call.kwargs: + arg_explanation, arg_result = self.visit(call.kwargs) + arg_name = "__exprinfo_kwds" + ns[arg_name] = arg_result + arguments.append("**%s" % (arg_name,)) + arg_explanations.append("**%s" % (arg_explanation,)) + args_explained = ", ".join(arg_explanations) + explanation = "%s(%s)" % (func_explanation, args_explained) + args = ", ".join(arguments) + source = "__exprinfo_func(%s)" % (args,) + co = self._compile(source) + try: + result = self.frame.eval(co, **ns) + except Exception: + raise Failure(explanation) + pattern = "%s\n{%s = %s\n}" + rep = self.frame.repr(result) + explanation = pattern % (rep, rep, explanation) + return explanation, result + + def _is_builtin_name(self, name): + pattern = "%r not in globals() and %r not in locals()" + source = pattern % (name.id, name.id) + co = self._compile(source) + try: + return self.frame.eval(co) + except Exception: + return False + + def visit_Attribute(self, attr): + if not isinstance(attr.ctx, ast.Load): + return self.generic_visit(attr) + source_explanation, source_result = self.visit(attr.value) + explanation = "%s.%s" % (source_explanation, attr.attr) + source = "__exprinfo_expr.%s" % (attr.attr,) + co = self._compile(source) + try: + result = self.frame.eval(co, __exprinfo_expr=source_result) + except Exception: + raise Failure(explanation) + explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result), + self.frame.repr(result), + source_explanation, attr.attr) + # Check if the attr is from an instance. + source = "%r in getattr(__exprinfo_expr, '__dict__', {})" + source = source % (attr.attr,) + co = self._compile(source) + try: + from_instance = self.frame.eval(co, __exprinfo_expr=source_result) + except Exception: + from_instance = True + if from_instance: + rep = self.frame.repr(result) + pattern = "%s\n{%s = %s\n}" + explanation = pattern % (rep, rep, explanation) + return explanation, result + + def visit_Assert(self, assrt): + test_explanation, test_result = self.visit(assrt.test) + if test_explanation.startswith("False\n{False =") and \ + test_explanation.endswith("\n"): + test_explanation = test_explanation[15:-2] + explanation = "assert %s" % (test_explanation,) + if not test_result: + try: + raise BuiltinAssertionError + except Exception: + raise Failure(explanation) + return explanation, test_result + + def visit_Assign(self, assign): + value_explanation, value_result = self.visit(assign.value) + explanation = "... 
= %s" % (value_explanation,) + name = ast.Name("__exprinfo_expr", ast.Load(), + lineno=assign.value.lineno, + col_offset=assign.value.col_offset) + new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno, + col_offset=assign.col_offset) + mod = ast.Module([new_assign]) + co = self._compile(mod, "exec") + try: + self.frame.exec_(co, __exprinfo_expr=value_result) + except Exception: + raise Failure(explanation) + return explanation, value_result diff --git a/tests/_lib_vendors/py/_code/_assertionold.py b/tests/_lib_vendors/py/_code/_assertionold.py new file mode 100644 index 0000000..4e81fb3 --- /dev/null +++ b/tests/_lib_vendors/py/_code/_assertionold.py @@ -0,0 +1,555 @@ +import py +import sys, inspect +from compiler import parse, ast, pycodegen +from py._code.assertion import BuiltinAssertionError, _format_explanation + +passthroughex = py.builtin._sysex + +class Failure: + def __init__(self, node): + self.exc, self.value, self.tb = sys.exc_info() + self.node = node + +class View(object): + """View base class. + + If C is a subclass of View, then C(x) creates a proxy object around + the object x. The actual class of the proxy is not C in general, + but a *subclass* of C determined by the rules below. To avoid confusion + we call view class the class of the proxy (a subclass of C, so of View) + and object class the class of x. + + Attributes and methods not found in the proxy are automatically read on x. + Other operations like setting attributes are performed on the proxy, as + determined by its view class. The object x is available from the proxy + as its __obj__ attribute. + + The view class selection is determined by the __view__ tuples and the + optional __viewkey__ method. By default, the selected view class is the + most specific subclass of C whose __view__ mentions the class of x. + If no such subclass is found, the search proceeds with the parent + object classes. For example, C(True) will first look for a subclass + of C with __view__ = (..., bool, ...) and only if it doesn't find any + look for one with __view__ = (..., int, ...), and then ..., object,... + If everything fails the class C itself is considered to be the default. + + Alternatively, the view class selection can be driven by another aspect + of the object x, instead of the class of x, by overriding __viewkey__. + See last example at the end of this module. 
+ """ + + _viewcache = {} + __view__ = () + + def __new__(rootclass, obj, *args, **kwds): + self = object.__new__(rootclass) + self.__obj__ = obj + self.__rootclass__ = rootclass + key = self.__viewkey__() + try: + self.__class__ = self._viewcache[key] + except KeyError: + self.__class__ = self._selectsubclass(key) + return self + + def __getattr__(self, attr): + # attributes not found in the normal hierarchy rooted on View + # are looked up in the object's real class + return getattr(self.__obj__, attr) + + def __viewkey__(self): + return self.__obj__.__class__ + + def __matchkey__(self, key, subclasses): + if inspect.isclass(key): + keys = inspect.getmro(key) + else: + keys = [key] + for key in keys: + result = [C for C in subclasses if key in C.__view__] + if result: + return result + return [] + + def _selectsubclass(self, key): + subclasses = list(enumsubclasses(self.__rootclass__)) + for C in subclasses: + if not isinstance(C.__view__, tuple): + C.__view__ = (C.__view__,) + choices = self.__matchkey__(key, subclasses) + if not choices: + return self.__rootclass__ + elif len(choices) == 1: + return choices[0] + else: + # combine the multiple choices + return type('?', tuple(choices), {}) + + def __repr__(self): + return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__) + + +def enumsubclasses(cls): + for subcls in cls.__subclasses__(): + for subsubclass in enumsubclasses(subcls): + yield subsubclass + yield cls + + +class Interpretable(View): + """A parse tree node with a few extra methods.""" + explanation = None + + def is_builtin(self, frame): + return False + + def eval(self, frame): + # fall-back for unknown expression nodes + try: + expr = ast.Expression(self.__obj__) + expr.filename = '' + self.__obj__.filename = '' + co = pycodegen.ExpressionCodeGenerator(expr).getCode() + result = frame.eval(co) + except passthroughex: + raise + except: + raise Failure(self) + self.result = result + self.explanation = self.explanation or frame.repr(self.result) + + def run(self, frame): + # fall-back for unknown statement nodes + try: + expr = ast.Module(None, ast.Stmt([self.__obj__])) + expr.filename = '' + co = pycodegen.ModuleCodeGenerator(expr).getCode() + frame.exec_(co) + except passthroughex: + raise + except: + raise Failure(self) + + def nice_explanation(self): + return _format_explanation(self.explanation) + + +class Name(Interpretable): + __view__ = ast.Name + + def is_local(self, frame): + source = '%r in locals() is not globals()' % self.name + try: + return frame.is_true(frame.eval(source)) + except passthroughex: + raise + except: + return False + + def is_global(self, frame): + source = '%r in globals()' % self.name + try: + return frame.is_true(frame.eval(source)) + except passthroughex: + raise + except: + return False + + def is_builtin(self, frame): + source = '%r not in locals() and %r not in globals()' % ( + self.name, self.name) + try: + return frame.is_true(frame.eval(source)) + except passthroughex: + raise + except: + return False + + def eval(self, frame): + super(Name, self).eval(frame) + if not self.is_local(frame): + self.explanation = self.name + +class Compare(Interpretable): + __view__ = ast.Compare + + def eval(self, frame): + expr = Interpretable(self.expr) + expr.eval(frame) + for operation, expr2 in self.ops: + if hasattr(self, 'result'): + # shortcutting in chained expressions + if not frame.is_true(self.result): + break + expr2 = Interpretable(expr2) + expr2.eval(frame) + self.explanation = "%s %s %s" % ( + expr.explanation, operation, 
expr2.explanation) + source = "__exprinfo_left %s __exprinfo_right" % operation + try: + self.result = frame.eval(source, + __exprinfo_left=expr.result, + __exprinfo_right=expr2.result) + except passthroughex: + raise + except: + raise Failure(self) + expr = expr2 + +class And(Interpretable): + __view__ = ast.And + + def eval(self, frame): + explanations = [] + for expr in self.nodes: + expr = Interpretable(expr) + expr.eval(frame) + explanations.append(expr.explanation) + self.result = expr.result + if not frame.is_true(expr.result): + break + self.explanation = '(' + ' and '.join(explanations) + ')' + +class Or(Interpretable): + __view__ = ast.Or + + def eval(self, frame): + explanations = [] + for expr in self.nodes: + expr = Interpretable(expr) + expr.eval(frame) + explanations.append(expr.explanation) + self.result = expr.result + if frame.is_true(expr.result): + break + self.explanation = '(' + ' or '.join(explanations) + ')' + + +# == Unary operations == +keepalive = [] +for astclass, astpattern in { + ast.Not : 'not __exprinfo_expr', + ast.Invert : '(~__exprinfo_expr)', + }.items(): + + class UnaryArith(Interpretable): + __view__ = astclass + + def eval(self, frame, astpattern=astpattern): + expr = Interpretable(self.expr) + expr.eval(frame) + self.explanation = astpattern.replace('__exprinfo_expr', + expr.explanation) + try: + self.result = frame.eval(astpattern, + __exprinfo_expr=expr.result) + except passthroughex: + raise + except: + raise Failure(self) + + keepalive.append(UnaryArith) + +# == Binary operations == +for astclass, astpattern in { + ast.Add : '(__exprinfo_left + __exprinfo_right)', + ast.Sub : '(__exprinfo_left - __exprinfo_right)', + ast.Mul : '(__exprinfo_left * __exprinfo_right)', + ast.Div : '(__exprinfo_left / __exprinfo_right)', + ast.Mod : '(__exprinfo_left % __exprinfo_right)', + ast.Power : '(__exprinfo_left ** __exprinfo_right)', + }.items(): + + class BinaryArith(Interpretable): + __view__ = astclass + + def eval(self, frame, astpattern=astpattern): + left = Interpretable(self.left) + left.eval(frame) + right = Interpretable(self.right) + right.eval(frame) + self.explanation = (astpattern + .replace('__exprinfo_left', left .explanation) + .replace('__exprinfo_right', right.explanation)) + try: + self.result = frame.eval(astpattern, + __exprinfo_left=left.result, + __exprinfo_right=right.result) + except passthroughex: + raise + except: + raise Failure(self) + + keepalive.append(BinaryArith) + + +class CallFunc(Interpretable): + __view__ = ast.CallFunc + + def is_bool(self, frame): + source = 'isinstance(__exprinfo_value, bool)' + try: + return frame.is_true(frame.eval(source, + __exprinfo_value=self.result)) + except passthroughex: + raise + except: + return False + + def eval(self, frame): + node = Interpretable(self.node) + node.eval(frame) + explanations = [] + vars = {'__exprinfo_fn': node.result} + source = '__exprinfo_fn(' + for a in self.args: + if isinstance(a, ast.Keyword): + keyword = a.name + a = a.expr + else: + keyword = None + a = Interpretable(a) + a.eval(frame) + argname = '__exprinfo_%d' % len(vars) + vars[argname] = a.result + if keyword is None: + source += argname + ',' + explanations.append(a.explanation) + else: + source += '%s=%s,' % (keyword, argname) + explanations.append('%s=%s' % (keyword, a.explanation)) + if self.star_args: + star_args = Interpretable(self.star_args) + star_args.eval(frame) + argname = '__exprinfo_star' + vars[argname] = star_args.result + source += '*' + argname + ',' + explanations.append('*' + 
star_args.explanation) + if self.dstar_args: + dstar_args = Interpretable(self.dstar_args) + dstar_args.eval(frame) + argname = '__exprinfo_kwds' + vars[argname] = dstar_args.result + source += '**' + argname + ',' + explanations.append('**' + dstar_args.explanation) + self.explanation = "%s(%s)" % ( + node.explanation, ', '.join(explanations)) + if source.endswith(','): + source = source[:-1] + source += ')' + try: + self.result = frame.eval(source, **vars) + except passthroughex: + raise + except: + raise Failure(self) + if not node.is_builtin(frame) or not self.is_bool(frame): + r = frame.repr(self.result) + self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation) + +class Getattr(Interpretable): + __view__ = ast.Getattr + + def eval(self, frame): + expr = Interpretable(self.expr) + expr.eval(frame) + source = '__exprinfo_expr.%s' % self.attrname + try: + self.result = frame.eval(source, __exprinfo_expr=expr.result) + except passthroughex: + raise + except: + raise Failure(self) + self.explanation = '%s.%s' % (expr.explanation, self.attrname) + # if the attribute comes from the instance, its value is interesting + source = ('hasattr(__exprinfo_expr, "__dict__") and ' + '%r in __exprinfo_expr.__dict__' % self.attrname) + try: + from_instance = frame.is_true( + frame.eval(source, __exprinfo_expr=expr.result)) + except passthroughex: + raise + except: + from_instance = True + if from_instance: + r = frame.repr(self.result) + self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation) + +# == Re-interpretation of full statements == + +class Assert(Interpretable): + __view__ = ast.Assert + + def run(self, frame): + test = Interpretable(self.test) + test.eval(frame) + # simplify 'assert False where False = ...' + if (test.explanation.startswith('False\n{False = ') and + test.explanation.endswith('\n}')): + test.explanation = test.explanation[15:-2] + # print the result as 'assert ' + self.result = test.result + self.explanation = 'assert ' + test.explanation + if not frame.is_true(test.result): + try: + raise BuiltinAssertionError + except passthroughex: + raise + except: + raise Failure(self) + +class Assign(Interpretable): + __view__ = ast.Assign + + def run(self, frame): + expr = Interpretable(self.expr) + expr.eval(frame) + self.result = expr.result + self.explanation = '... 
= ' + expr.explanation + # fall-back-run the rest of the assignment + ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr')) + mod = ast.Module(None, ast.Stmt([ass])) + mod.filename = '' + co = pycodegen.ModuleCodeGenerator(mod).getCode() + try: + frame.exec_(co, __exprinfo_expr=expr.result) + except passthroughex: + raise + except: + raise Failure(self) + +class Discard(Interpretable): + __view__ = ast.Discard + + def run(self, frame): + expr = Interpretable(self.expr) + expr.eval(frame) + self.result = expr.result + self.explanation = expr.explanation + +class Stmt(Interpretable): + __view__ = ast.Stmt + + def run(self, frame): + for stmt in self.nodes: + stmt = Interpretable(stmt) + stmt.run(frame) + + +def report_failure(e): + explanation = e.node.nice_explanation() + if explanation: + explanation = ", in: " + explanation + else: + explanation = "" + sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation)) + +def check(s, frame=None): + if frame is None: + frame = sys._getframe(1) + frame = py.code.Frame(frame) + expr = parse(s, 'eval') + assert isinstance(expr, ast.Expression) + node = Interpretable(expr.node) + try: + node.eval(frame) + except passthroughex: + raise + except Failure: + e = sys.exc_info()[1] + report_failure(e) + else: + if not frame.is_true(node.result): + sys.stderr.write("assertion failed: %s\n" % node.nice_explanation()) + + +########################################################### +# API / Entry points +# ######################################################### + +def interpret(source, frame, should_fail=False): + module = Interpretable(parse(source, 'exec').node) + #print "got module", module + if isinstance(frame, py.std.types.FrameType): + frame = py.code.Frame(frame) + try: + module.run(frame) + except Failure: + e = sys.exc_info()[1] + return getfailure(e) + except passthroughex: + raise + except: + import traceback + traceback.print_exc() + if should_fail: + return ("(assertion failed, but when it was re-run for " + "printing intermediate values, it did not fail. 
Suggestions: " + "compute assert expression before the assert or use --nomagic)") + else: + return None + +def getmsg(excinfo): + if isinstance(excinfo, tuple): + excinfo = py.code.ExceptionInfo(excinfo) + #frame, line = gettbline(tb) + #frame = py.code.Frame(frame) + #return interpret(line, frame) + + tb = excinfo.traceback[-1] + source = str(tb.statement).strip() + x = interpret(source, tb.frame, should_fail=True) + if not isinstance(x, str): + raise TypeError("interpret returned non-string %r" % (x,)) + return x + +def getfailure(e): + explanation = e.node.nice_explanation() + if str(e.value): + lines = explanation.split('\n') + lines[0] += " << %s" % (e.value,) + explanation = '\n'.join(lines) + text = "%s: %s" % (e.exc.__name__, explanation) + if text.startswith('AssertionError: assert '): + text = text[16:] + return text + +def run(s, frame=None): + if frame is None: + frame = sys._getframe(1) + frame = py.code.Frame(frame) + module = Interpretable(parse(s, 'exec').node) + try: + module.run(frame) + except Failure: + e = sys.exc_info()[1] + report_failure(e) + + +if __name__ == '__main__': + # example: + def f(): + return 5 + def g(): + return 3 + def h(x): + return 'never' + check("f() * g() == 5") + check("not f()") + check("not (f() and g() or 0)") + check("f() == g()") + i = 4 + check("i == f()") + check("len(f()) == 0") + check("isinstance(2+3+4, float)") + + run("x = i") + check("x == 5") + + run("assert not f(), 'oops'") + run("a, b, c = 1, 2") + run("a, b, c = f()") + + check("max([f(),g()]) == 4") + check("'hello'[g()] == 'h'") + run("'guk%d' % h(f())") diff --git a/tests/_lib_vendors/py/_code/_py2traceback.py b/tests/_lib_vendors/py/_code/_py2traceback.py new file mode 100644 index 0000000..d65e27c --- /dev/null +++ b/tests/_lib_vendors/py/_code/_py2traceback.py @@ -0,0 +1,79 @@ +# copied from python-2.7.3's traceback.py +# CHANGES: +# - some_str is replaced, trying to create unicode strings +# +import types + +def format_exception_only(etype, value): + """Format the exception part of a traceback. + + The arguments are the exception type and value such as given by + sys.last_type and sys.last_value. The return value is a list of + strings, each ending in a newline. + + Normally, the list contains a single string; however, for + SyntaxError exceptions, it contains several lines that (when + printed) display detailed information about where the syntax + error occurred. + + The message indicating which exception occurred is always the last + string in the list. + + """ + + # An instance should not have a meaningful value parameter, but + # sometimes does, particularly for string exceptions, such as + # >>> raise string1, string2 # deprecated + # + # Clear these out first because issubtype(string1, SyntaxError) + # would throw another exception and mask the original problem. + if (isinstance(etype, BaseException) or + isinstance(etype, types.InstanceType) or + etype is None or type(etype) is str): + return [_format_final_exc_line(etype, value)] + + stype = etype.__name__ + + if not issubclass(etype, SyntaxError): + return [_format_final_exc_line(stype, value)] + + # It was a syntax error; show exactly where the problem was found. 
+    lines = []
+    try:
+        msg, (filename, lineno, offset, badline) = value.args
+    except Exception:
+        pass
+    else:
+        filename = filename or "<string>"
+        lines.append('  File "%s", line %d\n' % (filename, lineno))
+        if badline is not None:
+            lines.append('    %s\n' % badline.strip())
+            if offset is not None:
+                caretspace = badline.rstrip('\n')[:offset].lstrip()
+                # non-space whitespace (like tabs) must be kept for alignment
+                caretspace = ((c.isspace() and c or ' ') for c in caretspace)
+                # only three spaces to account for offset1 == pos 0
+                lines.append('   %s^\n' % ''.join(caretspace))
+        value = msg
+
+    lines.append(_format_final_exc_line(stype, value))
+    return lines
+
+def _format_final_exc_line(etype, value):
+    """Return a single line -- the normal case for format_exception_only"""
+    valuestr = _some_str(value)
+    if value is None or not valuestr:
+        line = "%s\n" % etype
+    else:
+        line = "%s: %s\n" % (etype, valuestr)
+    return line
+
+def _some_str(value):
+    try:
+        return unicode(value)
+    except Exception:
+        try:
+            return str(value)
+        except Exception:
+            pass
+    return '<unprintable %s object>' % type(value).__name__
diff --git a/tests/_lib_vendors/py/_code/assertion.py b/tests/_lib_vendors/py/_code/assertion.py
new file mode 100644
index 0000000..4ce80c7
--- /dev/null
+++ b/tests/_lib_vendors/py/_code/assertion.py
@@ -0,0 +1,94 @@
+import sys
+import py
+
+BuiltinAssertionError = py.builtin.builtins.AssertionError
+
+_reprcompare = None # if set, will be called by assert reinterp for comparison ops
+
+def _format_explanation(explanation):
+    """This formats an explanation
+
+    Normally all embedded newlines are escaped, however there are
+    three exceptions: \n{, \n} and \n~.  The first two are intended to
+    cover nested explanations, see function and attribute explanations
+    for examples (.visit_Call(), visit_Attribute()).  The last one is
+    for when one explanation needs to span multiple lines, e.g. when
+    displaying diffs.
+    """
+    raw_lines = (explanation or '').split('\n')
+    # escape newlines not followed by {, } and ~
+    lines = [raw_lines[0]]
+    for l in raw_lines[1:]:
+        if l.startswith('{') or l.startswith('}') or l.startswith('~'):
+            lines.append(l)
+        else:
+            lines[-1] += '\\n' + l
+
+    result = lines[:1]
+    stack = [0]
+    stackcnt = [0]
+    for line in lines[1:]:
+        if line.startswith('{'):
+            if stackcnt[-1]:
+                s = 'and   '
+            else:
+                s = 'where '
+            stack.append(len(result))
+            stackcnt[-1] += 1
+            stackcnt.append(0)
+            result.append(' +' + '  '*(len(stack)-1) + s + line[1:])
+        elif line.startswith('}'):
+            assert line.startswith('}')
+            stack.pop()
+            stackcnt.pop()
+            result[stack[-1]] += line[1:]
+        else:
+            assert line.startswith('~')
+            result.append('  '*len(stack) + line[1:])
+    assert len(stack) == 1
+    return '\n'.join(result)
+
+
+class AssertionError(BuiltinAssertionError):
+    def __init__(self, *args):
+        BuiltinAssertionError.__init__(self, *args)
+        if args:
+            try:
+                self.msg = str(args[0])
+            except py.builtin._sysex:
+                raise
+            except:
+                self.msg = "<[broken __repr__] %s at %0xd>" %(
+                    args[0].__class__, id(args[0]))
+        else:
+            f = py.code.Frame(sys._getframe(1))
+            try:
+                source = f.code.fullsource
+                if source is not None:
+                    try:
+                        source = source.getstatement(f.lineno, assertion=True)
+                    except IndexError:
+                        source = None
+                    else:
+                        source = str(source.deindent()).strip()
+            except py.error.ENOENT:
+                source = None
+                # this can also occur during reinterpretation, when the
+                # co_filename is set to "".
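+            # [editor's note] illustrative sketch, not part of the vendored
+            # file: once this class is installed as the builtin
+            # AssertionError (see patch_builtins() in code.py), a plain
+            # failing assert has its source reinterpreted, e.g.
+            #
+            #     x, y = 1, 2
+            #     assert x == y        # -> self.msg becomes "assert 1 == 2"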
+            if source:
+                self.msg = reinterpret(source, f, should_fail=True)
+            else:
+                self.msg = "<could not determine information>"
+        if not self.args:
+            self.args = (self.msg,)
+
+if sys.version_info > (3, 0):
+    AssertionError.__module__ = "builtins"
+    reinterpret_old = "old reinterpretation not available for py3"
+else:
+    from py._code._assertionold import interpret as reinterpret_old
+if sys.version_info >= (2, 6) or (sys.platform.startswith("java")):
+    from py._code._assertionnew import interpret as reinterpret
+else:
+    reinterpret = reinterpret_old
+
diff --git a/tests/_lib_vendors/py/_code/code.py b/tests/_lib_vendors/py/_code/code.py
new file mode 100644
index 0000000..f14c562
--- /dev/null
+++ b/tests/_lib_vendors/py/_code/code.py
@@ -0,0 +1,787 @@
+import py
+import sys
+from inspect import CO_VARARGS, CO_VARKEYWORDS
+
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+if sys.version_info[0] >= 3:
+    from traceback import format_exception_only
+else:
+    from py._code._py2traceback import format_exception_only
+
+class Code(object):
+    """ wrapper around Python code objects """
+    def __init__(self, rawcode):
+        if not hasattr(rawcode, "co_filename"):
+            rawcode = py.code.getrawcode(rawcode)
+        try:
+            self.filename = rawcode.co_filename
+            self.firstlineno = rawcode.co_firstlineno - 1
+            self.name = rawcode.co_name
+        except AttributeError:
+            raise TypeError("not a code object: %r" %(rawcode,))
+        self.raw = rawcode
+
+    def __eq__(self, other):
+        return self.raw == other.raw
+
+    def __ne__(self, other):
+        return not self == other
+
+    @property
+    def path(self):
+        """ return a path object pointing to source code (note that it
+        might not point to an actually existing file). """
+        p = py.path.local(self.raw.co_filename)
+        # maybe don't try this checking
+        if not p.check():
+            # XXX maybe try harder like the weird logic
+            # in the standard lib [linecache.updatecache] does?
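+            # [editor's note] descriptive comment added for clarity: fall
+            # back to the plain co_filename string (e.g. "<string>" for
+            # exec'd code) when it does not name a real file on disk.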
+            p = self.raw.co_filename
+        return p
+
+    @property
+    def fullsource(self):
+        """ return a py.code.Source object for the full source file of the code
+        """
+        from py._code import source
+        full, _ = source.findsource(self.raw)
+        return full
+
+    def source(self):
+        """ return a py.code.Source object for the code object's source only
+        """
+        # return source only for that part of code
+        return py.code.Source(self.raw)
+
+    def getargs(self, var=False):
+        """ return a tuple with the argument names for the code object
+
+            if 'var' is set True also return the names of the variable and
+            keyword arguments when present
+        """
+        # handy shortcut for getting args
+        raw = self.raw
+        argcount = raw.co_argcount
+        if var:
+            argcount += raw.co_flags & CO_VARARGS
+            argcount += raw.co_flags & CO_VARKEYWORDS
+        return raw.co_varnames[:argcount]
+
+class Frame(object):
+    """Wrapper around a Python frame holding f_locals and f_globals
+    in which expressions can be evaluated."""
+
+    def __init__(self, frame):
+        self.lineno = frame.f_lineno - 1
+        self.f_globals = frame.f_globals
+        self.f_locals = frame.f_locals
+        self.raw = frame
+        self.code = py.code.Code(frame.f_code)
+
+    @property
+    def statement(self):
+        """ statement this frame is at """
+        if self.code.fullsource is None:
+            return py.code.Source("")
+        return self.code.fullsource.getstatement(self.lineno)
+
+    def eval(self, code, **vars):
+        """ evaluate 'code' in the frame
+
+            'vars' are optional additional local variables
+
+            returns the result of the evaluation
+        """
+        f_locals = self.f_locals.copy()
+        f_locals.update(vars)
+        return eval(code, self.f_globals, f_locals)
+
+    def exec_(self, code, **vars):
+        """ exec 'code' in the frame
+
+            'vars' are optional additional local variables
+        """
+        f_locals = self.f_locals.copy()
+        f_locals.update(vars)
+        py.builtin.exec_(code, self.f_globals, f_locals )
+
+    def repr(self, object):
+        """ return a 'safe' (non-recursive, one-line) string repr for 'object'
+        """
+        return py.io.saferepr(object)
+
+    def is_true(self, object):
+        return object
+
+    def getargs(self, var=False):
+        """ return a list of tuples (name, value) for all arguments
+
+            if 'var' is set True also include the variable and keyword
+            arguments when present
+        """
+        retval = []
+        for arg in self.code.getargs(var):
+            try:
+                retval.append((arg, self.f_locals[arg]))
+            except KeyError:
+                pass     # this can occur when using Psyco
+        return retval
+
+class TracebackEntry(object):
+    """ a single entry in a traceback """
+
+    _repr_style = None
+    exprinfo = None
+
+    def __init__(self, rawentry):
+        self._rawentry = rawentry
+        self.lineno = rawentry.tb_lineno - 1
+
+    def set_repr_style(self, mode):
+        assert mode in ("short", "long")
+        self._repr_style = mode
+
+    @property
+    def frame(self):
+        return py.code.Frame(self._rawentry.tb_frame)
+
+    @property
+    def relline(self):
+        return self.lineno - self.frame.code.firstlineno
+
+    def __repr__(self):
+        return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1)
+
+    @property
+    def statement(self):
+        """ py.code.Source object for the current statement """
+        source = self.frame.code.fullsource
+        return source.getstatement(self.lineno)
+
+    @property
+    def path(self):
+        """ path to the source code """
+        return self.frame.code.path
+
+    def getlocals(self):
+        return self.frame.f_locals
+    locals = property(getlocals, None, None, "locals of underlying frame")
+
+    def reinterpret(self):
+        """Reinterpret the failing statement and return detailed information
+           about what operations were performed."""
+        if self.exprinfo is None:
+            source = str(self.statement).strip()
+            x = py.code._reinterpret(source, self.frame, should_fail=True)
+            if not isinstance(x, str):
+                raise TypeError("interpret returned non-string %r" % (x,))
+            self.exprinfo = x
+        return self.exprinfo
+
+    def getfirstlinesource(self):
+        # on Jython this firstlineno can be -1 apparently
+        return max(self.frame.code.firstlineno, 0)
+
+    def getsource(self, astcache=None):
+        """ return failing source code. """
+        # we use the passed in astcache to not reparse asttrees
+        # within exception info printing
+        from py._code.source import getstatementrange_ast
+        source = self.frame.code.fullsource
+        if source is None:
+            return None
+        key = astnode = None
+        if astcache is not None:
+            key = self.frame.code.path
+            if key is not None:
+                astnode = astcache.get(key, None)
+        start = self.getfirstlinesource()
+        try:
+            astnode, _, end = getstatementrange_ast(self.lineno, source,
+                                                    astnode=astnode)
+        except SyntaxError:
+            end = self.lineno + 1
+        else:
+            if key is not None:
+                astcache[key] = astnode
+        return source[start:end]
+
+    source = property(getsource)
+
+    def ishidden(self):
+        """ return True if the current frame has a var __tracebackhide__
+            resolving to True
+
+            mostly for internal use
+        """
+        try:
+            return self.frame.f_locals['__tracebackhide__']
+        except KeyError:
+            try:
+                return self.frame.f_globals['__tracebackhide__']
+            except KeyError:
+                return False
+
+    def __str__(self):
+        try:
+            fn = str(self.path)
+        except py.error.Error:
+            fn = '???'
+        name = self.frame.code.name
+        try:
+            line = str(self.statement).lstrip()
+        except KeyboardInterrupt:
+            raise
+        except:
+            line = "???"
+        return "  File %r:%d in %s\n  %s\n" %(fn, self.lineno+1, name, line)
+
+    def name(self):
+        return self.frame.code.raw.co_name
+    name = property(name, None, None, "co_name of underlying code")
+
+class Traceback(list):
+    """ Traceback objects encapsulate and offer higher level
+        access to Traceback entries.
+    """
+    Entry = TracebackEntry
+    def __init__(self, tb):
+        """ initialize from given python traceback object. """
+        if hasattr(tb, 'tb_next'):
+            def f(cur):
+                while cur is not None:
+                    yield self.Entry(cur)
+                    cur = cur.tb_next
+            list.__init__(self, f(tb))
+        else:
+            list.__init__(self, tb)
+
+    def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
+        """ return a Traceback instance wrapping part of this Traceback
+
+            by providing any combination of path, lineno and firstlineno, the
+            first frame to start the to-be-returned traceback is determined
+
+            this allows cutting the first part of a Traceback instance e.g.
+            for formatting reasons (removing some uninteresting bits that deal
+            with handling of the exception/traceback)
+        """
+        for x in self:
+            code = x.frame.code
+            codepath = code.path
+            if ((path is None or codepath == path) and
+                (excludepath is None or not hasattr(codepath, 'relto') or
+                 not codepath.relto(excludepath)) and
+                (lineno is None or x.lineno == lineno) and
+                (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
+                return Traceback(x._rawentry)
+        return self
+
+    def __getitem__(self, key):
+        val = super(Traceback, self).__getitem__(key)
+        if isinstance(key, type(slice(0))):
+            val = self.__class__(val)
+        return val
+
+    def filter(self, fn=lambda x: not x.ishidden()):
+        """ return a Traceback instance with certain items removed
+
+            fn is a function that gets a single argument, a TracebackItem
+            instance, and should return True when the item should be added
+            to the Traceback, False when not
+
+            by default this removes all the TracebackItems which are hidden
+            (see ishidden() above)
+        """
+        return Traceback(filter(fn, self))
+
+    def getcrashentry(self):
+        """ return last non-hidden traceback entry that led
+            to the exception of a traceback.
+        """
+        for i in range(-1, -len(self)-1, -1):
+            entry = self[i]
+            if not entry.ishidden():
+                return entry
+        return self[-1]
+
+    def recursionindex(self):
+        """ return the index of the frame/TracebackItem where recursion
+            originates if appropriate, None if no recursion occurred
+        """
+        cache = {}
+        for i, entry in enumerate(self):
+            # id for the code.raw is needed to work around
+            # the strange metaprogramming in the decorator lib from pypi
+            # which generates code objects that have hash/value equality
+            #XXX needs a test
+            key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
+            #print "checking for recursion at", key
+            l = cache.setdefault(key, [])
+            if l:
+                f = entry.frame
+                loc = f.f_locals
+                for otherloc in l:
+                    if f.is_true(f.eval(co_equal,
+                        __recursioncache_locals_1=loc,
+                        __recursioncache_locals_2=otherloc)):
+                        return i
+            l.append(entry.frame.f_locals)
+        return None
+
+co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
+                   '?', 'eval')
+
+class ExceptionInfo(object):
+    """ wraps sys.exc_info() objects and offers
+        help for navigating the traceback.
+ """ + _striptext = '' + def __init__(self, tup=None, exprinfo=None): + if tup is None: + tup = sys.exc_info() + if exprinfo is None and isinstance(tup[1], AssertionError): + exprinfo = getattr(tup[1], 'msg', None) + if exprinfo is None: + exprinfo = str(tup[1]) + if exprinfo and exprinfo.startswith('assert '): + self._striptext = 'AssertionError: ' + self._excinfo = tup + #: the exception class + self.type = tup[0] + #: the exception instance + self.value = tup[1] + #: the exception raw traceback + self.tb = tup[2] + #: the exception type name + self.typename = self.type.__name__ + #: the exception traceback (py.code.Traceback instance) + self.traceback = py.code.Traceback(self.tb) + + def __repr__(self): + return "" % (self.typename, len(self.traceback)) + + def exconly(self, tryshort=False): + """ return the exception as a string + + when 'tryshort' resolves to True, and the exception is a + py.code._AssertionError, only the actual exception part of + the exception representation is returned (so 'AssertionError: ' is + removed from the beginning) + """ + lines = format_exception_only(self.type, self.value) + text = ''.join(lines) + text = text.rstrip() + if tryshort: + if text.startswith(self._striptext): + text = text[len(self._striptext):] + return text + + def errisinstance(self, exc): + """ return True if the exception is an instance of exc """ + return isinstance(self.value, exc) + + def _getreprcrash(self): + exconly = self.exconly(tryshort=True) + entry = self.traceback.getcrashentry() + path, lineno = entry.frame.code.raw.co_filename, entry.lineno + return ReprFileLocation(path, lineno+1, exconly) + + def getrepr(self, showlocals=False, style="long", + abspath=False, tbfilter=True, funcargs=False): + """ return str()able representation of this exception info. + showlocals: show locals per traceback entry + style: long|short|no|native traceback style + tbfilter: hide entries (where __tracebackhide__ is true) + + in case of style==native, tbfilter and showlocals is ignored. + """ + if style == 'native': + return ReprExceptionInfo(ReprTracebackNative( + py.std.traceback.format_exception( + self.type, + self.value, + self.traceback[0]._rawentry, + )), self._getreprcrash()) + + fmt = FormattedExcinfo(showlocals=showlocals, style=style, + abspath=abspath, tbfilter=tbfilter, funcargs=funcargs) + return fmt.repr_excinfo(self) + + def __str__(self): + entry = self.traceback[-1] + loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly()) + return str(loc) + + def __unicode__(self): + entry = self.traceback[-1] + loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly()) + return unicode(loc) + + +class FormattedExcinfo(object): + """ presenting information about failing Functions and Generators. 
""" + # for traceback entries + flow_marker = ">" + fail_marker = "E" + + def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False): + self.showlocals = showlocals + self.style = style + self.tbfilter = tbfilter + self.funcargs = funcargs + self.abspath = abspath + self.astcache = {} + + def _getindent(self, source): + # figure out indent for given source + try: + s = str(source.getstatement(len(source)-1)) + except KeyboardInterrupt: + raise + except: + try: + s = str(source[-1]) + except KeyboardInterrupt: + raise + except: + return 0 + return 4 + (len(s) - len(s.lstrip())) + + def _getentrysource(self, entry): + source = entry.getsource(self.astcache) + if source is not None: + source = source.deindent() + return source + + def _saferepr(self, obj): + return py.io.saferepr(obj) + + def repr_args(self, entry): + if self.funcargs: + args = [] + for argname, argvalue in entry.frame.getargs(var=True): + args.append((argname, self._saferepr(argvalue))) + return ReprFuncArgs(args) + + def get_source(self, source, line_index=-1, excinfo=None, short=False): + """ return formatted and marked up source lines. """ + lines = [] + if source is None or line_index >= len(source.lines): + source = py.code.Source("???") + line_index = 0 + if line_index < 0: + line_index += len(source) + space_prefix = " " + if short: + lines.append(space_prefix + source.lines[line_index].strip()) + else: + for line in source.lines[:line_index]: + lines.append(space_prefix + line) + lines.append(self.flow_marker + " " + source.lines[line_index]) + for line in source.lines[line_index+1:]: + lines.append(space_prefix + line) + if excinfo is not None: + indent = 4 if short else self._getindent(source) + lines.extend(self.get_exconly(excinfo, indent=indent, markall=True)) + return lines + + def get_exconly(self, excinfo, indent=4, markall=False): + lines = [] + indent = " " * indent + # get the real exception information out + exlines = excinfo.exconly(tryshort=True).split('\n') + failindent = self.fail_marker + indent[1:] + for line in exlines: + lines.append(failindent + line) + if not markall: + failindent = indent + return lines + + def repr_locals(self, locals): + if self.showlocals: + lines = [] + keys = [loc for loc in locals if loc[0] != "@"] + keys.sort() + for name in keys: + value = locals[name] + if name == '__builtins__': + lines.append("__builtins__ = ") + else: + # This formatting could all be handled by the + # _repr() function, which is only reprlib.Repr in + # disguise, so is very configurable. 
+ str_repr = self._saferepr(value) + #if len(str_repr) < 70 or not isinstance(value, + # (list, tuple, dict)): + lines.append("%-10s = %s" %(name, str_repr)) + #else: + # self._line("%-10s =\\" % (name,)) + # # XXX + # py.std.pprint.pprint(value, stream=self.excinfowriter) + return ReprLocals(lines) + + def repr_traceback_entry(self, entry, excinfo=None): + source = self._getentrysource(entry) + if source is None: + source = py.code.Source("???") + line_index = 0 + else: + # entry.getfirstlinesource() can be -1, should be 0 on jython + line_index = entry.lineno - max(entry.getfirstlinesource(), 0) + + lines = [] + style = entry._repr_style + if style is None: + style = self.style + if style in ("short", "long"): + short = style == "short" + reprargs = self.repr_args(entry) if not short else None + s = self.get_source(source, line_index, excinfo, short=short) + lines.extend(s) + if short: + message = "in %s" %(entry.name) + else: + message = excinfo and excinfo.typename or "" + path = self._makepath(entry.path) + filelocrepr = ReprFileLocation(path, entry.lineno+1, message) + localsrepr = None + if not short: + localsrepr = self.repr_locals(entry.locals) + return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style) + if excinfo: + lines.extend(self.get_exconly(excinfo, indent=4)) + return ReprEntry(lines, None, None, None, style) + + def _makepath(self, path): + if not self.abspath: + try: + np = py.path.local().bestrelpath(path) + except OSError: + return path + if len(np) < len(str(path)): + path = np + return path + + def repr_traceback(self, excinfo): + traceback = excinfo.traceback + if self.tbfilter: + traceback = traceback.filter() + recursionindex = None + if excinfo.errisinstance(RuntimeError): + if "maximum recursion depth exceeded" in str(excinfo.value): + recursionindex = traceback.recursionindex() + last = traceback[-1] + entries = [] + extraline = None + for index, entry in enumerate(traceback): + einfo = (last == entry) and excinfo or None + reprentry = self.repr_traceback_entry(entry, einfo) + entries.append(reprentry) + if index == recursionindex: + extraline = "!!! Recursion detected (same locals & position)" + break + return ReprTraceback(entries, extraline, style=self.style) + + def repr_excinfo(self, excinfo): + reprtraceback = self.repr_traceback(excinfo) + reprcrash = excinfo._getreprcrash() + return ReprExceptionInfo(reprtraceback, reprcrash) + +class TerminalRepr: + def __str__(self): + s = self.__unicode__() + if sys.version_info[0] < 3: + s = s.encode('utf-8') + return s + + def __unicode__(self): + # FYI this is called from pytest-xdist's serialization of exception + # information. 
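+        # [editor's note] usage sketch (assumed call pattern, not from the
+        # original): every TerminalRepr subclass renders itself by writing
+        # to an in-memory terminal writer, so e.g.
+        #
+        #     rep = excinfo.getrepr(style="short")   # a ReprExceptionInfo
+        #     text = str(rep)                        # ends up here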
+ io = py.io.TextIO() + tw = py.io.TerminalWriter(file=io) + self.toterminal(tw) + return io.getvalue().strip() + + def __repr__(self): + return "<%s instance at %0x>" %(self.__class__, id(self)) + + +class ReprExceptionInfo(TerminalRepr): + def __init__(self, reprtraceback, reprcrash): + self.reprtraceback = reprtraceback + self.reprcrash = reprcrash + self.sections = [] + + def addsection(self, name, content, sep="-"): + self.sections.append((name, content, sep)) + + def toterminal(self, tw): + self.reprtraceback.toterminal(tw) + for name, content, sep in self.sections: + tw.sep(sep, name) + tw.line(content) + +class ReprTraceback(TerminalRepr): + entrysep = "_ " + + def __init__(self, reprentries, extraline, style): + self.reprentries = reprentries + self.extraline = extraline + self.style = style + + def toterminal(self, tw): + # the entries might have different styles + last_style = None + for i, entry in enumerate(self.reprentries): + if entry.style == "long": + tw.line("") + entry.toterminal(tw) + if i < len(self.reprentries) - 1: + next_entry = self.reprentries[i+1] + if entry.style == "long" or \ + entry.style == "short" and next_entry.style == "long": + tw.sep(self.entrysep) + + if self.extraline: + tw.line(self.extraline) + +class ReprTracebackNative(ReprTraceback): + def __init__(self, tblines): + self.style = "native" + self.reprentries = [ReprEntryNative(tblines)] + self.extraline = None + +class ReprEntryNative(TerminalRepr): + style = "native" + + def __init__(self, tblines): + self.lines = tblines + + def toterminal(self, tw): + tw.write("".join(self.lines)) + +class ReprEntry(TerminalRepr): + localssep = "_ " + + def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style): + self.lines = lines + self.reprfuncargs = reprfuncargs + self.reprlocals = reprlocals + self.reprfileloc = filelocrepr + self.style = style + + def toterminal(self, tw): + if self.style == "short": + self.reprfileloc.toterminal(tw) + for line in self.lines: + red = line.startswith("E ") + tw.line(line, bold=True, red=red) + #tw.line("") + return + if self.reprfuncargs: + self.reprfuncargs.toterminal(tw) + for line in self.lines: + red = line.startswith("E ") + tw.line(line, bold=True, red=red) + if self.reprlocals: + #tw.sep(self.localssep, "Locals") + tw.line("") + self.reprlocals.toterminal(tw) + if self.reprfileloc: + if self.lines: + tw.line("") + self.reprfileloc.toterminal(tw) + + def __str__(self): + return "%s\n%s\n%s" % ("\n".join(self.lines), + self.reprlocals, + self.reprfileloc) + +class ReprFileLocation(TerminalRepr): + def __init__(self, path, lineno, message): + self.path = str(path) + self.lineno = lineno + self.message = message + + def toterminal(self, tw): + # filename and lineno output for each entry, + # using an output format that most editors unterstand + msg = self.message + i = msg.find("\n") + if i != -1: + msg = msg[:i] + tw.line("%s:%s: %s" %(self.path, self.lineno, msg)) + +class ReprLocals(TerminalRepr): + def __init__(self, lines): + self.lines = lines + + def toterminal(self, tw): + for line in self.lines: + tw.line(line) + +class ReprFuncArgs(TerminalRepr): + def __init__(self, args): + self.args = args + + def toterminal(self, tw): + if self.args: + linesofar = "" + for name, value in self.args: + ns = "%s = %s" %(name, value) + if len(ns) + len(linesofar) + 2 > tw.fullwidth: + if linesofar: + tw.line(linesofar) + linesofar = ns + else: + if linesofar: + linesofar += ", " + ns + else: + linesofar = ns + if linesofar: + tw.line(linesofar) + tw.line("") + + + 
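+# [editor's note] hypothetical usage sketch, not part of the vendored file.
+# It only uses names defined in this module (patch_builtins/unpatch_builtins
+# below plus the reinterpreting AssertionError from py._code.assertion); the
+# function name itself is invented for illustration.  On the Python 2
+# interpreters this code targets, a plain `assert` looks AssertionError up
+# in the builtins, so patching the builtin swaps in the explaining class.
+def _editor_demo_patched_assert():
+    patch_builtins(assertion=True, compile=False)
+    try:
+        left, right = 1, 2
+        assert left == right
+    except AssertionError:
+        # msg carries the reinterpreted explanation, e.g. "assert 1 == 2"
+        return getattr(sys.exc_info()[1], 'msg', None)
+    finally:
+        unpatch_builtins(assertion=True, compile=False)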
+oldbuiltins = {} + +def patch_builtins(assertion=True, compile=True): + """ put compile and AssertionError builtins to Python's builtins. """ + if assertion: + from py._code import assertion + l = oldbuiltins.setdefault('AssertionError', []) + l.append(py.builtin.builtins.AssertionError) + py.builtin.builtins.AssertionError = assertion.AssertionError + if compile: + l = oldbuiltins.setdefault('compile', []) + l.append(py.builtin.builtins.compile) + py.builtin.builtins.compile = py.code.compile + +def unpatch_builtins(assertion=True, compile=True): + """ remove compile and AssertionError builtins from Python builtins. """ + if assertion: + py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop() + if compile: + py.builtin.builtins.compile = oldbuiltins['compile'].pop() + +def getrawcode(obj, trycall=True): + """ return code object for given function. """ + try: + return obj.__code__ + except AttributeError: + obj = getattr(obj, 'im_func', obj) + obj = getattr(obj, 'func_code', obj) + obj = getattr(obj, 'f_code', obj) + obj = getattr(obj, '__code__', obj) + if trycall and not hasattr(obj, 'co_firstlineno'): + if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj): + x = getrawcode(obj.__call__, trycall=False) + if hasattr(x, 'co_firstlineno'): + return x + return obj + diff --git a/tests/_lib_vendors/py/_code/source.py b/tests/_lib_vendors/py/_code/source.py new file mode 100644 index 0000000..3a648e6 --- /dev/null +++ b/tests/_lib_vendors/py/_code/source.py @@ -0,0 +1,419 @@ +from __future__ import generators + +from bisect import bisect_right +import sys +import inspect, tokenize +import py +from types import ModuleType +cpy_compile = compile + +try: + import _ast + from _ast import PyCF_ONLY_AST as _AST_FLAG +except ImportError: + _AST_FLAG = 0 + _ast = None + + +class Source(object): + """ a immutable object holding a source code fragment, + possibly deindenting it. + """ + _compilecounter = 0 + def __init__(self, *parts, **kwargs): + self.lines = lines = [] + de = kwargs.get('deindent', True) + rstrip = kwargs.get('rstrip', True) + for part in parts: + if not part: + partlines = [] + if isinstance(part, Source): + partlines = part.lines + elif isinstance(part, (tuple, list)): + partlines = [x.rstrip("\n") for x in part] + elif isinstance(part, py.builtin._basestring): + partlines = part.split('\n') + if rstrip: + while partlines: + if partlines[-1].strip(): + break + partlines.pop() + else: + partlines = getsource(part, deindent=de).lines + if de: + partlines = deindent(partlines) + lines.extend(partlines) + + def __eq__(self, other): + try: + return self.lines == other.lines + except AttributeError: + if isinstance(other, str): + return str(self) == other + return False + + def __getitem__(self, key): + if isinstance(key, int): + return self.lines[key] + else: + if key.step not in (None, 1): + raise IndexError("cannot slice a Source with a step") + return self.__getslice__(key.start, key.stop) + + def __len__(self): + return len(self.lines) + + def __getslice__(self, start, end): + newsource = Source() + newsource.lines = self.lines[start:end] + return newsource + + def strip(self): + """ return new source object with trailing + and leading blank lines removed. 
+ """ + start, end = 0, len(self) + while start < end and not self.lines[start].strip(): + start += 1 + while end > start and not self.lines[end-1].strip(): + end -= 1 + source = Source() + source.lines[:] = self.lines[start:end] + return source + + def putaround(self, before='', after='', indent=' ' * 4): + """ return a copy of the source object with + 'before' and 'after' wrapped around it. + """ + before = Source(before) + after = Source(after) + newsource = Source() + lines = [ (indent + line) for line in self.lines] + newsource.lines = before.lines + lines + after.lines + return newsource + + def indent(self, indent=' ' * 4): + """ return a copy of the source object with + all lines indented by the given indent-string. + """ + newsource = Source() + newsource.lines = [(indent+line) for line in self.lines] + return newsource + + def getstatement(self, lineno, assertion=False): + """ return Source statement which contains the + given linenumber (counted from 0). + """ + start, end = self.getstatementrange(lineno, assertion) + return self[start:end] + + def getstatementrange(self, lineno, assertion=False): + """ return (start, end) tuple which spans the minimal + statement region which containing the given lineno. + """ + if not (0 <= lineno < len(self)): + raise IndexError("lineno out of range") + ast, start, end = getstatementrange_ast(lineno, self) + return start, end + + def deindent(self, offset=None): + """ return a new source object deindented by offset. + If offset is None then guess an indentation offset from + the first non-blank line. Subsequent lines which have a + lower indentation offset will be copied verbatim as + they are assumed to be part of multilines. + """ + # XXX maybe use the tokenizer to properly handle multiline + # strings etc.pp? + newsource = Source() + newsource.lines[:] = deindent(self.lines, offset) + return newsource + + def isparseable(self, deindent=True): + """ return True if source is parseable, heuristically + deindenting it by default. + """ + try: + import parser + except ImportError: + syntax_checker = lambda x: compile(x, 'asd', 'exec') + else: + syntax_checker = parser.suite + + if deindent: + source = str(self.deindent()) + else: + source = str(self) + try: + #compile(source+'\n', "x", "exec") + syntax_checker(source+'\n') + except KeyboardInterrupt: + raise + except Exception: + return False + else: + return True + + def __str__(self): + return "\n".join(self.lines) + + def compile(self, filename=None, mode='exec', + flag=generators.compiler_flag, + dont_inherit=0, _genframe=None): + """ return compiled code object. if filename is None + invent an artificial filename which displays + the source/line position of the caller frame. 
+ """ + if not filename or py.path.local(filename).check(file=0): + if _genframe is None: + _genframe = sys._getframe(1) # the caller + fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno + base = "<%d-codegen " % self._compilecounter + self.__class__._compilecounter += 1 + if not filename: + filename = base + '%s:%d>' % (fn, lineno) + else: + filename = base + '%r %s:%d>' % (filename, fn, lineno) + source = "\n".join(self.lines) + '\n' + try: + co = cpy_compile(source, filename, mode, flag) + except SyntaxError: + ex = sys.exc_info()[1] + # re-represent syntax errors from parsing python strings + msglines = self.lines[:ex.lineno] + if ex.offset: + msglines.append(" "*ex.offset + '^') + msglines.append("(code was compiled probably from here: %s)" % filename) + newex = SyntaxError('\n'.join(msglines)) + newex.offset = ex.offset + newex.lineno = ex.lineno + newex.text = ex.text + raise newex + else: + if flag & _AST_FLAG: + return co + lines = [(x + "\n") for x in self.lines] + if sys.version_info[0] >= 3: + # XXX py3's inspect.getsourcefile() checks for a module + # and a pep302 __loader__ ... we don't have a module + # at code compile-time so we need to fake it here + m = ModuleType("_pycodecompile_pseudo_module") + py.std.inspect.modulesbyfile[filename] = None + py.std.sys.modules[None] = m + m.__loader__ = 1 + py.std.linecache.cache[filename] = (1, None, lines, filename) + return co + +# +# public API shortcut functions +# + +def compile_(source, filename=None, mode='exec', flags= + generators.compiler_flag, dont_inherit=0): + """ compile the given source to a raw code object, + and maintain an internal cache which allows later + retrieval of the source code for the code object + and any recursively created code objects. + """ + if _ast is not None and isinstance(source, _ast.AST): + # XXX should Source support having AST? + return cpy_compile(source, filename, mode, flags, dont_inherit) + _genframe = sys._getframe(1) # the caller + s = Source(source) + co = s.compile(filename, mode, flags, _genframe=_genframe) + return co + + +def getfslineno(obj): + """ Return source location (path, lineno) for the given object. 
+ If the source cannot be determined return ("", -1) + """ + try: + code = py.code.Code(obj) + except TypeError: + try: + fn = (py.std.inspect.getsourcefile(obj) or + py.std.inspect.getfile(obj)) + except TypeError: + return "", -1 + + fspath = fn and py.path.local(fn) or None + lineno = -1 + if fspath: + try: + _, lineno = findsource(obj) + except IOError: + pass + else: + fspath = code.path + lineno = code.firstlineno + assert isinstance(lineno, int) + return fspath, lineno + +# +# helper functions +# + +def findsource(obj): + try: + sourcelines, lineno = py.std.inspect.findsource(obj) + except py.builtin._sysex: + raise + except: + return None, -1 + source = Source() + source.lines = [line.rstrip() for line in sourcelines] + return source, lineno + +def getsource(obj, **kwargs): + obj = py.code.getrawcode(obj) + try: + strsrc = inspect.getsource(obj) + except IndentationError: + strsrc = "\"Buggy python version consider upgrading, cannot get source\"" + assert isinstance(strsrc, str) + return Source(strsrc, **kwargs) + +def deindent(lines, offset=None): + if offset is None: + for line in lines: + line = line.expandtabs() + s = line.lstrip() + if s: + offset = len(line)-len(s) + break + else: + offset = 0 + if offset == 0: + return list(lines) + newlines = [] + def readline_generator(lines): + for line in lines: + yield line + '\n' + while True: + yield '' + + it = readline_generator(lines) + + try: + for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)): + if sline > len(lines): + break # End of input reached + if sline > len(newlines): + line = lines[sline - 1].expandtabs() + if line.lstrip() and line[:offset].isspace(): + line = line[offset:] # Deindent + newlines.append(line) + + for i in range(sline, eline): + # Don't deindent continuing lines of + # multiline tokens (i.e. multiline strings) + newlines.append(lines[i]) + except (IndentationError, tokenize.TokenError): + pass + # Add any lines we didn't see. E.g. if an exception was raised. 
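+    # [editor's note] illustrative example, not part of the vendored file:
+    #
+    #     deindent(["    a = 1", "    b = 2"])  ->  ["a = 1", "b = 2"]
+    #
+    # lines indented less than the detected offset are copied verbatim, so
+    # continuation lines of multiline strings survive unchanged.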
+ newlines.extend(lines[len(newlines):]) + return newlines + + +def get_statement_startend2(lineno, node): + import ast + # flatten all statements and except handlers into one lineno-list + # AST's line numbers start indexing at 1 + l = [] + for x in ast.walk(node): + if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler): + l.append(x.lineno - 1) + for name in "finalbody", "orelse": + val = getattr(x, name, None) + if val: + # treat the finally/orelse part as its own statement + l.append(val[0].lineno - 1 - 1) + l.sort() + insert_index = bisect_right(l, lineno) + start = l[insert_index - 1] + if insert_index >= len(l): + end = None + else: + end = l[insert_index] + return start, end + + +def getstatementrange_ast(lineno, source, assertion=False, astnode=None): + if astnode is None: + content = str(source) + if sys.version_info < (2,7): + content += "\n" + try: + astnode = compile(content, "source", "exec", 1024) # 1024 for AST + except ValueError: + start, end = getstatementrange_old(lineno, source, assertion) + return None, start, end + start, end = get_statement_startend2(lineno, astnode) + # we need to correct the end: + # - ast-parsing strips comments + # - there might be empty lines + # - we might have lesser indented code blocks at the end + if end is None: + end = len(source.lines) + + if end > start + 1: + # make sure we don't span differently indented code blocks + # by using the BlockFinder helper used which inspect.getsource() uses itself + block_finder = inspect.BlockFinder() + # if we start with an indented line, put blockfinder to "started" mode + block_finder.started = source.lines[start][0].isspace() + it = ((x + "\n") for x in source.lines[start:end]) + try: + for tok in tokenize.generate_tokens(lambda: next(it)): + block_finder.tokeneater(*tok) + except (inspect.EndOfBlock, IndentationError): + end = block_finder.last + start + except Exception: + pass + + # the end might still point to a comment or empty line, correct it + while end: + line = source.lines[end - 1].lstrip() + if line.startswith("#") or not line: + end -= 1 + else: + break + return astnode, start, end + + +def getstatementrange_old(lineno, source, assertion=False): + """ return (start, end) tuple which spans the minimal + statement region which containing the given lineno. + raise an IndexError if no such statementrange can be found. + """ + # XXX this logic is only used on python2.4 and below + # 1. find the start of the statement + from codeop import compile_command + for start in range(lineno, -1, -1): + if assertion: + line = source.lines[start] + # the following lines are not fully tested, change with care + if 'super' in line and 'self' in line and '__init__' in line: + raise IndexError("likely a subclass") + if "assert" not in line and "raise" not in line: + continue + trylines = source.lines[start:lineno+1] + # quick hack to prepare parsing an indented line with + # compile_command() (which errors on "return" outside defs) + trylines.insert(0, 'def xxx():') + trysource = '\n '.join(trylines) + # ^ space here + try: + compile_command(trysource) + except (SyntaxError, OverflowError, ValueError): + continue + + # 2. 
find the end of the statement + for end in range(lineno+1, len(source)+1): + trysource = source[start:end] + if trysource.isparseable(): + return start, end + raise SyntaxError("no valid source range around line %d " % (lineno,)) + + diff --git a/tests/_lib_vendors/py/_error.py b/tests/_lib_vendors/py/_error.py new file mode 100644 index 0000000..550fb52 --- /dev/null +++ b/tests/_lib_vendors/py/_error.py @@ -0,0 +1,88 @@ +""" +create errno-specific classes for IO or os calls. + +""" +import sys, os, errno + +class Error(EnvironmentError): + def __repr__(self): + return "%s.%s %r: %s " %(self.__class__.__module__, + self.__class__.__name__, + self.__class__.__doc__, + " ".join(map(str, self.args)), + #repr(self.args) + ) + + def __str__(self): + s = "[%s]: %s" %(self.__class__.__doc__, + " ".join(map(str, self.args)), + ) + return s + +_winerrnomap = { + 2: errno.ENOENT, + 3: errno.ENOENT, + 17: errno.EEXIST, + 13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailiable + 22: errno.ENOTDIR, + 20: errno.ENOTDIR, + 267: errno.ENOTDIR, + 5: errno.EACCES, # anything better? +} + +class ErrorMaker(object): + """ lazily provides Exception classes for each possible POSIX errno + (as defined per the 'errno' module). All such instances + subclass EnvironmentError. + """ + Error = Error + _errno2class = {} + + def __getattr__(self, name): + if name[0] == "_": + raise AttributeError(name) + eno = getattr(errno, name) + cls = self._geterrnoclass(eno) + setattr(self, name, cls) + return cls + + def _geterrnoclass(self, eno): + try: + return self._errno2class[eno] + except KeyError: + clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,)) + errorcls = type(Error)(clsname, (Error,), + {'__module__':'py.error', + '__doc__': os.strerror(eno)}) + self._errno2class[eno] = errorcls + return errorcls + + def checked_call(self, func, *args, **kwargs): + """ call a function and raise an errno-exception if applicable. """ + __tracebackhide__ = True + try: + return func(*args, **kwargs) + except self.Error: + raise + except (OSError, EnvironmentError): + cls, value, tb = sys.exc_info() + if not hasattr(value, 'errno'): + raise + __tracebackhide__ = False + errno = value.errno + try: + if not isinstance(value, WindowsError): + raise NameError + except NameError: + # we are not on Windows, or we got a proper OSError + cls = self._geterrnoclass(errno) + else: + try: + cls = self._geterrnoclass(_winerrnomap[errno]) + except KeyError: + raise value + raise cls("%s%r" % (func.__name__, args)) + __tracebackhide__ = True + + +error = ErrorMaker() diff --git a/tests/_lib_vendors/py/_iniconfig.py b/tests/_lib_vendors/py/_iniconfig.py new file mode 100644 index 0000000..92b50bd --- /dev/null +++ b/tests/_lib_vendors/py/_iniconfig.py @@ -0,0 +1,162 @@ +""" brain-dead simple parser for ini-style files. 
+(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed +""" +__version__ = "0.2.dev2" + +__all__ = ['IniConfig', 'ParseError'] + +COMMENTCHARS = "#;" + +class ParseError(Exception): + def __init__(self, path, lineno, msg): + Exception.__init__(self, path, lineno, msg) + self.path = path + self.lineno = lineno + self.msg = msg + + def __str__(self): + return "%s:%s: %s" %(self.path, self.lineno+1, self.msg) + +class SectionWrapper(object): + def __init__(self, config, name): + self.config = config + self.name = name + + def lineof(self, name): + return self.config.lineof(self.name, name) + + def get(self, key, default=None, convert=str): + return self.config.get(self.name, key, convert=convert, default=default) + + def __getitem__(self, key): + return self.config.sections[self.name][key] + + def __iter__(self): + section = self.config.sections.get(self.name, []) + def lineof(key): + return self.config.lineof(self.name, key) + for name in sorted(section, key=lineof): + yield name + + def items(self): + for name in self: + yield name, self[name] + + +class IniConfig(object): + def __init__(self, path, data=None): + self.path = str(path) # convenience + if data is None: + f = open(self.path) + try: + tokens = self._parse(iter(f)) + finally: + f.close() + else: + tokens = self._parse(data.splitlines(True)) + + self._sources = {} + self.sections = {} + + for lineno, section, name, value in tokens: + if section is None: + self._raise(lineno, 'no section header defined') + self._sources[section, name] = lineno + if name is None: + if section in self.sections: + self._raise(lineno, 'duplicate section %r'%(section, )) + self.sections[section] = {} + else: + if name in self.sections[section]: + self._raise(lineno, 'duplicate name %r'%(name, )) + self.sections[section][name] = value + + def _raise(self, lineno, msg): + raise ParseError(self.path, lineno, msg) + + def _parse(self, line_iter): + result = [] + section = None + for lineno, line in enumerate(line_iter): + name, data = self._parseline(line, lineno) + # new value + if name is not None and data is not None: + result.append((lineno, section, name, data)) + # new section + elif name is not None and data is None: + if not name: + self._raise(lineno, 'empty section name') + section = name + result.append((lineno, section, None, None)) + # continuation + elif name is None and data is not None: + if not result: + self._raise(lineno, 'unexpected value continuation') + last = result.pop() + last_name, last_data = last[-2:] + if last_name is None: + self._raise(lineno, 'unexpected value continuation') + + if last_data: + data = '%s\n%s' % (last_data, data) + result.append(last[:-1] + (data,)) + return result + + def _parseline(self, line, lineno): + # blank lines + if iscommentline(line): + line = "" + else: + line = line.rstrip() + if not line: + return None, None + # section + if line[0] == '[': + realline = line + for c in COMMENTCHARS: + line = line.split(c)[0].rstrip() + if line[-1] == "]": + return line[1:-1], None + return None, realline.strip() + # value + elif not line[0].isspace(): + try: + name, value = line.split('=', 1) + if ":" in name: + raise ValueError() + except ValueError: + try: + name, value = line.split(":", 1) + except ValueError: + self._raise(lineno, 'unexpected line: %r' % line) + return name.strip(), value.strip() + # continuation + else: + return None, line.strip() + + def lineof(self, section, name=None): + lineno = self._sources.get((section, name)) + if lineno is not None: + return lineno + 1 + + def get(self, section, 
name, default=None, convert=str): + try: + return convert(self.sections[section][name]) + except KeyError: + return default + + def __getitem__(self, name): + if name not in self.sections: + raise KeyError(name) + return SectionWrapper(self, name) + + def __iter__(self): + for name in sorted(self.sections, key=self.lineof): + yield SectionWrapper(self, name) + + def __contains__(self, arg): + return arg in self.sections + +def iscommentline(line): + c = line.lstrip()[:1] + return c in COMMENTCHARS diff --git a/tests/_lib_vendors/py/_io/__init__.py b/tests/_lib_vendors/py/_io/__init__.py new file mode 100644 index 0000000..835f01f --- /dev/null +++ b/tests/_lib_vendors/py/_io/__init__.py @@ -0,0 +1 @@ +""" input/output helping """ diff --git a/tests/_lib_vendors/py/_io/capture.py b/tests/_lib_vendors/py/_io/capture.py new file mode 100644 index 0000000..bc157ed --- /dev/null +++ b/tests/_lib_vendors/py/_io/capture.py @@ -0,0 +1,371 @@ +import os +import sys +import py +import tempfile + +try: + from io import StringIO +except ImportError: + from StringIO import StringIO + +if sys.version_info < (3,0): + class TextIO(StringIO): + def write(self, data): + if not isinstance(data, unicode): + data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace') + StringIO.write(self, data) +else: + TextIO = StringIO + +try: + from io import BytesIO +except ImportError: + class BytesIO(StringIO): + def write(self, data): + if isinstance(data, unicode): + raise TypeError("not a byte value: %r" %(data,)) + StringIO.write(self, data) + +patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'} + +class FDCapture: + """ Capture IO to/from a given os-level filedescriptor. """ + + def __init__(self, targetfd, tmpfile=None, now=True, patchsys=False): + """ save targetfd descriptor, and open a new + temporary file there. If no tmpfile is + specified a tempfile.Tempfile() will be opened + in text mode. 
+ """ + self.targetfd = targetfd + if tmpfile is None and targetfd != 0: + f = tempfile.TemporaryFile('wb+') + tmpfile = dupfile(f, encoding="UTF-8") + f.close() + self.tmpfile = tmpfile + self._savefd = os.dup(self.targetfd) + if patchsys: + self._oldsys = getattr(sys, patchsysdict[targetfd]) + if now: + self.start() + + def start(self): + try: + os.fstat(self._savefd) + except OSError: + raise ValueError("saved filedescriptor not valid, " + "did you call start() twice?") + if self.targetfd == 0 and not self.tmpfile: + fd = os.open(devnullpath, os.O_RDONLY) + os.dup2(fd, 0) + os.close(fd) + if hasattr(self, '_oldsys'): + setattr(sys, patchsysdict[self.targetfd], DontReadFromInput()) + else: + os.dup2(self.tmpfile.fileno(), self.targetfd) + if hasattr(self, '_oldsys'): + setattr(sys, patchsysdict[self.targetfd], self.tmpfile) + + def done(self): + """ unpatch and clean up, returns the self.tmpfile (file object) + """ + os.dup2(self._savefd, self.targetfd) + os.close(self._savefd) + if self.targetfd != 0: + self.tmpfile.seek(0) + if hasattr(self, '_oldsys'): + setattr(sys, patchsysdict[self.targetfd], self._oldsys) + return self.tmpfile + + def writeorg(self, data): + """ write a string to the original file descriptor + """ + tempfp = tempfile.TemporaryFile() + try: + os.dup2(self._savefd, tempfp.fileno()) + tempfp.write(data) + finally: + tempfp.close() + + +def dupfile(f, mode=None, buffering=0, raising=False, encoding=None): + """ return a new open file object that's a duplicate of f + + mode is duplicated if not given, 'buffering' controls + buffer size (defaulting to no buffering) and 'raising' + defines whether an exception is raised when an incompatible + file object is passed in (if raising is False, the file + object itself will be returned) + """ + try: + fd = f.fileno() + mode = mode or f.mode + except AttributeError: + if raising: + raise + return f + newfd = os.dup(fd) + if sys.version_info >= (3,0): + if encoding is not None: + mode = mode.replace("b", "") + buffering = True + return os.fdopen(newfd, mode, buffering, encoding, closefd=True) + else: + f = os.fdopen(newfd, mode, buffering) + if encoding is not None: + return EncodedFile(f, encoding) + return f + +class EncodedFile(object): + def __init__(self, _stream, encoding): + self._stream = _stream + self.encoding = encoding + + def write(self, obj): + if isinstance(obj, unicode): + obj = obj.encode(self.encoding) + elif isinstance(obj, str): + pass + else: + obj = str(obj) + self._stream.write(obj) + + def writelines(self, linelist): + data = ''.join(linelist) + self.write(data) + + def __getattr__(self, name): + return getattr(self._stream, name) + +class Capture(object): + def call(cls, func, *args, **kwargs): + """ return a (res, out, err) tuple where + out and err represent the output/error output + during function execution. + call the given function with args/kwargs + and capture output/error during its execution. + """ + so = cls() + try: + res = func(*args, **kwargs) + finally: + out, err = so.reset() + return res, out, err + call = classmethod(call) + + def reset(self): + """ reset sys.stdout/stderr and return captured output as strings. 
""" + if hasattr(self, '_reset'): + raise ValueError("was already reset") + self._reset = True + outfile, errfile = self.done(save=False) + out, err = "", "" + if outfile and not outfile.closed: + out = outfile.read() + outfile.close() + if errfile and errfile != outfile and not errfile.closed: + err = errfile.read() + errfile.close() + return out, err + + def suspend(self): + """ return current snapshot captures, memorize tempfiles. """ + outerr = self.readouterr() + outfile, errfile = self.done() + return outerr + + +class StdCaptureFD(Capture): + """ This class allows to capture writes to FD1 and FD2 + and may connect a NULL file to FD0 (and prevent + reads from sys.stdin). If any of the 0,1,2 file descriptors + is invalid it will not be captured. + """ + def __init__(self, out=True, err=True, mixed=False, + in_=True, patchsys=True, now=True): + self._options = { + "out": out, + "err": err, + "mixed": mixed, + "in_": in_, + "patchsys": patchsys, + "now": now, + } + self._save() + if now: + self.startall() + + def _save(self): + in_ = self._options['in_'] + out = self._options['out'] + err = self._options['err'] + mixed = self._options['mixed'] + patchsys = self._options['patchsys'] + if in_: + try: + self.in_ = FDCapture(0, tmpfile=None, now=False, + patchsys=patchsys) + except OSError: + pass + if out: + tmpfile = None + if hasattr(out, 'write'): + tmpfile = out + try: + self.out = FDCapture(1, tmpfile=tmpfile, + now=False, patchsys=patchsys) + self._options['out'] = self.out.tmpfile + except OSError: + pass + if err: + if out and mixed: + tmpfile = self.out.tmpfile + elif hasattr(err, 'write'): + tmpfile = err + else: + tmpfile = None + try: + self.err = FDCapture(2, tmpfile=tmpfile, + now=False, patchsys=patchsys) + self._options['err'] = self.err.tmpfile + except OSError: + pass + + def startall(self): + if hasattr(self, 'in_'): + self.in_.start() + if hasattr(self, 'out'): + self.out.start() + if hasattr(self, 'err'): + self.err.start() + + def resume(self): + """ resume capturing with original temp files. """ + self.startall() + + def done(self, save=True): + """ return (outfile, errfile) and stop capturing. """ + outfile = errfile = None + if hasattr(self, 'out') and not self.out.tmpfile.closed: + outfile = self.out.done() + if hasattr(self, 'err') and not self.err.tmpfile.closed: + errfile = self.err.done() + if hasattr(self, 'in_'): + tmpfile = self.in_.done() + if save: + self._save() + return outfile, errfile + + def readouterr(self): + """ return snapshot value of stdout/stderr capturings. """ + if hasattr(self, "out"): + out = self._readsnapshot(self.out.tmpfile) + else: + out = "" + if hasattr(self, "err"): + err = self._readsnapshot(self.err.tmpfile) + else: + err = "" + return [out, err] + + def _readsnapshot(self, f): + f.seek(0) + res = f.read() + enc = getattr(f, "encoding", None) + if enc: + res = py.builtin._totext(res, enc, "replace") + f.truncate(0) + f.seek(0) + return res + + +class StdCapture(Capture): + """ This class allows to capture writes to sys.stdout|stderr "in-memory" + and will raise errors on tries to read from sys.stdin. It only + modifies sys.stdout|stderr|stdin attributes and does not + touch underlying File Descriptors (use StdCaptureFD for that). 
+ """ + def __init__(self, out=True, err=True, in_=True, mixed=False, now=True): + self._oldout = sys.stdout + self._olderr = sys.stderr + self._oldin = sys.stdin + if out and not hasattr(out, 'file'): + out = TextIO() + self.out = out + if err: + if mixed: + err = out + elif not hasattr(err, 'write'): + err = TextIO() + self.err = err + self.in_ = in_ + if now: + self.startall() + + def startall(self): + if self.out: + sys.stdout = self.out + if self.err: + sys.stderr = self.err + if self.in_: + sys.stdin = self.in_ = DontReadFromInput() + + def done(self, save=True): + """ return (outfile, errfile) and stop capturing. """ + outfile = errfile = None + if self.out and not self.out.closed: + sys.stdout = self._oldout + outfile = self.out + outfile.seek(0) + if self.err and not self.err.closed: + sys.stderr = self._olderr + errfile = self.err + errfile.seek(0) + if self.in_: + sys.stdin = self._oldin + return outfile, errfile + + def resume(self): + """ resume capturing with original temp files. """ + self.startall() + + def readouterr(self): + """ return snapshot value of stdout/stderr capturings. """ + out = err = "" + if self.out: + out = self.out.getvalue() + self.out.truncate(0) + self.out.seek(0) + if self.err: + err = self.err.getvalue() + self.err.truncate(0) + self.err.seek(0) + return out, err + +class DontReadFromInput: + """Temporary stub class. Ideally when stdin is accessed, the + capturing should be turned off, with possibly all data captured + so far sent to the screen. This should be configurable, though, + because in automated test runs it is better to crash than + hang indefinitely. + """ + def read(self, *args): + raise IOError("reading from stdin while output is captured") + readline = read + readlines = read + __iter__ = read + + def fileno(self): + raise ValueError("redirected Stdin is pseudofile, has no fileno()") + def isatty(self): + return False + def close(self): + pass + +try: + devnullpath = os.devnull +except AttributeError: + if os.name == 'nt': + devnullpath = 'NUL' + else: + devnullpath = '/dev/null' diff --git a/tests/_lib_vendors/py/_io/saferepr.py b/tests/_lib_vendors/py/_io/saferepr.py new file mode 100644 index 0000000..8518290 --- /dev/null +++ b/tests/_lib_vendors/py/_io/saferepr.py @@ -0,0 +1,71 @@ +import py +import sys + +builtin_repr = repr + +reprlib = py.builtin._tryimport('repr', 'reprlib') + +class SafeRepr(reprlib.Repr): + """ subclass of repr.Repr that limits the resulting size of repr() + and includes information on exceptions raised during the call. + """ + def repr(self, x): + return self._callhelper(reprlib.Repr.repr, self, x) + + def repr_unicode(self, x, level): + # Strictly speaking wrong on narrow builds + def repr(u): + if "'" not in u: + return py.builtin._totext("'%s'") % u + elif '"' not in u: + return py.builtin._totext('"%s"') % u + else: + return py.builtin._totext("'%s'") % u.replace("'", r"\'") + s = repr(x[:self.maxstring]) + if len(s) > self.maxstring: + i = max(0, (self.maxstring-3)//2) + j = max(0, self.maxstring-3-i) + s = repr(x[:i] + x[len(x)-j:]) + s = s[:i] + '...' 
+ s[len(s)-j:] + return s + + def repr_instance(self, x, level): + return self._callhelper(builtin_repr, x) + + def _callhelper(self, call, x, *args): + try: + # Try the vanilla repr and make sure that the result is a string + s = call(x, *args) + except py.builtin._sysex: + raise + except: + cls, e, tb = sys.exc_info() + exc_name = getattr(cls, '__name__', 'unknown') + try: + exc_info = str(e) + except py.builtin._sysex: + raise + except: + exc_info = 'unknown' + return '<[%s("%s") raised in repr()] %s object at 0x%x>' % ( + exc_name, exc_info, x.__class__.__name__, id(x)) + else: + if len(s) > self.maxsize: + i = max(0, (self.maxsize-3)//2) + j = max(0, self.maxsize-3-i) + s = s[:i] + '...' + s[len(s)-j:] + return s + +def saferepr(obj, maxsize=240): + """ return a size-limited safe repr-string for the given object. + Failing __repr__ functions of user instances will be represented + with a short exception info and 'saferepr' generally takes + care to never raise exceptions itself. This function is a wrapper + around the Repr/reprlib functionality of the standard 2.6 lib. + """ + # review exception handling + srepr = SafeRepr() + srepr.maxstring = maxsize + srepr.maxsize = maxsize + srepr.maxother = 160 + return srepr.repr(obj) diff --git a/tests/_lib_vendors/py/_io/terminalwriter.py b/tests/_lib_vendors/py/_io/terminalwriter.py new file mode 100644 index 0000000..cef1ff5 --- /dev/null +++ b/tests/_lib_vendors/py/_io/terminalwriter.py @@ -0,0 +1,348 @@ +""" + +Helper functions for writing to terminals and files. + +""" + + +import sys, os +import py +py3k = sys.version_info[0] >= 3 +from py.builtin import text, bytes + +win32_and_ctypes = False +colorama = None +if sys.platform == "win32": + try: + import colorama + except ImportError: + try: + import ctypes + win32_and_ctypes = True + except ImportError: + pass + + +def _getdimensions(): + import termios,fcntl,struct + call = fcntl.ioctl(1,termios.TIOCGWINSZ,"\000"*8) + height,width = struct.unpack( "hhhh", call ) [:2] + return height, width + + +def get_terminal_width(): + height = width = 0 + try: + height, width = _getdimensions() + except py.builtin._sysex: + raise + except: + # pass to fallback below + pass + + if width == 0: + # FALLBACK: + # * some exception happened + # * or this is emacs terminal which reports (0,0) + width = int(os.environ.get('COLUMNS', 80)) + + # XXX the windows getdimensions may be bogus, let's sanify a bit + if width < 40: + width = 80 + return width + +terminal_width = get_terminal_width() + +# XXX unify with _escaped func below +def ansi_print(text, esc, file=None, newline=True, flush=False): + if file is None: + file = sys.stderr + text = text.rstrip() + if esc and not isinstance(esc, tuple): + esc = (esc,) + if esc and sys.platform != "win32" and file.isatty(): + text = (''.join(['\x1b[%sm' % cod for cod in esc]) + + text + + '\x1b[0m') # ANSI color code "reset" + if newline: + text += '\n' + + if esc and win32_and_ctypes and file.isatty(): + if 1 in esc: + bold = True + esc = tuple([x for x in esc if x != 1]) + else: + bold = False + esctable = {() : FOREGROUND_WHITE, # normal + (31,): FOREGROUND_RED, # red + (32,): FOREGROUND_GREEN, # green + (33,): FOREGROUND_GREEN|FOREGROUND_RED, # yellow + (34,): FOREGROUND_BLUE, # blue + (35,): FOREGROUND_BLUE|FOREGROUND_RED, # purple + (36,): FOREGROUND_BLUE|FOREGROUND_GREEN, # cyan + (37,): FOREGROUND_WHITE, # white + (39,): FOREGROUND_WHITE, # reset + } + attr = esctable.get(esc, FOREGROUND_WHITE) + if bold: + attr |= FOREGROUND_INTENSITY + STD_OUTPUT_HANDLE = 
-11 + STD_ERROR_HANDLE = -12 + if file is sys.stderr: + handle = GetStdHandle(STD_ERROR_HANDLE) + else: + handle = GetStdHandle(STD_OUTPUT_HANDLE) + oldcolors = GetConsoleInfo(handle).wAttributes + attr |= (oldcolors & 0x0f0) + SetConsoleTextAttribute(handle, attr) + while len(text) > 32768: + file.write(text[:32768]) + text = text[32768:] + if text: + file.write(text) + SetConsoleTextAttribute(handle, oldcolors) + else: + file.write(text) + + if flush: + file.flush() + +def should_do_markup(file): + if os.environ.get('PY_COLORS') == '1': + return True + if os.environ.get('PY_COLORS') == '0': + return False + return hasattr(file, 'isatty') and file.isatty() \ + and os.environ.get('TERM') != 'dumb' \ + and not (sys.platform.startswith('java') and os._name == 'nt') + +class TerminalWriter(object): + _esctable = dict(black=30, red=31, green=32, yellow=33, + blue=34, purple=35, cyan=36, white=37, + Black=40, Red=41, Green=42, Yellow=43, + Blue=44, Purple=45, Cyan=46, White=47, + bold=1, light=2, blink=5, invert=7) + + # XXX deprecate stringio argument + def __init__(self, file=None, stringio=False, encoding=None): + if file is None: + if stringio: + self.stringio = file = py.io.TextIO() + else: + file = py.std.sys.stdout + elif py.builtin.callable(file) and not ( + hasattr(file, "write") and hasattr(file, "flush")): + file = WriteFile(file, encoding=encoding) + if hasattr(file, "isatty") and file.isatty() and colorama: + file = colorama.AnsiToWin32(file).stream + self.encoding = encoding or getattr(file, 'encoding', "utf-8") + self._file = file + self.fullwidth = get_terminal_width() + self.hasmarkup = should_do_markup(file) + self._lastlen = 0 + + def _escaped(self, text, esc): + if esc and self.hasmarkup: + text = (''.join(['\x1b[%sm' % cod for cod in esc]) + + text +'\x1b[0m') + return text + + def markup(self, text, **kw): + esc = [] + for name in kw: + if name not in self._esctable: + raise ValueError("unknown markup: %r" %(name,)) + if kw[name]: + esc.append(self._esctable[name]) + return self._escaped(text, tuple(esc)) + + def sep(self, sepchar, title=None, fullwidth=None, **kw): + if fullwidth is None: + fullwidth = self.fullwidth + # the goal is to have the line be as long as possible + # under the condition that len(line) <= fullwidth + if sys.platform == "win32": + # if we print in the last column on windows we are on a + # new line but there is no way to verify/neutralize this + # (we may not know the exact line width) + # so let's be defensive to avoid empty lines in the output + fullwidth -= 1 + if title is not None: + # we want 2 + 2*len(fill) + len(title) <= fullwidth + # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth + # 2*len(sepchar)*N <= fullwidth - len(title) - 2 + # N <= (fullwidth - len(title) - 2) // (2*len(sepchar)) + N = (fullwidth - len(title) - 2) // (2*len(sepchar)) + fill = sepchar * N + line = "%s %s %s" % (fill, title, fill) + else: + # we want len(sepchar)*N <= fullwidth + # i.e. 
N <= fullwidth // len(sepchar) + line = sepchar * (fullwidth // len(sepchar)) + # in some situations there is room for an extra sepchar at the right, + # in particular if we consider that with a sepchar like "_ " the + # trailing space is not important at the end of the line + if len(line) + len(sepchar.rstrip()) <= fullwidth: + line += sepchar.rstrip() + + self.line(line, **kw) + + def write(self, msg, **kw): + if msg: + if not isinstance(msg, (bytes, text)): + msg = text(msg) + if self.hasmarkup and kw: + markupmsg = self.markup(msg, **kw) + else: + markupmsg = msg + write_out(self._file, markupmsg) + + def line(self, s='', **kw): + self.write(s, **kw) + self._checkfill(s) + self.write('\n') + + def reline(self, line, **kw): + if not self.hasmarkup: + raise ValueError("cannot use rewrite-line without terminal") + self.write(line, **kw) + self._checkfill(line) + self.write('\r') + self._lastlen = len(line) + + def _checkfill(self, line): + diff2last = self._lastlen - len(line) + if diff2last > 0: + self.write(" " * diff2last) + +class Win32ConsoleWriter(TerminalWriter): + def write(self, msg, **kw): + if msg: + if not isinstance(msg, (bytes, text)): + msg = text(msg) + oldcolors = None + if self.hasmarkup and kw: + handle = GetStdHandle(STD_OUTPUT_HANDLE) + oldcolors = GetConsoleInfo(handle).wAttributes + default_bg = oldcolors & 0x00F0 + attr = default_bg + if kw.pop('bold', False): + attr |= FOREGROUND_INTENSITY + + if kw.pop('red', False): + attr |= FOREGROUND_RED + elif kw.pop('blue', False): + attr |= FOREGROUND_BLUE + elif kw.pop('green', False): + attr |= FOREGROUND_GREEN + elif kw.pop('yellow', False): + attr |= FOREGROUND_GREEN|FOREGROUND_RED + else: + attr |= oldcolors & 0x0007 + + SetConsoleTextAttribute(handle, attr) + write_out(self._file, msg) + if oldcolors: + SetConsoleTextAttribute(handle, oldcolors) + +class WriteFile(object): + def __init__(self, writemethod, encoding=None): + self.encoding = encoding + self._writemethod = writemethod + + def write(self, data): + if self.encoding: + data = data.encode(self.encoding, "replace") + self._writemethod(data) + + def flush(self): + return + + +if win32_and_ctypes: + TerminalWriter = Win32ConsoleWriter + import ctypes + from ctypes import wintypes + + # ctypes access to the Windows console + STD_OUTPUT_HANDLE = -11 + STD_ERROR_HANDLE = -12 + FOREGROUND_BLACK = 0x0000 # black text + FOREGROUND_BLUE = 0x0001 # text color contains blue. + FOREGROUND_GREEN = 0x0002 # text color contains green. + FOREGROUND_RED = 0x0004 # text color contains red. + FOREGROUND_WHITE = 0x0007 + FOREGROUND_INTENSITY = 0x0008 # text color is intensified. + BACKGROUND_BLACK = 0x0000 # background color black + BACKGROUND_BLUE = 0x0010 # background color contains blue. + BACKGROUND_GREEN = 0x0020 # background color contains green. + BACKGROUND_RED = 0x0040 # background color contains red. + BACKGROUND_WHITE = 0x0070 + BACKGROUND_INTENSITY = 0x0080 # background color is intensified. 
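A short sketch of the TerminalWriter API above, plus a check of sep()'s width arithmetic (illustrative only, assuming the vendored py package is importable):

    import py

    tw = py.io.TerminalWriter()
    tw.sep("=", "session starts", bold=True)
    tw.line("all good", green=True)

    # sep() width math: fullwidth=20, title="hi", sepchar="=" gives
    # N = (20 - len("hi") - 2) // (2 * len("=")) = 8, so the separator
    # line "="*8 + " hi " + "="*8 is exactly 20 characters wide
    fill = "=" * 8
    assert len("%s %s %s" % (fill, "hi", fill)) == 20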
+ + SHORT = ctypes.c_short + class COORD(ctypes.Structure): + _fields_ = [('X', SHORT), + ('Y', SHORT)] + class SMALL_RECT(ctypes.Structure): + _fields_ = [('Left', SHORT), + ('Top', SHORT), + ('Right', SHORT), + ('Bottom', SHORT)] + class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure): + _fields_ = [('dwSize', COORD), + ('dwCursorPosition', COORD), + ('wAttributes', wintypes.WORD), + ('srWindow', SMALL_RECT), + ('dwMaximumWindowSize', COORD)] + + _GetStdHandle = ctypes.windll.kernel32.GetStdHandle + _GetStdHandle.argtypes = [wintypes.DWORD] + _GetStdHandle.restype = wintypes.HANDLE + def GetStdHandle(kind): + return _GetStdHandle(kind) + + SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute + SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD] + SetConsoleTextAttribute.restype = wintypes.BOOL + + _GetConsoleScreenBufferInfo = \ + ctypes.windll.kernel32.GetConsoleScreenBufferInfo + _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE, + ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)] + _GetConsoleScreenBufferInfo.restype = wintypes.BOOL + def GetConsoleInfo(handle): + info = CONSOLE_SCREEN_BUFFER_INFO() + _GetConsoleScreenBufferInfo(handle, ctypes.byref(info)) + return info + + def _getdimensions(): + handle = GetStdHandle(STD_OUTPUT_HANDLE) + info = GetConsoleInfo(handle) + # Substract one from the width, otherwise the cursor wraps + # and the ending \n causes an empty line to display. + return info.dwSize.Y, info.dwSize.X - 1 + +def write_out(fil, msg): + # XXX sometimes "msg" is of type bytes, sometimes text which + # complicates the situation. Should we try to enforce unicode? + try: + # on py27 and above writing out to sys.stdout with an encoding + # should usually work for unicode messages (if the encoding is + # capable of it) + fil.write(msg) + except UnicodeEncodeError: + # on py26 it might not work because stdout expects bytes + if fil.encoding: + try: + fil.write(msg.encode(fil.encoding)) + except UnicodeEncodeError: + # it might still fail if the encoding is not capable + pass + else: + fil.flush() + return + # fallback: escape all unicode characters + msg = msg.encode("unicode-escape").decode("ascii") + fil.write(msg) + fil.flush() diff --git a/tests/_lib_vendors/py/_log/__init__.py b/tests/_lib_vendors/py/_log/__init__.py new file mode 100644 index 0000000..fad62e9 --- /dev/null +++ b/tests/_lib_vendors/py/_log/__init__.py @@ -0,0 +1,2 @@ +""" logging API ('producers' and 'consumers' connected via keywords) """ + diff --git a/tests/_lib_vendors/py/_log/log.py b/tests/_lib_vendors/py/_log/log.py new file mode 100644 index 0000000..ce47e8c --- /dev/null +++ b/tests/_lib_vendors/py/_log/log.py @@ -0,0 +1,186 @@ +""" +basic logging functionality based on a producer/consumer scheme. + +XXX implement this API: (maybe put it into slogger.py?) 
+
+    log = Logger(
+               info=py.log.STDOUT,
+               debug=py.log.STDOUT,
+               command=None)
+    log.info("hello", "world")
+    log.command("hello", "world")
+
+    log = Logger(info=Logger(something=...),
+             debug=py.log.STDOUT,
+             command=None)
+"""
+import py, sys
+
+class Message(object):
+    def __init__(self, keywords, args):
+        self.keywords = keywords
+        self.args = args
+
+    def content(self):
+        return " ".join(map(str, self.args))
+
+    def prefix(self):
+        return "[%s] " % (":".join(self.keywords))
+
+    def __str__(self):
+        return self.prefix() + self.content()
+
+
+class Producer(object):
+    """ (deprecated) Log producer API which sends messages to be logged
+        to a 'consumer' object, which then prints them to stdout,
+        stderr, files, etc. Used extensively by PyPy-1.1.
+    """
+
+    Message = Message  # to allow later customization
+    keywords2consumer = {}
+
+    def __init__(self, keywords, keywordmapper=None, **kw):
+        if hasattr(keywords, 'split'):
+            keywords = tuple(keywords.split())
+        self._keywords = keywords
+        if keywordmapper is None:
+            keywordmapper = default_keywordmapper
+        self._keywordmapper = keywordmapper
+
+    def __repr__(self):
+        return "<py.log.Producer %s>" % ":".join(self._keywords)
+
+    def __getattr__(self, name):
+        if '_' in name:
+            raise AttributeError(name)
+        producer = self.__class__(self._keywords + (name,))
+        setattr(self, name, producer)
+        return producer
+
+    def __call__(self, *args):
+        """ write a message to the appropriate consumer(s) """
+        func = self._keywordmapper.getconsumer(self._keywords)
+        if func is not None:
+            func(self.Message(self._keywords, args))
+
+class KeywordMapper:
+    def __init__(self):
+        self.keywords2consumer = {}
+
+    def getstate(self):
+        return self.keywords2consumer.copy()
+    def setstate(self, state):
+        self.keywords2consumer.clear()
+        self.keywords2consumer.update(state)
+
+    def getconsumer(self, keywords):
+        """ return a consumer matching the given keywords.
+
+            tries to find the most suitable consumer by walking, starting from
+            the back, the list of keywords, the first consumer matching a
+            keyword is returned (falling back to py.log.default)
+        """
+        for i in range(len(keywords), 0, -1):
+            try:
+                return self.keywords2consumer[keywords[:i]]
+            except KeyError:
+                continue
+        return self.keywords2consumer.get('default', default_consumer)
+
+    def setconsumer(self, keywords, consumer):
+        """ set a consumer for a set of keywords.
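How the producer/consumer keyword scheme above is meant to be wired up (illustrative sketch using the public py.log names this module is exposed under):

    import py

    log = py.log.Producer("myapp")
    py.log.setconsumer("myapp", py.log.STDOUT)   # route [myapp] to stdout
    py.log.setconsumer("myapp debug", None)      # silence [myapp:debug]

    log("starting up")        # -> [myapp] starting up
    log.debug("not shown")    # consumer is None, message is dropped
    log.net("connected")      # no 'myapp net' entry, falls back to 'myapp'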
""" + # normalize to tuples + if isinstance(keywords, str): + keywords = tuple(filter(None, keywords.split())) + elif hasattr(keywords, '_keywords'): + keywords = keywords._keywords + elif not isinstance(keywords, tuple): + raise TypeError("key %r is not a string or tuple" % (keywords,)) + if consumer is not None and not py.builtin.callable(consumer): + if not hasattr(consumer, 'write'): + raise TypeError( + "%r should be None, callable or file-like" % (consumer,)) + consumer = File(consumer) + self.keywords2consumer[keywords] = consumer + +def default_consumer(msg): + """ the default consumer, prints the message to stdout (using 'print') """ + sys.stderr.write(str(msg)+"\n") + +default_keywordmapper = KeywordMapper() + +def setconsumer(keywords, consumer): + default_keywordmapper.setconsumer(keywords, consumer) + +def setstate(state): + default_keywordmapper.setstate(state) +def getstate(): + return default_keywordmapper.getstate() + +# +# Consumers +# + +class File(object): + """ log consumer wrapping a file(-like) object """ + def __init__(self, f): + assert hasattr(f, 'write') + #assert isinstance(f, file) or not hasattr(f, 'open') + self._file = f + + def __call__(self, msg): + """ write a message to the log """ + self._file.write(str(msg) + "\n") + if hasattr(self._file, 'flush'): + self._file.flush() + +class Path(object): + """ log consumer that opens and writes to a Path """ + def __init__(self, filename, append=False, + delayed_create=False, buffering=False): + self._append = append + self._filename = str(filename) + self._buffering = buffering + if not delayed_create: + self._openfile() + + def _openfile(self): + mode = self._append and 'a' or 'w' + f = open(self._filename, mode) + self._file = f + + def __call__(self, msg): + """ write a message to the log """ + if not hasattr(self, "_file"): + self._openfile() + self._file.write(str(msg) + "\n") + if not self._buffering: + self._file.flush() + +def STDOUT(msg): + """ consumer that writes to sys.stdout """ + sys.stdout.write(str(msg)+"\n") + +def STDERR(msg): + """ consumer that writes to sys.stderr """ + sys.stderr.write(str(msg)+"\n") + +class Syslog: + """ consumer that writes to the syslog daemon """ + + def __init__(self, priority = None): + if priority is None: + priority = self.LOG_INFO + self.priority = priority + + def __call__(self, msg): + """ write a message to the log """ + py.std.syslog.syslog(self.priority, str(msg)) + +for _prio in "EMERG ALERT CRIT ERR WARNING NOTICE INFO DEBUG".split(): + _prio = "LOG_" + _prio + try: + setattr(Syslog, _prio, getattr(py.std.syslog, _prio)) + except AttributeError: + pass diff --git a/tests/_lib_vendors/py/_log/warning.py b/tests/_lib_vendors/py/_log/warning.py new file mode 100644 index 0000000..722e31e --- /dev/null +++ b/tests/_lib_vendors/py/_log/warning.py @@ -0,0 +1,76 @@ +import py, sys + +class DeprecationWarning(DeprecationWarning): + def __init__(self, msg, path, lineno): + self.msg = msg + self.path = path + self.lineno = lineno + def __repr__(self): + return "%s:%d: %s" %(self.path, self.lineno+1, self.msg) + def __str__(self): + return self.msg + +def _apiwarn(startversion, msg, stacklevel=2, function=None): + # below is mostly COPIED from python2.4/warnings.py's def warn() + # Get context information + if isinstance(stacklevel, str): + frame = sys._getframe(1) + level = 1 + found = frame.f_code.co_filename.find(stacklevel) != -1 + while frame: + co = frame.f_code + if co.co_filename.find(stacklevel) == -1: + if found: + stacklevel = level + break + else: + found = 
True
+            level += 1
+            frame = frame.f_back
+        else:
+            stacklevel = 1
+    msg = "%s (since version %s)" %(msg, startversion)
+    warn(msg, stacklevel=stacklevel+1, function=function)
+
+def warn(msg, stacklevel=1, function=None):
+    if function is not None:
+        filename = py.std.inspect.getfile(function)
+        lineno = py.code.getrawcode(function).co_firstlineno
+    else:
+        try:
+            caller = sys._getframe(stacklevel)
+        except ValueError:
+            globals = sys.__dict__
+            lineno = 1
+        else:
+            globals = caller.f_globals
+            lineno = caller.f_lineno
+        if '__name__' in globals:
+            module = globals['__name__']
+        else:
+            module = "<unknown>"
+        filename = globals.get('__file__')
+    if filename:
+        fnl = filename.lower()
+        if fnl.endswith(".pyc") or fnl.endswith(".pyo"):
+            filename = filename[:-1]
+        elif fnl.endswith("$py.class"):
+            filename = filename.replace('$py.class', '.py')
+    else:
+        if module == "__main__":
+            try:
+                filename = sys.argv[0]
+            except AttributeError:
+                # embedded interpreters don't have sys.argv, see bug #839151
+                filename = '__main__'
+        if not filename:
+            filename = module
+    path = py.path.local(filename)
+    warning = DeprecationWarning(msg, path, lineno)
+    py.std.warnings.warn_explicit(warning, category=Warning,
+        filename=str(warning.path),
+        lineno=warning.lineno,
+        registry=py.std.warnings.__dict__.setdefault(
+            "__warningsregistry__", {})
+    )
+
diff --git a/tests/_lib_vendors/py/_path/__init__.py b/tests/_lib_vendors/py/_path/__init__.py
new file mode 100644
index 0000000..51f3246
--- /dev/null
+++ b/tests/_lib_vendors/py/_path/__init__.py
@@ -0,0 +1 @@
+""" unified file system api """
diff --git a/tests/_lib_vendors/py/_path/cacheutil.py b/tests/_lib_vendors/py/_path/cacheutil.py
new file mode 100644
index 0000000..9922504
--- /dev/null
+++ b/tests/_lib_vendors/py/_path/cacheutil.py
@@ -0,0 +1,114 @@
+"""
+This module contains multithread-safe cache implementations.
+
+All Caches have
+
+    getorbuild(key, builder)
+    delentry(key)
+
+methods and allow configuration when instantiating the cache class.
+"""
+from time import time as gettime
+
+class BasicCache(object):
+    def __init__(self, maxentries=128):
+        self.maxentries = maxentries
+        self.prunenum = int(maxentries - maxentries/8)
+        self._dict = {}
+
+    def clear(self):
+        self._dict.clear()
+
+    def _getentry(self, key):
+        return self._dict[key]
+
+    def _putentry(self, key, entry):
+        self._prunelowestweight()
+        self._dict[key] = entry
+
+    def delentry(self, key, raising=False):
+        try:
+            del self._dict[key]
+        except KeyError:
+            if raising:
+                raise
+
+    def getorbuild(self, key, builder):
+        try:
+            entry = self._getentry(key)
+        except KeyError:
+            entry = self._build(key, builder)
+            self._putentry(key, entry)
+        return entry.value
+
+    def _prunelowestweight(self):
+        """ prune out entries with lowest weight. """
+        numentries = len(self._dict)
+        if numentries >= self.maxentries:
+            # evict according to entry's weight
+            items = [(entry.weight, key)
+                     for key, entry in self._dict.items()]
+            items.sort()
+            index = numentries - self.prunenum
+            if index > 0:
+                for weight, key in items[:index]:
+                    # in MT situations the element might be gone
+                    self.delentry(key, raising=False)
+
+class BuildcostAccessCache(BasicCache):
+    """ A BuildTime/Access-counting cache implementation.
+        the weight of a value is computed as the product of
+
+            num-accesses-of-a-value * time-to-build-the-value
+
+        The values with the least such weights are evicted
+        if the cache maxentries threshold is exceeded.
+        For implementation flexibility more than one object
+        might be evicted at a time.
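A sketch of getorbuild() from BasicCache in use (illustrative; assumes the vendored module imports as py._path.cacheutil): the builder runs only on a miss, later lookups are served from the cache:

    from py._path.cacheutil import BuildcostAccessCache

    cache = BuildcostAccessCache(maxentries=128)
    calls = []

    def build():
        calls.append(1)
        return "expensive result"

    assert cache.getorbuild("key", build) == "expensive result"
    assert cache.getorbuild("key", build) == "expensive result"
    assert len(calls) == 1    # built once, cached afterwards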
+ """ + # time function to use for measuring build-times + + def _build(self, key, builder): + start = gettime() + val = builder() + end = gettime() + return WeightedCountingEntry(val, end-start) + + +class WeightedCountingEntry(object): + def __init__(self, value, oneweight): + self._value = value + self.weight = self._oneweight = oneweight + + def value(self): + self.weight += self._oneweight + return self._value + value = property(value) + +class AgingCache(BasicCache): + """ This cache prunes out cache entries that are too old. + """ + def __init__(self, maxentries=128, maxseconds=10.0): + super(AgingCache, self).__init__(maxentries) + self.maxseconds = maxseconds + + def _getentry(self, key): + entry = self._dict[key] + if entry.isexpired(): + self.delentry(key) + raise KeyError(key) + return entry + + def _build(self, key, builder): + val = builder() + entry = AgingEntry(val, gettime() + self.maxseconds) + return entry + +class AgingEntry(object): + def __init__(self, value, expirationtime): + self.value = value + self.weight = expirationtime + + def isexpired(self): + t = gettime() + return t >= self.weight diff --git a/tests/_lib_vendors/py/_path/common.py b/tests/_lib_vendors/py/_path/common.py new file mode 100644 index 0000000..d407434 --- /dev/null +++ b/tests/_lib_vendors/py/_path/common.py @@ -0,0 +1,403 @@ +""" +""" +import os, sys, posixpath +import py + +# Moved from local.py. +iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt') + +class Checkers: + _depend_on_existence = 'exists', 'link', 'dir', 'file' + + def __init__(self, path): + self.path = path + + def dir(self): + raise NotImplementedError + + def file(self): + raise NotImplementedError + + def dotfile(self): + return self.path.basename.startswith('.') + + def ext(self, arg): + if not arg.startswith('.'): + arg = '.' + arg + return self.path.ext == arg + + def exists(self): + raise NotImplementedError + + def basename(self, arg): + return self.path.basename == arg + + def basestarts(self, arg): + return self.path.basename.startswith(arg) + + def relto(self, arg): + return self.path.relto(arg) + + def fnmatch(self, arg): + return self.path.fnmatch(arg) + + def endswith(self, arg): + return str(self.path).endswith(arg) + + def _evaluate(self, kw): + for name, value in kw.items(): + invert = False + meth = None + try: + meth = getattr(self, name) + except AttributeError: + if name[:3] == 'not': + invert = True + try: + meth = getattr(self, name[3:]) + except AttributeError: + pass + if meth is None: + raise TypeError( + "no %r checker available for %r" % (name, self.path)) + try: + if py.code.getrawcode(meth).co_argcount > 1: + if (not meth(value)) ^ invert: + return False + else: + if bool(value) ^ bool(meth()) ^ invert: + return False + except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY): + # EBUSY feels not entirely correct, + # but its kind of necessary since ENOMEDIUM + # is not accessible in python + for name in self._depend_on_existence: + if name in kw: + if kw.get(name): + return False + name = 'not' + name + if name in kw: + if not kw.get(name): + return False + return True + +class NeverRaised(Exception): + pass + +class PathBase(object): + """ shared implementation for filesystem path objects.""" + Checkers = Checkers + + def __div__(self, other): + return self.join(str(other)) + __truediv__ = __div__ # py3k + + def basename(self): + """ basename part of path. 
""" + return self._getbyspec('basename')[0] + basename = property(basename, None, None, basename.__doc__) + + def dirname(self): + """ dirname part of path. """ + return self._getbyspec('dirname')[0] + dirname = property(dirname, None, None, dirname.__doc__) + + def purebasename(self): + """ pure base name of the path.""" + return self._getbyspec('purebasename')[0] + purebasename = property(purebasename, None, None, purebasename.__doc__) + + def ext(self): + """ extension of the path (including the '.').""" + return self._getbyspec('ext')[0] + ext = property(ext, None, None, ext.__doc__) + + def dirpath(self, *args, **kwargs): + """ return the directory path joined with any given path arguments. """ + return self.new(basename='').join(*args, **kwargs) + + def read_binary(self): + """ read and return a bytestring from reading the path. """ + with self.open('rb') as f: + return f.read() + + def read_text(self, encoding): + """ read and return a Unicode string from reading the path. """ + with self.open("r", encoding=encoding) as f: + return f.read() + + + def read(self, mode='r'): + """ read and return a bytestring from reading the path. """ + with self.open(mode) as f: + return f.read() + + def readlines(self, cr=1): + """ read and return a list of lines from the path. if cr is False, the +newline will be removed from the end of each line. """ + if not cr: + content = self.read('rU') + return content.split('\n') + else: + f = self.open('rU') + try: + return f.readlines() + finally: + f.close() + + def load(self): + """ (deprecated) return object unpickled from self.read() """ + f = self.open('rb') + try: + return py.error.checked_call(py.std.pickle.load, f) + finally: + f.close() + + def move(self, target): + """ move this path to target. """ + if target.relto(self): + raise py.error.EINVAL(target, + "cannot move path into a subdirectory of itself") + try: + self.rename(target) + except py.error.EXDEV: # invalid cross-device link + self.copy(target) + self.remove() + + def __repr__(self): + """ return a string representation of this path. """ + return repr(str(self)) + + def check(self, **kw): + """ check a path for existence and properties. + + Without arguments, return True if the path exists, otherwise False. + + valid checkers:: + + file=1 # is a file + file=0 # is not a file (may not even exist) + dir=1 # is a dir + link=1 # is a link + exists=1 # exists + + You can specify multiple checker definitions, for example:: + + path.check(file=1, link=1) # a link pointing to a file + """ + if not kw: + kw = {'exists' : 1} + return self.Checkers(self)._evaluate(kw) + + def fnmatch(self, pattern): + """return true if the basename/fullname matches the glob-'pattern'. + + valid pattern characters:: + + * matches everything + ? matches any single character + [seq] matches any character in seq + [!seq] matches any char not in seq + + If the pattern contains a path-separator then the full path + is used for pattern matching and a '*' is prepended to the + pattern. + + if the pattern doesn't contain a path-separator the pattern + is only matched against the basename. + """ + return FNMatcher(pattern)(self) + + def relto(self, relpath): + """ return a string which is the relative part of the path + to the given 'relpath'. 
+ """ + if not isinstance(relpath, (str, PathBase)): + raise TypeError("%r: not a string or path object" %(relpath,)) + strrelpath = str(relpath) + if strrelpath and strrelpath[-1] != self.sep: + strrelpath += self.sep + #assert strrelpath[-1] == self.sep + #assert strrelpath[-2] != self.sep + strself = self.strpath + if sys.platform == "win32" or getattr(os, '_name', None) == 'nt': + if os.path.normcase(strself).startswith( + os.path.normcase(strrelpath)): + return strself[len(strrelpath):] + elif strself.startswith(strrelpath): + return strself[len(strrelpath):] + return "" + + def ensure_dir(self, *args): + """ ensure the path joined with args is a directory. """ + return self.ensure(*args, **{"dir": True}) + + def bestrelpath(self, dest): + """ return a string which is a relative path from self + (assumed to be a directory) to dest such that + self.join(bestrelpath) == dest and if not such + path can be determined return dest. + """ + try: + if self == dest: + return os.curdir + base = self.common(dest) + if not base: # can be the case on windows + return str(dest) + self2base = self.relto(base) + reldest = dest.relto(base) + if self2base: + n = self2base.count(self.sep) + 1 + else: + n = 0 + l = [os.pardir] * n + if reldest: + l.append(reldest) + target = dest.sep.join(l) + return target + except AttributeError: + return str(dest) + + def exists(self): + return self.check() + + def isdir(self): + return self.check(dir=1) + + def isfile(self): + return self.check(file=1) + + def parts(self, reverse=False): + """ return a root-first list of all ancestor directories + plus the path itself. + """ + current = self + l = [self] + while 1: + last = current + current = current.dirpath() + if last == current: + break + l.append(current) + if not reverse: + l.reverse() + return l + + def common(self, other): + """ return the common part shared with the other path + or None if there is no common part. + """ + last = None + for x, y in zip(self.parts(), other.parts()): + if x != y: + return last + last = x + return last + + def __add__(self, other): + """ return new path object with 'other' added to the basename""" + return self.new(basename=self.basename+str(other)) + + def __cmp__(self, other): + """ return sort value (-1, 0, +1). """ + try: + return cmp(self.strpath, other.strpath) + except AttributeError: + return cmp(str(self), str(other)) # self.path, other.path) + + def __lt__(self, other): + try: + return self.strpath < other.strpath + except AttributeError: + return str(self) < str(other) + + def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False): + """ yields all paths below the current one + + fil is a filter (glob pattern or callable), if not matching the + path will not be yielded, defaulting to None (everything is + returned) + + rec is a filter (glob pattern or callable) that controls whether + a node is descended, defaulting to None + + ignore is an Exception class that is ignoredwhen calling dirlist() + on any of the paths (by default, all exceptions are reported) + + bf if True will cause a breadthfirst search instead of the + default depthfirst. Default: False + + sort if True will sort entries within each directory level. + """ + for x in Visitor(fil, rec, ignore, bf, sort).gen(self): + yield x + + def _sortlist(self, res, sort): + if sort: + if hasattr(sort, '__call__'): + res.sort(sort) + else: + res.sort() + + def samefile(self, other): + """ return True if other refers to the same stat object as self. 
""" + return self.strpath == str(other) + +class Visitor: + def __init__(self, fil, rec, ignore, bf, sort): + if isinstance(fil, str): + fil = FNMatcher(fil) + if isinstance(rec, str): + self.rec = FNMatcher(rec) + elif not hasattr(rec, '__call__') and rec: + self.rec = lambda path: True + else: + self.rec = rec + self.fil = fil + self.ignore = ignore + self.breadthfirst = bf + self.optsort = sort and sorted or (lambda x: x) + + def gen(self, path): + try: + entries = path.listdir() + except self.ignore: + return + rec = self.rec + dirs = self.optsort([p for p in entries + if p.check(dir=1) and (rec is None or rec(p))]) + if not self.breadthfirst: + for subdir in dirs: + for p in self.gen(subdir): + yield p + for p in self.optsort(entries): + if self.fil is None or self.fil(p): + yield p + if self.breadthfirst: + for subdir in dirs: + for p in self.gen(subdir): + yield p + +class FNMatcher: + def __init__(self, pattern): + self.pattern = pattern + + def __call__(self, path): + pattern = self.pattern + + if (pattern.find(path.sep) == -1 and + iswin32 and + pattern.find(posixpath.sep) != -1): + # Running on Windows, the pattern has no Windows path separators, + # and the pattern has one or more Posix path separators. Replace + # the Posix path separators with the Windows path separator. + pattern = pattern.replace(posixpath.sep, path.sep) + + if pattern.find(path.sep) == -1: + name = path.basename + else: + name = str(path) # path.strpath # XXX svn? + if not os.path.isabs(pattern): + pattern = '*' + path.sep + pattern + return py.std.fnmatch.fnmatch(name, pattern) + diff --git a/tests/_lib_vendors/py/_path/local.py b/tests/_lib_vendors/py/_path/local.py new file mode 100644 index 0000000..d569404 --- /dev/null +++ b/tests/_lib_vendors/py/_path/local.py @@ -0,0 +1,911 @@ +""" +local path implementation. +""" +from __future__ import with_statement + +from contextlib import contextmanager +import sys, os, re, atexit, io +import py +from py._path import common +from py._path.common import iswin32 +from stat import S_ISLNK, S_ISDIR, S_ISREG + +from os.path import abspath, normpath, isabs, exists, isdir, isfile, islink, dirname + +if sys.version_info > (3,0): + def map_as_list(func, iter): + return list(map(func, iter)) +else: + map_as_list = map + +class Stat(object): + def __getattr__(self, name): + return getattr(self._osstatresult, "st_" + name) + + def __init__(self, path, osstatresult): + self.path = path + self._osstatresult = osstatresult + + @property + def owner(self): + if iswin32: + raise NotImplementedError("XXX win32") + import pwd + entry = py.error.checked_call(pwd.getpwuid, self.uid) + return entry[0] + + @property + def group(self): + """ return group name of file. """ + if iswin32: + raise NotImplementedError("XXX win32") + import grp + entry = py.error.checked_call(grp.getgrgid, self.gid) + return entry[0] + + def isdir(self): + return S_ISDIR(self._osstatresult.st_mode) + + def isfile(self): + return S_ISREG(self._osstatresult.st_mode) + + def islink(self): + st = self.path.lstat() + return S_ISLNK(self._osstatresult.st_mode) + +class PosixPath(common.PathBase): + def chown(self, user, group, rec=0): + """ change ownership to the given user and group. + user and group may be specified by a number or + by a name. if rec is True change ownership + recursively. 
+ """ + uid = getuserid(user) + gid = getgroupid(group) + if rec: + for x in self.visit(rec=lambda x: x.check(link=0)): + if x.check(link=0): + py.error.checked_call(os.chown, str(x), uid, gid) + py.error.checked_call(os.chown, str(self), uid, gid) + + def readlink(self): + """ return value of a symbolic link. """ + return py.error.checked_call(os.readlink, self.strpath) + + def mklinkto(self, oldname): + """ posix style hard link to another name. """ + py.error.checked_call(os.link, str(oldname), str(self)) + + def mksymlinkto(self, value, absolute=1): + """ create a symbolic link with the given value (pointing to another name). """ + if absolute: + py.error.checked_call(os.symlink, str(value), self.strpath) + else: + base = self.common(value) + # with posix local paths '/' is always a common base + relsource = self.__class__(value).relto(base) + reldest = self.relto(base) + n = reldest.count(self.sep) + target = self.sep.join(('..', )*n + (relsource, )) + py.error.checked_call(os.symlink, target, self.strpath) + +def getuserid(user): + import pwd + if not isinstance(user, int): + user = pwd.getpwnam(user)[2] + return user + +def getgroupid(group): + import grp + if not isinstance(group, int): + group = grp.getgrnam(group)[2] + return group + +FSBase = not iswin32 and PosixPath or common.PathBase + +class LocalPath(FSBase): + """ object oriented interface to os.path and other local filesystem + related information. + """ + class ImportMismatchError(ImportError): + """ raised on pyimport() if there is a mismatch of __file__'s""" + + sep = os.sep + class Checkers(common.Checkers): + def _stat(self): + try: + return self._statcache + except AttributeError: + try: + self._statcache = self.path.stat() + except py.error.ELOOP: + self._statcache = self.path.lstat() + return self._statcache + + def dir(self): + return S_ISDIR(self._stat().mode) + + def file(self): + return S_ISREG(self._stat().mode) + + def exists(self): + return self._stat() + + def link(self): + st = self.path.lstat() + return S_ISLNK(st.mode) + + def __init__(self, path=None, expanduser=False): + """ Initialize and return a local Path instance. + + Path can be relative to the current directory. + If path is None it defaults to the current working directory. + If expanduser is True, tilde-expansion is performed. + Note that Path instances always carry an absolute path. + Note also that passing in a local path object will simply return + the exact same path object. Use new() to get a new copy. + """ + if path is None: + self.strpath = py.error.checked_call(os.getcwd) + elif isinstance(path, common.PathBase): + self.strpath = path.strpath + elif isinstance(path, py.builtin._basestring): + if expanduser: + path = os.path.expanduser(path) + self.strpath = abspath(path) + else: + raise ValueError("can only pass None, Path instances " + "or non-empty strings to LocalPath") + + def __hash__(self): + return hash(self.strpath) + + def __eq__(self, other): + s1 = self.strpath + s2 = getattr(other, "strpath", other) + if iswin32: + s1 = s1.lower() + try: + s2 = s2.lower() + except AttributeError: + return False + return s1 == s2 + + def __ne__(self, other): + return not (self == other) + + def __lt__(self, other): + return self.strpath < getattr(other, "strpath", other) + + def __gt__(self, other): + return self.strpath > getattr(other, "strpath", other) + + def samefile(self, other): + """ return True if 'other' references the same file as 'self'. 
+ """ + other = getattr(other, "strpath", other) + if not isabs(other): + other = abspath(other) + if self == other: + return True + if iswin32: + return False # there is no samefile + return py.error.checked_call( + os.path.samefile, self.strpath, other) + + def remove(self, rec=1, ignore_errors=False): + """ remove a file or directory (or a directory tree if rec=1). + if ignore_errors is True, errors while removing directories will + be ignored. + """ + if self.check(dir=1, link=0): + if rec: + # force remove of readonly files on windows + if iswin32: + self.chmod(448, rec=1) # octcal 0700 + py.error.checked_call(py.std.shutil.rmtree, self.strpath, + ignore_errors=ignore_errors) + else: + py.error.checked_call(os.rmdir, self.strpath) + else: + if iswin32: + self.chmod(448) # octcal 0700 + py.error.checked_call(os.remove, self.strpath) + + def computehash(self, hashtype="md5", chunksize=524288): + """ return hexdigest of hashvalue for this file. """ + try: + try: + import hashlib as mod + except ImportError: + if hashtype == "sha1": + hashtype = "sha" + mod = __import__(hashtype) + hash = getattr(mod, hashtype)() + except (AttributeError, ImportError): + raise ValueError("Don't know how to compute %r hash" %(hashtype,)) + f = self.open('rb') + try: + while 1: + buf = f.read(chunksize) + if not buf: + return hash.hexdigest() + hash.update(buf) + finally: + f.close() + + def new(self, **kw): + """ create a modified version of this path. + the following keyword arguments modify various path parts:: + + a:/some/path/to/a/file.ext + xx drive + xxxxxxxxxxxxxxxxx dirname + xxxxxxxx basename + xxxx purebasename + xxx ext + """ + obj = object.__new__(self.__class__) + if not kw: + obj.strpath = self.strpath + return obj + drive, dirname, basename, purebasename,ext = self._getbyspec( + "drive,dirname,basename,purebasename,ext") + if 'basename' in kw: + if 'purebasename' in kw or 'ext' in kw: + raise ValueError("invalid specification %r" % kw) + else: + pb = kw.setdefault('purebasename', purebasename) + try: + ext = kw['ext'] + except KeyError: + pass + else: + if ext and not ext.startswith('.'): + ext = '.' + ext + kw['basename'] = pb + ext + + if ('dirname' in kw and not kw['dirname']): + kw['dirname'] = drive + else: + kw.setdefault('dirname', dirname) + kw.setdefault('sep', self.sep) + obj.strpath = normpath( + "%(dirname)s%(sep)s%(basename)s" % kw) + return obj + + def _getbyspec(self, spec): + """ see new for what 'spec' can be. """ + res = [] + parts = self.strpath.split(self.sep) + + args = filter(None, spec.split(',') ) + append = res.append + for name in args: + if name == 'drive': + append(parts[0]) + elif name == 'dirname': + append(self.sep.join(parts[:-1])) + else: + basename = parts[-1] + if name == 'basename': + append(basename) + else: + i = basename.rfind('.') + if i == -1: + purebasename, ext = basename, '' + else: + purebasename, ext = basename[:i], basename[i:] + if name == 'purebasename': + append(purebasename) + elif name == 'ext': + append(ext) + else: + raise ValueError("invalid part specification %r" % name) + return res + + def dirpath(self, *args, **kwargs): + """ return the directory path joined with any given path arguments. """ + if not kwargs: + path = object.__new__(self.__class__) + path.strpath = dirname(self.strpath) + if args: + path = path.join(*args) + return path + return super(LocalPath, self).dirpath(*args, **kwargs) + + def join(self, *args, **kwargs): + """ return a new path by appending all 'args' as path + components. 
if abs=1 is used restart from root if any + of the args is an absolute path. + """ + sep = self.sep + strargs = [getattr(arg, "strpath", arg) for arg in args] + strpath = self.strpath + if kwargs.get('abs'): + newargs = [] + for arg in reversed(strargs): + if isabs(arg): + strpath = arg + strargs = newargs + break + newargs.insert(0, arg) + for arg in strargs: + arg = arg.strip(sep) + if iswin32: + # allow unix style paths even on windows. + arg = arg.strip('/') + arg = arg.replace('/', sep) + strpath = strpath + sep + arg + obj = object.__new__(self.__class__) + obj.strpath = normpath(strpath) + return obj + + def open(self, mode='r', ensure=False, encoding=None): + """ return an opened file with the given mode. + + If ensure is True, create parent directories if needed. + """ + if ensure: + self.dirpath().ensure(dir=1) + if encoding: + return py.error.checked_call(io.open, self.strpath, mode, encoding=encoding) + return py.error.checked_call(open, self.strpath, mode) + + def _fastjoin(self, name): + child = object.__new__(self.__class__) + child.strpath = self.strpath + self.sep + name + return child + + def islink(self): + return islink(self.strpath) + + def check(self, **kw): + if not kw: + return exists(self.strpath) + if len(kw) == 1: + if "dir" in kw: + return not kw["dir"] ^ isdir(self.strpath) + if "file" in kw: + return not kw["file"] ^ isfile(self.strpath) + return super(LocalPath, self).check(**kw) + + _patternchars = set("*?[" + os.path.sep) + def listdir(self, fil=None, sort=None): + """ list directory contents, possibly filter by the given fil func + and possibly sorted. + """ + if fil is None and sort is None: + names = py.error.checked_call(os.listdir, self.strpath) + return map_as_list(self._fastjoin, names) + if isinstance(fil, py.builtin._basestring): + if not self._patternchars.intersection(fil): + child = self._fastjoin(fil) + if exists(child.strpath): + return [child] + return [] + fil = common.FNMatcher(fil) + names = py.error.checked_call(os.listdir, self.strpath) + res = [] + for name in names: + child = self._fastjoin(name) + if fil is None or fil(child): + res.append(child) + self._sortlist(res, sort) + return res + + def size(self): + """ return size of the underlying file object """ + return self.stat().size + + def mtime(self): + """ return last modification time of the path. """ + return self.stat().mtime + + def copy(self, target, mode=False): + """ copy path to target.""" + if self.check(file=1): + if target.check(dir=1): + target = target.join(self.basename) + assert self!=target + copychunked(self, target) + if mode: + copymode(self.strpath, target.strpath) + else: + def rec(p): + return p.check(link=0) + for x in self.visit(rec=rec): + relpath = x.relto(self) + newx = target.join(relpath) + newx.dirpath().ensure(dir=1) + if x.check(link=1): + newx.mksymlinkto(x.readlink()) + continue + elif x.check(file=1): + copychunked(x, newx) + elif x.check(dir=1): + newx.ensure(dir=1) + if mode: + copymode(x.strpath, newx.strpath) + + def rename(self, target): + """ rename this path to target. """ + target = getattr(target, "strpath", target) + return py.error.checked_call(os.rename, self.strpath, target) + + def dump(self, obj, bin=1): + """ pickle object into path location""" + f = self.open('wb') + try: + py.error.checked_call(py.std.pickle.dump, obj, f, bin) + finally: + f.close() + + def mkdir(self, *args): + """ create & return the directory joined with args. 
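join(), mkdir() and listdir() from above in combination (illustrative only):

    import py

    tmp = py.path.local.mkdtemp()
    tmp.join("a.txt").write("hello")
    sub = tmp.mkdir("b")
    sub.join("c.txt").write("world")
    names = [p.basename for p in tmp.listdir(sort=True)]
    assert names == ["a.txt", "b"]
    assert tmp.join("b", "c.txt").read() == "world"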
""" + p = self.join(*args) + py.error.checked_call(os.mkdir, getattr(p, "strpath", p)) + return p + + def write_binary(self, data, ensure=False): + """ write binary data into path. If ensure is True create + missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + with self.open('wb') as f: + f.write(data) + + def write_text(self, data, encoding, ensure=False): + """ write text data into path using the specified encoding. + If ensure is True create missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + with self.open('w', encoding=encoding) as f: + f.write(data) + + def write(self, data, mode='w', ensure=False): + """ write data into path. If ensure is True create + missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + if 'b' in mode: + if not py.builtin._isbytes(data): + raise ValueError("can only process bytes") + else: + if not py.builtin._istext(data): + if not py.builtin._isbytes(data): + data = str(data) + else: + data = py.builtin._totext(data, sys.getdefaultencoding()) + f = self.open(mode) + try: + f.write(data) + finally: + f.close() + + def _ensuredirs(self): + parent = self.dirpath() + if parent == self: + return self + if parent.check(dir=0): + parent._ensuredirs() + if self.check(dir=0): + try: + self.mkdir() + except py.error.EEXIST: + # race condition: file/dir created by another thread/process. + # complain if it is not a dir + if self.check(dir=0): + raise + return self + + def ensure(self, *args, **kwargs): + """ ensure that an args-joined path exists (by default as + a file). if you specify a keyword argument 'dir=True' + then the path is forced to be a directory path. + """ + p = self.join(*args) + if kwargs.get('dir', 0): + return p._ensuredirs() + else: + p.dirpath()._ensuredirs() + if not p.check(file=1): + p.open('w').close() + return p + + def stat(self, raising=True): + """ Return an os.stat() tuple. """ + if raising == True: + return Stat(self, py.error.checked_call(os.stat, self.strpath)) + try: + return Stat(self, os.stat(self.strpath)) + except KeyboardInterrupt: + raise + except Exception: + return None + + def lstat(self): + """ Return an os.lstat() tuple. """ + return Stat(self, py.error.checked_call(os.lstat, self.strpath)) + + def setmtime(self, mtime=None): + """ set modification time for the given path. if 'mtime' is None + (the default) then the file's mtime is set to current time. + + Note that the resolution for 'mtime' is platform dependent. + """ + if mtime is None: + return py.error.checked_call(os.utime, self.strpath, mtime) + try: + return py.error.checked_call(os.utime, self.strpath, (-1, mtime)) + except py.error.EINVAL: + return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime)) + + def chdir(self): + """ change directory to self and return old current directory """ + try: + old = self.__class__() + except py.error.ENOENT: + old = None + py.error.checked_call(os.chdir, self.strpath) + return old + + + @contextmanager + def as_cwd(self): + """ return context manager which changes to current dir during the + managed "with" context. On __enter__ it returns the old dir. + """ + old = self.chdir() + try: + yield old + finally: + old.chdir() + + def realpath(self): + """ return a new path which contains no symbolic links.""" + return self.__class__(os.path.realpath(self.strpath)) + + def atime(self): + """ return last access time of the path. 
""" + return self.stat().atime + + def __repr__(self): + return 'local(%r)' % self.strpath + + def __str__(self): + """ return string representation of the Path. """ + return self.strpath + + def chmod(self, mode, rec=0): + """ change permissions to the given mode. If mode is an + integer it directly encodes the os-specific modes. + if rec is True perform recursively. + """ + if not isinstance(mode, int): + raise TypeError("mode %r must be an integer" % (mode,)) + if rec: + for x in self.visit(rec=rec): + py.error.checked_call(os.chmod, str(x), mode) + py.error.checked_call(os.chmod, self.strpath, mode) + + def pypkgpath(self): + """ return the Python package path by looking for the last + directory upwards which still contains an __init__.py. + Return None if a pkgpath can not be determined. + """ + pkgpath = None + for parent in self.parts(reverse=True): + if parent.isdir(): + if not parent.join('__init__.py').exists(): + break + if not isimportable(parent.basename): + break + pkgpath = parent + return pkgpath + + def _ensuresyspath(self, ensuremode, path): + if ensuremode: + s = str(path) + if ensuremode == "append": + if s not in sys.path: + sys.path.append(s) + else: + if s != sys.path[0]: + sys.path.insert(0, s) + + def pyimport(self, modname=None, ensuresyspath=True): + """ return path as an imported python module. + + If modname is None, look for the containing package + and construct an according module name. + The module will be put/looked up in sys.modules. + if ensuresyspath is True then the root dir for importing + the file (taking __init__.py files into account) will + be prepended to sys.path if it isn't there already. + If ensuresyspath=="append" the root dir will be appended + if it isn't already contained in sys.path. + if ensuresyspath is False no modification of syspath happens. + """ + if not self.check(): + raise py.error.ENOENT(self) + + pkgpath = None + if modname is None: + pkgpath = self.pypkgpath() + if pkgpath is not None: + pkgroot = pkgpath.dirpath() + names = self.new(ext="").relto(pkgroot).split(self.sep) + if names[-1] == "__init__": + names.pop() + modname = ".".join(names) + else: + pkgroot = self.dirpath() + modname = self.purebasename + + self._ensuresyspath(ensuresyspath, pkgroot) + __import__(modname) + mod = sys.modules[modname] + if self.basename == "__init__.py": + return mod # we don't check anything as we might + # we in a namespace package ... too icky to check + modfile = mod.__file__ + if modfile[-4:] in ('.pyc', '.pyo'): + modfile = modfile[:-1] + elif modfile.endswith('$py.class'): + modfile = modfile[:-9] + '.py' + if modfile.endswith(os.path.sep + "__init__.py"): + if self.basename != "__init__.py": + modfile = modfile[:-12] + try: + issame = self.samefile(modfile) + except py.error.ENOENT: + issame = False + if not issame: + raise self.ImportMismatchError(modname, modfile, self) + return mod + else: + try: + return sys.modules[modname] + except KeyError: + # we have a custom modname, do a pseudo-import + mod = py.std.types.ModuleType(modname) + mod.__file__ = str(self) + sys.modules[modname] = mod + try: + py.builtin.execfile(str(self), mod.__dict__) + except: + del sys.modules[modname] + raise + return mod + + def sysexec(self, *argv, **popen_opts): + """ return stdout text from executing a system child process, + where the 'self' path points to executable. + The process is directly invoked and not through a system shell. 
+ """ + from subprocess import Popen, PIPE + argv = map_as_list(str, argv) + popen_opts['stdout'] = popen_opts['stderr'] = PIPE + proc = Popen([str(self)] + argv, **popen_opts) + stdout, stderr = proc.communicate() + ret = proc.wait() + if py.builtin._isbytes(stdout): + stdout = py.builtin._totext(stdout, sys.getdefaultencoding()) + if ret != 0: + if py.builtin._isbytes(stderr): + stderr = py.builtin._totext(stderr, sys.getdefaultencoding()) + raise py.process.cmdexec.Error(ret, ret, str(self), + stdout, stderr,) + return stdout + + def sysfind(cls, name, checker=None, paths=None): + """ return a path object found by looking at the systems + underlying PATH specification. If the checker is not None + it will be invoked to filter matching paths. If a binary + cannot be found, None is returned + Note: This is probably not working on plain win32 systems + but may work on cygwin. + """ + if isabs(name): + p = py.path.local(name) + if p.check(file=1): + return p + else: + if paths is None: + if iswin32: + paths = py.std.os.environ['Path'].split(';') + if '' not in paths and '.' not in paths: + paths.append('.') + try: + systemroot = os.environ['SYSTEMROOT'] + except KeyError: + pass + else: + paths = [re.sub('%SystemRoot%', systemroot, path) + for path in paths] + else: + paths = py.std.os.environ['PATH'].split(':') + tryadd = [] + if iswin32: + tryadd += os.environ['PATHEXT'].split(os.pathsep) + tryadd.append("") + + for x in paths: + for addext in tryadd: + p = py.path.local(x).join(name, abs=True) + addext + try: + if p.check(file=1): + if checker: + if not checker(p): + continue + return p + except py.error.EACCES: + pass + return None + sysfind = classmethod(sysfind) + + def _gethomedir(cls): + try: + x = os.environ['HOME'] + except KeyError: + try: + x = os.environ["HOMEDRIVE"] + os.environ['HOMEPATH'] + except KeyError: + return None + return cls(x) + _gethomedir = classmethod(_gethomedir) + + #""" + #special class constructors for local filesystem paths + #""" + def get_temproot(cls): + """ return the system's temporary directory + (where tempfiles are usually created in) + """ + return py.path.local(py.std.tempfile.gettempdir()) + get_temproot = classmethod(get_temproot) + + def mkdtemp(cls, rootdir=None): + """ return a Path object pointing to a fresh new temporary directory + (which we created ourself). + """ + import tempfile + if rootdir is None: + rootdir = cls.get_temproot() + return cls(py.error.checked_call(tempfile.mkdtemp, dir=str(rootdir))) + mkdtemp = classmethod(mkdtemp) + + def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3, + lock_timeout = 172800): # two days + """ return unique directory with a number greater than the current + maximum one. The number is assumed to start directly after prefix. + if keep is true directories with a number less than (maxnum-keep) + will be removed. + """ + if rootdir is None: + rootdir = cls.get_temproot() + + def parse_num(path): + """ parse the number out of a path (if it matches the prefix) """ + bn = path.basename + if bn.startswith(prefix): + try: + return int(bn[len(prefix):]) + except ValueError: + pass + + # compute the maximum number currently in use with the + # prefix + lastmax = None + while True: + maxnum = -1 + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None: + maxnum = max(maxnum, num) + + # make the new directory + try: + udir = rootdir.mkdir(prefix + str(maxnum+1)) + except py.error.EEXIST: + # race condition: another thread/process created the dir + # in the meantime. 
Try counting again + if lastmax == maxnum: + raise + lastmax = maxnum + continue + break + + # put a .lock file in the new directory that will be removed at + # process exit + if lock_timeout: + lockfile = udir.join('.lock') + mypid = os.getpid() + if hasattr(lockfile, 'mksymlinkto'): + lockfile.mksymlinkto(str(mypid)) + else: + lockfile.write(str(mypid)) + def try_remove_lockfile(): + # in a fork() situation, only the last process should + # remove the .lock, otherwise the other processes run the + # risk of seeing their temporary dir disappear. For now + # we remove the .lock in the parent only (i.e. we assume + # that the children finish before the parent). + if os.getpid() != mypid: + return + try: + lockfile.remove() + except py.error.Error: + pass + atexit.register(try_remove_lockfile) + + # prune old directories + if keep: + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None and num <= (maxnum - keep): + lf = path.join('.lock') + try: + t1 = lf.lstat().mtime + t2 = lockfile.lstat().mtime + if not lock_timeout or abs(t2-t1) < lock_timeout: + continue # skip directories still locked + except py.error.Error: + pass # assume that it means that there is no 'lf' + try: + path.remove(rec=1) + except KeyboardInterrupt: + raise + except: # this might be py.error.Error, WindowsError ... + pass + + # make link... + try: + username = os.environ['USER'] #linux, et al + except KeyError: + try: + username = os.environ['USERNAME'] #windows + except KeyError: + username = 'current' + + src = str(udir) + dest = src[:src.rfind('-')] + '-' + username + try: + os.unlink(dest) + except OSError: + pass + try: + os.symlink(src, dest) + except (OSError, AttributeError, NotImplementedError): + pass + + return udir + make_numbered_dir = classmethod(make_numbered_dir) + +def copymode(src, dest): + py.std.shutil.copymode(src, dest) + +def copychunked(src, dest): + chunksize = 524288 # half a meg of bytes + fsrc = src.open('rb') + try: + fdest = dest.open('wb') + try: + while 1: + buf = fsrc.read(chunksize) + if not buf: + break + fdest.write(buf) + finally: + fdest.close() + finally: + fsrc.close() + +def isimportable(name): + if name and (name[0].isalpha() or name[0] == '_'): + name = name.replace("_", '') + return not name or name.isalnum() diff --git a/tests/_lib_vendors/py/_path/svnurl.py b/tests/_lib_vendors/py/_path/svnurl.py new file mode 100644 index 0000000..78d7131 --- /dev/null +++ b/tests/_lib_vendors/py/_path/svnurl.py @@ -0,0 +1,380 @@ +""" +module defining a subversion path object based on the external +command 'svn'. This modules aims to work with svn 1.3 and higher +but might also interact well with earlier versions. +""" + +import os, sys, time, re +import py +from py import path, process +from py._path import common +from py._path import svnwc as svncommon +from py._path.cacheutil import BuildcostAccessCache, AgingCache + +DEBUG=False + +class SvnCommandPath(svncommon.SvnPathBase): + """ path implementation that offers access to (possibly remote) subversion + repositories. 
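+
+    A construction sketch (the repository URL and revision here are
+    invented for illustration):
+
+        p = py.path.svnurl("http://example.com/repo/trunk", rev=42)
+        if p.check(dir=1):
+            print(p.listdir())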
""" + + _lsrevcache = BuildcostAccessCache(maxentries=128) + _lsnorevcache = AgingCache(maxentries=1000, maxseconds=60.0) + + def __new__(cls, path, rev=None, auth=None): + self = object.__new__(cls) + if isinstance(path, cls): + rev = path.rev + auth = path.auth + path = path.strpath + svncommon.checkbadchars(path) + path = path.rstrip('/') + self.strpath = path + self.rev = rev + self.auth = auth + return self + + def __repr__(self): + if self.rev == -1: + return 'svnurl(%r)' % self.strpath + else: + return 'svnurl(%r, %r)' % (self.strpath, self.rev) + + def _svnwithrev(self, cmd, *args): + """ execute an svn command, append our own url and revision """ + if self.rev is None: + return self._svnwrite(cmd, *args) + else: + args = ['-r', self.rev] + list(args) + return self._svnwrite(cmd, *args) + + def _svnwrite(self, cmd, *args): + """ execute an svn command, append our own url """ + l = ['svn %s' % cmd] + args = ['"%s"' % self._escape(item) for item in args] + l.extend(args) + l.append('"%s"' % self._encodedurl()) + # fixing the locale because we can't otherwise parse + string = " ".join(l) + if DEBUG: + print("execing %s" % string) + out = self._svncmdexecauth(string) + return out + + def _svncmdexecauth(self, cmd): + """ execute an svn command 'as is' """ + cmd = svncommon.fixlocale() + cmd + if self.auth is not None: + cmd += ' ' + self.auth.makecmdoptions() + return self._cmdexec(cmd) + + def _cmdexec(self, cmd): + try: + out = process.cmdexec(cmd) + except py.process.cmdexec.Error: + e = sys.exc_info()[1] + if (e.err.find('File Exists') != -1 or + e.err.find('File already exists') != -1): + raise py.error.EEXIST(self) + raise + return out + + def _svnpopenauth(self, cmd): + """ execute an svn command, return a pipe for reading stdin """ + cmd = svncommon.fixlocale() + cmd + if self.auth is not None: + cmd += ' ' + self.auth.makecmdoptions() + return self._popen(cmd) + + def _popen(self, cmd): + return os.popen(cmd) + + def _encodedurl(self): + return self._escape(self.strpath) + + def _norev_delentry(self, path): + auth = self.auth and self.auth.makecmdoptions() or None + self._lsnorevcache.delentry((str(path), auth)) + + def open(self, mode='r'): + """ return an opened file with the given mode. """ + if mode not in ("r", "rU",): + raise ValueError("mode %r not supported" % (mode,)) + assert self.check(file=1) # svn cat returns an empty file otherwise + if self.rev is None: + return self._svnpopenauth('svn cat "%s"' % ( + self._escape(self.strpath), )) + else: + return self._svnpopenauth('svn cat -r %s "%s"' % ( + self.rev, self._escape(self.strpath))) + + def dirpath(self, *args, **kwargs): + """ return the directory path of the current path joined + with any given path arguments. + """ + l = self.strpath.split(self.sep) + if len(l) < 4: + raise py.error.EINVAL(self, "base is not valid") + elif len(l) == 4: + return self.join(*args, **kwargs) + else: + return self.new(basename='').join(*args, **kwargs) + + # modifying methods (cache must be invalidated) + def mkdir(self, *args, **kwargs): + """ create & return the directory joined with args. + pass a 'msg' keyword argument to set the commit message. 
+ """ + commit_msg = kwargs.get('msg', "mkdir by py lib invocation") + createpath = self.join(*args) + createpath._svnwrite('mkdir', '-m', commit_msg) + self._norev_delentry(createpath.dirpath()) + return createpath + + def copy(self, target, msg='copied by py lib invocation'): + """ copy path to target with checkin message msg.""" + if getattr(target, 'rev', None) is not None: + raise py.error.EINVAL(target, "revisions are immutable") + self._svncmdexecauth('svn copy -m "%s" "%s" "%s"' %(msg, + self._escape(self), self._escape(target))) + self._norev_delentry(target.dirpath()) + + def rename(self, target, msg="renamed by py lib invocation"): + """ rename this path to target with checkin message msg. """ + if getattr(self, 'rev', None) is not None: + raise py.error.EINVAL(self, "revisions are immutable") + self._svncmdexecauth('svn move -m "%s" --force "%s" "%s"' %( + msg, self._escape(self), self._escape(target))) + self._norev_delentry(self.dirpath()) + self._norev_delentry(self) + + def remove(self, rec=1, msg='removed by py lib invocation'): + """ remove a file or directory (or a directory tree if rec=1) with +checkin message msg.""" + if self.rev is not None: + raise py.error.EINVAL(self, "revisions are immutable") + self._svncmdexecauth('svn rm -m "%s" "%s"' %(msg, self._escape(self))) + self._norev_delentry(self.dirpath()) + + def export(self, topath): + """ export to a local path + + topath should not exist prior to calling this, returns a + py.path.local instance + """ + topath = py.path.local(topath) + args = ['"%s"' % (self._escape(self),), + '"%s"' % (self._escape(topath),)] + if self.rev is not None: + args = ['-r', str(self.rev)] + args + self._svncmdexecauth('svn export %s' % (' '.join(args),)) + return topath + + def ensure(self, *args, **kwargs): + """ ensure that an args-joined path exists (by default as + a file). If you specify a keyword argument 'dir=True' + then the path is forced to be a directory path. + """ + if getattr(self, 'rev', None) is not None: + raise py.error.EINVAL(self, "revisions are immutable") + target = self.join(*args) + dir = kwargs.get('dir', 0) + for x in target.parts(reverse=True): + if x.check(): + break + else: + raise py.error.ENOENT(target, "has not any valid base!") + if x == target: + if not x.check(dir=dir): + raise dir and py.error.ENOTDIR(x) or py.error.EISDIR(x) + return x + tocreate = target.relto(x) + basename = tocreate.split(self.sep, 1)[0] + tempdir = py.path.local.mkdtemp() + try: + tempdir.ensure(tocreate, dir=dir) + cmd = 'svn import -m "%s" "%s" "%s"' % ( + "ensure %s" % self._escape(tocreate), + self._escape(tempdir.join(basename)), + x.join(basename)._encodedurl()) + self._svncmdexecauth(cmd) + self._norev_delentry(x) + finally: + tempdir.remove() + return target + + # end of modifying methods + def _propget(self, name): + res = self._svnwithrev('propget', name) + return res[:-1] # strip trailing newline + + def _proplist(self): + res = self._svnwithrev('proplist') + lines = res.split('\n') + lines = [x.strip() for x in lines[1:]] + return svncommon.PropListDict(self, lines) + + def info(self): + """ return an Info structure with svn-provided information. 
""" + parent = self.dirpath() + nameinfo_seq = parent._listdir_nameinfo() + bn = self.basename + for name, info in nameinfo_seq: + if name == bn: + return info + raise py.error.ENOENT(self) + + + def _listdir_nameinfo(self): + """ return sequence of name-info directory entries of self """ + def builder(): + try: + res = self._svnwithrev('ls', '-v') + except process.cmdexec.Error: + e = sys.exc_info()[1] + if e.err.find('non-existent in that revision') != -1: + raise py.error.ENOENT(self, e.err) + elif e.err.find("E200009:") != -1: + raise py.error.ENOENT(self, e.err) + elif e.err.find('File not found') != -1: + raise py.error.ENOENT(self, e.err) + elif e.err.find('not part of a repository')!=-1: + raise py.error.ENOENT(self, e.err) + elif e.err.find('Unable to open')!=-1: + raise py.error.ENOENT(self, e.err) + elif e.err.lower().find('method not allowed')!=-1: + raise py.error.EACCES(self, e.err) + raise py.error.Error(e.err) + lines = res.split('\n') + nameinfo_seq = [] + for lsline in lines: + if lsline: + info = InfoSvnCommand(lsline) + if info._name != '.': # svn 1.5 produces '.' dirs, + nameinfo_seq.append((info._name, info)) + nameinfo_seq.sort() + return nameinfo_seq + auth = self.auth and self.auth.makecmdoptions() or None + if self.rev is not None: + return self._lsrevcache.getorbuild((self.strpath, self.rev, auth), + builder) + else: + return self._lsnorevcache.getorbuild((self.strpath, auth), + builder) + + def listdir(self, fil=None, sort=None): + """ list directory contents, possibly filter by the given fil func + and possibly sorted. + """ + if isinstance(fil, str): + fil = common.FNMatcher(fil) + nameinfo_seq = self._listdir_nameinfo() + if len(nameinfo_seq) == 1: + name, info = nameinfo_seq[0] + if name == self.basename and info.kind == 'file': + #if not self.check(dir=1): + raise py.error.ENOTDIR(self) + paths = [self.join(name) for (name, info) in nameinfo_seq] + if fil: + paths = [x for x in paths if fil(x)] + self._sortlist(paths, sort) + return paths + + + def log(self, rev_start=None, rev_end=1, verbose=False): + """ return a list of LogEntry instances for this path. +rev_start is the starting revision (defaulting to the first one). +rev_end is the last revision (defaulting to HEAD). +if verbose is True, then the LogEntry instances also know which files changed. +""" + assert self.check() #make it simpler for the pipe + rev_start = rev_start is None and "HEAD" or rev_start + rev_end = rev_end is None and "HEAD" or rev_end + + if rev_start == "HEAD" and rev_end == 1: + rev_opt = "" + else: + rev_opt = "-r %s:%s" % (rev_start, rev_end) + verbose_opt = verbose and "-v" or "" + xmlpipe = self._svnpopenauth('svn log --xml %s %s "%s"' % + (rev_opt, verbose_opt, self.strpath)) + from xml.dom import minidom + tree = minidom.parse(xmlpipe) + result = [] + for logentry in filter(None, tree.firstChild.childNodes): + if logentry.nodeType == logentry.ELEMENT_NODE: + result.append(svncommon.LogEntry(logentry)) + return result + +#01234567890123456789012345678901234567890123467 +# 2256 hpk 165 Nov 24 17:55 __init__.py +# XXX spotted by Guido, SVN 1.3.0 has different aligning, breaks the code!!! +# 1312 johnny 1627 May 05 14:32 test_decorators.py +# +class InfoSvnCommand: + # the '0?' part in the middle is an indication of whether the resource is + # locked, see 'svn help ls' + lspattern = re.compile( + r'^ *(?P\d+) +(?P.+?) +(0? *(?P\d+))? ' + '*(?P\w+ +\d{2} +[\d:]+) +(?P.*)$') + def __init__(self, line): + # this is a typical line from 'svn ls http://...' 
+ #_ 1127 jum 0 Jul 13 15:28 branch/ + match = self.lspattern.match(line) + data = match.groupdict() + self._name = data['file'] + if self._name[-1] == '/': + self._name = self._name[:-1] + self.kind = 'dir' + else: + self.kind = 'file' + #self.has_props = l.pop(0) == 'P' + self.created_rev = int(data['rev']) + self.last_author = data['author'] + self.size = data['size'] and int(data['size']) or 0 + self.mtime = parse_time_with_missing_year(data['date']) + self.time = self.mtime * 1000000 + + def __eq__(self, other): + return self.__dict__ == other.__dict__ + + +#____________________________________________________ +# +# helper functions +#____________________________________________________ +def parse_time_with_missing_year(timestr): + """ analyze the time part from a single line of "svn ls -v" + the svn output doesn't show the year makes the 'timestr' + ambigous. + """ + import calendar + t_now = time.gmtime() + + tparts = timestr.split() + month = time.strptime(tparts.pop(0), '%b')[1] + day = time.strptime(tparts.pop(0), '%d')[2] + last = tparts.pop(0) # year or hour:minute + try: + if ":" in last: + raise ValueError() + year = time.strptime(last, '%Y')[0] + hour = minute = 0 + except ValueError: + hour, minute = time.strptime(last, '%H:%M')[3:5] + year = t_now[0] + + t_result = (year, month, day, hour, minute, 0,0,0,0) + if t_result > t_now: + year -= 1 + t_result = (year, month, day, hour, minute, 0,0,0,0) + return calendar.timegm(t_result) + +class PathEntry: + def __init__(self, ppart): + self.strpath = ppart.firstChild.nodeValue.encode('UTF-8') + self.action = ppart.getAttribute('action').encode('UTF-8') + if self.action == 'A': + self.copyfrom_path = ppart.getAttribute('copyfrom-path').encode('UTF-8') + if self.copyfrom_path: + self.copyfrom_rev = int(ppart.getAttribute('copyfrom-rev')) + diff --git a/tests/_lib_vendors/py/_path/svnwc.py b/tests/_lib_vendors/py/_path/svnwc.py new file mode 100644 index 0000000..00d3b4b --- /dev/null +++ b/tests/_lib_vendors/py/_path/svnwc.py @@ -0,0 +1,1240 @@ +""" +svn-Command based Implementation of a Subversion WorkingCopy Path. + + SvnWCCommandPath is the main class. + +""" + +import os, sys, time, re, calendar +import py +import subprocess +from py._path import common + +#----------------------------------------------------------- +# Caching latest repository revision and repo-paths +# (getting them is slow with the current implementations) +# +# XXX make mt-safe +#----------------------------------------------------------- + +class cache: + proplist = {} + info = {} + entries = {} + prop = {} + +class RepoEntry: + def __init__(self, url, rev, timestamp): + self.url = url + self.rev = rev + self.timestamp = timestamp + + def __str__(self): + return "repo: %s;%s %s" %(self.url, self.rev, self.timestamp) + +class RepoCache: + """ The Repocache manages discovered repository paths + and their revisions. If inside a timeout the cache + will even return the revision of the root. 
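+
+        Sketch of the intended flow (URL and revision invented):
+
+            repositories.put("http://example.com/repo", 42)
+            url, rev = repositories.get("http://example.com/repo/trunk")
+            # within the timeout this yields ("http://example.com/repo", 42)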
+ """ + timeout = 20 # seconds after which we forget that we know the last revision + + def __init__(self): + self.repos = [] + + def clear(self): + self.repos = [] + + def put(self, url, rev, timestamp=None): + if rev is None: + return + if timestamp is None: + timestamp = time.time() + + for entry in self.repos: + if url == entry.url: + entry.timestamp = timestamp + entry.rev = rev + #print "set repo", entry + break + else: + entry = RepoEntry(url, rev, timestamp) + self.repos.append(entry) + #print "appended repo", entry + + def get(self, url): + now = time.time() + for entry in self.repos: + if url.startswith(entry.url): + if now < entry.timestamp + self.timeout: + #print "returning immediate Etrny", entry + return entry.url, entry.rev + return entry.url, -1 + return url, -1 + +repositories = RepoCache() + + +# svn support code + +ALLOWED_CHARS = "_ -/\\=$.~+%" #add characters as necessary when tested +if sys.platform == "win32": + ALLOWED_CHARS += ":" +ALLOWED_CHARS_HOST = ALLOWED_CHARS + '@:' + +def _getsvnversion(ver=[]): + try: + return ver[0] + except IndexError: + v = py.process.cmdexec("svn -q --version") + v.strip() + v = '.'.join(v.split('.')[:2]) + ver.append(v) + return v + +def _escape_helper(text): + text = str(text) + if py.std.sys.platform != 'win32': + text = str(text).replace('$', '\\$') + return text + +def _check_for_bad_chars(text, allowed_chars=ALLOWED_CHARS): + for c in str(text): + if c.isalnum(): + continue + if c in allowed_chars: + continue + return True + return False + +def checkbadchars(url): + # (hpk) not quite sure about the exact purpose, guido w.? + proto, uri = url.split("://", 1) + if proto != "file": + host, uripath = uri.split('/', 1) + # only check for bad chars in the non-protocol parts + if (_check_for_bad_chars(host, ALLOWED_CHARS_HOST) \ + or _check_for_bad_chars(uripath, ALLOWED_CHARS)): + raise ValueError("bad char in %r" % (url, )) + + +#_______________________________________________________________ + +class SvnPathBase(common.PathBase): + """ Base implementation for SvnPath implementations. """ + sep = '/' + + def _geturl(self): + return self.strpath + url = property(_geturl, None, None, "url of this svn-path.") + + def __str__(self): + """ return a string representation (including rev-number) """ + return self.strpath + + def __hash__(self): + return hash(self.strpath) + + def new(self, **kw): + """ create a modified version of this path. A 'rev' argument + indicates a new revision. + the following keyword arguments modify various path parts:: + + http://host.com/repo/path/file.ext + |-----------------------| dirname + |------| basename + |--| purebasename + |--| ext + """ + obj = object.__new__(self.__class__) + obj.rev = kw.get('rev', self.rev) + obj.auth = kw.get('auth', self.auth) + dirname, basename, purebasename, ext = self._getbyspec( + "dirname,basename,purebasename,ext") + if 'basename' in kw: + if 'purebasename' in kw or 'ext' in kw: + raise ValueError("invalid specification %r" % kw) + else: + pb = kw.setdefault('purebasename', purebasename) + ext = kw.setdefault('ext', ext) + if ext and not ext.startswith('.'): + ext = '.' + ext + kw['basename'] = pb + ext + + kw.setdefault('dirname', dirname) + kw.setdefault('sep', self.sep) + if kw['basename']: + obj.strpath = "%(dirname)s%(sep)s%(basename)s" % kw + else: + obj.strpath = "%(dirname)s" % kw + return obj + + def _getbyspec(self, spec): + """ get specified parts of the path. 'arg' is a string + with comma separated path parts. 
The parts are returned + in exactly the order of the specification. + + you may specify the following parts: + + http://host.com/repo/path/file.ext + |-----------------------| dirname + |------| basename + |--| purebasename + |--| ext + """ + res = [] + parts = self.strpath.split(self.sep) + for name in spec.split(','): + name = name.strip() + if name == 'dirname': + res.append(self.sep.join(parts[:-1])) + elif name == 'basename': + res.append(parts[-1]) + else: + basename = parts[-1] + i = basename.rfind('.') + if i == -1: + purebasename, ext = basename, '' + else: + purebasename, ext = basename[:i], basename[i:] + if name == 'purebasename': + res.append(purebasename) + elif name == 'ext': + res.append(ext) + else: + raise NameError("Don't know part %r" % name) + return res + + def __eq__(self, other): + """ return true if path and rev attributes each match """ + return (str(self) == str(other) and + (self.rev == other.rev or self.rev == other.rev)) + + def __ne__(self, other): + return not self == other + + def join(self, *args): + """ return a new Path (with the same revision) which is composed + of the self Path followed by 'args' path components. + """ + if not args: + return self + + args = tuple([arg.strip(self.sep) for arg in args]) + parts = (self.strpath, ) + args + newpath = self.__class__(self.sep.join(parts), self.rev, self.auth) + return newpath + + def propget(self, name): + """ return the content of the given property. """ + value = self._propget(name) + return value + + def proplist(self): + """ list all property names. """ + content = self._proplist() + return content + + def size(self): + """ Return the size of the file content of the Path. """ + return self.info().size + + def mtime(self): + """ Return the last modification time of the file. """ + return self.info().mtime + + # shared help methods + + def _escape(self, cmd): + return _escape_helper(cmd) + + + #def _childmaxrev(self): + # """ return maximum revision number of childs (or self.rev if no childs) """ + # rev = self.rev + # for name, info in self._listdir_nameinfo(): + # rev = max(rev, info.created_rev) + # return rev + + #def _getlatestrevision(self): + # """ return latest repo-revision for this path. 
""" + # url = self.strpath + # path = self.__class__(url, None) + # + # # we need a long walk to find the root-repo and revision + # while 1: + # try: + # rev = max(rev, path._childmaxrev()) + # previous = path + # path = path.dirpath() + # except (IOError, process.cmdexec.Error): + # break + # if rev is None: + # raise IOError, "could not determine newest repo revision for %s" % self + # return rev + + class Checkers(common.Checkers): + def dir(self): + try: + return self.path.info().kind == 'dir' + except py.error.Error: + return self._listdirworks() + + def _listdirworks(self): + try: + self.path.listdir() + except py.error.ENOENT: + return False + else: + return True + + def file(self): + try: + return self.path.info().kind == 'file' + except py.error.ENOENT: + return False + + def exists(self): + try: + return self.path.info() + except py.error.ENOENT: + return self._listdirworks() + +def parse_apr_time(timestr): + i = timestr.rfind('.') + if i == -1: + raise ValueError("could not parse %s" % timestr) + timestr = timestr[:i] + parsedtime = time.strptime(timestr, "%Y-%m-%dT%H:%M:%S") + return time.mktime(parsedtime) + +class PropListDict(dict): + """ a Dictionary which fetches values (InfoSvnCommand instances) lazily""" + def __init__(self, path, keynames): + dict.__init__(self, [(x, None) for x in keynames]) + self.path = path + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + if value is None: + value = self.path.propget(key) + dict.__setitem__(self, key, value) + return value + +def fixlocale(): + if sys.platform != 'win32': + return 'LC_ALL=C ' + return '' + +# some nasty chunk of code to solve path and url conversion and quoting issues +ILLEGAL_CHARS = '* | \ / : < > ? \t \n \x0b \x0c \r'.split(' ') +if os.sep in ILLEGAL_CHARS: + ILLEGAL_CHARS.remove(os.sep) +ISWINDOWS = sys.platform == 'win32' +_reg_allow_disk = re.compile(r'^([a-z]\:\\)?[^:]+$', re.I) +def _check_path(path): + illegal = ILLEGAL_CHARS[:] + sp = path.strpath + if ISWINDOWS: + illegal.remove(':') + if not _reg_allow_disk.match(sp): + raise ValueError('path may not contain a colon (:)') + for char in sp: + if char not in string.printable or char in illegal: + raise ValueError('illegal character %r in path' % (char,)) + +def path_to_fspath(path, addat=True): + _check_path(path) + sp = path.strpath + if addat and path.rev != -1: + sp = '%s@%s' % (sp, path.rev) + elif addat: + sp = '%s@HEAD' % (sp,) + return sp + +def url_from_path(path): + fspath = path_to_fspath(path, False) + quote = py.std.urllib.quote + if ISWINDOWS: + match = _reg_allow_disk.match(fspath) + fspath = fspath.replace('\\', '/') + if match.group(1): + fspath = '/%s%s' % (match.group(1).replace('\\', '/'), + quote(fspath[len(match.group(1)):])) + else: + fspath = quote(fspath) + else: + fspath = quote(fspath) + if path.rev != -1: + fspath = '%s@%s' % (fspath, path.rev) + else: + fspath = '%s@HEAD' % (fspath,) + return 'file://%s' % (fspath,) + +class SvnAuth(object): + """ container for auth information for Subversion """ + def __init__(self, username, password, cache_auth=True, interactive=True): + self.username = username + self.password = password + self.cache_auth = cache_auth + self.interactive = interactive + + def makecmdoptions(self): + uname = self.username.replace('"', '\\"') + passwd = self.password.replace('"', '\\"') + ret = [] + if uname: + ret.append('--username="%s"' % (uname,)) + if passwd: + ret.append('--password="%s"' % (passwd,)) + if not self.cache_auth: + ret.append('--no-auth-cache') + if not 
self.interactive:
+            ret.append('--non-interactive')
+        return ' '.join(ret)
+
+    def __str__(self):
+        return "<SvnAuth username=%s ...>" % (self.username,)
+
+rex_blame = re.compile(r'\s*(\d+)\s*(\S+) (.*)')
+
+class SvnWCCommandPath(common.PathBase):
+    """ path implementation offering access/modification to svn working copies.
+        It has methods similar to the functions in os.path and similar to the
+        commands of the svn client.
+    """
+    sep = os.sep
+
+    def __new__(cls, wcpath=None, auth=None):
+        self = object.__new__(cls)
+        if isinstance(wcpath, cls):
+            if wcpath.__class__ == cls:
+                return wcpath
+            wcpath = wcpath.localpath
+        if _check_for_bad_chars(str(wcpath),
+                                ALLOWED_CHARS):
+            raise ValueError("bad char in wcpath %s" % (wcpath, ))
+        self.localpath = py.path.local(wcpath)
+        self.auth = auth
+        return self
+
+    strpath = property(lambda x: str(x.localpath), None, None, "string path")
+    rev = property(lambda x: x.info(usecache=0).rev, None, None, "revision")
+
+    def __eq__(self, other):
+        return self.localpath == getattr(other, 'localpath', None)
+
+    def _geturl(self):
+        if getattr(self, '_url', None) is None:
+            info = self.info()
+            self._url = info.url  #SvnPath(info.url, info.rev)
+        assert isinstance(self._url, py.builtin._basestring)
+        return self._url
+
+    url = property(_geturl, None, None, "url of this WC item")
+
+    def _escape(self, cmd):
+        return _escape_helper(cmd)
+
+    def dump(self, obj):
+        """ pickle object into path location"""
+        return self.localpath.dump(obj)
+
+    def svnurl(self):
+        """ return current SvnPath for this WC-item. """
+        info = self.info()
+        return py.path.svnurl(info.url)
+
+    def __repr__(self):
+        return "svnwc(%r)" % (self.strpath)  # , self._url)
+
+    def __str__(self):
+        return str(self.localpath)
+
+    def _makeauthoptions(self):
+        if self.auth is None:
+            return ''
+        return self.auth.makecmdoptions()
+
+    def _authsvn(self, cmd, args=None):
+        args = args and list(args) or []
+        args.append(self._makeauthoptions())
+        return self._svn(cmd, *args)
+
+    def _svn(self, cmd, *args):
+        l = ['svn %s' % cmd]
+        args = [self._escape(item) for item in args]
+        l.extend(args)
+        l.append('"%s"' % self._escape(self.strpath))
+        # try fixing the locale because we can't otherwise parse
+        string = fixlocale() + " ".join(l)
+        try:
+            try:
+                key = 'LC_MESSAGES'
+                hold = os.environ.get(key)
+                os.environ[key] = 'C'
+                out = py.process.cmdexec(string)
+            finally:
+                if hold:
+                    os.environ[key] = hold
+                else:
+                    del os.environ[key]
+        except py.process.cmdexec.Error:
+            e = sys.exc_info()[1]
+            strerr = e.err.lower()
+            if strerr.find('not found') != -1:
+                raise py.error.ENOENT(self)
+            elif strerr.find("E200009:") != -1:
+                raise py.error.ENOENT(self)
+            if (strerr.find('file exists') != -1 or
+                    strerr.find('file already exists') != -1 or
+                    strerr.find('w150002:') != -1 or
+                    strerr.find("can't create directory") != -1):
+                raise py.error.EEXIST(strerr)  #self)
+            raise
+        return out
+
+    def switch(self, url):
+        """ switch to given URL. """
+        self._authsvn('switch', [url])
+
+    def checkout(self, url=None, rev=None):
+        """ checkout from url to local wcpath. """
+        args = []
+        if url is None:
+            url = self.url
+        if rev is None or rev == -1:
+            if (py.std.sys.platform != 'win32' and
+                    _getsvnversion() == '1.3'):
+                url += "@HEAD"
+        else:
+            if _getsvnversion() == '1.3':
+                url += "@%d" % rev
+            else:
+                args.append('-r' + str(rev))
+        args.append(url)
+        self._authsvn('co', args)
+
+    def update(self, rev='HEAD', interactive=True):
+        """ update working copy item to given revision. (None -> HEAD).
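+
+        For example (the working copy path is invented):
+
+            wc = py.path.svnwc("/tmp/checkout")
+            wc.update()           # update to HEAD
+            wc.update(rev=1234)   # update to a fixed revision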
""" + opts = ['-r', rev] + if not interactive: + opts.append("--non-interactive") + self._authsvn('up', opts) + + def write(self, content, mode='w'): + """ write content into local filesystem wc. """ + self.localpath.write(content, mode) + + def dirpath(self, *args): + """ return the directory Path of the current Path. """ + return self.__class__(self.localpath.dirpath(*args), auth=self.auth) + + def _ensuredirs(self): + parent = self.dirpath() + if parent.check(dir=0): + parent._ensuredirs() + if self.check(dir=0): + self.mkdir() + return self + + def ensure(self, *args, **kwargs): + """ ensure that an args-joined path exists (by default as + a file). if you specify a keyword argument 'directory=True' + then the path is forced to be a directory path. + """ + p = self.join(*args) + if p.check(): + if p.check(versioned=False): + p.add() + return p + if kwargs.get('dir', 0): + return p._ensuredirs() + parent = p.dirpath() + parent._ensuredirs() + p.write("") + p.add() + return p + + def mkdir(self, *args): + """ create & return the directory joined with args. """ + if args: + return self.join(*args).mkdir() + else: + self._svn('mkdir') + return self + + def add(self): + """ add ourself to svn """ + self._svn('add') + + def remove(self, rec=1, force=1): + """ remove a file or a directory tree. 'rec'ursive is + ignored and considered always true (because of + underlying svn semantics. + """ + assert rec, "svn cannot remove non-recursively" + if not self.check(versioned=True): + # not added to svn (anymore?), just remove + py.path.local(self).remove() + return + flags = [] + if force: + flags.append('--force') + self._svn('remove', *flags) + + def copy(self, target): + """ copy path to target.""" + py.process.cmdexec("svn copy %s %s" %(str(self), str(target))) + + def rename(self, target): + """ rename this path to target. """ + py.process.cmdexec("svn move --force %s %s" %(str(self), str(target))) + + def lock(self): + """ set a lock (exclusive) on the resource """ + out = self._authsvn('lock').strip() + if not out: + # warning or error, raise exception + raise ValueError("unknown error in svn lock command") + + def unlock(self): + """ unset a previously set lock """ + out = self._authsvn('unlock').strip() + if out.startswith('svn:'): + # warning or error, raise exception + raise Exception(out[4:]) + + def cleanup(self): + """ remove any locks from the resource """ + # XXX should be fixed properly!!! + try: + self.unlock() + except: + pass + + def status(self, updates=0, rec=0, externals=0): + """ return (collective) Status object for this file. 
""" + # http://svnbook.red-bean.com/book.html#svn-ch-3-sect-4.3.1 + # 2201 2192 jum test + # XXX + if externals: + raise ValueError("XXX cannot perform status() " + "on external items yet") + else: + #1.2 supports: externals = '--ignore-externals' + externals = '' + if rec: + rec= '' + else: + rec = '--non-recursive' + + # XXX does not work on all subversion versions + #if not externals: + # externals = '--ignore-externals' + + if updates: + updates = '-u' + else: + updates = '' + + try: + cmd = 'status -v --xml --no-ignore %s %s %s' % ( + updates, rec, externals) + out = self._authsvn(cmd) + except py.process.cmdexec.Error: + cmd = 'status -v --no-ignore %s %s %s' % ( + updates, rec, externals) + out = self._authsvn(cmd) + rootstatus = WCStatus(self).fromstring(out, self) + else: + rootstatus = XMLWCStatus(self).fromstring(out, self) + return rootstatus + + def diff(self, rev=None): + """ return a diff of the current path against revision rev (defaulting + to the last one). + """ + args = [] + if rev is not None: + args.append("-r %d" % rev) + out = self._authsvn('diff', args) + return out + + def blame(self): + """ return a list of tuples of three elements: + (revision, commiter, line) + """ + out = self._svn('blame') + result = [] + blamelines = out.splitlines() + reallines = py.path.svnurl(self.url).readlines() + for i, (blameline, line) in enumerate( + zip(blamelines, reallines)): + m = rex_blame.match(blameline) + if not m: + raise ValueError("output line %r of svn blame does not match " + "expected format" % (line, )) + rev, name, _ = m.groups() + result.append((int(rev), name, line)) + return result + + _rex_commit = re.compile(r'.*Committed revision (\d+)\.$', re.DOTALL) + def commit(self, msg='', rec=1): + """ commit with support for non-recursive commits """ + # XXX i guess escaping should be done better here?!? + cmd = 'commit -m "%s" --force-log' % (msg.replace('"', '\\"'),) + if not rec: + cmd += ' -N' + out = self._authsvn(cmd) + try: + del cache.info[self] + except KeyError: + pass + if out: + m = self._rex_commit.match(out) + return int(m.group(1)) + + def propset(self, name, value, *args): + """ set property name to value on this path. """ + d = py.path.local.mkdtemp() + try: + p = d.join('value') + p.write(value) + self._svn('propset', name, '--file', str(p), *args) + finally: + d.remove() + + def propget(self, name): + """ get property name on this path. """ + res = self._svn('propget', name) + return res[:-1] # strip trailing newline + + def propdel(self, name): + """ delete property name on this path. """ + res = self._svn('propdel', name) + return res[:-1] # strip trailing newline + + def proplist(self, rec=0): + """ return a mapping of property names to property values. +If rec is True, then return a dictionary mapping sub-paths to such mappings. +""" + if rec: + res = self._svn('proplist -R') + return make_recursive_propdict(self, res) + else: + res = self._svn('proplist') + lines = res.split('\n') + lines = [x.strip() for x in lines[1:]] + return PropListDict(self, lines) + + def revert(self, rec=0): + """ revert the local changes of this path. if rec is True, do so +recursively. """ + if rec: + result = self._svn('revert -R') + else: + result = self._svn('revert') + return result + + def new(self, **kw): + """ create a modified version of this path. A 'rev' argument + indicates a new revision. 
+ the following keyword arguments modify various path parts: + + http://host.com/repo/path/file.ext + |-----------------------| dirname + |------| basename + |--| purebasename + |--| ext + """ + if kw: + localpath = self.localpath.new(**kw) + else: + localpath = self.localpath + return self.__class__(localpath, auth=self.auth) + + def join(self, *args, **kwargs): + """ return a new Path (with the same revision) which is composed + of the self Path followed by 'args' path components. + """ + if not args: + return self + localpath = self.localpath.join(*args, **kwargs) + return self.__class__(localpath, auth=self.auth) + + def info(self, usecache=1): + """ return an Info structure with svn-provided information. """ + info = usecache and cache.info.get(self) + if not info: + try: + output = self._svn('info') + except py.process.cmdexec.Error: + e = sys.exc_info()[1] + if e.err.find('Path is not a working copy directory') != -1: + raise py.error.ENOENT(self, e.err) + elif e.err.find("is not under version control") != -1: + raise py.error.ENOENT(self, e.err) + raise + # XXX SVN 1.3 has output on stderr instead of stdout (while it does + # return 0!), so a bit nasty, but we assume no output is output + # to stderr... + if (output.strip() == '' or + output.lower().find('not a versioned resource') != -1): + raise py.error.ENOENT(self, output) + info = InfoSvnWCCommand(output) + + # Can't reliably compare on Windows without access to win32api + if py.std.sys.platform != 'win32': + if info.path != self.localpath: + raise py.error.ENOENT(self, "not a versioned resource:" + + " %s != %s" % (info.path, self.localpath)) + cache.info[self] = info + return info + + def listdir(self, fil=None, sort=None): + """ return a sequence of Paths. + + listdir will return either a tuple or a list of paths + depending on implementation choices. + """ + if isinstance(fil, str): + fil = common.FNMatcher(fil) + # XXX unify argument naming with LocalPath.listdir + def notsvn(path): + return path.basename != '.svn' + + paths = [] + for localpath in self.localpath.listdir(notsvn): + p = self.__class__(localpath, auth=self.auth) + if notsvn(p) and (not fil or fil(p)): + paths.append(p) + self._sortlist(paths, sort) + return paths + + def open(self, mode='r'): + """ return an opened file with the given mode. """ + return open(self.strpath, mode) + + def _getbyspec(self, spec): + return self.localpath._getbyspec(spec) + + class Checkers(py.path.local.Checkers): + def __init__(self, path): + self.svnwcpath = path + self.path = path.localpath + def versioned(self): + try: + s = self.svnwcpath.info() + except (py.error.ENOENT, py.error.EEXIST): + return False + except py.process.cmdexec.Error: + e = sys.exc_info()[1] + if e.err.find('is not a working copy')!=-1: + return False + if e.err.lower().find('not a versioned resource') != -1: + return False + raise + else: + return True + + def log(self, rev_start=None, rev_end=1, verbose=False): + """ return a list of LogEntry instances for this path. +rev_start is the starting revision (defaulting to the first one). +rev_end is the last revision (defaulting to HEAD). +if verbose is True, then the LogEntry instances also know which files changed. 
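+
+        Sketch (path invented; the attributes are those of the LogEntry
+        class below):
+
+            wc = py.path.svnwc("/tmp/checkout")
+            for entry in wc.log(verbose=True):
+                print(entry.rev, entry.author, entry.msg)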
+""" + assert self.check() # make it simpler for the pipe + rev_start = rev_start is None and "HEAD" or rev_start + rev_end = rev_end is None and "HEAD" or rev_end + if rev_start == "HEAD" and rev_end == 1: + rev_opt = "" + else: + rev_opt = "-r %s:%s" % (rev_start, rev_end) + verbose_opt = verbose and "-v" or "" + locale_env = fixlocale() + # some blather on stderr + auth_opt = self._makeauthoptions() + #stdin, stdout, stderr = os.popen3(locale_env + + # 'svn log --xml %s %s %s "%s"' % ( + # rev_opt, verbose_opt, auth_opt, + # self.strpath)) + cmd = locale_env + 'svn log --xml %s %s %s "%s"' % ( + rev_opt, verbose_opt, auth_opt, self.strpath) + + popen = subprocess.Popen(cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=True, + ) + stdout, stderr = popen.communicate() + stdout = py.builtin._totext(stdout, sys.getdefaultencoding()) + minidom,ExpatError = importxml() + try: + tree = minidom.parseString(stdout) + except ExpatError: + raise ValueError('no such revision') + result = [] + for logentry in filter(None, tree.firstChild.childNodes): + if logentry.nodeType == logentry.ELEMENT_NODE: + result.append(LogEntry(logentry)) + return result + + def size(self): + """ Return the size of the file content of the Path. """ + return self.info().size + + def mtime(self): + """ Return the last modification time of the file. """ + return self.info().mtime + + def __hash__(self): + return hash((self.strpath, self.__class__, self.auth)) + + +class WCStatus: + attrnames = ('modified','added', 'conflict', 'unchanged', 'external', + 'deleted', 'prop_modified', 'unknown', 'update_available', + 'incomplete', 'kindmismatch', 'ignored', 'locked', 'replaced' + ) + + def __init__(self, wcpath, rev=None, modrev=None, author=None): + self.wcpath = wcpath + self.rev = rev + self.modrev = modrev + self.author = author + + for name in self.attrnames: + setattr(self, name, []) + + def allpath(self, sort=True, **kw): + d = {} + for name in self.attrnames: + if name not in kw or kw[name]: + for path in getattr(self, name): + d[path] = 1 + l = d.keys() + if sort: + l.sort() + return l + + # XXX a bit scary to assume there's always 2 spaces between username and + # path, however with win32 allowing spaces in user names there doesn't + # seem to be a more solid approach :( + _rex_status = re.compile(r'\s+(\d+|-)\s+(\S+)\s+(.+?)\s{2,}(.*)') + + def fromstring(data, rootwcpath, rev=None, modrev=None, author=None): + """ return a new WCStatus object from data 's' + """ + rootstatus = WCStatus(rootwcpath, rev, modrev, author) + update_rev = None + for line in data.split('\n'): + if not line.strip(): + continue + #print "processing %r" % line + flags, rest = line[:8], line[8:] + # first column + c0,c1,c2,c3,c4,c5,x6,c7 = flags + #if '*' in line: + # print "flags", repr(flags), "rest", repr(rest) + + if c0 in '?XI': + fn = line.split(None, 1)[1] + if c0 == '?': + wcpath = rootwcpath.join(fn, abs=1) + rootstatus.unknown.append(wcpath) + elif c0 == 'X': + wcpath = rootwcpath.__class__( + rootwcpath.localpath.join(fn, abs=1), + auth=rootwcpath.auth) + rootstatus.external.append(wcpath) + elif c0 == 'I': + wcpath = rootwcpath.join(fn, abs=1) + rootstatus.ignored.append(wcpath) + + continue + + #elif c0 in '~!' 
or c4 == 'S': + # raise NotImplementedError("received flag %r" % c0) + + m = WCStatus._rex_status.match(rest) + if not m: + if c7 == '*': + fn = rest.strip() + wcpath = rootwcpath.join(fn, abs=1) + rootstatus.update_available.append(wcpath) + continue + if line.lower().find('against revision:')!=-1: + update_rev = int(rest.split(':')[1].strip()) + continue + if line.lower().find('status on external') > -1: + # XXX not sure what to do here... perhaps we want to + # store some state instead of just continuing, as right + # now it makes the top-level external get added twice + # (once as external, once as 'normal' unchanged item) + # because of the way SVN presents external items + continue + # keep trying + raise ValueError("could not parse line %r" % line) + else: + rev, modrev, author, fn = m.groups() + wcpath = rootwcpath.join(fn, abs=1) + #assert wcpath.check() + if c0 == 'M': + assert wcpath.check(file=1), "didn't expect a directory with changed content here" + rootstatus.modified.append(wcpath) + elif c0 == 'A' or c3 == '+' : + rootstatus.added.append(wcpath) + elif c0 == 'D': + rootstatus.deleted.append(wcpath) + elif c0 == 'C': + rootstatus.conflict.append(wcpath) + elif c0 == '~': + rootstatus.kindmismatch.append(wcpath) + elif c0 == '!': + rootstatus.incomplete.append(wcpath) + elif c0 == 'R': + rootstatus.replaced.append(wcpath) + elif not c0.strip(): + rootstatus.unchanged.append(wcpath) + else: + raise NotImplementedError("received flag %r" % c0) + + if c1 == 'M': + rootstatus.prop_modified.append(wcpath) + # XXX do we cover all client versions here? + if c2 == 'L' or c5 == 'K': + rootstatus.locked.append(wcpath) + if c7 == '*': + rootstatus.update_available.append(wcpath) + + if wcpath == rootwcpath: + rootstatus.rev = rev + rootstatus.modrev = modrev + rootstatus.author = author + if update_rev: + rootstatus.update_rev = update_rev + continue + return rootstatus + fromstring = staticmethod(fromstring) + +class XMLWCStatus(WCStatus): + def fromstring(data, rootwcpath, rev=None, modrev=None, author=None): + """ parse 'data' (XML string as outputted by svn st) into a status obj + """ + # XXX for externals, the path is shown twice: once + # with external information, and once with full info as if + # the item was a normal non-external... 
the current way of + # dealing with this issue is by ignoring it - this does make + # externals appear as external items as well as 'normal', + # unchanged ones in the status object so this is far from ideal + rootstatus = WCStatus(rootwcpath, rev, modrev, author) + update_rev = None + minidom, ExpatError = importxml() + try: + doc = minidom.parseString(data) + except ExpatError: + e = sys.exc_info()[1] + raise ValueError(str(e)) + urevels = doc.getElementsByTagName('against') + if urevels: + rootstatus.update_rev = urevels[-1].getAttribute('revision') + for entryel in doc.getElementsByTagName('entry'): + path = entryel.getAttribute('path') + statusel = entryel.getElementsByTagName('wc-status')[0] + itemstatus = statusel.getAttribute('item') + + if itemstatus == 'unversioned': + wcpath = rootwcpath.join(path, abs=1) + rootstatus.unknown.append(wcpath) + continue + elif itemstatus == 'external': + wcpath = rootwcpath.__class__( + rootwcpath.localpath.join(path, abs=1), + auth=rootwcpath.auth) + rootstatus.external.append(wcpath) + continue + elif itemstatus == 'ignored': + wcpath = rootwcpath.join(path, abs=1) + rootstatus.ignored.append(wcpath) + continue + elif itemstatus == 'incomplete': + wcpath = rootwcpath.join(path, abs=1) + rootstatus.incomplete.append(wcpath) + continue + + rev = statusel.getAttribute('revision') + if itemstatus == 'added' or itemstatus == 'none': + rev = '0' + modrev = '?' + author = '?' + date = '' + elif itemstatus == "replaced": + pass + else: + #print entryel.toxml() + commitel = entryel.getElementsByTagName('commit')[0] + if commitel: + modrev = commitel.getAttribute('revision') + author = '' + author_els = commitel.getElementsByTagName('author') + if author_els: + for c in author_els[0].childNodes: + author += c.nodeValue + date = '' + for c in commitel.getElementsByTagName('date')[0]\ + .childNodes: + date += c.nodeValue + + wcpath = rootwcpath.join(path, abs=1) + + assert itemstatus != 'modified' or wcpath.check(file=1), ( + 'did\'t expect a directory with changed content here') + + itemattrname = { + 'normal': 'unchanged', + 'unversioned': 'unknown', + 'conflicted': 'conflict', + 'none': 'added', + }.get(itemstatus, itemstatus) + + attr = getattr(rootstatus, itemattrname) + attr.append(wcpath) + + propsstatus = statusel.getAttribute('props') + if propsstatus not in ('none', 'normal'): + rootstatus.prop_modified.append(wcpath) + + if wcpath == rootwcpath: + rootstatus.rev = rev + rootstatus.modrev = modrev + rootstatus.author = author + rootstatus.date = date + + # handle repos-status element (remote info) + rstatusels = entryel.getElementsByTagName('repos-status') + if rstatusels: + rstatusel = rstatusels[0] + ritemstatus = rstatusel.getAttribute('item') + if ritemstatus in ('added', 'modified'): + rootstatus.update_available.append(wcpath) + + lockels = entryel.getElementsByTagName('lock') + if len(lockels): + rootstatus.locked.append(wcpath) + + return rootstatus + fromstring = staticmethod(fromstring) + +class InfoSvnWCCommand: + def __init__(self, output): + # Path: test + # URL: http://codespeak.net/svn/std.path/trunk/dist/std.path/test + # Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada + # Revision: 2151 + # Node Kind: directory + # Schedule: normal + # Last Changed Author: hpk + # Last Changed Rev: 2100 + # Last Changed Date: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003) + # Properties Last Updated: 2003-11-03 14:47:48 +0100 (Mon, 03 Nov 2003) + + d = {} + for line in output.split('\n'): + if not line.strip(): + continue + key, value = 
line.split(':', 1) + key = key.lower().replace(' ', '') + value = value.strip() + d[key] = value + try: + self.url = d['url'] + except KeyError: + raise ValueError("Not a versioned resource") + #raise ValueError, "Not a versioned resource %r" % path + self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind'] + try: + self.rev = int(d['revision']) + except KeyError: + self.rev = None + + self.path = py.path.local(d['path']) + self.size = self.path.size() + if 'lastchangedrev' in d: + self.created_rev = int(d['lastchangedrev']) + if 'lastchangedauthor' in d: + self.last_author = d['lastchangedauthor'] + if 'lastchangeddate' in d: + self.mtime = parse_wcinfotime(d['lastchangeddate']) + self.time = self.mtime * 1000000 + + def __eq__(self, other): + return self.__dict__ == other.__dict__ + +def parse_wcinfotime(timestr): + """ Returns seconds since epoch, UTC. """ + # example: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003) + m = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr) + if not m: + raise ValueError("timestring %r does not match" % timestr) + timestr, timezone = m.groups() + # do not handle timezone specially, return value should be UTC + parsedtime = time.strptime(timestr, "%Y-%m-%d %H:%M:%S") + return calendar.timegm(parsedtime) + +def make_recursive_propdict(wcroot, + output, + rex = re.compile("Properties on '(.*)':")): + """ Return a dictionary of path->PropListDict mappings. """ + lines = [x for x in output.split('\n') if x] + pdict = {} + while lines: + line = lines.pop(0) + m = rex.match(line) + if not m: + raise ValueError("could not parse propget-line: %r" % line) + path = m.groups()[0] + wcpath = wcroot.join(path, abs=1) + propnames = [] + while lines and lines[0].startswith(' '): + propname = lines.pop(0).strip() + propnames.append(propname) + assert propnames, "must have found properties!" 
+        pdict[wcpath] = PropListDict(wcpath, propnames)
+    return pdict
+
+
+def importxml(cache=[]):
+    if cache:
+        return cache
+    from xml.dom import minidom
+    from xml.parsers.expat import ExpatError
+    cache.extend([minidom, ExpatError])
+    return cache
+
+class LogEntry:
+    def __init__(self, logentry):
+        self.rev = int(logentry.getAttribute('revision'))
+        for lpart in filter(None, logentry.childNodes):
+            if lpart.nodeType == lpart.ELEMENT_NODE:
+                if lpart.nodeName == 'author':
+                    self.author = lpart.firstChild.nodeValue
+                elif lpart.nodeName == 'msg':
+                    if lpart.firstChild:
+                        self.msg = lpart.firstChild.nodeValue
+                    else:
+                        self.msg = ''
+                elif lpart.nodeName == 'date':
+                    #2003-07-29T20:05:11.598637Z
+                    timestr = lpart.firstChild.nodeValue
+                    self.date = parse_apr_time(timestr)
+                elif lpart.nodeName == 'paths':
+                    self.strpaths = []
+                    for ppart in filter(None, lpart.childNodes):
+                        if ppart.nodeType == ppart.ELEMENT_NODE:
+                            self.strpaths.append(PathEntry(ppart))
+    def __repr__(self):
+        return '<Logentry rev=%d author=%s date=%s>' % (
+            self.rev, self.author, self.date)
+
diff --git a/tests/_lib_vendors/py/_process/__init__.py b/tests/_lib_vendors/py/_process/__init__.py
new file mode 100644
index 0000000..86c714a
--- /dev/null
+++ b/tests/_lib_vendors/py/_process/__init__.py
@@ -0,0 +1 @@
+""" high-level sub-process handling """
diff --git a/tests/_lib_vendors/py/_process/cmdexec.py b/tests/_lib_vendors/py/_process/cmdexec.py
new file mode 100644
index 0000000..f83a249
--- /dev/null
+++ b/tests/_lib_vendors/py/_process/cmdexec.py
@@ -0,0 +1,49 @@
+import sys
+import subprocess
+import py
+from subprocess import Popen, PIPE
+
+def cmdexec(cmd):
+    """ return unicode output of executing 'cmd' in a separate process.
+
+    raises a cmdexec.Error exception if the command failed.
+    the exception will provide an 'err' attribute containing
+    the error-output from the command.
+    if the subprocess module does not provide proper encoding/unicode strings,
+    sys.getdefaultencoding() will be used; if that does not exist, 'UTF-8'.
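+
+    A short sketch (the command is chosen for illustration):
+
+        out = py.process.cmdexec('echo hello')
+        assert out.strip() == 'hello'
+        # a failing command raises cmdexec.Error carrying .out and .err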
+ """ + process = subprocess.Popen(cmd, shell=True, + universal_newlines=True, + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = process.communicate() + if sys.version_info[0] < 3: # on py3 we get unicode strings, on py2 not + try: + default_encoding = sys.getdefaultencoding() # jython may not have it + except AttributeError: + default_encoding = sys.stdout.encoding or 'UTF-8' + out = unicode(out, process.stdout.encoding or default_encoding) + err = unicode(err, process.stderr.encoding or default_encoding) + status = process.poll() + if status: + raise ExecutionFailed(status, status, cmd, out, err) + return out + +class ExecutionFailed(py.error.Error): + def __init__(self, status, systemstatus, cmd, out, err): + Exception.__init__(self) + self.status = status + self.systemstatus = systemstatus + self.cmd = cmd + self.err = err + self.out = out + + def __str__(self): + return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err) + +# export the exception under the name 'py.process.cmdexec.Error' +cmdexec.Error = ExecutionFailed +try: + ExecutionFailed.__module__ = 'py.process.cmdexec' + ExecutionFailed.__name__ = 'Error' +except (AttributeError, TypeError): + pass diff --git a/tests/_lib_vendors/py/_process/forkedfunc.py b/tests/_lib_vendors/py/_process/forkedfunc.py new file mode 100644 index 0000000..1c28530 --- /dev/null +++ b/tests/_lib_vendors/py/_process/forkedfunc.py @@ -0,0 +1,120 @@ + +""" + ForkedFunc provides a way to run a function in a forked process + and get at its return value, stdout and stderr output as well + as signals and exitstatusus. +""" + +import py +import os +import sys +import marshal + + +def get_unbuffered_io(fd, filename): + f = open(str(filename), "w") + if fd != f.fileno(): + os.dup2(f.fileno(), fd) + class AutoFlush: + def write(self, data): + f.write(data) + f.flush() + def __getattr__(self, name): + return getattr(f, name) + return AutoFlush() + + +class ForkedFunc: + EXITSTATUS_EXCEPTION = 3 + + + def __init__(self, fun, args=None, kwargs=None, nice_level=0, + child_on_start=None, child_on_exit=None): + if args is None: + args = [] + if kwargs is None: + kwargs = {} + self.fun = fun + self.args = args + self.kwargs = kwargs + self.tempdir = tempdir = py.path.local.mkdtemp() + self.RETVAL = tempdir.ensure('retval') + self.STDOUT = tempdir.ensure('stdout') + self.STDERR = tempdir.ensure('stderr') + + pid = os.fork() + if pid: # in parent process + self.pid = pid + else: # in child process + self.pid = None + self._child(nice_level, child_on_start, child_on_exit) + + def _child(self, nice_level, child_on_start, child_on_exit): + # right now we need to call a function, but first we need to + # map all IO that might happen + sys.stdout = stdout = get_unbuffered_io(1, self.STDOUT) + sys.stderr = stderr = get_unbuffered_io(2, self.STDERR) + retvalf = self.RETVAL.open("wb") + EXITSTATUS = 0 + try: + if nice_level: + os.nice(nice_level) + try: + if child_on_start is not None: + child_on_start() + retval = self.fun(*self.args, **self.kwargs) + retvalf.write(marshal.dumps(retval)) + if child_on_exit is not None: + child_on_exit() + except: + excinfo = py.code.ExceptionInfo() + stderr.write(str(excinfo._getreprcrash())) + EXITSTATUS = self.EXITSTATUS_EXCEPTION + finally: + stdout.close() + stderr.close() + retvalf.close() + os.close(1) + os.close(2) + os._exit(EXITSTATUS) + + def waitfinish(self, waiter=os.waitpid): + pid, systemstatus = waiter(self.pid, 0) + if systemstatus: + if os.WIFSIGNALED(systemstatus): + exitstatus = 
os.WTERMSIG(systemstatus) + 128 + else: + exitstatus = os.WEXITSTATUS(systemstatus) + else: + exitstatus = 0 + signal = systemstatus & 0x7f + if not exitstatus and not signal: + retval = self.RETVAL.open('rb') + try: + retval_data = retval.read() + finally: + retval.close() + retval = marshal.loads(retval_data) + else: + retval = None + stdout = self.STDOUT.read() + stderr = self.STDERR.read() + self._removetemp() + return Result(exitstatus, signal, retval, stdout, stderr) + + def _removetemp(self): + if self.tempdir.check(): + self.tempdir.remove() + + def __del__(self): + if self.pid is not None: # only clean up in main process + self._removetemp() + + +class Result(object): + def __init__(self, exitstatus, signal, retval, stdout, stderr): + self.exitstatus = exitstatus + self.signal = signal + self.retval = retval + self.out = stdout + self.err = stderr diff --git a/tests/_lib_vendors/py/_process/killproc.py b/tests/_lib_vendors/py/_process/killproc.py new file mode 100644 index 0000000..18e8310 --- /dev/null +++ b/tests/_lib_vendors/py/_process/killproc.py @@ -0,0 +1,23 @@ +import py +import os, sys + +if sys.platform == "win32" or getattr(os, '_name', '') == 'nt': + try: + import ctypes + except ImportError: + def dokill(pid): + py.process.cmdexec("taskkill /F /PID %d" %(pid,)) + else: + def dokill(pid): + PROCESS_TERMINATE = 1 + handle = ctypes.windll.kernel32.OpenProcess( + PROCESS_TERMINATE, False, pid) + ctypes.windll.kernel32.TerminateProcess(handle, -1) + ctypes.windll.kernel32.CloseHandle(handle) +else: + def dokill(pid): + os.kill(pid, 15) + +def kill(pid): + """ kill process by id. """ + dokill(pid) diff --git a/tests/_lib_vendors/py/_std.py b/tests/_lib_vendors/py/_std.py new file mode 100644 index 0000000..97a9853 --- /dev/null +++ b/tests/_lib_vendors/py/_std.py @@ -0,0 +1,18 @@ +import sys + +class Std(object): + """ makes top-level python modules available as an attribute, + importing them on first access. + """ + + def __init__(self): + self.__dict__ = sys.modules + + def __getattr__(self, name): + try: + m = __import__(name) + except ImportError: + raise AttributeError("py.std: could not import %s" % name) + return m + +std = Std() diff --git a/tests/_lib_vendors/py/_xmlgen.py b/tests/_lib_vendors/py/_xmlgen.py new file mode 100644 index 0000000..2ffcaa1 --- /dev/null +++ b/tests/_lib_vendors/py/_xmlgen.py @@ -0,0 +1,253 @@ +""" +module for generating and serializing xml and html structures +by using simple python objects. + +(c) holger krekel, holger at merlinux eu. 
2009 +""" +import sys, re + +if sys.version_info >= (3,0): + def u(s): + return s + def unicode(x, errors=None): + if hasattr(x, '__unicode__'): + return x.__unicode__() + return str(x) +else: + def u(s): + return unicode(s) + unicode = unicode + + +class NamespaceMetaclass(type): + def __getattr__(self, name): + if name[:1] == '_': + raise AttributeError(name) + if self == Namespace: + raise ValueError("Namespace class is abstract") + tagspec = self.__tagspec__ + if tagspec is not None and name not in tagspec: + raise AttributeError(name) + classattr = {} + if self.__stickyname__: + classattr['xmlname'] = name + cls = type(name, (self.__tagclass__,), classattr) + setattr(self, name, cls) + return cls + +class Tag(list): + class Attr(object): + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + def __init__(self, *args, **kwargs): + super(Tag, self).__init__(args) + self.attr = self.Attr(**kwargs) + + def __unicode__(self): + return self.unicode(indent=0) + __str__ = __unicode__ + + def unicode(self, indent=2): + l = [] + SimpleUnicodeVisitor(l.append, indent).visit(self) + return u("").join(l) + + def __repr__(self): + name = self.__class__.__name__ + return "<%r tag object %d>" % (name, id(self)) + +Namespace = NamespaceMetaclass('Namespace', (object, ), { + '__tagspec__': None, + '__tagclass__': Tag, + '__stickyname__': False, +}) + +class HtmlTag(Tag): + def unicode(self, indent=2): + l = [] + HtmlVisitor(l.append, indent, shortempty=False).visit(self) + return u("").join(l) + +# exported plain html namespace +class html(Namespace): + __tagclass__ = HtmlTag + __stickyname__ = True + __tagspec__ = dict([(x,1) for x in ( + 'a,abbr,acronym,address,applet,area,b,bdo,big,blink,' + 'blockquote,body,br,button,caption,center,cite,code,col,' + 'colgroup,comment,dd,del,dfn,dir,div,dl,dt,em,embed,' + 'fieldset,font,form,frameset,h1,h2,h3,h4,h5,h6,head,html,' + 'i,iframe,img,input,ins,kbd,label,legend,li,link,listing,' + 'map,marquee,menu,meta,multicol,nobr,noembed,noframes,' + 'noscript,object,ol,optgroup,option,p,pre,q,s,script,' + 'select,small,span,strike,strong,style,sub,sup,table,' + 'tbody,td,textarea,tfoot,th,thead,title,tr,tt,u,ul,xmp,' + 'base,basefont,frame,hr,isindex,param,samp,var' + ).split(',') if x]) + + class Style(object): + def __init__(self, **kw): + for x, y in kw.items(): + x = x.replace('_', '-') + setattr(self, x, y) + + +class raw(object): + """just a box that can contain a unicode string that will be + included directly in the output""" + def __init__(self, uniobj): + self.uniobj = uniobj + +class SimpleUnicodeVisitor(object): + """ recursive visitor to write unicode. """ + def __init__(self, write, indent=0, curindent=0, shortempty=True): + self.write = write + self.cache = {} + self.visited = {} # for detection of recursion + self.indent = indent + self.curindent = curindent + self.parents = [] + self.shortempty = shortempty # short empty tags or not + + def visit(self, node): + """ dispatcher on node's class/bases name. 
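+
+        For example (illustrative): visiting an html.div instance walks
+        div -> HtmlTag -> Tag through the mro and dispatches to the Tag()
+        handler defined below:
+
+            node = html.div(html.p("hi"), class_="box")
+            text = node.unicode(indent=0)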
""" + cls = node.__class__ + try: + visitmethod = self.cache[cls] + except KeyError: + for subclass in cls.__mro__: + visitmethod = getattr(self, subclass.__name__, None) + if visitmethod is not None: + break + else: + visitmethod = self.__object + self.cache[cls] = visitmethod + visitmethod(node) + + # the default fallback handler is marked private + # to avoid clashes with the tag name object + def __object(self, obj): + #self.write(obj) + self.write(escape(unicode(obj))) + + def raw(self, obj): + self.write(obj.uniobj) + + def list(self, obj): + assert id(obj) not in self.visited + self.visited[id(obj)] = 1 + for elem in obj: + self.visit(elem) + + def Tag(self, tag): + assert id(tag) not in self.visited + try: + tag.parent = self.parents[-1] + except IndexError: + tag.parent = None + self.visited[id(tag)] = 1 + tagname = getattr(tag, 'xmlname', tag.__class__.__name__) + if self.curindent and not self._isinline(tagname): + self.write("\n" + u(' ') * self.curindent) + if tag: + self.curindent += self.indent + self.write(u('<%s%s>') % (tagname, self.attributes(tag))) + self.parents.append(tag) + for x in tag: + self.visit(x) + self.parents.pop() + self.write(u('') % tagname) + self.curindent -= self.indent + else: + nameattr = tagname+self.attributes(tag) + if self._issingleton(tagname): + self.write(u('<%s/>') % (nameattr,)) + else: + self.write(u('<%s>') % (nameattr, tagname)) + + def attributes(self, tag): + # serialize attributes + attrlist = dir(tag.attr) + attrlist.sort() + l = [] + for name in attrlist: + res = self.repr_attribute(tag.attr, name) + if res is not None: + l.append(res) + l.extend(self.getstyle(tag)) + return u("").join(l) + + def repr_attribute(self, attrs, name): + if name[:2] != '__': + value = getattr(attrs, name) + if name.endswith('_'): + name = name[:-1] + if isinstance(value, raw): + insert = value.uniobj + else: + insert = escape(unicode(value)) + return ' %s="%s"' % (name, insert) + + def getstyle(self, tag): + """ return attribute list suitable for styling. """ + try: + styledict = tag.style.__dict__ + except AttributeError: + return [] + else: + stylelist = [x+': ' + y for x,y in styledict.items()] + return [u(' style="%s"') % u('; ').join(stylelist)] + + def _issingleton(self, tagname): + """can (and will) be overridden in subclasses""" + return self.shortempty + + def _isinline(self, tagname): + """can (and will) be overridden in subclasses""" + return False + +class HtmlVisitor(SimpleUnicodeVisitor): + + single = dict([(x, 1) for x in + ('br,img,area,param,col,hr,meta,link,base,' + 'input,frame').split(',')]) + inline = dict([(x, 1) for x in + ('a abbr acronym b basefont bdo big br cite code dfn em font ' + 'i img input kbd label q s samp select small span strike ' + 'strong sub sup textarea tt u var'.split(' '))]) + + def repr_attribute(self, attrs, name): + if name == 'class_': + value = getattr(attrs, name) + if value is None: + return + return super(HtmlVisitor, self).repr_attribute(attrs, name) + + def _issingleton(self, tagname): + return tagname in self.single + + def _isinline(self, tagname): + return tagname in self.inline + + +class _escape: + def __init__(self): + self.escape = { + u('"') : u('"'), u('<') : u('<'), u('>') : u('>'), + u('&') : u('&'), u("'") : u('''), + } + self.charef_rex = re.compile(u("|").join(self.escape.keys())) + + def _replacer(self, match): + return self.escape[match.group(0)] + + def __call__(self, ustring): + """ xml-escape the given unicode string. 
""" + try: + ustring = unicode(ustring) + except UnicodeDecodeError: + ustring = unicode(ustring, 'utf-8', errors='replace') + return self.charef_rex.sub(self._replacer, ustring) + +escape = _escape() diff --git a/tests/_lib_vendors/py/test.py b/tests/_lib_vendors/py/test.py new file mode 100644 index 0000000..aa5beb1 --- /dev/null +++ b/tests/_lib_vendors/py/test.py @@ -0,0 +1,10 @@ +import sys +if __name__ == '__main__': + import pytest + sys.exit(pytest.main()) +else: + import sys, pytest + sys.modules['py.test'] = pytest + +# for more API entry points see the 'tests' definition +# in __init__.py diff --git a/tests/_lib_vendors/pytest.py b/tests/_lib_vendors/pytest.py new file mode 100644 index 0000000..e376e41 --- /dev/null +++ b/tests/_lib_vendors/pytest.py @@ -0,0 +1,28 @@ +# PYTHON_ARGCOMPLETE_OK +""" +pytest: unit and functional testing with Python. +""" +__all__ = [ + 'main', + 'UsageError', + 'cmdline', + 'hookspec', + 'hookimpl', + '__version__', +] + +if __name__ == '__main__': # if run as a script or by 'python -m pytest' + # we trigger the below "else" condition by the following import + import pytest + raise SystemExit(pytest.main()) + +# else we are imported + +from _pytest.config import ( + main, UsageError, _preloadplugins, cmdline, + hookspec, hookimpl +) +from _pytest import __version__ + +_preloadplugins() # to populate pytest.* namespace so help(pytest) works + diff --git a/tests/bindings/conftest.py b/tests/bindings/conftest.py new file mode 100644 index 0000000..724cf4d --- /dev/null +++ b/tests/bindings/conftest.py @@ -0,0 +1,75 @@ +import pytest + +from godot import OS, Node, Reference + + +__global_objs = [] + + +def generate_global_obj(type): + obj = type.new() + __global_objs.append(obj) + return obj + + +@pytest.fixture(scope="session", autouse=True) +def cleanup_global_objs(): + yield + for obj in __global_objs: + obj.free() + + +@pytest.fixture() +def generate_obj(check_memory_leak): + # Make this fixture depend on `check_memory_leak` to ensure it will + # check for memory leak after our own teardown + + objs = [] + + def _generate_obj(type): + obj = type.new() + objs.append(obj) + return obj + + yield _generate_obj + + # Node must be removed from the scenetree to avoid segfault on free + for obj in objs: + if isinstance(obj, Node): + parent = obj.get_parent() + if parent: + parent.remove_child(obj) + + while objs: + # Pop values to trigger gc for Reference instances + obj = objs.pop() + if not isinstance(obj, Reference): + obj.free() + + +@pytest.fixture +def current_node(): + # `conftest.py` is imported weirdly by pytest so we cannot just put a + # global variable in it and set it from `Main._ready` + from main import get_current_node + + return get_current_node() + + +@pytest.fixture(autouse=True) +def check_memory_leak(request): + if request.node.get_marker("ignore_leaks"): + yield + else: + dynamic_mem_start = OS.get_dynamic_memory_usage() + static_mem_start = OS.get_static_memory_usage() + + yield + + static_mem_end = OS.get_static_memory_usage() + dynamic_mem_end = OS.get_dynamic_memory_usage() + + static_leak = static_mem_end - static_mem_start + dynamic_leak = dynamic_mem_end - dynamic_mem_start + assert static_leak == 0 + assert dynamic_leak == 0 diff --git a/tests/bindings/main.py b/tests/bindings/main.py new file mode 100644 index 0000000..659a209 --- /dev/null +++ b/tests/bindings/main.py @@ -0,0 +1,39 @@ +import os +import pytest + +from godot import exposed, Node, OS + + +__current_node = None + + +def set_current_node(node): + global 
__current_node
+    assert __current_node is None
+    __current_node = node
+
+
+def get_current_node():
+    return __current_node
+
+
+@exposed
+class Main(Node):
+    def _ready(self):
+        set_current_node(self)
+        # Retrieve command line arguments passed through --pytest=...
+        prefix = "--pytest="
+        pytest_args = []
+        for gdarg in OS.get_cmdline_args():
+            arg = str(gdarg)
+            if arg.startswith(prefix):
+                pytest_args += arg[len(prefix) :].split(",")
+        if all(arg.startswith("-") for arg in pytest_args):
+            # Filter to avoid scanning `plugins` and `lib` directories
+            pytest_args += [x for x in os.listdir() if x.startswith("test_")]
+        # Run tests here
+        print(f"running `pytest {' '.join(pytest_args)}`")
+        if pytest.main(pytest_args):
+            OS.set_exit_code(1)
+        # Exit godot
+        self.get_tree().quit()
diff --git a/tests/bindings/main.tscn b/tests/bindings/main.tscn
new file mode 100644
index 0000000..f634a7c
--- /dev/null
+++ b/tests/bindings/main.tscn
@@ -0,0 +1,9 @@
+[gd_scene load_steps=2 format=2]
+
+[ext_resource path="res://main.py" type="Script" id=1]
+
+[node name="main" type="Node" index="0"]
+
+script = ExtResource( 1 )
+
+
diff --git a/tests/bindings/project.godot b/tests/bindings/project.godot
new file mode 100644
index 0000000..9f1a62a
--- /dev/null
+++ b/tests/bindings/project.godot
@@ -0,0 +1,29 @@
+; Engine configuration file.
+; It's best edited using the editor UI and not directly,
+; since the parameters that go here are not all obvious.
+;
+; Format:
+;   [section] ; section goes between []
+;   param=value ; assign values to parameters
+
+config_version=4
+
+_global_script_classes=[  ]
+_global_script_class_icons={
+
+}
+
+[application]
+
+run/main_scene="res://main.tscn"
+name="godot-bindings-tests"
+main_scene="res://main.tscn"
+
+[gdnative]
+
+singletons=[ "res://pythonscript.gdnlib" ]
+
+[python_script]
+
+io_streams_capture=false
+verbose=true
diff --git a/tests/bindings/pytest.ini b/tests/bindings/pytest.ini
new file mode 100644
index 0000000..570aca9
--- /dev/null
+++ b/tests/bindings/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+filterwarnings =
+    error
+    error::UserWarning
diff --git a/tests/bindings/pythonscript.gdnlib b/tests/bindings/pythonscript.gdnlib
new file mode 100644
index 0000000..1510867
--- /dev/null
+++ b/tests/bindings/pythonscript.gdnlib
@@ -0,0 +1,23 @@
+[general]
+
+singleton=true
+load_once=true
+symbol_prefix="godot_"
+
+[entry]
+
+X11.64="res://addons/pythonscript/x11-64/libpythonscript.so"
+X11.32="res://addons/pythonscript/x11-32/libpythonscript.so"
+Server.64="res://addons/pythonscript/x11-64/libpythonscript.so"
+Windows.64="res://addons/pythonscript/windows-64/pythonscript.dll"
+Windows.32="res://addons/pythonscript/windows-32/pythonscript.dll"
+OSX.64="res://addons/pythonscript/osx-64/libpythonscript.dylib"
+
+[dependencies]
+
+X11.64=[]
+X11.32=[]
+Server.64=[]
+Windows.64=[]
+Windows.32=[]
+OSX.64=[]
diff --git a/tests/bindings/test_aabb.py b/tests/bindings/test_aabb.py
new file mode 100644
index 0000000..1ce0a60
--- /dev/null
+++ b/tests/bindings/test_aabb.py
@@ -0,0 +1,115 @@
+import pytest
+
+from godot import AABB, Vector3, Plane
+
+
+def test_base():
+    v = AABB(Vector3(1, 2, 3), Vector3(4, 5, 6))
+    assert type(v) == AABB
+    v2 = AABB(Vector3(1, 2, 3), Vector3(4, 5, 7))
+    assert type(v) == AABB
+    assert v2 == AABB(Vector3(1, 2, 3), Vector3(4, 5, 7))
+    assert v != v2
+
+
+def test_repr():
+    v = AABB(Vector3(1, 2, 3), Vector3(4, 5, 6))
+    assert repr(v) == ""
+
+
+def test_instantiate():
+    # Can build it with int or float or nothing
+    msg_tmpl = "%s vs (expected) %s
(args=%s)" + for args, expected_pos, expected_size in ( + [(), Vector3(0, 0, 0), Vector3(0, 0, 0)], + [(Vector3(0, 1, 0), Vector3(0, 0, 1)), Vector3(0, 1, 0), Vector3(0, 0, 1)], + ): + v = AABB(*args) + assert v.position == expected_pos, msg_tmpl % (v.position, expected_pos, args) + assert v.size == expected_size, msg_tmpl % (v.size, expected_size, args) + with pytest.raises(TypeError): + AABB("a", Vector3()) + with pytest.raises(TypeError): + AABB(Vector3(), "b") + + +@pytest.mark.parametrize( + "field,ret_type,params", + [ + ["get_area", float, ()], + ["has_no_area", bool, ()], + ["has_no_surface", bool, ()], + ["intersects", bool, (AABB(Vector3(1, 2, 3), Vector3(4, 5, 6)),)], + ["encloses", bool, (AABB(Vector3(1, 2, 3), Vector3(4, 5, 6)),)], + ["merge", AABB, (AABB(Vector3(1, 2, 3), Vector3(4, 5, 6)),)], + ["intersection", AABB, (AABB(Vector3(1, 2, 3), Vector3(4, 5, 6)),)], + # ['intersects_plane', bool, (Plane(), )], # TODO: wait for plane + ["intersects_segment", bool, (Vector3(1, 2, 3), Vector3(4, 5, 6))], + ["has_point", bool, (Vector3(1, 2, 3),)], + ["get_support", Vector3, (Vector3(1, 2, 3),)], + ["get_longest_axis", Vector3, ()], + ["get_longest_axis_index", int, ()], + ["get_longest_axis_size", float, ()], + ["get_shortest_axis", Vector3, ()], + ["get_shortest_axis_index", int, ()], + ["get_shortest_axis_size", float, ()], + ["expand", AABB, (Vector3(1, 2, 3),)], + ["grow", AABB, (0.5,)], + ["get_endpoint", Vector3, (0,)], + ], + ids=lambda x: x[0], +) +def test_methods(field, ret_type, params): + v = AABB() + # Don't test methods' validity but bindings one + assert hasattr(v, field) + method = getattr(v, field) + assert callable(method) + ret = method(*params) + assert type(ret) == ret_type + + +@pytest.mark.parametrize( + "field,ret_type", [("position", Vector3), ("size", Vector3)], ids=lambda x: x[0] +) +def test_properties(field, ret_type): + v = AABB(Vector3(1, 2, 3), Vector3(4, 5, 6)) + assert hasattr(v, field) + field_val = getattr(v, field) + assert type(field_val) == ret_type + for val in (Vector3(), Vector3(0.1, -0.1, 2)): + setattr(v, field, val) + field_val = getattr(v, field) + assert field_val == val + + +@pytest.mark.parametrize( + "field,bad_value", + [ + ("position", "dummy"), + ("size", "dummy"), + ("position", None), + ("size", None), + ("position", 42), + ("size", 42), + ], + ids=lambda x: x[0], +) +def test_bad_properties(field, bad_value): + v = AABB() + with pytest.raises(TypeError): + setattr(v, field, bad_value) + + +def test_equal(): + arr = AABB(Vector3(1, 2, 3), Vector3(4, 5, 6)) + other = AABB(Vector3(1, 2, 3), Vector3(4, 5, 6)) + assert arr == other + bad = AABB(Vector3(6, 5, 4), Vector3(3, 2, 1)) + assert not arr == bad # Force use of __eq__ + + +@pytest.mark.parametrize("arg", [None, 0, "foo", AABB(Vector3(6, 5, 4), Vector3(3, 2, 1))]) +def test_bad_equal(arg): + arr = AABB(Vector3(1, 2, 3), Vector3(4, 5, 6)) + assert arr != arg diff --git a/tests/bindings/test_array.py b/tests/bindings/test_array.py new file mode 100644 index 0000000..c9ec622 --- /dev/null +++ b/tests/bindings/test_array.py @@ -0,0 +1,299 @@ +import pytest + +from godot import ( + GDString, + Array, + Vector2, + PoolColorArray, + PoolVector3Array, + PoolVector2Array, + PoolStringArray, + PoolRealArray, + PoolIntArray, + PoolByteArray, + Node, + Resource, + Area2D, + OS, +) + + +def test_base(): + v = Array() + assert type(v) == Array + + +def test_equal(current_node): + arr = Array() + other = Array() + for item in [1, "foo", current_node, OS, Vector2()]: + arr.append(item) + 
other.append(item) + assert arr == other + bad = Array([0, 0, 0]) + assert not arr == bad # Force use of __eq__ + assert not arr == None # Force use of __eq__ + + +@pytest.mark.parametrize( + "arg", [None, 0, "foo", Vector2(), [1], Array([1, 2]), PoolByteArray([1]), PoolIntArray([1])] +) +def test_bad_equal(arg): + arr = Array([1]) + assert arr != arg + + +def test_add(): + arr = Array([None]) + arr += Array([1, "two"]) # __iadd__ + assert arr == Array([None, 1, "two"]) + arr2 = arr + Array([3]) # __add__ + assert arr2 == Array([None, 1, "two", 3]) + + +def test_add_with_non_array(): + arr = Array([0]) + arr += [1, "two"] # __iadd__ + assert arr == Array([0, 1, "two"]) + arr2 = arr + [3] # __add__ + assert arr2 == Array([0, 1, "two", 3]) + assert arr == Array([0, 1, "two"]) # arr shouldn't have been modified + + # list.__iadd__ only works with other lists + arr3 = ["-1"] + with pytest.raises(TypeError): + arr3 += arr + + # list.__add__ only works with other lists + with pytest.raises(TypeError): + ["-1"] + arr + + arr4 = ["-1"] + list(arr) + assert arr4 == ["-1", 0, 1, GDString("two")] + + +@pytest.mark.parametrize("arg", [None, 0, Vector2(), OS]) +def test_bad_add(arg): + v = Array() + with pytest.raises(TypeError): + v + arg # __add__ + with pytest.raises(TypeError): + v += arg # __iadd__ + + +@pytest.mark.parametrize("deep", [False, True]) +def test_duplicate(deep): + inner = Array([0]) + arr = Array([inner]) + arr2 = arr.duplicate(deep) + arr[0].append(1) + arr2[0].append(2) + + if deep: + assert arr == Array([Array([0, 1])]) + assert arr2 == Array([Array([0, 2])]) + else: + assert arr == Array([Array([0, 1, 2])]) + assert arr2 == arr + + +def test_mix_add_duplicate(): + arr = Array([0]) + arr2 = arr.duplicate(True) + arr.append(1) + arr2.append(2) + arr3 = arr + arr2 + arr.append(3) + arr3 += arr + + assert list(arr) == [0, 1, 3] + assert list(arr2) == [0, 2] + assert list(arr3) == [0, 1, 0, 2, 0, 1, 3] + + +def test_repr(): + v = Array() + assert repr(v) == "" + v = Array([1, "foo", Vector2()]) + assert repr(v) == ", ])>" + + +@pytest.mark.parametrize("arg", [42, OS, Vector2()]) +def test_bad_instantiate(arg): + with pytest.raises(TypeError): + Array(arg) + + +def test_instantiate_with_non_godot_data(recwarn): + with pytest.raises(TypeError): + Array([object()]) + + +def test_append_with_non_godot_data(recwarn): + v = Array() + with pytest.raises(TypeError): + v.append(object()) + + +def test_add_with_non_godot_data(recwarn): + v = Array() + with pytest.raises(TypeError): + v += [object()] + + +@pytest.mark.parametrize( + "arg", + [ + Array(), + PoolColorArray(), + PoolVector3Array(), + PoolVector2Array(), + PoolStringArray(), + PoolRealArray(), + PoolIntArray(), + PoolByteArray(), + [], + (), + [42, 43, 44], + (GDString("foo"), GDString("bar"), GDString("spam")), + (OS,), + [Vector2(), Vector2(), Vector2()], + (OS, Vector2(), GDString("foo"), 0), # Enjoy the mix + ], +) +def test_instantiate_from_copy(arg): + v = Array(arg) + assert list(v) == list(arg) + original_len = len(arg) + v.append(42) + assert len(arg) == original_len + assert len(v) == original_len + 1 + + +@pytest.mark.parametrize( + "field,ret_type,params", + [ + ["append", type(None), ("bar",)], + ["clear", type(None), ()], + ["count", int, ("foo",)], + ["empty", bool, ()], + ["erase", type(None), ("foo",)], + ["front", GDString, ()], + ["back", GDString, ()], + ["find", int, ("foo", 0)], + ["find_last", int, ("foo",)], + # ["has", bool, ("foo",)], # provided by __in__ instead + ["hash", int, ()], + ["insert", 
type(None), (0, "bar")], + ["invert", type(None), ()], + ["pop_back", GDString, ()], + ["pop_front", GDString, ()], + ["push_back", type(None), ("bar",)], + ["push_front", type(None), ("bar",)], + ["resize", type(None), (2,)], + ["rfind", int, ("foo", 0)], + ["sort", type(None), ()], + # ['sort_custom', type(None), (obj, func)], + ], + ids=lambda f, r, p: f, +) +def test_methods(field, ret_type, params): + v = Array(["foo"]) + # Don't test methods' validity but bindings one + assert hasattr(v, field) + method = getattr(v, field) + assert callable(method) + ret = method(*params) + assert type(ret) == ret_type + + +def test_len(): + v = Array() + assert len(v) == 0 + v.append("foo") + assert len(v) == 1 + + +def test_getitem(): + v = Array(["foo", 0, OS, 0.42]) + assert v[0] == GDString("foo") + assert v[1] == 0 + assert v[-1] == 0.42 + + +@pytest.mark.skip(reason="Not supported yet") +@pytest.mark.parametrize( + "slice_", + [ + slice(1, 3), + slice(1, 3, -1), + slice(None, None, -1), + slice(None, None, 2), + slice(None, None, 10), + slice(-10, 10, 1), + slice(-10, None, 1), + slice(-1, None, 1), + slice(-1, 1, -1), + ], +) +def test_getitem_slice(slice_): + vals = [GDString("foo"), 0, OS, False] + arr = Array(vals) + expected = vals[slice_] + sub_arr = arr[slice_] + assert isinstance(sub_arr, Array) + assert list(sub_arr) == expected + + +def test_outofrange_getitem(): + v = Array(["foo", 0]) + with pytest.raises(IndexError): + v[2] + + +def test_setitem(): + v = Array(["foo", 0, OS]) + v[0] = "bar" + assert len(v) == 3 + assert v[0] == GDString("bar") + v[-1] = 4 + assert len(v) == 3 + assert v[2] == 4 + + +def test_outofrange_setitem(): + v = Array(["foo", 0]) + with pytest.raises(IndexError): + v[2] = 42 + + +def test_delitem(): + v = Array(["foo", 0, OS]) + del v[0] + assert len(v) == 2 + assert v[0] == 0 + del v[-1] + assert len(v) == 1 + v[0] == 0 + + +def test_outofrange_delitem(): + v = Array(["foo", 0]) + with pytest.raises(IndexError): + del v[2] + + +def test_iter(): + items = [GDString("foo"), 0, OS] + v = Array(items) + items_from_v = [x for x in v] + assert items_from_v == items + + +def test_append(): + items = [1, "foo", OS] + v = Array() + for item in items: + v.append(item) + assert len(v) == 3 + assert v == Array(items) diff --git a/tests/bindings/test_basis.py b/tests/bindings/test_basis.py new file mode 100644 index 0000000..654bfd5 --- /dev/null +++ b/tests/bindings/test_basis.py @@ -0,0 +1,157 @@ +import pytest + +from godot import Basis, Vector3, Quat + + +def test_default(): + v = Basis() + assert isinstance(v, Basis) + assert v.x == Vector3(1, 0, 0) + assert v.y == Vector3(0, 1, 0) + assert v.z == Vector3(0, 0, 1) + + +def test_init_from_rows(): + v = Basis(Vector3(1, 2, 3), Vector3(4, 5, 6), Vector3(7, 8, 9)) + assert isinstance(v, Basis) + assert (v.x, v.y, v.z) == (Vector3(1, 4, 7), Vector3(2, 5, 8), Vector3(3, 6, 9)) + + +@pytest.mark.parametrize( + "args", + [ + (0, Vector3.ONE, Vector3.ONE), + (None, Vector3.ONE, Vector3.ONE), + (Vector3.ONE, 0, Vector3.ONE), + (Vector3.ONE, None, Vector3.ONE), + (Vector3.ONE, Vector3.ONE, 0), + (Vector3.ONE, Vector3.ONE, None), + ], +) +def test_bad_init_from_rows(args): + with pytest.raises(TypeError): + Basis(*args) + + +@pytest.mark.parametrize( + "field,args", + [ + ["from_axis_angle", (Vector3.ONE, 1.1)], + ["from_euler", (Vector3.ONE,)], + ["from_euler", (Quat(),)], + ], +) +def test_inits(field, args): + build = getattr(Basis, field) + v = build(*args) + assert isinstance(v, Basis) + + +@pytest.mark.parametrize( + 
"field,args", + [ + ["from_axis_angle", (None, 1.1)], + ["from_euler", (None,)], + ["from_axis_angle", (Vector3.ONE, None)], + ["from_axis_angle", (Vector3.ONE, "dummy")], + ["from_axis_angle", ("dummy", 1.1)], + ["from_euler", ("dummy",)], + ], +) +def test_bad_inits(field, args): + build = getattr(Basis, field) + with pytest.raises(TypeError): + v = build(*args) + + +def test_equal(): + basis1 = Basis.from_euler(Vector3(1, 2, 3)) + basis2 = Basis.from_euler(Vector3(1, 2, 3)) + assert basis1 == basis2 + basis2.x = Vector3(1, 2, 3) + assert basis1 != basis2 + basis1.x = Vector3(1, 2, 3) + assert basis1 == basis2 + bad = Basis.from_euler(Vector3(1, 2, 4)) + assert not basis1 == bad # Force use of __eq__ + + +@pytest.mark.parametrize("arg", [None, 0, "foo", Basis.from_euler(Vector3(1, 2, 4))]) +def test_bad_equal(arg): + basis = Basis.from_euler(Vector3(1, 2, 3)) + assert basis != arg + + +def test_repr(): + v = Basis(Vector3(1, 2, 3), Vector3(4, 5, 6), Vector3(7, 8, 9)) + assert repr(v) == "" + + +@pytest.mark.parametrize( + "field,ret_type,params", + [ + ["inverse", Basis, ()], + ["transposed", Basis, ()], + ["orthonormalized", Basis, ()], + ["determinant", float, ()], + ["rotated", Basis, (Vector3(), 0.5)], + ["scaled", Basis, (Vector3(),)], + ["get_scale", Vector3, ()], + ["get_euler", Vector3, ()], + ["get_quat", Quat, ()], + ["set_quat", type(None), (Quat(),)], + ["set_axis_angle_scale", type(None), (Vector3.ONE, 1.1, Vector3.ONE)], + ["set_euler_scale", type(None), (Vector3.ONE, Vector3.ONE)], + ["set_quat_scale", type(None), (Quat(), Vector3.ONE)], + ["tdotx", float, (Vector3(),)], + ["tdoty", float, (Vector3(),)], + ["tdotz", float, (Vector3(),)], + ["xform", Vector3, (Vector3(),)], + ["xform_inv", Vector3, (Vector3(),)], + ["get_orthogonal_index", int, ()], + ], + ids=lambda x: x[0], +) +def test_methods(field, ret_type, params): + v = Basis() + # Don't test methods' validity but bindings one + assert hasattr(v, field) + method = getattr(v, field) + assert callable(method) + ret = method(*params) + assert isinstance(ret, ret_type) + + +@pytest.mark.parametrize( + "field,ret_type", [("x", Vector3), ("y", Vector3), ("z", Vector3)], ids=lambda x: x[0] +) +def test_properties(field, ret_type): + v = Basis() + assert hasattr(v, field) + field_val = getattr(v, field) + assert isinstance(field_val, ret_type) + val = Vector3(1, 2, 3) + setattr(v, field, val) + field_val = getattr(v, field) + assert field_val == val + + +@pytest.mark.parametrize( + "field,bad_value", + [ + ("x", "Not a Vector3"), + ("y", "Not a Vector3"), + ("z", "Not a Vector3"), + ("x", 1), + ("y", 2), + ("z", 3), + ("x", None), + ("y", None), + ("z", None), + ], + ids=lambda x: x[0], +) +def test_bad_properties(field, bad_value): + v = Basis() + with pytest.raises(TypeError): + setattr(v, field, bad_value) diff --git a/tests/bindings/test_bindings.py b/tests/bindings/test_bindings.py new file mode 100644 index 0000000..ab201da --- /dev/null +++ b/tests/bindings/test_bindings.py @@ -0,0 +1,287 @@ +import pytest +from math import inf +from struct import unpack + +import godot +from godot import ( + Vector3, + GDString, + NodePath, + Object, + Node, + CanvasItem, + Node2D, + PluginScript, + OpenSimplexNoise, + OS, + Error, + OK, + exposed, +) + + +@exposed +class virtualtestbedcls(Node): + def _to_string(self): + # Implemented for test_bindings::test_virtual_to_string_customize + return GDString("
") + + def _notification(self, what): + on_notification = getattr(self, "on_notification", None) + if on_notification: + on_notification(what) + + +def test_free_node(): + v = Node.new() + v.free() + # `check_memory_leak` auto fixture will do the bookkeeping + + +def test_expose_contains_constant(): + assert "OK" in dir(godot) + assert OK is not None + + +def test_expose_contains_class(): + assert "Node" in dir(godot) + assert Node is not None + + +def test_expose_contains_builtins(): + assert "Vector3" in dir(godot) + assert Vector3 is not None + + +def test_call_one_arg_short(current_node): + with pytest.raises(TypeError) as exc: + current_node.get_child() + assert str(exc.value) == "get_child() takes exactly one argument (0 given)" + + +def test_call_too_few_args(current_node): + with pytest.raises(TypeError) as exc: + current_node.move_child() + assert str(exc.value) == "move_child() takes exactly 2 positional arguments (0 given)" + + +def test_call_with_defaults_and_too_few_args(current_node): + with pytest.raises(TypeError) as exc: + current_node.add_child() + assert str(exc.value) == "add_child() takes at least 1 positional argument (0 given)" + + +def test_call_none_in_base_type_args(current_node): + with pytest.raises(TypeError) as exc: + # signature: def get_child(self, godot_int idx) + current_node.get_child(None) + assert str(exc.value) == "an integer is required" + + +def test_call_none_in_builtin_args(current_node): + with pytest.raises(TypeError) as exc: + # signature: def get_node(self, NodePath path not None) + current_node.get_node(None) + assert str(exc.value) == "Invalid value None, must be str or NodePath" + + +def test_call_none_in_bindings_args(current_node): + with pytest.raises(TypeError) as exc: + # signature: def get_path_to(self, Node node not None) + current_node.get_path_to(None) + assert ( + str(exc.value) + == "Argument 'node' has incorrect type (expected godot.bindings.Node, got NoneType)" + ) + + +def test_call_too_many_args(current_node): + with pytest.raises(TypeError) as exc: + current_node.get_child(1, 2) + assert str(exc.value) == "get_child() takes exactly one argument (2 given)" + + +def test_call_with_default_and_too_many_args(current_node): + with pytest.raises(TypeError) as exc: + current_node.add_child(1, 2, 3) + assert str(exc.value) == "add_child() takes at most 2 positional arguments (3 given)" + + +def test_call_with_defaults(generate_obj): + node = generate_obj(Node) + child = generate_obj(Node) + # signature: void add_child(Node node, bool legible_unique_name=false) + node.add_child(child) + + # legible_unique_name is False by default, check name is not human-redable + children_names = [str(x.name) for x in node.get_children()] + assert children_names == ["@@2"] + + +def test_call_returns_enum(generate_obj): + node = generate_obj(Node) + ret = node.connect("foo", node, "bar") + assert isinstance(ret, Error) + + +def test_call_with_kwargs(generate_obj): + node = generate_obj(Node) + child = generate_obj(Node) + new_child = generate_obj(Node) + + node.add_child(child, legible_unique_name=True) + # Check name is readable + children_names = [str(x.name) for x in node.get_children()] + assert children_names == ["Node"] + + # Kwargs are passed out of order + node.add_child_below_node(legible_unique_name=True, child_node=new_child, node=node) + # Check names are still readable + children_names = [str(x.name) for x in node.get_children()] + assert children_names == ["Node", "Node2"] + + +def test_inheritance(generate_obj): + node = 
generate_obj(Node) + + # CanvasItem is a direct subclass of Node + canvas_item = generate_obj(CanvasItem) + assert isinstance(node, Object) + assert isinstance(canvas_item, Object) + assert isinstance(canvas_item, Node) + + # TODO: headless server end up with a static memory leak + # when instanciating a Node2D... + if not OS.has_feature("Server"): + # Node2D is a grand child subclass of Node + node2d = generate_obj(Node2D) + assert isinstance(node, Object) + assert isinstance(node2d, Object) + assert isinstance(node2d, Node) + + +def test_call_with_refcounted_return_value(current_node): + script = current_node.get_script() + assert isinstance(script, PluginScript) + + +def test_call_with_refcounted_param_value(generate_obj): + node = generate_obj(Node) + script = PluginScript() + node.set_script(script) + + +def test_access_property(generate_obj): + node = generate_obj(Node) + path = NodePath("/foo/bar") + node._import_path = path + assert node._import_path == path + + +@pytest.mark.xfail(reason="Create Python class from Python not implemented yet") +def test_new_on_overloaded_class(generate_obj): + node = generate_obj(virtualtestbedcls) + # Make sure doing MyClass.new() doesn't return an instance of the + # Godot class we inherit from + assert isinstance(node, virtualtestbedcls) + + +@pytest.mark.xfail(reason="Create Python class from Python not implemented yet") +def test_virtual_call_overloaded_notification(generate_obj): + node = generate_obj(virtualtestbedcls) + + notifications = [] + + def _on_notification(what): + notifications.append(what) + + node.on_notification = _on_notification + try: + node.notification(1) + node.notification(2) + node.notification(3) + + finally: + node.on_notification = None + + assert notifications == [1, 2, 3] + + +@pytest.mark.xfail(reason="Pluginscript doesn't support _to_string overloading") +def test_virtual_to_string_customize(generate_obj): + node = generate_obj(virtualtestbedcls) + # Object.to_string() can be customized when defining _to_string() + expected = GDString("
") + assert node._to_string() == expected + assert node.to_string() == expected + + # Try to access undefined _to_string + node = generate_obj(Node) + with pytest.raises(AttributeError): + node._to_string() + + +@pytest.fixture(params=["godot_class", "python_subclass"]) +def node_for_access(request, current_node, generate_obj): + if request.param == "godot_class": + return generate_obj(Node) + else: + return current_node + + +@pytest.mark.xfail(reason="Current implement uses Object.callv which doesn't inform of the failure") +def test_virtual_call__to_string_not_customized(node_for_access): + with pytest.raises(AttributeError): + node_for_access._to_string() + + +@pytest.mark.xfail(reason="Current implement uses Object.callv which doesn't inform of the failure") +def test_virtual_call__notification_not_customized(node_for_access): + with pytest.raises(AttributeError): + node_for_access._notification(42) + + +def test_access_unknown_attribute(node_for_access): + with pytest.raises(AttributeError): + node_for_access.dummy + + +def test_call_unknown_method(node_for_access): + with pytest.raises(AttributeError): + node_for_access.dummy(42) + + +def test_create_refcounted_value(): + script1_ref1 = PluginScript() + script2_ref1 = PluginScript() + script1_ref2 = script1_ref1 + script2_ref2 = script2_ref1 + del script1_ref1 + + +def test_late_initialized_bindings_and_float_param_ret(): + # OpenSimplexNoise is refcounted, so no need to create it with `generate_obj` + obj = OpenSimplexNoise() + + # Float are tricky given they must be converted back and forth to double + ret = obj.get_noise_1d(inf) + assert ret == 0 + + # Build a double number that cannot be reprented on a float + double_only_number, = unpack("!d", b"\x11" * 8) + ret = obj.get_noise_1d(double_only_number) + assert ret == pytest.approx(-0.02726514) + + # Now try with better parameter to have a correct return value + ret = obj.get_noise_3d(100, 200, 300) + assert ret == pytest.approx(-0.10482934) + + +def test_bad_meth_to_create_non_refcounted_object(): + with pytest.raises(RuntimeError): + Node() + + +def test_bad_meth_to_create_refcounted_object(): + with pytest.raises(RuntimeError): + OpenSimplexNoise.new() diff --git a/tests/bindings/test_color.py b/tests/bindings/test_color.py new file mode 100644 index 0000000..70efb9d --- /dev/null +++ b/tests/bindings/test_color.py @@ -0,0 +1,181 @@ +import pytest + +from godot import Color, Vector2, GDString, Node + +from conftest import generate_global_obj + + +NODE = generate_global_obj(Node) + + +def test_base(): + v = Color() + assert type(v) == Color + + +@pytest.mark.parametrize( + "arg", + [ + (), + (0xFF,), + (0xFF, 0x77), + (0xFF, 0x77, 0x33), + (0xFF, 0x77, 0x33, 0x11), + {"r": 0xFF, "g": 0x77, "b": 0x33, "a": 0x11}, + ], +) +def test_initialize(arg): + if isinstance(arg, dict): + v1 = Color(**arg) + v2 = Color(**arg) + else: + v1 = Color(*arg) + v2 = Color(*arg) + assert v1 == v2 + + +def test_equal(): + v1 = Color() + v2 = Color() + assert v1 == v2 + vrgba = Color(1, 2, 3, 4) + vrgb = Color(1, 2, 3) + assert not vrgb == vrgba # Force use of __eq__ + + +@pytest.mark.parametrize("arg", [None, 0, "foo", Color(1, 2, 3, 5)]) +def test_bad_equal(arg): + basis = Color(1, 2, 3, 4) + assert basis != arg + + +def test_repr(): + v = Color() + assert repr(v) == "" + + +@pytest.mark.parametrize( + "arg", [(None,), (1, None), (1, 2, None), ("dummy",), (NODE,), (Vector2(),)] +) +def test_bad_instantiate(arg): + with pytest.raises(TypeError): + Color(*arg) + + +@pytest.mark.parametrize( + 
"field,ret_type,params", + [ + ["to_rgba32", int, ()], + ["to_abgr32", int, ()], + ["to_abgr64", int, ()], + ["to_argb64", int, ()], + ["to_rgba64", int, ()], + ["to_argb32", int, ()], + ["gray", float, ()], + ["inverted", Color, ()], + ["contrasted", Color, ()], + ["linear_interpolate", Color, (Color(0xAA, 0xBB, 0xCC), 2.2)], + ["blend", Color, (Color(0xAA, 0xBB, 0xCC),)], + ["darkened", Color, (2.2,)], + ["from_hsv", Color, (1.1, 2.2, 3.3, 4.4)], + ["lightened", Color, (2.2,)], + ["to_html", GDString, (True,)], + ], + ids=lambda x: x[0], +) +def test_methods(field, ret_type, params): + v = Color() + # Don't test methods' validity but bindings one + assert hasattr(v, field) + method = getattr(v, field) + assert callable(method) + ret = method(*params) + assert type(ret) == ret_type + + +@pytest.mark.parametrize( + "small,big", + [ + (Color(0, 0, 0), Color(1, 0, 0)), + (Color(0, 1, 0), Color(1, 0, 0)), + (Color(1, 0, 0), Color(1, 0, 1)), + ], + ids=lambda x: x[0], +) +def test_lt(small, big): + assert small < big + + +@pytest.mark.parametrize( + "field,ret_type", + [ + ("r", float), + ("r8", int), + ("g", float), + ("g8", int), + ("b", float), + ("b8", int), + ("a", float), + ("a8", int), + ], + ids=lambda x: x[0], +) +def test_properties_rw(field, ret_type): + v = Color() + assert hasattr(v, field) + field_val = getattr(v, field) + assert type(field_val) == ret_type + if ret_type is float: + vals = (0, 10, 10.0, 42.5) + else: + vals = (0, 10, 0xFF) + for val in vals: + setattr(v, field, val) + field_val = getattr(v, field) + assert field_val == val + + +@pytest.mark.parametrize("args", [("h", float), ("s", float), ("v", float)], ids=lambda x: x[0]) +def test_properties_ro(args): + v = Color(4.2) + field, ret_type = args + assert hasattr(v, field) + field_val = getattr(v, field) + assert type(field_val) == ret_type + with pytest.raises(AttributeError): + setattr(v, field, 0.5) + + +@pytest.mark.parametrize( + "args", + [ + ("r", "Nan"), + ("r8", "Nan"), + ("g", "Nan"), + ("g8", "Nan"), + ("b", "Nan"), + ("b8", "Nan"), + ("a", "Nan"), + ("a8", "Nan"), + ("r", None), + ("r8", None), + ("g", None), + ("g8", None), + ("b", None), + ("b8", None), + ("a", None), + ("a8", None), + ], + ids=lambda x: x[0], +) +def test_bad_properties(args): + v = Color() + field, bad_value = args + with pytest.raises(TypeError): + setattr(v, field, bad_value) + + +def test_constants(): + assert isinstance(Color.LEMONCHIFFON, Color) + # I don't have a single clue what those colors are... 
+    assert Color.LEMONCHIFFON != Color.MEDIUMSPRINGGREEN
diff --git a/tests/bindings/test_dictionary.py b/tests/bindings/test_dictionary.py
new file mode 100644
index 0000000..81b937b
--- /dev/null
+++ b/tests/bindings/test_dictionary.py
@@ -0,0 +1,226 @@
+import pytest
+import json
+
+from godot import Dictionary, Vector2, Array, GDString, Node, Resource, OS
+
+
+def test_base():
+    v = Dictionary()
+    assert type(v) == Dictionary
+
+
+def test_equal():
+    arr = Dictionary()
+    other = Dictionary()
+    for key, value in [("a", 1), ("b", "foo"), ("c", OS), ("d", Vector2())]:
+        other[key] = arr[key] = value
+    assert arr == other
+    bad = Dictionary({"a": 1})
+    assert not arr == bad  # Force use of __eq__
+    assert not arr == None  # Force use of __eq__
+
+
+@pytest.mark.parametrize("arg", [None, 0, "foo", Vector2(), {"a": 1}, Dictionary({"b": 2})])
+def test_bad_equal(arg):
+    arr = Dictionary({"a": 1})
+    assert arr != arg
+
+
+def test_repr():
+    v = Dictionary()
+    assert repr(v) == ""
+    v = Dictionary({"a": 1, 2: "foo", 0.5: Vector2()})
+    assert repr(v).startswith("<Dictionary(")
+
+
+@pytest.mark.parametrize("arg", [42, "dummy", Vector2(), [object()], {object(): 1}, {1: object()}])
+def test_bad_instantiate(arg):
+    with pytest.raises((TypeError, ValueError)):
+        Dictionary(arg)
+
+
+@pytest.mark.parametrize(
+    "arg",
+    [
+        Dictionary(),
+        {},
+        {"a": 1, 2: "foo", 0.5: Vector2()},
+        Dictionary({"a": 1, 2: "foo", 0.5: Vector2()}),
+    ],
+)
+def test_instantiate_from_copy(arg):
+    arr = Dictionary(arg)
+    if hasattr(arg, "_gd_ptr"):
+        assert arr._gd_ptr != arg._gd_ptr
+
+
+def test_len():
+    v = Dictionary()
+    assert len(v) == 0
+    v["foo"] = "bar"
+    assert len(v) == 1
+
+
+def test_getitem():
+    v = Dictionary({"a": 1, 2: "foo", 0.5: Vector2()})
+    assert v["a"] == 1
+    assert v[0.5] == Vector2()
+    # Missing items are returned as None
+    assert v["dummy"] is None
+    # Cannot store non Godot types
+    with pytest.raises(TypeError):
+        v[object()]
+
+
+def test_setitem():
+    v = Dictionary({"a": 1, 2: "foo", 0.5: Vector2()})
+    v[0] = GDString("bar")
+    assert len(v) == 4
+    assert v[0] == GDString("bar")
+    v["a"] = 4
+    assert len(v) == 4
+    assert v["a"] == 4
+    # Cannot store non Godot types
+    with pytest.raises(TypeError):
+        v[object()] = 4
+    with pytest.raises(TypeError):
+        v[4] = object()
+
+
+def test_delitem():
+    v = Dictionary({"a": 1, 2: "foo", 0.5: Vector2()})
+    del v["a"]
+    assert len(v) == 2
+    del v[0.5]
+    assert len(v) == 1
+    assert v[2] == GDString("foo")
+    # Delete on missing items should raise error
+    with pytest.raises(KeyError):
+        del v["missing"]
+    # Cannot store non Godot types
+    with pytest.raises(TypeError):
+        del v[object()]
+
+
+def test_update():
+    v = Dictionary({"a": 1, "b": 2, "c": 3})
+    v.update({"a": "one", "d": "four"})
+    v.update(Dictionary({"b": "two", "e": "five"}))
+    assert list(v.keys()) == [
+        GDString("a"),
+        GDString("b"),
+        GDString("c"),
+        GDString("d"),
+        GDString("e"),
+    ]
+    assert list(v.values()) == [
+        GDString("one"),
+        GDString("two"),
+        3,
+        GDString("four"),
+        GDString("five"),
+    ]
+
+
+def test_iter():
+    v = Dictionary({"a": 1, 2: "foo", 0.5: Vector2()})
+    items = [GDString("a"), 2, 0.5]
+    items_from_v = [x for x in v]
+    assert items_from_v == items
+
+
+def test_keys():
+    v = Dictionary({"a": 1, 2: "foo", 0.5: Vector2()})
+    keys = v.keys()
+    assert list(keys) == [GDString("a"), 2, 0.5]
+
+
+def test_values():
+    v = Dictionary({"a": 1, 2: "foo"})
+    values = v.values()
+    assert list(values) == [1, GDString("foo")]
+
+
+def test_items():
+    v = Dictionary({"a": 1, 2:
"foo"}) + items = v.items() + assert list(items) == [(GDString("a"), 1), (2, GDString("foo"))] + + +def test_empty_and_clear(): + v = Dictionary({"a": 1, 2: "foo"}) + assert not v.empty() + v.clear() + assert len(v) == 0 + assert v.empty() + + +def test_in(): + v = Dictionary({"a": 1, 2: "foo"}) + assert "a" in v + assert 2 in v + assert "dummy" not in v + assert None not in v + + +def test_hash(): + v = Dictionary({"a": 1, 2: "foo"}) + h1 = v.hash() + h2 = v.hash() + assert h1 == h2 + v["b"] = 42 + h3 = v.hash() + assert h3 != h2 + + +def test_has_all(): + v = Dictionary({"a": 1, 2: "foo", None: None}) + elems = Array(["a", None]) + assert v.has_all(elems) + bad_elems = Array(["a", 42]) + assert not v.has_all(bad_elems) + + +def test_to_json(): + v = Dictionary({"a": 1, "b": "foo"}) + jsoned = v.to_json() + v2 = json.loads(str(jsoned)) + assert v2 == {"a": 1, "b": "foo"} + assert json + + +def test_update(): + v1 = Dictionary({"a": 1, "b": 2}) + v2 = Dictionary({"b": 3, "c": 4}) + v1.update(v2) + assert v1 == Dictionary({"a": 1, "b": 3, "c": 4}) + assert v2 == Dictionary({"b": 3, "c": 4}) + + v2.update({"d": 5, "e": 6}) + assert v1 == Dictionary({"a": 1, "b": 3, "c": 4}) + assert v2 == Dictionary({"b": 3, "c": 4, "d": 5, "e": 6}) + + +@pytest.mark.parametrize("arg", [None, 0, Vector2(), OS, Array([1, 2])]) +def test_bad_update(arg): + v = Dictionary() + with pytest.raises(TypeError): + v.update(arg) + + +@pytest.mark.parametrize("deep", [False, True]) +def test_duplicate(deep): + inner = Dictionary({0: 0}) + d1 = Dictionary({0: inner}) + d2 = d1.duplicate(deep) + d1[0][1] = 1 + d2[0][2] = 2 + + if deep: + assert d1 == Dictionary({0: Dictionary({0: 0, 1: 1})}) + assert d2 == Dictionary({0: Dictionary({0: 0, 2: 2})}) + else: + assert d1 == Dictionary({0: Dictionary({0: 0, 1: 1, 2: 2})}) + assert d2 == d1 diff --git a/tests/bindings/test_dynamic_bindings.py b/tests/bindings/test_dynamic_bindings.py new file mode 100644 index 0000000..5d45427 --- /dev/null +++ b/tests/bindings/test_dynamic_bindings.py @@ -0,0 +1,76 @@ +# import pytest + +# from godot.bindings import ( +# Object, +# Node, +# Viewport, +# Input, +# LineEdit, +# Engine, +# _Engine, +# KEY_ESCAPE, +# OK, +# FAILED, +# ) + + +# class TestDynamicBindings: +# def test_singletons(self): +# assert isinstance(Engine, _Engine) +# assert callable(Engine.get_main_loop) +# ml = Engine.get_main_loop() +# assert isinstance(ml, Object) + +# def test_constants(self): +# assert OK == 0 +# assert FAILED == 1 +# assert isinstance(KEY_ESCAPE, int) + +# def test_objects_unicity(self): +# # Main loop object is a Godot Object, calling `get_main_loop` from +# # python returns a different python wrapper on the same object each time. +# # However those wrappers should feel like they are the same object. 
+# ml = Engine.get_main_loop() +# ml2 = Engine.get_main_loop() +# assert ml == ml2 +# # Of course different objects should be different and equality +# # should not crash with bad given types +# obj = Object() +# assert ml != obj +# assert ml != None # noqa +# assert ml != "" +# assert ml != 42 +# # Don't forget to free the Godot Object +# obj.free() + +# def test_class(self): +# assert isinstance(Node, type) + +# def test_class_constants(self): +# assert hasattr(Input, "MOUSE_MODE_VISIBLE") +# assert isinstance(Input.MOUSE_MODE_VISIBLE, int) + +# def test_class_inheritance(self): +# assert issubclass(Node, Object) +# assert issubclass(Viewport, Node) +# assert issubclass(Viewport, Object) + +# def test_class_methods(self): +# assert hasattr(LineEdit, "is_secret") +# v = LineEdit() +# assert callable(v.is_secret) +# assert v.is_secret() is False +# assert callable(v.set_secret) +# v.set_secret(True) +# assert v.is_secret() is True + +# @pytest.mark.xfail(reason="Not implemented yet.") +# def test_class_signals(self): +# pass + +# def test_class_properties(self): +# assert hasattr(LineEdit, "max_length") +# v = LineEdit() +# assert v.max_length == 0 +# v.max_length = 42 +# assert v.max_length == 42 diff --git a/tests/bindings/test_node_path.py b/tests/bindings/test_node_path.py new file mode 100644 index 0000000..2ca931a --- /dev/null +++ b/tests/bindings/test_node_path.py @@ -0,0 +1,84 @@ +import pytest + +from godot import Vector3, NodePath, GDString + + +def test_init(): + v1 = NodePath("parent/child") + v2 = NodePath(GDString("parent/child")) + assert v1 == v2 + + +@pytest.mark.parametrize("arg", [None, 0]) +def test_bad_init(arg): + with pytest.raises(TypeError): + NodePath(arg) + + +def test_equal(): + v1 = NodePath("parent/child") + v2 = NodePath("parent/child") + assert v1 == v2 + other = NodePath("parent/other_child") + assert not v1 == other # Force use of __eq__ + + +@pytest.mark.parametrize("arg", [None, 0, "parent/child", NodePath("parent/other_child")]) +def test_bad_equal(arg): + basis = NodePath("parent/child") + assert basis != arg + + +def test_repr(): + v = NodePath("/root/leaf") + assert repr(v) == "" + + +@pytest.mark.parametrize("args", [(), (42,), (None,)]) +def test_bad_build(args): + with pytest.raises(TypeError): + NodePath(*args) + + +@pytest.mark.parametrize( + "field,ret_type,params", + [ + ["get_name", GDString, (0,)], + ["get_name_count", int, ()], + ["get_concatenated_subnames", GDString, ()], + ["get_subname", GDString, (0,)], + ["get_subname_count", int, ()], + ["is_absolute", bool, ()], + ["is_empty", bool, ()], + ], + ids=lambda x: x[0], +) +def test_methods(field, ret_type, params): + v = NodePath("/foo") + # Don't test methods' validity but bindings one + assert hasattr(v, field) + method = getattr(v, field) + assert callable(method) + ret = method(*params) + assert isinstance(ret, ret_type) + + +@pytest.mark.ignore_leaks # Node.get_path() keep cache after first call +def test_as_binding_return_value(current_node): + ret = current_node.get_path() + assert isinstance(ret, NodePath) + + ret2 = current_node.get_path() + assert ret == ret2 + + assert str(ret) == "/root/main" + + +@pytest.mark.ignore_leaks # Node.get_path() keep cache after first call +def test_as_binding_param(current_node): + root = current_node.get_parent() + path = current_node.get_path() + dummy_path = NodePath("/foo/bar") + + assert root.has_node(path) is True + assert root.has_node(dummy_path) is False diff --git a/tests/bindings/test_plane.py b/tests/bindings/test_plane.py new file 
mode 100644 index 0000000..c1ba32c --- /dev/null +++ b/tests/bindings/test_plane.py @@ -0,0 +1,164 @@ +import pytest + +from godot import Vector3, Plane + + +def test_init(): + v = Plane(1, 2, 3, 4) + assert type(v) == Plane + assert v.normal == Vector3(1, 2, 3) + assert v.d == 4 + + +@pytest.mark.parametrize( + "args", + [ + ("NaN", 2.2, 3.3, 4.4), + (1.1, "NaN", 3.3, 4.4), + (1.1, 2.2, "NaN", 4.4), + (1.1, 2.2, 3.3, "NaN"), + (None, 2.2, 3.3, 4.4), + (1.1, None, 3.3, 4.4), + (1.1, 2.2, None, 4.4), + (1.1, 2.2, 3.3, None), + ], +) +def test_bad_init(args): + with pytest.raises(TypeError): + Plane(*args) + + +@pytest.mark.parametrize( + "expected_normal,expected_d", [(Vector3(0, 0, 0), 0), (Vector3(1, 2, 3), 1)] +) +def test_init_from_normal(expected_normal, expected_d): + v = Plane.from_normal(expected_normal, expected_d) + assert v.normal == expected_normal, msg_tmpl % (v.normal, expected_normal) + assert v.d == expected_d, msg_tmpl % (v.d, expected_d) + + +@pytest.mark.parametrize( + "bad_normal,bad_d", + [("dummy", 0), (None, 0), (Vector3(1, 2, 3), "NaN"), (Vector3(1, 2, 3), None)], +) +def test_bad_init_from_normal(bad_normal, bad_d): + with pytest.raises(TypeError): + Plane.from_normal(bad_normal, bad_d) + + +def test_init_from_vectors(): + v = Plane.from_vectors(Vector3(), Vector3(), Vector3()) + assert v.normal == Vector3() + assert v.d == 0 + + +@pytest.mark.parametrize( + "bad_v1,bad_v2,bad_v3", + [ + ("dummy", Vector3(4, 5, 6), Vector3(7, 8, 9)), + (Vector3(1, 2, 3), "dummy", Vector3(7, 8, 9)), + (Vector3(1, 2, 3), Vector3(4, 5, 6), "dummy"), + (None, Vector3(4, 5, 6), Vector3(7, 8, 9)), + (Vector3(1, 2, 3), None, Vector3(7, 8, 9)), + (Vector3(1, 2, 3), Vector3(4, 5, 6), None), + ], +) +def test_bad_init_from_vectors(bad_v1, bad_v2, bad_v3): + with pytest.raises(TypeError): + Plane.from_vectors(bad_v1, bad_v2, bad_v3) + + +def test_repr(): + v = Plane(1, 2, 3, 4) + assert repr(v) == "" + + +@pytest.mark.parametrize( + "field,ret_type,params", + [ + ["normalized", Plane, ()], + ["center", Vector3, ()], + ["get_any_point", Vector3, ()], + ["is_point_over", bool, (Vector3(),)], + ["distance_to", float, (Vector3(),)], + ["has_point", bool, (Vector3(), 0.5)], + ["project", Vector3, (Vector3(),)], + ["intersect_3", Vector3, (Plane.PLANE_XZ, Plane.PLANE_XY)], + ["intersects_ray", Vector3, (Vector3(1, 0, 0), Vector3(-1, 0, 0))], + ["intersects_segment", Vector3, (Vector3(1, 0, 0), Vector3(-1, 0, 0))], + ], + ids=lambda x: x[0], +) +def test_methods(field, ret_type, params): + v = Plane.PLANE_YZ + # Don't test methods' validity but bindings one + assert hasattr(v, field) + method = getattr(v, field) + assert callable(method) + ret = method(*params) + assert type(ret) == ret_type + + +@pytest.mark.parametrize( + "field,params", + [ + ["is_point_over", (None,)], + ["distance_to", (None,)], + ["has_point", (None, 0.5)], + ["project", (None,)], + ["intersect_3", (None, Plane(1, 1, 1, 1))], + ["intersect_3", (Plane(1, 1, 1, 1), None)], + ["intersects_ray", (None, Vector3())], + ["intersects_ray", (Vector3(), None)], + ["intersects_segment", (None, Vector3())], + ["intersects_segment", (Vector3(), None)], + ], + ids=lambda x: x[0], +) +def test_methods_call_with_none(field, params): + v = Plane(1, 2, 3, 4) + method = getattr(v, field) + with pytest.raises(TypeError): + method(*params) + + +def test_property_d(): + v = Plane(1, 2, 3, 4) + assert hasattr(v, "d") + field_val = v.d + assert isinstance(field_val, (float, int)) + for val in (0.5, -1, 2): + v.d = val + field_val = v.d + assert 
field_val == val + for bad in ("dummy", None, b"b"): + with pytest.raises(TypeError): + v.d = bad + + +def test_property_normal(): + v = Plane(1, 2, 3, 4) + assert hasattr(v, "normal") + field_val = v.normal + assert isinstance(field_val, Vector3) + for val in (Vector3(), Vector3(0.1, -0.1, 2)): + v.normal = val + field_val = v.normal + assert field_val == val + for bad in ("dummy", None, b"b"): + with pytest.raises(TypeError): + v.normal = bad + + +def test_equal(): + arr = Plane(1, 2, 3, 4) + same = Plane(1, 2, 3, 4) + assert arr == same # Force use of __eq__ + assert not arr != same # Force use of __ne__ + + +@pytest.mark.parametrize("bad", [None, 0, "foo", Plane(1, 2, 3, 5)]) +def test_not_equal(bad): + arr = Plane(1, 2, 3, 4) + assert not arr == bad # Force use of __eq__ + assert arr != bad # Force use of __ne__ diff --git a/tests/bindings/test_pool_arrays.py b/tests/bindings/test_pool_arrays.py new file mode 100644 index 0000000..f3efc1f --- /dev/null +++ b/tests/bindings/test_pool_arrays.py @@ -0,0 +1,413 @@ +import sys +import pytest +from random import Random +from inspect import isfunction +from functools import partial + +from godot import ( + Array, + Vector2, + Vector3, + Color, + GDString, + PoolIntArray, + PoolRealArray, + PoolByteArray, + PoolVector2Array, + PoolVector3Array, + PoolColorArray, + PoolStringArray, + Node, +) + +from conftest import generate_global_obj + + +is_windows_32 = (sys.platform == "win32") and (sys.maxsize <= 2 ** 32) + + +NODE = generate_global_obj(Node) + + +class BasePoolArrayBench: + cls = None + + def __init__(self): + # Fixed seed for reproducibility + self.random = Random(0) + + def generate_value(self): + raise NotImplemented + + def generate_values(self, count): + return [self.generate_value() for _ in range(count)] + + def expand_arg(self, arg): + if isfunction(arg): + return arg(self) + else: + return arg + + +class PoolIntArrayBench(BasePoolArrayBench): + cls = PoolIntArray + + def generate_value(self): + return self.random.randint(-(2 ** 31), 2 ** 31 - 1) + + +class PoolRealArrayBench(BasePoolArrayBench): + cls = PoolRealArray + + def generate_value(self): + # Use integer instead of float to avoid floating point imprecision in comparisons + return float(self.random.randint(0, 100)) + + +class PoolByteArrayBench(BasePoolArrayBench): + cls = PoolByteArray + + def generate_value(self): + return self.random.randint(0, 255) + + +class PoolColorArrayBench(BasePoolArrayBench): + cls = PoolColorArray + + def generate_value(self): + # Use integer instead of float to avoid floating point imprecision in comparisons + return Color(self.random.randint(0, 100)) + + +class PoolStringArrayBench(BasePoolArrayBench): + cls = PoolStringArray + + def generate_value(self): + return GDString(str(self.random.random())) + + +class PoolVector2ArrayBench(BasePoolArrayBench): + cls = PoolVector2Array + + def generate_value(self): + # Use integer instead of float to avoid floating point imprecision in comparisons + return Vector2(self.random.randint(0, 100)) + + +class PoolVector3ArrayBench(BasePoolArrayBench): + cls = PoolVector3Array + + def generate_value(self): + # Use integer instead of float to avoid floating point imprecision in comparisons + return Vector3(self.random.randint(0, 100)) + + +@pytest.fixture( + scope="module", + ids=lambda x: x.cls.__name__, + params=[ + PoolIntArrayBench, + PoolRealArrayBench, + PoolByteArrayBench, + PoolColorArrayBench, + PoolStringArrayBench, + PoolVector2ArrayBench, + PoolVector3ArrayBench, + ], +) +def 
pool_x_array(request): + return request.param() + + +def test_empty_init(pool_x_array): + v1 = pool_x_array.cls() + v2 = pool_x_array.cls() + assert type(v1) == pool_x_array.cls + assert v1 == v2 + assert len(v1) == 0 + + +@pytest.mark.parametrize( + "bad_val", + [ + lambda x: x.generate_value(), + lambda x: (object() for _ in range(1)), # Must be generated each time + 42, + "dummy", + NODE, + Vector2(), + [object()], + lambda x: [x.generate_value(), object(), x.generate_value()], + ], +) +def test_bad_init(pool_x_array, bad_val): + bad_val = pool_x_array.expand_arg(bad_val) + with pytest.raises(TypeError): + pool_x_array.cls(bad_val) + + +def test_initialized_init(pool_x_array): + if is_windows_32: + pytest.skip("Cause segfault on windows-32, see issue #185") + + vals = pool_x_array.generate_values(4) + v1 = pool_x_array.cls(vals) + v2 = pool_x_array.cls(Array(vals)) + v3 = pool_x_array.cls(v2) + assert type(v1) == pool_x_array.cls + assert type(v2) == pool_x_array.cls + assert type(v3) == pool_x_array.cls + assert v1 == v2 + assert v2 == v3 + assert len(v1) == 4 + + +def test_equal(pool_x_array): + vals = pool_x_array.generate_values(4) + + v1 = pool_x_array.cls(vals) + v2 = pool_x_array.cls() + for item in vals: + v2.append(item) + v3 = pool_x_array.cls() + v3 += v2 + + # Test __eq__ operator + assert v1 == v2 + assert v2 == v3 + + # Test __ne__ operator + assert not v1 != v2 + assert not v2 != v3 + + +@pytest.mark.parametrize("other_type", [list, tuple, Array]) +def test_bad_equal_on_different_types(pool_x_array, other_type): + if is_windows_32 and other_type is Array: + pytest.skip("Cause segfault on windows-32, see issue #185") + + vals = pool_x_array.generate_values(4) + + pool = pool_x_array.cls(vals) + other = other_type(vals) + + # Test __eq__ operator + assert not pool == other + + # Test __ne__ operator + assert pool != other + + +@pytest.mark.parametrize( + "arg", + [ + None, + 0, + Array(), + [], + (), + "", + Vector2(), + NODE, + lambda s: s.generate_value(), + lambda s: s.cls(s.generate_values(2)), + ], +) +def test_bad_equal(pool_x_array, arg): + pool = pool_x_array.cls() + other = pool_x_array.expand_arg(arg) + + # Test __ne__ operator + assert not pool == other + + # Test __eq__ operator + assert pool != other + + +def test_add(pool_x_array): + v0 = pool_x_array.generate_values(2) + arr = pool_x_array.cls(v0) + v1 = pool_x_array.generate_values(2) + arr += pool_x_array.cls(v1) # __iadd__ + assert arr == pool_x_array.cls(v0 + v1) + v2 = pool_x_array.generate_values(2) + arr2 = arr + pool_x_array.cls(v2) # __add__ + assert arr2 == pool_x_array.cls(v0 + v1 + v2) + + +@pytest.mark.parametrize("arg", [None, [], (), Array(), 0, "foo", Vector2(), NODE]) +def test_bad_add(pool_x_array, arg): + with pytest.raises(TypeError): + pool_x_array.cls() + arg + + +@pytest.mark.parametrize("arg", [None, [], (), Array(), 0, "foo", Vector2(), NODE]) +def test_bad_iadd(pool_x_array, arg): + arr = pool_x_array.cls() + with pytest.raises(TypeError): + arr += arg + + +def test_repr(pool_x_array): + name = pool_x_array.cls.__name__ + v = pool_x_array.cls() + assert repr(v) == f"<{name}([])>" + items = pool_x_array.generate_values(3) + v = pool_x_array.cls(items) + assert repr(v) == f"<{name}({items!r})>" + + +@pytest.mark.parametrize( + "field,ret_type,params", + [ + ["append", type(None), lambda x: (x.generate_value(),)], + ["push_back", type(None), lambda x: (x.generate_value(),)], + ["resize", type(None), (2,)], + ], + ids=lambda x: x[0], +) +def test_methods(pool_x_array, field, ret_type, 
params): + # Don't test methods' validity but bindings one + v = pool_x_array.cls(pool_x_array.generate_values(1)) + params = pool_x_array.expand_arg(params) + assert hasattr(v, field) + method = getattr(v, field) + assert callable(method) + ret = method(*params) + assert type(ret) == ret_type + + +def test_len(pool_x_array): + arr = pool_x_array.cls() + assert len(arr) == 0 + arr.append(pool_x_array.generate_value()) + assert len(arr) == 1 + + +def test_getitem(pool_x_array): + vals = pool_x_array.generate_values(3) + arr = pool_x_array.cls(vals) + assert arr[0] == vals[0] + assert arr[1] == vals[1] + assert arr[-1] == vals[-1] + + +@pytest.mark.parametrize( + "slice_", + [ + slice(1, 3), + slice(1, 3, -1), + slice(None, None, -1), + slice(None, None, 2), + slice(None, None, 10), + slice(-10, 10, 1), + slice(-10, None, 1), + slice(-1, None, 1), + slice(-1, 1, -1), + ], +) +def test_getitem_slice(pool_x_array, slice_): + vals = pool_x_array.generate_values(4) + arr = pool_x_array.cls(vals) + expected = vals[slice_] + sub_arr = arr[slice_] + assert isinstance(sub_arr, pool_x_array.cls) + assert sub_arr == pool_x_array.cls(expected) + + +def test_getitem_slice_zero_step(pool_x_array): + arr = pool_x_array.cls(pool_x_array.generate_values(2)) + with pytest.raises(ValueError): + arr[::0] + + +def test_outofrange_getitem(pool_x_array): + arr = pool_x_array.cls(pool_x_array.generate_values(2)) + with pytest.raises(IndexError): + arr[2] + with pytest.raises(IndexError): + arr[-3] + + +def test_setitem(pool_x_array): + arr = pool_x_array.cls(pool_x_array.generate_values(3)) + v = pool_x_array.generate_value() + arr[0] = v + assert len(arr) == 3 + assert arr[0] == v + arr[-1] = v + assert len(arr) == 3 + assert arr[-1] == v + + +def test_outofrange_setitem(pool_x_array): + arr = pool_x_array.cls(pool_x_array.generate_values(2)) + v = pool_x_array.generate_value() + with pytest.raises(IndexError): + arr[2] = v + with pytest.raises(IndexError): + arr[-3] = v + + +def test_delitem(pool_x_array): + items = pool_x_array.generate_values(3) + arr = pool_x_array.cls(items) + del arr[0] + assert len(arr) == 2 + assert arr[0] == items[1] + assert arr[1] == items[2] + del arr[-1] + assert len(arr) == 1 + assert arr[-1] == items[1] + + +def test_outofrange_delitem(pool_x_array): + arr = pool_x_array.cls(pool_x_array.generate_values(2)) + with pytest.raises(IndexError): + del arr[2] + with pytest.raises(IndexError): + del arr[-3] + + +def test_iter(pool_x_array): + items = pool_x_array.generate_values(3) + arr = pool_x_array.cls(items) + items_from_v = [x for x in arr] + assert items_from_v == items + + +def test_append(pool_x_array): + items = pool_x_array.generate_values(3) + arr = pool_x_array.cls() + for item in items: + arr.append(item) + assert len(arr) == 3 + assert arr == pool_x_array.cls(items) + + +def test_raw_access(pool_x_array): + arr = pool_x_array.cls() + arr.resize(100) + values = pool_x_array.generate_values(10) + + with arr.raw_access() as ptr: + assert isinstance(ptr.get_address(), int) + + for i in range(100): + ptr[i] = values[i % len(values)] + + with arr.raw_access() as ptr: + for i in range(100): + assert ptr[i] == values[i % len(values)] + + +def test_pool_byte_array_overflow(): + with pytest.raises(OverflowError): + PoolByteArray([256]) + with pytest.raises(OverflowError): + PoolByteArray([1, 2, 256, 4]) + arr = PoolByteArray([1]) + with pytest.raises(OverflowError): + arr.append(256) + with pytest.raises(OverflowError): + arr.push_back(256) diff --git a/tests/bindings/test_quat.py 
b/tests/bindings/test_quat.py new file mode 100644 index 0000000..ca3adab --- /dev/null +++ b/tests/bindings/test_quat.py @@ -0,0 +1,262 @@ +import pytest + +from godot import Basis, Quat, Vector3 + + +def test_base(): + v = Quat() + assert type(v) == Quat + + +@pytest.mark.parametrize( + "field,args", + [ + ["from_axis_angle", (Vector3.ONE, 1.1)], + ["from_euler", (Vector3.ONE,)], + ["from_basis", (Basis(),)], + ], +) +def test_inits(field, args): + build = getattr(Quat, field) + v = build(*args) + assert isinstance(v, Quat) + + +@pytest.mark.parametrize( + "field,args", + [ + ["from_axis_angle", (None, 1.1)], + ["from_euler", (None,)], + ["from_basis", (None,)], + ["from_axis_angle", (Vector3.ONE, None)], + ["from_axis_angle", (Vector3.ONE, "dummy")], + ["from_axis_angle", ("dummy", 1.1)], + ["from_euler", ("dummy",)], + ["from_basis", ("dummy",)], + ], +) +def test_bad_inits(field, args): + build = getattr(Quat, field) + with pytest.raises(TypeError): + v = build(*args) + + +def test_repr(): + v = Quat(1.0, 2.0, 3.0, 4.0) + assert repr(v) == "<Quat(x=1.0, y=2.0, z=3.0, w=4.0)>" + + +@pytest.mark.parametrize( + "args", + [ + [(), 0, 0, 0, 0], + [(0.1, 0.2, 0.3, 0.4), 0.1, 0.2, 0.3, 0.4], + [(1, 2, 3), 1, 2, 3, 0], + [(1,), 1, 0, 0, 0], + ], +) +def test_instantiate(args): + # Can build it with int or float or nothing + msg_tmpl = "%s vs (expected) %s (args=%s)" + args, expected_x, expected_y, expected_z, expected_w = args + v = Quat(*args) + assert pytest.approx(v.x) == expected_x, msg_tmpl % (v.x, expected_x, args) + assert pytest.approx(v.y) == expected_y, msg_tmpl % (v.y, expected_y, args) + assert pytest.approx(v.z) == expected_z, msg_tmpl % (v.z, expected_z, args) + assert pytest.approx(v.w) == expected_w, msg_tmpl % (v.w, expected_w, args) + + +def test_bad_instantiate(): + with pytest.raises(TypeError): + Quat("a", 2, 3, 4) + with pytest.raises(TypeError): + Quat(1, "b", 2, 4) + with pytest.raises(TypeError): + Quat(1, 2, "c", 4) + with pytest.raises(TypeError): + Quat(1, 2, 3, "d") + with pytest.raises(TypeError): + Quat(None, 2, 3, 4) + with pytest.raises(TypeError): + Quat(1, None, 2, 4) + with pytest.raises(TypeError): + Quat(1, 2, None, 4) + with pytest.raises(TypeError): + Quat(1, 2, 3, None) + + +@pytest.mark.parametrize( + "field,ret_type,params", + [ + ["length", float, ()], + ["length_squared", float, ()], + ["normalized", Quat, ()], + ["is_normalized", bool, ()], + ["inverse", Quat, ()], + ["dot", float, (Quat(),)], + ["xform", Vector3, (Vector3(),)], + ["slerp", Quat, (Quat(), 1.0)], + ["slerpni", Quat, (Quat(), 1.0)], + ["cubic_slerp", Quat, (Quat(), Quat(), Quat(), 1.0)], + ["set_axis_angle", type(None), (Vector3(1, 2, 3), 3.3)], + ], + ids=lambda x: x[0], +) +def test_methods(field, ret_type, params): + v = Quat() + # Don't test the method's validity, only the binding + assert hasattr(v, field) + method = getattr(v, field) + assert callable(method) + ret = method(*params) + assert type(ret) == ret_type + + +@pytest.mark.parametrize( + "field,ret_type", [("x", float), ("y", float), ("z", float), ("w", float)], ids=lambda x: x[0] +) +def test_properties(field, ret_type): + v = Quat() + assert hasattr(v, field) + field_val = getattr(v, field) + assert type(field_val) == ret_type + for val in (0, 10, 10.0, 42.5): + setattr(v, field, val) + field_val = getattr(v, field) + assert pytest.approx(field_val) == val + + +@pytest.mark.parametrize( + "field,bad_value", + [ + ("x", "NaN"), + ("y", "NaN"), + ("z", "NaN"), + ("w", "NaN"), + ("x", None), + ("y", None), + ("z", None), + ("w", None), + ], + ids=lambda x: 
x[0], +) +def test_bad_properties(field, bad_value): + v = Quat() + with pytest.raises(TypeError): + setattr(v, field, bad_value) + + +def test_unary(): + v = Quat(1, 2, 3, 4) + v2 = -v + assert v2.x == -1 + assert v2.y == -2 + assert v2.z == -3 + assert v2.w == -4 + v3 = +v + assert v3.x == 1 + assert v3.y == 2 + assert v3.z == 3 + assert v3.w == 4 + v = Quat(1.5, 2.5, 3.5, 4.5) + v2 = -v + assert v2.x == -1.5 + assert v2.y == -2.5 + assert v2.z == -3.5 + assert v2.w == -4.5 + v3 = +v + assert v3.x == 1.5 + assert v3.y == 2.5 + assert v3.z == 3.5 + assert v3.w == 4.5 + + +@pytest.mark.parametrize( + "param,result", + [ + (Quat(0, 0, 0, 0), Quat(2, 3, 4, 5)), + (Quat(4, 3, 2, 1), Quat(6, 6, 6, 6)), + (Quat(-4, -3, -2, -1), Quat(-2, -0, 2, 4)), + ], + ids=lambda x: x[0], +) +def test_add(param, result): + calc = Quat(2, 3, 4, 5) + param + assert calc == result + + +@pytest.mark.parametrize( + "param,result", + [ + (Quat(0, 0, 0, 0), Quat(2, 3, 4, 5)), + (Quat(5, 4, 3, 2), Quat(-3, -1, 1, 3)), + (Quat(-1, -1, -1, -1), Quat(3, 4, 5, 6)), + ], + ids=lambda x: x[0], +) +def test_sub(param, result): + calc = Quat(2, 3, 4, 5) - param + assert calc == result + + +@pytest.mark.parametrize("arg", [None, 1, "dummy"], ids=lambda x: x[0]) +def test_bad_add(arg): + with pytest.raises(TypeError): + Quat(2, 3, 4, 5) + arg + + +@pytest.mark.parametrize("arg", [None, 1, "dummy"], ids=lambda x: x[0]) +def test_bad_sub(arg): + with pytest.raises(TypeError): + Quat(2, 3, 4, 5) - arg + + +@pytest.mark.parametrize("arg", [None, "dummy", Quat(1, 1, 1, 1)], ids=lambda x: x[0]) +def test_bad_div(arg): + with pytest.raises(TypeError): + Quat(2, 3, 4, 5) / arg + + +def test_zero_div(): + with pytest.raises(ZeroDivisionError): + Quat(2, 3, 4, 5) / 0 + + +@pytest.mark.parametrize("arg", [None, "dummy"], ids=lambda x: x[0]) +def test_bad_mul(arg): + with pytest.raises(TypeError): + Quat(2, 3, 4, 5) * arg + + +@pytest.mark.parametrize( + "param,result", + [(0, Quat(0, 0, 0, 0)), (1, Quat(2, 3, 4, 5)), (2.5, Quat(5, 7.5, 10, 12.5))], + ids=lambda x: x[0], +) +def test_mul(param, result): + calc = Quat(2, 3, 4, 5) * param + assert calc == result + + +@pytest.mark.parametrize( + "param,result", + [(1, Quat(2, 3, 4, 5)), (0.5, Quat(4, 6, 8, 10)), (2, Quat(1, 1.5, 2, 2.5))], + ids=lambda x: x[0], +) +def test_div(param, result): + calc = Quat(2, 3, 4, 5) / param + assert calc == result + + +def test_equal(): + arr = Quat(0.1, 1, 2, 3) + other = Quat(0.1, 1, 2, 3) + assert arr == other + bad = Quat(0.1, 1, 2, 4) + assert not arr == bad # Force use of __eq__ + + +@pytest.mark.parametrize("arg", [None, 0, "foo", Quat(0.1, 1, 2, 4)]) +def test_bad_equal(arg): + arr = Quat(0.1, 1, 2, 3) + assert arr != arg diff --git a/tests/bindings/test_rect2.py b/tests/bindings/test_rect2.py new file mode 100644 index 0000000..e44c519 --- /dev/null +++ b/tests/bindings/test_rect2.py @@ -0,0 +1,122 @@ +import pytest + +from godot import Rect2, Vector2 + + +def test_base(): + v = Rect2(4, 3, 2, 1) + assert type(v) == Rect2 + v2 = Rect2(1, 2, 3, 4) + assert type(v2) == Rect2 + assert v2 == Rect2(1, 2, 3, 4) + assert v != v2 + + +def test_repr(): + v = Rect2(1, 2) + assert repr(v) == "<Rect2(1.0, 2.0, 0.0, 0.0)>" + + +def test_instantiate(): + # Can build it with int or float or nothing + msg_tmpl = "%s vs (expected) %s (args=%s)" + for args, expected_pos, expected_size in ( + [(), Vector2(0, 0), Vector2(0, 0)], + [(0.5, 0.5), Vector2(0.5, 0.5), Vector2(0, 0)], + [(1, 2, 1, 2), Vector2(1, 2), Vector2(1, 2)], + ): + v = Rect2(*args) + assert v.position == expected_pos, 
msg_tmpl % (v.position, expected_pos, args) + assert v.size == expected_size, msg_tmpl % (v.size, expected_size, args) + with pytest.raises(TypeError): + Rect2("a", 2, 3, 4) + with pytest.raises(TypeError): + Rect2(1, "b", 3, 4) + with pytest.raises(TypeError): + Rect2(1, 2, "c", 4) + with pytest.raises(TypeError): + Rect2(1, 2, 3, "d") + with pytest.raises(TypeError): + Rect2(None, 2) + + +@pytest.mark.parametrize( + "field,ret_type,params", + [ + ["get_area", float, ()], + ["intersects", bool, (Rect2(),)], + ["encloses", bool, (Rect2(),)], + ["has_no_area", bool, ()], + ["clip", Rect2, (Rect2(),)], + ["merge", Rect2, (Rect2(),)], + ["has_point", bool, (Vector2(),)], + ["grow", Rect2, (0.5,)], + ["grow_individual", Rect2, (0.1, 0.2, 0.3, 0.4)], + ["grow_margin", Rect2, (42, 0.5)], + ["abs", Rect2, ()], + ["expand", Rect2, (Vector2(),)], + ], + ids=lambda x: x[0], +) +def test_methods(field, ret_type, params): + v = Rect2() + # Don't test the method's validity, only the binding + assert hasattr(v, field) + method = getattr(v, field) + assert callable(method) + ret = method(*params) + assert type(ret) == ret_type + + +@pytest.mark.parametrize( + "field,ret_type", [("position", Vector2), ("size", Vector2)], ids=lambda x: x[0] +) +def test_rw_properties(field, ret_type): + v = Rect2() + assert hasattr(v, field) + field_val = getattr(v, field) + assert type(field_val) == ret_type + for val in (Vector2(), Vector2(0.1, -0.1)): + setattr(v, field, val) + field_val = getattr(v, field) + assert field_val == val + + +def test_ro_end_property(): + v = Rect2() + assert hasattr(v, "end") + assert type(v.end) == Vector2 + with pytest.raises(AttributeError): + v.end = Vector2() + + +@pytest.mark.parametrize( + "field,bad_value", + [ + ("position", "dummy"), + ("size", "dummy"), + ("position", None), + ("size", None), + ("position", 42), + ("size", 42), + ], + ids=lambda x: x[0], +) +def test_bad_rw_properties(field, bad_value): + v = Rect2() + with pytest.raises(TypeError): + setattr(v, field, bad_value) + + +def test_equal(): + arr = Rect2(0.1, 1, 2, 3) + other = Rect2(0.1, 1, 2, 3) + assert arr == other + bad = Rect2(0.1, 1, 2, 4) + assert not arr == bad # Force use of __eq__ + + +@pytest.mark.parametrize("arg", [None, 0, "foo", Rect2(0.1, 1, 2, 4)]) +def test_bad_equal(arg): + arr = Rect2(0.1, 1, 2, 3) + assert arr != arg diff --git a/tests/bindings/test_rid.py b/tests/bindings/test_rid.py new file mode 100644 index 0000000..0f0a157 --- /dev/null +++ b/tests/bindings/test_rid.py @@ -0,0 +1,113 @@ +import pytest + +from godot import RID, Environment, Node, OS + + +@pytest.fixture +def environment_factory(): + # Environment objects are stubbed on headless server, hence + # their corresponding RID is always the same default value + if OS.has_feature("Server"): + pytest.skip("Not available on headless Godot") + + def _factory(): + return Environment() + + return _factory + + +def test_base(): + v = RID() + assert type(v) == RID + + +def test_equal(environment_factory): + v1 = RID() + v2 = RID() + assert v1 == v2 + # Environment is a Resource, which provides a unique RID per instance + res_a = environment_factory() + v_a_1 = RID(res_a) + assert v_a_1 != v1 + v_a_2 = RID(res_a) + assert v_a_1 == v_a_2 + res_b = environment_factory() + v_b = RID(res_b) + assert not v_a_1 == v_b # Force use of __eq__ + + +@pytest.mark.parametrize("arg", [None, 0, "foo"]) +def test_bad_equal(arg): + arr = RID(Environment()) + assert arr != arg + + +def test_bad_equal_with_rid(environment_factory): + # Doing `RID(Environment())` will 
cause garbage collection of the enclosed + # Environment object and possible reuse of its id + env1 = environment_factory() + env2 = environment_factory() + rid1 = RID(env1) + rid2 = RID(env2) + assert rid1 != rid2 + + +def test_lt(environment_factory): + env1 = environment_factory() + env2 = environment_factory() + rid1 = RID(env1) + rid2 = RID(env2) + # Ordering is based on the resource pointer, so we cannot know the order ahead of time + small, big = sorted([rid1, rid2]) + assert small < big + assert big > small + assert not small > big + assert not big < small + + +def test_repr(): + v = RID() + assert repr(v) == "<RID(id=0)>" + + +@pytest.mark.parametrize("arg", [42, "dummy", RID()]) +def test_bad_instantiate(arg): + with pytest.raises(TypeError): + RID(arg) + + +def test_bad_instantiate_with_not_resource(generate_obj): + # Node doesn't inherit from Resource + node = generate_obj(Node) + with pytest.raises(TypeError): + RID(node) + + +@pytest.mark.parametrize("args", [["get_id", int, ()]], ids=lambda x: x[0]) +def test_methods(args): + v = RID() + # Don't test the method's validity, only the binding + field, ret_type, params = args + assert hasattr(v, field) + method = getattr(v, field) + assert callable(method) + ret = method(*params) + assert type(ret) == ret_type + + +# @pytest.mark.parametrize('args', [ +# (Vector2(0, 0), Vector2(2, 3)), +# (Vector2(3, 2), Vector2(-1, 1)), +# (Vector2(-1, -1), Vector2(3, 4)), +# ], ids=lambda x: x[0]) +# def test_lt(args): +# param, result = args +# calc = Vector2(2, 3) - param +# assert calc == result + +# @pytest.mark.parametrize('arg', [ +# None, 1, 'dummy' +# ], ids=lambda x: x[0]) +# def test_bad_add(arg): +# with pytest.raises(TypeError): +# Vector2(2, 3) + arg diff --git a/tests/bindings/test_starimport.py b/tests/bindings/test_starimport.py new file mode 100644 index 0000000..7c7edc6 --- /dev/null +++ b/tests/bindings/test_starimport.py @@ -0,0 +1,11 @@ +# This test is in its own file to protect other tests from the `import *` side effects +from godot import * + +# Classes with a leading underscore are not provided on star import +from godot.bindings import _OS, _ProjectSettings + + +def test_starimport(): + assert issubclass(Node, Object) + assert isinstance(OS, _OS) + assert isinstance(ProjectSettings, _ProjectSettings) diff --git a/tests/bindings/test_string.py b/tests/bindings/test_string.py new file mode 100644 index 0000000..9eb9c1c --- /dev/null +++ b/tests/bindings/test_string.py @@ -0,0 +1,38 @@ +import sys +import pytest + +from godot import GDString + + +def test_base(): + assert GDString().empty() + # Todo later: GDString creation from GD types: Vector2/3, Transform, Plane, Quat, AABB, Color, ... 
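+ # Illustrative sketch: GDString wraps Godot's String type, and str(GDString("12")) == "12" converts back to a Python str (the round-trip that test_unicode below relies on).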
+ s = GDString("12") + assert s.begins_with(GDString("1")) + assert s.bigrams().size() == 1 + assert GDString("\ta").dedent() == GDString("a") + assert s.ends_with(GDString("2")) + abc = GDString("abc") + abc.erase(1, 1) + assert abc == GDString("ac") + assert GDString("abc").capitalize() == GDString("Abc") + assert GDString("abc").find(GDString("b")) == 1 + assert GDString("file.ext").get_extension() == GDString("ext") + assert GDString("127.0.0.1").is_valid_ip_address() + assert not GDString("127.0.0.1.xxx").is_valid_ip_address() + assert GDString("abc").length() == 3 + assert GDString("3.14").to_float() == pytest.approx(3.14) + assert GDString("42").to_int() == 42 + # GDString.humanize_size is a static method + assert GDString.humanize_size(133790307) == GDString("127.5 MiB") + + +@pytest.mark.parametrize("char", ["e", "รฉ", "โ‚ฌ", "่›‡", "๐Ÿ"]) +def test_unicode(char): + # Godot supports UCS2 on Windows and UCS4 on other platforms + if len(char.encode("utf8")) > 2 and sys.platform == "win32": + pytest.skip("Windows only supports UCS2") + + gdchar = GDString(char) + assert str(gdchar) == char + assert gdchar.length() == len(char) diff --git a/tests/bindings/test_tools.py b/tests/bindings/test_tools.py new file mode 100644 index 0000000..3293dbb --- /dev/null +++ b/tests/bindings/test_tools.py @@ -0,0 +1,120 @@ +# import pytest + +# from godot import Array, Dictionary +# from godot.hazmat.tools import ( +# variant_to_pyobj, +# pyobj_to_variant, +# gdobj_to_pyobj, +# pyobj_to_gdobj, +# gd_to_py_type, +# py_to_gd_type, +# godot_string_to_pyobj, +# ) +# from pythonscriptcffi import lib + +# # @pytest.mark.parametrize('arg', [ +# # None, +# # 0, +# # 42, +# # 0.0, +# # 42.5, +# # '', +# # 'test', +# # Dictionary(), +# # Dictionary({'foo': 1, 2: 'bar'}), +# # Array(), +# # Array(['foo', 2]), +# # ]) +# # def test_pyobj_variant_conversion(arg): +# # variant = pyobj_to_variant(arg) +# # ret_arg = variant_to_pyobj(variant) +# # assert ret_arg == arg + + +# # @pytest.mark.parametrize('arg', [ +# # None, +# # 0, +# # 42, +# # 0.0, +# # 42.5, +# # '', +# # 'test', +# # Dictionary(), +# # Dictionary({'foo': 1, 2: 'bar'}), +# # Array(), +# # Array(['foo', 2]), +# # ]) +# # def test_pyobj_raw_conversion(arg): +# # variant = pyobj_to_gdobj(arg) +# # ret_arg = gdobj_to_pyobj(variant, ) +# # assert ret_arg == arg + + +# @pytest.mark.parametrize("arg", ["", "foo", "l" + "o" * 25000 + "ong"]) +# def test_godot_string_to_pyobj(arg): +# gdstr = pyobj_to_gdobj(arg) +# ret_arg = godot_string_to_pyobj(gdstr) +# assert ret_arg == arg + + +# @pytest.mark.parametrize( +# "args", +# [ +# (lib.GODOT_VARIANT_TYPE_NIL, type(None)), +# (lib.GODOT_VARIANT_TYPE_REAL, float), +# (lib.GODOT_VARIANT_TYPE_STRING, str), +# (lib.GODOT_VARIANT_TYPE_DICTIONARY, Dictionary), +# ], +# ) +# def test_gd_py_type_translation(args): +# gdtype, pytype = args + +# rettype = gd_to_py_type(gdtype) +# assert rettype == pytype + +# rettype = py_to_gd_type(pytype) +# assert rettype == gdtype + + +# # @pytest.mark.parametrize('args', [ +# # (None, lib.GODOT_VARIANT_TYPE_NIL), +# # (0, lib.GODOT_VARIANT_TYPE_INT), +# # (42, lib.GODOT_VARIANT_TYPE_INT), +# # (0.0, lib.GODOT_VARIANT_TYPE_REAL), +# # (42.5, lib.GODOT_VARIANT_TYPE_REAL), +# # ('', lib.GODOT_VARIANT_TYPE_STRING), +# # ('test', lib.GODOT_VARIANT_TYPE_STRING), +# # (Dictionary(), lib.GODOT_VARIANT_TYPE_DICTIONARY), +# # (Dictionary({'foo': 1, 2: 'bar'}), lib.GODOT_VARIANT_TYPE_DICTIONARY), +# # (Array(), lib.GODOT_VARIANT_TYPE_ARRAY), +# # (Array(['foo', 2]), 
lib.GODOT_VARIANT_TYPE_ARRAY), + # # ]) + # # def test_new_raw_initialized(args): + # # pyobj, gdtype = args + # # new_raw() + # # raw = pyobj_to_gdobj(pyobj) + # # ret_pyobj = gdobj_to_pyobj(gdtype, raw) + # # assert ret_pyobj == pyobj + + +# @pytest.mark.parametrize( +# "args", +# [ +# (None, lib.GODOT_VARIANT_TYPE_NIL), +# (0, lib.GODOT_VARIANT_TYPE_INT), +# (42, lib.GODOT_VARIANT_TYPE_INT), +# (0.0, lib.GODOT_VARIANT_TYPE_REAL), +# (42.5, lib.GODOT_VARIANT_TYPE_REAL), +# ("", lib.GODOT_VARIANT_TYPE_STRING), +# ("test", lib.GODOT_VARIANT_TYPE_STRING), +# (Dictionary(), lib.GODOT_VARIANT_TYPE_DICTIONARY), +# (Dictionary({"foo": 1, 2: "bar"}), lib.GODOT_VARIANT_TYPE_DICTIONARY), +# (Array(), lib.GODOT_VARIANT_TYPE_ARRAY), +# (Array(["foo", 2]), lib.GODOT_VARIANT_TYPE_ARRAY), +# ], +# ) +# def test_pyobj_raw_conversion(args): +# pyobj, gdtype = args +# raw = pyobj_to_gdobj(pyobj) +# ret_pyobj = gdobj_to_pyobj(gdtype, raw) +# assert ret_pyobj == pyobj diff --git a/tests/bindings/test_transform.py b/tests/bindings/test_transform.py new file mode 100644 index 0000000..286a3bf --- /dev/null +++ b/tests/bindings/test_transform.py @@ -0,0 +1,17 @@ +import pytest + +from godot import Transform, Basis, Vector3 + + +def test_base(): + v = Transform() + assert type(v) == Transform + v2 = Transform.from_basis_origin(Basis(), Vector3(1, 2, 3)) + assert type(v2) == Transform + assert v2 == Transform.from_basis_origin(Basis(), Vector3(1, 2, 3)) + assert v != v2 + + +def test_repr(): + v = Transform() + assert repr(v).startswith("<Transform(") diff --git a/tests/bindings/test_vector2.py b/tests/bindings/test_vector2.py new file mode 100644 --- /dev/null +++ b/tests/bindings/test_vector2.py +import pytest + +from godot import Vector2 + + +def test_base(): + v = Vector2() + assert type(v) == Vector2 + v2 = Vector2(1, -2) + assert type(v2) == Vector2 + assert v2 == Vector2(1, -2) + assert v != v2 + + +def test_repr(): + v = Vector2(1, 2) + assert repr(v) == "<Vector2(x=1.0, y=2.0)>" + + +def test_instantiate(): + # Can build it with int or float or nothing + msg_tmpl = "%s vs (expected) %s (args=%s)" + for args, expected_x, expected_y in ( + [(), 0, 0], + [(0.5, 0.5), 0.5, 0.5], + [(1, 2), 1, 2], + [(1,), 1, 0], + ): + v = Vector2(*args) + assert v.x == expected_x, msg_tmpl % (v.x, expected_x, args) + assert v.y == expected_y, msg_tmpl % (v.y, expected_y, args) + assert v.width == expected_x, msg_tmpl % (v.width, expected_x, args) + assert v.height == expected_y, msg_tmpl % (v.height, expected_y, args) + with pytest.raises(TypeError): + Vector2("a", 2) + with pytest.raises(TypeError): + Vector2(1, "b") + with pytest.raises(TypeError): + Vector2(None, 2) + + +@pytest.mark.parametrize( + "field,ret_type,params", + [ + ["abs", Vector2, ()], + ["angle", float, ()], + ["angle_to", float, (Vector2(),)], + ["angle_to_point", float, (Vector2(),)], + ["clamped", Vector2, (0.5,)], + ["cubic_interpolate", Vector2, (Vector2(), Vector2(), Vector2(), 0.5)], + ["distance_squared_to", float, (Vector2(),)], + ["distance_to", float, (Vector2(),)], + ["dot", float, (Vector2(),)], + ["floor", Vector2, ()], + ["aspect", float, ()], + ["length", float, ()], + ["length_squared", float, ()], + ["linear_interpolate", Vector2, (Vector2(), 0.5)], + ["normalized", Vector2, ()], + ["reflect", Vector2, (Vector2(),)], + ["rotated", Vector2, (0.5,)], + ["slide", Vector2, (Vector2(),)], + ["snapped", Vector2, (Vector2(),)], + ["tangent", Vector2, ()], + ], + ids=lambda x: x[0], +) +def test_methods(field, ret_type, params): + v = Vector2() + # Don't test the method's validity, only the binding + assert hasattr(v, field) + method = getattr(v, field) + assert callable(method) + ret = method(*params) + assert type(ret) == ret_type + + +@pytest.mark.parametrize( + "field,ret_type", + [("height", float), ("width", float), ("x", float), ("y", float)], + ids=lambda x: x[0], +) +def test_properties(field, ret_type): + v = Vector2() + assert hasattr(v, field) 
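+ # The loop below round-trips both int and float writes; the binding stores components as float, so reads still compare equal (e.g. 0 == 0.0).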
+ field_val = getattr(v, field) + assert type(field_val) == ret_type + for val in (0, 10, 10.0, 42.5): + setattr(v, field, val) + field_val = getattr(v, field) + assert field_val == val + + +@pytest.mark.parametrize( + "field,bad_value", + [ + ("height", "NaN"), + ("width", "NaN"), + ("x", "NaN"), + ("y", "NaN"), + ("height", None), + ("width", None), + ("x", None), + ("y", None), + ], + ids=lambda x: x[0], +) +def test_bad_properties(field, bad_value): + v = Vector2() + with pytest.raises(TypeError): + setattr(v, field, bad_value) + + +def test_unary(): + v = Vector2(1, 2) + v2 = -v + assert v2.x == -1 + assert v2.y == -2 + v3 = +v + assert v3.x == 1 + assert v3.y == 2 + v = Vector2(1.5, 2.5) + v2 = -v + assert v2.x == -1.5 + assert v2.y == -2.5 + v3 = +v + assert v3.x == 1.5 + assert v3.y == 2.5 + + +@pytest.mark.parametrize( + "param,result", + [ + (Vector2(0, 0), Vector2(2, 3)), + (Vector2(3, 2), Vector2(5, 5)), + (Vector2(-1, -4), Vector2(1, -1)), + ], + ids=lambda x: x[0], +) +def test_add(param, result): + calc = Vector2(2, 3) + param + assert calc == result + + +@pytest.mark.parametrize( + "param,result", + [ + (Vector2(0, 0), Vector2(2, 3)), + (Vector2(3, 2), Vector2(-1, 1)), + (Vector2(-1, -1), Vector2(3, 4)), + ], + ids=lambda x: x[0], +) +def test_sub(param, result): + calc = Vector2(2, 3) - param + assert calc == result + + +@pytest.mark.parametrize("arg", [None, 1, "dummy"], ids=lambda x: x[0]) +def test_bad_add(arg): + with pytest.raises(TypeError): + Vector2(2, 3) + arg + + +@pytest.mark.parametrize("arg", [None, 1, "dummy"], ids=lambda x: x[0]) +def test_bad_sub(arg): + with pytest.raises(TypeError): + Vector2(2, 3) - arg + + +@pytest.mark.parametrize("arg", [None, "dummy"], ids=lambda x: x[0]) +def test_bad_div(arg): + with pytest.raises(TypeError): + Vector2(2, 3) / arg + + +@pytest.mark.parametrize( + "arg", [0, Vector2(0, 1), Vector2(1, 0), Vector2(0, 0)], ids=lambda x: x[0] +) +def test_zero_div(arg): + with pytest.raises(ZeroDivisionError): + Vector2(2, 3) / arg + + +@pytest.mark.parametrize("arg", [None, "dummy"], ids=lambda x: x[0]) +def test_bad_mult(arg): + with pytest.raises(TypeError): + Vector2(2, 3) * arg + + +@pytest.mark.parametrize( + "param,result", + [ + (0, Vector2(0, 0)), + (1, Vector2(2, 3)), + (2.5, Vector2(5, 7.5)), + (Vector2(1, 1), Vector2(2, 3)), + (Vector2(2, 3), Vector2(4, 9)), + ], + ids=lambda x: x[0], +) +def test_mult(param, result): + calc = Vector2(2, 3) * param + assert calc == result + + +@pytest.mark.parametrize( + "param,result", + [ + (1, Vector2(2, 3)), + (0.5, Vector2(4, 6)), + (2, Vector2(1, 1.5)), + (Vector2(1, 1), Vector2(2, 3)), + (Vector2(2, 3), Vector2(1, 1)), + ], + ids=lambda x: x[0], +) +def test_div(param, result): + calc = Vector2(2, 3) / param + assert calc == result + + +def test_equal(): + arr = Vector2(1, 2) + other = Vector2(1, 2) + assert arr == other + bad = Vector2(1, 3) + assert not arr == bad # Force use of __eq__ + + +@pytest.mark.parametrize("arg", [None, 0, "foo", Vector2(1, 3)]) +def test_bad_equal(arg): + arr = Vector2(1, 2) + assert arr != arg + + +@pytest.mark.parametrize( + "field,type", + [ + ("AXIS_X", int), + ("AXIS_Y", int), + ("ZERO", Vector2), + ("ONE", Vector2), + ("INF", Vector2), + ("LEFT", Vector2), + ("RIGHT", Vector2), + ("UP", Vector2), + ("DOWN", Vector2), + ], + ids=lambda x: x[0], +) +def test_constants(field, type): + field_val = getattr(Vector2, field) + assert isinstance(field_val, type) diff --git a/tests/bindings/test_vector3.py b/tests/bindings/test_vector3.py new file mode 100644 
index 0000000..b4d2877 --- /dev/null +++ b/tests/bindings/test_vector3.py @@ -0,0 +1,230 @@ +import pytest +from enum import IntEnum + +from godot import Vector3 + + +def test_base(): + v = Vector3() + assert isinstance(v, Vector3) + v2 = Vector3(1, -2, 5) + assert isinstance(v2, Vector3) + assert v2 == Vector3(1, -2, 5) + assert v != v2 + + +def test_repr(): + v = Vector3(1, 2, 3) + assert repr(v) == "<Vector3(x=1.0, y=2.0, z=3.0)>" + + +def test_instantiate(): + # Can build it with int or float or nothing + for args, expected_x, expected_y, expected_z in ( + [(), 0, 0, 0], + [(0.5, 0.5, 0.5), 0.5, 0.5, 0.5], + [(1,), 1, 0, 0], + [(1, 1), 1, 1, 0], + [(1, 2, 3), 1, 2, 3], + ): + v = Vector3(*args) + assert v.x == expected_x + assert v.y == expected_y + assert v.z == expected_z + with pytest.raises(TypeError): + Vector3("a", 2, 3) + with pytest.raises(TypeError): + Vector3("a", 2) + with pytest.raises(TypeError): + Vector3(1, "b", 5) + with pytest.raises(TypeError): + Vector3(None, 2, "c") + + +@pytest.mark.parametrize( + "field,ret_type,params", + [ + ["abs", Vector3, ()], + ["angle_to", float, (Vector3(),)], + ["ceil", Vector3, ()], + ["cross", Vector3, (Vector3(),)], + ["cubic_interpolate", Vector3, (Vector3(), Vector3(), Vector3(), 0.5)], + ["distance_squared_to", float, (Vector3(),)], + ["distance_to", float, (Vector3(),)], + ["dot", float, (Vector3(),)], + ["floor", Vector3, ()], + ["inverse", Vector3, ()], + ["length", float, ()], + ["length_squared", float, ()], + ["linear_interpolate", Vector3, (Vector3(), 0.5)], + ["max_axis", int, ()], + ["min_axis", int, ()], + ["normalized", Vector3, ()], + ["reflect", Vector3, (Vector3(),)], + ["rotated", Vector3, (Vector3(), 0.5)], + ["slide", Vector3, (Vector3(),)], + ["snapped", Vector3, (Vector3(),)], + ], + ids=lambda x: x[0], +) +def test_methods(field, ret_type, params): + v = Vector3() + # Don't test the method's validity, only the binding + assert hasattr(v, field) + method = getattr(v, field) + assert callable(method) + ret = method(*params) + assert isinstance(ret, ret_type) + + +@pytest.mark.parametrize( + "field,type", [("x", float), ("y", float), ("z", float)], ids=lambda x: x[0] +) +def test_properties(field, type): + v = Vector3() + field_val = getattr(v, field) + assert isinstance(field_val, type) + val = 10.0 + setattr(v, field, val) + field_val = getattr(v, field) + assert field_val == val + + +@pytest.mark.parametrize( + "field,bad_value", + [("x", "NaN"), ("y", "NaN"), ("z", "NaN"), ("x", None), ("y", None), ("z", None)], + ids=lambda x: x[0], +) +def test_bad_properties(field, bad_value): + v = Vector3() + with pytest.raises(TypeError): + setattr(v, field, bad_value) + + +@pytest.mark.parametrize( + "param,result", + [ + (0, Vector3(0, 0, 0)), + (1, Vector3(2, 3, 4)), + (2.5, Vector3(5, 7.5, 10)), + (Vector3(1, 1, 1), Vector3(2, 3, 4)), + (Vector3(2, 3, 4), Vector3(4, 9, 16)), + ], + ids=lambda x: x[0], +) +def test_mult(param, result): + calc = Vector3(2, 3, 4) * param + assert calc == result + + +@pytest.mark.parametrize( + "param,result", + [ + (1, Vector3(2, 3, 4)), + (0.5, Vector3(4, 6, 8)), + (2, Vector3(1, 1.5, 2)), + (Vector3(1, 1, 1), Vector3(2, 3, 4)), + (Vector3(2, 3, 4), Vector3(1, 1, 1)), + ], + ids=lambda x: x[0], +) +def test_div(param, result): + calc = Vector3(2, 3, 4) / param + assert calc == result + + +@pytest.mark.parametrize( + "param,result", + [ + (Vector3(0, 0, 0), Vector3(2, 3, 4)), + (Vector3(3, 2, 1), Vector3(5, 5, 5)), + (Vector3(-1, -4, -2), Vector3(1, -1, 2)), + ], + ids=lambda x: x[0], +) +def test_add(param, result): + 
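# Addition is component-wise: e.g. Vector3(2, 3, 4) + Vector3(3, 2, 1) == Vector3(5, 5, 5), the second case above. +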
calc = Vector3(2, 3, 4) + param + assert calc == result + + +@pytest.mark.parametrize( + "param,result", + [ + (Vector3(0, 0, 0), Vector3(2, 3, 4)), + (Vector3(3, 2, 1), Vector3(-1, 1, 3)), + (Vector3(-1, -1, -1), Vector3(3, 4, 5)), + ], + ids=lambda x: x[0], +) +def test_sub(param, result): + calc = Vector3(2, 3, 4) - param + assert calc == result + + +@pytest.mark.parametrize("arg", [None, 1, "dummy"], ids=lambda x: x[0]) +def test_bad_add(arg): + with pytest.raises(TypeError): + Vector3(2, 3, 4) + arg + + +@pytest.mark.parametrize("arg", [None, 1, "dummy"], ids=lambda x: x[0]) +def test_bad_sub(arg): + with pytest.raises(TypeError): + Vector3(2, 3, 4) - arg + + +@pytest.mark.parametrize("arg", [None, "dummy"], ids=lambda x: x[0]) +def test_bad_div(arg): + with pytest.raises(TypeError): + Vector3(2, 3, 4) / arg + + +@pytest.mark.parametrize( + "arg", + [0, Vector3(0, 1, 1), Vector3(1, 0, 1), Vector3(1, 1, 0), Vector3(0, 0, 0)], + ids=lambda x: x[0], +) +def test_zero_div(arg): + with pytest.raises(ZeroDivisionError): + Vector3(2, 3, 4) / arg + + +@pytest.mark.parametrize("arg", [None, "dummy"], ids=lambda x: x[0]) +def test_bad_mult(arg): + with pytest.raises(TypeError): + Vector3(2, 3, 4) * arg + + +def test_equal(): + arr = Vector3(1, 2, 3) + other = Vector3(1, 2, 3) + assert arr == other + bad = Vector3(1, 2, 4) + assert not arr == bad # Force use of __eq__ + + +@pytest.mark.parametrize("arg", [None, 0, "foo", Vector3(1, 2, 4)]) +def test_bad_equal(arg): + arr = Vector3(1, 2, 3) + assert arr != arg + + +@pytest.mark.parametrize( + "field,type", + [ + ("ZERO", Vector3), + ("ONE", Vector3), + ("INF", Vector3), + ("LEFT", Vector3), + ("RIGHT", Vector3), + ("UP", Vector3), + ("DOWN", Vector3), + ("FORWARD", Vector3), + ("BACK", Vector3), + ("AXIS", type(IntEnum)), + ], + ids=lambda x: x[0], +) +def test_constants(field, type): + field_val = getattr(Vector3, field) + assert isinstance(field_val, type) diff --git a/tests/global_constants/access_from_gdscript.gd b/tests/global_constants/access_from_gdscript.gd new file mode 100644 index 0000000..63a3588 --- /dev/null +++ b/tests/global_constants/access_from_gdscript.gd @@ -0,0 +1,19 @@ +extends Node + + +var outcome = null + +func _ready(): + for data in [["global_py", "Python"], ["global_gd", "GDScript"]]: + var name = data[0] + var type = data[1] + var path = "/root/%s" % name + var node = get_node(path) + if not node: + outcome = "Cannot retrieve node `%s`" % path + return + if node.type != type: + outcome = "Invalid Node type for `%s` (expected `%s`, got `%s`)" % [path, type, node.type] + return + node.set_accessed("GDScript") + outcome = "ok" diff --git a/tests/global_constants/access_from_python.py b/tests/global_constants/access_from_python.py new file mode 100644 index 0000000..747a3d8 --- /dev/null +++ b/tests/global_constants/access_from_python.py @@ -0,0 +1,62 @@ +import traceback + +from godot import Node, exposed, export + +try: + from godot.globals import global_gd, global_py + + global_import_outcome = "ok" +except Exception as exc: + traceback.print_exc() + global_import_outcome = ( + f"Error doing `from godot.globals import global_gd, global_py` at module level: {exc!r}" + ) + + +@exposed +class access_from_python(Node): + + outcome = export(str, default=None) + + def _ready(self): + try: + self.do_test() + except Exception as exc: + self.outcome = f"Unexpected error: {exc!r}" + raise # Stacktrace will be displayed on stdout this way + self.outcome = self.outcome or "ok" + + def do_test(self): + # Test accessing from 
`Node.get_node` + for name, type in (("global_py", "Python"), ("global_gd", "GDScript")): + path = f"/root/{name}" + node = self.get_node(path) + if not node: + self.outcome = f"Cannot retrieve node `{path}`" + return + if str(node.type) != type: + self.outcome = ( + f"Invalid Node type for `{path}` (expected `{type}`, got `{node.type}`)" + ) + return + node.set_accessed("Python") + + # Also test accessing from `godot.globals` module + if global_import_outcome != "ok": + self.outcome = global_import_outcome + return + + from godot import globals as godot_globals + + godot_globals_dir = dir(godot_globals) + expected_godot_globals_dir = ["global_gd", "global_py"] + if godot_globals_dir != expected_godot_globals_dir: + self.outcome = f"Invalid `dir(godot.globals)` (expected: `{expected_godot_globals_dir}`, got `{godot_globals_dir}`)" + return + for name, type in (("global_py", "Python"), ("global_gd", "GDScript")): + node_from_globals = getattr(godot_globals, name) + if str(node_from_globals.type) != type: + self.outcome = ( + f"Invalid Node type for `{name}` (expected `{type}`, got `{node_from_globals.type}`)" + ) + return diff --git a/tests/global_constants/global_gd.gd b/tests/global_constants/global_gd.gd new file mode 100644 index 0000000..6987b69 --- /dev/null +++ b/tests/global_constants/global_gd.gd @@ -0,0 +1,7 @@ +extends Node + +var accessors = [] +var type = "GDScript" + +func set_accessed(name): + accessors.append(name) diff --git a/tests/global_constants/global_py.py b/tests/global_constants/global_py.py new file mode 100644 index 0000000..65bcbe5 --- /dev/null +++ b/tests/global_constants/global_py.py @@ -0,0 +1,11 @@ +from godot import Node, exposed, export, Array + + +@exposed +class global_py(Node): + + accessors = export(Array, default=Array()) + type = export(str, default="Python") + + def set_accessed(self, name): + self.accessors.append(name) diff --git a/tests/global_constants/main.py b/tests/global_constants/main.py new file mode 100644 index 0000000..dad6cef --- /dev/null +++ b/tests/global_constants/main.py @@ -0,0 +1,47 @@ +import traceback + +from godot import exposed, Node, OS + + +@exposed +class Main(Node): + def check_accessor_ok(self, name): + node = self.get_node(name) + if not node: + print(f"Cannot retrieve node `{name}`") + return False + print(f"Node {name}, outcome: {node.outcome}") + if str(node.outcome) != "ok": + print(f"Node `{name}` had bad outcome `{node.outcome}`") + return False + return True + + def check_global_ok(self, name): + path = f"/root/{name}" + node = self.get_node(path) + if not node: + print(f"Cannot retrieve node `{path}`") + return False + accessors = {str(x) for x in node.accessors} + if accessors != {"Python", "GDScript"}: + print(f"Node `{name}` hasn't been correctly visited: {accessors}") + return False + return True + + def _ready(self): + ok = True + # Children _ready should have been called before us + try: + ok &= self.check_accessor_ok("access_from_gdscript") + ok &= self.check_accessor_ok("access_from_python") + ok &= self.check_global_ok("global_gd") + ok &= self.check_global_ok("global_py") + except Exception as exc: + print("Unexpected error!") + traceback.print_exc() + ok = False + + if not ok: + OS.set_exit_code(1) + # Exit godot + self.get_tree().quit() diff --git a/tests/global_constants/main.tscn b/tests/global_constants/main.tscn new file mode 100644 index 0000000..a8af5e2 --- /dev/null +++ b/tests/global_constants/main.tscn @@ -0,0 +1,14 @@ +[gd_scene load_steps=4 format=2] + +[ext_resource path="res://main.py" 
type="Script" id=1] +[ext_resource path="res://access_from_gdscript.gd" type="Script" id=2] +[ext_resource path="res://access_from_python.py" type="Script" id=3] + +[node name="main" type="Node"] +script = ExtResource( 1 ) + +[node name="access_from_gdscript" type="Node" parent="."] +script = ExtResource( 2 ) + +[node name="access_from_python" type="Node" parent="."] +script = ExtResource( 3 ) diff --git a/tests/global_constants/project.godot b/tests/global_constants/project.godot new file mode 100644 index 0000000..c2411ea --- /dev/null +++ b/tests/global_constants/project.godot @@ -0,0 +1,34 @@ +; Engine configuration file. +; It's best edited using the editor UI and not directly, +; since the parameters that go here are not all obvious. +; +; Format: +; [section] ; section goes between [] +; param=value ; assign values to parameters + +config_version=4 + +_global_script_classes=[ ] +_global_script_class_icons={ + +} + +[application] + +run/main_scene="res://main.tscn" +name="godot-bindings-tests" +main_scene="res://main.tscn" + +[autoload] + +global_gd="*res://global_gd.gd" +global_py="*res://global_py.py" + +[gdnative] + +singletons=[ "res://pythonscript.gdnlib" ] + +[python_script] + +io_streams_capture=false +verbose=true diff --git a/tests/global_constants/pythonscript.gdnlib b/tests/global_constants/pythonscript.gdnlib new file mode 100644 index 0000000..1510867 --- /dev/null +++ b/tests/global_constants/pythonscript.gdnlib @@ -0,0 +1,23 @@ +[general] + +singleton=true +load_once=true +symbol_prefix="godot_" + +[entry] + +X11.64="res://addons/pythonscript/x11-64/libpythonscript.so" +X11.32="res://addons/pythonscript/x11-32/libpythonscript.so" +Server.64="res://addons/pythonscript/x11-64/libpythonscript.so" +Windows.64="res://addons/pythonscript/windows-64/pythonscript.dll" +Windows.32="res://addons/pythonscript/windows-32/pythonscript.dll" +OSX.64="res://addons/pythonscript/osx-64/libpythonscript.dylib" + +[dependencies] + +X11.64=[] +X11.32=[] +Server.64=[] +Windows.64=[] +Windows.32=[] +OSX.64=[] diff --git a/tests/helloworld/main.py b/tests/helloworld/main.py new file mode 100644 index 0000000..8728d40 --- /dev/null +++ b/tests/helloworld/main.py @@ -0,0 +1,10 @@ +from godot import exposed +from godot.bindings import Node, OS + + +@exposed +class Main(Node): + def _ready(self): + # Exit godot + OS.set_exit_code(0) + self.get_tree().quit() diff --git a/tests/helloworld/main.tscn b/tests/helloworld/main.tscn new file mode 100644 index 0000000..47d8487 --- /dev/null +++ b/tests/helloworld/main.tscn @@ -0,0 +1,9 @@ +[gd_scene load_steps=2 format=2] + +[ext_resource path="res://main.py" type="Script" id=1] + +[node name="Node" type="Node" index="0"] + +script = ExtResource( 1 ) + + diff --git a/tests/helloworld/project.godot b/tests/helloworld/project.godot new file mode 100644 index 0000000..b141560 --- /dev/null +++ b/tests/helloworld/project.godot @@ -0,0 +1,24 @@ +; Engine configuration file. +; It's best edited using the editor UI and not directly, +; since the parameters that go here are not all obvious. 
+; +; Format: +; [section] ; section goes between [] +; param=value ; assign values to parameters + +config_version=3 + +[application] + +run/main_scene="res://main.tscn" +name="godot-helloworld-tests" +main_scene="res://main.tscn" + +[gdnative] + +singletons=[ "res://pythonscript.gdnlib" ] + +[python_script] + +io_streams_capture=false +verbose=true diff --git a/tests/helloworld/pythonscript.gdnlib b/tests/helloworld/pythonscript.gdnlib new file mode 100644 index 0000000..1510867 --- /dev/null +++ b/tests/helloworld/pythonscript.gdnlib @@ -0,0 +1,23 @@ +[general] + +singleton=true +load_once=true +symbol_prefix="godot_" + +[entry] + +X11.64="res://addons/pythonscript/x11-64/libpythonscript.so" +X11.32="res://addons/pythonscript/x11-32/libpythonscript.so" +Server.64="res://addons/pythonscript/x11-64/libpythonscript.so" +Windows.64="res://addons/pythonscript/windows-64/pythonscript.dll" +Windows.32="res://addons/pythonscript/windows-32/pythonscript.dll" +OSX.64="res://addons/pythonscript/osx-64/libpythonscript.dylib" + +[dependencies] + +X11.64=[] +X11.32=[] +Server.64=[] +Windows.64=[] +Windows.32=[] +OSX.64=[] diff --git a/tests/python_binary/SConscript b/tests/python_binary/SConscript new file mode 100644 index 0000000..c518d1a --- /dev/null +++ b/tests/python_binary/SConscript @@ -0,0 +1,120 @@ +import os +import re +import shutil +import tempfile +import subprocess +from pathlib import Path +from SCons.Errors import UserError +from contextlib import contextmanager + + +Import("env") + + +if env["platform"].startswith("windows"): + PYTHON_RELATIVE_PATH = "python.exe" + python = f"{env['DIST_PLATFORM']}/{PYTHON_RELATIVE_PATH}" + scripts = f"{env['DIST_PLATFORM']}/Scripts" +else: + PYTHON_RELATIVE_PATH = "bin/python3" + python = f"{env['DIST_PLATFORM']}/{PYTHON_RELATIVE_PATH}" + scripts = f"{env['DIST_PLATFORM']}/bin" + + +def run_cmd(cmd): + if isinstance(cmd, str): + cmd = cmd.split() + try: + return subprocess.run(cmd, check=True, capture_output=True) + except subprocess.CalledProcessError as exc: + print(f"Error!!! Non-zero return code: {exc.returncode}") + print(f"command: {cmd}") + print(f"stdout: {exc.stdout.decode()}") + print(f"stderr: {exc.stderr.decode()}") + raise UserError(f"Test has failed (returncode: {exc.returncode})") from exc + + +@contextmanager +def run_cmd_and_handle_errors(cmd): + out = run_cmd(cmd) + try: + yield out + except Exception as exc: + print(f"Error!!! {str(exc)}") + print(f"command: {cmd}") + print(f"stdout: {out.stdout.decode()}") + print(f"stderr: {out.stderr.decode()}") + raise UserError(f"Test has failed ({str(exc)})") from exc + + +def test_factory(target, cmd): + if callable(cmd): + fn = cmd + else: + + def fn(target, source, env): + run_cmd(cmd) + + fn.__name__ = f"test_{target}" + + env.Command([target], [], fn, strfunction=lambda target, source, env: f"Test: {target[0]}") + env.Depends(target, env["DIST_ROOT"]) + env.AlwaysBuild(target) + + return target + + +test_factory("run_python", f"{python} --version") + + +test_factory( + "import_godot_module", + [ + python, + "-c", + """ +try: + import godot +except ImportError as exc: + assert "Cannot initialize godot module given Godot GDNative API not available." 
in str(exc) +""", + ], +) + + +def test_ensurepip_and_run_pip(target, source, env): + # ensurepip modifies the dist, so copy it first + with tempfile.TemporaryDirectory() as tmpdirname: + pythonscript_path = f"{tmpdirname}/pythonscript" + shutil.copytree(str(env["DIST_PLATFORM"].abspath), pythonscript_path, symlinks=True) + python = f"{pythonscript_path}/{PYTHON_RELATIVE_PATH}" + + run_cmd(f"{python} -m ensurepip") + + with run_cmd_and_handle_errors( + f"{python} -m pip --disable-pip-version-check --version" + ) as out: + # Check pip site-packages location + stdout = out.stdout.decode().strip() + regex = r"^pip\s+[0-9.]+\s+from\s+(.*)\s+\(python\s+[0-9.+]+\)$" + match = re.match(regex, stdout) + if match: + site_packages_path = Path(match.group(1)) + dist_platform_path = Path(pythonscript_path) + try: + site_packages_path.relative_to(dist_platform_path) + except ValueError as exc: + raise AssertionError( + f"pip site-packages is not located inside dist folder `{env['DIST_PLATFORM']}`" + ) from exc + + else: + raise AssertionError(f"stdout doesn't match regex `{regex}`") + + run_cmd( + f"{python} -m pip install requests" + ) # Basically the most downloaded package on PyPI + run_cmd([python, "-c", "import requests"]) + + +test_factory("ensurepip_and_run_pip", test_ensurepip_and_run_pip) diff --git a/tests/threading/conftest.py b/tests/threading/conftest.py new file mode 100644 index 0000000..268a02f --- /dev/null +++ b/tests/threading/conftest.py @@ -0,0 +1,75 @@ +import pytest + +from godot import OS, Node, Reference + + +__global_objs = [] + + +def generate_global_obj(type): + obj = type.new() + __global_objs.append(obj) + return obj + + +@pytest.fixture(scope="session", autouse=True) +def cleanup_global_objs(): + yield + for obj in __global_objs: + obj.free() + + +@pytest.fixture() +def generate_obj(check_memory_leak): + # Make this fixture depend on `check_memory_leak` to ensure it will + # check for memory leaks after our own teardown + + objs = [] + + def _generate_obj(type): + obj = type.new() + objs.append(obj) + return obj + + yield _generate_obj + + # Node must be removed from the scene tree to avoid segfault on free + for obj in objs: + if isinstance(obj, Node): + parent = obj.get_parent() + if parent: + parent.remove_child(obj) + + while objs: + # Pop values to trigger gc for Reference instances + obj = objs.pop() + if not isinstance(obj, Reference): + obj.free() + + +@pytest.fixture +def current_node(): + # `conftest.py` is imported weirdly by pytest so we cannot just put a + # global variable in it and set it from `Main._ready` + from main import get_current_node + + return get_current_node() + + +@pytest.fixture() +def check_memory_leak(request): + if request.node.get_marker("ignore_leaks"): + yield + else: + dynamic_mem_start = OS.get_dynamic_memory_usage() + static_mem_start = OS.get_static_memory_usage() + + yield + + static_mem_end = OS.get_static_memory_usage() + dynamic_mem_end = OS.get_dynamic_memory_usage() + + static_leak = static_mem_end - static_mem_start + dynamic_leak = dynamic_mem_end - dynamic_mem_start + assert static_leak == 0 + assert dynamic_leak == 0 diff --git a/tests/threading/main.py b/tests/threading/main.py new file mode 100644 index 0000000..97d6f4b --- /dev/null +++ b/tests/threading/main.py @@ -0,0 +1,40 @@ +import os +import pytest + +from godot import exposed +from godot.bindings import Node, OS + + +__current_node = None + + +def set_current_node(node): + global __current_node + assert __current_node is None + __current_node = node + + +def 
get_current_node(): + return __current_node + + +@exposed +class Main(Node): + def _ready(self): + set_current_node(self) + # Retrieve command line arguments passed through --pytest=... + prefix = "--pytest=" + pytest_args = [] + for gdarg in OS.get_cmdline_args(): + arg = str(gdarg) + if arg.startswith(prefix): + pytest_args += arg[len(prefix) :].split(",") + if all(arg.startswith("-") for arg in pytest_args): + # Filter to avoid scanning `plugins` and `lib` directories + pytest_args += [x for x in os.listdir() if x.startswith("test_")] + # Run tests here + print(f"running `pytest {' '.join(pytest_args)}`") + if pytest.main(pytest_args): + OS.set_exit_code(1) + # Exit godot + self.get_tree().quit() diff --git a/tests/threading/main.tscn b/tests/threading/main.tscn new file mode 100644 index 0000000..ccaba7b --- /dev/null +++ b/tests/threading/main.tscn @@ -0,0 +1,6 @@ +[gd_scene load_steps=2 format=2] + +[ext_resource path="res://main.py" type="Script" id=1] + +[node name="main" type="Node"] +script = ExtResource( 1 ) diff --git a/tests/threading/project.godot b/tests/threading/project.godot new file mode 100644 index 0000000..a714fc3 --- /dev/null +++ b/tests/threading/project.godot @@ -0,0 +1,33 @@ +; Engine configuration file. +; It's best edited using the editor UI and not directly, +; since the parameters that go here are not all obvious. +; +; Format: +; [section] ; section goes between [] +; param=value ; assign values to parameters + +config_version=4 + +_global_script_classes=[ ] +_global_script_class_icons={ + +} + +[application] + +run/main_scene="res://main.tscn" +name="godot-threading-tests" +main_scene="res://main.tscn" + +[gdnative] + +singletons=[ "res://pythonscript.gdnlib" ] + +[python_script] + +io_streams_capture=false +verbose=true + +[rendering] + +threads/thread_model=2 diff --git a/tests/threading/pytest.ini b/tests/threading/pytest.ini new file mode 100644 index 0000000..570aca9 --- /dev/null +++ b/tests/threading/pytest.ini @@ -0,0 +1,4 @@ +[pytest] +filterwarnings = + error + error::UserWarning diff --git a/tests/threading/pythonscript.gdnlib b/tests/threading/pythonscript.gdnlib new file mode 100644 index 0000000..1510867 --- /dev/null +++ b/tests/threading/pythonscript.gdnlib @@ -0,0 +1,23 @@ +[general] + +singleton=true +load_once=true +symbol_prefix="godot_" + +[entry] + +X11.64="res://addons/pythonscript/x11-64/libpythonscript.so" +X11.32="res://addons/pythonscript/x11-32/libpythonscript.so" +Server.64="res://addons/pythonscript/x11-64/libpythonscript.so" +Windows.64="res://addons/pythonscript/windows-64/pythonscript.dll" +Windows.32="res://addons/pythonscript/windows-32/pythonscript.dll" +OSX.64="res://addons/pythonscript/osx-64/libpythonscript.dylib" + +[dependencies] + +X11.64=[] +X11.32=[] +Server.64=[] +Windows.64=[] +Windows.32=[] +OSX.64=[] diff --git a/tests/threading/test_threads.py b/tests/threading/test_threads.py new file mode 100644 index 0000000..6d5665a --- /dev/null +++ b/tests/threading/test_threads.py @@ -0,0 +1,37 @@ +import pytest +from threading import Thread + +from godot import Vector3, SurfaceTool, Mesh, MeshInstance + + +def test_simple_thread(): + + thread_said_hello = False + + def target(): + nonlocal thread_said_hello + thread_said_hello = True + + t = Thread(target=target, daemon=True) + t.start() + + t.join(timeout=1) + assert thread_said_hello + + +def test_use_godot_from_thread(): + def target(): + st = SurfaceTool() + st.begin(Mesh.PRIMITIVE_TRIANGLES) + st.add_vertex(Vector3(-1, -1, 0)) + st.add_vertex(Vector3(-1, 1, 0)) + 
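# (the third vertex below completes the triangle: Mesh.PRIMITIVE_TRIANGLES consumes vertices in groups of three) +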
st.add_vertex(Vector3(1, 1, 0)) + mesh = st.commit() + mi = MeshInstance.new() + mi.mesh = mesh + mi.free() + + t = Thread(target=target, daemon=True) + t.start() + + t.join(timeout=1) diff --git a/tests/work_with_gdscript/conftest.py b/tests/work_with_gdscript/conftest.py new file mode 100644 index 0000000..8f09c9f --- /dev/null +++ b/tests/work_with_gdscript/conftest.py @@ -0,0 +1,75 @@ +import pytest + +from godot import ResourceLoader + +import pymain + + +assert pymain.root_node + + +@pytest.fixture +def root_node(): + return pymain.root_node + + +@pytest.fixture( + params=[ + "/root/main/test/pynode", + "/root/main/test/gdnode", + "/root/main/test/pysubnode", + "/root/main/test/gdsubnode", + ] +) +def anynode(request, root_node): + return root_node.get_node(request.param) + + +@pytest.fixture(params=["/root/main/test/pynode", "/root/main/test/gdnode"]) +def node(request, root_node): + return root_node.get_node(request.param) + + +@pytest.fixture(params=["/root/main/test/pysubnode", "/root/main/test/gdsubnode"]) +def subnode(request, root_node): + return root_node.get_node(request.param) + + +@pytest.fixture +def pynode(root_node): + return root_node.get_node("/root/main/test/pynode") + + +@pytest.fixture +def pysubnode(root_node): + return root_node.get_node("/root/main/test/pysubnode") + + +@pytest.fixture +def gdnode(root_node): + return root_node.get_node("/root/main/test/gdnode") + + +@pytest.fixture +def gdsubnode(root_node): + return root_node.get_node("/root/main/test/gdsubnode") + + +@pytest.fixture +def pynode_scene(): + return ResourceLoader.load("res://pynode.tscn", "", False) + + +@pytest.fixture +def pysubnode_scene(): + return ResourceLoader.load("res://pysubnode.tscn", "", False) + + +@pytest.fixture +def gdnode_scene(): + return ResourceLoader.load("res://gdnode.tscn", "", False) + + +@pytest.fixture +def gdsubnode_scene(): + return ResourceLoader.load("res://gdsubnode.tscn", "", False) diff --git a/tests/work_with_gdscript/gdmain.gd b/tests/work_with_gdscript/gdmain.gd new file mode 100644 index 0000000..050deff --- /dev/null +++ b/tests/work_with_gdscript/gdmain.gd @@ -0,0 +1,130 @@ +extends Node + + +var _py_node_scene = preload("res://pynode.tscn") +var _py_subnode_scene = preload("res://pysubnode.tscn") +var _gd_node_scene = preload("res://gdnode.tscn") +var _gd_subnode_scene = preload("res://gdsubnode.tscn") + + +var _test_error = 0 +var _test_total = 0 +var _current_test = "" + + +func set_test(name): + self._test_total += 1 + self._current_test = name + + +func assert_true(val, msg): + if not val: + self._test_error += 1 + print("- [ERROR] ", self._current_test, ": '", msg, "' expected to be true") + return false + else: + return true + + +func assert_eq(a, b, msg): + if not a == b: + self._test_error += 1 + print("- [ERROR] ", self._current_test, ": '", a, "' != '", b, "' in '", msg, "'") + return false + else: + return true + + +func test_native_method(node): + var original_name = node.get_name() + node.set_name("foo") + var name = node.get_name() + assert_eq(name, "foo", "node.get_name()") + # Reset to original name to allow this test to work again with same node name + node.set_name(original_name) + + +func test_prop(node, sub_node): + var value + + # No default value means the property is set to null + value = node.prop + assert_eq(value, null, "node.prop") + value = sub_node.prop + assert_eq(value, null, "sub_node.prop") + + node.prop = 42 + value = node.prop + assert_eq(value, 42, "node.prop") + sub_node.prop = 42 + value = sub_node.prop + 
assert_eq(value, 42, "sub_node.prop") + + value = node.overloaded_by_child_prop + assert_eq(value, "default", "node.overloaded_by_child_prop") + value = sub_node.overloaded_by_child_prop + assert_eq(value, "sub:default", "sub_node.overloaded_by_child_prop") + + # node.overloaded_by_child_prop = "foo" + # value = node.overloaded_by_child_prop + # assert_eq(value, "foo", "node.overloaded_by_child_prop") + # sub_node.overloaded_by_child_prop = "foo" + # value = sub_node.overloaded_by_child_prop + # assert_eq(value, "sub:foo", "sub_node.overloaded_by_child_prop") + + +func test_method(node, sub_node): + var ret + + ret = node.meth("foo") + assert_eq(ret, "foo", "node.meth(\"foo\")") + ret = sub_node.meth("foo") + assert_eq(ret, "foo", "sub_node.meth(\"foo\")") + ret = node.overloaded_by_child_meth("foo") + assert_eq(ret, "foo", "node.overloaded_by_child_meth(\"foo\")") + ret = sub_node.overloaded_by_child_meth("foo") + assert_eq(ret, "sub:foo", "sub_node.overloaded_by_child_meth(\"foo\")") + + +func test_ready_called(node, sub_node): + assert_true(node.is_ready_called(), "node.is_ready_called()") + assert_true(sub_node.is_ready_called(), "sub_node.is_ready_called()") + assert_true(sub_node.is_sub_ready_called(), "sub_node.is_sub_ready_called()") + + +func run_tests(): + print('======= gdmain tests ==========') + for args in [ + ["pynodes", _py_node_scene, _py_subnode_scene], + ["gdnodes", _gd_node_scene, _gd_subnode_scene], + ]: + var nodes_type = args[0] + var node_scene = args[1] + var sub_node_scene = args[2] + + var node = node_scene.instance() + var sub_node = sub_node_scene.instance() + self.add_child(node) + self.add_child(sub_node) + print('---', nodes_type, ' ', self.get_node(node.get_name())) + + # self.set_test(nodes_type + ":test_ready_called") + # test_ready_called(node, sub_node) + self.set_test(nodes_type + ":test_native_method(node)") + test_native_method(node) + self.set_test(nodes_type + ":test_native_method(sub_node)") + test_native_method(sub_node) + self.set_test(nodes_type + ":test_prop") + test_prop(node, sub_node) + self.set_test(nodes_type + ":test_method") + test_method(node, sub_node) + + self.remove_child(node) + self.remove_child(sub_node) + + if self._test_error: + print('ERROR %s errors in %s tests' % [self._test_error, self._test_total]) + return 1 + else: + print('SUCCESS %s tests' % self._test_total) + return 0 diff --git a/tests/work_with_gdscript/gdnode.gd b/tests/work_with_gdscript/gdnode.gd new file mode 100644 index 0000000..5379ad8 --- /dev/null +++ b/tests/work_with_gdscript/gdnode.gd @@ -0,0 +1,26 @@ +extends Node + + +var ready_called = false +export(int) var prop +export var overloaded_by_child_prop = "default" + + +func _ready(): + ready_called = true + + +func is_ready_called(): + return ready_called + + +func meth(attr): + return attr + + +func overloaded_by_child_meth(attr): + return attr + + +static func static_meth(attr): + return 'static:' + attr diff --git a/tests/work_with_gdscript/gdnode.tscn b/tests/work_with_gdscript/gdnode.tscn new file mode 100644 index 0000000..3f657df --- /dev/null +++ b/tests/work_with_gdscript/gdnode.tscn @@ -0,0 +1,10 @@ +[gd_scene load_steps=2 format=2] + +[ext_resource path="res://gdnode.gd" type="Script" id=1] + +[node name="Node" type="Node"] + +script = ExtResource( 1 ) +overloaded_by_child_prop = "default" + + diff --git a/tests/work_with_gdscript/gdnode_with_pyparent.gd b/tests/work_with_gdscript/gdnode_with_pyparent.gd new file mode 100644 index 0000000..f2a4771 --- /dev/null +++ 
b/tests/work_with_gdscript/gdnode_with_pyparent.gd @@ -0,0 +1 @@ +extends "res://pynode.py" diff --git a/tests/work_with_gdscript/gdsubnode.gd b/tests/work_with_gdscript/gdsubnode.gd new file mode 100644 index 0000000..4b59b4f --- /dev/null +++ b/tests/work_with_gdscript/gdsubnode.gd @@ -0,0 +1,17 @@ +extends "res://gdnode.gd" + + +var sub_ready_called = false +export var child_prop = "sub:default" +# Cannot overload a property in GDScript + +func _ready(): + sub_ready_called = true + + +func is_sub_ready_called(): + return sub_ready_called + + +func overloaded_by_child_meth(attr): + return "sub:" + attr diff --git a/tests/work_with_gdscript/gdsubnode.tscn b/tests/work_with_gdscript/gdsubnode.tscn new file mode 100644 index 0000000..51a06eb --- /dev/null +++ b/tests/work_with_gdscript/gdsubnode.tscn @@ -0,0 +1,9 @@ +[gd_scene load_steps=2 format=2] + +[ext_resource path="res://gdsubnode.gd" type="Script" id=1] + +[node name="Node" type="Node"] + +script = ExtResource( 1 ) + + diff --git a/tests/work_with_gdscript/main.gd b/tests/work_with_gdscript/main.gd new file mode 100644 index 0000000..712769f --- /dev/null +++ b/tests/work_with_gdscript/main.gd @@ -0,0 +1,12 @@ +extends Node + +var _test_done = false + +func _process(delta): + if _test_done: + return + for path in ["pymain", "gdmain"]: +# for path in ["pymain"]: + self.get_node(path).run_tests() + _test_done = true + self.get_tree().quit() diff --git a/tests/work_with_gdscript/main.tscn b/tests/work_with_gdscript/main.tscn new file mode 100644 index 0000000..583e2e0 --- /dev/null +++ b/tests/work_with_gdscript/main.tscn @@ -0,0 +1,56 @@ +[gd_scene load_steps=10 format=2] + +[ext_resource path="res://main.gd" type="Script" id=1] +[ext_resource path="res://pymain.py" type="Script" id=2] +[ext_resource path="res://gdmain.gd" type="Script" id=3] +[ext_resource path="res://pynode.py" type="Script" id=4] +[ext_resource path="res://pysubnode.py" type="Script" id=5] +[ext_resource path="res://gdnode.gd" type="Script" id=6] +[ext_resource path="res://gdsubnode.gd" type="Script" id=7] +[ext_resource path="res://gdnode_with_pyparent.gd" type="Script" id=8] +[ext_resource path="res://pynode_with_gdparent.py" type="Script" id=9] + +[node name="main" type="Node"] + +script = ExtResource( 1 ) + +[node name="pymain" type="Node" parent="."] + +script = ExtResource( 2 ) + +[node name="gdmain" type="Node" parent="."] + +script = ExtResource( 3 ) + +[node name="test" type="Node" parent="."] + +[node name="pynode" type="Node" parent="test"] + +script = ExtResource( 4 ) + +[node name="pysubnode" type="Node" parent="test"] + +script = ExtResource( 5 ) + +[node name="gdnode" type="Node" parent="test"] + +script = ExtResource( 6 ) +prop = null +overloaded_by_child_prop = "default" + +[node name="gdsubnode" type="Node" parent="test"] + +script = ExtResource( 7 ) +prop = null +overloaded_by_child_prop = "default" +child_prop = "sub:default" + +[node name="gdnode_with_pyparent" type="Node" parent="test"] + +script = ExtResource( 8 ) + +[node name="pynode_with_gdparent" type="Node" parent="test"] + +script = ExtResource( 9 ) + + diff --git a/tests/work_with_gdscript/main2.gd b/tests/work_with_gdscript/main2.gd new file mode 100644 index 0000000..4fb49fb --- /dev/null +++ b/tests/work_with_gdscript/main2.gd @@ -0,0 +1,50 @@ +extends Node + +export (int) var exported = 42 + +var python_scene = preload("res://python_scene.tscn") + + +func _ready(): + var python_scene_instance = python_scene.instance() + python_scene_instance.set_name("python_scene") + 
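# The name set above is what _process() later looks up via get_node("python_scene") +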
+    self.add_child(python_scene_instance)
+
+
+func exit_test(error):
+    if error:
+        print("Error: " + error)
+        OS.set_exit_code(1)
+    else:
+        print('Test success!')
+    self.test_closing = true
+    self.get_tree().quit()
+
+
+var test_started = false
+var test_ended = false
+var test_closing = false
+
+
+func _process(delta):
+    if self.test_closing:
+        # Wait for application to stop
+        return
+    if self.test_started:
+        if self.test_ended:
+            self.exit_test("")
+        else:
+            self.exit_test("Test started but didn't end!")
+    self.test_started = true
+    var python_scene_instance = self.get_node("python_scene")
+    # Test property
+    python_scene_instance.python_prop = 42
+    var prop_val = python_scene_instance.python_prop
+    if prop_val != 42:
+        self.exit_test('python_scene_instance.python_prop != 42 (value = %s)' % prop_val)
+    # Test method
+    var meth_ret = python_scene_instance.python_method("foo")
+    if meth_ret != "foo":
+        self.exit_test('python_scene_instance.python_method("foo") != "foo" (value = %s)' % meth_ret)
+    # End of test, we will exit at next _process call
+    self.test_ended = true
diff --git a/tests/work_with_gdscript/project.godot b/tests/work_with_gdscript/project.godot
new file mode 100644
index 0000000..1a5e642
--- /dev/null
+++ b/tests/work_with_gdscript/project.godot
@@ -0,0 +1,15 @@
+config_version=3
+
+[application]
+
+config/name="test:work_with_gdscript"
+run/main_scene="res://main.tscn"
+
+[gdnative]
+
+singletons=[ "res://pythonscript.gdnlib" ]
+
+[python_script]
+
+io_streams_capture=false
+verbose=true
diff --git a/tests/work_with_gdscript/pymain.py b/tests/work_with_gdscript/pymain.py
new file mode 100644
index 0000000..c3b6d46
--- /dev/null
+++ b/tests/work_with_gdscript/pymain.py
@@ -0,0 +1,29 @@
+# TODO: - test GDScript static functions
+#       - allow inheritance from GDScript class
+import os
+import pytest
+
+from godot import exposed, Node, OS
+
+
+root_node = None
+
+
+@exposed
+class PyMain(Node):
+    def run_tests(self):
+        global root_node
+        root_node = self
+        # Retrieve command line arguments passed through --pytest=...
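+        # e.g. a hypothetical `--pytest=-v,-x` yields ["-v", "-x"]; test_*.py
+        # files are then auto-appended below since every argument is an option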
+        prefix = "--pytest="
+        pytest_args = []
+        for gdarg in OS.get_cmdline_args():
+            arg = str(gdarg)
+            if arg.startswith(prefix):
+                pytest_args += arg[len(prefix) :].split(",")
+        if all(arg.startswith("-") for arg in pytest_args):
+            # Filter to avoid scanning `plugins` and `lib` directories
+            pytest_args += [x for x in os.listdir() if x.startswith("test_")]
+        # Run tests here
+        print(f"running `pytest {' '.join(pytest_args)}`")
+        return pytest.main(pytest_args)
diff --git a/tests/work_with_gdscript/pynode.py b/tests/work_with_gdscript/pynode.py
new file mode 100644
index 0000000..f3e2671
--- /dev/null
+++ b/tests/work_with_gdscript/pynode.py
@@ -0,0 +1,43 @@
+from godot import exposed, export, Node
+
+
+@exposed
+class PyNode(Node):
+    _ready_called = False
+    _overloaded_by_child_prop_value = None
+
+    def _ready(self):
+        self._ready_called = True
+
+    def is_ready_called(self):
+        return self._ready_called
+
+    def meth(self, attr):
+        return attr
+
+    def overloaded_by_child_meth(self, attr):
+        return attr
+
+    @staticmethod
+    def static_meth(attr):
+        return f"static:{attr}"
+
+    prop = export(int)
+
+    @export(str, default="default")
+    @property
+    def overloaded_by_child_prop(self):
+        return self._overloaded_by_child_prop_value
+
+    @overloaded_by_child_prop.setter
+    def overloaded_by_child_prop(self, value):
+        self._overloaded_by_child_prop_value = value
+
+    def print_tree(self):
+        # Overloaded native method
+        return """
+  *
+ ***
+*****
+  |
+"""
diff --git a/tests/work_with_gdscript/pynode.tscn b/tests/work_with_gdscript/pynode.tscn
new file mode 100644
index 0000000..bd8540e
--- /dev/null
+++ b/tests/work_with_gdscript/pynode.tscn
@@ -0,0 +1,9 @@
+[gd_scene load_steps=2 format=2]
+
+[ext_resource path="res://pynode.py" type="Script" id=1]
+
+[node name="Node" type="Node"]
+
+script = ExtResource( 1 )
+
+
diff --git a/tests/work_with_gdscript/pynode_with_gdparent.py b/tests/work_with_gdscript/pynode_with_gdparent.py
new file mode 100644
index 0000000..60a0156
--- /dev/null
+++ b/tests/work_with_gdscript/pynode_with_gdparent.py
@@ -0,0 +1,9 @@
+from godot import exposed, ResourceLoader
+
+
+GDNode = ResourceLoader.load("res://gdnode.gd", "", False)
+
+
+@exposed
+class PyNodeWithGDParent(GDNode):
+    pass
diff --git a/tests/work_with_gdscript/pysubnode.py b/tests/work_with_gdscript/pysubnode.py
new file mode 100644
index 0000000..aac04d1
--- /dev/null
+++ b/tests/work_with_gdscript/pysubnode.py
@@ -0,0 +1,28 @@
+from godot import exposed, export
+
+from pynode import PyNode
+
+
+@exposed
+class PySubNode(PyNode):
+    _sub_ready_called = False
+    _overloaded_by_child_prop_value = None
+
+    def _ready(self):
+        super()._ready()
+        self._sub_ready_called = True
+
+    def is_sub_ready_called(self):
+        return self._sub_ready_called
+
+    def overloaded_by_child_meth(self, attr):
+        return f"sub:{attr}"
+
+    @export(str, default="default")
+    @property
+    def overloaded_by_child_prop(self):
+        return self._overloaded_by_child_prop_value
+
+    @overloaded_by_child_prop.setter
+    def overloaded_by_child_prop(self, value):
+        self._overloaded_by_child_prop_value = f"sub:{value}"
diff --git a/tests/work_with_gdscript/pysubnode.tscn b/tests/work_with_gdscript/pysubnode.tscn
new file mode 100644
index 0000000..3efcd20
--- /dev/null
+++ b/tests/work_with_gdscript/pysubnode.tscn
@@ -0,0 +1,9 @@
+[gd_scene load_steps=2 format=2]
+
+[ext_resource path="res://pysubnode.py" type="Script" id=1]
+
+[node name="Node" type="Node"]
+
+script = ExtResource( 1 )
+
+
diff --git a/tests/work_with_gdscript/pytest.ini b/tests/work_with_gdscript/pytest.ini
new file mode 100644
index 0000000..570aca9
--- /dev/null
+++ b/tests/work_with_gdscript/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+filterwarnings =
+    error
+    error::UserWarning
diff --git a/tests/work_with_gdscript/python_scene.py b/tests/work_with_gdscript/python_scene.py
new file mode 100644
index 0000000..39f6435
--- /dev/null
+++ b/tests/work_with_gdscript/python_scene.py
@@ -0,0 +1,24 @@
+from godot import exposed, export, Node2D
+
+
+@exposed
+class MyExportedCls(Node2D):
+    initialized = False
+    _python_prop = None
+
+    def _ready(self):
+        self.initialized = True
+
+    def python_method(self, attr):
+        return attr
+
+    python_prop_static = export(str)
+
+    @export(int, default=42)
+    @property
+    def python_prop(self):
+        return self._python_prop
+
+    @python_prop.setter
+    def python_prop(self, value):
+        self._python_prop = value
diff --git a/tests/work_with_gdscript/python_scene.tscn b/tests/work_with_gdscript/python_scene.tscn
new file mode 100644
index 0000000..9e2cbca
--- /dev/null
+++ b/tests/work_with_gdscript/python_scene.tscn
@@ -0,0 +1,32 @@
+[gd_scene load_steps=2 format=2]
+
+[ext_resource path="res://python_scene.py" type="Script" id=1]
+
+[node name="Node2D" type="Node2D"]
+
+script = ExtResource( 1 )
+
+[node name="Particles2D" type="Particles2D" parent="."]
+
+position = Vector2( 1.85071, -1.85062 )
+config/amount = 32
+config/lifetime = 2.0
+config/process_mode = 1
+params/direction = 0.0
+params/spread = 10.0
+params/linear_velocity = 20.0
+params/spin_velocity = 0.0
+params/orbit_velocity = 0.0
+params/gravity_direction = 0.0
+params/gravity_strength = 9.8
+params/radial_accel = 0.0
+params/tangential_accel = 0.0
+params/damping = 0.0
+params/initial_angle = 0.0
+params/initial_size = 1.0
+params/final_size = 1.0
+params/hue_variation = 0.0
+params/anim_speed_scale = 1.0
+params/anim_initial_pos = 0.0
+
+
diff --git a/tests/work_with_gdscript/pythonscript.gdnlib b/tests/work_with_gdscript/pythonscript.gdnlib
new file mode 100644
index 0000000..1510867
--- /dev/null
+++ b/tests/work_with_gdscript/pythonscript.gdnlib
@@ -0,0 +1,23 @@
+[general]
+
+singleton=true
+load_once=true
+symbol_prefix="godot_"
+
+[entry]
+
+X11.64="res://addons/pythonscript/x11-64/libpythonscript.so"
+X11.32="res://addons/pythonscript/x11-32/libpythonscript.so"
+Server.64="res://addons/pythonscript/x11-64/libpythonscript.so"
+Windows.64="res://addons/pythonscript/windows-64/pythonscript.dll"
+Windows.32="res://addons/pythonscript/windows-32/pythonscript.dll"
+OSX.64="res://addons/pythonscript/osx-64/libpythonscript.dylib"
+
+[dependencies]
+
+X11.64=[]
+X11.32=[]
+Server.64=[]
+Windows.64=[]
+Windows.32=[]
+OSX.64=[]
diff --git a/tests/work_with_gdscript/test_native_call.py b/tests/work_with_gdscript/test_native_call.py
new file mode 100644
index 0000000..9257632
--- /dev/null
+++ b/tests/work_with_gdscript/test_native_call.py
@@ -0,0 +1,101 @@
+# TODO:
+# - allow inheritance from GDScript class
+# - overload native method?
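+# NB: the `node`, `subnode`, `pynode`, `pysubnode` and `anynode` fixtures used
+# below are presumably provided by a local conftest.py (not part of this diff)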
+import pytest
+
+from godot import GDString, ResourceLoader, GDScript, PluginScript
+
+
+def test_native_method(node):
+    original_name = node.get_name()
+    try:
+        node.set_name("foo")
+        name = node.get_name()
+        assert name == GDString("foo")
+    finally:
+        node.set_name(original_name)
+
+
+@pytest.mark.xfail
+def test_overloaded_native_method(node, subnode):
+    expected = """
+  *
+ ***
+*****
+  |
+"""
+    ret = node.print_tree()
+    assert ret == expected
+    ret = subnode.print_tree()
+    assert ret == expected
+
+
+def test_node_ready_called(node):
+    assert node.is_ready_called()
+
+
+def test_subnode_ready_called(subnode):
+    assert subnode.is_ready_called()
+    assert subnode.is_sub_ready_called()
+
+
+def test_method_call(anynode):
+    ret = anynode.meth("foo")
+    assert ret == GDString("foo")
+
+
+def test_overloaded_method_call(subnode):
+    ret = subnode.overloaded_by_child_meth("foo")
+    assert ret == GDString("sub:foo")
+
+
+def test_property_without_default_value(anynode):
+    value = anynode.prop
+    assert value is None
+
+
+def test_property(anynode):
+    anynode.prop = 42
+    value = anynode.prop
+    assert value == 42
+
+
+@pytest.mark.xfail(reason="default value seems to be only set in .tscn")
+def test_overloaded_property_default_value(pynode, pysubnode):
+    # Parent property
+    value = pynode.overloaded_by_child_prop
+    assert value == "default"
+    # Overloaded property
+    value = pysubnode.overloaded_by_child_prop
+    assert value == "sub:default"
+
+
+def test_overloaded_property(pynode, pysubnode):
+    # Not supported by GDScript
+
+    # Parent property
+    pynode.overloaded_by_child_prop = "foo"
+    value = pynode.overloaded_by_child_prop
+    assert value == GDString("foo")
+
+    # Overloaded property
+    pysubnode.overloaded_by_child_prop = "foo"
+    value = pysubnode.overloaded_by_child_prop
+    assert value == GDString("sub:foo")
+
+
+def test_static_method_call(node):
+    value = node.static_meth("foo")
+    assert value == GDString("static:foo")
+
+
+@pytest.mark.parametrize(
+    "path,expected_type", [("res://gdnode.gd", GDScript), ("res://pynode.py", PluginScript)]
+)
+def test_load_script(path, expected_type):
+    script = ResourceLoader.load(path, "", False)
+    try:
+        assert isinstance(script, expected_type)
+        assert script.can_instance()
+    finally:
+        script.free()
diff --git a/tools/assetlib_release.py b/tools/assetlib_release.py
new file mode 100644
index 0000000..c607b63
--- /dev/null
+++ b/tools/assetlib_release.py
@@ -0,0 +1,143 @@
+#! /usr/bin/env python3
+
+"""
+Build system is designed to create a build targeting a single platform.
+This script aims at bundling together multiple builds to generate a final
+multi-platform release.
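+
+Typical usage (sketch, assuming at least one release has been published on
+the GitHub repository; when --version is omitted the latest release is used):
+
+    $ python tools/assetlib_release.py --version <X.Y.Z>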
+""" + +import json +from pathlib import Path +from urllib.request import urlopen + +import argparse +import tarfile +from datetime import datetime +import os +import shutil +from urllib.request import urlretrieve +from zipfile import ZipFile +from concurrent.futures import ThreadPoolExecutor + + +API_REPO_URL = "https://api.github.com/repos/touilleMan/godot-python/releases" +PLATFORMS = ("x11-32", "x11-64", "osx-64", "windows-32", "windows-64") +MISC_DIR = Path(__file__).parent / "../misc" + + +def get_release_info(version=None): + data = json.loads(urlopen(API_REPO_URL).read()) + if not version: + release_info = data[0] + else: + tag_name = version if version.startswith("v") else f"v{version}" + release_info = next(x for x in data if x["tag_name"] == tag_name) + + info = { + "tag_name": release_info["tag_name"], + "version": release_info["tag_name"][1:], + "platforms": {}, + } + + for platform in PLATFORMS: + asset = next((asset for asset in release_info["assets"] if platform in asset["name"]), None) + if asset: + info["platforms"][platform] = { + "name": asset["name"], + "url": asset["browser_download_url"], + } + else: + print(f"Warning: release info for platform {platform} not found") + + return info + + +def pipeline_executor(dirs, release_info, platform_name): + platform_info = release_info["platforms"][platform_name] + release_archive = dirs["build"] / platform_info["name"] + + if not release_archive.exists(): + print(f"{platform_name} - Dowloading release") + with urlopen(platform_info["url"]) as f: + release_archive.write_bytes(f.read()) + + if not (dirs["pythonscript"] / platform_name).exists(): + print(f"{platform_name} - Extracting release") + if platform_info["name"].endswith(".zip"): + zipobj = ZipFile(release_archive) + # Only extract platform-specific stuff + members = ( + x + for x in zipobj.namelist() + if x.startswith(f"addons/pythonscript/{platform_name}/") + ) + zipobj.extractall(path=dirs["dist"], members=members) + + elif platform_info["name"].endswith(".tar.bz2"): + tarobj = tarfile.open(release_archive) + # Only extract platform-specific stuff + members = ( + x for x in tarobj if x.name.startswith(f"./addons/pythonscript/{platform_name}/") + ) + tarobj.extractall(path=dirs["dist"], members=members) + + else: + raise RuntimeError(f"Unknown archive format for {release_archive}") + + +def orchestrator(dirs, release_info): + futures = [] + with ThreadPoolExecutor() as executor: + for platform_name in release_info["platforms"].keys(): + futures.append(executor.submit(pipeline_executor, dirs, release_info, platform_name)) + for future in futures: + if not future.cancelled(): + future.result() # Raise exception if any + + print("Add bonuses...") + + (dirs["pythonscript"] / ".gdignore").touch() + license_txt = (MISC_DIR / "release_LICENSE.txt").read_text() + for entry in ["dist", "pythonscript"]: + (dirs[entry] / "LICENSE.txt").write_text(license_txt) + (dirs["dist"] / "pythonscript.gdnlib").write_text( + (MISC_DIR / "release_pythonscript.gdnlib").read_text() + ) + (dirs["dist"] / "README.txt").write_text( + (MISC_DIR / "release_README.txt") + .read_text() + .format(version=release_info["version"], date=datetime.utcnow().strftime("%Y-%m-%d")) + ) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("--version", default=None) + args = parser.parse_args() + + release_info = get_release_info(args.version) + print(f"Release version: {release_info['version']}") + + build_dir = Path(f"pythonscript-assetlib-release-{release_info['version']}").resolve() + 
dist_dir = build_dir / f"pythonscript-{release_info['version']}" + addons_dir = dist_dir / "addons" + pythonscript_dir = addons_dir / "pythonscript" + + build_dir.mkdir(exist_ok=True) + dist_dir.mkdir(exist_ok=True) + addons_dir.mkdir(exist_ok=True) + pythonscript_dir.mkdir(exist_ok=True) + + dirs = { + "build": build_dir, + "dist": dist_dir, + "addons": addons_dir, + "pythonscript": pythonscript_dir, + } + orchestrator(dirs, release_info) + + print(f"{dist_dir} is ready !") + + +if __name__ == "__main__": + main()