Metadata-Version: 2.1
Name: mypy
Version: 0.641
Version: 0.650
Summary: Optional static typing for Python
Home-page: http://www.mypy-lang.org/
Author: Jukka Lehtosalo
......
......@@ -250,6 +250,20 @@ tracker:
Feel free to also ask questions on the tracker.
mypy_mypyc
----------
We have built an experimental compiled version of mypy using the
[mypyc compiler](https://github.com/mypyc/mypyc) for mypy-annotated
Python code. It is approximately 4 times faster than interpreted mypy.
If you wish to test out the compiled version of mypy, and are running
OS X or Linux, you can directly install a binary from
https://github.com/mypyc/mypy_mypyc-wheels/releases/latest.
Compiled mypy packages on PyPI are coming soon.
Help wanted
-----------
......
mypy (0.650-1) unstable; urgency=medium
* New upstream version
-- Michael R. Crusoe <michael.crusoe@gmail.com> Mon, 10 Dec 2018 20:28:20 -0800
mypy (0.641-1) unstable; urgency=medium
* New upstream release.
......
extensions/mypy_extensions.egg-info/
......@@ -28,50 +28,51 @@ override_dh_auto_build:
sed -i 's/python3 -m mypy/mypy/g' debian/mypy.1
sed -i 's/python3/mypy/g' debian/mypy.1
sed -i 's/PYTHON3/MYPY/g' debian/mypy.1
PYTHONPATH=$(PPATH) help2man scripts/dmypy --no-info \
PYTHONPATH=$(PPATH) help2man 'python3 -m mypy.dmypy' --no-info \
--version-string="${DEB_VERSION_UPSTREAM}" \
--name 'Client for mypy daemon mode' > debian/dmypy.1
--include <(echo -e "[NAME]\ndmypy \- Client for mypy daemon mode") \
> debian/dmypy.1
sed -i '/\.\.\./d' debian/dmypy.1 # Delete the "..."
PYTHONPATH=$(PPATH) help2man 'scripts/dmypy start' --no-info \
PYTHONPATH=$(PPATH) help2man 'python3 -m mypy.dmypy start' --no-info \
--version-string="${DEB_VERSION_UPSTREAM}" \
--include <(echo -e "[NAME]\ndmypy_start \- Start a mypy daemon") \
> debian/dmypy-start.1
PYTHONPATH=$(PPATH) help2man 'scripts/dmypy restart' --no-info \
PYTHONPATH=$(PPATH) help2man 'python3 -m mypy.dmypy restart' --no-info \
--version-string="${DEB_VERSION_UPSTREAM}" \
--include <(echo -e "[NAME]\ndmypy_restart \- Restart a mypy daemon") \
> debian/dmypy-restart.1
PYTHONPATH=$(PPATH) help2man 'scripts/dmypy status' --no-info \
PYTHONPATH=$(PPATH) help2man 'python3 -m mypy.dmypy status' --no-info \
--version-string="${DEB_VERSION_UPSTREAM}" \
--include <(echo -e "[NAME]\ndmypy_status \- Show a mypy daemon status") \
> debian/dmypy-status.1
PYTHONPATH=$(PPATH) help2man 'scripts/dmypy stop' --no-info \
PYTHONPATH=$(PPATH) help2man 'python3 -m mypy.dmypy stop' --no-info \
--version-string="${DEB_VERSION_UPSTREAM}" \
--include <(echo -e "[NAME]\ndmypy_stop \- Stop a mypy daemon") \
> debian/dmypy-stop.1
PYTHONPATH=$(PPATH) help2man 'scripts/dmypy kill' --no-info \
PYTHONPATH=$(PPATH) help2man 'python3 -m mypy.dmypy kill' --no-info \
--version-string="${DEB_VERSION_UPSTREAM}" \
--include <(echo -e "[NAME]\ndmypy_kill \- Kill a mypy daemon") \
> debian/dmypy-kill.1
PYTHONPATH=$(PPATH) help2man 'scripts/dmypy check' --no-info \
PYTHONPATH=$(PPATH) help2man 'python3 -m mypy.dmypy check' --no-info \
--version-string="${DEB_VERSION_UPSTREAM}" \
--include <(echo -e "[NAME]\ndmypy_check \- Type check some Python files with a mypy daemon") \
> debian/dmypy-check.1
PYTHONPATH=$(PPATH) help2man 'scripts/dmypy recheck' --no-info \
PYTHONPATH=$(PPATH) help2man 'python3 -m mypy.dmypy recheck' --no-info \
--version-string="${DEB_VERSION_UPSTREAM}" \
--include <(echo -e "[NAME]\ndmypy_recheck \- Type check the same files from the previous run") \
> debian/dmypy-recheck.1
PYTHONPATH=$(PPATH) help2man 'scripts/dmypy hang' --no-info \
PYTHONPATH=$(PPATH) help2man 'python3 -m mypy.dmypy hang' --no-info \
--version-string="${DEB_VERSION_UPSTREAM}" \
--include <(echo -e "[NAME]\ndmypy_hang \- Hang a mypy daemon, as a debug hack") \
> debian/dmypy-hang.1
PYTHONPATH=$(PPATH) help2man 'scripts/dmypy daemon' --no-info \
PYTHONPATH=$(PPATH) help2man 'python3 -m mypy.dmypy daemon' --no-info \
--version-string="${DEB_VERSION_UPSTREAM}" \
--include <(echo -e "[NAME]\ndmypy_daemon \- Run a mypy daemon in the foreground") \
> debian/dmypy-daemon.1
PYTHONPATH=$(PPATH) help2man 'python${PY3V} scripts/stubgen' --no-info \
PYTHONPATH=$(PPATH) help2man 'python${PY3V} -m mypy.stubgen' --no-info \
--no-discard-stderr --version-string="${DEB_VERSION_UPSTREAM}" \
--name 'Generate draft stubs for Python modules.' > \
debian/stubgen.1
--include <(echo -e "[NAME]\nstubgen \- Generate draft stubs for Python modules.") \
> debian/stubgen.1
PYTHONPATH=$(CURDIR) $(MAKE) -C docs html
override_dh_auto_clean:
......
......@@ -204,8 +204,8 @@ that are common in idiomatic Python are standardized.
def f(my_mapping):
# type: (MutableMapping[int, str]) -> Set[str]
my_dict[5] = 'maybe'
return set(my_dict.values())
my_mapping[5] = 'maybe'
return set(my_mapping.values())
f({3: 'yes', 4: 'no'})
......
......@@ -8,7 +8,7 @@ rules.
Instance and class attributes
*****************************
Mypy type checker detects if you are trying to access a missing
The mypy type checker detects if you are trying to access a missing
attribute, which is a very common programming error. For this to work
correctly, instance and class attributes must be defined or
initialized within the class. Mypy infers the types of attributes:
......@@ -38,8 +38,9 @@ a type annotation:
a = A()
a.x = [1] # OK
As in Python generally, a variable defined in the class body can used
as a class or an instance variable.
As in Python generally, a variable defined in the class body can be used
as a class or an instance variable. (As discussed in the next section, you
can override this with a ``ClassVar`` annotation.)
Type comments work as well, if you need to support Python versions earlier
than 3.6:
......@@ -77,6 +78,98 @@ to it explicitly using ``self``:
a = self
a.x = 1 # Error: 'x' not defined
Annotating ``__init__`` methods
*******************************
The ``__init__`` method is somewhat special -- it doesn't return a
value. This is best expressed as ``-> None``. However, since many feel
this is redundant, it is allowed to omit the return type declaration
on ``__init__`` methods **if at least one argument is annotated**. For
example, in the following classes ``__init__`` is considered fully
annotated:
.. code-block:: python
class C1:
def __init__(self) -> None:
self.var = 42
class C2:
def __init__(self, arg: int):
self.var = arg
However, if ``__init__`` has no annotated arguments and no return type
annotation, it is considered an untyped method:
.. code-block:: python
class C3:
def __init__(self):
# This body is not type checked
self.var = 42 + 'abc'
Class attribute annotations
***************************
You can use a ``ClassVar[t]`` annotation to explicitly declare that a
particular attribute should not be set on instances:
.. code-block:: python
from typing import ClassVar
class A:
x: ClassVar[int] = 0 # Class variable only
A.x += 1 # OK
a = A()
a.x = 1 # Error: Cannot assign to class variable "x" via instance
print(a.x) # OK -- can be read through an instance
.. note::
If you need to support Python 3 versions 3.5.2 or earlier, you have
to import ``ClassVar`` from ``typing_extensions`` instead (available on
PyPI). If you use Python 2.7, you can import it from ``typing``.
It's not necessary to annotate all class variables using
``ClassVar``. An attribute without the ``ClassVar`` annotation can
still be used as a class variable. However, mypy won't prevent it from
being used as an instance variable, as discussed previously:
.. code-block:: python
class A:
x = 0 # Can be used as a class or instance variable
A.x += 1 # OK
a = A()
a.x = 1 # Also OK
Note that ``ClassVar`` is not a class, and you can't use it with
``isinstance()`` or ``issubclass()``. It does not change Python
runtime behavior -- it's only for type checkers such as mypy (and
also helpful for human readers).
You can also omit the square brackets and the variable type in
a ``ClassVar`` annotation, but this might not do what you'd expect:
.. code-block:: python
class A:
y: ClassVar = 0 # Type implicitly Any!
In this case the type of the attribute will be implicitly ``Any``.
This behavior will change in the future, since it's surprising.
.. note::
A ``ClassVar`` type parameter cannot include type variables:
``ClassVar[T]`` and ``ClassVar[List[T]]``
are both invalid if ``T`` is a type variable (see :ref:`generic-classes`
for more about type variables).
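For example, a brief sketch of a definition this rules out (``Registry`` and
``instances`` are made-up names used only for illustration):

.. code-block:: python

   from typing import ClassVar, Generic, List, TypeVar

   T = TypeVar('T')

   class Registry(Generic[T]):
       # Error: ClassVar cannot contain type variables
       instances: ClassVar[List[T]] = []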
Overriding statically typed methods
***********************************
......@@ -85,27 +178,35 @@ override has a compatible signature:
.. code-block:: python
class A:
class Base:
def f(self, x: int) -> None:
...
class B(A):
class Derived1(Base):
def f(self, x: str) -> None: # Error: type of 'x' incompatible
...
class C(A):
class Derived2(Base):
def f(self, x: int, y: int) -> None: # Error: too many arguments
...
class D(A):
class Derived3(Base):
def f(self, x: int) -> None: # OK
...
class Derived4(Base):
def f(self, x: float) -> None: # OK: mypy treats int as a subtype of float
...
class Derived5(Base):
def f(self, x: int, y: int = 0) -> None: # OK: accepts more than the base
... # class method
.. note::
You can also vary return types **covariantly** in overriding. For
example, you could override the return type ``object`` with a subtype
such as ``int``. Similarly, you can vary argument types
example, you could override the return type ``Iterable[int]`` with a
subtype such as ``List[int]``. Similarly, you can vary argument types
**contravariantly** -- subclasses can have more general argument types.
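A minimal sketch of both directions (``Base`` and ``Derived`` are illustrative
names, not part of the example above):

.. code-block:: python

   from typing import Iterable, List

   class Base:
       def produce(self) -> Iterable[int]: ...
       def consume(self, x: int) -> None: ...

   class Derived(Base):
       def produce(self) -> List[int]: ...       # OK: narrower (covariant) return type
       def consume(self, x: float) -> None: ...  # OK: wider (contravariant) argument type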
You can also override a statically typed method with a dynamically
......@@ -120,50 +221,103 @@ effect at runtime:
.. code-block:: python
class A:
class Base:
def inc(self, x: int) -> int:
return x + 1
class B(A):
class Derived(Base):
def inc(self, x): # Override, dynamically typed
return 'hello' # Incompatible with 'A', but no mypy error
return 'hello' # Incompatible with 'Base', but no mypy error
Abstract base classes and multiple inheritance
**********************************************
Mypy supports Python abstract base classes (ABCs). Abstract classes
have at least one abstract method or property that must be implemented
by a subclass. You can define abstract base classes using the
``abc.ABCMeta`` metaclass, and the ``abc.abstractmethod`` and
``abc.abstractproperty`` function decorators. Example:
by any *concrete* (non-abstract) subclass. You can define abstract base
classes using the ``abc.ABCMeta`` metaclass and the ``abc.abstractmethod``
function decorator. Example:
.. code-block:: python
from abc import ABCMeta, abstractmethod
class A(metaclass=ABCMeta):
class Animal(metaclass=ABCMeta):
@abstractmethod
def foo(self, x: int) -> None: pass
def eat(self, food: str) -> None: pass
@property
@abstractmethod
def bar(self) -> str: pass
def can_walk(self) -> bool: pass
class Cat(Animal):
def eat(self, food: str) -> None:
... # Body omitted
class B(A):
def foo(self, x: int) -> None: ...
def bar(self) -> str:
return 'x'
@property
def can_walk(self) -> bool:
return True
a = A() # Error: 'A' is abstract
b = B() # OK
x = Animal() # Error: 'Animal' is abstract due to 'eat' and 'can_walk'
y = Cat() # OK
.. note::
In Python 2.7 you have to use ``@abc.abstractproperty`` to define
an abstract property.
Note that mypy performs checking for unimplemented abstract methods
even if you omit the ``ABCMeta`` metaclass. This can be useful if the
metaclass would cause runtime metaclass conflicts.
Since you can't create instances of ABCs, they are most commonly used in
type annotations. For example, this method accepts arbitrary iterables
containing arbitrary animals (instances of concrete ``Animal``
subclasses):
.. code-block:: python
def feed_all(animals: Iterable[Animal], food: str) -> None:
for animal in animals:
animal.eat(food)
There is one important peculiarity about how ABCs work in Python --
whether a particular class is abstract or not is somewhat implicit.
In the example below, ``Derived`` is treated as an abstract base class
since ``Derived`` inherits an abstract ``f`` method from ``Base`` and
doesn't explicitly implement it. The definition of ``Derived``
generates no errors from mypy, since it's a valid ABC:
.. code-block:: python
from abc import ABCMeta, abstractmethod
class Base(metaclass=ABCMeta):
@abstractmethod
def f(self, x: int) -> None: pass
class Derived(Base): # No error -- Derived is implicitly abstract
def g(self) -> None:
...
Attempting to create an instance of ``Derived`` will be rejected,
however:
.. code-block:: python
d = Derived() # Error: 'Derived' is abstract
.. note::
It's a common error to forget to implement an abstract method.
As shown above, the class definition will not generate an error
in this case, but any attempt to construct an instance will be
flagged as an error.
A class can inherit any number of classes, both abstract and
concrete. As with normal overrides, a dynamically typed method can
implement a statically typed method defined in any base class,
including an abstract method defined in an abstract base class.
override or implement a statically typed method defined in any base
class, including an abstract method defined in an abstract base class.
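A short sketch of that (``Base`` and ``Impl`` are names invented for this
example):

.. code-block:: python

   from abc import ABCMeta, abstractmethod

   class Base(metaclass=ABCMeta):
       @abstractmethod
       def format_item(self, x: int) -> str: ...

   class Impl(Base):
       def format_item(self, x):  # Dynamically typed override implements the abstract method
           return str(x)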
You can implement an abstract property using either a normal
property or an instance variable.
......@@ -382,6 +382,23 @@ More specifically, mypy will understand the use of ``sys.version_info`` and
else:
# Other systems
As a special case, you can also use one of these checks in a top-level
(unindented) ``assert``; this makes mypy skip the rest of the file.
Example:
.. code-block:: python
import sys
assert sys.platform != 'win32'
# The rest of this file doesn't apply to Windows.
Some other expressions exhibit similar behavior; in particular,
``typing.TYPE_CHECKING``, variables named ``MYPY``, and any variable
whose name is passed to ``--always-true`` or ``--always-false``.
(However, ``True`` and ``False`` are not treated specially!)
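For example, a minimal sketch of the ``MYPY`` constant idiom (``slow_to_import``
is a hypothetical module name):

.. code-block:: python

   MYPY = False
   if MYPY:
       # Only analyzed by mypy; not executed at runtime, since MYPY is False there.
       import slow_to_import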
.. note::
Mypy currently does not support more complex checks, and does not assign
......@@ -496,6 +513,41 @@ Here's the above example modified to use ``MYPY``:
return [arg]
Using classes that are generic in stubs but not at runtime
----------------------------------------------------------
Some classes are declared as generic in stubs, but not at runtime. Examples
in the standard library include ``os.PathLike`` and ``queue.Queue``.
Subscripting such a class will result in a runtime error:
.. code-block:: python
from queue import Queue
class Tasks(Queue[str]): # TypeError: 'type' object is not subscriptable
...
results: Queue[int] = Queue() # TypeError: 'type' object is not subscriptable
To avoid these errors while still having precise types you can either use
string literal types or ``typing.TYPE_CHECKING``:
.. code-block:: python
from queue import Queue
from typing import TYPE_CHECKING
if TYPE_CHECKING:
BaseQueue = Queue[str] # this is only processed by mypy
else:
BaseQueue = Queue # this is not seen by mypy but will be executed at runtime.
class Tasks(BaseQueue): # OK
...
results: 'Queue[int]' = Queue() # OK
.. _silencing-linters:
Silencing linters
......
......@@ -62,7 +62,6 @@ Mypy is a static type checker for Python 3 and Python 2.7.
python36
additional_features
faq
revision_history
Indices and tables
==================
......
......@@ -22,16 +22,19 @@ you'll find errors sooner.
The mypy daemon is experimental. In particular, the command-line
interface may change in future mypy releases.
.. note::
The mypy daemon currently supports macOS and Linux only.
.. note::
Each mypy daemon process supports one user and one set of source files,
and it can only process one type checking request at a time. You can
run multiple mypy daemon processes to type check multiple repositories.
.. note::
On Windows, due to platform limitations, the mypy daemon does not currently
support a timeout for the server process. The client will still time out if
a connection to the server cannot be made, but the server will wait forever
for a new client connection.
Basic usage
***********
......@@ -103,5 +106,3 @@ Limitations
limitation. This can be defined
through the command line or through a
:ref:`configuration file <config-file>`.
* Windows is not supported.
Revision history
================
List of major changes (the `Mypy Blog <http://mypy-lang.blogspot.com/>`_ contains more
detailed release notes):
- October 2018
* Publish ``mypy`` version 0.640 on PyPI.
* Document final qualifiers.
* Document ``--namespace-packages``.
* Remove deprecated options, and mark ``--quick-and-dirty`` as deprecated.
* Document ``--permissive-toplevel``.
* Reorganize config file docs.
- September 2018
* Publish ``mypy`` version 0.630 on PyPI.
* Document ``--warn-incomplete-stub`` (:ref:`docs <warn-incomplete-stub>`).
* Document incompatibility of stub-only packages and ``MYPYPATH``
(:ref:`docs <installed-packages>`).
* Reorganize command line :ref:`documentation <command-line>`
(see also :ref:`docs <running-mypy>` and :ref:`more docs <extending-mypy>`).
* Document :ref:`callback protocols <callback_protocols>`.
- July 2018
* Publish ``mypy`` version 0.620 on PyPI.
* Improve support for :ref:`overloads <function-overloading>`.
* Add support for :ref:`dataclasses <dataclasses_support>`.
- June 2018
* Publish ``mypy`` version 0.610 on PyPI.
* Major overhaul of documentation.
* Add the ``dmypy run`` command to the :ref:`daemon <mypy_daemon>`.
* Partially revert the prior changes to section pattern semantics in
configuration files
(:ref:`docs <config-file>` and :ref:`more docs <per-module-flags>`).
- May 2018
* Publish ``mypy`` version 0.600 on PyPI.
* Enable :ref:`strict optional checking <strict_optional>` by default.
* Document :ref:`disabling strict optional checking <no_strict_optional>`.
* Add :ref:`mypy_daemon`.
* Add :ref:`remote-cache`.
* Support user-specific configuration file (:ref:`docs <config-file>`).
* Changes to section pattern semantics in configuration files
(:ref:`docs <config-file>` and :ref:`more docs <per-module-flags>`).
- April 2018
* Publish ``mypy`` version 0.590 on PyPI.
* Document :ref:`PEP 561 support <installed-packages>`.
* Made :ref:`incremental mode <incremental>` the default.
* Document ``--always-true`` and ``--always-false`` (:ref:`docs <always-true>`).
* Document ``follow_imports_for_stubs`` (:ref:`docs<per-module-flags>`).
* Add coroutines to :ref:`Python 3 cheat sheet <cheat-sheet-py3>`.
* Add ``None`` return/strict-optional to :ref:`common issues <annotations_needed>`.
* Clarify that ``SupportsInt`` etc. don't support arithmetic operations (see :ref:`docs <supports-int-etc>`).
- March 2018
* Publish ``mypy`` version 0.580 on PyPI.
* Allow specifying multiple packages on the command line with ``-p`` and ``-m`` flags.
* Publish ``mypy`` version 0.570 on PyPI.
* Add support for :ref:`attrs_package`.
- December 2017
* Publish ``mypy`` version 0.560 on PyPI.
* Various types in ``typing`` that used to be ABCs
:ref:`are now protocols <predefined_protocols>`
and support :ref:`structural subtyping <protocol-types>`.
* Explain how to :ref:`silence invalid complaints <silencing-linters>`
by linters about unused imports due to type comments.
- November 2017
* Publish ``mypy`` version 0.550 on PyPI.
* Running mypy now requires Python 3.4 or higher.
However, Python 3.3 is still valid for the target
of the analysis (i.e. the ``--python-version`` flag).
* Split ``--disallow-any`` flag into
:ref:`separate boolean flags <disallow-dynamic-typing>`.
* The ``--old-html-report`` flag was removed.
- October 2017
* Publish ``mypy`` version 0.540 on PyPI.
* Publish ``mypy`` version 0.530 on PyPI.
- August-September 2017
* Add :ref:`protocol-types`.
* Other updates to :ref:`command-line`:
* Add ``--warn-unused-configs``.
* Add ``--disallow-untyped-decorators``.
* Add ``--disallow-incomplete-defs``.
- July 2017
* Publish ``mypy`` version 0.521 on PyPI.
* Publish ``mypy`` version 0.520 on PyPI.
* Add :ref:`fine-grained control of Any types <disallow-dynamic-typing>`.
* Add :ref:`typeddict`.
* Other updates to :ref:`command-line`:
* Add ``--no-implicit-optional``.
* Add ``--shadow-file``.
* Add ``--no-incremental``.
- May 2017
* Publish ``mypy`` version 0.510 on PyPI.
* Remove option ``--no-fast-parser``.
* Deprecate option ``--strict-boolean``.
* Drop support for Python 3.2 as type checking target.
* Add support for :ref:`overloaded functions with implementations <function-overloading>`.
* Add :ref:`extended_callable`.
* Add :ref:`async_generators_and_comprehensions`.
* Add :ref:`ClassVar <class-var>`.
* Add :ref:`quick mode <quick-mode>`.
- March 2017
* Publish ``mypy`` version 0.500 on PyPI.
* Add :ref:`noreturn`.
* Add :ref:`generic-subclasses`.
* Add :ref:`variance-of-generics`.
* Add :ref:`variance`.
* Updates to :ref:`python-36`.
* Updates to :ref:`integrating-mypy`.
* Updates to :ref:`command-line`:
* Add option ``--warn-return-any``.
* Add option ``--strict-boolean``.
* Add option ``--strict``.
* Updates to :ref:`config-file`:
* ``warn_no_return`` is on by default.
* Read settings from ``setup.cfg`` if ``mypy.ini`` does not exist.
* Add option ``warn_return_any``.
* Add option ``strict_boolean``.
- January 2017
* Publish ``mypy`` version 0.470 on PyPI.
* Change package name from ``mypy-lang`` to ``mypy``.
* Add :ref:`integrating-mypy`.
* Add :ref:`cheat-sheet-py3`.
* Major update to :ref:`finding-imports`.
* Add :ref:`--ignore-missing-imports <ignore-missing-imports>`.
* Updates to :ref:`config-file`.
* Document underscore support in numeric literals.
* Document that arguments prefixed with ``__`` are positional-only.
* Document that ``--hide-error-context`` is now on by default,
and there is a new flag ``--show-error-context``.
* Add ``ignore_errors`` to :ref:`per-module-flags`.
- November 2016
* Publish ``mypy-lang`` version 0.4.6 on PyPI.
* Add Getting started.
* Add :ref:`generic-methods-and-generic-self` (experimental).
* Add :ref:`declaring-decorators`.
* Discuss generic type aliases in :ref:`type-aliases`.
* Discuss Python 3.6 named tuple syntax in :ref:`named-tuples`.
* Updates to :ref:`common_issues`.
* Updates to :ref:`python-36`.
* Updates to :ref:`command-line`:
* ``--custom-typeshed-dir``
* ``--junit-xml``
* ``--find-occurrences``
* ``--cobertura-xml-report``
* ``--warn-no-return``
* Updates to :ref:`config-file`:
* Sections with fnmatch patterns now use
module name patterns (previously they were path patterns).
* Added ``custom_typeshed_dir``, ``mypy_path`` and ``show_column_numbers``.
* Mention the magic ``MYPY`` constant in :ref:`import-cycles`.
- October 2016
* Publish ``mypy-lang`` version 0.4.5 on PyPI.
* Add :ref:`python-36`.
* Add :ref:`config-file`.
* Updates to :ref:`command-line`: ``--strict-optional-white-list``,
``--disallow-subclassing-any``, ``--config-file``, ``@flagfile``,
``--hide-error-context`` (replaces ``--suppress-error-context``),
``--show-column-numbers`` and ``--scripts-are-modules``.
* Mention ``typing.TYPE_CHECKING`` in :ref:`import-cycles`.
- August 2016
* Publish ``mypy-lang`` version 0.4.4 on PyPI.
* Add :ref:`newtypes`.
* Add :ref:`async-and-await`.
* Add :ref:`text-and-anystr`.
* Add :ref:`version_and_platform_checks`.
- July 2016
* Publish ``mypy-lang`` version 0.4.3 on PyPI.
* Add :ref:`strict optional checking <strict_optional>`.
* Add :ref:`multi_line_annotation`.
- June 2016
* Publish ``mypy-lang`` version 0.4.2 on PyPI.
* Add :ref:`type-of-class`.
* Add :ref:`cheat-sheet-py2`.
* Add :ref:`reveal-type`.
- May 2016
* Publish ``mypy-lang`` version 0.4 on PyPI.
* Add :ref:`type-variable-upper-bound`.
* Document :ref:`command-line`.
- Feb 2016
* Publish ``mypy-lang`` version 0.3.1 on PyPI.
* Document Python 2 support.
- Nov 2015
Add :ref:`stubs-intro`.
- Jun 2015
Remove ``Undefined`` and ``Dynamic``, as they are not in PEP 484.
- Apr 2015
Publish ``mypy-lang`` version 0.2.0 on PyPI.
- Mar 2015
Update documentation to reflect PEP 484:
* Add :ref:`named-tuples` and :ref:`Optional types <strict_optional>`.
* Do not mention type application syntax (for
example, ``List[int]()``), as it's no longer supported,
due to PEP 484 compatibility.
* Rename ``typevar`` to ``TypeVar``.
* Document ``# type: ignore`` which allows
locally ignoring spurious errors (:ref:`silencing_checker`).
* No longer mention
``Any(x)`` as a valid cast, as it will be phased out soon.
* Mention the new ``.pyi`` stub file extension. Stubs can live
in the same directory as the rest of the program.
- Jan 2015
Mypy moves closer to PEP 484:
* Add :ref:`type-aliases`.
* Update discussion of overloading -- it's now only supported in stubs.
* Rename ``Function[...]`` to ``Callable[...]``.
- Dec 2014
Publish mypy version 0.1.0 on PyPI.
- Oct 2014
Major restructuring.
Split the HTML documentation into
multiple pages.
- Sep 2014
Migrated docs to Sphinx.
- Aug 2014
Don't discuss native semantics. There is only Python
semantics.
- Jul 2013
Rewrite to use new syntax. Shift focus to discussing
Python semantics. Add more content, including short discussions of
:ref:`generic-functions` and :ref:`union-types`.
......@@ -131,34 +131,72 @@ Missing imports
When you import a module, mypy may report that it is unable to
follow the import.
This could happen if the code is importing a non-existent module
or if the code is importing a library that does not use type hints.
Specifically, the library is neither declared to be a
:ref:`PEP 561 compliant package <installed-packages>` nor has registered
any stubs on `typeshed <https://github.com/python/typeshed>`_, the
repository of stubs for the standard library and popular 3rd party libraries.
This can cause a lot of errors that look like the following::
main.py:1: error: No library stub file for standard library module 'antigravity'
main.py:2: error: No library stub file for module 'flask'
main.py:3: error: Cannot find module named 'this_module_does_not_exist'
If the module genuinely does not exist, you should of course fix the
import statement. If the module is a module within your codebase that mypy
is somehow unable to discover, we recommend reading the :ref:`finding-imports`
section below to help you debug the issue.
There are several different things you can try doing, depending on the exact
nature of the module.
If the module is a part of your own codebase, try:
1. Making sure your import does not contain a typo.
2. Reading the :ref:`finding-imports` section below to make sure you
understand how exactly mypy searches for and finds modules, and modify
how you're invoking mypy accordingly.
3. Adding the directory containing that module to either the ``MYPYPATH``
environment variable or the ``mypy_path``
:ref:`config file option <config-file-import-discovery-global>`.
Note: if the module you are trying to import is actually a *submodule* of
some package, you should add the directory containing the *entire* package
to ``MYPYPATH``. For example, suppose you are trying to add the module
``foo.bar.baz``, which is located at ``~/foo-project/src/foo/bar/baz.py``.
In this case, you should add ``~/foo-project/src`` to ``MYPYPATH``.
If the module is a third party library, you must make sure that there are
type hints available for that library. Mypy by default will not attempt to
infer the types of any 3rd party libraries you may have installed
unless they either have declared themselves to be a
:ref:`PEP 561 compliant stub package <installed-packages>` or have registered
themselves on `typeshed <https://github.com/python/typeshed>`_,
the repository of types for the standard library and some 3rd party libraries.
If you are getting an import-related error, this means the library you
are trying to use has done neither of these things. In that case, you can try:
1. Searching to see if there is a :ref:`PEP 561 compliant stub package <installed-packages>`
corresponding to your third party library. Stub packages let you install
type hints independently from the library itself.
2. :ref:`Writing your own stub files <stub-files>` containing type hints for
the library. You can point mypy at your type hints either by passing
them in via the command line, by adding the location to the
`MYPYPATH` environment variable, or by using the ``mypy_path``
:ref:`config file option <config-file-import-discovery-global>`.
Note that if you decide to write your own stub files, they don't need
to be complete! A good strategy is to add stubs for just the parts
of the library you need and iterate on them over time.
If you want to share your work, you can try contributing your stubs back
to the library -- see our documentation on creating
:ref:`PEP 561 compliant packages <installed-packages>`.
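As a sketch, such a stub can start out very small. For a hypothetical library
named ``foobar``, a hand-written ``foobar.pyi`` might contain just the parts
you use (all names below are invented for illustration)::

    def frobnicate(value: str, *, retries: int = ...) -> bytes: ...

    class Client:
        def __init__(self, url: str) -> None: ...
        def fetch(self, path: str) -> bytes: ...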
If the module is a third party library, but you cannot find any existing
type hints nor have the time to write your own, you can *silence* the errors:
If the module is a library that does not use type hints, the easiest fix
is to silence the error messages by adding a ``# type: ignore`` comment on
each respective import statement.
1. To silence a *single* missing import error, add a ``# type: ignore`` at the end of the
line containing the import.
If you have many of these errors from a specific library, it may be more
convenient to silence all of those errors at once using the
:ref:`mypy config file <config-file>`. For example, suppose your codebase
makes heavy use of an (untyped) library named `foobar`. You can silence all
import errors associated with that library and that library alone by adding
the following section to your config file::
2. To silence *all* missing import errors from a single library, add
a section to your :ref:`mypy config file <config-file>` for that library setting
``ignore_missing_imports`` to True. For example, suppose your codebase
makes heavy use of an (untyped) library named ``foobar``. You can silence
all import errors associated with that library and that library alone by
adding the following section to your config file::
[mypy-foobar]
ignore_missing_imports = True
......@@ -168,22 +206,33 @@ import of ``foobar`` in your codebase. For more information, see the
documentation about configuring
:ref:`import discovery <config-file-import-discovery-per-module>` in config files.
If you would like to silence *all* missing import errors in your codebase,
you can do so by using the ``--ignore-missing-imports`` flag. We recommend
using this flag only as a last resort: it's equivalent to adding a
``# type: ignore`` to all unresolved imports in your codebase.
3. To silence *all* missing import errors for *all* libraries in your codebase,
invoke mypy with the ``--ignore-missing-imports`` command line flag or set
the ``ignore_missing_imports``
:ref:`config file option <config-file-import-discovery-per-module>` to True
in the *global* section of your mypy config file::
A more involved solution would be to reverse-engineer how the library
works, create type hints for the library, and point mypy at those
type hints either by passing them in via the command line or by adding
the location of your custom stubs to either the ``MYPYPATH`` environment
variable or the ``mypy_path``
:ref:`config file option <config-file-import-discovery-global>`.
[mypy]
ignore_missing_imports = True
If you want to share your work, you can try contributing your stubs back
to the library -- see our documentation on creating
:ref:`PEP 561 compliant packages <installed-packages>`.
We recommend using this approach only as a last resort: it's equivalent
to adding a ``# type: ignore`` to all unresolved imports in your codebase.
If the module is a part of the standard library, try:
1. Updating mypy and re-running it. It's possible type hints for that corner
of the standard library were added in a later version of mypy.
2. Filing a bug report on `typeshed <https://github.com/python/typeshed>`_,
the repository of type hints for the standard library that comes bundled
with mypy. You can expedite this process by also submitting a pull request
fixing the bug.
Changes to typeshed will come bundled with mypy the next time it's released.
In the meantime, you can add a ``# type: ignore`` to silence any relevant
errors. After upgrading, we recommend running mypy using the
``--warn-unused-ignores`` flag to help you find any ``# type: ignore``
annotations you no longer need.
.. _follow-imports:
......
Metadata-Version: 2.1
Name: mypy
Version: 0.641
Version: 0.650
Summary: Optional static typing for Python
Home-page: http://www.mypy-lang.org/
Author: Jukka Lehtosalo
......
......@@ -36,7 +36,6 @@ docs/source/mypy_daemon.rst
docs/source/protocols.rst
docs/source/python2.rst
docs/source/python36.rst
docs/source/revision_history.rst
docs/source/running_mypy.rst
docs/source/stubs.rst
docs/source/supported_python_features.rst
......@@ -51,6 +50,7 @@ mypy/__init__.py
mypy/__main__.py
mypy/api.py
mypy/applytype.py
mypy/argmap.py
mypy/binder.py
mypy/bogus_type.py
mypy/build.py
......@@ -61,6 +61,7 @@ mypy/checkstrformat.py
mypy/constraints.py
mypy/defaults.py
mypy/dmypy.py
mypy/dmypy_os.py
mypy/dmypy_server.py
mypy/dmypy_util.py
mypy/erasetype.py
......@@ -79,6 +80,7 @@ mypy/git.py
mypy/indirection.py
mypy/infer.py
mypy/interpreted_plugin.py
mypy/ipc.py
mypy/join.py
mypy/literals.py
mypy/main.py
......@@ -133,6 +135,7 @@ mypy.egg-info/top_level.txt
mypy/plugins/__init__.py
mypy/plugins/attrs.py
mypy/plugins/common.py
mypy/plugins/ctypes.py
mypy/plugins/dataclasses.py
mypy/server/__init__.py
mypy/server/astdiff.py
......@@ -153,6 +156,7 @@ mypy/test/helpers.py
mypy/test/testargs.py
mypy/test/testcheck.py
mypy/test/testcmdline.py
mypy/test/testdaemon.py
mypy/test/testdeps.py
mypy/test/testdiff.py
mypy/test/testerrorstream.py
......@@ -161,6 +165,7 @@ mypy/test/testfinegrained.py
mypy/test/testfinegrainedcache.py
mypy/test/testgraph.py
mypy/test/testinfer.py
mypy/test/testipc.py
mypy/test/testmerge.py
mypy/test/testmoduleinfo.py
mypy/test/testparse.py
......@@ -551,6 +556,7 @@ mypy/typeshed/stdlib/3/builtins.pyi
mypy/typeshed/stdlib/3/compileall.pyi
mypy/typeshed/stdlib/3/configparser.pyi
mypy/typeshed/stdlib/3/enum.pyi
mypy/typeshed/stdlib/3/faulthandler.pyi
mypy/typeshed/stdlib/3/fcntl.pyi
mypy/typeshed/stdlib/3/fnmatch.pyi
mypy/typeshed/stdlib/3/functools.pyi
......@@ -706,7 +712,6 @@ mypy/typeshed/tests/mypy_test.py
mypy/typeshed/tests/pytype_test.py
mypy/typeshed/third_party/2/enum.pyi
mypy/typeshed/third_party/2/gflags.pyi
mypy/typeshed/third_party/2/itsdangerous.pyi
mypy/typeshed/third_party/2/pathlib2.pyi
mypy/typeshed/third_party/2/pycurl.pyi
mypy/typeshed/third_party/2/pymssql.pyi
......@@ -790,6 +795,7 @@ mypy/typeshed/third_party/2and3/certifi.pyi
mypy/typeshed/third_party/2and3/croniter.pyi
mypy/typeshed/third_party/2and3/emoji.pyi
mypy/typeshed/third_party/2and3/first.pyi
mypy/typeshed/third_party/2and3/itsdangerous.pyi
mypy/typeshed/third_party/2and3/mock.pyi
mypy/typeshed/third_party/2and3/mypy_extensions.pyi
mypy/typeshed/third_party/2and3/singledispatch.pyi
......@@ -1100,6 +1106,7 @@ mypy/typeshed/third_party/2and3/werkzeug/debug/tbtools.pyi
mypy/typeshed/third_party/2and3/yaml/__init__.pyi
mypy/typeshed/third_party/2and3/yaml/composer.pyi
mypy/typeshed/third_party/2and3/yaml/constructor.pyi
mypy/typeshed/third_party/2and3/yaml/cyaml.pyi
mypy/typeshed/third_party/2and3/yaml/dumper.pyi
mypy/typeshed/third_party/2and3/yaml/emitter.pyi
mypy/typeshed/third_party/2and3/yaml/error.pyi
......@@ -1114,8 +1121,6 @@ mypy/typeshed/third_party/2and3/yaml/scanner.pyi
mypy/typeshed/third_party/2and3/yaml/serializer.pyi
mypy/typeshed/third_party/2and3/yaml/tokens.pyi
mypy/typeshed/third_party/3/dataclasses.pyi
mypy/typeshed/third_party/3/enum.pyi
mypy/typeshed/third_party/3/itsdangerous.pyi
mypy/typeshed/third_party/3.5/contextvars.pyi
mypy/typeshed/third_party/3/docutils/__init__.pyi
mypy/typeshed/third_party/3/docutils/examples.pyi
......@@ -1181,7 +1186,6 @@ mypy/xml/mypy-html.css
mypy/xml/mypy-html.xslt
mypy/xml/mypy-txt.xslt
mypy/xml/mypy.xsd
scripts/dmypy
scripts/dumpmodule.py
scripts/find_type.py
scripts/stubtest.py
......@@ -1195,16 +1199,14 @@ test-data/packages/typedpkg/typedpkg/__init__.py
test-data/packages/typedpkg/typedpkg/dne.py
test-data/packages/typedpkg/typedpkg/py.typed
test-data/packages/typedpkg/typedpkg/sample.py
test-data/packages/typedpkg_namespace-alpha/setup.py
test-data/packages/typedpkg_namespace-alpha/typedpkg_namespace/__init__.py
test-data/packages/typedpkg_namespace-alpha/typedpkg_namespace/alpha/__init__.py
test-data/packages/typedpkg_namespace-alpha/typedpkg_namespace/alpha/alpha_module.py
test-data/packages/typedpkg_namespace-alpha/typedpkg_namespace/alpha/py.typed
test-data/packages/typedpkg_nested/setup.py
test-data/packages/typedpkg_nested/typedpkg_nested/__init__.py
test-data/packages/typedpkg_nested/typedpkg_nested/nested_package/__init__.py
test-data/packages/typedpkg_nested/typedpkg_nested/nested_package/nested_module.py
test-data/packages/typedpkg_nested/typedpkg_nested/nested_package/py.typed
test-data/packages/typedpkg/typedpkg/pkg/__init__.py
test-data/packages/typedpkg/typedpkg/pkg/aaa.py
test-data/packages/typedpkg/typedpkg/pkg/py.typed
test-data/packages/typedpkg_ns/setup.py
test-data/packages/typedpkg_ns/typedpkg_ns/__init__.py
test-data/packages/typedpkg_ns/typedpkg_ns/ns/__init__.py
test-data/packages/typedpkg_ns/typedpkg_ns/ns/bbb.py
test-data/packages/typedpkg_ns/typedpkg_ns/ns/py.typed
test-data/samples/bottles.py
test-data/samples/class.py
test-data/samples/cmdline.py
......@@ -1273,6 +1275,7 @@ test-data/unit/check-class-namedtuple.test
test-data/unit/check-classes.test
test-data/unit/check-classvar.test
test-data/unit/check-columns.test
test-data/unit/check-ctypes.test
test-data/unit/check-custom-plugin.test
test-data/unit/check-dataclasses.test
test-data/unit/check-default-plugin.test
......@@ -1320,6 +1323,7 @@ test-data/unit/check-unsupported.test
test-data/unit/check-varargs.test
test-data/unit/check-warnings.test
test-data/unit/cmdline.test
test-data/unit/daemon.test
test-data/unit/deps-classes.test
test-data/unit/deps-expressions.test
test-data/unit/deps-generics.test
......@@ -1374,6 +1378,7 @@ test-data/unit/fixtures/f_string.pyi
test-data/unit/fixtures/fine_grained.pyi
test-data/unit/fixtures/float.pyi
test-data/unit/fixtures/floatdict.pyi
test-data/unit/fixtures/floatdict_python2.pyi
test-data/unit/fixtures/for.pyi
test-data/unit/fixtures/function.pyi
test-data/unit/fixtures/isinstance.pyi
......@@ -1418,7 +1423,10 @@ test-data/unit/plugins/badreturn.py
test-data/unit/plugins/badreturn2.py
test-data/unit/plugins/class_callable.py
test-data/unit/plugins/customentry.py
test-data/unit/plugins/dyn_class.py
test-data/unit/plugins/fnplugin.py
test-data/unit/plugins/fully_qualified_test_hook.py
test-data/unit/plugins/method_sig_hook.py
test-data/unit/plugins/named_callable.py
test-data/unit/plugins/noentry.py
test-data/unit/plugins/plugin2.py
......
[console_scripts]
dmypy = mypy.dmypy:main
dmypy = mypy.dmypy:console_entry
mypy = mypy.__main__:console_entry
stubgen = mypy.stubgen:main
......@@ -17,6 +17,12 @@ the exit status mypy normally returns to the operating system.
Any pretty formatting is left to the caller.
The 'run_dmypy' function is similar, but instead mimics invocation of
dmypy.
Note that these APIs don't support incremental generation of error
messages.
Trivial example of code using this module:
import sys
......@@ -33,15 +39,15 @@ if result[1]:
print(result[1]) # stderr
print ('\nExit status:', result[2])
"""
import sys
from io import StringIO
from typing import List, Tuple
from mypy.main import main
from typing import List, Tuple, Callable
def run(args: List[str]) -> Tuple[str, str, int]:
def _run(f: Callable[[], None]) -> Tuple[str, str, int]:
old_stdout = sys.stdout
new_stdout = StringIO()
sys.stdout = new_stdout
......@@ -51,7 +57,7 @@ def run(args: List[str]) -> Tuple[str, str, int]:
sys.stderr = new_stderr
try:
main(None, args=args)
f()
exit_status = 0
except SystemExit as system_exit:
exit_status = system_exit.code
......@@ -60,3 +66,14 @@ def run(args: List[str]) -> Tuple[str, str, int]:
sys.stderr = old_stderr
return new_stdout.getvalue(), new_stderr.getvalue(), exit_status
def run(args: List[str]) -> Tuple[str, str, int]:
# Lazy import to avoid needing to import all of mypy to call run_dmypy
from mypy.main import main
return _run(lambda: main(None, args=args))
def run_dmypy(args: List[str]) -> Tuple[str, str, int]:
from mypy.dmypy import main
return _run(lambda: main(args))
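# Illustrative usage sketch ('prog.py' is a hypothetical target file); both
# entry points capture stdout/stderr the same way:
#
#     from mypy import api
#
#     stdout, stderr, exit_status = api.run(['--ignore-missing-imports', 'prog.py'])
#     stdout, stderr, exit_status = api.run_dmypy(['status'])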
"""Utilities for mapping between actual and formal arguments (and their types)."""
from typing import List, Optional, Sequence, Callable, Set
from mypy.types import Type, Instance, TupleType, AnyType, TypeOfAny, TypedDictType
from mypy import nodes
def map_actuals_to_formals(caller_kinds: List[int],
caller_names: Optional[Sequence[Optional[str]]],
callee_kinds: List[int],
callee_names: Sequence[Optional[str]],
caller_arg_type: Callable[[int],
Type]) -> List[List[int]]:
"""Calculate mapping between actual (caller) args and formals.
The result contains a list of caller argument indexes mapping to each
callee argument index, indexed by callee index.
The caller_arg_type argument should evaluate to the type of the actual
argument with the given index.
"""
ncallee = len(callee_kinds)
map = [[] for i in range(ncallee)] # type: List[List[int]]
j = 0
for i, kind in enumerate(caller_kinds):
if kind == nodes.ARG_POS:
if j < ncallee:
if callee_kinds[j] in [nodes.ARG_POS, nodes.ARG_OPT,
nodes.ARG_NAMED, nodes.ARG_NAMED_OPT]:
map[j].append(i)
j += 1
elif callee_kinds[j] == nodes.ARG_STAR:
map[j].append(i)
elif kind == nodes.ARG_STAR:
# We need to know the actual type to map varargs.
argt = caller_arg_type(i)
if isinstance(argt, TupleType):
# A tuple actual maps to a fixed number of formals.
for _ in range(len(argt.items)):
if j < ncallee:
if callee_kinds[j] != nodes.ARG_STAR2:
map[j].append(i)
else:
break
if callee_kinds[j] != nodes.ARG_STAR:
j += 1
else:
# Assume that it is an iterable (if it isn't, there will be
# an error later).
while j < ncallee:
if callee_kinds[j] in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT, nodes.ARG_STAR2):
break
else:
map[j].append(i)
if callee_kinds[j] == nodes.ARG_STAR:
break
j += 1
elif kind in (nodes.ARG_NAMED, nodes.ARG_NAMED_OPT):
assert caller_names is not None, "Internal error: named kinds without names given"
name = caller_names[i]
if name in callee_names:
map[callee_names.index(name)].append(i)
elif nodes.ARG_STAR2 in callee_kinds:
map[callee_kinds.index(nodes.ARG_STAR2)].append(i)
else:
assert kind == nodes.ARG_STAR2
argt = caller_arg_type(i)
if isinstance(argt, TypedDictType):
for name, value in argt.items.items():
if name in callee_names:
map[callee_names.index(name)].append(i)
elif nodes.ARG_STAR2 in callee_kinds:
map[callee_kinds.index(nodes.ARG_STAR2)].append(i)
else:
# We don't exactly know which **kwargs are provided by the
# caller. Assume that they will fill the remaining arguments.
for j in range(ncallee):
# TODO: If there are also tuple varargs, we might be missing some potential
# matches if the tuple was short enough to not match everything.
no_certain_match = (
not map[j] or caller_kinds[map[j][0]] == nodes.ARG_STAR)
if ((callee_names[j] and no_certain_match)
or callee_kinds[j] == nodes.ARG_STAR2):
map[j].append(i)
return map
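# A worked illustration of the mapping (hypothetical call, for clarity only):
# for a callee  def f(x, y=0, *rest)  called as  f(1, 2, 3, 4)  we have
#     caller_kinds = [ARG_POS, ARG_POS, ARG_POS, ARG_POS]
#     callee_kinds = [ARG_POS, ARG_OPT, ARG_STAR]
# and map_actuals_to_formals(...) returns [[0], [1], [2, 3]]: actuals 0 and 1
# map to 'x' and 'y', while actuals 2 and 3 both map to the '*rest' formal.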
def map_formals_to_actuals(caller_kinds: List[int],
caller_names: Optional[Sequence[Optional[str]]],
callee_kinds: List[int],
callee_names: List[Optional[str]],
caller_arg_type: Callable[[int],
Type]) -> List[List[int]]:
"""Calculate the reverse mapping of map_actuals_to_formals."""
formal_to_actual = map_actuals_to_formals(caller_kinds,
caller_names,
callee_kinds,
callee_names,
caller_arg_type)
# Now reverse the mapping.
actual_to_formal = [[] for _ in caller_kinds] # type: List[List[int]]
for formal, actuals in enumerate(formal_to_actual):
for actual in actuals:
actual_to_formal[actual].append(formal)
return actual_to_formal
class ArgTypeExpander:
"""Utility class for mapping actual argument types to formal arguments.
One of the main responsibilities is to expand caller tuple *args and TypedDict
**kwargs, and to keep track of which tuple/TypedDict items have already been
consumed.
Example:
def f(x: int, *args: str) -> None: ...
f(*(1, 'x', 1.1))
We'd call expand_actual_type three times:
1. The first call would provide 'int' as the actual type of 'x' (from '1').
2. The second call would provide 'str' as one of the actual types for '*args'.
3. The third call would provide 'float' as one of the actual types for '*args'.
A single instance can process all the arguments for a single call. Each call
needs a separate instance since instances have per-call state.
"""
def __init__(self) -> None:
# Next tuple *args index to use.
self.tuple_index = 0
# Keyword arguments in TypedDict **kwargs used.
self.kwargs_used = set() # type: Set[str]
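# For the docstring example above, the three calls would behave roughly like
# this (a sketch; 'tup' stands for the TupleType of the (1, 'x', 1.1) actual):
#
#     expander = ArgTypeExpander()
#     expander.expand_actual_type(tup, nodes.ARG_STAR, 'x', nodes.ARG_POS)      # int item
#     expander.expand_actual_type(tup, nodes.ARG_STAR, 'args', nodes.ARG_STAR)  # str item
#     expander.expand_actual_type(tup, nodes.ARG_STAR, 'args', nodes.ARG_STAR)  # float item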
def expand_actual_type(self,
actual_type: Type,
actual_kind: int,
formal_name: Optional[str],
formal_kind: int) -> Type:
"""Return the actual (caller) type(s) of a formal argument with the given kinds.
If the actual argument is a tuple *args, return the next individual tuple item that
maps to the formal arg.
If the actual argument is a TypedDict **kwargs, return the next matching typed dict
value type based on formal argument name and kind.
This is supposed to be called for each formal, in order. Call multiple times per
formal if multiple actuals map to a formal.
"""
if actual_kind == nodes.ARG_STAR:
if isinstance(actual_type, Instance):
if actual_type.type.fullname() == 'builtins.list':
# List *arg.
return actual_type.args[0]
elif actual_type.args:
# TODO: Try to map type arguments to Iterable
return actual_type.args[0]
else:
return AnyType(TypeOfAny.from_error)
elif isinstance(actual_type, TupleType):
# Get the next tuple item of a tuple *arg.
if self.tuple_index >= len(actual_type.items):
# Exhausted a tuple -- continue to the next *args.
self.tuple_index = 1
else:
self.tuple_index += 1
return actual_type.items[self.tuple_index - 1]
else:
return AnyType(TypeOfAny.from_error)
elif actual_kind == nodes.ARG_STAR2:
if isinstance(actual_type, TypedDictType):
if formal_kind != nodes.ARG_STAR2 and formal_name in actual_type.items:
# Lookup type based on keyword argument name.
assert formal_name is not None
else:
# Pick an arbitrary item if no specified keyword is expected.
formal_name = (set(actual_type.items.keys()) - self.kwargs_used).pop()
self.kwargs_used.add(formal_name)
return actual_type.items[formal_name]
elif (isinstance(actual_type, Instance)
and (actual_type.type.fullname() == 'builtins.dict')):
# Dict **arg.
# TODO: Handle arbitrary Mapping
return actual_type.args[1]
else:
return AnyType(TypeOfAny.from_error)
else:
# No translation for other kinds -- 1:1 mapping.
return actual_type
......@@ -18,7 +18,7 @@ from mypy.nodes import IndexExpr, MemberExpr, NameExpr
BindableExpression = Union[IndexExpr, MemberExpr, NameExpr]
class Frame(Dict[Key, Type]):
class Frame:
"""A Frame represents a specific point in the execution of a program.
It carries information about the current types of expressions at
that point, arising either from assignments to those expressions
......@@ -31,13 +31,7 @@ class Frame(Dict[Key, Type]):
"""
def __init__(self) -> None:
self.unreachable = False
class DeclarationsFrame(Dict[Key, Optional[Type]]):
"""Same as above, but allowed to have None values."""
def __init__(self) -> None:
self.types = {} # type: Dict[Key, Type]
self.unreachable = False
......@@ -89,7 +83,7 @@ class ConditionalTypeBinder:
# Maps literal_hash(expr) to get_declaration(expr)
# for every expr stored in the binder
self.declarations = DeclarationsFrame()
self.declarations = {} # type: Dict[Key, Optional[Type]]
# Set of other keys to invalidate if a key is changed, e.g. x -> {x.a, x[0]}
# Whenever a new key (e.g. x.a.b) is added, we update this
self.dependencies = {} # type: Dict[Key, Set[Key]]
......@@ -117,14 +111,14 @@ class ConditionalTypeBinder:
return f
def _put(self, key: Key, type: Type, index: int = -1) -> None:
self.frames[index][key] = type
self.frames[index].types[key] = type
def _get(self, key: Key, index: int = -1) -> Optional[Type]:
if index < 0:
index += len(self.frames)
for i in range(index, -1, -1):
if key in self.frames[i]:
return self.frames[i][key]
if key in self.frames[i].types:
return self.frames[i].types[key]
return None
def put(self, expr: Expression, typ: Type) -> None:
......@@ -161,8 +155,8 @@ class ConditionalTypeBinder:
def _cleanse_key(self, key: Key) -> None:
"""Remove all references to a key from the binder."""
for frame in self.frames:
if key in frame:
del frame[key]
if key in frame.types:
del frame.types[key]
def update_from_options(self, frames: List[Frame]) -> bool:
"""Update the frame to reflect that each key will be updated
......@@ -174,11 +168,11 @@ class ConditionalTypeBinder:
frames = [f for f in frames if not f.unreachable]
changed = False
keys = set(key for f in frames for key in f)
keys = set(key for f in frames for key in f.types)
for key in keys:
current_value = self._get(key)
resulting_values = [f.get(key, current_value) for f in frames]
resulting_values = [f.types.get(key, current_value) for f in frames]
if any(x is None for x in resulting_values):
# We didn't know anything about key before
# (current_value must be None), and we still don't
......@@ -321,8 +315,8 @@ class ConditionalTypeBinder:
key = literal_hash(expr)
assert key is not None
enclosers = ([get_declaration(expr)] +
[f[key] for f in self.frames
if key in f and is_subtype(type, f[key])])
[f.types[key] for f in self.frames
if key in f.types and is_subtype(type, f.types[key])])
return enclosers[-1]
def allow_jump(self, index: int) -> None:
......@@ -332,7 +326,7 @@ class ConditionalTypeBinder:
index += len(self.options_on_return)
frame = Frame()
for f in self.frames[index + 1:]:
frame.update(f)
frame.types.update(f.types)
if f.unreachable:
frame.unreachable = True
self.options_on_return[index].append(frame)
......
......@@ -21,6 +21,7 @@ import stat
import sys
import time
import errno
import types
from typing import (AbstractSet, Any, Dict, Iterable, Iterator, List,
Mapping, NamedTuple, Optional, Set, Tuple, Union, Callable)
......@@ -183,7 +184,7 @@ def _build(sources: List[BuildSource],
reports = Reports(data_dir, options.report_dirs)
source_set = BuildSourceSet(sources)
errors = Errors(options.show_error_context, options.show_column_numbers)
plugin = load_plugins(options, errors)
plugin, snapshot = load_plugins(options, errors)
# Construct a build manager object to hold state during the build.
#
......@@ -195,6 +196,7 @@ def _build(sources: List[BuildSource],
options=options,
version_id=__version__,
plugin=plugin,
plugins_snapshot=snapshot,
errors=errors,
flush_errors=flush_errors,
fscache=fscache)
......@@ -304,17 +306,20 @@ def import_priority(imp: ImportBase, toplevel_priority: int) -> int:
return toplevel_priority
def load_plugins(options: Options, errors: Errors) -> Plugin:
def load_plugins(options: Options, errors: Errors) -> Tuple[Plugin, Dict[str, str]]:
"""Load all configured plugins.
Return a plugin that encapsulates all plugins chained together. Always
at least include the default plugin (it's last in the chain).
The second return value is a snapshot of versions/hashes of loaded user
plugins (for cache validation).
"""
import importlib
snapshot = {} # type: Dict[str, str]
default_plugin = DefaultPlugin(options) # type: Plugin
if not options.config_file:
return default_plugin
return default_plugin, snapshot
line = find_config_file_line_number(options.config_file, 'mypy', 'plugins')
if line == -1:
......@@ -336,7 +341,10 @@ def load_plugins(options: Options, errors: Errors) -> Plugin:
plugin_path = os.path.join(os.path.dirname(options.config_file), plugin_path)
if not os.path.isfile(plugin_path):
plugin_error("Can't find plugin '{}'".format(plugin_path))
plugin_dir = os.path.dirname(plugin_path)
# Use an absolute path to avoid populating the cache entry
# for 'tmp' during tests, since it will be different in
# different tests.
plugin_dir = os.path.abspath(os.path.dirname(plugin_path))
fnam = os.path.basename(plugin_path)
module_name = fnam[:-3]
sys.path.insert(0, plugin_dir)
......@@ -375,11 +383,27 @@ def load_plugins(options: Options, errors: Errors) -> Plugin:
'(in {})'.format(plugin_path))
try:
custom_plugins.append(plugin_type(options))
snapshot[module_name] = take_module_snapshot(module)
except Exception:
print('Error constructing plugin instance of {}\n'.format(plugin_type.__name__))
raise # Propagate to display traceback
# Custom plugins take precedence over the default plugin.
return ChainedPlugin(options, custom_plugins + [default_plugin])
return ChainedPlugin(options, custom_plugins + [default_plugin]), snapshot
def take_module_snapshot(module: types.ModuleType) -> str:
"""Take plugin module snapshot by recording its version and hash.
We record _both_ hash and the version to detect more possible changes
(e.g. if there is a change in modules imported by a plugin).
"""
if hasattr(module, '__file__'):
with open(module.__file__, 'rb') as f:
digest = hashlib.md5(f.read()).hexdigest()
else:
digest = 'unknown'
ver = getattr(module, '__version__', 'none')
return '{}:{}'.format(ver, digest)
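# For example (hypothetical values), a plugin module exposing __version__ = '0.3'
# would be recorded as something like '0.3:6f1ed002ab5595859014ebf0951522d9',
# while a module without __version__ is recorded as 'none:<md5 of its source>'.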
def find_config_file_line_number(path: str, section: str, setting_name: str) -> int:
......@@ -426,6 +450,11 @@ class BuildManager(BuildManagerBase):
stale_modules: Set of modules that needed to be rechecked (only used by tests)
version_id: The current mypy version (based on commit id when possible)
plugin: Active mypy plugin(s)
plugins_snapshot:
Snapshot of currently active user plugins (versions and hashes)
old_plugins_snapshot:
Plugins snapshot from previous incremental run (or None in
non-incremental mode and if cache was not found)
errors: Used for reporting all errors
flush_errors: A function for processing errors after each SCC
cache_enabled: Whether cache is being read. This is set based on options,
......@@ -446,6 +475,7 @@ class BuildManager(BuildManagerBase):
options: Options,
version_id: str,
plugin: Plugin,
plugins_snapshot: Dict[str, str],
errors: Errors,
flush_errors: Callable[[List[str], bool], None],
fscache: FileSystemCache,
......@@ -471,7 +501,6 @@ class BuildManager(BuildManagerBase):
self.indirection_detector = TypeIndirectionVisitor()
self.stale_modules = set() # type: Set[str]
self.rechecked_modules = set() # type: Set[str]
self.plugin = plugin
self.flush_errors = flush_errors
self.cache_enabled = options.incremental and (
not options.fine_grained_incremental or options.use_fine_grained_cache)
......@@ -487,6 +516,9 @@ class BuildManager(BuildManagerBase):
in self.options.shadow_file}
# a mapping from each file being typechecked to its possible shadow file
self.shadow_equivalence_map = {} # type: Dict[str, Optional[str]]
self.plugin = plugin
self.plugins_snapshot = plugins_snapshot
self.old_plugins_snapshot = read_plugins_snapshot(self)
def use_fine_grained_cache(self) -> bool:
return self.cache_enabled and self.options.use_fine_grained_cache
......@@ -685,6 +717,30 @@ def write_protocol_deps_cache(proto_deps: Dict[str, Set[str]],
blocker=True)
def write_plugins_snapshot(manager: BuildManager) -> None:
"""Write snapshot of versions and hashes of currently active plugins."""
name = os.path.join(_cache_dir_prefix(manager), '@plugins_snapshot.json')
if not atomic_write(name, json.dumps(manager.plugins_snapshot), '\n'):
manager.errors.set_file(_cache_dir_prefix(manager), None)
manager.errors.report(0, 0, "Error writing plugins snapshot",
blocker=True)
def read_plugins_snapshot(manager: BuildManager) -> Optional[Dict[str, str]]:
"""Read cached snapshot of versions and hashes of plugins from previous run."""
name = os.path.join(_cache_dir_prefix(manager), '@plugins_snapshot.json')
snapshot = _load_json_file(name, manager,
log_sucess='Plugins snapshot ',
log_error='Could not load plugins snapshot: ')
if snapshot is None:
return None
if not isinstance(snapshot, dict):
manager.log('Could not load plugins snapshot: cache is not a dict: {}'
.format(type(snapshot)))
return None
return snapshot
def read_protocol_cache(manager: BuildManager,
graph: Graph) -> Optional[Dict[str, Set[str]]]:
"""Read and validate protocol dependencies cache.
......@@ -848,6 +904,11 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
manager.trace(' {}: {} != {}'
.format(key, cached_options.get(key), current_options.get(key)))
return None
if manager.old_plugins_snapshot and manager.plugins_snapshot:
# Check if plugins are still the same.
if manager.plugins_snapshot != manager.old_plugins_snapshot:
manager.log('Metadata abandoned for {}: plugins differ'.format(id))
return None
manager.add_stats(fresh_metas=1)
return m
......@@ -865,7 +926,7 @@ def atomic_write(filename: str, line1: str, line2: str) -> bool:
for line in lines:
f.write(line)
os.replace(tmp_filename, filename)
except os.error as err:
except os.error:
return False
return True
......@@ -2070,10 +2131,9 @@ def module_not_found(manager: BuildManager, line: int, caller_state: State,
errors.report(line, 0, "No library stub file for module '{}'".format(target))
errors.report(line, 0, stub_msg, severity='note', only_once=True)
else:
note = "See https://mypy.readthedocs.io/en/latest/running_mypy.html#missing-imports"
errors.report(line, 0, "Cannot find module named '{}'".format(target))
errors.report(line, 0, '(Perhaps setting MYPYPATH '
'or using the "--ignore-missing-imports" flag would help)',
severity='note', only_once=True)
errors.report(line, 0, note, severity='note', only_once=True)
errors.set_import_context(save_import_context)
......@@ -2170,6 +2230,9 @@ def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
process_fine_grained_cache_graph(graph, manager)
else:
process_graph(graph, manager)
# Update plugins snapshot.
write_plugins_snapshot(manager)
manager.old_plugins_snapshot = manager.plugins_snapshot
if manager.options.cache_fine_grained or manager.options.fine_grained_incremental:
# If we are running a daemon or are going to write the cache for further fine-grained use,
# then we need to collect fine-grained protocol dependencies.
......
......@@ -1787,7 +1787,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
elif (isinstance(lvalue, MemberExpr) and
lvalue.kind is None): # Ignore member access to modules
instance_type = self.expr_checker.accept(lvalue.expr)
rvalue_type, infer_lvalue_type = self.check_member_assignment(
rvalue_type, lvalue_type, infer_lvalue_type = self.check_member_assignment(
instance_type, lvalue_type, rvalue, lvalue)
else:
rvalue_type = self.check_simple_assignment(lvalue_type, rvalue, lvalue)
......@@ -2491,59 +2491,80 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
return rvalue_type
def check_member_assignment(self, instance_type: Type, attribute_type: Type,
rvalue: Expression, context: Context) -> Tuple[Type, bool]:
rvalue: Expression, context: Context) -> Tuple[Type, Type, bool]:
"""Type member assignment.
This defers to check_simple_assignment, unless the member expression
is a descriptor, in which case this checks descriptor semantics as well.
Return the inferred rvalue_type and whether to infer anything about the attribute type.
Return the inferred rvalue_type, inferred lvalue_type, and whether to use the binder
for this assignment.
Note: this method exists here and not in checkmember.py, because we need to take
care of the interaction between the binder and __set__().
"""
# Descriptors don't participate in class-attribute access
if ((isinstance(instance_type, FunctionLike) and instance_type.is_type_obj()) or
isinstance(instance_type, TypeType)):
rvalue_type = self.check_simple_assignment(attribute_type, rvalue, context)
return rvalue_type, True
return rvalue_type, attribute_type, True
if not isinstance(attribute_type, Instance):
# TODO: support __set__() for union types.
rvalue_type = self.check_simple_assignment(attribute_type, rvalue, context)
return rvalue_type, True
return rvalue_type, attribute_type, True
get_type = analyze_descriptor_access(
instance_type, attribute_type, self.named_type,
self.msg, context, chk=self)
if not attribute_type.type.has_readable_member('__set__'):
# If there is no __set__, we type-check that the assigned value matches
# the return type of __get__. This doesn't match the Python semantics
# (which allow you to override the descriptor with any value), but it preserves
# the type of accessing the attribute (even after the override).
if attribute_type.type.has_readable_member('__get__'):
attribute_type = analyze_descriptor_access(
instance_type, attribute_type, self.named_type,
self.msg, context, chk=self)
rvalue_type = self.check_simple_assignment(attribute_type, rvalue, context)
return rvalue_type, True
rvalue_type = self.check_simple_assignment(get_type, rvalue, context)
return rvalue_type, get_type, True
dunder_set = attribute_type.type.get_method('__set__')
if dunder_set is None:
self.msg.fail("{}.__set__ is not callable".format(attribute_type), context)
return AnyType(TypeOfAny.from_error), False
return AnyType(TypeOfAny.from_error), get_type, False
function = function_type(dunder_set, self.named_type('builtins.function'))
bound_method = bind_self(function, attribute_type)
typ = map_instance_to_supertype(attribute_type, dunder_set.info)
dunder_set_type = expand_type_by_instance(bound_method, typ)
# Here we just infer the type; the result should be type-checked like a normal assignment.
# For this we use the rvalue as type context.
self.msg.disable_errors()
_, inferred_dunder_set_type = self.expr_checker.check_call(
dunder_set_type, [TempNode(instance_type), rvalue],
[nodes.ARG_POS, nodes.ARG_POS], context)
self.msg.enable_errors()
# And now we type check the call a second time, to report errors related
# to a wrong argument count, etc.
self.expr_checker.check_call(
dunder_set_type, [TempNode(instance_type), TempNode(AnyType(TypeOfAny.special_form))],
[nodes.ARG_POS, nodes.ARG_POS], context)
if not isinstance(inferred_dunder_set_type, CallableType):
self.fail("__set__ is not callable", context)
return AnyType(TypeOfAny.from_error), True
return AnyType(TypeOfAny.from_error), get_type, True
if len(inferred_dunder_set_type.arg_types) < 2:
# A message will already have been recorded in check_call
return AnyType(TypeOfAny.from_error), False
return AnyType(TypeOfAny.from_error), get_type, False
return inferred_dunder_set_type.arg_types[1], False
set_type = inferred_dunder_set_type.arg_types[1]
# Special case: if the rvalue_type is a subtype of both the '__get__' and '__set__' types,
# and the '__get__' type is narrower than the '__set__' type, then we invoke the binder
# to narrow the type by this assignment. Technically, this is not safe, but in practice
# this is what a user expects.
rvalue_type = self.check_simple_assignment(set_type, rvalue, context)
infer = is_subtype(rvalue_type, get_type) and is_subtype(get_type, set_type)
return rvalue_type if infer else set_type, get_type, infer
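The comment above describes when the binder is now allowed to narrow a descriptor-backed attribute. A small hedged illustration with a hypothetical descriptor (meant to be run through mypy rather than executed, because of reveal_type):

    from typing import Any, Optional, Union

    class Field:
        # Hypothetical descriptor: reading yields Optional[int], writing also accepts str.
        def __get__(self, obj: Any, owner: Any) -> Optional[int]: ...
        def __set__(self, obj: Any, value: Union[int, str, None]) -> None: ...

    class Config:
        port = Field()

    cfg = Config()
    cfg.port = 8080
    # The rvalue type (int) is a subtype of the __get__ type (Optional[int]), and the
    # __get__ type is a subtype of the __set__ type, so the binder should narrow here:
    reveal_type(cfg.port)   # expected: builtins.int (Optional[int] without the narrowing)
    cfg.port = "8080"       # still accepted: matches the __set__ value type, no narrowing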
def check_indexed_assignment(self, lvalue: IndexExpr,
rvalue: Expression, context: Context) -> None:
......@@ -2566,9 +2587,9 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
method_type = self.expr_checker.analyze_external_member_access(
'__setitem__', basetype, context)
lvalue.method_type = method_type
self.expr_checker.check_call(method_type, [lvalue.index, rvalue],
[nodes.ARG_POS, nodes.ARG_POS],
context)
self.expr_checker.check_method_call(
'__setitem__', basetype, method_type, [lvalue.index, rvalue],
[nodes.ARG_POS, nodes.ARG_POS], context)
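Routing indexed assignment through check_method_call means the index and value are checked against the container's __setitem__ signature just like an ordinary method call. A small example with a hypothetical container:

    from typing import Dict

    class Registry:
        # Hypothetical container with a typed __setitem__.
        def __init__(self) -> None:
            self._data = {}  # type: Dict[str, int]
        def __setitem__(self, key: str, value: int) -> None:
            self._data[key] = value

    r = Registry()
    r["answer"] = 42     # OK: matches __setitem__(str, int)
    r["answer"] = "42"   # mypy should reject this: the value does not match the int parameter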
def try_infer_partial_type_from_indexed_assignment(
self, lvalue: IndexExpr, rvalue: Expression) -> None:
......@@ -2939,10 +2960,8 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
"""Analyse async iterable expression and return iterator and iterator item types."""
echk = self.expr_checker
iterable = echk.accept(expr)
method = echk.analyze_external_member_access('__aiter__', iterable, expr)
iterator = echk.check_call(method, [], [], expr)[0]
method = echk.analyze_external_member_access('__anext__', iterator, expr)
awaitable = echk.check_call(method, [], [], expr)[0]
iterator = echk.check_method_call_by_name('__aiter__', iterable, [], [], expr)[0]
awaitable = echk.check_method_call_by_name('__anext__', iterator, [], [], expr)[0]
item_type = echk.check_awaitable_expr(awaitable, expr,
messages.INCOMPATIBLE_TYPES_IN_ASYNC_FOR)
return iterator, item_type
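A hedged sketch of the shape this analysis expects from user code: __aiter__ produces the iterator and the awaited __anext__ result becomes the item type (hypothetical class, intended for mypy rather than execution because of reveal_type):

    class Ticker:
        # Hypothetical async iterable: __aiter__ returns the iterator and
        # __anext__ returns an awaitable of the item type.
        def __aiter__(self) -> "Ticker":
            return self
        async def __anext__(self) -> int:
            return 1

    async def consume() -> None:
        async for tick in Ticker():
            reveal_type(tick)   # expected: builtins.int, inferred as described above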
......@@ -2951,8 +2970,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
"""Analyse iterable expression and return iterator and iterator item types."""
echk = self.expr_checker
iterable = echk.accept(expr)
method = echk.analyze_external_member_access('__iter__', iterable, expr)
iterator = echk.check_call(method, [], [], expr)[0]
iterator = echk.check_method_call_by_name('__iter__', iterable, [], [], expr)[0]
if isinstance(iterable, TupleType):
joined = UninhabitedType() # type: Type
......@@ -2965,9 +2983,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
nextmethod = '__next__'
else:
nextmethod = 'next'
method = echk.analyze_external_member_access(nextmethod, iterator,
expr)
return iterator, echk.check_call(method, [], [], expr)[0]
return iterator, echk.check_method_call_by_name(nextmethod, iterator, [], [], expr)[0]
def analyze_index_variables(self, index: Expression, item_type: Type,
infer_lvalue_type: bool, context: Context) -> None:
......@@ -3067,15 +3083,14 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
infer_lvalue_type: bool) -> None:
echk = self.expr_checker
ctx = echk.accept(expr)
enter = echk.analyze_external_member_access('__aenter__', ctx, expr)
obj = echk.check_call(enter, [], [], expr)[0]
obj = echk.check_method_call_by_name('__aenter__', ctx, [], [], expr)[0]
obj = echk.check_awaitable_expr(
obj, expr, messages.INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER)
if target:
self.check_assignment(target, self.temp_node(obj, expr), infer_lvalue_type)
exit = echk.analyze_external_member_access('__aexit__', ctx, expr)
arg = self.temp_node(AnyType(TypeOfAny.special_form), expr)
res = echk.check_call(exit, [arg] * 3, [nodes.ARG_POS] * 3, expr)[0]
res = echk.check_method_call_by_name(
'__aexit__', ctx, [arg] * 3, [nodes.ARG_POS] * 3, expr)[0]
echk.check_awaitable_expr(
res, expr, messages.INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT)
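For async with, both dunder results are awaited and the awaited __aenter__ result types the as target, while __aexit__ is called with three positional arguments. A hypothetical example (for mypy, not execution):

    from types import TracebackType
    from typing import Optional, Type

    class Session:
        # Hypothetical async context manager.
        async def __aenter__(self) -> "Session":
            return self
        async def __aexit__(self, exc_type: Optional[Type[BaseException]],
                            exc: Optional[BaseException],
                            tb: Optional[TracebackType]) -> None:
            return None

    async def use() -> None:
        async with Session() as s:
            reveal_type(s)   # expected: Session, from the awaited __aenter__ return type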
......@@ -3083,13 +3098,11 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
infer_lvalue_type: bool) -> None:
echk = self.expr_checker
ctx = echk.accept(expr)
enter = echk.analyze_external_member_access('__enter__', ctx, expr)
obj = echk.check_call(enter, [], [], expr)[0]
obj = echk.check_method_call_by_name('__enter__', ctx, [], [], expr)[0]
if target:
self.check_assignment(target, self.temp_node(obj, expr), infer_lvalue_type)
exit = echk.analyze_external_member_access('__exit__', ctx, expr)
arg = self.temp_node(AnyType(TypeOfAny.special_form), expr)
echk.check_call(exit, [arg] * 3, [nodes.ARG_POS] * 3, expr)
echk.check_method_call_by_name('__exit__', ctx, [arg] * 3, [nodes.ARG_POS] * 3, expr)
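The plain with statement is checked the same way, except nothing is awaited: the __enter__ return type flows into the as target, and __exit__ only has to accept three positional arguments (they are passed as Any). A short hypothetical example for mypy:

    class Resource:
        # Hypothetical context manager.
        def __enter__(self) -> int:
            return 3
        def __exit__(self, *exc_info: object) -> None:
            return None

    with Resource() as handle:
        reveal_type(handle)   # expected: builtins.int, from the __enter__ return type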
def visit_print_stmt(self, s: PrintStmt) -> None:
for arg in s.args:
......