Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
45 commits
Select commit Hold shift + click to select a range
d126ce4
Add foundation for fine-grained incremental type checking
JukkaL Jan 20, 2017
c690fec
Attempt to fix tests on Windows
JukkaL Feb 9, 2017
38d69aa
Try to work around travis failure
JukkaL Feb 10, 2017
3b98a1d
Add missing import
JukkaL Mar 2, 2017
4f3b192
Another attempt to fix the travis build
JukkaL Mar 2, 2017
da6631c
Fix typos
JukkaL Mar 3, 2017
2799278
Support multiple rounds of event propagation
JukkaL Feb 9, 2017
28d8279
Keep track of targets that generated an error
JukkaL Feb 11, 2017
f38024b
Refactor and continue reporting error if no changes
JukkaL Mar 3, 2017
01ed71c
Fix bugs
JukkaL Mar 3, 2017
709740a
Create dependencies for inheritance
JukkaL Mar 7, 2017
3a67189
Detect differences in MRO
JukkaL Mar 7, 2017
42a77b6
Merge base classes and MRO
JukkaL Mar 7, 2017
093d17d
Add support for minimal debug output
JukkaL Mar 7, 2017
5a6eb05
Add test cases
JukkaL Mar 7, 2017
2763794
Fix to attributes, inheritance and fine-grained incremental
JukkaL Mar 7, 2017
d9cfc3f
Fix handling changes to attributes in base classes
JukkaL Mar 8, 2017
07bc54b
Fixes to dependency generation
JukkaL Mar 8, 2017
d95864b
Support classes as fine-grained incremental targets
JukkaL Mar 8, 2017
baeb421
Fix inheritance test case
JukkaL Mar 8, 2017
41c81aa
Add minimal package support
JukkaL Mar 8, 2017
82afe22
Add tests for __init__ modules
JukkaL Mar 8, 2017
e5199da
Add test cases for module attributes
JukkaL Mar 8, 2017
84b7a62
Fix test case
JukkaL Mar 9, 2017
67a54d7
Implement multiple propagation steps for module attributes
JukkaL Mar 9, 2017
54c800d
Support constructors for fine-grained incremental
JukkaL Mar 9, 2017
770cc61
Support from m import with fine-grained incremental
JukkaL Mar 9, 2017
2f0c0a7
Support nested classes with fine-grained incremental
JukkaL Mar 10, 2017
b480807
Fix merge test case
JukkaL Mar 10, 2017
0636545
Remove debug print
JukkaL Mar 10, 2017
e271844
More nested class test cases
JukkaL Mar 10, 2017
291286f
Fixes to classes with fine-grained incremental
JukkaL Mar 10, 2017
25a2846
Minor fixes
JukkaL Mar 21, 2017
2b9104c
Add review feedback
JukkaL Mar 28, 2017
ef97a75
Address more feedback and fix a bug
JukkaL Mar 29, 2017
23ca75b
Add additional debug output
JukkaL Mar 30, 2017
027f79b
Fix issues caused by rebase
JukkaL Apr 3, 2017
dbd5d67
Remove travis CI workaround
JukkaL Apr 3, 2017
a47ecb8
Fix another issue caused by the rebase
JukkaL Apr 3, 2017
ba17fe4
Fix flaky test case
JukkaL Apr 3, 2017
d3bf923
Fix flaky test by making processing order deterministic
JukkaL Apr 3, 2017
5c03d7e
Attempt to fix tests on Windows
JukkaL Apr 4, 2017
89adec4
Fix deferred lambdas
JukkaL Apr 5, 2017
287f831
Fix self check failure
JukkaL Apr 5, 2017
43d16e4
Merge branch 'master' into fine-grained
JukkaL Apr 5, 2017
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Fix bugs
* Use up-to-date type maps instead of the global type map which will
  get out of sync.

* Update dependencies of reprocessed targets.
  • Loading branch information
JukkaL committed Apr 3, 2017
commit 01ed71c932237a1eaac7a8ac05581a268c7de4c5
14 changes: 14 additions & 0 deletions mypy/server/deps.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,25 @@

def get_dependencies(prefix: str, node: Node,
                     type_map: Dict[Expression, Type]) -> Dict[str, Set[str]]:
    """Return all dependencies of a node, recursing into nested definitions.

    The result maps each trigger to the set of targets that it fires.
    """
    dep_visitor = DependencyVisitor(prefix, type_map)
    node.accept(dep_visitor)
    return dep_visitor.map


def get_dependencies_of_target(prefix: str, node: Node,
                               type_map: Dict[Expression, Type]) -> Dict[str, Set[str]]:
    """Get dependencies of a single target -- don't recurse into nested targets."""
    visitor = DependencyVisitor(prefix, type_map)
    if not isinstance(node, MypyFile):
        node.accept(visitor)
    else:
        # For a module, look only at top-level statements; classes and
        # functions defined in it are separate targets.
        for defn in node.defs:
            if not isinstance(defn, (ClassDef, FuncDef)):
                defn.accept(visitor)
    return visitor.map


class DependencyVisitor(TraverserVisitor):
def __init__(self, prefix: str, type_map: Dict[Expression, Type]) -> None:
self.stack = [prefix]
Expand Down
76 changes: 48 additions & 28 deletions mypy/server/update.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,18 +46,19 @@
- Support multiple type checking passes
"""

from typing import Dict, List, Set
from typing import Dict, List, Set, Tuple

from mypy.build import BuildManager, State
from mypy.checker import DeferredNode
from mypy.errors import Errors
from mypy.nodes import MypyFile, FuncDef, TypeInfo, Expression, SymbolNode, Var
from mypy.nodes import (
MypyFile, FuncDef, TypeInfo, Expression, SymbolNode, Var, FuncBase,
)
from mypy.types import Type
from mypy.server.astdiff import compare_symbol_tables, is_identical_type
from mypy.server.astmerge import merge_asts
from mypy.server.aststrip import strip_target
from mypy.server.deps import get_dependencies
from mypy.server.subexpr import get_subexpressions
from mypy.server.deps import get_dependencies, get_dependencies_of_target
from mypy.server.target import module_prefix
from mypy.server.trigger import make_trigger

Expand All @@ -68,7 +69,7 @@ def __init__(self,
graph: Dict[str, State]) -> None:
self.manager = manager
self.graph = graph
self.deps = get_all_dependencies(manager)
self.deps = get_all_dependencies(manager, graph)
self.previous_targets_with_errors = manager.errors.targets()

def update(self, changed_modules: List[str]) -> List[str]:
Expand All @@ -91,29 +92,31 @@ def update(self, changed_modules: List[str]) -> List[str]:
A list of errors.
"""
manager = self.manager
graph = self.graph
old_modules = dict(manager.modules)
manager.errors.reset()
new_modules = build_incremental_step(manager, changed_modules)
new_modules, new_type_maps = build_incremental_step(manager, changed_modules)
# TODO: What to do with stale dependencies?
update_dependencies(new_modules, self.deps, manager.all_types)
triggered = calculate_active_triggers(manager, old_modules, new_modules)
replace_modules_with_new_variants(manager, old_modules, new_modules)
propagate_changes_using_dependencies(manager, self.graph, self.deps, triggered,
replace_modules_with_new_variants(manager, graph, old_modules, new_modules, new_type_maps)
update_dependencies(new_modules, self.deps, graph)
propagate_changes_using_dependencies(manager, graph, self.deps, triggered,
set(changed_modules),
self.previous_targets_with_errors)
self.previous_targets_with_errors = manager.errors.targets()
return manager.errors.messages()


def get_all_dependencies(manager: BuildManager) -> Dict[str, Set[str]]:
def get_all_dependencies(manager: BuildManager, graph: Dict[str, State]) -> Dict[str, Set[str]]:
    """Return the fine-grained dependency map for an entire build.

    The scraped diff duplicated the superseded call that read
    ``manager.all_types``; only the graph-based call belongs in the
    final version, since per-module type maps from the graph stay in
    sync across incremental steps.
    """
    deps = {}  # type: Dict[str, Set[str]]
    update_dependencies(manager.modules, deps, graph)
    return deps


def build_incremental_step(manager: BuildManager,
changed_modules: List[str]) -> Dict[str, MypyFile]:
changed_modules: List[str]) -> Tuple[Dict[str, MypyFile],
Dict[str, Dict[Expression, Type]]]:
"""Build new versions of changed modules only.

Return the new ASTs for the changed modules. They will be totally
Expand Down Expand Up @@ -141,16 +144,16 @@ def build_incremental_step(manager: BuildManager,
# TODO: state.write_cache()?
# TODO: state.mark_as_rechecked()?

return {id: state.tree}
return {id: state.tree}, {id: state.type_checker.type_map}


def update_dependencies(new_modules: Dict[str, MypyFile],
                        deps: Dict[str, Set[str]],
                        graph: Dict[str, State]) -> None:
    """Merge fine-grained dependencies of the given modules into deps, in place.

    The scraped diff duplicated the superseded ``type_map`` parameter and
    call; this is the post-change version, which reads each module's own
    type map from the graph so it cannot go stale.
    """
    for id, node in new_modules.items():
        module_deps = get_dependencies(prefix=id,
                                       node=node,
                                       type_map=graph[id].type_checker.type_map)
        for trigger, targets in module_deps.items():
            deps.setdefault(trigger, set()).update(targets)

Expand All @@ -171,8 +174,10 @@ def calculate_active_triggers(manager: BuildManager,

def replace_modules_with_new_variants(
manager: BuildManager,
graph: Dict[str, State],
old_modules: Dict[str, MypyFile],
new_modules: Dict[str, MypyFile]) -> None:
new_modules: Dict[str, MypyFile],
new_type_maps: Dict[str, Dict[Expression, Type]]) -> None:
"""Replace modules with newly builds versions.

Retain the identities of externally visible AST nodes in the
Expand All @@ -183,15 +188,10 @@ def replace_modules_with_new_variants(
propagate_changes_using_dependencies).
"""
for id in new_modules:
if id in old_modules:
# Remove nodes of old modules from the type map.
all_types = manager.all_types
for expr in get_subexpressions(old_modules[id]):
if expr in all_types:
del all_types[expr]
merge_asts(old_modules[id], old_modules[id].names,
new_modules[id], new_modules[id].names)
manager.modules[id] = old_modules[id]
graph[id].type_checker.type_map = new_type_maps[id]


def propagate_changes_using_dependencies(
Expand All @@ -201,7 +201,6 @@ def propagate_changes_using_dependencies(
triggered: Set[str],
up_to_date_modules: Set[str],
targets_with_errors: Set[str]) -> None:
# TODO: Multiple propagation passes
# TODO: Multiple type checking passes
# TODO: Restrict the number of iterations to some maximum to avoid infinite loops

Expand All @@ -221,7 +220,7 @@ def propagate_changes_using_dependencies(
# TODO: Preserve order (set is not optimal)
for id, nodes in sorted(todo.items(), key=lambda x: x[0]):
assert id not in up_to_date_modules
triggered |= reprocess_nodes(manager, graph, id, nodes)
triggered |= reprocess_nodes(manager, graph, id, nodes, deps)
# Changes elsewhere may require us to reprocess modules that were
# previously considered up to date. For example, there may be a
# dependency loop that loops back to an originally processed module.
Expand Down Expand Up @@ -267,9 +266,14 @@ def find_targets_recursive(

def reprocess_nodes(manager: BuildManager,
graph: Dict[str, State],
id: str,
nodes: List[DeferredNode]) -> Set[str]:
file_node = manager.modules[id]
module_id: str,
nodes: Set[DeferredNode],
deps: Dict[str, Set[str]]) -> Set[str]:
"""Reprocess a set of nodes within a single module.

Return fired triggers.
"""
file_node = manager.modules[module_id]
for deferred in nodes:
node = deferred.node
# Strip semantic analysis information
Expand All @@ -291,7 +295,7 @@ def reprocess_nodes(manager: BuildManager,
for name, node in info.names.items()
if isinstance(node.node, Var)}
# Type check
graph[id].type_checker.check_second_pass(list(nodes)) # TODO: check return value
graph[module_id].type_checker.check_second_pass(list(nodes)) # TODO: check return value
new_triggered = set()
if info:
# Check if we need to propagate any attribute type changes further.
Expand All @@ -302,9 +306,25 @@ def reprocess_nodes(manager: BuildManager,
not is_identical_type(member_node.node.type, old_types[name]))):
# Type checking a method changed an attribute type.
new_triggered.add(make_trigger('{}.{}'.format(info.fullname(), name)))
update_deps(module_id, nodes, graph, deps)
return new_triggered


def update_deps(module_id: str,
                nodes: Set[DeferredNode],
                graph: Dict[str, State],
                deps: Dict[str, Set[str]]) -> None:
    """Recompute dependencies of reprocessed targets and merge them into deps."""
    # The type map is per-module, so it is the same for every node here.
    type_map = graph[module_id].type_checker.type_map
    for deferred in nodes:
        target = deferred.node
        # Methods are qualified by their enclosing class.
        if isinstance(target, FuncBase) and target.info:
            prefix = '{}.{}'.format(module_id, target.info.name())
        else:
            prefix = module_id
        new_deps = get_dependencies_of_target(prefix, target, type_map)
        for trigger, targets in new_deps.items():
            deps.setdefault(trigger, set()).update(targets)


def lookup_target(modules: Dict[str, MypyFile], target: str) -> DeferredNode:
"""Look up a target by fully-qualified name."""
components = target.split('.')
Expand Down
62 changes: 37 additions & 25 deletions mypy/test/testmerge.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,11 @@
from typing import List, Tuple, Dict

from mypy import build
from mypy.build import BuildManager, BuildSource
from mypy.build import BuildManager, BuildSource, State
from mypy.errors import Errors, CompileError
from mypy.nodes import Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression
from mypy.nodes import (
Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression
)
from mypy.options import Options
from mypy.server.astmerge import merge_asts
from mypy.server.subexpr import get_subexpressions
Expand Down Expand Up @@ -61,7 +63,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None:
kind = AST

main_src = '\n'.join(testcase.input)
messages, manager = self.build(main_src)
messages, manager, graph = self.build(main_src)

a = []
if messages:
Expand All @@ -70,29 +72,31 @@ def run_case(self, testcase: DataDrivenTestCase) -> None:
shutil.copy(os.path.join(test_temp_dir, 'target.py.next'),
os.path.join(test_temp_dir, 'target.py'))

a.extend(self.dump(manager.modules, manager.all_types, kind))
a.extend(self.dump(manager.modules, graph, kind))

old_modules = dict(manager.modules)
old_subexpr = get_subexpressions(old_modules['target'])

new_file = self.build_increment(manager, 'target')
new_file, new_types = self.build_increment(manager, 'target')
replace_modules_with_new_variants(manager,
graph,
old_modules,
{'target': new_file})
{'target': new_file},
{'target': new_types})

a.append('==>')
a.extend(self.dump(manager.modules, manager.all_types, kind))
a.extend(self.dump(manager.modules, graph, kind))

for expr in old_subexpr:
# Verify that old AST nodes are removed from the expression type map.
assert expr not in manager.all_types
assert expr not in new_types

assert_string_arrays_equal(
testcase.output, a,
'Invalid output ({}, line {})'.format(testcase.file,
testcase.line))

def build(self, source: str) -> Tuple[List[str], BuildManager]:
def build(self, source: str) -> Tuple[List[str], BuildManager, Dict[str, State]]:
options = Options()
options.use_builtins_fixtures = True
options.show_traceback = True
Expand All @@ -102,14 +106,18 @@ def build(self, source: str) -> Tuple[List[str], BuildManager]:
alt_lib_path=test_temp_dir)
except CompileError as e:
# TODO: Is it okay to return None?
return e.messages, None
return result.errors, result.manager

def build_increment(self, manager: BuildManager, module_id: str) -> MypyFile:
module_dict = build_incremental_step(manager, [module_id])
return module_dict[module_id]

def dump(self, modules: Dict[str, MypyFile], type_map: Dict[Expression, Type],
return e.messages, None, {}
return result.errors, result.manager, result.graph

def build_increment(self, manager: BuildManager,
                    module_id: str) -> Tuple[MypyFile,
                                             Dict[Expression, Type]]:
    # Rebuild a single changed module and return its new AST together
    # with its freshly computed expression type map.
    module_dict, type_maps = build_incremental_step(manager, [module_id])
    return module_dict[module_id], type_maps[module_id]

def dump(self,
modules: Dict[str, MypyFile],
graph: Dict[str, State],
kind: str) -> List[str]:
if kind == AST:
return self.dump_asts(modules)
Expand All @@ -118,7 +126,7 @@ def dump(self, modules: Dict[str, MypyFile], type_map: Dict[Expression, Type],
elif kind == SYMTABLE:
return self.dump_symbol_tables(modules)
elif kind == TYPES:
return self.dump_types(type_map)
return self.dump_types(graph)
assert False, 'Invalid kind %s' % kind

def dump_asts(self, modules: Dict[str, MypyFile]) -> List[str]:
Expand Down Expand Up @@ -172,14 +180,18 @@ def dump_typeinfo(self, info: TypeInfo) -> List[str]:
type_str_conv=self.type_str_conv)
return s.splitlines()

def dump_types(self, graph: Dict[str, State]) -> List[str]:
    """Dump expression types of all modules, grouped under '## <module>' headers.

    The scraped diff duplicated the superseded single-type-map body; this
    is the post-change version, which iterates per-module type maps from
    the graph.
    """
    a = []
    # To make the results repeatable, we try to generate unique and
    # deterministic sort keys.
    for module_id in sorted(graph):
        type_map = graph[module_id].type_checker.type_map
        if type_map:
            a.append('## {}'.format(module_id))
            for expr in sorted(type_map, key=lambda n: (n.line, short_type(n),
                                                        str(n) + str(type_map[n]))):
                typ = type_map[expr]
                a.append('{}:{}: {}'.format(short_type(expr),
                                            expr.line,
                                            typ.accept(self.type_str_conv)))
    return a
15 changes: 15 additions & 0 deletions test-data/unit/fine-grained.test
Original file line number Diff line number Diff line change
Expand Up @@ -364,3 +364,18 @@ def h() -> None:
main:3: error: "module" has no attribute "g"
==
main:3: error: "module" has no attribute "g"

[case testFixErrorAndReintroduce]
import m
def h() -> None:
m.g()
[file m.py]
[file m.py.2]
def g() -> None: pass
[file m.py.3]
[builtins fixtures/fine_grained.pyi]
[out]
main:3: error: "module" has no attribute "g"
==
==
main:3: error: "module" has no attribute "g"
6 changes: 6 additions & 0 deletions test-data/unit/merge.test
Original file line number Diff line number Diff line change
Expand Up @@ -383,9 +383,11 @@ def f(a: A) -> None:
a
1
[out]
## target
IntExpr:3: builtins.int<0>
NameExpr:4: target.A<1>
==>
## target
NameExpr:3: target.A<1>
IntExpr:4: builtins.int<0>

Expand All @@ -406,6 +408,7 @@ class A:
self.x = A()
self.x
[out]
## target
CallExpr:3: target.A<0>
MemberExpr:3: target.A<0>
NameExpr:3: def () -> target.A<0>
Expand All @@ -418,6 +421,7 @@ NameExpr:5: target.A<0>
MemberExpr:6: builtins.int<1>
NameExpr:6: target.A<0>
==>
## target
IntExpr:3: builtins.int<1>
MemberExpr:3: builtins.int<1>
NameExpr:3: target.A<0>
Expand All @@ -442,10 +446,12 @@ class A:
def f(self) -> A:
return self.f()
[out]
## target
CallExpr:3: target.A<0>
MemberExpr:3: def () -> target.A<0>
NameExpr:3: target.A<0>
==>
## target
CallExpr:4: target.A<0>
MemberExpr:4: def () -> target.A<0>
NameExpr:4: target.A<0>
Expand Down