Clean up a lot of cruft
msullivan committed Feb 27, 2018
1 parent d844227 commit 2218c11
Showing 4 changed files with 25 additions and 72 deletions.
8 changes: 0 additions & 8 deletions mypy/build.py
@@ -389,7 +389,6 @@ def default_lib_path(data_dir: str,
CacheMeta = NamedTuple('CacheMeta',
[('id', str),
('path', str),
('memory_only', bool), # no corresponding json files (fine-grained only)
('mtime', int),
('size', int),
('hash', str),
@@ -415,7 +414,6 @@ def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta:
return CacheMeta(
meta.get('id', sentinel),
meta.get('path', sentinel),
meta.get('memory_only', False),
int(meta['mtime']) if 'mtime' in meta else sentinel,
meta.get('size', sentinel),
meta.get('hash', sentinel),
@@ -1121,12 +1119,6 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
manager.log('Metadata abandoned for {}: errors were previously ignored'.format(id))
return None

if meta.memory_only:
# Special case for fine-grained incremental mode when the JSON file is missing but
# we want to cache the module anyway.
manager.log('Memory-only metadata for {}'.format(id))
return meta

assert path is not None, "Internal error: meta was provided without a path"
# Check data_json; assume if its mtime matches it's good.
# TODO: stat() errors
70 changes: 16 additions & 54 deletions mypy/server/update.py
@@ -115,15 +114,14 @@
- Fully support multiple type checking passes
- Use mypy.fscache to access file system
- Don't use load_graph() and update the import graph incrementally
"""

import os.path
from typing import Dict, List, Set, Tuple, Iterable, Union, Optional, Mapping, NamedTuple

from mypy.build import (
BuildManager, State, BuildSource, Graph, load_graph, SavedCache, CacheMeta,
cache_meta_from_dict, find_module_clear_caches, DEBUG_FINE_GRAINED
BuildManager, State, BuildSource, Graph, load_graph, find_module_clear_caches,
DEBUG_FINE_GRAINED,
)
from mypy.checker import DeferredNode
from mypy.errors import Errors, CompileError
@@ -172,7 +171,6 @@ def __init__(self,
# this directly reflected in load_graph's interface.
self.options.cache_dir = os.devnull
manager.saved_cache = {}
self.type_maps = extract_type_maps(graph)
# Active triggers during the last update
self.triggered = [] # type: List[str]

@@ -253,6 +251,7 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
# TODO: If new module brings in other modules, we parse some files multiple times.
manager = self.manager
previous_modules = self.previous_modules
graph = self.graph

# Record symbol table snaphot of old version the changed module.
old_snapshots = {} # type: Dict[str, Dict[str, SnapshotItem]]
@@ -261,14 +260,14 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],
old_snapshots[module] = snapshot

manager.errors.reset()
result = update_single_isolated(module, path, manager, previous_modules, self.graph)
result = update_single_isolated(module, path, manager, previous_modules, graph)
if isinstance(result, BlockedUpdate):
# Blocking error -- just give up
module, path, remaining, errors = result
self.previous_modules = get_module_to_path_map(manager)
return errors, remaining, (module, path), True
assert isinstance(result, NormalUpdate) # Work around #4124
module, path, remaining, tree, graph = result
module, path, remaining, tree = result

# TODO: What to do with stale dependencies?
triggered = calculate_active_triggers(manager, old_snapshots, {module: tree})
@@ -285,20 +284,7 @@ def update_single(self, module: str, path: str) -> Tuple[List[str],

# Preserve state needed for the next update.
self.previous_targets_with_errors = manager.errors.targets()
# If deleted, module won't be in the graph.
if module in graph:
# Generate metadata so that we can reuse the AST in the next run.
graph[module].write_cache()
for id, state in graph.items():
# Look up missing ASTs from saved cache.
if state.tree is None and id in manager.saved_cache:
meta, tree, type_map = manager.saved_cache[id]
state.tree = tree
self.previous_modules = get_module_to_path_map(manager)
self.type_maps = extract_type_maps(graph)

# XXX: I want us to not need this
self.graph = graph

return manager.errors.new_messages(), remaining, (module, path), False

@@ -317,15 +303,13 @@ def get_all_dependencies(manager: BuildManager, graph: Dict[str, State],
# - Id of the changed module (can be different from the module argument)
# - Path of the changed module
# - New AST for the changed module (None if module was deleted)
# - The entire updated build graph
# - Remaining changed modules that are not processed yet as (module id, path)
# tuples (non-empty if the original changed module imported other new
# modules)
NormalUpdate = NamedTuple('NormalUpdate', [('module', str),
('path', str),
('remaining', List[Tuple[str, str]]),
('tree', Optional[MypyFile]),
('graph', Graph)])
('tree', Optional[MypyFile])])

# The result of update_single_isolated when there is a blocking error. Items
# are similar to NormalUpdate (but there are fewer).
@@ -362,10 +346,11 @@ def update_single_isolated(module: str,

old_modules = dict(manager.modules)
sources = get_sources(previous_modules, [(module, path)])
invalidate_stale_cache_entries(manager.saved_cache, graph, [(module, path)])

manager.missing_modules.clear()
try:
if module in graph:
del graph[module]
load_graph(sources, manager, graph)
except CompileError as err:
# Parse error somewhere in the program -- a blocker
@@ -383,8 +368,8 @@ def update_single_isolated(module: str,
return BlockedUpdate(err.module_with_blocker, path, remaining_modules, err.messages)

if not os.path.isfile(path):
graph = delete_module(module, graph, manager)
return NormalUpdate(module, path, [], None, graph)
delete_module(module, graph, manager)
return NormalUpdate(module, path, [], None)

# Find any other modules brought in by imports.
changed_modules = get_all_changed_modules(module, path, previous_modules, graph)
@@ -438,7 +423,7 @@ def update_single_isolated(module: str,

graph[module] = state

return NormalUpdate(module, path, remaining_modules, state.tree, graph)
return NormalUpdate(module, path, remaining_modules, state.tree)


def find_relative_leaf_module(modules: List[Tuple[str, str]], graph: Graph) -> Tuple[str, str]:
@@ -475,14 +460,13 @@ def assert_equivalent_paths(path1: str, path2: str) -> None:


def delete_module(module_id: str,
graph: Dict[str, State],
manager: BuildManager) -> Dict[str, State]:
graph: Graph,
manager: BuildManager) -> None:
manager.log_fine_grained('delete module %r' % module_id)
# TODO: Deletion of a package
# TODO: Remove deps for the module (this only affects memory use, not correctness)
new_graph = graph.copy()
if module_id in new_graph:
del new_graph[module_id]
if module_id in graph:
del graph[module_id]
if module_id in manager.modules:
del manager.modules[module_id]
if module_id in manager.saved_cache:
Expand All @@ -496,7 +480,6 @@ def delete_module(module_id: str,
parent = manager.modules[parent_id]
if components[-1] in parent.names:
del parent.names[components[-1]]
return new_graph


def dedupe_modules(modules: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
@@ -518,15 +501,10 @@ def get_sources(modules: Dict[str, str],
changed_modules: List[Tuple[str, str]]) -> List[BuildSource]:
# TODO: Race condition when reading from the file system; we should only read each
# bit of external state once during a build to have a consistent view of the world
items = sorted(modules.items(), key=lambda x: x[0])
sources = [BuildSource(path, id, None)
for id, path in items
if os.path.isfile(path)]
sources = []
for id, path in changed_modules:
if os.path.isfile(path):# and id not in modules:
if os.path.isfile(path):
sources.append(BuildSource(path, id, None))
# print(changed_modules, sources)
return sources


@@ -544,16 +522,6 @@ def get_all_changed_modules(root_module: str,
return changed_modules


def invalidate_stale_cache_entries(cache: SavedCache,
graph: Graph,
changed_modules: List[Tuple[str, str]]) -> None:
for name, _ in changed_modules:
if name in cache:
del cache[name]
if name in graph:
del graph[name]


def verify_dependencies(state: State, manager: BuildManager) -> None:
"""Report errors for import targets in module that don't exist."""
for dep in state.dependencies + state.suppressed: # TODO: ancestors?
@@ -907,11 +875,5 @@ def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNod
return [DeferredNode(node, active_class_name, active_class)]


def extract_type_maps(graph: Graph) -> Dict[str, Dict[Expression, Type]]:
# This is used to export information used only by the testmerge harness.
return {id: state.type_map() for id, state in graph.items()
if state.tree}


def is_verbose(manager: BuildManager) -> bool:
return manager.options.verbosity >= 1 or DEBUG_FINE_GRAINED
1 change: 0 additions & 1 deletion mypy/test/testcheck.py
@@ -149,7 +149,6 @@ def run_case_once(self, testcase: DataDrivenTestCase, incremental_step: int = 0)
options = parse_options(original_program_text, testcase, incremental_step)
options.use_builtins_fixtures = True
options.show_traceback = True
options.verbosity = 1
if 'optional' in testcase.file:
options.strict_optional = True
if incremental_step:
18 changes: 9 additions & 9 deletions mypy/test/testmerge.py
@@ -5,7 +5,7 @@
from typing import List, Tuple, Dict, Optional

from mypy import build
from mypy.build import BuildManager, BuildSource, State
from mypy.build import BuildManager, BuildSource, State, Graph
from mypy.errors import Errors, CompileError
from mypy.nodes import (
Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression, Var, UNBOUND_IMPORTED
@@ -77,13 +77,13 @@ def run_case(self, testcase: DataDrivenTestCase) -> None:
target_path = os.path.join(test_temp_dir, 'target.py')
shutil.copy(os.path.join(test_temp_dir, 'target.py.next'), target_path)

a.extend(self.dump(manager, kind))
a.extend(self.dump(fine_grained_manager, kind))
old_subexpr = get_subexpressions(manager.modules['target'])

a.append('==>')

new_file, new_types = self.build_increment(fine_grained_manager, 'target', target_path)
a.extend(self.dump(manager, kind))
a.extend(self.dump(fine_grained_manager, kind))

for expr in old_subexpr:
# Verify that old AST nodes are removed from the expression type map.
@@ -119,13 +119,13 @@ def build_increment(self, manager: FineGrainedBuildManager,
Dict[Expression, Type]]:
manager.update([(module_id, path)])
module = manager.manager.modules[module_id]
type_map = manager.type_maps[module_id]
type_map = manager.graph[module_id].type_map()
return module, type_map

def dump(self,
manager: BuildManager,
manager: FineGrainedBuildManager,
kind: str) -> List[str]:
modules = manager.modules
modules = manager.manager.modules
if kind == AST:
return self.dump_asts(modules)
elif kind == TYPEINFO:
@@ -203,14 +203,14 @@ def dump_typeinfo(self, info: TypeInfo) -> List[str]:
type_str_conv=self.type_str_conv)
return s.splitlines()

def dump_types(self, manager: BuildManager) -> List[str]:
def dump_types(self, manager: FineGrainedBuildManager) -> List[str]:
a = []
# To make the results repeatable, we try to generate unique and
# deterministic sort keys.
for module_id in sorted(manager.modules):
for module_id in sorted(manager.manager.modules):
if not is_dumped_module(module_id):
continue
type_map = manager.saved_cache[module_id][2]
type_map = manager.graph[module_id].type_map()
if type_map:
a.append('## {}'.format(module_id))
for expr in sorted(type_map, key=lambda n: (n.line, short_type(n),
