Variety of changes, preparing for separate repair code release #9

Merged (5 commits, May 7, 2021). The diff below shows changes from 2 commits.
BUILD: 19 changes (4 additions, 15 deletions)

@@ -1,4 +1,4 @@
-load("@bazel_python//:bazel_python.bzl", "bazel_python_interpreter")
+load("@bazel_python//:bazel_python.bzl", "bazel_python_coverage_report", "bazel_python_interpreter")

 bazel_python_interpreter(
     name = "bazel_python_venv",
@@ -36,21 +36,10 @@ genrule(
     visibility = ["//:__subpackages__"],
 )

 # For generating the coverage report.
-sh_binary(
+bazel_python_coverage_report(
     name = "coverage_report",
-    srcs = ["coverage_report.sh"],
-    deps = [":_dummy_coverage_report"],
-)
-
-# This is only to get bazel_python_venv as a data dependency for
-# coverage_report above. For some reason, this doesn't work if we directly put
-# it on the sh_binary. This is a known issue:
-# https://github.com/bazelbuild/bazel/issues/1147#issuecomment-428698802
-sh_library(
-    name = "_dummy_coverage_report",
-    srcs = ["coverage_report.sh"],
-    data = ["//:bazel_python_venv"],
+    code_paths = ["pysyrenn/*/*.py"],
+    test_paths = ["pysyrenn/*/tests/*"],
 )

 # For wheel-ifying the Python code.
Makefile: 8 changes (2 additions, 6 deletions)

@@ -1,7 +1,7 @@
 .PHONY: pysyrenn_coverage start_server acas_lines_experiment \
 	integrated_gradients_experiment linearity_hypothesis_experiment \
 	exactline_experiments toy_examples_experiment acas_planes_experiment \
-	model_checking_experiment netpatch_experiment plane_experiments \
+	model_checking_experiment plane_experiments \
 	all_experiments

 pysyrenn_coverage:
@@ -40,12 +40,8 @@ model_checking_experiment:
 	@echo "Running the Bounded Model Checking experiment"
 	bazel run experiments:model_checking

-netpatch_experiment:
-	@echo "Running the Network Patching experiment"
-	bazel run experiments:netpatch
-
 plane_experiments: toy_examples_experiment acas_planes_experiment \
-	model_checking_experiment netpatch_experiment
+	model_checking_experiment

 # Run experiments from [1] and [2]
 all_experiments: exactline_experiments plane_experiments
WORKSPACE: 2 changes (1 addition, 1 deletion)

@@ -5,7 +5,7 @@ load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")

 git_repository(
     name = "bazel_python",
-    commit = "5edcf3b97c4c77b654af177bfa27558d9b88b52f",
+    commit = "f99ab8738dced7257c97dc719457f50a601ed84c",
     remote = "https://github.com/95616ARG/bazel_python.git",
 )
coverage_report.sh: 29 changes (0 additions, 29 deletions)

This file was deleted; its role is taken over by the bazel_python_coverage_report rule added in BUILD above.

experiments/BUILD: 10 changes (0 additions, 10 deletions)

@@ -90,16 +90,6 @@ py_binary(
     ],
 )

-py_binary(
-    name = "netpatch",
-    srcs = ["netpatch.py"],
-    deps = [
-        ":acas_planes",
-        ":polar_image",
-        "//pysyrenn",
-    ],
-)
-
 py_binary(
     name = "toy_examples",
     srcs = ["toy_examples.py"],
experiments/experiment.py: 7 changes (2 additions, 5 deletions)

@@ -10,7 +10,7 @@
 import numpy as np
 from PIL import Image
 import imageio
-from pysyrenn import Network, MaskingNetwork
+from pysyrenn import Network
 from syrenn_proto import syrenn_pb2 as syrenn_pb

 class Experiment:
@@ -318,7 +318,7 @@ def write_pb(path, pb_serialized):
         elif artifact_type == "matplotlib":
             filename += ".pdf"
             artifact.savefig(filename)
-        elif artifact_type in ("network", "masking_network"):
+        elif artifact_type in "network":
             filename += ".pb"
             write_pb(filename, artifact.serialize())
         elif artifact_type == "svg":
@@ -378,9 +378,6 @@ def read_pb(path, pb_type):
         if artifact["type"] == "network":
             return Network.deserialize(
                 read_pb(artifact["path"], syrenn_pb.Network))
-        if artifact["type"] == "masking_network":
-            return MaskingNetwork.deserialize(
-                read_pb(artifact["path"], syrenn_pb.MaskingNetwork))
         raise NotImplementedError
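A note on the write_pb hunk above: the merged line `artifact_type in "network"` tests substring containment on a string, not membership in a collection, so it is broader than the tuple test it replaces. Plain Python semantics, shown for illustration:

    # `in` on a string is substring containment, not collection membership.
    assert "network" in "network"    # True: the full string matches
    assert "net" in "network"        # also True: any substring matches
    assert "network" in ("network", "masking_network")  # tuple: exact membership
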
experiments/model_checking.py: 85 changes (77 additions, 8 deletions)

@@ -1,5 +1,6 @@
 """Methods for performing bounded model checking on neural net controllers.
 """
+from collections import defaultdict
 import itertools
 from timeit import default_timer as timer
 import numpy as np
@@ -97,7 +98,7 @@ def transition_plane(cls, model, network, plane, transition_partitions):
         """
         A_ub, b_ub = cls.facet_enumeration(plane)
         resulting_planes = []
-        for pre_plane in transition_partitions:
+        for pre_plane in transition_partitions.possibly_intersecting(plane):
             pre_intersection = cls.compute_intersection(pre_plane, plane)
             if pre_intersection:
                 actions = network.compute(pre_intersection)
@@ -121,13 +122,15 @@ def in_h_rep(plane, faces):
         value = product.T + faces[:, -1]  # (n_points x n_constraints)
         return np.all(value <= 0)

-    def run_for_model(self, model_name, timeout_minutes):
+    def run_for_model(self, model_name, timeout_minutes,
+                      eval_network=None, write_name=None):
         """Runs the BMC for a particular model.
         """
         network = self.load_network("vrl_%s" % model_name)
+        eval_network = eval_network or network
         model = VRLModel(model_name)

-        out_file = self.begin_csv("%s/data" % model_name,
+        out_file = self.begin_csv(write_name or "%s/data" % model_name,
                                   ["Step", "Cumulative Time", "Counter Example?"])

         safe = model.safe_set(as_box=False)
@@ -140,9 +143,9 @@ def run_for_model(self, model_name, timeout_minutes):
         safe_transformed = network.transform_planes(disjunctive_safe,
                                                     compute_preimages=True,
                                                     include_post=False)
-        safe_transitions = []
-        for transformed_plane in safe_transformed:
-            safe_transitions.extend(transformed_plane)
+        safe_transitions = TransformerLUT([
+            vpolytope for upolytope in safe_transformed
+            for vpolytope in upolytope])
         time_taken += (timer() - start_time)

         planes = [model.init_set(as_vertices=True)]
@@ -153,12 +156,13 @@ def run_for_model(self, model_name, timeout_minutes):
             new_planes = []
             for plane in planes:
                 new_planes.extend(
-                    self.transition_plane(model, network, plane,
+                    self.transition_plane(model, eval_network, plane,
                                           safe_transitions))
-            new_planes = list(map(np.array, new_planes))
-            new_planes = [plane for plane in new_planes
+            print("Before removing in init_x/y:", len(new_planes))
+            new_planes = [np.array(plane) for plane in new_planes
                           if not self.in_h_rep(plane, init)]
+            print("After removing:", len(new_planes))
             found_bad_state = False
             for plane in new_planes:
                 if not self.in_h_rep(plane, safe):
@@ -194,5 +198,70 @@ def analyze(self):
         """
         return False

+class TransformerLUT:
+    """Stores a set of polytopes.
+
+    Optimized to quickly over-approximate the set of polytopes which may
+    intersect with a given polytope.
+    """
+    def __init__(self, polytopes=None):
+        """Initializes the TransformerLUT.
+        """
+        polytopes = polytopes or []
+        self.polytope_lut = defaultdict(set)
+        self.all_polytopes = set()
+        if polytopes:
+            self.initialize_stats(polytopes)
+        else:
+            self.grid_lb = np.array([-1., -1.])
+            self.grid_ub = np.array([1., 1.])
+            self.grid_delta = np.array([1., 1.])
+        for polytope in polytopes:
+            self.register_polytope(polytope)
+
+    def initialize_stats(self, polytopes):
+        """Sets the partitioning used by the TransformerLUT hash table.
+        """
+        self.grid_lb = np.array([np.inf, np.inf])
+        self.grid_ub = np.array([-np.inf, -np.inf])
+        deltas = []
+        self.grid_delta = np.array([np.inf, np.inf])
+        for polytope in polytopes:
+            box_lb = np.min(polytope, axis=0)
+            box_ub = np.max(polytope, axis=0)
+            self.grid_lb = np.minimum(self.grid_lb, box_lb)
+            self.grid_ub = np.maximum(self.grid_ub, box_ub)
+            deltas.append(box_ub - box_lb)
+        deltas = np.array(deltas)
+        self.grid_delta = np.percentile(deltas, 25, axis=0)
+
+    def keys_for(self, polytope):
+        """Returns the keys in the hash table corresponding to @polytope.
+        """
+        keys = set()
+        box_lb = np.min(polytope, axis=0)
+        box_ub = np.max(polytope, axis=0)
+        min_key = np.floor((box_lb - self.grid_lb) / self.grid_delta).astype(int)
+        max_key = np.ceil((box_ub - self.grid_lb) / self.grid_delta).astype(int)
+        for x in range(min_key[0], max_key[0] + 1):
+            for y in range(min_key[1], max_key[1] + 1):
+                yield (x, y)
+
+    def register_polytope(self, polytope):
+        """Adds @polytope to the TransformerLUT.
+        """
+        polytope = tuple(map(tuple, polytope))
+        self.all_polytopes.add(polytope)
+        for key in self.keys_for(polytope):
+            self.polytope_lut[key].add(polytope)
+
+    def possibly_intersecting(self, polytope):
+        """Returns a superset of the polytopes which may intersect @polytope.
+        """
+        polytopes = set()
+        for key in self.keys_for(polytope):
+            polytopes |= self.polytope_lut[key]
+        return sorted(polytopes)
+
 if __name__ == "__main__":
     ModelCheckingExperiment("model_checking").main()
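
The new TransformerLUT replaces the linear scan over `safe_transitions` in `transition_plane` with a bucketed lookup: each polytope is registered under every cell of a uniform grid that its bounding box touches, and `possibly_intersecting` unions the buckets touched by the query's bounding box. Below is a minimal usage sketch; the `triangle`, `square`, and `query` vertex arrays are invented for illustration, and the class is assumed to be in scope from the module above:

    import numpy as np

    # Hypothetical 2-D polytopes in V-representation (one vertex per row).
    triangle = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])
    square = np.array([[2.0, 2.0], [3.0, 2.0], [3.0, 3.0], [2.0, 3.0]])

    lut = TransformerLUT([triangle, square])
    # grid_delta is the 25th percentile of the registered bounding-box
    # sizes, here [1., 1.], so each polytope lands in a few grid cells.

    query = np.array([[0.1, 0.1], [0.5, 0.1], [0.1, 0.5]])
    candidates = lut.possibly_intersecting(query)
    # Only the triangle's bounding box shares a grid cell with the query's,
    # so `candidates` holds the triangle (as a tuple of vertex tuples) but
    # not the square. The result is an over-approximation: every truly
    # intersecting polytope is included, but some non-intersecting ones may
    # be too, so the exact test (compute_intersection) still runs on each.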