diff --git a/bin/quantomatic b/bin/quantomatic deleted file mode 100755 index 3063ffe5..00000000 --- a/bin/quantomatic +++ /dev/null @@ -1,125 +0,0 @@ -#!/usr/bin/env bash - -ROOT_DIR="$(cd "$(dirname $0)"; cd ..; pwd)"; -PRG="$(basename "$0")" - -# run the quanto-core -function run_quanto_core () -{ - CMD="$ROOT_DIR/core/bin/quanto-core $@" - echo "Running: $CMD" - echo - $CMD -} - -function run_quanto_gui () -{ - CMD="export PATH=\"$ROOT_DIR/core/bin/:$PATH\" && java -jar $ROOT_DIR/gui/dist/QuantoGui.jar $@" - echo "Running: $CMD" - echo - bash -c "$CMD" -} - -function run_quanto_ml () -{ - THE_POLY_HEAP="$ROOT_DIR/core/heaps/quanto.polyml-heap" - if [ "$EMACS" != "t" ] - then - LEDIT=`which ledit || which cat`; - else - LEDIT=`which cat` - fi - echo "Using line editor: ${LEDIT}"; - CMD="( echo \"PolyML.SaveState.loadState \\\"${THE_POLY_HEAP}\\\";\"; ${LEDIT} ) | $POLYML $@" - echo "Running: $CMD" - echo - bash -c "$CMD" -} - - -# choose from a collection of things -function choosefrom () -{ - local RESULT="" - local FILE="" - - for FILE in "$@" - do - [ -z "$RESULT" -a -e "$FILE" ] && RESULT="$FILE" - done - - [ -z "$RESULT" ] && RESULT="$FILE" - echo "$RESULT" -} - -# usage message -function usage() -{ - echo - echo "Usage: $PRG TOOL [ARGS ...]" - echo - echo "where TOOL is one of:" - echo " \"gui\" to start up using the java gui, or" - echo " \"core\" to start just the quanto-core process, or" - echo " \"ml\" to start just the PolyML shell with the quanto-heap loaded." - echo " \"info\" information on settings/variables" - echo " \"help\" for usage help (this message)" - echo -} - - -function fail() -{ - echo "$1" >&2 - exit 2 -} - -POLYML_OPTIONS="" -POLYML_HOME="$($ROOT_DIR/tools/findpoly.sh)" -POLYML_BIN="$POLYML_HOME/bin" -POLYML="$POLYML_BIN/poly" -POLYML_LIB="$POLYML_HOME/lib" - -# setup ML env -DYLD_LIBRARY_PATH="$POLYML_LIB:$DYLD_LIBRARY_PATH" -LD_LIBRARY_PATH="$POLYML_LIB:$LD_LIBRARY_PATH" -DYLD_RUN_PATH="$POLYML_LIB:$LD_RUN_PATH" -LD_RUN_PATH="$POLYML_LIB:$LD_RUN_PATH" - -## check for PolyML -if [ -z "$POLYML_HOME" ]; then - fail "cannot find PolyML: POLYML_HOME=$POLYML_HOME" -## if we have arguments -elif [ "$#" -ge 1 ]; then # no errors in basic setup - if [ "$1" = "gui" ] || [ "$1" = "go" ]; then - shift - run_quanto_gui - elif [ "$1" = "core" ]; then - shift - run_quanto_core - elif [ "$1" = "ml" ]; then - shift - run_quanto_ml - elif [ "$1" = "info" ]; then - echo - echo "Quantomatic settings are: " - echo - echo " PolyML variables" - echo " ----------------" - echo " POLYML_HOME=$POLYML_HOME" - echo " POLYML_BIN=$POLYML_BIN" - echo " POLYML_LIB=$POLYML_LIB" - echo " POLYML_OPTIONS=$POLYML_OPTIONS" - echo - elif [ "$1" = "--help" ] || [ "$1" = "help" ] || [ "$1" = "-help" ] || [ "$1" = "-?" ]; then - usage - else - echo - echo "*** unkown Quantomatic tool: $1 ***" - echo "" - echo "Type \"$PRG help\" for information on using Quantomatic." - echo "" - fi -else - usage -fi \ No newline at end of file diff --git a/bin/to_dir.rb b/bin/to_dir.rb deleted file mode 100644 index 473274bf..00000000 --- a/bin/to_dir.rb +++ /dev/null @@ -1,23 +0,0 @@ -# Convert a theory XML file to a theory directory. 
- -require 'rubygems' -require 'hpricot' - -ARGV.each do |theory| - doc = Hpricot(open(theory + ".xml")) - thy = theory + ".theory" - - Dir.mkdir(thy); Dir.mkdir(thy + "/rewrites") - Dir.chdir(thy + "/rewrites") - (doc/"rule").each do |rule| - rl = (rule/"/name").inner_text - Dir.mkdir(rl) - - f = File.open(rl + "/lhs.graph",'w') - f.puts((rule/"/lhs").inner_html); f.close - - f = File.open(rl + "/rhs.graph",'w') - f.puts((rule/"/rhs").inner_html); f.close - end -end - diff --git a/console/quanto.rb b/console/quanto.rb deleted file mode 100755 index b1afcae2..00000000 --- a/console/quanto.rb +++ /dev/null @@ -1,129 +0,0 @@ -#!/usr/bin/env ruby -require 'rubygems' - -require 'yajl' -require "readline" -require "open3" - -$debug = false - -class CoreException < Exception -end - -# class ControllerModule -# def initialize(core, name) -# @name = name -# @core = core -# end - -# def help(fn=nil) -# if fn == nil -# return @core.call_function('!!', 'help', {:controller=>@core.controller,:module=>@name}) -# else -# return @core.call_function('!!', 'help', -# {:controller=>@core.controller,:module=>@name, :function=>fn}) -# end -# end - -# def method_missing(*args) -# fn = args[0].to_s -# return @core.call_function(@name, fn, args[1]) -# end -# end - -class QuantoCore - attr_accessor :controller - attr_reader :main - - def initialize(quanto, controller) - @seq = 0 - @quanto = quanto - @controller = controller - @parser = Yajl::Parser.new(:symbolize_keys=>true) - @encoder = Yajl::Encoder.new - @parser.on_parse_complete = method(:parsed_json) - @reader_thr = nil - @handlers = {} - - # modules - # @main = ControllerModule.new(self, 'Main') - end - - def start - @qin, @qout, @qerr = Open3.popen3(@quanto) - @reader_thr = Thread.new do - loop do - c = @qout.readchar.chr - #puts "[#{c}]" - @parser << c - end - end - end - - def stop - @qin.close - @qout.close - @qerr.close - end - - def parsed_json(resp) - if resp[:success] - rid = resp[:request_id] - f = @handlers[rid] - @handlers.delete(rid) - f.call(resp[:output]) - else - raise CoreException.new(resp[:message]) - end - end - - # def pull_json - # json = nil - - # loop do - # if ! @json_stack.empty? - # json = @json_stack.pop - # break - # else - # sleep 0.01 - # end - # end - - # return json - # end - - def request(obj) - # return nil if quanto has not been started - return nil if @reader_thr == nil - - @encoder.encode(obj, @qin) - @seq += 1 - - return (@seq - 1) - end - - def call_function(modl, function, input, &handler) - obj = { - :request_id => @seq, - :controller => @controller, - :module => modl, - :function => function, - :input => input - } - - @handlers[:request_id] = handler - self.request(obj) - end - - def version - self.call_function('!!', 'version') - end -end - -if $0 == 'irb' - $q = QuantoCore.new('../core/bin/quanto-core', 'red_green') - $q.start - puts "Quantomatic v#{$q.version}" -end - - diff --git a/core/Makefile b/core/Makefile deleted file mode 100644 index 3e9eedcd..00000000 --- a/core/Makefile +++ /dev/null @@ -1,4 +0,0 @@ -# build the quantomatic heap (for development setup) - -all: - cat build_heap.ML | poly \ No newline at end of file diff --git a/core/Pure/Concurrent/bash.ML b/core/Pure/Concurrent/bash.ML deleted file mode 100644 index 11ba9ed3..00000000 --- a/core/Pure/Concurrent/bash.ML +++ /dev/null @@ -1,107 +0,0 @@ -(* Title: Pure/Concurrent/bash.ML - Author: Makarius - -GNU bash processes, with propagation of interrupts. 
-*) - -signature BASH = -sig - val process: string -> {out: string, err: string, rc: int, terminate: unit -> unit} -end; - -structure Bash: BASH = -struct - -val process = uninterruptible (fn restore_attributes => fn script => - let - datatype result = Wait | Signal | Result of int; - val result = Synchronized.var "bash_result" Wait; - - val id = serial_string (); - val script_path = File.tmp_path (Path.basic ("bash_script" ^ id)); - val out_path = File.tmp_path (Path.basic ("bash_out" ^ id)); - val err_path = File.tmp_path (Path.basic ("bash_err" ^ id)); - val pid_path = File.tmp_path (Path.basic ("bash_pid" ^ id)); - - fun cleanup_files () = - (try File.rm script_path; - try File.rm out_path; - try File.rm err_path; - try File.rm pid_path); - val _ = cleanup_files (); - - val system_thread = - Simple_Thread.fork false (fn () => - Multithreading.with_attributes Multithreading.private_interrupts (fn _ => - let - val _ = File.write script_path script; - val bash_script = - "exec bash " ^ - File.shell_path script_path ^ - " > " ^ File.shell_path out_path ^ - " 2> " ^ File.shell_path err_path; - val _ = getenv_strict "EXEC_PROCESS"; - val status = - OS.Process.system - ("exec \"$EXEC_PROCESS\" " ^ File.shell_path pid_path ^ " " ^ quote bash_script); - val res = - (case Posix.Process.fromStatus status of - Posix.Process.W_EXITED => Result 0 - | Posix.Process.W_EXITSTATUS 0wx82 => Signal - | Posix.Process.W_EXITSTATUS w => Result (Word8.toInt w) - | Posix.Process.W_SIGNALED s => - if s = Posix.Signal.int then Signal - else Result (256 + LargeWord.toInt (Posix.Signal.toWord s)) - | Posix.Process.W_STOPPED s => - Result (512 + LargeWord.toInt (Posix.Signal.toWord s))); - in Synchronized.change result (K res) end - handle exn => - (Synchronized.change result (fn Wait => Signal | res => res); reraise exn))); - - fun read_pid 0 = NONE - | read_pid count = - (case (Int.fromString (File.read pid_path) handle IO.Io _ => NONE) of - NONE => (OS.Process.sleep (seconds 0.1); read_pid (count - 1)) - | some => some); - - fun terminate NONE = () - | terminate (SOME pid) = - let - val sig_test = Posix.Signal.fromWord 0w0; - - fun kill_group pid s = - (Posix.Process.kill - (Posix.Process.K_GROUP (Posix.Process.wordToPid (LargeWord.fromInt pid)), s); true) - handle OS.SysErr _ => false; - - fun kill s = (kill_group pid s; kill_group pid sig_test); - - fun multi_kill count s = - count = 0 orelse - kill s andalso (OS.Process.sleep (seconds 0.1); multi_kill (count - 1) s); - val _ = - multi_kill 10 Posix.Signal.int andalso - multi_kill 10 Posix.Signal.term andalso - multi_kill 10 Posix.Signal.kill; - in () end; - - fun cleanup () = - (Simple_Thread.interrupt_unsynchronized system_thread; - cleanup_files ()); - in - let - val _ = - restore_attributes (fn () => - Synchronized.guarded_access result (fn Wait => NONE | x => SOME ((), x))) (); - - val out = the_default "" (try File.read out_path); - val err = the_default "" (try File.read err_path); - val rc = (case Synchronized.value result of Signal => Exn.interrupt () | Result rc => rc); - val pid = read_pid 1; - val _ = cleanup (); - in {out = out, err = err, rc = rc, terminate = fn () => terminate pid} end - handle exn => (terminate (read_pid 10); cleanup (); reraise exn) - end); - -end; - diff --git a/core/Pure/Concurrent/bash_sequential.ML b/core/Pure/Concurrent/bash_sequential.ML deleted file mode 100644 index 516ec394..00000000 --- a/core/Pure/Concurrent/bash_sequential.ML +++ /dev/null @@ -1,47 +0,0 @@ -(* Title: Pure/Concurrent/bash_sequential.ML - Author: 
Makarius - -Generic GNU bash processes (no provisions to propagate interrupts, but -could work via the controlling tty). -*) - -signature BASH = -sig - val process: string -> {out: string, err: string, rc: int, terminate: unit -> unit} -end; - -structure Bash: BASH = -struct - -fun process script = - let - val id = serial_string (); - val script_path = File.tmp_path (Path.basic ("bash_script" ^ id)); - val out_path = File.tmp_path (Path.basic ("bash_out" ^ id)); - val err_path = File.tmp_path (Path.basic ("bash_err" ^ id)); - fun cleanup () = (try File.rm script_path; try File.rm out_path; try File.rm err_path); - in - let - val _ = File.write script_path script; - val status = - OS.Process.system - ("exec \"$ISABELLE_HOME/lib/scripts/process\" no_group /dev/null" ^ - " script \"exec bash " ^ File.shell_path script_path ^ - " > " ^ File.shell_path out_path ^ - " 2> " ^ File.shell_path err_path ^ "\""); - val rc = - (case Posix.Process.fromStatus status of - Posix.Process.W_EXITED => 0 - | Posix.Process.W_EXITSTATUS w => Word8.toInt w - | Posix.Process.W_SIGNALED s => 256 + LargeWord.toInt (Posix.Signal.toWord s) - | Posix.Process.W_STOPPED s => 512 + LargeWord.toInt (Posix.Signal.toWord s)); - - val out = the_default "" (try File.read out_path); - val err = the_default "" (try File.read err_path); - val _ = cleanup (); - in {out = out, err = err, rc = rc, terminate = fn () => ()} end - handle exn => (cleanup (); reraise exn) - end; - -end; - diff --git a/core/Pure/Concurrent/cache.ML b/core/Pure/Concurrent/cache.ML deleted file mode 100644 index 4f6d9a71..00000000 --- a/core/Pure/Concurrent/cache.ML +++ /dev/null @@ -1,32 +0,0 @@ -(* Title: Pure/Concurrent/cache.ML - Author: Makarius - -Concurrently cached values, with minimal locking time and singleton -evaluation due to lazy storage. -*) - -signature CACHE = -sig - val create: 'table -> ('table -> 'key -> 'value lazy option) -> - ('key * 'value lazy -> 'table -> 'table) -> ('key -> 'value) -> 'key -> 'value -end; - -structure Cache: CACHE = -struct - -fun create empty lookup update f = - let - val cache = Synchronized.var "cache" empty; - fun apply x = - Synchronized.change_result cache - (fn tab => - (case lookup tab x of - SOME y => (y, tab) - | NONE => - let val y = Lazy.lazy (fn () => f x) - in (y, update (x, y) tab) end)) - |> Lazy.force; - in apply end; - -end; - diff --git a/core/Pure/Concurrent/consumer_thread.scala b/core/Pure/Concurrent/consumer_thread.scala deleted file mode 100644 index 6cf5a431..00000000 --- a/core/Pure/Concurrent/consumer_thread.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* Title: Pure/Concurrent/consumer_thread.scala - Module: PIDE - Author: Makarius - -Consumer thread with unbounded queueing of requests, and optional -acknowledgment. 
-*/ - -package isabelle - - -import scala.annotation.tailrec - - -object Consumer_Thread -{ - def fork[A](name: String = "", daemon: Boolean = false)( - consume: A => Boolean, - finish: () => Unit = () => ()): Consumer_Thread[A] = - new Consumer_Thread[A](name, daemon, consume, finish) - - - /* internal messages */ - - private type Ack = Synchronized[Option[Exn.Result[Boolean]]] - private type Request[A] = (A, Option[Ack]) -} - -final class Consumer_Thread[A] private( - name: String, daemon: Boolean, consume: A => Boolean, finish: () => Unit) -{ - private var active = true - private val mailbox = Mailbox[Option[Consumer_Thread.Request[A]]] - - private val thread = Simple_Thread.fork(name, daemon) { main_loop(Nil) } - def is_active: Boolean = active && thread.isAlive - - private def failure(exn: Throwable): Unit = - Output.error_message( - "Consumer thread failure: " + quote(thread.getName) + "\n" + Exn.message(exn)) - - private def robust_finish(): Unit = - try { finish() } catch { case exn: Throwable => failure(exn) } - - @tailrec private def main_loop(msgs: List[Option[Consumer_Thread.Request[A]]]): Unit = - msgs match { - case Nil => main_loop(mailbox.receive(None)) - case Some((arg, ack)) :: rest => - val result = Exn.capture { consume(arg) } - val continue = - result match { - case Exn.Res(cont) => cont - case Exn.Exn(exn) => - if (!ack.isDefined) failure(exn) - true - } - ack.foreach(a => a.change(_ => Some(result))) - if (continue) main_loop(rest) else robust_finish() - case None :: _ => robust_finish() - } - - assert(is_active) - - - /* main methods */ - - private def request(x: A, ack: Option[Consumer_Thread.Ack]) - { - synchronized { - if (is_active) mailbox.send(Some((x, ack))) - else error("Consumer thread not active: " + quote(thread.getName)) - } - ack.foreach(a => - Exn.release(a.guarded_access({ case None => None case res => Some((res.get, res)) }))) - } - - def send(arg: A) { request(arg, None) } - def send_wait(arg: A) { request(arg, Some(Synchronized(None))) } - - def shutdown(): Unit = - { - synchronized { if (is_active) { active = false; mailbox.send(None) } } - thread.join - } -} diff --git a/core/Pure/Concurrent/counter.ML b/core/Pure/Concurrent/counter.ML deleted file mode 100644 index b90b16c6..00000000 --- a/core/Pure/Concurrent/counter.ML +++ /dev/null @@ -1,28 +0,0 @@ -(* Title: Pure/Concurrent/counter.ML - Author: Makarius - -Synchronized counter for unique identifiers > 0. - -NB: ML ticks forwards, JVM ticks backwards. -*) - -signature COUNTER = -sig - val make: unit -> unit -> int -end; - -structure Counter: COUNTER = -struct - -fun make () = - let - val counter = Synchronized.var "counter" (0: int); - fun next () = - Synchronized.change_result counter - (fn i => - let val j = i + (1: int) - in (j, j) end); - in next end; - -end; - diff --git a/core/Pure/Concurrent/counter.scala b/core/Pure/Concurrent/counter.scala deleted file mode 100644 index 79e72394..00000000 --- a/core/Pure/Concurrent/counter.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* Title: Pure/Concurrent/counter.scala - Module: PIDE - Author: Makarius - -Synchronized counter for unique identifiers < 0. - -NB: ML ticks forwards, JVM ticks backwards. 
-*/ - -package isabelle - - -object Counter -{ - type ID = Long - def make(): Counter = new Counter -} - -final class Counter private -{ - private var count: Counter.ID = 0 - - def apply(): Counter.ID = synchronized { - require(count > java.lang.Long.MIN_VALUE) - count -= 1 - count - } - - override def toString: String = count.toString -} - diff --git a/core/Pure/Concurrent/event_timer.ML b/core/Pure/Concurrent/event_timer.ML deleted file mode 100644 index 27193c67..00000000 --- a/core/Pure/Concurrent/event_timer.ML +++ /dev/null @@ -1,135 +0,0 @@ -(* Title: Pure/Concurrent/event_timer.ML - Author: Makarius - -Initiate event after given point in time. - -Note: events are run as synchronized action within a dedicated thread -and should finish quickly without further ado. -*) - -signature EVENT_TIMER = -sig - eqtype request - val request: Time.time -> (unit -> unit) -> request - val cancel: request -> bool - val shutdown: unit -> unit - val future: Time.time -> unit future -end; - -structure Event_Timer: EVENT_TIMER = -struct - -(* type request *) - -val request_counter = Counter.make (); -datatype request = Request of int; -fun new_request () = Request (request_counter ()); - - -(* type requests *) - -structure Requests = Table(type key = Time.time val ord = Time.compare); -type requests = (request * (unit -> unit)) list Requests.table; - -fun add_request time entry (requests: requests) = - Requests.cons_list (time, entry) requests; - -fun del_request req (requests: requests) = - let - val old_request = - requests |> Requests.get_first (fn (key, entries) => - entries |> get_first (fn entry => if fst entry = req then SOME (key, entry) else NONE)); - in - (case old_request of - NONE => (false, requests) - | SOME old => (true, Requests.remove_list (eq_fst op =) old requests)) - end; - -fun next_request_time (requests: requests) = - Option.map fst (Requests.min requests); - -fun next_request_event t0 (requests: requests) = - (case Requests.min requests of - NONE => NONE - | SOME (time, entries) => - if Time.< (t0, time) then NONE - else - let - val (rest, (_, event)) = split_last entries; - val requests' = - if null rest then Requests.delete time requests - else Requests.update (time, rest) requests; - in SOME (event, requests') end); - - -(* global state *) - -type state = requests * Thread.thread option; -val init_state: state = (Requests.empty, NONE); - -val state = Synchronized.var "Event_Timer.state" init_state; - - -(* manager thread *) - -val manager_timeout = seconds 0.3; - -fun manager_loop () = - let - val success = - Synchronized.timed_access state - (fn (requests, _) => - (case next_request_time requests of - NONE => SOME (Time.+ (Time.now (), manager_timeout)) - | some => some)) - (fn (requests, manager) => - (case next_request_event (Time.now ()) requests of - NONE => NONE - | SOME (event, requests') => (Exn.capture event (); SOME ((), (requests', manager))))); - val finished = - is_none success andalso - Synchronized.change_result state (fn (requests, manager) => - if Requests.is_empty requests then (true, init_state) - else (false, (requests, manager))); - in if finished then () else manager_loop () end; - -fun manager_check manager = - if is_some manager andalso Thread.isActive (the manager) then manager - else SOME (Simple_Thread.fork false manager_loop); - - -(* main operations *) - -fun request time event = - Synchronized.change_result state (fn (requests, manager) => - let - val req = new_request (); - val requests' = add_request time (req, event) requests; - in (req, 
(requests', manager_check manager)) end); - -fun cancel req = - Synchronized.change_result state (fn (requests, manager) => - let - val (canceled, requests') = del_request req requests; - in (canceled, (requests', manager)) end); - -fun shutdown () = - Synchronized.guarded_access state (fn (requests, manager) => - if not (Requests.is_empty requests) - then raise Fail "Cannot shutdown event timer: pending requests" - else if is_none manager then SOME ((), init_state) - else NONE); - - -(* future *) - -val future = uninterruptible (fn _ => fn time => - let - val req: request Single_Assignment.var = Single_Assignment.var "request"; - fun abort () = ignore (cancel (Single_Assignment.await req)); - val promise: unit future = Future.promise abort; - val _ = Single_Assignment.assign req (request time (Future.fulfill promise)); - in promise end); - -end; - diff --git a/core/Pure/Concurrent/event_timer.scala b/core/Pure/Concurrent/event_timer.scala deleted file mode 100644 index a60a1432..00000000 --- a/core/Pure/Concurrent/event_timer.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* Title: Pure/Concurrent/event_timer.scala - Module: PIDE - Author: Makarius - -Initiate event after given point in time. - -Note: events are run as synchronized action within a dedicated thread -and should finish quickly without further ado. -*/ - -package isabelle - - -import java.util.{Timer, TimerTask, Date} - - -object Event_Timer -{ - private lazy val event_timer = new Timer("event_timer", true) - - final class Request private[Event_Timer](val time: Time, task: TimerTask) - { - def cancel: Boolean = task.cancel - } - - def request(time: Time)(event: => Unit): Request = - { - val task = new TimerTask { def run { event } } - event_timer.schedule(task, new Date(time.ms)) - new Request(time, task) - } -} - diff --git a/core/Pure/Concurrent/future.ML b/core/Pure/Concurrent/future.ML deleted file mode 100644 index 984f8bbd..00000000 --- a/core/Pure/Concurrent/future.ML +++ /dev/null @@ -1,686 +0,0 @@ -(* Title: Pure/Concurrent/future.ML - Author: Makarius - -Value-oriented parallel execution via futures and promises. 
-*) - -signature FUTURE = -sig - type task = Task_Queue.task - type group = Task_Queue.group - val new_group: group option -> group - val worker_task: unit -> task option - val worker_group: unit -> group option - val the_worker_group: unit -> group - val worker_subgroup: unit -> group - type 'a future - val task_of: 'a future -> task - val peek: 'a future -> 'a Exn.result option - val is_finished: 'a future -> bool - val ML_statistics: bool Unsynchronized.ref - val interruptible_task: ('a -> 'b) -> 'a -> 'b - val cancel_group: group -> unit - val cancel: 'a future -> unit - val error_message: Position.T -> (serial * string) * string option -> unit - val identify_result: Position.T -> 'a Exn.result -> 'a Exn.result - type params = {name: string, group: group option, deps: task list, pri: int, interrupts: bool} - val default_params: params - val forks: params -> (unit -> 'a) list -> 'a future list - val fork: (unit -> 'a) -> 'a future - val join_results: 'a future list -> 'a Exn.result list - val join_result: 'a future -> 'a Exn.result - val joins: 'a future list -> 'a list - val join: 'a future -> 'a - val join_tasks: task list -> unit - val task_context: string -> group -> ('a -> 'b) -> 'a -> 'b - val value_result: 'a Exn.result -> 'a future - val value: 'a -> 'a future - val cond_forks: params -> (unit -> 'a) list -> 'a future list - val map: ('a -> 'b) -> 'a future -> 'b future - val promise_group: group -> (unit -> unit) -> 'a future - val promise: (unit -> unit) -> 'a future - val fulfill_result: 'a future -> 'a Exn.result -> unit - val fulfill: 'a future -> 'a -> unit - val group_snapshot: group -> task list - val terminate: group -> unit - val shutdown: unit -> unit -end; - -structure Future: FUTURE = -struct - -(** future values **) - -type task = Task_Queue.task; -type group = Task_Queue.group; -val new_group = Task_Queue.new_group; - - -(* identifiers *) - -local - val tag = Universal.tag () : task option Universal.tag; -in - fun worker_task () = the_default NONE (Thread.getLocal tag); - fun setmp_worker_task task f x = setmp_thread_data tag (worker_task ()) (SOME task) f x; -end; - -val worker_group = Option.map Task_Queue.group_of_task o worker_task; - -fun the_worker_group () = - (case worker_group () of - SOME group => group - | NONE => raise Fail "Missing worker thread context"); - -fun worker_subgroup () = new_group (worker_group ()); - -fun worker_joining e = - (case worker_task () of - NONE => e () - | SOME task => Task_Queue.joining task e); - -fun worker_waiting deps e = - (case worker_task () of - NONE => e () - | SOME task => Task_Queue.waiting task deps e); - - -(* datatype future *) - -type 'a result = 'a Exn.result Single_Assignment.var; - -datatype 'a future = Future of - {promised: bool, - task: task, - result: 'a result}; - -fun task_of (Future {task, ...}) = task; -fun result_of (Future {result, ...}) = result; - -fun peek x = Single_Assignment.peek (result_of x); -fun is_finished x = is_some (peek x); - - - -(** scheduling **) - -(* synchronization *) - -val scheduler_event = ConditionVar.conditionVar (); -val work_available = ConditionVar.conditionVar (); -val work_finished = ConditionVar.conditionVar (); - -local - val lock = Mutex.mutex (); -in - -fun SYNCHRONIZED name = Simple_Thread.synchronized name lock; - -fun wait cond = (*requires SYNCHRONIZED*) - Multithreading.sync_wait NONE NONE cond lock; - -fun wait_timeout timeout cond = (*requires SYNCHRONIZED*) - Multithreading.sync_wait NONE (SOME (Time.+ (Time.now (), timeout))) cond lock; - -fun signal 
cond = (*requires SYNCHRONIZED*) - ConditionVar.signal cond; - -fun broadcast cond = (*requires SYNCHRONIZED*) - ConditionVar.broadcast cond; - -end; - - -(* global state *) - -val queue = Unsynchronized.ref Task_Queue.empty; -val next = Unsynchronized.ref 0; -val scheduler = Unsynchronized.ref (NONE: Thread.thread option); -val canceled = Unsynchronized.ref ([]: group list); -val do_shutdown = Unsynchronized.ref false; -val max_workers = Unsynchronized.ref 0; -val max_active = Unsynchronized.ref 0; -val worker_trend = Unsynchronized.ref 0; - -val status_ticks = Unsynchronized.ref 0; -val last_round = Unsynchronized.ref Time.zeroTime; -val next_round = seconds 0.05; - -datatype worker_state = Working | Waiting | Sleeping; -val workers = Unsynchronized.ref ([]: (Thread.thread * worker_state Unsynchronized.ref) list); - -fun count_workers state = (*requires SYNCHRONIZED*) - fold (fn (_, state_ref) => fn i => if ! state_ref = state then i + 1 else i) (! workers) 0; - - - -(* status *) - -val ML_statistics = Unsynchronized.ref false; - -fun report_status () = (*requires SYNCHRONIZED*) - if ! ML_statistics then - let - val {ready, pending, running, passive} = Task_Queue.status (! queue); - val total = length (! workers); - val active = count_workers Working; - val waiting = count_workers Waiting; - val stats = - [("now", Markup.print_real (Time.toReal (Time.now ()))), - ("tasks_ready", Markup.print_int ready), - ("tasks_pending", Markup.print_int pending), - ("tasks_running", Markup.print_int running), - ("tasks_passive", Markup.print_int passive), - ("workers_total", Markup.print_int total), - ("workers_active", Markup.print_int active), - ("workers_waiting", Markup.print_int waiting)] @ - ML_Statistics.get (); - in Output.try_protocol_message (Markup.ML_statistics :: stats) [] end - else (); - - -(* cancellation primitives *) - -fun cancel_now group = (*requires SYNCHRONIZED*) - let - val running = Task_Queue.cancel (! queue) group; - val _ = running |> List.app (fn thread => - if Simple_Thread.is_self thread then () - else Simple_Thread.interrupt_unsynchronized thread); - in running end; - -fun cancel_all () = (*requires SYNCHRONIZED*) - let - val (groups, threads) = Task_Queue.cancel_all (! queue); - val _ = List.app Simple_Thread.interrupt_unsynchronized threads; - in groups end; - -fun cancel_later group = (*requires SYNCHRONIZED*) - (Unsynchronized.change canceled (insert Task_Queue.eq_group group); - broadcast scheduler_event); - -fun interruptible_task f x = - (if Multithreading.available then - Multithreading.with_attributes - (if is_some (worker_task ()) - then Multithreading.private_interrupts - else Multithreading.public_interrupts) - (fn _ => f x) - else interruptible f x) - before Multithreading.interrupted (); - - -(* worker threads *) - -fun worker_exec (task, jobs) = - let - val group = Task_Queue.group_of_task task; - val valid = not (Task_Queue.is_canceled group); - val ok = - Task_Queue.running task (fn () => - setmp_worker_task task (fn () => - fold (fn job => fn ok => job valid andalso ok) jobs true) ()); - val _ = - if ! 
Multithreading.trace >= 2 then - Output.try_protocol_message (Markup.task_statistics :: Task_Queue.task_statistics task) [] - else (); - val _ = SYNCHRONIZED "finish" (fn () => - let - val maximal = Unsynchronized.change_result queue (Task_Queue.finish task); - val test = Exn.capture Multithreading.interrupted (); - val _ = - if ok andalso not (Exn.is_interrupt_exn test) then () - else if null (cancel_now group) then () - else cancel_later group; - val _ = broadcast work_finished; - val _ = if maximal then () else signal work_available; - in () end); - in () end; - -fun worker_wait active cond = (*requires SYNCHRONIZED*) - (case AList.lookup Thread.equal (! workers) (Thread.self ()) of - SOME state => - (state := (if active then Waiting else Sleeping); - wait cond; - state := Working) - | NONE => ignore (wait cond)); - -fun worker_next () = (*requires SYNCHRONIZED*) - if length (! workers) > ! max_workers then - (Unsynchronized.change workers (AList.delete Thread.equal (Thread.self ())); - signal work_available; - NONE) - else if count_workers Working > ! max_active then - (worker_wait false work_available; worker_next ()) - else - (case Unsynchronized.change_result queue (Task_Queue.dequeue (Thread.self ())) of - NONE => (worker_wait false work_available; worker_next ()) - | some => (signal work_available; some)); - -fun worker_loop name = - (case SYNCHRONIZED name (fn () => worker_next ()) of - NONE => () - | SOME work => (worker_exec work; worker_loop name)); - -fun worker_start name = (*requires SYNCHRONIZED*) - Unsynchronized.change workers (cons (Simple_Thread.fork false (fn () => worker_loop name), - Unsynchronized.ref Working)); - - -(* scheduler *) - -fun scheduler_next () = (*requires SYNCHRONIZED*) - let - val now = Time.now (); - val tick = Time.<= (Time.+ (! last_round, next_round), now); - val _ = if tick then last_round := now else (); - - - (* runtime status *) - - val _ = - if tick then Unsynchronized.change status_ticks (fn i => i + 1) else (); - val _ = - if tick andalso ! status_ticks mod (if ! Multithreading.trace >= 1 then 2 else 10) = 0 - then report_status () else (); - - val _ = - if forall (Thread.isActive o #1) (! workers) then () - else - let - val (alive, dead) = List.partition (Thread.isActive o #1) (! workers); - val _ = workers := alive; - in - Multithreading.tracing 0 (fn () => - "SCHEDULER: disposed " ^ string_of_int (length dead) ^ " dead worker threads") - end; - - - (* worker pool adjustments *) - - val max_active0 = ! max_active; - val max_workers0 = ! max_workers; - - val m = if ! do_shutdown then 0 else Multithreading.max_threads_value (); - val _ = max_active := m; - - val mm = - if ! do_shutdown then 0 - else Int.min (Int.max (count_workers Working + 2 * count_workers Waiting, m), 4 * m); - val _ = - if tick andalso mm > ! max_workers then - Unsynchronized.change worker_trend (fn w => if w < 0 then 0 else w + 1) - else if tick andalso mm < ! max_workers then - Unsynchronized.change worker_trend (fn w => if w > 0 then 0 else w - 1) - else (); - val _ = - if mm = 0 orelse ! worker_trend > 50 orelse ! worker_trend < ~50 then - max_workers := mm - else if ! worker_trend > 5 andalso ! max_workers < 2 * m orelse ! max_workers = 0 then - max_workers := Int.min (mm, 2 * m) - else (); - - val missing = ! max_workers - length (! workers); - val _ = - if missing > 0 then - funpow missing (fn () => - ignore (worker_start ("worker " ^ string_of_int (Unsynchronized.inc next)))) () - else (); - - val _ = - if ! max_active = max_active0 andalso ! 
max_workers = max_workers0 then () - else signal work_available; - - - (* canceled groups *) - - val _ = - if null (! canceled) then () - else - (Multithreading.tracing 1 (fn () => - string_of_int (length (! canceled)) ^ " canceled groups"); - Unsynchronized.change canceled (filter_out (null o cancel_now)); - signal work_available); - - - (* delay loop *) - - val _ = Exn.release (wait_timeout next_round scheduler_event); - - - (* shutdown *) - - val _ = if Task_Queue.all_passive (! queue) then do_shutdown := true else (); - val continue = not (! do_shutdown andalso null (! workers)); - val _ = if continue then () else (report_status (); scheduler := NONE); - - val _ = broadcast scheduler_event; - in continue end - handle exn => - if Exn.is_interrupt exn then - (Multithreading.tracing 1 (fn () => "SCHEDULER: Interrupt"); - List.app cancel_later (cancel_all ()); - signal work_available; true) - else reraise exn; - -fun scheduler_loop () = - (while - Multithreading.with_attributes - (Multithreading.sync_interrupts Multithreading.public_interrupts) - (fn _ => SYNCHRONIZED "scheduler" (fn () => scheduler_next ())) - do (); last_round := Time.zeroTime); - -fun scheduler_active () = (*requires SYNCHRONIZED*) - (case ! scheduler of NONE => false | SOME thread => Thread.isActive thread); - -fun scheduler_check () = (*requires SYNCHRONIZED*) - (do_shutdown := false; - if scheduler_active () then () - else scheduler := SOME (Simple_Thread.fork false scheduler_loop)); - - - -(** futures **) - -(* cancel *) - -fun cancel_group_unsynchronized group = (*requires SYNCHRONIZED*) - let - val _ = if null (cancel_now group) then () else cancel_later group; - val _ = signal work_available; - val _ = scheduler_check (); - in () end; - -fun cancel_group group = - SYNCHRONIZED "cancel_group" (fn () => cancel_group_unsynchronized group); - -fun cancel x = cancel_group (Task_Queue.group_of_task (task_of x)); - - -(* results *) - -fun error_message pos ((serial, msg), exec_id) = - Position.setmp_thread_data pos (fn () => - let val id = Position.get_id pos in - if is_none id orelse is_none exec_id orelse id = exec_id - then Output.error_message' (serial, msg) else () - end) (); - -fun identify_result pos res = - (case res of - Exn.Exn exn => - let val exec_id = - (case Position.get_id pos of - NONE => [] - | SOME id => [(Markup.exec_idN, id)]) - in Exn.Exn (Par_Exn.identify exec_id exn) end - | _ => res); - -fun assign_result group result res = - let - val _ = Single_Assignment.assign result res - handle exn as Fail _ => - (case Single_Assignment.peek result of - SOME (Exn.Exn e) => reraise (if Exn.is_interrupt e then e else exn) - | _ => reraise exn); - val ok = - (case the (Single_Assignment.peek result) of - Exn.Exn exn => - (SYNCHRONIZED "cancel" (fn () => Task_Queue.cancel_group group exn); false) - | Exn.Res _ => true); - in ok end; - - -(* future jobs *) - -fun future_job group atts (e: unit -> 'a) = - let - val result = Single_Assignment.var "future" : 'a result; - val pos = Position.thread_data (); - fun job ok = - let - val res = - if ok then - Exn.capture (fn () => - Multithreading.with_attributes atts (fn _ => Position.setmp_thread_data pos e ())) () - else Exn.interrupt_exn; - in assign_result group result (identify_result pos res) end; - in (result, job) end; - - -(* fork *) - -type params = {name: string, group: group option, deps: task list, pri: int, interrupts: bool}; -val default_params: params = {name = "", group = NONE, deps = [], pri = 0, interrupts = true}; - -fun forks ({name, group, deps, 
pri, interrupts}: params) es = - if null es then [] - else - let - val grp = - (case group of - NONE => worker_subgroup () - | SOME grp => grp); - fun enqueue e queue = - let - val atts = - if interrupts - then Multithreading.private_interrupts - else Multithreading.no_interrupts; - val (result, job) = future_job grp atts e; - val (task, queue') = Task_Queue.enqueue name grp deps pri job queue; - val future = Future {promised = false, task = task, result = result}; - in (future, queue') end; - in - SYNCHRONIZED "enqueue" (fn () => - let - val (futures, queue') = fold_map enqueue es (! queue); - val _ = queue := queue'; - val minimal = forall (not o Task_Queue.known_task queue') deps; - val _ = if minimal then signal work_available else (); - val _ = scheduler_check (); - in futures end) - end; - -fun fork e = - (singleton o forks) {name = "fork", group = NONE, deps = [], pri = 0, interrupts = true} e; - - -(* join *) - -fun get_result x = - (case peek x of - NONE => Exn.Exn (Fail "Unfinished future") - | SOME res => - if Exn.is_interrupt_exn res then - (case Task_Queue.group_status (Task_Queue.group_of_task (task_of x)) of - [] => res - | exns => Exn.Exn (Par_Exn.make exns)) - else res); - -local - -fun join_next deps = (*requires SYNCHRONIZED*) - if null deps then NONE - else - (case Unsynchronized.change_result queue (Task_Queue.dequeue_deps (Thread.self ()) deps) of - (NONE, []) => NONE - | (NONE, deps') => - (worker_waiting deps' (fn () => worker_wait true work_finished); join_next deps') - | (SOME work, deps') => SOME (work, deps')); - -fun execute_work NONE = () - | execute_work (SOME (work, deps')) = - (worker_joining (fn () => worker_exec work); join_work deps') -and join_work deps = - Multithreading.with_attributes Multithreading.no_interrupts - (fn _ => execute_work (SYNCHRONIZED "join" (fn () => join_next deps))); - -in - -fun join_results xs = - let - val _ = - if forall is_finished xs then () - else if Multithreading.self_critical () then - raise Fail "Cannot join future values within critical section" - else if is_some (worker_task ()) then join_work (map task_of xs) - else List.app (ignore o Single_Assignment.await o result_of) xs; - in map get_result xs end; - -end; - -fun join_result x = singleton join_results x; -fun joins xs = Par_Exn.release_all (join_results xs); -fun join x = Exn.release (join_result x); - -fun join_tasks tasks = - if null tasks then () - else - (singleton o forks) - {name = "join_tasks", group = SOME (new_group NONE), - deps = tasks, pri = 0, interrupts = false} I - |> join; - - -(* task context for running thread *) - -fun task_context name group f x = - Multithreading.with_attributes Multithreading.no_interrupts (fn orig_atts => - let - val (result, job) = future_job group orig_atts (fn () => f x); - val task = - SYNCHRONIZED "enroll" (fn () => - Unsynchronized.change_result queue (Task_Queue.enroll (Thread.self ()) name group)); - val _ = worker_exec (task, [job]); - in - (case Single_Assignment.peek result of - NONE => raise Fail "Missing task context result" - | SOME res => Exn.release res) - end); - - -(* fast-path operations -- bypass task queue if possible *) - -fun value_result (res: 'a Exn.result) = - let - val task = Task_Queue.dummy_task; - val group = Task_Queue.group_of_task task; - val result = Single_Assignment.var "value" : 'a result; - val _ = assign_result group result (identify_result (Position.thread_data ()) res); - in Future {promised = false, task = task, result = result} end; - -fun value x = value_result (Exn.Res x); - -fun 
cond_forks args es = - if Multithreading.enabled () then forks args es - else map (fn e => value_result (Exn.interruptible_capture e ())) es; - -fun map_future f x = - if is_finished x then value_result (Exn.interruptible_capture (f o join) x) - else - let - val task = task_of x; - val group = Task_Queue.group_of_task task; - val (result, job) = - future_job group Multithreading.private_interrupts (fn () => f (join x)); - - val extended = SYNCHRONIZED "extend" (fn () => - (case Task_Queue.extend task job (! queue) of - SOME queue' => (queue := queue'; true) - | NONE => false)); - in - if extended then Future {promised = false, task = task, result = result} - else - (singleton o cond_forks) - {name = "map_future", group = SOME group, deps = [task], - pri = Task_Queue.pri_of_task task, interrupts = true} - (fn () => f (join x)) - end; - - -(* promised futures -- fulfilled by external means *) - -fun promise_group group abort : 'a future = - let - val result = Single_Assignment.var "promise" : 'a result; - fun assign () = assign_result group result Exn.interrupt_exn - handle Fail _ => true - | exn => - if Exn.is_interrupt exn - then raise Fail "Concurrent attempt to fulfill promise" - else reraise exn; - fun job () = - Multithreading.with_attributes Multithreading.no_interrupts - (fn _ => Exn.release (Exn.capture assign () before abort ())); - val task = SYNCHRONIZED "enqueue_passive" (fn () => - Unsynchronized.change_result queue (Task_Queue.enqueue_passive group job)); - in Future {promised = true, task = task, result = result} end; - -fun promise abort = promise_group (worker_subgroup ()) abort; - -fun fulfill_result (Future {promised, task, result}) res = - if not promised then raise Fail "Not a promised future" - else - let - val group = Task_Queue.group_of_task task; - val pos = Position.thread_data (); - fun job ok = - assign_result group result (if ok then identify_result pos res else Exn.interrupt_exn); - val _ = - Multithreading.with_attributes Multithreading.no_interrupts (fn _ => - let - val passive_job = - SYNCHRONIZED "fulfill_result" (fn () => - Unsynchronized.change_result queue - (Task_Queue.dequeue_passive (Thread.self ()) task)); - in - (case passive_job of - SOME true => worker_exec (task, [job]) - | SOME false => () - | NONE => ignore (job (not (Task_Queue.is_canceled group)))) - end); - val _ = - if is_some (Single_Assignment.peek result) then () - else worker_waiting [task] (fn () => ignore (Single_Assignment.await result)); - in () end; - -fun fulfill x res = fulfill_result x (Exn.Res res); - - -(* group snapshot *) - -fun group_snapshot group = - SYNCHRONIZED "group_snapshot" (fn () => - Task_Queue.group_tasks (! queue) group); - - -(* terminate *) - -fun terminate group = - SYNCHRONIZED "terminate" (fn () => - let val _ = cancel_group_unsynchronized group; - in Task_Queue.group_tasks (! 
queue) group end) - |> join_tasks; - - -(* shutdown *) - -fun shutdown () = - if not Multithreading.available then () - else if is_some (worker_task ()) then - raise Fail "Cannot shutdown while running as worker thread" - else - SYNCHRONIZED "shutdown" (fn () => - while scheduler_active () do - (Multithreading.tracing 1 (fn () => "SHUTDOWN: wait"); - wait scheduler_event)); - - -(*final declarations of this structure!*) -val map = map_future; - -end; - -type 'a future = 'a Future.future; - diff --git a/core/Pure/Concurrent/future.scala b/core/Pure/Concurrent/future.scala deleted file mode 100644 index 5821cc61..00000000 --- a/core/Pure/Concurrent/future.scala +++ /dev/null @@ -1,88 +0,0 @@ -/* Title: Pure/Concurrent/future.scala - Module: PIDE - Author: Makarius - -Value-oriented parallel execution via futures and promises in Scala -- with -signatures as in Isabelle/ML. -*/ - -package isabelle - - -import scala.util.{Success, Failure} -import scala.concurrent.{ExecutionContext, ExecutionContextExecutor, - Future => Scala_Future, Promise => Scala_Promise, Await} -import scala.concurrent.duration.Duration - - -object Future -{ - lazy val execution_context: ExecutionContextExecutor = - ExecutionContext.fromExecutorService(Simple_Thread.default_pool) - - def value[A](x: A): Future[A] = new Finished_Future(x) - - def fork[A](body: => A): Future[A] = - new Pending_Future(Scala_Future[A](body)(execution_context)) - - def promise[A]: Promise[A] = - new Promise_Future[A](Scala_Promise[A]) -} - -trait Future[A] -{ - def peek: Option[Exn.Result[A]] - def is_finished: Boolean = peek.isDefined - def get_finished: A = { require(is_finished); Exn.release(peek.get) } - def join: A - def map[B](f: A => B): Future[B] = Future.fork { f(join) } - - override def toString = - peek match { - case None => "" - case Some(Exn.Exn(_)) => "" - case Some(Exn.Res(x)) => x.toString - } -} - -trait Promise[A] extends Future[A] -{ - def fulfill_result(res: Exn.Result[A]): Unit - def fulfill(x: A): Unit -} - - -private class Finished_Future[A](x: A) extends Future[A] -{ - val peek: Option[Exn.Result[A]] = Some(Exn.Res(x)) - val join: A = x -} - -private class Pending_Future[A](future: Scala_Future[A]) extends Future[A] -{ - def peek: Option[Exn.Result[A]] = - future.value match { - case Some(Success(x)) => Some(Exn.Res(x)) - case Some(Failure(e)) => Some(Exn.Exn(e)) - case None => None - } - override def is_finished: Boolean = future.isCompleted - - def join: A = Await.result(future, Duration.Inf) - override def map[B](f: A => B): Future[B] = - new Pending_Future[B](future.map(f)(Future.execution_context)) -} - -private class Promise_Future[A](promise: Scala_Promise[A]) - extends Pending_Future(promise.future) with Promise[A] -{ - override def is_finished: Boolean = promise.isCompleted - - def fulfill_result(res: Exn.Result[A]): Unit = - res match { - case Exn.Res(x) => promise.success(x) - case Exn.Exn(e) => promise.failure(e) - } - def fulfill(x: A): Unit = promise.success(x) -} - diff --git a/core/Pure/Concurrent/lazy.ML b/core/Pure/Concurrent/lazy.ML deleted file mode 100644 index f03e40a4..00000000 --- a/core/Pure/Concurrent/lazy.ML +++ /dev/null @@ -1,92 +0,0 @@ -(* Title: Pure/Concurrent/lazy.ML - Author: Makarius - -Lazy evaluation with memoing of results and regular exceptions. -Parallel version based on (passive) futures, to avoid critical or -multiple evaluation (unless interrupted). 
-*) - -signature LAZY = -sig - type 'a lazy - val peek: 'a lazy -> 'a Exn.result option - val is_finished: 'a lazy -> bool - val lazy: (unit -> 'a) -> 'a lazy - val value: 'a -> 'a lazy - val force_result: 'a lazy -> 'a Exn.result - val force: 'a lazy -> 'a - val map: ('a -> 'b) -> 'a lazy -> 'b lazy - val future: Future.params -> 'a lazy -> 'a future -end; - -structure Lazy: LAZY = -struct - -(* datatype *) - -datatype 'a expr = - Expr of unit -> 'a | - Result of 'a future; - -abstype 'a lazy = Lazy of 'a expr Synchronized.var -with - -fun peek (Lazy var) = - (case Synchronized.value var of - Expr _ => NONE - | Result res => Future.peek res); - -fun lazy e = Lazy (Synchronized.var "lazy" (Expr e)); -fun value a = Lazy (Synchronized.var "lazy" (Result (Future.value a))); - -fun is_finished x = is_some (peek x); - - -(* force result *) - -fun force_result (Lazy var) = - (case peek (Lazy var) of - SOME res => res - | NONE => - uninterruptible (fn restore_attributes => fn () => - let - val (expr, x) = - Synchronized.change_result var - (fn Expr e => - let val x = Future.promise I - in ((SOME e, x), Result x) end - | Result x => ((NONE, x), Result x)); - in - (case expr of - SOME e => - let - val res0 = Exn.capture (restore_attributes e) (); - val _ = Exn.capture (fn () => Future.fulfill_result x res0) (); - val res = Future.join_result x; - (*semantic race: some other threads might see the same - interrupt, until there is a fresh start*) - val _ = - if Exn.is_interrupt_exn res then - Synchronized.change var (fn _ => Expr e) - else (); - in res end - | NONE => Exn.capture (restore_attributes (fn () => Future.join x)) ()) - end) ()); - - -end; - -fun force r = Exn.release (force_result r); -fun map f x = lazy (fn () => f (force x)); - - -(* future evaluation *) - -fun future params x = - if is_finished x then Future.value_result (force_result x) - else (singleton o Future.forks) params (fn () => force x); - -end; - -type 'a lazy = 'a Lazy.lazy; - diff --git a/core/Pure/Concurrent/lazy_sequential.ML b/core/Pure/Concurrent/lazy_sequential.ML deleted file mode 100644 index 225cd694..00000000 --- a/core/Pure/Concurrent/lazy_sequential.ML +++ /dev/null @@ -1,56 +0,0 @@ -(* Title: Pure/Concurrent/lazy_sequential.ML - Author: Florian Haftmann and Makarius, TU Muenchen - -Lazy evaluation with memoing of results and regular exceptions -(sequential version). -*) - -structure Lazy: LAZY = -struct - -(* datatype *) - -datatype 'a expr = - Expr of unit -> 'a | - Result of 'a Exn.result; - -abstype 'a lazy = Lazy of 'a expr Unsynchronized.ref -with - -fun peek (Lazy r) = - (case ! r of - Expr _ => NONE - | Result res => SOME res); - -fun lazy e = Lazy (Unsynchronized.ref (Expr e)); -fun value a = Lazy (Unsynchronized.ref (Result (Exn.Res a))); - -fun is_finished x = is_some (peek x); - - -(* force result *) - -fun force_result (Lazy r) = - let - val result = - (case ! 
r of - Expr e => Exn.capture e () - | Result res => res); - val _ = if Exn.is_interrupt_exn result then () else r := Result result; - in result end; - -fun force r = Exn.release (force_result r); -fun map f x = lazy (fn () => f (force x)); - - -(* future evaluation *) - -fun future params x = - if is_finished x then Future.value_result (force_result x) - else (singleton o Future.forks) params (fn () => force x); - -end; -end; - -type 'a lazy = 'a Lazy.lazy; - diff --git a/core/Pure/Concurrent/mailbox.ML b/core/Pure/Concurrent/mailbox.ML deleted file mode 100644 index a3ac7b5b..00000000 --- a/core/Pure/Concurrent/mailbox.ML +++ /dev/null @@ -1,35 +0,0 @@ -(* Title: Pure/Concurrent/mailbox.ML - Author: Makarius - -Message exchange via mailbox, with multiple senders (non-blocking, -unbounded buffering) and single receiver (bulk messages). -*) - -signature MAILBOX = -sig - type 'a T - val create: unit -> 'a T - val send: 'a T -> 'a -> unit - val receive: Time.time option -> 'a T -> 'a list - val await_empty: 'a T -> unit -end; - -structure Mailbox: MAILBOX = -struct - -datatype 'a T = Mailbox of 'a list Synchronized.var; - -fun create () = Mailbox (Synchronized.var "mailbox" []); - -fun send (Mailbox mailbox) msg = Synchronized.change mailbox (cons msg); - -fun receive timeout (Mailbox mailbox) = - Synchronized.timed_access mailbox - (fn _ => Option.map (fn t => (Time.+ (Time.now (), t))) timeout) - (fn [] => NONE | msgs => SOME (msgs, [])) - |> these |> rev; - -fun await_empty (Mailbox mailbox) = - Synchronized.guarded_access mailbox (fn [] => SOME ((), []) | _ => NONE); - -end; diff --git a/core/Pure/Concurrent/mailbox.scala b/core/Pure/Concurrent/mailbox.scala deleted file mode 100644 index d51b52e4..00000000 --- a/core/Pure/Concurrent/mailbox.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* Title: Pure/Concurrent/mailbox.scala - Module: PIDE - Author: Makarius - -Message exchange via mailbox, with multiple senders (non-blocking, -unbounded buffering) and single receiver (bulk messages). -*/ - -package isabelle - - -object Mailbox -{ - def apply[A]: Mailbox[A] = new Mailbox[A]() -} - - -class Mailbox[A] private() -{ - private val mailbox = Synchronized(List.empty[A]) - override def toString: String = mailbox.value.reverse.mkString("Mailbox(", ",", ")") - - def send(msg: A): Unit = mailbox.change(msg :: _) - - def receive(timeout: Option[Time]): List[A] = - (mailbox.timed_access(_ => timeout.map(t => Time.now() + t), - { case Nil => None case msgs => Some((msgs, Nil)) }) getOrElse Nil).reverse - - def await_empty: Unit = - mailbox.guarded_access({ case Nil => Some(((), Nil)) case _ => None }) -} diff --git a/core/Pure/Concurrent/par_exn.ML b/core/Pure/Concurrent/par_exn.ML deleted file mode 100644 index 4b72cd85..00000000 --- a/core/Pure/Concurrent/par_exn.ML +++ /dev/null @@ -1,79 +0,0 @@ -(* Title: Pure/Concurrent/par_exn.ML - Author: Makarius - -Parallel exceptions as flattened results from acyclic graph of -evaluations. Interrupt counts as neutral element. -*) - -signature PAR_EXN = -sig - val identify: Properties.entry list -> exn -> exn - val the_serial: exn -> int - val make: exn list -> exn - val dest: exn -> exn list option - val is_interrupted: 'a Exn.result list -> bool - val release_all: 'a Exn.result list -> 'a list - val release_first: 'a Exn.result list -> 'a list -end; - -structure Par_Exn: PAR_EXN = -struct - -(* identification via serial numbers -- NOT portable! 
*) - -fun identify default_props exn = - let - val props = Exn_Properties.get exn; - val update_serial = - if Properties.defined props Markup.serialN then [] - else [(Markup.serialN, serial_string ())]; - val update_props = filter_out (Properties.defined props o #1) default_props; - in Exn_Properties.update (update_serial @ update_props) exn end; - -fun the_serial exn = - Markup.parse_int (the (Properties.get (Exn_Properties.get exn) Markup.serialN)); - -val exn_ord = rev_order o int_ord o pairself the_serial; - - -(* parallel exceptions *) - -exception Par_Exn of exn list; - (*non-empty list with unique identified elements sorted by exn_ord; - no occurrences of Par_Exn or Exn.Interrupt*) - -fun par_exns (Par_Exn exns) = exns - | par_exns exn = if Exn.is_interrupt exn then [] else [identify [] exn]; - -fun make exns = - let - val exnss = map par_exns exns; - val exns' = Ord_List.unions exn_ord exnss handle Option.Option => flat exnss; - in if null exns' then Exn.Interrupt else Par_Exn exns' end; - -fun dest (Par_Exn exns) = SOME exns - | dest exn = if Exn.is_interrupt exn then SOME [] else NONE; - - -(* parallel results *) - -fun is_interrupted results = - exists (fn Exn.Exn _ => true | _ => false) results andalso - Exn.is_interrupt (make (map_filter Exn.get_exn results)); - -fun release_all results = - if forall (fn Exn.Res _ => true | _ => false) results - then map Exn.release results - else raise make (map_filter Exn.get_exn results); - -fun plain_exn (Exn.Res _) = NONE - | plain_exn (Exn.Exn (Par_Exn _)) = NONE - | plain_exn (Exn.Exn exn) = if Exn.is_interrupt exn then NONE else SOME exn; - -fun release_first results = - (case get_first plain_exn results of - NONE => release_all results - | SOME exn => reraise exn); - -end; - diff --git a/core/Pure/Concurrent/par_list.ML b/core/Pure/Concurrent/par_list.ML deleted file mode 100644 index 0277f485..00000000 --- a/core/Pure/Concurrent/par_list.ML +++ /dev/null @@ -1,74 +0,0 @@ -(* Title: Pure/Concurrent/par_list.ML - Author: Makarius - -Parallel list combinators. - -Notes: - - * These combinators only make sense if the operator (function or - predicate) applied to the list of operands takes considerable - time. The overhead of scheduling is significantly higher than - just traversing the list of operands sequentially. - - * The order of operator application is non-deterministic. Watch out - for operators that have side-effects or raise exceptions! 
-*) - -signature PAR_LIST = -sig - val managed_results: string -> ('a -> 'b) -> 'a list -> 'b Exn.result list - val map_name: string -> ('a -> 'b) -> 'a list -> 'b list - val map: ('a -> 'b) -> 'a list -> 'b list - val get_some: ('a -> 'b option) -> 'a list -> 'b option - val find_some: ('a -> bool) -> 'a list -> 'a option - val exists: ('a -> bool) -> 'a list -> bool - val forall: ('a -> bool) -> 'a list -> bool -end; - -structure Par_List: PAR_LIST = -struct - -fun managed_results name f xs = - if null xs orelse null (tl xs) orelse - not (Multithreading.enabled ()) orelse Multithreading.self_critical () - then map (Exn.capture f) xs - else - uninterruptible (fn restore_attributes => fn () => - let - val (group, pri) = - (case Future.worker_task () of - SOME task => - (Future.new_group (SOME (Task_Queue.group_of_task task)), Task_Queue.pri_of_task task) - | NONE => (Future.new_group NONE, 0)); - val futures = - Future.forks {name = name, group = SOME group, deps = [], pri = pri, interrupts = true} - (map (fn x => fn () => f x) xs); - val results = - restore_attributes Future.join_results futures - handle exn => - (if Exn.is_interrupt exn then Future.cancel_group group else (); reraise exn); - in results end) (); - -fun map_name name f xs = Par_Exn.release_first (managed_results name f xs); -fun map f = map_name "Par_List.map" f; - -fun get_some f xs = - let - exception FOUND of 'b option; - fun found (Exn.Exn (FOUND some)) = some - | found _ = NONE; - val results = - managed_results "Par_List.get_some" - (fn x => (case f x of NONE => () | some => raise FOUND some)) xs; - in - (case get_first found results of - SOME y => SOME y - | NONE => (Par_Exn.release_first results; NONE)) - end; - -fun find_some P = get_some (fn x => if P x then SOME x else NONE); - -fun exists P = is_some o get_some (fn x => if P x then SOME () else NONE); -fun forall P = not o exists (not o P); - -end; diff --git a/core/Pure/Concurrent/par_list_sequential.ML b/core/Pure/Concurrent/par_list_sequential.ML deleted file mode 100644 index c4c21ffa..00000000 --- a/core/Pure/Concurrent/par_list_sequential.ML +++ /dev/null @@ -1,18 +0,0 @@ -(* Title: Pure/Concurrent/par_list_sequential.ML - Author: Makarius - -Dummy version of parallel list combinators -- plain sequential evaluation. -*) - -structure Par_List: PAR_LIST = -struct - -fun managed_results _ f = map (Exn.capture f); -fun map_name _ = map; -val map = map; -val get_some = get_first; -val find_some = find_first; -val exists = exists; -val forall = forall; - -end; diff --git a/core/Pure/Concurrent/simple_thread.ML b/core/Pure/Concurrent/simple_thread.ML deleted file mode 100644 index 8c21f0b8..00000000 --- a/core/Pure/Concurrent/simple_thread.ML +++ /dev/null @@ -1,60 +0,0 @@ -(* Title: Pure/Concurrent/simple_thread.ML - Author: Makarius - -Simplified thread operations. 
-*) - -signature SIMPLE_THREAD = -sig - val is_self: Thread.thread -> bool - val attributes: bool -> Thread.threadAttribute list - val fork: bool -> (unit -> unit) -> Thread.thread - val join: Thread.thread -> unit - val interrupt_unsynchronized: Thread.thread -> unit - val synchronized: string -> Mutex.mutex -> (unit -> 'a) -> 'a -end; - -structure Simple_Thread: SIMPLE_THREAD = -struct - -fun is_self thread = Thread.equal (Thread.self (), thread); - -fun attributes interrupts = - if interrupts then Multithreading.public_interrupts else Multithreading.no_interrupts; - -fun fork interrupts body = - Thread.fork (fn () => - print_exception_trace General.exnMessage (fn () => - body () handle exn => if Exn.is_interrupt exn then () (*sic!*) else reraise exn), - attributes interrupts); - -fun join thread = - while Thread.isActive thread - do OS.Process.sleep (seconds 0.1); - -fun interrupt_unsynchronized thread = Thread.interrupt thread handle Thread _ => (); - - -(* basic synchronization *) - -fun synchronized name lock e = - if Multithreading.available then - Exn.release (uninterruptible (fn restore_attributes => fn () => - let - val immediate = - if Mutex.trylock lock then true - else - let - val _ = Multithreading.tracing 5 (fn () => name ^ ": locking ..."); - val time = Multithreading.real_time Mutex.lock lock; - val _ = Multithreading.tracing_time true time - (fn () => name ^ ": locked after " ^ Time.toString time); - in false end; - val result = Exn.capture (restore_attributes e) (); - val _ = - if immediate then () else Multithreading.tracing 5 (fn () => name ^ ": unlocking ..."); - val _ = Mutex.unlock lock; - in result end) ()) - else e (); - -end; diff --git a/core/Pure/Concurrent/simple_thread.scala b/core/Pure/Concurrent/simple_thread.scala deleted file mode 100644 index 12b91384..00000000 --- a/core/Pure/Concurrent/simple_thread.scala +++ /dev/null @@ -1,102 +0,0 @@ -/* Title: Pure/Concurrent/simple_thread.scala - Module: PIDE - Author: Makarius - -Simplified thread operations. 
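Simple_Thread.synchronized above is the mutex-protected critical section used by the Synchronized and Single_Assignment structures later in this diff: try the lock first, fall back to a traced blocking lock, and always release. A stand-alone sketch of the same shape over a plain JDK lock (Locked_Eval_Demo and locked_eval are invented names; the original additionally restores interrupt attributes and records lock timing):

import java.util.concurrent.locks.ReentrantLock

object Locked_Eval_Demo
{
  // Evaluate e while holding lock, logging only when the lock is contended.
  def locked_eval[A](name: String, lock: ReentrantLock)(e: () => A): A =
  {
    if (!lock.tryLock()) {
      Console.err.println(name + ": locking ...")
      lock.lock()
    }
    try e() finally lock.unlock()
  }

  def main(args: Array[String])
  {
    val lock = new ReentrantLock
    println(locked_eval("demo", lock)(() => 6 * 7))   // 42
  }
}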
-*/ - -package isabelle - - -import java.lang.Thread -import java.util.concurrent.{Callable, Future => JFuture} - -import scala.collection.parallel.ForkJoinTasks - - -object Simple_Thread -{ - /* plain thread */ - - def fork(name: String = "", daemon: Boolean = false)(body: => Unit): Thread = - { - val thread = - if (name == null || name == "") new Thread() { override def run = body } - else new Thread(name) { override def run = body } - thread.setDaemon(daemon) - thread.start - thread - } - - - /* future result via thread */ - - def future[A](name: String = "", daemon: Boolean = false)(body: => A): (Thread, Future[A]) = - { - val result = Future.promise[A] - val thread = fork(name, daemon) { result.fulfill_result(Exn.capture(body)) } - (thread, result) - } - - - /* thread pool */ - - lazy val default_pool = ForkJoinTasks.defaultForkJoinPool - - def submit_task[A](body: => A): JFuture[A] = - default_pool.submit(new Callable[A] { def call = body }) - - - /* delayed events */ - - final class Delay private [Simple_Thread](first: Boolean, delay: => Time, event: => Unit) - { - private var running: Option[Event_Timer.Request] = None - - private def run: Unit = - { - val do_run = synchronized { - if (running.isDefined) { running = None; true } else false - } - if (do_run) event - } - - def invoke(): Unit = synchronized - { - val new_run = - running match { - case Some(request) => if (first) false else { request.cancel; true } - case None => true - } - if (new_run) - running = Some(Event_Timer.request(Time.now() + delay)(run)) - } - - def revoke(): Unit = synchronized - { - running match { - case Some(request) => request.cancel; running = None - case None => - } - } - - def postpone(alt_delay: Time): Unit = - { - running match { - case Some(request) => - val alt_time = Time.now() + alt_delay - if (request.time < alt_time && request.cancel) { - running = Some(Event_Timer.request(alt_time)(run)) - } - case None => - } - } - } - - // delayed event after first invocation - def delay_first(delay: => Time)(event: => Unit): Delay = new Delay(true, delay, event) - - // delayed event after last invocation - def delay_last(delay: => Time)(event: => Unit): Delay = new Delay(false, delay, event) -} - diff --git a/core/Pure/Concurrent/single_assignment.ML b/core/Pure/Concurrent/single_assignment.ML deleted file mode 100644 index 8fc9552a..00000000 --- a/core/Pure/Concurrent/single_assignment.ML +++ /dev/null @@ -1,57 +0,0 @@ -(* Title: Pure/Concurrent/single_assignment.ML - Author: Makarius - -Single-assignment variables with locking/signalling. 
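The Delay constructors above implement two opposite debouncing policies: delay_first fires a fixed time after the first invocation and ignores the rest, while delay_last restarts the timer on every invocation. A hypothetical usage sketch, assuming the isabelle package being deleted here were still on the classpath (Delay_Usage, show_tooltip and autosave are invented names; not compiled against this tree):

// Not compiled here: assumes the isabelle package from this repository on the classpath.
import isabelle._

object Delay_Usage
{
  // delay_first: fires once, a fixed time after the *first* invoke; later invokes
  // inside that window are ignored.
  val show_tooltip = Simple_Thread.delay_first(Time.seconds(0.5)) { println("show tooltip") }

  // delay_last: every invoke restarts the timer, so the event fires only after the
  // *last* invoke has been quiet for the given time.
  val autosave = Simple_Thread.delay_last(Time.seconds(2.0)) { println("save buffer") }

  def main(args: Array[String])
  {
    show_tooltip.invoke(); autosave.invoke(); autosave.invoke()
    Thread.sleep(3000)   // keep the JVM alive long enough for both events
  }
}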
-*) - -signature SINGLE_ASSIGNMENT = -sig - type 'a var - val var: string -> 'a var - val peek: 'a var -> 'a option - val await: 'a var -> 'a - val assign: 'a var -> 'a -> unit -end; - -structure Single_Assignment: SINGLE_ASSIGNMENT = -struct - -abstype 'a var = Var of - {name: string, - lock: Mutex.mutex, - cond: ConditionVar.conditionVar, - var: 'a SingleAssignment.saref} -with - -fun var name = Var - {name = name, - lock = Mutex.mutex (), - cond = ConditionVar.conditionVar (), - var = SingleAssignment.saref ()}; - -fun peek (Var {var, ...}) = SingleAssignment.savalue var; - -fun await (v as Var {name, lock, cond, ...}) = - Simple_Thread.synchronized name lock (fn () => - let - fun wait () = - (case peek v of - NONE => - (case Multithreading.sync_wait NONE NONE cond lock of - Exn.Res _ => wait () - | Exn.Exn exn => reraise exn) - | SOME x => x); - in wait () end); - -fun assign (v as Var {name, lock, cond, var}) x = - Simple_Thread.synchronized name lock (fn () => - (case peek v of - SOME _ => raise Fail ("Duplicate assignment to " ^ name) - | NONE => - uninterruptible (fn _ => fn () => - (SingleAssignment.saset (var, x); ConditionVar.broadcast cond)) ())); - -end; - -end; - diff --git a/core/Pure/Concurrent/single_assignment_sequential.ML b/core/Pure/Concurrent/single_assignment_sequential.ML deleted file mode 100644 index 3afd0860..00000000 --- a/core/Pure/Concurrent/single_assignment_sequential.ML +++ /dev/null @@ -1,30 +0,0 @@ -(* Title: Pure/Concurrent/single_assignment_sequential.ML - Author: Makarius - -Single-assignment variables (sequential version). -*) - -structure Single_Assignment: SINGLE_ASSIGNMENT = -struct - -abstype 'a var = Var of 'a SingleAssignment.saref -with - -fun var _ = Var (SingleAssignment.saref ()); - -fun peek (Var var) = SingleAssignment.savalue var; - -fun await v = - (case peek v of - SOME x => x - | NONE => Thread.unavailable ()); - -fun assign (v as Var var) x = - (case peek v of - SOME _ => raise Fail "Duplicate assignment to variable" - | NONE => SingleAssignment.saset (var, x)); - -end; - -end; - diff --git a/core/Pure/Concurrent/synchronized.ML b/core/Pure/Concurrent/synchronized.ML deleted file mode 100644 index d929f3c1..00000000 --- a/core/Pure/Concurrent/synchronized.ML +++ /dev/null @@ -1,68 +0,0 @@ -(* Title: Pure/Concurrent/synchronized.ML - Author: Fabian Immler and Makarius - -Synchronized variables. -*) - -signature SYNCHRONIZED = -sig - type 'a var - val var: string -> 'a -> 'a var - val value: 'a var -> 'a - val timed_access: 'a var -> ('a -> Time.time option) -> ('a -> ('b * 'a) option) -> 'b option - val guarded_access: 'a var -> ('a -> ('b * 'a) option) -> 'b - val change_result: 'a var -> ('a -> 'b * 'a) -> 'b - val change: 'a var -> ('a -> 'a) -> unit -end; - -structure Synchronized: SYNCHRONIZED = -struct - -(* state variable *) - -abstype 'a var = Var of - {name: string, - lock: Mutex.mutex, - cond: ConditionVar.conditionVar, - var: 'a Unsynchronized.ref} -with - -fun var name x = Var - {name = name, - lock = Mutex.mutex (), - cond = ConditionVar.conditionVar (), - var = Unsynchronized.ref x}; - -fun value (Var {var, ...}) = ! var; - - -(* synchronized access *) - -fun timed_access (Var {name, lock, cond, var}) time_limit f = - Simple_Thread.synchronized name lock (fn () => - let - fun try_change () = - let val x = ! 
var in - (case f x of - NONE => - (case Multithreading.sync_wait NONE (time_limit x) cond lock of - Exn.Res true => try_change () - | Exn.Res false => NONE - | Exn.Exn exn => reraise exn) - | SOME (y, x') => - uninterruptible (fn _ => fn () => - (var := x'; ConditionVar.broadcast cond; SOME y)) ()) - end; - in try_change () end); - -fun guarded_access var f = the (timed_access var (K NONE) f); - - -(* unconditional change *) - -fun change_result var f = guarded_access var (SOME o f); -fun change var f = change_result var (fn x => ((), f x)); - -end; - -end; diff --git a/core/Pure/Concurrent/synchronized.scala b/core/Pure/Concurrent/synchronized.scala deleted file mode 100644 index 9d686475..00000000 --- a/core/Pure/Concurrent/synchronized.scala +++ /dev/null @@ -1,79 +0,0 @@ -/* Title: Pure/Concurrent/synchronized.scala - Module: PIDE - Author: Makarius - -Synchronized variables. -*/ - -package isabelle - - -import scala.annotation.tailrec - - -object Synchronized -{ - def apply[A](init: A): Synchronized[A] = new Synchronized(init) -} - - -final class Synchronized[A] private(init: A) -{ - /* state variable */ - - private var state: A = init - - def value: A = synchronized { state } - override def toString: String = value.toString - - - /* synchronized access */ - - def timed_access[B](time_limit: A => Option[Time], f: A => Option[(B, A)]): Option[B] = - synchronized { - def check(x: A): Option[B] = - f(x) match { - case None => None - case Some((y, x1)) => - state = x1 - notifyAll() - Some(y) - } - @tailrec def try_change(): Option[B] = - { - val x = state - check(x) match { - case None => - time_limit(x) match { - case Some(t) => - val timeout = (t - Time.now()).ms - if (timeout > 0L) { - wait(timeout) - check(state) - } - else None - case None => - wait() - try_change() - } - case some => some - } - } - try_change() - } - - def guarded_access[B](f: A => Option[(B, A)]): B = - timed_access(_ => None, f).get - - - /* unconditional change */ - - def change(f: A => A): Unit = synchronized { state = f(state); notifyAll() } - - def change_result[B](f: A => (B, A)): B = synchronized { - val (result, new_state) = f(state) - state = new_state - notifyAll() - result - } -} diff --git a/core/Pure/Concurrent/synchronized_sequential.ML b/core/Pure/Concurrent/synchronized_sequential.ML deleted file mode 100644 index af2a9f97..00000000 --- a/core/Pure/Concurrent/synchronized_sequential.ML +++ /dev/null @@ -1,28 +0,0 @@ -(* Title: Pure/Concurrent/synchronized_sequential.ML - Author: Makarius - -Sequential version of state variables -- plain refs. -*) - -structure Synchronized: SYNCHRONIZED = -struct - -abstype 'a var = Var of 'a Unsynchronized.ref -with - -fun var _ x = Var (Unsynchronized.ref x); -fun value (Var var) = ! var; - -fun timed_access (Var var) _ f = - (case f (! var) of - SOME (y, x') => (var := x'; SOME y) - | NONE => Thread.unavailable ()); - -fun guarded_access var f = the (timed_access var (K NONE) f); - -fun change_result var f = guarded_access var (SOME o f); -fun change var f = change_result var (fn x => ((), f x)); - -end; - -end; diff --git a/core/Pure/Concurrent/task_queue.ML b/core/Pure/Concurrent/task_queue.ML deleted file mode 100644 index 1d49441f..00000000 --- a/core/Pure/Concurrent/task_queue.ML +++ /dev/null @@ -1,385 +0,0 @@ -(* Title: Pure/Concurrent/task_queue.ML - Author: Makarius - -Ordered queue of grouped tasks. 
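The Scala Synchronized class above mirrors the ML version: guarded_access blocks on the monitor until the guard returns a result paired with the successor state, and change/change_result wake up all waiters. A hypothetical blocking-stack sketch against that API, again assuming the deleted isabelle package on the classpath (Synchronized_Queue_Demo, put and take are invented names):

// Not compiled here: assumes the isabelle package from this repository on the classpath.
import isabelle.Synchronized

object Synchronized_Queue_Demo
{
  private val stack = Synchronized[List[Int]](Nil)

  // change: unconditional update; notifyAll wakes any blocked guarded_access
  def put(x: Int): Unit = stack.change(x :: _)

  // guarded_access: block until the guard yields a result, then commit the new state
  def take(): Int =
    stack.guarded_access {
      case Nil => None
      case x :: rest => Some((x, rest))
    }

  def main(args: Array[String])
  {
    put(1); put(2)
    println(take())   // 2 (most recently pushed first)
  }
}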
-*) - -signature TASK_QUEUE = -sig - type group - val new_group: group option -> group - val group_id: group -> int - val eq_group: group * group -> bool - val cancel_group: group -> exn -> unit - val is_canceled: group -> bool - val group_status: group -> exn list - val str_of_group: group -> string - val str_of_groups: group -> string - type task - val dummy_task: task - val group_of_task: task -> group - val name_of_task: task -> string - val pri_of_task: task -> int - val str_of_task: task -> string - val str_of_task_groups: task -> string - val task_statistics: task -> Properties.T - val running: task -> (unit -> 'a) -> 'a - val joining: task -> (unit -> 'a) -> 'a - val waiting: task -> task list -> (unit -> 'a) -> 'a - type queue - val empty: queue - val group_tasks: queue -> group -> task list - val known_task: queue -> task -> bool - val all_passive: queue -> bool - val status: queue -> {ready: int, pending: int, running: int, passive: int} - val cancel: queue -> group -> Thread.thread list - val cancel_all: queue -> group list * Thread.thread list - val finish: task -> queue -> bool * queue - val enroll: Thread.thread -> string -> group -> queue -> task * queue - val enqueue_passive: group -> (unit -> bool) -> queue -> task * queue - val enqueue: string -> group -> task list -> int -> (bool -> bool) -> queue -> task * queue - val extend: task -> (bool -> bool) -> queue -> queue option - val dequeue_passive: Thread.thread -> task -> queue -> bool option * queue - val dequeue: Thread.thread -> queue -> (task * (bool -> bool) list) option * queue - val dequeue_deps: Thread.thread -> task list -> queue -> - (((task * (bool -> bool) list) option * task list) * queue) -end; - -structure Task_Queue: TASK_QUEUE = -struct - -val new_id = Counter.make (); - - -(** nested groups of tasks **) - -(* groups *) - -abstype group = Group of - {parent: group option, - id: int, - status: exn option Synchronized.var} -with - -fun make_group (parent, id, status) = Group {parent = parent, id = id, status = status}; - -fun new_group parent = make_group (parent, new_id (), Synchronized.var "group_status" NONE); - -fun group_id (Group {id, ...}) = id; -fun eq_group (group1, group2) = group_id group1 = group_id group2; - -fun fold_groups f (g as Group {parent = NONE, ...}) a = f g a - | fold_groups f (g as Group {parent = SOME group, ...}) a = fold_groups f group (f g a); - - -(* group status *) - -fun cancel_group (Group {status, ...}) exn = - Synchronized.change status - (fn exns => SOME (Par_Exn.make (exn :: the_list exns))); - -fun is_canceled (Group {parent, status, ...}) = - is_some (Synchronized.value status) orelse - (case parent of NONE => false | SOME group => is_canceled group); - -fun group_status (Group {parent, status, ...}) = - the_list (Synchronized.value status) @ - (case parent of NONE => [] | SOME group => group_status group); - -fun str_of_group group = - (is_canceled group ? enclose "(" ")") (string_of_int (group_id group)); - -fun str_of_groups group = - space_implode "/" (map str_of_group (rev (fold_groups cons group []))); - -end; - - -(* tasks *) - -type timing = Time.time * Time.time * string list; (*run, wait, wait dependencies*) - -val timing_start = (Time.zeroTime, Time.zeroTime, []): timing; - -fun new_timing () = - if ! 
Multithreading.trace < 2 then NONE - else SOME (Synchronized.var "timing" timing_start); - -abstype task = Task of - {group: group, - name: string, - id: int, - pri: int option, - timing: timing Synchronized.var option, - pos: Position.T} -with - -val dummy_task = - Task {group = new_group NONE, name = "", id = 0, pri = NONE, timing = NONE, - pos = Position.none}; - -fun new_task group name pri = - Task {group = group, name = name, id = new_id (), pri = pri, timing = new_timing (), - pos = Position.thread_data ()}; - -fun group_of_task (Task {group, ...}) = group; -fun name_of_task (Task {name, ...}) = name; -fun pri_of_task (Task {pri, ...}) = the_default 0 pri; - -fun str_of_task (Task {name, id, ...}) = - if name = "" then string_of_int id else string_of_int id ^ " (" ^ name ^ ")"; - -fun str_of_task_groups task = str_of_task task ^ " in " ^ str_of_groups (group_of_task task); - -fun update_timing update (Task {timing, ...}) e = - uninterruptible (fn restore_attributes => fn () => - let - val start = Time.now (); - val result = Exn.capture (restore_attributes e) (); - val t = Time.- (Time.now (), start); - val _ = (case timing of NONE => () | SOME var => Synchronized.change var (update t)); - in Exn.release result end) (); - -fun task_ord (Task {id = id1, pri = pri1, ...}, Task {id = id2, pri = pri2, ...}) = - prod_ord (rev_order o option_ord int_ord) int_ord ((pri1, id1), (pri2, id2)); - -fun task_statistics (Task {name, id, timing, pos, ...}) = - let - val (run, wait, wait_deps) = - (case timing of NONE => timing_start | SOME var => Synchronized.value var); - fun micros time = string_of_int (Time.toNanoseconds time div 1000); - in - [("now", Markup.print_real (Time.toReal (Time.now ()))), - ("task_name", name), ("task_id", Markup.print_int id), - ("run", micros run), ("wait", micros wait), ("wait_deps", commas wait_deps)] @ - Position.properties_of pos - end; - -end; - -structure Tasks = Table(type key = task val ord = task_ord); -structure Task_Graph = Graph(type key = task val ord = task_ord); - - -(* timing *) - -fun running task = - update_timing (fn t => fn (a, b, ds) => (Time.+ (a, t), b, ds)) task; - -fun joining task = - update_timing (fn t => fn (a, b, ds) => (Time.- (a, t), b, ds)) task; - -fun waiting task deps = - update_timing (fn t => fn (a, b, ds) => - (Time.- (a, t), Time.+ (b, t), - if ! 
Multithreading.trace > 0 - then fold (insert (op =) o name_of_task) deps ds else ds)) task; - - - -(** queue of jobs and groups **) - -(* known group members *) - -type groups = unit Tasks.table Inttab.table; - -fun get_tasks (groups: groups) gid = - the_default Tasks.empty (Inttab.lookup groups gid); - -fun add_task (gid, task) groups = - Inttab.update (gid, Tasks.update (task, ()) (get_tasks groups gid)) groups; - -fun del_task (gid, task) groups = - let val tasks = Tasks.delete_safe task (get_tasks groups gid) in - if Tasks.is_empty tasks then Inttab.delete_safe gid groups - else Inttab.update (gid, tasks) groups - end; - - -(* job dependency graph *) - -datatype job = - Job of (bool -> bool) list | - Running of Thread.thread | - Passive of unit -> bool; - -type jobs = job Task_Graph.T; - -fun get_job (jobs: jobs) task = Task_Graph.get_node jobs task; -fun set_job task job (jobs: jobs) = Task_Graph.map_node task (K job) jobs; - -fun add_job task dep (jobs: jobs) = - Task_Graph.add_edge (dep, task) jobs handle Task_Graph.UNDEF _ => jobs; - - -(* queue *) - -datatype queue = Queue of {groups: groups, jobs: jobs}; - -fun make_queue groups jobs = Queue {groups = groups, jobs = jobs}; -val empty = make_queue Inttab.empty Task_Graph.empty; - -fun group_tasks (Queue {groups, ...}) group = Tasks.keys (get_tasks groups (group_id group)); -fun known_task (Queue {jobs, ...}) task = can (Task_Graph.get_entry jobs) task; - - -(* job status *) - -fun ready_job task (Job list, (deps, _)) = - if Task_Graph.Keys.is_empty deps then SOME (task, rev list) else NONE - | ready_job task (Passive abort, (deps, _)) = - if Task_Graph.Keys.is_empty deps andalso is_canceled (group_of_task task) - then SOME (task, [fn _ => abort ()]) - else NONE - | ready_job _ _ = NONE; - -fun active_job (_, (Job _, _)) = SOME () - | active_job (_, (Running _, _)) = SOME () - | active_job (task, (Passive _, _)) = - if is_canceled (group_of_task task) then SOME () else NONE; - -fun all_passive (Queue {jobs, ...}) = is_none (Task_Graph.get_first active_job jobs); - - -(* queue status *) - -fun status (Queue {jobs, ...}) = - let - val (x, y, z, w) = - Task_Graph.fold (fn (_, (job, (deps, _))) => fn (x, y, z, w) => - (case job of - Job _ => if Task_Graph.Keys.is_empty deps then (x + 1, y, z, w) else (x, y + 1, z, w) - | Running _ => (x, y, z + 1, w) - | Passive _ => (x, y, z, w + 1))) - jobs (0, 0, 0, 0); - in {ready = x, pending = y, running = z, passive = w} end; - - - -(** task queue operations **) - -(* cancel -- peers and sub-groups *) - -fun cancel (Queue {groups, jobs}) group = - let - val _ = cancel_group group Exn.Interrupt; - val running = - Tasks.fold (fn (task, _) => - (case get_job jobs task of Running thread => insert Thread.equal thread | _ => I)) - (get_tasks groups (group_id group)) []; - in running end; - -fun cancel_all (Queue {jobs, ...}) = - let - fun cancel_job (task, (job, _)) (groups, running) = - let - val group = group_of_task task; - val _ = cancel_group group Exn.Interrupt; - in - (case job of - Running t => (insert eq_group group groups, insert Thread.equal t running) - | _ => (groups, running)) - end; - val running = Task_Graph.fold cancel_job jobs ([], []); - in running end; - - -(* finish *) - -fun finish task (Queue {groups, jobs}) = - let - val group = group_of_task task; - val groups' = fold_groups (fn g => del_task (group_id g, task)) group groups; - val jobs' = Task_Graph.del_node task jobs; - val maximal = Task_Graph.is_maximal jobs task; - in (maximal, make_queue groups' jobs') end; - - -(* 
enroll *) - -fun enroll thread name group (Queue {groups, jobs}) = - let - val task = new_task group name NONE; - val groups' = fold_groups (fn g => add_task (group_id g, task)) group groups; - val jobs' = jobs |> Task_Graph.new_node (task, Running thread); - in (task, make_queue groups' jobs') end; - - -(* enqueue *) - -fun enqueue_passive group abort (Queue {groups, jobs}) = - let - val task = new_task group "passive" NONE; - val groups' = fold_groups (fn g => add_task (group_id g, task)) group groups; - val jobs' = jobs |> Task_Graph.new_node (task, Passive abort); - in (task, make_queue groups' jobs') end; - -fun enqueue name group deps pri job (Queue {groups, jobs}) = - let - val task = new_task group name (SOME pri); - val groups' = fold_groups (fn g => add_task (group_id g, task)) group groups; - val jobs' = jobs - |> Task_Graph.new_node (task, Job [job]) - |> fold (add_job task) deps; - in (task, make_queue groups' jobs') end; - -fun extend task job (Queue {groups, jobs}) = - (case try (get_job jobs) task of - SOME (Job list) => SOME (make_queue groups (set_job task (Job (job :: list)) jobs)) - | _ => NONE); - - -(* dequeue *) - -fun dequeue_passive thread task (queue as Queue {groups, jobs}) = - (case try (get_job jobs) task of - SOME (Passive _) => - let val jobs' = set_job task (Running thread) jobs - in (SOME true, make_queue groups jobs') end - | SOME _ => (SOME false, queue) - | NONE => (NONE, queue)); - -fun dequeue thread (queue as Queue {groups, jobs}) = - (case Task_Graph.get_first (uncurry ready_job) jobs of - SOME (result as (task, _)) => - let val jobs' = set_job task (Running thread) jobs - in (SOME result, make_queue groups jobs') end - | NONE => (NONE, queue)); - - -(* dequeue wrt. dynamic dependencies *) - -fun dequeue_deps thread deps (queue as Queue {groups, jobs}) = - let - fun ready [] rest = (NONE, rev rest) - | ready (task :: tasks) rest = - (case try (Task_Graph.get_entry jobs) task of - NONE => ready tasks rest - | SOME (_, entry) => - (case ready_job task entry of - NONE => ready tasks (task :: rest) - | some => (some, fold cons rest tasks))); - - fun ready_dep _ [] = NONE - | ready_dep seen (task :: tasks) = - if Tasks.defined seen task then ready_dep seen tasks - else - let val entry as (_, (ds, _)) = #2 (Task_Graph.get_entry jobs task) in - (case ready_job task entry of - NONE => ready_dep (Tasks.update (task, ()) seen) (Task_Graph.Keys.dest ds @ tasks) - | some => some) - end; - - fun result (res as (task, _)) deps' = - let val jobs' = set_job task (Running thread) jobs - in ((SOME res, deps'), make_queue groups jobs') end; - in - (case ready deps [] of - (SOME res, deps') => result res deps' - | (NONE, deps') => - (case ready_dep Tasks.empty deps' of - SOME res => result res deps' - | NONE => ((NONE, deps'), queue))) - end; - -end; diff --git a/core/Pure/Concurrent/time_limit.ML b/core/Pure/Concurrent/time_limit.ML deleted file mode 100644 index c37cffc6..00000000 --- a/core/Pure/Concurrent/time_limit.ML +++ /dev/null @@ -1,39 +0,0 @@ -(* Title: Pure/Concurrent/time_limit.ML - Author: Makarius - -Execution with time limit (relative timeout). 
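The dequeue operations above always pick the minimal ready task under task_ord: explicitly prioritised tasks first (higher priority earlier), unprioritised tasks last, ties broken by the smaller and therefore older id. A small self-contained Scala sketch of that ordering (Task_Ord_Demo and its Task case class are invented for the illustration):

object Task_Ord_Demo
{
  final case class Task(id: Int, pri: Option[Int])

  // Same order as task_ord above: prioritised tasks first (higher priority earlier),
  // unprioritised tasks last, ties broken by the smaller (older) id.
  val task_ord: Ordering[Task] =
    Ordering.by((t: Task) => (t.pri.isEmpty, -t.pri.getOrElse(0), t.id))

  def main(args: Array[String])
  {
    val tasks = List(Task(3, None), Task(2, Some(1)), Task(1, Some(5)), Task(4, Some(5)))
    println(tasks.sorted(task_ord).map(_.id))   // List(1, 4, 2, 3)
  }
}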
-*) - -signature TIME_LIMIT = -sig - exception TimeOut - val timeLimit : Time.time -> ('a -> 'b) -> 'a -> 'b -end; - -structure TimeLimit: TIME_LIMIT = -struct - -exception TimeOut; - -fun timeLimit timeout f x = - Multithreading.with_attributes Multithreading.no_interrupts (fn orig_atts => - let - val self = Thread.self (); - - val request = - Event_Timer.request (Time.+ (Time.now (), timeout)) - (fn () => Simple_Thread.interrupt_unsynchronized self); - - val result = - Exn.capture (fn () => Multithreading.with_attributes orig_atts (fn _ => f x)) (); - - val was_timeout = not (Event_Timer.cancel request); - val test = Exn.capture Multithreading.interrupted (); - in - if was_timeout andalso (Exn.is_interrupt_exn result orelse Exn.is_interrupt_exn test) - then raise TimeOut - else (Exn.release test; Exn.release result) - end); - -end; - diff --git a/core/Pure/GUI/color_value.scala b/core/Pure/GUI/color_value.scala deleted file mode 100644 index eccc7bf2..00000000 --- a/core/Pure/GUI/color_value.scala +++ /dev/null @@ -1,48 +0,0 @@ -/* Title: Pure/GUI/color_value.scala - Module: PIDE-GUI - Author: Makarius - -Cached color values. -*/ - -package isabelle - - -import java.awt.Color - - -object Color_Value -{ - private var cache = Map.empty[String, Color] - - def parse(s: String): Color = - { - val i = java.lang.Long.parseLong(s, 16) - val r = ((i >> 24) & 0xFF).toInt - val g = ((i >> 16) & 0xFF).toInt - val b = ((i >> 8) & 0xFF).toInt - val a = (i & 0xFF).toInt - new Color(r, g, b, a) - } - - def print(c: Color): String = - { - val r = new java.lang.Integer(c.getRed) - val g = new java.lang.Integer(c.getGreen) - val b = new java.lang.Integer(c.getBlue) - val a = new java.lang.Integer(c.getAlpha) - Word.uppercase(String.format("%02x%02x%02x%02x", r, g, b, a)) - } - - def apply(s: String): Color = - synchronized { - cache.get(s) match { - case Some(c) => c - case None => - val c = parse(s) - cache += (s -> c) - c - } - } -} - diff --git a/core/Pure/GUI/gui.scala b/core/Pure/GUI/gui.scala deleted file mode 100644 index de68acaf..00000000 --- a/core/Pure/GUI/gui.scala +++ /dev/null @@ -1,236 +0,0 @@ -/* Title: Pure/GUI/gui.scala - Module: PIDE-GUI - Author: Makarius - -Basic GUI tools (for AWT/Swing). 
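Color_Value above encodes colours as eight hex digits RRGGBBAA and memoises parsed values per string. A hypothetical round trip, assuming the deleted isabelle package on the classpath (Color_Demo is an invented name):

// Not compiled here: assumes the isabelle package from this repository on the classpath.
import isabelle.Color_Value

object Color_Demo
{
  def main(args: Array[String])
  {
    val c = Color_Value("FF000080")         // red at ~50% alpha; parse results are cached
    println(c.getRed + " " + c.getAlpha)    // 255 128
    println(Color_Value.print(c))           // FF000080
  }
}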
-*/ - -package isabelle - - -import java.lang.{ClassLoader, ClassNotFoundException, NoSuchMethodException} -import java.awt.{Image, Component, Container, Toolkit, Window, Font, KeyboardFocusManager} -import java.awt.font.{TextAttribute, TransformAttribute, FontRenderContext, LineMetrics} -import java.awt.geom.AffineTransform -import javax.swing.{ImageIcon, JOptionPane, UIManager, JLayeredPane, JFrame, JWindow, JDialog, - JButton, JTextField} - -import scala.collection.convert.WrapAsJava -import scala.swing.{ComboBox, TextArea, ScrollPane} -import scala.swing.event.SelectionChanged - - -object GUI -{ - /* Swing look-and-feel */ - - def get_laf(): String = - { - if (Platform.is_windows || Platform.is_macos) - UIManager.getSystemLookAndFeelClassName() - else - UIManager.getInstalledLookAndFeels().find(_.getName == "Nimbus").map(_.getClassName) - .getOrElse(UIManager.getCrossPlatformLookAndFeelClassName()) - } - - def init_laf(): Unit = UIManager.setLookAndFeel(get_laf()) - - def is_macos_laf(): Boolean = - Platform.is_macos && - UIManager.getSystemLookAndFeelClassName() == UIManager.getLookAndFeel.getClass.getName - - - /* plain focus traversal, notably for text fields */ - - def plain_focus_traversal(component: Component) - { - val dummy_button = new JButton - def apply(id: Int): Unit = - component.setFocusTraversalKeys(id, dummy_button.getFocusTraversalKeys(id)) - apply(KeyboardFocusManager.FORWARD_TRAVERSAL_KEYS) - apply(KeyboardFocusManager.BACKWARD_TRAVERSAL_KEYS) - } - - - /* X11 window manager */ - - def window_manager(): Option[String] = - { - if (Platform.is_windows || Platform.is_macos) None - else - try { - val XWM = Class.forName("sun.awt.X11.XWM", true, ClassLoader.getSystemClassLoader) - val getWM = XWM.getDeclaredMethod("getWM") - getWM.setAccessible(true) - getWM.invoke(null) match { - case null => None - case wm => Some(wm.toString) - } - } - catch { - case _: ClassNotFoundException => None - case _: NoSuchMethodException => None - } - } - - - /* simple dialogs */ - - def scrollable_text(txt: String, width: Int = 60, height: Int = 20, editable: Boolean = false) - : ScrollPane = - { - val text = new TextArea(txt) - if (width > 0) text.columns = width - if (height > 0 && split_lines(txt).length > height) text.rows = height - text.editable = editable - new ScrollPane(text) - } - - private def simple_dialog(kind: Int, default_title: String, - parent: Component, title: String, message: Seq[Any]) - { - GUI_Thread.now { - val java_message = message map { case x: scala.swing.Component => x.peer case x => x } - JOptionPane.showMessageDialog(parent, - java_message.toArray.asInstanceOf[Array[AnyRef]], - if (title == null) default_title else title, kind) - } - } - - def dialog(parent: Component, title: String, message: Any*): Unit = - simple_dialog(JOptionPane.PLAIN_MESSAGE, null, parent, title, message) - - def warning_dialog(parent: Component, title: String, message: Any*): Unit = - simple_dialog(JOptionPane.WARNING_MESSAGE, "Warning", parent, title, message) - - def error_dialog(parent: Component, title: String, message: Any*): Unit = - simple_dialog(JOptionPane.ERROR_MESSAGE, "Error", parent, title, message) - - def confirm_dialog(parent: Component, title: String, option_type: Int, message: Any*): Int = - GUI_Thread.now { - val java_message = message map { case x: scala.swing.Component => x.peer case x => x } - JOptionPane.showConfirmDialog(parent, - java_message.toArray.asInstanceOf[Array[AnyRef]], title, - option_type, JOptionPane.QUESTION_MESSAGE) - } - - - /* zoom box */ - - 
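The simple_dialog helpers above (the zoom box code continues below) take a mixed message list, unwrapping scala.swing components to their AWT peers and rendering everything else as text, always on the GUI thread. A hypothetical call combining a plain string with a scrollable_text pane, assuming the deleted isabelle package on the classpath (Dialog_Demo is an invented name):

// Not compiled here: assumes the isabelle package from this repository on the classpath.
import isabelle.GUI

object Dialog_Demo
{
  def main(args: Array[String])
  {
    GUI.init_laf()
    GUI.warning_dialog(null, "Demo",
      "Something looks wrong:", GUI.scrollable_text("long diagnostic output ..."))
  }
}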
private val Zoom_Factor = "([0-9]+)%?".r - - abstract class Zoom_Box extends ComboBox[String]( - List("50%", "70%", "85%", "100%", "125%", "150%", "175%", "200%", "300%", "400%")) - { - def changed: Unit - def factor: Int = parse(selection.item) - - private def parse(text: String): Int = - text match { - case Zoom_Factor(s) => - val i = Integer.parseInt(s) - if (10 <= i && i < 1000) i else 100 - case _ => 100 - } - - private def print(i: Int): String = i.toString + "%" - - def set_item(i: Int) { - peer.getEditor match { - case null => - case editor => editor.setItem(print(i)) - } - } - - makeEditable()(c => new ComboBox.BuiltInEditor(c)(text => print(parse(text)), x => x)) - peer.getEditor.getEditorComponent match { - case text: JTextField => text.setColumns(4) - case _ => - } - - selection.index = 3 - - listenTo(selection) - reactions += { case SelectionChanged(_) => changed } - } - - - /* tooltip with multi-line support */ - - def tooltip_lines(text: String): String = - if (text == null || text == "") null - else "" + HTML.encode(text) + "" - - - /* screen resolution */ - - def resolution_scale(): Double = Toolkit.getDefaultToolkit.getScreenResolution.toDouble / 72 - def resolution_scale(i: Int): Int = (i.toDouble * resolution_scale()).round.toInt - - - /* icon */ - - def isabelle_icon(): ImageIcon = - new ImageIcon(getClass.getClassLoader.getResource("isabelle/isabelle_transparent-32.gif")) - - def isabelle_icons(): List[ImageIcon] = - for (icon <- List("isabelle/isabelle_transparent-32.gif", "isabelle/isabelle_transparent.gif")) - yield new ImageIcon(getClass.getClassLoader.getResource(icon)) - - def isabelle_image(): Image = isabelle_icon().getImage - - def isabelle_images(): java.util.List[Image] = - WrapAsJava.seqAsJavaList(isabelle_icons.map(_.getImage)) - - - /* component hierachy */ - - def get_parent(component: Component): Option[Container] = - component.getParent match { - case null => None - case parent => Some(parent) - } - - def ancestors(component: Component): Iterator[Container] = new Iterator[Container] { - private var next_elem = get_parent(component) - def hasNext(): Boolean = next_elem.isDefined - def next(): Container = - next_elem match { - case Some(parent) => - next_elem = get_parent(parent) - parent - case None => Iterator.empty.next() - } - } - - def parent_window(component: Component): Option[Window] = - ancestors(component).collectFirst({ case x: Window => x }) - - def layered_pane(component: Component): Option[JLayeredPane] = - parent_window(component) match { - case Some(w: JWindow) => Some(w.getLayeredPane) - case Some(w: JFrame) => Some(w.getLayeredPane) - case Some(w: JDialog) => Some(w.getLayeredPane) - case _ => None - } - - - /* font operations */ - - def font_metrics(font: Font): LineMetrics = - font.getLineMetrics("", new FontRenderContext(null, false, false)) - - def imitate_font(family: String, font: Font, scale: Double = 1.0): Font = - { - val font1 = new Font(family, font.getStyle, font.getSize) - val size = scale * (font_metrics(font).getAscent / font_metrics(font1).getAscent * font.getSize) - font1.deriveFont(size.round.toInt) - } - - def transform_font(font: Font, transform: AffineTransform): Font = - { - import scala.collection.JavaConversions._ - font.deriveFont(Map(TextAttribute.TRANSFORM -> new TransformAttribute(transform))) - } -} - diff --git a/core/Pure/GUI/gui_thread.scala b/core/Pure/GUI/gui_thread.scala deleted file mode 100644 index f50002ba..00000000 --- a/core/Pure/GUI/gui_thread.scala +++ /dev/null @@ -1,57 +0,0 @@ -/* Title: 
Pure/GUI/gui_thread.scala - Module: PIDE-GUI - Author: Makarius - -Evaluation within the GUI thread (for AWT/Swing). -*/ - -package isabelle - - -import javax.swing.SwingUtilities - - -object GUI_Thread -{ - /* context check */ - - def assert[A](body: => A) = - { - Predef.assert(SwingUtilities.isEventDispatchThread()) - body - } - - def require[A](body: => A) = - { - Predef.require(SwingUtilities.isEventDispatchThread()) - body - } - - - /* event dispatch queue */ - - def now[A](body: => A): A = - { - if (SwingUtilities.isEventDispatchThread()) body - else { - lazy val result = { assert { Exn.capture(body) } } - SwingUtilities.invokeAndWait(new Runnable { def run = result }) - Exn.release(result) - } - } - - def later(body: => Unit) - { - if (SwingUtilities.isEventDispatchThread()) body - else SwingUtilities.invokeLater(new Runnable { def run = body }) - } - - - /* delayed events */ - - def delay_first(delay: => Time)(event: => Unit): Simple_Thread.Delay = - Simple_Thread.delay_first(delay) { later { event } } - - def delay_last(delay: => Time)(event: => Unit): Simple_Thread.Delay = - Simple_Thread.delay_last(delay) { later { event } } -} diff --git a/core/Pure/GUI/html5_panel.scala b/core/Pure/GUI/html5_panel.scala deleted file mode 100644 index 243ab1db..00000000 --- a/core/Pure/GUI/html5_panel.scala +++ /dev/null @@ -1,79 +0,0 @@ -/* Title: Pure/GUI/html5_panel.scala - Module: PIDE-GUI - Author: Makarius - -HTML5 panel based on Java FX WebView. -*/ - -package isabelle - - -import javafx.scene.Scene -import javafx.scene.web.{WebView, WebEngine} -import javafx.scene.input.KeyEvent -import javafx.scene.text.FontSmoothingType -import javafx.scene.layout.{HBox, VBox, Priority} -import javafx.geometry.{HPos, VPos, Insets} -import javafx.event.EventHandler - - -// see http://netbeans.org/bugzilla/show_bug.cgi?id=210414 -// and http://hg.netbeans.org/jet-main/rev/a88434cec458 -private class Web_View_Workaround extends javafx.scene.layout.Pane -{ - VBox.setVgrow(this, Priority.ALWAYS) - HBox.setHgrow(this, Priority.ALWAYS) - - setMaxWidth(java.lang.Double.MAX_VALUE) - setMaxHeight(java.lang.Double.MAX_VALUE) - - val web_view = new WebView - web_view.setMinSize(500, 400) - web_view.setPrefSize(500, 400) - - getChildren().add(web_view) - - override protected def layoutChildren() - { - val managed = getManagedChildren() - val width = getWidth() - val height = getHeight() - val top = getInsets().getTop() - val right = getInsets().getRight() - val left = getInsets().getLeft() - val bottom = getInsets().getBottom() - - for (i <- 0 until managed.size) - layoutInArea(managed.get(i), left, top, - width - left - right, height - top - bottom, - 0, Insets.EMPTY, true, true, HPos.CENTER, VPos.CENTER) - } -} - - -class HTML5_Panel extends javafx.embed.swing.JFXPanel -{ - private val future = - JFX_Thread.future { - val pane = new Web_View_Workaround - - val web_view = pane.web_view - web_view.setFontSmoothingType(FontSmoothingType.GRAY) - web_view.setOnKeyTyped(new EventHandler[KeyEvent] { - def handle(e: KeyEvent) { - if (e.isControlDown && e.getCharacter == "0") - web_view.setFontScale(1.0) - if (e.isControlDown && e.getCharacter == "+") - web_view.setFontScale(web_view.getFontScale * 1.1) - else if (e.isControlDown && e.getCharacter == "-") - web_view.setFontScale(web_view.getFontScale / 1.1) - } - }) - - setScene(new Scene(pane)) - pane - } - - def web_view: WebView = future.join.web_view - def web_engine: WebEngine = web_view.getEngine -} diff --git a/core/Pure/GUI/jfx_thread.scala 
b/core/Pure/GUI/jfx_thread.scala deleted file mode 100644 index a768b63d..00000000 --- a/core/Pure/GUI/jfx_thread.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* Title: Pure/GUI/jfx_thread.scala - Module: PIDE-GUI - Author: Makarius - -Evaluation within the Java FX application thread. -*/ - -package isabelle - -import javafx.application.{Platform => JFX_Platform} - - -object JFX_Thread -{ - /* checks */ - - def assert() = Predef.assert(JFX_Platform.isFxApplicationThread()) - def require() = Predef.require(JFX_Platform.isFxApplicationThread()) - - - /* asynchronous context switch */ - - def later(body: => Unit) - { - if (JFX_Platform.isFxApplicationThread()) body - else JFX_Platform.runLater(new Runnable { def run = body }) - } - - def future[A](body: => A): Future[A] = - { - if (JFX_Platform.isFxApplicationThread()) Future.value(body) - else { - val promise = Future.promise[A] - later { promise.fulfill_result(Exn.capture(body)) } - promise - } - } -} diff --git a/core/Pure/GUI/popup.scala b/core/Pure/GUI/popup.scala deleted file mode 100644 index 821f09b5..00000000 --- a/core/Pure/GUI/popup.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* Title: Pure/GUI/popup.scala - Module: PIDE-GUI - Author: Makarius - -Popup within layered pane. -*/ - -package isabelle - - -import java.awt.{Point, Dimension} -import javax.swing.{JLayeredPane, JComponent} - - -class Popup( - layered: JLayeredPane, - component: JComponent, - location: Point, - size: Dimension) -{ - def show - { - component.setLocation(location) - component.setSize(size) - component.setPreferredSize(size) - component.setOpaque(true) - layered.add(component, JLayeredPane.DEFAULT_LAYER) - layered.moveToFront(component) - layered.repaint(component.getBounds()) - } - - def hide - { - val bounds = component.getBounds() - layered.remove(component) - layered.repaint(bounds) - } -} - diff --git a/core/Pure/GUI/system_dialog.scala b/core/Pure/GUI/system_dialog.scala deleted file mode 100644 index 92a2daca..00000000 --- a/core/Pure/GUI/system_dialog.scala +++ /dev/null @@ -1,212 +0,0 @@ -/* Title: Pure/GUI/system_dialog.scala - Author: Makarius - -Dialog for system processes, with optional output window. 
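Popup above parks a component in the default layer of a JLayeredPane, raises it to the front and repaints; together with GUI.layered_pane and the GUI_Thread checks from the files above it gives lightweight tooltip-style overlays. A hypothetical helper, assuming the deleted isabelle package on the classpath (Popup_Demo and show_hint are invented names; it must be called on the Swing thread):

// Not compiled here: assumes the isabelle package from this repository on the classpath.
import java.awt.{Component, Dimension, Point}
import javax.swing.JLabel
import isabelle.{GUI, GUI_Thread, Popup}

object Popup_Demo
{
  // Show a short hint inside the layered pane of whatever window owns `component`;
  // GUI_Thread.require checks that we are on the event dispatch thread.
  def show_hint(component: Component, text: String): Option[Popup] =
    GUI_Thread.require {
      GUI.layered_pane(component).map { layered =>
        val popup = new Popup(layered, new JLabel(text), new Point(20, 20), new Dimension(120, 25))
        popup.show
        popup
      }
    }
}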
-*/ - -package isabelle - - -import java.awt.{GraphicsEnvironment, Point, Font} -import javax.swing.WindowConstants -import java.io.{File => JFile, BufferedReader, InputStreamReader} - -import scala.swing.{ScrollPane, Button, CheckBox, FlowPanel, - BorderPanel, Frame, TextArea, Component, Label} -import scala.swing.event.ButtonClicked - - -class System_Dialog extends Build.Progress -{ - /* component state -- owned by GUI thread */ - - private var _title = "Isabelle" - private var _window: Option[Window] = None - private var _return_code: Option[Int] = None - - private def check_window(): Window = - { - GUI_Thread.require {} - - _window match { - case Some(window) => window - case None => - val window = new Window - _window = Some(window) - - window.pack() - val point = GraphicsEnvironment.getLocalGraphicsEnvironment().getCenterPoint() - window.location = - new Point(point.x - window.size.width / 2, point.y - window.size.height / 2) - window.visible = true - - window - } - } - - private val result = Future.promise[Int] - - private def conclude() - { - GUI_Thread.require {} - require(_return_code.isDefined) - - _window match { - case None => - case Some(window) => - window.visible = false - window.dispose - _window = None - } - - try { result.fulfill(_return_code.get) } - catch { case ERROR(_) => } - } - - def join(): Int = result.join - def join_exit(): Nothing = sys.exit(join) - - - /* window */ - - private class Window extends Frame - { - title = _title - peer.setIconImages(GUI.isabelle_images()) - - - /* text */ - - val text = new TextArea { - font = new Font("SansSerif", Font.PLAIN, GUI.resolution_scale(10) max 14) - editable = false - columns = 50 - rows = 20 - } - - val scroll_text = new ScrollPane(text) - - - /* layout panel with dynamic actions */ - - val action_panel = new FlowPanel(FlowPanel.Alignment.Center)() - val layout_panel = new BorderPanel - layout_panel.layout(scroll_text) = BorderPanel.Position.Center - layout_panel.layout(action_panel) = BorderPanel.Position.South - - contents = layout_panel - - def set_actions(cs: Component*) - { - action_panel.contents.clear - action_panel.contents ++= cs - layout_panel.revalidate - layout_panel.repaint - } - - - /* close */ - - peer.setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE) - - override def closeOperation { - if (_return_code.isDefined) conclude() - else stopping() - } - - def stopping() - { - is_stopped = true - set_actions(new Label("Stopping ...")) - } - - val stop_button = new Button("Stop") { - reactions += { case ButtonClicked(_) => stopping() } - } - - var do_auto_close = true - def can_auto_close: Boolean = do_auto_close && _return_code == Some(0) - - val auto_close = new CheckBox("Auto close") { - reactions += { - case ButtonClicked(_) => do_auto_close = this.selected - if (can_auto_close) conclude() - } - } - auto_close.selected = do_auto_close - auto_close.tooltip = "Automatically close dialog when finished" - - set_actions(stop_button, auto_close) - - - /* exit */ - - val delay_exit = - GUI_Thread.delay_first(Time.seconds(1.0)) - { - if (can_auto_close) conclude() - else { - val button = - new Button(if (_return_code == Some(0)) "OK" else "Exit") { - reactions += { case ButtonClicked(_) => conclude() } - } - set_actions(button) - button.peer.getRootPane.setDefaultButton(button.peer) - } - } - } - - - /* progress operations */ - - def title(txt: String): Unit = - GUI_Thread.later { - _title = txt - _window.foreach(window => window.title = txt) - } - - def return_code(rc: Int): Unit = - GUI_Thread.later 
{ - _return_code = Some(rc) - _window match { - case None => conclude() - case Some(window) => window.delay_exit.invoke - } - } - - override def echo(txt: String): Unit = - GUI_Thread.later { - val window = check_window() - window.text.append(txt + "\n") - val vertical = window.scroll_text.peer.getVerticalScrollBar - vertical.setValue(vertical.getMaximum) - } - - override def theory(session: String, theory: String): Unit = - echo(session + ": theory " + theory) - - @volatile private var is_stopped = false - override def stopped: Boolean = is_stopped - - - /* system operations */ - - def execute(cwd: JFile, env: Map[String, String], args: String*): Int = - { - val proc = Isabelle_System.raw_execute(cwd, env, true, args: _*) - proc.getOutputStream.close - - val stdout = new BufferedReader(new InputStreamReader(proc.getInputStream, UTF8.charset)) - try { - var line = stdout.readLine - while (line != null) { - echo(line) - line = stdout.readLine - } - } - finally { stdout.close } - - proc.waitFor - } -} - diff --git a/core/Pure/GUI/wrap_panel.scala b/core/Pure/GUI/wrap_panel.scala deleted file mode 100644 index c1e7e6cc..00000000 --- a/core/Pure/GUI/wrap_panel.scala +++ /dev/null @@ -1,124 +0,0 @@ -/* Title: Pure/GUI/wrap_panel.scala - Module: PIDE-GUI - Author: Makarius - -Panel with improved FlowLayout for wrapping of components over -multiple lines, see also -http://tips4java.wordpress.com/2008/11/06/wrap-layout/ and -scala.swing.FlowPanel. -*/ - -package isabelle - - -import java.awt.{FlowLayout, Container, Dimension} -import javax.swing.{JComponent, JPanel, JScrollPane} - -import scala.swing.{Panel, FlowPanel, Component, SequentialContainer, ScrollPane} - - -object Wrap_Panel -{ - val Alignment = FlowPanel.Alignment - - class Layout(align: Int = FlowLayout.CENTER, hgap: Int = 5, vgap: Int = 5) - extends FlowLayout(align: Int, hgap: Int, vgap: Int) - { - override def preferredLayoutSize(target: Container): Dimension = - layout_size(target, true) - - override def minimumLayoutSize(target: Container): Dimension = - { - val minimum = layout_size(target, false) - minimum.width -= (getHgap + 1) - minimum - } - - private def layout_size(target: Container, preferred: Boolean): Dimension = - { - target.getTreeLock.synchronized - { - val target_width = - if (target.getSize.width == 0) Integer.MAX_VALUE - else target.getSize.width - - val hgap = getHgap - val vgap = getVgap - val insets = target.getInsets - val horizontal_insets_and_gap = insets.left + insets.right + (hgap * 2) - val max_width = target_width - horizontal_insets_and_gap - - - /* fit components into rows */ - - val dim = new Dimension(0, 0) - var row_width = 0 - var row_height = 0 - def add_row() - { - dim.width = dim.width max row_width - if (dim.height > 0) dim.height += vgap - dim.height += row_height - } - - for { - i <- (0 until target.getComponentCount).iterator - m = target.getComponent(i) - if m.isVisible - d = if (preferred) m.getPreferredSize else m.getMinimumSize() - } - { - if (row_width + d.width > max_width) { - add_row() - row_width = 0 - row_height = 0 - } - - if (row_width != 0) row_width += hgap - - row_width += d.width - row_height = row_height max d.height - } - add_row() - - dim.width += horizontal_insets_and_gap - dim.height += insets.top + insets.bottom + vgap * 2 - - - /* special treatment for ScrollPane */ - - val scroll_pane = - GUI.ancestors(target).exists( - { - case _: JScrollPane => true - case c: JComponent if Component.wrap(c).isInstanceOf[ScrollPane] => true - case _ => false - }) - if (scroll_pane && 
target.isValid) - dim.width -= (hgap + 1) - - dim - } - } - } -} - - -class Wrap_Panel(alignment: Wrap_Panel.Alignment.Value)(contents0: Component*) - extends Panel with SequentialContainer.Wrapper -{ - override lazy val peer: JPanel = - new JPanel(new Wrap_Panel.Layout(alignment.id)) with SuperMixin - - def this(contents0: Component*) = this(Wrap_Panel.Alignment.Center)(contents0: _*) - def this() = this(Wrap_Panel.Alignment.Center)() - - contents ++= contents0 - - private def layoutManager = peer.getLayout.asInstanceOf[Wrap_Panel.Layout] - - def vGap: Int = layoutManager.getVgap - def vGap_=(n: Int) { layoutManager.setVgap(n) } - def hGap: Int = layoutManager.getHgap - def hGap_=(n: Int) { layoutManager.setHgap(n) } -} diff --git a/core/Pure/General/alist.ML b/core/Pure/General/alist.ML deleted file mode 100644 index aa5f468b..00000000 --- a/core/Pure/General/alist.ML +++ /dev/null @@ -1,127 +0,0 @@ -(* Title: Pure/General/alist.ML - Author: Florian Haftmann, TU Muenchen - -Association lists -- lists of (key, value) pairs. -*) - -signature ALIST = -sig - exception DUP - val lookup: ('a * 'b -> bool) -> ('b * 'c) list -> 'a -> 'c option - val defined: ('a * 'b -> bool) -> ('b * 'c) list -> 'a -> bool - val update: ('a * 'a -> bool) -> ('a * 'b) - -> ('a * 'b) list -> ('a * 'b) list - val default: ('a * 'a -> bool) -> ('a * 'b) - -> ('a * 'b) list -> ('a * 'b) list - val delete: ('a * 'b -> bool) -> 'a - -> ('b * 'c) list -> ('b * 'c) list - val map_entry: ('a * 'b -> bool) -> 'a -> ('c -> 'c) - -> ('b * 'c) list -> ('b * 'c) list - val map_entry_yield: ('a * 'b -> bool) -> 'a -> ('c -> 'd * 'c) - -> ('b * 'c) list -> 'd option * ('b * 'c) list - val map_default: ('a * 'a -> bool) -> 'a * 'b -> ('b -> 'b) - -> ('a * 'b) list -> ('a * 'b) list - val join: ('a * 'a -> bool) -> ('a -> 'b * 'b -> 'b) (*exception DUP*) - -> ('a * 'b) list * ('a * 'b) list -> ('a * 'b) list (*exception DUP*) - val merge: ('a * 'a -> bool) -> ('b * 'b -> bool) - -> ('a * 'b) list * ('a * 'b) list -> ('a * 'b) list (*exception DUP*) - val make: ('a -> 'b) -> 'a list -> ('a * 'b) list - val find: ('a * 'b -> bool) -> ('c * 'b) list -> 'a -> 'c list - val coalesce: ('a * 'a -> bool) -> ('a * 'b) list -> ('a * 'b list) list - (*coalesce ranges of equal neighbour keys*) - val group: ('a * 'a -> bool) -> ('a * 'b) list -> ('a * 'b list) list -end; - -structure AList: ALIST = -struct - -fun find_index eq xs key = - let - fun find [] _ = ~1 - | find ((key', value)::xs) i = - if eq (key, key') - then i - else find xs (i+1); - in find xs 0 end; - -fun map_index eq key f_none f_some xs = - let - val i = find_index eq xs key; - fun mapp 0 (x::xs) = f_some x xs - | mapp i (x::xs) = x :: mapp (i-1) xs; - in (if i = ~1 then f_none else mapp i) xs end; - -fun lookup _ [] _ = NONE - | lookup eq ((key, value)::xs) key' = - if eq (key', key) then SOME value - else lookup eq xs key'; - -fun defined _ [] _ = false - | defined eq ((key, value)::xs) key' = - eq (key', key) orelse defined eq xs key'; - -fun update eq (x as (key, value)) = - map_index eq key (cons x) (fn _ => cons x); - -fun default eq (key, value) xs = - if defined eq xs key then xs else (key, value) :: xs; - -fun delete eq key = - map_index eq key I (K I); - -fun map_entry eq key f = - map_index eq key I (fn (key, value) => cons (key, f value)); - -fun map_default eq (key, value) f = - map_index eq key (cons (key, f value)) (fn (key, value) => cons (key, f value)); - -fun map_entry_yield eq key f xs = - let - val i = find_index eq xs key; - fun mapp 0 ((x as (key, 
value))::xs) = - let val (r, value') = f value - in (SOME r, (key, value') :: xs) end - | mapp i (x::xs) = - let val (r, xs') = mapp (i-1) xs - in (r, x::xs') end; - in if i = ~1 then (NONE, xs) else mapp i xs end; - -exception DUP; - -fun join eq f (xs, ys) = - let - fun add (y as (key, value)) xs = - (case lookup eq xs key of - NONE => cons y xs - | SOME value' => update eq (key, f key (value', value)) xs); - in fold_rev add ys xs end; - -fun merge eq_key eq_val = - join eq_key (K (fn (yx as (_, x)) => if eq_val yx then x else raise DUP)); - -fun make keyfun = - let fun keypair x = (x, keyfun x) - in map keypair end; - -fun find eq [] _ = [] - | find eq ((key, value) :: xs) value' = - let - val values = find eq xs value'; - in if eq (value', value) then key :: values else values end; - -fun coalesce eq = - let - fun vals _ [] = ([], []) - | vals x (lst as (y, b) :: ps) = - if eq (x, y) then vals x ps |>> cons b - else ([], lst); - fun coal [] = [] - | coal ((x, a) :: ps) = - let val (bs, qs) = vals x ps - in (x, a :: bs) :: coal qs end; - in coal end; - -fun group eq xs = - fold_rev (fn (k, v) => map_default eq (k, []) (cons v)) xs []; - -end; diff --git a/core/Pure/General/antiquote.ML b/core/Pure/General/antiquote.ML deleted file mode 100644 index 7be36238..00000000 --- a/core/Pure/General/antiquote.ML +++ /dev/null @@ -1,83 +0,0 @@ -(* Title: Pure/General/antiquote.ML - Author: Makarius - -Antiquotations within plain text. -*) - -signature ANTIQUOTE = -sig - type antiq = Symbol_Pos.T list * {start: Position.T, stop: Position.T, range: Position.range} - datatype 'a antiquote = Text of 'a | Antiq of antiq - val is_text: 'a antiquote -> bool - val antiq_reports: antiq -> Position.report list - val antiquote_reports: ('a -> Position.report_text list) -> - 'a antiquote list -> Position.report_text list - val scan_antiq: Symbol_Pos.T list -> antiq * Symbol_Pos.T list - val scan_antiquote: Symbol_Pos.T list -> Symbol_Pos.T list antiquote * Symbol_Pos.T list - val read: Symbol_Pos.T list * Position.T -> Symbol_Pos.T list antiquote list -end; - -structure Antiquote: ANTIQUOTE = -struct - -(* datatype antiquote *) - -type antiq = Symbol_Pos.T list * {start: Position.T, stop: Position.T, range: Position.range}; -datatype 'a antiquote = Text of 'a | Antiq of antiq; - -fun is_text (Text _) = true - | is_text _ = false; - - -(* reports *) - -fun antiq_reports ((_, {start, stop, range = (pos, _)}): antiq) = - [(start, Markup.antiquote), (stop, Markup.antiquote), - (pos, Markup.antiquoted), (pos, Markup.language_antiquotation)]; - -fun antiquote_reports text = - maps (fn Text x => text x | Antiq antiq => map (rpair "") (antiq_reports antiq)); - - -(* scan *) - -open Basic_Symbol_Pos; - -local - -val err_prefix = "Antiquotation lexical error: "; - -val scan_txt = - Scan.repeat1 ($$$ "@" --| Scan.ahead (~$$ "{") || - Scan.many1 (fn (s, _) => s <> "@" andalso Symbol.is_regular s)) >> flat; - -val scan_antiq_body = - Scan.trace (Symbol_Pos.scan_string_qq err_prefix || Symbol_Pos.scan_string_bq err_prefix) >> #2 || - Scan.trace (Symbol_Pos.scan_cartouche err_prefix) >> #2 || - Scan.one (fn (s, _) => s <> "}" andalso Symbol.is_regular s) >> single; - -in - -val scan_antiq = - Symbol_Pos.scan_pos -- ($$ "@" |-- $$ "{" |-- Symbol_Pos.scan_pos -- - Symbol_Pos.!!! 
(fn () => err_prefix ^ "missing closing brace") - (Scan.repeat scan_antiq_body -- Symbol_Pos.scan_pos -- ($$ "}" |-- Symbol_Pos.scan_pos))) - >> (fn (pos1, (pos2, ((body, pos3), pos4))) => - (flat body, - {start = Position.set_range (pos1, pos2), - stop = Position.set_range (pos3, pos4), - range = Position.range pos1 pos4})); - -val scan_antiquote = scan_antiq >> Antiq || scan_txt >> Text; - -end; - - -(* read *) - -fun read (syms, pos) = - (case Scan.read Symbol_Pos.stopper (Scan.repeat scan_antiquote) syms of - SOME xs => (Position.reports_text (antiquote_reports (K []) xs); xs) - | NONE => error ("Malformed quotation/antiquotation source" ^ Position.here pos)); - -end; diff --git a/core/Pure/General/antiquote.scala b/core/Pure/General/antiquote.scala deleted file mode 100644 index cef4fbcc..00000000 --- a/core/Pure/General/antiquote.scala +++ /dev/null @@ -1,55 +0,0 @@ -/* Title: Pure/ML/antiquote.scala - Author: Makarius - -Antiquotations within plain text. -*/ - -package isabelle - - -import scala.util.parsing.input.CharSequenceReader - - -object Antiquote -{ - sealed abstract class Antiquote - case class Text(source: String) extends Antiquote - case class Antiq(source: String) extends Antiquote - - - /* parsers */ - - object Parsers extends Parsers - - trait Parsers extends Scan.Parsers - { - private val txt: Parser[String] = - rep1("@" ~ guard(one(s => s != "{")) | many1(s => s != "@")) ^^ (x => x.mkString) - - val antiq_other: Parser[String] = - many1(s => s != "\"" && s != "`" && s != "}" && !Symbol.is_open(s) && !Symbol.is_close(s)) - - private val antiq_body: Parser[String] = - quoted("\"") | (quoted("`") | (cartouche | antiq_other)) - - val antiq: Parser[String] = - "@{" ~ rep(antiq_body) ~ "}" ^^ { case x ~ y ~ z => x + y.mkString + z } - - val antiquote: Parser[Antiquote] = - antiq ^^ (x => Antiq(x)) | txt ^^ (x => Text(x)) - } - - - /* read */ - - def read(input: CharSequence): List[Antiquote] = - { - Parsers.parseAll(Parsers.rep(Parsers.antiquote), new CharSequenceReader(input)) match { - case Parsers.Success(xs, _) => xs - case Parsers.NoSuccess(_, next) => - error("Malformed quotation/antiquotation source" + - Position.here(Position.Line(next.pos.line))) - } - } -} - diff --git a/core/Pure/General/balanced_tree.ML b/core/Pure/General/balanced_tree.ML deleted file mode 100644 index 55aed17d..00000000 --- a/core/Pure/General/balanced_tree.ML +++ /dev/null @@ -1,60 +0,0 @@ -(* Title: Pure/General/balanced_tree.ML - Author: Lawrence C Paulson and Makarius - -Balanced binary trees. 
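Antiquote.read above (the Scala side) splits its input into plain text chunks and @{...} antiquotations, reporting malformed input with a position. A hypothetical invocation, assuming the deleted isabelle package on the classpath (Antiquote_Demo is an invented name):

// Not compiled here: assumes the isabelle package from this repository on the classpath.
import isabelle.Antiquote

object Antiquote_Demo
{
  def main(args: Array[String])
  {
    // Splits the input into plain text and @{...} antiquotation chunks.
    val chunks = Antiquote.read("see @{cite \"foo\"} for details")
    chunks.foreach {
      case Antiquote.Text(s) => println("text:  " + s)
      case Antiquote.Antiq(s) => println("antiq: " + s)
    }
  }
}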
-*) - -signature BALANCED_TREE = -sig - val make: ('a * 'a -> 'a) -> 'a list -> 'a - val dest: ('a -> 'a * 'a) -> int -> 'a -> 'a list - val access: {left: 'a -> 'a, right: 'a -> 'a, init: 'a} -> int -> int -> 'a - val accesses: {left: 'a -> 'a, right: 'a -> 'a, init: 'a} -> int -> 'a list -end; - -structure Balanced_Tree: BALANCED_TREE = -struct - -fun make _ [] = raise List.Empty - | make _ [x] = x - | make f xs = - let - val m = length xs div 2; - val (ps, qs) = chop m xs; - in f (make f ps, make f qs) end; - -fun dest f n x = - if n <= 0 then raise List.Empty - else if n = 1 then [x] - else - let - val m = n div 2; - val (left, right) = f x; - in dest f m left @ dest f (n - m) right end; - -(*construct something of the form f(...g(...(x)...)) for balanced access*) -fun access {left = f, right = g, init = x} len i = - let - fun acc 1 _ = x - | acc n i = - let val m = n div 2 in - if i <= m then f (acc m i) - else g (acc (n - m) (i - m)) - end; - in if 1 <= i andalso i <= len then acc len i else raise Subscript end; - -(*construct ALL such accesses; could try harder to share recursive calls!*) -fun accesses {left = f, right = g, init = x} len = - let - fun acc 1 = [x] - | acc n = - let - val m = n div 2; - val accs_left = acc m; - val accs_right = - if n - m = m then accs_left - else acc (n - m); - in map f accs_left @ map g accs_right end; - in if 1 <= len then acc len else raise Subscript end; - -end; diff --git a/core/Pure/General/basics.ML b/core/Pure/General/basics.ML deleted file mode 100644 index 92a58803..00000000 --- a/core/Pure/General/basics.ML +++ /dev/null @@ -1,126 +0,0 @@ -(* Title: Pure/General/basics.ML - Author: Florian Haftmann and Makarius, TU Muenchen - -Fundamental concepts. -*) - -infix 1 |> |-> |>> ||> ||>> -infix 1 #> #-> #>> ##> ##>> - -signature BASICS = -sig - (*functions*) - val |> : 'a * ('a -> 'b) -> 'b - val |-> : ('c * 'a) * ('c -> 'a -> 'b) -> 'b - val |>> : ('a * 'c) * ('a -> 'b) -> 'b * 'c - val ||> : ('c * 'a) * ('a -> 'b) -> 'c * 'b - val ||>> : ('c * 'a) * ('a -> 'd * 'b) -> ('c * 'd) * 'b - val #> : ('a -> 'b) * ('b -> 'c) -> 'a -> 'c - val #-> : ('a -> 'c * 'b) * ('c -> 'b -> 'd) -> 'a -> 'd - val #>> : ('a -> 'c * 'b) * ('c -> 'd) -> 'a -> 'd * 'b - val ##> : ('a -> 'c * 'b) * ('b -> 'd) -> 'a -> 'c * 'd - val ##>> : ('a -> 'c * 'b) * ('b -> 'e * 'd) -> 'a -> ('c * 'e) * 'd - val ` : ('b -> 'a) -> 'b -> 'a * 'b - val tap: ('b -> 'a) -> 'b -> 'b - - (*options*) - val is_some: 'a option -> bool - val is_none: 'a option -> bool - val the: 'a option -> 'a - val these: 'a list option -> 'a list - val the_list: 'a option -> 'a list - val the_default: 'a -> 'a option -> 'a - val perhaps: ('a -> 'a option) -> 'a -> 'a - val merge_options: 'a option * 'a option -> 'a option - - (*partiality*) - val try: ('a -> 'b) -> 'a -> 'b option - val can: ('a -> 'b) -> 'a -> bool - - (*lists*) - val cons: 'a -> 'a list -> 'a list - val append: 'a list -> 'a list -> 'a list - val fold: ('a -> 'b -> 'b) -> 'a list -> 'b -> 'b - val fold_rev: ('a -> 'b -> 'b) -> 'a list -> 'b -> 'b - val fold_map: ('a -> 'b -> 'c * 'b) -> 'a list -> 'b -> 'c list * 'b -end; - -structure Basics: BASICS = -struct - -(* functions *) - -(*application and structured results*) -fun x |> f = f x; -fun (x, y) |-> f = f x y; -fun (x, y) |>> f = (f x, y); -fun (x, y) ||> f = (x, f y); -fun (x, y) ||>> f = let val (z, y') = f y in ((x, z), y') end; - -(*composition and structured results*) -fun (f #> g) x = x |> f |> g; -fun (f #-> g) x = x |> f |-> g; -fun (f #>> g) x = x |> f |>> g; -fun (f ##> 
g) x = x |> f ||> g; -fun (f ##>> g) x = x |> f ||>> g; - -(*result views*) -fun `f = fn x => (f x, x); -fun tap f = fn x => (f x; x); - - -(* options *) - -fun is_some (SOME _) = true - | is_some NONE = false; - -fun is_none (SOME _) = false - | is_none NONE = true; - -fun the (SOME x) = x - | the NONE = raise Option.Option; - -fun these (SOME x) = x - | these NONE = []; - -fun the_list (SOME x) = [x] - | the_list NONE = [] - -fun the_default x (SOME y) = y - | the_default x NONE = x; - -fun perhaps f x = the_default x (f x); - -fun merge_options (x, y) = if is_some x then x else y; - - -(* partiality *) - -fun try f x = SOME (f x) - handle exn => if Exn.is_interrupt exn then reraise exn else NONE; - -fun can f x = is_some (try f x); - - -(* lists *) - -fun cons x xs = x :: xs; - -fun append xs ys = xs @ ys; - -fun fold _ [] y = y - | fold f (x :: xs) y = fold f xs (f x y); - -fun fold_rev _ [] y = y - | fold_rev f (x :: xs) y = f x (fold_rev f xs y); - -fun fold_map _ [] y = ([], y) - | fold_map f (x :: xs) y = - let - val (x', y') = f x y; - val (xs', y'') = fold_map f xs y'; - in (x' :: xs', y'') end; - -end; - -open Basics; diff --git a/core/Pure/General/binding.ML b/core/Pure/General/binding.ML deleted file mode 100644 index 7520d7c9..00000000 --- a/core/Pure/General/binding.ML +++ /dev/null @@ -1,148 +0,0 @@ -(* Title: Pure/General/binding.ML - Author: Florian Haftmann, TU Muenchen - Author: Makarius - -Structured name bindings. -*) - -type bstring = string; (*primitive names to be bound*) - -signature BINDING = -sig - eqtype binding - val dest: binding -> bool * (string * bool) list * bstring - val make: bstring * Position.T -> binding - val pos_of: binding -> Position.T - val set_pos: Position.T -> binding -> binding - val name: bstring -> binding - val name_of: binding -> bstring - val map_name: (bstring -> bstring) -> binding -> binding - val prefix_name: string -> binding -> binding - val suffix_name: string -> binding -> binding - val eq_name: binding * binding -> bool - val empty: binding - val is_empty: binding -> bool - val qualify: bool -> string -> binding -> binding - val qualified: bool -> string -> binding -> binding - val qualified_name: string -> binding - val prefix_of: binding -> (string * bool) list - val map_prefix: ((string * bool) list -> (string * bool) list) -> binding -> binding - val prefix: bool -> string -> binding -> binding - val conceal: binding -> binding - val pretty: binding -> Pretty.T - val print: binding -> string - val bad: binding -> string - val check: binding -> unit -end; - -structure Binding: BINDING = -struct - -(** representation **) - -(* datatype *) - -datatype binding = Binding of - {conceal: bool, (*internal -- for foundational purposes only*) - prefix: (string * bool) list, (*system prefix*) - qualifier: (string * bool) list, (*user qualifier*) - name: bstring, (*base name*) - pos: Position.T}; (*source position*) - -fun make_binding (conceal, prefix, qualifier, name, pos) = - Binding {conceal = conceal, prefix = prefix, qualifier = qualifier, name = name, pos = pos}; - -fun map_binding f (Binding {conceal, prefix, qualifier, name, pos}) = - make_binding (f (conceal, prefix, qualifier, name, pos)); - -fun dest (Binding {conceal, prefix, qualifier, name, ...}) = - (conceal, prefix @ qualifier, name); - - - -(** basic operations **) - -(* name and position *) - -fun make (name, pos) = make_binding (false, [], [], name, pos); - -fun pos_of (Binding {pos, ...}) = pos; -fun set_pos pos = - map_binding (fn (conceal, prefix, qualifier, 
name, _) => (conceal, prefix, qualifier, name, pos)); - -fun name name = make (name, Position.none); -fun name_of (Binding {name, ...}) = name; - -fun eq_name (b, b') = name_of b = name_of b'; - -fun map_name f = - map_binding (fn (conceal, prefix, qualifier, name, pos) => - (conceal, prefix, qualifier, f name, pos)); - -val prefix_name = map_name o prefix; -val suffix_name = map_name o suffix; - -val empty = name ""; -fun is_empty b = name_of b = ""; - - -(* user qualifier *) - -fun qualify _ "" = I - | qualify mandatory qual = - map_binding (fn (conceal, prefix, qualifier, name, pos) => - (conceal, prefix, (qual, mandatory) :: qualifier, name, pos)); - -fun qualified mandatory name' = map_binding (fn (conceal, prefix, qualifier, name, pos) => - let val qualifier' = if name = "" then qualifier else qualifier @ [(name, mandatory)] - in (conceal, prefix, qualifier', name', pos) end); - -fun qualified_name "" = empty - | qualified_name s = - let val (qualifier, name) = split_last (Long_Name.explode s) - in make_binding (false, [], map (rpair false) qualifier, name, Position.none) end; - - -(* system prefix *) - -fun prefix_of (Binding {prefix, ...}) = prefix; - -fun map_prefix f = - map_binding (fn (conceal, prefix, qualifier, name, pos) => - (conceal, f prefix, qualifier, name, pos)); - -fun prefix _ "" = I - | prefix mandatory prfx = map_prefix (cons (prfx, mandatory)); - - -(* conceal *) - -val conceal = - map_binding (fn (_, prefix, qualifier, name, pos) => - (true, prefix, qualifier, name, pos)); - - -(* print *) - -fun pretty (Binding {prefix, qualifier, name, pos, ...}) = - if name = "" then Pretty.str "\"\"" - else - Pretty.markup (Position.markup pos Markup.binding) - [Pretty.str (Long_Name.implode (map #1 (prefix @ qualifier) @ [name]))] - |> Pretty.quote; - -val print = Pretty.str_of o pretty; - - -(* check *) - -fun bad binding = "Bad name binding: " ^ print binding ^ Position.here (pos_of binding); - -fun check binding = - if Symbol_Pos.is_identifier (name_of binding) then () - else legacy_feature (bad binding); - -end; - -type binding = Binding.binding; - diff --git a/core/Pure/General/buffer.ML b/core/Pure/General/buffer.ML deleted file mode 100644 index 02763db5..00000000 --- a/core/Pure/General/buffer.ML +++ /dev/null @@ -1,34 +0,0 @@ -(* Title: Pure/General/buffer.ML - Author: Markus Wenzel, TU Muenchen - -Efficient text buffers. -*) - -signature BUFFER = -sig - type T - val empty: T - val add: string -> T -> T - val markup: Markup.T -> (T -> T) -> T -> T - val content: T -> string - val output: T -> TextIO.outstream -> unit -end; - -structure Buffer: BUFFER = -struct - -datatype T = Buffer of string list; - -val empty = Buffer []; - -fun add "" buf = buf - | add x (Buffer xs) = Buffer (x :: xs); - -fun markup m body = - let val (bg, en) = Markup.output m - in add bg #> body #> add en end; - -fun content (Buffer xs) = implode (rev xs); -fun output (Buffer xs) stream = List.app (fn s => TextIO.output (stream, s)) (rev xs); - -end; diff --git a/core/Pure/General/bytes.scala b/core/Pure/General/bytes.scala deleted file mode 100644 index 32b6edf1..00000000 --- a/core/Pure/General/bytes.scala +++ /dev/null @@ -1,114 +0,0 @@ -/* Title: Pure/General/bytes.scala - Module: PIDE - Author: Makarius - -Immutable byte vectors versus UTF8 strings. 
-*/ - -package isabelle - - -import java.io.{File => JFile, OutputStream, FileInputStream} - - -object Bytes -{ - val empty: Bytes = new Bytes(Array[Byte](), 0, 0) - - def apply(s: CharSequence): Bytes = - { - val str = s.toString - if (str.isEmpty) empty - else { - val b = str.getBytes(UTF8.charset) - new Bytes(b, 0, b.length) - } - } - - def apply(a: Array[Byte], offset: Int, length: Int): Bytes = - if (length == 0) empty - else { - val b = new Array[Byte](length) - System.arraycopy(a, offset, b, 0, length) - new Bytes(b, 0, b.length) - } - - - /* read */ - - def read(file: JFile): Bytes = - { - var i = 0 - var m = 0 - val n = file.length.toInt - val bytes = new Array[Byte](n) - - val stream = new FileInputStream(file) - try { - do { - m = stream.read(bytes, i, n - i) - if (m != -1) i += m - } while (m != -1 && n > i) - } - finally { stream.close } - - new Bytes(bytes, 0, bytes.length) - } -} - -final class Bytes private( - protected val bytes: Array[Byte], - protected val offset: Int, - val length: Int) -{ - /* equality */ - - override def equals(that: Any): Boolean = - { - that match { - case other: Bytes => - if (this eq other) true - else if (length != other.length) false - else (0 until length).forall(i => bytes(offset + i) == other.bytes(other.offset + i)) - case _ => false - } - } - - private lazy val hash: Int = - { - var h = 0 - for (i <- offset until offset + length) { - val b = bytes(i).asInstanceOf[Int] & 0xFF - h = 31 * h + b - } - h - } - - override def hashCode(): Int = hash - - - /* content */ - - lazy val sha1_digest: SHA1.Digest = SHA1.digest(bytes) - - override def toString: String = - UTF8.decode_chars(s => s, bytes, offset, offset + length).toString - - def isEmpty: Boolean = length == 0 - - def +(other: Bytes): Bytes = - if (other.isEmpty) this - else if (isEmpty) other - else { - val new_bytes = new Array[Byte](length + other.length) - System.arraycopy(bytes, offset, new_bytes, 0, length) - System.arraycopy(other.bytes, other.offset, new_bytes, length, other.length) - new Bytes(new_bytes, 0, new_bytes.length) - } - - - /* write */ - - def write(stream: OutputStream): Unit = stream.write(bytes, offset, length) -} - diff --git a/core/Pure/General/change_table.ML b/core/Pure/General/change_table.ML deleted file mode 100644 index 7c92eb4b..00000000 --- a/core/Pure/General/change_table.ML +++ /dev/null @@ -1,158 +0,0 @@ -(* Title: Pure/General/change_table.ML - Author: Makarius - -Generic tables with extra bookkeeping of changes relative to some -common base version, subject to implicit block structure. Support for -efficient join/merge of big global tables with small local updates. 
-*) - -signature CHANGE_TABLE = -sig - structure Table: TABLE - type key = Table.key - exception DUP of key - exception SAME - type 'a T - val table_of: 'a T -> 'a Table.table - val empty: 'a T - val is_empty: 'a T -> bool - val change_base: bool -> 'a T -> 'a T - val change_ignore: 'a T -> 'a T - val join: (key -> 'a * 'a -> 'a) (*exception SAME*) -> 'a T * 'a T -> 'a T (*exception DUP*) - val merge: ('a * 'a -> bool) -> 'a T * 'a T -> 'a T (*exception DUP*) - val fold: (key * 'b -> 'a -> 'a) -> 'b T -> 'a -> 'a - val dest: 'a T -> (key * 'a) list - val lookup_key: 'a T -> key -> (key * 'a) option - val lookup: 'a T -> key -> 'a option - val defined: 'a T -> key -> bool - val update: key * 'a -> 'a T -> 'a T - val update_new: key * 'a -> 'a T -> 'a T (*exception DUP*) - val map_entry: key -> ('a -> 'a) -> 'a T -> 'a T - val map_default: key * 'a -> ('a -> 'a) -> 'a T -> 'a T - val delete_safe: key -> 'a T -> 'a T -end; - -functor Change_Table(Key: KEY): CHANGE_TABLE = -struct - -structure Table = Table(Key); -type key = Table.key; - -exception SAME = Table.SAME; -exception DUP = Table.DUP; - - -(* optional change *) - -datatype change = - No_Change | Change of {base: serial, depth: int, changes: Table.set option}; - -fun make_change base depth changes = - Change {base = base, depth = depth, changes = changes}; - -fun ignore_change (Change {base, depth, changes = SOME _}) = - make_change base depth NONE - | ignore_change change = change; - -fun update_change key (Change {base, depth, changes = SOME ch}) = - make_change base depth (SOME (Table.insert (K true) (key, ()) ch)) - | update_change _ change = change; - -fun base_change true No_Change = - make_change (serial ()) 0 (SOME Table.empty) - | base_change true (Change {base, depth, changes}) = - make_change base (depth + 1) changes - | base_change false (Change {base, depth, changes}) = - if depth = 0 then No_Change else make_change base (depth - 1) changes - | base_change false No_Change = raise Fail "Unbalanced change structure"; - -fun cannot_merge () = raise Fail "Cannot merge: incompatible change structure"; - -fun merge_change (No_Change, No_Change) = NONE - | merge_change (Change change1, Change change2) = - let - val {base = base1, depth = depth1, changes = changes1} = change1; - val {base = base2, depth = depth2, changes = changes2} = change2; - val _ = if base1 = base2 andalso depth1 = depth2 then () else cannot_merge (); - val (swapped, ch2) = - (case (changes1, changes2) of - (_, SOME ch2) => (false, ch2) - | (SOME ch1, _) => (true, ch1) - | _ => cannot_merge ()); - in SOME (swapped, ch2, make_change base1 depth1 NONE) end - | merge_change _ = cannot_merge (); - - -(* table with changes *) - -datatype 'a T = Change_Table of {change: change, table: 'a Table.table}; - -fun table_of (Change_Table {table, ...}) = table; - -val empty = Change_Table {change = No_Change, table = Table.empty}; - -fun is_empty (Change_Table {change, table}) = - (case change of No_Change => Table.is_empty table | _ => false); - -fun make_change_table (change, table) = Change_Table {change = change, table = table}; -fun map_change_table f (Change_Table {change, table}) = make_change_table (f (change, table)); - -fun change_base begin = (map_change_table o apfst) (base_change begin); -fun change_ignore arg = (map_change_table o apfst) ignore_change arg; - - -(* join and merge *) - -fun join f (arg1, arg2) = - let - val Change_Table {change = change1, table = table1} = arg1; - val Change_Table {change = change2, table = table2} = arg2; - in - if pointer_eq 
(change1, change2) andalso pointer_eq (table1, table2) then arg1 - else if is_empty arg2 then arg1 - else if is_empty arg1 then arg2 - else - (case merge_change (change1, change2) of - NONE => make_change_table (No_Change, Table.join f (table1, table2)) - | SOME (swapped, ch2, change') => - let - fun maybe_swap (x, y) = if swapped then (y, x) else (x, y); - val (tab1, tab2) = maybe_swap (table1, table2); - fun update key tab = - (case Table.lookup tab2 key of - NONE => tab - | SOME y => - (case Table.lookup tab key of - NONE => Table.update (key, y) tab - | SOME x => - (case (SOME (f key (maybe_swap (x, y))) handle Table.SAME => NONE) of - NONE => tab - | SOME z => Table.update (key, z) tab))); - in make_change_table (change', Table.fold (update o #1) ch2 tab1) end) - end; - -fun merge eq = - join (fn key => fn xy => if eq xy then raise Table.SAME else raise Table.DUP key); - - -(* derived operations *) - -fun fold f arg = Table.fold f (table_of arg); -fun dest arg = Table.dest (table_of arg); -fun lookup_key arg = Table.lookup_key (table_of arg); -fun lookup arg = Table.lookup (table_of arg); -fun defined arg = Table.defined (table_of arg); - -fun change_table key f = - map_change_table (fn (change, table) => (update_change key change, f table)); - -fun update (key, x) = change_table key (Table.update (key, x)); -fun update_new (key, x) = change_table key (Table.update_new (key, x)); -fun map_entry key f = change_table key (Table.map_entry key f); -fun map_default (key, x) f = change_table key (Table.map_default (key, x) f); -fun delete_safe key = change_table key (Table.delete_safe key); - -end; - -structure Change_Table = Change_Table(type key = string val ord = fast_string_ord); - diff --git a/core/Pure/General/completion.ML b/core/Pure/General/completion.ML deleted file mode 100644 index ae973a85..00000000 --- a/core/Pure/General/completion.ML +++ /dev/null @@ -1,56 +0,0 @@ -(* Title: Pure/Isar/completion.ML - Author: Makarius - -Semantic completion within the formal context. 
-*) - -signature COMPLETION = -sig - type T - val names: Position.T -> (string * (string * string)) list -> T - val none: T - val reported_text: T -> string - val suppress_abbrevs: string -> Markup.T list -end; - -structure Completion: COMPLETION = -struct - -abstype T = - Completion of {pos: Position.T, total: int, names: (string * (string * string)) list} -with - -(* completion of names *) - -fun dest (Completion args) = args; - -fun names pos names = - Completion - {pos = pos, - total = length names, - names = take (Options.default_int "completion_limit") names}; - -end; - -val none = names Position.none []; - -fun reported_text completion = - let val {pos, total, names} = dest completion in - if Position.is_reported pos andalso not (null names) then - let - val markup = Position.markup pos Markup.completion; - val body = (total, names) |> - let open XML.Encode in pair int (list (pair string (pair string string))) end; - in YXML.string_of (XML.Elem (markup, body)) end - else "" - end; - - -(* suppress short abbreviations *) - -fun suppress_abbrevs s = - if not (Symbol.is_ascii_identifier s) andalso (length (Symbol.explode s) <= 1 orelse s = "::") - then [Markup.no_completion] - else []; - -end; diff --git a/core/Pure/General/completion.scala b/core/Pure/General/completion.scala deleted file mode 100644 index c59ad5b7..00000000 --- a/core/Pure/General/completion.scala +++ /dev/null @@ -1,436 +0,0 @@ -/* Title: Pure/General/completion.scala - Author: Makarius - -Semantic completion within the formal context (reported names). -Syntactic completion of keywords and symbols, with abbreviations -(based on language context). -*/ - -package isabelle - - -import scala.collection.immutable.SortedMap -import scala.util.parsing.combinator.RegexParsers -import scala.util.matching.Regex -import scala.math.Ordering - - -object Completion -{ - /** completion result **/ - - sealed case class Item( - range: Text.Range, - original: String, - name: String, - description: List[String], - replacement: String, - move: Int, - immediate: Boolean) - - object Result - { - def empty(range: Text.Range): Result = Result(range, "", false, Nil) - def merge(history: History, result1: Option[Result], result2: Option[Result]): Option[Result] = - (result1, result2) match { - case (_, None) => result1 - case (None, _) => result2 - case (Some(res1), Some(res2)) => - if (res1.range != res2.range || res1.original != res2.original) result1 - else { - val items = (res1.items ::: res2.items).sorted(history.ordering) - Some(Result(res1.range, res1.original, false, items)) - } - } - } - - sealed case class Result( - range: Text.Range, - original: String, - unique: Boolean, - items: List[Item]) - - - - /** persistent history **/ - - private val COMPLETION_HISTORY = Path.explode("$ISABELLE_HOME_USER/etc/completion_history") - - object History - { - val empty: History = new History() - - def load(): History = - { - def ignore_error(msg: String): Unit = - Output.warning("Ignoring bad content of file " + COMPLETION_HISTORY + - (if (msg == "") "" else "\n" + msg)) - - val content = - if (COMPLETION_HISTORY.is_file) { - try { - import XML.Decode._ - list(pair(Symbol.decode_string, int))( - YXML.parse_body(File.read(COMPLETION_HISTORY))) - } - catch { - case ERROR(msg) => ignore_error(msg); Nil - case _: XML.Error => ignore_error(""); Nil - } - } - else Nil - (empty /: content)(_ + _) - } - } - - final class History private(rep: SortedMap[String, Int] = SortedMap.empty) - { - override def toString: String = 
rep.mkString("Completion.History(", ",", ")") - - def frequency(name: String): Int = - default_frequency(Symbol.encode(name)) getOrElse - rep.getOrElse(name, 0) - - def + (entry: (String, Int)): History = - { - val (name, freq) = entry - if (name == "") this - else new History(rep + (name -> (frequency(name) + freq))) - } - - def ordering: Ordering[Item] = - new Ordering[Item] { - def compare(item1: Item, item2: Item): Int = - frequency(item2.name) compare frequency(item1.name) - } - - def save() - { - Isabelle_System.mkdirs(COMPLETION_HISTORY.dir) - File.write_backup(COMPLETION_HISTORY, - { - import XML.Encode._ - YXML.string_of_body(list(pair(Symbol.encode_string, int))(rep.toList)) - }) - } - } - - class History_Variable - { - private var history = History.empty - def value: History = synchronized { history } - - def load() - { - val h = History.load() - synchronized { history = h } - } - - def update(item: Item, freq: Int = 1): Unit = synchronized { - history = history + (item.name -> freq) - } - } - - - - /** semantic completion **/ - - object Semantic - { - object Info - { - def unapply(info: Text.Markup): Option[Text.Info[Semantic]] = - info.info match { - case XML.Elem(Markup(Markup.COMPLETION, _), body) => - try { - val (total, names) = - { - import XML.Decode._ - pair(int, list(pair(string, pair(string, string))))(body) - } - Some(Text.Info(info.range, Names(total, names))) - } - catch { case _: XML.Error => None } - case XML.Elem(Markup(Markup.NO_COMPLETION, _), _) => - Some(Text.Info(info.range, No_Completion)) - case _ => None - } - } - } - - sealed abstract class Semantic - case object No_Completion extends Semantic - case class Names(total: Int, names: List[(String, (String, String))]) extends Semantic - { - def complete( - range: Text.Range, - history: Completion.History, - do_decode: Boolean, - original: String): Option[Completion.Result] = - { - def decode(s: String): String = if (do_decode) Symbol.decode(s) else s - val items = - for { - (xname, (kind, name)) <- names - xname1 = decode(xname) - if xname1 != original - (full_name, descr_name) = - if (kind == "") (name, quote(decode(name))) - else - (Long_Name.qualify(kind, name), - Word.implode(Word.explode('_', kind)) + " " + quote(decode(name))) - description = List(xname1, "(" + descr_name + ")") - } yield Item(range, original, full_name, description, xname1, 0, true) - - if (items.isEmpty) None - else Some(Result(range, original, names.length == 1, items.sorted(history.ordering))) - } - } - - - - /** syntactic completion **/ - - /* language context */ - - object Language_Context - { - val outer = Language_Context("", true, false) - val inner = Language_Context(Markup.Language.UNKNOWN, true, false) - val ML_outer = Language_Context(Markup.Language.ML, false, true) - val ML_inner = Language_Context(Markup.Language.ML, true, false) - val SML_outer = Language_Context(Markup.Language.SML, false, false) - } - - sealed case class Language_Context(language: String, symbols: Boolean, antiquotes: Boolean) - { - def is_outer: Boolean = language == "" - } - - - /* init */ - - val empty: Completion = new Completion() - def init(): Completion = empty.add_symbols() - - - /* word parsers */ - - private object Word_Parsers extends RegexParsers - { - override val whiteSpace = "".r - - private val symbol_regex: Regex = """\\<\^?[A-Za-z0-9_']+>""".r - def is_symbol(s: CharSequence): Boolean = symbol_regex.pattern.matcher(s).matches - - private def reverse_symbol: Parser[String] = """>[A-Za-z0-9_']+\^?<\\""".r - private def reverse_symb: 
Parser[String] = """[A-Za-z0-9_']{2,}\^?<\\""".r - private def escape: Parser[String] = """[a-zA-Z0-9_']+\\""".r - - private val word_regex = "[a-zA-Z0-9_'.]+".r - private def word: Parser[String] = word_regex - private def word3: Parser[String] = "[a-zA-Z0-9_'.]{3,}".r - private def underscores: Parser[String] = "_*".r - - def is_word(s: CharSequence): Boolean = word_regex.pattern.matcher(s).matches - def is_word_char(c: Char): Boolean = Symbol.is_ascii_letdig(c) || c == '.' - - def read_symbol(in: CharSequence): Option[String] = - { - val reverse_in = new Library.Reverse(in) - parse(reverse_symbol ^^ (_.reverse), reverse_in) match { - case Success(result, _) => Some(result) - case _ => None - } - } - - def read_word(explicit: Boolean, in: CharSequence): Option[(String, String)] = - { - val parse_word = if (explicit) word else word3 - val reverse_in = new Library.Reverse(in) - val parser = - (reverse_symbol | reverse_symb | escape) ^^ (x => (x.reverse, "")) | - underscores ~ parse_word ~ opt("?") ^^ - { case x ~ y ~ z => (z.getOrElse("") + y.reverse, x) } - parse(parser, reverse_in) match { - case Success(result, _) => Some(result) - case _ => None - } - } - } - - - /* abbreviations */ - - private val caret_indicator = '\u0007' - private val antiquote = "@{" - - private val default_abbrs = - List("@{" -> "@{\u0007}", - "`" -> "\\", - "`" -> "\\", - "`" -> "\\\u0007\\") - - private def default_frequency(name: String): Option[Int] = - default_abbrs.iterator.map(_._2).zipWithIndex.find(_._1 == name).map(_._2) -} - -final class Completion private( - keywords: Map[String, Boolean] = Map.empty, - words_lex: Scan.Lexicon = Scan.Lexicon.empty, - words_map: Multi_Map[String, String] = Multi_Map.empty, - abbrevs_lex: Scan.Lexicon = Scan.Lexicon.empty, - abbrevs_map: Multi_Map[String, (String, String)] = Multi_Map.empty) -{ - /* keywords */ - - private def is_symbol(name: String): Boolean = Symbol.names.isDefinedAt(name) - private def is_keyword(name: String): Boolean = !is_symbol(name) && keywords.isDefinedAt(name) - private def is_keyword_template(name: String, template: Boolean): Boolean = - is_keyword(name) && keywords(name) == template - - def + (keyword: String, template: String): Completion = - new Completion( - keywords + (keyword -> (keyword != template)), - words_lex + keyword, - words_map + (keyword -> template), - abbrevs_lex, - abbrevs_map) - - def + (keyword: String): Completion = this + (keyword, keyword) - - - /* symbols with abbreviations */ - - private def add_symbols(): Completion = - { - val words = - (for ((x, _) <- Symbol.names.toList) yield (x, x)) ::: - (for ((x, y) <- Symbol.names.toList) yield ("\\" + y, x)) ::: - (for ((x, y) <- Symbol.abbrevs.toList if Completion.Word_Parsers.is_word(y)) yield (y, x)) - - val symbol_abbrs = - (for ((x, y) <- Symbol.abbrevs.iterator if !Completion.Word_Parsers.is_word(y)) - yield (y, x)).toList - - val abbrs = - for ((a, b) <- symbol_abbrs ::: Completion.default_abbrs) - yield (a.reverse, (a, b)) - - new Completion( - keywords, - words_lex ++ words.map(_._1), - words_map ++ words, - abbrevs_lex ++ abbrs.map(_._1), - abbrevs_map ++ abbrs) - } - - - /* complete */ - - def complete( - history: Completion.History, - do_decode: Boolean, - explicit: Boolean, - start: Text.Offset, - text: CharSequence, - caret: Int, - language_context: Completion.Language_Context): Option[Completion.Result] = - { - def decode(s: String): String = if (do_decode) Symbol.decode(s) else s - val length = text.length - - val abbrevs_result = - { - val reverse_in = new 
Library.Reverse(text.subSequence(0, caret)) - Scan.Parsers.parse(Scan.Parsers.literal(abbrevs_lex), reverse_in) match { - case Scan.Parsers.Success(reverse_a, _) => - val abbrevs = abbrevs_map.get_list(reverse_a) - abbrevs match { - case Nil => None - case (a, _) :: _ => - val ok = - if (a == Completion.antiquote) language_context.antiquotes - else - language_context.symbols || - Completion.default_abbrs.exists(_._1 == a) || - Completion.Word_Parsers.is_symbol(a) - if (ok) Some((a, abbrevs)) - else None - } - case _ => None - } - } - - val words_result = - if (abbrevs_result.isDefined) None - else { - val word_context = - caret < length && Completion.Word_Parsers.is_word_char(text.charAt(caret)) - val result = - Completion.Word_Parsers.read_symbol(text.subSequence(0, caret)) match { - case Some(symbol) => Some((symbol, "")) - case None => Completion.Word_Parsers.read_word(explicit, text.subSequence(0, caret)) - } - result.map( - { - case (word, underscores) => - val complete_words = words_lex.completions(word) - val full_word = word + underscores - val completions = - if (complete_words.contains(full_word) && is_keyword_template(full_word, false)) Nil - else - for { - complete_word <- complete_words - ok = - if (is_keyword(complete_word)) !word_context && language_context.is_outer - else language_context.symbols - if ok - completion <- words_map.get_list(complete_word) - } yield (complete_word, completion) - ((full_word, completions)) - }) - } - - (abbrevs_result orElse words_result) match { - case Some((original, completions)) if !completions.isEmpty => - val range = Text.Range(- original.length, 0) + caret + start - val immediate = - explicit || - (!Completion.Word_Parsers.is_word(original) && - Character.codePointCount(original, 0, original.length) > 1) - val unique = completions.length == 1 - - val items = - for { - (complete_word, name0) <- completions - name1 = decode(name0) - if name1 != original - (s1, s2) = - space_explode(Completion.caret_indicator, name1) match { - case List(s1, s2) => (s1, s2) - case _ => (name1, "") - } - move = - s2.length - description = - if (is_symbol(name0)) { - if (name0 == name1) List(name0) - else List(name1, "(symbol " + quote(name0) + ")") - } - else if (is_keyword_template(complete_word, true)) - List(name1, "(template " + quote(complete_word) + ")") - else if (move != 0) List(name1, "(template)") - else if (is_keyword(complete_word)) List(name1, "(keyword)") - else List(name1) - } - yield Completion.Item(range, original, name1, description, s1 + s2, move, immediate) - - if (items.isEmpty) None - else - Some(Completion.Result(range, original, unique, - items.sortBy(_.name).sorted(history.ordering))) - - case _ => None - } - } -} diff --git a/core/Pure/General/exn.ML b/core/Pure/General/exn.ML deleted file mode 100644 index de040e9f..00000000 --- a/core/Pure/General/exn.ML +++ /dev/null @@ -1,88 +0,0 @@ -(* Title: Pure/General/exn.ML - Author: Makarius - -Support for exceptions. 
-*) - -signature EXN = -sig - datatype 'a result = Res of 'a | Exn of exn - val get_res: 'a result -> 'a option - val get_exn: 'a result -> exn option - val capture: ('a -> 'b) -> 'a -> 'b result - val release: 'a result -> 'a - val map_result: ('a -> 'b) -> 'a result -> 'b result - val maps_result: ('a -> 'b result) -> 'a result -> 'b result - exception Interrupt - val interrupt: unit -> 'a - val is_interrupt: exn -> bool - val interrupt_exn: 'a result - val is_interrupt_exn: 'a result -> bool - val interruptible_capture: ('a -> 'b) -> 'a -> 'b result - val return_code: exn -> int -> int - val capture_exit: int -> ('a -> 'b) -> 'a -> 'b - exception EXCEPTIONS of exn list -end; - -structure Exn: EXN = -struct - -(* exceptions as values *) - -datatype 'a result = - Res of 'a | - Exn of exn; - -fun get_res (Res x) = SOME x - | get_res _ = NONE; - -fun get_exn (Exn exn) = SOME exn - | get_exn _ = NONE; - -fun capture f x = Res (f x) handle e => Exn e; - -fun release (Res y) = y - | release (Exn e) = reraise e; - -fun map_result f (Res x) = Res (f x) - | map_result f (Exn e) = Exn e; - -fun maps_result f = (fn Res x => x | Exn e => Exn e) o map_result f; - - -(* interrupts *) - -exception Interrupt = Interrupt; - -fun interrupt () = raise Interrupt; - -fun is_interrupt Interrupt = true - | is_interrupt (IO.Io {cause, ...}) = is_interrupt cause - | is_interrupt _ = false; - -val interrupt_exn = Exn Interrupt; - -fun is_interrupt_exn (Exn exn) = is_interrupt exn - | is_interrupt_exn _ = false; - -fun interruptible_capture f x = - Res (f x) handle e => if is_interrupt e then reraise e else Exn e; - - -(* POSIX return code *) - -fun return_code exn rc = - if is_interrupt exn then (130: int) else rc; - -fun capture_exit rc f x = - f x handle exn => exit (return_code exn rc); - - -(* concatenated exceptions *) - -exception EXCEPTIONS of exn list; - -end; - -datatype illegal = Interrupt; - diff --git a/core/Pure/General/exn.scala b/core/Pure/General/exn.scala deleted file mode 100644 index bfd38fa6..00000000 --- a/core/Pure/General/exn.scala +++ /dev/null @@ -1,93 +0,0 @@ -/* Title: Pure/General/exn.scala - Module: PIDE - Author: Makarius - -Support for exceptions (arbitrary throwables). 
-*/ - -package isabelle - - -object Exn -{ - /* exceptions as values */ - - sealed abstract class Result[A] - case class Res[A](res: A) extends Result[A] - case class Exn[A](exn: Throwable) extends Result[A] - - def capture[A](e: => A): Result[A] = - try { Res(e) } - catch { case exn: Throwable => Exn[A](exn) } - - def release[A](result: Result[A]): A = - result match { - case Res(x) => x - case Exn(exn) => throw exn - } - - - /* interrupts */ - - def is_interrupt(exn: Throwable): Boolean = - { - var found_interrupt = false - var e = exn - while (!found_interrupt && e != null) { - found_interrupt |= e.isInstanceOf[InterruptedException] - e = e.getCause - } - found_interrupt - } - - object Interrupt - { - def apply(): Throwable = new InterruptedException - def unapply(exn: Throwable): Boolean = is_interrupt(exn) - - def expose() { if (Thread.interrupted()) throw apply() } - def impose() { Thread.currentThread.interrupt } - - def postpone[A](body: => A): Option[A] = - { - val interrupted = Thread.interrupted - val result = capture { body } - if (interrupted) impose() - result match { - case Res(x) => Some(x) - case Exn(e) => - if (is_interrupt(e)) { impose(); None } - else throw e - } - } - - val return_code = 130 - } - - - /* POSIX return code */ - - def return_code(exn: Throwable, rc: Int): Int = - if (is_interrupt(exn)) Interrupt.return_code else rc - - - /* message */ - - private val runtime_exception = Class.forName("java.lang.RuntimeException") - - def user_message(exn: Throwable): Option[String] = - if (exn.isInstanceOf[java.io.IOException]) { - val msg = exn.getMessage - Some(if (msg == null) "I/O error" else "I/O error: " + msg) - } - else if (exn.getClass == runtime_exception) { - val msg = exn.getMessage - Some(if (msg == null || msg == "") "Error" else msg) - } - else if (exn.isInstanceOf[RuntimeException]) Some(exn.toString) - else None - - def message(exn: Throwable): String = - user_message(exn) getOrElse (if (is_interrupt(exn)) "Interrupt" else exn.toString) -} - diff --git a/core/Pure/General/file.ML b/core/Pure/General/file.ML deleted file mode 100644 index 509f8456..00000000 --- a/core/Pure/General/file.ML +++ /dev/null @@ -1,174 +0,0 @@ -(* Title: Pure/General/file.ML - Author: Markus Wenzel, TU Muenchen - -File system operations. 
-*) - -signature FILE = -sig - val platform_path: Path.T -> string - val shell_quote: string -> string - val shell_path: Path.T -> string - val cd: Path.T -> unit - val pwd: unit -> Path.T - val full_path: Path.T -> Path.T -> Path.T - val tmp_path: Path.T -> Path.T - val exists: Path.T -> bool - val rm: Path.T -> unit - val is_dir: Path.T -> bool - val check_dir: Path.T -> Path.T - val check_file: Path.T -> Path.T - val open_dir: (OS.FileSys.dirstream -> 'a) -> Path.T -> 'a - val open_input: (TextIO.instream -> 'a) -> Path.T -> 'a - val open_output: (TextIO.outstream -> 'a) -> Path.T -> 'a - val open_append: (TextIO.outstream -> 'a) -> Path.T -> 'a - val fold_dir: (string -> 'a -> 'a) -> Path.T -> 'a -> 'a - val read_dir: Path.T -> string list - val fold_lines: (string -> 'a -> 'a) -> Path.T -> 'a -> 'a - val fold_pages: (string -> 'a -> 'a) -> Path.T -> 'a -> 'a - val read_lines: Path.T -> string list - val read_pages: Path.T -> string list - val read: Path.T -> string - val write: Path.T -> string -> unit - val append: Path.T -> string -> unit - val write_list: Path.T -> string list -> unit - val append_list: Path.T -> string list -> unit - val write_buffer: Path.T -> Buffer.T -> unit - val eq: Path.T * Path.T -> bool -end; - -structure File: FILE = -struct - -(* system path representations *) - -val platform_path = Path.implode o Path.expand; - -val shell_quote = enclose "'" "'"; -val shell_path = shell_quote o platform_path; - - -(* current working directory *) - -val cd = cd o platform_path; -val pwd = Path.explode o pwd; - - -(* full_path *) - -fun full_path dir path = - let - val path' = Path.expand path; - val _ = Path.is_current path' andalso error "Bad file specification"; - val path'' = Path.append dir path'; - in - if Path.is_absolute path'' then path'' - else Path.append (pwd ()) path'' - end; - - -(* tmp_path *) - -fun tmp_path path = - Path.append (Path.variable "ISABELLE_TMP") (Path.base path); - - -(* directory entries *) - -val exists = can OS.FileSys.modTime o platform_path; - -val rm = OS.FileSys.remove o platform_path; - -fun is_dir path = - the_default false (try OS.FileSys.isDir (platform_path path)); - -fun check_dir path = - if exists path andalso is_dir path then path - else error ("No such directory: " ^ Path.print path); - -fun check_file path = - if exists path andalso not (is_dir path) then path - else error ("No such file: " ^ Path.print path); - - -(* open streams *) - -local - -fun with_file open_file close_file f path = - let val file = open_file path - in Exn.release (Exn.capture f file before close_file file) end; - -in - -fun open_dir f = with_file OS.FileSys.openDir OS.FileSys.closeDir f o platform_path; -fun open_input f = with_file TextIO.openIn TextIO.closeIn f o platform_path; -fun open_output f = with_file TextIO.openOut TextIO.closeOut f o platform_path; -fun open_append f = with_file TextIO.openAppend TextIO.closeOut f o platform_path; - -end; - - -(* directory content *) - -fun fold_dir f path a = open_dir (fn stream => - let - fun read x = - (case OS.FileSys.readDir stream of - NONE => x - | SOME entry => read (f entry x)); - in read a end) path; - -fun read_dir path = rev (fold_dir cons path []); - - -(* input *) - -(* - scalable iterator: - . avoid size limit of TextIO.inputAll and overhead of many TextIO.inputLine - . 
optional terminator at end-of-input -*) -fun fold_chunks terminator f path a = open_input (fn file => - let - fun read buf x = - (case TextIO.input file of - "" => (case Buffer.content buf of "" => x | line => f line x) - | input => - (case String.fields (fn c => c = terminator) input of - [rest] => read (Buffer.add rest buf) x - | line :: more => read_lines more (f (Buffer.content (Buffer.add line buf)) x))) - and read_lines [rest] x = read (Buffer.add rest Buffer.empty) x - | read_lines (line :: more) x = read_lines more (f line x); - in read Buffer.empty a end) path; - -fun fold_lines f = fold_chunks #"\n" f; -fun fold_pages f = fold_chunks #"\f" f; - -fun read_lines path = rev (fold_lines cons path []); -fun read_pages path = rev (fold_pages cons path []); - -val read = open_input TextIO.inputAll; - - -(* output *) - -fun output txts file = List.app (fn txt => TextIO.output (file, txt)) txts; - -fun write_list path txts = open_output (output txts) path; -fun append_list path txts = open_append (output txts) path; - -fun write path txt = write_list path [txt]; -fun append path txt = append_list path [txt]; - -fun write_buffer path buf = open_output (Buffer.output buf) path; - - -(* eq *) - -fun eq paths = - (case try (pairself (OS.FileSys.fileId o platform_path)) paths of - SOME ids => is_equal (OS.FileSys.compare ids) - | NONE => false); - -end; diff --git a/core/Pure/General/file.scala b/core/Pure/General/file.scala deleted file mode 100644 index fc8882e1..00000000 --- a/core/Pure/General/file.scala +++ /dev/null @@ -1,148 +0,0 @@ -/* Title: Pure/General/file.scala - Author: Makarius - -File system operations. -*/ - -package isabelle - - -import java.io.{BufferedWriter, OutputStreamWriter, FileOutputStream, BufferedOutputStream, - OutputStream, InputStream, FileInputStream, BufferedInputStream, BufferedReader, - InputStreamReader, File => JFile, IOException} -import java.util.zip.{GZIPInputStream, GZIPOutputStream} - -import scala.collection.mutable - - -object File -{ - /* directory content */ - - def read_dir(dir: Path): List[String] = - { - if (!dir.is_dir) error("Bad directory: " + dir.toString) - val files = dir.file.listFiles - if (files == null) Nil - else files.toList.map(_.getName) - } - - def find_files(dir: Path): Stream[Path] = - read_dir(dir).toStream.map(name => - if (Path.is_wellformed(name)) { - val path = dir + Path.basic(name) - path #:: (if (path.is_dir) find_files(path) else Stream.empty) - } - else Stream.empty).flatten - - - /* read */ - - def read(file: JFile): String = Bytes.read(file).toString - def read(path: Path): String = read(path.file) - - - def read_stream(reader: BufferedReader): String = - { - val output = new StringBuilder(100) - var c = -1 - while ({ c = reader.read; c != -1 }) output += c.toChar - reader.close - output.toString - } - - def read_stream(stream: InputStream): String = - read_stream(new BufferedReader(new InputStreamReader(stream, UTF8.charset))) - - def read_gzip(file: JFile): String = - read_stream(new GZIPInputStream(new BufferedInputStream(new FileInputStream(file)))) - - def read_gzip(path: Path): String = read_gzip(path.file) - - - /* read lines */ - - def read_lines(reader: BufferedReader, progress: String => Unit): List[String] = - { - val result = new mutable.ListBuffer[String] - var line: String = null - while ({ line = try { reader.readLine} catch { case _: IOException => null }; line != null }) { - progress(line) - result += line - } - reader.close - result.toList - } - - - /* try_read */ - - def try_read(paths: Seq[Path]): 
String = - { - val buf = new StringBuilder - for (path <- paths if path.is_file) { - buf.append(read(path)) - buf.append('\n') - } - buf.toString - } - - - /* write */ - - def write_file(file: JFile, text: Iterable[CharSequence], - make_stream: OutputStream => OutputStream) - { - val stream = make_stream(new FileOutputStream(file)) - val writer = new BufferedWriter(new OutputStreamWriter(stream, UTF8.charset)) - try { text.iterator.foreach(writer.append(_)) } - finally { writer.close } - } - - def write(file: JFile, text: Iterable[CharSequence]): Unit = write_file(file, text, (s) => s) - def write(file: JFile, text: CharSequence): Unit = write(file, List(text)) - def write(path: Path, text: Iterable[CharSequence]): Unit = write(path.file, text) - def write(path: Path, text: CharSequence): Unit = write(path.file, text) - - def write_gzip(file: JFile, text: Iterable[CharSequence]): Unit = - write_file(file, text, (s: OutputStream) => new GZIPOutputStream(new BufferedOutputStream(s))) - def write_gzip(file: JFile, text: CharSequence): Unit = write_gzip(file, List(text)) - def write_gzip(path: Path, text: Iterable[CharSequence]): Unit = write_gzip(path.file, text) - def write_gzip(path: Path, text: CharSequence): Unit = write_gzip(path.file, text) - - def write_backup(path: Path, text: CharSequence) - { - path.file renameTo path.backup.file - File.write(path, text) - } - - - /* copy */ - - def eq(file1: JFile, file2: JFile): Boolean = - try { java.nio.file.Files.isSameFile(file1.toPath, file2.toPath) } - catch { case ERROR(_) => false } - - def copy(src: JFile, dst: JFile) - { - if (!eq(src, dst)) { - val in = new FileInputStream(src) - try { - val out = new FileOutputStream(dst) - try { - val buf = new Array[Byte](65536) - var m = 0 - do { - m = in.read(buf, 0, buf.length) - if (m != -1) out.write(buf, 0, m) - } while (m != -1) - } - finally { out.close } - } - finally { in.close } - } - } - - def copy(path1: Path, path2: Path): Unit = copy(path1.file, path2.file) -} - diff --git a/core/Pure/General/graph.ML b/core/Pure/General/graph.ML deleted file mode 100644 index ced77075..00000000 --- a/core/Pure/General/graph.ML +++ /dev/null @@ -1,350 +0,0 @@ -(* Title: Pure/General/graph.ML - Author: Markus Wenzel and Stefan Berghofer, TU Muenchen - -Directed graphs. 
-*) - -signature GRAPH = -sig - type key - structure Keys: - sig - type T - val is_empty: T -> bool - val fold: (key -> 'a -> 'a) -> T -> 'a -> 'a - val fold_rev: (key -> 'a -> 'a) -> T -> 'a -> 'a - val dest: T -> key list - end - type 'a T - exception DUP of key - exception SAME - exception UNDEF of key - val empty: 'a T - val is_empty: 'a T -> bool - val keys: 'a T -> key list - val get_first: (key * ('a * (Keys.T * Keys.T)) -> 'b option) -> 'a T -> 'b option - val fold: (key * ('a * (Keys.T * Keys.T)) -> 'b -> 'b) -> 'a T -> 'b -> 'b - val get_entry: 'a T -> key -> key * ('a * (Keys.T * Keys.T)) (*exception UNDEF*) - val get_node: 'a T -> key -> 'a (*exception UNDEF*) - val map_node: key -> ('a -> 'a) -> 'a T -> 'a T - val map: (key -> 'a -> 'b) -> 'a T -> 'b T - val imm_preds: 'a T -> key -> Keys.T - val imm_succs: 'a T -> key -> Keys.T - val immediate_preds: 'a T -> key -> key list - val immediate_succs: 'a T -> key -> key list - val all_preds: 'a T -> key list -> key list - val all_succs: 'a T -> key list -> key list - val strong_conn: 'a T -> key list list - val map_strong_conn: ((key * 'a) list -> 'b list) -> 'a T -> 'b T - val minimals: 'a T -> key list - val maximals: 'a T -> key list - val is_minimal: 'a T -> key -> bool - val is_maximal: 'a T -> key -> bool - val new_node: key * 'a -> 'a T -> 'a T (*exception DUP*) - val default_node: key * 'a -> 'a T -> 'a T - val del_node: key -> 'a T -> 'a T (*exception UNDEF*) - val is_edge: 'a T -> key * key -> bool - val add_edge: key * key -> 'a T -> 'a T (*exception UNDEF*) - val del_edge: key * key -> 'a T -> 'a T (*exception UNDEF*) - val restrict: (key -> bool) -> 'a T -> 'a T - val dest: 'a T -> ((key * 'a) * key list) list - val make: ((key * 'a) * key list) list -> 'a T (*exception DUP | UNDEF*) - val merge: ('a * 'a -> bool) -> 'a T * 'a T -> 'a T (*exception DUP*) - val join: (key -> 'a * 'a -> 'a) (*exception DUP/SAME*) -> - 'a T * 'a T -> 'a T (*exception DUP*) - val irreducible_paths: 'a T -> key * key -> key list list - exception CYCLES of key list list - val add_edge_acyclic: key * key -> 'a T -> 'a T (*exception UNDEF | CYCLES*) - val add_deps_acyclic: key * key list -> 'a T -> 'a T (*exception UNDEF | CYCLES*) - val merge_acyclic: ('a * 'a -> bool) -> 'a T * 'a T -> 'a T (*exception CYCLES*) - val topological_order: 'a T -> key list - val add_edge_trans_acyclic: key * key -> 'a T -> 'a T (*exception UNDEF | CYCLES*) - val merge_trans_acyclic: ('a * 'a -> bool) -> 'a T * 'a T -> 'a T (*exception CYCLES*) - exception DEP of key * key - val schedule: ((key * 'b) list -> key * 'a -> 'b) -> 'a T -> 'b list (*exception DEP*) - val encode: key XML.Encode.T -> 'a XML.Encode.T -> 'a T XML.Encode.T - val decode: key XML.Decode.T -> 'a XML.Decode.T -> 'a T XML.Decode.T -end; - -functor Graph(Key: KEY): GRAPH = -struct - -(* keys *) - -type key = Key.key; -val eq_key = is_equal o Key.ord; - -structure Table = Table(Key); - -structure Keys = -struct - -abstype T = Keys of unit Table.table -with - -val empty = Keys Table.empty; -fun is_empty (Keys tab) = Table.is_empty tab; - -fun member (Keys tab) = Table.defined tab; -fun insert x (Keys tab) = Keys (Table.insert (K true) (x, ()) tab); -fun remove x (Keys tab) = Keys (Table.delete_safe x tab); - -fun fold f (Keys tab) = Table.fold (f o #1) tab; -fun fold_rev f (Keys tab) = Table.fold_rev (f o #1) tab; - -fun dest keys = fold_rev cons keys []; - -fun filter P keys = fold (fn x => P x ? 
insert x) keys empty; - -end; -end; - - -(* graphs *) - -datatype 'a T = Graph of ('a * (Keys.T * Keys.T)) Table.table; - -exception DUP = Table.DUP; -exception UNDEF = Table.UNDEF; -exception SAME = Table.SAME; - -val empty = Graph Table.empty; -fun is_empty (Graph tab) = Table.is_empty tab; -fun keys (Graph tab) = Table.keys tab; - -fun get_first f (Graph tab) = Table.get_first f tab; -fun fold_graph f (Graph tab) = Table.fold f tab; - -fun get_entry (Graph tab) x = - (case Table.lookup_key tab x of - SOME entry => entry - | NONE => raise UNDEF x); - -fun map_entry x f (G as Graph tab) = Graph (Table.update (x, f (#2 (get_entry G x))) tab); - - -(* nodes *) - -fun get_node G = #1 o #2 o get_entry G; - -fun map_node x f = map_entry x (fn (i, ps) => (f i, ps)); - -fun map_nodes f (Graph tab) = Graph (Table.map (apfst o f) tab); - - -(* reachability *) - -(*nodes reachable from xs -- topologically sorted for acyclic graphs*) -fun reachable next xs = - let - fun reach x (rs, R) = - if Keys.member R x then (rs, R) - else Keys.fold_rev reach (next x) (rs, Keys.insert x R) |>> cons x; - fun reachs x (rss, R) = - reach x ([], R) |>> (fn rs => rs :: rss); - in fold reachs xs ([], Keys.empty) end; - -(*immediate*) -fun imm_preds G = #1 o #2 o #2 o get_entry G; -fun imm_succs G = #2 o #2 o #2 o get_entry G; - -fun immediate_preds G = Keys.dest o imm_preds G; -fun immediate_succs G = Keys.dest o imm_succs G; - -(*transitive*) -fun all_preds G = flat o #1 o reachable (imm_preds G); -fun all_succs G = flat o #1 o reachable (imm_succs G); - -(*strongly connected components; see: David King and John Launchbury, - "Structuring Depth First Search Algorithms in Haskell"*) -fun strong_conn G = - rev (filter_out null (#1 (reachable (imm_preds G) (all_succs G (keys G))))); - -fun map_strong_conn f G = - let - val xss = strong_conn G; - fun map' xs = - fold2 (curry Table.update) xs (f (AList.make (get_node G) xs)); - val tab' = Table.empty - |> fold map' xss; - in map_nodes (fn x => fn _ => the (Table.lookup tab' x)) G end; - - -(* minimal and maximal elements *) - -fun minimals G = fold_graph (fn (m, (_, (preds, _))) => Keys.is_empty preds ? cons m) G []; -fun maximals G = fold_graph (fn (m, (_, (_, succs))) => Keys.is_empty succs ? cons m) G []; -fun is_minimal G x = Keys.is_empty (imm_preds G x); -fun is_maximal G x = Keys.is_empty (imm_succs G x); - - -(* node operations *) - -fun new_node (x, info) (Graph tab) = - Graph (Table.update_new (x, (info, (Keys.empty, Keys.empty))) tab); - -fun default_node (x, info) (Graph tab) = - Graph (Table.default (x, (info, (Keys.empty, Keys.empty))) tab); - -fun del_node x (G as Graph tab) = - let - fun del_adjacent which y = - Table.map_entry y (fn (i, ps) => (i, (which (Keys.remove x) ps))); - val (preds, succs) = #2 (#2 (get_entry G x)); - in - Graph (tab - |> Table.delete x - |> Keys.fold (del_adjacent apsnd) preds - |> Keys.fold (del_adjacent apfst) succs) - end; - -fun restrict pred G = - fold_graph (fn (x, _) => not (pred x) ? 
del_node x) G G; - - -(* edge operations *) - -fun is_edge G (x, y) = Keys.member (imm_succs G x) y handle UNDEF _ => false; - -fun add_edge (x, y) G = - if is_edge G (x, y) then G - else - G |> map_entry y (fn (i, (preds, succs)) => (i, (Keys.insert x preds, succs))) - |> map_entry x (fn (i, (preds, succs)) => (i, (preds, Keys.insert y succs))); - -fun del_edge (x, y) G = - if is_edge G (x, y) then - G |> map_entry y (fn (i, (preds, succs)) => (i, (Keys.remove x preds, succs))) - |> map_entry x (fn (i, (preds, succs)) => (i, (preds, Keys.remove y succs))) - else G; - -fun diff_edges G1 G2 = - fold_graph (fn (x, (_, (_, succs))) => - Keys.fold (fn y => not (is_edge G2 (x, y)) ? cons (x, y)) succs) G1 []; - -fun edges G = diff_edges G empty; - - -(* dest and make *) - -fun dest G = fold_graph (fn (x, (i, (_, succs))) => cons ((x, i), Keys.dest succs)) G []; - -fun make entries = - empty - |> fold (new_node o fst) entries - |> fold (fn ((x, _), ys) => fold (fn y => add_edge (x, y)) ys) entries; - - -(* join and merge *) - -fun no_edges (i, _) = (i, (Keys.empty, Keys.empty)); - -fun join f (G1 as Graph tab1, G2 as Graph tab2) = - let fun join_node key ((i1, edges1), (i2, _)) = (f key (i1, i2), edges1) in - if pointer_eq (G1, G2) then G1 - else fold add_edge (edges G2) (Graph (Table.join join_node (tab1, Table.map (K no_edges) tab2))) - end; - -fun gen_merge add eq (G1 as Graph tab1, G2 as Graph tab2) = - let fun eq_node ((i1, _), (i2, _)) = eq (i1, i2) in - if pointer_eq (G1, G2) then G1 - else fold add (edges G2) (Graph (Table.merge eq_node (tab1, Table.map (K no_edges) tab2))) - end; - -fun merge eq GG = gen_merge add_edge eq GG; - - -(* irreducible paths -- Hasse diagram *) - -fun irreducible_preds G X path z = - let - fun red x x' = is_edge G (x, x') andalso not (eq_key (x', z)); - fun irreds [] xs' = xs' - | irreds (x :: xs) xs' = - if not (Keys.member X x) orelse eq_key (x, z) orelse member eq_key path x orelse - exists (red x) xs orelse exists (red x) xs' - then irreds xs xs' - else irreds xs (x :: xs'); - in irreds (immediate_preds G z) [] end; - -fun irreducible_paths G (x, y) = - let - val (_, X) = reachable (imm_succs G) [x]; - fun paths path z = - if eq_key (x, z) then cons (z :: path) - else fold (paths (z :: path)) (irreducible_preds G X path z); - in if eq_key (x, y) andalso not (is_edge G (x, x)) then [[]] else paths [] y [] end; - - -(* maintain acyclic graphs *) - -exception CYCLES of key list list; - -fun add_edge_acyclic (x, y) G = - if is_edge G (x, y) then G - else - (case irreducible_paths G (y, x) of - [] => add_edge (x, y) G - | cycles => raise CYCLES (map (cons x) cycles)); - -fun add_deps_acyclic (y, xs) = fold (fn x => add_edge_acyclic (x, y)) xs; - -fun merge_acyclic eq GG = gen_merge add_edge_acyclic eq GG; - -fun topological_order G = minimals G |> all_succs G; - - -(* maintain transitive acyclic graphs *) - -fun add_edge_trans_acyclic (x, y) G = - add_edge_acyclic (x, y) G - |> fold_product (curry add_edge) (all_preds G [x]) (all_succs G [y]); - -fun merge_trans_acyclic eq (G1, G2) = - if pointer_eq (G1, G2) then G1 - else - merge_acyclic eq (G1, G2) - |> fold add_edge_trans_acyclic (diff_edges G1 G2) - |> fold add_edge_trans_acyclic (diff_edges G2 G1); - - -(* schedule acyclic graph *) - -exception DEP of key * key; - -fun schedule f G = - let - val xs = topological_order G; - val results = (xs, Table.empty) |-> fold (fn x => fn tab => - let - val a = get_node G x; - val deps = immediate_preds G x |> map (fn y => - (case Table.lookup tab y of - SOME b => (y, 
b) - | NONE => raise DEP (x, y))); - in Table.update (x, f deps (x, a)) tab end); - in map (the o Table.lookup results) xs end; - - -(* XML data representation *) - -fun encode key info G = - dest G |> - let open XML.Encode - in list (pair (pair key info) (list key)) end; - -fun decode key info body = - body |> - let open XML.Decode - in list (pair (pair key info) (list key)) end |> make; - - -(*final declarations of this structure!*) -val map = map_nodes; -val fold = fold_graph; - -end; - -structure Graph = Graph(type key = string val ord = fast_string_ord); -structure String_Graph = Graph(type key = string val ord = string_ord); -structure Int_Graph = Graph(type key = int val ord = int_ord); diff --git a/core/Pure/General/graph.scala b/core/Pure/General/graph.scala deleted file mode 100644 index d9fc3dc3..00000000 --- a/core/Pure/General/graph.scala +++ /dev/null @@ -1,270 +0,0 @@ -/* Title: Pure/General/graph.scala - Module: PIDE - Author: Makarius - -Directed graphs. -*/ - -package isabelle - - -import scala.collection.immutable.{SortedMap, SortedSet} -import scala.annotation.tailrec - - -object Graph -{ - class Duplicate[Key](val key: Key) extends Exception - class Undefined[Key](val key: Key) extends Exception - class Cycles[Key](val cycles: List[List[Key]]) extends Exception - - def empty[Key, A](implicit ord: Ordering[Key]): Graph[Key, A] = - new Graph[Key, A](SortedMap.empty(ord)) - - def make[Key, A](entries: List[((Key, A), List[Key])])(implicit ord: Ordering[Key]) - : Graph[Key, A] = - { - val graph1 = - (empty[Key, A](ord) /: entries) { case (graph, ((x, info), _)) => graph.new_node(x, info) } - val graph2 = - (graph1 /: entries) { case (graph, ((x, _), ys)) => (graph /: ys)(_.add_edge(x, _)) } - graph2 - } - - def string[A]: Graph[String, A] = empty(Ordering.String) - def int[A]: Graph[Int, A] = empty(Ordering.Int) - def long[A]: Graph[Long, A] = empty(Ordering.Long) - - - /* XML data representation */ - - def encode[Key, A](key: XML.Encode.T[Key], info: XML.Encode.T[A]): XML.Encode.T[Graph[Key, A]] = - ((graph: Graph[Key, A]) => { - import XML.Encode._ - list(pair(pair(key, info), list(key)))(graph.dest) - }) - - def decode[Key, A](key: XML.Decode.T[Key], info: XML.Decode.T[A])(implicit ord: Ordering[Key]) - : XML.Decode.T[Graph[Key, A]] = - ((body: XML.Body) => { - import XML.Decode._ - make(list(pair(pair(key, info), list(key)))(body))(ord) - }) -} - - -final class Graph[Key, A] private(rep: SortedMap[Key, (A, (SortedSet[Key], SortedSet[Key]))]) -{ - type Keys = SortedSet[Key] - type Entry = (A, (Keys, Keys)) - - def ordering: Ordering[Key] = rep.ordering - def empty_keys: Keys = SortedSet.empty[Key](ordering) - - - /* graphs */ - - def is_empty: Boolean = rep.isEmpty - def defined(x: Key): Boolean = rep.isDefinedAt(x) - - def iterator: Iterator[(Key, Entry)] = rep.iterator - - def keys_iterator: Iterator[Key] = iterator.map(_._1) - def keys: List[Key] = keys_iterator.toList - - def dest: List[((Key, A), List[Key])] = - (for ((x, (i, (_, succs))) <- iterator) yield ((x, i), succs.toList)).toList - - override def toString: String = - dest.map({ case ((x, _), ys) => - x.toString + " -> " + ys.iterator.map(_.toString).mkString("{", ", ", "}") }) - .mkString("Graph(", ", ", ")") - - private def get_entry(x: Key): Entry = - rep.get(x) match { - case Some(entry) => entry - case None => throw new Graph.Undefined(x) - } - - private def map_entry(x: Key, f: Entry => Entry): Graph[Key, A] = - new Graph[Key, A](rep + (x -> f(get_entry(x)))) - - - /* nodes */ - - def get_node(x: Key): 
A = get_entry(x)._1 - - def map_node(x: Key, f: A => A): Graph[Key, A] = - map_entry(x, { case (i, ps) => (f(i), ps) }) - - - /* reachability */ - - /*nodes reachable from xs -- topologically sorted for acyclic graphs*/ - def reachable(next: Key => Keys, xs: List[Key]): (List[List[Key]], Keys) = - { - def reach(x: Key, reached: (List[Key], Keys)): (List[Key], Keys) = - { - val (rs, r_set) = reached - if (r_set(x)) reached - else { - val (rs1, r_set1) = (next(x) :\ (rs, r_set + x))(reach) - (x :: rs1, r_set1) - } - } - def reachs(reached: (List[List[Key]], Keys), x: Key): (List[List[Key]], Keys) = - { - val (rss, r_set) = reached - val (rs, r_set1) = reach(x, (Nil, r_set)) - (rs :: rss, r_set1) - } - ((List.empty[List[Key]], empty_keys) /: xs)(reachs) - } - - /*immediate*/ - def imm_preds(x: Key): Keys = get_entry(x)._2._1 - def imm_succs(x: Key): Keys = get_entry(x)._2._2 - - /*transitive*/ - def all_preds(xs: List[Key]): List[Key] = reachable(imm_preds, xs)._1.flatten - def all_succs(xs: List[Key]): List[Key] = reachable(imm_succs, xs)._1.flatten - - /*strongly connected components; see: David King and John Launchbury, - "Structuring Depth First Search Algorithms in Haskell"*/ - def strong_conn: List[List[Key]] = - reachable(imm_preds, all_succs(keys))._1.filterNot(_.isEmpty).reverse - - - /* minimal and maximal elements */ - - def minimals: List[Key] = - (List.empty[Key] /: rep) { - case (ms, (m, (_, (preds, _)))) => if (preds.isEmpty) m :: ms else ms } - - def maximals: List[Key] = - (List.empty[Key] /: rep) { - case (ms, (m, (_, (_, succs)))) => if (succs.isEmpty) m :: ms else ms } - - def is_minimal(x: Key): Boolean = imm_preds(x).isEmpty - def is_maximal(x: Key): Boolean = imm_succs(x).isEmpty - - - /* node operations */ - - def new_node(x: Key, info: A): Graph[Key, A] = - { - if (defined(x)) throw new Graph.Duplicate(x) - else new Graph[Key, A](rep + (x -> (info, (empty_keys, empty_keys)))) - } - - def default_node(x: Key, info: A): Graph[Key, A] = - if (defined(x)) this else new_node(x, info) - - private def del_adjacent(fst: Boolean, x: Key)(map: SortedMap[Key, Entry], y: Key) - : SortedMap[Key, Entry] = - map.get(y) match { - case None => map - case Some((i, (preds, succs))) => - map + (y -> (i, if (fst) (preds - x, succs) else (preds, succs - x))) - } - - def del_node(x: Key): Graph[Key, A] = - { - val (preds, succs) = get_entry(x)._2 - new Graph[Key, A]( - (((rep - x) /: preds)(del_adjacent(false, x)) /: succs)(del_adjacent(true, x))) - } - - def restrict(pred: Key => Boolean): Graph[Key, A] = - (this /: iterator){ case (graph, (x, _)) => if (!pred(x)) graph.del_node(x) else graph } - - - /* edge operations */ - - def is_edge(x: Key, y: Key): Boolean = - defined(x) && defined(y) && imm_succs(x)(y) - - def add_edge(x: Key, y: Key): Graph[Key, A] = - if (is_edge(x, y)) this - else - map_entry(y, { case (i, (preds, succs)) => (i, (preds + x, succs)) }). - map_entry(x, { case (i, (preds, succs)) => (i, (preds, succs + y)) }) - - def del_edge(x: Key, y: Key): Graph[Key, A] = - if (is_edge(x, y)) - map_entry(y, { case (i, (preds, succs)) => (i, (preds - x, succs)) }). 
- map_entry(x, { case (i, (preds, succs)) => (i, (preds, succs - y)) }) - else this - - - /* irreducible paths -- Hasse diagram */ - - private def irreducible_preds(x_set: Keys, path: List[Key], z: Key): List[Key] = - { - def red(x: Key)(x1: Key) = is_edge(x, x1) && x1 != z - @tailrec def irreds(xs0: List[Key], xs1: List[Key]): List[Key] = - xs0 match { - case Nil => xs1 - case x :: xs => - if (!(x_set(x)) || x == z || path.contains(x) || - xs.exists(red(x)) || xs1.exists(red(x))) - irreds(xs, xs1) - else irreds(xs, x :: xs1) - } - irreds(imm_preds(z).toList, Nil) - } - - def irreducible_paths(x: Key, y: Key): List[List[Key]] = - { - val (_, x_set) = reachable(imm_succs, List(x)) - def paths(path: List[Key])(ps: List[List[Key]], z: Key): List[List[Key]] = - if (x == z) (z :: path) :: ps - else (ps /: irreducible_preds(x_set, path, z))(paths(z :: path)) - if ((x == y) && !is_edge(x, x)) List(Nil) else paths(Nil)(Nil, y) - } - - - /* transitive closure and reduction */ - - private def transitive_step(z: Key): Graph[Key, A] = - { - val (preds, succs) = get_entry(z)._2 - var graph = this - for (x <- preds; y <- succs) graph = graph.add_edge(x, y) - graph - } - - def transitive_closure: Graph[Key, A] = (this /: keys_iterator)(_.transitive_step(_)) - - def transitive_reduction_acyclic: Graph[Key, A] = - { - val trans = this.transitive_closure - if (trans.iterator.exists({ case (x, (_, (_, succs))) => succs.contains(x) })) - error("Cyclic graph") - - var graph = this - for { - (x, (_, (_, succs))) <- iterator - y <- succs - if trans.imm_preds(y).exists(z => trans.is_edge(x, z)) - } graph = graph.del_edge(x, y) - graph - } - - - /* maintain acyclic graphs */ - - def add_edge_acyclic(x: Key, y: Key): Graph[Key, A] = - if (is_edge(x, y)) this - else { - irreducible_paths(y, x) match { - case Nil => add_edge(x, y) - case cycles => throw new Graph.Cycles(cycles.map(x :: _)) - } - } - - def add_deps_acyclic(y: Key, xs: List[Key]): Graph[Key, A] = - (this /: xs)(_.add_edge_acyclic(_, y)) - - def topological_order: List[Key] = all_succs(minimals) -} diff --git a/core/Pure/General/graph_display.ML b/core/Pure/General/graph_display.ML deleted file mode 100644 index c360cac2..00000000 --- a/core/Pure/General/graph_display.ML +++ /dev/null @@ -1,83 +0,0 @@ -(* Title: Pure/General/graph_display.ML - Author: Makarius - -Generic graph display, with browser and graphview backends. 
-*) - -signature GRAPH_DISPLAY = -sig - type node = - {name: string, ID: string, dir: string, unfold: bool, - path: string, parents: string list, content: Pretty.T list} - type graph = node list - val write_graph_browser: Path.T -> graph -> unit - val browserN: string - val graphviewN: string - val active_graphN: string - val display_graph: graph -> unit -end; - -structure Graph_Display: GRAPH_DISPLAY = -struct - -(* external graph representation *) - -type node = - {name: string, ID: string, dir: string, unfold: bool, - path: string, parents: string list, content: Pretty.T list}; - -type graph = node list; - - -(* print modes *) - -val browserN = "browser"; -val graphviewN = "graphview"; -val active_graphN = "active_graph"; - -fun is_browser () = - (case find_first (fn m => m = browserN orelse m = graphviewN) (print_mode_value ()) of - SOME m => m = browserN - | NONE => true); - - -(* encode graph *) - -fun encode_browser (graph: graph) = - cat_lines (map (fn {name, ID, dir, unfold, path, parents, ...} => - "\"" ^ name ^ "\" \"" ^ ID ^ "\" \"" ^ dir ^ (if unfold then "\" + \"" else "\" \"") ^ - path ^ "\" > " ^ space_implode " " (map quote parents) ^ " ;") graph); - -fun write_graph_browser path graph = File.write path (encode_browser graph); - - -val encode_content = YXML.parse_body o Pretty.symbolic_string_of o Pretty.chunks; - -fun encode_graphview (graph: graph) = - Graph.empty - |> fold (fn {ID, name, content, ...} => Graph.new_node (ID, (name, content))) graph - |> fold (fn {ID = a, parents = bs, ...} => fold (fn b => Graph.add_edge (b, a)) bs) graph - |> let open XML.Encode in Graph.encode string (pair string encode_content) end; - - -(* display graph *) - -fun display_graph graph = - if print_mode_active active_graphN then - let - val (markup, body) = - if is_browser () then (Markup.browserN, encode_browser graph) - else (Markup.graphviewN, YXML.string_of_body (encode_graphview graph)); - val ((bg1, bg2), en) = - YXML.output_markup_elem (Active.make_markup markup {implicit = false, properties = []}); - in writeln ("See " ^ bg1 ^ body ^ bg2 ^ "graph" ^ en) end - else - let - val _ = writeln "Displaying graph ..."; - val path = Isabelle_System.create_tmp_path "graph" ""; - val _ = write_graph_browser path graph; - val _ = Isabelle_System.isabelle_tool "browser" ("-c " ^ File.shell_path path ^ " &"); - in () end; - -end; - diff --git a/core/Pure/General/graphics_file.scala b/core/Pure/General/graphics_file.scala deleted file mode 100644 index b0ba83ec..00000000 --- a/core/Pure/General/graphics_file.scala +++ /dev/null @@ -1,60 +0,0 @@ -/* Title: Pure/General/graphics_file.scala - Author: Makarius - -File system operations for Graphics2D output. 
-*/ - -package isabelle - - -import java.awt.Graphics2D -import java.awt.geom.Rectangle2D -import java.io.{FileOutputStream, BufferedOutputStream, File => JFile} - -import org.jfree.chart.JFreeChart - - -object Graphics_File -{ - /* PDF */ - - def write_pdf(file: JFile, paint: Graphics2D => Unit, width: Int, height: Int) - { - import com.lowagie.text.{Document, Rectangle} - import com.lowagie.text.pdf.PdfWriter - - val out = new BufferedOutputStream(new FileOutputStream(file)) - try { - val document = new Document() - try { - document.setPageSize(new Rectangle(width, height)) - val writer = PdfWriter.getInstance(document, out) - document.open() - - val cb = writer.getDirectContent() - val tp = cb.createTemplate(width, height) - val gfx = tp.createGraphics(width, height) - - paint(gfx) - gfx.dispose - - cb.addTemplate(tp, 1, 0, 0, 1, 0, 0) - } - finally { document.close() } - } - finally { out.close } - } - - def write_pdf(path: Path, paint: Graphics2D => Unit, width: Int, height: Int): Unit = - write_pdf(path.file, paint, width, height) - - def write_pdf(file: JFile, chart: JFreeChart, width: Int, height: Int) - { - def paint(gfx: Graphics2D) = chart.draw(gfx, new Rectangle2D.Double(0, 0, width, height)) - write_pdf(file, paint _, width, height) - } - - def write_pdf(path: Path, chart: JFreeChart, width: Int, height: Int): Unit = - write_pdf(path.file, chart, width, height) -} - diff --git a/core/Pure/General/heap.ML b/core/Pure/General/heap.ML deleted file mode 100644 index 8f4ce69c..00000000 --- a/core/Pure/General/heap.ML +++ /dev/null @@ -1,87 +0,0 @@ -(* Title: Pure/General/heap.ML - Author: Lawrence C Paulson and Markus Wenzel - -Heaps over linearly ordered types. See also Chris Okasaki: "Purely -Functional Data Structures" (Chapter 3), Cambridge University Press, -1998. 
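As a quick illustration of the signature below, a heap of integers can be built and
queried as follows (IntHeap is only an example instantiation; int_ord is the standard
integer ordering from the Isabelle library):

  structure IntHeap = Heap(type elem = int val ord = int_ord);  (* example instance *)
  val h = fold IntHeap.insert [3, 1, 2] IntHeap.empty;
  val (x, h') = IntHeap.min_elem h;    (* x = 1, h' still contains 2 and 3 *)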
-*) - -signature HEAP = -sig - type elem - type T - val empty: T - val is_empty: T -> bool - val merge: T * T -> T - val insert: elem -> T -> T - val min: T -> elem (*exception Empty*) - val delete_min: T -> T (*exception Empty*) - val min_elem: T -> elem * T (*exception Empty*) - val upto: elem -> T -> elem list * T -end; - -functor Heap(type elem val ord: elem * elem -> order): HEAP = -struct - - -(* datatype heap *) - -type elem = elem; -datatype T = Empty | Heap of int * elem * T * T; - - -(* empty heaps *) - -val empty = Empty; - -fun is_empty Empty = true - | is_empty (Heap _) = false; - - -(* build heaps *) - -local - -fun rank Empty = 0 - | rank (Heap (r, _, _, _)) = r; - -fun heap x a b = - if rank a >= rank b then Heap (rank b + 1, x, a, b) - else Heap (rank a + 1, x, b, a); - -in - -fun merge (h, Empty) = h - | merge (Empty, h) = h - | merge (h1 as Heap (_, x1, a1, b1), h2 as Heap (_, x2, a2, b2)) = - (case ord (x1, x2) of - GREATER => heap x2 a2 (merge (h1, b2)) - | _ => heap x1 a1 (merge (b1, h2))); - -fun insert x h = merge (Heap (1, x, Empty, Empty), h); - -end; - - -(* minimum element *) - -fun min Empty = raise List.Empty - | min (Heap (_, x, _, _)) = x; - -fun delete_min Empty = raise List.Empty - | delete_min (Heap (_, _, a, b)) = merge (a, b); - -fun min_elem h = (min h, delete_min h); - - -(* initial interval *) - -nonfix upto; - -fun upto _ Empty = ([], Empty) - | upto limit (h as Heap (_, x, _, _)) = - (case ord (x, limit) of - GREATER => ([], h) - | _ => upto limit (delete_min h) |>> cons x); - -end; diff --git a/core/Pure/General/integer.ML b/core/Pure/General/integer.ML deleted file mode 100644 index 08f10bfc..00000000 --- a/core/Pure/General/integer.ML +++ /dev/null @@ -1,69 +0,0 @@ -(* Title: Pure/General/integer.ML - Author: Florian Haftmann, TU Muenchen - -Auxiliary operations on (unbounded) integers. -*) - -signature INTEGER = -sig - val min: int -> int -> int - val max: int -> int -> int - val add: int -> int -> int - val mult: int -> int -> int - val sum: int list -> int - val prod: int list -> int - val sign: int -> order - val div_mod: int -> int -> int * int - val square: int -> int - val pow: int -> int -> int (* exponent -> base -> result *) - val gcd: int -> int -> int - val gcds: int list -> int - val lcm: int -> int -> int - val lcms: int list -> int -end; - -structure Integer : INTEGER = -struct - -fun min x y = Int.min (x, y); -fun max x y = Int.max (x, y); - -fun add x y = x + y; -fun mult x y = x * y; - -fun sum xs = fold add xs 0; -fun prod xs = fold mult xs 1; - -fun sign x = int_ord (x, 0); - -fun div_mod x y = IntInf.divMod (x, y); - -fun square x = x * x; - -fun pow k l = - let - fun pw 0 _ = 1 - | pw 1 l = l - | pw k l = - let - val (k', r) = div_mod k 2; - val l' = pw k' (l * l); - in if r = 0 then l' else l' * l end; - in - if k < 0 - then error "pow: negative exponent" - else pw k l - end; - -fun gcd x y = - let - fun gxd x y = if y = 0 then x else gxd y (x mod y) - in if x < y then gxd y x else gxd x y end; - -fun gcds xs = fold gcd xs 0; - -fun lcm x y = (x * y) div (gcd x y); -fun lcms xs = fold lcm xs 1; - -end; - diff --git a/core/Pure/General/linear_set.ML b/core/Pure/General/linear_set.ML deleted file mode 100644 index 72ac8d8f..00000000 --- a/core/Pure/General/linear_set.ML +++ /dev/null @@ -1,146 +0,0 @@ -(* Title: Pure/General/linear_set.ML - Author: Makarius - -Sets with canonical linear order, or immutable linked-lists. 
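For illustration, assuming the KEY argument provides "type key" and
"val ord: key * key -> order", a hypothetical instance over integer keys behaves like:

  structure S = Linear_Set(type key = int val ord = int_ord);  (* example instance *)
  S.empty
  |> S.insert_after NONE (1, "a")
  |> S.insert_after (SOME 1) (2, "b")
  |> S.dest;    (* [(1, "a"), (2, "b")] *)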
-*) - -signature LINEAR_SET = -sig - type key - type 'a T - exception DUPLICATE of key - exception UNDEFINED of key - exception NEXT_UNDEFINED of key option - val empty: 'a T - val is_empty: 'a T -> bool - val defined: 'a T -> key -> bool - val lookup: 'a T -> key -> ('a * key option) option - val update: key * 'a -> 'a T -> 'a T - val iterate: key option -> - ((key option * key) * 'a -> 'b -> 'b option) -> 'a T -> 'b -> 'b - val dest: 'a T -> (key * 'a) list - val get_after: 'a T -> key option -> key option - val insert_after: key option -> key * 'a -> 'a T -> 'a T - val delete_after: key option -> 'a T -> 'a T -end; - -functor Linear_Set(Key: KEY): LINEAR_SET = -struct - -(* type key *) - -type key = Key.key; -structure Table = Table(Key); - -exception DUPLICATE of key; -exception UNDEFINED of key; -exception NEXT_UNDEFINED of key option; - - -(* raw entries *) - -fun the_entry entries key = - (case Table.lookup entries key of - NONE => raise UNDEFINED key - | SOME entry => entry); - -fun next_entry entries key = snd (the_entry entries key); - -fun put_entry entry entries = Table.update entry entries; - -fun new_entry entry entries = Table.update_new entry entries - handle Table.DUP key => raise DUPLICATE key; - -fun del_entry key entries = Table.delete_safe key entries; - - -(* set representation and basic operations *) - -datatype 'a T = Set of {start: key option, entries: ('a * key option) Table.table}; - -fun make_set (start, entries) = Set {start = start, entries = entries}; -fun map_set f (Set {start, entries}) = make_set (f (start, entries)); - -fun start_of (Set {start, ...}) = start; -fun entries_of (Set {entries, ...}) = entries; - -val empty = Set {start = NONE, entries = Table.empty}; -fun is_empty set = is_none (start_of set); - -fun defined set key = Table.defined (entries_of set) key; - -fun lookup set key = Table.lookup (entries_of set) key; - -fun update (key, x) = map_set (fn (start, entries) => - (start, put_entry (key, (x, next_entry entries key)) entries)); - - -(* iterate entries *) - -fun optional_start set NONE = start_of set - | optional_start _ some = some; - -fun iterate opt_start f set = - let - val entries = entries_of set; - fun iter _ NONE y = y - | iter prev (SOME key) y = - let - val (x, next) = the_entry entries key; - val item = ((prev, key), x); - in - (case f item y of - NONE => y - | SOME y' => iter (SOME key) next y') - end; - in iter NONE (optional_start set opt_start) end; - -fun dest set = rev (iterate NONE (fn ((_, key), x) => SOME o cons (key, x)) set []); - - -(* relative addressing *) - -fun get_after set hook = - (case hook of - NONE => start_of set - | SOME key => next_entry (entries_of set) key); - -fun insert_after hook (key, x) = map_set (fn (start, entries) => - (case hook of - NONE => (SOME key, new_entry (key, (x, start)) entries) - | SOME key1 => - let - val (x1, next) = the_entry entries key1; - val entries' = entries - |> put_entry (key1, (x1, SOME key)) - |> new_entry (key, (x, next)); - in (start, entries') end)); - -fun delete_after hook set = set |> map_set (fn (start, entries) => - (case hook of - NONE => - (case start of - NONE => raise NEXT_UNDEFINED NONE - | SOME key1 => (next_entry entries key1, del_entry key1 entries)) - | SOME key1 => - (case the_entry entries key1 of - (_, NONE) => raise NEXT_UNDEFINED (SOME key1) - | (x1, SOME key2) => - let - val entries' = entries - |> put_entry (key1, (x1, next_entry entries key2)) - |> del_entry key2; - in (start, entries') end))); - - -(* ML pretty-printing *) - -val _ = - 
PolyML.addPrettyPrinter (fn depth => fn pretty => fn set => - ml_pretty - (ML_Pretty.enum "," "{" "}" - (ML_Pretty.pair (pretty_ml o PolyML.prettyRepresentation) (pretty_ml o pretty)) - (dest set, depth))); - -end; - diff --git a/core/Pure/General/linear_set.scala b/core/Pure/General/linear_set.scala deleted file mode 100644 index 310b26cc..00000000 --- a/core/Pure/General/linear_set.scala +++ /dev/null @@ -1,160 +0,0 @@ -/* Title: Pure/General/linear_set.scala - Module: PIDE - Author: Makarius - Author: Fabian Immler, TU Munich - -Sets with canonical linear order, or immutable linked-lists. -*/ - -package isabelle - - -import scala.collection.SetLike -import scala.collection.generic.{SetFactory, CanBuildFrom, GenericSetTemplate, GenericCompanion} -import scala.collection.mutable.{Builder, SetBuilder} -import scala.language.higherKinds - - -object Linear_Set extends SetFactory[Linear_Set] -{ - private val empty_val: Linear_Set[Nothing] = new Linear_Set[Nothing](None, None, Map(), Map()) - override def empty[A] = empty_val.asInstanceOf[Linear_Set[A]] - - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Linear_Set[A]] = setCanBuildFrom[A] - def newBuilder[A]: Builder[A, Linear_Set[A]] = new SetBuilder[A, Linear_Set[A]](empty[A]) - - class Duplicate[A](x: A) extends Exception - class Undefined[A](x: A) extends Exception - class Next_Undefined[A](x: Option[A]) extends Exception -} - - -final class Linear_Set[A] private( - start: Option[A], end: Option[A], val nexts: Map[A, A], prevs: Map[A, A]) - extends scala.collection.immutable.Set[A] - with GenericSetTemplate[A, Linear_Set] - with SetLike[A, Linear_Set[A]] -{ - override def companion: GenericCompanion[Linear_Set] = Linear_Set - - - /* relative addressing */ - - def next(elem: A): Option[A] = - if (contains(elem)) nexts.get(elem) - else throw new Linear_Set.Undefined(elem) - - def prev(elem: A): Option[A] = - if (contains(elem)) prevs.get(elem) - else throw new Linear_Set.Undefined(elem) - - def get_after(hook: Option[A]): Option[A] = - hook match { - case None => start - case Some(elem) => next(elem) - } - - def insert_after(hook: Option[A], elem: A): Linear_Set[A] = - if (contains(elem)) throw new Linear_Set.Duplicate(elem) - else - hook match { - case None => - start match { - case None => new Linear_Set[A](Some(elem), Some(elem), Map(), Map()) - case Some(elem1) => - new Linear_Set[A](Some(elem), end, - nexts + (elem -> elem1), prevs + (elem1 -> elem)) - } - case Some(elem1) => - if (!contains(elem1)) throw new Linear_Set.Undefined(elem1) - else - nexts.get(elem1) match { - case None => - new Linear_Set[A](start, Some(elem), - nexts + (elem1 -> elem), prevs + (elem -> elem1)) - case Some(elem2) => - new Linear_Set[A](start, end, - nexts + (elem1 -> elem) + (elem -> elem2), - prevs + (elem2 -> elem) + (elem -> elem1)) - } - } - - def append_after(hook: Option[A], elems: Seq[A]): Linear_Set[A] = // FIXME reverse fold - ((hook, this) /: elems) { - case ((last, set), elem) => (Some(elem), set.insert_after(last, elem)) - }._2 - - def delete_after(hook: Option[A]): Linear_Set[A] = - hook match { - case None => - start match { - case None => throw new Linear_Set.Next_Undefined[A](None) - case Some(elem1) => - nexts.get(elem1) match { - case None => empty - case Some(elem2) => - new Linear_Set[A](Some(elem2), end, nexts - elem1, prevs - elem2) - } - } - case Some(elem1) => - if (!contains(elem1)) throw new Linear_Set.Undefined(elem1) - else - nexts.get(elem1) match { - case None => throw new Linear_Set.Next_Undefined(Some(elem1)) - case 
Some(elem2) => - nexts.get(elem2) match { - case None => - new Linear_Set[A](start, Some(elem1), nexts - elem1, prevs - elem2) - case Some(elem3) => - new Linear_Set[A](start, end, - nexts - elem2 + (elem1 -> elem3), - prevs - elem2 + (elem3 -> elem1)) - } - } - } - - - /* Set methods */ - - override def stringPrefix = "Linear_Set" - - override def isEmpty: Boolean = !start.isDefined - override def size: Int = if (isEmpty) 0 else nexts.size + 1 - - def contains(elem: A): Boolean = - !isEmpty && (end.get == elem || nexts.isDefinedAt(elem)) - - private def make_iterator(from: Option[A]): Iterator[A] = new Iterator[A] { - private var next_elem = from - def hasNext(): Boolean = next_elem.isDefined - def next(): A = - next_elem match { - case Some(elem) => - next_elem = nexts.get(elem) - elem - case None => Iterator.empty.next() - } - } - - override def iterator: Iterator[A] = make_iterator(start) - - def iterator(elem: A): Iterator[A] = - if (contains(elem)) make_iterator(Some(elem)) - else throw new Linear_Set.Undefined(elem) - - def iterator(from: A, to: A): Iterator[A] = - if (contains(to)) - nexts.get(to) match { - case None => iterator(from) - case Some(stop) => iterator(from).takeWhile(_ != stop) - } - else throw new Linear_Set.Undefined(to) - - def reverse: Linear_Set[A] = new Linear_Set(end, start, prevs, nexts) - - override def last: A = reverse.head - - def + (elem: A): Linear_Set[A] = insert_after(end, elem) - - def - (elem: A): Linear_Set[A] = delete_after(prev(elem)) -} diff --git a/core/Pure/General/long_name.ML b/core/Pure/General/long_name.ML deleted file mode 100644 index b970be98..00000000 --- a/core/Pure/General/long_name.ML +++ /dev/null @@ -1,64 +0,0 @@ -(* Title: Pure/General/long_name.ML - Author: Makarius - -Long names. -*) - -signature LONG_NAME = -sig - val separator: string - val is_qualified: string -> bool - val hidden: string -> string - val is_hidden: string -> bool - val localN: string - val is_local: string -> bool - val implode: string list -> string - val explode: string -> string list - val append: string -> string -> string - val qualification: string -> int - val qualify: string -> string -> string - val qualifier: string -> string - val base_name: string -> string - val map_base_name: (string -> string) -> string -> string -end; - -structure Long_Name: LONG_NAME = -struct - -val separator = "."; - -val is_qualified = exists_string (fn s => s = separator); - -fun hidden name = "??." ^ name; -val is_hidden = String.isPrefix "??."; - -val localN = "local"; -val is_local = String.isPrefix "local."; - -val implode = space_implode separator; -val explode = space_explode separator; - -fun append name1 "" = name1 - | append "" name2 = name2 - | append name1 name2 = name1 ^ separator ^ name2; - -fun qualification "" = 0 - | qualification name = fold_string (fn s => s = separator ? 
Integer.add 1) name 1; - -fun qualify qual name = - if qual = "" orelse name = "" then name - else qual ^ separator ^ name; - -fun qualifier "" = "" - | qualifier name = implode (#1 (split_last (explode name))); - -fun base_name "" = "" - | base_name name = List.last (explode name); - -fun map_base_name _ "" = "" - | map_base_name f name = - let val names = explode name - in implode (nth_map (length names - 1) f names) end; - -end; - diff --git a/core/Pure/General/long_name.scala b/core/Pure/General/long_name.scala deleted file mode 100644 index bdcd9bbb..00000000 --- a/core/Pure/General/long_name.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* Title: Pure/General/long_name.scala - Author: Makarius - -Long names. -*/ - -package isabelle - - -object Long_Name -{ - val separator = "." - val separator_char = '.' - - def is_qualified(name: String): Boolean = name.contains(separator_char) - - def implode(names: List[String]): String = names.mkString(separator) - def explode(name: String): List[String] = Library.space_explode(separator_char, name) - - def qualify(qual: String, name: String): String = - if (qual == "" || name == "") name - else qual + separator + name - - def base_name(name: String): String = - if (name == "") "" - else explode(name).last -} - diff --git a/core/Pure/General/multi_map.scala b/core/Pure/General/multi_map.scala deleted file mode 100644 index 8e9b9347..00000000 --- a/core/Pure/General/multi_map.scala +++ /dev/null @@ -1,72 +0,0 @@ -/* Title: Pure/General/multi_map.scala - Module: PIDE - Author: Makarius - -Maps with multiple entries per key. -*/ - -package isabelle - - -import scala.collection.generic.{ImmutableMapFactory, CanBuildFrom} - - -object Multi_Map extends ImmutableMapFactory[Multi_Map] -{ - private val empty_val: Multi_Map[Any, Nothing] = new Multi_Map[Any, Nothing](Map.empty) - override def empty[A, B] = empty_val.asInstanceOf[Multi_Map[A, B]] - - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Multi_Map[A, B]] = - new MapCanBuildFrom[A, B] -} - - -final class Multi_Map[A, +B] private(rep: Map[A, List[B]]) - extends scala.collection.immutable.Map[A, B] - with scala.collection.immutable.MapLike[A, B, Multi_Map[A, B]] -{ - /* Multi_Map operations */ - - def iterator_list: Iterator[(A, List[B])] = rep.iterator - - def get_list(a: A): List[B] = rep.getOrElse(a, Nil) - - def insert[B1 >: B](a: A, b: B1): Multi_Map[A, B1] = - { - val bs = get_list(a) - if (bs.contains(b)) this - else new Multi_Map(rep + (a -> (b :: bs))) - } - - def remove[B1 >: B](a: A, b: B1): Multi_Map[A, B1] = - { - val bs = get_list(a) - if (bs.contains(b)) { - bs.filterNot(_ == b) match { - case Nil => new Multi_Map(rep - a) - case bs1 => new Multi_Map(rep + (a -> bs1)) - } - } - else this - } - - - /* Map operations */ - - override def stringPrefix = "Multi_Map" - - override def empty = Multi_Map.empty - override def isEmpty: Boolean = rep.isEmpty - - override def keySet: Set[A] = rep.keySet - - override def iterator: Iterator[(A, B)] = - for ((a, bs) <- rep.iterator; b <- bs.iterator) yield (a, b) - - def get(a: A): Option[B] = get_list(a).headOption - - def + [B1 >: B](p: (A, B1)): Multi_Map[A, B1] = insert(p._1, p._2) - - def - (a: A): Multi_Map[A, B] = - if (rep.isDefinedAt(a)) new Multi_Map(rep - a) else this -} diff --git a/core/Pure/General/name_space.ML b/core/Pure/General/name_space.ML deleted file mode 100644 index 179cca51..00000000 --- a/core/Pure/General/name_space.ML +++ /dev/null @@ -1,562 +0,0 @@ -(* Title: Pure/General/name_space.ML - Author: Markus Wenzel, TU 
Muenchen - -Generic name spaces with declared and hidden entries; no support for -absolute addressing. -*) - -type xstring = string; (*external names*) - -signature NAME_SPACE = -sig - type T - val empty: string -> T - val kind_of: T -> string - val defined_entry: T -> string -> bool - val the_entry: T -> string -> - {concealed: bool, group: serial option, theory_name: string, pos: Position.T, id: serial} - val entry_ord: T -> string * string -> order - val markup: T -> string -> Markup.T - val is_concealed: T -> string -> bool - val intern: T -> xstring -> string - val names_long_raw: Config.raw - val names_long: bool Config.T - val names_short_raw: Config.raw - val names_short: bool Config.T - val names_unique_raw: Config.raw - val names_unique: bool Config.T - val extern: Proof.context -> T -> string -> xstring - val extern_ord: Proof.context -> T -> string * string -> order - val extern_shortest: Proof.context -> T -> string -> xstring - val markup_extern: Proof.context -> T -> string -> Markup.T * xstring - val pretty: Proof.context -> T -> string -> Pretty.T - val completion: Context.generic -> T -> xstring * Position.T -> Completion.T - val merge: T * T -> T - type naming - val conceal: naming -> naming - val get_group: naming -> serial option - val set_group: serial option -> naming -> naming - val set_theory_name: string -> naming -> naming - val new_group: naming -> naming - val reset_group: naming -> naming - val add_path: string -> naming -> naming - val root_path: naming -> naming - val parent_path: naming -> naming - val mandatory_path: string -> naming -> naming - val qualified_path: bool -> binding -> naming -> naming - val default_naming: naming - val local_naming: naming - val transform_binding: naming -> binding -> binding - val full_name: naming -> binding -> string - val base_name: binding -> string - val hide: bool -> string -> T -> T - val alias: naming -> binding -> string -> T -> T - val naming_of: Context.generic -> naming - val map_naming: (naming -> naming) -> Context.generic -> Context.generic - val declare: Context.generic -> bool -> binding -> T -> string * T - type 'a table - val change_base: bool -> 'a table -> 'a table - val change_ignore: 'a table -> 'a table - val space_of_table: 'a table -> T - val check_reports: Context.generic -> 'a table -> - xstring * Position.T list -> (string * Position.report list) * 'a - val check: Context.generic -> 'a table -> xstring * Position.T -> string * 'a - val lookup_key: 'a table -> string -> (string * 'a) option - val get: 'a table -> string -> 'a - val define: Context.generic -> bool -> binding * 'a -> 'a table -> string * 'a table - val alias_table: naming -> binding -> string -> 'a table -> 'a table - val hide_table: bool -> string -> 'a table -> 'a table - val del_table: string -> 'a table -> 'a table - val map_table_entry: string -> ('a -> 'a) -> 'a table -> 'a table - val fold_table: (string * 'a -> 'b -> 'b) -> 'a table -> 'b -> 'b - val empty_table: string -> 'a table - val merge_tables: 'a table * 'a table -> 'a table - val join_tables: (string -> 'a * 'a -> 'a) (*exception Change_Table.SAME*) -> - 'a table * 'a table -> 'a table - val extern_entries: Proof.context -> T -> (string * 'a) list -> ((string * xstring) * 'a) list - val markup_entries: Proof.context -> T -> (string * 'a) list -> ((Markup.T * xstring) * 'a) list - val extern_table: Proof.context -> 'a table -> ((string * xstring) * 'a) list - val markup_table: Proof.context -> 'a table -> ((Markup.T * xstring) * 'a) list -end; - -structure 
Name_Space: NAME_SPACE = -struct - - -(** name spaces **) - -(* datatype entry *) - -type entry = - {concealed: bool, - group: serial option, - theory_name: string, - pos: Position.T, - id: serial}; - -fun entry_markup def kind (name, {pos, id, ...}: entry) = - Markup.properties (Position.entity_properties_of def id pos) (Markup.entity kind name); - -fun print_entry_ref kind (name, entry) = - quote (Markup.markup (entry_markup false kind (name, entry)) name); - -fun err_dup kind entry1 entry2 pos = - error ("Duplicate " ^ plain_words kind ^ " declaration " ^ - print_entry_ref kind entry1 ^ " vs. " ^ print_entry_ref kind entry2 ^ Position.here pos); - -fun undefined kind name = "Undefined " ^ plain_words kind ^ ": " ^ quote name; - - -(* internal names *) - -type internals = (string list * string list) Change_Table.T; (*xname -> visible, hidden*) - -fun map_internals f xname : internals -> internals = - Change_Table.map_default (xname, ([], [])) f; - -val del_name = map_internals o apfst o remove (op =); -fun del_name_extra name = - map_internals (apfst (fn [] => [] | x :: xs => x :: remove (op =) name xs)); -val add_name = map_internals o apfst o update (op =); -val add_name' = map_internals o apsnd o update (op =); - - -(* datatype T *) - -datatype T = - Name_Space of - {kind: string, internals: internals, - entries: (xstring list * entry) Change_Table.T}; (*name -> externals, entry*) - -fun make_name_space (kind, internals, entries) = - Name_Space {kind = kind, internals = internals, entries = entries}; - -fun map_name_space f (Name_Space {kind = kind, internals = internals, entries = entries}) = - make_name_space (f (kind, internals, entries)); - -fun change_base_space begin = map_name_space (fn (kind, internals, entries) => - (kind, Change_Table.change_base begin internals, Change_Table.change_base begin entries)); - -val change_ignore_space = map_name_space (fn (kind, internals, entries) => - (kind, Change_Table.change_ignore internals, Change_Table.change_ignore entries)); - - -fun empty kind = make_name_space (kind, Change_Table.empty, Change_Table.empty); - -fun kind_of (Name_Space {kind, ...}) = kind; - -fun defined_entry (Name_Space {entries, ...}) = Change_Table.defined entries; - -fun the_entry (Name_Space {kind, entries, ...}) name = - (case Change_Table.lookup entries name of - NONE => error (undefined kind name) - | SOME (_, entry) => entry); - -fun entry_ord space = int_ord o pairself (#id o the_entry space); - -fun markup (Name_Space {kind, entries, ...}) name = - (case Change_Table.lookup entries name of - NONE => Markup.intensify - | SOME (_, entry) => entry_markup false kind (name, entry)); - -fun is_concealed space name = #concealed (the_entry space name); - - -(* name accesses *) - -fun lookup (Name_Space {internals, ...}) xname = - (case Change_Table.lookup internals xname of - NONE => (xname, true) - | SOME ([], []) => (xname, true) - | SOME ([name], _) => (name, true) - | SOME (name :: _, _) => (name, false) - | SOME ([], name' :: _) => (Long_Name.hidden name', true)); - -fun get_accesses (Name_Space {entries, ...}) name = - (case Change_Table.lookup entries name of - NONE => [name] - | SOME (externals, _) => externals); - -fun valid_accesses (Name_Space {internals, ...}) name = - Change_Table.fold (fn (xname, (names, _)) => - if not (null names) andalso hd names = name then cons xname else I) internals []; - - -(* intern *) - -fun intern space xname = #1 (lookup space xname); - - -(* extern *) - -val names_long_raw = Config.declare_option ("names_long", @{here}); 
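(*Sketch of how these display options steer "extern" below; "HOL.conj" is only an
  illustrative internal name, not one declared here:
    names_long = true     always print the full internal name, e.g. "HOL.conj"
    names_short = true    always print the base name, e.g. "conj"
    otherwise             print an abbreviated access, e.g. "conj", as long as it
                          still resolves to the same name (uniquely, if names_unique)*)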
-val names_long = Config.bool names_long_raw; - -val names_short_raw = Config.declare_option ("names_short", @{here}); -val names_short = Config.bool names_short_raw; - -val names_unique_raw = Config.declare_option ("names_unique", @{here}); -val names_unique = Config.bool names_unique_raw; - -fun extern ctxt space name = - let - val names_long = Config.get ctxt names_long; - val names_short = Config.get ctxt names_short; - val names_unique = Config.get ctxt names_unique; - - fun valid require_unique xname = - let val (name', is_unique) = lookup space xname - in name = name' andalso (not require_unique orelse is_unique) end; - - fun ext [] = if valid false name then name else Long_Name.hidden name - | ext (nm :: nms) = if valid names_unique nm then nm else ext nms; - in - if names_long then name - else if names_short then Long_Name.base_name name - else ext (get_accesses space name) - end; - -fun extern_ord ctxt space = string_ord o pairself (extern ctxt space); - -fun extern_shortest ctxt = - extern - (ctxt - |> Config.put names_long false - |> Config.put names_short false - |> Config.put names_unique false); - -fun markup_extern ctxt space name = (markup space name, extern ctxt space name); -fun pretty ctxt space name = Pretty.mark_str (markup_extern ctxt space name); - - -(* completion *) - -fun completion context space (xname, pos) = - if Position.is_reported pos andalso xname <> "" andalso xname <> "_" then - let - fun result_ord ((xname1, (_, name1)), (xname2, (_, name2))) = - (case bool_ord (pairself Long_Name.is_local (name2, name1)) of - EQUAL => - (case int_ord (pairself Long_Name.qualification (xname1, xname2)) of - EQUAL => string_ord (xname1, xname2) - | ord => ord) - | ord => ord); - val x = Name.clean xname; - val Name_Space {kind, internals, ...} = space; - val ext = extern_shortest (Context.proof_of context) space; - val names = - Change_Table.fold - (fn (a, (name :: _, _)) => - if String.isPrefix x a andalso not (is_concealed space name) - then - let val a' = ext name - in if a = a' then cons (a', (kind, name)) else I end - else I - | _ => I) internals [] - |> sort_distinct result_ord; - in Completion.names pos names end - else Completion.none; - - -(* merge *) - -fun merge - (Name_Space {kind = kind1, internals = internals1, entries = entries1}, - Name_Space {kind = kind2, internals = internals2, entries = entries2}) = - let - val kind' = - if kind1 = kind2 then kind1 - else error ("Attempt to merge different kinds of name spaces " ^ - quote kind1 ^ " vs. 
" ^ quote kind2); - val internals' = (internals1, internals2) |> Change_Table.join - (K (fn ((names1, names1'), (names2, names2')) => - if pointer_eq (names1, names2) andalso pointer_eq (names1', names2') - then raise Change_Table.SAME - else (Library.merge (op =) (names1, names2), Library.merge (op =) (names1', names2')))); - val entries' = (entries1, entries2) |> Change_Table.join - (fn name => fn ((_, entry1), (_, entry2)) => - if #id entry1 = #id entry2 then raise Change_Table.SAME - else err_dup kind' (name, entry1) (name, entry2) Position.none); - in make_name_space (kind', internals', entries') end; - - - -(** naming context **) - -(* datatype naming *) - -datatype naming = Naming of - {conceal: bool, - group: serial option, - theory_name: string, - path: (string * bool) list}; - -fun make_naming (conceal, group, theory_name, path) = - Naming {conceal = conceal, group = group, theory_name = theory_name, path = path}; - -fun map_naming f (Naming {conceal, group, theory_name, path}) = - make_naming (f (conceal, group, theory_name, path)); - -fun map_path f = map_naming (fn (conceal, group, theory_name, path) => - (conceal, group, theory_name, f path)); - - -val conceal = map_naming (fn (_, group, theory_name, path) => - (true, group, theory_name, path)); - -fun set_theory_name theory_name = map_naming (fn (conceal, group, _, path) => - (conceal, group, theory_name, path)); - - -fun get_group (Naming {group, ...}) = group; - -fun set_group group = map_naming (fn (conceal, _, theory_name, path) => - (conceal, group, theory_name, path)); - -fun new_group naming = set_group (SOME (serial ())) naming; -val reset_group = set_group NONE; - -fun add_path elems = map_path (fn path => path @ [(elems, false)]); -val root_path = map_path (fn _ => []); -val parent_path = map_path (perhaps (try (#1 o split_last))); -fun mandatory_path elems = map_path (fn path => path @ [(elems, true)]); - -fun qualified_path mandatory binding = map_path (fn path => - path @ #2 (Binding.dest (Binding.qualified mandatory "" binding))); - -val default_naming = make_naming (false, NONE, "", []); -val local_naming = default_naming |> add_path Long_Name.localN; - - -(* full name *) - -fun err_bad binding = error (Binding.bad binding); - -fun transform_binding (Naming {conceal = true, ...}) = Binding.conceal - | transform_binding _ = I; - -val bad_specs = ["", "??", "__"]; - -fun name_spec (naming as Naming {path, ...}) raw_binding = - let - val binding = transform_binding naming raw_binding; - val (concealed, prefix, name) = Binding.dest binding; - val _ = Long_Name.is_qualified name andalso err_bad binding; - - val spec1 = maps (fn (a, b) => map (rpair b) (Long_Name.explode a)) (path @ prefix); - val spec2 = if name = "" then [] else [(name, true)]; - val spec = spec1 @ spec2; - val _ = - exists (fn (a, _) => member (op =) bad_specs a orelse exists_string (fn s => s = "\"") a) spec - andalso err_bad binding; - in (concealed, if null spec2 then [] else spec) end; - -fun full_name naming = - name_spec naming #> #2 #> map #1 #> Long_Name.implode; - -val base_name = full_name default_naming #> Long_Name.base_name; - - -(* accesses *) - -fun mandatory xs = map_filter (fn (x, true) => SOME x | _ => NONE) xs; - -fun mandatory_prefixes xs = mandatory xs :: mandatory_prefixes1 xs -and mandatory_prefixes1 [] = [] - | mandatory_prefixes1 ((x, true) :: xs) = map (cons x) (mandatory_prefixes1 xs) - | mandatory_prefixes1 ((x, false) :: xs) = map (cons x) (mandatory_prefixes xs); - -fun mandatory_suffixes xs = map rev 
(mandatory_prefixes (rev xs)); - -fun accesses naming binding = - let - val spec = #2 (name_spec naming binding); - val sfxs = mandatory_suffixes spec; - val pfxs = mandatory_prefixes spec; - in pairself (map Long_Name.implode) (sfxs @ pfxs, sfxs) end; - - -(* hide *) - -fun hide fully name space = - space |> map_name_space (fn (kind, internals, entries) => - let - val _ = Change_Table.defined entries name orelse error (undefined kind name); - val names = valid_accesses space name; - val internals' = internals - |> add_name' name name - |> fold (del_name name) - (if fully then names else inter (op =) [Long_Name.base_name name] names) - |> fold (del_name_extra name) (get_accesses space name); - in (kind, internals', entries) end); - - -(* alias *) - -fun alias naming binding name space = - space |> map_name_space (fn (kind, internals, entries) => - let - val _ = Change_Table.defined entries name orelse error (undefined kind name); - val (accs, accs') = accesses naming binding; - val internals' = internals |> fold (add_name name) accs; - val entries' = entries - |> Change_Table.map_entry name (fn (externals, entry) => - (Library.merge (op =) (externals, accs'), entry)) - in (kind, internals', entries') end); - - - -(** context naming **) - -structure Data_Args = -struct - type T = naming; - val empty = default_naming; - fun extend _ = default_naming; - fun merge _ = default_naming; - fun init _ = local_naming; -end; - -structure Global_Naming = Theory_Data(Data_Args); -structure Local_Naming = Proof_Data(Data_Args); - -fun naming_of (Context.Theory thy) = Global_Naming.get thy - | naming_of (Context.Proof ctxt) = Local_Naming.get ctxt; - -fun map_naming f (Context.Theory thy) = Context.Theory (Global_Naming.map f thy) - | map_naming f (Context.Proof ctxt) = Context.Proof (Local_Naming.map f ctxt); - - - -(** entry definition **) - -(* declaration *) - -fun declare context strict binding space = - let - val naming = naming_of context; - val Naming {group, theory_name, ...} = naming; - val (concealed, spec) = name_spec naming binding; - val (accs, accs') = accesses naming binding; - - val name = Long_Name.implode (map fst spec); - val _ = name = "" andalso err_bad binding; - - val (proper_pos, pos) = Position.default (Binding.pos_of binding); - val entry = - {concealed = concealed, - group = group, - theory_name = theory_name, - pos = pos, - id = serial ()}; - val space' = - space |> map_name_space (fn (kind, internals, entries) => - let - val internals' = internals |> fold (add_name name) accs; - val entries' = - (if strict then Change_Table.update_new else Change_Table.update) - (name, (accs', entry)) entries - handle Change_Table.DUP dup => - err_dup kind (dup, #2 (the (Change_Table.lookup entries dup))) - (name, entry) (#pos entry); - in (kind, internals', entries') end); - val _ = - if proper_pos andalso Context_Position.is_reported_generic context pos then - Position.report pos (entry_markup true (kind_of space) (name, entry)) - else (); - in (name, space') end; - - -(* definition in symbol table *) - -datatype 'a table = Table of T * 'a Change_Table.T; - -fun change_base begin (Table (space, tab)) = - Table (change_base_space begin space, Change_Table.change_base begin tab); - -fun change_ignore (Table (space, tab)) = - Table (change_ignore_space space, Change_Table.change_ignore tab); - -fun space_of_table (Table (space, _)) = space; - -fun check_reports context (Table (space, tab)) (xname, ps) = - let val name = intern space xname in - (case Change_Table.lookup tab name of - SOME x => 
- let - val reports = - filter (Context_Position.is_reported_generic context) ps - |> map (fn pos => (pos, markup space name)); - in ((name, reports), x) end - | NONE => - let - val completions = map (fn pos => completion context space (xname, pos)) ps; - in - error (undefined (kind_of space) name ^ Position.here_list ps ^ - Markup.markup_report (implode (map Completion.reported_text completions))) - end) - end; - -fun check context table (xname, pos) = - let - val ((name, reports), x) = check_reports context table (xname, [pos]); - val _ = Position.reports reports; - in (name, x) end; - -fun lookup_key (Table (_, tab)) name = Change_Table.lookup_key tab name; - -fun get table name = - (case lookup_key table name of - SOME (_, x) => x - | NONE => error (undefined (kind_of (space_of_table table)) name)); - -fun define context strict (binding, x) (Table (space, tab)) = - let - val (name, space') = declare context strict binding space; - val tab' = Change_Table.update (name, x) tab; - in (name, Table (space', tab')) end; - - -(* derived table operations *) - -fun alias_table naming binding name (Table (space, tab)) = - Table (alias naming binding name space, tab); - -fun hide_table fully name (Table (space, tab)) = - Table (hide fully name space, tab); - -fun del_table name (Table (space, tab)) = - let - val space' = hide true name space handle ERROR _ => space; - val tab' = Change_Table.delete_safe name tab; - in Table (space', tab') end; - -fun map_table_entry name f (Table (space, tab)) = - Table (space, Change_Table.map_entry name f tab); - -fun fold_table f (Table (_, tab)) = Change_Table.fold f tab; - -fun empty_table kind = Table (empty kind, Change_Table.empty); - -fun merge_tables (Table (space1, tab1), Table (space2, tab2)) = - Table (merge (space1, space2), Change_Table.merge (K true) (tab1, tab2)); - -fun join_tables f (Table (space1, tab1), Table (space2, tab2)) = - Table (merge (space1, space2), Change_Table.join f (tab1, tab2)); - - -(* present table content *) - -fun extern_entries ctxt space entries = - fold (fn (name, x) => cons ((name, extern ctxt space name), x)) entries [] - |> Library.sort_wrt (#2 o #1); - -fun markup_entries ctxt space entries = - extern_entries ctxt space entries - |> map (fn ((name, xname), x) => ((markup space name, xname), x)); - -fun extern_table ctxt (Table (space, tab)) = extern_entries ctxt space (Change_Table.dest tab); -fun markup_table ctxt (Table (space, tab)) = markup_entries ctxt space (Change_Table.dest tab); - -end; - diff --git a/core/Pure/General/ord_list.ML b/core/Pure/General/ord_list.ML deleted file mode 100644 index 0c99d574..00000000 --- a/core/Pure/General/ord_list.ML +++ /dev/null @@ -1,125 +0,0 @@ -(* Title: Pure/General/ord_list.ML - Author: Makarius - -Ordered lists without duplicates -- a light-weight representation of -finite sets, all operations take linear time and economize heap usage. 
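For example, with the standard integer ordering int_ord from the Isabelle library
(results follow the operations defined below):

  Ord_List.make int_ord [3, 1, 3, 2];      (* [1, 2, 3] *)
  Ord_List.insert int_ord 2 [1, 3];        (* [1, 2, 3] *)
  Ord_List.union int_ord [1, 3] [2, 3];    (* [1, 2, 3] *)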
-*) - -signature ORD_LIST = -sig - type 'a T = 'a list - val make: ('a * 'a -> order) -> 'a list -> 'a T - val member: ('b * 'a -> order) -> 'a T -> 'b -> bool - val insert: ('a * 'a -> order) -> 'a -> 'a T -> 'a T - val remove: ('b * 'a -> order) -> 'b -> 'a T -> 'a T - val subset: ('b * 'a -> order) -> 'b T * 'a T -> bool - val union: ('a * 'a -> order) -> 'a T -> 'a T -> 'a T - val unions: ('a * 'a -> order) -> 'a T list -> 'a T - val merge: ('a * 'a -> order) -> 'a T * 'a T -> 'a T - val inter: ('b * 'a -> order) -> 'b T -> 'a T -> 'a T - val subtract: ('b * 'a -> order) -> 'b T -> 'a T -> 'a T -end; - -structure Ord_List: ORD_LIST = -struct - -type 'a T = 'a list; -fun make ord = sort_distinct ord; - - -(* single elements *) - -fun find_index ord list x = - let - fun find i [] = ~ i - | find i (y :: ys) = - (case ord (x, y) of - LESS => ~ i - | EQUAL => i - | GREATER => find (i + 1) ys); - in find 1 list end; - -fun member ord list x = find_index ord list x > 0; - -fun insert ord x list = - let - fun insrt 1 ys = x :: ys - | insrt i (y :: ys) = y :: insrt (i - 1) ys; - val idx = find_index ord list x; - in if idx > 0 then list else insrt (~ idx) list end; - -fun remove ord x list = - let - fun rmove 1 (_ :: ys) = ys - | rmove i (y :: ys) = y :: rmove (i - 1) ys; - val idx = find_index ord list x; - in if idx > 0 then rmove idx list else list end; - - -(* lists as sets *) - -fun subset ord (list1, list2) = - let - fun sub [] _ = true - | sub _ [] = false - | sub (lst1 as x :: xs) (y :: ys) = - (case ord (x, y) of - LESS => false - | EQUAL => sub xs ys - | GREATER => sub lst1 ys); - in sub list1 list2 end; - - -(* algebraic operations *) - -exception SAME; -fun handle_same f x = f x handle SAME => x; - -(*union: insert elements of first list into second list*) -fun union ord list1 list2 = - let - fun unio [] _ = raise SAME - | unio xs [] = xs - | unio (lst1 as x :: xs) (lst2 as y :: ys) = - (case ord (x, y) of - LESS => x :: handle_same (unio xs) lst2 - | EQUAL => y :: unio xs ys - | GREATER => y :: unio lst1 ys); - in if pointer_eq (list1, list2) then list1 else handle_same (unio list1) list2 end; - -fun unions ord lists = - let - fun unios (xs :: ys :: rest) acc = unios rest (union ord xs ys :: acc) - | unios [xs] (ys :: acc) = unios (union ord xs ys :: acc) [] - | unios [xs] [] = xs - | unios [] [] = [] - | unios [] acc = unios acc []; - in unios lists [] end; - -fun merge ord (list1, list2) = union ord list2 list1; - -(*intersection: filter second list for elements present in first list*) -fun inter ord list1 list2 = - let - fun intr _ [] = raise SAME - | intr [] _ = [] - | intr (lst1 as x :: xs) (lst2 as y :: ys) = - (case ord (x, y) of - LESS => intr xs lst2 - | EQUAL => y :: intr xs ys - | GREATER => handle_same (intr lst1) ys); - in handle_same (intr list1) list2 end; - -(*subtraction: filter second list for elements NOT present in first list*) -fun subtract ord list1 list2 = - let - fun subtr [] _ = raise SAME - | subtr _ [] = raise SAME - | subtr (lst1 as x :: xs) (lst2 as y :: ys) = - (case ord (x, y) of - LESS => subtr xs lst2 - | EQUAL => handle_same (subtr xs) ys - | GREATER => y :: subtr lst1 ys); - in handle_same (subtr list1) list2 end; - -end; diff --git a/core/Pure/General/output.ML b/core/Pure/General/output.ML deleted file mode 100644 index 6b780c12..00000000 --- a/core/Pure/General/output.ML +++ /dev/null @@ -1,132 +0,0 @@ -(* Title: Pure/General/output.ML - Author: Makarius, Hagia Maria Sion Abbey (Jerusalem) - -Isabelle channels for diagnostic output. 
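With the default handlers installed below, the channels differ mainly in prefix and
destination; illustrative calls:

  writeln "loading theory ...";    (* plain line on stdout *)
  warning "deprecated syntax";     (* printed as "### deprecated syntax" *)
  error_message "proof failed";    (* printed as "*** proof failed" *)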
-*) - -signature BASIC_OUTPUT = -sig - val writeln: string -> unit - val tracing: string -> unit - val warning: string -> unit -end; - -signature OUTPUT = -sig - include BASIC_OUTPUT - type output = string - val default_output: string -> output * int - val default_escape: output -> string - val add_mode: string -> (string -> output * int) -> (output -> string) -> unit - val output_width: string -> output * int - val output: string -> output - val escape: output -> string - val physical_stdout: output -> unit - val physical_stderr: output -> unit - val physical_writeln: output -> unit - exception Protocol_Message of Properties.T - val writelns: string list -> unit - val urgent_message: string -> unit - val error_message': serial * string -> unit - val error_message: string -> unit - val system_message: string -> unit - val prompt: string -> unit - val status: string -> unit - val report: string list -> unit - val result: Properties.T -> string list -> unit - val protocol_message: Properties.T -> string list -> unit - val try_protocol_message: Properties.T -> string list -> unit -end; - -signature PRIVATE_OUTPUT = -sig - include OUTPUT - val writeln_fn: (output list -> unit) Unsynchronized.ref - val urgent_message_fn: (output list -> unit) Unsynchronized.ref - val tracing_fn: (output list -> unit) Unsynchronized.ref - val warning_fn: (output list -> unit) Unsynchronized.ref - val error_message_fn: (serial * output list -> unit) Unsynchronized.ref - val system_message_fn: (output list -> unit) Unsynchronized.ref - val prompt_fn: (output -> unit) Unsynchronized.ref - val status_fn: (output list -> unit) Unsynchronized.ref - val report_fn: (output list -> unit) Unsynchronized.ref - val result_fn: (Properties.T -> output list -> unit) Unsynchronized.ref - val protocol_message_fn: (Properties.T -> output list -> unit) Unsynchronized.ref -end; - -structure Output: PRIVATE_OUTPUT = -struct - -(** print modes **) - -type output = string; (*raw system output*) - -fun default_output s = (s, size s); -fun default_escape (s: output) = s; - -local - val default = {output = default_output, escape = default_escape}; - val modes = Synchronized.var "Output.modes" (Symtab.make [("", default)]); -in - fun add_mode name output escape = - Synchronized.change modes (Symtab.update_new (name, {output = output, escape = escape})); - fun get_mode () = - the_default default - (Library.get_first (Symtab.lookup (Synchronized.value modes)) (print_mode_value ())); -end; - -fun output_width x = #output (get_mode ()) x; -val output = #1 o output_width; - -fun escape x = #escape (get_mode ()) x; - - - -(** output channels **) - -(* raw output primitives -- not to be used in user-space *) - -fun physical_stdout s = (TextIO.output (TextIO.stdOut, s); TextIO.flushOut TextIO.stdOut); -fun physical_stderr s = (TextIO.output (TextIO.stdErr, s); TextIO.flushOut TextIO.stdErr); - -fun physical_writeln "" = () - | physical_writeln s = physical_stdout (suffix "\n" s); (*atomic output!*) - - -(* Isabelle output channels *) - -exception Protocol_Message of Properties.T; - -val writeln_fn = Unsynchronized.ref (physical_writeln o implode); -val urgent_message_fn = Unsynchronized.ref (fn ss => ! writeln_fn ss); (*Proof General legacy*) -val tracing_fn = Unsynchronized.ref (fn ss => ! 
writeln_fn ss); -val warning_fn = Unsynchronized.ref (physical_writeln o prefix_lines "### " o implode); -val error_message_fn = - Unsynchronized.ref (fn (_: serial, ss) => physical_writeln (prefix_lines "*** " (implode ss))); -val system_message_fn = Unsynchronized.ref (fn ss => ! writeln_fn ss); -val prompt_fn = Unsynchronized.ref physical_stdout; (*Proof General legacy*) -val status_fn = Unsynchronized.ref (fn _: output list => ()); -val report_fn = Unsynchronized.ref (fn _: output list => ()); -val result_fn = Unsynchronized.ref (fn _: Properties.T => fn _: output list => ()); -val protocol_message_fn: (Properties.T -> output list -> unit) Unsynchronized.ref = - Unsynchronized.ref (fn props => fn _ => raise Protocol_Message props); - -fun writelns ss = ! writeln_fn (map output ss); -fun writeln s = writelns [s]; -fun urgent_message s = ! urgent_message_fn [output s]; (*Proof General legacy*) -fun tracing s = ! tracing_fn [output s]; -fun warning s = ! warning_fn [output s]; -fun error_message' (i, s) = ! error_message_fn (i, [output s]); -fun error_message s = error_message' (serial (), s); -fun system_message s = ! system_message_fn [output s]; -fun prompt s = ! prompt_fn (output s); -fun status s = ! status_fn [output s]; -fun report ss = ! report_fn (map output ss); -fun result props ss = ! result_fn props (map output ss); -fun protocol_message props ss = ! protocol_message_fn props (map output ss); -fun try_protocol_message props ss = protocol_message props ss handle Protocol_Message _ => (); - -end; - -structure Basic_Output: BASIC_OUTPUT = Output; -open Basic_Output; diff --git a/core/Pure/General/output.scala b/core/Pure/General/output.scala deleted file mode 100644 index 4d98ecc4..00000000 --- a/core/Pure/General/output.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* Title: Pure/General/output.scala - Module: PIDE - Author: Makarius - -Isabelle channels for diagnostic output. -*/ - -package isabelle - - -object Output -{ - def warning_text(msg: String): String = cat_lines(split_lines(msg).map("### " + _)) - def error_text(msg: String): String = cat_lines(split_lines(msg).map("*** " + _)) - - def writeln(msg: String) { Console.err.println(msg) } - def warning(msg: String) { Console.err.println(warning_text(msg)) } - def error_message(msg: String) { Console.err.println(error_text(msg)) } -} - diff --git a/core/Pure/General/path.ML b/core/Pure/General/path.ML deleted file mode 100644 index 0523ec44..00000000 --- a/core/Pure/General/path.ML +++ /dev/null @@ -1,230 +0,0 @@ -(* Title: Pure/General/path.ML - Author: Markus Wenzel, TU Muenchen - -Algebra of file-system paths: basic POSIX notation, extended by named -roots (e.g. //foo) and variables (e.g. $BAR). 
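For example (following the explode/append/implode operations defined below):

  Path.implode (Path.append (Path.explode "$ISABELLE_HOME") (Path.basic "etc"));
    (* "$ISABELLE_HOME/etc" *)
  Path.explode "a/./b/../c";    (* denotes the same path as Path.explode "a/c" *)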
-*) - -signature PATH = -sig - eqtype T - val is_current: T -> bool - val current: T - val root: T - val named_root: string -> T - val parent: T - val basic: string -> T - val variable: string -> T - val is_absolute: T -> bool - val is_basic: T -> bool - val starts_basic: T -> bool - val append: T -> T -> T - val appends: T list -> T - val make: string list -> T - val implode: T -> string - val explode: string -> T - val split: string -> T list - val pretty: T -> Pretty.T - val print: T -> string - val dir: T -> T - val base: T -> T - val ext: string -> T -> T - val split_ext: T -> T * string - val expand: T -> T - val smart_implode: T -> string - val position: T -> Position.T -end; - -structure Path: PATH = -struct - -(* path elements *) - -datatype elem = - Root of string | - Basic of string | - Variable of string | - Parent; - -local - -fun err_elem msg s = error (msg ^ " path element specification " ^ quote s); - -fun check_elem s = - if s = "" orelse s = "~" orelse s = "~~" then err_elem "Illegal" s - else - let - fun check c = - if exists_string (fn c' => c = c') s then - err_elem ("Illegal character " ^ quote c ^ " in") s - else (); - val _ = List.app check ["/", "\\", "$", ":", "\"", "'"]; - in s end; - -in - -val root_elem = Root o check_elem; -val basic_elem = Basic o check_elem; -val variable_elem = Variable o check_elem; - -end; - - -(* type path *) - -datatype T = Path of elem list; (*reversed elements*) - -fun rep (Path xs) = xs; - -fun is_current (Path []) = true - | is_current _ = false; - -val current = Path []; -val root = Path [Root ""]; -fun named_root s = Path [root_elem s]; -fun basic s = Path [basic_elem s]; -fun variable s = Path [variable_elem s]; -val parent = Path [Parent]; - -fun is_absolute (Path xs) = - (case try List.last xs of - SOME (Root _) => true - | _ => false); - -fun is_basic (Path [Basic _]) = true - | is_basic _ = false; - -fun starts_basic (Path xs) = - (case try List.last xs of - SOME (Basic _) => true - | _ => false); - - -(* append and norm *) - -fun apply (y as Root _) _ = [y] - | apply Parent (xs as (Root _ :: _)) = xs - | apply Parent (Basic _ :: rest) = rest - | apply y xs = y :: xs; - -fun append (Path xs) (Path ys) = Path (fold_rev apply ys xs); -fun appends paths = Library.foldl (uncurry append) (current, paths); -val make = appends o map basic; - -fun norm elems = fold_rev apply elems []; - - -(* implode *) - -local - -fun implode_elem (Root "") = "" - | implode_elem (Root s) = "//" ^ s - | implode_elem (Basic s) = s - | implode_elem (Variable s) = "$" ^ s - | implode_elem Parent = ".."; - -in - -fun implode_path (Path []) = "." - | implode_path (Path [Root ""]) = "/" - | implode_path (Path xs) = space_implode "/" (rev (map implode_elem xs)); - -end; - - -(* explode *) - -fun explode_path str = - let - fun explode_elem s = - (if s = ".." 
then Parent - else if s = "~" then Variable "USER_HOME" - else if s = "~~" then Variable "ISABELLE_HOME" - else - (case try (unprefix "$") s of - SOME s' => variable_elem s' - | NONE => basic_elem s)) - handle ERROR msg => cat_error msg ("The error(s) above occurred in " ^ quote str); - - val (roots, raw_elems) = - (case take_prefix (equal "") (space_explode "/" str) |>> length of - (0, es) => ([], es) - | (1, es) => ([Root ""], es) - | (_, []) => ([Root ""], []) - | (_, e :: es) => ([root_elem e], es)); - val elems = raw_elems |> filter_out (fn c => c = "" orelse c = ".") |> map explode_elem; - - in Path (norm (rev elems @ roots)) end; - -fun split str = - space_explode ":" str - |> map_filter (fn s => if s = "" then NONE else SOME (explode_path s)); - - -(* print *) - -fun pretty path = - let val s = implode_path path - in Pretty.mark (Markup.path s) (Pretty.str (quote s)) end; - -val print = Pretty.str_of o pretty; - - -(* base element *) - -fun split_path f (Path (Basic s :: xs)) = f (Path xs, s) - | split_path _ path = error ("Cannot split path into dir/base: " ^ print path); - -val dir = split_path #1; -val base = split_path (fn (_, s) => Path [Basic s]); - -fun ext "" = I - | ext e = split_path (fn (prfx, s) => append prfx (basic (s ^ "." ^ e))); - -val split_ext = split_path (fn (prfx, s) => apfst (append prfx) - (case take_suffix (fn c => c <> ".") (raw_explode s) of - ([], _) => (Path [Basic s], "") - | (cs, e) => (Path [Basic (implode (take (length cs - 1) cs))], implode e))); - - -(* expand variables *) - -fun eval (Variable s) = - let val path = explode_path (getenv_strict s) in - if exists (fn Variable _ => true | _ => false) (rep path) then - error ("Illegal path variable nesting: " ^ s ^ "=" ^ print path) - else rep path - end - | eval x = [x]; - -val expand = rep #> maps eval #> norm #> Path; - - -(* smart implode *) - -fun smart_implode path = - let - val full_name = implode_path (expand path); - fun fold_path a = - let val b = implode_path (expand (explode_path a)) in - if full_name = b then SOME a - else - (case try (unprefix (b ^ "/")) full_name of - SOME name => SOME (a ^ "/" ^ name) - | NONE => NONE) - end; - in - (case get_first fold_path ["~~", "$ISABELLE_HOME_USER", "~"] of - SOME name => name - | NONE => implode_path path) - end; - -val position = Position.file o smart_implode; - -(*final declarations of this structure!*) -val implode = implode_path; -val explode = explode_path; - -end; - diff --git a/core/Pure/General/path.scala b/core/Pure/General/path.scala deleted file mode 100644 index c0194b7b..00000000 --- a/core/Pure/General/path.scala +++ /dev/null @@ -1,205 +0,0 @@ -/* Title: Pure/General/path.scala - Author: Makarius - -Algebra of file-system paths: basic POSIX notation, extended by named -roots (e.g. //foo) and variables (e.g. $BAR). 
-*/ - -package isabelle - - -import java.io.{File => JFile} - -import scala.util.matching.Regex - - -object Path -{ - /* path elements */ - - sealed abstract class Elem - private case class Root(val name: String) extends Elem - private case class Basic(val name: String) extends Elem - private case class Variable(val name: String) extends Elem - private case object Parent extends Elem - - private def err_elem(msg: String, s: String): Nothing = - error(msg + " path element specification " + quote(s)) - - private def check_elem(s: String): String = - if (s == "" || s == "~" || s == "~~") err_elem("Illegal", s) - else { - "/\\$:\"'".iterator.foreach(c => - if (s.iterator.exists(_ == c)) - err_elem("Illegal character " + quote(c.toString) + " in", s)) - s - } - - private def root_elem(s: String): Elem = Root(check_elem(s)) - private def basic_elem(s: String): Elem = Basic(check_elem(s)) - private def variable_elem(s: String): Elem = Variable(check_elem(s)) - - private def apply_elem(y: Elem, xs: List[Elem]): List[Elem] = - (y, xs) match { - case (Root(_), _) => List(y) - case (Parent, Root(_) :: _) => xs - case (Parent, Basic(_) :: rest) => rest - case _ => y :: xs - } - - private def norm_elems(elems: List[Elem]): List[Elem] = - (elems :\ (Nil: List[Elem]))(apply_elem) - - private def implode_elem(elem: Elem, short: Boolean): String = - elem match { - case Root("") => "" - case Root(s) => "//" + s - case Basic(s) => s - case Variable("USER_HOME") if short => "~" - case Variable("ISABELLE_HOME") if short => "~~" - case Variable(s) => "$" + s - case Parent => ".." - } - - - /* path constructors */ - - val current: Path = new Path(Nil) - val root: Path = new Path(List(Root(""))) - def named_root(s: String): Path = new Path(List(root_elem(s))) - def basic(s: String): Path = new Path(List(basic_elem(s))) - def variable(s: String): Path = new Path(List(variable_elem(s))) - val parent: Path = new Path(List(Parent)) - - - /* explode */ - - def explode(str: String): Path = - { - def explode_elem(s: String): Elem = - try { - if (s == "..") Parent - else if (s == "~") Variable("USER_HOME") - else if (s == "~~") Variable("ISABELLE_HOME") - else if (s.startsWith("$")) variable_elem(s.substring(1)) - else basic_elem(s) - } - catch { case ERROR(msg) => cat_error(msg, "The error(s) above occurred in " + quote(str)) } - - val ss = space_explode('/', str) - val r = ss.takeWhile(_.isEmpty).length - val es = ss.dropWhile(_.isEmpty) - val (roots, raw_elems) = - if (r == 0) (Nil, es) - else if (r == 1) (List(Root("")), es) - else if (es.isEmpty) (List(Root("")), Nil) - else (List(root_elem(es.head)), es.tail) - val elems = raw_elems.filterNot(s => s.isEmpty || s == ".").map(explode_elem) - - new Path(norm_elems(elems.reverse ::: roots)) - } - - def is_wellformed(str: String): Boolean = - try { explode(str); true } catch { case ERROR(_) => false } - - def is_valid(str: String): Boolean = - try { explode(str).expand; true } catch { case ERROR(_) => false } - - def split(str: String): List[Path] = - space_explode(':', str).filterNot(_.isEmpty).map(explode) - - - /* encode */ - - val encode: XML.Encode.T[Path] = (path => XML.Encode.string(path.implode)) -} - - -final class Path private(private val elems: List[Path.Elem]) // reversed elements -{ - def is_current: Boolean = elems.isEmpty - def is_absolute: Boolean = !elems.isEmpty && elems.last.isInstanceOf[Path.Root] - def is_basic: Boolean = elems match { case List(Path.Basic(_)) => true case _ => false } - - def +(other: Path): Path = new Path((other.elems :\ 
elems)(Path.apply_elem)) - - - /* implode */ - - private def gen_implode(short: Boolean): String = - elems match { - case Nil => "." - case List(Path.Root("")) => "/" - case _ => elems.map(Path.implode_elem(_, short)).reverse.mkString("/") - } - def implode: String = gen_implode(false) - def implode_short: String = gen_implode(true) - - override def toString: String = quote(implode) - - - /* base element */ - - private def split_path: (Path, String) = - elems match { - case Path.Basic(s) :: xs => (new Path(xs), s) - case _ => error("Cannot split path into dir/base: " + toString) - } - - def dir: Path = split_path._1 - def base: Path = new Path(List(Path.Basic(split_path._2))) - - def ext(e: String): Path = - if (e == "") this - else { - val (prfx, s) = split_path - prfx + Path.basic(s + "." + e) - } - - def backup: Path = - { - val (prfx, s) = split_path - prfx + Path.basic(s + "~") - } - - private val Ext = new Regex("(.*)\\.([^.]*)") - - def split_ext: (Path, String) = - { - val (prefix, base) = split_path - base match { - case Ext(b, e) => (prefix + Path.basic(b), e) - case _ => (prefix + Path.basic(base), "") - } - } - - - /* expand */ - - def expand: Path = - { - def eval(elem: Path.Elem): List[Path.Elem] = - elem match { - case Path.Variable(s) => - val path = Path.explode(Isabelle_System.getenv_strict(s)) - if (path.elems.exists(_.isInstanceOf[Path.Variable])) - error("Illegal path variable nesting: " + s + "=" + path.toString) - else path.elems - case x => List(x) - } - - new Path(Path.norm_elems(elems.map(eval).flatten)) - } - - - /* source position */ - - def position: Position.T = Position.File(implode) - - - /* platform file */ - - def file: JFile = Isabelle_System.platform_file(this) - def is_file: Boolean = file.isFile - def is_dir: Boolean = file.isDirectory -} diff --git a/core/Pure/General/position.ML b/core/Pure/General/position.ML deleted file mode 100644 index 20e66968..00000000 --- a/core/Pure/General/position.ML +++ /dev/null @@ -1,239 +0,0 @@ -(* Title: Pure/General/position.ML - Author: Markus Wenzel, TU Muenchen - -Source positions: counting Isabelle symbols, starting from 1. 
-*) - -signature POSITION = -sig - eqtype T - val make: {line: int, offset: int, end_offset: int, props: Properties.T} -> T - val dest: T -> {line: int, offset: int, end_offset: int, props: Properties.T} - val line_of: T -> int option - val offset_of: T -> int option - val end_offset_of: T -> int option - val file_of: T -> string option - val advance: Symbol.symbol -> T -> T - val distance_of: T -> T -> int - val none: T - val start: T - val file_name: string -> Properties.T - val file_only: string -> T - val file: string -> T - val line_file_only: int -> string -> T - val line_file: int -> string -> T - val line: int -> T - val id: string -> T - val id_only: string -> T - val get_id: T -> string option - val put_id: string -> T -> T - val parse_id: T -> int option - val of_properties: Properties.T -> T - val properties_of: T -> Properties.T - val def_properties_of: T -> Properties.T - val entity_properties_of: bool -> serial -> T -> Properties.T - val default_properties: T -> Properties.T -> Properties.T - val markup: T -> Markup.T -> Markup.T - val is_reported: T -> bool - val is_reported_range: T -> bool - val reported_text: T -> Markup.T -> string -> string - val report_text: T -> Markup.T -> string -> unit - val report: T -> Markup.T -> unit - type report = T * Markup.T - type report_text = report * string - val reports_text: report_text list -> unit - val reports: report list -> unit - val store_reports: report_text list Unsynchronized.ref -> - T list -> ('a -> Markup.T list) -> 'a -> unit - val append_reports: report_text list Unsynchronized.ref -> report list -> unit - val here: T -> string - val here_list: T list -> string - type range = T * T - val no_range: range - val set_range: range -> T - val reset_range: T -> T - val range: T -> T -> range - val thread_data: unit -> T - val setmp_thread_data: T -> ('a -> 'b) -> 'a -> 'b - val default: T -> bool * T -end; - -structure Position: POSITION = -struct - -(* datatype position *) - -datatype T = Pos of (int * int * int) * Properties.T; - -fun norm_props (props: Properties.T) = - maps (fn a => the_list (find_first (fn (b, _) => a = b) props)) - Markup.position_properties'; - -fun make {line = i, offset = j, end_offset = k, props} = Pos ((i, j, k), norm_props props); -fun dest (Pos ((i, j, k), props)) = {line = i, offset = j, end_offset = k, props = props}; - -fun valid (i: int) = i > 0; -fun if_valid i i' = if valid i then i' else i; - - -(* fields *) - -fun line_of (Pos ((i, _, _), _)) = if valid i then SOME i else NONE; -fun offset_of (Pos ((_, j, _), _)) = if valid j then SOME j else NONE; -fun end_offset_of (Pos ((_, _, k), _)) = if valid k then SOME k else NONE; - -fun file_of (Pos (_, props)) = Properties.get props Markup.fileN; - - -(* advance *) - -fun advance_count "\n" (i: int, j: int, k: int) = - (if_valid i (i + 1), if_valid j (j + 1), k) - | advance_count s (i, j, k) = - if Symbol.is_regular s then (i, if_valid j (j + 1), k) - else (i, j, k); - -fun invalid_count (i, j, _: int) = - not (valid i orelse valid j); - -fun advance sym (pos as (Pos (count, props))) = - if invalid_count count then pos else Pos (advance_count sym count, props); - - -(* distance of adjacent positions *) - -fun distance_of (Pos ((_, j, _), _)) (Pos ((_, j', _), _)) = - if valid j andalso valid j' then j' - j - else 0; - - -(* make position *) - -val none = Pos ((0, 0, 0), []); -val start = Pos ((1, 1, 0), []); - - -fun file_name "" = [] - | file_name name = [(Markup.fileN, name)]; - -fun file_only name = Pos ((0, 0, 0), file_name name); -fun file 
name = Pos ((1, 1, 0), file_name name); - -fun line_file_only i name = Pos ((i, 0, 0), file_name name); -fun line_file i name = Pos ((i, 1, 0), file_name name); -fun line i = line_file i ""; - -fun id id = Pos ((0, 1, 0), [(Markup.idN, id)]); -fun id_only id = Pos ((0, 0, 0), [(Markup.idN, id)]); - -fun get_id (Pos (_, props)) = Properties.get props Markup.idN; -fun put_id id (Pos (count, props)) = Pos (count, Properties.put (Markup.idN, id) props); - -fun parse_id pos = Option.map Markup.parse_int (get_id pos); - - -(* markup properties *) - -fun of_properties props = - let - fun get name = - (case Properties.get props name of - NONE => 0 - | SOME s => Markup.parse_int s); - in - make {line = get Markup.lineN, offset = get Markup.offsetN, - end_offset = get Markup.end_offsetN, props = props} - end; - - -fun value k i = if valid i then [(k, Markup.print_int i)] else []; - -fun properties_of (Pos ((i, j, k), props)) = - value Markup.lineN i @ value Markup.offsetN j @ value Markup.end_offsetN k @ props; - -val def_properties_of = properties_of #> map (fn (x, y) => ("def_" ^ x, y)); - -fun entity_properties_of def id pos = - if def then (Markup.defN, Markup.print_int id) :: properties_of pos - else (Markup.refN, Markup.print_int id) :: def_properties_of pos; - -fun default_properties default props = - if exists (member (op =) Markup.position_properties o #1) props then props - else properties_of default @ props; - -val markup = Markup.properties o properties_of; - - -(* reports *) - -fun is_reported pos = is_some (offset_of pos) andalso is_some (get_id pos); -fun is_reported_range pos = is_reported pos andalso is_some (end_offset_of pos); - -fun reported_text pos m txt = if is_reported pos then Markup.markup (markup pos m) txt else ""; -fun report_text pos markup txt = Output.report [reported_text pos markup txt]; -fun report pos markup = report_text pos markup ""; - -type report = T * Markup.T; -type report_text = report * string; - -val reports_text = - map (fn ((pos, m), txt) => if is_reported pos then Markup.markup (markup pos m) txt else "") - #> Output.report; - -val reports = map (rpair "") #> reports_text; - -fun store_reports _ [] _ _ = () - | store_reports (r: report_text list Unsynchronized.ref) ps markup x = - let val ms = markup x - in Unsynchronized.change r (fold (fn p => fold (fn m => cons ((p, m), "")) ms) ps) end; - -fun append_reports (r: report_text list Unsynchronized.ref) reports = - Unsynchronized.change r (append (map (rpair "") reports)); - - -(* here: user output *) - -fun here pos = - let - val props = properties_of pos; - val (s1, s2) = - (case (line_of pos, file_of pos) of - (SOME i, NONE) => (" ", "(line " ^ Markup.print_int i ^ ")") - | (SOME i, SOME name) => (" ", "(line " ^ Markup.print_int i ^ " of " ^ quote name ^ ")") - | (NONE, SOME name) => (" ", "(file " ^ quote name ^ ")") - | _ => if is_reported pos then ("", "\\") else ("", "")); - in - if null props then "" - else s1 ^ Markup.markup (Markup.properties props Markup.position) s2 - end; - -val here_list = space_implode " " o map here; - - -(* range *) - -type range = T * T; - -val no_range = (none, none); - -fun set_range (Pos ((i, j, _), props), Pos ((_, j', _), _)) = Pos ((i, j, j'), props); -fun reset_range (Pos ((i, j, _), props)) = Pos ((i, j, 0), props); - -fun range pos pos' = (set_range (pos, pos'), pos'); - - -(* thread data *) - -local val tag = Universal.tag () : T Universal.tag in - -fun thread_data () = the_default none (Thread.getLocal tag); - -fun setmp_thread_data pos = 
Library.setmp_thread_data tag (thread_data ()) pos; - -end; - -fun default pos = - if pos = none then (false, thread_data ()) - else (true, pos); - -end; diff --git a/core/Pure/General/position.scala b/core/Pure/General/position.scala deleted file mode 100644 index acdbe336..00000000 --- a/core/Pure/General/position.scala +++ /dev/null @@ -1,119 +0,0 @@ -/* Title: Pure/General/position.scala - Author: Makarius - -Position properties. -*/ - -package isabelle - - -import java.io.{File => JFile} - - -object Position -{ - type T = Properties.T - - val none: T = Nil - - val Line = new Properties.Int(Markup.LINE) - val Offset = new Properties.Int(Markup.OFFSET) - val End_Offset = new Properties.Int(Markup.END_OFFSET) - val File = new Properties.String(Markup.FILE) - val Id = new Properties.Long(Markup.ID) - - val Def_Line = new Properties.Int(Markup.DEF_LINE) - val Def_Offset = new Properties.Int(Markup.DEF_OFFSET) - val Def_End_Offset = new Properties.Int(Markup.DEF_END_OFFSET) - val Def_File = new Properties.String(Markup.DEF_FILE) - val Def_Id = new Properties.Long(Markup.DEF_ID) - - object Line_File - { - def apply(line: Int, file: String): T = - (if (line > 0) Line(line) else Nil) ::: - (if (file != "") File(file) else Nil) - - def unapply(pos: T): Option[(Int, String)] = - (pos, pos) match { - case (Line(i), File(name)) => Some((i, name)) - case (_, File(name)) => Some((1, name)) - case _ => None - } - } - - object Def_Line_File - { - def unapply(pos: T): Option[(Int, String)] = - (pos, pos) match { - case (Def_Line(i), Def_File(name)) => Some((i, name)) - case (_, Def_File(name)) => Some((1, name)) - case _ => None - } - } - - object Range - { - def apply(range: Symbol.Range): T = Offset(range.start) ::: Offset(range.stop) - def unapply(pos: T): Option[Symbol.Range] = - (pos, pos) match { - case (Offset(start), End_Offset(stop)) if start <= stop => Some(Text.Range(start, stop)) - case (Offset(start), _) => Some(Text.Range(start, start + 1)) - case _ => None - } - } - - object Id_Offset - { - def unapply(pos: T): Option[(Long, Symbol.Offset)] = - (pos, pos) match { - case (Id(id), Offset(offset)) => Some((id, offset)) - case _ => None - } - } - - object Def_Id_Offset - { - def unapply(pos: T): Option[(Long, Symbol.Offset)] = - (pos, pos) match { - case (Def_Id(id), Def_Offset(offset)) => Some((id, offset)) - case _ => None - } - } - - object Reported - { - def unapply(pos: T): Option[(Long, Symbol.Text_Chunk.Name, Symbol.Range)] = - (pos, pos) match { - case (Id(id), Range(range)) => - val chunk_name = - pos match { - case File(name) => Symbol.Text_Chunk.File(name) - case _ => Symbol.Text_Chunk.Default - } - Some((id, chunk_name, range)) - case _ => None - } - } - - def purge(props: T): T = props.filterNot(p => Markup.POSITION_PROPERTIES(p._1)) - - - /* here: user output */ - - def here(pos: T): String = - (Line.unapply(pos), File.unapply(pos)) match { - case (Some(i), None) => " (line " + i.toString + ")" - case (Some(i), Some(name)) => " (line " + i.toString + " of " + quote(name) + ")" - case (None, Some(name)) => " (file " + quote(name) + ")" - case _ => "" - } - - def here_undelimited(pos: T): String = - (Line.unapply(pos), File.unapply(pos)) match { - case (Some(i), None) => "line " + i.toString - case (Some(i), Some(name)) => "line " + i.toString + " of " + quote(name) - case (None, Some(name)) => "file " + quote(name) - case _ => "" - } -} diff --git a/core/Pure/General/pretty.ML b/core/Pure/General/pretty.ML deleted file mode 100644 index c09ccb00..00000000 --- 
a/core/Pure/General/pretty.ML +++ /dev/null @@ -1,389 +0,0 @@ -(* Title: Pure/General/pretty.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - Author: Markus Wenzel, TU Munich - -Generic pretty printing module. - -Loosely based on - D. C. Oppen, "Pretty Printing", - ACM Transactions on Programming Languages and Systems (1980), 465-483. - -The object to be printed is given as a tree with indentation and line -breaking information. A "break" inserts a newline if the text until -the next break is too long to fit on the current line. After the newline, -text is indented to the level of the enclosing block. Normally, if a block -is broken then all enclosing blocks will also be broken. Only "inconsistent -breaks" are provided. - -The stored length of a block is used in breakdist (to treat each inner block as -a unit for breaking). -*) - -signature PRETTY = -sig - val spaces: int -> string - val default_indent: string -> int -> Output.output - val add_mode: string -> (string -> int -> Output.output) -> unit - type T - val str: string -> T - val brk: int -> T - val fbrk: T - val breaks: T list -> T list - val fbreaks: T list -> T list - val blk: int * T list -> T - val block: T list -> T - val strs: string list -> T - val raw_markup: Output.output * Output.output -> int * T list -> T - val markup: Markup.T -> T list -> T - val mark: Markup.T -> T -> T - val mark_str: Markup.T * string -> T - val marks_str: Markup.T list * string -> T - val item: T list -> T - val text_fold: T list -> T - val keyword1: string -> T - val keyword2: string -> T - val text: string -> T list - val paragraph: T list -> T - val para: string -> T - val quote: T -> T - val backquote: T -> T - val cartouche: T -> T - val separate: string -> T list -> T list - val commas: T list -> T list - val enclose: string -> string -> T list -> T - val enum: string -> string -> string -> T list -> T - val position: Position.T -> T - val list: string -> string -> T list -> T - val str_list: string -> string -> string list -> T - val big_list: string -> T list -> T - val indent: int -> T -> T - val unbreakable: T -> T - val margin_default: int Unsynchronized.ref - val symbolicN: string - val output_buffer: int option -> T -> Buffer.T - val output: int option -> T -> Output.output - val string_of_margin: int -> T -> string - val string_of: T -> string - val writeln: T -> unit - val symbolic_output: T -> Output.output - val symbolic_string_of: T -> string - val str_of: T -> string - val markup_chunks: Markup.T -> T list -> T - val chunks: T list -> T - val chunks2: T list -> T - val block_enclose: T * T -> T list -> T - val writeln_chunks: T list -> unit - val writeln_chunks2: T list -> unit - val to_ML: T -> ML_Pretty.pretty - val from_ML: ML_Pretty.pretty -> T -end; - -structure Pretty: PRETTY = -struct - -(** spaces **) - -local - val small_spaces = Vector.tabulate (65, fn i => replicate_string i Symbol.space); -in - fun spaces k = - if k < 64 then Vector.sub (small_spaces, k) - else - replicate_string (k div 64) (Vector.sub (small_spaces, 64)) ^ - Vector.sub (small_spaces, k mod 64); -end; - - - -(** print mode operations **) - -fun default_indent (_: string) = spaces; - -local - val default = {indent = default_indent}; - val modes = Synchronized.var "Pretty.modes" (Symtab.make [("", default)]); -in - fun add_mode name indent = - Synchronized.change modes (fn tab => - (if not (Symtab.defined tab name) then () - else warning ("Redefining pretty mode " ^ quote name); - Symtab.update (name, {indent = indent}) 
tab)); - fun get_mode () = - the_default default - (Library.get_first (Symtab.lookup (Synchronized.value modes)) (print_mode_value ())); -end; - -fun mode_indent x y = #indent (get_mode ()) x y; - -val output_spaces = Output.output o spaces; -val add_indent = Buffer.add o output_spaces; - - - -(** printing items: compound phrases, strings, and breaks **) - -abstype T = - Block of (Output.output * Output.output) * T list * int * int - (*markup output, body, indentation, length*) - | String of Output.output * int (*text, length*) - | Break of bool * int (*mandatory flag, width if not taken*) -with - -fun length (Block (_, _, _, len)) = len - | length (String (_, len)) = len - | length (Break (_, wd)) = wd; - - - -(** derived operations to create formatting expressions **) - -val str = String o Output.output_width; - -fun brk wd = Break (false, wd); -val fbrk = Break (true, 1); - -fun breaks prts = Library.separate (brk 1) prts; -fun fbreaks prts = Library.separate fbrk prts; - -fun raw_markup m (indent, es) = - let - fun sum [] k = k - | sum (e :: es) k = sum es (length e + k); - in Block (m, es, indent, sum es 0) end; - -fun markup_block m arg = raw_markup (Markup.output m) arg; - -val blk = markup_block Markup.empty; -fun block prts = blk (2, prts); -val strs = block o breaks o map str; - -fun markup m prts = markup_block m (0, prts); -fun mark m prt = if m = Markup.empty then prt else markup m [prt]; -fun mark_str (m, s) = mark m (str s); -fun marks_str (ms, s) = fold_rev mark ms (str s); - -val item = markup Markup.item; -val text_fold = markup Markup.text_fold; - -fun keyword1 name = mark_str (Markup.keyword1, name); -fun keyword2 name = mark_str (Markup.keyword2, name); - -val text = breaks o map str o Symbol.explode_words; -val paragraph = markup Markup.paragraph; -val para = paragraph o text; - -fun quote prt = blk (1, [str "\"", prt, str "\""]); -fun backquote prt = blk (1, [str "`", prt, str "`"]); -fun cartouche prt = blk (1, [str "\\", prt, str "\\"]); - -fun separate sep prts = - flat (Library.separate [str sep, brk 1] (map single prts)); - -val commas = separate ","; - -fun enclose lpar rpar prts = - block (str lpar :: (prts @ [str rpar])); - -fun enum sep lpar rpar prts = enclose lpar rpar (separate sep prts); - -val position = - enum "," "{" "}" o map (fn (x, y) => str (x ^ "=" ^ y)) o Position.properties_of; - -val list = enum ","; -fun str_list lpar rpar strs = list lpar rpar (map str strs); - -fun big_list name prts = block (fbreaks (str name :: prts)); - -fun indent 0 prt = prt - | indent n prt = blk (0, [str (spaces n), prt]); - -fun unbreakable (Break (_, wd)) = String (output_spaces wd, wd) - | unbreakable (Block (m, es, indent, wd)) = Block (m, map unbreakable es, indent, wd) - | unbreakable (e as String _) = e; - - - -(** formatting **) - -(* formatted output *) - -local - -type text = {tx: Buffer.T, ind: Buffer.T, pos: int, nl: int}; - -val empty: text = - {tx = Buffer.empty, - ind = Buffer.empty, - pos = 0, - nl = 0}; - -fun newline {tx, ind = _, pos = _, nl} : text = - {tx = Buffer.add (Output.output "\n") tx, - ind = Buffer.empty, - pos = 0, - nl = nl + 1}; - -fun control s {tx, ind, pos: int, nl} : text = - {tx = Buffer.add s tx, - ind = ind, - pos = pos, - nl = nl}; - -fun string (s, len) {tx, ind, pos: int, nl} : text = - {tx = Buffer.add s tx, - ind = Buffer.add s ind, - pos = pos + len, - nl = nl}; - -fun blanks wd = string (output_spaces wd, wd); - -fun indentation (buf, len) {tx, ind, pos, nl} : text = - let val s = Buffer.content buf in - {tx = Buffer.add 
(mode_indent s len) tx, - ind = Buffer.add s ind, - pos = pos + len, - nl = nl} - end; - -(*Add the lengths of the expressions until the next Break; if no Break then - include "after", to account for text following this block.*) -fun breakdist (Break _ :: _, _) = 0 - | breakdist (Block (_, _, _, len) :: es, after) = len + breakdist (es, after) - | breakdist (String (_, len) :: es, after) = len + breakdist (es, after) - | breakdist ([], after) = after; - -(*Search for the next break (at this or higher levels) and force it to occur.*) -fun forcenext [] = [] - | forcenext (Break _ :: es) = fbrk :: es - | forcenext (e :: es) = e :: forcenext es; - -in - -fun formatted margin input = - let - val breakgain = margin div 20; (*minimum added space required of a break*) - val emergencypos = margin div 2; (*position too far to right*) - - (*es is list of expressions to print; - blockin is the indentation of the current block; - after is the width of the following context until next break.*) - fun format ([], _, _) text = text - | format (e :: es, block as (_, blockin), after) (text as {ind, pos, nl, ...}) = - (case e of - Block ((bg, en), bes, indent, _) => - let - val pos' = pos + indent; - val pos'' = pos' mod emergencypos; - val block' = - if pos' < emergencypos then (ind |> add_indent indent, pos') - else (add_indent pos'' Buffer.empty, pos''); - val btext: text = text - |> control bg - |> format (bes, block', breakdist (es, after)) - |> control en; - (*if this block was broken then force the next break*) - val es' = if nl < #nl btext then forcenext es else es; - in format (es', block, after) btext end - | Break (force, wd) => - (*no break if text to next break fits on this line - or if breaking would add only breakgain to space*) - format (es, block, after) - (if not force andalso - pos + wd <= Int.max (margin - breakdist (es, after), blockin + breakgain) - then text |> blanks wd (*just insert wd blanks*) - else text |> newline |> indentation block) - | String str => format (es, block, after) (string str text)); - in - #tx (format ([input], (Buffer.empty, 0), 0) empty) - end; - -end; - - -(* special output *) - -(*symbolic markup -- no formatting*) -fun symbolic prt = - let - fun out (Block ((bg, en), [], _, _)) = Buffer.add bg #> Buffer.add en - | out (Block ((bg, en), prts, indent, _)) = - Buffer.add bg #> - Buffer.markup (Markup.block indent) (fold out prts) #> - Buffer.add en - | out (String (s, _)) = Buffer.add s - | out (Break (false, wd)) = - Buffer.markup (Markup.break wd) (Buffer.add (output_spaces wd)) - | out (Break (true, _)) = Buffer.add (Output.output "\n"); - in out prt Buffer.empty end; - -(*unformatted output*) -fun unformatted prt = - let - fun fmt (Block ((bg, en), prts, _, _)) = Buffer.add bg #> fold fmt prts #> Buffer.add en - | fmt (String (s, _)) = Buffer.add s - | fmt (Break (_, wd)) = Buffer.add (output_spaces wd); - in fmt prt Buffer.empty end; - - -(* output interfaces *) - -val margin_default = Unsynchronized.ref 76; (*right margin, or page width*) - -val symbolicN = "pretty_symbolic"; - -fun output_buffer margin prt = - if print_mode_active symbolicN then symbolic prt - else formatted (the_default (! 
margin_default) margin) prt; - -val output = Buffer.content oo output_buffer; -fun string_of_margin margin = Output.escape o output (SOME margin); -val string_of = Output.escape o output NONE; -val writeln = Output.writeln o string_of; - -val symbolic_output = Buffer.content o symbolic; -val symbolic_string_of = Output.escape o symbolic_output; - -val str_of = Output.escape o Buffer.content o unformatted; - - -(* chunks *) - -fun markup_chunks m prts = markup m (fbreaks (map (text_fold o single) prts)); -val chunks = markup_chunks Markup.empty; - -fun chunks2 prts = - (case try split_last prts of - NONE => blk (0, []) - | SOME (prefix, last) => - blk (0, maps (fn prt => [text_fold [prt, fbrk], fbrk]) prefix @ [text_fold [last]])); - -fun block_enclose (prt1, prt2) prts = chunks [block (fbreaks (prt1 :: prts)), prt2]; - -fun string_of_text_fold prt = string_of prt |> Markup.markup Markup.text_fold; - -fun writeln_chunks prts = - Output.writelns (Library.separate "\n" (map string_of_text_fold prts)); - -fun writeln_chunks2 prts = - (case try split_last prts of - NONE => () - | SOME (prefix, last) => - (map (fn prt => Markup.markup Markup.text_fold (string_of prt ^ "\n") ^ "\n") prefix @ - [string_of_text_fold last]) - |> Output.writelns); - - - -(** ML toplevel pretty printing **) - -fun to_ML (Block (m, prts, ind, _)) = ML_Pretty.Block (m, map to_ML prts, ind) - | to_ML (String s) = ML_Pretty.String s - | to_ML (Break b) = ML_Pretty.Break b; - -fun from_ML (ML_Pretty.Block (m, prts, ind)) = raw_markup m (ind, map from_ML prts) - | from_ML (ML_Pretty.String s) = String s - | from_ML (ML_Pretty.Break b) = Break b; - -end; - -end; diff --git a/core/Pure/General/pretty.scala b/core/Pure/General/pretty.scala deleted file mode 100644 index 3da14471..00000000 --- a/core/Pure/General/pretty.scala +++ /dev/null @@ -1,189 +0,0 @@ -/* Title: Pure/General/pretty.scala - Author: Makarius - -Generic pretty printing module. 
-*/ - -package isabelle - - -object Pretty -{ - /* spaces */ - - val space = " " - - private val static_spaces = space * 4000 - - def spaces(k: Int): String = - { - require(k >= 0) - if (k < static_spaces.length) static_spaces.substring(0, k) - else space * k - } - - - /* text metric -- standardized to width of space */ - - abstract class Metric - { - val unit: Double - def apply(s: String): Double - } - - object Metric_Default extends Metric - { - val unit = 1.0 - def apply(s: String): Double = s.length.toDouble - } - - - /* markup trees with physical blocks and breaks */ - - def block(body: XML.Body): XML.Tree = Block(2, body) - - object Block - { - def apply(i: Int, body: XML.Body): XML.Tree = - XML.Elem(Markup.Block(i), body) - - def unapply(tree: XML.Tree): Option[(Int, XML.Body)] = - tree match { - case XML.Elem(Markup.Block(i), body) => Some((i, body)) - case _ => None - } - } - - object Break - { - def apply(w: Int): XML.Tree = - XML.Elem(Markup.Break(w), List(XML.Text(spaces(w)))) - - def unapply(tree: XML.Tree): Option[Int] = - tree match { - case XML.Elem(Markup.Break(w), _) => Some(w) - case _ => None - } - } - - val FBreak = XML.Text("\n") - - def item(body: XML.Body): XML.Tree = - Block(2, XML.elem(Markup.BULLET, List(XML.Text(space))) :: XML.Text(space) :: body) - - val Separator = List(XML.elem(Markup.SEPARATOR, List(XML.Text(space))), FBreak) - def separate(ts: List[XML.Tree]): XML.Body = Library.separate(Separator, ts.map(List(_))).flatten - - - /* standard form */ - - def standard_form(body: XML.Body): XML.Body = - body flatMap { - case XML.Wrapped_Elem(markup, body1, body2) => - List(XML.Wrapped_Elem(markup, body1, standard_form(body2))) - case XML.Elem(markup, body) => - if (markup.name == Markup.ITEM) List(item(standard_form(body))) - else List(XML.Elem(markup, standard_form(body))) - case XML.Text(text) => Library.separate(FBreak, split_lines(text).map(XML.Text)) - } - - - /* formatted output */ - - private val margin_default = 76.0 - - def formatted(input: XML.Body, margin: Double = margin_default, - metric: Metric = Metric_Default): XML.Body = - { - sealed case class Text(tx: XML.Body = Nil, pos: Double = 0.0, nl: Int = 0) - { - def newline: Text = copy(tx = FBreak :: tx, pos = 0.0, nl = nl + 1) - def string(s: String): Text = copy(tx = XML.Text(s) :: tx, pos = pos + metric(s)) - def blanks(wd: Int): Text = string(spaces(wd)) - def content: XML.Body = tx.reverse - } - - val breakgain = margin / 20 - val emergencypos = (margin / 2).round.toInt - - def content_length(tree: XML.Tree): Double = - XML.traverse_text(List(tree))(0.0)(_ + metric(_)) - - def breakdist(trees: XML.Body, after: Double): Double = - trees match { - case Break(_) :: _ => 0.0 - case FBreak :: _ => 0.0 - case t :: ts => content_length(t) + breakdist(ts, after) - case Nil => after - } - - def forcenext(trees: XML.Body): XML.Body = - trees match { - case Nil => Nil - case FBreak :: _ => trees - case Break(_) :: ts => FBreak :: ts - case t :: ts => t :: forcenext(ts) - } - - def format(trees: XML.Body, blockin: Int, after: Double, text: Text): Text = - trees match { - case Nil => text - - case Block(indent, body) :: ts => - val pos1 = (text.pos + indent).ceil.toInt - val pos2 = pos1 % emergencypos - val blockin1 = - if (pos1 < emergencypos) pos1 - else pos2 - val btext = format(body, blockin1, breakdist(ts, after), text) - val ts1 = if (text.nl < btext.nl) forcenext(ts) else ts - format(ts1, blockin, after, btext) - - case Break(wd) :: ts => - if (text.pos + wd <= ((margin - breakdist(ts, after)) max 
(blockin + breakgain))) - format(ts, blockin, after, text.blanks(wd)) - else format(ts, blockin, after, text.newline.blanks(blockin)) - case FBreak :: ts => format(ts, blockin, after, text.newline.blanks(blockin)) - - case XML.Wrapped_Elem(markup, body1, body2) :: ts => - val btext = format(body2, blockin, breakdist(ts, after), text.copy(tx = Nil)) - val ts1 = if (text.nl < btext.nl) forcenext(ts) else ts - val btext1 = btext.copy(tx = XML.Wrapped_Elem(markup, body1, btext.content) :: text.tx) - format(ts1, blockin, after, btext1) - - case XML.Elem(markup, body) :: ts => - val btext = format(body, blockin, breakdist(ts, after), text.copy(tx = Nil)) - val ts1 = if (text.nl < btext.nl) forcenext(ts) else ts - val btext1 = btext.copy(tx = XML.Elem(markup, btext.content) :: text.tx) - format(ts1, blockin, after, btext1) - - case XML.Text(s) :: ts => format(ts, blockin, after, text.string(s)) - } - - format(standard_form(input), 0, 0.0, Text()).content - } - - def string_of(input: XML.Body, margin: Double = margin_default, - metric: Metric = Metric_Default): String = - XML.content(formatted(input, margin, metric)) - - - /* unformatted output */ - - def unformatted(input: XML.Body): XML.Body = - { - def fmt(tree: XML.Tree): XML.Body = - tree match { - case Block(_, body) => body.flatMap(fmt) - case Break(wd) => List(XML.Text(spaces(wd))) - case FBreak => List(XML.Text(space)) - case XML.Wrapped_Elem(markup, body1, body2) => - List(XML.Wrapped_Elem(markup, body1, body2.flatMap(fmt))) - case XML.Elem(markup, body) => List(XML.Elem(markup, body.flatMap(fmt))) - case XML.Text(_) => List(tree) - } - standard_form(input).flatMap(fmt) - } - - def str_of(input: XML.Body): String = XML.content(unformatted(input)) -} diff --git a/core/Pure/General/print_mode.ML b/core/Pure/General/print_mode.ML deleted file mode 100644 index 66aea57f..00000000 --- a/core/Pure/General/print_mode.ML +++ /dev/null @@ -1,55 +0,0 @@ -(* Title: Pure/General/print_mode.ML - Author: Makarius - -Generic print mode as thread-local value derived from global template; -provides implicit configuration for various output mechanisms. - -The special print mode "input" is never enabled for output. -*) - -signature BASIC_PRINT_MODE = -sig - val print_mode: string list Unsynchronized.ref (*global template*) - val print_mode_value: unit -> string list (*thread-local value*) - val print_mode_active: string -> bool (*thread-local value*) -end; - -signature PRINT_MODE = -sig - include BASIC_PRINT_MODE - val input: string - val internal: string - val setmp: string list -> ('a -> 'b) -> 'a -> 'b - val with_modes: string list -> ('a -> 'b) -> 'a -> 'b - val closure: ('a -> 'b) -> 'a -> 'b -end; - -structure Print_Mode: PRINT_MODE = -struct - -val input = "input"; -val internal = "internal"; - -val print_mode = Unsynchronized.ref ([]: string list); -val tag = Universal.tag () : string list option Universal.tag; - -fun print_mode_value () = - let val modes = - (case Thread.getLocal tag of - SOME (SOME modes) => modes - | _ => ! 
print_mode) - in subtract (op =) [input, internal] modes end; - -fun print_mode_active mode = member (op =) (print_mode_value ()) mode; - -fun setmp modes f x = - let val orig_modes = (case Thread.getLocal tag of SOME (SOME ms) => SOME ms | _ => NONE) - in setmp_thread_data tag orig_modes (SOME modes) f x end; - -fun with_modes modes f x = setmp (modes @ print_mode_value ()) f x; -fun closure f = with_modes [] f; - -end; - -structure Basic_Print_Mode: BASIC_PRINT_MODE = Print_Mode; -open Basic_Print_Mode; diff --git a/core/Pure/General/properties.ML b/core/Pure/General/properties.ML deleted file mode 100644 index aa362324..00000000 --- a/core/Pure/General/properties.ML +++ /dev/null @@ -1,34 +0,0 @@ -(* Title: Pure/General/properties.ML - Author: Makarius - -Property lists. -*) - -signature PROPERTIES = -sig - type entry = string * string - type T = entry list - val defined: T -> string -> bool - val get: T -> string -> string option - val put: entry -> T -> T - val remove: string -> T -> T - val seconds: T -> string -> Time.time -end; - -structure Properties: PROPERTIES = -struct - -type entry = string * string; -type T = entry list; - -fun defined (props: T) name = AList.defined (op =) props name; -fun get (props: T) name = AList.lookup (op =) props name; -fun put entry (props: T) = AList.update (op =) entry props; -fun remove name (props: T) = AList.delete (op =) name props; - -fun seconds props name = - (case AList.lookup (op =) props name of - NONE => Time.zeroTime - | SOME s => Time.fromReal (the_default 0.0 (Real.fromString s))); - -end; diff --git a/core/Pure/General/properties.scala b/core/Pure/General/properties.scala deleted file mode 100644 index 1af94da6..00000000 --- a/core/Pure/General/properties.scala +++ /dev/null @@ -1,106 +0,0 @@ -/* Title: Pure/General/properties.scala - Module: PIDE - Author: Makarius - -Property lists. 
-*/ - -package isabelle - - -object Properties -{ - /* plain values */ - - object Value - { - object Boolean - { - def apply(x: scala.Boolean): java.lang.String = x.toString - def unapply(s: java.lang.String): Option[scala.Boolean] = - s match { - case "true" => Some(true) - case "false" => Some(false) - case _ => None - } - } - - object Int - { - def apply(x: scala.Int): java.lang.String = x.toString - def unapply(s: java.lang.String): Option[scala.Int] = - try { Some(Integer.parseInt(s)) } - catch { case _: NumberFormatException => None } - } - - object Long - { - def apply(x: scala.Long): java.lang.String = x.toString - def unapply(s: java.lang.String): Option[scala.Long] = - try { Some(java.lang.Long.parseLong(s)) } - catch { case _: NumberFormatException => None } - } - - object Double - { - def apply(x: scala.Double): java.lang.String = x.toString - def unapply(s: java.lang.String): Option[scala.Double] = - try { Some(java.lang.Double.parseDouble(s)) } - catch { case _: NumberFormatException => None } - } - } - - - /* named entries */ - - type Entry = (java.lang.String, java.lang.String) - type T = List[Entry] - - class String(val name: java.lang.String) - { - def apply(value: java.lang.String): T = List((name, value)) - def unapply(props: T): Option[java.lang.String] = - props.find(_._1 == name).map(_._2) - } - - class Boolean(val name: java.lang.String) - { - def apply(value: scala.Boolean): T = List((name, Value.Boolean(value))) - def unapply(props: T): Option[scala.Boolean] = - props.find(_._1 == name) match { - case None => None - case Some((_, value)) => Value.Boolean.unapply(value) - } - } - - class Int(val name: java.lang.String) - { - def apply(value: scala.Int): T = List((name, Value.Int(value))) - def unapply(props: T): Option[scala.Int] = - props.find(_._1 == name) match { - case None => None - case Some((_, value)) => Value.Int.unapply(value) - } - } - - class Long(val name: java.lang.String) - { - def apply(value: scala.Long): T = List((name, Value.Long(value))) - def unapply(props: T): Option[scala.Long] = - props.find(_._1 == name) match { - case None => None - case Some((_, value)) => Value.Long.unapply(value) - } - } - - class Double(val name: java.lang.String) - { - def apply(value: scala.Double): T = List((name, Value.Double(value))) - def unapply(props: T): Option[scala.Double] = - props.find(_._1 == name) match { - case None => None - case Some((_, value)) => Value.Double.unapply(value) - } - } -} - diff --git a/core/Pure/General/queue.ML b/core/Pure/General/queue.ML deleted file mode 100644 index 530e1fcc..00000000 --- a/core/Pure/General/queue.ML +++ /dev/null @@ -1,35 +0,0 @@ -(* Title: Pure/General/queue.ML - Author: Makarius - -Efficient queues. 
-*) - -signature QUEUE = -sig - type 'a T - val empty: 'a T - val is_empty: 'a T -> bool - val content: 'a T -> 'a list - val enqueue: 'a -> 'a T -> 'a T - val dequeue: 'a T -> 'a * 'a T (*exception List.Empty*) -end; - -structure Queue: QUEUE = -struct - -datatype 'a T = Queue of 'a list * 'a list; - -val empty = Queue ([], []); - -fun is_empty (Queue ([], [])) = true - | is_empty _ = false; - -fun content (Queue (xs, ys)) = ys @ rev xs; - -fun enqueue x (Queue (xs, ys)) = Queue (x :: xs, ys); - -fun dequeue (Queue (xs, y :: ys)) = (y, Queue (xs, ys)) - | dequeue (Queue (xs as _ :: _, [])) = let val y :: ys = rev xs in (y, Queue ([], ys)) end - | dequeue (Queue ([], [])) = raise List.Empty; - -end; diff --git a/core/Pure/General/same.ML b/core/Pure/General/same.ML deleted file mode 100644 index 907a0d04..00000000 --- a/core/Pure/General/same.ML +++ /dev/null @@ -1,45 +0,0 @@ -(* Title: Pure/General/same.ML - Author: Makarius - -Support for copy-avoiding functions on pure values, at the cost of -readability. -*) - -signature SAME = -sig - exception SAME - type ('a, 'b) function = 'a -> 'b (*exception SAME*) - type 'a operation = ('a, 'a) function (*exception SAME*) - val same: ('a, 'b) function - val commit: 'a operation -> 'a -> 'a - val function: ('a -> 'b option) -> ('a, 'b) function - val capture: ('a, 'b) function -> 'a -> 'b option - val map: 'a operation -> 'a list operation - val map_option: ('a, 'b) function -> ('a option, 'b option) function -end; - -structure Same: SAME = -struct - -exception SAME; - -type ('a, 'b) function = 'a -> 'b; -type 'a operation = ('a, 'a) function; - -fun same _ = raise SAME; -fun commit f x = f x handle SAME => x; - -fun capture f x = SOME (f x) handle SAME => NONE; - -fun function f x = - (case f x of - NONE => raise SAME - | SOME y => y); - -fun map f [] = raise SAME - | map f (x :: xs) = (f x :: commit (map f) xs handle SAME => x :: map f xs); - -fun map_option f NONE = raise SAME - | map_option f (SOME x) = SOME (f x); - -end; diff --git a/core/Pure/General/scan.ML b/core/Pure/General/scan.ML deleted file mode 100644 index 3fe5469f..00000000 --- a/core/Pure/General/scan.ML +++ /dev/null @@ -1,333 +0,0 @@ -(* Title: Pure/General/scan.ML - Author: Markus Wenzel and Tobias Nipkow, TU Muenchen - -Generic scanners (for potentially infinite input). -*) - -infix 5 -- :-- :|-- |-- --| ^^; -infixr 5 ::: @@@; -infix 3 >>; -infixr 0 ||; - -signature BASIC_SCAN = -sig - type message = unit -> string - (*error msg handler*) - val !! 
: ('a * message option -> message) -> ('a -> 'b) -> 'a -> 'b - (*apply function*) - val >> : ('a -> 'b * 'c) * ('b -> 'd) -> 'a -> 'd * 'c - (*alternative*) - val || : ('a -> 'b) * ('a -> 'b) -> 'a -> 'b - (*sequential pairing*) - val -- : ('a -> 'b * 'c) * ('c -> 'd * 'e) -> 'a -> ('b * 'd) * 'e - (*dependent pairing*) - val :-- : ('a -> 'b * 'c) * ('b -> 'c -> 'd * 'e) -> 'a -> ('b * 'd) * 'e - (*projections*) - val :|-- : ('a -> 'b * 'c) * ('b -> 'c -> 'd * 'e) -> 'a -> 'd * 'e - val |-- : ('a -> 'b * 'c) * ('c -> 'd * 'e) -> 'a -> 'd * 'e - val --| : ('a -> 'b * 'c) * ('c -> 'd * 'e) -> 'a -> 'b * 'e - (*concatenation*) - val ^^ : ('a -> string * 'b) * ('b -> string * 'c) -> 'a -> string * 'c - val ::: : ('a -> 'b * 'c) * ('c -> 'b list * 'd) -> 'a -> 'b list * 'd - val @@@ : ('a -> 'b list * 'c) * ('c -> 'b list * 'd) -> 'a -> 'b list * 'd - (*one element literal*) - val $$ : string -> string list -> string * string list - val ~$$ : string -> string list -> string * string list -end; - -signature SCAN = -sig - include BASIC_SCAN - val prompt: string -> ('a -> 'b) -> 'a -> 'b - val permissive: ('a -> 'b) -> 'a -> 'b - val error: ('a -> 'b) -> 'a -> 'b - val catch: ('a -> 'b) -> 'a -> 'b (*exception Fail*) - val fail: 'a -> 'b - val fail_with: ('a -> message) -> 'a -> 'b - val succeed: 'a -> 'b -> 'a * 'b - val some: ('a -> 'b option) -> 'a list -> 'b * 'a list - val one: ('a -> bool) -> 'a list -> 'a * 'a list - val this: string list -> string list -> string list * string list - val this_string: string -> string list -> string * string list - val many: ('a -> bool) -> 'a list -> 'a list * 'a list - val many1: ('a -> bool) -> 'a list -> 'a list * 'a list - val optional: ('a -> 'b * 'a) -> 'b -> 'a -> 'b * 'a - val option: ('a -> 'b * 'a) -> 'a -> 'b option * 'a - val repeat: ('a -> 'b * 'a) -> 'a -> 'b list * 'a - val repeat1: ('a -> 'b * 'a) -> 'a -> 'b list * 'a - val single: ('a -> 'b * 'a) -> 'a -> 'b list * 'a - val bulk: ('a -> 'b * 'a) -> 'a -> 'b list * 'a - val max: ('a * 'a -> bool) -> ('b -> 'a * 'b) -> ('b -> 'a * 'b) -> 'b -> 'a * 'b - val ahead: ('a -> 'b * 'c) -> 'a -> 'b * 'a - val unless: ('a -> 'b * 'a) -> ('a -> 'c * 'd) -> 'a -> 'c * 'd - val first: ('a -> 'b) list -> 'a -> 'b - val state: 'a * 'b -> 'a * ('a * 'b) - val depend: ('a -> 'b -> ('c * 'd) * 'e) -> 'a * 'b -> 'd * ('c * 'e) - val peek: ('a -> 'b -> 'c * 'd) -> 'a * 'b -> 'c * ('a * 'd) - val provide: ('a -> bool) -> 'b -> ('b * 'c -> 'd * ('a * 'e)) -> 'c -> 'd * 'e - val pass: 'a -> ('a * 'b -> 'c * ('d * 'e)) -> 'b -> 'c * 'e - val lift: ('a -> 'b * 'c) -> 'd * 'a -> 'b * ('d * 'c) - val unlift: (unit * 'a -> 'b * ('c * 'd)) -> 'a -> 'b * 'd - val trace: ('a list -> 'b * 'c list) -> 'a list -> ('b * 'a list) * 'c list - type 'a stopper - val stopper: ('a list -> 'a) -> ('a -> bool) -> 'a stopper - val is_stopper: 'a stopper -> 'a -> bool - val finite': 'a stopper -> ('b * 'a list -> 'c * ('d * 'a list)) - -> 'b * 'a list -> 'c * ('d * 'a list) - val finite: 'a stopper -> ('a list -> 'b * 'a list) -> 'a list -> 'b * 'a list - val read: 'a stopper -> ('a list -> 'b * 'a list) -> 'a list -> 'b option - val drain: string -> (string -> 'a -> 'b list * 'a) -> 'b stopper -> - ('c * 'b list -> 'd * ('e * 'b list)) -> ('c * 'b list) * 'a -> ('d * ('e * 'b list)) * 'a - type lexicon - val is_literal: lexicon -> string list -> bool - val literal: lexicon -> (string * 'a) list -> (string * 'a) list * (string * 'a) list - val empty_lexicon: lexicon - val extend_lexicon: string list -> lexicon -> lexicon - val 
make_lexicon: string list list -> lexicon - val dest_lexicon: lexicon -> string list - val merge_lexicons: lexicon * lexicon -> lexicon -end; - -structure Scan: SCAN = -struct - - -(** scanners **) - -(* exceptions *) - -type message = unit -> string; - -exception MORE of string option; (*need more input (prompt)*) -exception FAIL of message option; (*try alternatives (reason of failure)*) -exception ABORT of message; (*dead end*) - -fun !! err scan xs = scan xs handle FAIL msg => raise ABORT (err (xs, msg)); -fun permissive scan xs = scan xs handle MORE _ => raise FAIL NONE | ABORT _ => raise FAIL NONE; -fun strict scan xs = scan xs handle MORE _ => raise FAIL NONE; -fun prompt str scan xs = scan xs handle MORE NONE => raise MORE (SOME str); -fun error scan xs = scan xs handle ABORT msg => Library.error (msg ()); - -fun catch scan xs = scan xs - handle ABORT msg => raise Fail (msg ()) - | FAIL msg => raise Fail (case msg of NONE => "Syntax error" | SOME m => m ()); - - -(* scanner combinators *) - -fun (scan >> f) xs = scan xs |>> f; - -fun (scan1 || scan2) xs = scan1 xs handle FAIL _ => scan2 xs; - -fun (scan1 :-- scan2) xs = - let - val (x, ys) = scan1 xs; - val (y, zs) = scan2 x ys; - in ((x, y), zs) end; - -fun (scan1 -- scan2) = scan1 :-- (fn _ => scan2); -fun (scan1 :|-- scan2) = scan1 :-- scan2 >> #2; -fun (scan1 |-- scan2) = scan1 -- scan2 >> #2; -fun (scan1 --| scan2) = scan1 -- scan2 >> #1; -fun (scan1 ^^ scan2) = scan1 -- scan2 >> op ^; -fun (scan1 ::: scan2) = scan1 -- scan2 >> op ::; -fun (scan1 @@@ scan2) = scan1 -- scan2 >> op @; - - -(* generic scanners *) - -fun fail _ = raise FAIL NONE; -fun fail_with msg_of xs = raise FAIL (SOME (msg_of xs)); -fun succeed y xs = (y, xs); - -fun some _ [] = raise MORE NONE - | some f (x :: xs) = - (case f x of SOME y => (y, xs) | _ => raise FAIL NONE); - -fun one _ [] = raise MORE NONE - | one pred (x :: xs) = - if pred x then (x, xs) else raise FAIL NONE; - -fun $$ a = one (fn s: string => s = a); -fun ~$$ a = one (fn s: string => s <> a); - -fun this ys xs = - let - fun drop_prefix [] xs = xs - | drop_prefix (_ :: _) [] = raise MORE NONE - | drop_prefix (y :: ys) (x :: xs) = - if (y: string) = x then drop_prefix ys xs else raise FAIL NONE; - in (ys, drop_prefix ys xs) end; - -fun this_string s = this (raw_explode s) >> K s; (*primitive string -- no symbols here!*) - -fun many _ [] = raise MORE NONE - | many pred (lst as x :: xs) = - if pred x then apfst (cons x) (many pred xs) - else ([], lst); - -fun many1 pred = one pred ::: many pred; - -fun optional scan def = scan || succeed def; -fun option scan = (scan >> SOME) || succeed NONE; - -fun repeat scan = - let - fun rep ys xs = - (case (SOME (scan xs) handle FAIL _ => NONE) of - NONE => (rev ys, xs) - | SOME (y, xs') => rep (y :: ys) xs'); - in rep [] end; - -fun repeat1 scan = scan ::: repeat scan; - -fun single scan = scan >> (fn x => [x]); -fun bulk scan = scan -- repeat (permissive scan) >> (op ::); - -fun max leq scan1 scan2 xs = - (case (option scan1 xs, option scan2 xs) of - ((NONE, _), (NONE, _)) => raise FAIL NONE (*looses FAIL msg!*) - | ((SOME tok1, xs'), (NONE, _)) => (tok1, xs') - | ((NONE, _), (SOME tok2, xs')) => (tok2, xs') - | ((SOME tok1, xs1'), (SOME tok2, xs2')) => - if leq (tok2, tok1) then (tok1, xs1') else (tok2, xs2')); - -fun ahead scan xs = (fst (scan xs), xs); - -fun unless test scan = - ahead (option test) :-- (fn NONE => scan | _ => fail) >> #2; - -fun first [] = fail - | first (scan :: scans) = scan || first scans; - - -(* state based scanners *) - -fun 
state (st, xs) = (st, (st, xs)); - -fun depend scan (st, xs) = - let val ((st', y), xs') = scan st xs - in (y, (st', xs')) end; - -fun peek scan = depend (fn st => scan st >> pair st); - -fun provide pred st scan xs = - let val (y, (st', xs')) = scan (st, xs) - in if pred st' then (y, xs') else fail () end; - -fun pass st = provide (K true) st; - -fun lift scan (st, xs) = - let val (y, xs') = scan xs - in (y, (st, xs')) end; - -fun unlift scan = pass () scan; - - -(* trace input *) - -fun trace scan xs = - let val (y, xs') = scan xs - in ((y, take (length xs - length xs') xs), xs') end; - - -(* stopper *) - -datatype 'a stopper = Stopper of ('a list -> 'a) * ('a -> bool); - -fun stopper mk_stopper is_stopper = Stopper (mk_stopper, is_stopper); -fun is_stopper (Stopper (_, is_stopper)) = is_stopper; - - -(* finite scans *) - -fun finite' (Stopper (mk_stopper, is_stopper)) scan (state, input) = - let - fun lost () = raise ABORT (fn () => "Bad scanner: lost stopper of finite scan!"); - - fun stop [] = lost () - | stop lst = - let val (xs, x) = split_last lst - in if is_stopper x then ((), xs) else lost () end; - in - if exists is_stopper input then - raise ABORT (fn () => "Stopper may not occur in input of finite scan!") - else (strict scan --| lift stop) (state, input @ [mk_stopper input]) - end; - -fun finite stopper scan = unlift (finite' stopper (lift scan)); - -fun read stopper scan xs = - (case error (finite stopper (option scan)) xs of - (y as SOME _, []) => y - | _ => NONE); - - -(* infinite scans -- draining state-based source *) - -fun drain def_prompt get stopper scan ((state, xs), src) = - (scan (state, xs), src) handle MORE prompt => - (case get (the_default def_prompt prompt) src of - ([], _) => (finite' stopper scan (state, xs), src) - | (xs', src') => drain def_prompt get stopper scan ((state, xs @ xs'), src')); - - - -(** datatype lexicon -- position tree **) - -datatype lexicon = Lexicon of (bool * lexicon) Symtab.table; - -val empty_lexicon = Lexicon Symtab.empty; - -fun is_literal _ [] = false - | is_literal (Lexicon tab) (c :: cs) = - (case Symtab.lookup tab c of - SOME (tip, lex) => tip andalso null cs orelse is_literal lex cs - | NONE => false); - - -(* scan longest match *) - -fun literal lexicon = - let - fun finish (SOME (res, rest)) = (rev res, rest) - | finish NONE = raise FAIL NONE; - fun scan _ res (Lexicon tab) [] = if Symtab.is_empty tab then finish res else raise MORE NONE - | scan path res (Lexicon tab) (c :: cs) = - (case Symtab.lookup tab (fst c) of - SOME (tip, lex) => - let val path' = c :: path - in scan path' (if tip then SOME (path', cs) else res) lex cs end - | NONE => finish res); - in scan [] NONE lexicon end; - - -(* build lexicons *) - -fun extend_lexicon chrs lexicon = - let - fun ext [] lex = lex - | ext (c :: cs) (Lexicon tab) = - (case Symtab.lookup tab c of - SOME (tip, lex) => Lexicon (Symtab.update (c, (tip orelse null cs, ext cs lex)) tab) - | NONE => Lexicon (Symtab.update (c, (null cs, ext cs empty_lexicon)) tab)); - in if is_literal lexicon chrs then lexicon else ext chrs lexicon end; - -fun make_lexicon chrss = fold extend_lexicon chrss empty_lexicon; - - -(* merge lexicons *) - -fun dest path (Lexicon tab) = Symtab.fold (fn (d, (tip, lex)) => - let - val path' = d :: path; - val content = dest path' lex; - in append (if tip then rev path' :: content else content) end) tab []; - -val dest_lexicon = map implode o dest []; -fun merge_lexicons (lex1, lex2) = fold extend_lexicon (dest [] lex2) lex1; - -end; - -structure Basic_Scan: 
BASIC_SCAN = Scan; -open Basic_Scan; diff --git a/core/Pure/General/scan.scala b/core/Pure/General/scan.scala deleted file mode 100644 index 188abe33..00000000 --- a/core/Pure/General/scan.scala +++ /dev/null @@ -1,481 +0,0 @@ -/* Title: Pure/General/scan.scala - Author: Makarius - -Efficient scanning of keywords and tokens. -*/ - -package isabelle - - -import scala.annotation.tailrec -import scala.collection.{IndexedSeq, TraversableOnce} -import scala.collection.immutable.PagedSeq -import scala.util.parsing.input.{OffsetPosition, Position => InputPosition, Reader} -import scala.util.parsing.combinator.RegexParsers - -import java.io.{File => JFile, BufferedInputStream, FileInputStream, InputStream} -import java.net.URL - - -object Scan -{ - /** context of partial line-oriented scans **/ - - abstract class Line_Context - case object Finished extends Line_Context - case class Quoted(quote: String) extends Line_Context - case object Verbatim extends Line_Context - case class Cartouche(depth: Int) extends Line_Context - case class Comment(depth: Int) extends Line_Context - - - - /** parser combinators **/ - - object Parsers extends Parsers - - trait Parsers extends RegexParsers - { - override val whiteSpace = "".r - - - /* optional termination */ - - def opt_term[T](p: => Parser[T]): Parser[Option[T]] = - p ^^ (x => Some(x)) | """\z""".r ^^ (_ => None) - - - /* repeated symbols */ - - def repeated(pred: Symbol.Symbol => Boolean, min_count: Int, max_count: Int): Parser[String] = - new Parser[String] - { - def apply(in: Input) = - { - val start = in.offset - val end = in.source.length - val matcher = new Symbol.Matcher(in.source) - - var i = start - var count = 0 - var finished = false - while (!finished && i < end && count < max_count) { - val n = matcher(i, end) - val sym = in.source.subSequence(i, i + n).toString - if (pred(sym)) { i += n; count += 1 } - else finished = true - } - if (count < min_count) Failure("bad input", in) - else Success(in.source.subSequence(start, i).toString, in.drop(i - start)) - } - }.named("repeated") - - def one(pred: Symbol.Symbol => Boolean): Parser[String] = - repeated(pred, 1, 1) - - def many(pred: Symbol.Symbol => Boolean): Parser[String] = - repeated(pred, 0, Integer.MAX_VALUE) - - def many1(pred: Symbol.Symbol => Boolean): Parser[String] = - repeated(pred, 1, Integer.MAX_VALUE) - - - /* character */ - - def character(pred: Char => Boolean): Symbol.Symbol => Boolean = - (s: Symbol. Symbol) => s.length == 1 && pred(s.charAt(0)) - - - /* quoted strings */ - - private def quoted_body(quote: Symbol.Symbol): Parser[String] = - { - rep(many1(sym => sym != quote && sym != "\\") | "\\" + quote | "\\\\" | - (("""\\\d\d\d""".r) ^? 
{ case x if x.substring(1, 4).toInt <= 255 => x })) ^^ (_.mkString) - } - - def quoted(quote: Symbol.Symbol): Parser[String] = - { - quote ~ quoted_body(quote) ~ quote ^^ { case x ~ y ~ z => x + y + z } - }.named("quoted") - - def quoted_content(quote: Symbol.Symbol, source: String): String = - { - require(parseAll(quoted(quote), source).successful) - val body = source.substring(1, source.length - 1) - if (body.exists(_ == '\\')) { - val content = - rep(many1(sym => sym != quote && sym != "\\") | - "\\" ~> (quote | "\\" | """\d\d\d""".r ^^ { case x => x.toInt.toChar.toString })) - parseAll(content ^^ (_.mkString), body).get - } - else body - } - - def quoted_line(quote: Symbol.Symbol, ctxt: Line_Context): Parser[(String, Line_Context)] = - { - ctxt match { - case Finished => - quote ~ quoted_body(quote) ~ opt_term(quote) ^^ - { case x ~ y ~ Some(z) => (x + y + z, Finished) - case x ~ y ~ None => (x + y, Quoted(quote)) } - case Quoted(q) if q == quote => - quoted_body(quote) ~ opt_term(quote) ^^ - { case x ~ Some(y) => (x + y, Finished) - case x ~ None => (x, ctxt) } - case _ => failure("") - } - }.named("quoted_line") - - def recover_quoted(quote: Symbol.Symbol): Parser[String] = - quote ~ quoted_body(quote) ^^ { case x ~ y => x + y } - - - /* verbatim text */ - - private def verbatim_body: Parser[String] = - rep(many1(sym => sym != "*") | """\*(?!\})""".r) ^^ (_.mkString) - - def verbatim: Parser[String] = - { - "{*" ~ verbatim_body ~ "*}" ^^ { case x ~ y ~ z => x + y + z } - }.named("verbatim") - - def verbatim_content(source: String): String = - { - require(parseAll(verbatim, source).successful) - source.substring(2, source.length - 2) - } - - def verbatim_line(ctxt: Line_Context): Parser[(String, Line_Context)] = - { - ctxt match { - case Finished => - "{*" ~ verbatim_body ~ opt_term("*}") ^^ - { case x ~ y ~ Some(z) => (x + y + z, Finished) - case x ~ y ~ None => (x + y, Verbatim) } - case Verbatim => - verbatim_body ~ opt_term("*}") ^^ - { case x ~ Some(y) => (x + y, Finished) - case x ~ None => (x, Verbatim) } - case _ => failure("") - } - }.named("verbatim_line") - - val recover_verbatim: Parser[String] = - "{*" ~ verbatim_body ^^ { case x ~ y => x + y } - - - /* nested text cartouches */ - - private def cartouche_depth(depth: Int): Parser[(String, Int)] = new Parser[(String, Int)] - { - require(depth >= 0) - - def apply(in: Input) = - { - val start = in.offset - val end = in.source.length - val matcher = new Symbol.Matcher(in.source) - - var i = start - var d = depth - var finished = false - while (!finished && i < end) { - val n = matcher(i, end) - val sym = in.source.subSequence(i, i + n).toString - if (Symbol.is_open(sym)) { i += n; d += 1 } - else if (d > 0) { i += n; if (Symbol.is_close(sym)) d -= 1 } - else finished = true - } - if (i == start) Failure("bad input", in) - else Success((in.source.subSequence(start, i).toString, d), in.drop(i - start)) - } - }.named("cartouche_depth") - - def cartouche: Parser[String] = - cartouche_depth(0) ^? 
{ case (x, d) if d == 0 => x } - - def cartouche_line(ctxt: Line_Context): Parser[(String, Line_Context)] = - { - val depth = - ctxt match { - case Finished => 0 - case Cartouche(d) => d - case _ => -1 - } - if (depth >= 0) - cartouche_depth(depth) ^^ - { case (x, 0) => (x, Finished) - case (x, d) => (x, Cartouche(d)) } - else failure("") - } - - val recover_cartouche: Parser[String] = - cartouche_depth(0) ^^ (_._1) - - def cartouche_content(source: String): String = - { - def err(): Nothing = error("Malformed text cartouche: " + quote(source)) - val source1 = - Library.try_unprefix(Symbol.open_decoded, source) orElse - Library.try_unprefix(Symbol.open, source) getOrElse err() - Library.try_unsuffix(Symbol.close_decoded, source1) orElse - Library.try_unsuffix(Symbol.close, source1) getOrElse err() - } - - - /* nested comments */ - - private def comment_depth(depth: Int): Parser[(String, Int)] = new Parser[(String, Int)] - { - require(depth >= 0) - - val comment_text = - rep1(many1(sym => sym != "*" && sym != "(") | """\*(?!\))|\((?!\*)""".r) - - def apply(in: Input) = - { - var rest = in - def try_parse[A](p: Parser[A]): Boolean = - { - parse(p ^^^ (()), rest) match { - case Success(_, next) => { rest = next; true } - case _ => false - } - } - var d = depth - var finished = false - while (!finished) { - if (try_parse("(*")) d += 1 - else if (d > 0 && try_parse("*)")) d -= 1 - else if (d == 0 || !try_parse(comment_text)) finished = true - } - if (in.offset < rest.offset) - Success((in.source.subSequence(in.offset, rest.offset).toString, d), rest) - else Failure("comment expected", in) - } - }.named("comment_depth") - - def comment: Parser[String] = - comment_depth(0) ^? { case (x, d) if d == 0 => x } - - def comment_line(ctxt: Line_Context): Parser[(String, Line_Context)] = - { - val depth = - ctxt match { - case Finished => 0 - case Comment(d) => d - case _ => -1 - } - if (depth >= 0) - comment_depth(depth) ^^ - { case (x, 0) => (x, Finished) - case (x, d) => (x, Comment(d)) } - else failure("") - } - - val recover_comment: Parser[String] = - comment_depth(0) ^^ (_._1) - - def comment_content(source: String): String = - { - require(parseAll(comment, source).successful) - source.substring(2, source.length - 2) - } - - - /* keyword */ - - def literal(lexicon: Lexicon): Parser[String] = new Parser[String] - { - def apply(in: Input) = - { - val result = lexicon.scan(in) - if (result.isEmpty) Failure("keyword expected", in) - else Success(result, in.drop(result.length)) - } - }.named("keyword") - } - - - - /** Lexicon -- position tree **/ - - object Lexicon - { - /* representation */ - - private sealed case class Tree(val branches: Map[Char, (String, Tree)]) - private val empty_tree = Tree(Map()) - - val empty: Lexicon = new Lexicon(empty_tree) - def apply(elems: String*): Lexicon = empty ++ elems - } - - final class Lexicon private(rep: Lexicon.Tree) - { - /* auxiliary operations */ - - private def content(tree: Lexicon.Tree, result: List[String]): List[String] = - (result /: tree.branches.toList) ((res, entry) => - entry match { case (_, (s, tr)) => - if (s.isEmpty) content(tr, res) else content(tr, s :: res) }) - - private def lookup(str: CharSequence): Option[(Boolean, Lexicon.Tree)] = - { - val len = str.length - @tailrec def look(tree: Lexicon.Tree, tip: Boolean, i: Int): Option[(Boolean, Lexicon.Tree)] = - { - if (i < len) { - tree.branches.get(str.charAt(i)) match { - case Some((s, tr)) => look(tr, !s.isEmpty, i + 1) - case None => None - } - } - else Some(tip, tree) - } - look(rep, 
false, 0) - } - - def completions(str: CharSequence): List[String] = - lookup(str) match { - case Some((true, tree)) => content(tree, List(str.toString)) - case Some((false, tree)) => content(tree, Nil) - case None => Nil - } - - - /* pseudo Set methods */ - - def iterator: Iterator[String] = content(rep, Nil).sorted.iterator - - override def toString: String = iterator.mkString("Lexicon(", ", ", ")") - - def empty: Lexicon = Lexicon.empty - def isEmpty: Boolean = rep.branches.isEmpty - - def contains(elem: String): Boolean = - lookup(elem) match { - case Some((tip, _)) => tip - case _ => false - } - - - /* add elements */ - - def + (elem: String): Lexicon = - if (contains(elem)) this - else { - val len = elem.length - def extend(tree: Lexicon.Tree, i: Int): Lexicon.Tree = - if (i < len) { - val c = elem.charAt(i) - val end = (i + 1 == len) - tree.branches.get(c) match { - case Some((s, tr)) => - Lexicon.Tree(tree.branches + - (c -> (if (end) elem else s, extend(tr, i + 1)))) - case None => - Lexicon.Tree(tree.branches + - (c -> (if (end) elem else "", extend(Lexicon.empty_tree, i + 1)))) - } - } - else tree - new Lexicon(extend(rep, 0)) - } - - def ++ (elems: TraversableOnce[String]): Lexicon = (this /: elems)(_ + _) - - - /* scan */ - - def scan(in: Reader[Char]): String = - { - val source = in.source - val offset = in.offset - val len = source.length - offset - - def scan_tree(tree: Lexicon.Tree, result: String, i: Int): String = - { - if (i < len) { - tree.branches.get(source.charAt(offset + i)) match { - case Some((s, tr)) => scan_tree(tr, if (s.isEmpty) result else s, i + 1) - case None => result - } - } - else result - } - scan_tree(rep, "", 0) - } - } - - - - /** read stream without decoding: efficient length operation **/ - - private class Restricted_Seq(seq: IndexedSeq[Char], start: Int, end: Int) - extends CharSequence - { - def charAt(i: Int): Char = - if (0 <= i && i < length) seq(start + i) - else throw new IndexOutOfBoundsException - - def length: Int = end - start // avoid expensive seq.length - - def subSequence(i: Int, j: Int): CharSequence = - if (0 <= i && i <= j && j <= length) new Restricted_Seq(seq, start + i, start + j) - else throw new IndexOutOfBoundsException - - override def toString: String = - { - val buf = new StringBuilder(length) - for (offset <- start until end) buf.append(seq(offset)) - buf.toString - } - } - - abstract class Byte_Reader extends Reader[Char] { def close: Unit } - - private def make_byte_reader(stream: InputStream, stream_length: Int): Byte_Reader = - { - val buffered_stream = new BufferedInputStream(stream) - val seq = new PagedSeq( - (buf: Array[Char], offset: Int, length: Int) => - { - var i = 0 - var c = 0 - var eof = false - while (!eof && i < length) { - c = buffered_stream.read - if (c == -1) eof = true - else { buf(offset + i) = c.toChar; i += 1 } - } - if (i > 0) i else -1 - }) - val restricted_seq = new Restricted_Seq(seq, 0, stream_length) - - class Paged_Reader(override val offset: Int) extends Byte_Reader - { - override lazy val source: CharSequence = restricted_seq - def first: Char = if (seq.isDefinedAt(offset)) seq(offset) else '\u001a' - def rest: Paged_Reader = if (seq.isDefinedAt(offset)) new Paged_Reader(offset + 1) else this - def pos: InputPosition = new OffsetPosition(source, offset) - def atEnd: Boolean = !seq.isDefinedAt(offset) - override def drop(n: Int): Paged_Reader = new Paged_Reader(offset + n) - def close { buffered_stream.close } - } - new Paged_Reader(0) - } - - def byte_reader(file: JFile): Byte_Reader = 
- make_byte_reader(new FileInputStream(file), file.length.toInt) - - def byte_reader(url: URL): Byte_Reader = - { - val connection = url.openConnection - val stream = connection.getInputStream - val stream_length = connection.getContentLength - make_byte_reader(stream, stream_length) - } -} diff --git a/core/Pure/General/secure.ML b/core/Pure/General/secure.ML deleted file mode 100644 index f3bf1dd4..00000000 --- a/core/Pure/General/secure.ML +++ /dev/null @@ -1,60 +0,0 @@ -(* Title: Pure/General/secure.ML - Author: Makarius - -Secure critical operations. -*) - -signature SECURE = -sig - val set_secure: unit -> unit - val is_secure: unit -> bool - val deny_secure: string -> unit - val secure_mltext: unit -> unit - val use_text: use_context -> int * string -> bool -> string -> unit - val use_file: use_context -> bool -> string -> unit - val toplevel_pp: string list -> string -> unit - val PG_setup: unit -> unit - val commit: unit -> unit -end; - -structure Secure: SECURE = -struct - -(** secure flag **) - -val secure = Unsynchronized.ref false; - -fun set_secure () = secure := true; -fun is_secure () = ! secure; - -fun deny_secure msg = if is_secure () then error msg else (); - - - -(** critical operations **) - -(* ML evaluation *) - -fun secure_mltext () = deny_secure "Cannot evaluate ML source in secure mode"; - -val raw_use_text = use_text; -val raw_use_file = use_file; -val raw_toplevel_pp = toplevel_pp; - -fun use_text context pos verbose txt = (secure_mltext (); raw_use_text context pos verbose txt); -fun use_file context verbose name = (secure_mltext (); raw_use_file context verbose name); - -fun toplevel_pp path pp = (secure_mltext (); raw_toplevel_pp ML_Parse.global_context path pp); - - -(* global evaluation *) - -val use_global = raw_use_text ML_Parse.global_context (0, "") false; - -fun commit () = use_global "commit();"; (*commit is dynamically bound!*) - -fun PG_setup () = - use_global "val change = Unsynchronized.change; structure ThyLoad = ProofGeneral.ThyLoad;"; - -end; - diff --git a/core/Pure/General/seq.ML b/core/Pure/General/seq.ML deleted file mode 100644 index 09f4beb0..00000000 --- a/core/Pure/General/seq.ML +++ /dev/null @@ -1,284 +0,0 @@ -(* Title: Pure/General/seq.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - Author: Markus Wenzel, TU Munich - -Unbounded sequences implemented by closures. RECOMPUTES if sequence -is re-inspected. Memoing, using polymorphic refs, was found to be -slower! 
(More GCs) -*) - -signature SEQ = -sig - type 'a seq - val make: (unit -> ('a * 'a seq) option) -> 'a seq - val pull: 'a seq -> ('a * 'a seq) option - val empty: 'a seq - val cons: 'a -> 'a seq -> 'a seq - val single: 'a -> 'a seq - val try: ('a -> 'b) -> 'a -> 'b seq - val hd: 'a seq -> 'a - val tl: 'a seq -> 'a seq - val chop: int -> 'a seq -> 'a list * 'a seq - val take: int -> 'a seq -> 'a seq - val list_of: 'a seq -> 'a list - val of_list: 'a list -> 'a seq - val append: 'a seq -> 'a seq -> 'a seq - val mapp: ('a -> 'b) -> 'a seq -> 'b seq -> 'b seq - val interleave: 'a seq * 'a seq -> 'a seq - val filter: ('a -> bool) -> 'a seq -> 'a seq - val flat: 'a seq seq -> 'a seq - val map: ('a -> 'b) -> 'a seq -> 'b seq - val maps: ('a -> 'b seq) -> 'a seq -> 'b seq - val map_filter: ('a -> 'b option) -> 'a seq -> 'b seq - val lift: ('a -> 'b -> 'c) -> 'a seq -> 'b -> 'c seq - val lifts: ('a -> 'b -> 'c seq) -> 'a seq -> 'b -> 'c seq - val singleton: ('a list -> 'b list seq) -> 'a -> 'b seq - val print: (int -> 'a -> unit) -> int -> 'a seq -> unit - val it_right : ('a * 'b seq -> 'b seq) -> 'a seq * 'b seq -> 'b seq - datatype 'a result = Result of 'a | Error of unit -> string - val make_results: 'a seq -> 'a result seq - val filter_results: 'a result seq -> 'a seq - val maps_results: ('a -> 'b result seq) -> 'a result seq -> 'b result seq - val maps_result: ('a -> 'b seq) -> 'a result seq -> 'b result seq - val map_result: ('a -> 'b) -> 'a result seq -> 'b result seq - val first_result: string -> 'a result seq -> 'a * 'a seq - val the_result: string -> 'a result seq -> 'a - val succeed: 'a -> 'a seq - val fail: 'a -> 'b seq - val THEN: ('a -> 'b seq) * ('b -> 'c seq) -> 'a -> 'c seq - val ORELSE: ('a -> 'b seq) * ('a -> 'b seq) -> 'a -> 'b seq - val APPEND: ('a -> 'b seq) * ('a -> 'b seq) -> 'a -> 'b seq - val EVERY: ('a -> 'a seq) list -> 'a -> 'a seq - val FIRST: ('a -> 'b seq) list -> 'a -> 'b seq - val TRY: ('a -> 'a seq) -> 'a -> 'a seq - val REPEAT: ('a -> 'a seq) -> 'a -> 'a seq - val REPEAT1: ('a -> 'a seq) -> 'a -> 'a seq - val INTERVAL: (int -> 'a -> 'a seq) -> int -> int -> 'a -> 'a seq - val DETERM: ('a -> 'b seq) -> 'a -> 'b seq -end; - -structure Seq: SEQ = -struct - - -(** lazy sequences **) - -datatype 'a seq = Seq of unit -> ('a * 'a seq) option; - -(*the abstraction for making a sequence*) -val make = Seq; - -(*return next sequence element as NONE or SOME (x, xq)*) -fun pull (Seq f) = f (); - - -(*the empty sequence*) -val empty = Seq (fn () => NONE); - -(*prefix an element to the sequence -- use cons (x, xq) only if - evaluation of xq need not be delayed, otherwise use - make (fn () => SOME (x, xq))*) -fun cons x xq = make (fn () => SOME (x, xq)); - -fun single x = cons x empty; - -(*head and tail -- beware of calling the sequence function twice!!*) -fun hd xq = #1 (the (pull xq)) -and tl xq = #2 (the (pull xq)); - -(*partial function as procedure*) -fun try f x = - (case Basics.try f x of - SOME y => single y - | NONE => empty); - - -(*the list of the first n elements, paired with rest of sequence; - if length of list is less than n, then sequence had less than n elements*) -fun chop n xq = - if n <= (0 : int) then ([], xq) - else - (case pull xq of - NONE => ([], xq) - | SOME (x, xq') => apfst (Basics.cons x) (chop (n - 1) xq')); - -(*truncate the sequence after n elements*) -fun take n xq = - if n <= (0 : int) then empty - else make (fn () => - (Option.map o apsnd) (take (n - 1)) (pull xq)); - -(*conversion from sequence to list*) -fun list_of xq = - (case pull xq of - 
NONE => [] - | SOME (x, xq') => x :: list_of xq'); - -(*conversion from list to sequence*) -fun of_list xs = fold_rev cons xs empty; - - -(*sequence append: put the elements of xq in front of those of yq*) -fun append xq yq = - let - fun copy s = - make (fn () => - (case pull s of - NONE => pull yq - | SOME (x, s') => SOME (x, copy s'))) - in copy xq end; - -(*map over a sequence xq, append the sequence yq*) -fun mapp f xq yq = - let - fun copy s = - make (fn () => - (case pull s of - NONE => pull yq - | SOME (x, s') => SOME (f x, copy s'))) - in copy xq end; - -(*interleave elements of xq with those of yq -- fairer than append*) -fun interleave (xq, yq) = - make (fn () => - (case pull xq of - NONE => pull yq - | SOME (x, xq') => SOME (x, interleave (yq, xq')))); - -(*filter sequence by predicate*) -fun filter pred xq = - let - fun copy s = - make (fn () => - (case pull s of - NONE => NONE - | SOME (x, s') => if pred x then SOME (x, copy s') else pull (copy s'))); - in copy xq end; - -(*flatten a sequence of sequences to a single sequence*) -fun flat xqq = - make (fn () => - (case pull xqq of - NONE => NONE - | SOME (xq, xqq') => pull (append xq (flat xqq')))); - -(*map the function f over the sequence, making a new sequence*) -fun map f xq = - make (fn () => - (case pull xq of - NONE => NONE - | SOME (x, xq') => SOME (f x, map f xq'))); - -fun maps f xq = - make (fn () => - (case pull xq of - NONE => NONE - | SOME (x, xq') => pull (append (f x) (maps f xq')))); - -fun map_filter f = maps (fn x => (case f x of NONE => empty | SOME y => single y)); - -fun lift f xq y = map (fn x => f x y) xq; -fun lifts f xq y = maps (fn x => f x y) xq; - -fun singleton f x = f [x] |> map (fn [y] => y | _ => raise List.Empty); - -(*print a sequence, up to "count" elements*) -fun print print_elem count = - let - fun prnt (k: int) xq = - if k > count then () - else - (case pull xq of - NONE => () - | SOME (x, xq') => (print_elem k x; writeln ""; prnt (k + 1) xq')); - in prnt 1 end; - -(*accumulating a function over a sequence; this is lazy*) -fun it_right f (xq, yq) = - let - fun its s = - make (fn () => - (case pull s of - NONE => pull yq - | SOME (a, s') => pull (f (a, its s')))) - in its xq end; - - -(* embedded errors *) - -datatype 'a result = Result of 'a | Error of unit -> string; - -fun make_results xq = map Result xq; -fun filter_results xq = map_filter (fn Result x => SOME x | Error _ => NONE) xq; - -fun maps_results f xq = - make (fn () => - (case pull xq of - NONE => NONE - | SOME (Result x, xq') => pull (append (f x) (maps_results f xq')) - | SOME (Error msg, xq') => SOME (Error msg, maps_results f xq'))); - -fun maps_result f = maps_results (map Result o f); -fun map_result f = maps_result (single o f); - -(*first result or first error within sequence*) -fun first_result default_msg seq = - let - fun result opt_msg xq = - (case (opt_msg, pull xq) of - (_, SOME (Result x, xq')) => (x, filter_results xq') - | (SOME _, SOME (Error _, xq')) => result opt_msg xq' - | (NONE, SOME (Error msg, xq')) => result (SOME msg) xq' - | (SOME msg, NONE) => error (msg ()) - | (NONE, NONE) => error (if default_msg = "" then "Empty result sequence" else default_msg)); - in result NONE seq end; - -fun the_result default_msg seq = #1 (first_result default_msg seq); - - - -(** sequence functions **) (*cf. 
Pure/tactical.ML*) - -fun succeed x = single x; -fun fail _ = empty; - -fun op THEN (f, g) x = maps g (f x); - -fun op ORELSE (f, g) x = - (case pull (f x) of - NONE => g x - | some => make (fn () => some)); - -fun op APPEND (f, g) x = - append (f x) (make (fn () => pull (g x))); - -fun EVERY fs = fold_rev (curry op THEN) fs succeed; -fun FIRST fs = fold_rev (curry op ORELSE) fs fail; - -fun TRY f = ORELSE (f, succeed); - -fun REPEAT f = - let - fun rep qs x = - (case pull (f x) of - NONE => SOME (x, make (fn () => repq qs)) - | SOME (x', q) => rep (q :: qs) x') - and repq [] = NONE - | repq (q :: qs) = - (case pull q of - NONE => repq qs - | SOME (x, q) => rep (q :: qs) x); - in fn x => make (fn () => rep [] x) end; - -fun REPEAT1 f = THEN (f, REPEAT f); - -fun INTERVAL f (i: int) j x = - if i > j then single x - else op THEN (f j, INTERVAL f i (j - 1)) x; - -fun DETERM f x = - (case pull (f x) of - NONE => empty - | SOME (x', _) => cons x' empty); - -end; diff --git a/core/Pure/General/sha1.ML b/core/Pure/General/sha1.ML deleted file mode 100644 index 9da57a75..00000000 --- a/core/Pure/General/sha1.ML +++ /dev/null @@ -1,142 +0,0 @@ -(* Title: Pure/General/sha1.ML - Author: Makarius - -Digesting strings according to SHA-1 (see RFC 3174) -- relatively slow -version in pure ML. -*) - -signature SHA1 = -sig - eqtype digest - val digest: string -> digest - val rep: digest -> string - val fake: string -> digest -end; - -structure SHA1: SHA1 = -struct - -(* 32bit words *) - -infix 4 << >>; -infix 3 andb; -infix 2 orb xorb; - -val op << = Word32.<<; -val op >> = Word32.>>; -val op andb = Word32.andb; -val op orb = Word32.orb; -val op xorb = Word32.xorb; -val notb = Word32.notb; - -fun rotate k w = w << k orb w >> (0w32 - k); - - -(* hexadecimal words *) - -fun hex_digit (text, w: Word32.word) = - let - val d = Word32.toInt (w andb 0wxf); - val dig = if d < 10 then chr (ord "0" + d) else chr (ord "a" + d - 10); - in (dig ^ text, w >> 0w4) end; - -fun hex_word w = #1 (funpow 8 hex_digit ("", w)); - - -(* padding *) - -fun pack_bytes 0 n = "" - | pack_bytes k n = pack_bytes (k - 1) (n div 256) ^ chr (n mod 256); - -fun padded_text str = - let - val len = size str; - val padding = chr 128 ^ replicate_string (~ (len + 9) mod 64) (chr 0) ^ pack_bytes 8 (len * 8); - fun byte i = Char.ord (String.sub (if i < len then (str, i) else (padding, (i - len)))); - fun word i = - Word32.fromInt (byte (4 * i)) << 0w24 orb - Word32.fromInt (byte (4 * i + 1)) << 0w16 orb - Word32.fromInt (byte (4 * i + 2)) << 0w8 orb - Word32.fromInt (byte (4 * i + 3)); - in ((len + size padding) div 4, word) end; - - -(* digest_string *) - -fun digest_word (i, w, {a, b, c, d, e}) = - let - val {f, k} = - if i < 20 then - {f = (b andb c) orb (notb b andb d), - k = 0wx5A827999} - else if i < 40 then - {f = b xorb c xorb d, - k = 0wx6ED9EBA1} - else if i < 60 then - {f = (b andb c) orb (b andb d) orb (c andb d), - k = 0wx8F1BBCDC} - else - {f = b xorb c xorb d, - k = 0wxCA62C1D6}; - val op + = Word32.+; - in - {a = rotate 0w5 a + f + e + w + k, - b = a, - c = rotate 0w30 b, - d = c, - e = d} - end; - -fun digest_string str = - let - val (text_len, text) = padded_text str; - - (*hash result -- 5 words*) - val hash_array : Word32.word Array.array = - Array.fromList [0wx67452301, 0wxEFCDAB89, 0wx98BADCFE, 0wx10325476, 0wxC3D2E1F0]; - fun hash i = Array.sub (hash_array, i); - fun add_hash x i = Array.update (hash_array, i, hash i + x); - - (*current chunk -- 80 words*) - val chunk_array = Array.array (80, 0w0: Word32.word); - fun chunk 
i = Array.sub (chunk_array, i); - fun init_chunk pos = - Array.modifyi (fn (i, _) => - if i < 16 then text (pos + i) - else rotate 0w1 (chunk (i - 3) xorb chunk (i - 8) xorb chunk (i - 14) xorb chunk (i - 16))) - chunk_array; - - fun digest_chunks pos = - if pos < text_len then - let - val _ = init_chunk pos; - val {a, b, c, d, e} = Array.foldli digest_word - {a = hash 0, - b = hash 1, - c = hash 2, - d = hash 3, - e = hash 4} - chunk_array; - val _ = add_hash a 0; - val _ = add_hash b 1; - val _ = add_hash c 2; - val _ = add_hash d 3; - val _ = add_hash e 4; - in digest_chunks (pos + 16) end - else (); - val _ = digest_chunks 0; - - val hex = hex_word o hash; - in hex 0 ^ hex 1 ^ hex 2 ^ hex 3 ^ hex 4 end; - - -(* type digest *) - -datatype digest = Digest of string; - -val digest = Digest o digest_string; -fun rep (Digest s) = s; - -val fake = Digest; - -end; diff --git a/core/Pure/General/sha1.scala b/core/Pure/General/sha1.scala deleted file mode 100644 index 90063db3..00000000 --- a/core/Pure/General/sha1.scala +++ /dev/null @@ -1,71 +0,0 @@ -/* Title: Pure/General/sha1.scala - Module: PIDE - Author: Makarius - -Digest strings according to SHA-1 (see RFC 3174). -*/ - -package isabelle - - -import java.io.{File => JFile, FileInputStream} -import java.security.MessageDigest - - -object SHA1 -{ - final class Digest private[SHA1](val rep: String) - { - override def hashCode: Int = rep.hashCode - override def equals(that: Any): Boolean = - that match { - case other: Digest => rep == other.rep - case _ => false - } - override def toString: String = rep - } - - private def make_result(digest: MessageDigest): Digest = - { - val result = new StringBuilder - for (b <- digest.digest()) { - val i = b.asInstanceOf[Int] & 0xFF - if (i < 16) result += '0' - result ++= Integer.toHexString(i) - } - new Digest(result.toString) - } - - def digest(file: JFile): Digest = - { - val stream = new FileInputStream(file) - val digest = MessageDigest.getInstance("SHA") - - val buf = new Array[Byte](65536) - var m = 0 - try { - do { - m = stream.read(buf, 0, buf.length) - if (m != -1) digest.update(buf, 0, m) - } while (m != -1) - } - finally { stream.close } - - make_result(digest) - } - - def digest(bytes: Array[Byte]): Digest = - { - val digest = MessageDigest.getInstance("SHA") - digest.update(bytes) - - make_result(digest) - } - - def digest(bytes: Bytes): Digest = bytes.sha1_digest - - def digest(string: String): Digest = digest(Bytes(string)) - - def fake(rep: String): Digest = new Digest(rep) -} - diff --git a/core/Pure/General/sha1_polyml.ML b/core/Pure/General/sha1_polyml.ML deleted file mode 100644 index 423c6718..00000000 --- a/core/Pure/General/sha1_polyml.ML +++ /dev/null @@ -1,51 +0,0 @@ -(* Title: Pure/General/sha1_polyml.ML - Author: Sascha Boehme, TU Muenchen - -Digesting strings according to SHA-1 (see RFC 3174) -- based on an -external implementation in C with a fallback to an internal -implementation. 
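For reference, a minimal stand-alone Scala sketch of the same convention on the JVM (hedged: Sha1Demo and digestHex are illustrative names, not part of Quantomatic); it produces the lower-case hex digest form used by the ML code above, and "abc" gives the value checked in sha1_samples.ML below.

import java.security.MessageDigest

object Sha1Demo {
  // Lower-case hex SHA-1 digest of a string, analogous to SHA1.digest / SHA1.rep.
  def digestHex(s: String): String = {
    val md = MessageDigest.getInstance("SHA-1")
    md.digest(s.getBytes("UTF-8")).map(b => f"${b & 0xff}%02x").mkString
  }

  def main(args: Array[String]): Unit =
    println(digestHex("abc"))  // a9993e364706816aba3e25717850c26c9cd0d89d
}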
-*) - -structure SHA1: SHA1 = -struct - -(* digesting *) - -fun hex_digit i = if i < 10 then chr (ord "0" + i) else chr (ord "a" + i - 10); - -fun hex_string arr i = - let val c = CInterface.fromCchar (CInterface.offset i CInterface.Cchar arr) - in (op ^) (pairself hex_digit (Integer.div_mod (Char.ord c) 16)) end - -val lib_path = - ("$ML_HOME/" ^ (if ML_System.platform_is_cygwin then "sha1.dll" else "libsha1.so")) - |> Path.explode; - -val STRING_INPUT_BYTES = - CInterface.mkConversion undefined (CInterface.toCbytes o Byte.stringToBytes) - (CInterface.Cpointer CInterface.Cchar); - -fun digest_external str = - let - val digest = CInterface.alloc 20 CInterface.Cchar; - val _ = - CInterface.call3 (CInterface.get_sym (File.platform_path lib_path) "sha1_buffer") - (STRING_INPUT_BYTES, CInterface.LONG, CInterface.POINTER) - CInterface.POINTER (str, size str, CInterface.address digest); - in fold (suffix o hex_string digest) (0 upto 19) "" end; - -fun digest_string str = digest_external str - handle CInterface.Foreign msg => - (warning (msg ^ "\nUsing slow ML implementation of SHA1.digest"); SHA1.rep (SHA1.digest str)); - - -(* type digest *) - -datatype digest = Digest of string; - -val digest = Digest o digest_string; -fun rep (Digest s) = s; - -val fake = Digest; - -end; diff --git a/core/Pure/General/sha1_samples.ML b/core/Pure/General/sha1_samples.ML deleted file mode 100644 index d4779995..00000000 --- a/core/Pure/General/sha1_samples.ML +++ /dev/null @@ -1,36 +0,0 @@ -(* Title: Pure/General/sha1_samples.ML - Author: Makarius - -Some SHA1 samples found in the wild. -*) - -signature SHA1_SAMPLES = -sig - val test: unit -> unit -end; - -structure SHA1_Samples: SHA1_SAMPLES = -struct - -fun check (msg, key) = - let val key' = SHA1.rep (SHA1.digest msg) in - if key = key' then () - else - raise Fail ("SHA1 library integrity test failed on " ^ quote msg ^ ":\n" ^ - key ^ " expected, but\n" ^ key' ^ " was found") - end; - -fun test () = - List.app check - [("", "da39a3ee5e6b4b0d3255bfef95601890afd80709"), - ("a", "86f7e437faa5a7fce15d1ddcb9eaeaea377667b8"), - ("abc", "a9993e364706816aba3e25717850c26c9cd0d89d"), - ("abcdefghijklmnopqrstuvwxyz", "32d10c7b8cf96570ca04ce37f2a19d84240d3a89"), - ("The quick brown fox jumps over the lazy dog", "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12"), - (replicate_string 100 "\000", "ed4a77d1b56a118938788fc53037759b6c501e3d"), - ("a\000b", "4a3dec2d1f8245280855c42db0ee4239f917fdb8"), - ("\000\001", "3f29546453678b855931c174a97d6c0894b8f546")]; - -val _ = test (); - -end; diff --git a/core/Pure/General/socket_io.ML b/core/Pure/General/socket_io.ML deleted file mode 100644 index 3e66f36a..00000000 --- a/core/Pure/General/socket_io.ML +++ /dev/null @@ -1,88 +0,0 @@ -(* Title: Pure/General/socket_io.ML - Author: Timothy Bourke, NICTA - Author: Makarius - -Stream IO over TCP sockets. Following example 10.2 in "The Standard -ML Basis Library" by Emden R. Gansner and John H. Reppy. - -Note: BinIO requires Poly/ML 5.5.x to work reliably. 
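As a rough JVM-side illustration of the same pattern (a sketch only; SocketStreamsDemo and openStreams are made-up names), the "host:port" parsing and buffered binary streams mirror what open_streams and make_streams do below:

import java.io.{BufferedInputStream, BufferedOutputStream}
import java.net.Socket

object SocketStreamsDemo {
  // Connect to "host:port" and wrap the socket in buffered binary streams.
  def openStreams(socketName: String): (BufferedInputStream, BufferedOutputStream) =
    socketName.split(":") match {
      case Array(host, port) =>
        val socket = new Socket(host, port.toInt)
        socket.setTcpNoDelay(true)  // cf. INetSock.TCP.setNODELAY below
        (new BufferedInputStream(socket.getInputStream),
         new BufferedOutputStream(socket.getOutputStream))
      case _ => sys.error("Bad socket name: " + socketName)
    }
}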
-*) - -signature SOCKET_IO = -sig - val make_streams: Socket.active INetSock.stream_sock -> BinIO.instream * BinIO.outstream - val open_streams: string -> BinIO.instream * BinIO.outstream -end; - -structure Socket_IO: SOCKET_IO = -struct - -fun make_streams socket = - let - val (host, port) = INetSock.fromAddr (Socket.Ctl.getSockName socket); - val name = NetHostDB.toString host ^ ":" ^ string_of_int port; - - val rd = - BinPrimIO.RD { - name = name, - chunkSize = io_buffer_size, - readVec = SOME (fn n => Socket.recvVec (socket, n)), - readArr = SOME (fn buffer => Socket.recvArr (socket, buffer)), - readVecNB = NONE, - readArrNB = NONE, - block = NONE, - canInput = NONE, - avail = fn () => NONE, - getPos = NONE, - setPos = NONE, - endPos = NONE, - verifyPos = NONE, - close = fn () => Socket.close socket, - ioDesc = NONE - }; - - val wr = - BinPrimIO.WR { - name = name, - chunkSize = io_buffer_size, - writeVec = SOME (fn buffer => Socket.sendVec (socket, buffer)), - writeArr = SOME (fn buffer => Socket.sendArr (socket, buffer)), - writeVecNB = NONE, - writeArrNB = NONE, - block = NONE, - canOutput = NONE, - getPos = NONE, - setPos = NONE, - endPos = NONE, - verifyPos = NONE, - close = fn () => Socket.close socket, - ioDesc = NONE - }; - - val in_stream = - BinIO.mkInstream - (BinIO.StreamIO.mkInstream (rd, Word8Vector.fromList [])); - - val out_stream = - BinIO.mkOutstream - (BinIO.StreamIO.mkOutstream (wr, IO.BLOCK_BUF)); - - in (in_stream, out_stream) end; - - -fun open_streams socket_name = - let - fun err () = error ("Bad socket name: " ^ quote socket_name); - val (host, port) = - (case space_explode ":" socket_name of - [h, p] => - (case NetHostDB.getByName h of SOME host => host | NONE => err (), - case Int.fromString p of SOME port => port | NONE => err ()) - | _ => err ()); - val socket: Socket.active INetSock.stream_sock = INetSock.TCP.socket (); - val _ = Socket.connect (socket, INetSock.toAddr (NetHostDB.addr host, port)); - val _ = INetSock.TCP.setNODELAY (socket, true); - in make_streams socket end; - -end; - diff --git a/core/Pure/General/source.ML b/core/Pure/General/source.ML deleted file mode 100644 index 99613af2..00000000 --- a/core/Pure/General/source.ML +++ /dev/null @@ -1,178 +0,0 @@ -(* Title: Pure/General/source.ML - Author: Markus Wenzel, TU Muenchen - -Coalgebraic data sources -- efficient purely functional input streams. 
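The idea can be sketched in a few lines of Scala (hedged: Src, ofList and exhaust are illustrative names, not the ML interface): a source is a pending buffer plus a drain function from a state to the next chunk and an updated state; get pulls a chunk, unget pushes elements back, exhaust drains everything.

final case class Src[A, B](buffer: List[A], info: B, drain: B => (List[A], B)) {
  def get: (List[A], Src[A, B]) =
    if (buffer.nonEmpty) (buffer, copy(buffer = Nil))
    else { val (xs, info1) = drain(info); (xs, Src(Nil, info1, drain)) }

  def unget(xs: List[A]): Src[A, B] = copy(buffer = xs ::: buffer)

  def exhaust: List[A] = get match {
    case (Nil, _)   => Nil
    case (xs, rest) => xs ::: rest.exhaust
  }
}

object SrcDemo {
  // A list-backed source, like of_list in the signature below.
  def ofList[A](xs: List[A]): Src[A, List[A]] = Src(Nil, xs, rest => (rest, Nil))

  def main(args: Array[String]): Unit =
    println(ofList(List(1, 2, 3)).exhaust)  // List(1, 2, 3)
}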
-*) - -signature SOURCE = -sig - type ('a, 'b) source - val default_prompt: string - val set_prompt: string -> ('a, 'b) source -> ('a, 'b) source - val get: ('a, 'b) source -> 'a list * ('a, 'b) source - val unget: 'a list * ('a, 'b) source -> ('a, 'b) source - val get_single: ('a, 'b) source -> ('a * ('a, 'b) source) option - val exhaust: ('a, 'b) source -> 'a list - val map_filter: ('a -> 'b option) -> ('a, 'c) source -> ('b, ('a, 'c) source) source - val filter: ('a -> bool) -> ('a, 'b) source -> ('a, ('a, 'b) source) source - val of_list: 'a list -> ('a, 'a list) source - val exhausted: ('a, 'b) source -> ('a, 'a list) source - val of_string: string -> (string, string list) source - val of_string_limited: int -> string -> (string, substring) source - val tty: TextIO.instream -> (string, unit) source - val source': 'a -> 'b Scan.stopper -> ('a * 'b list -> 'c list * ('a * 'b list)) -> - (bool * (string -> 'a * 'b list -> 'c list * ('a * 'b list))) option -> - ('b, 'e) source -> ('c, 'a * ('b, 'e) source) source - val source: 'a Scan.stopper -> ('a list -> 'b list * 'a list) -> - (bool * (string -> 'a list -> 'b list * 'a list)) option -> - ('a, 'd) source -> ('b, ('a, 'd) source) source -end; - -structure Source: SOURCE = -struct - - -(** datatype source **) - -datatype ('a, 'b) source = - Source of - {buffer: 'a list, - info: 'b, - prompt: string, - drain: string -> 'b -> 'a list * 'b}; - -fun make_source buffer info prompt drain = - Source {buffer = buffer, info = info, prompt = prompt, drain = drain}; - - -(* prompt *) - -val default_prompt = "> "; - -fun set_prompt prompt (Source {buffer, info, prompt = _, drain}) = - make_source buffer info prompt drain; - - -(* get / unget *) - -fun get (Source {buffer = [], info, prompt, drain}) = - let val (xs, info') = drain prompt info - in (xs, make_source [] info' prompt drain) end - | get (Source {buffer, info, prompt, drain}) = - (buffer, make_source [] info prompt drain); - -fun unget (xs, Source {buffer, info, prompt, drain}) = - make_source (xs @ buffer) info prompt drain; - - -(* variations on get *) - -fun get_prompt prompt src = get (set_prompt prompt src); - -fun get_single src = - (case get src of - ([], _) => NONE - | (x :: xs, src') => SOME (x, unget (xs, src'))); - -fun exhaust src = - (case get src of - ([], _) => [] - | (xs, src') => xs @ exhaust src'); - - -(* (map)filter *) - -fun drain_map_filter f prompt src = - let - val (xs, src') = get_prompt prompt src; - val xs' = map_filter f xs; - in - if null xs orelse not (null xs') then (xs', src') - else drain_map_filter f prompt src' - end; - -fun map_filter f src = make_source [] src default_prompt (drain_map_filter f); -fun filter pred = map_filter (fn x => if pred x then SOME x else NONE); - - - -(** build sources **) - -(* list source *) - -fun of_list xs = make_source [] xs default_prompt (fn _ => fn xs => (xs, [])); - -fun exhausted src = of_list (exhaust src); - - -(* string source *) - -val of_string = of_list o raw_explode; - -fun of_string_limited limit str = - make_source [] (Substring.full str) default_prompt - (fn _ => fn s => - let - val (s1, s2) = Substring.splitAt (s, Int.min (Substring.size s, limit)); - val cs = map String.str (Substring.explode s1); - in (cs, s2) end); - - -(* stream source *) - -fun slurp_input instream = - let - fun slurp () = - (case TextIO.canInput (instream, 1) handle IO.Io _ => NONE of - NONE => [] - | SOME 0 => [] - | SOME _ => TextIO.input instream :: slurp ()); - in maps raw_explode (slurp ()) end; - -fun tty in_stream = make_source 
[] () default_prompt (fn prompt => fn () => - let val input = slurp_input in_stream in - if exists (fn c => c = "\n") input then (input, ()) - else - (case (Output.prompt prompt; TextIO.inputLine in_stream) of - SOME line => (input @ raw_explode line, ()) - | NONE => (input, ())) - end); - - - -(** cascade sources **) - -(* state-based *) - -fun drain_source' stopper scan opt_recover prompt (state, src) = - let - val drain = Scan.drain prompt get_prompt stopper; - val (xs, s) = get_prompt prompt src; - val inp = ((state, xs), s); - val ((ys, (state', xs')), src') = - if null xs then (([], (state, [])), s) - else - (case opt_recover of - NONE => drain (Scan.error scan) inp - | SOME (interactive, recover) => - (drain (Scan.catch scan) inp handle Fail msg => - (if interactive then Output.error_message msg else (); - drain (Scan.unless (Scan.lift (Scan.one (Scan.is_stopper stopper))) (recover msg)) - inp))); - in (ys, (state', unget (xs', src'))) end; - -fun source' init_state stopper scan recover src = - make_source [] (init_state, src) default_prompt (drain_source' stopper scan recover); - - -(* non state-based *) - -fun drain_source stopper scan opt_recover prompt = - Scan.unlift (drain_source' stopper (Scan.lift scan) - (Option.map (fn (int, r) => (int, Scan.lift o r)) opt_recover) prompt); - -fun source stopper scan recover src = - make_source [] src default_prompt (drain_source stopper scan recover); - - -end; diff --git a/core/Pure/General/stack.ML b/core/Pure/General/stack.ML deleted file mode 100644 index cc669cf6..00000000 --- a/core/Pure/General/stack.ML +++ /dev/null @@ -1,39 +0,0 @@ -(* Title: Pure/General/stack.ML - Author: Makarius - -Non-empty stacks. -*) - -signature STACK = -sig - type 'a T = 'a * 'a list - val level: 'a T -> int - val init: 'a -> 'a T - val top: 'a T -> 'a - val map_top: ('a -> 'a) -> 'a T -> 'a T - val map_all: ('a -> 'a) -> 'a T -> 'a T - val push: 'a T -> 'a T - val pop: 'a T -> 'a T (*exception List.Empty*) -end; - -structure Stack: STACK = -struct - -type 'a T = 'a * 'a list; - -fun level (_, xs) = length xs; - -fun init x = (x, []); - -fun top (x, _) = x; - -fun map_top f (x, xs) = (f x, xs); - -fun map_all f (x, xs) = (f x, map f xs); - -fun push (x, xs) = (x, x :: xs); - -fun pop (_, x :: xs) = (x, xs) - | pop (_, []) = raise List.Empty; - -end; diff --git a/core/Pure/General/symbol.ML b/core/Pure/General/symbol.ML deleted file mode 100644 index dce6cd15..00000000 --- a/core/Pure/General/symbol.ML +++ /dev/null @@ -1,572 +0,0 @@ -(* Title: Pure/General/symbol.ML - Author: Markus Wenzel, TU Muenchen - -Generalized characters with infinitely many named symbols. 
-*) - -signature SYMBOL = -sig - type symbol = string - val STX: symbol - val DEL: symbol - val space: symbol - val is_char: symbol -> bool - val is_utf8: symbol -> bool - val is_symbolic: symbol -> bool - val is_symbolic_char: symbol -> bool - val is_printable: symbol -> bool - val eof: symbol - val is_eof: symbol -> bool - val not_eof: symbol -> bool - val stopper: symbol Scan.stopper - val sync: symbol - val is_sync: symbol -> bool - val is_regular: symbol -> bool - val is_malformed: symbol -> bool - val malformed_msg: symbol -> string - val is_ascii: symbol -> bool - val is_ascii_letter: symbol -> bool - val is_ascii_digit: symbol -> bool - val is_ascii_hex: symbol -> bool - val is_ascii_quasi: symbol -> bool - val is_ascii_blank: symbol -> bool - val is_ascii_control: symbol -> bool - val is_ascii_letdig: symbol -> bool - val is_ascii_lower: symbol -> bool - val is_ascii_upper: symbol -> bool - val to_ascii_lower: symbol -> symbol - val to_ascii_upper: symbol -> symbol - val is_ascii_identifier: string -> bool - val scan_ascii_id: string list -> string * string list - val is_raw: symbol -> bool - val decode_raw: symbol -> string - val encode_raw: string -> string - datatype sym = - Char of string | UTF8 of string | Sym of string | Ctrl of string | Raw of string | - Malformed of string | EOF - val decode: symbol -> sym - datatype kind = Letter | Digit | Quasi | Blank | Other - val kind: symbol -> kind - val is_letter: symbol -> bool - val is_digit: symbol -> bool - val is_quasi: symbol -> bool - val is_blank: symbol -> bool - val is_block_ctrl: symbol -> bool - val is_quasi_letter: symbol -> bool - val is_letdig: symbol -> bool - val beginning: int -> symbol list -> string - val source: (string, 'a) Source.source -> (symbol, (string, 'a) Source.source) Source.source - val explode: string -> symbol list - val esc: symbol -> string - val escape: string -> string - val scanner: string -> (string list -> 'a * string list) -> symbol list -> 'a - val split_words: symbol list -> string list - val explode_words: string -> string list - val strip_blanks: string -> string - val bump_init: string -> string - val bump_string: string -> string - val length: symbol list -> int - val xsymbolsN: string - val output: string -> Output.output * int -end; - -structure Symbol: SYMBOL = -struct - -(** type symbol **) - -(*Symbols, which are considered the smallest entities of any Isabelle - string, may be of the following form: - - (1) ASCII symbols: a - (2) regular symbols: \ - (3) control symbols: \<^ident> - (4) raw control symbols: \<^raw:...>, where "..." may be any printable - character (excluding ".", ">"), or \<^raw000> - - Output is subject to the print_mode variable (default: verbatim), - actual interpretation in display is up to front-end tools. 
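A hedged Scala sketch of this classification (SymbolKindDemo is a made-up name; \<alpha> is only an example of a regular symbol), mirroring the decode function further down and the corresponding code in symbol.scala:

object SymbolKindDemo {
  sealed trait Sym
  case class Chr(c: String) extends Sym        // (1) ASCII / single character
  case class Regular(name: String) extends Sym // (2) \<ident>
  case class Ctrl(name: String) extends Sym    // (3) \<^ident>
  case class Raw(body: String) extends Sym     // (4) \<^raw:...> or \<^raw000>

  def decode(s: String): Sym =
    if (s.length == 1) Chr(s)
    else if (s.startsWith("\\<^raw")) Raw(s.substring(6, s.length - 1))
    else if (s.startsWith("\\<^")) Ctrl(s.substring(3, s.length - 1))
    else Regular(s.substring(2, s.length - 1))

  def main(args: Array[String]): Unit = {
    println(decode("a"))          // Chr(a)
    println(decode("\\<alpha>"))  // Regular(alpha)
    println(decode("\\<^sub>"))   // Ctrl(sub)
  }
}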
-*) - -type symbol = string; - -val STX = chr 2; -val DEL = chr 127; - -val space = chr 32; - -fun is_char s = size s = 1; - -fun is_utf8 s = size s > 0 andalso forall_string (fn c => ord c >= 128) s; - -fun raw_symbolic s = - String.isPrefix "\\<" s andalso String.isSuffix ">" s andalso not (String.isPrefix "\\<^" s); - -fun is_symbolic s = - s <> "\\" andalso s <> "\\" andalso raw_symbolic s; - -val is_symbolic_char = member (op =) (raw_explode "!#$%&*+-/<=>?@^_|~"); - -fun is_printable s = - if is_char s then ord space <= ord s andalso ord s <= ord "~" - else is_utf8 s orelse raw_symbolic s; - - -(* input source control *) - -val eof = ""; -fun is_eof s = s = eof; -fun not_eof s = s <> eof; -val stopper = Scan.stopper (K eof) is_eof; - -(*Proof General legacy*) -val sync = "\\<^sync>"; -fun is_sync s = s = sync; - -fun is_regular s = not_eof s andalso s <> sync; - -fun is_malformed s = - String.isPrefix "\\<" s andalso not (String.isSuffix ">" s) - orelse s = "\\<>" orelse s = "\\<^>"; - -fun malformed_msg s = "Malformed symbolic character: " ^ quote s; - - -(* ASCII symbols *) - -fun is_ascii s = is_char s andalso ord s < 128; - -fun is_ascii_letter s = - is_char s andalso - (ord "A" <= ord s andalso ord s <= ord "Z" orelse - ord "a" <= ord s andalso ord s <= ord "z"); - -fun is_ascii_digit s = - is_char s andalso ord "0" <= ord s andalso ord s <= ord "9"; - -fun is_ascii_hex s = - is_char s andalso - (ord "0" <= ord s andalso ord s <= ord "9" orelse - ord "A" <= ord s andalso ord s <= ord "F" orelse - ord "a" <= ord s andalso ord s <= ord "f"); - -fun is_ascii_quasi "_" = true - | is_ascii_quasi "'" = true - | is_ascii_quasi _ = false; - -val is_ascii_blank = - fn " " => true | "\t" => true | "\n" => true | "\^K" => true | "\f" => true | "\^M" => true - | _ => false; - -fun is_ascii_control s = is_char s andalso ord s < 32 andalso not (is_ascii_blank s); - -fun is_ascii_letdig s = is_ascii_letter s orelse is_ascii_digit s orelse is_ascii_quasi s; - -fun is_ascii_lower s = is_char s andalso (ord "a" <= ord s andalso ord s <= ord "z"); -fun is_ascii_upper s = is_char s andalso (ord "A" <= ord s andalso ord s <= ord "Z"); - -fun to_ascii_lower s = if is_ascii_upper s then chr (ord s + ord "a" - ord "A") else s; -fun to_ascii_upper s = if is_ascii_lower s then chr (ord s + ord "A" - ord "a") else s; - -fun is_ascii_identifier s = - size s > 0 andalso is_ascii_letter (String.substring (s, 0, 1)) andalso - forall_string is_ascii_letdig s; - -val scan_ascii_id = Scan.one is_ascii_letter ^^ (Scan.many is_ascii_letdig >> implode); - - -(* encode_raw *) - -fun raw_chr c = - is_char c andalso - (ord space <= ord c andalso ord c <= ord "~" andalso c <> "." andalso c <> ">" - orelse ord c >= 128); - -fun encode_raw "" = "" - | encode_raw str = - let - val raw0 = enclose "\\<^raw:" ">"; - val raw1 = raw0 o implode; - val raw2 = enclose "\\<^raw" ">" o string_of_int o ord; - - fun encode cs = enc (take_prefix raw_chr cs) - and enc ([], []) = [] - | enc (cs, []) = [raw1 cs] - | enc ([], d :: ds) = raw2 d :: encode ds - | enc (cs, d :: ds) = raw1 cs :: raw2 d :: encode ds; - in - if exists_string (not o raw_chr) str then implode (encode (raw_explode str)) - else raw0 str - end; - - -(* diagnostics *) - -fun beginning n cs = - let - val drop_blanks = #1 o take_suffix is_ascii_blank; - val all_cs = drop_blanks cs; - val dots = if length all_cs > n then " ..." 
else ""; - in - (drop_blanks (take n all_cs) - |> map (fn c => if is_ascii_blank c then space else c) - |> implode) ^ dots - end; - - -(* decode_raw *) - -fun is_raw s = - String.isPrefix "\\<^raw" s andalso String.isSuffix ">" s; - -fun decode_raw s = - if not (is_raw s) then error (malformed_msg s) - else if String.isPrefix "\\<^raw:" s then String.substring (s, 7, size s - 8) - else chr (#1 (Library.read_int (raw_explode (String.substring (s, 6, size s - 7))))); - - -(* symbol variants *) - -datatype sym = - Char of string | UTF8 of string | Sym of string | Ctrl of string | Raw of string | - Malformed of string | EOF; - -fun decode s = - if s = "" then EOF - else if is_char s then Char s - else if is_utf8 s then UTF8 s - else if is_raw s then Raw (decode_raw s) - else if is_malformed s then Malformed s - else if String.isPrefix "\\<^" s then Ctrl (String.substring (s, 3, size s - 4)) - else Sym (String.substring (s, 2, size s - 3)); - - -(* standard symbol kinds *) - -local - val letter_symbols = - Symtab.make_set [ - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\
", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\
", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\
", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - (*"\\", sic!*) - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\", - "\\" - ]; -in - -val is_letter_symbol = Symtab.defined letter_symbols; - -end; - -datatype kind = Letter | Digit | Quasi | Blank | Other; - -fun kind s = - if is_ascii_letter s then Letter - else if is_ascii_digit s then Digit - else if is_ascii_quasi s then Quasi - else if is_ascii_blank s then Blank - else if is_char s then Other - else if is_letter_symbol s then Letter - else Other; - -fun is_letter s = kind s = Letter; -fun is_digit s = kind s = Digit; -fun is_quasi s = kind s = Quasi; -fun is_blank s = kind s = Blank; - -val is_block_ctrl = member (op =) ["\\<^bsub>", "\\<^esub>", "\\<^bsup>", "\\<^esup>"]; - -fun is_quasi_letter s = let val k = kind s in k = Letter orelse k = Quasi end; -fun is_letdig s = let val k = kind s in k = Letter orelse k = Digit orelse k = Quasi end; - - - -(** symbol input **) - -(* source *) - -local - -fun is_plain s = is_ascii s andalso s <> "\^M" andalso s <> "\\"; - -fun is_utf8_trailer s = is_char s andalso 128 <= ord s andalso ord s < 192; - -fun implode_pseudo_utf8 (cs as ["\192", c]) = - if ord c < 160 then chr (ord c - 128) else implode cs - | implode_pseudo_utf8 cs = implode cs; - -val scan_encoded_newline = - $$ "\^M" -- $$ "\n" >> K "\n" || - $$ "\^M" >> K "\n"; - -val scan_raw = - Scan.this_string "raw:" ^^ (Scan.many raw_chr >> implode) || - Scan.this_string "raw" ^^ (Scan.many1 is_ascii_digit >> implode); - -val scan_total = - Scan.one is_plain || - Scan.one is_utf8 ::: Scan.many is_utf8_trailer >> implode_pseudo_utf8 || - scan_encoded_newline || - ($$ "\\" ^^ $$ "<" ^^ - (($$ "^" ^^ Scan.optional (scan_raw || scan_ascii_id) "" || Scan.optional scan_ascii_id "") ^^ - Scan.optional ($$ ">") "")) || - Scan.one not_eof; - -in - -fun source src = Source.source stopper (Scan.bulk scan_total) NONE src; - -end; - - -(* explode *) - -local - -fun no_explode [] = true - | no_explode ("\\" :: "<" :: _) = false - | no_explode ("\^M" :: _) = false - | no_explode (c :: cs) = is_ascii c andalso no_explode cs; - -in - -fun sym_explode str = - let val chs = raw_explode str in - if no_explode chs then chs - else Source.exhaust (source (Source.of_list chs)) - end; - -end; - - -(* escape *) - -val esc = fn s => - if is_char s then s - else if is_utf8 s then translate_string (fn c => "\\" ^ string_of_int (ord c)) s - else "\\" ^ s; - -val escape = implode o map esc o sym_explode; - - - -(** scanning through symbols **) - -(* scanner *) - -fun scanner msg scan syms = - let - fun message (ss, NONE) = (fn () => msg ^ ": " ^ quote (beginning 10 ss)) - | message (ss, SOME msg') = (fn () => msg ^ ", " ^ msg' () ^ ": " ^ quote (beginning 10 ss)); - val finite_scan = Scan.error (Scan.finite stopper (!! 
message scan)); - in - (case finite_scan syms of - (result, []) => result - | (_, rest) => error (message (rest, NONE) ())) - end; - - -(* space-separated words *) - -val scan_word = - Scan.many1 is_ascii_blank >> K NONE || - Scan.many1 (fn s => not (is_ascii_blank s) andalso not_eof s) >> (SOME o implode); - -val split_words = scanner "Bad text" (Scan.repeat scan_word >> map_filter I); - -val explode_words = split_words o sym_explode; - - -(* blanks *) - -fun strip_blanks s = - sym_explode s - |> take_prefix is_blank |> #2 - |> take_suffix is_blank |> #1 - |> implode; - - -(* bump string -- treat as base 26 or base 1 numbers *) - -fun symbolic_end (_ :: "\\<^sub>" :: _) = true - | symbolic_end ("'" :: ss) = symbolic_end ss - | symbolic_end (s :: _) = raw_symbolic s - | symbolic_end [] = false; - -fun bump_init str = - if symbolic_end (rev (sym_explode str)) then str ^ "'" - else str ^ "a"; - -fun bump_string str = - let - fun bump [] = ["a"] - | bump ("z" :: ss) = "a" :: bump ss - | bump (s :: ss) = - if is_char s andalso ord "a" <= ord s andalso ord s < ord "z" - then chr (ord s + 1) :: ss - else "a" :: s :: ss; - - val (ss, qs) = apfst rev (take_suffix is_quasi (sym_explode str)); - val ss' = if symbolic_end ss then "'" :: ss else bump ss; - in implode (rev ss' @ qs) end; - - - -(** symbol output **) - -(* length *) - -fun sym_len s = - if not (is_printable s) then (0: int) - else if String.isPrefix "\\ fn n => sym_len s + n) ss 0; - - -(* print mode *) - -val xsymbolsN = "xsymbols"; - -fun output s = (s, sym_length (sym_explode s)); - - -(*final declarations of this structure!*) -val explode = sym_explode; -val length = sym_length; - -end; diff --git a/core/Pure/General/symbol.scala b/core/Pure/General/symbol.scala deleted file mode 100644 index ec2329d1..00000000 --- a/core/Pure/General/symbol.scala +++ /dev/null @@ -1,532 +0,0 @@ -/* Title: Pure/General/symbol.scala - Author: Makarius - -Detecting and recoding Isabelle symbols. -*/ - -package isabelle - - -import scala.collection.mutable -import scala.util.matching.Regex -import scala.annotation.tailrec - - -object Symbol -{ - type Symbol = String - - // counting Isabelle symbols, starting from 1 - type Offset = Text.Offset - type Range = Text.Range - - - /* ASCII characters */ - - def is_ascii_letter(c: Char): Boolean = 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' - - def is_ascii_digit(c: Char): Boolean = '0' <= c && c <= '9' - - def is_ascii_hex(c: Char): Boolean = - '0' <= c && c <= '9' || 'A' <= c && c <= 'F' || 'a' <= c && c <= 'f' - - def is_ascii_quasi(c: Char): Boolean = c == '_' || c == '\'' - - def is_ascii_blank(c: Char): Boolean = " \t\n\u000b\f\r".contains(c) - - def is_ascii_letdig(c: Char): Boolean = - is_ascii_letter(c) || is_ascii_digit(c) || is_ascii_quasi(c) - - def is_ascii_identifier(s: String): Boolean = - s.length > 0 && is_ascii_letter(s(0)) && s.forall(is_ascii_letdig) - - - /* symbol matching */ - - private val symbol_total = new Regex("""(?xs) - [\ud800-\udbff][\udc00-\udfff] | \r\n | - \\ < (?: \^raw: [\x20-\x7e\u0100-\uffff && [^.>]]* | \^? ([A-Za-z][A-Za-z0-9_']*)? ) >? 
| - .""") - - private def is_plain(c: Char): Boolean = - !(c == '\r' || c == '\\' || Character.isHighSurrogate(c)) - - def is_malformed(s: Symbol): Boolean = - s.length match { - case 1 => - val c = s(0) - Character.isHighSurrogate(c) || Character.isLowSurrogate(c) || c == '\ufffd' - case 2 => - val c1 = s(0) - val c2 = s(1) - !(c1 == '\r' && c2 == '\n' || Character.isSurrogatePair(c1, c2)) - case _ => !s.endsWith(">") || s == "\\<>" || s == "\\<^>" - } - - def is_newline(s: Symbol): Boolean = - s == "\n" || s == "\r" || s == "\r\n" - - class Matcher(text: CharSequence) - { - private val matcher = symbol_total.pattern.matcher(text) - def apply(start: Int, end: Int): Int = - { - require(0 <= start && start < end && end <= text.length) - if (is_plain(text.charAt(start))) 1 - else { - matcher.region(start, end).lookingAt - matcher.group.length - } - } - } - - - /* iterator */ - - private val char_symbols: Array[Symbol] = - (0 until 256).iterator.map(i => new String(Array(i.toChar))).toArray - - def iterator(text: CharSequence): Iterator[Symbol] = - new Iterator[Symbol] - { - private val matcher = new Matcher(text) - private var i = 0 - def hasNext = i < text.length - def next = - { - val n = matcher(i, text.length) - val s = - if (n == 0) "" - else if (n == 1) { - val c = text.charAt(i) - if (c < char_symbols.length) char_symbols(c) - else text.subSequence(i, i + n).toString - } - else text.subSequence(i, i + n).toString - i += n - s - } - } - - def explode(text: CharSequence): List[Symbol] = iterator(text).toList - - def advance_line_column(pos: (Int, Int), text: CharSequence): (Int, Int) = - { - var (line, column) = pos - for (sym <- iterator(text)) { - if (is_newline(sym)) { line += 1; column = 1 } - else column += 1 - } - (line, column) - } - - - /* decoding offsets */ - - object Index - { - private sealed case class Entry(chr: Int, sym: Int) - - val empty: Index = new Index(Nil) - - def apply(text: CharSequence): Index = - { - val matcher = new Matcher(text) - val buf = new mutable.ListBuffer[Entry] - var chr = 0 - var sym = 0 - while (chr < text.length) { - val n = matcher(chr, text.length) - chr += n - sym += 1 - if (n > 1) buf += Entry(chr, sym) - } - if (buf.isEmpty) empty else new Index(buf.toList) - } - } - - final class Index private(entries: List[Index.Entry]) - { - private val hash: Int = entries.hashCode - private val index: Array[Index.Entry] = entries.toArray - - def decode(symbol_offset: Offset): Text.Offset = - { - val sym = symbol_offset - 1 - val end = index.length - @tailrec def bisect(a: Int, b: Int): Int = - { - if (a < b) { - val c = (a + b) / 2 - if (sym < index(c).sym) bisect(a, c) - else if (c + 1 == end || sym < index(c + 1).sym) c - else bisect(c + 1, b) - } - else -1 - } - val i = bisect(0, end) - if (i < 0) sym - else index(i).chr + sym - index(i).sym - } - def decode(symbol_range: Range): Text.Range = symbol_range.map(decode(_)) - - override def hashCode: Int = hash - override def equals(that: Any): Boolean = - that match { - case other: Index => index.sameElements(other.index) - case _ => false - } - } - - - /* text chunks */ - - object Text_Chunk - { - sealed abstract class Name - case object Default extends Name - case class Id(id: Document_ID.Generic) extends Name - case class File(name: String) extends Name - - def apply(text: CharSequence): Text_Chunk = - new Text_Chunk(Text.Range(0, text.length), Index(text)) - } - - final class Text_Chunk private(val range: Text.Range, private val index: Index) - { - override def hashCode: Int = (range, index).hashCode 
- override def equals(that: Any): Boolean = - that match { - case other: Text_Chunk => - range == other.range && - index == other.index - case _ => false - } - - override def toString: String = "Text_Chunk" + range.toString - - def decode(symbol_offset: Offset): Text.Offset = index.decode(symbol_offset) - def decode(symbol_range: Range): Text.Range = index.decode(symbol_range) - def incorporate(symbol_range: Range): Option[Text.Range] = - { - def in(r: Range): Option[Text.Range] = - range.try_restrict(decode(r)) match { - case Some(r1) if !r1.is_singularity => Some(r1) - case _ => None - } - in(symbol_range) orElse in(symbol_range - 1) - } - } - - - /* recoding text */ - - private class Recoder(list: List[(String, String)]) - { - private val (min, max) = - { - var min = '\uffff' - var max = '\u0000' - for ((x, _) <- list) { - val c = x(0) - if (c < min) min = c - if (c > max) max = c - } - (min, max) - } - private val table = - { - var tab = Map[String, String]() - for ((x, y) <- list) { - tab.get(x) match { - case None => tab += (x -> y) - case Some(z) => - error("Duplicate mapping of " + quote(x) + " to " + quote(y) + " vs. " + quote(z)) - } - } - tab - } - def recode(text: String): String = - { - val len = text.length - val matcher = symbol_total.pattern.matcher(text) - val result = new StringBuilder(len) - var i = 0 - while (i < len) { - val c = text(i) - if (min <= c && c <= max) { - matcher.region(i, len).lookingAt - val x = matcher.group - result.append(table.getOrElse(x, x)) - i = matcher.end - } - else { result.append(c); i += 1 } - } - result.toString - } - } - - - - /** symbol interpretation **/ - - private lazy val symbols = - new Interpretation(File.try_read(Path.split(Isabelle_System.getenv("ISABELLE_SYMBOLS")))) - - private class Interpretation(symbols_spec: String) - { - /* read symbols */ - - private val No_Decl = new Regex("""(?xs) ^\s* (?: \#.* )? 
$ """) - private val Key = new Regex("""(?xs) (.+): """) - - private def read_decl(decl: String): (Symbol, Properties.T) = - { - def err() = error("Bad symbol declaration: " + decl) - - def read_props(props: List[String]): Properties.T = - { - props match { - case Nil => Nil - case _ :: Nil => err() - case Key(x) :: y :: rest => (x -> y) :: read_props(rest) - case _ => err() - } - } - decl.split("\\s+").toList match { - case sym :: props if sym.length > 1 && !is_malformed(sym) => - (sym, read_props(props)) - case _ => err() - } - } - - private val symbols: List[(Symbol, Properties.T)] = - (((List.empty[(Symbol, Properties.T)], Set.empty[Symbol]) /: - split_lines(symbols_spec).reverse) - { case (res, No_Decl()) => res - case ((list, known), decl) => - val (sym, props) = read_decl(decl) - if (known(sym)) (list, known) - else ((sym, props) :: list, known + sym) - })._1 - - - /* basic properties */ - - val properties: Map[Symbol, Properties.T] = Map(symbols: _*) - - val names: Map[Symbol, String] = - { - val name = new Regex("""\\<\^?([A-Za-z][A-Za-z0-9_']*)>""") - Map((for ((sym @ name(a), _) <- symbols) yield (sym -> a)): _*) - } - - val groups: List[(String, List[Symbol])] = - symbols.map({ case (sym, props) => - val gs = for (("group", g) <- props) yield g - if (gs.isEmpty) List(sym -> "unsorted") else gs.map(sym -> _) - }).flatten - .groupBy(_._2).toList.map({ case (group, list) => (group, list.map(_._1)) }) - .sortBy(_._1) - - val abbrevs: Multi_Map[Symbol, String] = - Multi_Map(( - for { - (sym, props) <- symbols - ("abbrev", a) <- props.reverse - } yield (sym -> a)): _*) - - - /* recoding */ - - private val Code = new Properties.String("code") - private val (decoder, encoder) = - { - val mapping = - for { - (sym, props) <- symbols - code = - props match { - case Code(s) => - try { Integer.decode(s).intValue } - catch { case _: NumberFormatException => error("Bad code for symbol " + sym) } - case _ => error("Missing code for symbol " + sym) - } - ch = new String(Character.toChars(code)) - } yield { - if (code < 128) error("Illegal ASCII code for symbol " + sym) - else (sym, ch) - } - (new Recoder(mapping), - new Recoder(mapping map { case (x, y) => (y, x) })) - } - - def decode(text: String): String = decoder.recode(text) - def encode(text: String): String = encoder.recode(text) - - private def recode_set(elems: String*): Set[String] = - { - val content = elems.toList - Set((content ::: content.map(decode)): _*) - } - - private def recode_map[A](elems: (String, A)*): Map[String, A] = - { - val content = elems.toList - Map((content ::: content.map({ case (sym, a) => (decode(sym), a) })): _*) - } - - - /* user fonts */ - - private val Font = new Properties.String("font") - val fonts: Map[Symbol, String] = - recode_map((for ((sym, Font(font)) <- symbols) yield (sym -> font)): _*) - - val font_names: List[String] = Set(fonts.toList.map(_._2): _*).toList - val font_index: Map[String, Int] = Map((font_names zip (0 until font_names.length).toList): _*) - - - /* classification */ - - val letters = recode_set( - "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", - "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", - "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", - "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", - - "\\", "\\", "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", "\\", "\\", - "\\", "\\
", - "\\", "\\", "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", - - "\\", "\\", "\\", "\\
", "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", "\\", "\\
", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", "\\
", - "\\", "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", - - "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", - "\\", "\\", "\\", "\\", "\\", - "\\", "\\") - - val blanks = recode_set(" ", "\t", "\n", "\u000B", "\f", "\r", "\r\n") - - val sym_chars = - Set("!", "#", "$", "%", "&", "*", "+", "-", "/", "<", "=", ">", "?", "@", "^", "_", "|", "~") - - val symbolic = recode_set((for { (sym, _) <- symbols; if raw_symbolic(sym) } yield sym): _*) - - - /* cartouches */ - - val open_decoded = decode(open) - val close_decoded = decode(close) - - - /* control symbols */ - - val ctrl_decoded: Set[Symbol] = - Set((for ((sym, _) <- symbols if sym.startsWith("\\<^")) yield decode(sym)): _*) - - val sub_decoded = decode("\\<^sub>") - val sup_decoded = decode("\\<^sup>") - val bsub_decoded = decode("\\<^bsub>") - val esub_decoded = decode("\\<^esub>") - val bsup_decoded = decode("\\<^bsup>") - val esup_decoded = decode("\\<^esup>") - val bold_decoded = decode("\\<^bold>") - } - - - /* tables */ - - def properties: Map[Symbol, Properties.T] = symbols.properties - def names: Map[Symbol, String] = symbols.names - def groups: List[(String, List[Symbol])] = symbols.groups - def abbrevs: Multi_Map[Symbol, String] = symbols.abbrevs - - def decode(text: String): String = symbols.decode(text) - def encode(text: String): String = symbols.encode(text) - - def decode_string: XML.Decode.T[String] = (x => decode(XML.Decode.string(x))) - def encode_string: XML.Encode.T[String] = (x => XML.Encode.string(encode(x))) - - def decode_strict(text: String): String = - { - val decoded = decode(text) - if (encode(decoded) == text) decoded - else { - val bad = new mutable.ListBuffer[Symbol] - for (s <- iterator(text) if encode(decode(s)) != s && !bad.contains(s)) - bad += s - error("Bad Unicode symbols in text: " + commas_quote(bad)) - } - } - - def fonts: Map[Symbol, String] = symbols.fonts - def font_names: List[String] = symbols.font_names - def font_index: Map[String, Int] = symbols.font_index - def lookup_font(sym: Symbol): Option[Int] = symbols.fonts.get(sym).map(font_index(_)) - - - /* classification */ - - def is_letter(sym: Symbol): Boolean = symbols.letters.contains(sym) - def is_digit(sym: Symbol): Boolean = sym.length == 1 && '0' <= sym(0) && sym(0) <= '9' - def is_quasi(sym: Symbol): Boolean = sym == "_" || sym == "'" - def is_letdig(sym: Symbol): Boolean = is_letter(sym) || is_digit(sym) || is_quasi(sym) - def is_blank(sym: Symbol): Boolean = symbols.blanks.contains(sym) - - - /* cartouches */ - - val open = "\\" - val close = "\\" - - def open_decoded: Symbol = symbols.open_decoded - def close_decoded: Symbol = symbols.close_decoded - - def is_open(sym: Symbol): Boolean = sym == open_decoded || sym == open - def is_close(sym: Symbol): Boolean = sym == close_decoded || sym == close - - - /* symbols for symbolic identifiers */ - - private def raw_symbolic(sym: Symbol): Boolean = - sym.startsWith("\\<") && sym.endsWith(">") && !sym.startsWith("\\<^") - - def is_symbolic(sym: Symbol): Boolean = - !is_open(sym) && !is_close(sym) && (raw_symbolic(sym) || symbols.symbolic.contains(sym)) - - def is_symbolic_char(sym: Symbol): Boolean = symbols.sym_chars.contains(sym) - - - /* control symbols */ - - def is_ctrl(sym: Symbol): Boolean = - sym.startsWith("\\<^") || symbols.ctrl_decoded.contains(sym) - - def is_controllable(sym: 
Symbol): Boolean = - !is_blank(sym) && !is_ctrl(sym) && !is_open(sym) && !is_close(sym) && !is_malformed(sym) - - def sub_decoded: Symbol = symbols.sub_decoded - def sup_decoded: Symbol = symbols.sup_decoded - def bsub_decoded: Symbol = symbols.bsub_decoded - def esub_decoded: Symbol = symbols.esub_decoded - def bsup_decoded: Symbol = symbols.bsup_decoded - def esup_decoded: Symbol = symbols.esup_decoded - def bold_decoded: Symbol = symbols.bold_decoded -} diff --git a/core/Pure/General/symbol_pos.ML b/core/Pure/General/symbol_pos.ML deleted file mode 100644 index 5d726673..00000000 --- a/core/Pure/General/symbol_pos.ML +++ /dev/null @@ -1,298 +0,0 @@ -(* Title: Pure/General/symbol_pos.ML - Author: Makarius - -Symbols with explicit position information. -*) - -signature SYMBOL_POS = -sig - type T = Symbol.symbol * Position.T - val symbol: T -> Symbol.symbol - val $$ : Symbol.symbol -> T list -> T * T list - val ~$$ : Symbol.symbol -> T list -> T * T list - val $$$ : Symbol.symbol -> T list -> T list * T list - val ~$$$ : Symbol.symbol -> T list -> T list * T list - val content: T list -> string - val range: T list -> Position.range - val is_eof: T -> bool - val stopper: T Scan.stopper - val !!! : Scan.message -> (T list -> 'a) -> T list -> 'a - val change_prompt: ('a -> 'b) -> 'a -> 'b - val scan_pos: T list -> Position.T * T list - val scan_string_q: string -> T list -> (Position.T * (T list * Position.T)) * T list - val scan_string_qq: string -> T list -> (Position.T * (T list * Position.T)) * T list - val scan_string_bq: string -> T list -> (Position.T * (T list * Position.T)) * T list - val recover_string_q: T list -> T list * T list - val recover_string_qq: T list -> T list * T list - val recover_string_bq: T list -> T list * T list - val quote_string_q: string -> string - val quote_string_qq: string -> string - val quote_string_bq: string -> string - val scan_cartouche: string -> T list -> T list * T list - val recover_cartouche: T list -> T list * T list - val cartouche_content: T list -> T list - val scan_comment: string -> T list -> T list * T list - val scan_comment_body: string -> T list -> T list * T list - val recover_comment: T list -> T list * T list - val source: Position.T -> (Symbol.symbol, 'a) Source.source -> - (T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source - type text = string - val implode: T list -> text - val implode_range: Position.T -> Position.T -> T list -> text * Position.range - val explode: text * Position.T -> T list - type source = {delimited: bool, text: text, pos: Position.T} - val source_content: source -> string * Position.T - val scan_ident: T list -> T list * T list - val is_identifier: string -> bool -end; - -structure Symbol_Pos: SYMBOL_POS = -struct - -(* type T *) - -type T = Symbol.symbol * Position.T; - -fun symbol ((s, _): T) = s; - -val content = implode o map symbol; - -fun range (syms as (_, pos) :: _) = - let val pos' = List.last syms |-> Position.advance - in Position.range pos pos' end - | range [] = Position.no_range; - - -(* stopper *) - -fun mk_eof pos = (Symbol.eof, pos); -val eof = mk_eof Position.none; - -val is_eof = Symbol.is_eof o symbol; - -val stopper = - Scan.stopper (fn [] => eof | inp => mk_eof (List.last inp |-> Position.advance)) is_eof; - - -(* basic scanners *) - -fun !!! 
text scan = - let - fun get_pos [] = " (end-of-input)" - | get_pos ((_, pos) :: _) = Position.here pos; - - fun err (syms, msg) = fn () => - text () ^ get_pos syms ^ - Markup.markup Markup.no_report (" at " ^ Symbol.beginning 10 (map symbol syms)) ^ - (case msg of NONE => "" | SOME m => "\n" ^ m ()); - in Scan.!! err scan end; - -fun change_prompt scan = Scan.prompt "# " scan; - -fun $$ s = Scan.one (fn x => symbol x = s); -fun ~$$ s = Scan.one (fn x => symbol x <> s); - -fun $$$ s = Scan.one (fn x => symbol x = s) >> single; -fun ~$$$ s = Scan.one (fn x => symbol x <> s) >> single; - -val scan_pos = Scan.ahead (Scan.one (K true)) >> (fn (_, pos): T => pos); - - -(* scan string literals *) - -local - -val char_code = - Scan.one (Symbol.is_ascii_digit o symbol) -- - Scan.one (Symbol.is_ascii_digit o symbol) -- - Scan.one (Symbol.is_ascii_digit o symbol) :|-- - (fn (((a, pos), (b, _)), (c, _)) => - let val (n, _) = Library.read_int [a, b, c] - in if n <= 255 then Scan.succeed [(chr n, pos)] else Scan.fail end); - -fun scan_str q err_prefix = - $$$ "\\" |-- !!! (fn () => err_prefix ^ "bad escape character in string") - ($$$ q || $$$ "\\" || char_code) || - Scan.one (fn (s, _) => s <> q andalso s <> "\\" andalso Symbol.is_regular s) >> single; - -fun scan_strs q err_prefix = - Scan.ahead ($$ q) |-- - !!! (fn () => err_prefix ^ "unclosed string literal") - ((scan_pos --| $$$ q) -- - (change_prompt ((Scan.repeat (scan_str q err_prefix) >> flat) -- ($$$ q |-- scan_pos)))); - -fun recover_strs q = - $$$ q @@@ (Scan.repeat (Scan.permissive (scan_str q "")) >> flat); - -in - -val scan_string_q = scan_strs "'"; -val scan_string_qq = scan_strs "\""; -val scan_string_bq = scan_strs "`"; - -val recover_string_q = recover_strs "'"; -val recover_string_qq = recover_strs "\""; -val recover_string_bq = recover_strs "`"; - -end; - - -(* quote string literals *) - -local - -fun char_code i = - (if i < 10 then "00" else if i < 100 then "0" else "") ^ string_of_int i; - -fun quote_str q s = - if Symbol.is_ascii_control s then "\\" ^ char_code (ord s) - else if s = q orelse s = "\\" then "\\" ^ s - else s; - -fun quote_string q = enclose q q o implode o map (quote_str q) o Symbol.explode; - -in - -val quote_string_q = quote_string "'"; -val quote_string_qq = quote_string "\""; -val quote_string_bq = quote_string "`"; - -end; - - -(* nested text cartouches *) - -val scan_cartouche_depth = - Scan.repeat1 (Scan.depend (fn (d: int) => - $$ "\\" >> pair (d + 1) || - (if d > 0 then - Scan.one (fn (s, _) => s <> "\\" andalso Symbol.is_regular s) >> pair d || - $$ "\\" >> pair (d - 1) - else Scan.fail))); - -fun scan_cartouche err_prefix = - Scan.ahead ($$ "\\") |-- - !!! 
(fn () => err_prefix ^ "unclosed text cartouche") - (change_prompt (Scan.provide (fn d => d = 0) 0 scan_cartouche_depth)); - -val recover_cartouche = Scan.pass 0 scan_cartouche_depth; - -fun cartouche_content syms = - let - fun err () = - error ("Malformed text cartouche: " - ^ quote (content syms) ^ Position.here (#1 (range syms))); - in - (case syms of - ("\\", _) :: rest => - (case rev rest of - ("\\", _) :: rrest => rev rrest - | _ => err ()) - | _ => err ()) - end; - - -(* ML-style comments *) - -local - -val scan_cmt = - Scan.depend (fn (d: int) => $$$ "(" @@@ $$$ "*" >> pair (d + 1)) || - Scan.depend (fn 0 => Scan.fail | d => $$$ "*" @@@ $$$ ")" >> pair (d - 1)) || - Scan.lift ($$$ "*" --| Scan.ahead (~$$$ ")")) || - Scan.lift (Scan.one (fn (s, _) => s <> "*" andalso Symbol.is_regular s)) >> single; - -val scan_cmts = Scan.pass 0 (Scan.repeat scan_cmt >> flat); - -val scan_body = change_prompt scan_cmts; - -in - -fun scan_comment err_prefix = - Scan.ahead ($$ "(" -- $$ "*") |-- - !!! (fn () => err_prefix ^ "unclosed comment") - ($$$ "(" @@@ $$$ "*" @@@ scan_body @@@ $$$ "*" @@@ $$$ ")"); - -fun scan_comment_body err_prefix = - Scan.ahead ($$ "(" -- $$ "*") |-- - !!! (fn () => err_prefix ^ "unclosed comment") - ($$ "(" |-- $$ "*" |-- scan_body --| $$ "*" --| $$ ")"); - -val recover_comment = - $$$ "(" @@@ $$$ "*" @@@ scan_cmts; - -end; - - -(* source *) - -fun source pos = - Source.source' pos Symbol.stopper (Scan.bulk (Scan.depend (fn pos => - Scan.one Symbol.not_eof >> (fn s => (Position.advance s pos, (s, pos)))))) NONE; - - -(* compact representation -- with Symbol.DEL padding *) - -type text = string; - -fun pad [] = [] - | pad [(s, _)] = [s] - | pad ((s1, pos1) :: (rest as (_, pos2) :: _)) = - let - val end_pos1 = Position.advance s1 pos1; - val d = Int.max (0, Position.distance_of end_pos1 pos2); - in s1 :: replicate d Symbol.DEL @ pad rest end; - -val implode = implode o pad; - -fun implode_range pos1 pos2 syms = - let val syms' = (("", pos1) :: syms @ [("", pos2)]) - in (implode syms', range syms') end; - -fun explode (str, pos) = - let - val (res, _) = - fold (fn s => fn (res, p) => ((s, p) :: res, Position.advance s p)) - (Symbol.explode str) ([], Position.reset_range pos); - in fold (fn (s, p) => if s = Symbol.DEL then I else cons (s, p)) res [] end; - - -(* full source information *) - -type source = {delimited: bool, text: text, pos: Position.T}; - -fun source_content {delimited = _, text, pos} = - let val syms = explode (text, pos) in (content syms, pos) end; - - -(* identifiers *) - -local - -val letter = Scan.one (symbol #> Symbol.is_letter); -val letdigs1 = Scan.many1 (symbol #> Symbol.is_letdig); - -val sub = Scan.one (symbol #> (fn s => s = "\\<^sub>")); - -in - -val scan_ident = letter ::: (Scan.repeat (letdigs1 || sub ::: letdigs1) >> flat); - -end; - -fun is_identifier s = - Symbol.is_ascii_identifier s orelse - (case try (Scan.finite stopper scan_ident) (explode (s, Position.none)) of - SOME (_, []) => true - | _ => false); - -end; - -structure Basic_Symbol_Pos = (*not open by default*) -struct - val $$ = Symbol_Pos.$$; - val ~$$ = Symbol_Pos.~$$; - val $$$ = Symbol_Pos.$$$; - val ~$$$ = Symbol_Pos.~$$$; -end; - diff --git a/core/Pure/General/table.ML b/core/Pure/General/table.ML deleted file mode 100644 index fe516c10..00000000 --- a/core/Pure/General/table.ML +++ /dev/null @@ -1,432 +0,0 @@ -(* Title: Pure/General/table.ML - Author: Markus Wenzel and Stefan Berghofer, TU Muenchen - -Generic tables. 
Efficient purely functional implementation using -balanced 2-3 trees. -*) - -signature KEY = -sig - type key - val ord: key * key -> order -end; - -signature TABLE = -sig - type key - type 'a table - exception DUP of key - exception SAME - exception UNDEF of key - val empty: 'a table - val is_empty: 'a table -> bool - val map: (key -> 'a -> 'b) -> 'a table -> 'b table - val fold: (key * 'b -> 'a -> 'a) -> 'b table -> 'a -> 'a - val fold_rev: (key * 'b -> 'a -> 'a) -> 'b table -> 'a -> 'a - val dest: 'a table -> (key * 'a) list - val keys: 'a table -> key list - val min: 'a table -> (key * 'a) option - val max: 'a table -> (key * 'a) option - val get_first: (key * 'a -> 'b option) -> 'a table -> 'b option - val exists: (key * 'a -> bool) -> 'a table -> bool - val forall: (key * 'a -> bool) -> 'a table -> bool - val lookup_key: 'a table -> key -> (key * 'a) option - val lookup: 'a table -> key -> 'a option - val defined: 'a table -> key -> bool - val update: key * 'a -> 'a table -> 'a table - val update_new: key * 'a -> 'a table -> 'a table (*exception DUP*) - val default: key * 'a -> 'a table -> 'a table - val map_entry: key -> ('a -> 'a) (*exception SAME*) -> 'a table -> 'a table - val map_default: key * 'a -> ('a -> 'a) -> 'a table -> 'a table - val make: (key * 'a) list -> 'a table (*exception DUP*) - val join: (key -> 'a * 'a -> 'a) (*exception SAME*) -> - 'a table * 'a table -> 'a table (*exception DUP*) - val merge: ('a * 'a -> bool) -> 'a table * 'a table -> 'a table (*exception DUP*) - val delete: key -> 'a table -> 'a table (*exception UNDEF*) - val delete_safe: key -> 'a table -> 'a table - val member: ('b * 'a -> bool) -> 'a table -> key * 'b -> bool - val insert: ('a * 'a -> bool) -> key * 'a -> 'a table -> 'a table (*exception DUP*) - val remove: ('b * 'a -> bool) -> key * 'b -> 'a table -> 'a table - val lookup_list: 'a list table -> key -> 'a list - val cons_list: key * 'a -> 'a list table -> 'a list table - val insert_list: ('a * 'a -> bool) -> key * 'a -> 'a list table -> 'a list table - val remove_list: ('b * 'a -> bool) -> key * 'b -> 'a list table -> 'a list table - val update_list: ('a * 'a -> bool) -> key * 'a -> 'a list table -> 'a list table - val make_list: (key * 'a) list -> 'a list table - val dest_list: 'a list table -> (key * 'a) list - val merge_list: ('a * 'a -> bool) -> 'a list table * 'a list table -> 'a list table - type set = unit table - val make_set: key list -> set (*exception DUP*) -end; - -functor Table(Key: KEY): TABLE = -struct - - -(* datatype table *) - -type key = Key.key; - -datatype 'a table = - Empty | - Branch2 of 'a table * (key * 'a) * 'a table | - Branch3 of 'a table * (key * 'a) * 'a table * (key * 'a) * 'a table; - -exception DUP of key; - - -(* empty *) - -val empty = Empty; - -fun is_empty Empty = true - | is_empty _ = false; - - -(* map and fold combinators *) - -fun map_table f = - let - fun map Empty = Empty - | map (Branch2 (left, (k, x), right)) = - Branch2 (map left, (k, f k x), map right) - | map (Branch3 (left, (k1, x1), mid, (k2, x2), right)) = - Branch3 (map left, (k1, f k1 x1), map mid, (k2, f k2 x2), map right); - in map end; - -fun fold_table f = - let - fun fold Empty x = x - | fold (Branch2 (left, p, right)) x = - fold right (f p (fold left x)) - | fold (Branch3 (left, p1, mid, p2, right)) x = - fold right (f p2 (fold mid (f p1 (fold left x)))); - in fold end; - -fun fold_rev_table f = - let - fun fold Empty x = x - | fold (Branch2 (left, p, right)) x = - fold left (f p (fold right x)) - | fold (Branch3 (left, p1, mid, 
p2, right)) x = - fold left (f p1 (fold mid (f p2 (fold right x)))); - in fold end; - -fun dest tab = fold_rev_table cons tab []; -fun keys tab = fold_rev_table (cons o #1) tab []; - - -(* min/max entries *) - -fun min Empty = NONE - | min (Branch2 (Empty, p, _)) = SOME p - | min (Branch3 (Empty, p, _, _, _)) = SOME p - | min (Branch2 (left, _, _)) = min left - | min (Branch3 (left, _, _, _, _)) = min left; - -fun max Empty = NONE - | max (Branch2 (_, p, Empty)) = SOME p - | max (Branch3 (_, _, _, p, Empty)) = SOME p - | max (Branch2 (_, _, right)) = max right - | max (Branch3 (_, _, _, _, right)) = max right; - - -(* get_first *) - -fun get_first f = - let - fun get Empty = NONE - | get (Branch2 (left, (k, x), right)) = - (case get left of - NONE => - (case f (k, x) of - NONE => get right - | some => some) - | some => some) - | get (Branch3 (left, (k1, x1), mid, (k2, x2), right)) = - (case get left of - NONE => - (case f (k1, x1) of - NONE => - (case get mid of - NONE => - (case f (k2, x2) of - NONE => get right - | some => some) - | some => some) - | some => some) - | some => some); - in get end; - -fun exists pred = is_some o get_first (fn entry => if pred entry then SOME () else NONE); -fun forall pred = not o exists (not o pred); - - -(* lookup *) - -fun lookup_key tab key = - let - fun look Empty = NONE - | look (Branch2 (left, (k, x), right)) = - (case Key.ord (key, k) of - LESS => look left - | EQUAL => SOME (k, x) - | GREATER => look right) - | look (Branch3 (left, (k1, x1), mid, (k2, x2), right)) = - (case Key.ord (key, k1) of - LESS => look left - | EQUAL => SOME (k1, x1) - | GREATER => - (case Key.ord (key, k2) of - LESS => look mid - | EQUAL => SOME (k2, x2) - | GREATER => look right)); - in look tab end; - -fun lookup tab key = Option.map #2 (lookup_key tab key); - -fun defined tab key = - let - fun def Empty = false - | def (Branch2 (left, (k, x), right)) = - (case Key.ord (key, k) of - LESS => def left - | EQUAL => true - | GREATER => def right) - | def (Branch3 (left, (k1, x1), mid, (k2, x2), right)) = - (case Key.ord (key, k1) of - LESS => def left - | EQUAL => true - | GREATER => - (case Key.ord (key, k2) of - LESS => def mid - | EQUAL => true - | GREATER => def right)); - in def tab end; - - -(* modify *) - -datatype 'a growth = - Stay of 'a table | - Sprout of 'a table * (key * 'a) * 'a table; - -exception SAME; - -fun modify key f tab = - let - fun modfy Empty = Sprout (Empty, (key, f NONE), Empty) - | modfy (Branch2 (left, p as (k, x), right)) = - (case Key.ord (key, k) of - LESS => - (case modfy left of - Stay left' => Stay (Branch2 (left', p, right)) - | Sprout (left1, q, left2) => Stay (Branch3 (left1, q, left2, p, right))) - | EQUAL => Stay (Branch2 (left, (k, f (SOME x)), right)) - | GREATER => - (case modfy right of - Stay right' => Stay (Branch2 (left, p, right')) - | Sprout (right1, q, right2) => - Stay (Branch3 (left, p, right1, q, right2)))) - | modfy (Branch3 (left, p1 as (k1, x1), mid, p2 as (k2, x2), right)) = - (case Key.ord (key, k1) of - LESS => - (case modfy left of - Stay left' => Stay (Branch3 (left', p1, mid, p2, right)) - | Sprout (left1, q, left2) => - Sprout (Branch2 (left1, q, left2), p1, Branch2 (mid, p2, right))) - | EQUAL => Stay (Branch3 (left, (k1, f (SOME x1)), mid, p2, right)) - | GREATER => - (case Key.ord (key, k2) of - LESS => - (case modfy mid of - Stay mid' => Stay (Branch3 (left, p1, mid', p2, right)) - | Sprout (mid1, q, mid2) => - Sprout (Branch2 (left, p1, mid1), q, Branch2 (mid2, p2, right))) - | EQUAL => Stay (Branch3 (left, 
p1, mid, (k2, f (SOME x2)), right)) - | GREATER => - (case modfy right of - Stay right' => Stay (Branch3 (left, p1, mid, p2, right')) - | Sprout (right1, q, right2) => - Sprout (Branch2 (left, p1, mid), p2, Branch2 (right1, q, right2))))); - - in - (case modfy tab of - Stay tab' => tab' - | Sprout br => Branch2 br) - handle SAME => tab - end; - -fun update (key, x) tab = modify key (fn _ => x) tab; -fun update_new (key, x) tab = modify key (fn NONE => x | SOME _ => raise DUP key) tab; -fun default (key, x) tab = modify key (fn NONE => x | SOME _ => raise SAME) tab; -fun map_entry key f = modify key (fn NONE => raise SAME | SOME x => f x); -fun map_default (key, x) f = modify key (fn NONE => f x | SOME y => f y); - - -(* delete *) - -exception UNDEF of key; - -local - -fun compare NONE (k2, _) = LESS - | compare (SOME k1) (k2, _) = Key.ord (k1, k2); - -fun if_eq EQUAL x y = x - | if_eq _ x y = y; - -fun del (SOME k) Empty = raise UNDEF k - | del NONE (Branch2 (Empty, p, Empty)) = (p, (true, Empty)) - | del NONE (Branch3 (Empty, p, Empty, q, Empty)) = - (p, (false, Branch2 (Empty, q, Empty))) - | del k (Branch2 (Empty, p, Empty)) = (case compare k p of - EQUAL => (p, (true, Empty)) | _ => raise UNDEF (the k)) - | del k (Branch3 (Empty, p, Empty, q, Empty)) = (case compare k p of - EQUAL => (p, (false, Branch2 (Empty, q, Empty))) - | _ => (case compare k q of - EQUAL => (q, (false, Branch2 (Empty, p, Empty))) - | _ => raise UNDEF (the k))) - | del k (Branch2 (l, p, r)) = (case compare k p of - LESS => (case del k l of - (p', (false, l')) => (p', (false, Branch2 (l', p, r))) - | (p', (true, l')) => (p', case r of - Branch2 (rl, rp, rr) => - (true, Branch3 (l', p, rl, rp, rr)) - | Branch3 (rl, rp, rm, rq, rr) => (false, Branch2 - (Branch2 (l', p, rl), rp, Branch2 (rm, rq, rr))))) - | ord => (case del (if_eq ord NONE k) r of - (p', (false, r')) => (p', (false, Branch2 (l, if_eq ord p' p, r'))) - | (p', (true, r')) => (p', case l of - Branch2 (ll, lp, lr) => - (true, Branch3 (ll, lp, lr, if_eq ord p' p, r')) - | Branch3 (ll, lp, lm, lq, lr) => (false, Branch2 - (Branch2 (ll, lp, lm), lq, Branch2 (lr, if_eq ord p' p, r')))))) - | del k (Branch3 (l, p, m, q, r)) = (case compare k q of - LESS => (case compare k p of - LESS => (case del k l of - (p', (false, l')) => (p', (false, Branch3 (l', p, m, q, r))) - | (p', (true, l')) => (p', (false, case (m, r) of - (Branch2 (ml, mp, mr), Branch2 _) => - Branch2 (Branch3 (l', p, ml, mp, mr), q, r) - | (Branch3 (ml, mp, mm, mq, mr), _) => - Branch3 (Branch2 (l', p, ml), mp, Branch2 (mm, mq, mr), q, r) - | (Branch2 (ml, mp, mr), Branch3 (rl, rp, rm, rq, rr)) => - Branch3 (Branch2 (l', p, ml), mp, Branch2 (mr, q, rl), rp, - Branch2 (rm, rq, rr))))) - | ord => (case del (if_eq ord NONE k) m of - (p', (false, m')) => - (p', (false, Branch3 (l, if_eq ord p' p, m', q, r))) - | (p', (true, m')) => (p', (false, case (l, r) of - (Branch2 (ll, lp, lr), Branch2 _) => - Branch2 (Branch3 (ll, lp, lr, if_eq ord p' p, m'), q, r) - | (Branch3 (ll, lp, lm, lq, lr), _) => - Branch3 (Branch2 (ll, lp, lm), lq, - Branch2 (lr, if_eq ord p' p, m'), q, r) - | (_, Branch3 (rl, rp, rm, rq, rr)) => - Branch3 (l, if_eq ord p' p, Branch2 (m', q, rl), rp, - Branch2 (rm, rq, rr)))))) - | ord => (case del (if_eq ord NONE k) r of - (q', (false, r')) => - (q', (false, Branch3 (l, p, m, if_eq ord q' q, r'))) - | (q', (true, r')) => (q', (false, case (l, m) of - (Branch2 _, Branch2 (ml, mp, mr)) => - Branch2 (l, p, Branch3 (ml, mp, mr, if_eq ord q' q, r')) - | (_, Branch3 (ml, mp, mm, mq, mr)) 
=> - Branch3 (l, p, Branch2 (ml, mp, mm), mq, - Branch2 (mr, if_eq ord q' q, r')) - | (Branch3 (ll, lp, lm, lq, lr), Branch2 (ml, mp, mr)) => - Branch3 (Branch2 (ll, lp, lm), lq, Branch2 (lr, p, ml), mp, - Branch2 (mr, if_eq ord q' q, r')))))); - -in - -fun delete key tab = snd (snd (del (SOME key) tab)); -fun delete_safe key tab = if defined tab key then delete key tab else tab; - -end; - - -(* membership operations *) - -fun member eq tab (key, x) = - (case lookup tab key of - NONE => false - | SOME y => eq (x, y)); - -fun insert eq (key, x) = - modify key (fn NONE => x | SOME y => if eq (x, y) then raise SAME else raise DUP key); - -fun remove eq (key, x) tab = - (case lookup tab key of - NONE => tab - | SOME y => if eq (x, y) then delete key tab else tab); - - -(* simultaneous modifications *) - -fun make entries = fold update_new entries empty; - -fun join f (table1, table2) = - let - fun add (key, y) tab = modify key (fn NONE => y | SOME x => f key (x, y)) tab; - in - if pointer_eq (table1, table2) then table1 - else if is_empty table1 then table2 - else fold_table add table2 table1 - end; - -fun merge eq = join (fn key => fn xy => if eq xy then raise SAME else raise DUP key); - - -(* list tables *) - -fun lookup_list tab key = these (lookup tab key); - -fun cons_list (key, x) tab = modify key (fn NONE => [x] | SOME xs => x :: xs) tab; - -fun insert_list eq (key, x) = - modify key (fn NONE => [x] | SOME xs => if Library.member eq xs x then raise SAME else x :: xs); - -fun remove_list eq (key, x) tab = - map_entry key (fn xs => (case Library.remove eq x xs of [] => raise UNDEF key | ys => ys)) tab - handle UNDEF _ => delete key tab; - -fun update_list eq (key, x) = - modify key (fn NONE => [x] | SOME [] => [x] | SOME (xs as y :: _) => - if eq (x, y) then raise SAME else Library.update eq x xs); - -fun make_list args = fold_rev cons_list args empty; -fun dest_list tab = maps (fn (key, xs) => map (pair key) xs) (dest tab); -fun merge_list eq = join (fn _ => Library.merge eq); - - -(* unit tables *) - -type set = unit table; - -fun make_set entries = fold (fn x => update_new (x, ())) entries empty; - - -(* ML pretty-printing *) - -val _ = - PolyML.addPrettyPrinter (fn depth => fn pretty => fn tab => - ml_pretty - (ML_Pretty.enum "," "{" "}" - (ML_Pretty.pair (pretty_ml o PolyML.prettyRepresentation) (pretty_ml o pretty)) - (dest tab, depth))); - - -(*final declarations of this structure!*) -val map = map_table; -val fold = fold_table; -val fold_rev = fold_rev_table; - -end; - -structure Inttab = Table(type key = int val ord = int_ord); -structure Symtab = Table(type key = string val ord = fast_string_ord); -structure Symreltab = Table(type key = string * string - val ord = prod_ord fast_string_ord fast_string_ord); - diff --git a/core/Pure/General/time.scala b/core/Pure/General/time.scala deleted file mode 100644 index 56c6fbe5..00000000 --- a/core/Pure/General/time.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* Title: Pure/General/time.scala - Module: PIDE - Author: Makarius - -Time based on milliseconds. 
-*/ - -package isabelle - - -import java.util.Locale - - -object Time -{ - def seconds(s: Double): Time = new Time((s * 1000.0).round) - def ms(m: Long): Time = new Time(m) - val zero: Time = ms(0) - def now(): Time = ms(System.currentTimeMillis()) - - def print_seconds(s: Double): String = - String.format(Locale.ROOT, "%.3f", s.asInstanceOf[AnyRef]) -} - -final class Time private(val ms: Long) extends AnyVal -{ - def seconds: Double = ms / 1000.0 - - def + (t: Time): Time = new Time(ms + t.ms) - def - (t: Time): Time = new Time(ms - t.ms) - - def < (t: Time): Boolean = ms < t.ms - def <= (t: Time): Boolean = ms <= t.ms - def > (t: Time): Boolean = ms > t.ms - def >= (t: Time): Boolean = ms >= t.ms - - def min(t: Time): Time = if (this < t) this else t - def max(t: Time): Time = if (this > t) this else t - - def is_zero: Boolean = ms == 0 - def is_relevant: Boolean = ms >= 1 - - override def toString = Time.print_seconds(seconds) - - def message: String = toString + "s" -} - diff --git a/core/Pure/General/timing.ML b/core/Pure/General/timing.ML deleted file mode 100644 index af8c7051..00000000 --- a/core/Pure/General/timing.ML +++ /dev/null @@ -1,120 +0,0 @@ -(* Title: Pure/General/timing.ML - Author: Makarius - -Basic support for time measurement. -*) - -signature BASIC_TIMING = -sig - val cond_timeit: bool -> string -> (unit -> 'a) -> 'a - val timeit: (unit -> 'a) -> 'a - val timeap: ('a -> 'b) -> 'a -> 'b - val timeap_msg: string -> ('a -> 'b) -> 'a -> 'b -end - -signature TIMING = -sig - include BASIC_TIMING - type timing = {elapsed: Time.time, cpu: Time.time, gc: Time.time} - type start - val start: unit -> start - val result: start -> timing - val timing: ('a -> 'b) -> 'a -> timing * 'b - val is_relevant_time: Time.time -> bool - val is_relevant: timing -> bool - val message: timing -> string - val protocol_message: Properties.T -> timing -> unit - val protocol: Properties.T -> ('a -> 'b) -> 'a -> 'b -end - -structure Timing: TIMING = -struct - -(* type timing *) - -type timing = {elapsed: Time.time, cpu: Time.time, gc: Time.time}; - - -(* timer control *) - -abstype start = Start of - Timer.real_timer * Time.time * Timer.cpu_timer * - {gc: {sys: Time.time, usr: Time.time}, nongc: {sys: Time.time, usr: Time.time}} -with - -fun start () = - let - val real_timer = Timer.startRealTimer (); - val real_time = Timer.checkRealTimer real_timer; - val cpu_timer = Timer.startCPUTimer (); - val cpu_times = Timer.checkCPUTimes cpu_timer; - in Start (real_timer, real_time, cpu_timer, cpu_times) end; - -fun result (Start (real_timer, real_time, cpu_timer, cpu_times)) = - let - val real_time2 = Timer.checkRealTimer real_timer; - val {nongc = {sys, usr}, gc = {sys = gc_sys, usr = gc_usr}} = cpu_times; - val {nongc = {sys = sys2, usr = usr2}, gc = {sys = gc_sys2, usr = gc_usr2}} = - Timer.checkCPUTimes cpu_timer; - - open Time; - val elapsed = real_time2 - real_time; - val gc = gc_usr2 - gc_usr + gc_sys2 - gc_sys; - val cpu = usr2 - usr + sys2 - sys + gc; - in {elapsed = elapsed, cpu = cpu, gc = gc} end; - -end; - -fun timing f x = - let - val start = start (); - val y = f x; - in (result start, y) end; - - -(* timing messages *) - -val min_time = Time.fromMilliseconds 1; - -fun is_relevant_time time = Time.>= (time, min_time); - -fun is_relevant {elapsed, cpu, gc} = - is_relevant_time elapsed orelse - is_relevant_time cpu orelse - is_relevant_time gc; - -fun message {elapsed, cpu, gc} = - Time.toString elapsed ^ "s elapsed time, " ^ - Time.toString cpu ^ "s cpu time, " ^ - Time.toString gc ^ "s GC 
time" handle Time.Time => ""; - -fun cond_timeit enabled msg e = - if enabled then - let - val (t, result) = timing (Exn.interruptible_capture e) (); - val _ = - if is_relevant t then - let val end_msg = message t - in warning (if msg = "" then end_msg else msg ^ "\n" ^ end_msg) end - else (); - in Exn.release result end - else e (); - -fun timeit e = cond_timeit true "" e; -fun timeap f x = timeit (fn () => f x); -fun timeap_msg msg f x = cond_timeit true msg (fn () => f x); - -fun protocol_message props t = - Output.try_protocol_message (props @ Markup.timing_properties t) []; - -fun protocol props f x = - let - val (t, result) = timing (Exn.interruptible_capture f) x; - val _ = protocol_message props t; - in Exn.release result end; - -end; - -structure Basic_Timing: BASIC_TIMING = Timing; -open Basic_Timing; - diff --git a/core/Pure/General/timing.scala b/core/Pure/General/timing.scala deleted file mode 100644 index 7c346305..00000000 --- a/core/Pure/General/timing.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* Title: Pure/General/timing.scala - Module: PIDE - Author: Makarius - -Basic support for time measurement. -*/ - -package isabelle - - -object Timing -{ - val zero = Timing(Time.zero, Time.zero, Time.zero) - - def timeit[A](message: String, enabled: Boolean = true)(e: => A) = - if (enabled) { - val start = Time.now() - val result = Exn.capture(e) - val stop = Time.now() - - val timing = stop - start - if (timing.is_relevant) - Output.warning( - (if (message == null || message.isEmpty) "" else message + ": ") + - timing.message + " elapsed time") - - Exn.release(result) - } - else e -} - -sealed case class Timing(elapsed: Time, cpu: Time, gc: Time) -{ - def is_relevant: Boolean = elapsed.is_relevant || cpu.is_relevant || gc.is_relevant - - def + (t: Timing): Timing = Timing(elapsed + t.elapsed, cpu + t.cpu, gc + t.gc) - - def message: String = - elapsed.message + " elapsed time, " + cpu.message + " cpu time, " + gc.message + " GC time" - - override def toString = message -} - diff --git a/core/Pure/General/untyped.scala b/core/Pure/General/untyped.scala deleted file mode 100644 index c02913af..00000000 --- a/core/Pure/General/untyped.scala +++ /dev/null @@ -1,23 +0,0 @@ -/* Title: Pure/General/untyped.scala - Module: PIDE - Author: Makarius - -Untyped, unscoped, unchecked access to JVM objects. -*/ - -package isabelle - - -object Untyped -{ - def get(obj: AnyRef, x: String): AnyRef = - { - obj.getClass.getDeclaredFields.find(_.getName == x) match { - case Some(field) => - field.setAccessible(true) - field.get(obj) - case None => error("No field " + quote(x) + " for " + obj) - } - } -} - diff --git a/core/Pure/General/url.ML b/core/Pure/General/url.ML deleted file mode 100644 index 3a392dcf..00000000 --- a/core/Pure/General/url.ML +++ /dev/null @@ -1,91 +0,0 @@ -(* Title: Pure/General/url.ML - Author: Markus Wenzel, TU Muenchen - -Basic URLs, see RFC 1738 and RFC 2396. 
-*) - -signature URL = -sig - datatype T = - File of Path.T | - RemoteFile of string * Path.T | - Http of string * Path.T | - Ftp of string * Path.T - val append: T -> T -> T - val implode: T -> string - val explode: string -> T - val pretty: T -> Pretty.T - val print: T -> string -end; - -structure Url: URL = -struct - -(* type url *) - -datatype T = - File of Path.T | - RemoteFile of string * Path.T | - Http of string * Path.T | - Ftp of string * Path.T; - - -(* append *) - -fun append (File p) (File p') = File (Path.append p p') - | append (RemoteFile (h, p)) (File p') = RemoteFile (h, Path.append p p') - | append (Http (h, p)) (File p') = Http (h, Path.append p p') - | append (Ftp (h, p)) (File p') = Ftp (h, Path.append p p') - | append _ url = url; - - -(* implode *) - -fun implode_path p = if Path.is_current p then "" else Path.implode p; - -fun implode_url (File p) = implode_path p - | implode_url (RemoteFile (h, p)) = "file://" ^ h ^ implode_path p - | implode_url (Http (h, p)) = "http://" ^ h ^ implode_path p - | implode_url (Ftp (h, p)) = "ftp://" ^ h ^ implode_path p; - - -(* explode *) - -local - -val scan_host = - (Scan.many1 (fn s => s <> "/" andalso Symbol.is_regular s) >> implode) --| - Scan.ahead ($$ "/" || Scan.one Symbol.is_eof); - -val scan_path = Scan.many Symbol.is_regular >> (Path.explode o implode); -val scan_path_root = Scan.many Symbol.is_regular >> (Path.explode o implode o cons "/"); - -val scan_url = - Scan.unless (Scan.this_string "file:" || - Scan.this_string "http:" || Scan.this_string "ftp:") scan_path >> File || - Scan.this_string "file:///" |-- scan_path_root >> File || - Scan.this_string "file://localhost/" |-- scan_path_root >> File || - Scan.this_string "file://" |-- scan_host -- scan_path >> RemoteFile || - Scan.this_string "file:/" |-- scan_path_root >> File || - Scan.this_string "http://" |-- scan_host -- scan_path >> Http || - Scan.this_string "ftp://" |-- scan_host -- scan_path >> Ftp; - -in - -fun explode_url s = Symbol.scanner "Malformed URL" scan_url (Symbol.explode s); - -end; - - -(* print *) - -val pretty = Pretty.mark_str o `Markup.url o implode_url; - -val print = Pretty.str_of o pretty; - - -(*final declarations of this structure!*) -val implode = implode_url; -val explode = explode_url; - -end; diff --git a/core/Pure/General/url.scala b/core/Pure/General/url.scala deleted file mode 100644 index 7ec3d3ef..00000000 --- a/core/Pure/General/url.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* Title: Pure/General/url.scala - Author: Makarius - -Basic URL operations. -*/ - -package isabelle - - -import java.net.{URL, MalformedURLException} - - -object Url -{ - def apply(name: String): URL = - { - try { new URL(name) } - catch { case _: MalformedURLException => error("Malformed URL " + quote(name)) } - } - - def is_wellformed(name: String): Boolean = - try { Url(name); true } - catch { case ERROR(_) => false } - - def is_readable(name: String): Boolean = - try { Url(name).openStream.close; true } - catch { case ERROR(_) => false } - - def read(name: String): String = - { - val stream = Url(name).openStream - try { File.read_stream(stream) } - finally { stream.close } - } -} - diff --git a/core/Pure/General/word.scala b/core/Pure/General/word.scala deleted file mode 100644 index 5b1623d4..00000000 --- a/core/Pure/General/word.scala +++ /dev/null @@ -1,91 +0,0 @@ -/* Title: Pure/General/word.scala - Module: PIDE - Author: Makarius - -Support for words within Unicode text. 
-*/ - -package isabelle - - -import java.util.Locale - - -object Word -{ - /* codepoints */ - - def codepoint_iterator(str: String): Iterator[Int] = - new Iterator[Int] { - var offset = 0 - def hasNext: Boolean = offset < str.length - def next: Int = - { - val c = str.codePointAt(offset) - offset += Character.charCount(c) - c - } - } - - def codepoint(c: Int): String = new String(Array(c), 0, 1) - - - /* case */ - - def lowercase(str: String): String = str.toLowerCase(Locale.ROOT) - def uppercase(str: String): String = str.toUpperCase(Locale.ROOT) - - def capitalize(str: String): String = - if (str.length == 0) str - else { - val n = Character.charCount(str.codePointAt(0)) - uppercase(str.substring(0, n)) + lowercase(str.substring(n)) - } - - def perhaps_capitalize(str: String): String = - if (codepoint_iterator(str).forall(c => Character.isLowerCase(c) || Character.isDigit(c))) - capitalize(str) - else str - - sealed abstract class Case - case object Lowercase extends Case - case object Uppercase extends Case - case object Capitalized extends Case - - object Case - { - def apply(c: Case, str: String): String = - c match { - case Lowercase => lowercase(str) - case Uppercase => uppercase(str) - case Capitalized => capitalize(str) - } - def unapply(str: String): Option[Case] = - if (!str.isEmpty) { - if (codepoint_iterator(str).forall(Character.isLowerCase(_))) Some(Lowercase) - else if (codepoint_iterator(str).forall(Character.isUpperCase(_))) Some(Uppercase) - else { - val it = codepoint_iterator(str) - if (Character.isUpperCase(it.next) && it.forall(Character.isLowerCase(_))) - Some(Capitalized) - else None - } - } - else None - } - - - /* sequence of words */ - - def implode(words: Iterable[String]): String = words.iterator.mkString(" ") - - def explode(sep: Char => Boolean, text: String): List[String] = - Library.separated_chunks(sep, text).map(_.toString).filter(_ != "").toList - - def explode(sep: Char, text: String): List[String] = - explode(_ == sep, text) - - def explode(text: String): List[String] = - explode(Character.isWhitespace(_), text) -} - diff --git a/core/Pure/General/xz_file.scala b/core/Pure/General/xz_file.scala deleted file mode 100644 index 93d94b55..00000000 --- a/core/Pure/General/xz_file.scala +++ /dev/null @@ -1,31 +0,0 @@ -/* Title: Pure/General/xz_file.scala - Author: Makarius - -XZ file system operations. -*/ - -package isabelle - - -import java.io.{BufferedOutputStream, OutputStream, FileInputStream, BufferedInputStream, - File => JFile} - -import org.tukaani.xz.{LZMA2Options, XZInputStream, XZOutputStream} - - -object XZ_File -{ - def read(file: JFile): String = - File.read_stream(new XZInputStream(new BufferedInputStream(new FileInputStream(file)))) - - def read(path: Path): String = read(path.file) - - def write(file: JFile, text: Iterable[CharSequence], preset: Int = 3) - { - val options = new LZMA2Options - options.setPreset(preset) - File.write_file(file, text, (s: OutputStream) => - new XZOutputStream(new BufferedOutputStream(s), options)) - } -} - diff --git a/core/Pure/Isar/args.ML b/core/Pure/Isar/args.ML deleted file mode 100644 index ada95ee8..00000000 --- a/core/Pure/Isar/args.ML +++ /dev/null @@ -1,307 +0,0 @@ -(* Title: Pure/Isar/args.ML - Author: Markus Wenzel, TU Muenchen - -Parsing with implicit value assignment. Concrete argument syntax of -attributes, methods etc. 
-*) - -signature ARGS = -sig - type src - val src: xstring * Position.T -> Token.T list -> src - val name_of_src: src -> string * Position.T - val range_of_src: src -> Position.T - val unparse_src: src -> string list - val pretty_src: Proof.context -> src -> Pretty.T - val check_src: Proof.context -> 'a Name_Space.table -> src -> src * 'a - val transform_values: morphism -> src -> src - val init_assignable: src -> src - val closure: src -> src - val context: Proof.context context_parser - val theory: theory context_parser - val $$$ : string -> string parser - val add: string parser - val del: string parser - val colon: string parser - val query: string parser - val bang: string parser - val query_colon: string parser - val bang_colon: string parser - val parens: 'a parser -> 'a parser - val bracks: 'a parser -> 'a parser - val mode: string -> bool parser - val maybe: 'a parser -> 'a option parser - val cartouche_inner_syntax: string parser - val cartouche_source_position: Symbol_Pos.source parser - val text_source_position: Symbol_Pos.source parser - val text: string parser - val name_inner_syntax: string parser - val name_source_position: Symbol_Pos.source parser - val name: string parser - val binding: binding parser - val alt_name: string parser - val symbol: string parser - val liberal_name: string parser - val var: indexname parser - val internal_text: string parser - val internal_typ: typ parser - val internal_term: term parser - val internal_fact: thm list parser - val internal_attribute: (morphism -> attribute) parser - val named_text: (string -> string) -> string parser - val named_typ: (string -> typ) -> typ parser - val named_term: (string -> term) -> term parser - val named_fact: (string -> thm list) -> thm list parser - val named_attribute: - (string * Position.T -> morphism -> attribute) -> (morphism -> attribute) parser - val typ_abbrev: typ context_parser - val typ: typ context_parser - val term: term context_parser - val term_pattern: term context_parser - val term_abbrev: term context_parser - val prop: term context_parser - val type_name: {proper: bool, strict: bool} -> string context_parser - val const: {proper: bool, strict: bool} -> string context_parser - val goal_spec: ((int -> tactic) -> tactic) context_parser - val attribs: (xstring * Position.T -> string) -> src list parser - val opt_attribs: (xstring * Position.T -> string) -> src list parser - val syntax_generic: 'a context_parser -> src -> Context.generic -> 'a * Context.generic - val syntax: 'a context_parser -> src -> Proof.context -> 'a * Proof.context -end; - -structure Args: ARGS = -struct - -(** datatype src **) - -datatype src = - Src of - {name: string * Position.T, - args: Token.T list, - output_info: (string * Markup.T) option}; - -fun src name args = Src {name = name, args = args, output_info = NONE}; - -fun name_of_src (Src {name, ...}) = name; - -fun range_of_src (Src {name = (_, pos), args, ...}) = - if null args then pos - else Position.set_range (pos, #2 (Token.range_of args)); - -fun unparse_src (Src {args, ...}) = map Token.unparse args; - -fun pretty_src ctxt src = - let - val Src {name = (name, _), args, output_info} = src; - val prt_name = - (case output_info of - NONE => Pretty.str name - | SOME (_, markup) => Pretty.mark_str (markup, name)); - val prt_thm = Pretty.backquote o Display.pretty_thm ctxt; - fun prt_arg arg = - (case Token.get_value arg of - SOME (Token.Literal markup) => - let val x = Token.content_of arg - in Pretty.mark_str (Token.keyword_markup markup x, x) end - | SOME 
(Token.Text s) => Pretty.str (quote s) - | SOME (Token.Typ T) => Syntax.pretty_typ ctxt T - | SOME (Token.Term t) => Syntax.pretty_term ctxt t - | SOME (Token.Fact ths) => Pretty.enclose "(" ")" (Pretty.breaks (map prt_thm ths)) - | _ => Pretty.mark_str (Token.markup arg, Token.unparse arg)); - in Pretty.block (Pretty.breaks (prt_name :: map prt_arg args)) end; - - -(* check *) - -fun check_src ctxt table (Src {name = (xname, pos), args, output_info = _}) = - let - val (name, x) = Name_Space.check (Context.Proof ctxt) table (xname, pos); - val space = Name_Space.space_of_table table; - val kind = Name_Space.kind_of space; - val markup = Name_Space.markup space name; - in (Src {name = (name, pos), args = args, output_info = SOME (kind, markup)}, x) end; - - -(* values *) - -fun map_args f (Src {name, args, output_info}) = - Src {name = name, args = map f args, output_info = output_info}; - -fun transform_values phi = map_args (Token.map_value - (fn Token.Typ T => Token.Typ (Morphism.typ phi T) - | Token.Term t => Token.Term (Morphism.term phi t) - | Token.Fact ths => Token.Fact (Morphism.fact phi ths) - | Token.Attribute att => Token.Attribute (Morphism.transform phi att) - | tok => tok)); - -val init_assignable = map_args Token.init_assignable; -val closure = map_args Token.closure; - - - -(** argument scanners **) - -(* context *) - -fun context x = (Scan.state >> Context.proof_of) x; -fun theory x = (Scan.state >> Context.theory_of) x; - - -(* basic *) - -val ident = Parse.token - (Parse.short_ident || Parse.long_ident || Parse.sym_ident || Parse.term_var || - Parse.type_ident || Parse.type_var || Parse.number); - -val string = Parse.token Parse.string; -val alt_string = Parse.token (Parse.alt_string || Parse.cartouche); -val symbolic = Parse.token (Parse.keyword_with Token.ident_or_symbolic); - -fun $$$ x = - (ident || Parse.token Parse.keyword) :|-- (fn tok => - let val y = Token.content_of tok in - if x = y - then (Token.assign (SOME (Token.Literal (false, Markup.quasi_keyword))) tok; Scan.succeed x) - else Scan.fail - end); - -val named = ident || string; - -val add = $$$ "add"; -val del = $$$ "del"; -val colon = $$$ ":"; -val query = $$$ "?"; -val bang = $$$ "!"; -val query_colon = $$$ "?" ^^ $$$ ":"; -val bang_colon = $$$ "!" 
^^ $$$ ":"; - -fun parens scan = $$$ "(" |-- scan --| $$$ ")"; -fun bracks scan = $$$ "[" |-- scan --| $$$ "]"; -fun mode s = Scan.optional (parens ($$$ s) >> K true) false; -fun maybe scan = $$$ "_" >> K NONE || scan >> SOME; - -val cartouche = Parse.token Parse.cartouche; -val cartouche_inner_syntax = cartouche >> Token.inner_syntax_of; -val cartouche_source_position = cartouche >> Token.source_position_of; - -val text_token = named || Parse.token (Parse.verbatim || Parse.cartouche); -val text_source_position = text_token >> Token.source_position_of; -val text = text_token >> Token.content_of; - -val name_inner_syntax = named >> Token.inner_syntax_of; -val name_source_position = named >> Token.source_position_of; - -val name = named >> Token.content_of; -val binding = Parse.position name >> Binding.make; -val alt_name = alt_string >> Token.content_of; -val symbol = symbolic >> Token.content_of; -val liberal_name = symbol || name; - -val var = (ident >> Token.content_of) :|-- (fn x => - (case Lexicon.read_variable x of SOME v => Scan.succeed v | NONE => Scan.fail)); - - -(* values *) - -fun value dest = Scan.some (fn arg => - (case Token.get_value arg of SOME v => (SOME (dest v) handle Match => NONE) | NONE => NONE)); - -fun evaluate mk eval arg = - let val x = eval arg in (Token.assign (SOME (mk x)) arg; x) end; - -val internal_text = value (fn Token.Text s => s); -val internal_typ = value (fn Token.Typ T => T); -val internal_term = value (fn Token.Term t => t); -val internal_fact = value (fn Token.Fact ths => ths); -val internal_attribute = value (fn Token.Attribute att => att); - -fun named_text intern = internal_text || named >> evaluate Token.Text (intern o Token.content_of); -fun named_typ readT = internal_typ || named >> evaluate Token.Typ (readT o Token.inner_syntax_of); -fun named_term read = internal_term || named >> evaluate Token.Term (read o Token.inner_syntax_of); - -fun named_fact get = internal_fact || named >> evaluate Token.Fact (get o Token.content_of) || - alt_string >> evaluate Token.Fact (get o Token.inner_syntax_of); - -fun named_attribute att = - internal_attribute || - named >> evaluate Token.Attribute (fn tok => att (Token.content_of tok, Token.pos_of tok)); - - -(* terms and types *) - -val typ_abbrev = Scan.peek (named_typ o Proof_Context.read_typ_abbrev o Context.proof_of); -val typ = Scan.peek (named_typ o Syntax.read_typ o Context.proof_of); -val term = Scan.peek (named_term o Syntax.read_term o Context.proof_of); -val term_pattern = Scan.peek (named_term o Proof_Context.read_term_pattern o Context.proof_of); -val term_abbrev = Scan.peek (named_term o Proof_Context.read_term_abbrev o Context.proof_of); -val prop = Scan.peek (named_term o Syntax.read_prop o Context.proof_of); - - -(* type and constant names *) - -fun type_name flags = - Scan.peek (named_typ o Proof_Context.read_type_name flags o Context.proof_of) - >> (fn Type (c, _) => c | TFree (a, _) => a | _ => ""); - -fun const flags = - Scan.peek (named_term o Proof_Context.read_const flags o Context.proof_of) - >> (fn Const (c, _) => c | Free (x, _) => x | _ => ""); - - -(* improper method arguments *) - -val from_to = - Parse.nat -- ($$$ "-" |-- Parse.nat) >> (fn (i, j) => fn tac => Seq.INTERVAL tac i j) || - Parse.nat --| $$$ "-" >> (fn i => fn tac => fn st => Seq.INTERVAL tac i (Thm.nprems_of st) st) || - Parse.nat >> (fn i => fn tac => tac i) || - $$$ "!" >> K ALLGOALS; - -val goal = Parse.keyword_improper "[" |-- Parse.!!! 
(from_to --| Parse.keyword_improper "]"); -fun goal_spec x = Scan.lift (Scan.optional goal (fn tac => tac 1)) x; - - -(* attributes *) - -fun attribs check = - let - fun intern tok = check (Token.content_of tok, Token.pos_of tok); - val attrib_name = internal_text || (symbolic || named) >> evaluate Token.Text intern; - val attrib = Parse.position attrib_name -- Parse.!!! Parse.args >> uncurry src; - in $$$ "[" |-- Parse.!!! (Parse.list attrib --| $$$ "]") end; - -fun opt_attribs check = Scan.optional (attribs check) []; - - - -(** syntax wrapper **) - -fun syntax_generic scan (Src {name = (name, pos), args = args0, output_info}) context = - let - val args1 = map Token.init_assignable args0; - fun reported_text () = - if Context_Position.is_visible_generic context then - ((pos, Markup.operator) :: maps (Token.reports_of_value o Token.closure) args1) - |> map (fn (p, m) => Position.reported_text p m "") - else []; - in - (case Scan.error (Scan.finite' Token.stopper (Scan.option scan)) (context, args1) of - (SOME x, (context', [])) => - let val _ = Output.report (reported_text ()) - in (x, context') end - | (_, (_, args2)) => - let - val print_name = - (case output_info of - NONE => quote name - | SOME (kind, markup) => plain_words kind ^ " " ^ quote (Markup.markup markup name)); - val print_args = - if null args2 then "" else ":\n " ^ space_implode " " (map Token.print args2); - in - error ("Bad arguments for " ^ print_name ^ Position.here pos ^ print_args ^ - Markup.markup_report (implode (reported_text ()))) - end) - end; - -fun syntax scan src = apsnd Context.the_proof o syntax_generic scan src o Context.Proof; - -end; diff --git a/core/Pure/Isar/attrib.ML b/core/Pure/Isar/attrib.ML deleted file mode 100644 index f71a09c0..00000000 --- a/core/Pure/Isar/attrib.ML +++ /dev/null @@ -1,482 +0,0 @@ -(* Title: Pure/Isar/attrib.ML - Author: Markus Wenzel, TU Muenchen - -Symbolic representation of attributes -- with name and syntax. 
-*) - -signature ATTRIB = -sig - type src = Args.src - type binding = binding * src list - val empty_binding: binding - val is_empty_binding: binding -> bool - val print_attributes: Proof.context -> unit - val check_name_generic: Context.generic -> xstring * Position.T -> string - val check_name: Proof.context -> xstring * Position.T -> string - val check_src: Proof.context -> src -> src - val pretty_attribs: Proof.context -> src list -> Pretty.T list - val attribute: Proof.context -> src -> attribute - val attribute_global: theory -> src -> attribute - val attribute_cmd: Proof.context -> src -> attribute - val attribute_cmd_global: theory -> src -> attribute - val map_specs: ('a list -> 'att list) -> - (('c * 'a list) * 'b) list -> (('c * 'att list) * 'b) list - val map_facts: ('a list -> 'att list) -> - (('c * 'a list) * ('d * 'a list) list) list -> - (('c * 'att list) * ('d * 'att list) list) list - val map_facts_refs: ('a list -> 'att list) -> ('b -> 'fact) -> - (('c * 'a list) * ('b * 'a list) list) list -> - (('c * 'att list) * ('fact * 'att list) list) list - val global_notes: string -> (binding * (thm list * src list) list) list -> - theory -> (string * thm list) list * theory - val local_notes: string -> (binding * (thm list * src list) list) list -> - Proof.context -> (string * thm list) list * Proof.context - val generic_notes: string -> (binding * (thm list * src list) list) list -> - Context.generic -> (string * thm list) list * Context.generic - val eval_thms: Proof.context -> (Facts.ref * src list) list -> thm list - val setup: Binding.binding -> attribute context_parser -> string -> theory -> theory - val attribute_setup: bstring * Position.T -> Symbol_Pos.source -> string -> theory -> theory - val internal: (morphism -> attribute) -> src - val add_del: attribute -> attribute -> attribute context_parser - val thm_sel: Facts.interval list parser - val thm: thm context_parser - val thms: thm list context_parser - val multi_thm: thm list context_parser - val partial_evaluation: Proof.context -> - (binding * (thm list * Args.src list) list) list -> - (binding * (thm list * Args.src list) list) list - val print_options: Proof.context -> unit - val config_bool: Binding.binding -> - (Context.generic -> bool) -> bool Config.T * (theory -> theory) - val config_int: Binding.binding -> - (Context.generic -> int) -> int Config.T * (theory -> theory) - val config_real: Binding.binding -> - (Context.generic -> real) -> real Config.T * (theory -> theory) - val config_string: Binding.binding -> - (Context.generic -> string) -> string Config.T * (theory -> theory) - val setup_config_bool: Binding.binding -> (Context.generic -> bool) -> bool Config.T - val setup_config_int: Binding.binding -> (Context.generic -> int) -> int Config.T - val setup_config_real: Binding.binding -> (Context.generic -> real) -> real Config.T - val setup_config_string: Binding.binding -> (Context.generic -> string) -> string Config.T - val option_bool: string * Position.T -> bool Config.T * (theory -> theory) - val option_int: string * Position.T -> int Config.T * (theory -> theory) - val option_real: string * Position.T -> real Config.T * (theory -> theory) - val option_string: string * Position.T -> string Config.T * (theory -> theory) - val setup_option_bool: string * Position.T -> bool Config.T - val setup_option_int: string * Position.T -> int Config.T - val setup_option_real: string * Position.T -> real Config.T - val setup_option_string: string * Position.T -> string Config.T -end; - -structure Attrib: ATTRIB 
= -struct - -(* source and bindings *) - -type src = Args.src; - -type binding = binding * src list; - -val empty_binding: binding = (Binding.empty, []); -fun is_empty_binding ((b, srcs): binding) = Binding.is_empty b andalso null srcs; - - - -(** named attributes **) - -(* theory data *) - -structure Attributes = Theory_Data -( - type T = ((src -> attribute) * string) Name_Space.table; - val empty : T = Name_Space.empty_table "attribute"; - val extend = I; - fun merge data : T = Name_Space.merge_tables data; -); - -val get_attributes = Attributes.get o Context.theory_of; - -fun print_attributes ctxt = - let - val attribs = get_attributes (Context.Proof ctxt); - fun prt_attr (name, (_, "")) = Pretty.mark_str name - | prt_attr (name, (_, comment)) = - Pretty.block - (Pretty.mark_str name :: Pretty.str ":" :: Pretty.brk 2 :: Pretty.text comment); - in - [Pretty.big_list "attributes:" (map prt_attr (Name_Space.markup_table ctxt attribs))] - |> Pretty.writeln_chunks - end; - -val attribute_space = Name_Space.space_of_table o get_attributes o Context.Proof; - -fun add_attribute name att comment thy = thy - |> Attributes.map (Name_Space.define (Context.Theory thy) true (name, (att, comment)) #> snd); - - -(* check *) - -fun check_name_generic context = #1 o Name_Space.check context (get_attributes context); -val check_name = check_name_generic o Context.Proof; - -fun check_src ctxt src = - (Context_Position.report ctxt (Args.range_of_src src) Markup.language_attribute; - #1 (Args.check_src ctxt (get_attributes (Context.Proof ctxt)) src)); - - -(* pretty printing *) - -fun pretty_attribs _ [] = [] - | pretty_attribs ctxt srcs = [Pretty.enum "," "[" "]" (map (Args.pretty_src ctxt) srcs)]; - - -(* get attributes *) - -fun attribute_generic context = - let val table = get_attributes context - in fn src => #1 (Name_Space.get table (#1 (Args.name_of_src src))) src end; - -val attribute = attribute_generic o Context.Proof; -val attribute_global = attribute_generic o Context.Theory; - -fun attribute_cmd ctxt = attribute ctxt o check_src ctxt; -fun attribute_cmd_global thy = attribute_global thy o check_src (Proof_Context.init_global thy); - - -(* attributed declarations *) - -fun map_specs f = map (apfst (apsnd f)); - -fun map_facts f = map (apfst (apsnd f) o apsnd (map (apsnd f))); -fun map_facts_refs f g = map_facts f #> map (apsnd (map (apfst g))); - - -(* fact expressions *) - -fun global_notes kind facts thy = thy |> - Global_Theory.note_thmss kind (map_facts (map (attribute_global thy)) facts); - -fun local_notes kind facts ctxt = ctxt |> - Proof_Context.note_thmss kind (map_facts (map (attribute ctxt)) facts); - -fun generic_notes kind facts context = context |> - Context.mapping_result (global_notes kind facts) (local_notes kind facts); - -fun eval_thms ctxt srcs = ctxt - |> Proof_Context.note_thmss "" - (map_facts_refs (map (attribute_cmd ctxt)) (Proof_Context.get_fact ctxt) - [((Binding.empty, []), srcs)]) - |> fst |> maps snd; - - -(* attribute setup *) - -fun setup name scan = - add_attribute name - (fn src => fn (ctxt, th) => - let val (a, ctxt') = Args.syntax_generic scan src ctxt in a (ctxt', th) end); - -fun attribute_setup name source cmt = - Context.theory_map (ML_Context.expression (#pos source) - "val (name, scan, comment): binding * attribute context_parser * string" - "Context.map_theory (Attrib.setup name scan comment)" - (ML_Lex.read Position.none ("(" ^ ML_Syntax.make_binding name ^ ", ") @ - ML_Lex.read_source false source @ - ML_Lex.read Position.none (", " ^ 
ML_Syntax.print_string cmt ^ ")"))); - - -(* internal attribute *) - -fun internal att = Args.src ("Pure.attribute", Position.none) [Token.mk_attribute att]; - -val _ = Theory.setup - (setup (Binding.make ("attribute", @{here})) - (Scan.lift Args.internal_attribute >> Morphism.form) - "internal attribute"); - - -(* add/del syntax *) - -fun add_del add del = Scan.lift (Args.add >> K add || Args.del >> K del || Scan.succeed add); - - - -(** parsing attributed theorems **) - -val thm_sel = Parse.$$$ "(" |-- Parse.list1 - (Parse.nat --| Parse.minus -- Parse.nat >> Facts.FromTo || - Parse.nat --| Parse.minus >> Facts.From || - Parse.nat >> Facts.Single) --| Parse.$$$ ")"; - -local - -val fact_name = Args.internal_fact >> K "" || Args.name; - -fun gen_thm pick = Scan.depend (fn context => - let - val get = Proof_Context.get_fact_generic context; - val get_fact = get o Facts.Fact; - fun get_named pos name = get (Facts.Named ((name, pos), NONE)); - in - Parse.$$$ "[" |-- Args.attribs (check_name_generic context) --| Parse.$$$ "]" >> (fn srcs => - let - val atts = map (attribute_generic context) srcs; - val (th', context') = fold (uncurry o Thm.apply_attribute) atts (Drule.dummy_thm, context); - in (context', pick ("", Position.none) [th']) end) - || - (Scan.ahead Args.alt_name -- Args.named_fact get_fact - >> (fn (s, fact) => ("", Facts.Fact s, fact)) || - Scan.ahead (Parse.position fact_name) :|-- (fn (name, pos) => - Args.named_fact (get_named pos) -- Scan.option thm_sel - >> (fn (fact, sel) => (name, Facts.Named ((name, pos), sel), fact)))) - -- Args.opt_attribs (check_name_generic context) >> (fn ((name, thmref, fact), srcs) => - let - val ths = Facts.select thmref fact; - val atts = map (attribute_generic context) srcs; - val (ths', context') = - fold_map (curry (fold (uncurry o Thm.apply_attribute) atts)) ths context; - in (context', pick (name, Facts.pos_of_ref thmref) ths') end) - end); - -in - -val thm = gen_thm Facts.the_single; -val multi_thm = gen_thm (K I); -val thms = Scan.repeat multi_thm >> flat; - -end; - - - -(** partial evaluation -- observing rule/declaration/mixed attributes **) - -(*NB: result length may change due to rearrangement of symbolic expression*) - -local - -fun apply_att src (context, th) = - let - val src1 = Args.init_assignable src; - val result = attribute_generic context src1 (context, th); - val src2 = Args.closure src1; - in (src2, result) end; - -fun err msg src = - let val (name, pos) = Args.name_of_src src - in error (msg ^ " " ^ quote name ^ Position.here pos) end; - -fun eval src ((th, dyn), (decls, context)) = - (case (apply_att src (context, th), dyn) of - ((_, (NONE, SOME th')), NONE) => ((th', NONE), (decls, context)) - | ((_, (NONE, SOME _)), SOME _) => err "Mixed dynamic attribute followed by static rule" src - | ((src', (SOME context', NONE)), NONE) => - let - val decls' = - (case decls of - [] => [(th, [src'])] - | (th2, srcs2) :: rest => - if Thm.eq_thm_strict (th, th2) - then ((th2, src' :: srcs2) :: rest) - else (th, [src']) :: (th2, srcs2) :: rest); - in ((th, NONE), (decls', context')) end - | ((src', (opt_context', opt_th')), _) => - let - val context' = the_default context opt_context'; - val th' = the_default th opt_th'; - val dyn' = - (case dyn of - NONE => SOME (th, [src']) - | SOME (dyn_th, srcs) => SOME (dyn_th, src' :: srcs)); - in ((th', dyn'), (decls, context')) end); - -in - -fun partial_evaluation ctxt facts = - (facts, Context.Proof (Context_Position.not_really ctxt)) |-> - fold_map (fn ((b, more_atts), fact) => fn context => - 
let - val (fact', (decls, context')) = - (fact, ([], context)) |-> fold_map (fn (ths, atts) => fn res1 => - (ths, res1) |-> fold_map (fn th => fn res2 => - let - val ((th', dyn'), res3) = fold eval (atts @ more_atts) ((th, NONE), res2); - val th_atts' = - (case dyn' of - NONE => (th', []) - | SOME (dyn_th', atts') => (dyn_th', rev atts')); - in (th_atts', res3) end)) - |>> flat; - val decls' = rev (map (apsnd rev) decls); - val facts' = - if eq_list (eq_fst Thm.eq_thm_strict) (decls', fact') then - [((b, []), map2 (fn (th, atts1) => fn (_, atts2) => (th, atts1 @ atts2)) decls' fact')] - else if null decls' then [((b, []), fact')] - else [(empty_binding, decls'), ((b, []), fact')]; - in (facts', context') end) - |> fst |> flat |> map (apsnd (map (apfst single))) - |> filter_out (fn (b, fact) => is_empty_binding b andalso forall (null o #2) fact); - -end; - - - -(** configuration options **) - -(* naming *) - -structure Configs = Theory_Data -( - type T = Config.raw Symtab.table; - val empty = Symtab.empty; - val extend = I; - fun merge data = Symtab.merge (K true) data; -); - -fun print_options ctxt = - let - fun prt (name, config) = - let val value = Config.get ctxt config in - Pretty.block [Pretty.mark_str name, Pretty.str (": " ^ Config.print_type value ^ " ="), - Pretty.brk 1, Pretty.str (Config.print_value value)] - end; - val space = attribute_space ctxt; - val configs = - Name_Space.markup_entries ctxt space - (Symtab.dest (Configs.get (Proof_Context.theory_of ctxt))); - in Pretty.writeln (Pretty.big_list "configuration options" (map prt configs)) end; - - -(* concrete syntax *) - -local - -val equals = Parse.$$$ "="; - -fun scan_value (Config.Bool _) = - equals -- Args.$$$ "false" >> K (Config.Bool false) || - equals -- Args.$$$ "true" >> K (Config.Bool true) || - Scan.succeed (Config.Bool true) - | scan_value (Config.Int _) = equals |-- Parse.int >> Config.Int - | scan_value (Config.Real _) = equals |-- Parse.real >> Config.Real - | scan_value (Config.String _) = equals |-- Args.name >> Config.String; - -fun scan_config thy config = - let val config_type = Config.get_global thy config - in scan_value config_type >> (K o Thm.declaration_attribute o K o Config.put_generic config) end; - -fun register binding config thy = - let val name = Sign.full_name thy binding in - thy - |> setup binding (Scan.lift (scan_config thy config) >> Morphism.form) "configuration option" - |> Configs.map (Symtab.update (name, config)) - end; - -fun declare make coerce binding default = - let - val name = Binding.name_of binding; - val pos = Binding.pos_of binding; - val config_value = Config.declare (name, pos) (make o default); - val config = coerce config_value; - in (config, register binding config_value) end; - -in - -fun register_config config = - register (Binding.make (Config.name_of config, Config.pos_of config)) config; - -val config_bool = declare Config.Bool Config.bool; -val config_int = declare Config.Int Config.int; -val config_real = declare Config.Real Config.real; -val config_string = declare Config.String Config.string; - -end; - - -(* implicit setup *) - -local - -fun setup_config declare_config binding default = - let - val (config, setup) = declare_config binding default; - val _ = Theory.setup setup; - in config end; - -in - -val setup_config_bool = setup_config config_bool; -val setup_config_int = setup_config config_int; -val setup_config_string = setup_config config_string; -val setup_config_real = setup_config config_real; - -end; - - -(* system options *) - -local - -fun 
declare_option coerce (name, pos) = - let - val config = Config.declare_option (name, pos); - in (coerce config, register_config config) end; - -fun setup_option coerce (name, pos) = - let - val config = Config.declare_option (name, pos); - val _ = Theory.setup (register_config config); - in coerce config end; - -in - -val option_bool = declare_option Config.bool; -val option_int = declare_option Config.int; -val option_real = declare_option Config.real; -val option_string = declare_option Config.string; - -val setup_option_bool = setup_option Config.bool; -val setup_option_int = setup_option Config.int; -val setup_option_real = setup_option Config.real; -val setup_option_string = setup_option Config.string; - -end; - - -(* theory setup *) - -val _ = Theory.setup - (register_config quick_and_dirty_raw #> - register_config Ast.trace_raw #> - register_config Ast.stats_raw #> - register_config Printer.show_brackets_raw #> - register_config Printer.show_sorts_raw #> - register_config Printer.show_types_raw #> - register_config Printer.show_markup_raw #> - register_config Printer.show_structs_raw #> - register_config Printer.show_question_marks_raw #> - register_config Syntax.ambiguity_warning_raw #> - register_config Syntax.ambiguity_limit_raw #> - register_config Syntax_Trans.eta_contract_raw #> - register_config Name_Space.names_long_raw #> - register_config Name_Space.names_short_raw #> - register_config Name_Space.names_unique_raw #> - register_config ML_Options.source_trace_raw #> - register_config ML_Options.exception_trace_raw #> - register_config ML_Options.print_depth_raw #> - register_config Proof_Context.show_abbrevs_raw #> - register_config Goal_Display.goals_limit_raw #> - register_config Goal_Display.show_main_goal_raw #> - register_config Goal_Display.show_consts_raw #> - register_config Display.show_hyps_raw #> - register_config Display.show_tags_raw #> - register_config Pattern.unify_trace_failure_raw #> - register_config Unify.trace_bound_raw #> - register_config Unify.search_bound_raw #> - register_config Unify.trace_simp_raw #> - register_config Unify.trace_types_raw #> - register_config Raw_Simplifier.simp_depth_limit_raw #> - register_config Raw_Simplifier.simp_trace_depth_limit_raw #> - register_config Raw_Simplifier.simp_debug_raw #> - register_config Raw_Simplifier.simp_trace_raw); - -end; diff --git a/core/Pure/Isar/auto_bind.ML b/core/Pure/Isar/auto_bind.ML deleted file mode 100644 index 7a50a84f..00000000 --- a/core/Pure/Isar/auto_bind.ML +++ /dev/null @@ -1,52 +0,0 @@ -(* Title: Pure/Isar/auto_bind.ML - Author: Markus Wenzel, TU Muenchen - -Automatic bindings of Isar text elements. 
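In particular, "?thesis" is bound to the conclusion of the enclosing goal
statement, "?this" to the most recent fact, and "..." to the argument
(typically the right-hand side) of that fact.  A minimal illustrative Isar
fragment, assuming a HOL context (the lemma is invented for illustration and
is not part of this file):

  lemma "(2::nat) + 2 = 4"
  proof -
    show ?thesis by simp   (* ?thesis abbreviates "(2::nat) + 2 = 4" *)
  qed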
-*) - -signature AUTO_BIND = -sig - val thesisN: string - val thisN: string - val assmsN: string - val goal: theory -> term list -> (indexname * term option) list - val facts: theory -> term list -> (indexname * term option) list - val no_facts: (indexname * term option) list -end; - -structure Auto_Bind: AUTO_BIND = -struct - -(** bindings **) - -val thesisN = "thesis"; -val thisN = "this"; -val assmsN = "assms"; - -fun strip_judgment thy = Object_Logic.drop_judgment thy o Logic.strip_assums_concl; - -fun statement_binds thy name prop = - [((name, 0), SOME (fold_rev Term.abs (Logic.strip_params prop) (strip_judgment thy prop)))]; - - -(* goal *) - -fun goal thy [prop] = statement_binds thy thesisN prop - | goal _ _ = [((thesisN, 0), NONE)]; - - -(* facts *) - -fun get_arg thy prop = - (case strip_judgment thy prop of - _ $ t => SOME (fold_rev Term.abs (Logic.strip_params prop) t) - | _ => NONE); - -fun facts _ [] = [] - | facts thy props = - let val prop = List.last props - in [(Syntax_Ext.dddot_indexname, get_arg thy prop)] @ statement_binds thy thisN prop end; - -val no_facts = [(Syntax_Ext.dddot_indexname, NONE), ((thisN, 0), NONE)]; - -end; diff --git a/core/Pure/Isar/bundle.ML b/core/Pure/Isar/bundle.ML deleted file mode 100644 index 00cb2d9a..00000000 --- a/core/Pure/Isar/bundle.ML +++ /dev/null @@ -1,140 +0,0 @@ -(* Title: Pure/Isar/bundle.ML - Author: Makarius - -Bundled declarations (notes etc.). -*) - -signature BUNDLE = -sig - type bundle = (thm list * Args.src list) list - val check: Proof.context -> xstring * Position.T -> string - val get_bundle: Proof.context -> string -> bundle - val get_bundle_cmd: Proof.context -> xstring * Position.T -> bundle - val bundle: binding * (thm list * Args.src list) list -> - (binding * typ option * mixfix) list -> local_theory -> local_theory - val bundle_cmd: binding * (Facts.ref * Args.src list) list -> - (binding * string option * mixfix) list -> local_theory -> local_theory - val includes: string list -> Proof.context -> Proof.context - val includes_cmd: (xstring * Position.T) list -> Proof.context -> Proof.context - val include_: string list -> Proof.state -> Proof.state - val include_cmd: (xstring * Position.T) list -> Proof.state -> Proof.state - val including: string list -> Proof.state -> Proof.state - val including_cmd: (xstring * Position.T) list -> Proof.state -> Proof.state - val context: string list -> Element.context_i list -> generic_theory -> local_theory - val context_cmd: (xstring * Position.T) list -> Element.context list -> - generic_theory -> local_theory - val print_bundles: Proof.context -> unit -end; - -structure Bundle: BUNDLE = -struct - -(* maintain bundles *) - -type bundle = (thm list * Args.src list) list; - -fun transform_bundle phi : bundle -> bundle = - map (fn (fact, atts) => (Morphism.fact phi fact, map (Args.transform_values phi) atts)); - -structure Data = Generic_Data -( - type T = bundle Name_Space.table; - val empty : T = Name_Space.empty_table "bundle"; - val extend = I; - val merge = Name_Space.merge_tables; -); - -val get_bundles = Data.get o Context.Proof; - -fun check ctxt = #1 o Name_Space.check (Context.Proof ctxt) (get_bundles ctxt); - -val get_bundle = Name_Space.get o get_bundles; -fun get_bundle_cmd ctxt = get_bundle ctxt o check ctxt; - - -(* define bundle *) - -local - -fun gen_bundle prep_fact prep_att prep_vars (binding, raw_bundle) fixes lthy = - let - val (_, ctxt') = lthy |> prep_vars fixes |-> Proof_Context.add_fixes; - val bundle0 = raw_bundle - |> map (fn (fact, atts) => 
(prep_fact ctxt' fact, map (prep_att ctxt') atts)); - val bundle = - Attrib.partial_evaluation ctxt' [(Attrib.empty_binding, bundle0)] |> map snd |> flat - |> transform_bundle (Proof_Context.export_morphism ctxt' lthy); - in - lthy |> Local_Theory.declaration {syntax = false, pervasive = true} - (fn phi => fn context => - context |> Data.map - (#2 o Name_Space.define context true - (Morphism.binding phi binding, transform_bundle phi bundle))) - end; - -in - -val bundle = gen_bundle (K I) (K I) Proof_Context.cert_vars; -val bundle_cmd = gen_bundle Proof_Context.get_fact Attrib.check_src Proof_Context.read_vars; - -end; - - -(* include bundles *) - -local - -fun gen_includes get args ctxt = - let val decls = maps (get ctxt) args - in #2 (Attrib.local_notes "" [((Binding.empty, []), decls)] ctxt) end; - -fun gen_context get prep_decl raw_incls raw_elems gthy = - let - val (after_close, lthy) = - gthy |> Context.cases (pair Local_Theory.exit o Named_Target.theory_init) - (pair I o Local_Theory.assert); - val ((_, _, _, lthy'), _) = lthy - |> gen_includes get raw_incls - |> prep_decl ([], []) I raw_elems; - in - lthy' |> Local_Theory.open_target - (Local_Theory.naming_of lthy) (Local_Theory.operations_of lthy) after_close - end; - -in - -val includes = gen_includes get_bundle; -val includes_cmd = gen_includes get_bundle_cmd; - -fun include_ bs = Proof.assert_forward #> Proof.map_context (includes bs) #> Proof.reset_facts; -fun include_cmd bs = - Proof.assert_forward #> Proof.map_context (includes_cmd bs) #> Proof.reset_facts; - -fun including bs = Proof.assert_backward #> Proof.map_context (includes bs); -fun including_cmd bs = Proof.assert_backward #> Proof.map_context (includes_cmd bs); - -val context = gen_context get_bundle Expression.cert_declaration; -val context_cmd = gen_context get_bundle_cmd Expression.read_declaration; - -end; - - -(* print_bundles *) - -fun print_bundles ctxt = - let - val prt_thm = Pretty.backquote o Display.pretty_thm ctxt; - - fun prt_fact (ths, []) = map prt_thm ths - | prt_fact (ths, atts) = Pretty.enclose "(" ")" - (Pretty.breaks (map prt_thm ths)) :: Attrib.pretty_attribs ctxt atts; - - fun prt_bundle (name, bundle) = - Pretty.block (Pretty.keyword1 "bundle" :: Pretty.str " " :: Pretty.mark_str name :: - Pretty.breaks (Pretty.str " =" :: maps prt_fact bundle)); - in - map prt_bundle (Name_Space.markup_table ctxt (get_bundles ctxt)) - end |> Pretty.writeln_chunks; - -end; - diff --git a/core/Pure/Isar/calculation.ML b/core/Pure/Isar/calculation.ML deleted file mode 100644 index 14637341..00000000 --- a/core/Pure/Isar/calculation.ML +++ /dev/null @@ -1,228 +0,0 @@ -(* Title: Pure/Isar/calculation.ML - Author: Markus Wenzel, TU Muenchen - -Generic calculational proofs. 
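For orientation, a minimal Isar sketch of the calculational idiom these
commands support, assuming the standard HOL [trans] rules (the lemma is
invented for illustration and is not part of this file):

  lemma "(1::nat) <= 3"
  proof -
    have "(1::nat) <= 2" by simp
    also have "... <= 3" by simp
    finally show ?thesis .
  qed

"also" composes the accumulated calculation with the current fact via a
registered transitivity rule; "finally" does the same and then exhibits the
result for the concluding statement.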
-*) - -signature CALCULATION = -sig - val print_rules: Proof.context -> unit - val get_calculation: Proof.state -> thm list option - val trans_add: attribute - val trans_del: attribute - val sym_add: attribute - val sym_del: attribute - val symmetric: attribute - val also: thm list option -> bool -> Proof.state -> Proof.state Seq.result Seq.seq - val also_cmd: (Facts.ref * Attrib.src list) list option -> - bool -> Proof.state -> Proof.state Seq.result Seq.seq - val finally: thm list option -> bool -> Proof.state -> Proof.state Seq.result Seq.seq - val finally_cmd: (Facts.ref * Attrib.src list) list option -> bool -> - Proof.state -> Proof.state Seq.result Seq.seq - val moreover: bool -> Proof.state -> Proof.state - val ultimately: bool -> Proof.state -> Proof.state -end; - -structure Calculation: CALCULATION = -struct - -(** calculation data **) - -structure Data = Generic_Data -( - type T = (thm Item_Net.T * thm list) * (thm list * int) option; - val empty = ((Thm.elim_rules, []), NONE); - val extend = I; - fun merge (((trans1, sym1), _), ((trans2, sym2), _)) = - ((Item_Net.merge (trans1, trans2), Thm.merge_thms (sym1, sym2)), NONE); -); - -val get_rules = #1 o Data.get o Context.Proof; - -fun print_rules ctxt = - let - val pretty_thm = Display.pretty_thm_item ctxt; - val (trans, sym) = get_rules ctxt; - in - [Pretty.big_list "transitivity rules:" (map pretty_thm (Item_Net.content trans)), - Pretty.big_list "symmetry rules:" (map pretty_thm sym)] - end |> Pretty.writeln_chunks; - - -(* access calculation *) - -fun get_calculation state = - (case #2 (Data.get (Context.Proof (Proof.context_of state))) of - NONE => NONE - | SOME (thms, lev) => if lev = Proof.level state then SOME thms else NONE); - -val calculationN = "calculation"; - -fun put_calculation calc = - `Proof.level #-> (fn lev => Proof.map_context (Context.proof_map - (Data.map (apsnd (K (Option.map (rpair lev) calc)))))) - #> Proof.put_thms false (calculationN, calc); - - - -(** attributes **) - -(* add/del rules *) - -val trans_add = Thm.declaration_attribute (Data.map o apfst o apfst o Item_Net.update); -val trans_del = Thm.declaration_attribute (Data.map o apfst o apfst o Item_Net.remove); - -val sym_add = - Thm.declaration_attribute (fn th => - (Data.map o apfst o apsnd) (Thm.add_thm th) #> - Thm.attribute_declaration (Context_Rules.elim_query NONE) th); - -val sym_del = - Thm.declaration_attribute (fn th => - (Data.map o apfst o apsnd) (Thm.del_thm th) #> - Thm.attribute_declaration Context_Rules.rule_del th); - - -(* symmetric *) - -val symmetric = Thm.rule_attribute (fn x => fn th => - (case Seq.chop 2 (Drule.multi_resolves [th] (#2 (#1 (Data.get x)))) of - ([th'], _) => Drule.zero_var_indexes th' - | ([], _) => raise THM ("symmetric: no unifiers", 1, [th]) - | _ => raise THM ("symmetric: multiple unifiers", 1, [th]))); - - -(* concrete syntax *) - -val _ = Theory.setup - (Attrib.setup @{binding trans} (Attrib.add_del trans_add trans_del) - "declaration of transitivity rule" #> - Attrib.setup @{binding sym} (Attrib.add_del sym_add sym_del) - "declaration of symmetry rule" #> - Attrib.setup @{binding symmetric} (Scan.succeed symmetric) - "resolution with symmetry rule" #> - Global_Theory.add_thms - [((Binding.empty, transitive_thm), [trans_add]), - ((Binding.empty, symmetric_thm), [sym_add])] #> snd); - - - -(** proof commands **) - -fun assert_sane final = - if final then Proof.assert_forward else Proof.assert_forward_or_chain; - -fun maintain_calculation int final calc state = - let - val state' = put_calculation (SOME 
calc) state; - val ctxt' = Proof.context_of state'; - val _ = - if int then - Proof_Context.pretty_fact ctxt' - (Proof_Context.full_name ctxt' (Binding.name calculationN), calc) - |> Pretty.string_of |> Output.urgent_message - else (); - in state' |> final ? (put_calculation NONE #> Proof.chain_facts calc) end; - - -(* also and finally *) - -fun calculate prep_rules final raw_rules int state = - let - val ctxt = Proof.context_of state; - val pretty_thm = Display.pretty_thm ctxt; - val pretty_thm_item = Display.pretty_thm_item ctxt; - - val strip_assums_concl = Logic.strip_assums_concl o Thm.prop_of; - val eq_prop = op aconv o pairself (Envir.beta_eta_contract o strip_assums_concl); - fun check_projection ths th = - (case find_index (curry eq_prop th) ths of - ~1 => Seq.Result [th] - | i => - Seq.Error (fn () => - (Pretty.string_of o Pretty.chunks) - [Pretty.block [Pretty.str "Vacuous calculation result:", Pretty.brk 1, pretty_thm th], - (Pretty.block o Pretty.fbreaks) - (Pretty.str ("derived as projection (" ^ string_of_int (i + 1) ^ ") from:") :: - map pretty_thm_item ths)])); - - val opt_rules = Option.map (prep_rules ctxt) raw_rules; - fun combine ths = - Seq.append - ((case opt_rules of - SOME rules => rules - | NONE => - (case ths of - [] => Item_Net.content (#1 (get_rules ctxt)) - | th :: _ => Item_Net.retrieve (#1 (get_rules ctxt)) (strip_assums_concl th))) - |> Seq.of_list |> Seq.maps (Drule.multi_resolve ths) - |> Seq.map (check_projection ths)) - (Seq.single (Seq.Error (fn () => - (Pretty.string_of o Pretty.block o Pretty.fbreaks) - (Pretty.str "No matching trans rules for calculation:" :: - map pretty_thm_item ths)))); - - val facts = Proof.the_facts (assert_sane final state); - val (initial, calculations) = - (case get_calculation state of - NONE => (true, Seq.single (Seq.Result facts)) - | SOME calc => (false, combine (calc @ facts))); - - val _ = initial andalso final andalso error "No calculation yet"; - val _ = initial andalso is_some opt_rules andalso - error "Initial calculation -- no rules to be given"; - in - calculations |> Seq.map_result (fn calc => maintain_calculation int final calc state) - end; - -val also = calculate (K I) false; -val also_cmd = calculate Attrib.eval_thms false; -val finally = calculate (K I) true; -val finally_cmd = calculate Attrib.eval_thms true; - - -(* moreover and ultimately *) - -fun collect final int state = - let - val facts = Proof.the_facts (assert_sane final state); - val (initial, thms) = - (case get_calculation state of - NONE => (true, []) - | SOME thms => (false, thms)); - val calc = thms @ facts; - val _ = initial andalso final andalso error "No calculation yet"; - in maintain_calculation int final calc state end; - -val moreover = collect false; -val ultimately = collect true; - - -(* outer syntax *) - -val calc_args = - Scan.option (@{keyword "("} |-- Parse.!!! 
((Parse_Spec.xthms1 --| @{keyword ")"}))); - -val _ = - Outer_Syntax.command @{command_spec "also"} "combine calculation and current facts" - (calc_args >> (Toplevel.proofs' o also_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "finally"} - "combine calculation and current facts, exhibit result" - (calc_args >> (Toplevel.proofs' o finally_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "moreover"} "augment calculation by current facts" - (Scan.succeed (Toplevel.proof' moreover)); - -val _ = - Outer_Syntax.command @{command_spec "ultimately"} - "augment calculation by current facts, exhibit result" - (Scan.succeed (Toplevel.proof' ultimately)); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_trans_rules"} "print transitivity rules" - (Scan.succeed (Toplevel.unknown_context o Toplevel.keep (print_rules o Toplevel.context_of))); - -end; diff --git a/core/Pure/Isar/class.ML b/core/Pure/Isar/class.ML deleted file mode 100644 index 4b7745bf..00000000 --- a/core/Pure/Isar/class.ML +++ /dev/null @@ -1,727 +0,0 @@ -(* Title: Pure/Isar/class.ML - Author: Florian Haftmann, TU Muenchen - -Type classes derived from primitive axclasses and locales. -*) - -signature CLASS = -sig - (*classes*) - val is_class: theory -> class -> bool - val these_params: theory -> sort -> (string * (class * (string * typ))) list - val base_sort: theory -> class -> sort - val rules: theory -> class -> thm option * thm - val these_defs: theory -> sort -> thm list - val these_operations: theory -> sort - -> (string * (class * (typ * term))) list - val print_classes: Proof.context -> unit - val init: class -> theory -> Proof.context - val begin: class list -> sort -> Proof.context -> Proof.context - val const: class -> (binding * mixfix) * term -> term list * term list -> local_theory -> local_theory - val abbrev: class -> Syntax.mode -> (binding * mixfix) * term -> term -> term list * term list -> local_theory -> local_theory - val redeclare_operations: theory -> sort -> Proof.context -> Proof.context - val class_prefix: string -> string - val register: class -> class list -> ((string * typ) * (string * typ)) list - -> sort -> morphism -> morphism -> thm option -> thm option -> thm - -> theory -> theory - - (*instances*) - val instantiation: string list * (string * sort) list * sort -> theory -> local_theory - val instantiation_instance: (local_theory -> local_theory) - -> local_theory -> Proof.state - val prove_instantiation_instance: (Proof.context -> tactic) - -> local_theory -> local_theory - val prove_instantiation_exit: (Proof.context -> tactic) - -> local_theory -> theory - val prove_instantiation_exit_result: (morphism -> 'a -> 'b) - -> (Proof.context -> 'b -> tactic) -> 'a -> local_theory -> 'b * theory - val read_multi_arity: theory -> xstring list * xstring list * xstring - -> string list * (string * sort) list * sort - val instantiation_cmd: xstring list * xstring list * xstring -> theory -> local_theory - val instance_arity_cmd: xstring list * xstring list * xstring -> theory -> Proof.state - - (*subclasses*) - val classrel: class * class -> theory -> Proof.state - val classrel_cmd: xstring * xstring -> theory -> Proof.state - val register_subclass: class * class -> morphism option -> Element.witness option - -> morphism -> local_theory -> local_theory - - (*tactics*) - val intro_classes_tac: thm list -> tactic - val default_intro_tac: Proof.context -> thm list -> tactic -end; - -structure Class: CLASS = -struct - -(** class data **) - -datatype class_data = Class_Data of { - - (* 
static part *) - consts: (string * string) list - (*locale parameter ~> constant name*), - base_sort: sort, - base_morph: morphism - (*static part of canonical morphism*), - export_morph: morphism, - assm_intro: thm option, - of_class: thm, - axiom: thm option, - - (* dynamic part *) - defs: thm list, - operations: (string * (class * (typ * term))) list - - (* n.b. - params = logical parameters of class - operations = operations participating in user-space type system - *) -}; - -fun make_class_data ((consts, base_sort, base_morph, export_morph, assm_intro, of_class, axiom), - (defs, operations)) = - Class_Data {consts = consts, base_sort = base_sort, - base_morph = base_morph, export_morph = export_morph, assm_intro = assm_intro, - of_class = of_class, axiom = axiom, defs = defs, operations = operations}; -fun map_class_data f (Class_Data {consts, base_sort, base_morph, export_morph, assm_intro, - of_class, axiom, defs, operations}) = - make_class_data (f ((consts, base_sort, base_morph, export_morph, assm_intro, of_class, axiom), - (defs, operations))); -fun merge_class_data _ (Class_Data {consts = consts, - base_sort = base_sort, base_morph = base_morph, export_morph = export_morph, assm_intro = assm_intro, - of_class = of_class, axiom = axiom, defs = defs1, operations = operations1}, - Class_Data {consts = _, base_sort = _, base_morph = _, export_morph = _, assm_intro = _, - of_class = _, axiom = _, defs = defs2, operations = operations2}) = - make_class_data ((consts, base_sort, base_morph, export_morph, assm_intro, of_class, axiom), - (Thm.merge_thms (defs1, defs2), - AList.merge (op =) (K true) (operations1, operations2))); - -structure Class_Data = Theory_Data -( - type T = class_data Graph.T - val empty = Graph.empty; - val extend = I; - val merge = Graph.join merge_class_data; -); - - -(* queries *) - -fun lookup_class_data thy class = - (case try (Graph.get_node (Class_Data.get thy)) class of - SOME (Class_Data data) => SOME data - | NONE => NONE); - -fun the_class_data thy class = - (case lookup_class_data thy class of - NONE => error ("Undeclared class " ^ quote class) - | SOME data => data); - -val is_class = is_some oo lookup_class_data; - -val ancestry = Graph.all_succs o Class_Data.get; -val heritage = Graph.all_preds o Class_Data.get; - -fun these_params thy = - let - fun params class = - let - val const_typs = (#params o Axclass.get_info thy) class; - val const_names = (#consts o the_class_data thy) class; - in - (map o apsnd) - (fn c => (class, (c, (the o AList.lookup (op =) const_typs) c))) const_names - end; - in maps params o ancestry thy end; - -val base_sort = #base_sort oo the_class_data; - -fun rules thy class = - let val {axiom, of_class, ...} = the_class_data thy class - in (axiom, of_class) end; - -fun all_assm_intros thy = - Graph.fold (fn (_, (Class_Data {assm_intro, ...}, _)) => fold (insert Thm.eq_thm) - (the_list assm_intro)) (Class_Data.get thy) []; - -fun these_defs thy = maps (#defs o the_class_data thy) o ancestry thy; -fun these_operations thy = maps (#operations o the_class_data thy) o ancestry thy; - -val base_morphism = #base_morph oo the_class_data; - -fun morphism thy class = - (case Element.eq_morphism thy (these_defs thy [class]) of - SOME eq_morph => base_morphism thy class $> eq_morph - | NONE => base_morphism thy class); - -val export_morphism = #export_morph oo the_class_data; - -fun print_classes ctxt = - let - val thy = Proof_Context.theory_of ctxt; - val algebra = Sign.classes_of thy; - - val class_space = Proof_Context.class_space 
ctxt; - val type_space = Proof_Context.type_space ctxt; - val const_space = Proof_Context.const_space ctxt; - - val arities = - Symtab.empty - |> Symtab.fold (fn (tyco, arities) => fold (fn (class, _) => - Symtab.map_default (class, []) (insert (op =) tyco)) arities) - (Sorts.arities_of algebra); - - fun prt_supersort class = - Syntax.pretty_sort ctxt (Sign.minimize_sort thy (Sign.super_classes thy class)); - - fun prt_arity class tyco = - let - val Ss = Sorts.mg_domain algebra tyco [class]; - in Syntax.pretty_arity ctxt (tyco, Ss, [class]) end; - - fun prt_param (c, ty) = - Pretty.block - [Name_Space.pretty ctxt const_space c, Pretty.str " ::", - Pretty.brk 1, Syntax.pretty_typ ctxt (Type.strip_sorts_dummy ty)]; - - fun prt_entry class = - Pretty.block - ([Pretty.keyword1 "class", Pretty.brk 1, - Name_Space.pretty ctxt class_space class, Pretty.str ":", Pretty.fbrk, - Pretty.block [Pretty.str "supersort: ", prt_supersort class]] @ - (case try (Axclass.get_info thy) class of - NONE => [] - | SOME {params, ...} => - [Pretty.fbrk, Pretty.big_list "parameters:" (map prt_param params)]) @ - (case Symtab.lookup arities class of - NONE => [] - | SOME ars => - [Pretty.fbrk, Pretty.big_list "instances:" - (map (prt_arity class) (sort (Name_Space.extern_ord ctxt type_space) ars))])); - in - Sorts.all_classes algebra - |> sort (Name_Space.extern_ord ctxt class_space) - |> map prt_entry - |> Pretty.writeln_chunks2 - end; - - -(* updaters *) - -fun register class sups params base_sort base_morph export_morph - some_axiom some_assm_intro of_class thy = - let - val operations = map (fn (v_ty as (_, ty), (c, _)) => - (c, (class, (ty, Free v_ty)))) params; - val add_class = Graph.new_node (class, - make_class_data (((map o pairself) fst params, base_sort, - base_morph, export_morph, some_assm_intro, of_class, some_axiom), ([], operations))) - #> fold (curry Graph.add_edge class) sups; - in Class_Data.map add_class thy end; - -fun activate_defs class thms thy = - (case Element.eq_morphism thy thms of - SOME eq_morph => fold (fn cls => fn thy => - Context.theory_map (Locale.amend_registration (cls, base_morphism thy cls) - (eq_morph, true) (export_morphism thy cls)) thy) (heritage thy [class]) thy - | NONE => thy); - -fun register_operation class (c, t) thy = - let - val base_sort = base_sort thy class; - val prep_typ = map_type_tfree - (fn (v, sort) => if Name.aT = v - then TFree (v, base_sort) else TVar ((v, 0), sort)); - val t' = map_types prep_typ t; - val ty' = Term.fastype_of t'; - in - thy - |> (Class_Data.map o Graph.map_node class o map_class_data o apsnd o apsnd) - (cons (c, (class, (ty', t')))) - end; - -fun register_def class def_thm thy = - let - val sym_thm = Thm.symmetric def_thm - in - thy - |> (Class_Data.map o Graph.map_node class o map_class_data o apsnd o apfst) - (cons sym_thm) - |> activate_defs class [sym_thm] - end; - - -(** classes and class target **) - -(* class context syntax *) - -fun these_unchecks thy = map (fn (c, (_, (ty, t))) => (t, Const (c, ty))) - o these_operations thy; - -fun redeclare_const thy c = - let val b = Long_Name.base_name c - in Sign.intern_const thy b = c ? 
Variable.declare_const (b, c) end; - -fun synchronize_class_syntax sort base_sort ctxt = - let - val thy = Proof_Context.theory_of ctxt; - val algebra = Sign.classes_of thy; - val operations = these_operations thy sort; - fun subst_class_typ sort = map_type_tfree (K (TVar ((Name.aT, 0), sort))); - val primary_constraints = - (map o apsnd) (subst_class_typ base_sort o fst o snd) operations; - val secondary_constraints = - (map o apsnd) (fn (class, (ty, _)) => subst_class_typ [class] ty) operations; - fun improve (c, ty) = - (case AList.lookup (op =) primary_constraints c of - SOME ty' => - (case try (Type.raw_match (ty', ty)) Vartab.empty of - SOME tyenv => - (case Vartab.lookup tyenv (Name.aT, 0) of - SOME (_, ty' as TVar (vi, sort)) => - if Type_Infer.is_param vi andalso Sorts.sort_le algebra (base_sort, sort) - then SOME (ty', TFree (Name.aT, base_sort)) - else NONE - | _ => NONE) - | NONE => NONE) - | NONE => NONE); - fun subst (c, _) = Option.map snd (AList.lookup (op =) operations c); - val unchecks = these_unchecks thy sort; - in - ctxt - |> fold (redeclare_const thy o fst) primary_constraints - |> Overloading.map_improvable_syntax (K (((primary_constraints, secondary_constraints), - (((improve, subst), true), unchecks)), false)) - |> Overloading.set_primary_constraints - end; - -fun synchronize_class_syntax_target class lthy = - lthy - |> Local_Theory.map_contexts - (K (synchronize_class_syntax [class] (base_sort (Proof_Context.theory_of lthy) class))); - -fun redeclare_operations thy sort = - fold (redeclare_const thy o fst) (these_operations thy sort); - -fun begin sort base_sort ctxt = - ctxt - |> Variable.declare_term - (Logic.mk_type (TFree (Name.aT, base_sort))) - |> synchronize_class_syntax sort base_sort - |> Overloading.activate_improvable_syntax; - -fun init class thy = - thy - |> Locale.init class - |> begin [class] (base_sort thy class); - - -(* class target *) - -val class_prefix = Logic.const_of_class o Long_Name.base_name; - -local - -fun guess_morphism_identity (b, rhs) phi1 phi2 = - let - (*FIXME proper concept to identify morphism instead of educated guess*) - val name_of_binding = Name_Space.full_name Name_Space.default_naming; - val n1 = (name_of_binding o Morphism.binding phi1) b; - val n2 = (name_of_binding o Morphism.binding phi2) b; - val rhs1 = Morphism.term phi1 rhs; - val rhs2 = Morphism.term phi2 rhs; - in n1 = n2 andalso Term.aconv_untyped (rhs1, rhs2) end; - -fun target_const class phi0 prmode ((b, _), rhs) = - let - val guess_identity = guess_morphism_identity (b, rhs) Morphism.identity; - val guess_canonical = guess_morphism_identity (b, rhs) phi0; - in - Generic_Target.locale_target_const class - (not o (guess_identity orf guess_canonical)) prmode ((b, NoSyn), rhs) - end; - -fun dangling_params_for lthy class (type_params, term_params) = - let - val class_param_names = - map fst (these_params (Proof_Context.theory_of lthy) [class]); - val dangling_term_params = - subtract (fn (v, Free (w, _)) => v = w | _ => false) class_param_names term_params; - in (type_params, dangling_term_params) end; - -fun global_def (b, eq) thy = - thy - |> Thm.add_def_global false false (b, eq) - |>> (Thm.varifyT_global o snd) - |-> (fn def_thm => Global_Theory.store_thm (b, def_thm) - #> snd - #> pair def_thm); - -fun canonical_const class phi dangling_params ((b, mx), rhs) thy = - let - val b_def = Binding.suffix_name "_dict" b; - val c = Sign.full_name thy b; - val ty = map Term.fastype_of dangling_params ---> Term.fastype_of rhs; - val def_eq = Logic.mk_equals 
(list_comb (Const (c, ty), dangling_params), rhs) - |> map_types Type.strip_sorts; - in - thy - |> Sign.declare_const_global ((b, Type.strip_sorts ty), mx) - |> snd - |> global_def (b_def, def_eq) - |-> (fn def_thm => register_def class def_thm) - |> null dangling_params ? register_operation class (c, rhs) - |> Sign.add_const_constraint (c, SOME ty) - end; - -fun canonical_abbrev class phi prmode dangling_term_params ((b, mx), rhs) thy = - let - val unchecks = these_unchecks thy [class]; - val rhs' = Pattern.rewrite_term thy unchecks [] rhs; - val c' = Sign.full_name thy b; - val ty' = map Term.fastype_of dangling_term_params ---> Term.fastype_of rhs'; - in - thy - |> Sign.add_abbrev (#1 prmode) (b, Logic.varify_types_global (fold lambda dangling_term_params rhs')) - |> snd - |> Sign.notation true prmode [(Const (c', ty'), mx)] - |> (null dangling_term_params andalso not (#1 prmode = Print_Mode.input)) - ? register_operation class (c', rhs') - |> Sign.add_const_constraint (c', SOME ty') - end; - -in - -fun const class ((b, mx), lhs) params lthy = - let - val phi = morphism (Proof_Context.theory_of lthy) class; - val dangling_params = map (Morphism.term phi) (uncurry append (dangling_params_for lthy class params)); - in - lthy - |> target_const class phi Syntax.mode_default ((b, mx), lhs) - |> Local_Theory.raw_theory (canonical_const class phi dangling_params - ((Morphism.binding phi b, if null dangling_params then mx else NoSyn), Morphism.term phi lhs)) - |> Generic_Target.standard_const (fn (this, other) => other <> 0 andalso this <> other) - Syntax.mode_default ((b, if null dangling_params then NoSyn else mx), lhs) - |> synchronize_class_syntax_target class - end; - -fun abbrev class prmode ((b, mx), lhs) rhs' params lthy = - let - val phi = morphism (Proof_Context.theory_of lthy) class; - val dangling_term_params = map (Morphism.term phi) (snd (dangling_params_for lthy class params)); - in - lthy - |> target_const class phi prmode ((b, mx), lhs) - |> Local_Theory.raw_theory (canonical_abbrev class phi prmode dangling_term_params - ((Morphism.binding phi b, if null dangling_term_params then mx else NoSyn), rhs')) - |> Generic_Target.standard_const (fn (this, other) => other <> 0 andalso this <> other) - prmode ((b, if null dangling_term_params then NoSyn else mx), lhs) - |> synchronize_class_syntax_target class - end; - -end; - - -(* subclasses *) - -fun register_subclass (sub, sup) some_dep_morph some_witn export lthy = - let - val thy = Proof_Context.theory_of lthy; - val intros = (snd o rules thy) sup :: map_filter I - [Option.map (Drule.export_without_context_open o Element.conclude_witness lthy) some_witn, - (fst o rules thy) sub]; - val classrel = - Goal.prove_sorry_global thy [] [] (Logic.mk_classrel (sub, sup)) - (K (EVERY (map (TRYALL o rtac) intros))); - val diff_sort = Sign.complete_sort thy [sup] - |> subtract (op =) (Sign.complete_sort thy [sub]) - |> filter (is_class thy); - fun add_dependency some_wit = case some_dep_morph of - SOME dep_morph => Generic_Target.locale_dependency sub - (sup, dep_morph $> Element.satisfy_morphism (the_list some_witn)) NONE export - | NONE => I; - in - lthy - |> Local_Theory.raw_theory - (Axclass.add_classrel classrel - #> Class_Data.map (Graph.add_edge (sub, sup)) - #> activate_defs sub (these_defs thy diff_sort)) - |> add_dependency some_witn - |> synchronize_class_syntax_target sub - end; - -local - -fun gen_classrel mk_prop classrel thy = - let - fun after_qed results = - Proof_Context.background_theory ((fold o fold) Axclass.add_classrel 
results); - in - thy - |> Proof_Context.init_global - |> Proof.theorem NONE after_qed [[(mk_prop thy classrel, [])]] - end; - -in - -val classrel = - gen_classrel (Logic.mk_classrel oo Axclass.cert_classrel); -val classrel_cmd = - gen_classrel (Logic.mk_classrel oo Axclass.read_classrel); - -end; (*local*) - - -(** instantiation target **) - -(* bookkeeping *) - -datatype instantiation = Instantiation of { - arities: string list * (string * sort) list * sort, - params: ((string * string) * (string * typ)) list - (*(instantiation parameter, type constructor), (local instantiation parameter, typ)*) -} - -structure Instantiation = Proof_Data -( - type T = instantiation; - fun init _ = Instantiation {arities = ([], [], []), params = []}; -); - -fun mk_instantiation (arities, params) = - Instantiation {arities = arities, params = params}; - -val get_instantiation = - (fn Instantiation data => data) o Instantiation.get o Local_Theory.target_of; - -fun map_instantiation f = - (Local_Theory.target o Instantiation.map) - (fn Instantiation {arities, params} => mk_instantiation (f (arities, params))); - -fun the_instantiation lthy = - (case get_instantiation lthy of - {arities = ([], [], []), ...} => error "No instantiation target" - | data => data); - -val instantiation_params = #params o get_instantiation; - -fun instantiation_param lthy b = instantiation_params lthy - |> find_first (fn (_, (v, _)) => Binding.name_of b = v) - |> Option.map (fst o fst); - -fun read_multi_arity thy (raw_tycos, raw_sorts, raw_sort) = - let - val ctxt = Proof_Context.init_global thy; - val all_arities = map (fn raw_tyco => Proof_Context.read_arity ctxt - (raw_tyco, raw_sorts, raw_sort)) raw_tycos; - val tycos = map #1 all_arities; - val (_, sorts, sort) = hd all_arities; - val vs = Name.invent_names Name.context Name.aT sorts; - in (tycos, vs, sort) end; - - -(* syntax *) - -fun synchronize_inst_syntax ctxt = - let - val Instantiation {params, ...} = Instantiation.get ctxt; - - val lookup_inst_param = Axclass.lookup_inst_param - (Sign.consts_of (Proof_Context.theory_of ctxt)) params; - fun subst (c, ty) = - (case lookup_inst_param (c, ty) of - SOME (v_ty as (_, ty)) => SOME (ty, Free v_ty) - | NONE => NONE); - val unchecks = - map (fn ((c, _), v_ty as (_, ty)) => (Free v_ty, Const (c, ty))) params; - in - ctxt - |> Overloading.map_improvable_syntax - (fn (((primary_constraints, _), (((improve, _), _), _)), _) => - (((primary_constraints, []), (((improve, subst), false), unchecks)), false)) - end; - -fun resort_terms ctxt algebra consts constraints ts = - let - fun matchings (Const (c_ty as (c, _))) = - (case constraints c of - NONE => I - | SOME sorts => - fold2 (curry (Sorts.meet_sort algebra)) (Consts.typargs consts c_ty) sorts) - | matchings _ = I; - val tvartab = (fold o fold_aterms) matchings ts Vartab.empty - handle Sorts.CLASS_ERROR e => error (Sorts.class_error (Context.pretty ctxt) e); - val inst = map_type_tvar - (fn (vi, sort) => TVar (vi, the_default sort (Vartab.lookup tvartab vi))); - in if Vartab.is_empty tvartab then ts else (map o map_types) inst ts end; - - -(* target *) - -fun define_overloaded (c, U) v (b_def, rhs) = - Local_Theory.background_theory_result (Axclass.declare_overloaded (c, U) - ##>> Axclass.define_overloaded b_def (c, rhs)) - ##> (map_instantiation o apsnd) (filter_out (fn (_, (v', _)) => v' = v)) - ##> Local_Theory.map_contexts (K synchronize_inst_syntax); - -fun foundation (((b, U), mx), (b_def, rhs)) params lthy = - (case instantiation_param lthy b of - SOME c => - if mx <> NoSyn 
then error ("Illegal mixfix syntax for overloaded constant " ^ quote c) - else lthy |> define_overloaded (c, U) (Binding.name_of b) (b_def, rhs) - | NONE => lthy |> Generic_Target.theory_foundation (((b, U), mx), (b_def, rhs)) params); - -fun pretty lthy = - let - val {arities = (tycos, vs, sort), params} = the_instantiation lthy; - fun pr_arity tyco = Syntax.pretty_arity lthy (tyco, map snd vs, sort); - fun pr_param ((c, _), (v, ty)) = - Pretty.block (Pretty.breaks - [Pretty.str v, Pretty.str "==", Proof_Context.pretty_const lthy c, - Pretty.str "::", Syntax.pretty_typ lthy ty]); - in Pretty.keyword1 "instantiation" :: map pr_arity tycos @ map pr_param params end; - -fun conclude lthy = - let - val (tycos, vs, sort) = #arities (the_instantiation lthy); - val thy = Proof_Context.theory_of lthy; - val _ = tycos |> List.app (fn tyco => - if Sign.of_sort thy (Type (tyco, map TFree vs), sort) then () - else error ("Missing instance proof for type " ^ quote (Proof_Context.markup_type lthy tyco))); - in lthy end; - -fun instantiation (tycos, vs, sort) thy = - let - val naming = Sign.naming_of thy; - - val _ = if null tycos then error "At least one arity must be given" else (); - val class_params = these_params thy (filter (can (Axclass.get_info thy)) sort); - fun get_param tyco (param, (_, (c, ty))) = - if can (Axclass.param_of_inst thy) (c, tyco) - then NONE else SOME ((c, tyco), - (param ^ "_" ^ Long_Name.base_name tyco, map_atyps (K (Type (tyco, map TFree vs))) ty)); - val params = map_product get_param tycos class_params |> map_filter I; - val _ = if null params andalso forall (fn tyco => can (Sign.arity_sorts thy tyco) sort) tycos - then error "No parameters and no pending instance proof obligations in instantiation." - else (); - val primary_constraints = map (apsnd - (map_atyps (K (TVar ((Name.aT, 0), [])))) o snd o snd) class_params; - val algebra = Sign.classes_of thy - |> fold (fn tyco => Sorts.add_arities (Context.pretty_global thy) - (tyco, map (fn class => (class, map snd vs)) sort)) tycos; - val consts = Sign.consts_of thy; - val improve_constraints = AList.lookup (op =) - (map (fn (_, (class, (c, _))) => (c, [[class]])) class_params); - fun resort_check ctxt ts = resort_terms ctxt algebra consts improve_constraints ts; - val lookup_inst_param = Axclass.lookup_inst_param consts params; - fun improve (c, ty) = - (case lookup_inst_param (c, ty) of - SOME (_, ty') => if Sign.typ_instance thy (ty', ty) then SOME (ty, ty') else NONE - | NONE => NONE); - in - thy - |> Sign.change_begin - |> Proof_Context.init_global - |> Instantiation.put (mk_instantiation ((tycos, vs, sort), params)) - |> fold (Variable.declare_typ o TFree) vs - |> fold (Variable.declare_names o Free o snd) params - |> (Overloading.map_improvable_syntax o apfst) - (K ((primary_constraints, []), (((improve, K NONE), false), []))) - |> Overloading.activate_improvable_syntax - |> Context.proof_map (Syntax_Phases.term_check 0 "resorting" resort_check) - |> synchronize_inst_syntax - |> Local_Theory.init naming - {define = Generic_Target.define foundation, - notes = Generic_Target.notes Generic_Target.theory_notes, - abbrev = Generic_Target.abbrev Generic_Target.theory_abbrev, - declaration = K Generic_Target.theory_declaration, - subscription = Generic_Target.theory_registration, - pretty = pretty, - exit = conclude #> Local_Theory.target_of #> Sign.change_end_local} - end; - -fun instantiation_cmd arities thy = - instantiation (read_multi_arity thy arities) thy; - -fun gen_instantiation_instance do_proof after_qed lthy = - 
let - val (tycos, vs, sort) = (#arities o the_instantiation) lthy; - val arities_proof = maps (fn tyco => Logic.mk_arities (tyco, map snd vs, sort)) tycos; - fun after_qed' results = - Local_Theory.background_theory (fold (Axclass.add_arity o Thm.varifyT_global) results) - #> after_qed; - in - lthy - |> do_proof after_qed' arities_proof - end; - -val instantiation_instance = gen_instantiation_instance (fn after_qed => fn ts => - Proof.theorem NONE (after_qed o map the_single) (map (fn t => [(t, [])]) ts)); - -fun prove_instantiation_instance tac = gen_instantiation_instance (fn after_qed => - fn ts => fn lthy => after_qed (map (fn t => Goal.prove lthy [] [] t - (fn {context, ...} => tac context)) ts) lthy) I; - -fun prove_instantiation_exit tac = prove_instantiation_instance tac - #> Local_Theory.exit_global; - -fun prove_instantiation_exit_result f tac x lthy = - let - val morph = Proof_Context.export_morphism lthy - (Proof_Context.init_global (Proof_Context.theory_of lthy)); - val y = f morph x; - in - lthy - |> prove_instantiation_exit (fn ctxt => tac ctxt y) - |> pair y - end; - - -(* simplified instantiation interface with no class parameter *) - -fun instance_arity_cmd raw_arities thy = - let - val (tycos, vs, sort) = read_multi_arity thy raw_arities; - val sorts = map snd vs; - val arities = maps (fn tyco => Logic.mk_arities (tyco, sorts, sort)) tycos; - fun after_qed results = - Proof_Context.background_theory ((fold o fold) Axclass.add_arity results); - in - thy - |> Proof_Context.init_global - |> Proof.theorem NONE after_qed (map (fn t => [(t, [])]) arities) - end; - - -(** tactics and methods **) - -fun intro_classes_tac facts st = - let - val thy = Thm.theory_of_thm st; - val classes = Sign.all_classes thy; - val class_trivs = map (Thm.class_triv thy) classes; - val class_intros = map_filter (try (#intro o Axclass.get_info thy)) classes; - val assm_intros = all_assm_intros thy; - in - Method.intros_tac (class_trivs @ class_intros @ assm_intros) facts st - end; - -fun default_intro_tac ctxt [] = - COND Thm.no_prems no_tac - (intro_classes_tac [] ORELSE Locale.intro_locales_tac true ctxt []) - | default_intro_tac _ _ = no_tac; - -fun default_tac rules ctxt facts = - HEADGOAL (Method.some_rule_tac ctxt rules facts) ORELSE - default_intro_tac ctxt facts; - -val _ = Theory.setup - (Method.setup @{binding intro_classes} (Scan.succeed (K (METHOD intro_classes_tac))) - "back-chain introduction rules of classes" #> - Method.setup @{binding default} (Attrib.thms >> (METHOD oo default_tac)) - "apply some intro/elim rule"); - -end; - diff --git a/core/Pure/Isar/class_declaration.ML b/core/Pure/Isar/class_declaration.ML deleted file mode 100644 index 2df272eb..00000000 --- a/core/Pure/Isar/class_declaration.ML +++ /dev/null @@ -1,381 +0,0 @@ -(* Title: Pure/Isar/class_declaration.ML - Author: Florian Haftmann, TU Muenchen - -Declaring classes and subclass relations. 
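For orientation, an illustrative use of the "class" command defined here,
assuming a HOL context (the names "semigroup", "monoid", "mult" and "one"
are invented for the example and are not part of this file):

  class semigroup =
    fixes mult :: "'a => 'a => 'a"
    assumes assoc: "mult (mult x y) z = mult x (mult y z)"

  class monoid = semigroup +
    fixes one :: "'a"
    assumes neutral: "mult one x = x" "mult x one = x"

"class" introduces an axclass together with a locale of the same name;
"subclass", issued inside a class context, establishes an inclusion between
two existing classes by proof.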
-*) - -signature CLASS_DECLARATION = -sig - val class: binding -> class list -> - Element.context_i list -> theory -> string * local_theory - val class_cmd: binding -> xstring list -> - Element.context list -> theory -> string * local_theory - val prove_subclass: tactic -> class -> - local_theory -> local_theory - val subclass: class -> local_theory -> Proof.state - val subclass_cmd: xstring -> local_theory -> Proof.state -end; - -structure Class_Declaration: CLASS_DECLARATION = -struct - -(** class definitions **) - -local - -(* calculating class-related rules including canonical interpretation *) - -fun calculate thy class sups base_sort param_map assm_axiom = - let - val empty_ctxt = Proof_Context.init_global thy; - - (* instantiation of canonical interpretation *) - val aT = TFree (Name.aT, base_sort); - val param_map_const = (map o apsnd) Const param_map; - val param_map_inst = (map o apsnd) - (Const o apsnd (map_atyps (K aT))) param_map; - val const_morph = Element.inst_morphism thy - (Symtab.empty, Symtab.make param_map_inst); - val typ_morph = Element.inst_morphism thy - (Symtab.empty |> Symtab.update (Name.aT, TFree (Name.aT, [class])), Symtab.empty); - val (([raw_props], [(_, raw_inst_morph)], export_morph), _) = empty_ctxt - |> Expression.cert_goal_expression ([(class, (("", false), - Expression.Named param_map_const))], []); - val (props, inst_morph) = - if null param_map - then (raw_props |> map (Morphism.term typ_morph), - raw_inst_morph $> typ_morph) - else (raw_props, raw_inst_morph); (*FIXME proper handling in - locale.ML / expression.ML would be desirable*) - - (* witness for canonical interpretation *) - val some_prop = try the_single props; - val some_witn = Option.map (fn prop => - let - val sup_axioms = map_filter (fst o Class.rules thy) sups; - val loc_intro_tac = - (case Locale.intros_of thy class of - (_, NONE) => all_tac - | (_, SOME intro) => ALLGOALS (rtac intro)); - val tac = loc_intro_tac - THEN ALLGOALS (Proof_Context.fact_tac empty_ctxt (sup_axioms @ the_list assm_axiom)); - in Element.prove_witness empty_ctxt prop tac end) some_prop; - val some_axiom = Option.map (Element.conclude_witness empty_ctxt) some_witn; - - (* canonical interpretation *) - val base_morph = inst_morph - $> Morphism.binding_morphism "class_binding" (Binding.prefix false (Class.class_prefix class)) - $> Element.satisfy_morphism (the_list some_witn); - val eq_morph = Element.eq_morphism thy (Class.these_defs thy sups); - - (* assm_intro *) - fun prove_assm_intro thm = - let - val ((_, [thm']), _) = Variable.import true [thm] empty_ctxt; - val const_eq_morph = - (case eq_morph of - SOME eq_morph => const_morph $> eq_morph - | NONE => const_morph); - val thm'' = Morphism.thm const_eq_morph thm'; - in - Goal.prove_sorry_global thy [] [] (Thm.prop_of thm'') - (fn {context = ctxt, ...} => ALLGOALS (Proof_Context.fact_tac ctxt [thm''])) - end; - val some_assm_intro = Option.map prove_assm_intro (fst (Locale.intros_of thy class)); - - (* of_class *) - val of_class_prop_concl = Logic.mk_of_class (aT, class); - val of_class_prop = - (case some_prop of - NONE => of_class_prop_concl - | SOME prop => Logic.mk_implies (Morphism.term const_morph - ((map_types o map_atyps) (K aT) prop), of_class_prop_concl)); - val sup_of_classes = map (snd o Class.rules thy) sups; - val loc_axiom_intros = map Drule.export_without_context_open (Locale.axioms_of thy class); - val axclass_intro = #intro (Axclass.get_info thy class); - val base_sort_trivs = Thm.of_sort (Thm.ctyp_of thy aT, base_sort); - val tac = - 
REPEAT (SOMEGOAL - (match_tac (axclass_intro :: sup_of_classes @ loc_axiom_intros @ base_sort_trivs) - ORELSE' assume_tac)); - val of_class = Goal.prove_sorry_global thy [] [] of_class_prop (K tac); - - in (base_morph, eq_morph, export_morph, some_axiom, some_assm_intro, of_class) end; - - -(* reading and processing class specifications *) - -fun prep_class_elems prep_decl thy sups raw_elems = - let - - (* user space type system: only permits 'a type variable, improves towards 'a *) - val algebra = Sign.classes_of thy; - val inter_sort = curry (Sorts.inter_sort algebra); - val proto_base_sort = - if null sups then Sign.defaultS thy - else fold inter_sort (map (Class.base_sort thy) sups) []; - val base_constraints = (map o apsnd) - (map_type_tfree (K (TVar ((Name.aT, 0), proto_base_sort))) o fst o snd) - (Class.these_operations thy sups); - fun singleton_fixate tms = - let - val tfrees = fold Term.add_tfrees tms []; - val inferred_sort = - (fold o fold_types o fold_atyps) (fn TVar (_, S) => inter_sort S | _ => I) tms []; - val fixate_sort = - (case tfrees of - [] => inferred_sort - | [(a, S)] => - if a <> Name.aT then - error ("No type variable other than " ^ Name.aT ^ " allowed in class specification") - else if Sorts.sort_le algebra (S, inferred_sort) then S - else - error ("Type inference imposes additional sort constraint " ^ - Syntax.string_of_sort_global thy inferred_sort ^ - " of type parameter " ^ Name.aT ^ " of sort " ^ - Syntax.string_of_sort_global thy S) - | _ => error "Multiple type variables in class specification"); - val fixateT = TFree (Name.aT, fixate_sort); - in - (map o map_types o map_atyps) - (fn T as TVar (xi, _) => if Type_Infer.is_param xi then fixateT else T | T => T) tms - end; - fun after_infer_fixate tms = - let - val fixate_sort = - (fold o fold_types o fold_atyps) - (fn TVar (xi, S) => if Type_Infer.is_param xi then inter_sort S else I | _ => I) tms []; - in - (map o map_types o map_atyps) - (fn T as TVar (xi, _) => - if Type_Infer.is_param xi then Type_Infer.param 0 (Name.aT, fixate_sort) else T - | T => T) tms - end; - - (* preprocessing elements, retrieving base sort from type-checked elements *) - val raw_supexpr = - (map (fn sup => (sup, (("", false), Expression.Positional []))) sups, []); - val init_class_body = - fold (Proof_Context.add_const_constraint o apsnd SOME) base_constraints - #> Class.redeclare_operations thy sups - #> Context.proof_map (Syntax_Phases.term_check 0 "singleton_fixate" (K singleton_fixate)); - val ((raw_supparams, _, raw_inferred_elems, _), _) = - Proof_Context.init_global thy - |> Context.proof_map (Syntax_Phases.term_check 0 "after_infer_fixate" (K after_infer_fixate)) - |> prep_decl raw_supexpr init_class_body raw_elems; - fun filter_element (Element.Fixes []) = NONE - | filter_element (e as Element.Fixes _) = SOME e - | filter_element (Element.Constrains []) = NONE - | filter_element (e as Element.Constrains _) = SOME e - | filter_element (Element.Assumes []) = NONE - | filter_element (e as Element.Assumes _) = SOME e - | filter_element (Element.Defines _) = - error ("\"defines\" element not allowed in class specification.") - | filter_element (Element.Notes _) = - error ("\"notes\" element not allowed in class specification."); - val inferred_elems = map_filter filter_element raw_inferred_elems; - fun fold_element_types f (Element.Fixes fxs) = fold (fn (_, SOME T, _) => f T) fxs - | fold_element_types f (Element.Constrains cnstrs) = fold (f o snd) cnstrs - | fold_element_types f (Element.Assumes assms) = fold (fold (fn (t, 
ts) => - fold_types f t #> (fold o fold_types) f ts) o snd) assms; - val base_sort = - if null inferred_elems then proto_base_sort - else - (case (fold o fold_element_types) Term.add_tfreesT inferred_elems [] of - [] => error "No type variable in class specification" - | [(_, sort)] => sort - | _ => error "Multiple type variables in class specification"); - val supparams = map (fn ((c, T), _) => - (c, map_atyps (K (TFree (Name.aT, base_sort))) T)) raw_supparams; - val supparam_names = map fst supparams; - fun mk_param ((c, _), _) = Free (c, (the o AList.lookup (op =) supparams) c); - val supexpr = (map (fn sup => (sup, (("", false), - Expression.Positional (map (SOME o mk_param) (Locale.params_of thy sup))))) sups, - map (fn (c, T) => (Binding.name c, SOME T, NoSyn)) supparams); - - in (base_sort, supparam_names, supexpr, inferred_elems) end; - -val cert_class_elems = prep_class_elems Expression.cert_declaration; -val read_class_elems = prep_class_elems Expression.cert_read_declaration; - -fun prep_class_spec prep_class prep_class_elems thy raw_supclasses raw_elems = - let - val thy_ctxt = Proof_Context.init_global thy; - - (* prepare import *) - val inter_sort = curry (Sorts.inter_sort (Sign.classes_of thy)); - val sups = Sign.minimize_sort thy (map (prep_class thy_ctxt) raw_supclasses); - val _ = - (case filter_out (Class.is_class thy) sups of - [] => () - | no_classes => error ("No (proper) classes: " ^ commas_quote no_classes)); - val raw_supparams = (map o apsnd) (snd o snd) (Class.these_params thy sups); - val raw_supparam_names = map fst raw_supparams; - val _ = - if has_duplicates (op =) raw_supparam_names then - error ("Duplicate parameter(s) in superclasses: " ^ - (commas_quote (duplicates (op =) raw_supparam_names))) - else (); - - (* infer types and base sort *) - val (base_sort, supparam_names, supexpr, inferred_elems) = prep_class_elems thy sups raw_elems; - val sup_sort = inter_sort base_sort sups; - - (* process elements as class specification *) - val class_ctxt = Class.begin sups base_sort thy_ctxt; - val ((_, _, syntax_elems, _), _) = class_ctxt - |> Expression.cert_declaration supexpr I inferred_elems; - fun check_vars e vs = - if null vs then - error ("No type variable in part of specification element " ^ - Pretty.string_of (Pretty.chunks (Element.pretty_ctxt class_ctxt e))) - else (); - fun check_element (e as Element.Fixes fxs) = - List.app (fn (_, SOME T, _) => check_vars e (Term.add_tfreesT T [])) fxs - | check_element (e as Element.Assumes assms) = - List.app (fn (_, ts_pss) => - List.app (fn (t, _) => check_vars e (Term.add_tfrees t [])) ts_pss) assms - | check_element _ = (); - val _ = List.app check_element syntax_elems; - fun fork_syn (Element.Fixes xs) = - fold_map (fn (c, ty, syn) => cons (c, syn) #> pair (c, ty, NoSyn)) xs - #>> Element.Fixes - | fork_syn x = pair x; - val (elems, global_syntax) = fold_map fork_syn syntax_elems []; - - in (((sups, supparam_names), (sup_sort, base_sort, supexpr)), (elems, global_syntax)) end; - -val cert_class_spec = prep_class_spec (K I) cert_class_elems; -val read_class_spec = prep_class_spec Proof_Context.read_class read_class_elems; - - -(* class establishment *) - -fun add_consts class base_sort sups supparam_names global_syntax thy = - let - (*FIXME simplify*) - val supconsts = supparam_names - |> AList.make (snd o the o AList.lookup (op =) (Class.these_params thy sups)) - |> (map o apsnd o apsnd o map_atyps o K o TFree) (Name.aT, [class]); - val all_params = Locale.params_of thy class; - val raw_params = (snd o chop 
(length supparam_names)) all_params; - fun add_const ((raw_c, raw_ty), _) thy = - let - val b = Binding.name raw_c; - val c = Sign.full_name thy b; - val ty = map_atyps (K (TFree (Name.aT, base_sort))) raw_ty; - val ty0 = Type.strip_sorts ty; - val ty' = map_atyps (K (TFree (Name.aT, [class]))) ty0; - val syn = (the_default NoSyn o AList.lookup Binding.eq_name global_syntax) b; - in - thy - |> Sign.declare_const_global ((b, ty0), syn) - |> snd - |> pair ((Variable.check_name b, ty), (c, ty')) - end; - in - thy - |> Sign.add_path (Class.class_prefix class) - |> fold_map add_const raw_params - ||> Sign.restore_naming thy - |-> (fn params => pair (supconsts @ (map o apfst) fst params, params)) - end; - -fun adjungate_axclass bname class base_sort sups supsort supparam_names global_syntax thy = - let - (*FIXME simplify*) - fun globalize param_map = map_aterms - (fn Free (v, ty) => Const ((fst o the o AList.lookup (op =) param_map) v, ty) - | t => t); - val raw_pred = Locale.intros_of thy class - |> fst - |> Option.map (Logic.unvarify_global o Logic.strip_imp_concl o Thm.prop_of); - fun get_axiom thy = - (case #axioms (Axclass.get_info thy class) of - [] => NONE - | [thm] => SOME thm); - in - thy - |> add_consts class base_sort sups supparam_names global_syntax - |-> (fn (param_map, params) => Axclass.define_class (bname, supsort) - (map (fst o snd) params) - [(Thm.empty_binding, Option.map (globalize param_map) raw_pred |> the_list)] - #> snd - #> `get_axiom - #-> (fn assm_axiom => fold (Sign.add_const_constraint o apsnd SOME o snd) params - #> pair (param_map, params, assm_axiom))) - end; - -fun gen_class prep_class_spec b raw_supclasses raw_elems thy = - let - val class = Sign.full_name thy b; - val prefix = Binding.qualify true "class"; - val (((sups, supparam_names), (supsort, base_sort, supexpr)), (elems, global_syntax)) = - prep_class_spec thy raw_supclasses raw_elems; - in - thy - |> Expression.add_locale b (prefix b) supexpr elems - |> snd |> Local_Theory.exit_global - |> adjungate_axclass b class base_sort sups supsort supparam_names global_syntax - |-> (fn (param_map, params, assm_axiom) => - `(fn thy => calculate thy class sups base_sort param_map assm_axiom) - #-> (fn (base_morph, eq_morph, export_morph, some_axiom, some_assm_intro, of_class) => - Context.theory_map (Locale.add_registration (class, base_morph) - (Option.map (rpair true) eq_morph) export_morph) - #> Class.register class sups params base_sort base_morph export_morph some_axiom some_assm_intro of_class - #> Global_Theory.store_thm (prefix (Binding.qualified_name (class ^ ".of_class.intro")), of_class))) - |> snd - |> Named_Target.init class - |> pair class - end; - -in - -val class = gen_class cert_class_spec; -val class_cmd = gen_class read_class_spec; - -end; (*local*) - - - -(** subclass relations **) - -local - -fun gen_subclass prep_class do_proof raw_sup lthy = - let - val thy = Proof_Context.theory_of lthy; - val proto_sup = prep_class thy raw_sup; - val proto_sub = case Named_Target.class_of lthy of - SOME class => class - | NONE => error "Not in a class target"; - val (sub, sup) = Axclass.cert_classrel thy (proto_sub, proto_sup); - - val expr = ([(sup, (("", false), Expression.Positional []))], []); - val (([props], deps, export), goal_ctxt) = - Expression.cert_goal_expression expr lthy; - val some_prop = try the_single props; - val some_dep_morph = try the_single (map snd deps); - fun after_qed some_wit = - Class.register_subclass (sub, sup) some_dep_morph some_wit export; - in do_proof after_qed some_prop 
goal_ctxt end; - -fun user_proof after_qed some_prop = - Element.witness_proof (after_qed o try the_single o the_single) - [the_list some_prop]; - -fun tactic_proof tac after_qed some_prop ctxt = - after_qed (Option.map - (fn prop => Element.prove_witness ctxt prop tac) some_prop) ctxt; - -in - -fun prove_subclass tac = gen_subclass (K I) (tactic_proof tac); - -fun subclass x = gen_subclass (K I) user_proof x; -fun subclass_cmd x = - gen_subclass (Proof_Context.read_class o Proof_Context.init_global) user_proof x; - -end; (*local*) - -end; diff --git a/core/Pure/Isar/code.ML b/core/Pure/Isar/code.ML deleted file mode 100644 index 9b376860..00000000 --- a/core/Pure/Isar/code.ML +++ /dev/null @@ -1,1322 +0,0 @@ -(* Title: Pure/Isar/code.ML - Author: Florian Haftmann, TU Muenchen - -Abstract executable ingredients of theory. Management of data -dependent on executable ingredients as synchronized cache; purged -on any change of underlying executable ingredients. -*) - -signature CODE = -sig - (*constants*) - val check_const: theory -> term -> string - val read_bare_const: theory -> string -> string * typ - val read_const: theory -> string -> string - val string_of_const: theory -> string -> string - val const_typ: theory -> string -> typ - val args_number: theory -> string -> int - - (*constructor sets*) - val constrset_of_consts: theory -> (string * typ) list - -> string * ((string * sort) list * (string * ((string * sort) list * typ list)) list) - - (*code equations and certificates*) - val mk_eqn: theory -> thm * bool -> thm * bool - val mk_eqn_liberal: theory -> thm -> (thm * bool) option - val assert_eqn: theory -> thm * bool -> thm * bool - val assert_abs_eqn: theory -> string option -> thm -> thm * string - val const_typ_eqn: theory -> thm -> string * typ - val expand_eta: theory -> int -> thm -> thm - type cert - val constrain_cert: theory -> sort list -> cert -> cert - val conclude_cert: cert -> cert - val typargs_deps_of_cert: theory -> cert -> (string * sort) list * (string * typ list) list - val equations_of_cert: theory -> cert -> ((string * sort) list * typ) - * (((term * string option) list * (term * string option)) * (thm option * bool)) list option - val pretty_cert: theory -> cert -> Pretty.T list - - (*executable code*) - val add_datatype: (string * typ) list -> theory -> theory - val add_datatype_cmd: string list -> theory -> theory - val datatype_interpretation: - (string * ((string * sort) list * (string * ((string * sort) list * typ list)) list) - -> theory -> theory) -> theory -> theory - val add_abstype: thm -> theory -> theory - val abstype_interpretation: - (string * ((string * sort) list * ((string * ((string * sort) list * typ)) * (string * thm))) - -> theory -> theory) -> theory -> theory - val add_eqn: thm -> theory -> theory - val add_nbe_eqn: thm -> theory -> theory - val add_abs_eqn: thm -> theory -> theory - val add_abs_eqn_attribute: attribute - val add_abs_eqn_attrib: Attrib.src - val add_default_eqn: thm -> theory -> theory - val add_default_eqn_attribute: attribute - val add_default_eqn_attrib: Attrib.src - val add_nbe_default_eqn: thm -> theory -> theory - val add_nbe_default_eqn_attribute: attribute - val add_nbe_default_eqn_attrib: Attrib.src - val del_eqn: thm -> theory -> theory - val del_eqns: string -> theory -> theory - val del_exception: string -> theory -> theory - val add_case: thm -> theory -> theory - val add_undefined: string -> theory -> theory - val get_type: theory -> string - -> ((string * sort) list * (string * ((string * sort) list * 
typ list)) list) * bool - val get_type_of_constr_or_abstr: theory -> string -> (string * bool) option - val is_constr: theory -> string -> bool - val is_abstr: theory -> string -> bool - val get_cert: Proof.context -> ((thm * bool) list -> (thm * bool) list option) list - -> string -> cert - val get_case_scheme: theory -> string -> (int * (int * string option list)) option - val get_case_cong: theory -> string -> thm option - val undefineds: theory -> string list - val print_codesetup: theory -> unit -end; - -signature CODE_DATA_ARGS = -sig - type T - val empty: T -end; - -signature CODE_DATA = -sig - type T - val change: theory option -> (T -> T) -> T - val change_yield: theory option -> (T -> 'a * T) -> 'a * T -end; - -signature PRIVATE_CODE = -sig - include CODE - val declare_data: Any.T -> serial - val change_yield_data: serial * ('a -> Any.T) * (Any.T -> 'a) - -> theory -> ('a -> 'b * 'a) -> 'b * 'a -end; - -structure Code : PRIVATE_CODE = -struct - -(** auxiliary **) - -(* printing *) - -fun string_of_typ thy = - Syntax.string_of_typ (Config.put show_sorts true (Syntax.init_pretty_global thy)); - -fun string_of_const thy c = - let val ctxt = Proof_Context.init_global thy in - case Axclass.inst_of_param thy c of - SOME (c, tyco) => - Proof_Context.extern_const ctxt c ^ " " ^ enclose "[" "]" - (Proof_Context.extern_type ctxt tyco) - | NONE => Proof_Context.extern_const ctxt c - end; - - -(* constants *) - -fun const_typ thy = Type.strip_sorts o Sign.the_const_type thy; - -fun args_number thy = length o binder_types o const_typ thy; - -fun devarify ty = - let - val tys = fold_atyps (fn TVar vi_sort => AList.update (op =) vi_sort) ty []; - val vs = Name.invent Name.context Name.aT (length tys); - val mapping = map2 (fn v => fn (vi, sort) => (vi, TFree (v, sort))) vs tys; - in Term.typ_subst_TVars mapping ty end; - -fun typscheme thy (c, ty) = - (map dest_TFree (Sign.const_typargs thy (c, ty)), Type.strip_sorts ty); - -fun typscheme_equiv (ty1, ty2) = - Type.raw_instance (devarify ty1, ty2) andalso Type.raw_instance (devarify ty2, ty1); - -fun check_bare_const thy t = case try dest_Const t - of SOME c_ty => c_ty - | NONE => error ("Not a constant: " ^ Syntax.string_of_term_global thy t); - -fun check_unoverload thy (c, ty) = - let - val c' = Axclass.unoverload_const thy (c, ty); - val ty_decl = const_typ thy c'; - in - if typscheme_equiv (ty_decl, Logic.varifyT_global ty) - then c' - else - error ("Type\n" ^ string_of_typ thy ty ^ - "\nof constant " ^ quote c ^ - "\nis too specific compared to declared type\n" ^ - string_of_typ thy ty_decl) - end; - -fun check_const thy = check_unoverload thy o check_bare_const thy; - -fun read_bare_const thy = check_bare_const thy o Syntax.read_term_global thy; - -fun read_const thy = check_unoverload thy o read_bare_const thy; - - -(** data store **) - -(* datatypes *) - -datatype typ_spec = Constructors of (string * ((string * sort) list * typ list)) list * - string list (*references to associated case constructors*) - | Abstractor of (string * ((string * sort) list * typ)) * (string * thm); - -fun constructors_of (Constructors (cos, _)) = (cos, false) - | constructors_of (Abstractor ((co, (vs, ty)), _)) = ([(co, (vs, [ty]))], true); - -fun case_consts_of (Constructors (_, case_consts)) = case_consts - | case_consts_of (Abstractor _) = []; - -(* functions *) - -datatype fun_spec = Default of (thm * bool) list * (thm * bool) list lazy - (* (cache for default equations, lazy computation of default equations) - -- helps to restore natural order of default 
equations *) - | Eqns of (thm * bool) list - | None - | Proj of term * string - | Abstr of thm * string; - -val initial_fun_spec = Default ([], Lazy.value []); - -fun is_default (Default _) = true - | is_default _ = false; - -fun associated_abstype (Abstr (_, tyco)) = SOME tyco - | associated_abstype _ = NONE; - - -(* executable code data *) - -datatype spec = Spec of { - history_concluded: bool, - functions: ((bool * fun_spec) * (serial * fun_spec) list) Symtab.table - (*with explicit history*), - types: ((serial * ((string * sort) list * typ_spec)) list) Symtab.table - (*with explicit history*), - cases: ((int * (int * string option list)) * thm) Symtab.table * unit Symtab.table -}; - -fun make_spec (history_concluded, (functions, (types, cases))) = - Spec { history_concluded = history_concluded, functions = functions, types = types, cases = cases }; -fun map_spec f (Spec { history_concluded = history_concluded, - functions = functions, types = types, cases = cases }) = - make_spec (f (history_concluded, (functions, (types, cases)))); -fun merge_spec (Spec { history_concluded = _, functions = functions1, - types = types1, cases = (cases1, undefs1) }, - Spec { history_concluded = _, functions = functions2, - types = types2, cases = (cases2, undefs2) }) = - let - val types = Symtab.join (K (AList.merge (op =) (K true))) (types1, types2); - val case_consts_of' = (maps case_consts_of o map (snd o snd o hd o snd) o Symtab.dest); - fun merge_functions ((_, history1), (_, history2)) = - let - val raw_history = AList.merge (op = : serial * serial -> bool) - (K true) (history1, history2); - val filtered_history = filter_out (is_default o snd) raw_history; - val history = if null filtered_history - then raw_history else filtered_history; - in ((false, (snd o hd) history), history) end; - val all_datatype_specs = map (snd o snd o hd o snd) (Symtab.dest types); - val all_constructors = maps (map fst o fst o constructors_of) all_datatype_specs; - val invalidated_case_consts = union (op =) (case_consts_of' types1) (case_consts_of' types2) - |> subtract (op =) (maps case_consts_of all_datatype_specs) - val functions = Symtab.join (K merge_functions) (functions1, functions2) - |> fold (fn c => Symtab.map_entry c (apfst (K (true, initial_fun_spec)))) all_constructors; - val cases = (Symtab.merge (K true) (cases1, cases2) - |> fold Symtab.delete invalidated_case_consts, Symtab.merge (K true) (undefs1, undefs2)); - in make_spec (false, (functions, (types, cases))) end; - -fun history_concluded (Spec { history_concluded, ... }) = history_concluded; -fun the_functions (Spec { functions, ... }) = functions; -fun the_types (Spec { types, ... }) = types; -fun the_cases (Spec { cases, ... 
}) = cases; -val map_history_concluded = map_spec o apfst; -val map_functions = map_spec o apsnd o apfst; -val map_typs = map_spec o apsnd o apsnd o apfst; -val map_cases = map_spec o apsnd o apsnd o apsnd; - - -(* data slots dependent on executable code *) - -(*private copy avoids potential conflict of table exceptions*) -structure Datatab = Table(type key = int val ord = int_ord); - -local - -type kind = { empty: Any.T }; - -val kinds = Synchronized.var "Code_Data" (Datatab.empty: kind Datatab.table); - -fun invoke f k = - (case Datatab.lookup (Synchronized.value kinds) k of - SOME kind => f kind - | NONE => raise Fail "Invalid code data identifier"); - -in - -fun declare_data empty = - let - val k = serial (); - val kind = { empty = empty }; - val _ = Synchronized.change kinds (Datatab.update (k, kind)); - in k end; - -fun invoke_init k = invoke (fn kind => #empty kind) k; - -end; (*local*) - - -(* theory store *) - -local - -type data = Any.T Datatab.table; -fun empty_dataref () = Synchronized.var "code data" (NONE : (data * theory) option); - -structure Code_Data = Theory_Data -( - type T = spec * (data * theory) option Synchronized.var; - val empty = (make_spec (false, (Symtab.empty, - (Symtab.empty, (Symtab.empty, Symtab.empty)))), empty_dataref ()); - val extend : T -> T = apsnd (K (empty_dataref ())); - fun merge ((spec1, _), (spec2, _)) = - (merge_spec (spec1, spec2), empty_dataref ()); -); - -in - - -(* access to executable code *) - -val the_exec : theory -> spec = fst o Code_Data.get; - -fun map_exec_purge f = Code_Data.map (fn (exec, _) => (f exec, empty_dataref ())); - -fun change_fun_spec c f = (map_exec_purge o map_functions - o (Symtab.map_default (c, ((false, initial_fun_spec), []))) - o apfst) (fn (_, spec) => (true, f spec)); - - -(* tackling equation history *) - -fun continue_history thy = if (history_concluded o the_exec) thy - then thy - |> (Code_Data.map o apfst o map_history_concluded) (K false) - |> SOME - else NONE; - -fun conclude_history thy = if (history_concluded o the_exec) thy - then NONE - else thy - |> (Code_Data.map o apfst) - ((map_functions o Symtab.map) (fn _ => fn ((changed, current), history) => - ((false, current), - if changed then (serial (), current) :: history else history)) - #> map_history_concluded (K true)) - |> SOME; - -val _ = Theory.setup - (Theory.at_begin continue_history #> Theory.at_end conclude_history); - - -(* access to data dependent on abstract executable code *) - -fun change_yield_data (kind, mk, dest) theory f = - let - val dataref = (snd o Code_Data.get) theory; - val (datatab, thy) = case Synchronized.value dataref - of SOME (datatab, thy) => - if Theory.eq_thy (theory, thy) - then (datatab, thy) - else (Datatab.empty, theory) - | NONE => (Datatab.empty, theory) - val data = case Datatab.lookup datatab kind - of SOME data => data - | NONE => invoke_init kind; - val result as (_, data') = f (dest data); - val _ = Synchronized.change dataref - ((K o SOME) (Datatab.update (kind, mk data') datatab, thy)); - in result end; - -end; (*local*) - - -(** foundation **) - -(* datatypes *) - -fun no_constr thy s (c, ty) = error ("Not a datatype constructor:\n" ^ string_of_const thy c - ^ " :: " ^ string_of_typ thy ty ^ "\n" ^ enclose "(" ")" s); - -fun analyze_constructor thy (c, ty) = - let - val _ = Thm.cterm_of thy (Const (c, ty)); - val ty_decl = devarify (const_typ thy c); - fun last_typ c_ty ty = - let - val tfrees = Term.add_tfreesT ty []; - val (tyco, vs) = (apsnd o map) dest_TFree (dest_Type (body_type ty)) - handle TYPE _ 
=> no_constr thy "bad type" c_ty - val _ = if tyco = "fun" then no_constr thy "bad type" c_ty else (); - val _ = - if has_duplicates (eq_fst (op =)) vs - then no_constr thy "duplicate type variables in datatype" c_ty else (); - val _ = - if length tfrees <> length vs - then no_constr thy "type variables missing in datatype" c_ty else (); - in (tyco, vs) end; - val (tyco, _) = last_typ (c, ty) ty_decl; - val (_, vs) = last_typ (c, ty) ty; - in ((tyco, map snd vs), (c, (map fst vs, ty))) end; - -fun constrset_of_consts thy consts = - let - val _ = map (fn (c, _) => if (is_some o Axclass.class_of_param thy) c - then error ("Is a class parameter: " ^ string_of_const thy c) else ()) consts; - val raw_constructors = map (analyze_constructor thy) consts; - val tyco = case distinct (op =) (map (fst o fst) raw_constructors) - of [tyco] => tyco - | [] => error "Empty constructor set" - | tycos => error ("Different type constructors in constructor set: " ^ commas_quote tycos) - val vs = Name.invent Name.context Name.aT (Sign.arity_number thy tyco); - fun inst vs' (c, (vs, ty)) = - let - val the_v = the o AList.lookup (op =) (vs ~~ vs'); - val ty' = map_type_tfree (fn (v, _) => TFree (the_v v, [])) ty; - val (vs'', ty'') = typscheme thy (c, ty'); - in (c, (vs'', binder_types ty'')) end; - val constructors = map (inst vs o snd) raw_constructors; - in (tyco, (map (rpair []) vs, constructors)) end; - -fun get_type_entry thy tyco = case these (Symtab.lookup ((the_types o the_exec) thy) tyco) - of (_, entry) :: _ => SOME entry - | _ => NONE; - -fun get_type thy tyco = case get_type_entry thy tyco - of SOME (vs, spec) => apfst (pair vs) (constructors_of spec) - | NONE => Sign.arity_number thy tyco - |> Name.invent Name.context Name.aT - |> map (rpair []) - |> rpair [] - |> rpair false; - -fun get_abstype_spec thy tyco = case get_type_entry thy tyco - of SOME (vs, Abstractor spec) => (vs, spec) - | _ => error ("Not an abstract type: " ^ tyco); - -fun get_type_of_constr_or_abstr thy c = - case (body_type o const_typ thy) c - of Type (tyco, _) => let val ((_, cos), abstract) = get_type thy tyco - in if member (op =) (map fst cos) c then SOME (tyco, abstract) else NONE end - | _ => NONE; - -fun is_constr thy c = case get_type_of_constr_or_abstr thy c - of SOME (_, false) => true - | _ => false; - -fun is_abstr thy c = case get_type_of_constr_or_abstr thy c - of SOME (_, true) => true - | _ => false; - - -(* bare code equations *) - -(* convention for variables: - ?x ?'a for free-floating theorems (e.g. 
in the data store) - ?x 'a for certificates - x 'a for final representation of equations -*) - -exception BAD_THM of string; -fun bad_thm msg = raise BAD_THM msg; -fun error_thm f thy (thm, proper) = f (thm, proper) - handle BAD_THM msg => error (msg ^ ", in theorem:\n" ^ Display.string_of_thm_global thy thm); -fun error_abs_thm f thy thm = f thm - handle BAD_THM msg => error (msg ^ ", in theorem:\n" ^ Display.string_of_thm_global thy thm); -fun warning_thm f thy (thm, proper) = SOME (f (thm, proper)) - handle BAD_THM msg => (warning (msg ^ ", in theorem:\n" ^ Display.string_of_thm_global thy thm); NONE) -fun try_thm f thm_proper = SOME (f thm_proper) - handle BAD_THM _ => NONE; - -fun is_linear thm = - let val (_, args) = (strip_comb o fst o Logic.dest_equals o Thm.plain_prop_of) thm - in not (has_duplicates (op =) ((fold o fold_aterms) - (fn Var (v, _) => cons v | _ => I) args [])) end; - -fun check_decl_ty thy (c, ty) = - let - val ty_decl = const_typ thy c; - in if typscheme_equiv (ty_decl, ty) then () - else bad_thm ("Type\n" ^ string_of_typ thy ty - ^ "\nof constant " ^ quote c - ^ "\nis too specific compared to declared type\n" - ^ string_of_typ thy ty_decl) - end; - -fun check_eqn thy { allow_nonlinear, allow_consts, allow_pats } thm (lhs, rhs) = - let - fun vars_of t = fold_aterms (fn Var (v, _) => insert (op =) v - | Free _ => bad_thm "Illegal free variable" - | _ => I) t []; - fun tvars_of t = fold_term_types (fn _ => - fold_atyps (fn TVar (v, _) => insert (op =) v - | TFree _ => bad_thm "Illegal free type variable")) t []; - val lhs_vs = vars_of lhs; - val rhs_vs = vars_of rhs; - val lhs_tvs = tvars_of lhs; - val rhs_tvs = tvars_of rhs; - val _ = if null (subtract (op =) lhs_vs rhs_vs) - then () - else bad_thm "Free variables on right hand side of equation"; - val _ = if null (subtract (op =) lhs_tvs rhs_tvs) - then () - else bad_thm "Free type variables on right hand side of equation"; - val (head, args) = strip_comb lhs; - val (c, ty) = case head - of Const (c_ty as (_, ty)) => (Axclass.unoverload_const thy c_ty, ty) - | _ => bad_thm "Equation not headed by constant"; - fun check _ (Abs _) = bad_thm "Abstraction on left hand side of equation" - | check 0 (Var _) = () - | check _ (Var _) = bad_thm "Variable with application on left hand side of equation" - | check n (t1 $ t2) = (check (n+1) t1; check 0 t2) - | check n (Const (c_ty as (c, ty))) = - if allow_pats then let - val c' = Axclass.unoverload_const thy c_ty - in if n = (length o binder_types) ty - then if allow_consts orelse is_constr thy c' - then () - else bad_thm (quote c ^ " is not a constructor, on left hand side of equation") - else bad_thm ("Partially applied constant " ^ quote c ^ " on left hand side of equation") - end else bad_thm ("Pattern not allowed here, but constant " ^ quote c ^ " encountered on left hand side of equation") - val _ = map (check 0) args; - val _ = if allow_nonlinear orelse is_linear thm then () - else bad_thm "Duplicate variables on left hand side of equation"; - val _ = if (is_none o Axclass.class_of_param thy) c then () - else bad_thm "Overloaded constant as head in equation"; - val _ = if not (is_constr thy c) then () - else bad_thm "Constructor as head in equation"; - val _ = if not (is_abstr thy c) then () - else bad_thm "Abstractor as head in equation"; - val _ = check_decl_ty thy (c, ty); - val _ = case strip_type ty - of (Type (tyco, _) :: _, _) => (case get_type_entry thy tyco - of SOME (_, Abstractor (_, (proj, _))) => if c = proj - then bad_thm "Projection as head in equation" 
- else () - | _ => ()) - | _ => (); - in () end; - -fun gen_assert_eqn thy check_patterns (thm, proper) = - let - val (lhs, rhs) = (Logic.dest_equals o Thm.plain_prop_of) thm - handle TERM _ => bad_thm "Not an equation" - | THM _ => bad_thm "Not a proper equation"; - val _ = check_eqn thy { allow_nonlinear = not proper, - allow_consts = not (proper andalso check_patterns), allow_pats = true } thm (lhs, rhs); - in (thm, proper) end; - -fun assert_abs_eqn thy some_tyco thm = - let - val (full_lhs, rhs) = (Logic.dest_equals o Thm.plain_prop_of) thm - handle TERM _ => bad_thm "Not an equation" - | THM _ => bad_thm "Not a proper equation"; - val (rep, lhs) = dest_comb full_lhs - handle TERM _ => bad_thm "Not an abstract equation"; - val (rep_const, ty) = dest_Const rep - handle TERM _ => bad_thm "Not an abstract equation"; - val (tyco, Ts) = (dest_Type o domain_type) ty - handle TERM _ => bad_thm "Not an abstract equation" - | TYPE _ => bad_thm "Not an abstract equation"; - val _ = case some_tyco of SOME tyco' => if tyco = tyco' then () - else bad_thm ("Abstract type mismatch:" ^ quote tyco ^ " vs. " ^ quote tyco') - | NONE => (); - val (vs', (_, (rep', _))) = case try (get_abstype_spec thy) tyco - of SOME data => data - | NONE => bad_thm ("Not an abstract type: " ^ tyco); - val _ = if rep_const = rep' then () - else bad_thm ("Projection mismatch: " ^ quote rep_const ^ " vs. " ^ quote rep'); - val _ = check_eqn thy { allow_nonlinear = false, - allow_consts = false, allow_pats = false } thm (lhs, rhs); - val _ = if forall2 (fn T => fn (_, sort) => Sign.of_sort thy (T, sort)) Ts vs' then () - else error ("Type arguments do not satisfy sort constraints of abstype certificate."); - in (thm, tyco) end; - -fun assert_eqn thy = gen_assert_eqn thy true; - -fun meta_rewrite thy = Local_Defs.meta_rewrite_rule (Proof_Context.init_global thy); - -fun mk_eqn thy = error_thm (gen_assert_eqn thy false) thy o - apfst (meta_rewrite thy); - -fun mk_eqn_liberal thy = Option.map (fn (thm, _) => (thm, is_linear thm)) - o try_thm (gen_assert_eqn thy false) o rpair false o meta_rewrite thy; - -fun mk_eqn_maybe_abs thy raw_thm = - let - val thm = meta_rewrite thy raw_thm; - val some_abs_thm = try_thm (assert_abs_eqn thy NONE) thm; - in case some_abs_thm - of SOME (thm, tyco) => SOME ((thm, true), SOME tyco) - | NONE => (Option.map (fn (thm, _) => ((thm, is_linear thm), NONE)) - o warning_thm (gen_assert_eqn thy false) thy) (thm, false) - end; - -fun mk_abs_eqn thy = error_abs_thm (assert_abs_eqn thy NONE) thy o meta_rewrite thy; - -val head_eqn = dest_Const o fst o strip_comb o fst o Logic.dest_equals o Thm.plain_prop_of; - -fun const_typ_eqn thy thm = - let - val (c, ty) = head_eqn thm; - val c' = Axclass.unoverload_const thy (c, ty); - (*permissive wrt. 
to overloaded constants!*) - in (c', ty) end; - -fun const_eqn thy = fst o const_typ_eqn thy; - -fun const_abs_eqn thy = Axclass.unoverload_const thy o dest_Const o fst o strip_comb o snd - o dest_comb o fst o Logic.dest_equals o Thm.plain_prop_of; - -fun mk_proj tyco vs ty abs rep = - let - val ty_abs = Type (tyco, map TFree vs); - val xarg = Var (("x", 0), ty); - in Logic.mk_equals (Const (rep, ty_abs --> ty) $ (Const (abs, ty --> ty_abs) $ xarg), xarg) end; - - -(* technical transformations of code equations *) - -fun expand_eta thy k thm = - let - val (lhs, rhs) = (Logic.dest_equals o Thm.plain_prop_of) thm; - val (_, args) = strip_comb lhs; - val l = if k = ~1 - then (length o fst o strip_abs) rhs - else Int.max (0, k - length args); - val (raw_vars, _) = Term.strip_abs_eta l rhs; - val vars = burrow_fst (Name.variant_list (map (fst o fst) (Term.add_vars lhs []))) - raw_vars; - fun expand (v, ty) thm = Drule.fun_cong_rule thm - (Thm.cterm_of thy (Var ((v, 0), ty))); - in - thm - |> fold expand vars - |> Conv.fconv_rule Drule.beta_eta_conversion - end; - -fun same_arity thy thms = - let - val num_args_of = length o snd o strip_comb o fst o Logic.dest_equals; - val k = fold (Integer.max o num_args_of o Thm.prop_of) thms 0; - in map (expand_eta thy k) thms end; - -fun mk_desymbolization pre post mk vs = - let - val names = map (pre o fst o fst) vs - |> map (Name.desymbolize (SOME false)) - |> Name.variant_list [] - |> map post; - in map_filter (fn (((v, i), x), v') => - if v = v' andalso i = 0 then NONE - else SOME (((v, i), x), mk ((v', 0), x))) (vs ~~ names) - end; - -fun desymbolize_tvars thms = - let - val tvs = fold (Term.add_tvars o Thm.prop_of) thms []; - val tvar_subst = mk_desymbolization (unprefix "'") (prefix "'") TVar tvs; - in map (Thm.certify_instantiate (tvar_subst, [])) thms end; - -fun desymbolize_vars thm = - let - val vs = Term.add_vars (Thm.prop_of thm) []; - val var_subst = mk_desymbolization I I Var vs; - in Thm.certify_instantiate ([], var_subst) thm end; - -fun canonize_thms thy = desymbolize_tvars #> same_arity thy #> map desymbolize_vars; - - -(* abstype certificates *) - -fun check_abstype_cert thy proto_thm = - let - val thm = (Axclass.unoverload thy o meta_rewrite thy) proto_thm; - val (lhs, rhs) = Logic.dest_equals (Thm.plain_prop_of thm) - handle TERM _ => bad_thm "Not an equation" - | THM _ => bad_thm "Not a proper equation"; - val ((abs, raw_ty), ((rep, rep_ty), param)) = (apsnd (apfst dest_Const o dest_comb) - o apfst dest_Const o dest_comb) lhs - handle TERM _ => bad_thm "Not an abstype certificate"; - val _ = pairself (fn c => if (is_some o Axclass.class_of_param thy) c - then error ("Is a class parameter: " ^ string_of_const thy c) else ()) (abs, rep); - val _ = check_decl_ty thy (abs, raw_ty); - val _ = check_decl_ty thy (rep, rep_ty); - val _ = if length (binder_types raw_ty) = 1 - then () - else bad_thm "Bad type for abstract constructor"; - val _ = (fst o dest_Var) param - handle TERM _ => bad_thm "Not an abstype certificate"; - val _ = if param = rhs then () else bad_thm "Not an abstype certificate"; - val ((tyco, sorts), (abs, (vs, ty'))) = - analyze_constructor thy (abs, devarify raw_ty); - val ty = domain_type ty'; - val (vs', _) = typscheme thy (abs, ty'); - in (tyco, (vs ~~ sorts, ((abs, (vs', ty)), (rep, thm)))) end; - - -(* code equation certificates *) - -fun build_head thy (c, ty) = - Thm.cterm_of thy (Logic.mk_equals (Free ("HEAD", ty), Const (c, ty))); - -fun get_head thy cert_thm = - let - val [head] = (#hyps o Thm.crep_thm) cert_thm; 
- val (_, Const (c, ty)) = (Logic.dest_equals o Thm.term_of) head; - in (typscheme thy (c, ty), head) end; - -fun typscheme_projection thy = - typscheme thy o dest_Const o fst o dest_comb o fst o Logic.dest_equals; - -fun typscheme_abs thy = - typscheme thy o dest_Const o fst o strip_comb o snd o dest_comb o fst o Logic.dest_equals o Thm.prop_of; - -fun constrain_thm thy vs sorts thm = - let - val mapping = map2 (fn (v, sort) => fn sort' => - (v, Sorts.inter_sort (Sign.classes_of thy) (sort, sort'))) vs sorts; - val inst = map2 (fn (v, sort) => fn (_, sort') => - (((v, 0), sort), TFree (v, sort'))) vs mapping; - val subst = (map_types o map_type_tfree) - (fn (v, _) => TFree (v, the (AList.lookup (op =) mapping v))); - in - thm - |> Thm.varifyT_global - |> Thm.certify_instantiate (inst, []) - |> pair subst - end; - -fun concretify_abs thy tyco abs_thm = - let - val (_, ((c, _), (_, cert))) = get_abstype_spec thy tyco; - val lhs = (fst o Logic.dest_equals o Thm.prop_of) abs_thm - val ty = fastype_of lhs; - val ty_abs = (fastype_of o snd o dest_comb) lhs; - val abs = Thm.cterm_of thy (Const (c, ty --> ty_abs)); - val raw_concrete_thm = Drule.transitive_thm OF [Thm.symmetric cert, Thm.combination (Thm.reflexive abs) abs_thm]; - in (c, (Thm.varifyT_global o zero_var_indexes) raw_concrete_thm) end; - -fun add_rhss_of_eqn thy t = - let - val (args, rhs) = (apfst (snd o strip_comb) o Logic.dest_equals) t; - fun add_const (Const (c, ty)) = insert (op =) (c, Sign.const_typargs thy (c, ty)) - | add_const _ = I - val add_consts = fold_aterms add_const - in add_consts rhs o fold add_consts args end; - -val dest_eqn = apfst (snd o strip_comb) o Logic.dest_equals o Logic.unvarify_global; - -abstype cert = Nothing of thm - | Equations of thm * bool list - | Projection of term * string - | Abstract of thm * string -with - -fun dummy_thm ctxt c = - let - val thy = Proof_Context.theory_of ctxt; - val raw_ty = devarify (const_typ thy c); - val (vs, _) = typscheme thy (c, raw_ty); - val sortargs = case Axclass.class_of_param thy c - of SOME class => [[class]] - | NONE => (case get_type_of_constr_or_abstr thy c - of SOME (tyco, _) => (map snd o fst o the) - (AList.lookup (op =) ((snd o fst o get_type thy) tyco) c) - | NONE => replicate (length vs) []); - val the_sort = the o AList.lookup (op =) (map fst vs ~~ sortargs); - val ty = map_type_tfree (fn (v, _) => TFree (v, the_sort v)) raw_ty - val chead = build_head thy (c, ty); - in Thm.weaken chead Drule.dummy_thm end; - -fun nothing_cert ctxt c = Nothing (dummy_thm ctxt c); - -fun cert_of_eqns ctxt c [] = Equations (dummy_thm ctxt c, []) - | cert_of_eqns ctxt c raw_eqns = - let - val thy = Proof_Context.theory_of ctxt; - val eqns = burrow_fst (canonize_thms thy) raw_eqns; - val _ = map (error_thm (assert_eqn thy) thy) eqns; - val (thms, propers) = split_list eqns; - val _ = map (fn thm => if c = const_eqn thy thm then () - else error ("Wrong head of code equation,\nexpected constant " - ^ string_of_const thy c ^ "\n" ^ Display.string_of_thm_global thy thm)) thms; - fun tvars_of T = rev (Term.add_tvarsT T []); - val vss = map (tvars_of o snd o head_eqn) thms; - fun inter_sorts vs = - fold (curry (Sorts.inter_sort (Sign.classes_of thy)) o snd) vs []; - val sorts = map_transpose inter_sorts vss; - val vts = Name.invent_names Name.context Name.aT sorts; - val thms' = - map2 (fn vs => Thm.certify_instantiate (vs ~~ map TFree vts, [])) vss thms; - val head_thm = Thm.symmetric (Thm.assume (build_head thy (head_eqn (hd thms')))); - fun head_conv ct = if can 
Thm.dest_comb ct - then Conv.fun_conv head_conv ct - else Conv.rewr_conv head_thm ct; - val rewrite_head = Conv.fconv_rule (Conv.arg1_conv head_conv); - val cert_thm = Conjunction.intr_balanced (map rewrite_head thms'); - in Equations (cert_thm, propers) end; - -fun cert_of_proj thy c tyco = - let - val (vs, ((abs, (_, ty)), (rep, _))) = get_abstype_spec thy tyco; - val _ = if c = rep then () else - error ("Wrong head of projection,\nexpected constant " ^ string_of_const thy rep); - in Projection (mk_proj tyco vs ty abs rep, tyco) end; - -fun cert_of_abs thy tyco c raw_abs_thm = - let - val abs_thm = singleton (canonize_thms thy) raw_abs_thm; - val _ = assert_abs_eqn thy (SOME tyco) abs_thm; - val _ = if c = const_abs_eqn thy abs_thm then () - else error ("Wrong head of abstract code equation,\nexpected constant " - ^ string_of_const thy c ^ "\n" ^ Display.string_of_thm_global thy abs_thm); - in Abstract (Thm.legacy_freezeT abs_thm, tyco) end; - -fun constrain_cert_thm thy sorts cert_thm = - let - val ((vs, _), head) = get_head thy cert_thm; - val (subst, cert_thm') = cert_thm - |> Thm.implies_intr head - |> constrain_thm thy vs sorts; - val head' = Thm.term_of head - |> subst - |> Thm.cterm_of thy; - val cert_thm'' = cert_thm' - |> Thm.elim_implies (Thm.assume head'); - in cert_thm'' end; - -fun constrain_cert thy sorts (Nothing cert_thm) = - Nothing (constrain_cert_thm thy sorts cert_thm) - | constrain_cert thy sorts (Equations (cert_thm, propers)) = - Equations (constrain_cert_thm thy sorts cert_thm, propers) - | constrain_cert thy _ (cert as Projection _) = - cert - | constrain_cert thy sorts (Abstract (abs_thm, tyco)) = - Abstract (snd (constrain_thm thy (fst (typscheme_abs thy abs_thm)) sorts abs_thm), tyco); - -fun conclude_cert (Nothing cert_thm) = - Nothing (Thm.close_derivation cert_thm) - | conclude_cert (Equations (cert_thm, propers)) = - Equations (Thm.close_derivation cert_thm, propers) - | conclude_cert (cert as Projection _) = - cert - | conclude_cert (Abstract (abs_thm, tyco)) = - Abstract (Thm.close_derivation abs_thm, tyco); - -fun typscheme_of_cert thy (Nothing cert_thm) = - fst (get_head thy cert_thm) - | typscheme_of_cert thy (Equations (cert_thm, _)) = - fst (get_head thy cert_thm) - | typscheme_of_cert thy (Projection (proj, _)) = - typscheme_projection thy proj - | typscheme_of_cert thy (Abstract (abs_thm, _)) = - typscheme_abs thy abs_thm; - -fun typargs_deps_of_cert thy (Nothing cert_thm) = - let - val vs = (fst o fst) (get_head thy cert_thm); - in (vs, []) end - | typargs_deps_of_cert thy (Equations (cert_thm, propers)) = - let - val vs = (fst o fst) (get_head thy cert_thm); - val equations = if null propers then [] else - Thm.prop_of cert_thm - |> Logic.dest_conjunction_balanced (length propers); - in (vs, fold (add_rhss_of_eqn thy) equations []) end - | typargs_deps_of_cert thy (Projection (t, _)) = - (fst (typscheme_projection thy t), add_rhss_of_eqn thy t []) - | typargs_deps_of_cert thy (Abstract (abs_thm, tyco)) = - let - val vs = fst (typscheme_abs thy abs_thm); - val (_, concrete_thm) = concretify_abs thy tyco abs_thm; - in (vs, add_rhss_of_eqn thy (Logic.unvarify_types_global (Thm.prop_of concrete_thm)) []) end; - -fun equations_of_cert thy (cert as Nothing _) = - (typscheme_of_cert thy cert, NONE) - | equations_of_cert thy (cert as Equations (cert_thm, propers)) = - let - val tyscm = typscheme_of_cert thy cert; - val thms = if null propers then [] else - cert_thm - |> Local_Defs.expand [snd (get_head thy cert_thm)] - |> Thm.varifyT_global - |> 
Conjunction.elim_balanced (length propers); - fun abstractions (args, rhs) = (map (rpair NONE) args, (rhs, NONE)); - in (tyscm, SOME (map (abstractions o dest_eqn o Thm.prop_of) thms ~~ (map SOME thms ~~ propers))) end - | equations_of_cert thy (Projection (t, tyco)) = - let - val (_, ((abs, _), _)) = get_abstype_spec thy tyco; - val tyscm = typscheme_projection thy t; - val t' = Logic.varify_types_global t; - fun abstractions (args, rhs) = (map (rpair (SOME abs)) args, (rhs, NONE)); - in (tyscm, SOME [((abstractions o dest_eqn) t', (NONE, true))]) end - | equations_of_cert thy (Abstract (abs_thm, tyco)) = - let - val tyscm = typscheme_abs thy abs_thm; - val (abs, concrete_thm) = concretify_abs thy tyco abs_thm; - fun abstractions (args, rhs) = (map (rpair NONE) args, (rhs, (SOME abs))); - in - (tyscm, SOME [((abstractions o dest_eqn o Thm.prop_of) concrete_thm, - (SOME (Thm.varifyT_global abs_thm), true))]) - end; - -fun pretty_cert thy (cert as Nothing _) = - [Pretty.str "(not implemented)"] - | pretty_cert thy (cert as Equations _) = - (map_filter (Option.map (Display.pretty_thm_global thy o Axclass.overload thy) o fst o snd) - o these o snd o equations_of_cert thy) cert - | pretty_cert thy (Projection (t, _)) = - [Syntax.pretty_term_global thy (Logic.varify_types_global t)] - | pretty_cert thy (Abstract (abs_thm, _)) = - [(Display.pretty_thm_global thy o Axclass.overload thy o Thm.varifyT_global) abs_thm]; - -end; - - -(* code certificate access with preprocessing *) - -fun retrieve_raw thy c = - Symtab.lookup ((the_functions o the_exec) thy) c - |> Option.map (snd o fst) - |> the_default None - -fun eqn_conv conv ct = - let - fun lhs_conv ct = if can Thm.dest_comb ct - then Conv.combination_conv lhs_conv conv ct - else Conv.all_conv ct; - in Conv.combination_conv (Conv.arg_conv lhs_conv) conv ct end; - -fun rewrite_eqn conv ctxt = - singleton (Variable.trade (K (map (Conv.fconv_rule (conv (Simplifier.rewrite ctxt))))) ctxt) - -fun preprocess conv ctxt = - let - val thy = Proof_Context.theory_of ctxt; - in - Thm.transfer thy - #> rewrite_eqn conv ctxt - #> Axclass.unoverload thy - end; - -fun cert_of_eqns_preprocess ctxt functrans c = - (perhaps o perhaps_loop o perhaps_apply) functrans - #> (map o apfst) (preprocess eqn_conv ctxt) - #> cert_of_eqns ctxt c; - -fun get_cert ctxt functrans c = - let - val thy = Proof_Context.theory_of ctxt; - in - case retrieve_raw thy c of - Default (_, eqns_lazy) => Lazy.force eqns_lazy - |> cert_of_eqns_preprocess ctxt functrans c - | Eqns eqns => eqns - |> cert_of_eqns_preprocess ctxt functrans c - | None => nothing_cert ctxt c - | Proj (_, tyco) => cert_of_proj thy c tyco - | Abstr (abs_thm, tyco) => abs_thm - |> preprocess Conv.arg_conv ctxt - |> cert_of_abs thy tyco c - end; - - -(* cases *) - -fun case_certificate thm = - let - val ((head, raw_case_expr), cases) = (apfst Logic.dest_equals - o apsnd Logic.dest_conjunctions o Logic.dest_implies o Thm.plain_prop_of) thm; - val _ = case head of Free _ => true - | Var _ => true - | _ => raise TERM ("case_cert", []); - val ([(case_var, _)], case_expr) = Term.strip_abs_eta 1 raw_case_expr; - val (Const (case_const, _), raw_params) = strip_comb case_expr; - val n = find_index (fn Free (v, _) => v = case_var | _ => false) raw_params; - val _ = if n = ~1 then raise TERM ("case_cert", []) else (); - val params = map (fst o dest_Var) (nth_drop n raw_params); - fun dest_case t = - let - val (head' $ t_co, rhs) = Logic.dest_equals t; - val _ = if head' = head then () else raise TERM ("case_cert", []); - val 
(Const (co, _), args) = strip_comb t_co; - val (Var (param, _), args') = strip_comb rhs; - val _ = if args' = args then () else raise TERM ("case_cert", []); - in (param, co) end; - fun analyze_cases cases = - let - val co_list = fold (AList.update (op =) o dest_case) cases []; - in map (AList.lookup (op =) co_list) params end; - fun analyze_let t = - let - val (head' $ arg, Var (param', _) $ arg') = Logic.dest_equals t; - val _ = if head' = head then () else raise TERM ("case_cert", []); - val _ = if arg' = arg then () else raise TERM ("case_cert", []); - val _ = if [param'] = params then () else raise TERM ("case_cert", []); - in [] end; - fun analyze (cases as [let_case]) = - (analyze_cases cases handle Bind => analyze_let let_case) - | analyze cases = analyze_cases cases; - in (case_const, (n, analyze cases)) end; - -fun case_cert thm = case_certificate thm - handle Bind => error "bad case certificate" - | TERM _ => error "bad case certificate"; - -fun get_case_scheme thy = Option.map fst o Symtab.lookup ((fst o the_cases o the_exec) thy); -fun get_case_cong thy = Option.map snd o Symtab.lookup ((fst o the_cases o the_exec) thy); - -val undefineds = Symtab.keys o snd o the_cases o the_exec; - - -(* diagnostic *) - -fun print_codesetup thy = - let - val ctxt = Proof_Context.init_global thy; - val exec = the_exec thy; - fun pretty_equations const thms = - (Pretty.block o Pretty.fbreaks) - (Pretty.str (string_of_const thy const) :: map (Display.pretty_thm_item ctxt) thms); - fun pretty_function (const, Default (_, eqns_lazy)) = - pretty_equations const (map fst (Lazy.force eqns_lazy)) - | pretty_function (const, Eqns eqns) = pretty_equations const (map fst eqns) - | pretty_function (const, None) = pretty_equations const [] - | pretty_function (const, Proj (proj, _)) = Pretty.block - [Pretty.str (string_of_const thy const), Pretty.fbrk, Syntax.pretty_term ctxt proj] - | pretty_function (const, Abstr (thm, _)) = pretty_equations const [thm]; - fun pretty_typ (tyco, vs) = Pretty.str - (string_of_typ thy (Type (tyco, map TFree vs))); - fun pretty_typspec (typ, (cos, abstract)) = if null cos - then pretty_typ typ - else (Pretty.block o Pretty.breaks) ( - pretty_typ typ - :: Pretty.str "=" - :: (if abstract then [Pretty.str "(abstract)"] else []) - @ separate (Pretty.str "|") (map (fn (c, (_, [])) => Pretty.str (string_of_const thy c) - | (c, (_, tys)) => - (Pretty.block o Pretty.breaks) - (Pretty.str (string_of_const thy c) - :: Pretty.str "of" - :: map (Pretty.quote o Syntax.pretty_typ_global thy) tys)) cos) - ); - fun pretty_caseparam NONE = "" - | pretty_caseparam (SOME c) = string_of_const thy c - fun pretty_case (const, ((_, (_, [])), _)) = Pretty.str (string_of_const thy const) - | pretty_case (const, ((_, (_, cos)), _)) = (Pretty.block o Pretty.breaks) [ - Pretty.str (string_of_const thy const), Pretty.str "with", - (Pretty.block o Pretty.commas o map (Pretty.str o pretty_caseparam)) cos]; - val functions = the_functions exec - |> Symtab.dest - |> (map o apsnd) (snd o fst) - |> sort (string_ord o pairself fst); - val datatypes = the_types exec - |> Symtab.dest - |> map (fn (tyco, (_, (vs, spec)) :: _) => - ((tyco, vs), constructors_of spec)) - |> sort (string_ord o pairself (fst o fst)); - val cases = Symtab.dest ((fst o the_cases o the_exec) thy); - val undefineds = Symtab.keys ((snd o the_cases o the_exec) thy); - in - Pretty.writeln_chunks [ - Pretty.block ( - Pretty.str "code equations:" :: Pretty.fbrk - :: (Pretty.fbreaks o map pretty_function) functions - ), - Pretty.block ( - 
Pretty.str "datatypes:" :: Pretty.fbrk - :: (Pretty.fbreaks o map pretty_typspec) datatypes - ), - Pretty.block ( - Pretty.str "cases:" :: Pretty.fbrk - :: (Pretty.fbreaks o map pretty_case) cases - ), - Pretty.block ( - Pretty.str "undefined:" :: Pretty.fbrk - :: (Pretty.commas o map (Pretty.str o string_of_const thy)) undefineds - ) - ] - end; - - -(** declaring executable ingredients **) - -(* code equations *) - -fun gen_add_eqn default (raw_thm, proper) thy = - let - val thm = Thm.close_derivation raw_thm; - val c = const_eqn thy thm; - fun update_subsume verbose (thm, proper) eqns = - let - val args_of = snd o take_prefix is_Var o rev o snd o strip_comb - o map_types Type.strip_sorts o fst o Logic.dest_equals o Thm.plain_prop_of; - val args = args_of thm; - val incr_idx = Logic.incr_indexes ([], Thm.maxidx_of thm + 1); - fun matches_args args' = - let - val k = length args' - length args - in if k >= 0 - then Pattern.matchess thy (args, (map incr_idx o drop k) args') - else false - end; - fun drop (thm', proper') = if (proper orelse not proper') - andalso matches_args (args_of thm') then - (if verbose then warning ("Code generator: dropping subsumed code equation\n" ^ - Display.string_of_thm_global thy thm') else (); true) - else false; - in (thm, proper) :: filter_out drop eqns end; - fun natural_order eqns = - (eqns, Lazy.lazy (fn () => fold (update_subsume false) eqns [])) - fun add_eqn' true (Default (eqns, _)) = Default (natural_order ((thm, proper) :: eqns)) - (*this restores the natural order and drops syntactic redundancies*) - | add_eqn' true None = Default (natural_order [(thm, proper)]) - | add_eqn' true fun_spec = fun_spec - | add_eqn' false (Eqns eqns) = Eqns (update_subsume true (thm, proper) eqns) - | add_eqn' false _ = Eqns [(thm, proper)]; - in change_fun_spec c (add_eqn' default) thy end; - -fun gen_add_abs_eqn raw_thm thy = - let - val (abs_thm, tyco) = apfst Thm.close_derivation raw_thm; - val c = const_abs_eqn thy abs_thm; - in change_fun_spec c (K (Abstr (abs_thm, tyco))) thy end; - -fun add_eqn thm thy = - gen_add_eqn false (mk_eqn thy (thm, true)) thy; - -fun add_nbe_eqn thm thy = - gen_add_eqn false (mk_eqn thy (thm, false)) thy; - -fun add_default_eqn thm thy = - case mk_eqn_liberal thy thm - of SOME eqn => gen_add_eqn true eqn thy - | NONE => thy; - -val add_default_eqn_attribute = Thm.declaration_attribute - (fn thm => Context.mapping (add_default_eqn thm) I); -val add_default_eqn_attrib = Attrib.internal (K add_default_eqn_attribute); - -fun add_nbe_default_eqn thm thy = - gen_add_eqn true (mk_eqn thy (thm, false)) thy; - -val add_nbe_default_eqn_attribute = Thm.declaration_attribute - (fn thm => Context.mapping (add_nbe_default_eqn thm) I); -val add_nbe_default_eqn_attrib = Attrib.internal (K add_nbe_default_eqn_attribute); - -fun add_abs_eqn raw_thm thy = gen_add_abs_eqn (mk_abs_eqn thy raw_thm) thy; - -val add_abs_eqn_attribute = Thm.declaration_attribute - (fn thm => Context.mapping (add_abs_eqn thm) I); -val add_abs_eqn_attrib = Attrib.internal (K add_abs_eqn_attribute); - -fun add_eqn_maybe_abs thm thy = - case mk_eqn_maybe_abs thy thm - of SOME (eqn, NONE) => gen_add_eqn false eqn thy - | SOME ((thm, _), SOME tyco) => gen_add_abs_eqn (thm, tyco) thy - | NONE => thy; - -fun del_eqn thm thy = case mk_eqn_liberal thy thm - of SOME (thm, _) => - let - fun del_eqn' (Default _) = initial_fun_spec - | del_eqn' (Eqns eqns) = - let - val eqns' = filter_out (fn (thm', _) => Thm.eq_thm_prop (thm, thm')) eqns - in if null eqns' then None else Eqns eqns' end - 
| del_eqn' spec = spec - in change_fun_spec (const_eqn thy thm) del_eqn' thy end - | NONE => thy; - -fun del_eqns c = change_fun_spec c (K None); - -fun del_exception c = change_fun_spec c (K (Eqns [])); - - -(* cases *) - -fun case_cong thy case_const (num_args, (pos, _)) = - let - val ([x, y], ctxt) = fold_map Name.variant ["A", "A'"] Name.context; - val (zs, _) = fold_map Name.variant (replicate (num_args - 1) "") ctxt; - val (ws, vs) = chop pos zs; - val T = devarify (const_typ thy case_const); - val Ts = binder_types T; - val T_cong = nth Ts pos; - fun mk_prem z = Free (z, T_cong); - fun mk_concl z = list_comb (Const (case_const, T), map2 (curry Free) (ws @ z :: vs) Ts); - val (prem, concl) = pairself Logic.mk_equals (pairself mk_prem (x, y), pairself mk_concl (x, y)); - in - Goal.prove_sorry_global thy (x :: y :: zs) [prem] concl - (fn {context = ctxt', prems} => - Simplifier.rewrite_goals_tac ctxt' prems - THEN ALLGOALS (Proof_Context.fact_tac ctxt' [Drule.reflexive_thm])) - end; - -fun add_case thm thy = - let - val (case_const, (k, cos)) = case_cert thm; - val _ = case (filter_out (is_constr thy) o map_filter I) cos - of [] => () - | cs => error ("Non-constructor(s) in case certificate: " ^ commas_quote cs); - val entry = (1 + Int.max (1, length cos), (k, cos)); - fun register_case cong = (map_cases o apfst) - (Symtab.update (case_const, (entry, cong))); - fun register_for_constructors (Constructors (cos', cases)) = - Constructors (cos', - if exists (fn (co, _) => member (op =) cos (SOME co)) cos' - then insert (op =) case_const cases - else cases) - | register_for_constructors (x as Abstractor _) = x; - val register_type = (map_typs o Symtab.map) - (K ((map o apsnd o apsnd) register_for_constructors)); - in - thy - |> `(fn thy => case_cong thy case_const entry) - |-> (fn cong => map_exec_purge (register_case cong #> register_type)) - end; - -fun add_undefined c thy = - (map_exec_purge o map_cases o apsnd) (Symtab.update (c, ())) thy; - - -(* types *) - -fun register_type (tyco, vs_spec) thy = - let - val (old_constrs, some_old_proj) = - case these (Symtab.lookup ((the_types o the_exec) thy) tyco) - of (_, (_, Constructors (cos, _))) :: _ => (map fst cos, NONE) - | (_, (_, Abstractor ((co, _), (proj, _)))) :: _ => ([co], SOME proj) - | [] => ([], NONE); - val outdated_funs1 = (map fst o fst o constructors_of o snd) vs_spec; - val outdated_funs2 = case some_old_proj - of NONE => [] - | SOME old_proj => Symtab.fold - (fn (c, ((_, spec), _)) => - if member (op =) (the_list (associated_abstype spec)) tyco - then insert (op =) c else I) - ((the_functions o the_exec) thy) [old_proj]; - fun drop_outdated_cases cases = fold Symtab.delete_safe - (Symtab.fold (fn (c, ((_, (_, cos)), _)) => - if exists (member (op =) old_constrs) (map_filter I cos) - then insert (op =) c else I) cases []) cases; - in - thy - |> fold del_eqns (outdated_funs1 @ outdated_funs2) - |> map_exec_purge - ((map_typs o Symtab.map_default (tyco, [])) (cons (serial (), vs_spec)) - #> (map_cases o apfst) drop_outdated_cases) - end; - -fun unoverload_const_typ thy (c, ty) = (Axclass.unoverload_const thy (c, ty), ty); - -structure Datatype_Interpretation = - Interpretation(type T = string * serial val eq = eq_snd (op =) : T * T -> bool); - -fun with_repaired_path f (tyco, serial) thy = - thy - |> Sign.root_path - |> Sign.add_path (Long_Name.qualifier tyco) - |> f (tyco, serial) - |> Sign.restore_naming thy; - -fun datatype_interpretation f = Datatype_Interpretation.interpretation - (fn (tyco, _) => fn thy => 
with_repaired_path f (tyco, fst (get_type thy tyco)) thy); - -fun add_datatype proto_constrs thy = - let - val constrs = map (unoverload_const_typ thy) proto_constrs; - val (tyco, (vs, cos)) = constrset_of_consts thy constrs; - in - thy - |> register_type (tyco, (vs, Constructors (cos, []))) - |> Datatype_Interpretation.data (tyco, serial ()) - end; - -fun add_datatype_cmd raw_constrs thy = - add_datatype (map (read_bare_const thy) raw_constrs) thy; - -structure Abstype_Interpretation = - Interpretation(type T = string * serial val eq = eq_snd (op =) : T * T -> bool); - -fun abstype_interpretation f = Abstype_Interpretation.interpretation - (fn (tyco, _) => fn thy => f (tyco, get_abstype_spec thy tyco) thy); - -fun add_abstype proto_thm thy = - let - val (tyco, (vs, (abs_ty as (abs, (_, ty)), (rep, cert)))) = - error_abs_thm (check_abstype_cert thy) thy proto_thm; - in - thy - |> register_type (tyco, (vs, Abstractor (abs_ty, (rep, cert)))) - |> change_fun_spec rep - (K (Proj (Logic.varify_types_global (mk_proj tyco vs ty abs rep), tyco))) - |> Abstype_Interpretation.data (tyco, serial ()) - end; - - -(* setup *) - -val _ = Theory.setup - (let - fun mk_attribute f = Thm.declaration_attribute (fn thm => Context.mapping (f thm) I); - fun mk_const_attribute f cs = - mk_attribute (K (fold (fn c => fn thy => f (read_const thy c) thy) cs)); - val code_attribute_parser = - Args.$$$ "equation" |-- Scan.succeed (mk_attribute add_eqn) - || Args.$$$ "nbe" |-- Scan.succeed (mk_attribute add_nbe_eqn) - || Args.$$$ "abstype" |-- Scan.succeed (mk_attribute add_abstype) - || Args.$$$ "abstract" |-- Scan.succeed (mk_attribute add_abs_eqn) - || Args.del |-- Scan.succeed (mk_attribute del_eqn) - || Args.$$$ "drop" -- Args.colon |-- (Scan.repeat1 Parse.term >> mk_const_attribute del_eqns) - || Args.$$$ "abort" -- Args.colon |-- (Scan.repeat1 Parse.term >> mk_const_attribute del_exception) - || Scan.succeed (mk_attribute add_eqn_maybe_abs); - in - Datatype_Interpretation.init - #> Attrib.setup @{binding code} (Scan.lift code_attribute_parser) - "declare theorems for code generation" - end); - -end; (*struct*) - - -(* type-safe interfaces for data dependent on executable code *) - -functor Code_Data(Data: CODE_DATA_ARGS): CODE_DATA = -struct - -type T = Data.T; -exception Data of T; -fun dest (Data x) = x - -val kind = Code.declare_data (Data Data.empty); - -val data_op = (kind, Data, dest); - -fun change_yield (SOME thy) f = Code.change_yield_data data_op thy f - | change_yield NONE f = f Data.empty - -fun change some_thy f = snd (change_yield some_thy (pair () o f)); - -end; - -structure Code : CODE = struct open Code; end; diff --git a/core/Pure/Isar/context_rules.ML b/core/Pure/Isar/context_rules.ML deleted file mode 100644 index 62a0a783..00000000 --- a/core/Pure/Isar/context_rules.ML +++ /dev/null @@ -1,221 +0,0 @@ -(* Title: Pure/Isar/context_rules.ML - Author: Stefan Berghofer and Markus Wenzel, TU Muenchen - -Declarations of intro/elim/dest rules in Pure (see also -Provers/classical.ML for a more specialized version of the same idea). 
-*) - -signature CONTEXT_RULES = -sig - type netpair = ((int * int) * (bool * thm)) Net.net * ((int * int) * (bool * thm)) Net.net - val netpair_bang: Proof.context -> netpair - val netpair: Proof.context -> netpair - val orderlist: ((int * int) * 'a) list -> 'a list - val find_rules_netpair: bool -> thm list -> term -> netpair -> thm list - val find_rules: bool -> thm list -> term -> Proof.context -> thm list list - val print_rules: Proof.context -> unit - val addSWrapper: (Proof.context -> (int -> tactic) -> int -> tactic) -> theory -> theory - val addWrapper: (Proof.context -> (int -> tactic) -> int -> tactic) -> theory -> theory - val Swrap: Proof.context -> (int -> tactic) -> int -> tactic - val wrap: Proof.context -> (int -> tactic) -> int -> tactic - val intro_bang: int option -> attribute - val elim_bang: int option -> attribute - val dest_bang: int option -> attribute - val intro: int option -> attribute - val elim: int option -> attribute - val dest: int option -> attribute - val intro_query: int option -> attribute - val elim_query: int option -> attribute - val dest_query: int option -> attribute - val rule_del: attribute - val add: (int option -> attribute) -> (int option -> attribute) -> (int option -> attribute) -> - attribute context_parser -end; - -structure Context_Rules: CONTEXT_RULES = -struct - - -(** rule declaration contexts **) - -(* rule kinds *) - -val intro_bangK = (0, false); -val elim_bangK = (0, true); -val introK = (1, false); -val elimK = (1, true); -val intro_queryK = (2, false); -val elim_queryK = (2, true); - -val kind_names = - [(intro_bangK, "safe introduction rules (intro!)"), - (elim_bangK, "safe elimination rules (elim!)"), - (introK, "introduction rules (intro)"), - (elimK, "elimination rules (elim)"), - (intro_queryK, "extra introduction rules (intro?)"), - (elim_queryK, "extra elimination rules (elim?)")]; - -val rule_kinds = map #1 kind_names; -val rule_indexes = distinct (op =) (map #1 rule_kinds); - - -(* context data *) - -type netpair = ((int * int) * (bool * thm)) Net.net * ((int * int) * (bool * thm)) Net.net; -val empty_netpairs: netpair list = replicate (length rule_indexes) (Net.empty, Net.empty); - -datatype rules = Rules of - {next: int, - rules: (int * ((int * bool) * thm)) list, - netpairs: netpair list, - wrappers: - ((Proof.context -> (int -> tactic) -> int -> tactic) * stamp) list * - ((Proof.context -> (int -> tactic) -> int -> tactic) * stamp) list}; - -fun make_rules next rules netpairs wrappers = - Rules {next = next, rules = rules, netpairs = netpairs, wrappers = wrappers}; - -fun add_rule (i, b) opt_w th (Rules {next, rules, netpairs, wrappers}) = - let val w = (case opt_w of SOME w => w | NONE => Tactic.subgoals_of_brl (b, th)) in - make_rules (next - 1) ((w, ((i, b), th)) :: rules) - (nth_map i (Tactic.insert_tagged_brl ((w, next), (b, th))) netpairs) wrappers - end; - -fun del_rule0 th (rs as Rules {next, rules, netpairs, wrappers}) = - let - fun eq_th (_, (_, th')) = Thm.eq_thm_prop (th, th'); - fun del b netpair = Tactic.delete_tagged_brl (b, th) netpair handle Net.DELETE => netpair; - in - if not (exists eq_th rules) then rs - else make_rules next (filter_out eq_th rules) (map (del false o del true) netpairs) wrappers - end; - -fun del_rule th = del_rule0 th o del_rule0 (Tactic.make_elim th); - -structure Rules = Generic_Data -( - type T = rules; - val empty = make_rules ~1 [] empty_netpairs ([], []); - val extend = I; - fun merge - (Rules {rules = rules1, wrappers = (ws1, ws1'), ...}, - Rules {rules = rules2, wrappers 
= (ws2, ws2'), ...}) = - let - val wrappers = - (Library.merge (eq_snd (op =)) (ws1, ws2), Library.merge (eq_snd (op =)) (ws1', ws2')); - val rules = Library.merge (fn ((_, (k1, th1)), (_, (k2, th2))) => - k1 = k2 andalso Thm.eq_thm_prop (th1, th2)) (rules1, rules2); - val next = ~ (length rules); - val netpairs = fold (fn (n, (w, ((i, b), th))) => - nth_map i (Tactic.insert_tagged_brl ((w, n), (b, th)))) - (next upto ~1 ~~ rules) empty_netpairs; - in make_rules (next - 1) rules netpairs wrappers end; -); - -fun print_rules ctxt = - let - val Rules {rules, ...} = Rules.get (Context.Proof ctxt); - fun prt_kind (i, b) = - Pretty.big_list ((the o AList.lookup (op =) kind_names) (i, b) ^ ":") - (map_filter (fn (_, (k, th)) => - if k = (i, b) then SOME (Display.pretty_thm_item ctxt th) else NONE) - (sort (int_ord o pairself fst) rules)); - in Pretty.writeln_chunks (map prt_kind rule_kinds) end; - - -(* access data *) - -fun netpairs ctxt = let val Rules {netpairs, ...} = Rules.get (Context.Proof ctxt) in netpairs end; -val netpair_bang = hd o netpairs; -val netpair = hd o tl o netpairs; - - -(* retrieving rules *) - -fun untaglist [] = [] - | untaglist [(_ : int * int, x)] = [x] - | untaglist ((k, x) :: (rest as (k', _) :: _)) = - if k = k' then untaglist rest - else x :: untaglist rest; - -fun orderlist brls = - untaglist (sort (prod_ord int_ord int_ord o pairself fst) brls); - -fun orderlist_no_weight brls = - untaglist (sort (int_ord o pairself (snd o fst)) brls); - -fun may_unify weighted t net = - map snd ((if weighted then orderlist else orderlist_no_weight) (Net.unify_term net t)); - -fun find_erules _ [] = K [] - | find_erules w (fact :: _) = may_unify w (Logic.strip_assums_concl (Thm.prop_of fact)); - -fun find_irules w goal = may_unify w (Logic.strip_assums_concl goal); - -fun find_rules_netpair weighted facts goal (inet, enet) = - find_erules weighted facts enet @ find_irules weighted goal inet; - -fun find_rules weighted facts goals = - map (find_rules_netpair weighted facts goals) o netpairs; - - -(* wrappers *) - -fun gen_add_wrapper upd w = - Context.theory_map (Rules.map (fn Rules {next, rules, netpairs, wrappers} => - make_rules next rules netpairs (upd (fn ws => (w, stamp ()) :: ws) wrappers))); - -val addSWrapper = gen_add_wrapper Library.apfst; -val addWrapper = gen_add_wrapper Library.apsnd; - - -fun gen_wrap which ctxt = - let val Rules {wrappers, ...} = Rules.get (Context.Proof ctxt) - in fold_rev (fn (w, _) => w ctxt) (which wrappers) end; - -val Swrap = gen_wrap #1; -val wrap = gen_wrap #2; - - - -(** attributes **) - -(* add and del rules *) - - -val rule_del = Thm.declaration_attribute (fn th => Rules.map (del_rule th)); - -fun rule_add k view opt_w = - Thm.declaration_attribute (fn th => Rules.map (add_rule k opt_w (view th) o del_rule th)); - -val intro_bang = rule_add intro_bangK I; -val elim_bang = rule_add elim_bangK I; -val dest_bang = rule_add elim_bangK Tactic.make_elim; -val intro = rule_add introK I; -val elim = rule_add elimK I; -val dest = rule_add elimK Tactic.make_elim; -val intro_query = rule_add intro_queryK I; -val elim_query = rule_add elim_queryK I; -val dest_query = rule_add elim_queryK Tactic.make_elim; - -val _ = Theory.setup - (snd o Global_Theory.add_thms [((Binding.empty, Drule.equal_intr_rule), [intro_query NONE])]); - - -(* concrete syntax *) - -fun add a b c x = - (Scan.lift ((Args.bang >> K a || Args.query >> K c || Scan.succeed b) -- - Scan.option Parse.nat) >> (fn (f, n) => f n)) x; - -val _ = Theory.setup - (Attrib.setup @{binding 
intro} (add intro_bang intro intro_query) - "declaration of introduction rule" #> - Attrib.setup @{binding elim} (add elim_bang elim elim_query) - "declaration of elimination rule" #> - Attrib.setup @{binding dest} (add dest_bang dest dest_query) - "declaration of destruction rule" #> - Attrib.setup @{binding rule} (Scan.lift Args.del >> K rule_del) - "remove declaration of intro/elim/dest rule"); - -end; diff --git a/core/Pure/Isar/element.ML b/core/Pure/Isar/element.ML deleted file mode 100644 index cf4f9440..00000000 --- a/core/Pure/Isar/element.ML +++ /dev/null @@ -1,531 +0,0 @@ -(* Title: Pure/Isar/element.ML - Author: Makarius - -Explicit data structures for some Isar language elements, with derived -logical operations. -*) - -signature ELEMENT = -sig - datatype ('typ, 'term) stmt = - Shows of (Attrib.binding * ('term * 'term list) list) list | - Obtains of (binding * ((binding * 'typ option) list * 'term list)) list - type statement = (string, string) stmt - type statement_i = (typ, term) stmt - datatype ('typ, 'term, 'fact) ctxt = - Fixes of (binding * 'typ option * mixfix) list | - Constrains of (string * 'typ) list | - Assumes of (Attrib.binding * ('term * 'term list) list) list | - Defines of (Attrib.binding * ('term * 'term list)) list | - Notes of string * (Attrib.binding * ('fact * Attrib.src list) list) list - type context = (string, string, Facts.ref) ctxt - type context_i = (typ, term, thm list) ctxt - val map_ctxt: {binding: binding -> binding, typ: 'typ -> 'a, term: 'term -> 'b, - pattern: 'term -> 'b, fact: 'fact -> 'c, attrib: Attrib.src -> Attrib.src} -> - ('typ, 'term, 'fact) ctxt -> ('a, 'b, 'c) ctxt - val map_ctxt_attrib: (Attrib.src -> Attrib.src) -> - ('typ, 'term, 'fact) ctxt -> ('typ, 'term, 'fact) ctxt - val transform_ctxt: morphism -> context_i -> context_i - val transform_facts: morphism -> - (Attrib.binding * (thm list * Args.src list) list) list -> - (Attrib.binding * (thm list * Args.src list) list) list - val pretty_stmt: Proof.context -> statement_i -> Pretty.T list - val pretty_ctxt: Proof.context -> context_i -> Pretty.T list - val pretty_statement: Proof.context -> string -> thm -> Pretty.T - type witness - val prove_witness: Proof.context -> term -> tactic -> witness - val witness_proof: (witness list list -> Proof.context -> Proof.context) -> - term list list -> Proof.context -> Proof.state - val witness_proof_eqs: (witness list list -> thm list -> Proof.context -> Proof.context) -> - term list list -> term list -> Proof.context -> Proof.state - val witness_local_proof: (witness list list -> Proof.state -> Proof.state) -> - string -> term list list -> Proof.context -> bool -> Proof.state -> Proof.state - val witness_local_proof_eqs: (witness list list -> thm list -> Proof.state -> Proof.state) -> - string -> term list list -> term list -> Proof.context -> bool -> Proof.state -> - Proof.state - val transform_witness: morphism -> witness -> witness - val conclude_witness: Proof.context -> witness -> thm - val pretty_witness: Proof.context -> witness -> Pretty.T - val instT_morphism: theory -> typ Symtab.table -> morphism - val inst_morphism: theory -> typ Symtab.table * term Symtab.table -> morphism - val satisfy_morphism: witness list -> morphism - val eq_morphism: theory -> thm list -> morphism option - val init: context_i -> Context.generic -> Context.generic - val init': context_i -> Context.generic -> Context.generic - val activate_i: context_i -> Proof.context -> context_i * Proof.context - val activate: (typ, term, Facts.ref) ctxt -> 
Proof.context -> context_i * Proof.context -end; - -structure Element: ELEMENT = -struct - -(** language elements **) - -(* statement *) - -datatype ('typ, 'term) stmt = - Shows of (Attrib.binding * ('term * 'term list) list) list | - Obtains of (binding * ((binding * 'typ option) list * 'term list)) list; - -type statement = (string, string) stmt; -type statement_i = (typ, term) stmt; - - -(* context *) - -datatype ('typ, 'term, 'fact) ctxt = - Fixes of (binding * 'typ option * mixfix) list | - Constrains of (string * 'typ) list | - Assumes of (Attrib.binding * ('term * 'term list) list) list | - Defines of (Attrib.binding * ('term * 'term list)) list | - Notes of string * (Attrib.binding * ('fact * Attrib.src list) list) list; - -type context = (string, string, Facts.ref) ctxt; -type context_i = (typ, term, thm list) ctxt; - -fun map_ctxt {binding, typ, term, pattern, fact, attrib} = - fn Fixes fixes => Fixes (fixes |> map (fn (x, T, mx) => (binding x, Option.map typ T, mx))) - | Constrains xs => Constrains (xs |> map (fn (x, T) => - (Variable.check_name (binding (Binding.name x)), typ T))) - | Assumes asms => Assumes (asms |> map (fn ((a, atts), propps) => - ((binding a, map attrib atts), propps |> map (fn (t, ps) => (term t, map pattern ps))))) - | Defines defs => Defines (defs |> map (fn ((a, atts), (t, ps)) => - ((binding a, map attrib atts), (term t, map pattern ps)))) - | Notes (kind, facts) => Notes (kind, facts |> map (fn ((a, atts), bs) => - ((binding a, map attrib atts), bs |> map (fn (ths, btts) => (fact ths, map attrib btts))))); - -fun map_ctxt_attrib attrib = - map_ctxt {binding = I, typ = I, term = I, pattern = I, fact = I, attrib = attrib}; - -fun transform_ctxt phi = map_ctxt - {binding = Morphism.binding phi, - typ = Morphism.typ phi, - term = Morphism.term phi, - pattern = Morphism.term phi, - fact = Morphism.fact phi, - attrib = Args.transform_values phi}; - -fun transform_facts phi facts = - Notes ("", facts) |> transform_ctxt phi |> (fn Notes (_, facts') => facts'); - - - -(** pretty printing **) - -fun pretty_items _ _ [] = [] - | pretty_items keyword sep (x :: ys) = - Pretty.block [Pretty.keyword2 keyword, Pretty.brk 1, x] :: - map (fn y => Pretty.block [Pretty.str " ", Pretty.keyword2 sep, Pretty.brk 1, y]) ys; - -fun pretty_name_atts ctxt (b, atts) sep = - if Attrib.is_empty_binding (b, atts) then [] - else - [Pretty.block (Pretty.breaks - (Binding.pretty b :: Attrib.pretty_attribs ctxt atts @ [Pretty.str sep]))]; - - -(* pretty_stmt *) - -fun pretty_stmt ctxt = - let - val prt_typ = Pretty.quote o Syntax.pretty_typ ctxt; - val prt_term = Pretty.quote o Syntax.pretty_term ctxt; - val prt_terms = separate (Pretty.keyword2 "and") o map prt_term; - val prt_name_atts = pretty_name_atts ctxt; - - fun prt_show (a, ts) = - Pretty.block (Pretty.breaks (prt_name_atts a ":" @ prt_terms (map fst ts))); - - fun prt_var (x, SOME T) = Pretty.block - [Pretty.str (Binding.name_of x ^ " ::"), Pretty.brk 1, prt_typ T] - | prt_var (x, NONE) = Pretty.str (Binding.name_of x); - val prt_vars = separate (Pretty.keyword2 "and") o map prt_var; - - fun prt_obtain (_, ([], ts)) = Pretty.block (Pretty.breaks (prt_terms ts)) - | prt_obtain (_, (xs, ts)) = Pretty.block (Pretty.breaks - (prt_vars xs @ [Pretty.keyword2 "where"] @ prt_terms ts)); - in - fn Shows shows => pretty_items "shows" "and" (map prt_show shows) - | Obtains obtains => pretty_items "obtains" "|" (map prt_obtain obtains) - end; - - -(* pretty_ctxt *) - -fun pretty_ctxt ctxt = - let - val prt_typ = Pretty.quote o 
Syntax.pretty_typ ctxt; - val prt_term = Pretty.quote o Syntax.pretty_term ctxt; - val prt_thm = Pretty.backquote o Display.pretty_thm ctxt; - val prt_name_atts = pretty_name_atts ctxt; - - fun prt_mixfix NoSyn = [] - | prt_mixfix mx = [Pretty.brk 2, Mixfix.pretty_mixfix mx]; - - fun prt_fix (x, SOME T, mx) = Pretty.block (Pretty.str (Binding.name_of x ^ " ::") :: - Pretty.brk 1 :: prt_typ T :: Pretty.brk 1 :: prt_mixfix mx) - | prt_fix (x, NONE, mx) = Pretty.block (Pretty.str (Binding.name_of x) :: - Pretty.brk 1 :: prt_mixfix mx); - fun prt_constrain (x, T) = prt_fix (Binding.name x, SOME T, NoSyn); - - fun prt_asm (a, ts) = - Pretty.block (Pretty.breaks (prt_name_atts a ":" @ map (prt_term o fst) ts)); - fun prt_def (a, (t, _)) = - Pretty.block (Pretty.breaks (prt_name_atts a ":" @ [prt_term t])); - - fun prt_fact (ths, []) = map prt_thm ths - | prt_fact (ths, atts) = Pretty.enclose "(" ")" - (Pretty.breaks (map prt_thm ths)) :: Attrib.pretty_attribs ctxt atts; - fun prt_note (a, ths) = - Pretty.block (Pretty.breaks (flat (prt_name_atts a "=" :: map prt_fact ths))); - in - fn Fixes fixes => pretty_items "fixes" "and" (map prt_fix fixes) - | Constrains xs => pretty_items "constrains" "and" (map prt_constrain xs) - | Assumes asms => pretty_items "assumes" "and" (map prt_asm asms) - | Defines defs => pretty_items "defines" "and" (map prt_def defs) - | Notes ("", facts) => pretty_items "notes" "and" (map prt_note facts) - | Notes (kind, facts) => pretty_items ("notes " ^ kind) "and" (map prt_note facts) - end; - - -(* pretty_statement *) - -local - -fun standard_elim th = - (case Object_Logic.elim_concl th of - SOME C => - let - val cert = Thm.cterm_of (Thm.theory_of_thm th); - val thesis = Var ((Auto_Bind.thesisN, Thm.maxidx_of th + 1), fastype_of C); - val th' = Thm.instantiate ([], [(cert C, cert thesis)]) th; - in (th', true) end - | NONE => (th, false)); - -fun thm_name kind th prts = - let val head = - if Thm.has_name_hint th then - Pretty.block [Pretty.keyword1 kind, - Pretty.brk 1, Pretty.str (Long_Name.base_name (Thm.get_name_hint th) ^ ":")] - else Pretty.keyword1 kind - in Pretty.block (Pretty.fbreaks (head :: prts)) end; - -fun obtain prop ctxt = - let - val ((ps, prop'), ctxt') = Variable.focus prop ctxt; - fun fix (x, T) = (Binding.name (Variable.revert_fixed ctxt' x), SOME T); - val xs = map (fix o #2) ps; - val As = Logic.strip_imp_prems prop'; - in ((Binding.empty, (xs, As)), ctxt') end; - -in - -fun pretty_statement ctxt kind raw_th = - let - val thy = Proof_Context.theory_of ctxt; - - val (th, is_elim) = standard_elim (Raw_Simplifier.norm_hhf ctxt raw_th); - val ((_, [th']), ctxt') = Variable.import true [th] (Variable.set_body true ctxt); - val prop = Thm.prop_of th'; - val (prems, concl) = Logic.strip_horn prop; - val concl_term = Object_Logic.drop_judgment thy concl; - - val fixes = fold_aterms (fn v as Free (x, T) => - if Variable.newly_fixed ctxt' ctxt x andalso not (v aconv concl_term) - then insert (op =) (Variable.revert_fixed ctxt' x, T) else I | _ => I) prop [] |> rev; - val (assumes, cases) = take_suffix (fn prem => - is_elim andalso concl aconv Logic.strip_assums_concl prem) prems; - in - pretty_ctxt ctxt' (Fixes (map (fn (x, T) => (Binding.name x, SOME T, NoSyn)) fixes)) @ - pretty_ctxt ctxt' (Assumes (map (fn t => (Attrib.empty_binding, [(t, [])])) assumes)) @ - (if null cases then pretty_stmt ctxt' (Shows [(Attrib.empty_binding, [(concl, [])])]) - else - let val (clauses, ctxt'') = fold_map obtain cases ctxt' - in pretty_stmt ctxt'' (Obtains clauses) end) - 
end |> thm_name kind raw_th; - -end; - - - -(** logical operations **) - -(* witnesses -- hypotheses as protected facts *) - -datatype witness = Witness of term * thm; - -val mark_witness = Logic.protect; -fun witness_prop (Witness (t, _)) = t; -fun witness_hyps (Witness (_, th)) = Thm.hyps_of th; -fun map_witness f (Witness witn) = Witness (f witn); - -fun transform_witness phi = map_witness (fn (t, th) => (Morphism.term phi t, Morphism.thm phi th)); - -fun prove_witness ctxt t tac = - Witness (t, - Thm.close_derivation - (Goal.prove ctxt [] [] (mark_witness t) (fn _ => rtac Drule.protectI 1 THEN tac))); - - -local - -val refine_witness = - Proof.refine (Method.Basic (K (RAW_METHOD - (K (ALLGOALS (CONJUNCTS (ALLGOALS (CONJUNCTS (TRYALL (rtac Drule.protectI)))))))))); - -fun gen_witness_proof proof after_qed wit_propss eq_props = - let - val propss = - (map o map) (fn prop => (mark_witness prop, [])) wit_propss @ - [map (rpair []) eq_props]; - fun after_qed' thmss = - let val (wits, eqs) = split_last ((map o map) Thm.close_derivation thmss); - in after_qed ((map2 o map2) (curry Witness) wit_propss wits) eqs end; - in proof after_qed' propss #> refine_witness #> Seq.hd end; - -fun proof_local cmd goal_ctxt int after_qed' propss = - Proof.map_context (K goal_ctxt) #> - Proof.local_goal (K (K ())) (K I) Proof_Context.bind_propp_i cmd NONE - after_qed' (map (pair Thm.empty_binding) propss); - -in - -fun witness_proof after_qed wit_propss = - gen_witness_proof (Proof.theorem NONE) (fn wits => fn _ => after_qed wits) - wit_propss []; - -val witness_proof_eqs = gen_witness_proof (Proof.theorem NONE); - -fun witness_local_proof after_qed cmd wit_propss goal_ctxt int = - gen_witness_proof (proof_local cmd goal_ctxt int) - (fn wits => fn _ => after_qed wits) wit_propss []; - -fun witness_local_proof_eqs after_qed cmd wit_propss eq_props goal_ctxt int = - gen_witness_proof (proof_local cmd goal_ctxt int) after_qed wit_propss eq_props; - -end; - - -fun compose_witness (Witness (_, th)) r = - let - val th' = Goal.conclude th; - val A = Thm.cprem_of r 1; - in - Thm.implies_elim - (Conv.gconv_rule Drule.beta_eta_conversion 1 r) - (Conv.fconv_rule Drule.beta_eta_conversion - (Thm.instantiate (Thm.match (Thm.cprop_of th', A)) th')) - end; - -fun conclude_witness ctxt (Witness (_, th)) = - Thm.close_derivation (Raw_Simplifier.norm_hhf_protect ctxt (Goal.conclude th)); - -fun pretty_witness ctxt witn = - let val prt_term = Pretty.quote o Syntax.pretty_term ctxt in - Pretty.block (prt_term (witness_prop witn) :: - (if Config.get ctxt show_hyps then [Pretty.brk 2, Pretty.list "[" "]" - (map prt_term (witness_hyps witn))] else [])) - end; - - -(* derived rules *) - -fun instantiate_tfrees thy subst th = - let - val certT = Thm.ctyp_of thy; - val idx = Thm.maxidx_of th + 1; - fun cert_inst (a, (S, T)) = (certT (TVar ((a, idx), S)), certT T); - - fun add_inst (a, S) insts = - if AList.defined (op =) insts a then insts - else (case AList.lookup (op =) subst a of NONE => insts | SOME T => (a, (S, T)) :: insts); - val insts = - (Term.fold_types o Term.fold_atyps) (fn TFree v => add_inst v | _ => I) - (Thm.full_prop_of th) []; - in - th - |> Thm.generalize (map fst insts, []) idx - |> Thm.instantiate (map cert_inst insts, []) - end; - -fun instantiate_frees thy subst = - let val cert = Thm.cterm_of thy in - Drule.forall_intr_list (map (cert o Free o fst) subst) #> - Drule.forall_elim_list (map (cert o snd) subst) - end; - -fun hyps_rule rule th = - let val {hyps, ...} = Thm.crep_thm th in - Drule.implies_elim_list - 
(rule (Drule.implies_intr_list hyps th)) - (map (Thm.assume o Drule.cterm_rule rule) hyps) - end; - - -(* instantiate types *) - -fun instT_type_same env = - if Symtab.is_empty env then Same.same - else - Term_Subst.map_atypsT_same - (fn TFree (a, _) => (case Symtab.lookup env a of SOME T => T | NONE => raise Same.SAME) - | _ => raise Same.SAME); - -fun instT_term_same env = - if Symtab.is_empty env then Same.same - else Term_Subst.map_types_same (instT_type_same env); - -val instT_type = Same.commit o instT_type_same; -val instT_term = Same.commit o instT_term_same; - -fun instT_subst env th = - (Thm.fold_terms o Term.fold_types o Term.fold_atyps) - (fn T as TFree (a, _) => - let val T' = the_default T (Symtab.lookup env a) - in if T = T' then I else insert (eq_fst (op =)) (a, T') end - | _ => I) th []; - -fun instT_thm thy env th = - if Symtab.is_empty env then th - else - let val subst = instT_subst env th - in if null subst then th else th |> hyps_rule (instantiate_tfrees thy subst) end; - -fun instT_morphism thy env = - Morphism.morphism "Element.instT" - {binding = [], - typ = [instT_type env], - term = [instT_term env], - fact = [map (instT_thm thy env)]}; - - -(* instantiate types and terms *) - -fun inst_term (envT, env) = - if Symtab.is_empty env then instT_term envT - else - instT_term envT #> - Same.commit (Term_Subst.map_aterms_same - (fn Free (x, _) => (case Symtab.lookup env x of SOME t => t | NONE => raise Same.SAME) - | _ => raise Same.SAME)) #> - Envir.beta_norm; - -fun inst_subst (envT, env) th = - (Thm.fold_terms o Term.fold_aterms) - (fn Free (x, T) => - let - val T' = instT_type envT T; - val t = Free (x, T'); - val t' = the_default t (Symtab.lookup env x); - in if t aconv t' then I else insert (eq_fst (op =)) ((x, T'), t') end - | _ => I) th []; - -fun inst_thm thy (envT, env) th = - if Symtab.is_empty env then instT_thm thy envT th - else - let - val substT = instT_subst envT th; - val subst = inst_subst (envT, env) th; - in - if null substT andalso null subst then th - else th |> hyps_rule - (instantiate_tfrees thy substT #> - instantiate_frees thy subst #> - Conv.fconv_rule (Thm.beta_conversion true)) - end; - -fun inst_morphism thy (envT, env) = - Morphism.morphism "Element.inst" - {binding = [], - typ = [instT_type envT], - term = [inst_term (envT, env)], - fact = [map (inst_thm thy (envT, env))]}; - - -(* satisfy hypotheses *) - -fun satisfy_thm witns thm = - thm |> fold (fn hyp => - (case find_first (fn Witness (t, _) => Thm.term_of hyp aconv t) witns of - NONE => I - | SOME w => Thm.implies_intr hyp #> compose_witness w)) (#hyps (Thm.crep_thm thm)); - -val satisfy_morphism = Morphism.thm_morphism "Element.satisfy" o satisfy_thm; - - -(* rewriting with equalities *) - -fun eq_morphism _ [] = NONE - | eq_morphism thy thms = - let - (* FIXME proper context!? 
*) - fun rewrite th = rewrite_rule (Proof_Context.init_global (Thm.theory_of_thm th)) thms th; - val phi = - Morphism.morphism "Element.eq_morphism" - {binding = [], - typ = [], - term = [Raw_Simplifier.rewrite_term thy thms []], - fact = [map rewrite]}; - in SOME phi end; - - - -(** activate in context **) - -(* init *) - -fun init (Fixes fixes) = Context.map_proof (Proof_Context.add_fixes fixes #> #2) - | init (Constrains _) = I - | init (Assumes asms) = Context.map_proof (fn ctxt => - let - val asms' = Attrib.map_specs (map (Attrib.attribute ctxt)) asms; - val (_, ctxt') = ctxt - |> fold Variable.auto_fixes (maps (map #1 o #2) asms') - |> Proof_Context.add_assms_i Assumption.assume_export asms'; - in ctxt' end) - | init (Defines defs) = Context.map_proof (fn ctxt => - let - val defs' = Attrib.map_specs (map (Attrib.attribute ctxt)) defs; - val asms = defs' |> map (fn (b, (t, ps)) => - let val (_, t') = Local_Defs.cert_def ctxt t (* FIXME adapt ps? *) - in (t', (b, [(t', ps)])) end); - val (_, ctxt') = ctxt - |> fold Variable.auto_fixes (map #1 asms) - |> Proof_Context.add_assms_i Local_Defs.def_export (map #2 asms); - in ctxt' end) - | init (Notes (kind, facts)) = Attrib.generic_notes kind facts #> #2; - -fun init' elem context = - context - |> Context.mapping I (Thm.unchecked_hyps #> Context_Position.not_really) - |> init elem - |> Context.mapping I (fn ctxt => - let val ctxt0 = Context.proof_of context - in ctxt |> Context_Position.restore_visible ctxt0 |> Thm.restore_hyps ctxt0 end); - - -(* activate *) - -fun activate_i elem ctxt = - let - val elem' = - (case map_ctxt_attrib Args.init_assignable elem of - Defines defs => - Defines (defs |> map (fn ((a, atts), (t, ps)) => - ((Thm.def_binding_optional (Binding.name (#1 (#1 (Local_Defs.cert_def ctxt t)))) a, atts), - (t, ps)))) - | e => e); - val ctxt' = Context.proof_map (init elem') ctxt; - in (map_ctxt_attrib Args.closure elem', ctxt') end; - -fun activate raw_elem ctxt = - let val elem = raw_elem |> map_ctxt - {binding = I, - typ = I, - term = I, - pattern = I, - fact = Proof_Context.get_fact ctxt, - attrib = Attrib.check_src ctxt} - in activate_i elem ctxt end; - -end; diff --git a/core/Pure/Isar/expression.ML b/core/Pure/Isar/expression.ML deleted file mode 100644 index bbfeb254..00000000 --- a/core/Pure/Isar/expression.ML +++ /dev/null @@ -1,997 +0,0 @@ -(* Title: Pure/Isar/expression.ML - Author: Clemens Ballarin, TU Muenchen - -Locale expressions and user interface layer of locales. 
-*) - -signature EXPRESSION = -sig - (* Locale expressions *) - datatype 'term map = Positional of 'term option list | Named of (string * 'term) list - type ('name, 'term) expr = ('name * ((string * bool) * 'term map)) list - type expression_i = (string, term) expr * (binding * typ option * mixfix) list - type expression = (xstring * Position.T, string) expr * (binding * string option * mixfix) list - - (* Processing of context statements *) - val cert_statement: Element.context_i list -> (term * term list) list list -> - Proof.context -> (term * term list) list list * Proof.context - val read_statement: Element.context list -> (string * string list) list list -> - Proof.context -> (term * term list) list list * Proof.context - - (* Declaring locales *) - val cert_declaration: expression_i -> (Proof.context -> Proof.context) -> - Element.context_i list -> - Proof.context -> (((string * typ) * mixfix) list * (string * morphism) list - * Element.context_i list * Proof.context) * ((string * typ) list * Proof.context) - val cert_read_declaration: expression_i -> (Proof.context -> Proof.context) -> - Element.context list -> - Proof.context -> (((string * typ) * mixfix) list * (string * morphism) list - * Element.context_i list * Proof.context) * ((string * typ) list * Proof.context) - (*FIXME*) - val read_declaration: expression -> (Proof.context -> Proof.context) -> Element.context list -> - Proof.context -> (((string * typ) * mixfix) list * (string * morphism) list - * Element.context_i list * Proof.context) * ((string * typ) list * Proof.context) - val add_locale: binding -> binding -> - expression_i -> Element.context_i list -> theory -> string * local_theory - val add_locale_cmd: binding -> binding -> - expression -> Element.context list -> theory -> string * local_theory - - (* Interpretation *) - val cert_goal_expression: expression_i -> Proof.context -> - (term list list * (string * morphism) list * morphism) * Proof.context - val read_goal_expression: expression -> Proof.context -> - (term list list * (string * morphism) list * morphism) * Proof.context - val permanent_interpretation: expression_i -> (Attrib.binding * term) list -> - local_theory -> Proof.state - val ephemeral_interpretation: expression_i -> (Attrib.binding * term) list -> - local_theory -> Proof.state - val interpret: expression_i -> (Attrib.binding * term) list -> bool -> Proof.state -> Proof.state - val interpret_cmd: expression -> (Attrib.binding * string) list -> - bool -> Proof.state -> Proof.state - val interpretation: expression_i -> (Attrib.binding * term) list -> local_theory -> Proof.state - val interpretation_cmd: expression -> (Attrib.binding * string) list -> - local_theory -> Proof.state - val sublocale: expression_i -> (Attrib.binding * term) list -> local_theory -> Proof.state - val sublocale_cmd: expression -> (Attrib.binding * string) list -> local_theory -> Proof.state - val sublocale_global: string -> expression_i -> - (Attrib.binding * term) list -> theory -> Proof.state - val sublocale_global_cmd: xstring * Position.T -> expression -> - (Attrib.binding * string) list -> theory -> Proof.state - - (* Diagnostic *) - val print_dependencies: Proof.context -> bool -> expression -> unit -end; - -structure Expression : EXPRESSION = -struct - -datatype ctxt = datatype Element.ctxt; - - -(*** Expressions ***) - -datatype 'term map = - Positional of 'term option list | - Named of (string * 'term) list; - -type ('name, 'term) expr = ('name * ((string * bool) * 'term map)) list; - -type expression_i = 
(string, term) expr * (binding * typ option * mixfix) list; -type expression = (xstring * Position.T, string) expr * (binding * string option * mixfix) list; - - -(** Internalise locale names in expr **) - -fun check_expr thy instances = map (apfst (Locale.check thy)) instances; - - -(** Parameters of expression **) - -(*Sanity check of instantiations and extraction of implicit parameters. - The latter only occurs iff strict = false. - Positional instantiations are extended to match full length of parameter list - of instantiated locale.*) - -fun parameters_of thy strict (expr, fixed) = - let - val ctxt = Proof_Context.init_global thy; - - fun reject_dups message xs = - (case duplicates (op =) xs of - [] => () - | dups => error (message ^ commas dups)); - - fun parm_eq ((p1: string, mx1: mixfix), (p2, mx2)) = p1 = p2 andalso - (mx1 = mx2 orelse error ("Conflicting syntax for parameter " ^ quote p1 ^ " in expression")); - - fun params_loc loc = Locale.params_of thy loc |> map (apfst #1); - fun params_inst (loc, (prfx, Positional insts)) = - let - val ps = params_loc loc; - val d = length ps - length insts; - val insts' = - if d < 0 then - error ("More arguments than parameters in instantiation of locale " ^ - quote (Locale.markup_name ctxt loc)) - else insts @ replicate d NONE; - val ps' = (ps ~~ insts') |> - map_filter (fn (p, NONE) => SOME p | (_, SOME _) => NONE); - in (ps', (loc, (prfx, Positional insts'))) end - | params_inst (loc, (prfx, Named insts)) = - let - val _ = - reject_dups "Duplicate instantiation of the following parameter(s): " - (map fst insts); - val ps' = (insts, params_loc loc) |-> fold (fn (p, _) => fn ps => - if AList.defined (op =) ps p then AList.delete (op =) p ps - else error (quote p ^ " not a parameter of instantiated expression")); - in (ps', (loc, (prfx, Named insts))) end; - fun params_expr is = - let - val (is', ps') = fold_map (fn i => fn ps => - let - val (ps', i') = params_inst i; - val ps'' = distinct parm_eq (ps @ ps'); - in (i', ps'') end) is [] - in (ps', is') end; - - val (implicit, expr') = params_expr expr; - - val implicit' = map #1 implicit; - val fixed' = map (Variable.check_name o #1) fixed; - val _ = reject_dups "Duplicate fixed parameter(s): " fixed'; - val implicit'' = - if strict then [] - else - let - val _ = - reject_dups - "Parameter(s) declared simultaneously in expression and for clause: " - (implicit' @ fixed'); - in map (fn (x, mx) => (Binding.name x, NONE, mx)) implicit end; - - in (expr', implicit'' @ fixed) end; - - -(** Read instantiation **) - -(* Parse positional or named instantiation *) - -local - -fun prep_inst prep_term ctxt parms (Positional insts) = - (insts ~~ parms) |> map - (fn (NONE, p) => Free (p, dummyT) - | (SOME t, _) => prep_term ctxt t) - | prep_inst prep_term ctxt parms (Named insts) = - parms |> map (fn p => - (case AList.lookup (op =) insts p of - SOME t => prep_term ctxt t | - NONE => Free (p, dummyT))); - -in - -fun parse_inst x = prep_inst Syntax.parse_term x; -fun make_inst x = prep_inst (K I) x; - -end; - - -(* Instantiation morphism *) - -fun inst_morphism (parm_names, parm_types) ((prfx, mandatory), insts') ctxt = - let - (* parameters *) - val type_parm_names = fold Term.add_tfreesT parm_types [] |> map fst; - - (* type inference and contexts *) - val parm_types' = map (Type_Infer.paramify_vars o Logic.varifyT_global) parm_types; - val type_parms = fold Term.add_tvarsT parm_types' [] |> map (Logic.mk_type o TVar); - val arg = type_parms @ map2 Type.constraint parm_types' insts'; - val res = 
Syntax.check_terms ctxt arg; - val ctxt' = ctxt |> fold Variable.auto_fixes res; - - (* instantiation *) - val (type_parms'', res') = chop (length type_parms) res; - val insts'' = (parm_names ~~ res') |> map_filter - (fn inst as (x, Free (y, _)) => if x = y then NONE else SOME inst - | inst => SOME inst); - val instT = Symtab.make (type_parm_names ~~ map Logic.dest_type type_parms''); - val inst = Symtab.make insts''; - in - (Element.inst_morphism (Proof_Context.theory_of ctxt) (instT, inst) $> - Morphism.binding_morphism "Expression.inst" (Binding.prefix mandatory prfx), ctxt') - end; - - -(*** Locale processing ***) - -(** Parsing **) - -fun parse_elem prep_typ prep_term ctxt = - Element.map_ctxt - {binding = I, - typ = prep_typ ctxt, - term = prep_term (Proof_Context.set_mode Proof_Context.mode_schematic ctxt), - pattern = prep_term (Proof_Context.set_mode Proof_Context.mode_pattern ctxt), - fact = I, - attrib = I}; - -fun parse_concl prep_term ctxt concl = - (map o map) (fn (t, ps) => - (prep_term (Proof_Context.set_mode Proof_Context.mode_schematic ctxt) t, - map (prep_term (Proof_Context.set_mode Proof_Context.mode_pattern ctxt)) ps)) concl; - - -(** Simultaneous type inference: instantiations + elements + conclusion **) - -local - -fun mk_type T = (Logic.mk_type T, []); -fun mk_term t = (t, []); -fun mk_propp (p, pats) = (Type.constraint propT p, pats); - -fun dest_type (T, []) = Logic.dest_type T; -fun dest_term (t, []) = t; -fun dest_propp (p, pats) = (p, pats); - -fun extract_inst (_, (_, ts)) = map mk_term ts; -fun restore_inst ((l, (p, _)), cs) = (l, (p, map dest_term cs)); - -fun extract_elem (Fixes fixes) = map (#2 #> the_list #> map mk_type) fixes - | extract_elem (Constrains csts) = map (#2 #> single #> map mk_type) csts - | extract_elem (Assumes asms) = map (#2 #> map mk_propp) asms - | extract_elem (Defines defs) = map (fn (_, (t, ps)) => [mk_propp (t, ps)]) defs - | extract_elem (Notes _) = []; - -fun restore_elem (Fixes fixes, css) = - (fixes ~~ css) |> map (fn ((x, _, mx), cs) => - (x, cs |> map dest_type |> try hd, mx)) |> Fixes - | restore_elem (Constrains csts, css) = - (csts ~~ css) |> map (fn ((x, _), cs) => - (x, cs |> map dest_type |> hd)) |> Constrains - | restore_elem (Assumes asms, css) = - (asms ~~ css) |> map (fn ((b, _), cs) => (b, map dest_propp cs)) |> Assumes - | restore_elem (Defines defs, css) = - (defs ~~ css) |> map (fn ((b, _), [c]) => (b, dest_propp c)) |> Defines - | restore_elem (Notes notes, _) = Notes notes; - -fun check cs context = - let - fun prep (_, pats) (ctxt, t :: ts) = - let val ctxt' = Variable.auto_fixes t ctxt - in - ((t, Syntax.check_props (Proof_Context.set_mode Proof_Context.mode_pattern ctxt') pats), - (ctxt', ts)) - end; - val (cs', (context', _)) = fold_map prep cs - (context, Syntax.check_terms - (Proof_Context.set_mode Proof_Context.mode_schematic context) (map fst cs)); - in (cs', context') end; - -in - -fun check_autofix insts elems concl ctxt = - let - val inst_cs = map extract_inst insts; - val elem_css = map extract_elem elems; - val concl_cs = (map o map) mk_propp concl; - (* Type inference *) - val (inst_cs' :: css', ctxt') = - (fold_burrow o fold_burrow) check (inst_cs :: elem_css @ [concl_cs]) ctxt; - val (elem_css', [concl_cs']) = chop (length elem_css) css'; - in - (map restore_inst (insts ~~ inst_cs'), - map restore_elem (elems ~~ elem_css'), - concl_cs', ctxt') - end; - -end; - - -(** Prepare locale elements **) - -fun declare_elem prep_vars (Fixes fixes) ctxt = - let val (vars, _) = prep_vars fixes ctxt - in 
ctxt |> Proof_Context.add_fixes vars |> snd end - | declare_elem prep_vars (Constrains csts) ctxt = - ctxt |> prep_vars (map (fn (x, T) => (Binding.name x, SOME T, NoSyn)) csts) |> snd - | declare_elem _ (Assumes _) ctxt = ctxt - | declare_elem _ (Defines _) ctxt = ctxt - | declare_elem _ (Notes _) ctxt = ctxt; - - -(** Finish locale elements **) - -fun finish_inst ctxt (loc, (prfx, inst)) = - let - val thy = Proof_Context.theory_of ctxt; - val (parm_names, parm_types) = Locale.params_of thy loc |> map #1 |> split_list; - val (morph, _) = inst_morphism (parm_names, parm_types) (prfx, inst) ctxt; - in (loc, morph) end; - -fun finish_fixes (parms: (string * typ) list) = map (fn (binding, _, mx) => - let val x = Binding.name_of binding - in (binding, AList.lookup (op =) parms x, mx) end); - -local - -fun closeup _ _ false elem = elem - | closeup (outer_ctxt, ctxt) parms true elem = - let - (* FIXME consider closing in syntactic phase -- before type checking *) - fun close_frees t = - let - val rev_frees = - Term.fold_aterms (fn Free (x, T) => - if Variable.is_fixed outer_ctxt x orelse AList.defined (op =) parms x then I - else insert (op =) (x, T) | _ => I) t []; - in fold (Logic.all o Free) rev_frees t end; - - fun no_binds [] = [] - | no_binds _ = error "Illegal term bindings in context element"; - in - (case elem of - Assumes asms => Assumes (asms |> map (fn (a, propps) => - (a, map (fn (t, ps) => (close_frees t, no_binds ps)) propps))) - | Defines defs => Defines (defs |> map (fn ((name, atts), (t, ps)) => - let val ((c, _), t') = Local_Defs.cert_def ctxt (close_frees t) - in ((Thm.def_binding_optional (Binding.name c) name, atts), (t', no_binds ps)) end)) - | e => e) - end; - -in - -fun finish_elem _ parms _ (Fixes fixes) = Fixes (finish_fixes parms fixes) - | finish_elem _ _ _ (Constrains _) = Constrains [] - | finish_elem ctxts parms do_close (Assumes asms) = closeup ctxts parms do_close (Assumes asms) - | finish_elem ctxts parms do_close (Defines defs) = closeup ctxts parms do_close (Defines defs) - | finish_elem _ _ _ (Notes facts) = Notes facts; - -end; - - -(** Process full context statement: instantiations + elements + conclusion **) - -(* Interleave incremental parsing and type inference over entire parsed stretch. 
*) - -local - -fun prep_full_context_statement - parse_typ parse_prop prep_vars_elem prep_inst prep_vars_inst prep_expr - {strict, do_close, fixed_frees} raw_import init_body raw_elems raw_concl ctxt1 = - let - val thy = Proof_Context.theory_of ctxt1; - - val (raw_insts, fixed) = parameters_of thy strict (apfst (prep_expr thy) raw_import); - - fun prep_insts_cumulative (loc, (prfx, inst)) (i, insts, ctxt) = - let - val (parm_names, parm_types) = Locale.params_of thy loc |> map #1 |> split_list; - val inst' = prep_inst ctxt parm_names inst; - val parm_types' = parm_types - |> map (Type_Infer.paramify_vars o - Term.map_type_tvar (fn ((x, _), S) => TVar ((x, i), S)) o Logic.varifyT_global); - val inst'' = map2 Type.constraint parm_types' inst'; - val insts' = insts @ [(loc, (prfx, inst''))]; - val (insts'', _, _, _) = check_autofix insts' [] [] ctxt; - val inst''' = insts'' |> List.last |> snd |> snd; - val (morph, _) = inst_morphism (parm_names, parm_types) (prfx, inst''') ctxt; - val ctxt'' = Locale.activate_declarations (loc, morph) ctxt; - in (i + 1, insts', ctxt'') end; - - fun prep_elem raw_elem ctxt = - let - val ctxt' = ctxt - |> Context_Position.set_visible false - |> declare_elem prep_vars_elem raw_elem - |> Context_Position.restore_visible ctxt; - val elems' = parse_elem parse_typ parse_prop ctxt' raw_elem; - in (elems', ctxt') end; - - fun prep_concl raw_concl (insts, elems, ctxt) = - let - val concl = parse_concl parse_prop ctxt raw_concl; - in check_autofix insts elems concl ctxt end; - - val fors = prep_vars_inst fixed ctxt1 |> fst; - val ctxt2 = ctxt1 |> Proof_Context.add_fixes fors |> snd; - val (_, insts', ctxt3) = fold prep_insts_cumulative raw_insts (0, [], ctxt2); - - val _ = - if fixed_frees then () - else - (case fold (fold (Variable.add_frees ctxt3) o snd o snd) insts' [] of - [] => () - | frees => error ("Illegal free variables in expression: " ^ - commas_quote (map (Syntax.string_of_term ctxt3 o Free) (rev frees)))); - - val ctxt4 = init_body ctxt3; - val (elems, ctxt5) = fold_map prep_elem raw_elems ctxt4; - val (insts, elems', concl, ctxt6) = prep_concl raw_concl (insts', elems, ctxt5); - - (* Retrieve parameter types *) - val xs = maps (fn Fixes fixes => map (Variable.check_name o #1) fixes | _ => []) - (Fixes fors :: elems'); - val (Ts, ctxt7) = fold_map Proof_Context.inferred_param xs ctxt6; - val parms = xs ~~ Ts; (* params from expression and elements *) - - val fors' = finish_fixes parms fors; - val fixed = map (fn (b, SOME T, mx) => ((Binding.name_of b, T), mx)) fors'; - val deps = map (finish_inst ctxt6) insts; - val elems'' = map (finish_elem (ctxt1, ctxt6) parms do_close) elems'; - - in ((fixed, deps, elems'', concl), (parms, ctxt7)) end; - -in - -fun cert_full_context_statement x = - prep_full_context_statement (K I) (K I) Proof_Context.cert_vars - make_inst Proof_Context.cert_vars (K I) x; - -fun cert_read_full_context_statement x = - prep_full_context_statement Syntax.parse_typ Syntax.parse_prop Proof_Context.read_vars - make_inst Proof_Context.cert_vars (K I) x; - -fun read_full_context_statement x = - prep_full_context_statement Syntax.parse_typ Syntax.parse_prop Proof_Context.read_vars - parse_inst Proof_Context.read_vars check_expr x; - -end; - - -(* Context statement: elements + conclusion *) - -local - -fun prep_statement prep activate raw_elems raw_concl context = - let - val ((_, _, elems, concl), _) = - prep {strict = true, do_close = false, fixed_frees = true} - ([], []) I raw_elems raw_concl context; - val (_, context') = context - |> 
Proof_Context.set_stmt true - |> fold_map activate elems; - in (concl, context') end; - -in - -fun cert_statement x = prep_statement cert_full_context_statement Element.activate_i x; -fun read_statement x = prep_statement read_full_context_statement Element.activate x; - -end; - - -(* Locale declaration: import + elements *) - -fun fix_params params = - Proof_Context.add_fixes (map (fn ((x, T), mx) => (Binding.name x, SOME T, mx)) params) #> snd; - -local - -fun prep_declaration prep activate raw_import init_body raw_elems context = - let - val ((fixed, deps, elems, _), (parms, ctxt')) = - prep {strict = false, do_close = true, fixed_frees = false} - raw_import init_body raw_elems [] context; - (* Declare parameters and imported facts *) - val context' = context |> - fix_params fixed |> - fold (Context.proof_map o Locale.activate_facts NONE) deps; - val (elems', context'') = context' |> - Proof_Context.set_stmt true |> - fold_map activate elems; - in ((fixed, deps, elems', context''), (parms, ctxt')) end; - -in - -fun cert_declaration x = prep_declaration cert_full_context_statement Element.activate_i x; -fun cert_read_declaration x = prep_declaration cert_read_full_context_statement Element.activate x; -fun read_declaration x = prep_declaration read_full_context_statement Element.activate x; - -end; - - -(* Locale expression to set up a goal *) - -local - -fun props_of thy (name, morph) = - let - val (asm, defs) = Locale.specification_of thy name; - in - (case asm of NONE => defs | SOME asm => asm :: defs) - |> map (Morphism.term morph) - end; - -fun prep_goal_expression prep expression context = - let - val thy = Proof_Context.theory_of context; - - val ((fixed, deps, _, _), _) = - prep {strict = true, do_close = true, fixed_frees = true} expression I [] [] context; - (* proof obligations *) - val propss = map (props_of thy) deps; - - val goal_ctxt = context |> - fix_params fixed |> - (fold o fold) Variable.auto_fixes propss; - - val export = Variable.export_morphism goal_ctxt context; - val exp_fact = Drule.zero_var_indexes_list o map Thm.strip_shyps o Morphism.fact export; - val exp_term = Term_Subst.zero_var_indexes o Morphism.term export; - val exp_typ = Logic.type_map exp_term; - val export' = - Morphism.morphism "Expression.prep_goal" - {binding = [], typ = [exp_typ], term = [exp_term], fact = [exp_fact]}; - in ((propss, deps, export'), goal_ctxt) end; - -in - -fun cert_goal_expression x = prep_goal_expression cert_full_context_statement x; -fun read_goal_expression x = prep_goal_expression read_full_context_statement x; - -end; - - -(*** Locale declarations ***) - -(* extract specification text *) - -val norm_term = Envir.beta_norm oo Term.subst_atomic; - -fun bind_def ctxt eq (xs, env, eqs) = - let - val _ = Local_Defs.cert_def ctxt eq; - val ((y, T), b) = Local_Defs.abs_def eq; - val b' = norm_term env b; - fun err msg = error (msg ^ ": " ^ quote y); - in - (case filter (fn (Free (y', _), _) => y = y' | _ => false) env of - [] => (Term.add_frees b' xs, (Free (y, T), b') :: env, eq :: eqs) - | dups => - if forall (fn (_, b'') => b' aconv b'') dups then (xs, env, eqs) - else err "Attempt to redefine variable") - end; - -(* text has the following structure: - (((exts, exts'), (ints, ints')), (xs, env, defs)) - where - exts: external assumptions (terms in assumes elements) - exts': dito, normalised wrt. env - ints: internal assumptions (terms in assumptions from insts) - ints': dito, normalised wrt. 
env - xs: the free variables in exts' and ints' and rhss of definitions, - this includes parameters except defined parameters - env: list of term pairs encoding substitutions, where the first term - is a free variable; substitutions represent defines elements and - the rhs is normalised wrt. the previous env - defs: the equations from the defines elements - *) - -fun eval_text _ _ (Fixes _) text = text - | eval_text _ _ (Constrains _) text = text - | eval_text _ is_ext (Assumes asms) - (((exts, exts'), (ints, ints')), (xs, env, defs)) = - let - val ts = maps (map #1 o #2) asms; - val ts' = map (norm_term env) ts; - val spec' = - if is_ext then ((exts @ ts, exts' @ ts'), (ints, ints')) - else ((exts, exts'), (ints @ ts, ints' @ ts')); - in (spec', (fold Term.add_frees ts' xs, env, defs)) end - | eval_text ctxt _ (Defines defs) (spec, binds) = - (spec, fold (bind_def ctxt o #1 o #2) defs binds) - | eval_text _ _ (Notes _) text = text; - -fun eval_inst ctxt (loc, morph) text = - let - val thy = Proof_Context.theory_of ctxt; - val (asm, defs) = Locale.specification_of thy loc; - val asm' = Option.map (Morphism.term morph) asm; - val defs' = map (Morphism.term morph) defs; - val text' = - text |> - (if is_some asm then - eval_text ctxt false (Assumes [(Attrib.empty_binding, [(the asm', [])])]) - else I) |> - (if not (null defs) then - eval_text ctxt false (Defines (map (fn def => (Attrib.empty_binding, (def, []))) defs')) - else I) -(* FIXME clone from locale.ML *) - in text' end; - -fun eval_elem ctxt elem text = - eval_text ctxt true elem text; - -fun eval ctxt deps elems = - let - val text' = fold (eval_inst ctxt) deps ((([], []), ([], [])), ([], [], [])); - val ((spec, (_, _, defs))) = fold (eval_elem ctxt) elems text'; - in (spec, defs) end; - -(* axiomsN: name of theorem set with destruct rules for locale predicates, - also name suffix of delta predicates and assumptions. *) - -val axiomsN = "axioms"; - -local - -(* introN: name of theorems for introduction rules of locale and - delta predicates *) - -val introN = "intro"; - -fun atomize_spec thy ts = - let - val t = Logic.mk_conjunction_balanced ts; - val body = Object_Logic.atomize_term thy t; - val bodyT = Term.fastype_of body; - in - if bodyT = propT - then (t, propT, Thm.reflexive (Thm.cterm_of thy t)) - else (body, bodyT, Object_Logic.atomize (Proof_Context.init_global thy) (Thm.cterm_of thy t)) - end; - -(* achieve plain syntax for locale predicates (without "PROP") *) - -fun aprop_tr' n c = - let - val c' = Lexicon.mark_const c; - fun tr' (_: Proof.context) T args = - if T <> dummyT andalso length args = n - then Syntax.const "_aprop" $ Term.list_comb (Syntax.const c', args) - else raise Match; - in (c', tr') end; - -(* define one predicate including its intro rule and axioms - - binding: predicate name - - parms: locale parameters - - defs: thms representing substitutions from defines elements - - ts: terms representing locale assumptions (not normalised wrt. defs) - - norm_ts: terms representing locale assumptions (normalised wrt. 
defs) - - thy: the theory -*) - -fun def_pred binding parms defs ts norm_ts thy = - let - val name = Sign.full_name thy binding; - - val (body, bodyT, body_eq) = atomize_spec thy norm_ts; - val env = Term.add_free_names body []; - val xs = filter (member (op =) env o #1) parms; - val Ts = map #2 xs; - val extraTs = - (subtract (op =) (fold Term.add_tfreesT Ts []) (Term.add_tfrees body [])) - |> Library.sort_wrt #1 |> map TFree; - val predT = map Term.itselfT extraTs ---> Ts ---> bodyT; - - val args = map Logic.mk_type extraTs @ map Free xs; - val head = Term.list_comb (Const (name, predT), args); - val statement = Object_Logic.ensure_propT thy head; - - val ([pred_def], defs_thy) = - thy - |> bodyT = propT ? Sign.typed_print_translation [aprop_tr' (length args) name] - |> Sign.declare_const_global ((Binding.conceal binding, predT), NoSyn) |> snd - |> Global_Theory.add_defs false - [((Binding.conceal (Thm.def_binding binding), Logic.mk_equals (head, body)), [])]; - val defs_ctxt = Proof_Context.init_global defs_thy |> Variable.declare_term head; - - val cert = Thm.cterm_of defs_thy; - - val intro = Goal.prove_global defs_thy [] norm_ts statement - (fn {context = ctxt, ...} => - rewrite_goals_tac ctxt [pred_def] THEN - compose_tac (false, body_eq RS Drule.equal_elim_rule1, 1) 1 THEN - compose_tac (false, Conjunction.intr_balanced (map (Thm.assume o cert) norm_ts), 0) 1); - - val conjuncts = - (Drule.equal_elim_rule2 OF - [body_eq, rewrite_rule defs_ctxt [pred_def] (Thm.assume (cert statement))]) - |> Conjunction.elim_balanced (length ts); - - val (_, axioms_ctxt) = defs_ctxt - |> Assumption.add_assumes (maps (#hyps o Thm.crep_thm) (defs @ conjuncts)); - val axioms = ts ~~ conjuncts |> map (fn (t, ax) => - Element.prove_witness axioms_ctxt t - (rewrite_goals_tac axioms_ctxt defs THEN compose_tac (false, ax, 0) 1)); - in ((statement, intro, axioms), defs_thy) end; - -in - -(* main predicate definition function *) - -fun define_preds binding parms (((exts, exts'), (ints, ints')), defs) thy = - let - val ctxt = Proof_Context.init_global thy; - val defs' = map (cterm_of thy #> Assumption.assume ctxt #> Drule.abs_def) defs; - - val (a_pred, a_intro, a_axioms, thy'') = - if null exts then (NONE, NONE, [], thy) - else - let - val abinding = - if null ints then binding else Binding.suffix_name ("_" ^ axiomsN) binding; - val ((statement, intro, axioms), thy') = - thy - |> def_pred abinding parms defs' exts exts'; - val (_, thy'') = - thy' - |> Sign.qualified_path true abinding - |> Global_Theory.note_thmss "" - [((Binding.conceal (Binding.name introN), []), [([intro], [Locale.unfold_add])])] - ||> Sign.restore_naming thy'; - in (SOME statement, SOME intro, axioms, thy'') end; - val (b_pred, b_intro, b_axioms, thy'''') = - if null ints then (NONE, NONE, [], thy'') - else - let - val ((statement, intro, axioms), thy''') = - thy'' - |> def_pred binding parms defs' (ints @ the_list a_pred) (ints' @ the_list a_pred); - val ctxt''' = Proof_Context.init_global thy'''; - val (_, thy'''') = - thy''' - |> Sign.qualified_path true binding - |> Global_Theory.note_thmss "" - [((Binding.conceal (Binding.name introN), []), [([intro], [Locale.intro_add])]), - ((Binding.conceal (Binding.name axiomsN), []), - [(map (Drule.export_without_context o Element.conclude_witness ctxt''') axioms, - [])])] - ||> Sign.restore_naming thy'''; - in (SOME statement, SOME intro, axioms, thy'''') end; - in ((a_pred, a_intro, a_axioms), (b_pred, b_intro, b_axioms), thy'''') end; - -end; - - -local - -fun assumes_to_notes (Assumes asms) 
axms = - fold_map (fn (a, spec) => fn axs => - let val (ps, qs) = chop (length spec) axs - in ((a, [(ps, [])]), qs) end) asms axms - |> apfst (curry Notes "") - | assumes_to_notes e axms = (e, axms); - -fun defines_to_notes ctxt (Defines defs) = - Notes ("", map (fn (a, (def, _)) => - (a, [([Assumption.assume ctxt (cterm_of (Proof_Context.theory_of ctxt) def)], - [(Attrib.internal o K) Locale.witness_add])])) defs) - | defines_to_notes _ e = e; - -fun gen_add_locale prep_decl - binding raw_predicate_binding raw_import raw_body thy = - let - val name = Sign.full_name thy binding; - val _ = Locale.defined thy name andalso - error ("Duplicate definition of locale " ^ quote name); - - val ((fixed, deps, body_elems, _), (parms, ctxt')) = - prep_decl raw_import I raw_body (Proof_Context.init_global thy); - val text as (((_, exts'), _), defs) = eval ctxt' deps body_elems; - - val extraTs = - subtract (op =) - (fold Term.add_tfreesT (map snd parms) []) - (fold Term.add_tfrees exts' []); - val _ = - if null extraTs then () - else warning ("Additional type variable(s) in locale specification " ^ - Binding.print binding ^ ": " ^ - commas (map (Syntax.string_of_typ ctxt' o TFree) (sort_wrt #1 extraTs))); - - val predicate_binding = - if Binding.is_empty raw_predicate_binding then binding - else raw_predicate_binding; - val ((a_statement, a_intro, a_axioms), (b_statement, b_intro, b_axioms), thy') = - define_preds predicate_binding parms text thy; - val pred_ctxt = Proof_Context.init_global thy'; - - val a_satisfy = Element.satisfy_morphism a_axioms; - val b_satisfy = Element.satisfy_morphism b_axioms; - - val params = fixed @ - maps (fn Fixes fixes => - map (fn (b, SOME T, mx) => ((Binding.name_of b, T), mx)) fixes | _ => []) body_elems; - val asm = if is_some b_statement then b_statement else a_statement; - - val notes = - if is_some asm then - [("", [((Binding.conceal (Binding.suffix_name ("_" ^ axiomsN) binding), []), - [([Assumption.assume pred_ctxt (cterm_of thy' (the asm))], - [(Attrib.internal o K) Locale.witness_add])])])] - else []; - - val notes' = - body_elems - |> map (defines_to_notes pred_ctxt) - |> map (Element.transform_ctxt a_satisfy) - |> (fn elems => - fold_map assumes_to_notes elems (map (Element.conclude_witness pred_ctxt) a_axioms)) - |> fst - |> map (Element.transform_ctxt b_satisfy) - |> map_filter (fn Notes notes => SOME notes | _ => NONE); - - val deps' = map (fn (l, morph) => (l, morph $> b_satisfy)) deps; - val axioms = map (Element.conclude_witness pred_ctxt) b_axioms; - - val loc_ctxt = thy' - |> Locale.register_locale binding (extraTs, params) - (asm, rev defs) (a_intro, b_intro) axioms [] (rev notes) (rev deps') - |> Named_Target.init name - |> fold (fn (kind, facts) => Local_Theory.notes_kind kind facts #> snd) notes'; - - in (name, loc_ctxt) end; - -in - -val add_locale = gen_add_locale cert_declaration; -val add_locale_cmd = gen_add_locale read_declaration; - -end; - - -(*** Interpretation ***) - -local - -(* reading *) - -fun prep_with_extended_syntax prep_prop deps ctxt props = - let - val deps_ctxt = fold Locale.activate_declarations deps ctxt; - in - map (prep_prop deps_ctxt o snd) props |> Syntax.check_terms deps_ctxt - |> Variable.export_terms deps_ctxt ctxt - end; - -fun prep_interpretation prep_expr prep_prop prep_attr expression raw_eqns initial_ctxt = - let - val ((propss, deps, export), expr_ctxt) = prep_expr expression initial_ctxt; - val eqns = prep_with_extended_syntax prep_prop deps expr_ctxt raw_eqns; - val attrss = map (apsnd (map (prep_attr 
initial_ctxt)) o fst) raw_eqns; - val goal_ctxt = fold Variable.auto_fixes eqns expr_ctxt; - val export' = Variable.export_morphism goal_ctxt expr_ctxt; - in (((propss, deps, export, export'), (eqns, attrss)), goal_ctxt) end; - -val cert_interpretation = - prep_interpretation cert_goal_expression (K I) (K I); - -val read_interpretation = - prep_interpretation read_goal_expression Syntax.parse_prop Attrib.check_src; - - -(* generic interpretation machinery *) - -fun meta_rewrite ctxt eqns = - map (Local_Defs.meta_rewrite_rule ctxt #> Drule.abs_def) (maps snd eqns); - -fun note_eqns_register note activate deps witss eqns attrss export export' ctxt = - let - val facts = map2 (fn attrs => fn eqn => - (attrs, [([Morphism.thm (export' $> export) eqn], [])])) attrss eqns; - val (eqns', ctxt') = ctxt - |> note Thm.lemmaK facts - |> (fn (eqns, ctxt') => (meta_rewrite ctxt' eqns, ctxt')); - val dep_morphs = - map2 (fn (dep, morph) => fn wits => - (dep, morph $> Element.satisfy_morphism (map (Element.transform_witness export') wits))) - deps witss; - fun activate' dep_morph ctxt = - activate dep_morph - (Option.map (rpair true) (Element.eq_morphism (Proof_Context.theory_of ctxt) eqns')) - export ctxt; - in - ctxt' - |> fold activate' dep_morphs - end; - -fun generic_interpretation prep_interpretation setup_proof note activate - expression raw_eqns initial_ctxt = - let - val (((propss, deps, export, export'), (eqns, attrss)), goal_ctxt) = - prep_interpretation expression raw_eqns initial_ctxt; - fun after_qed witss eqns = - note_eqns_register note activate deps witss eqns attrss export export'; - in setup_proof after_qed propss eqns goal_ctxt end; - - -(* first dimension: proof vs. local theory *) - -fun gen_interpret prep_interpretation expression raw_eqns int state = - let - val _ = Proof.assert_forward_or_chain state; - val ctxt = Proof.context_of state; - fun lift_after_qed after_qed witss eqns = - Proof.map_context (after_qed witss eqns) #> Proof.reset_facts; - fun setup_proof after_qed propss eqns goal_ctxt = - Element.witness_local_proof_eqs (lift_after_qed after_qed) "interpret" - propss eqns goal_ctxt int state; - in - generic_interpretation prep_interpretation setup_proof - Attrib.local_notes (Context.proof_map ooo Locale.add_registration) expression raw_eqns ctxt - end; - -fun gen_local_theory_interpretation prep_interpretation activate expression raw_eqns lthy = - generic_interpretation prep_interpretation Element.witness_proof_eqs - Local_Theory.notes_kind (activate lthy) expression raw_eqns lthy; - - -(* second dimension: relation to underlying target *) - -fun subscribe_or_activate lthy = - if Named_Target.is_theory lthy - then Local_Theory.subscription - else Local_Theory.activate; - -fun subscribe_locale_only lthy = - let - val _ = - if Named_Target.is_theory lthy - then error "Not possible on level of global theory" - else (); - in Local_Theory.subscription end; - - -(* special case: global sublocale command *) - -fun gen_sublocale_global prep_loc prep_interpretation - raw_locale expression raw_eqns thy = - let - val lthy = Named_Target.init (prep_loc thy raw_locale) thy; - fun setup_proof after_qed = - Element.witness_proof_eqs - (fn wits => fn eqs => after_qed wits eqs #> Local_Theory.exit); - in - lthy |> - generic_interpretation prep_interpretation setup_proof - Local_Theory.notes_kind (subscribe_locale_only lthy) expression raw_eqns - end; - -in - - -(* interfaces *) - -fun interpret x = gen_interpret cert_interpretation x; -fun interpret_cmd x = gen_interpret 
read_interpretation x; - -fun permanent_interpretation expression raw_eqns = - Local_Theory.assert_bottom true - #> gen_local_theory_interpretation cert_interpretation - (K Local_Theory.subscription) expression raw_eqns; - -fun ephemeral_interpretation x = - gen_local_theory_interpretation cert_interpretation (K Local_Theory.activate) x; - -fun interpretation x = - gen_local_theory_interpretation cert_interpretation subscribe_or_activate x; -fun interpretation_cmd x = - gen_local_theory_interpretation read_interpretation subscribe_or_activate x; - -fun sublocale x = - gen_local_theory_interpretation cert_interpretation subscribe_locale_only x; -fun sublocale_cmd x = - gen_local_theory_interpretation read_interpretation subscribe_locale_only x; - -fun sublocale_global x = gen_sublocale_global (K I) cert_interpretation x; -fun sublocale_global_cmd x = gen_sublocale_global Locale.check read_interpretation x; - -end; - - -(** Print the instances that would be activated by an interpretation - of the expression in the current context (clean = false) or in an - empty context (clean = true). **) - -fun print_dependencies ctxt clean expression = - let - val ((_, deps, export), expr_ctxt) = read_goal_expression expression ctxt; - val export' = if clean then Morphism.identity else export; - in - Locale.print_dependencies expr_ctxt clean export' deps - end; - -end; diff --git a/core/Pure/Isar/generic_target.ML b/core/Pure/Isar/generic_target.ML deleted file mode 100644 index 2ef9165a..00000000 --- a/core/Pure/Isar/generic_target.ML +++ /dev/null @@ -1,363 +0,0 @@ -(* Title: Pure/Isar/generic_target.ML - Author: Makarius - Author: Florian Haftmann, TU Muenchen - -Common target infrastructure. -*) - -signature GENERIC_TARGET = -sig - (* consts *) - val standard_const: (int * int -> bool) -> Syntax.mode -> (binding * mixfix) * term -> - local_theory -> local_theory - - (* background operations *) - val background_foundation: ((binding * typ) * mixfix) * (binding * term) -> - term list * term list -> local_theory -> (term * thm) * local_theory - val background_declaration: declaration -> local_theory -> local_theory - val background_abbrev: binding * term -> term list -> local_theory -> (term * term) * local_theory - - (* lifting primitives to local theory operations *) - val define: (((binding * typ) * mixfix) * (binding * term) -> - term list * term list -> local_theory -> (term * thm) * local_theory) -> - bool -> (binding * mixfix) * (Attrib.binding * term) -> local_theory -> - (term * (string * thm)) * local_theory - val notes: - (string -> (Attrib.binding * (thm list * Args.src list) list) list -> - (Attrib.binding * (thm list * Args.src list) list) list -> local_theory -> local_theory) -> - string -> (Attrib.binding * (thm list * Args.src list) list) list -> local_theory -> - (string * thm list) list * local_theory - val abbrev: (string * bool -> binding * mixfix -> term -> - term list * term list -> local_theory -> local_theory) -> - string * bool -> (binding * mixfix) * term -> local_theory -> (term * term) * local_theory - - (* theory operations *) - val theory_foundation: ((binding * typ) * mixfix) * (binding * term) -> - term list * term list -> local_theory -> (term * thm) * local_theory - val theory_notes: string -> - (Attrib.binding * (thm list * Args.src list) list) list -> - (Attrib.binding * (thm list * Args.src list) list) list -> - local_theory -> local_theory - val theory_declaration: declaration -> local_theory -> local_theory - val theory_abbrev: Syntax.mode -> (binding * mixfix) -> 
term -> term list * term list -> - local_theory -> local_theory - val theory_registration: string * morphism -> (morphism * bool) option -> morphism -> - local_theory -> local_theory - - (* locale operations *) - val locale_notes: string -> string -> - (Attrib.binding * (thm list * Args.src list) list) list -> - (Attrib.binding * (thm list * Args.src list) list) list -> - local_theory -> local_theory - val locale_target_declaration: string -> bool -> declaration -> local_theory -> local_theory - val locale_declaration: string -> {syntax: bool, pervasive: bool} -> declaration -> - local_theory -> local_theory - val locale_target_const: string -> (morphism -> bool) -> Syntax.mode -> - (binding * mixfix) * term -> local_theory -> local_theory - val locale_const: string -> Syntax.mode -> (binding * mixfix) * term -> - local_theory -> local_theory - val locale_dependency: string -> string * morphism -> (morphism * bool) option -> morphism -> - local_theory -> local_theory -end - -structure Generic_Target: GENERIC_TARGET = -struct - -(** notes **) - -fun standard_facts lthy ctxt = - Element.transform_facts (Local_Theory.standard_morphism lthy ctxt); - -fun standard_notes pred kind facts lthy = - Local_Theory.map_contexts (fn level => fn ctxt => - if pred (Local_Theory.level lthy, level) - then Attrib.local_notes kind (standard_facts lthy ctxt facts) ctxt |> snd - else ctxt) lthy; - - -(** declarations **) - -fun standard_declaration pred decl lthy = - Local_Theory.map_contexts (fn level => fn ctxt => - if pred (Local_Theory.level lthy, level) - then Context.proof_map (Local_Theory.standard_form lthy ctxt decl) ctxt - else ctxt) lthy; - - -(** consts **) - -fun check_mixfix ctxt (b, extra_tfrees) mx = - if null extra_tfrees then mx - else - (if Context_Position.is_visible ctxt then - warning - ("Additional type variable(s) in specification of " ^ Binding.print b ^ ": " ^ - commas (map (Syntax.string_of_typ ctxt o TFree) (sort_wrt #1 extra_tfrees)) ^ - (if mx = NoSyn then "" - else "\nDropping mixfix syntax " ^ Pretty.string_of (Mixfix.pretty_mixfix mx))) - else (); NoSyn); - -fun check_mixfix_global (b, no_params) mx = - if no_params orelse mx = NoSyn then mx - else (warning ("Dropping global mixfix syntax: " ^ Binding.print b ^ " " ^ - Pretty.string_of (Mixfix.pretty_mixfix mx)); NoSyn); - -fun const_decl phi_pred prmode ((b, mx), rhs) phi context = - if phi_pred phi then - let - val b' = Morphism.binding phi b; - val rhs' = Morphism.term phi rhs; - val same_shape = Term.aconv_untyped (rhs, rhs'); - val const_alias = - if same_shape then - (case rhs' of - Const (c, T) => - let - val thy = Context.theory_of context; - val ctxt = Context.proof_of context; - in - (case Type_Infer_Context.const_type ctxt c of - SOME T' => if Sign.typ_equiv thy (T, T') then SOME c else NONE - | NONE => NONE) - end - | _ => NONE) - else NONE; - in - case const_alias of - SOME c => - context - |> Context.mapping (Sign.const_alias b' c) (Proof_Context.const_alias b' c) - |> Morphism.form (Proof_Context.generic_notation true prmode [(rhs', mx)]) - | NONE => - context - |> Proof_Context.generic_add_abbrev Print_Mode.internal (b', Term.close_schematic_term rhs') - |-> (fn (const as Const (c, _), _) => same_shape ? 
- (Proof_Context.generic_revert_abbrev (#1 prmode) c #> - Morphism.form (Proof_Context.generic_notation true prmode [(const, mx)]))) - end - else context; - -fun standard_const pred prmode ((b, mx), rhs) = - standard_declaration pred (const_decl (K true) prmode ((b, mx), rhs)); - - -(** background primitives **) - -fun background_foundation (((b, U), mx), (b_def, rhs)) (type_params, term_params) lthy = - let - val params = type_params @ term_params; - val mx' = check_mixfix_global (b, null params) mx; - - val (const, lthy2) = lthy - |> Local_Theory.background_theory_result (Sign.declare_const lthy ((b, U), mx')); - val lhs = Term.list_comb (const, params); - - val ((_, def), lthy3) = lthy2 - |> Local_Theory.background_theory_result - (Thm.add_def lthy2 false false - (Thm.def_binding_optional b b_def, Logic.mk_equals (lhs, rhs))); - in ((lhs, def), lthy3) end; - -fun background_declaration decl lthy = - let - val theory_decl = - Local_Theory.standard_form lthy - (Proof_Context.init_global (Proof_Context.theory_of lthy)) decl; - in Local_Theory.background_theory (Context.theory_map theory_decl) lthy end; - -fun background_abbrev (b, global_rhs) params = - Local_Theory.background_theory_result (Sign.add_abbrev Print_Mode.internal (b, global_rhs)) - #>> pairself (fn t => Term.list_comb (Logic.unvarify_global t, params)) - - -(** lifting primitive to local theory operations **) - -(* define *) - -fun define foundation internal ((b, mx), ((b_def, atts), rhs)) lthy = - let - val thy = Proof_Context.theory_of lthy; - val thy_ctxt = Proof_Context.init_global thy; - - (*term and type parameters*) - val ((defs, _), rhs') = Thm.cterm_of thy rhs - |> Local_Defs.export_cterm lthy thy_ctxt ||> Thm.term_of; - - val xs = Variable.add_fixed lthy rhs' []; - val T = Term.fastype_of rhs; - val tfreesT = Term.add_tfreesT T (fold (Term.add_tfreesT o #2) xs []); - val extra_tfrees = rev (subtract (op =) tfreesT (Term.add_tfrees rhs [])); - val mx' = check_mixfix lthy (b, extra_tfrees) mx; - - val type_params = map (Logic.mk_type o TFree) extra_tfrees; - val term_params = map Free (sort (Variable.fixed_ord lthy o pairself #1) xs); - val params = type_params @ term_params; - - val U = map Term.fastype_of params ---> T; - - (*foundation*) - val ((lhs', global_def), lthy2) = lthy - |> foundation (((b, U), mx'), (b_def, rhs')) (type_params, term_params); - - (*local definition*) - val ((lhs, local_def), lthy3) = lthy2 - |> Local_Defs.add_def ((b, NoSyn), lhs'); - - (*result*) - val def = - Thm.transitive local_def global_def - |> Local_Defs.contract lthy3 defs - (Thm.cterm_of (Proof_Context.theory_of lthy3) (Logic.mk_equals (lhs, rhs))); - val ([(res_name, [res])], lthy4) = lthy3 - |> Local_Theory.notes [((if internal then Binding.empty else b_def, atts), [([def], [])])]; - in ((lhs, (res_name, res)), lthy4) end; - - -(* notes *) - -local - -fun import_export_proof ctxt (name, raw_th) = - let - val thy = Proof_Context.theory_of ctxt; - val thy_ctxt = Proof_Context.init_global thy; - val certT = Thm.ctyp_of thy; - val cert = Thm.cterm_of thy; - - (*export assumes/defines*) - val th = Goal.norm_result ctxt raw_th; - val ((defs, asms), th') = Local_Defs.export ctxt thy_ctxt th; - val asms' = map (rewrite_rule ctxt (Drule.norm_hhf_eqs @ defs)) asms; - - (*export fixes*) - val tfrees = map TFree (Thm.fold_terms Term.add_tfrees th' []); - val frees = map Free (Thm.fold_terms Term.add_frees th' []); - val (th'' :: vs) = - (th' :: map (Drule.mk_term o cert) (map Logic.mk_type tfrees @ frees)) - |> Variable.export ctxt 
thy_ctxt - |> Drule.zero_var_indexes_list; - - (*thm definition*) - val result = Global_Theory.name_thm true true name th''; - - (*import fixes*) - val (tvars, vars) = - chop (length tfrees) (map (Thm.term_of o Drule.dest_term) vs) - |>> map Logic.dest_type; - - val instT = map_filter (fn (TVar v, T) => SOME (v, T) | _ => NONE) (tvars ~~ tfrees); - val inst = filter (is_Var o fst) (vars ~~ frees); - val cinstT = map (pairself certT o apfst TVar) instT; - val cinst = map (pairself (cert o Term.map_types (Term_Subst.instantiateT instT))) inst; - val result' = Thm.instantiate (cinstT, cinst) result; - - (*import assumes/defines*) - val result'' = - (fold (curry op COMP) asms' result' - handle THM _ => raise THM ("Failed to re-import result", 0, result' :: asms')) - |> Local_Defs.contract ctxt defs (Thm.cprop_of th) - |> Goal.norm_result ctxt - |> Global_Theory.name_thm false false name; - - in (result'', result) end; - -in - -fun notes notes' kind facts lthy = - let - val facts' = facts - |> map (fn (a, bs) => (a, Global_Theory.burrow_fact (Global_Theory.name_multi - (Local_Theory.full_name lthy (fst a))) bs)) - |> Global_Theory.map_facts (import_export_proof lthy); - val local_facts = Global_Theory.map_facts #1 facts'; - val global_facts = Global_Theory.map_facts #2 facts'; - in - lthy - |> notes' kind global_facts (Attrib.partial_evaluation lthy local_facts) - |> Attrib.local_notes kind local_facts - end; - -end; - - -(* abbrev *) - -fun abbrev abbrev' prmode ((b, mx), rhs) lthy = - let - val thy_ctxt = Proof_Context.init_global (Proof_Context.theory_of lthy); - - val rhs' = Assumption.export_term lthy (Local_Theory.target_of lthy) rhs; - val term_params = map Free (sort (Variable.fixed_ord lthy o pairself #1) (Variable.add_fixed lthy rhs' [])); - val u = fold_rev lambda term_params rhs'; - val global_rhs = singleton (Variable.polymorphic thy_ctxt) u; - - val extra_tfrees = - subtract (op =) (Term.add_tfreesT (Term.fastype_of u) []) (Term.add_tfrees u []); - val mx' = check_mixfix lthy (b, extra_tfrees) mx; - val type_params = map (Logic.mk_type o TFree) extra_tfrees; - in - lthy - |> abbrev' prmode (b, mx') global_rhs (type_params, term_params) - |> Proof_Context.add_abbrev Print_Mode.internal (b, rhs) |> snd - |> Local_Defs.fixed_abbrev ((b, NoSyn), rhs) - end; - - -(** theory operations **) - -fun theory_foundation (((b, U), mx), (b_def, rhs)) (type_params, term_params) = - background_foundation (((b, U), mx), (b_def, rhs)) (type_params, term_params) - #-> (fn (lhs, def) => standard_const (op <>) Syntax.mode_default ((b, mx), lhs) - #> pair (lhs, def)); - -fun theory_notes kind global_facts local_facts = - Local_Theory.background_theory (Attrib.global_notes kind global_facts #> snd) - #> standard_notes (op <>) kind local_facts; - -fun theory_declaration decl = - background_declaration decl #> standard_declaration (K true) decl; - -fun theory_abbrev prmode (b, mx) global_rhs params = - Local_Theory.background_theory_result - (Sign.add_abbrev (#1 prmode) (b, global_rhs) #-> - (fn (lhs, _) => (* FIXME type_params!? 
*) - Sign.notation true prmode [(lhs, check_mixfix_global (b, null (snd params)) mx)] #> pair lhs)) - #-> (fn lhs => standard_const (op <>) prmode - ((b, if null (snd params) then NoSyn else mx), Term.list_comb (Logic.unvarify_global lhs, snd params))); - -val theory_registration = - Local_Theory.raw_theory o Context.theory_map ooo Locale.add_registration; - - -(** locale operations **) - -fun locale_notes locale kind global_facts local_facts = - Local_Theory.background_theory - (Attrib.global_notes kind (Attrib.map_facts (K []) global_facts) #> snd) #> - (fn lthy => lthy |> - Local_Theory.target (fn ctxt => ctxt |> - Locale.add_thmss locale kind (standard_facts lthy ctxt local_facts))) #> - standard_notes (fn (this, other) => other <> 0 andalso this <> other) kind local_facts; - -fun locale_target_declaration locale syntax decl lthy = lthy - |> Local_Theory.target (fn ctxt => ctxt |> - Locale.add_declaration locale syntax - (Morphism.transform (Local_Theory.standard_morphism lthy ctxt) decl)); - -fun locale_declaration locale {syntax, pervasive} decl = - pervasive ? background_declaration decl - #> locale_target_declaration locale syntax decl - #> standard_declaration (fn (_, other) => other <> 0) decl; - -fun locale_target_const locale phi_pred prmode ((b, mx), rhs) = - locale_target_declaration locale true (const_decl phi_pred prmode ((b, mx), rhs)) - -fun locale_const locale prmode ((b, mx), rhs) = - locale_target_const locale (K true) prmode ((b, mx), rhs) - #> standard_const (fn (this, other) => other <> 0 andalso this <> other) prmode ((b, mx), rhs); - -fun locale_dependency locale dep_morph mixin export = - (Local_Theory.raw_theory ooo Locale.add_dependency locale) dep_morph mixin export - #> Local_Theory.activate_nonbrittle dep_morph mixin export; - -end; diff --git a/core/Pure/Isar/isar_cmd.ML b/core/Pure/Isar/isar_cmd.ML deleted file mode 100644 index e3d67568..00000000 --- a/core/Pure/Isar/isar_cmd.ML +++ /dev/null @@ -1,426 +0,0 @@ -(* Title: Pure/Isar/isar_cmd.ML - Author: Markus Wenzel, TU Muenchen - -Miscellaneous Isar commands. 
-*) - -signature ISAR_CMD = -sig - val global_setup: Symbol_Pos.source -> theory -> theory - val local_setup: Symbol_Pos.source -> Proof.context -> Proof.context - val parse_ast_translation: Symbol_Pos.source -> theory -> theory - val parse_translation: Symbol_Pos.source -> theory -> theory - val print_translation: Symbol_Pos.source -> theory -> theory - val typed_print_translation: Symbol_Pos.source -> theory -> theory - val print_ast_translation: Symbol_Pos.source -> theory -> theory - val translations: (xstring * string) Syntax.trrule list -> theory -> theory - val no_translations: (xstring * string) Syntax.trrule list -> theory -> theory - val oracle: bstring * Position.T -> Symbol_Pos.source -> theory -> theory - val add_defs: (bool * bool) * ((binding * string) * Attrib.src list) list -> theory -> theory - val declaration: {syntax: bool, pervasive: bool} -> - Symbol_Pos.source -> local_theory -> local_theory - val simproc_setup: string * Position.T -> string list -> Symbol_Pos.source -> - string list -> local_theory -> local_theory - val have: (Attrib.binding * (string * string list) list) list -> bool -> Proof.state -> Proof.state - val hence: (Attrib.binding * (string * string list) list) list -> bool -> Proof.state -> Proof.state - val show: (Attrib.binding * (string * string list) list) list -> bool -> Proof.state -> Proof.state - val thus: (Attrib.binding * (string * string list) list) list -> bool -> Proof.state -> Proof.state - val qed: Method.text_range option -> Toplevel.transition -> Toplevel.transition - val terminal_proof: Method.text_range * Method.text_range option -> - Toplevel.transition -> Toplevel.transition - val default_proof: Toplevel.transition -> Toplevel.transition - val immediate_proof: Toplevel.transition -> Toplevel.transition - val done_proof: Toplevel.transition -> Toplevel.transition - val skip_proof: Toplevel.transition -> Toplevel.transition - val ml_diag: bool -> Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition - val diag_state: Proof.context -> Toplevel.state - val diag_goal: Proof.context -> {context: Proof.context, facts: thm list, goal: thm} - val pretty_theorems: bool -> Toplevel.state -> Pretty.T list - val thy_deps: Toplevel.transition -> Toplevel.transition - val locale_deps: Toplevel.transition -> Toplevel.transition - val class_deps: Toplevel.transition -> Toplevel.transition - val thm_deps: (Facts.ref * Attrib.src list) list -> Toplevel.transition -> Toplevel.transition - val unused_thms: (string list * string list option) option -> - Toplevel.transition -> Toplevel.transition - val print_stmts: string list * (Facts.ref * Attrib.src list) list - -> Toplevel.transition -> Toplevel.transition - val print_thms: string list * (Facts.ref * Attrib.src list) list - -> Toplevel.transition -> Toplevel.transition - val print_prfs: bool -> string list * (Facts.ref * Attrib.src list) list option - -> Toplevel.transition -> Toplevel.transition - val print_prop: (string list * string) -> Toplevel.transition -> Toplevel.transition - val print_term: (string list * string) -> Toplevel.transition -> Toplevel.transition - val print_type: (string list * (string * string option)) -> - Toplevel.transition -> Toplevel.transition - val header_markup: Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition - val local_theory_markup: (xstring * Position.T) option * (Symbol_Pos.source) -> - Toplevel.transition -> Toplevel.transition - val proof_markup: Symbol_Pos.source -> Toplevel.transition -> Toplevel.transition -end; - -structure Isar_Cmd: 
ISAR_CMD = -struct - - -(** theory declarations **) - -(* generic setup *) - -fun global_setup source = - ML_Lex.read_source false source - |> ML_Context.expression (#pos source) "val setup: theory -> theory" "Context.map_theory setup" - |> Context.theory_map; - -fun local_setup source = - ML_Lex.read_source false source - |> ML_Context.expression (#pos source) "val setup: local_theory -> local_theory" "Context.map_proof setup" - |> Context.proof_map; - - -(* translation functions *) - -fun parse_ast_translation source = - ML_Lex.read_source false source - |> ML_Context.expression (#pos source) - "val parse_ast_translation: (string * (Proof.context -> Ast.ast list -> Ast.ast)) list" - "Context.map_theory (Sign.parse_ast_translation parse_ast_translation)" - |> Context.theory_map; - -fun parse_translation source = - ML_Lex.read_source false source - |> ML_Context.expression (#pos source) - "val parse_translation: (string * (Proof.context -> term list -> term)) list" - "Context.map_theory (Sign.parse_translation parse_translation)" - |> Context.theory_map; - -fun print_translation source = - ML_Lex.read_source false source - |> ML_Context.expression (#pos source) - "val print_translation: (string * (Proof.context -> term list -> term)) list" - "Context.map_theory (Sign.print_translation print_translation)" - |> Context.theory_map; - -fun typed_print_translation source = - ML_Lex.read_source false source - |> ML_Context.expression (#pos source) - "val typed_print_translation: (string * (Proof.context -> typ -> term list -> term)) list" - "Context.map_theory (Sign.typed_print_translation typed_print_translation)" - |> Context.theory_map; - -fun print_ast_translation source = - ML_Lex.read_source false source - |> ML_Context.expression (#pos source) - "val print_ast_translation: (string * (Proof.context -> Ast.ast list -> Ast.ast)) list" - "Context.map_theory (Sign.print_ast_translation print_ast_translation)" - |> Context.theory_map; - - -(* translation rules *) - -fun read_trrules thy raw_rules = - let - val ctxt = Proof_Context.init_global thy; - val read_root = - #1 o dest_Type o Proof_Context.read_type_name {proper = true, strict = false} ctxt; - in - raw_rules - |> map (Syntax.map_trrule (fn (r, s) => Syntax_Phases.parse_ast_pattern ctxt (read_root r, s))) - end; - -fun translations args thy = Sign.add_trrules (read_trrules thy args) thy; -fun no_translations args thy = Sign.del_trrules (read_trrules thy args) thy; - - -(* oracles *) - -fun oracle (name, pos) source = - let - val body = ML_Lex.read_source false source; - val ants = - ML_Lex.read Position.none - ("local\n\ - \ val binding = " ^ ML_Syntax.make_binding (name, pos) ^ ";\n\ - \ val body = ") @ body @ ML_Lex.read Position.none (";\n\ - \in\n\ - \ val " ^ name ^ - " = snd (Context.>>> (Context.map_theory_result (Thm.add_oracle (binding, body))));\n\ - \end;\n"); - in - Context.theory_map - (ML_Context.exec (fn () => ML_Context.eval ML_Compiler.flags (#pos source) ants)) - end; - - -(* old-style defs *) - -fun add_defs ((unchecked, overloaded), args) thy = - (legacy_feature "Old 'defs' command -- use 'definition' (with 'overloading') instead"; - thy |> - (if unchecked then Global_Theory.add_defs_unchecked_cmd else Global_Theory.add_defs_cmd) - overloaded - (map (fn ((b, ax), srcs) => ((b, ax), map (Attrib.attribute_cmd_global thy) srcs)) args) - |> snd); - - -(* declarations *) - -fun declaration {syntax, pervasive} source = - ML_Lex.read_source false source - |> ML_Context.expression (#pos source) - "val declaration: 
Morphism.declaration" - ("Context.map_proof (Local_Theory.declaration {syntax = " ^ Bool.toString syntax ^ ", \ - \pervasive = " ^ Bool.toString pervasive ^ "} declaration)") - |> Context.proof_map; - - -(* simprocs *) - -fun simproc_setup name lhss source identifier = - ML_Lex.read_source false source - |> ML_Context.expression (#pos source) - "val proc: Morphism.morphism -> Proof.context -> cterm -> thm option" - ("Context.map_proof (Simplifier.def_simproc_cmd {name = " ^ ML_Syntax.make_binding name ^ ", \ - \lhss = " ^ ML_Syntax.print_strings lhss ^ ", proc = proc, \ - \identifier = Library.maps ML_Context.thms " ^ ML_Syntax.print_strings identifier ^ "})") - |> Context.proof_map; - - -(* goals *) - -fun goal opt_chain goal stmt int = - opt_chain #> goal NONE (K I) stmt int; - -val have = goal I Proof.have_cmd; -val hence = goal Proof.chain Proof.have_cmd; -val show = goal I Proof.show_cmd; -val thus = goal Proof.chain Proof.show_cmd; - - -(* local endings *) - -fun local_qed m = Toplevel.proof (Proof.local_qed (m, true)); -val local_terminal_proof = Toplevel.proof' o Proof.local_future_terminal_proof; -val local_default_proof = Toplevel.proof Proof.local_default_proof; -val local_immediate_proof = Toplevel.proof Proof.local_immediate_proof; -val local_done_proof = Toplevel.proof Proof.local_done_proof; -val local_skip_proof = Toplevel.proof' Proof.local_skip_proof; - -val skip_local_qed = Toplevel.skip_proof (fn i => if i > 1 then i - 1 else raise Toplevel.UNDEF); - - -(* global endings *) - -fun global_qed m = Toplevel.end_proof (K (Proof.global_qed (m, true))); -val global_terminal_proof = Toplevel.end_proof o Proof.global_future_terminal_proof; -val global_default_proof = Toplevel.end_proof (K Proof.global_default_proof); -val global_immediate_proof = Toplevel.end_proof (K Proof.global_immediate_proof); -val global_skip_proof = Toplevel.end_proof Proof.global_skip_proof; -val global_done_proof = Toplevel.end_proof (K Proof.global_done_proof); - -val skip_global_qed = Toplevel.skip_proof_to_theory (fn n => n = 1); - - -(* common endings *) - -fun qed m = local_qed m o global_qed m o skip_local_qed o skip_global_qed; -fun terminal_proof m = local_terminal_proof m o global_terminal_proof m; -val default_proof = local_default_proof o global_default_proof; -val immediate_proof = local_immediate_proof o global_immediate_proof; -val done_proof = local_done_proof o global_done_proof; -val skip_proof = local_skip_proof o global_skip_proof; - - -(* diagnostic ML evaluation *) - -structure Diag_State = Proof_Data -( - type T = Toplevel.state; - fun init _ = Toplevel.toplevel; -); - -fun ml_diag verbose source = Toplevel.keep (fn state => - let - val opt_ctxt = - try Toplevel.generic_theory_of state - |> Option.map (Context.proof_of #> Diag_State.put state); - val flags = ML_Compiler.verbose verbose ML_Compiler.flags; - in ML_Context.eval_source_in opt_ctxt flags source end); - -val diag_state = Diag_State.get; - -fun diag_goal ctxt = - Proof.goal (Toplevel.proof_of (diag_state ctxt)) - handle Toplevel.UNDEF => error "No goal present"; - -val _ = Theory.setup - (ML_Antiquotation.value (Binding.qualify true "Isar" @{binding state}) - (Scan.succeed "Isar_Cmd.diag_state ML_context") #> - ML_Antiquotation.value (Binding.qualify true "Isar" @{binding goal}) - (Scan.succeed "Isar_Cmd.diag_goal ML_context")); - - -(* theorems of theory or proof context *) - -fun pretty_theorems verbose st = - if Toplevel.is_proof st then - Proof_Context.pretty_local_facts (Toplevel.context_of st) verbose - else - let 
- val thy = Toplevel.theory_of st; - val prev_thys = - (case Toplevel.previous_context_of st of - SOME prev => [Proof_Context.theory_of prev] - | NONE => Theory.parents_of thy); - in Proof_Display.pretty_theorems_diff verbose prev_thys thy end; - - -(* display dependencies *) - -val thy_deps = Toplevel.unknown_theory o Toplevel.keep (fn state => - let - val thy = Toplevel.theory_of state; - val thy_session = Present.session_name thy; - - val gr = rev (Theory.nodes_of thy) |> map (fn node => - let - val name = Context.theory_name node; - val parents = map Context.theory_name (Theory.parents_of node); - val session = Present.session_name node; - val unfold = (session = thy_session); - in - {name = name, ID = name, parents = parents, dir = session, - unfold = unfold, path = "", content = []} - end); - in Graph_Display.display_graph gr end); - -val locale_deps = Toplevel.unknown_theory o Toplevel.keep (fn state => - let - val thy = Toplevel.theory_of state; - val gr = Locale.pretty_locale_deps thy |> map (fn {name, parents, body} => - {name = Locale.extern thy name, ID = name, parents = parents, - dir = "", unfold = true, path = "", content = [body]}); - in Graph_Display.display_graph gr end); - -val class_deps = Toplevel.unknown_theory o Toplevel.keep (fn state => - let - val ctxt = Toplevel.context_of state; - val {classes = (space, algebra), ...} = Type.rep_tsig (Proof_Context.tsig_of ctxt); - val classes = Sorts.classes_of algebra; - fun entry (c, (i, (_, cs))) = - (i, {name = Name_Space.extern ctxt space c, ID = c, parents = Graph.Keys.dest cs, - dir = "", unfold = true, path = "", content = []}); - val gr = - Graph.fold (cons o entry) classes [] - |> sort (int_ord o pairself #1) |> map #2; - in Graph_Display.display_graph gr end); - -fun thm_deps args = Toplevel.unknown_theory o Toplevel.keep (fn state => - Thm_Deps.thm_deps (Toplevel.theory_of state) - (Attrib.eval_thms (Toplevel.context_of state) args)); - - -(* find unused theorems *) - -fun unused_thms opt_range = Toplevel.keep (fn state => - let - val thy = Toplevel.theory_of state; - val ctxt = Toplevel.context_of state; - fun pretty_thm (a, th) = Proof_Context.pretty_fact ctxt (a, [th]); - val get_theory = Context.get_theory thy; - in - Thm_Deps.unused_thms - (case opt_range of - NONE => (Theory.parents_of thy, [thy]) - | SOME (xs, NONE) => (map get_theory xs, [thy]) - | SOME (xs, SOME ys) => (map get_theory xs, map get_theory ys)) - |> map pretty_thm |> Pretty.writeln_chunks - end); - - -(* print theorems, terms, types etc. 
*) - -local - -fun string_of_stmts ctxt args = - Attrib.eval_thms ctxt args - |> map (Element.pretty_statement ctxt Thm.theoremK) - |> Pretty.chunks2 |> Pretty.string_of; - -fun string_of_thms ctxt args = - Pretty.string_of (Proof_Context.pretty_fact ctxt ("", Attrib.eval_thms ctxt args)); - -fun string_of_prfs full state arg = - Pretty.string_of - (case arg of - NONE => - let - val {context = ctxt, goal = thm} = Proof.simple_goal (Toplevel.proof_of state); - val thy = Proof_Context.theory_of ctxt; - val prf = Thm.proof_of thm; - val prop = Thm.full_prop_of thm; - val prf' = Proofterm.rewrite_proof_notypes ([], []) prf; - in - Proof_Syntax.pretty_proof ctxt - (if full then Reconstruct.reconstruct_proof thy prop prf' else prf') - end - | SOME srcs => - let val ctxt = Toplevel.context_of state - in map (Proof_Syntax.pretty_proof_of ctxt full) (Attrib.eval_thms ctxt srcs) end - |> Pretty.chunks); - -fun string_of_prop ctxt s = - let - val prop = Syntax.read_prop ctxt s; - val ctxt' = Variable.auto_fixes prop ctxt; - in Pretty.string_of (Pretty.quote (Syntax.pretty_term ctxt' prop)) end; - -fun string_of_term ctxt s = - let - val t = Syntax.read_term ctxt s; - val T = Term.type_of t; - val ctxt' = Variable.auto_fixes t ctxt; - in - Pretty.string_of - (Pretty.block [Pretty.quote (Syntax.pretty_term ctxt' t), Pretty.fbrk, - Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' T)]) - end; - -fun string_of_type ctxt (s, NONE) = - let val T = Syntax.read_typ ctxt s - in Pretty.string_of (Pretty.quote (Syntax.pretty_typ ctxt T)) end - | string_of_type ctxt (s1, SOME s2) = - let - val ctxt' = Config.put show_sorts true ctxt; - val raw_T = Syntax.parse_typ ctxt' s1; - val S = Syntax.read_sort ctxt' s2; - val T = - Syntax.check_term ctxt' - (Logic.mk_type raw_T |> Type.constraint (Term.itselfT (Type_Infer.anyT S))) - |> Logic.dest_type; - in Pretty.string_of (Pretty.quote (Syntax.pretty_typ ctxt' T)) end; - -fun print_item string_of (modes, arg) = Toplevel.keep (fn state => - Print_Mode.with_modes modes (fn () => writeln (string_of state arg)) ()); - -in - -val print_stmts = print_item (string_of_stmts o Toplevel.context_of); -val print_thms = print_item (string_of_thms o Toplevel.context_of); -val print_prfs = print_item o string_of_prfs; -val print_prop = print_item (string_of_prop o Toplevel.context_of); -val print_term = print_item (string_of_term o Toplevel.context_of); -val print_type = print_item (string_of_type o Toplevel.context_of); - -end; - - -(* markup commands *) - -fun header_markup txt = Toplevel.keep (fn state => - if Toplevel.is_toplevel state then Thy_Output.check_text txt state - else raise Toplevel.UNDEF); - -fun local_theory_markup (loc, txt) = Toplevel.present_local_theory loc (Thy_Output.check_text txt); -val proof_markup = Toplevel.present_proof o Thy_Output.check_text; - -end; diff --git a/core/Pure/Isar/isar_syn.ML b/core/Pure/Isar/isar_syn.ML deleted file mode 100644 index c0f3ca53..00000000 --- a/core/Pure/Isar/isar_syn.ML +++ /dev/null @@ -1,1108 +0,0 @@ -(* Title: Pure/Isar/isar_syn.ML - Author: Makarius - -Outer syntax for Isabelle/Pure. 
-*) - -structure Isar_Syn: sig end = -struct - -(** markup commands **) - -val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "header"} "theory header" - (Parse.document_source >> Isar_Cmd.header_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "chapter"} "chapter heading" - (Parse.opt_target -- Parse.document_source >> Isar_Cmd.local_theory_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "section"} "section heading" - (Parse.opt_target -- Parse.document_source >> Isar_Cmd.local_theory_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "subsection"} "subsection heading" - (Parse.opt_target -- Parse.document_source >> Isar_Cmd.local_theory_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "subsubsection"} "subsubsection heading" - (Parse.opt_target -- Parse.document_source >> Isar_Cmd.local_theory_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.MarkupEnv - @{command_spec "text"} "formal comment (theory)" - (Parse.opt_target -- Parse.document_source >> Isar_Cmd.local_theory_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Verbatim - @{command_spec "text_raw"} "raw document preparation text" - (Parse.opt_target -- Parse.document_source >> Isar_Cmd.local_theory_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "sect"} "formal comment (proof)" - (Parse.document_source >> Isar_Cmd.proof_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "subsect"} "formal comment (proof)" - (Parse.document_source >> Isar_Cmd.proof_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Markup - @{command_spec "subsubsect"} "formal comment (proof)" - (Parse.document_source >> Isar_Cmd.proof_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.MarkupEnv - @{command_spec "txt"} "formal comment (proof)" - (Parse.document_source >> Isar_Cmd.proof_markup); - -val _ = - Outer_Syntax.markup_command Thy_Output.Verbatim - @{command_spec "txt_raw"} "raw document preparation text (proof)" - (Parse.document_source >> Isar_Cmd.proof_markup); - - - -(** theory commands **) - -(* sorts *) - -val _ = - Outer_Syntax.local_theory @{command_spec "default_sort"} - "declare default sort for explicit type variables" - (Parse.sort >> (fn s => fn lthy => Local_Theory.set_defsort (Syntax.read_sort lthy s) lthy)); - - -(* types *) - -val _ = - Outer_Syntax.local_theory @{command_spec "typedecl"} "type declaration" - (Parse.type_args -- Parse.binding -- Parse.opt_mixfix - >> (fn ((args, a), mx) => Typedecl.typedecl (a, map (rpair dummyS) args, mx) #> snd)); - -val _ = - Outer_Syntax.local_theory @{command_spec "type_synonym"} "declare type abbreviation" - (Parse.type_args -- Parse.binding -- - (@{keyword "="} |-- Parse.!!! 
(Parse.typ -- Parse.opt_mixfix')) - >> (fn ((args, a), (rhs, mx)) => snd o Typedecl.abbrev_cmd (a, args, mx) rhs)); - -val _ = - Outer_Syntax.command @{command_spec "nonterminal"} - "declare syntactic type constructors (grammar nonterminal symbols)" - (Parse.and_list1 Parse.binding >> (Toplevel.theory o Sign.add_nonterminals_global)); - - -(* consts and syntax *) - -val _ = - Outer_Syntax.command @{command_spec "judgment"} "declare object-logic judgment" - (Parse.const_binding >> (Toplevel.theory o Object_Logic.add_judgment_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "consts"} "declare constants" - (Scan.repeat1 Parse.const_binding >> (Toplevel.theory o Sign.add_consts_cmd)); - -val mode_spec = - (@{keyword "output"} >> K ("", false)) || - Parse.name -- Scan.optional (@{keyword "output"} >> K false) true; - -val opt_mode = - Scan.optional (@{keyword "("} |-- Parse.!!! (mode_spec --| @{keyword ")"})) Syntax.mode_default; - -val _ = - Outer_Syntax.command @{command_spec "syntax"} "add raw syntax clauses" - (opt_mode -- Scan.repeat1 Parse.const_decl - >> (Toplevel.theory o uncurry Sign.add_syntax_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "no_syntax"} "delete raw syntax clauses" - (opt_mode -- Scan.repeat1 Parse.const_decl - >> (Toplevel.theory o uncurry Sign.del_syntax_cmd)); - - -(* translations *) - -val trans_pat = - Scan.optional - (@{keyword "("} |-- Parse.!!! (Parse.inner_syntax Parse.xname --| @{keyword ")"})) "logic" - -- Parse.inner_syntax Parse.string; - -fun trans_arrow toks = - ((@{keyword "\"} || @{keyword "=>"}) >> K Syntax.Parse_Rule || - (@{keyword "\"} || @{keyword "<="}) >> K Syntax.Print_Rule || - (@{keyword "\"} || @{keyword "=="}) >> K Syntax.Parse_Print_Rule) toks; - -val trans_line = - trans_pat -- Parse.!!! (trans_arrow -- trans_pat) - >> (fn (left, (arr, right)) => arr (left, right)); - -val _ = - Outer_Syntax.command @{command_spec "translations"} "add syntax translation rules" - (Scan.repeat1 trans_line >> (Toplevel.theory o Isar_Cmd.translations)); - -val _ = - Outer_Syntax.command @{command_spec "no_translations"} "delete syntax translation rules" - (Scan.repeat1 trans_line >> (Toplevel.theory o Isar_Cmd.no_translations)); - - -(* axioms and definitions *) - -val opt_unchecked_overloaded = - Scan.optional (@{keyword "("} |-- Parse.!!! 
- (((@{keyword "unchecked"} >> K true) -- - Scan.optional (@{keyword "overloaded"} >> K true) false || - @{keyword "overloaded"} >> K (false, true)) --| @{keyword ")"})) (false, false); - -val _ = - Outer_Syntax.command @{command_spec "defs"} "define constants" - (opt_unchecked_overloaded -- - Scan.repeat1 (Parse_Spec.thm_name ":" -- Parse.prop >> (fn ((x, y), z) => ((x, z), y))) - >> (Toplevel.theory o Isar_Cmd.add_defs)); - - -(* constant definitions and abbreviations *) - -val _ = - Outer_Syntax.local_theory' @{command_spec "definition"} "constant definition" - (Parse_Spec.constdef >> (fn args => #2 oo Specification.definition_cmd args)); - -val _ = - Outer_Syntax.local_theory' @{command_spec "abbreviation"} "constant abbreviation" - (opt_mode -- (Scan.option Parse_Spec.constdecl -- Parse.prop) - >> (fn (mode, args) => Specification.abbreviation_cmd mode args)); - -val _ = - Outer_Syntax.local_theory @{command_spec "type_notation"} - "add concrete syntax for type constructors" - (opt_mode -- Parse.and_list1 (Parse.type_const -- Parse.mixfix) - >> (fn (mode, args) => Specification.type_notation_cmd true mode args)); - -val _ = - Outer_Syntax.local_theory @{command_spec "no_type_notation"} - "delete concrete syntax for type constructors" - (opt_mode -- Parse.and_list1 (Parse.type_const -- Parse.mixfix) - >> (fn (mode, args) => Specification.type_notation_cmd false mode args)); - -val _ = - Outer_Syntax.local_theory @{command_spec "notation"} - "add concrete syntax for constants / fixed variables" - (opt_mode -- Parse.and_list1 (Parse.const -- Parse.mixfix) - >> (fn (mode, args) => Specification.notation_cmd true mode args)); - -val _ = - Outer_Syntax.local_theory @{command_spec "no_notation"} - "delete concrete syntax for constants / fixed variables" - (opt_mode -- Parse.and_list1 (Parse.const -- Parse.mixfix) - >> (fn (mode, args) => Specification.notation_cmd false mode args)); - - -(* constant specifications *) - -val _ = - Outer_Syntax.command @{command_spec "axiomatization"} "axiomatic constant specification" - (Scan.optional Parse.fixes [] -- - Scan.optional (Parse.where_ |-- Parse.!!! 
(Parse.and_list1 Parse_Spec.specs)) [] - >> (fn (x, y) => Toplevel.theory (#2 o Specification.axiomatization_cmd x y))); - - -(* theorems *) - -fun theorems kind = - Parse_Spec.name_facts -- Parse.for_fixes - >> (fn (facts, fixes) => #2 oo Specification.theorems_cmd kind facts fixes); - -val _ = - Outer_Syntax.local_theory' @{command_spec "theorems"} "define theorems" - (theorems Thm.theoremK); - -val _ = - Outer_Syntax.local_theory' @{command_spec "lemmas"} "define lemmas" (theorems Thm.lemmaK); - -val _ = - Outer_Syntax.local_theory' @{command_spec "declare"} "declare theorems" - (Parse.and_list1 Parse_Spec.xthms1 -- Parse.for_fixes - >> (fn (facts, fixes) => - #2 oo Specification.theorems_cmd "" [(Attrib.empty_binding, flat facts)] fixes)); - - -(* hide names *) - -local - -fun hide_names command_spec what hide parse prep = - Outer_Syntax.command command_spec ("hide " ^ what ^ " from name space") - ((Parse.opt_keyword "open" >> not) -- Scan.repeat1 parse >> (fn (fully, args) => - (Toplevel.theory (fn thy => - let val ctxt = Proof_Context.init_global thy - in fold (hide fully o prep ctxt) args thy end)))); - -in - -val _ = - hide_names @{command_spec "hide_class"} "classes" Sign.hide_class Parse.class - Proof_Context.read_class; - -val _ = - hide_names @{command_spec "hide_type"} "types" Sign.hide_type Parse.type_const - ((#1 o dest_Type) oo Proof_Context.read_type_name {proper = true, strict = false}); - -val _ = - hide_names @{command_spec "hide_const"} "constants" Sign.hide_const Parse.const - ((#1 o dest_Const) oo Proof_Context.read_const {proper = true, strict = false}); - -val _ = - hide_names @{command_spec "hide_fact"} "facts" Global_Theory.hide_fact - (Parse.position Parse.xname) (Global_Theory.check_fact o Proof_Context.theory_of); - -end; - - -(* use ML text *) - -val _ = - Outer_Syntax.command @{command_spec "SML_file"} "read and evaluate Standard ML file" - (Resources.provide_parse_files "SML_file" >> (fn files => Toplevel.theory (fn thy => - let - val ([{lines, pos, ...}], thy') = files thy; - val source = {delimited = true, text = cat_lines lines, pos = pos}; - val flags = {SML = true, exchange = false, redirect = true, verbose = true}; - in - thy' |> Context.theory_map - (ML_Context.exec (fn () => ML_Context.eval_source flags source)) - end))); - -val _ = - Outer_Syntax.command @{command_spec "SML_export"} "evaluate SML within Isabelle/ML environment" - (Parse.ML_source >> (fn source => - let val flags = {SML = true, exchange = true, redirect = false, verbose = true} in - Toplevel.theory - (Context.theory_map (ML_Context.exec (fn () => ML_Context.eval_source flags source))) - end)); - -val _ = - Outer_Syntax.command @{command_spec "SML_import"} "evaluate Isabelle/ML within SML environment" - (Parse.ML_source >> (fn source => - let val flags = {SML = false, exchange = true, redirect = false, verbose = true} in - Toplevel.generic_theory - (ML_Context.exec (fn () => ML_Context.eval_source flags source) #> - Local_Theory.propagate_ml_env) - end)); - -val _ = - Outer_Syntax.command @{command_spec "ML"} "ML text within theory or local theory" - (Parse.ML_source >> (fn source => - Toplevel.generic_theory - (ML_Context.exec (fn () => - ML_Context.eval_source (ML_Compiler.verbose true ML_Compiler.flags) source) #> - Local_Theory.propagate_ml_env))); - -val _ = - Outer_Syntax.command @{command_spec "ML_prf"} "ML text within proof" - (Parse.ML_source >> (fn source => - Toplevel.proof (Proof.map_context (Context.proof_map - (ML_Context.exec (fn () => - ML_Context.eval_source 
(ML_Compiler.verbose true ML_Compiler.flags) source))) #> - Proof.propagate_ml_env))); - -val _ = - Outer_Syntax.command @{command_spec "ML_val"} "diagnostic ML text" - (Parse.ML_source >> Isar_Cmd.ml_diag true); - -val _ = - Outer_Syntax.command @{command_spec "ML_command"} "diagnostic ML text (silent)" - (Parse.ML_source >> Isar_Cmd.ml_diag false); - -val _ = - Outer_Syntax.command @{command_spec "setup"} "ML theory setup" - (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.global_setup)); - -val _ = - Outer_Syntax.local_theory @{command_spec "local_setup"} "ML local theory setup" - (Parse.ML_source >> Isar_Cmd.local_setup); - -val _ = - Outer_Syntax.command @{command_spec "attribute_setup"} "define attribute in ML" - (Parse.position Parse.name -- - Parse.!!! (@{keyword "="} |-- Parse.ML_source -- Scan.optional Parse.text "") - >> (fn (name, (txt, cmt)) => Toplevel.theory (Attrib.attribute_setup name txt cmt))); - -val _ = - Outer_Syntax.command @{command_spec "method_setup"} "define proof method in ML" - (Parse.position Parse.name -- - Parse.!!! (@{keyword "="} |-- Parse.ML_source -- Scan.optional Parse.text "") - >> (fn (name, (txt, cmt)) => Toplevel.theory (Method.method_setup name txt cmt))); - -val _ = - Outer_Syntax.local_theory @{command_spec "declaration"} "generic ML declaration" - (Parse.opt_keyword "pervasive" -- Parse.ML_source - >> (fn (pervasive, txt) => Isar_Cmd.declaration {syntax = false, pervasive = pervasive} txt)); - -val _ = - Outer_Syntax.local_theory @{command_spec "syntax_declaration"} "generic ML syntax declaration" - (Parse.opt_keyword "pervasive" -- Parse.ML_source - >> (fn (pervasive, txt) => Isar_Cmd.declaration {syntax = true, pervasive = pervasive} txt)); - -val _ = - Outer_Syntax.local_theory @{command_spec "simproc_setup"} "define simproc in ML" - (Parse.position Parse.name -- - (@{keyword "("} |-- Parse.enum1 "|" Parse.term --| @{keyword ")"} --| @{keyword "="}) -- - Parse.ML_source -- Scan.optional (@{keyword "identifier"} |-- Scan.repeat1 Parse.xname) [] - >> (fn (((a, b), c), d) => Isar_Cmd.simproc_setup a b c d)); - - -(* translation functions *) - -val _ = - Outer_Syntax.command @{command_spec "parse_ast_translation"} - "install parse ast translation functions" - (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.parse_ast_translation)); - -val _ = - Outer_Syntax.command @{command_spec "parse_translation"} - "install parse translation functions" - (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.parse_translation)); - -val _ = - Outer_Syntax.command @{command_spec "print_translation"} - "install print translation functions" - (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.print_translation)); - -val _ = - Outer_Syntax.command @{command_spec "typed_print_translation"} - "install typed print translation functions" - (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.typed_print_translation)); - -val _ = - Outer_Syntax.command @{command_spec "print_ast_translation"} - "install print ast translation functions" - (Parse.ML_source >> (Toplevel.theory o Isar_Cmd.print_ast_translation)); - - -(* oracles *) - -val _ = - Outer_Syntax.command @{command_spec "oracle"} "declare oracle" - (Parse.position Parse.name -- (@{keyword "="} |-- Parse.ML_source) >> - (fn (x, y) => Toplevel.theory (Isar_Cmd.oracle x y))); - - -(* bundled declarations *) - -val _ = - Outer_Syntax.local_theory @{command_spec "bundle"} "define bundle of declarations" - ((Parse.binding --| @{keyword "="}) -- Parse_Spec.xthms1 -- Parse.for_fixes - >> (uncurry Bundle.bundle_cmd)); - -val _ = - 
Outer_Syntax.command @{command_spec "include"} - "include declarations from bundle in proof body" - (Scan.repeat1 (Parse.position Parse.xname) >> (Toplevel.proof o Bundle.include_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "including"} - "include declarations from bundle in goal refinement" - (Scan.repeat1 (Parse.position Parse.xname) >> (Toplevel.proof o Bundle.including_cmd)); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_bundles"} - "print bundles of declarations" - (Scan.succeed (Toplevel.unknown_context o - Toplevel.keep (Bundle.print_bundles o Toplevel.context_of))); - - -(* local theories *) - -val _ = - Outer_Syntax.command @{command_spec "context"} "begin local theory context" - ((Parse.position Parse.xname >> (fn name => - Toplevel.begin_local_theory true (Named_Target.begin name)) || - Scan.optional Parse_Spec.includes [] -- Scan.repeat Parse_Spec.context_element - >> (fn (incls, elems) => Toplevel.open_target (Bundle.context_cmd incls elems))) - --| Parse.begin); - - -(* locales *) - -val locale_val = - Parse_Spec.locale_expression false -- - Scan.optional (@{keyword "+"} |-- Parse.!!! (Scan.repeat1 Parse_Spec.context_element)) [] || - Scan.repeat1 Parse_Spec.context_element >> pair ([], []); - -val _ = - Outer_Syntax.command @{command_spec "locale"} "define named proof context" - (Parse.binding -- - Scan.optional (@{keyword "="} |-- Parse.!!! locale_val) (([], []), []) -- Parse.opt_begin - >> (fn ((name, (expr, elems)), begin) => - Toplevel.begin_local_theory begin - (Expression.add_locale_cmd name Binding.empty expr elems #> snd))); - -fun interpretation_args mandatory = - Parse.!!! (Parse_Spec.locale_expression mandatory) -- - Scan.optional - (Parse.where_ |-- Parse.and_list1 (Parse_Spec.opt_thm_name ":" -- Parse.prop)) []; - -val _ = - Outer_Syntax.command @{command_spec "sublocale"} - "prove sublocale relation between a locale and a locale expression" - ((Parse.position Parse.xname --| (@{keyword "\"} || @{keyword "<"}) -- - interpretation_args false >> (fn (loc, (expr, equations)) => - Toplevel.theory_to_proof (Expression.sublocale_global_cmd loc expr equations))) - || interpretation_args false >> (fn (expr, equations) => - Toplevel.local_theory_to_proof NONE (Expression.sublocale_cmd expr equations))); - -val _ = - Outer_Syntax.command @{command_spec "interpretation"} - "prove interpretation of locale expression in local theory" - (interpretation_args true >> (fn (expr, equations) => - Toplevel.local_theory_to_proof NONE (Expression.interpretation_cmd expr equations))); - -val _ = - Outer_Syntax.command @{command_spec "interpret"} - "prove interpretation of locale expression in proof context" - (interpretation_args true >> (fn (expr, equations) => - Toplevel.proof' (Expression.interpret_cmd expr equations))); - - -(* classes *) - -val class_val = - Parse_Spec.class_expression -- - Scan.optional (@{keyword "+"} |-- Parse.!!! 
(Scan.repeat1 Parse_Spec.context_element)) [] || - Scan.repeat1 Parse_Spec.context_element >> pair []; - -val _ = - Outer_Syntax.command @{command_spec "class"} "define type class" - (Parse.binding -- Scan.optional (@{keyword "="} |-- class_val) ([], []) -- Parse.opt_begin - >> (fn ((name, (supclasses, elems)), begin) => - Toplevel.begin_local_theory begin - (Class_Declaration.class_cmd name supclasses elems #> snd))); - -val _ = - Outer_Syntax.local_theory_to_proof @{command_spec "subclass"} "prove a subclass relation" - (Parse.class >> Class_Declaration.subclass_cmd); - -val _ = - Outer_Syntax.command @{command_spec "instantiation"} "instantiate and prove type arity" - (Parse.multi_arity --| Parse.begin - >> (fn arities => Toplevel.begin_local_theory true (Class.instantiation_cmd arities))); - -val _ = - Outer_Syntax.command @{command_spec "instance"} "prove type arity or subclass relation" - ((Parse.class -- - ((@{keyword "\"} || @{keyword "<"}) |-- Parse.!!! Parse.class) >> Class.classrel_cmd || - Parse.multi_arity >> Class.instance_arity_cmd) >> Toplevel.theory_to_proof || - Scan.succeed (Toplevel.local_theory_to_proof NONE (Class.instantiation_instance I))); - - -(* arbitrary overloading *) - -val _ = - Outer_Syntax.command @{command_spec "overloading"} "overloaded definitions" - (Scan.repeat1 (Parse.name --| (@{keyword "\"} || @{keyword "=="}) -- Parse.term -- - Scan.optional (@{keyword "("} |-- (@{keyword "unchecked"} >> K false) --| @{keyword ")"}) true - >> Parse.triple1) --| Parse.begin - >> (fn operations => Toplevel.begin_local_theory true (Overloading.overloading_cmd operations))); - - -(* code generation *) - -val _ = - Outer_Syntax.command @{command_spec "code_datatype"} - "define set of code datatype constructors" - (Scan.repeat1 Parse.term >> (Toplevel.theory o Code.add_datatype_cmd)); - - - -(** proof commands **) - -(* statements *) - -fun theorem spec schematic kind = - Outer_Syntax.local_theory_to_proof' spec - ("state " ^ (if schematic then "schematic " ^ kind else kind)) - (Scan.optional (Parse_Spec.opt_thm_name ":" --| - Scan.ahead (Parse_Spec.includes >> K "" || - Parse_Spec.locale_keyword || Parse_Spec.statement_keyword)) Attrib.empty_binding -- - Scan.optional Parse_Spec.includes [] -- - Parse_Spec.general_statement >> (fn ((a, includes), (elems, concl)) => - ((if schematic then Specification.schematic_theorem_cmd else Specification.theorem_cmd) - kind NONE (K I) a includes elems concl))); - -val _ = theorem @{command_spec "theorem"} false Thm.theoremK; -val _ = theorem @{command_spec "lemma"} false Thm.lemmaK; -val _ = theorem @{command_spec "corollary"} false Thm.corollaryK; -val _ = theorem @{command_spec "schematic_theorem"} true Thm.theoremK; -val _ = theorem @{command_spec "schematic_lemma"} true Thm.lemmaK; -val _ = theorem @{command_spec "schematic_corollary"} true Thm.corollaryK; - -val _ = - Outer_Syntax.local_theory_to_proof @{command_spec "notepad"} "begin proof context" - (Parse.begin >> K Proof.begin_notepad); - -val _ = - Outer_Syntax.command @{command_spec "have"} "state local goal" - (Parse_Spec.statement >> (Toplevel.proof' o Isar_Cmd.have)); - -val _ = - Outer_Syntax.command @{command_spec "hence"} "old-style alias of \"then have\"" - (Parse_Spec.statement >> (Toplevel.proof' o Isar_Cmd.hence)); - -val _ = - Outer_Syntax.command @{command_spec "show"} - "state local goal, solving current obligation" - (Parse_Spec.statement >> (Toplevel.proof' o Isar_Cmd.show)); - -val _ = - Outer_Syntax.command @{command_spec "thus"} "old-style alias of \"then 
show\"" - (Parse_Spec.statement >> (Toplevel.proof' o Isar_Cmd.thus)); - - -(* facts *) - -val facts = Parse.and_list1 Parse_Spec.xthms1; - -val _ = - Outer_Syntax.command @{command_spec "then"} "forward chaining" - (Scan.succeed (Toplevel.proof Proof.chain)); - -val _ = - Outer_Syntax.command @{command_spec "from"} "forward chaining from given facts" - (facts >> (Toplevel.proof o Proof.from_thmss_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "with"} "forward chaining from given and current facts" - (facts >> (Toplevel.proof o Proof.with_thmss_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "note"} "define facts" - (Parse_Spec.name_facts >> (Toplevel.proof o Proof.note_thmss_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "using"} "augment goal facts" - (facts >> (Toplevel.proof o Proof.using_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "unfolding"} "unfold definitions in goal and facts" - (facts >> (Toplevel.proof o Proof.unfolding_cmd)); - - -(* proof context *) - -val _ = - Outer_Syntax.command @{command_spec "fix"} "fix local variables (Skolem constants)" - (Parse.fixes >> (Toplevel.proof o Proof.fix_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "assume"} "assume propositions" - (Parse_Spec.statement >> (Toplevel.proof o Proof.assume_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "presume"} "assume propositions, to be established later" - (Parse_Spec.statement >> (Toplevel.proof o Proof.presume_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "def"} "local definition (non-polymorphic)" - (Parse.and_list1 - (Parse_Spec.opt_thm_name ":" -- - ((Parse.binding -- Parse.opt_mixfix) -- - ((@{keyword "\"} || @{keyword "=="}) |-- Parse.!!! Parse.termp))) - >> (Toplevel.proof o Proof.def_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "obtain"} "generalized elimination" - (Parse.parname -- Scan.optional (Parse.fixes --| Parse.where_) [] -- Parse_Spec.statement - >> (fn ((x, y), z) => Toplevel.proof' (Obtain.obtain_cmd x y z))); - -val _ = - Outer_Syntax.command @{command_spec "guess"} "wild guessing (unstructured)" - (Scan.optional Parse.fixes [] >> (Toplevel.proof' o Obtain.guess_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "let"} "bind text variables" - (Parse.and_list1 (Parse.and_list1 Parse.term -- (@{keyword "="} |-- Parse.term)) - >> (Toplevel.proof o Proof.let_bind_cmd)); - -val _ = - Outer_Syntax.command @{command_spec "write"} "add concrete syntax for constants / fixed variables" - (opt_mode -- Parse.and_list1 (Parse.const -- Parse.mixfix) - >> (fn (mode, args) => Toplevel.proof (Proof.write_cmd mode args))); - -val _ = - Outer_Syntax.command @{command_spec "case"} "invoke local context" - ((@{keyword "("} |-- - Parse.!!! 
(Parse.position Parse.xname -- Scan.repeat (Parse.maybe Parse.binding) - --| @{keyword ")"}) || - Parse.position Parse.xname >> rpair []) -- Parse_Spec.opt_attribs >> (fn ((c, xs), atts) => - Toplevel.proof (Proof.invoke_case_cmd (c, xs, atts)))); - - -(* proof structure *) - -val _ = - Outer_Syntax.command @{command_spec "{"} "begin explicit proof block" - (Scan.succeed (Toplevel.proof Proof.begin_block)); - -val _ = - Outer_Syntax.command @{command_spec "}"} "end explicit proof block" - (Scan.succeed (Toplevel.proof Proof.end_block)); - -val _ = - Outer_Syntax.command @{command_spec "next"} "enter next proof block" - (Scan.succeed (Toplevel.proof Proof.next_block)); - - -(* end proof *) - -val _ = - Outer_Syntax.command @{command_spec "qed"} "conclude proof" - (Scan.option Method.parse >> (fn m => - (Option.map Method.report m; - Isar_Cmd.qed m))); - -val _ = - Outer_Syntax.command @{command_spec "by"} "terminal backward proof" - (Method.parse -- Scan.option Method.parse >> (fn (m1, m2) => - (Method.report m1; - Option.map Method.report m2; - Isar_Cmd.terminal_proof (m1, m2)))); - -val _ = - Outer_Syntax.command @{command_spec ".."} "default proof" - (Scan.succeed Isar_Cmd.default_proof); - -val _ = - Outer_Syntax.command @{command_spec "."} "immediate proof" - (Scan.succeed Isar_Cmd.immediate_proof); - -val _ = - Outer_Syntax.command @{command_spec "done"} "done proof" - (Scan.succeed Isar_Cmd.done_proof); - -val _ = - Outer_Syntax.command @{command_spec "sorry"} "skip proof (quick-and-dirty mode only!)" - (Scan.succeed Isar_Cmd.skip_proof); - -val _ = - Outer_Syntax.command @{command_spec "oops"} "forget proof" - (Scan.succeed Toplevel.forget_proof); - - -(* proof steps *) - -val _ = - Outer_Syntax.command @{command_spec "defer"} "shuffle internal proof state" - (Scan.optional Parse.nat 1 >> (Toplevel.proof o Proof.defer)); - -val _ = - Outer_Syntax.command @{command_spec "prefer"} "shuffle internal proof state" - (Parse.nat >> (Toplevel.proof o Proof.prefer)); - -val _ = - Outer_Syntax.command @{command_spec "apply"} "initial refinement step (unstructured)" - (Method.parse >> (fn m => (Method.report m; Toplevel.proofs (Proof.apply_results m)))); - -val _ = - Outer_Syntax.command @{command_spec "apply_end"} "terminal refinement step (unstructured)" - (Method.parse >> (fn m => (Method.report m; Toplevel.proofs (Proof.apply_end_results m)))); - -val _ = - Outer_Syntax.command @{command_spec "proof"} "backward proof step" - (Scan.option Method.parse >> (fn m => - (Option.map Method.report m; - Toplevel.actual_proof (Proof_Node.applys (Proof.proof_results m)) o - Toplevel.skip_proof (fn i => i + 1)))); - - -(* proof navigation *) - -fun report_back () = - Output.report [Markup.markup Markup.bad "Explicit backtracking"]; - -val _ = - Outer_Syntax.command @{command_spec "back"} "explicit backtracking of proof command" - (Scan.succeed - (Toplevel.actual_proof (fn prf => (report_back (); Proof_Node.back prf)) o - Toplevel.skip_proof (fn h => (report_back (); h)))); - - - -(** nested commands **) - -val props_text = - Scan.optional Parse.properties [] -- Parse.position Parse.string - >> (fn (props, (str, pos)) => - (Position.of_properties (Position.default_properties pos props), str)); - -val _ = - Outer_Syntax.improper_command @{command_spec "Isabelle.command"} "evaluate nested Isabelle command" - (props_text :|-- (fn (pos, str) => - (case Outer_Syntax.parse pos str of - [tr] => Scan.succeed (K tr) - | _ => Scan.fail_with (K (fn () => "exactly one command expected"))) - handle ERROR msg => 
Scan.fail_with (K (fn () => msg)))); - - - -(** diagnostic commands (for interactive mode only) **) - -val opt_modes = - Scan.optional (@{keyword "("} |-- Parse.!!! (Scan.repeat1 Parse.xname --| @{keyword ")"})) []; - -val opt_bang = Scan.optional (@{keyword "!"} >> K true) false; - -val _ = (*Proof General legacy*) - Outer_Syntax.improper_command @{command_spec "pretty_setmargin"} - "change default margin for pretty printing" - (Parse.nat >> (fn n => Toplevel.imperative (fn () => Pretty.margin_default := n))); - -val _ = - Outer_Syntax.improper_command @{command_spec "help"} - "retrieve outer syntax commands according to name patterns" - (Scan.repeat Parse.name >> - (fn pats => Toplevel.imperative (fn () => Outer_Syntax.help_outer_syntax pats))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_commands"} "print outer syntax commands" - (Scan.succeed (Toplevel.imperative Outer_Syntax.print_outer_syntax)); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_options"} "print configuration options" - (Scan.succeed (Toplevel.unknown_context o - Toplevel.keep (Attrib.print_options o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_context"} - "print context of local theory target" - (Scan.succeed (Toplevel.keep (Pretty.writeln_chunks o Toplevel.pretty_context))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_theory"} - "print logical theory contents (verbose!)" - (opt_bang >> (fn b => Toplevel.unknown_theory o - Toplevel.keep (Pretty.writeln o Proof_Display.pretty_full_theory b o Toplevel.theory_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_syntax"} - "print inner syntax of context (verbose!)" - (Scan.succeed (Toplevel.unknown_context o - Toplevel.keep (Proof_Context.print_syntax o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_defn_rules"} - "print definitional rewrite rules of context" - (Scan.succeed (Toplevel.unknown_context o - Toplevel.keep (Local_Defs.print_rules o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_abbrevs"} - "print constant abbreviations of context" - (Scan.succeed (Toplevel.unknown_context o - Toplevel.keep (Proof_Context.print_abbrevs o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_theorems"} - "print theorems of local theory or proof context" - (opt_bang >> (fn b => - Toplevel.unknown_context o - Toplevel.keep (Pretty.writeln o Pretty.chunks o Isar_Cmd.pretty_theorems b))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_locales"} - "print locales of this theory" - (Scan.succeed (Toplevel.unknown_theory o - Toplevel.keep (Locale.print_locales o Toplevel.theory_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_classes"} - "print classes of this theory" - (Scan.succeed (Toplevel.unknown_theory o - Toplevel.keep (Class.print_classes o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_locale"} - "print locale of this theory" - (opt_bang -- Parse.position Parse.xname >> (fn (b, name) => - Toplevel.unknown_theory o - Toplevel.keep (fn state => Locale.print_locale (Toplevel.theory_of state) b name))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_interps"} - "print interpretations of locale for this theory or proof context" - (Parse.position Parse.xname >> (fn name => - Toplevel.unknown_context o - Toplevel.keep (fn state => 
Locale.print_registrations (Toplevel.context_of state) name))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_dependencies"} - "print dependencies of locale expression" - (opt_bang -- Parse_Spec.locale_expression true >> (fn (b, expr) => - Toplevel.unknown_context o - Toplevel.keep (fn state => Expression.print_dependencies (Toplevel.context_of state) b expr))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_attributes"} - "print attributes of this theory" - (Scan.succeed (Toplevel.unknown_theory o - Toplevel.keep (Attrib.print_attributes o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_simpset"} - "print context of Simplifier" - (Scan.succeed (Toplevel.unknown_context o - Toplevel.keep (Pretty.writeln o Simplifier.pretty_simpset o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_rules"} "print intro/elim rules" - (Scan.succeed (Toplevel.unknown_context o - Toplevel.keep (Context_Rules.print_rules o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_methods"} "print methods of this theory" - (Scan.succeed (Toplevel.unknown_theory o - Toplevel.keep (Method.print_methods o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_antiquotations"} - "print document antiquotations" - (Scan.succeed (Toplevel.unknown_context o - Toplevel.keep (Thy_Output.print_antiquotations o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_ML_antiquotations"} - "print ML antiquotations" - (Scan.succeed (Toplevel.unknown_context o - Toplevel.keep (ML_Context.print_antiquotations o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "thy_deps"} "visualize theory dependencies" - (Scan.succeed Isar_Cmd.thy_deps); - -val _ = - Outer_Syntax.improper_command @{command_spec "locale_deps"} "visualize locale dependencies" - (Scan.succeed Isar_Cmd.locale_deps); - -val _ = - Outer_Syntax.improper_command @{command_spec "class_deps"} "visualize class dependencies" - (Scan.succeed Isar_Cmd.class_deps); - -val _ = - Outer_Syntax.improper_command @{command_spec "thm_deps"} "visualize theorem dependencies" - (Parse_Spec.xthms1 >> Isar_Cmd.thm_deps); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_binds"} - "print term bindings of proof context -- Proof General legacy" - (Scan.succeed (Toplevel.unknown_context o - Toplevel.keep - (Pretty.writeln_chunks o Proof_Context.pretty_term_bindings o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_term_bindings"} - "print term bindings of proof context" - (Scan.succeed (Toplevel.unknown_context o - Toplevel.keep - (Pretty.writeln_chunks o Proof_Context.pretty_term_bindings o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_facts"} "print facts of proof context" - (opt_bang >> (fn verbose => Toplevel.unknown_context o - Toplevel.keep (fn st => Proof_Context.print_local_facts (Toplevel.context_of st) verbose))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_cases"} "print cases of proof context" - (Scan.succeed (Toplevel.unknown_context o - Toplevel.keep (Pretty.writeln_chunks o Proof_Context.pretty_cases o Toplevel.context_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_statement"} - "print theorems as long statements" - (opt_modes -- Parse_Spec.xthms1 >> Isar_Cmd.print_stmts); - -val _ = - 
Outer_Syntax.improper_command @{command_spec "thm"} "print theorems" - (opt_modes -- Parse_Spec.xthms1 >> Isar_Cmd.print_thms); - -val _ = - Outer_Syntax.improper_command @{command_spec "prf"} "print proof terms of theorems" - (opt_modes -- Scan.option Parse_Spec.xthms1 >> Isar_Cmd.print_prfs false); - -val _ = - Outer_Syntax.improper_command @{command_spec "full_prf"} "print full proof terms of theorems" - (opt_modes -- Scan.option Parse_Spec.xthms1 >> Isar_Cmd.print_prfs true); - -val _ = - Outer_Syntax.improper_command @{command_spec "prop"} "read and print proposition" - (opt_modes -- Parse.term >> Isar_Cmd.print_prop); - -val _ = - Outer_Syntax.improper_command @{command_spec "term"} "read and print term" - (opt_modes -- Parse.term >> Isar_Cmd.print_term); - -val _ = - Outer_Syntax.improper_command @{command_spec "typ"} "read and print type" - (opt_modes -- (Parse.typ -- Scan.option (@{keyword "::"} |-- Parse.!!! Parse.sort)) - >> Isar_Cmd.print_type); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_codesetup"} "print code generator setup" - (Scan.succeed (Toplevel.unknown_theory o - Toplevel.keep (Code.print_codesetup o Toplevel.theory_of))); - -val _ = - Outer_Syntax.improper_command @{command_spec "unused_thms"} "find unused theorems" - (Scan.option ((Scan.repeat1 (Scan.unless Parse.minus Parse.name) --| Parse.minus) -- - Scan.option (Scan.repeat1 (Scan.unless Parse.minus Parse.name))) >> Isar_Cmd.unused_thms); - - - -(** system commands (for interactive mode only) **) - -val _ = - Outer_Syntax.improper_command @{command_spec "use_thy"} "use theory file" - (Parse.position Parse.name >> - (fn name => Toplevel.imperative (fn () => Thy_Info.use_thy name))); - -val _ = - Outer_Syntax.improper_command @{command_spec "remove_thy"} "remove theory from loader database" - (Parse.name >> (fn name => Toplevel.imperative (fn () => Thy_Info.remove_thy name))); - -val _ = - Outer_Syntax.improper_command @{command_spec "kill_thy"} - "kill theory -- try to remove from loader database" - (Parse.name >> (fn name => Toplevel.imperative (fn () => Thy_Info.kill_thy name))); - -val _ = (*partial Proof General legacy*) - Outer_Syntax.improper_command @{command_spec "display_drafts"} - "display raw source files with symbols" - (Scan.repeat1 Parse.path >> (fn names => - Toplevel.imperative (fn () => ignore (Present.display_drafts (map Path.explode names))))); - -val _ = - Outer_Syntax.improper_command @{command_spec "print_state"} - "print current proof state (if present)" - (opt_modes >> (fn modes => Toplevel.keep (Print_Mode.with_modes modes Toplevel.print_state))); - -val _ = (*Proof General legacy, e.g. 
for ProofGeneral-3.7.x*) - Outer_Syntax.improper_command @{command_spec "pr"} "print current proof state (if present)" - (opt_modes -- Scan.option Parse.nat >> (fn (modes, limit) => - Toplevel.keep (fn state => - (if Isabelle_Process.is_active () then error "Illegal TTY command" else (); - case limit of NONE => () | SOME n => Options.default_put_int @{system_option goals_limit} n; - Toplevel.quiet := false; - Print_Mode.with_modes modes Toplevel.print_state state)))); - -val _ = (*Proof General legacy*) - Outer_Syntax.improper_command @{command_spec "disable_pr"} - "disable printing of toplevel state" - (Scan.succeed (Toplevel.imperative (fn () => Toplevel.quiet := true))); - -val _ = (*Proof General legacy*) - Outer_Syntax.improper_command @{command_spec "enable_pr"} - "enable printing of toplevel state" - (Scan.succeed (Toplevel.imperative (fn () => Toplevel.quiet := false))); - -val _ = - Outer_Syntax.improper_command @{command_spec "commit"} - "commit current session to ML session image" - (Parse.opt_unit >> K (Toplevel.imperative Secure.commit)); - -val _ = - Outer_Syntax.improper_command @{command_spec "quit"} "quit Isabelle process" - (Parse.opt_unit >> (K (Toplevel.imperative quit))); - -val _ = - Outer_Syntax.improper_command @{command_spec "exit"} "exit Isar loop" - (Scan.succeed - (Toplevel.keep (fn state => - (Context.set_thread_data (try Toplevel.generic_theory_of state); - raise Runtime.TERMINATE)))); - -val _ = - Outer_Syntax.improper_command @{command_spec "welcome"} "print welcome message" - (Scan.succeed (Toplevel.imperative (writeln o Session.welcome))); - - - -(** raw Isar read-eval-print loop **) - -val _ = - Outer_Syntax.improper_command @{command_spec "init_toplevel"} "init toplevel point-of-interest" - (Scan.succeed (Toplevel.imperative Isar.init)); - -val _ = - Outer_Syntax.improper_command @{command_spec "linear_undo"} "undo commands" - (Scan.optional Parse.nat 1 >> - (fn n => Toplevel.imperative (fn () => Isar.linear_undo n))); - -val _ = - Outer_Syntax.improper_command @{command_spec "undo"} "undo commands (skipping closed proofs)" - (Scan.optional Parse.nat 1 >> - (fn n => Toplevel.imperative (fn () => Isar.undo n))); - -val _ = - Outer_Syntax.improper_command @{command_spec "undos_proof"} - "undo commands (skipping closed proofs)" - (Scan.optional Parse.nat 1 >> (fn n => - Toplevel.keep (fn state => - if Toplevel.is_proof state then (Isar.undo n; Isar.print ()) else raise Toplevel.UNDEF))); - -val _ = - Outer_Syntax.improper_command @{command_spec "cannot_undo"} - "partial undo -- Proof General legacy" - (Parse.name >> - (fn "end" => Toplevel.imperative (fn () => Isar.undo 1) - | txt => Toplevel.imperative (fn () => error ("Cannot undo " ^ quote txt)))); - -val _ = - Outer_Syntax.improper_command @{command_spec "kill"} - "kill partial proof or theory development" - (Scan.succeed (Toplevel.imperative Isar.kill)); - - - -(** extraction of programs from proofs **) - -val parse_vars = Scan.optional (Parse.$$$ "(" |-- Parse.list1 Parse.name --| Parse.$$$ ")") []; - -val _ = - Outer_Syntax.command @{command_spec "realizers"} - "specify realizers for primitive axioms / theorems, together with correctness proof" - (Scan.repeat1 (Parse.xname -- parse_vars --| Parse.$$$ ":" -- Parse.string -- Parse.string) >> - (fn xs => Toplevel.theory (fn thy => Extraction.add_realizers - (map (fn (((a, vs), s1), s2) => (Global_Theory.get_thm thy a, (vs, s1, s2))) xs) thy))); - -val _ = - Outer_Syntax.command @{command_spec "realizability"} - "add equations characterizing 
realizability" - (Scan.repeat1 Parse.string >> (Toplevel.theory o Extraction.add_realizes_eqns)); - -val _ = - Outer_Syntax.command @{command_spec "extract_type"} - "add equations characterizing type of extracted program" - (Scan.repeat1 Parse.string >> (Toplevel.theory o Extraction.add_typeof_eqns)); - -val _ = - Outer_Syntax.command @{command_spec "extract"} "extract terms from proofs" - (Scan.repeat1 (Parse.xname -- parse_vars) >> (fn xs => Toplevel.theory (fn thy => - Extraction.extract (map (apfst (Global_Theory.get_thm thy)) xs) thy))); - - - -(** end **) - -val _ = - Outer_Syntax.command @{command_spec "end"} "end context" - (Scan.succeed - (Toplevel.exit o Toplevel.end_local_theory o Toplevel.close_target o - Toplevel.end_proof (K Proof.end_notepad))); - -end; - diff --git a/core/Pure/Isar/keyword.ML b/core/Pure/Isar/keyword.ML deleted file mode 100644 index 70fdb4d9..00000000 --- a/core/Pure/Isar/keyword.ML +++ /dev/null @@ -1,274 +0,0 @@ -(* Title: Pure/Isar/keyword.ML - Author: Makarius - -Isar command keyword classification and global keyword tables. -*) - -signature KEYWORD = -sig - type T - val kind_of: T -> string - val kind_files_of: T -> string * string list - val control: T - val diag: T - val thy_begin: T - val thy_end: T - val thy_heading1: T - val thy_heading2: T - val thy_heading3: T - val thy_heading4: T - val thy_decl: T - val thy_load: T - val thy_load_files: string list -> T - val thy_goal: T - val qed: T - val qed_script: T - val qed_block: T - val qed_global: T - val prf_heading2: T - val prf_heading3: T - val prf_heading4: T - val prf_goal: T - val prf_block: T - val prf_open: T - val prf_close: T - val prf_chain: T - val prf_decl: T - val prf_asm: T - val prf_asm_goal: T - val prf_asm_goal_script: T - val prf_script: T - val kinds: T list - val tag: string -> T -> T - val tags_of: T -> string list - val tag_theory: T -> T - val tag_proof: T -> T - val tag_ml: T -> T - type spec = (string * string list) * string list - val spec: spec -> T - val command_spec: (string * spec) * Position.T -> (string * T) * Position.T - val get_lexicons: unit -> Scan.lexicon * Scan.lexicon - val is_keyword: string -> bool - val command_keyword: string -> T option - val command_files: string -> Path.T -> Path.T list - val command_tags: string -> string list - val dest: unit -> string list * string list - val define: string * T option -> unit - val is_diag: string -> bool - val is_control: string -> bool - val is_regular: string -> bool - val is_heading: string -> bool - val is_theory_begin: string -> bool - val is_theory_load: string -> bool - val is_theory: string -> bool - val is_theory_body: string -> bool - val is_proof: string -> bool - val is_proof_body: string -> bool - val is_theory_goal: string -> bool - val is_proof_goal: string -> bool - val is_qed: string -> bool - val is_qed_global: string -> bool - val is_printed: string -> bool -end; - -structure Keyword: KEYWORD = -struct - -(** keyword classification **) - -datatype T = Keyword of - {kind: string, - files: string list, (*extensions of embedded files*) - tags: string list}; (*tags in canonical reverse order*) - -fun kind s = Keyword {kind = s, files = [], tags = []}; -fun kind_of (Keyword {kind, ...}) = kind; -fun kind_files_of (Keyword {kind, files, ...}) = (kind, files); - -fun add_files fs (Keyword {kind, files, tags}) = - Keyword {kind = kind, files = files @ fs, tags = tags}; - - -(* kinds *) - -val control = kind "control"; -val diag = kind "diag"; -val thy_begin = kind "thy_begin"; -val thy_end = kind 
"thy_end"; -val thy_heading1 = kind "thy_heading1"; -val thy_heading2 = kind "thy_heading2"; -val thy_heading3 = kind "thy_heading3"; -val thy_heading4 = kind "thy_heading4"; -val thy_decl = kind "thy_decl"; -val thy_load = kind "thy_load"; -fun thy_load_files files = Keyword {kind = "thy_load", files = files, tags = []}; -val thy_goal = kind "thy_goal"; -val qed = kind "qed"; -val qed_script = kind "qed_script"; -val qed_block = kind "qed_block"; -val qed_global = kind "qed_global"; -val prf_heading2 = kind "prf_heading2"; -val prf_heading3 = kind "prf_heading3"; -val prf_heading4 = kind "prf_heading4"; -val prf_goal = kind "prf_goal"; -val prf_block = kind "prf_block"; -val prf_open = kind "prf_open"; -val prf_close = kind "prf_close"; -val prf_chain = kind "prf_chain"; -val prf_decl = kind "prf_decl"; -val prf_asm = kind "prf_asm"; -val prf_asm_goal = kind "prf_asm_goal"; -val prf_asm_goal_script = kind "prf_asm_goal_script"; -val prf_script = kind "prf_script"; - -val kinds = - [control, diag, thy_begin, thy_end, thy_heading1, thy_heading2, thy_heading3, thy_heading4, - thy_load, thy_decl, thy_goal, qed, qed_script, qed_block, qed_global, - prf_heading2, prf_heading3, prf_heading4, prf_goal, prf_block, prf_open, - prf_close, prf_chain, prf_decl, prf_asm, prf_asm_goal, prf_asm_goal_script, prf_script]; - - -(* tags *) - -fun tag t (Keyword {kind, files, tags}) = - Keyword {kind = kind, files = files, tags = update (op =) t tags}; -fun tags_of (Keyword {tags, ...}) = tags; - -val tag_theory = tag "theory"; -val tag_proof = tag "proof"; -val tag_ml = tag "ML"; - - -(* external names *) - -val name_table = Symtab.make (map (`kind_of) kinds); - -type spec = (string * string list) * string list; - -fun spec ((name, files), tags) = - (case Symtab.lookup name_table name of - SOME kind => - let val kind' = kind |> fold tag tags in - if null files then kind' - else if name = kind_of thy_load then kind' |> add_files files - else error ("Illegal specification of files for " ^ quote name) - end - | NONE => error ("Unknown outer syntax keyword kind " ^ quote name)); - -fun command_spec ((name, s), pos) = ((name, spec s), pos); - - - -(** global keyword tables **) - -datatype keywords = Keywords of - {lexicons: Scan.lexicon * Scan.lexicon, (*minor, major*) - commands: T Symtab.table}; (*command classification*) - -fun make_keywords (lexicons, commands) = - Keywords {lexicons = lexicons, commands = commands}; - -local - -val global_keywords = - Unsynchronized.ref (make_keywords ((Scan.empty_lexicon, Scan.empty_lexicon), Symtab.empty)); - -in - -fun get_keywords () = ! 
global_keywords; - -fun change_keywords f = CRITICAL (fn () => - Unsynchronized.change global_keywords - (fn Keywords {lexicons, commands} => make_keywords (f (lexicons, commands)))); - -end; - -fun get_lexicons () = get_keywords () |> (fn Keywords {lexicons, ...} => lexicons); -fun get_commands () = get_keywords () |> (fn Keywords {commands, ...} => commands); - - -(* lookup *) - -fun is_keyword s = - let - val (minor, major) = get_lexicons (); - val syms = Symbol.explode s; - in Scan.is_literal minor syms orelse Scan.is_literal major syms end; - -fun command_keyword name = Symtab.lookup (get_commands ()) name; - -fun command_files name path = - (case command_keyword name of - NONE => [] - | SOME (Keyword {kind, files, ...}) => - if kind <> kind_of thy_load then [] - else if null files then [path] - else map (fn ext => Path.ext ext path) files); - -val command_tags = these o Option.map tags_of o command_keyword; - -fun dest () = pairself (sort_strings o Scan.dest_lexicon) (get_lexicons ()); - - -(* define *) - -fun define (name, opt_kind) = change_keywords (fn ((minor, major), commands) => - (case opt_kind of - NONE => - let - val minor' = Scan.extend_lexicon (Symbol.explode name) minor; - in ((minor', major), commands) end - | SOME kind => - let - val major' = Scan.extend_lexicon (Symbol.explode name) major; - val commands' = Symtab.update (name, kind) commands; - in ((minor, major'), commands') end)); - - -(* command categories *) - -fun command_category ks = - let val tab = Symtab.make_set (map kind_of ks) in - fn name => - (case command_keyword name of - NONE => false - | SOME k => Symtab.defined tab (kind_of k)) - end; - -val is_diag = command_category [diag]; -val is_control = command_category [control]; -val is_regular = not o command_category [diag, control]; - -val is_heading = - command_category [thy_heading1, thy_heading2, thy_heading3, thy_heading4, - prf_heading2, prf_heading3, prf_heading4]; - -val is_theory_begin = command_category [thy_begin]; - -val is_theory_load = command_category [thy_load]; - -val is_theory = command_category - [thy_begin, thy_end, thy_heading1, thy_heading2, thy_heading3, thy_heading4, - thy_load, thy_decl, thy_goal]; - -val is_theory_body = command_category - [thy_heading1, thy_heading2, thy_heading3, thy_heading4, thy_load, thy_decl, thy_goal]; - -val is_proof = command_category - [qed, qed_script, qed_block, qed_global, prf_heading2, prf_heading3, prf_heading4, - prf_goal, prf_block, prf_open, prf_close, prf_chain, prf_decl, - prf_asm, prf_asm_goal, prf_asm_goal_script, prf_script]; - -val is_proof_body = command_category - [diag, prf_heading2, prf_heading3, prf_heading4, prf_block, prf_open, prf_close, prf_chain, - prf_decl, prf_asm, prf_asm_goal, prf_asm_goal_script, prf_script]; - -val is_theory_goal = command_category [thy_goal]; -val is_proof_goal = command_category [prf_goal, prf_asm_goal, prf_asm_goal_script]; -val is_qed = command_category [qed, qed_script, qed_block]; -val is_qed_global = command_category [qed_global]; - -val is_printed = is_theory_goal orf is_proof; - -end; - diff --git a/core/Pure/Isar/keyword.scala b/core/Pure/Isar/keyword.scala deleted file mode 100644 index e3115a46..00000000 --- a/core/Pure/Isar/keyword.scala +++ /dev/null @@ -1,74 +0,0 @@ -/* Title: Pure/Isar/keyword.scala - Author: Makarius - -Isar command keyword classification and keyword tables. 
-*/ - -package isabelle - - -object Keyword -{ - /* kinds */ - - val MINOR = "minor" - val CONTROL = "control" - val DIAG = "diag" - val THY_BEGIN = "thy_begin" - val THY_END = "thy_end" - val THY_HEADING1 = "thy_heading1" - val THY_HEADING2 = "thy_heading2" - val THY_HEADING3 = "thy_heading3" - val THY_HEADING4 = "thy_heading4" - val THY_DECL = "thy_decl" - val THY_LOAD = "thy_load" - val THY_GOAL = "thy_goal" - val QED = "qed" - val QED_SCRIPT = "qed_script" - val QED_BLOCK = "qed_block" - val QED_GLOBAL = "qed_global" - val PRF_HEADING2 = "prf_heading2" - val PRF_HEADING3 = "prf_heading3" - val PRF_HEADING4 = "prf_heading4" - val PRF_GOAL = "prf_goal" - val PRF_BLOCK = "prf_block" - val PRF_OPEN = "prf_open" - val PRF_CLOSE = "prf_close" - val PRF_CHAIN = "prf_chain" - val PRF_DECL = "prf_decl" - val PRF_ASM = "prf_asm" - val PRF_ASM_GOAL = "prf_asm_goal" - val PRF_ASM_GOAL_SCRIPT = "prf_asm_goal_script" - val PRF_SCRIPT = "prf_script" - - - /* categories */ - - val diag = Set(DIAG) - val control = Set(CONTROL) - - val heading = Set(THY_HEADING1, THY_HEADING2, THY_HEADING3, THY_HEADING4, - PRF_HEADING2, PRF_HEADING3, PRF_HEADING4) - - val theory = - Set(THY_BEGIN, THY_END, THY_HEADING1, THY_HEADING2, THY_HEADING3, THY_HEADING4, - THY_LOAD, THY_DECL, THY_GOAL) - - val theory_body = - Set(THY_HEADING1, THY_HEADING2, THY_HEADING3, THY_HEADING4, THY_LOAD, THY_DECL, THY_GOAL) - - val proof = - Set(QED, QED_SCRIPT, QED_BLOCK, QED_GLOBAL, PRF_HEADING2, PRF_HEADING3, PRF_HEADING4, - PRF_GOAL, PRF_BLOCK, PRF_OPEN, PRF_CLOSE, PRF_CHAIN, PRF_DECL, - PRF_ASM, PRF_ASM_GOAL, PRF_ASM_GOAL_SCRIPT, PRF_SCRIPT) - - val proof_body = - Set(DIAG, PRF_HEADING2, PRF_HEADING3, PRF_HEADING4, PRF_BLOCK, PRF_OPEN, PRF_CLOSE, PRF_CHAIN, - PRF_DECL, PRF_ASM, PRF_ASM_GOAL, PRF_ASM_GOAL_SCRIPT, PRF_SCRIPT) - - val theory_goal = Set(THY_GOAL) - val proof_goal = Set(PRF_GOAL, PRF_ASM_GOAL, PRF_ASM_GOAL_SCRIPT) - val qed = Set(QED, QED_SCRIPT, QED_BLOCK) - val qed_global = Set(QED_GLOBAL) -} - diff --git a/core/Pure/Isar/local_defs.ML b/core/Pure/Isar/local_defs.ML deleted file mode 100644 index d61161c2..00000000 --- a/core/Pure/Isar/local_defs.ML +++ /dev/null @@ -1,232 +0,0 @@ -(* Title: Pure/Isar/local_defs.ML - Author: Makarius - -Local definitions. 
-*) - -signature LOCAL_DEFS = -sig - val cert_def: Proof.context -> term -> (string * typ) * term - val abs_def: term -> (string * typ) * term - val expand: cterm list -> thm -> thm - val def_export: Assumption.export - val add_defs: ((binding * mixfix) * (Thm.binding * term)) list -> Proof.context -> - (term * (string * thm)) list * Proof.context - val add_def: (binding * mixfix) * term -> Proof.context -> (term * thm) * Proof.context - val fixed_abbrev: (binding * mixfix) * term -> Proof.context -> - (term * term) * Proof.context - val export: Proof.context -> Proof.context -> thm -> (thm list * thm list) * thm - val export_cterm: Proof.context -> Proof.context -> cterm -> (thm list * thm list) * cterm - val contract: Proof.context -> thm list -> cterm -> thm -> thm - val print_rules: Proof.context -> unit - val defn_add: attribute - val defn_del: attribute - val meta_rewrite_conv: Proof.context -> conv - val meta_rewrite_rule: Proof.context -> thm -> thm - val unfold: Proof.context -> thm list -> thm -> thm - val unfold_goals: Proof.context -> thm list -> thm -> thm - val unfold_tac: Proof.context -> thm list -> tactic - val fold: Proof.context -> thm list -> thm -> thm - val fold_tac: Proof.context -> thm list -> tactic - val derived_def: Proof.context -> bool -> term -> - ((string * typ) * term) * (Proof.context -> thm -> thm) -end; - -structure Local_Defs: LOCAL_DEFS = -struct - -(** primitive definitions **) - -(* prepare defs *) - -fun cert_def ctxt eq = - let - fun err msg = - cat_error msg ("The error(s) above occurred in definition:\n" ^ - quote (Syntax.string_of_term ctxt eq)); - val ((lhs, _), eq') = eq - |> Sign.no_vars ctxt - |> Primitive_Defs.dest_def ctxt Term.is_Free (Variable.is_fixed ctxt) (K true) - handle TERM (msg, _) => err msg | ERROR msg => err msg; - in (Term.dest_Free (Term.head_of lhs), eq') end; - -val abs_def = Primitive_Defs.abs_def #>> Term.dest_Free; - -fun mk_def ctxt args = - let - val (bs, rhss) = split_list args; - val Ts = map Term.fastype_of rhss; - val (xs, _) = Proof_Context.add_fixes (map2 (fn b => fn T => (b, SOME T, NoSyn)) bs Ts) ctxt; - val lhss = ListPair.map Free (xs, Ts); - in map Logic.mk_equals (lhss ~~ rhss) end; - - -(* export defs *) - -val head_of_def = - Term.dest_Free o Term.head_of o #1 o Logic.dest_equals o Term.strip_all_body; - - -(* - [x, x == a] - : - B x - ----------- - B a -*) -fun expand defs = - Drule.implies_intr_list defs - #> Drule.generalize ([], map (#1 o head_of_def o Thm.term_of) defs) - #> funpow (length defs) (fn th => Drule.reflexive_thm RS th); - -val expand_term = Envir.expand_term_frees o map (abs_def o Thm.term_of); - -fun def_export _ defs = (expand defs, expand_term defs); - - -(* add defs *) - -fun add_defs defs ctxt = - let - val ((xs, mxs), specs) = defs |> split_list |>> split_list; - val (bs, rhss) = specs |> split_list; - val eqs = mk_def ctxt (xs ~~ rhss); - val lhss = map (fst o Logic.dest_equals) eqs; - in - ctxt - |> Proof_Context.add_fixes (map2 (fn x => fn mx => (x, NONE, mx)) xs mxs) |> #2 - |> fold Variable.declare_term eqs - |> Proof_Context.add_assms_i def_export (map2 (fn b => fn eq => (b, [(eq, [])])) bs eqs) - |>> map2 (fn lhs => fn (name, [th]) => (lhs, (name, th))) lhss - end; - -fun add_def (var, rhs) ctxt = - let val ([(lhs, (_, th))], ctxt') = add_defs [(var, (Thm.empty_binding, rhs))] ctxt - in ((lhs, th), ctxt') end; - - -(* fixed_abbrev *) - -fun fixed_abbrev ((x, mx), rhs) ctxt = - let - val T = Term.fastype_of rhs; - val ([x'], ctxt') = ctxt - |> Variable.declare_term rhs - |> 
Proof_Context.add_fixes [(x, SOME T, mx)]; - val lhs = Free (x', T); - val _ = cert_def ctxt' (Logic.mk_equals (lhs, rhs)); - fun abbrev_export _ _ = (I, Envir.expand_term_frees [((x', T), rhs)]); - val (_, ctxt'') = Assumption.add_assms abbrev_export [] ctxt'; - in ((lhs, rhs), ctxt'') end; - - -(* specific export -- result based on educated guessing *) - -(* - [xs, xs == as] - : - B xs - -------------- - B as -*) -fun export inner outer = (*beware of closure sizes*) - let - val exp = Assumption.export false inner outer; - val exp_term = Assumption.export_term inner outer; - val asms = Assumption.local_assms_of inner outer; - in - fn th => - let - val th' = exp th; - val defs_asms = asms |> map (Thm.assume #> (fn asm => - (case try (head_of_def o Thm.prop_of) asm of - NONE => (asm, false) - | SOME x => - let val t = Free x in - (case try exp_term t of - NONE => (asm, false) - | SOME u => - if t aconv u then (asm, false) - else (Drule.abs_def (Drule.gen_all asm), true)) - end))); - in (pairself (map #1) (List.partition #2 defs_asms), th') end - end; - -(* - [xs, xs == as] - : - TERM b xs - -------------- and -------------- - TERM b as b xs == b as -*) -fun export_cterm inner outer ct = - export inner outer (Drule.mk_term ct) ||> Drule.dest_term; - -fun contract ctxt defs ct th = - th COMP (Raw_Simplifier.rewrite ctxt true defs ct COMP_INCR Drule.equal_elim_rule2); - - - -(** defived definitions **) - -(* transformation via rewrite rules *) - -structure Rules = Generic_Data -( - type T = thm list; - val empty = []; - val extend = I; - val merge = Thm.merge_thms; -); - -fun print_rules ctxt = - Pretty.writeln (Pretty.big_list "definitional rewrite rules:" - (map (Display.pretty_thm_item ctxt) (Rules.get (Context.Proof ctxt)))); - -val defn_add = Thm.declaration_attribute (Rules.map o Thm.add_thm); -val defn_del = Thm.declaration_attribute (Rules.map o Thm.del_thm); - - -(* meta rewrite rules *) - -fun meta_rewrite_conv ctxt = - Raw_Simplifier.rewrite_cterm (false, false, false) (K (K NONE)) - (empty_simpset ctxt - addsimps (Rules.get (Context.Proof ctxt)) - |> Raw_Simplifier.add_eqcong Drule.equals_cong); (*protect meta-level equality*) - -val meta_rewrite_rule = Conv.fconv_rule o meta_rewrite_conv; - - -(* rewriting with object-level rules *) - -fun meta f ctxt = f ctxt o map (meta_rewrite_rule ctxt); - -val unfold = meta Raw_Simplifier.rewrite_rule; -val unfold_goals = meta Raw_Simplifier.rewrite_goals_rule; -val unfold_tac = meta Raw_Simplifier.rewrite_goals_tac; -val fold = meta Raw_Simplifier.fold_rule; -val fold_tac = meta Raw_Simplifier.fold_goals_tac; - - -(* derived defs -- potentially within the object-logic *) - -fun derived_def ctxt conditional prop = - let - val ((c, T), rhs) = prop - |> Thm.cterm_of (Proof_Context.theory_of ctxt) - |> meta_rewrite_conv ctxt - |> (snd o Logic.dest_equals o Thm.prop_of) - |> conditional ? 
Logic.strip_imp_concl - |> (abs_def o #2 o cert_def ctxt); - fun prove ctxt' def = - Goal.prove ctxt' (Variable.add_free_names ctxt' prop []) [] prop - (fn {context = ctxt'', ...} => - ALLGOALS - (CONVERSION (meta_rewrite_conv ctxt'') THEN' - rewrite_goal_tac ctxt'' [def] THEN' - resolve_tac [Drule.reflexive_thm])) - handle ERROR msg => cat_error msg "Failed to prove definitional specification"; - in (((c, T), rhs), prove) end; - -end; diff --git a/core/Pure/Isar/local_theory.ML b/core/Pure/Isar/local_theory.ML deleted file mode 100644 index 2b13aba2..00000000 --- a/core/Pure/Isar/local_theory.ML +++ /dev/null @@ -1,351 +0,0 @@ -(* Title: Pure/Isar/local_theory.ML - Author: Makarius - -Local theory operations, with abstract target context. -*) - -type local_theory = Proof.context; -type generic_theory = Context.generic; - -signature LOCAL_THEORY = -sig - type operations - val assert: local_theory -> local_theory - val restore: local_theory -> local_theory - val level: Proof.context -> int - val assert_bottom: bool -> local_theory -> local_theory - val assert_nonbrittle: local_theory -> local_theory - val open_target: Name_Space.naming -> operations -> (local_theory -> local_theory) -> - local_theory -> local_theory - val close_target: local_theory -> local_theory - val map_contexts: (int -> Proof.context -> Proof.context) -> local_theory -> local_theory - val naming_of: local_theory -> Name_Space.naming - val full_name: local_theory -> binding -> string - val map_naming: (Name_Space.naming -> Name_Space.naming) -> local_theory -> local_theory - val conceal: local_theory -> local_theory - val new_group: local_theory -> local_theory - val reset_group: local_theory -> local_theory - val restore_naming: local_theory -> local_theory -> local_theory - val standard_morphism: local_theory -> Proof.context -> morphism - val standard_form: local_theory -> Proof.context -> (morphism -> 'a) -> 'a - val raw_theory_result: (theory -> 'a * theory) -> local_theory -> 'a * local_theory - val raw_theory: (theory -> theory) -> local_theory -> local_theory - val background_theory_result: (theory -> 'a * theory) -> local_theory -> 'a * local_theory - val background_theory: (theory -> theory) -> local_theory -> local_theory - val target_of: local_theory -> Proof.context - val target: (Proof.context -> Proof.context) -> local_theory -> local_theory - val target_morphism: local_theory -> morphism - val propagate_ml_env: generic_theory -> generic_theory - val operations_of: local_theory -> operations - val define: (binding * mixfix) * (Attrib.binding * term) -> local_theory -> - (term * (string * thm)) * local_theory - val define_internal: (binding * mixfix) * (Attrib.binding * term) -> local_theory -> - (term * (string * thm)) * local_theory - val note: Attrib.binding * thm list -> local_theory -> (string * thm list) * local_theory - val notes: (Attrib.binding * (thm list * Attrib.src list) list) list -> - local_theory -> (string * thm list) list * local_theory - val notes_kind: string -> (Attrib.binding * (thm list * Attrib.src list) list) list -> - local_theory -> (string * thm list) list * local_theory - val abbrev: Syntax.mode -> (binding * mixfix) * term -> local_theory -> - (term * term) * local_theory - val declaration: {syntax: bool, pervasive: bool} -> declaration -> local_theory -> local_theory - val subscription: string * morphism -> (morphism * bool) option -> morphism -> - local_theory -> local_theory - val pretty: local_theory -> Pretty.T list - val set_defsort: sort -> local_theory -> local_theory 
- val type_notation: bool -> Syntax.mode -> (typ * mixfix) list -> local_theory -> local_theory - val notation: bool -> Syntax.mode -> (term * mixfix) list -> local_theory -> local_theory - val class_alias: binding -> class -> local_theory -> local_theory - val type_alias: binding -> string -> local_theory -> local_theory - val const_alias: binding -> string -> local_theory -> local_theory - val activate: string * morphism -> (morphism * bool) option -> morphism -> - local_theory -> local_theory - val activate_nonbrittle: string * morphism -> (morphism * bool) option -> morphism -> - local_theory -> local_theory - val init: Name_Space.naming -> operations -> Proof.context -> local_theory - val exit: local_theory -> Proof.context - val exit_global: local_theory -> theory - val exit_result: (morphism -> 'a -> 'b) -> 'a * local_theory -> 'b * Proof.context - val exit_result_global: (morphism -> 'a -> 'b) -> 'a * local_theory -> 'b * theory -end; - -structure Local_Theory: LOCAL_THEORY = -struct - -(** local theory data **) - -(* type lthy *) - -type operations = - {define: bool -> (binding * mixfix) * (Attrib.binding * term) -> local_theory -> - (term * (string * thm)) * local_theory, - notes: string -> - (Attrib.binding * (thm list * Attrib.src list) list) list -> - local_theory -> (string * thm list) list * local_theory, - abbrev: Syntax.mode -> (binding * mixfix) * term -> local_theory -> - (term * term) * local_theory, - declaration: {syntax: bool, pervasive: bool} -> declaration -> local_theory -> local_theory, - subscription: string * morphism -> (morphism * bool) option -> morphism -> - local_theory -> local_theory, - pretty: local_theory -> Pretty.T list, - exit: local_theory -> Proof.context}; - -type lthy = - {naming: Name_Space.naming, - operations: operations, - after_close: local_theory -> local_theory, - brittle: bool, - target: Proof.context}; - -fun make_lthy (naming, operations, after_close, brittle, target) : lthy = - {naming = naming, operations = operations, after_close = after_close, brittle = brittle, target = target}; - - -(* context data *) - -structure Data = Proof_Data -( - type T = lthy list; - fun init _ = []; -); - -fun assert lthy = - if null (Data.get lthy) then error "Missing local theory context" else lthy; - -val bottom_of = List.last o Data.get o assert; -val top_of = hd o Data.get o assert; - -fun map_bottom f = - assert #> - Data.map (fn {naming, operations, after_close, brittle, target} :: parents => - make_lthy (f (naming, operations, after_close, brittle, target)) :: parents); - -fun restore lthy = #target (top_of lthy) |> Data.put (Data.get lthy); - - -(* nested structure *) - -val level = length o Data.get; (*1: main target at bottom, >= 2: nested context*) - -fun assert_bottom b lthy = - let - val _ = assert lthy; - val b' = level lthy <= 1; - in - if b andalso not b' then error "Not at bottom of local theory nesting" - else if not b andalso b' then error "Already at bottom of local theory nesting" - else lthy - end; - -fun open_target naming operations after_close target = - assert target - |> Data.map (cons (make_lthy (naming, operations, after_close, true, target))); - -fun close_target lthy = - let - val _ = assert_bottom false lthy; - val ({after_close, ...} :: rest) = Data.get lthy; - in lthy |> Data.put rest |> restore |> after_close end; - -fun map_contexts f lthy = - let val n = level lthy in - lthy |> (Data.map o map_index) (fn (i, {naming, operations, after_close, brittle, target}) => - make_lthy (naming, operations, after_close, brittle, 
- target - |> Context_Position.set_visible false - |> f (n - i - 1) - |> Context_Position.restore_visible target)) - |> f n - end; - - -(* brittle context -- implicit for nested structures *) - -fun mark_brittle lthy = - if level lthy = 1 - then map_bottom (fn (naming, operations, after_close, brittle, target) => - (naming, operations, after_close, true, target)) lthy - else lthy; - -fun assert_nonbrittle lthy = - if #brittle (top_of lthy) - then error "Brittle local theory context" - else lthy; - - -(* naming *) - -val naming_of = #naming o top_of; -val full_name = Name_Space.full_name o naming_of; - -fun map_naming f = - map_bottom (fn (naming, operations, after_close, brittle, target) => - (f naming, operations, after_close, brittle, target)); - -val conceal = map_naming Name_Space.conceal; -val new_group = map_naming Name_Space.new_group; -val reset_group = map_naming Name_Space.reset_group; - -val restore_naming = map_naming o K o naming_of; - - -(* standard morphisms *) - -fun standard_morphism lthy ctxt = - Proof_Context.norm_export_morphism lthy ctxt $> - Morphism.binding_morphism "Local_Theory.standard_binding" - (Name_Space.transform_binding (naming_of lthy)); - -fun standard_form lthy ctxt x = - Morphism.form (Morphism.transform (standard_morphism lthy ctxt) x); - - -(* background theory *) - -fun raw_theory_result f lthy = - let - val (res, thy') = f (Proof_Context.theory_of lthy); - val lthy' = map_contexts (K (Proof_Context.transfer thy')) lthy; - in (res, lthy') end; - -fun raw_theory f = #2 o raw_theory_result (f #> pair ()); - -fun background_theory_result f lthy = - lthy |> raw_theory_result (fn thy => - thy - |> Sign.map_naming (K (naming_of lthy)) - |> f - ||> Sign.restore_naming thy); - -fun background_theory f = #2 o background_theory_result (f #> pair ()); - - -(* target contexts *) - -val target_of = #target o bottom_of; - -fun target f lthy = - let - val ctxt = target_of lthy; - val ctxt' = ctxt - |> Context_Position.set_visible false - |> f - |> Context_Position.restore_visible ctxt; - val thy' = Proof_Context.theory_of ctxt'; - in map_contexts (fn 0 => K ctxt' | _ => Proof_Context.transfer thy') lthy end; - -fun target_morphism lthy = standard_morphism lthy (target_of lthy); - -fun propagate_ml_env (context as Context.Proof lthy) = - let val inherit = ML_Env.inherit context in - lthy - |> background_theory (Context.theory_map inherit) - |> map_contexts (K (Context.proof_map inherit)) - |> Context.Proof - end - | propagate_ml_env context = context; - - - -(** operations **) - -val operations_of = #operations o top_of; - - -(* primitives *) - -fun operation f lthy = f (operations_of lthy) lthy; -fun operation2 f x y = operation (fn ops => f ops x y); - -val pretty = operation #pretty; -val abbrev = operation2 #abbrev; -val define = operation2 #define false; -val define_internal = operation2 #define true; -val notes_kind = operation2 #notes; -val declaration = operation2 #declaration; -fun subscription dep_morph mixin export = - assert_bottom true #> operation (fn ops => #subscription ops dep_morph mixin export); - - -(* basic derived operations *) - -val notes = notes_kind ""; -fun note (a, ths) = notes [(a, [(ths, [])])] #>> the_single; - -fun set_defsort S = - declaration {syntax = true, pervasive = false} - (K (Context.mapping (Sign.set_defsort S) (Proof_Context.set_defsort S))); - - -(* notation *) - -fun type_notation add mode raw_args lthy = - let - val args = map (apfst (Logic.type_map (Assumption.export_term lthy (target_of lthy)))) raw_args; - in - 
declaration {syntax = true, pervasive = false} - (Proof_Context.generic_type_notation add mode args) lthy - end; - -fun notation add mode raw_args lthy = - let - val args = map (apfst (Assumption.export_term lthy (target_of lthy))) raw_args - in - declaration {syntax = true, pervasive = false} - (Proof_Context.generic_notation add mode args) lthy - end; - - -(* name space aliases *) - -fun alias global_alias local_alias b name = - declaration {syntax = true, pervasive = false} (fn phi => - let val b' = Morphism.binding phi b - in Context.mapping (global_alias b' name) (local_alias b' name) end); - -val class_alias = alias Sign.class_alias Proof_Context.class_alias; -val type_alias = alias Sign.type_alias Proof_Context.type_alias; -val const_alias = alias Sign.const_alias Proof_Context.const_alias; - - -(* activation of locale fragments *) - -fun activate_nonbrittle dep_morph mixin export = - map_bottom (fn (naming, operations, after_close, brittle, target) => - (naming, operations, after_close, brittle, - (Context.proof_map ooo Locale.add_registration) dep_morph mixin export target)); - -fun activate dep_morph mixin export = - mark_brittle #> activate_nonbrittle dep_morph mixin export; - - - -(** init and exit **) - -(* init *) - -fun init naming operations target = - target |> Data.map - (fn [] => [make_lthy (naming, operations, I, false, target)] - | _ => error "Local theory already initialized"); - - -(* exit *) - -val exit = operation #exit; -val exit_global = Proof_Context.theory_of o exit; - -fun exit_result f (x, lthy) = - let - val ctxt = exit lthy; - val phi = standard_morphism lthy ctxt; - in (f phi x, ctxt) end; - -fun exit_result_global f (x, lthy) = - let - val thy = exit_global lthy; - val thy_ctxt = Proof_Context.init_global thy; - val phi = standard_morphism lthy thy_ctxt; - in (f phi x, thy) end; - -end; diff --git a/core/Pure/Isar/locale.ML b/core/Pure/Isar/locale.ML deleted file mode 100644 index a5bd10e2..00000000 --- a/core/Pure/Isar/locale.ML +++ /dev/null @@ -1,687 +0,0 @@ -(* Title: Pure/Isar/locale.ML - Author: Clemens Ballarin, TU Muenchen - -Locales -- managed Isar proof contexts, based on Pure predicates. - -Draws basic ideas from Florian Kammueller's original version of -locales, but uses the richer infrastructure of Isar instead of the raw -meta-logic. Furthermore, structured import of contexts (with merge -and rename operations) are provided, as well as type-inference of the -signature parts, and predicate definitions of the specification text. - -Interpretation enables the reuse of theorems of locales in other -contexts, namely those defined by theories, structured proofs and -locales themselves. - -See also: - -[1] Clemens Ballarin. Locales and Locale Expressions in Isabelle/Isar. - In Stefano Berardi et al., Types for Proofs and Programs: International - Workshop, TYPES 2003, Torino, Italy, LNCS 3085, pages 34-50, 2004. -[2] Clemens Ballarin. Interpretation of Locales in Isabelle: Managing - Dependencies between Locales. Technical Report TUM-I0607, Technische - Universitaet Muenchen, 2006. -[3] Clemens Ballarin. Interpretation of Locales in Isabelle: Theories and - Proof Contexts. In J.M. Borwein and W.M. Farmer, MKM 2006, LNAI 4108, - pages 31-43, 2006. 
-*) - -signature LOCALE = -sig - (* Locale specification *) - val register_locale: binding -> - (string * sort) list * ((string * typ) * mixfix) list -> - term option * term list -> - thm option * thm option -> thm list -> - declaration list -> - (string * (Attrib.binding * (thm list * Attrib.src list) list) list) list -> - (string * morphism) list -> theory -> theory - val intern: theory -> xstring -> string - val check: theory -> xstring * Position.T -> string - val extern: theory -> string -> xstring - val markup_name: Proof.context -> string -> string - val pretty_name: Proof.context -> string -> Pretty.T - val defined: theory -> string -> bool - val params_of: theory -> string -> ((string * typ) * mixfix) list - val intros_of: theory -> string -> thm option * thm option - val axioms_of: theory -> string -> thm list - val instance_of: theory -> string -> morphism -> term list - val specification_of: theory -> string -> term option * term list - - (* Storing results *) - val add_thmss: string -> string -> (Attrib.binding * (thm list * Attrib.src list) list) list -> - Proof.context -> Proof.context - val add_declaration: string -> bool -> declaration -> Proof.context -> Proof.context - - (* Activation *) - val activate_declarations: string * morphism -> Proof.context -> Proof.context - val activate_facts: morphism option -> string * morphism -> Context.generic -> Context.generic - val init: string -> theory -> Proof.context - - (* Reasoning about locales *) - val get_witnesses: Proof.context -> thm list - val get_intros: Proof.context -> thm list - val get_unfolds: Proof.context -> thm list - val witness_add: attribute - val intro_add: attribute - val unfold_add: attribute - val intro_locales_tac: bool -> Proof.context -> thm list -> tactic - - (* Registrations and dependencies *) - val add_registration: string * morphism -> (morphism * bool) option -> - morphism -> Context.generic -> Context.generic - val amend_registration: string * morphism -> morphism * bool -> - morphism -> Context.generic -> Context.generic - val registrations_of: Context.generic -> string -> (string * morphism) list - val all_registrations_of: Context.generic -> (string * morphism) list - val add_dependency: string -> string * morphism -> (morphism * bool) option -> - morphism -> theory -> theory - - (* Diagnostic *) - val print_locales: theory -> unit - val print_locale: theory -> bool -> xstring * Position.T -> unit - val print_registrations: Proof.context -> xstring * Position.T -> unit - val print_dependencies: Proof.context -> bool -> morphism -> (string * morphism) list -> unit - val pretty_locale_deps: theory -> {name: string, parents: string list, body: Pretty.T} list -end; - -structure Locale: LOCALE = -struct - -datatype ctxt = datatype Element.ctxt; - - -(*** Locales ***) - -type mixins = (((morphism * bool) * serial) list) Inttab.table; - (* table of mixin lists, per list mixins in reverse order of declaration; - lists indexed by registration/dependency serial, - entries for empty lists may be omitted *) - -fun lookup_mixins serial' mixins = Inttab.lookup_list mixins serial'; - -fun merge_mixins mixs : mixins = Inttab.merge_list (eq_snd op =) mixs; - -fun insert_mixin serial' mixin = Inttab.cons_list (serial', (mixin, serial ())); - -fun rename_mixin (old, new) mix = - (case Inttab.lookup mix old of - NONE => mix - | SOME mxs => Inttab.delete old mix |> Inttab.update_new (new, mxs)); - -fun compose_mixins mixins = - fold_rev Morphism.compose (map (fst o fst) mixins) Morphism.identity; - -datatype 
locale = Loc of { - (** static part **) - parameters: (string * sort) list * ((string * typ) * mixfix) list, - (* type and term parameters *) - spec: term option * term list, - (* assumptions (as a single predicate expression) and defines *) - intros: thm option * thm option, - axioms: thm list, - (** dynamic part **) - syntax_decls: (declaration * serial) list, - (* syntax declarations *) - notes: ((string * (Attrib.binding * (thm list * Attrib.src list) list) list) * serial) list, - (* theorem declarations *) - dependencies: ((string * (morphism * morphism)) * serial) list - (* locale dependencies (sublocale relation) in reverse order *), - mixins: mixins - (* mixin part of dependencies *) -}; - -fun mk_locale ((parameters, spec, intros, axioms), - ((syntax_decls, notes), (dependencies, mixins))) = - Loc {parameters = parameters, intros = intros, axioms = axioms, spec = spec, - syntax_decls = syntax_decls, notes = notes, dependencies = dependencies, mixins = mixins}; - -fun map_locale f (Loc {parameters, spec, intros, axioms, - syntax_decls, notes, dependencies, mixins}) = - mk_locale (f ((parameters, spec, intros, axioms), - ((syntax_decls, notes), (dependencies, mixins)))); - -fun merge_locale - (Loc {parameters, spec, intros, axioms, syntax_decls, notes, dependencies, mixins}, - Loc {syntax_decls = syntax_decls', notes = notes', - dependencies = dependencies', mixins = mixins', ...}) = - mk_locale - ((parameters, spec, intros, axioms), - ((merge (eq_snd op =) (syntax_decls, syntax_decls'), - merge (eq_snd op =) (notes, notes')), - (merge (eq_snd op =) (dependencies, dependencies'), - (merge_mixins (mixins, mixins'))))); - -structure Locales = Theory_Data -( - type T = locale Name_Space.table; - val empty : T = Name_Space.empty_table "locale"; - val extend = I; - val merge = Name_Space.join_tables (K merge_locale); -); - -val locale_space = Name_Space.space_of_table o Locales.get; -val intern = Name_Space.intern o locale_space; -fun check thy = #1 o Name_Space.check (Context.Theory thy) (Locales.get thy); - -fun extern thy = Name_Space.extern (Proof_Context.init_global thy) (locale_space thy); - -fun markup_extern ctxt = - Name_Space.markup_extern ctxt (locale_space (Proof_Context.theory_of ctxt)); - -fun markup_name ctxt name = markup_extern ctxt name |-> Markup.markup; -fun pretty_name ctxt name = markup_extern ctxt name |> Pretty.mark_str; - -val get_locale = Option.map #2 oo (Name_Space.lookup_key o Locales.get); -val defined = is_some oo get_locale; - -fun the_locale thy name = - (case get_locale thy name of - SOME (Loc loc) => loc - | NONE => error ("Unknown locale " ^ quote name)); - -fun register_locale binding parameters spec intros axioms syntax_decls notes dependencies thy = - thy |> Locales.map (Name_Space.define (Context.Theory thy) true - (binding, - mk_locale ((parameters, spec, intros, axioms), - ((map (fn decl => (decl, serial ())) syntax_decls, map (fn n => (n, serial ())) notes), - (map (fn d => (d |> apsnd (rpair Morphism.identity), serial ())) dependencies, - Inttab.empty)))) #> snd); - (* FIXME Morphism.identity *) - -fun change_locale name = - Locales.map o Name_Space.map_table_entry name o map_locale o apsnd; - - -(** Primitive operations **) - -fun params_of thy = snd o #parameters o the_locale thy; - -fun intros_of thy = #intros o the_locale thy; - -fun axioms_of thy = #axioms o the_locale thy; - -fun instance_of thy name morph = params_of thy name |> - map (Morphism.term morph o Free o #1); - -fun specification_of thy = #spec o the_locale thy; - -fun 
dependencies_of thy name = the_locale thy name |> - #dependencies; - -fun mixins_of thy name serial = the_locale thy name |> - #mixins |> lookup_mixins serial; - -(* FIXME unused!? *) -fun identity_on thy name morph = - let val mk_instance = instance_of thy name - in ListPair.all Term.aconv_untyped (mk_instance Morphism.identity, mk_instance morph) end; - -(* Print instance and qualifiers *) - -fun pretty_reg ctxt export (name, morph) = - let - val thy = Proof_Context.theory_of ctxt; - val morph' = morph $> export; - fun print_qual (qual, mandatory) = qual ^ (if mandatory then "!" else "?"); - fun prt_quals qs = Pretty.str (space_implode "." (map print_qual qs)); - val prt_term = Pretty.quote o Syntax.pretty_term ctxt; - fun prt_term' t = - if Config.get ctxt show_types - then Pretty.block [prt_term t, Pretty.brk 1, Pretty.str "::", - Pretty.brk 1, (Pretty.quote o Syntax.pretty_typ ctxt) (type_of t)] - else prt_term t; - fun prt_inst ts = - Pretty.block (Pretty.breaks (pretty_name ctxt name :: map prt_term' ts)); - - val qs = Binding.name "x" |> Morphism.binding morph' |> Binding.prefix_of; - val ts = instance_of thy name morph'; - in - (case qs of - [] => prt_inst ts - | qs => Pretty.block [prt_quals qs, Pretty.brk 1, Pretty.str ":", Pretty.brk 1, prt_inst ts]) - end; - - -(*** Identifiers: activated locales in theory or proof context ***) - -type idents = term list list Symtab.table; (* name ~> instance (grouped by name) *) - -val empty_idents : idents = Symtab.empty; -val insert_idents = Symtab.insert_list (eq_list (op aconv)); -val merge_idents = Symtab.merge_list (eq_list (op aconv)); - -fun redundant_ident thy idents (name, instance) = - exists (fn pat => Pattern.matchess thy (pat, instance)) (Symtab.lookup_list idents name); - -structure Idents = Generic_Data -( - type T = idents; - val empty = empty_idents; - val extend = I; - val merge = merge_idents; -); - - -(** Resolve locale dependencies in a depth-first fashion **) - -local - -val roundup_bound = 120; - -fun add thy depth stem export (name, morph) (deps, marked) = - if depth > roundup_bound - then error "Roundup bound exceeded (sublocale relation probably not terminating)." - else - let - val instance = instance_of thy name (morph $> stem $> export); - in - if redundant_ident thy marked (name, instance) then (deps, marked) - else - let - (* no inheritance of mixins, regardless of requests by clients *) - val dependencies = dependencies_of thy name |> - map (fn ((name', (morph', export')), serial') => - (name', morph' $> export' $> compose_mixins (mixins_of thy name serial'))); - val marked' = insert_idents (name, instance) marked; - val (deps', marked'') = - fold_rev (add thy (depth + 1) (morph $> stem) export) dependencies - ([], marked'); - in - ((name, morph $> stem) :: deps' @ deps, marked'') - end - end; - -in - -(* Note that while identifiers always have the external (exported) view, activate_dep - is presented with the internal view. *) - -fun roundup thy activate_dep export (name, morph) (marked, input) = - let - (* Find all dependencies including new ones (which are dependencies enriching - existing registrations). *) - val (dependencies, marked') = - add thy 0 Morphism.identity export (name, morph) ([], empty_idents); - (* Filter out fragments from marked; these won't be activated. 
*) - val dependencies' = filter_out (fn (name, morph) => - redundant_ident thy marked (name, instance_of thy name (morph $> export))) dependencies; - in - (merge_idents (marked, marked'), input |> fold_rev activate_dep dependencies') - end; - -end; - - -(*** Registrations: interpretations in theories or proof contexts ***) - -val total_ident_ord = prod_ord fast_string_ord (list_ord Term_Ord.fast_term_ord); - -structure Idtab = Table(type key = string * term list val ord = total_ident_ord); - -structure Registrations = Generic_Data -( - type T = ((morphism * morphism) * serial) Idtab.table * mixins; - (* registrations, indexed by locale name and instance; - unique registration serial points to mixin list *) - val empty = (Idtab.empty, Inttab.empty); - val extend = I; - fun merge ((reg1, mix1), (reg2, mix2)) : T = - (Idtab.join (fn id => fn ((_, s1), (_, s2)) => - if s1 = s2 then raise Idtab.SAME else raise Idtab.DUP id) (reg1, reg2), - merge_mixins (mix1, mix2)) - handle Idtab.DUP id => - (* distinct interpretations with same base: merge their mixins *) - let - val (_, s1) = Idtab.lookup reg1 id |> the; - val (morph2, s2) = Idtab.lookup reg2 id |> the; - val reg2' = Idtab.update (id, (morph2, s1)) reg2; - val _ = warning "Removed duplicate interpretation after retrieving its mixins."; - (* FIXME print interpretations, - which is not straightforward without theory context *) - in merge ((reg1, mix1), (reg2', rename_mixin (s2, s1) mix2)) end; - (* FIXME consolidate with dependencies, consider one data slot only *) -); - - -(* Primitive operations *) - -fun add_reg thy export (name, morph) = - Registrations.map (apfst (Idtab.insert (K false) - ((name, instance_of thy name (morph $> export)), ((morph, export), serial ())))); - -fun add_mixin serial' mixin = - (* registration to be amended identified by its serial id *) - Registrations.map (apsnd (insert_mixin serial' mixin)); - -fun get_mixins context (name, morph) = - let - val thy = Context.theory_of context; - val (regs, mixins) = Registrations.get context; - in - (case Idtab.lookup regs (name, instance_of thy name morph) of - NONE => [] - | SOME (_, serial) => lookup_mixins serial mixins) - end; - -fun collect_mixins context (name, morph) = - let - val thy = Context.theory_of context; - in - roundup thy (fn dep => fn mixins => merge (eq_snd op =) (mixins, get_mixins context dep)) - Morphism.identity (name, morph) - (insert_idents (name, instance_of thy name morph) empty_idents, []) - |> snd |> filter (snd o fst) (* only inheritable mixins *) - |> (fn x => merge (eq_snd op =) (x, get_mixins context (name, morph))) - |> compose_mixins - end; - -fun get_registrations context select = - Registrations.get context - |>> Idtab.dest |>> select - (* with inherited mixins *) - |-> (fn regs => fn _ => map (fn ((name, _), ((base, export) ,_)) => - (name, base $> (collect_mixins context (name, base $> export)) $> export)) regs); - -fun registrations_of context name = - get_registrations context (filter (curry (op =) name o fst o fst)); - -fun all_registrations_of context = get_registrations context I; - - -(*** Activate context elements of locale ***) - -(* Declarations, facts and entire locale content *) - -fun activate_syntax_decls (name, morph) context = - let - val thy = Context.theory_of context; - val {syntax_decls, ...} = the_locale thy name; - in - context - |> fold_rev (fn (decl, _) => decl morph) syntax_decls - end; - -fun activate_notes activ_elem transfer context export' (name, morph) input = - let - val thy = Context.theory_of context; - 
val mixin = - (case export' of - NONE => Morphism.identity - | SOME export => collect_mixins context (name, morph $> export) $> export); - val morph' = transfer input $> morph $> mixin; - val notes' = - grouped 100 Par_List.map - (Element.transform_ctxt morph' o Notes o #1) (#notes (the_locale thy name)); - in - fold_rev (fn elem => fn res => activ_elem (Element.transform_ctxt (transfer res) elem) res) - notes' input - end; - -fun activate_all name thy activ_elem transfer (marked, input) = - let - val {parameters = (_, params), spec = (asm, defs), ...} = the_locale thy name; - val input' = input |> - (not (null params) ? - activ_elem (Fixes (map (fn ((x, T), mx) => (Binding.name x, SOME T, mx)) params))) |> - (* FIXME type parameters *) - (case asm of SOME A => activ_elem (Assumes [(Attrib.empty_binding, [(A, [])])]) | _ => I) |> - (not (null defs) ? - activ_elem (Defines (map (fn def => (Attrib.empty_binding, (def, []))) defs))); - val activate = activate_notes activ_elem transfer (Context.Theory thy) NONE; - in - roundup thy activate Morphism.identity (name, Morphism.identity) (marked, input') - end; - - -(** Public activation functions **) - -fun activate_declarations dep = Context.proof_map (fn context => - let - val thy = Context.theory_of context; - in - roundup thy activate_syntax_decls Morphism.identity dep (Idents.get context, context) - |-> Idents.put - end); - -fun activate_facts export dep context = - let - val thy = Context.theory_of context; - val activate = - activate_notes Element.init' - (Morphism.transfer_morphism o Context.theory_of) context export; - in - roundup thy activate (the_default Morphism.identity export) dep (Idents.get context, context) - |-> Idents.put - end; - -fun init name thy = - let - val context = Context.Proof (Proof_Context.init_global thy); - val marked = Idents.get context; - val (marked', context') = (empty_idents, context) - |> activate_all name thy Element.init' (Morphism.transfer_morphism o Context.theory_of); - in - context' - |> Idents.put (merge_idents (marked, marked')) - |> Context.proof_of - end; - - -(*** Add and extend registrations ***) - -fun amend_registration (name, morph) mixin export context = - let - val thy = Context.theory_of context; - val ctxt = Context.proof_of context; - - val regs = Registrations.get context |> fst; - val base = instance_of thy name (morph $> export); - val serial' = - (case Idtab.lookup regs (name, base) of - NONE => - error ("No interpretation of locale " ^ quote (markup_name ctxt name) ^ - " with\nparameter instantiation " ^ - space_implode " " (map (quote o Syntax.string_of_term_global thy) base) ^ - " available") - | SOME (_, serial') => serial'); - in - add_mixin serial' mixin context - end; - -(* Note that a registration that would be subsumed by an existing one will not be - generated, and it will not be possible to amend it. *) - -fun add_registration (name, base_morph) mixin export context = - let - val thy = Context.theory_of context; - val mix = (case mixin of NONE => Morphism.identity | SOME (mix, _) => mix); - val morph = base_morph $> mix; - val inst = instance_of thy name morph; - val idents = Idents.get context; - in - if redundant_ident thy idents (name, inst) - then context (* FIXME amend mixins? 
*) - else - (idents, context) - (* add new registrations with inherited mixins *) - |> roundup thy (add_reg thy export) export (name, morph) - |> snd - (* add mixin *) - |> (case mixin of NONE => I | SOME mixin => amend_registration (name, morph) mixin export) - (* activate import hierarchy as far as not already active *) - |> activate_facts (SOME export) (name, morph) - end; - - -(*** Dependencies ***) - -(* FIXME dead code!? -fun amend_dependency loc (name, morph) mixin export thy = - let - val deps = dependencies_of thy loc; - in - case AList.lookup (fn ((name, morph), ((name', (morph', _)), _)) => - total_ident_ord ((name, instance_of thy name morph), (name', instance_of thy name' morph')) = EQUAL) deps (name, morph) of - NONE => error ("Locale " ^ - quote (extern thy name) ^ " with\parameter instantiation " ^ - space_implode " " (map (quote o Syntax.string_of_term_global thy) morph) ^ - " not a sublocale of " ^ quote (extern thy loc)) - | SOME (_, serial') => change_locale ... - end; -*) - -fun add_dependency loc (name, morph) mixin export thy = - let - val serial' = serial (); - val thy' = thy |> - (change_locale loc o apsnd) - (apfst (cons ((name, (morph, export)), serial')) #> - apsnd (case mixin of NONE => I | SOME mixin => insert_mixin serial' mixin)); - val context' = Context.Theory thy'; - val (_, regs) = - fold_rev (roundup thy' cons export) - (registrations_of context' loc) (Idents.get context', []); - in - thy' - |> fold_rev (fn dep => Context.theory_map (add_registration dep NONE export)) regs - end; - - -(*** Storing results ***) - -(* Theorems *) - -fun add_thmss _ _ [] ctxt = ctxt - | add_thmss loc kind facts ctxt = - ctxt - |> Attrib.local_notes kind facts |> snd - |> Proof_Context.background_theory - ((change_locale loc o apfst o apsnd) (cons ((kind, facts), serial ())) #> - (* Registrations *) - (fn thy => - fold_rev (fn (_, morph) => - snd o Attrib.global_notes kind (Element.transform_facts morph facts)) - (registrations_of (Context.Theory thy) loc) thy)); - - -(* Declarations *) - -local - -fun add_decl loc decl = - add_thmss loc "" - [((Binding.conceal Binding.empty, - [Attrib.internal (fn phi => Thm.declaration_attribute (K (decl phi)))]), - [([Drule.dummy_thm], [])])]; - -in - -fun add_declaration loc syntax decl = - syntax ? 
- Proof_Context.background_theory ((change_locale loc o apfst o apfst) (cons (decl, serial ()))) - #> add_decl loc decl; - -end; - - -(*** Reasoning about locales ***) - -(* Storage for witnesses, intro and unfold rules *) - -structure Thms = Generic_Data -( - type T = thm list * thm list * thm list; - val empty = ([], [], []); - val extend = I; - fun merge ((witnesses1, intros1, unfolds1), (witnesses2, intros2, unfolds2)) = - (Thm.merge_thms (witnesses1, witnesses2), - Thm.merge_thms (intros1, intros2), - Thm.merge_thms (unfolds1, unfolds2)); -); - -val get_witnesses = #1 o Thms.get o Context.Proof; -val get_intros = #2 o Thms.get o Context.Proof; -val get_unfolds = #3 o Thms.get o Context.Proof; - -val witness_add = - Thm.declaration_attribute (fn th => Thms.map (fn (x, y, z) => (Thm.add_thm th x, y, z))); -val intro_add = - Thm.declaration_attribute (fn th => Thms.map (fn (x, y, z) => (x, Thm.add_thm th y, z))); -val unfold_add = - Thm.declaration_attribute (fn th => Thms.map (fn (x, y, z) => (x, y, Thm.add_thm th z))); - - -(* Tactics *) - -fun gen_intro_locales_tac intros_tac eager ctxt = - intros_tac - (get_witnesses ctxt @ get_intros ctxt @ (if eager then get_unfolds ctxt else [])); - -val intro_locales_tac = gen_intro_locales_tac Method.intros_tac; -val try_intro_locales_tac= gen_intro_locales_tac Method.try_intros_tac; - -val _ = Theory.setup - (Method.setup @{binding intro_locales} (Scan.succeed (METHOD o try_intro_locales_tac false)) - "back-chain introduction rules of locales without unfolding predicates" #> - Method.setup @{binding unfold_locales} (Scan.succeed (METHOD o try_intro_locales_tac true)) - "back-chain all introduction rules of locales"); - - -(*** diagnostic commands and interfaces ***) - -fun print_locales thy = - Pretty.block - (Pretty.breaks - (Pretty.str "locales:" :: - map (Pretty.mark_str o #1) - (Name_Space.markup_table (Proof_Context.init_global thy) (Locales.get thy)))) - |> Pretty.writeln; - -fun pretty_locale thy show_facts name = - let - val locale_ctxt = init name thy; - fun cons_elem (elem as Notes _) = show_facts ? 
cons elem - | cons_elem elem = cons elem; - val elems = - activate_all name thy cons_elem (K (Morphism.transfer_morphism thy)) (empty_idents, []) - |> snd |> rev; - in - Pretty.block - (Pretty.keyword1 "locale" :: Pretty.brk 1 :: pretty_name locale_ctxt name :: - maps (fn elem => [Pretty.fbrk, Pretty.chunks (Element.pretty_ctxt locale_ctxt elem)]) elems) - end; - -fun print_locale thy show_facts raw_name = - Pretty.writeln (pretty_locale thy show_facts (check thy raw_name)); - -fun print_registrations ctxt raw_name = - let - val thy = Proof_Context.theory_of ctxt; - val name = check thy raw_name; - in - (case registrations_of (Context.Proof ctxt) (* FIXME *) name of - [] => Pretty.str "no interpretations" - | regs => Pretty.big_list "interpretations:" (map (pretty_reg ctxt Morphism.identity) (rev regs))) - end |> Pretty.writeln; - -fun print_dependencies ctxt clean export insts = - let - val thy = Proof_Context.theory_of ctxt; - val idents = if clean then empty_idents else Idents.get (Context.Proof ctxt); - in - (case fold (roundup thy cons export) insts (idents, []) |> snd of - [] => Pretty.str "no dependencies" - | deps => Pretty.big_list "dependencies:" (map (pretty_reg ctxt export) (rev deps))) - end |> Pretty.writeln; - -fun pretty_locale_deps thy = - let - fun make_node name = - {name = name, - parents = map (fst o fst) (dependencies_of thy name), - body = pretty_locale thy false name}; - val names = sort_strings (Name_Space.fold_table (cons o #1) (Locales.get thy) []); - in map make_node names end; - -end; diff --git a/core/Pure/Isar/method.ML b/core/Pure/Isar/method.ML deleted file mode 100644 index 30d0681b..00000000 --- a/core/Pure/Isar/method.ML +++ /dev/null @@ -1,536 +0,0 @@ -(* Title: Pure/Isar/method.ML - Author: Markus Wenzel, TU Muenchen - -Isar proof methods. 
-*) - -signature METHOD = -sig - type method - val apply: (Proof.context -> method) -> Proof.context -> thm list -> cases_tactic - val RAW_METHOD_CASES: (thm list -> cases_tactic) -> method - val RAW_METHOD: (thm list -> tactic) -> method - val METHOD_CASES: (thm list -> cases_tactic) -> method - val METHOD: (thm list -> tactic) -> method - val fail: method - val succeed: method - val insert_tac: thm list -> int -> tactic - val insert: thm list -> method - val insert_facts: method - val SIMPLE_METHOD: tactic -> method - val SIMPLE_METHOD': (int -> tactic) -> method - val SIMPLE_METHOD'': ((int -> tactic) -> tactic) -> (int -> tactic) -> method - val cheating: Proof.context -> bool -> method - val intro: thm list -> method - val elim: thm list -> method - val unfold: thm list -> Proof.context -> method - val fold: thm list -> Proof.context -> method - val atomize: bool -> Proof.context -> method - val this: method - val fact: thm list -> Proof.context -> method - val assm_tac: Proof.context -> int -> tactic - val all_assm_tac: Proof.context -> tactic - val assumption: Proof.context -> method - val rule_trace: bool Config.T - val trace: Proof.context -> thm list -> unit - val rule_tac: Proof.context -> thm list -> thm list -> int -> tactic - val some_rule_tac: Proof.context -> thm list -> thm list -> int -> tactic - val intros_tac: thm list -> thm list -> tactic - val try_intros_tac: thm list -> thm list -> tactic - val rule: Proof.context -> thm list -> method - val erule: Proof.context -> int -> thm list -> method - val drule: Proof.context -> int -> thm list -> method - val frule: Proof.context -> int -> thm list -> method - val set_tactic: (thm list -> tactic) -> Proof.context -> Proof.context - val tactic: Symbol_Pos.source -> Proof.context -> method - val raw_tactic: Symbol_Pos.source -> Proof.context -> method - type src = Args.src - type combinator_info - val no_combinator_info: combinator_info - datatype text = - Source of src | - Basic of Proof.context -> method | - Then of combinator_info * text list | - Orelse of combinator_info * text list | - Try of combinator_info * text | - Repeat1 of combinator_info * text | - Select_Goals of combinator_info * int * text - val primitive_text: (Proof.context -> thm -> thm) -> text - val succeed_text: text - val default_text: text - val this_text: text - val done_text: text - val sorry_text: bool -> text - val finish_text: text option * bool -> text - val print_methods: Proof.context -> unit - val check_name: Proof.context -> xstring * Position.T -> string - val method: Proof.context -> src -> Proof.context -> method - val method_cmd: Proof.context -> src -> Proof.context -> method - val setup: binding -> (Proof.context -> method) context_parser -> string -> theory -> theory - val method_setup: bstring * Position.T -> Symbol_Pos.source -> string -> theory -> theory - type modifier = (Proof.context -> Proof.context) * attribute - val section: modifier parser list -> thm list context_parser - val sections: modifier parser list -> thm list list context_parser - type text_range = text * Position.range - val text: text_range option -> text option - val position: text_range option -> Position.T - val reports_of: text_range -> Position.report list - val report: text_range -> unit - val parse: text_range parser -end; - -structure Method: METHOD = -struct - -(** proof methods **) - -(* datatype method *) - -datatype method = Meth of thm list -> cases_tactic; - -fun apply meth ctxt = let val Meth m = meth ctxt in m end; - -val RAW_METHOD_CASES = Meth; - 
-fun RAW_METHOD tac = RAW_METHOD_CASES (NO_CASES o tac); - -fun METHOD_CASES tac = RAW_METHOD_CASES (fn facts => - Seq.THEN (ALLGOALS Goal.conjunction_tac, tac facts)); - -fun METHOD tac = RAW_METHOD (fn facts => ALLGOALS Goal.conjunction_tac THEN tac facts); - -val fail = METHOD (K no_tac); -val succeed = METHOD (K all_tac); - - -(* insert facts *) - -local - -fun cut_rule_tac rule = - rtac (Drule.forall_intr_vars rule COMP_INCR revcut_rl); - -in - -fun insert_tac [] _ = all_tac - | insert_tac facts i = EVERY (map (fn th => cut_rule_tac th i) facts); - -val insert_facts = METHOD (ALLGOALS o insert_tac); -fun insert thms = METHOD (fn _ => ALLGOALS (insert_tac thms)); - -fun SIMPLE_METHOD tac = METHOD (fn facts => ALLGOALS (insert_tac facts) THEN tac); -fun SIMPLE_METHOD'' quant tac = METHOD (fn facts => quant (insert_tac facts THEN' tac)); -val SIMPLE_METHOD' = SIMPLE_METHOD'' HEADGOAL; - -end; - - -(* cheating *) - -fun cheating ctxt int = METHOD (fn _ => fn st => - if int orelse Config.get ctxt quick_and_dirty then - ALLGOALS Skip_Proof.cheat_tac st - else error "Cheating requires quick_and_dirty mode!"); - - -(* unfold intro/elim rules *) - -fun intro ths = SIMPLE_METHOD' (CHANGED_PROP o REPEAT_ALL_NEW (match_tac ths)); -fun elim ths = SIMPLE_METHOD' (CHANGED_PROP o REPEAT_ALL_NEW (ematch_tac ths)); - - -(* unfold/fold definitions *) - -fun unfold_meth ths ctxt = SIMPLE_METHOD (CHANGED_PROP (Local_Defs.unfold_tac ctxt ths)); -fun fold_meth ths ctxt = SIMPLE_METHOD (CHANGED_PROP (Local_Defs.fold_tac ctxt ths)); - - -(* atomize rule statements *) - -fun atomize false ctxt = - SIMPLE_METHOD' (CHANGED_PROP o Object_Logic.atomize_prems_tac ctxt) - | atomize true ctxt = - RAW_METHOD (K (HEADGOAL (CHANGED_PROP o Object_Logic.full_atomize_tac ctxt))); - - -(* this -- resolve facts directly *) - -val this = METHOD (EVERY o map (HEADGOAL o rtac)); - - -(* fact -- composition by facts from context *) - -fun fact [] ctxt = SIMPLE_METHOD' (Proof_Context.some_fact_tac ctxt) - | fact rules ctxt = SIMPLE_METHOD' (Proof_Context.fact_tac ctxt rules); - - -(* assumption *) - -local - -fun cond_rtac cond rule = SUBGOAL (fn (prop, i) => - if cond (Logic.strip_assums_concl prop) - then rtac rule i else no_tac); - -in - -fun assm_tac ctxt = - assume_tac APPEND' - Goal.assume_rule_tac ctxt APPEND' - cond_rtac (can Logic.dest_equals) Drule.reflexive_thm APPEND' - cond_rtac (can Logic.dest_term) Drule.termI; - -fun all_assm_tac ctxt = - let - fun tac i st = - if i > Thm.nprems_of st then all_tac st - else ((assm_tac ctxt i THEN tac i) ORELSE tac (i + 1)) st; - in tac 1 end; - -fun assumption ctxt = METHOD (HEADGOAL o - (fn [] => assm_tac ctxt - | [fact] => solve_tac [fact] - | _ => K no_tac)); - -fun finish immed ctxt = - METHOD (K ((if immed then all_assm_tac ctxt else all_tac) THEN flexflex_tac)); - -end; - - -(* rule etc. 
-- single-step refinements *) - -val rule_trace = Attrib.setup_config_bool @{binding rule_trace} (fn _ => false); - -fun trace ctxt rules = - if Config.get ctxt rule_trace andalso not (null rules) then - Pretty.big_list "rules:" (map (Display.pretty_thm_item ctxt) rules) - |> Pretty.string_of |> tracing - else (); - -local - -fun gen_rule_tac tac ctxt rules facts = - (fn i => fn st => - if null facts then tac rules i st - else Seq.maps (fn rule => (tac o single) rule i st) (Drule.multi_resolves facts rules)) - THEN_ALL_NEW Goal.norm_hhf_tac ctxt; - -fun gen_arule_tac tac ctxt j rules facts = - EVERY' (gen_rule_tac tac ctxt rules facts :: replicate j assume_tac); - -fun gen_some_rule_tac tac ctxt arg_rules facts = SUBGOAL (fn (goal, i) => - let - val rules = - if not (null arg_rules) then arg_rules - else flat (Context_Rules.find_rules false facts goal ctxt) - in trace ctxt rules; tac ctxt rules facts i end); - -fun meth tac x y = METHOD (HEADGOAL o tac x y); -fun meth' tac x y z = METHOD (HEADGOAL o tac x y z); - -in - -val rule_tac = gen_rule_tac resolve_tac; -val rule = meth rule_tac; -val some_rule_tac = gen_some_rule_tac rule_tac; -val some_rule = meth some_rule_tac; - -val erule = meth' (gen_arule_tac eresolve_tac); -val drule = meth' (gen_arule_tac dresolve_tac); -val frule = meth' (gen_arule_tac forward_tac); - -end; - - -(* intros_tac -- pervasive search spanned by intro rules *) - -fun gen_intros_tac goals intros facts = - goals (insert_tac facts THEN' - REPEAT_ALL_NEW (resolve_tac intros)) - THEN Tactic.distinct_subgoals_tac; - -val intros_tac = gen_intros_tac ALLGOALS; -val try_intros_tac = gen_intros_tac TRYALL; - - -(* ML tactics *) - -structure ML_Tactic = Proof_Data -( - type T = thm list -> tactic; - fun init _ = undefined; -); - -val set_tactic = ML_Tactic.put; - -fun ml_tactic source ctxt = - let - val ctxt' = ctxt |> Context.proof_map - (ML_Context.expression (#pos source) - "fun tactic (facts: thm list) : tactic" - "Context.map_proof (Method.set_tactic tactic)" (ML_Lex.read_source false source)); - in Context.setmp_thread_data (SOME (Context.Proof ctxt)) (ML_Tactic.get ctxt') end; - -fun tactic source ctxt = METHOD (ml_tactic source ctxt); -fun raw_tactic source ctxt = RAW_METHOD (ml_tactic source ctxt); - - - -(** method syntax **) - -(* method text *) - -type src = Args.src; - -datatype combinator_info = Combinator_Info of {keywords: Position.T list}; -fun combinator_info keywords = Combinator_Info {keywords = keywords}; -val no_combinator_info = combinator_info []; - -datatype text = - Source of src | - Basic of Proof.context -> method | - Then of combinator_info * text list | - Orelse of combinator_info * text list | - Try of combinator_info * text | - Repeat1 of combinator_info * text | - Select_Goals of combinator_info * int * text; - -fun primitive_text r = Basic (SIMPLE_METHOD o PRIMITIVE o r); -val succeed_text = Basic (K succeed); -val default_text = Source (Args.src ("default", Position.none) []); -val this_text = Basic (K this); -val done_text = Basic (K (SIMPLE_METHOD all_tac)); -fun sorry_text int = Basic (fn ctxt => cheating ctxt int); - -fun finish_text (NONE, immed) = Basic (finish immed) - | finish_text (SOME txt, immed) = Then (no_combinator_info, [txt, Basic (finish immed)]); - - -(* method definitions *) - -structure Methods = Theory_Data -( - type T = ((src -> Proof.context -> method) * string) Name_Space.table; - val empty : T = Name_Space.empty_table "method"; - val extend = I; - fun merge data : T = Name_Space.merge_tables data; -); - -val 
get_methods = Methods.get o Proof_Context.theory_of; - -fun print_methods ctxt = - let - val meths = get_methods ctxt; - fun prt_meth (name, (_, "")) = Pretty.mark_str name - | prt_meth (name, (_, comment)) = - Pretty.block - (Pretty.mark_str name :: Pretty.str ":" :: Pretty.brk 2 :: Pretty.text comment); - in - [Pretty.big_list "methods:" (map prt_meth (Name_Space.markup_table ctxt meths))] - |> Pretty.writeln_chunks - end; - -fun add_method name meth comment thy = thy - |> Methods.map (Name_Space.define (Context.Theory thy) true (name, (meth, comment)) #> snd); - - -(* check *) - -fun check_name ctxt = #1 o Name_Space.check (Context.Proof ctxt) (get_methods ctxt); -fun check_src ctxt src = Args.check_src ctxt (get_methods ctxt) src; - - -(* get methods *) - -fun method ctxt = - let val table = get_methods ctxt - in fn src => #1 (Name_Space.get table (#1 (Args.name_of_src src))) src end; - -fun method_closure ctxt0 src0 = - let - val (src1, meth) = check_src ctxt0 src0; - val src2 = Args.init_assignable src1; - val ctxt = Context_Position.not_really ctxt0; - val _ = Seq.pull (apply (method ctxt src2) ctxt [] (Goal.protect 0 Drule.dummy_thm)); - in Args.closure src2 end; - -fun method_cmd ctxt = method ctxt o method_closure ctxt; - - -(* method setup *) - -fun setup name scan = - add_method name - (fn src => fn ctxt => let val (m, ctxt') = Args.syntax scan src ctxt in m ctxt' end); - -fun method_setup name source cmt = - Context.theory_map (ML_Context.expression (#pos source) - "val (name, scan, comment): binding * (Proof.context -> Proof.method) context_parser * string" - "Context.map_theory (Method.setup name scan comment)" - (ML_Lex.read Position.none ("(" ^ ML_Syntax.make_binding name ^ ", ") @ - ML_Lex.read_source false source @ - ML_Lex.read Position.none (", " ^ ML_Syntax.print_string cmt ^ ")"))); - - - -(** concrete syntax **) - -(* sections *) - -type modifier = (Proof.context -> Proof.context) * attribute; - -local - -fun thms ss = Scan.repeat (Scan.unless (Scan.lift (Scan.first ss)) Attrib.multi_thm) >> flat; -fun app (f, att) ths context = fold_map (Thm.apply_attribute att) ths (Context.map_proof f context); - -in - -fun section ss = Scan.depend (fn context => (Scan.first ss -- Scan.pass context (thms ss)) :|-- - (fn (m, ths) => Scan.succeed (swap (app m ths context)))); - -fun sections ss = Scan.repeat (section ss); - -end; - - -(* extra rule methods *) - -fun xrule_meth meth = - Scan.lift (Scan.optional (Args.parens Parse.nat) 0) -- Attrib.thms >> - (fn (n, ths) => fn ctxt => meth ctxt n ths); - - -(* text range *) - -type text_range = text * Position.range; - -fun text NONE = NONE - | text (SOME (txt, _)) = SOME txt; - -fun position NONE = Position.none - | position (SOME (_, (pos, _))) = pos; - - -(* reports *) - -local - -fun keyword_positions (Source _) = [] - | keyword_positions (Basic _) = [] - | keyword_positions (Then (Combinator_Info {keywords}, texts)) = - keywords @ maps keyword_positions texts - | keyword_positions (Orelse (Combinator_Info {keywords}, texts)) = - keywords @ maps keyword_positions texts - | keyword_positions (Try (Combinator_Info {keywords}, text)) = - keywords @ keyword_positions text - | keyword_positions (Repeat1 (Combinator_Info {keywords}, text)) = - keywords @ keyword_positions text - | keyword_positions (Select_Goals (Combinator_Info {keywords}, _, text)) = - keywords @ keyword_positions text; - -in - -fun reports_of ((text, (pos, _)): text_range) = - (pos, Markup.language_method) :: - maps (fn p => map (pair p) (Markup.keyword3 :: 
Completion.suppress_abbrevs "")) - (keyword_positions text); - -val report = Position.reports o reports_of; - -end; - - -(* outer parser *) - -fun is_symid_meth s = - s <> "|" andalso s <> "?" andalso s <> "+" andalso Token.ident_or_symbolic s; - -local - -fun meth4 x = - (Parse.position Parse.xname >> (fn name => Source (Args.src name [])) || - Scan.ahead Parse.cartouche |-- Parse.not_eof >> (fn tok => - Source (Args.src ("cartouche", Token.pos_of tok) [tok])) || - Parse.$$$ "(" |-- Parse.!!! (meth0 --| Parse.$$$ ")")) x -and meth3 x = - (meth4 -- Parse.position (Parse.$$$ "?") - >> (fn (m, (_, pos)) => Try (combinator_info [pos], m)) || - meth4 -- Parse.position (Parse.$$$ "+") - >> (fn (m, (_, pos)) => Repeat1 (combinator_info [pos], m)) || - meth4 -- - (Parse.position (Parse.$$$ "[") -- Scan.optional Parse.nat 1 -- Parse.position (Parse.$$$ "]")) - >> (fn (m, (((_, pos1), n), (_, pos2))) => - Select_Goals (combinator_info [pos1, pos2], n, m)) || - meth4) x -and meth2 x = - (Parse.position Parse.xname -- Parse.args1 is_symid_meth >> (Source o uncurry Args.src) || - meth3) x -and meth1 x = - (Parse.enum1_positions "," meth2 - >> (fn ([m], _) => m | (ms, ps) => Then (combinator_info ps, ms))) x -and meth0 x = - (Parse.enum1_positions "|" meth1 - >> (fn ([m], _) => m | (ms, ps) => Orelse (combinator_info ps, ms))) x; - -in - -val parse = - Scan.trace meth3 >> (fn (m, toks) => (m, Token.range_of toks)); - -end; - - -(* theory setup *) - -val _ = Theory.setup - (setup @{binding fail} (Scan.succeed (K fail)) "force failure" #> - setup @{binding succeed} (Scan.succeed (K succeed)) "succeed" #> - setup @{binding "-"} (Scan.succeed (K insert_facts)) - "do nothing (insert current facts only)" #> - setup @{binding insert} (Attrib.thms >> (K o insert)) - "insert theorems, ignoring facts (improper)" #> - setup @{binding intro} (Attrib.thms >> (K o intro)) - "repeatedly apply introduction rules" #> - setup @{binding elim} (Attrib.thms >> (K o elim)) - "repeatedly apply elimination rules" #> - setup @{binding unfold} (Attrib.thms >> unfold_meth) "unfold definitions" #> - setup @{binding fold} (Attrib.thms >> fold_meth) "fold definitions" #> - setup @{binding atomize} (Scan.lift (Args.mode "full") >> atomize) - "present local premises as object-level statements" #> - setup @{binding rule} (Attrib.thms >> (fn ths => fn ctxt => some_rule ctxt ths)) - "apply some intro/elim rule" #> - setup @{binding erule} (xrule_meth erule) "apply rule in elimination manner (improper)" #> - setup @{binding drule} (xrule_meth drule) "apply rule in destruct manner (improper)" #> - setup @{binding frule} (xrule_meth frule) "apply rule in forward manner (improper)" #> - setup @{binding this} (Scan.succeed (K this)) "apply current facts as rules" #> - setup @{binding fact} (Attrib.thms >> fact) "composition by facts from context" #> - setup @{binding assumption} (Scan.succeed assumption) - "proof by assumption, preferring facts" #> - setup @{binding rename_tac} (Args.goal_spec -- Scan.lift (Scan.repeat1 Args.name) >> - (fn (quant, xs) => K (SIMPLE_METHOD'' quant (rename_tac xs)))) - "rename parameters of goal" #> - setup @{binding rotate_tac} (Args.goal_spec -- Scan.lift (Scan.optional Parse.int 1) >> - (fn (quant, i) => K (SIMPLE_METHOD'' quant (rotate_tac i)))) - "rotate assumptions of goal" #> - setup @{binding tactic} (Scan.lift Args.text_source_position >> tactic) - "ML tactic as proof method" #> - setup @{binding raw_tactic} (Scan.lift Args.text_source_position >> raw_tactic) - "ML tactic as raw proof method"); - - 
-(*final declarations of this structure!*) -val unfold = unfold_meth; -val fold = fold_meth; - -end; - -val RAW_METHOD_CASES = Method.RAW_METHOD_CASES; -val RAW_METHOD = Method.RAW_METHOD; -val METHOD_CASES = Method.METHOD_CASES; -val METHOD = Method.METHOD; -val SIMPLE_METHOD = Method.SIMPLE_METHOD; -val SIMPLE_METHOD' = Method.SIMPLE_METHOD'; -val SIMPLE_METHOD'' = Method.SIMPLE_METHOD''; - diff --git a/core/Pure/Isar/named_target.ML b/core/Pure/Isar/named_target.ML deleted file mode 100644 index b61a7951..00000000 --- a/core/Pure/Isar/named_target.ML +++ /dev/null @@ -1,199 +0,0 @@ -(* Title: Pure/Isar/named_target.ML - Author: Makarius - Author: Florian Haftmann, TU Muenchen - -Targets for theory, locale, class -- at the bottom the nested structure. -*) - -signature NAMED_TARGET = -sig - val is_theory: local_theory -> bool - val locale_of: local_theory -> string option - val bottom_locale_of: local_theory -> string option - val class_of: local_theory -> string option - val init: string -> theory -> local_theory - val theory_init: theory -> local_theory - val theory_like_init: (local_theory -> local_theory) -> theory -> local_theory - val begin: xstring * Position.T -> theory -> local_theory - val exit: local_theory -> theory - val switch: (xstring * Position.T) option -> Context.generic - -> (local_theory -> Context.generic) * local_theory -end; - -structure Named_Target: NAMED_TARGET = -struct - -(* context data *) - -structure Data = Proof_Data -( - type T = (string * bool) option; - fun init _ = NONE; -); - -val get_bottom_data = Data.get; - -fun get_data lthy = - if Local_Theory.level lthy = 1 - then get_bottom_data lthy - else NONE; - -fun is_theory lthy = - case get_data lthy of - SOME ("", _) => true - | _ => false; - -fun target_of lthy = - case get_data lthy of - NONE => error "Not in a named target" - | SOME (target, _) => target; - -fun locale_name_of NONE = NONE - | locale_name_of (SOME ("", _)) = NONE - | locale_name_of (SOME (locale, _)) = SOME locale; - -val locale_of = locale_name_of o get_data; - -val bottom_locale_of = locale_name_of o get_bottom_data; - -fun class_of lthy = - case get_data lthy of - SOME (class, true) => SOME class - | _ => NONE; - - -(* define *) - -fun locale_foundation locale (((b, U), mx), (b_def, rhs)) params = - Generic_Target.background_foundation (((b, U), NoSyn), (b_def, rhs)) params - #-> (fn (lhs, def) => Generic_Target.locale_const locale Syntax.mode_default ((b, mx), lhs) - #> pair (lhs, def)); - -fun class_foundation class (((b, U), mx), (b_def, rhs)) params = - Generic_Target.background_foundation (((b, U), NoSyn), (b_def, rhs)) params - #-> (fn (lhs, def) => Class.const class ((b, mx), lhs) params - #> pair (lhs, def)); - -fun foundation ("", _) = Generic_Target.theory_foundation - | foundation (locale, false) = locale_foundation locale - | foundation (class, true) = class_foundation class; - - -(* notes *) - -fun notes ("", _) = Generic_Target.theory_notes - | notes (locale, _) = Generic_Target.locale_notes locale; - - -(* abbrev *) - -fun locale_abbrev locale prmode (b, mx) global_rhs params = - Generic_Target.background_abbrev (b, global_rhs) (snd params) - #-> (fn (lhs, _) => Generic_Target.locale_const locale prmode ((b, mx), lhs)); - -fun class_abbrev class prmode (b, mx) global_rhs params = - Generic_Target.background_abbrev (b, global_rhs) (snd params) - #-> (fn (lhs, rhs) => Class.abbrev class prmode ((b, mx), lhs) rhs params); - -fun abbrev ("", _) = Generic_Target.theory_abbrev - | abbrev (locale, false) = locale_abbrev 
locale - | abbrev (class, true) = class_abbrev class; - - -(* declaration *) - -fun declaration ("", _) flags decl = Generic_Target.theory_declaration decl - | declaration (locale, _) flags decl = Generic_Target.locale_declaration locale flags decl; - - -(* subscription *) - -fun subscription ("", _) = Generic_Target.theory_registration - | subscription (locale, _) = Generic_Target.locale_dependency locale; - - -(* pretty *) - -fun pretty (target, is_class) ctxt = - let - val target_name = - [Pretty.keyword1 (if is_class then "class" else "locale"), Pretty.brk 1, - Locale.pretty_name ctxt target]; - val fixes = - map (fn (x, T) => (Binding.name x, SOME T, NoSyn)) - (#1 (Proof_Context.inferred_fixes ctxt)); - val assumes = - map (fn A => (Attrib.empty_binding, [(Thm.term_of A, [])])) - (Assumption.all_assms_of ctxt); - val elems = - (if null fixes then [] else [Element.Fixes fixes]) @ - (if null assumes then [] else [Element.Assumes assumes]); - val body_elems = - if target = "" then [] - else if null elems then [Pretty.block target_name] - else [Pretty.block (Pretty.fbreaks (Pretty.block (target_name @ [Pretty.str " ="]) :: - map (Pretty.chunks o Element.pretty_ctxt ctxt) elems))]; - in - Pretty.block [Pretty.keyword1 "theory", Pretty.brk 1, - Pretty.str (Context.theory_name (Proof_Context.theory_of ctxt))] :: body_elems - end; - - -(* init *) - -fun make_name_data _ "" = ("", false) - | make_name_data thy target = - if Locale.defined thy target - then (target, Class.is_class thy target) - else error ("No such locale: " ^ quote target); - -fun init_context ("", _) = Proof_Context.init_global - | init_context (locale, false) = Locale.init locale - | init_context (class, true) = Class.init class; - -fun gen_init before_exit target thy = - let - val name_data = make_name_data thy target; - val naming = Sign.naming_of thy - |> Name_Space.mandatory_path (Long_Name.base_name target); - in - thy - |> Sign.change_begin - |> init_context name_data - |> is_none before_exit ? Data.put (SOME name_data) - |> Local_Theory.init naming - {define = Generic_Target.define (foundation name_data), - notes = Generic_Target.notes (notes name_data), - abbrev = Generic_Target.abbrev (abbrev name_data), - declaration = declaration name_data, - subscription = subscription name_data, - pretty = pretty name_data, - exit = the_default I before_exit - #> Local_Theory.target_of #> Sign.change_end_local} - end; - -val init = gen_init NONE - -val theory_init = init ""; - -fun theory_like_init before_exit = gen_init (SOME before_exit) ""; - - -(* toplevel interaction *) - -fun begin ("-", _) thy = theory_init thy - | begin target thy = init (Locale.check thy target) thy; - -val exit = Local_Theory.assert_bottom true #> Local_Theory.exit_global; - -fun switch NONE (Context.Theory thy) = - (Context.Theory o exit, theory_init thy) - | switch (SOME name) (Context.Theory thy) = - (Context.Theory o exit, begin name thy) - | switch NONE (Context.Proof lthy) = - (Context.Proof o Local_Theory.restore, lthy) - | switch (SOME name) (Context.Proof lthy) = - (Context.Proof o init (target_of lthy) o exit, - (begin name o exit o Local_Theory.assert_nonbrittle) lthy); - -end; diff --git a/core/Pure/Isar/object_logic.ML b/core/Pure/Isar/object_logic.ML deleted file mode 100644 index d2a6e1d3..00000000 --- a/core/Pure/Isar/object_logic.ML +++ /dev/null @@ -1,212 +0,0 @@ -(* Title: Pure/Isar/object_logic.ML - Author: Markus Wenzel, TU Muenchen - -Specifics about common object-logics. 
-*) - -signature OBJECT_LOGIC = -sig - val get_base_sort: theory -> sort option - val add_base_sort: sort -> theory -> theory - val add_judgment: binding * typ * mixfix -> theory -> theory - val add_judgment_cmd: binding * string * mixfix -> theory -> theory - val judgment_name: theory -> string - val is_judgment: theory -> term -> bool - val drop_judgment: theory -> term -> term - val fixed_judgment: theory -> string -> term - val ensure_propT: theory -> term -> term - val dest_judgment: cterm -> cterm - val judgment_conv: conv -> conv - val elim_concl: thm -> term option - val declare_atomize: attribute - val declare_rulify: attribute - val atomize_term: theory -> term -> term - val atomize: Proof.context -> conv - val atomize_prems: Proof.context -> conv - val atomize_prems_tac: Proof.context -> int -> tactic - val full_atomize_tac: Proof.context -> int -> tactic - val rulify_term: theory -> term -> term - val rulify_tac: Proof.context -> int -> tactic - val rulify: Proof.context -> thm -> thm - val rulify_no_asm: Proof.context -> thm -> thm - val rule_format: attribute - val rule_format_no_asm: attribute -end; - -structure Object_Logic: OBJECT_LOGIC = -struct - -(** theory data **) - -datatype data = Data of - {base_sort: sort option, - judgment: string option, - atomize_rulify: thm list * thm list}; - -fun make_data (base_sort, judgment, atomize_rulify) = - Data {base_sort = base_sort, judgment = judgment, atomize_rulify = atomize_rulify}; - -structure Data = Theory_Data -( - type T = data; - val empty = make_data (NONE, NONE, ([], [])); - val extend = I; - - fun merge_opt eq (SOME x, SOME y) = - if eq (x, y) then SOME x else error "Attempt to merge different object-logics" - | merge_opt _ data = merge_options data; - - fun merge - (Data {base_sort = base_sort1, judgment = judgment1, atomize_rulify = (atomize1, rulify1)}, - Data {base_sort = base_sort2, judgment = judgment2, atomize_rulify = (atomize2, rulify2)}) = - make_data (merge_opt (op =) (base_sort1, base_sort2), merge_opt (op =) (judgment1, judgment2), - (Thm.merge_thms (atomize1, atomize2), Thm.merge_thms (rulify1, rulify2))); -); - -fun map_data f = Data.map (fn (Data {base_sort, judgment, atomize_rulify}) => - make_data (f (base_sort, judgment, atomize_rulify))); - -fun get_data thy = Data.get thy |> (fn Data args => args); - - - -(** generic treatment of judgments -- with a single argument only **) - -(* base_sort *) - -val get_base_sort = #base_sort o get_data; - -fun add_base_sort S = map_data (fn (base_sort, judgment, atomize_rulify) => - if is_some base_sort then error "Attempt to redeclare object-logic base sort" - else (SOME S, judgment, atomize_rulify)); - - -(* add judgment *) - -local - -fun gen_add_judgment add_consts (b, T, mx) thy = - let val c = Sign.full_name thy b in - thy - |> add_consts [(b, T, mx)] - |> (fn thy' => Theory.add_deps_global c (c, Sign.the_const_type thy' c) [] thy') - |> map_data (fn (base_sort, judgment, atomize_rulify) => - if is_some judgment then error "Attempt to redeclare object-logic judgment" - else (base_sort, SOME c, atomize_rulify)) - end; - -in - -val add_judgment = gen_add_judgment Sign.add_consts; -val add_judgment_cmd = gen_add_judgment Sign.add_consts_cmd; - -end; - - -(* judgments *) - -fun judgment_name thy = - (case #judgment (get_data thy) of - SOME name => name - | _ => raise TERM ("Unknown object-logic judgment", [])); - -fun is_judgment thy (Const (c, _) $ _) = c = judgment_name thy - | is_judgment _ _ = false; - -fun drop_judgment thy (Abs (x, T, t)) = Abs (x, T, 
drop_judgment thy t) - | drop_judgment thy (tm as (Const (c, _) $ t)) = - if (c = judgment_name thy handle TERM _ => false) then t else tm - | drop_judgment _ tm = tm; - -fun fixed_judgment thy x = - let (*be robust wrt. low-level errors*) - val c = judgment_name thy; - val aT = TFree (Name.aT, []); - val T = - the_default (aT --> propT) (Sign.const_type thy c) - |> Term.map_type_tvar (fn ((x, _), S) => TFree (x, S)); - val U = Term.domain_type T handle Match => aT; - in Const (c, T) $ Free (x, U) end; - -fun ensure_propT thy t = - let val T = Term.fastype_of t - in if T = propT then t else Const (judgment_name thy, T --> propT) $ t end; - -fun dest_judgment ct = - if is_judgment (Thm.theory_of_cterm ct) (Thm.term_of ct) - then Thm.dest_arg ct - else raise CTERM ("dest_judgment", [ct]); - -fun judgment_conv cv ct = - if is_judgment (Thm.theory_of_cterm ct) (Thm.term_of ct) - then Conv.arg_conv cv ct - else raise CTERM ("judgment_conv", [ct]); - - -(* elimination rules *) - -fun elim_concl rule = - let - val thy = Thm.theory_of_thm rule; - val concl = Thm.concl_of rule; - val C = drop_judgment thy concl; - in - if Term.is_Var C andalso - exists (fn prem => concl aconv Logic.strip_assums_concl prem) (Thm.prems_of rule) - then SOME C else NONE - end; - - - -(** treatment of meta-level connectives **) - -(* maintain rules *) - -val get_atomize = #1 o #atomize_rulify o get_data; -val get_rulify = #2 o #atomize_rulify o get_data; - -fun add_atomize th = map_data (fn (base_sort, judgment, (atomize, rulify)) => - (base_sort, judgment, (Thm.add_thm th atomize, rulify))); - -fun add_rulify th = map_data (fn (base_sort, judgment, (atomize, rulify)) => - (base_sort, judgment, (atomize, Thm.add_thm th rulify))); - -val declare_atomize = Thm.declaration_attribute (fn th => Context.mapping (add_atomize th) I); -val declare_rulify = Thm.declaration_attribute (fn th => Context.mapping (add_rulify th) I); - -val _ = Theory.setup (fold add_rulify Drule.norm_hhf_eqs); - - -(* atomize *) - -fun atomize_term thy = - drop_judgment thy o Raw_Simplifier.rewrite_term thy (get_atomize thy) []; - -fun atomize ctxt = - Raw_Simplifier.rewrite ctxt true (get_atomize (Proof_Context.theory_of ctxt)); - -fun atomize_prems ctxt ct = - if Logic.has_meta_prems (Thm.term_of ct) then - Conv.params_conv ~1 (Conv.prems_conv ~1 o atomize) ctxt ct - else Conv.all_conv ct; - -val atomize_prems_tac = CONVERSION o atomize_prems; -val full_atomize_tac = CONVERSION o atomize; - - -(* rulify *) - -fun rulify_term thy = Raw_Simplifier.rewrite_term thy (get_rulify thy) []; -fun rulify_tac ctxt = rewrite_goal_tac ctxt (get_rulify (Proof_Context.theory_of ctxt)); - -fun gen_rulify full ctxt = - Conv.fconv_rule (Raw_Simplifier.rewrite ctxt full (get_rulify (Proof_Context.theory_of ctxt))) - #> Drule.gen_all #> Thm.strip_shyps #> Drule.zero_var_indexes; - -val rulify = gen_rulify true; -val rulify_no_asm = gen_rulify false; - -val rule_format = Thm.rule_attribute (rulify o Context.proof_of); -val rule_format_no_asm = Thm.rule_attribute (rulify_no_asm o Context.proof_of); - -end; diff --git a/core/Pure/Isar/obtain.ML b/core/Pure/Isar/obtain.ML deleted file mode 100644 index cb7ffdc5..00000000 --- a/core/Pure/Isar/obtain.ML +++ /dev/null @@ -1,329 +0,0 @@ -(* Title: Pure/Isar/obtain.ML - Author: Markus Wenzel, TU Muenchen - -The 'obtain' and 'guess' language elements -- generalized existence at -the level of proof texts: 'obtain' involves a proof that certain -fixes/assumes may be introduced into the present context; 'guess' is -similar, but 
derives these elements from the course of reasoning! - - - obtain x where "A x" == - - have "!!thesis. (!!x. A x ==> thesis) ==> thesis" - proof succeed - fix thesis - assume that [intro?]: "!!x. A x ==> thesis" - - show thesis - apply (insert that) - - qed - fix x assm <> "A x" - - - - guess x == - - { - fix thesis - have "PROP ?guess" - apply magic -- {* turns goal into "thesis ==> #thesis" *} - - apply_end magic -- {* turns final "(!!x. P x ==> thesis) ==> #thesis" into - "#((!!x. A x ==> thesis) ==> thesis)" which is a finished goal state *} - - } - fix x assm <> "A x" -*) - -signature OBTAIN = -sig - val thatN: string - val obtain: string -> (binding * typ option * mixfix) list -> - (Thm.binding * (term * term list) list) list -> bool -> Proof.state -> Proof.state - val obtain_cmd: string -> (binding * string option * mixfix) list -> - (Attrib.binding * (string * string list) list) list -> bool -> Proof.state -> Proof.state - val result: (Proof.context -> tactic) -> thm list -> Proof.context -> - ((string * cterm) list * thm list) * Proof.context - val guess: (binding * typ option * mixfix) list -> bool -> Proof.state -> Proof.state - val guess_cmd: (binding * string option * mixfix) list -> bool -> Proof.state -> Proof.state -end; - -structure Obtain: OBTAIN = -struct - -(** obtain_export **) - -(* - [x, A x] - : - B - -------- - B -*) -fun eliminate_term ctxt xs tm = - let - val vs = map (dest_Free o Thm.term_of) xs; - val bads = Term.fold_aterms (fn t as Free v => - if member (op =) vs v then insert (op aconv) t else I | _ => I) tm []; - val _ = null bads orelse - error ("Result contains obtained parameters: " ^ - space_implode " " (map (Syntax.string_of_term ctxt) bads)); - in tm end; - -fun eliminate fix_ctxt rule xs As thm = - let - val thy = Proof_Context.theory_of fix_ctxt; - - val _ = eliminate_term fix_ctxt xs (Thm.full_prop_of thm); - val _ = Object_Logic.is_judgment thy (Thm.concl_of thm) orelse - error "Conclusion in obtained context must be object-logic judgment"; - - val ((_, [thm']), ctxt') = Variable.import true [thm] fix_ctxt; - val prems = Drule.strip_imp_prems (#prop (Thm.crep_thm thm')); - in - ((Drule.implies_elim_list thm' (map Thm.assume prems) - |> Drule.implies_intr_list (map Drule.norm_hhf_cterm As) - |> Drule.forall_intr_list xs) - COMP rule) - |> Drule.implies_intr_list prems - |> singleton (Variable.export ctxt' fix_ctxt) - end; - -fun obtain_export ctxt rule xs _ As = - (eliminate ctxt rule xs As, eliminate_term ctxt xs); - - - -(** obtain **) - -fun bind_judgment ctxt name = - let - val thy = Proof_Context.theory_of ctxt; - val ([x], ctxt') = Proof_Context.add_fixes [(Binding.name name, NONE, NoSyn)] ctxt; - val (t as _ $ Free v) = Object_Logic.fixed_judgment thy x; - in ((v, t), ctxt') end; - -val thatN = "that"; - -local - -fun gen_obtain prep_att prep_vars prep_propp - name raw_vars raw_asms int state = - let - val _ = Proof.assert_forward_or_chain state; - val thy = Proof.theory_of state; - val cert = Thm.cterm_of thy; - val ctxt = Proof.context_of state; - val chain_facts = if can Proof.assert_chain state then Proof.the_facts state else []; - - (*obtain vars*) - val (vars, vars_ctxt) = prep_vars raw_vars ctxt; - val (xs', fix_ctxt) = vars_ctxt |> Proof_Context.add_fixes vars; - val xs = map (Variable.check_name o #1) vars; - - (*obtain asms*) - val (proppss, asms_ctxt) = prep_propp (map snd raw_asms) fix_ctxt; - val ((_, bind_ctxt), _) = Proof_Context.bind_propp_i proppss asms_ctxt; - val asm_props = maps (map fst) proppss; - val asms = map fst 
(Attrib.map_specs (map (prep_att ctxt)) raw_asms) ~~ proppss; - - (*obtain parms*) - val (Ts, parms_ctxt) = fold_map Proof_Context.inferred_param xs' asms_ctxt; - val parms = map Free (xs' ~~ Ts); - val _ = Variable.warn_extra_tfrees fix_ctxt parms_ctxt; - - (*obtain statements*) - val thesisN = singleton (Name.variant_list xs) Auto_Bind.thesisN; - val (thesis_var, thesis) = #1 (bind_judgment fix_ctxt thesisN); - - val that_name = if name = "" then thatN else name; - val that_prop = - Logic.list_rename_params xs - (fold_rev Logic.all parms (Logic.list_implies (asm_props, thesis))); - val obtain_prop = - Logic.list_rename_params [Auto_Bind.thesisN] - (Logic.all (Free thesis_var) (Logic.mk_implies (that_prop, thesis))); - - fun after_qed _ = - Proof.local_qed (NONE, false) - #> `Proof.the_fact #-> (fn rule => - Proof.fix vars - #> Proof.assm (obtain_export fix_ctxt rule (map cert parms)) asms); - in - state - |> Proof.enter_forward - |> Proof.have NONE (K I) [(Thm.empty_binding, [(obtain_prop, [])])] int - |> Proof.map_context bind_ctxt - |> Proof.proof (SOME Method.succeed_text) |> Seq.hd - |> Proof.fix [(Binding.name thesisN, NONE, NoSyn)] - |> Proof.assume - [((Binding.name that_name, [Context_Rules.intro_query NONE]), [(that_prop, [])])] - |> `Proof.the_facts - ||> Proof.chain_facts chain_facts - ||> Proof.show NONE after_qed [(Thm.empty_binding, [(thesis, [])])] false - |-> Proof.refine_insert - end; - -in - -val obtain = gen_obtain (K I) Proof_Context.cert_vars Proof_Context.cert_propp; -val obtain_cmd = gen_obtain Attrib.attribute_cmd Proof_Context.read_vars Proof_Context.read_propp; - -end; - - - -(** tactical result **) - -fun check_result ctxt thesis th = - (case Thm.prems_of th of - [prem] => - if Thm.concl_of th aconv thesis andalso - Logic.strip_assums_concl prem aconv thesis then th - else error ("Guessed a different clause:\n" ^ Display.string_of_thm ctxt th) - | [] => error "Goal solved -- nothing guessed" - | _ => error ("Guess split into several cases:\n" ^ Display.string_of_thm ctxt th)); - -fun result tac facts ctxt = - let - val thy = Proof_Context.theory_of ctxt; - val cert = Thm.cterm_of thy; - - val ((thesis_var, thesis), thesis_ctxt) = bind_judgment ctxt Auto_Bind.thesisN; - val rule = - (case SINGLE (Method.insert_tac facts 1 THEN tac thesis_ctxt) (Goal.init (cert thesis)) of - NONE => raise THM ("Obtain.result: tactic failed", 0, facts) - | SOME th => - check_result thesis_ctxt thesis (Raw_Simplifier.norm_hhf thesis_ctxt (Goal.conclude th))); - - val closed_rule = Thm.forall_intr (cert (Free thesis_var)) rule; - val ((_, [rule']), ctxt') = Variable.import false [closed_rule] ctxt; - val obtain_rule = Thm.forall_elim (cert (Logic.varify_global (Free thesis_var))) rule'; - val ((params, stmt), fix_ctxt) = Variable.focus_cterm (Thm.cprem_of obtain_rule 1) ctxt'; - val (prems, ctxt'') = - Assumption.add_assms (obtain_export fix_ctxt obtain_rule (map #2 params)) - (Drule.strip_imp_prems stmt) fix_ctxt; - in ((params, prems), ctxt'') end; - - - -(** guess **) - -local - -fun unify_params vars thesis_var raw_rule ctxt = - let - val thy = Proof_Context.theory_of ctxt; - val certT = Thm.ctyp_of thy; - val cert = Thm.cterm_of thy; - val string_of_term = Syntax.string_of_term (Config.put show_types true ctxt); - - fun err msg th = error (msg ^ ":\n" ^ Display.string_of_thm ctxt th); - - val maxidx = fold (Term.maxidx_typ o snd o fst) vars ~1; - val rule = Thm.incr_indexes (maxidx + 1) raw_rule; - - val params = Rule_Cases.strip_params (Logic.nth_prem (1, Thm.prop_of rule)); - 
val m = length vars; - val n = length params; - val _ = m <= n orelse err "More variables than parameters in obtained rule" rule; - - fun unify ((x, T), (y, U)) (tyenv, max) = Sign.typ_unify thy (T, U) (tyenv, max) - handle Type.TUNIFY => - err ("Failed to unify variable " ^ - string_of_term (Free (x, Envir.norm_type tyenv T)) ^ " against parameter " ^ - string_of_term (Syntax_Trans.mark_bound_abs (y, Envir.norm_type tyenv U)) ^ " in") rule; - val (tyenv, _) = fold unify (map #1 vars ~~ take m params) - (Vartab.empty, Int.max (maxidx, Thm.maxidx_of rule)); - val norm_type = Envir.norm_type tyenv; - - val xs = map (apsnd norm_type o fst) vars; - val ys = map (apsnd norm_type) (drop m params); - val ys' = map Name.internal (Name.variant_list (map fst xs) (map fst ys)) ~~ map #2 ys; - val terms = map (Drule.mk_term o cert o Free) (xs @ ys'); - - val instT = - fold (Term.add_tvarsT o #2) params [] - |> map (TVar #> (fn T => (certT T, certT (norm_type T)))); - val closed_rule = rule - |> Thm.forall_intr (cert (Free thesis_var)) - |> Thm.instantiate (instT, []); - - val ((_, rule' :: terms'), ctxt') = Variable.import false (closed_rule :: terms) ctxt; - val vars' = - map (dest_Free o Thm.term_of o Drule.dest_term) terms' ~~ - (map snd vars @ replicate (length ys) NoSyn); - val rule'' = Thm.forall_elim (cert (Logic.varify_global (Free thesis_var))) rule'; - in ((vars', rule''), ctxt') end; - -fun inferred_type (binding, _, mx) ctxt = - let - val x = Variable.check_name binding; - val (T, ctxt') = Proof_Context.inferred_param x ctxt - in ((x, T, mx), ctxt') end; - -fun polymorphic ctxt vars = - let val Ts = map Logic.dest_type (Variable.polymorphic ctxt (map (Logic.mk_type o #2) vars)) - in map2 (fn (x, _, mx) => fn T => ((x, T), mx)) vars Ts end; - -fun gen_guess prep_vars raw_vars int state = - let - val _ = Proof.assert_forward_or_chain state; - val thy = Proof.theory_of state; - val cert = Thm.cterm_of thy; - val ctxt = Proof.context_of state; - val chain_facts = if can Proof.assert_chain state then Proof.the_facts state else []; - - val (thesis_var, thesis) = #1 (bind_judgment ctxt Auto_Bind.thesisN); - val vars = ctxt |> prep_vars raw_vars |-> fold_map inferred_type |> fst |> polymorphic ctxt; - - fun guess_context raw_rule state' = - let - val ((parms, rule), ctxt') = - unify_params vars thesis_var raw_rule (Proof.context_of state'); - val (xs, _) = Variable.add_fixes (map (#1 o #1) parms) ctxt'; - val ps = xs ~~ map (#2 o #1) parms; - val ts = map Free ps; - val asms = - Logic.strip_assums_hyp (Logic.nth_prem (1, Thm.prop_of rule)) - |> map (fn asm => (Term.betapplys (fold_rev Term.abs ps asm, ts), [])); - val _ = not (null asms) orelse error "Trivial result -- nothing guessed"; - in - state' - |> Proof.map_context (K ctxt') - |> Proof.fix (map (fn ((x, T), mx) => (Binding.name x, SOME T, mx)) parms) - |> `Proof.context_of |-> (fn fix_ctxt => Proof.assm - (obtain_export fix_ctxt rule (map cert ts)) [(Thm.empty_binding, asms)]) - |> Proof.bind_terms Auto_Bind.no_facts - end; - - val goal = Var (("guess", 0), propT); - val pos = Position.thread_data (); - fun print_result ctxt' (k, [(s, [_, th])]) = - Proof_Display.print_results int pos ctxt' (k, [(s, [th])]); - val before_qed = - Method.primitive_text (fn ctxt => - Goal.conclude #> Raw_Simplifier.norm_hhf ctxt #> - (fn th => Goal.protect 0 (Conjunction.intr (Drule.mk_term (Thm.cprop_of th)) th))); - fun after_qed [[_, res]] = - Proof.end_block #> guess_context (check_result ctxt thesis res); - in - state - |> Proof.enter_forward - |> 
Proof.begin_block - |> Proof.fix [(Binding.name Auto_Bind.thesisN, NONE, NoSyn)] - |> Proof.chain_facts chain_facts - |> Proof.local_goal print_result (K I) (pair o rpair I) - "guess" (SOME before_qed) after_qed [(Thm.empty_binding, [Logic.mk_term goal, goal])] - |> Proof.refine (Method.primitive_text (fn _ => fn _ => Goal.init (cert thesis))) |> Seq.hd - end; - -in - -val guess = gen_guess Proof_Context.cert_vars; -val guess_cmd = gen_guess Proof_Context.read_vars; - -end; - -end; diff --git a/core/Pure/Isar/outer_syntax.ML b/core/Pure/Isar/outer_syntax.ML deleted file mode 100644 index 308ffa0d..00000000 --- a/core/Pure/Isar/outer_syntax.ML +++ /dev/null @@ -1,318 +0,0 @@ -(* Title: Pure/Isar/outer_syntax.ML - Author: Markus Wenzel, TU Muenchen - -The global Isabelle/Isar outer syntax. - -Note: the syntax for files is statically determined at the very -beginning; for interactive processing it may change dynamically. -*) - -signature OUTER_SYNTAX = -sig - type outer_syntax - val batch_mode: bool Unsynchronized.ref - val is_markup: outer_syntax -> Thy_Output.markup -> string -> bool - val get_syntax: unit -> (Scan.lexicon * Scan.lexicon) * outer_syntax - val check_syntax: unit -> unit - type command_spec = (string * Keyword.T) * Position.T - val command: command_spec -> string -> - (Toplevel.transition -> Toplevel.transition) parser -> unit - val markup_command: Thy_Output.markup -> command_spec -> string -> - (Toplevel.transition -> Toplevel.transition) parser -> unit - val improper_command: command_spec -> string -> - (Toplevel.transition -> Toplevel.transition) parser -> unit - val local_theory': command_spec -> string -> - (bool -> local_theory -> local_theory) parser -> unit - val local_theory: command_spec -> string -> - (local_theory -> local_theory) parser -> unit - val local_theory_to_proof': command_spec -> string -> - (bool -> local_theory -> Proof.state) parser -> unit - val local_theory_to_proof: command_spec -> string -> - (local_theory -> Proof.state) parser -> unit - val help_outer_syntax: string list -> unit - val print_outer_syntax: unit -> unit - val scan: Position.T -> string -> Token.T list - val parse: Position.T -> string -> Toplevel.transition list - type isar - val isar: TextIO.instream -> bool -> isar - val side_comments: Token.T list -> Token.T list - val command_reports: outer_syntax -> Token.T -> Position.report_text list - val read_spans: outer_syntax -> Token.T list -> Toplevel.transition list -end; - -structure Outer_Syntax: OUTER_SYNTAX = -struct - -(** outer syntax **) - -(* command parsers *) - -datatype command = Command of - {comment: string, - markup: Thy_Output.markup option, - int_only: bool, - parse: (Toplevel.transition -> Toplevel.transition) parser, - pos: Position.T, - id: serial}; - -fun new_command comment markup int_only parse pos = - Command {comment = comment, markup = markup, int_only = int_only, parse = parse, - pos = pos, id = serial ()}; - -fun command_markup def (name, Command {pos, id, ...}) = - Markup.properties (Position.entity_properties_of def id pos) - (Markup.entity Markup.commandN name); - -fun pretty_command (cmd as (name, Command {comment, ...})) = - Pretty.block - (Pretty.marks_str - ([Active.make_markup Markup.sendbackN {implicit = true, properties = [Markup.padding_line]}, - command_markup false cmd], name) :: Pretty.str ":" :: Pretty.brk 2 :: Pretty.text comment); - - -(* parse command *) - -local - -fun terminate false = Scan.succeed () - | terminate true = - Parse.group (fn () => "end of input") - (Scan.option 
Parse.sync -- Parse.semicolon >> K ()); - -fun body cmd (name, _) = - (case cmd name of - SOME (Command {int_only, parse, ...}) => - Parse.!!! (Scan.prompt (name ^ "# ") (Parse.tags |-- parse >> pair int_only)) - | NONE => - Scan.succeed (false, Toplevel.imperative (fn () => - error ("Bad parser for outer syntax command " ^ quote name)))); - -in - -fun parse_command do_terminate cmd = - Parse.semicolon >> K NONE || - Parse.sync >> K NONE || - (Parse.position Parse.command :-- body cmd) --| terminate do_terminate - >> (fn ((name, pos), (int_only, f)) => - SOME (Toplevel.empty |> Toplevel.name name |> Toplevel.position pos |> - Toplevel.interactive int_only |> f)); - -end; - - -(* type outer_syntax *) - -datatype outer_syntax = Outer_Syntax of - {commands: command Symtab.table, - markups: (string * Thy_Output.markup) list}; - -fun make_outer_syntax commands markups = - Outer_Syntax {commands = commands, markups = markups}; - -val empty_outer_syntax = make_outer_syntax Symtab.empty []; - - -fun map_commands f (Outer_Syntax {commands, ...}) = - let - val commands' = f commands; - val markups' = - Symtab.fold (fn (name, Command {markup = SOME m, ...}) => cons (name, m) | _ => I) - commands' []; - in make_outer_syntax commands' markups' end; - -fun dest_commands (Outer_Syntax {commands, ...}) = - commands |> Symtab.dest |> sort_wrt #1; - -fun lookup_commands (Outer_Syntax {commands, ...}) = Symtab.lookup commands; - -fun is_markup (Outer_Syntax {markups, ...}) kind name = - AList.lookup (op =) markups name = SOME kind; - - - -(** global outer syntax **) - -type command_spec = (string * Keyword.T) * Position.T; - -val batch_mode = Unsynchronized.ref false; - -local - -(*synchronized wrt. Keywords*) -val global_outer_syntax = Unsynchronized.ref empty_outer_syntax; - -fun add_command (name, kind) cmd = CRITICAL (fn () => - let - val context = ML_Context.the_generic_context (); - val thy = Context.theory_of context; - val Command {pos, ...} = cmd; - val command_name = quote (Markup.markup Markup.keyword1 name); - val _ = - (case try (Thy_Header.the_keyword thy) name of - SOME spec => - if Option.map #1 spec = SOME (Keyword.kind_files_of kind) then () - else error ("Inconsistent outer syntax keyword declaration " ^ - command_name ^ Position.here pos) - | NONE => - if Context.theory_name thy = Context.PureN - then Keyword.define (name, SOME kind) - else error ("Undeclared outer syntax command " ^ command_name ^ Position.here pos)); - val _ = Context_Position.report_generic context pos (command_markup true (name, cmd)); - in - Unsynchronized.change global_outer_syntax (map_commands (fn commands => - (if not (Symtab.defined commands name) then () - else if ! batch_mode then - error ("Attempt to redefine outer syntax command " ^ command_name) - else - warning ("Redefining outer syntax command " ^ command_name ^ - Position.here (Position.thread_data ())); - Symtab.update (name, cmd) commands))) - end); - -in - -fun get_syntax () = CRITICAL (fn () => (Keyword.get_lexicons (), ! global_outer_syntax)); - -fun check_syntax () = - let - val ((_, major), syntax) = CRITICAL (fn () => (Keyword.dest (), ! global_outer_syntax)); - in - (case subtract (op =) (map #1 (dest_commands syntax)) major of - [] => () - | missing => error ("Missing outer syntax command(s) " ^ commas_quote missing)) - end; - -fun lookup_commands_dynamic () = lookup_commands (! 
global_outer_syntax); - -fun command (spec, pos) comment parse = - add_command spec (new_command comment NONE false parse pos); - -fun markup_command markup (spec, pos) comment parse = - add_command spec (new_command comment (SOME markup) false parse pos); - -fun improper_command (spec, pos) comment parse = - add_command spec (new_command comment NONE true parse pos); - -end; - - -(* local_theory commands *) - -fun local_theory_command trans command_spec comment parse = - command command_spec comment (Parse.opt_target -- parse >> (fn (loc, f) => trans loc f)); - -val local_theory' = local_theory_command Toplevel.local_theory'; -val local_theory = local_theory_command Toplevel.local_theory; -val local_theory_to_proof' = local_theory_command Toplevel.local_theory_to_proof'; -val local_theory_to_proof = local_theory_command Toplevel.local_theory_to_proof; - - -(* inspect syntax *) - -fun help_outer_syntax pats = - dest_commands (#2 (get_syntax ())) - |> filter (fn (name, _) => forall (fn pat => match_string pat name) pats) - |> map pretty_command - |> Pretty.writeln_chunks; - -fun print_outer_syntax () = - let - val ((keywords, _), outer_syntax) = - CRITICAL (fn () => (Keyword.dest (), #2 (get_syntax ()))); - val (int_cmds, cmds) = - List.partition (fn (_, Command {int_only, ...}) => int_only) (dest_commands outer_syntax); - in - [Pretty.strs ("syntax keywords:" :: map quote keywords), - Pretty.big_list "commands:" (map pretty_command cmds), - Pretty.big_list "interactive-only commands:" (map pretty_command int_cmds)] - |> Pretty.writeln_chunks - end; - - - -(** toplevel parsing **) - -(* basic sources *) - -fun toplevel_source term do_recover cmd src = - let - val no_terminator = - Scan.unless Parse.semicolon (Scan.one (Token.not_sync andf Token.not_eof)); - fun recover int = - (int, fn _ => Scan.prompt "recover# " (Scan.repeat no_terminator) >> K [NONE]); - in - src - |> Token.source_proper - |> Source.source Token.stopper - (Scan.bulk (Parse.$$$ "--" -- Parse.!!! Parse.document_source >> K NONE || Parse.not_eof >> SOME)) - (Option.map recover do_recover) - |> Source.map_filter I - |> Source.source Token.stopper - (Scan.bulk (fn xs => Parse.!!! 
(parse_command term (cmd ())) xs)) - (Option.map recover do_recover) - |> Source.map_filter I - end; - - -(* off-line scanning/parsing *) - -fun scan pos str = - Source.of_string str - |> Symbol.source - |> Token.source {do_recover = SOME false} Keyword.get_lexicons pos - |> Source.exhaust; - -fun parse pos str = - Source.of_string str - |> Symbol.source - |> Token.source {do_recover = SOME false} Keyword.get_lexicons pos - |> toplevel_source false NONE lookup_commands_dynamic - |> Source.exhaust; - - -(* interactive source of toplevel transformers *) - -type isar = - (Toplevel.transition, (Toplevel.transition option, - (Token.T, (Token.T option, (Token.T, (Token.T, - (Symbol_Pos.T, - Position.T * (Symbol.symbol, (Symbol.symbol, (string, unit) Source.source) Source.source) - Source.source) Source.source) Source.source) Source.source) - Source.source) Source.source) Source.source) Source.source; - -fun isar in_stream term : isar = - Source.tty in_stream - |> Symbol.source - |> Source.map_filter (fn "\<^newline>" => SOME "\n" | s => SOME s) (*Proof General legacy*) - |> Token.source {do_recover = SOME true} Keyword.get_lexicons Position.none - |> toplevel_source term (SOME true) lookup_commands_dynamic; - - -(* side-comments *) - -fun cmts (t1 :: t2 :: toks) = - if Token.keyword_with (fn s => s = "--") t1 then t2 :: cmts toks - else cmts (t2 :: toks) - | cmts _ = []; - -val side_comments = filter Token.is_proper #> cmts; - - -(* read commands *) - -fun command_reports outer_syntax tok = - if Token.is_command tok then - let val name = Token.content_of tok in - (case lookup_commands outer_syntax name of - NONE => [] - | SOME cmd => [((Token.pos_of tok, command_markup false (name, cmd)), "")]) - end - else []; - -fun read_spans outer_syntax toks = - Source.of_list toks - |> toplevel_source false NONE (K (lookup_commands outer_syntax)) - |> Source.exhaust; - -end; - diff --git a/core/Pure/Isar/outer_syntax.scala b/core/Pure/Isar/outer_syntax.scala deleted file mode 100644 index 7794856b..00000000 --- a/core/Pure/Isar/outer_syntax.scala +++ /dev/null @@ -1,168 +0,0 @@ -/* Title: Pure/Isar/outer_syntax.scala - Author: Makarius - -Isabelle/Isar outer syntax. 
-*/ - -package isabelle - - -import scala.util.parsing.input.{Reader, CharSequenceReader} -import scala.collection.mutable - - -object Outer_Syntax -{ - def quote_string(str: String): String = - { - val result = new StringBuilder(str.length + 10) - result += '"' - for (s <- Symbol.iterator(str)) { - if (s.length == 1) { - val c = s(0) - if (c < 32 && c != YXML.X && c != YXML.Y || c == '\\' || c == '"') { - result += '\\' - if (c < 10) result += '0' - if (c < 100) result += '0' - result ++= (c.asInstanceOf[Int].toString) - } - else result += c - } - else result ++= s - } - result += '"' - result.toString - } - - val empty: Outer_Syntax = new Outer_Syntax() - - def init(): Outer_Syntax = new Outer_Syntax(completion = Completion.init()) -} - -final class Outer_Syntax private( - keywords: Map[String, (String, List[String])] = Map.empty, - lexicon: Scan.Lexicon = Scan.Lexicon.empty, - val completion: Completion = Completion.empty, - val language_context: Completion.Language_Context = Completion.Language_Context.outer, - val has_tokens: Boolean = true) extends Prover.Syntax -{ - override def toString: String = - (for ((name, (kind, files)) <- keywords) yield { - if (kind == Keyword.MINOR) quote(name) - else - quote(name) + " :: " + quote(kind) + - (if (files.isEmpty) "" else " (" + commas_quote(files) + ")") - }).toList.sorted.mkString("keywords\n ", " and\n ", "") - - def keyword_kind_files(name: String): Option[(String, List[String])] = keywords.get(name) - def keyword_kind(name: String): Option[String] = keyword_kind_files(name).map(_._1) - - def load(span: List[Token]): Option[List[String]] = - keywords.get(Command.name(span)) match { - case Some((Keyword.THY_LOAD, exts)) => Some(exts) - case _ => None - } - - val load_commands: List[(String, List[String])] = - (for ((name, (Keyword.THY_LOAD, files)) <- keywords.iterator) yield (name, files)).toList - - def load_commands_in(text: String): Boolean = - load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) }) - - def + (name: String, kind: (String, List[String]), replace: Option[String]): Outer_Syntax = - { - val keywords1 = keywords + (name -> kind) - val lexicon1 = lexicon + name - val completion1 = - if (Keyword.control(kind._1) || replace == Some("")) completion - else completion + (name, replace getOrElse name) - new Outer_Syntax(keywords1, lexicon1, completion1, language_context, true) - } - - def + (name: String, kind: (String, List[String])): Outer_Syntax = - this + (name, kind, Some(name)) - def + (name: String, kind: String): Outer_Syntax = - this + (name, (kind, Nil), Some(name)) - def + (name: String, replace: Option[String]): Outer_Syntax = - this + (name, (Keyword.MINOR, Nil), replace) - def + (name: String): Outer_Syntax = this + (name, None) - - def add_keywords(keywords: Thy_Header.Keywords): Outer_Syntax = - (this /: keywords) { - case (syntax, (name, Some((kind, _)), replace)) => - syntax + - (Symbol.decode(name), kind, replace) + - (Symbol.encode(name), kind, replace) - case (syntax, (name, None, replace)) => - syntax + - (Symbol.decode(name), replace) + - (Symbol.encode(name), replace) - } - - def is_command(name: String): Boolean = - keyword_kind(name) match { - case Some(kind) => kind != Keyword.MINOR - case None => false - } - - def heading_level(name: String): Option[Int] = - { - keyword_kind(name) match { - case _ if name == "header" => Some(0) - case Some(Keyword.THY_HEADING1) => Some(1) - case Some(Keyword.THY_HEADING2) | Some(Keyword.PRF_HEADING2) => Some(2) - case Some(Keyword.THY_HEADING3) | 
Some(Keyword.PRF_HEADING3) => Some(3) - case Some(Keyword.THY_HEADING4) | Some(Keyword.PRF_HEADING4) => Some(4) - case Some(kind) if Keyword.theory(kind) => Some(5) - case _ => None - } - } - - def heading_level(command: Command): Option[Int] = - heading_level(command.name) - - - /* token language */ - - def scan(input: Reader[Char]): List[Token] = - { - Token.Parsers.parseAll( - Token.Parsers.rep(Token.Parsers.token(lexicon, is_command)), input) match { - case Token.Parsers.Success(tokens, _) => tokens - case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString) - } - } - - def scan(input: CharSequence): List[Token] = - scan(new CharSequenceReader(input)) - - def scan_line(input: CharSequence, context: Scan.Line_Context): (List[Token], Scan.Line_Context) = - { - var in: Reader[Char] = new CharSequenceReader(input) - val toks = new mutable.ListBuffer[Token] - var ctxt = context - while (!in.atEnd) { - Token.Parsers.parse(Token.Parsers.token_line(lexicon, is_command, ctxt), in) match { - case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest } - case Token.Parsers.NoSuccess(_, rest) => - error("Unexpected failure of tokenizing input:\n" + rest.source.toString) - } - } - (toks.toList, ctxt) - } - - - /* language context */ - - def set_language_context(context: Completion.Language_Context): Outer_Syntax = - new Outer_Syntax(keywords, lexicon, completion, context, has_tokens) - - def no_tokens: Outer_Syntax = - { - require(keywords.isEmpty && lexicon.isEmpty) - new Outer_Syntax( - completion = completion, - language_context = language_context, - has_tokens = false) - } -} diff --git a/core/Pure/Isar/overloading.ML b/core/Pure/Isar/overloading.ML deleted file mode 100644 index 75864919..00000000 --- a/core/Pure/Isar/overloading.ML +++ /dev/null @@ -1,216 +0,0 @@ -(* Title: Pure/Isar/overloading.ML - Author: Florian Haftmann, TU Muenchen - -Overloaded definitions without any discipline. 
-*) - -signature OVERLOADING = -sig - type improvable_syntax - val activate_improvable_syntax: Proof.context -> Proof.context - val map_improvable_syntax: (improvable_syntax -> improvable_syntax) - -> Proof.context -> Proof.context - val set_primary_constraints: Proof.context -> Proof.context - - val overloading: (string * (string * typ) * bool) list -> theory -> local_theory - val overloading_cmd: (string * string * bool) list -> theory -> local_theory -end; - -structure Overloading: OVERLOADING = -struct - -(* generic check/uncheck combinators for improvable constants *) - -type improvable_syntax = ((((string * typ) list * (string * typ) list) * - ((((string * typ -> (typ * typ) option) * (string * typ -> (typ * term) option)) * bool) * - (term * term) list)) * bool); - -structure Improvable_Syntax = Proof_Data -( - type T = { - primary_constraints: (string * typ) list, - secondary_constraints: (string * typ) list, - improve: string * typ -> (typ * typ) option, - subst: string * typ -> (typ * term) option, - consider_abbrevs: bool, - unchecks: (term * term) list, - passed: bool - }; - fun init _ = { - primary_constraints = [], - secondary_constraints = [], - improve = K NONE, - subst = K NONE, - consider_abbrevs = false, - unchecks = [], - passed = true - }; -); - -fun map_improvable_syntax f = Improvable_Syntax.map (fn {primary_constraints, - secondary_constraints, improve, subst, consider_abbrevs, unchecks, passed} => - let - val (((primary_constraints', secondary_constraints'), - (((improve', subst'), consider_abbrevs'), unchecks')), passed') - = f (((primary_constraints, secondary_constraints), - (((improve, subst), consider_abbrevs), unchecks)), passed) - in - {primary_constraints = primary_constraints', secondary_constraints = secondary_constraints', - improve = improve', subst = subst', consider_abbrevs = consider_abbrevs', - unchecks = unchecks', passed = passed'} - end); - -val mark_passed = (map_improvable_syntax o apsnd) (K true); - -fun improve_term_check ts ctxt = - let - val thy = Proof_Context.theory_of ctxt; - - val {secondary_constraints, improve, subst, consider_abbrevs, passed, ...} = - Improvable_Syntax.get ctxt; - val is_abbrev = consider_abbrevs andalso Proof_Context.abbrev_mode ctxt; - val passed_or_abbrev = passed orelse is_abbrev; - fun accumulate_improvements (Const (c, ty)) = - (case improve (c, ty) of - SOME ty_ty' => Sign.typ_match thy ty_ty' - | _ => I) - | accumulate_improvements _ = I; - val improvements = (fold o fold_aterms) accumulate_improvements ts Vartab.empty; - val ts' = (map o map_types) (Envir.subst_type improvements) ts; - fun apply_subst t = - Envir.expand_term - (fn Const (c, ty) => - (case subst (c, ty) of - SOME (ty', t') => - if Sign.typ_instance thy (ty, ty') - then SOME (ty', apply_subst t') else NONE - | NONE => NONE) - | _ => NONE) t; - val ts'' = if is_abbrev then ts' else map apply_subst ts'; - in - if eq_list (op aconv) (ts, ts'') andalso passed_or_abbrev then NONE - else if passed_or_abbrev then SOME (ts'', ctxt) - else - SOME (ts'', ctxt - |> fold (Proof_Context.add_const_constraint o apsnd SOME) secondary_constraints - |> mark_passed) - end; - -fun rewrite_liberal thy unchecks t = - (case try (Pattern.rewrite_term thy unchecks []) t of - NONE => NONE - | SOME t' => if t aconv t' then NONE else SOME t'); - -fun improve_term_uncheck ts ctxt = - let - val thy = Proof_Context.theory_of ctxt; - val {unchecks, ...} = Improvable_Syntax.get ctxt; - val ts' = map (rewrite_liberal thy unchecks) ts; - in if exists is_some ts' then SOME (map2 
the_default ts ts', ctxt) else NONE end; - -fun set_primary_constraints ctxt = - let val {primary_constraints, ...} = Improvable_Syntax.get ctxt; - in fold (Proof_Context.add_const_constraint o apsnd SOME) primary_constraints ctxt end; - -val activate_improvable_syntax = - Context.proof_map - (Syntax_Phases.term_check' 0 "improvement" improve_term_check - #> Syntax_Phases.term_uncheck' 0 "improvement" improve_term_uncheck) - #> set_primary_constraints; - - -(* overloading target *) - -structure Data = Proof_Data -( - type T = ((string * typ) * (string * bool)) list; - fun init _ = []; -); - -val get_overloading = Data.get o Local_Theory.target_of; -val map_overloading = Local_Theory.target o Data.map; - -fun operation lthy b = - get_overloading lthy - |> get_first (fn ((c, _), (v, checked)) => - if Binding.name_of b = v then SOME (c, (v, checked)) else NONE); - -fun synchronize_syntax ctxt = - let - val overloading = Data.get ctxt; - fun subst (c, ty) = - (case AList.lookup (op =) overloading (c, ty) of - SOME (v, _) => SOME (ty, Free (v, ty)) - | NONE => NONE); - val unchecks = - map (fn (c_ty as (_, ty), (v, _)) => (Free (v, ty), Const c_ty)) overloading; - in - ctxt - |> map_improvable_syntax (K ((([], []), (((K NONE, subst), false), unchecks)), false)) - end; - -fun define_overloaded (c, U) (v, checked) (b_def, rhs) = - Local_Theory.background_theory_result - (Thm.add_def_global (not checked) true - (Thm.def_binding_optional (Binding.name v) b_def, - Logic.mk_equals (Const (c, Term.fastype_of rhs), rhs))) - ##> map_overloading (filter_out (fn (_, (v', _)) => v' = v)) - ##> Local_Theory.map_contexts (K synchronize_syntax) - #-> (fn (_, def) => pair (Const (c, U), def)); - -fun foundation (((b, U), mx), (b_def, rhs)) params lthy = - (case operation lthy b of - SOME (c, (v, checked)) => - if mx <> NoSyn - then error ("Illegal mixfix syntax for overloaded constant " ^ quote c) - else lthy |> define_overloaded (c, U) (v, checked) (b_def, rhs) - | NONE => lthy |> Generic_Target.theory_foundation (((b, U), mx), (b_def, rhs)) params); - -fun pretty lthy = - let - val overloading = get_overloading lthy; - fun pr_operation ((c, ty), (v, _)) = - Pretty.block (Pretty.breaks - [Pretty.str v, Pretty.str "==", Proof_Context.pretty_const lthy c, - Pretty.str "::", Syntax.pretty_typ lthy ty]); - in Pretty.keyword1 "overloading" :: map pr_operation overloading end; - -fun conclude lthy = - let - val overloading = get_overloading lthy; - val _ = - if null overloading then () - else - error ("Missing definition(s) for parameter(s) " ^ - commas_quote (map (Syntax.string_of_term lthy o Const o fst) overloading)); - in lthy end; - -fun gen_overloading prep_const raw_overloading thy = - let - val ctxt = Proof_Context.init_global thy; - val naming = Sign.naming_of thy; - val _ = if null raw_overloading then error "At least one parameter must be given" else (); - val overloading = raw_overloading |> map (fn (v, const, checked) => - (Term.dest_Const (prep_const ctxt const), (v, checked))); - in - thy - |> Sign.change_begin - |> Proof_Context.init_global - |> Data.put overloading - |> fold (fn ((_, ty), (v, _)) => Variable.declare_names (Free (v, ty))) overloading - |> activate_improvable_syntax - |> synchronize_syntax - |> Local_Theory.init naming - {define = Generic_Target.define foundation, - notes = Generic_Target.notes Generic_Target.theory_notes, - abbrev = Generic_Target.abbrev Generic_Target.theory_abbrev, - declaration = K Generic_Target.theory_declaration, - subscription = 
Generic_Target.theory_registration, - pretty = pretty, - exit = conclude #> Local_Theory.target_of #> Sign.change_end_local} - end; - -val overloading = gen_overloading (fn ctxt => Syntax.check_term ctxt o Const); -val overloading_cmd = gen_overloading Syntax.read_term; - -end; diff --git a/core/Pure/Isar/parse.ML b/core/Pure/Isar/parse.ML deleted file mode 100644 index c67a7448..00000000 --- a/core/Pure/Isar/parse.ML +++ /dev/null @@ -1,446 +0,0 @@ -(* Title: Pure/Isar/parse.ML - Author: Markus Wenzel, TU Muenchen - -Generic parsers for Isabelle/Isar outer syntax. -*) - -signature PARSE = -sig - type 'a parser = Token.T list -> 'a * Token.T list - type 'a context_parser = Context.generic * Token.T list -> 'a * (Context.generic * Token.T list) - val group: (unit -> string) -> (Token.T list -> 'a) -> Token.T list -> 'a - val !!! : (Token.T list -> 'a) -> Token.T list -> 'a - val !!!! : (Token.T list -> 'a) -> Token.T list -> 'a - val triple1: ('a * 'b) * 'c -> 'a * 'b * 'c - val triple2: 'a * ('b * 'c) -> 'a * 'b * 'c - val triple_swap: ('a * 'b) * 'c -> ('a * 'c) * 'b - val not_eof: Token.T parser - val token: 'a parser -> Token.T parser - val position: 'a parser -> ('a * Position.T) parser - val source_position: 'a parser -> Symbol_Pos.source parser - val inner_syntax: 'a parser -> string parser - val command: string parser - val keyword: string parser - val short_ident: string parser - val long_ident: string parser - val sym_ident: string parser - val minus: string parser - val term_var: string parser - val type_ident: string parser - val type_var: string parser - val number: string parser - val float_number: string parser - val string: string parser - val alt_string: string parser - val verbatim: string parser - val cartouche: string parser - val sync: string parser - val eof: string parser - val command_name: string -> string parser - val keyword_with: (string -> bool) -> string parser - val keyword_markup: bool * Markup.T -> string -> string parser - val keyword_improper: string -> string parser - val $$$ : string -> string parser - val reserved: string -> string parser - val semicolon: string parser - val underscore: string parser - val maybe: 'a parser -> 'a option parser - val tag_name: string parser - val tags: string list parser - val opt_unit: unit parser - val opt_keyword: string -> bool parser - val begin: string parser - val opt_begin: bool parser - val nat: int parser - val int: int parser - val real: real parser - val enum_positions: string -> 'a parser -> ('a list * Position.T list) parser - val enum1_positions: string -> 'a parser -> ('a list * Position.T list) parser - val enum: string -> 'a parser -> 'a list parser - val enum1: string -> 'a parser -> 'a list parser - val and_list: 'a parser -> 'a list parser - val and_list1: 'a parser -> 'a list parser - val enum': string -> 'a context_parser -> 'a list context_parser - val enum1': string -> 'a context_parser -> 'a list context_parser - val and_list': 'a context_parser -> 'a list context_parser - val and_list1': 'a context_parser -> 'a list context_parser - val list: 'a parser -> 'a list parser - val list1: 'a parser -> 'a list parser - val properties: Properties.T parser - val name: bstring parser - val binding: binding parser - val xname: xstring parser - val text: string parser - val path: string parser - val liberal_name: xstring parser - val parname: string parser - val parbinding: binding parser - val class: string parser - val sort: string parser - val type_const: string parser - val arity: (string * string list 
* string) parser - val multi_arity: (string list * string list * string) parser - val type_args: string list parser - val type_args_constrained: (string * string option) list parser - val typ_group: string parser - val typ: string parser - val mixfix: mixfix parser - val mixfix': mixfix parser - val opt_mixfix: mixfix parser - val opt_mixfix': mixfix parser - val where_: string parser - val const_decl: (string * string * mixfix) parser - val const_binding: (binding * string * mixfix) parser - val params: (binding * string option) list parser - val simple_fixes: (binding * string option) list parser - val fixes: (binding * string option * mixfix) list parser - val for_fixes: (binding * string option * mixfix) list parser - val ML_source: Symbol_Pos.source parser - val document_source: Symbol_Pos.source parser - val term_group: string parser - val prop_group: string parser - val term: string parser - val prop: string parser - val const: string parser - val literal_fact: string parser - val propp: (string * string list) parser - val termp: (string * string list) parser - val target: (xstring * Position.T) parser - val opt_target: (xstring * Position.T) option parser - val args: Token.T list parser - val args1: (string -> bool) -> Token.T list parser -end; - -structure Parse: PARSE = -struct - -type 'a parser = Token.T list -> 'a * Token.T list; -type 'a context_parser = Context.generic * Token.T list -> 'a * (Context.generic * Token.T list); - - -(** error handling **) - -(* group atomic parsers (no cuts!) *) - -fun group s scan = scan || Scan.fail_with - (fn [] => (fn () => s () ^ " expected,\nbut end-of-input was found") - | tok :: _ => - (fn () => - (case Token.text_of tok of - (txt, "") => - s () ^ " expected,\nbut " ^ txt ^ Position.here (Token.pos_of tok) ^ - " was found" - | (txt1, txt2) => - s () ^ " expected,\nbut " ^ txt1 ^ Position.here (Token.pos_of tok) ^ - " was found:\n" ^ txt2))); - - -(* cut *) - -fun cut kind scan = - let - fun get_pos [] = " (end-of-input)" - | get_pos (tok :: _) = Position.here (Token.pos_of tok); - - fun err (toks, NONE) = (fn () => kind ^ get_pos toks) - | err (toks, SOME msg) = - (fn () => - let val s = msg () in - if String.isPrefix kind s then s - else kind ^ get_pos toks ^ ": " ^ s - end); - in Scan.!! err scan end; - -fun !!! scan = cut "Outer syntax error" scan; -fun !!!! 
scan = cut "Corrupted outer syntax in presentation" scan; - - - -(** basic parsers **) - -(* utils *) - -fun triple1 ((x, y), z) = (x, y, z); -fun triple2 (x, (y, z)) = (x, y, z); -fun triple_swap ((x, y), z) = ((x, z), y); - - -(* tokens *) - -fun RESET_VALUE atom = (*required for all primitive parsers*) - Scan.ahead (Scan.one (K true)) -- atom >> (fn (arg, x) => (Token.assign NONE arg; x)); - - -val not_eof = RESET_VALUE (Scan.one Token.not_eof); - -fun token atom = Scan.ahead not_eof --| atom; - -fun position scan = (Scan.ahead not_eof >> Token.pos_of) -- scan >> Library.swap; -fun source_position atom = Scan.ahead atom |-- not_eof >> Token.source_position_of; -fun inner_syntax atom = Scan.ahead atom |-- not_eof >> Token.inner_syntax_of; - -fun kind k = - group (fn () => Token.str_of_kind k) - (RESET_VALUE (Scan.one (Token.is_kind k) >> Token.content_of)); - -val command = kind Token.Command; -val keyword = kind Token.Keyword; -val short_ident = kind Token.Ident; -val long_ident = kind Token.LongIdent; -val sym_ident = kind Token.SymIdent; -val term_var = kind Token.Var; -val type_ident = kind Token.TypeIdent; -val type_var = kind Token.TypeVar; -val number = kind Token.Nat; -val float_number = kind Token.Float; -val string = kind Token.String; -val alt_string = kind Token.AltString; -val verbatim = kind Token.Verbatim; -val cartouche = kind Token.Cartouche; -val sync = kind Token.Sync; -val eof = kind Token.EOF; - -fun command_name x = - group (fn () => Token.str_of_kind Token.Command ^ " " ^ quote x) - (RESET_VALUE (Scan.one (fn tok => Token.is_command tok andalso Token.content_of tok = x))) - >> Token.content_of; - -fun keyword_with pred = RESET_VALUE (Scan.one (Token.keyword_with pred) >> Token.content_of); - -fun keyword_markup markup x = - group (fn () => Token.str_of_kind Token.Keyword ^ " " ^ quote x) - (Scan.ahead not_eof -- keyword_with (fn y => x = y)) - >> (fn (tok, x) => (Token.assign (SOME (Token.Literal markup)) tok; x)); - -val keyword_improper = keyword_markup (true, Markup.improper); -val $$$ = keyword_markup (false, Markup.quasi_keyword); - -fun reserved x = - group (fn () => "reserved identifier " ^ quote x) - (RESET_VALUE (Scan.one (Token.ident_with (fn y => x = y)) >> Token.content_of)); - -val semicolon = $$$ ";"; - -val minus = sym_ident :-- (fn "-" => Scan.succeed () | _ => Scan.fail) >> #1; -val underscore = sym_ident :-- (fn "_" => Scan.succeed () | _ => Scan.fail) >> #1; -fun maybe scan = underscore >> K NONE || scan >> SOME; - -val nat = number >> (#1 o Library.read_int o Symbol.explode); -val int = Scan.optional (minus >> K ~1) 1 -- nat >> op *; -val real = float_number >> Markup.parse_real || int >> Real.fromInt; - -val tag_name = group (fn () => "tag name") (short_ident || string); -val tags = Scan.repeat ($$$ "%" |-- !!! tag_name); - -val opt_unit = Scan.optional ($$$ "(" -- $$$ ")" >> (K ())) (); -fun opt_keyword s = Scan.optional ($$$ "(" |-- !!! (($$$ s >> K true) --| $$$ ")")) false; - -val begin = $$$ "begin"; -val opt_begin = Scan.optional (begin >> K true) false; - - -(* enumerations *) - -fun enum1_positions sep scan = - scan -- Scan.repeat (position ($$$ sep) -- !!! scan) >> - (fn (x, ys) => (x :: map #2 ys, map (#2 o #1) ys)); -fun enum_positions sep scan = - enum1_positions sep scan || Scan.succeed ([], []); - -fun enum1 sep scan = scan ::: Scan.repeat ($$$ sep |-- !!! 
scan); -fun enum sep scan = enum1 sep scan || Scan.succeed []; - -fun enum1' sep scan = scan ::: Scan.repeat (Scan.lift ($$$ sep) |-- scan); -fun enum' sep scan = enum1' sep scan || Scan.succeed []; - -fun and_list1 scan = enum1 "and" scan; -fun and_list scan = enum "and" scan; - -fun and_list1' scan = enum1' "and" scan; -fun and_list' scan = enum' "and" scan; - -fun list1 scan = enum1 "," scan; -fun list scan = enum "," scan; - -val properties = $$$ "(" |-- !!! (list (string -- ($$$ "=" |-- string)) --| $$$ ")"); - - -(* names and text *) - -val name = group (fn () => "name declaration") (short_ident || sym_ident || string || number); - -val binding = position name >> Binding.make; - -val xname = group (fn () => "name reference") - (short_ident || long_ident || sym_ident || string || number); - -val text = group (fn () => "text") - (short_ident || long_ident || sym_ident || string || number || verbatim || cartouche); - -val path = group (fn () => "file name/path specification") name; - -val liberal_name = keyword_with Token.ident_or_symbolic || xname; - -val parname = Scan.optional ($$$ "(" |-- name --| $$$ ")") ""; -val parbinding = Scan.optional ($$$ "(" |-- binding --| $$$ ")") Binding.empty; - - -(* type classes *) - -val class = group (fn () => "type class") (inner_syntax xname); - -val sort = group (fn () => "sort") (inner_syntax xname); - -val type_const = inner_syntax (group (fn () => "type constructor") xname); - -val arity = type_const -- ($$$ "::" |-- !!! - (Scan.optional ($$$ "(" |-- !!! (list1 sort --| $$$ ")")) [] -- sort)) >> triple2; - -val multi_arity = and_list1 type_const -- ($$$ "::" |-- !!! - (Scan.optional ($$$ "(" |-- !!! (list1 sort --| $$$ ")")) [] -- sort)) >> triple2; - - -(* types *) - -val typ_group = - group (fn () => "type") - (short_ident || long_ident || sym_ident || type_ident || type_var || string || number); - -val typ = inner_syntax typ_group; - -fun type_arguments arg = - arg >> single || - $$$ "(" |-- !!! (list1 arg --| $$$ ")") || - Scan.succeed []; - -val type_args = type_arguments type_ident; -val type_args_constrained = type_arguments (type_ident -- Scan.option ($$$ "::" |-- !!! sort)); - - -(* mixfix annotations *) - -local - -val mfix = string -- - !!! (Scan.optional ($$$ "[" |-- !!! (list nat --| $$$ "]")) [] -- - Scan.optional nat 1000) >> (Mixfix o triple2); - -val infx = $$$ "infix" |-- !!! (string -- nat >> Infix); -val infxl = $$$ "infixl" |-- !!! (string -- nat >> Infixl); -val infxr = $$$ "infixr" |-- !!! (string -- nat >> Infixr); -val strcture = $$$ "structure" >> K Structure; - -val binder = $$$ "binder" |-- - !!! (string -- ($$$ "[" |-- nat --| $$$ "]" -- nat || nat >> (fn n => (n, n)))) - >> (Binder o triple2); - -val mixfix_body = mfix || strcture || binder || infxl || infxr || infx; - -fun annotation guard body = $$$ "(" |-- guard (body --| $$$ ")"); -fun opt_annotation guard body = Scan.optional (annotation guard body) NoSyn; - -in - -val mixfix = annotation !!! mixfix_body; -val mixfix' = annotation I mixfix_body; -val opt_mixfix = opt_annotation !!! mixfix_body; -val opt_mixfix' = opt_annotation I mixfix_body; - -end; - - -(* fixes *) - -val where_ = $$$ "where"; - -val const_decl = name -- ($$$ "::" |-- !!! typ) -- opt_mixfix >> triple1; -val const_binding = binding -- ($$$ "::" |-- !!! typ) -- opt_mixfix >> triple1; - -val params = Scan.repeat1 binding -- Scan.option ($$$ "::" |-- !!! 
typ) - >> (fn (xs, T) => map (rpair T) xs); - -val simple_fixes = and_list1 params >> flat; - -val fixes = - and_list1 (binding -- Scan.option ($$$ "::" |-- typ) -- mixfix' >> (single o triple1) || - params >> map (fn (x, y) => (x, y, NoSyn))) >> flat; - -val for_fixes = Scan.optional ($$$ "for" |-- !!! fixes) []; - - -(* embedded source text *) - -val ML_source = source_position (group (fn () => "ML source") text); -val document_source = source_position (group (fn () => "document source") text); - - -(* terms *) - -val tm = short_ident || long_ident || sym_ident || term_var || number || string; - -val term_group = group (fn () => "term") tm; -val prop_group = group (fn () => "proposition") tm; - -val term = inner_syntax term_group; -val prop = inner_syntax prop_group; - -val const = inner_syntax (group (fn () => "constant") xname); - -val literal_fact = inner_syntax (group (fn () => "literal fact") (alt_string || cartouche)); - - -(* patterns *) - -val is_terms = Scan.repeat1 ($$$ "is" |-- term); -val is_props = Scan.repeat1 ($$$ "is" |-- prop); - -val propp = prop -- Scan.optional ($$$ "(" |-- !!! (is_props --| $$$ ")")) []; -val termp = term -- Scan.optional ($$$ "(" |-- !!! (is_terms --| $$$ ")")) []; - - -(* targets *) - -val target = ($$$ "(" -- $$$ "in") |-- !!! (position xname --| $$$ ")"); -val opt_target = Scan.option target; - - -(* arguments within outer syntax *) - -local - -val argument_kinds = - [Token.Ident, Token.LongIdent, Token.SymIdent, Token.Var, Token.TypeIdent, Token.TypeVar, - Token.Nat, Token.Float, Token.String, Token.AltString, Token.Cartouche, Token.Verbatim]; - -fun arguments is_symid = - let - fun argument blk = - group (fn () => "argument") - (Scan.one (fn tok => - let val kind = Token.kind_of tok in - member (op =) argument_kinds kind orelse - Token.keyword_with is_symid tok orelse - (blk andalso Token.keyword_with (fn s => s = ",") tok) - end)); - - fun args blk x = Scan.optional (args1 blk) [] x - and args1 blk x = - ((Scan.repeat1 - (Scan.repeat1 (argument blk) || - argsp "(" ")" || - argsp "[" "]")) >> flat) x - and argsp l r x = (token ($$$ l) ::: !!! (args true @@@ (token ($$$ r) >> single))) x; - in (args, args1) end; - -in - -val args = #1 (arguments Token.ident_or_symbolic) false; -fun args1 is_symid = #2 (arguments is_symid) false; - -end; - -end; - -type 'a parser = 'a Parse.parser; -type 'a context_parser = 'a Parse.context_parser; - diff --git a/core/Pure/Isar/parse.scala b/core/Pure/Isar/parse.scala deleted file mode 100644 index 3689c325..00000000 --- a/core/Pure/Isar/parse.scala +++ /dev/null @@ -1,98 +0,0 @@ -/* Title: Pure/Isar/parse.scala - Author: Makarius - -Generic parsers for Isabelle/Isar outer syntax. 
-*/ - -package isabelle - - -import scala.util.parsing.combinator.Parsers -import scala.annotation.tailrec - - -object Parse -{ - /* parsing tokens */ - - trait Parser extends Parsers - { - type Elem = Token - - def filter_proper: Boolean = true - - @tailrec private def proper(in: Input): Input = - if (!filter_proper || in.atEnd || in.first.is_proper) in - else proper(in.rest) - - def token(s: String, pred: Elem => Boolean): Parser[(Elem, Token.Pos)] = - new Parser[(Elem, Token.Pos)] { - def apply(raw_input: Input) = - { - val in = proper(raw_input) - if (in.atEnd) Failure(s + " expected,\nbut end-of-input was found", in) - else { - val pos = - in.pos match { - case pos: Token.Pos => pos - case _ => Token.Pos.none - } - val token = in.first - if (pred(token)) Success((token, pos), proper(in.rest)) - else - token.text match { - case (txt, "") => - Failure(s + " expected,\nbut " + txt + " was found", in) - case (txt1, txt2) => - Failure(s + " expected,\nbut " + txt1 + " was found:\n" + txt2, in) - } - } - } - } - - def atom(s: String, pred: Elem => Boolean): Parser[String] = - token(s, pred) ^^ { case (tok, _) => tok.content } - - def command(name: String): Parser[Position.T] = - token("command " + quote(name), tok => tok.is_command && tok.source == name) ^^ - { case (_, pos) => pos.position } - - def keyword(name: String): Parser[String] = - atom("keyword " + quote(name), tok => tok.is_keyword && tok.source == name) - - def string: Parser[String] = atom("string", _.is_string) - def nat: Parser[Int] = atom("natural number", _.is_nat) ^^ (s => Integer.parseInt(s)) - def name: Parser[String] = atom("name declaration", _.is_name) - def xname: Parser[String] = atom("name reference", _.is_xname) - def text: Parser[String] = atom("text", _.is_text) - def ML_source: Parser[String] = atom("ML source", _.is_text) - def document_source: Parser[String] = atom("document source", _.is_text) - def path: Parser[String] = - atom("file name/path specification", tok => tok.is_name && Path.is_wellformed(tok.content)) - def theory_name: Parser[String] = - atom("theory name", tok => tok.is_name && Path.is_wellformed(tok.content)) - def theory_xname: Parser[String] = - atom("theory name reference", tok => tok.is_xname && Path.is_wellformed(tok.content)) - - private def tag_name: Parser[String] = - atom("tag name", tok => - tok.kind == Token.Kind.IDENT || - tok.kind == Token.Kind.STRING) - - def tags: Parser[List[String]] = rep(keyword("%") ~> tag_name) - - - /* wrappers */ - - def parse[T](p: Parser[T], in: Token.Reader): ParseResult[T] = p(in) - - def parse_all[T](p: Parser[T], in: Token.Reader): ParseResult[T] = - { - val result = parse(p, in) - val rest = proper(result.next) - if (result.successful && !rest.atEnd) Error("bad input", rest) - else result - } - } -} - diff --git a/core/Pure/Isar/parse_spec.ML b/core/Pure/Isar/parse_spec.ML deleted file mode 100644 index 10d523fb..00000000 --- a/core/Pure/Isar/parse_spec.ML +++ /dev/null @@ -1,160 +0,0 @@ -(* Title: Pure/Isar/parse_spec.ML - Author: Makarius - -Parsers for complex specifications. 
-*) - -signature PARSE_SPEC = -sig - val attribs: Attrib.src list parser - val opt_attribs: Attrib.src list parser - val thm_name: string -> Attrib.binding parser - val opt_thm_name: string -> Attrib.binding parser - val spec: (Attrib.binding * string) parser - val specs: (Attrib.binding * string list) parser - val alt_specs: (Attrib.binding * string) list parser - val where_alt_specs: (Attrib.binding * string) list parser - val xthm: (Facts.ref * Attrib.src list) parser - val xthms1: (Facts.ref * Attrib.src list) list parser - val name_facts: (Attrib.binding * (Facts.ref * Attrib.src list) list) list parser - val constdecl: (binding * string option * mixfix) parser - val constdef: ((binding * string option * mixfix) option * (Attrib.binding * string)) parser - val includes: (xstring * Position.T) list parser - val locale_fixes: (binding * string option * mixfix) list parser - val locale_insts: (string option list * (Attrib.binding * string) list) parser - val class_expression: string list parser - val locale_prefix: bool -> (string * bool) parser - val locale_keyword: string parser - val locale_expression: bool -> Expression.expression parser - val context_element: Element.context parser - val statement: (Attrib.binding * (string * string list) list) list parser - val general_statement: (Element.context list * Element.statement) parser - val statement_keyword: string parser -end; - -structure Parse_Spec: PARSE_SPEC = -struct - -(* theorem specifications *) - -val attrib = Parse.position Parse.liberal_name -- Parse.!!! Parse.args >> uncurry Args.src; -val attribs = Parse.$$$ "[" |-- Parse.list attrib --| Parse.$$$ "]"; -val opt_attribs = Scan.optional attribs []; - -fun thm_name s = Parse.binding -- opt_attribs --| Parse.$$$ s; - -fun opt_thm_name s = - Scan.optional ((Parse.binding -- opt_attribs || attribs >> pair Binding.empty) --| Parse.$$$ s) - Attrib.empty_binding; - -val spec = opt_thm_name ":" -- Parse.prop; -val specs = opt_thm_name ":" -- Scan.repeat1 Parse.prop; - -val alt_specs = - Parse.enum1 "|" - (spec --| Scan.option (Scan.ahead (Parse.name || Parse.$$$ "[") -- Parse.!!! (Parse.$$$ "|"))); - -val where_alt_specs = Parse.where_ |-- Parse.!!! alt_specs; - -val xthm = - Parse.$$$ "[" |-- attribs --| Parse.$$$ "]" >> pair (Facts.named "") || - (Parse.literal_fact >> Facts.Fact || - Parse.position Parse.xname -- Scan.option Attrib.thm_sel >> Facts.Named) -- opt_attribs; - -val xthms1 = Scan.repeat1 xthm; - -val name_facts = Parse.and_list1 (opt_thm_name "=" -- xthms1); - - -(* basic constant specifications *) - -val constdecl = - Parse.binding -- - (Parse.where_ >> K (NONE, NoSyn) || - Parse.$$$ "::" |-- Parse.!!! ((Parse.typ >> SOME) -- Parse.opt_mixfix' --| Parse.where_) || - Scan.ahead (Parse.$$$ "(") |-- Parse.!!! (Parse.mixfix' --| Parse.where_ >> pair NONE)) - >> Parse.triple2; - -val constdef = Scan.option constdecl -- (opt_thm_name ":" -- Parse.prop); - - -(* locale and context elements *) - -val includes = Parse.$$$ "includes" |-- Parse.!!! (Scan.repeat1 (Parse.position Parse.xname)); - -val locale_fixes = - Parse.and_list1 (Parse.binding -- Scan.option (Parse.$$$ "::" |-- Parse.typ) -- Parse.mixfix - >> (single o Parse.triple1) || - Parse.params >> map (fn (x, y) => (x, y, NoSyn))) >> flat; - -val locale_insts = - Scan.optional - (Parse.$$$ "[" |-- Parse.!!! 
(Scan.repeat1 (Parse.maybe Parse.term) --| Parse.$$$ "]")) [] -- - Scan.optional (Parse.where_ |-- Parse.and_list1 (opt_thm_name ":" -- Parse.prop)) []; - -local - -val loc_element = - Parse.$$$ "fixes" |-- Parse.!!! locale_fixes >> Element.Fixes || - Parse.$$$ "constrains" |-- - Parse.!!! (Parse.and_list1 (Parse.name -- (Parse.$$$ "::" |-- Parse.typ))) - >> Element.Constrains || - Parse.$$$ "assumes" |-- Parse.!!! (Parse.and_list1 (opt_thm_name ":" -- Scan.repeat1 Parse.propp)) - >> Element.Assumes || - Parse.$$$ "defines" |-- Parse.!!! (Parse.and_list1 (opt_thm_name ":" -- Parse.propp)) - >> Element.Defines || - Parse.$$$ "notes" |-- Parse.!!! (Parse.and_list1 (opt_thm_name "=" -- xthms1)) - >> (curry Element.Notes ""); - -fun plus1_unless test scan = - scan ::: Scan.repeat (Parse.$$$ "+" |-- Scan.unless test (Parse.!!! scan)); - -val instance = Parse.where_ |-- - Parse.and_list1 (Parse.name -- (Parse.$$$ "=" |-- Parse.term)) >> Expression.Named || - Scan.repeat1 (Parse.maybe Parse.term) >> Expression.Positional; - -in - -fun locale_prefix mandatory = - Scan.optional - (Parse.name -- - (Parse.$$$ "!" >> K true || Parse.$$$ "?" >> K false || Scan.succeed mandatory) --| - Parse.$$$ ":") - ("", false); - -val locale_keyword = - Parse.$$$ "fixes" || Parse.$$$ "constrains" || Parse.$$$ "assumes" || - Parse.$$$ "defines" || Parse.$$$ "notes"; - -val class_expression = plus1_unless locale_keyword Parse.class; - -fun locale_expression mandatory = - let - val expr2 = Parse.position Parse.xname; - val expr1 = locale_prefix mandatory -- expr2 -- - Scan.optional instance (Expression.Named []) >> (fn ((p, l), i) => (l, (p, i))); - val expr0 = plus1_unless locale_keyword expr1; - in expr0 -- Scan.optional (Parse.$$$ "for" |-- Parse.!!! locale_fixes) [] end; - -val context_element = Parse.group (fn () => "context element") loc_element; - -end; - - -(* statements *) - -val statement = Parse.and_list1 (opt_thm_name ":" -- Scan.repeat1 Parse.propp); - -val obtain_case = - Parse.parbinding -- (Scan.optional (Parse.simple_fixes --| Parse.where_) [] -- - (Parse.and_list1 (Scan.repeat1 Parse.prop) >> flat)); - -val general_statement = - statement >> (fn x => ([], Element.Shows x)) || - Scan.repeat context_element -- - (Parse.$$$ "obtains" |-- Parse.!!! (Parse.enum1 "|" obtain_case) >> Element.Obtains || - Parse.$$$ "shows" |-- Parse.!!! statement >> Element.Shows); - -val statement_keyword = Parse.$$$ "obtains" || Parse.$$$ "shows"; - -end; diff --git a/core/Pure/Isar/proof.ML b/core/Pure/Isar/proof.ML deleted file mode 100644 index e1f49ed7..00000000 --- a/core/Pure/Isar/proof.ML +++ /dev/null @@ -1,1201 +0,0 @@ -(* Title: Pure/Isar/proof.ML - Author: Markus Wenzel, TU Muenchen - -The Isar/VM proof language interpreter: maintains a structured flow of -context elements, goals, refinements, and facts. 
-*) - -signature PROOF = -sig - type context = Proof.context - type method = Method.method - type state - val init: context -> state - val level: state -> int - val assert_bottom: bool -> state -> state - val context_of: state -> context - val theory_of: state -> theory - val map_context: (context -> context) -> state -> state - val map_context_result : (context -> 'a * context) -> state -> 'a * state - val map_contexts: (context -> context) -> state -> state - val propagate_ml_env: state -> state - val bind_terms: (indexname * term option) list -> state -> state - val put_thms: bool -> string * thm list option -> state -> state - val the_facts: state -> thm list - val the_fact: state -> thm - val set_facts: thm list -> state -> state - val reset_facts: state -> state - val assert_forward: state -> state - val assert_chain: state -> state - val assert_forward_or_chain: state -> state - val assert_backward: state -> state - val assert_no_chain: state -> state - val enter_forward: state -> state - val goal_message: (unit -> Pretty.T) -> state -> state - val pretty_goal_messages: state -> Pretty.T list - val pretty_state: int -> state -> Pretty.T list - val refine: Method.text -> state -> state Seq.seq - val refine_end: Method.text -> state -> state Seq.seq - val refine_insert: thm list -> state -> state - val refine_goals: (context -> thm -> unit) -> context -> thm list -> state -> state Seq.seq - val raw_goal: state -> {context: context, facts: thm list, goal: thm} - val goal: state -> {context: context, facts: thm list, goal: thm} - val simple_goal: state -> {context: context, goal: thm} - val status_markup: state -> Markup.T - val let_bind: (term list * term) list -> state -> state - val let_bind_cmd: (string list * string) list -> state -> state - val write: Syntax.mode -> (term * mixfix) list -> state -> state - val write_cmd: Syntax.mode -> (string * mixfix) list -> state -> state - val fix: (binding * typ option * mixfix) list -> state -> state - val fix_cmd: (binding * string option * mixfix) list -> state -> state - val assm: Assumption.export -> - (Thm.binding * (term * term list) list) list -> state -> state - val assm_cmd: Assumption.export -> - (Attrib.binding * (string * string list) list) list -> state -> state - val assume: (Thm.binding * (term * term list) list) list -> state -> state - val assume_cmd: (Attrib.binding * (string * string list) list) list -> state -> state - val presume: (Thm.binding * (term * term list) list) list -> state -> state - val presume_cmd: (Attrib.binding * (string * string list) list) list -> state -> state - val def: (Thm.binding * ((binding * mixfix) * (term * term list))) list -> state -> state - val def_cmd: (Attrib.binding * ((binding * mixfix) * (string * string list))) list -> state -> state - val chain: state -> state - val chain_facts: thm list -> state -> state - val note_thmss: (Thm.binding * (thm list * attribute list) list) list -> state -> state - val note_thmss_cmd: (Attrib.binding * (Facts.ref * Attrib.src list) list) list -> state -> state - val from_thmss: ((thm list * attribute list) list) list -> state -> state - val from_thmss_cmd: ((Facts.ref * Attrib.src list) list) list -> state -> state - val with_thmss: ((thm list * attribute list) list) list -> state -> state - val with_thmss_cmd: ((Facts.ref * Attrib.src list) list) list -> state -> state - val using: ((thm list * attribute list) list) list -> state -> state - val using_cmd: ((Facts.ref * Attrib.src list) list) list -> state -> state - val unfolding: ((thm list * 
attribute list) list) list -> state -> state - val unfolding_cmd: ((Facts.ref * Attrib.src list) list) list -> state -> state - val invoke_case: (string * Position.T) * binding option list * attribute list -> - state -> state - val invoke_case_cmd: (string * Position.T) * binding option list * Attrib.src list -> - state -> state - val begin_block: state -> state - val next_block: state -> state - val end_block: state -> state - val begin_notepad: context -> state - val end_notepad: state -> context - val proof: Method.text option -> state -> state Seq.seq - val proof_results: Method.text_range option -> state -> state Seq.result Seq.seq - val defer: int -> state -> state - val prefer: int -> state -> state - val apply: Method.text -> state -> state Seq.seq - val apply_end: Method.text -> state -> state Seq.seq - val apply_results: Method.text_range -> state -> state Seq.result Seq.seq - val apply_end_results: Method.text_range -> state -> state Seq.result Seq.seq - val local_goal: (context -> ((string * string) * (string * thm list) list) -> unit) -> - (context -> 'a -> attribute) -> - ('b list -> context -> (term list list * (context -> context)) * context) -> - string -> Method.text option -> (thm list list -> state -> state) -> - ((binding * 'a list) * 'b) list -> state -> state - val local_qed: Method.text_range option * bool -> state -> state - val theorem: Method.text option -> (thm list list -> context -> context) -> - (term * term list) list list -> context -> state - val theorem_cmd: Method.text option -> (thm list list -> context -> context) -> - (string * string list) list list -> context -> state - val global_qed: Method.text_range option * bool -> state -> context - val local_terminal_proof: Method.text_range * Method.text_range option -> state -> state - val local_default_proof: state -> state - val local_immediate_proof: state -> state - val local_skip_proof: bool -> state -> state - val local_done_proof: state -> state - val global_terminal_proof: Method.text_range * Method.text_range option -> state -> context - val global_default_proof: state -> context - val global_immediate_proof: state -> context - val global_skip_proof: bool -> state -> context - val global_done_proof: state -> context - val have: Method.text option -> (thm list list -> state -> state) -> - (Thm.binding * (term * term list) list) list -> bool -> state -> state - val have_cmd: Method.text option -> (thm list list -> state -> state) -> - (Attrib.binding * (string * string list) list) list -> bool -> state -> state - val show: Method.text option -> (thm list list -> state -> state) -> - (Thm.binding * (term * term list) list) list -> bool -> state -> state - val show_cmd: Method.text option -> (thm list list -> state -> state) -> - (Attrib.binding * (string * string list) list) list -> bool -> state -> state - val schematic_goal: state -> bool - val is_relevant: state -> bool - val future_proof: (state -> ('a * context) future) -> state -> 'a future * state - val local_future_terminal_proof: Method.text_range * Method.text_range option -> bool -> - state -> state - val global_future_terminal_proof: Method.text_range * Method.text_range option -> bool -> - state -> context -end; - -structure Proof: PROOF = -struct - -type context = Proof.context; -type method = Method.method; - - -(** proof state **) - -(* datatype state *) - -datatype mode = Forward | Chain | Backward; - -datatype state = - State of node Stack.T -and node = - Node of - {context: context, - facts: thm list option, - mode: mode, - goal: 
goal option} -and goal = - Goal of - {statement: (string * Position.T) * term list list * term, - (*goal kind and statement (starting with vars), initial proposition*) - messages: (unit -> Pretty.T) list, (*persistent messages (hints etc.)*) - using: thm list, (*goal facts*) - goal: thm, (*subgoals ==> statement*) - before_qed: Method.text option, - after_qed: - (thm list list -> state -> state) * - (thm list list -> context -> context)}; - -fun make_goal (statement, messages, using, goal, before_qed, after_qed) = - Goal {statement = statement, messages = messages, using = using, goal = goal, - before_qed = before_qed, after_qed = after_qed}; - -fun make_node (context, facts, mode, goal) = - Node {context = context, facts = facts, mode = mode, goal = goal}; - -fun map_node f (Node {context, facts, mode, goal}) = - make_node (f (context, facts, mode, goal)); - -val init_context = - Proof_Context.set_stmt true #> - Proof_Context.map_naming (K Name_Space.local_naming); - -fun init ctxt = - State (Stack.init (make_node (init_context ctxt, NONE, Forward, NONE))); - -fun top (State st) = Stack.top st |> (fn Node node => node); -fun map_top f (State st) = State (Stack.map_top (map_node f) st); -fun map_all f (State st) = State (Stack.map_all (map_node f) st); - - - -(** basic proof state operations **) - -(* block structure *) - -fun open_block (State st) = State (Stack.push st); - -fun close_block (State st) = State (Stack.pop st) - handle List.Empty => error "Unbalanced block parentheses"; - -fun level (State st) = Stack.level st; - -fun assert_bottom b state = - let val b' = level state <= 2 in - if b andalso not b' then error "Not at bottom of proof" - else if not b andalso b' then error "Already at bottom of proof" - else state - end; - - -(* context *) - -val context_of = #context o top; -val theory_of = Proof_Context.theory_of o context_of; - -fun map_node_context f = - map_node (fn (ctxt, facts, mode, goal) => (f ctxt, facts, mode, goal)); - -fun map_context f = - map_top (fn (ctxt, facts, mode, goal) => (f ctxt, facts, mode, goal)); - -fun map_context_result f state = - f (context_of state) ||> (fn ctxt => map_context (K ctxt) state); - -fun map_contexts f = map_all (fn (ctxt, facts, mode, goal) => (f ctxt, facts, mode, goal)); - -fun propagate_ml_env state = map_contexts - (Context.proof_map (ML_Env.inherit (Context.Proof (context_of state)))) state; - -val bind_terms = map_context o Proof_Context.bind_terms; -val put_thms = map_context oo Proof_Context.put_thms; - - -(* facts *) - -val get_facts = #facts o top; - -fun the_facts state = - (case get_facts state of SOME facts => facts - | NONE => error "No current facts available"); - -fun the_fact state = - (case the_facts state of [thm] => thm - | _ => error "Single theorem expected"); - -fun put_facts facts = - map_top (fn (ctxt, _, mode, goal) => (ctxt, facts, mode, goal)) #> - put_thms true (Auto_Bind.thisN, facts); - -val set_facts = put_facts o SOME; -val reset_facts = put_facts NONE; - -fun these_factss more_facts (named_factss, state) = - (named_factss, state |> set_facts (maps snd named_factss @ more_facts)); - -fun export_facts inner outer = - (case get_facts inner of - NONE => reset_facts outer - | SOME thms => - thms - |> Proof_Context.export (context_of inner) (context_of outer) - |> (fn ths => set_facts ths outer)); - - -(* mode *) - -val get_mode = #mode o top; -fun put_mode mode = map_top (fn (ctxt, facts, _, goal) => (ctxt, facts, mode, goal)); - -val mode_name = (fn Forward => "state" | Chain => "chain" | Backward => 
"prove"); - -fun assert_mode pred state = - let val mode = get_mode state in - if pred mode then state - else error ("Illegal application of proof command in " ^ quote (mode_name mode) ^ " mode") - end; - -val assert_forward = assert_mode (fn mode => mode = Forward); -val assert_chain = assert_mode (fn mode => mode = Chain); -val assert_forward_or_chain = assert_mode (fn mode => mode = Forward orelse mode = Chain); -val assert_backward = assert_mode (fn mode => mode = Backward); -val assert_no_chain = assert_mode (fn mode => mode <> Chain); - -val enter_forward = put_mode Forward; -val enter_chain = put_mode Chain; -val enter_backward = put_mode Backward; - - -(* current goal *) - -fun current_goal state = - (case top state of - {context, goal = SOME (Goal goal), ...} => (context, goal) - | _ => error "No current goal"); - -fun assert_current_goal g state = - let val g' = can current_goal state in - if g andalso not g' then error "No goal in this block" - else if not g andalso g' then error "Goal present in this block" - else state - end; - -fun put_goal goal = map_top (fn (ctxt, using, mode, _) => (ctxt, using, mode, goal)); - -val set_goal = put_goal o SOME; -val reset_goal = put_goal NONE; - -val before_qed = #before_qed o #2 o current_goal; - - -(* nested goal *) - -fun map_goal f g h (State (Node {context, facts, mode, goal = SOME goal}, node :: nodes)) = - let - val Goal {statement, messages, using, goal, before_qed, after_qed} = goal; - val goal' = make_goal (g (statement, messages, using, goal, before_qed, after_qed)); - val node' = map_node_context h node; - in State (make_node (f context, facts, mode, SOME goal'), node' :: nodes) end - | map_goal f g h (State (nd, node :: nodes)) = - let - val nd' = map_node_context f nd; - val State (node', nodes') = map_goal f g h (State (node, nodes)); - in State (nd', node' :: nodes') end - | map_goal _ _ _ state = state; - -fun provide_goal goal = map_goal I (fn (statement, _, using, _, before_qed, after_qed) => - (statement, [], using, goal, before_qed, after_qed)) I; - -fun goal_message msg = map_goal I (fn (statement, messages, using, goal, before_qed, after_qed) => - (statement, msg :: messages, using, goal, before_qed, after_qed)) I; - -fun using_facts using = map_goal I (fn (statement, _, _, goal, before_qed, after_qed) => - (statement, [], using, goal, before_qed, after_qed)) I; - -local - fun find i state = - (case try current_goal state of - SOME (ctxt, goal) => (ctxt, (i, goal)) - | NONE => find (i + 1) (close_block state handle ERROR _ => error "No goal present")); -in val find_goal = find 0 end; - -fun get_goal state = - let val (ctxt, (_, {using, goal, ...})) = find_goal state - in (ctxt, (using, goal)) end; - - - -(** pretty_state **) - -fun pretty_goal_messages state = - (case try find_goal state of - SOME (_, (_, {messages, ...})) => map (fn msg => msg ()) (rev messages) - | NONE => []); - -fun pretty_facts _ _ NONE = [] - | pretty_facts ctxt s (SOME ths) = [Proof_Display.pretty_goal_facts ctxt s ths, Pretty.str ""]; - -fun pretty_state nr state = - let - val {context = ctxt, facts, mode, goal = _} = top state; - val verbose = Config.get ctxt Proof_Context.verbose; - - fun prt_goal (SOME (_, (_, - {statement = ((_, pos), _, _), messages, using, goal, before_qed = _, after_qed = _}))) = - pretty_facts ctxt "using" - (if mode <> Backward orelse null using then NONE else SOME using) @ - [Proof_Display.pretty_goal_header goal] @ Goal_Display.pretty_goals ctxt goal @ - (map (fn msg => Position.setmp_thread_data pos msg ()) (rev 
messages)) - | prt_goal NONE = []; - - val prt_ctxt = - if verbose orelse mode = Forward then Proof_Context.pretty_context ctxt - else if mode = Backward then Proof_Context.pretty_ctxt ctxt - else []; - - val position_markup = Position.markup (Position.thread_data ()) Markup.position; - in - [Pretty.block - [Pretty.mark_str (position_markup, "proof"), - Pretty.str (" (" ^ mode_name mode ^ "): depth " ^ string_of_int (level state div 2 - 1))], - Pretty.str ""] @ - (if null prt_ctxt then [] else prt_ctxt @ [Pretty.str ""]) @ - (if verbose orelse mode = Forward then - pretty_facts ctxt "" facts @ prt_goal (try find_goal state) - else if mode = Chain then pretty_facts ctxt "picking" facts - else prt_goal (try find_goal state)) - end; - - - -(** proof steps **) - -(* refine via method *) - -local - -fun goalN i = "goal" ^ string_of_int i; -fun goals st = map goalN (1 upto Thm.nprems_of st); - -fun no_goal_cases st = map (rpair NONE) (goals st); - -fun goal_cases st = - Rule_Cases.make_common - (Thm.theory_of_thm st, Thm.prop_of st) (map (rpair [] o rpair []) (goals st)); - -fun apply_method current_context method state = - let - val (goal_ctxt, (_, {statement, messages = _, using, goal, before_qed, after_qed})) = - find_goal state; - val ctxt = if current_context then context_of state else goal_ctxt; - in - Method.apply method ctxt using goal |> Seq.map (fn (meth_cases, goal') => - state - |> map_goal - (Proof_Context.update_cases false (no_goal_cases goal @ goal_cases goal') #> - Proof_Context.update_cases true meth_cases) - (K (statement, [], using, goal', before_qed, after_qed)) I) - end; - -fun select_goals n meth state = - ALLGOALS Goal.conjunction_tac (#2 (#2 (get_goal state))) - |> Seq.maps (fn goal => - state - |> Seq.lift provide_goal ((PRIMITIVE (Goal.restrict 1 n) THEN Goal.conjunction_tac 1) goal) - |> Seq.maps meth - |> Seq.maps (fn state' => state' - |> Seq.lift provide_goal (PRIMITIVE (Goal.unrestrict 1) (#2 (#2 (get_goal state'))))) - |> Seq.maps (apply_method true (K Method.succeed))); - -fun apply_text current_context text state = - let - val ctxt = context_of state; - - fun eval (Method.Basic m) = apply_method current_context m - | eval (Method.Source src) = apply_method current_context (Method.method_cmd ctxt src) - | eval (Method.Then (_, txts)) = Seq.EVERY (map eval txts) - | eval (Method.Orelse (_, txts)) = Seq.FIRST (map eval txts) - | eval (Method.Try (_, txt)) = Seq.TRY (eval txt) - | eval (Method.Repeat1 (_, txt)) = Seq.REPEAT1 (eval txt) - | eval (Method.Select_Goals (_, n, txt)) = select_goals n (eval txt); - in eval text state end; - -in - -val refine = apply_text true; -val refine_end = apply_text false; -fun refine_insert ths = Seq.hd o refine (Method.Basic (K (Method.insert ths))); - -end; - - -(* refine via sub-proof *) - -local - -fun finish_tac _ 0 = K all_tac - | finish_tac ctxt n = - Goal.norm_hhf_tac ctxt THEN' - SUBGOAL (fn (goal, i) => - if can Logic.unprotect (Logic.strip_assums_concl goal) then - etac Drule.protectI i THEN finish_tac ctxt (n - 1) i - else finish_tac ctxt (n - 1) (i + 1)); - -fun goal_tac ctxt rule = - Goal.norm_hhf_tac ctxt THEN' - rtac rule THEN' - finish_tac ctxt (Thm.nprems_of rule); - -fun FINDGOAL tac st = - let fun find i n = if i > n then Seq.fail else Seq.APPEND (tac i, find (i + 1) n) - in find 1 (Thm.nprems_of st) st end; - -in - -fun refine_goals print_rule inner raw_rules state = - let - val (outer, (_, goal)) = get_goal state; - fun refine rule st = (print_rule outer rule; FINDGOAL (goal_tac outer rule) st); - in - 
raw_rules - |> Proof_Context.goal_export inner outer - |> (fn rules => Seq.lift provide_goal (EVERY (map refine rules) goal) state) - end; - -end; - - -(* conclude goal *) - -fun conclude_goal ctxt goal propss = - let - val thy = Proof_Context.theory_of ctxt; - - val _ = - Theory.subthy (theory_of_thm goal, thy) orelse error "Bad background theory of goal state"; - val _ = Thm.no_prems goal orelse error (Proof_Display.string_of_goal ctxt goal); - - fun lost_structure () = error ("Lost goal structure:\n" ^ Display.string_of_thm ctxt goal); - - val th = - (Goal.conclude (if length (flat propss) > 1 then Thm.norm_proof goal else goal) - handle THM _ => lost_structure ()) - |> Drule.flexflex_unique - |> Thm.check_shyps (Variable.sorts_of ctxt) - |> Thm.check_hyps (Context.Proof ctxt); - - val goal_propss = filter_out null propss; - val results = - Conjunction.elim_balanced (length goal_propss) th - |> map2 Conjunction.elim_balanced (map length goal_propss) - handle THM _ => lost_structure (); - val _ = Unify.matches_list thy (flat goal_propss) (map Thm.prop_of (flat results)) orelse - error ("Proved a different theorem:\n" ^ Display.string_of_thm ctxt th); - - fun recover_result ([] :: pss) thss = [] :: recover_result pss thss - | recover_result (_ :: pss) (ths :: thss) = ths :: recover_result pss thss - | recover_result [] [] = [] - | recover_result _ _ = lost_structure (); - in recover_result propss results end; - -val finished_goal_error = "Failed to finish proof"; - -fun finished_goal pos state = - let val (ctxt, (_, goal)) = get_goal state in - if Thm.no_prems goal then Seq.Result state - else - Seq.Error (fn () => - finished_goal_error ^ Position.here pos ^ ":\n" ^ - Proof_Display.string_of_goal ctxt goal) - end; - - -(* goal views -- corresponding to methods *) - -fun raw_goal state = - let val (ctxt, (facts, goal)) = get_goal state - in {context = ctxt, facts = facts, goal = goal} end; - -val goal = raw_goal o refine_insert []; - -fun simple_goal state = - let - val (_, (facts, _)) = get_goal state; - val (ctxt, (_, goal)) = get_goal (refine_insert facts state); - in {context = ctxt, goal = goal} end; - -fun status_markup state = - (case try goal state of - SOME {goal, ...} => Markup.proof_state (Thm.nprems_of goal) - | NONE => Markup.empty); - -fun method_error kind pos state = - Seq.single (Proof_Display.method_error kind pos (raw_goal state)); - - - -(*** structured proof commands ***) - -(** context elements **) - -(* let bindings *) - -local - -fun gen_bind bind args state = - state - |> assert_forward - |> map_context (bind true args #> snd) - |> reset_facts; - -in - -val let_bind = gen_bind Proof_Context.match_bind_i; -val let_bind_cmd = gen_bind Proof_Context.match_bind; - -end; - - -(* concrete syntax *) - -local - -fun gen_write prep_arg mode args = - assert_forward - #> map_context (fn ctxt => ctxt |> Proof_Context.notation true mode (map (prep_arg ctxt) args)) - #> reset_facts; - -fun read_arg ctxt (c, mx) = - (case Proof_Context.read_const {proper = false, strict = false} ctxt c of - Free (x, _) => - let val T = Proof_Context.infer_type ctxt (x, Mixfix.mixfixT mx) - in (Free (x, T), mx) end - | t => (t, mx)); - -in - -val write = gen_write (K I); -val write_cmd = gen_write read_arg; - -end; - - -(* fix *) - -local - -fun gen_fix prep_vars args = - assert_forward - #> map_context (fn ctxt => snd (Proof_Context.add_fixes (fst (prep_vars args ctxt)) ctxt)) - #> reset_facts; - -in - -val fix = gen_fix Proof_Context.cert_vars; -val fix_cmd = gen_fix Proof_Context.read_vars; - 
-end; - - -(* assume etc. *) - -local - -fun gen_assume asm prep_att exp args state = - state - |> assert_forward - |> map_context_result (asm exp (Attrib.map_specs (map (prep_att (context_of state))) args)) - |> these_factss [] |> #2; - -in - -val assm = gen_assume Proof_Context.add_assms_i (K I); -val assm_cmd = gen_assume Proof_Context.add_assms Attrib.attribute_cmd; -val assume = assm Assumption.assume_export; -val assume_cmd = assm_cmd Assumption.assume_export; -val presume = assm Assumption.presume_export; -val presume_cmd = assm_cmd Assumption.presume_export; - -end; - - -(* def *) - -local - -fun gen_def prep_att prep_vars prep_binds args state = - let - val _ = assert_forward state; - val (raw_name_atts, (raw_vars, raw_rhss)) = args |> split_list ||> split_list; - val name_atts = map (apsnd (map (prep_att (context_of state)))) raw_name_atts; - in - state - |> map_context_result (prep_vars (map (fn (x, mx) => (x, NONE, mx)) raw_vars)) - |>> map (fn (x, _, mx) => (x, mx)) - |-> (fn vars => - map_context_result (prep_binds false (map swap raw_rhss)) - #-> (fn rhss => - let - val defs = (vars ~~ (name_atts ~~ rhss)) |> map (fn ((x, mx), ((a, atts), rhs)) => - ((x, mx), ((Thm.def_binding_optional x a, atts), rhs))); - in map_context_result (Local_Defs.add_defs defs) end)) - |-> (set_facts o map (#2 o #2)) - end; - -in - -val def = gen_def (K I) Proof_Context.cert_vars Proof_Context.match_bind_i; -val def_cmd = gen_def Attrib.attribute_cmd Proof_Context.read_vars Proof_Context.match_bind; - -end; - - - -(** facts **) - -(* chain *) - -fun clean_facts ctxt = - set_facts (filter_out Thm.is_dummy (the_facts ctxt)) ctxt; - -val chain = - assert_forward - #> clean_facts - #> enter_chain; - -fun chain_facts facts = - set_facts facts - #> chain; - - -(* note etc. 
*) - -fun no_binding args = map (pair (Binding.empty, [])) args; - -local - -fun gen_thmss more_facts opt_chain opt_result prep_atts prep_fact args state = - state - |> assert_forward - |> map_context_result (fn ctxt => ctxt |> Proof_Context.note_thmss "" - (Attrib.map_facts_refs (map (prep_atts ctxt)) (prep_fact ctxt) args)) - |> these_factss (more_facts state) - ||> opt_chain - |> opt_result; - -in - -val note_thmss = gen_thmss (K []) I #2 (K I) (K I); -val note_thmss_cmd = gen_thmss (K []) I #2 Attrib.attribute_cmd Proof_Context.get_fact; - -val from_thmss = gen_thmss (K []) chain #2 (K I) (K I) o no_binding; -val from_thmss_cmd = - gen_thmss (K []) chain #2 Attrib.attribute_cmd Proof_Context.get_fact o no_binding; - -val with_thmss = gen_thmss the_facts chain #2 (K I) (K I) o no_binding; -val with_thmss_cmd = - gen_thmss the_facts chain #2 Attrib.attribute_cmd Proof_Context.get_fact o no_binding; - -val local_results = gen_thmss (K []) I I (K I) (K I) o map (apsnd Thm.simple_fact); - -end; - - -(* using/unfolding *) - -local - -fun gen_using f g prep_att prep_fact args state = - state - |> assert_backward - |> map_context_result - (fn ctxt => ctxt |> Proof_Context.note_thmss "" - (Attrib.map_facts_refs (map (prep_att ctxt)) (prep_fact ctxt) (no_binding args))) - |> (fn (named_facts, state') => - state' |> map_goal I (fn (statement, _, using, goal, before_qed, after_qed) => - let - val ctxt = context_of state'; - val ths = maps snd named_facts; - in (statement, [], f ctxt ths using, g ctxt ths goal, before_qed, after_qed) end) I); - -fun append_using _ ths using = using @ filter_out Thm.is_dummy ths; -fun unfold_using ctxt ths = map (Local_Defs.unfold ctxt ths); -val unfold_goals = Local_Defs.unfold_goals; - -in - -val using = gen_using append_using (K (K I)) (K I) (K I); -val using_cmd = gen_using append_using (K (K I)) Attrib.attribute_cmd Proof_Context.get_fact; -val unfolding = gen_using unfold_using unfold_goals (K I) (K I); -val unfolding_cmd = gen_using unfold_using unfold_goals Attrib.attribute_cmd Proof_Context.get_fact; - -end; - - -(* case *) - -local - -fun gen_invoke_case internal prep_att ((name, pos), xs, raw_atts) state = - let - val atts = map (prep_att (context_of state)) raw_atts; - val (asms, state') = state |> map_context_result (fn ctxt => - ctxt |> Proof_Context.apply_case (Proof_Context.check_case ctxt internal (name, pos) xs)); - val assumptions = - asms |> map (fn (b, ts) => ((Binding.set_pos pos b, atts), map (rpair []) ts)); - in - state' - |> assume assumptions - |> bind_terms Auto_Bind.no_facts - |> `the_facts |-> (fn thms => note_thmss [((Binding.make (name, pos), []), [(thms, [])])]) - end; - -in - -val invoke_case = gen_invoke_case true (K I); -val invoke_case_cmd = gen_invoke_case false Attrib.attribute_cmd; - -end; - - - -(** proof structure **) - -(* blocks *) - -val begin_block = - assert_forward - #> open_block - #> reset_goal - #> open_block; - -val next_block = - assert_forward - #> close_block - #> open_block - #> reset_goal - #> reset_facts; - -fun end_block state = - state - |> assert_forward - |> assert_bottom false - |> close_block - |> assert_current_goal false - |> close_block - |> export_facts state; - - -(* global notepad *) - -val begin_notepad = - init - #> open_block - #> map_context (Variable.set_body true) - #> open_block; - -val end_notepad = - assert_forward - #> assert_bottom true - #> close_block - #> assert_current_goal false - #> close_block - #> context_of; - - -(* sub-proofs *) - -fun proof opt_text = - assert_backward - #> 
refine (the_default Method.default_text opt_text) - #> Seq.map (using_facts [] #> enter_forward); - -fun proof_results arg = - Seq.APPEND (proof (Method.text arg) #> Seq.make_results, - method_error "initial" (Method.position arg)); - -fun end_proof bot (prev_pos, (opt_text, immed)) = - let - val (finish_text, terminal_pos, finished_pos) = - (case opt_text of - NONE => (Method.finish_text (NONE, immed), Position.none, prev_pos) - | SOME (text, (pos, end_pos)) => (Method.finish_text (SOME text, immed), pos, end_pos)); - in - Seq.APPEND (fn state => - state - |> assert_forward - |> assert_bottom bot - |> close_block - |> assert_current_goal true - |> using_facts [] - |> `before_qed |-> (refine o the_default Method.succeed_text) - |> Seq.maps (refine finish_text) - |> Seq.make_results, method_error "terminal" terminal_pos) - #> Seq.maps_results (Seq.single o finished_goal finished_pos) - end; - -fun check_result msg sq = - (case Seq.pull sq of - NONE => error msg - | SOME (s, _) => s); - - -(* unstructured refinement *) - -fun defer i = - assert_no_chain #> - refine (Method.Basic (fn _ => METHOD (fn _ => ASSERT_SUBGOAL defer_tac i))) #> Seq.hd; - -fun prefer i = - assert_no_chain #> - refine (Method.Basic (fn _ => METHOD (fn _ => ASSERT_SUBGOAL prefer_tac i))) #> Seq.hd; - -fun apply text = assert_backward #> refine text #> Seq.map (using_facts []); - -fun apply_end text = assert_forward #> refine_end text; - -fun apply_results (text, (pos, _)) = - Seq.APPEND (apply text #> Seq.make_results, method_error "" pos); - -fun apply_end_results (text, (pos, _)) = - Seq.APPEND (apply_end text #> Seq.make_results, method_error "" pos); - - - -(** goals **) - -(* generic goals *) - -local - -val is_var = - can (dest_TVar o Logic.dest_type o Logic.dest_term) orf - can (dest_Var o Logic.dest_term); - -fun implicit_vars props = - let - val (var_props, _) = take_prefix is_var props; - val explicit_vars = fold Term.add_vars var_props []; - val vars = filter_out (member (op =) explicit_vars) (fold Term.add_vars props []); - in map (Logic.mk_term o Var) vars end; - -fun refine_terms n = - refine (Method.Basic (K (RAW_METHOD - (K (HEADGOAL (PRECISE_CONJUNCTS n - (HEADGOAL (CONJUNCTS (ALLGOALS (rtac Drule.termI)))))))))) - #> Seq.hd; - -in - -fun generic_goal prepp kind before_qed after_qed raw_propp state = - let - val thy = theory_of state; - val cert = Thm.cterm_of thy; - val chaining = can assert_chain state; - val pos = Position.thread_data (); - - val ((propss, after_ctxt), goal_state) = - state - |> assert_forward_or_chain - |> enter_forward - |> open_block - |> map_context_result (prepp raw_propp); - val props = flat propss; - - val vars = implicit_vars props; - val propss' = vars :: propss; - val goal_propss = filter_out null propss'; - val goal = - cert (Logic.mk_conjunction_balanced (map Logic.mk_conjunction_balanced goal_propss)) - |> Thm.weaken_sorts (Variable.sorts_of (context_of goal_state)); - val statement = ((kind, pos), propss', Thm.term_of goal); - val after_qed' = after_qed |>> (fn after_local => - fn results => map_context after_ctxt #> after_local results); - in - goal_state - |> map_context (init_context #> Variable.set_body true) - |> set_goal (make_goal (statement, [], [], Goal.init goal, before_qed, after_qed')) - |> map_context (Proof_Context.auto_bind_goal props) - |> chaining ? (`the_facts #-> using_facts) - |> reset_facts - |> open_block - |> reset_goal - |> enter_backward - |> not (null vars) ? refine_terms (length goal_propss) - |> null props ? 
(refine (Method.Basic Method.assumption) #> Seq.hd) - end; - -fun generic_qed after_ctxt state = - let - val (goal_ctxt, {statement = (_, stmt, _), goal, after_qed, ...}) = current_goal state; - val outer_state = state |> close_block; - val outer_ctxt = context_of outer_state; - - val props = - flat (tl stmt) - |> Variable.exportT_terms goal_ctxt outer_ctxt; - val results = - tl (conclude_goal goal_ctxt goal stmt) - |> burrow (Proof_Context.export goal_ctxt outer_ctxt); - in - outer_state - |> map_context (after_ctxt props) - |> pair (after_qed, results) - end; - -end; - - -(* local goals *) - -fun local_goal print_results prep_att prepp kind before_qed after_qed stmt state = - let - val ((names, attss), propp) = - Attrib.map_specs (map (prep_att (context_of state))) stmt |> split_list |>> split_list; - - fun after_qed' results = - local_results ((names ~~ attss) ~~ results) - #-> (fn res => tap (fn st => print_results (context_of st) ((kind, ""), res) : unit)) - #> after_qed results; - in - state - |> generic_goal prepp kind before_qed (after_qed', K I) propp - |> tap (Variable.warn_extra_tfrees (context_of state) o context_of) - end; - -fun local_qeds arg = - end_proof false arg - #> Seq.map_result (generic_qed Proof_Context.auto_bind_facts #-> - (fn ((after_qed, _), results) => after_qed results)); - -fun local_qed arg = - local_qeds (Position.none, arg) #> Seq.the_result finished_goal_error; - - -(* global goals *) - -fun prepp_auto_fixes prepp args = - prepp args #> - (fn ((propss, a), ctxt) => ((propss, a), (fold o fold) Variable.auto_fixes propss ctxt)); - -fun global_goal prepp before_qed after_qed propp = - init #> - generic_goal (prepp_auto_fixes prepp) "" before_qed (K I, after_qed) propp; - -val theorem = global_goal Proof_Context.bind_propp_schematic_i; -val theorem_cmd = global_goal Proof_Context.bind_propp_schematic; - -fun global_qeds arg = - end_proof true arg - #> Seq.map_result (generic_qed (K I) #> (fn (((_, after_qed), results), state) => - after_qed results (context_of state))); - -fun global_qed arg = - global_qeds (Position.none, arg) #> Seq.the_result finished_goal_error; - - -(* terminal proof steps *) - -local - -fun terminal_proof qeds initial terminal = - proof_results (SOME initial) #> Seq.maps_results (qeds (#2 (#2 initial), terminal)) - #> Seq.the_result ""; - -in - -fun local_terminal_proof (text, opt_text) = terminal_proof local_qeds text (opt_text, true); -val local_default_proof = local_terminal_proof ((Method.default_text, Position.no_range), NONE); -val local_immediate_proof = local_terminal_proof ((Method.this_text, Position.no_range), NONE); -val local_done_proof = terminal_proof local_qeds (Method.done_text, Position.no_range) (NONE, false); - -fun global_terminal_proof (text, opt_text) = terminal_proof global_qeds text (opt_text, true); -val global_default_proof = global_terminal_proof ((Method.default_text, Position.no_range), NONE); -val global_immediate_proof = global_terminal_proof ((Method.this_text, Position.no_range), NONE); -val global_done_proof = terminal_proof global_qeds (Method.done_text, Position.no_range) (NONE, false); - -end; - - -(* skip proofs *) - -fun local_skip_proof int state = - local_terminal_proof ((Method.sorry_text int, Position.no_range), NONE) state before - Skip_Proof.report (context_of state); - -fun global_skip_proof int state = - global_terminal_proof ((Method.sorry_text int, Position.no_range), NONE) state before - Skip_Proof.report (context_of state); - - -(* common goal statements *) - -local - -fun gen_have 
prep_att prepp before_qed after_qed stmt int = - local_goal (Proof_Display.print_results int (Position.thread_data ())) - prep_att prepp "have" before_qed after_qed stmt; - -fun gen_show prep_att prepp before_qed after_qed stmt int state = - let - val testing = Unsynchronized.ref false; - val rule = Unsynchronized.ref (NONE: thm option); - fun fail_msg ctxt = - "Local statement fails to refine any pending goal" :: - (case ! rule of NONE => [] | SOME th => [Proof_Display.string_of_rule ctxt "Failed" th]) - |> cat_lines; - - val pos = Position.thread_data (); - fun print_results ctxt res = - if ! testing then () - else Proof_Display.print_results int pos ctxt res; - fun print_rule ctxt th = - if ! testing then rule := SOME th - else if int then - Proof_Display.string_of_rule ctxt "Successful" th - |> Markup.markup Markup.text_fold - |> Markup.markup Markup.state - |> writeln - else (); - val test_proof = - local_skip_proof true - |> Unsynchronized.setmp testing true - |> Exn.interruptible_capture; - - fun after_qed' results = - refine_goals print_rule (context_of state) (flat results) - #> check_result "Failed to refine any pending goal" - #> after_qed results; - in - state - |> local_goal print_results prep_att prepp "show" before_qed after_qed' stmt - |> int ? (fn goal_state => - (case test_proof (map_context (Context_Position.set_visible false) goal_state) of - Exn.Res _ => goal_state - | Exn.Exn exn => raise Exn.EXCEPTIONS ([exn, ERROR (fail_msg (context_of goal_state))]))) - end; - -in - -val have = gen_have (K I) Proof_Context.bind_propp_i; -val have_cmd = gen_have Attrib.attribute_cmd Proof_Context.bind_propp; -val show = gen_show (K I) Proof_Context.bind_propp_i; -val show_cmd = gen_show Attrib.attribute_cmd Proof_Context.bind_propp; - -end; - - - -(** future proofs **) - -(* relevant proof states *) - -fun schematic_goal state = - let val (_, (_, {statement = (_, _, prop), ...})) = find_goal state - in Goal.is_schematic prop end; - -fun is_relevant state = - (case try find_goal state of - NONE => true - | SOME (_, (_, {statement = (_, _, prop), goal, ...})) => - Goal.is_schematic prop orelse not (Logic.protect prop aconv Thm.concl_of goal)); - - -(* full proofs *) - -local - -structure Result = Proof_Data -( - type T = thm option; - val empty = NONE; - fun init _ = empty; -); - -fun the_result ctxt = - (case Result.get ctxt of - NONE => error "No result of forked proof" - | SOME th => th); - -val set_result = Result.put o SOME; -val reset_result = Result.put NONE; - -in - -fun future_proof fork_proof state = - let - val _ = assert_backward state; - val (goal_ctxt, (_, goal)) = find_goal state; - val {statement as (kind, _, prop), messages, using, goal, before_qed, after_qed} = goal; - val goal_tfrees = - fold Term.add_tfrees - (prop :: map Thm.term_of (Assumption.all_assms_of goal_ctxt)) []; - - val _ = is_relevant state andalso error "Cannot fork relevant proof"; - - val prop' = Logic.protect prop; - val statement' = (kind, [[], [prop']], prop'); - val goal' = Thm.adjust_maxidx_thm (Thm.maxidx_of goal) (Goal.protect (Thm.nprems_of goal) goal); - val after_qed' = (fn [[th]] => map_context (set_result th), fn [[th]] => set_result th); - - val result_ctxt = - state - |> map_context reset_result - |> map_goal I (K (statement', messages, using, goal', before_qed, after_qed')) - (fold (Variable.declare_typ o TFree) goal_tfrees) - |> fork_proof; - - val future_thm = Future.map (the_result o snd) result_ctxt; - val finished_goal = Goal.future_result goal_ctxt future_thm prop'; - val state' 
= - state - |> map_goal I (K (statement, messages, using, finished_goal, NONE, after_qed)) I; - in (Future.map fst result_ctxt, state') end; - -end; - - -(* terminal proofs *) (* FIXME avoid toplevel imitation -- include in PIDE/document *) - -local - -fun future_terminal_proof proof1 proof2 done int state = - if Goal.future_enabled 3 andalso not (is_relevant state) then - state |> future_proof (fn state' => - let - val pos = Position.thread_data (); - val props = Markup.command_timing :: (Markup.nameN, "by") :: Position.properties_of pos; - in - Execution.fork {name = "Proof.future_terminal_proof", pos = pos, pri = ~1} - (fn () => ((), Timing.protocol props proof2 state')) - end) |> snd |> done - else proof1 state; - -in - -fun local_future_terminal_proof meths = - future_terminal_proof - (local_terminal_proof meths) - (local_terminal_proof meths #> context_of) local_done_proof; - -fun global_future_terminal_proof meths = - future_terminal_proof - (global_terminal_proof meths) - (global_terminal_proof meths) global_done_proof; - -end; - -end; - diff --git a/core/Pure/Isar/proof_context.ML b/core/Pure/Isar/proof_context.ML deleted file mode 100644 index 6133309f..00000000 --- a/core/Pure/Isar/proof_context.ML +++ /dev/null @@ -1,1427 +0,0 @@ -(* Title: Pure/Isar/proof_context.ML - Author: Markus Wenzel, TU Muenchen - -The key concept of Isar proof contexts: elevates primitive local -reasoning Gamma |- phi to a structured concept, with generic context -elements. See also structure Variable and Assumption. -*) - -signature PROOF_CONTEXT = -sig - val theory_of: Proof.context -> theory - val init_global: theory -> Proof.context - val get_global: theory -> string -> Proof.context - type mode - val mode_default: mode - val mode_stmt: mode - val mode_pattern: mode - val mode_schematic: mode - val mode_abbrev: mode - val set_mode: mode -> Proof.context -> Proof.context - val get_mode: Proof.context -> mode - val restore_mode: Proof.context -> Proof.context -> Proof.context - val abbrev_mode: Proof.context -> bool - val set_stmt: bool -> Proof.context -> Proof.context - val syntax_of: Proof.context -> Local_Syntax.T - val syn_of: Proof.context -> Syntax.syntax - val tsig_of: Proof.context -> Type.tsig - val set_defsort: sort -> Proof.context -> Proof.context - val default_sort: Proof.context -> indexname -> sort - val consts_of: Proof.context -> Consts.T - val set_syntax_mode: Syntax.mode -> Proof.context -> Proof.context - val restore_syntax_mode: Proof.context -> Proof.context -> Proof.context - val map_naming: (Name_Space.naming -> Name_Space.naming) -> Proof.context -> Proof.context - val naming_of: Proof.context -> Name_Space.naming - val restore_naming: Proof.context -> Proof.context -> Proof.context - val full_name: Proof.context -> binding -> string - val class_space: Proof.context -> Name_Space.T - val type_space: Proof.context -> Name_Space.T - val const_space: Proof.context -> Name_Space.T - val intern_class: Proof.context -> xstring -> string - val intern_type: Proof.context -> xstring -> string - val intern_const: Proof.context -> xstring -> string - val extern_class: Proof.context -> string -> xstring - val markup_class: Proof.context -> string -> string - val pretty_class: Proof.context -> string -> Pretty.T - val extern_type: Proof.context -> string -> xstring - val markup_type: Proof.context -> string -> string - val pretty_type: Proof.context -> string -> Pretty.T - val extern_const: Proof.context -> string -> xstring - val markup_const: Proof.context -> string -> string - val 
pretty_const: Proof.context -> string -> Pretty.T - val transfer: theory -> Proof.context -> Proof.context - val background_theory: (theory -> theory) -> Proof.context -> Proof.context - val background_theory_result: (theory -> 'a * theory) -> Proof.context -> 'a * Proof.context - val facts_of: Proof.context -> Facts.T - val facts_of_fact: Proof.context -> string -> Facts.T - val markup_extern_fact: Proof.context -> string -> Markup.T * xstring - val pretty_term_abbrev: Proof.context -> term -> Pretty.T - val pretty_fact: Proof.context -> string * thm list -> Pretty.T - val check_class: Proof.context -> xstring * Position.T -> class * Position.report list - val read_class: Proof.context -> string -> class - val read_typ: Proof.context -> string -> typ - val read_typ_syntax: Proof.context -> string -> typ - val read_typ_abbrev: Proof.context -> string -> typ - val cert_typ: Proof.context -> typ -> typ - val cert_typ_syntax: Proof.context -> typ -> typ - val cert_typ_abbrev: Proof.context -> typ -> typ - val infer_type: Proof.context -> string * typ -> typ - val inferred_param: string -> Proof.context -> typ * Proof.context - val inferred_fixes: Proof.context -> (string * typ) list * Proof.context - val check_type_name: {proper: bool, strict: bool} -> Proof.context -> - xstring * Position.T -> typ * Position.report list - val read_type_name: {proper: bool, strict: bool} -> Proof.context -> string -> typ - val consts_completion_message: Proof.context -> xstring * Position.T list -> string - val check_const: {proper: bool, strict: bool} -> Proof.context -> - xstring * Position.T list -> term * Position.report list - val read_const: {proper: bool, strict: bool} -> Proof.context -> string -> term - val read_arity: Proof.context -> xstring * string list * string -> arity - val cert_arity: Proof.context -> arity -> arity - val allow_dummies: Proof.context -> Proof.context - val prepare_sortsT: Proof.context -> typ list -> string list * typ list - val prepare_sorts: Proof.context -> term list -> string list * term list - val check_tfree: Proof.context -> string * sort -> string * sort - val read_term_pattern: Proof.context -> string -> term - val read_term_schematic: Proof.context -> string -> term - val read_term_abbrev: Proof.context -> string -> term - val show_abbrevs_raw: Config.raw - val show_abbrevs: bool Config.T - val expand_abbrevs: Proof.context -> term -> term - val cert_term: Proof.context -> term -> term - val cert_prop: Proof.context -> term -> term - val def_type: Proof.context -> indexname -> typ option - val standard_typ_check: Proof.context -> typ list -> typ list - val standard_term_check_finish: Proof.context -> term list -> term list - val standard_term_uncheck: Proof.context -> term list -> term list - val goal_export: Proof.context -> Proof.context -> thm list -> thm list - val export: Proof.context -> Proof.context -> thm list -> thm list - val export_morphism: Proof.context -> Proof.context -> morphism - val norm_export_morphism: Proof.context -> Proof.context -> morphism - val bind_terms: (indexname * term option) list -> Proof.context -> Proof.context - val auto_bind_goal: term list -> Proof.context -> Proof.context - val auto_bind_facts: term list -> Proof.context -> Proof.context - val match_bind: bool -> (string list * string) list -> Proof.context -> term list * Proof.context - val match_bind_i: bool -> (term list * term) list -> Proof.context -> term list * Proof.context - val read_propp: (string * string list) list list -> Proof.context -> - (term * term list) list 
list * Proof.context - val cert_propp: (term * term list) list list -> Proof.context -> - (term * term list) list list * Proof.context - val read_propp_schematic: (string * string list) list list -> Proof.context -> - (term * term list) list list * Proof.context - val cert_propp_schematic: (term * term list) list list -> Proof.context -> - (term * term list) list list * Proof.context - val bind_propp: (string * string list) list list -> Proof.context -> - (term list list * (Proof.context -> Proof.context)) * Proof.context - val bind_propp_i: (term * term list) list list -> Proof.context -> - (term list list * (Proof.context -> Proof.context)) * Proof.context - val bind_propp_schematic: (string * string list) list list -> Proof.context -> - (term list list * (Proof.context -> Proof.context)) * Proof.context - val bind_propp_schematic_i: (term * term list) list list -> Proof.context -> - (term list list * (Proof.context -> Proof.context)) * Proof.context - val fact_tac: Proof.context -> thm list -> int -> tactic - val some_fact_tac: Proof.context -> int -> tactic - val get_fact_generic: Context.generic -> Facts.ref -> thm list - val get_fact: Proof.context -> Facts.ref -> thm list - val get_fact_single: Proof.context -> Facts.ref -> thm - val get_thms: Proof.context -> xstring -> thm list - val get_thm: Proof.context -> xstring -> thm - val note_thmss: string -> (Thm.binding * (thm list * attribute list) list) list -> - Proof.context -> (string * thm list) list * Proof.context - val put_thms: bool -> string * thm list option -> Proof.context -> Proof.context - val read_vars: (binding * string option * mixfix) list -> Proof.context -> - (binding * typ option * mixfix) list * Proof.context - val cert_vars: (binding * typ option * mixfix) list -> Proof.context -> - (binding * typ option * mixfix) list * Proof.context - val add_fixes: (binding * typ option * mixfix) list -> Proof.context -> - string list * Proof.context - val add_assms: Assumption.export -> - (Thm.binding * (string * string list) list) list -> - Proof.context -> (string * thm list) list * Proof.context - val add_assms_i: Assumption.export -> - (Thm.binding * (term * term list) list) list -> - Proof.context -> (string * thm list) list * Proof.context - val dest_cases: Proof.context -> (string * (Rule_Cases.T * bool)) list - val update_cases: bool -> (string * Rule_Cases.T option) list -> Proof.context -> Proof.context - val apply_case: Rule_Cases.T -> Proof.context -> (binding * term list) list * Proof.context - val check_case: Proof.context -> bool -> - string * Position.T -> binding option list -> Rule_Cases.T - val type_notation: bool -> Syntax.mode -> (typ * mixfix) list -> Proof.context -> Proof.context - val notation: bool -> Syntax.mode -> (term * mixfix) list -> Proof.context -> Proof.context - val generic_type_notation: bool -> Syntax.mode -> (typ * mixfix) list -> morphism -> - Context.generic -> Context.generic - val generic_notation: bool -> Syntax.mode -> (term * mixfix) list -> morphism -> - Context.generic -> Context.generic - val class_alias: binding -> class -> Proof.context -> Proof.context - val type_alias: binding -> string -> Proof.context -> Proof.context - val const_alias: binding -> string -> Proof.context -> Proof.context - val add_const_constraint: string * typ option -> Proof.context -> Proof.context - val add_abbrev: string -> binding * term -> Proof.context -> (term * term) * Proof.context - val revert_abbrev: string -> string -> Proof.context -> Proof.context - val generic_add_abbrev: string -> 
binding * term -> Context.generic -> - (term * term) * Context.generic - val generic_revert_abbrev: string -> string -> Context.generic -> Context.generic - val print_syntax: Proof.context -> unit - val print_abbrevs: Proof.context -> unit - val pretty_term_bindings: Proof.context -> Pretty.T list - val pretty_local_facts: Proof.context -> bool -> Pretty.T list - val print_local_facts: Proof.context -> bool -> unit - val pretty_cases: Proof.context -> Pretty.T list - val debug: bool Config.T - val verbose: bool Config.T - val pretty_ctxt: Proof.context -> Pretty.T list - val pretty_context: Proof.context -> Pretty.T list -end; - -structure Proof_Context: PROOF_CONTEXT = -struct - -val theory_of = Proof_Context.theory_of; -val init_global = Proof_Context.init_global; -val get_global = Proof_Context.get_global; - - - -(** inner syntax mode **) - -datatype mode = - Mode of - {stmt: bool, (*inner statement mode*) - pattern: bool, (*pattern binding schematic variables*) - schematic: bool, (*term referencing loose schematic variables*) - abbrev: bool}; (*abbrev mode -- no normalization*) - -fun make_mode (stmt, pattern, schematic, abbrev) = - Mode {stmt = stmt, pattern = pattern, schematic = schematic, abbrev = abbrev}; - -val mode_default = make_mode (false, false, false, false); -val mode_stmt = make_mode (true, false, false, false); -val mode_pattern = make_mode (false, true, false, false); -val mode_schematic = make_mode (false, false, true, false); -val mode_abbrev = make_mode (false, false, false, true); - - - -(** Isar proof context information **) - -type cases = ((Rule_Cases.T * bool) * int) Name_Space.table; -val empty_cases: cases = Name_Space.empty_table Markup.caseN; - -datatype data = - Data of - {mode: mode, (*inner syntax mode*) - syntax: Local_Syntax.T, (*local syntax*) - tsig: Type.tsig * Type.tsig, (*local/global type signature -- local name space / defsort only*) - consts: Consts.T * Consts.T, (*local/global consts -- local name space / abbrevs only*) - facts: Facts.T, (*local facts, based on initial global facts*) - cases: cases}; (*named case contexts: case, is_proper, running index*) - -fun make_data (mode, syntax, tsig, consts, facts, cases) = - Data {mode = mode, syntax = syntax, tsig = tsig, consts = consts, facts = facts, cases = cases}; - -structure Data = Proof_Data -( - type T = data; - fun init thy = - make_data (mode_default, - Local_Syntax.init thy, - (Type.change_ignore (Sign.tsig_of thy), Sign.tsig_of thy), - (Consts.change_ignore (Sign.consts_of thy), Sign.consts_of thy), - Global_Theory.facts_of thy, - empty_cases); -); - -fun rep_data ctxt = Data.get ctxt |> (fn Data rep => rep); - -fun map_data f = - Data.map (fn Data {mode, syntax, tsig, consts, facts, cases} => - make_data (f (mode, syntax, tsig, consts, facts, cases))); - -fun set_mode mode = map_data (fn (_, syntax, tsig, consts, facts, cases) => - (mode, syntax, tsig, consts, facts, cases)); - -fun map_mode f = - map_data (fn (Mode {stmt, pattern, schematic, abbrev}, syntax, tsig, consts, facts, cases) => - (make_mode (f (stmt, pattern, schematic, abbrev)), syntax, tsig, consts, facts, cases)); - -fun map_syntax f = - map_data (fn (mode, syntax, tsig, consts, facts, cases) => - (mode, f syntax, tsig, consts, facts, cases)); - -fun map_tsig f = - map_data (fn (mode, syntax, tsig, consts, facts, cases) => - (mode, syntax, f tsig, consts, facts, cases)); - -fun map_consts f = - map_data (fn (mode, syntax, tsig, consts, facts, cases) => - (mode, syntax, tsig, f consts, facts, cases)); - -fun map_facts f = 
- map_data (fn (mode, syntax, tsig, consts, facts, cases) => - (mode, syntax, tsig, consts, f facts, cases)); - -fun map_cases f = - map_data (fn (mode, syntax, tsig, consts, facts, cases) => - (mode, syntax, tsig, consts, facts, f cases)); - -val get_mode = #mode o rep_data; -val restore_mode = set_mode o get_mode; -val abbrev_mode = get_mode #> (fn Mode {abbrev, ...} => abbrev); - -fun set_stmt stmt = - map_mode (fn (_, pattern, schematic, abbrev) => (stmt, pattern, schematic, abbrev)); - -val syntax_of = #syntax o rep_data; -val syn_of = Local_Syntax.syn_of o syntax_of; -val set_syntax_mode = map_syntax o Local_Syntax.set_mode; -val restore_syntax_mode = map_syntax o Local_Syntax.restore_mode o syntax_of; - -val tsig_of = #1 o #tsig o rep_data; -val set_defsort = map_tsig o apfst o Type.set_defsort; -fun default_sort ctxt = the_default (Type.defaultS (tsig_of ctxt)) o Variable.def_sort ctxt; - -val consts_of = #1 o #consts o rep_data; -val cases_of = #cases o rep_data; - - -(* naming *) - -val naming_of = Name_Space.naming_of o Context.Proof; -val map_naming = Context.proof_map o Name_Space.map_naming; -val restore_naming = map_naming o K o naming_of; - -val full_name = Name_Space.full_name o naming_of; - - -(* name spaces *) - -val class_space = Type.class_space o tsig_of; -val type_space = Type.type_space o tsig_of; -val const_space = Consts.space_of o consts_of; - -val intern_class = Name_Space.intern o class_space; -val intern_type = Name_Space.intern o type_space; -val intern_const = Name_Space.intern o const_space; - -fun extern_class ctxt = Name_Space.extern ctxt (class_space ctxt); -fun extern_type ctxt = Name_Space.extern ctxt (type_space ctxt); -fun extern_const ctxt = Name_Space.extern ctxt (const_space ctxt); - -fun markup_class ctxt c = Name_Space.markup_extern ctxt (class_space ctxt) c |-> Markup.markup; -fun markup_type ctxt c = Name_Space.markup_extern ctxt (type_space ctxt) c |-> Markup.markup; -fun markup_const ctxt c = Name_Space.markup_extern ctxt (const_space ctxt) c |-> Markup.markup; - -fun pretty_class ctxt c = Name_Space.markup_extern ctxt (class_space ctxt) c |> Pretty.mark_str; -fun pretty_type ctxt c = Name_Space.markup_extern ctxt (type_space ctxt) c |> Pretty.mark_str; -fun pretty_const ctxt c = Name_Space.markup_extern ctxt (const_space ctxt) c |> Pretty.mark_str; - - -(* theory transfer *) - -fun transfer_syntax thy ctxt = ctxt |> - map_syntax (Local_Syntax.rebuild thy) |> - map_tsig (fn tsig as (local_tsig, global_tsig) => - let val thy_tsig = Sign.tsig_of thy in - if Type.eq_tsig (thy_tsig, global_tsig) then tsig - else (Type.merge_tsig (Context.pretty ctxt) (local_tsig, thy_tsig), thy_tsig) - end) |> - map_consts (fn consts as (local_consts, global_consts) => - let val thy_consts = Sign.consts_of thy in - if Consts.eq_consts (thy_consts, global_consts) then consts - else (Consts.merge (local_consts, thy_consts), thy_consts) - end); - -fun transfer thy = Context.raw_transfer thy #> transfer_syntax thy; - -fun background_theory f ctxt = transfer (f (theory_of ctxt)) ctxt; - -fun background_theory_result f ctxt = - let val (res, thy') = f (theory_of ctxt) - in (res, ctxt |> transfer thy') end; - - -(* hybrid facts *) - -val facts_of = #facts o rep_data; - -fun facts_of_fact ctxt name = - let - val local_facts = facts_of ctxt; - val global_facts = Global_Theory.facts_of (theory_of ctxt); - in - if Facts.defined local_facts name - then local_facts else global_facts - end; - -fun markup_extern_fact ctxt name = - Facts.markup_extern ctxt (facts_of_fact ctxt 
name) name; - - - -(** pretty printing **) - -fun pretty_term_abbrev ctxt = Syntax.pretty_term (set_mode mode_abbrev ctxt); - -fun pretty_fact_name ctxt a = - Pretty.block [Pretty.mark_str (markup_extern_fact ctxt a), Pretty.str ":"]; - -fun pretty_fact ctxt = - let - val pretty_thm = Display.pretty_thm ctxt; - val pretty_thms = map (Display.pretty_thm_item ctxt); - in - fn ("", [th]) => pretty_thm th - | ("", ths) => Pretty.blk (0, Pretty.fbreaks (pretty_thms ths)) - | (a, [th]) => Pretty.block [pretty_fact_name ctxt a, Pretty.brk 1, pretty_thm th] - | (a, ths) => Pretty.block (Pretty.fbreaks (pretty_fact_name ctxt a :: pretty_thms ths)) - end; - - - -(** prepare types **) - -(* classes *) - -fun check_class ctxt (xname, pos) = - let - val tsig = tsig_of ctxt; - val class_space = Type.class_space tsig; - - val name = Type.cert_class tsig (Name_Space.intern class_space xname) - handle TYPE (msg, _, _) => - error (msg ^ Position.here pos ^ - Markup.markup_report (Completion.reported_text - (Name_Space.completion (Context.Proof ctxt) class_space (xname, pos)))); - val reports = - if Context_Position.is_reported ctxt pos - then [(pos, Name_Space.markup class_space name)] else []; - in (name, reports) end; - -fun read_class ctxt text = - let - val (c, reports) = check_class ctxt (Symbol_Pos.source_content (Syntax.read_token text)); - val _ = Position.reports reports; - in c end; - - -(* types *) - -fun read_typ_mode mode ctxt s = - Syntax.read_typ (Type.set_mode mode ctxt) s; - -val read_typ = read_typ_mode Type.mode_default; -val read_typ_syntax = read_typ_mode Type.mode_syntax; -val read_typ_abbrev = read_typ_mode Type.mode_abbrev; - - -fun cert_typ_mode mode ctxt T = - Type.cert_typ_mode mode (tsig_of ctxt) T - handle TYPE (msg, _, _) => error msg; - -val cert_typ = cert_typ_mode Type.mode_default; -val cert_typ_syntax = cert_typ_mode Type.mode_syntax; -val cert_typ_abbrev = cert_typ_mode Type.mode_abbrev; - - - -(** prepare terms and propositions **) - -(* inferred types of parameters *) - -fun infer_type ctxt x = - Term.fastype_of (singleton (Syntax.check_terms (set_mode mode_schematic ctxt)) (Free x)); - -fun inferred_param x ctxt = - let val T = infer_type ctxt (x, dummyT) - in (T, ctxt |> Variable.declare_term (Free (x, T))) end; - -fun inferred_fixes ctxt = - let - val xs = map #2 (Variable.dest_fixes ctxt); - val (Ts, ctxt') = fold_map inferred_param xs ctxt; - in (xs ~~ Ts, ctxt') end; - - -(* type names *) - -fun check_type_name {proper, strict} ctxt (c, pos) = - if Lexicon.is_tid c then - if proper then error ("Not a type constructor: " ^ quote c ^ Position.here pos) - else - let - val reports = - if Context_Position.is_reported ctxt pos - then [(pos, Markup.tfree)] else []; - in (TFree (c, default_sort ctxt (c, ~1)), reports) end - else - let - val ((d, reports), decl) = Type.check_decl (Context.Proof ctxt) (tsig_of ctxt) (c, pos); - fun err () = error ("Bad type name: " ^ quote d ^ Position.here pos); - val args = - (case decl of - Type.LogicalType n => n - | Type.Abbreviation (vs, _, _) => if strict then err () else length vs - | Type.Nonterminal => if strict then err () else 0); - in (Type (d, replicate args dummyT), reports) end; - -fun read_type_name ctxt flags text = - let - val (T, reports) = - check_type_name ctxt flags (Symbol_Pos.source_content (Syntax.read_token text)); - val _ = Position.reports reports; - in T end; - - -(* constant names *) - -fun consts_completion_message ctxt (c, ps) = - ps |> map (fn pos => - Name_Space.completion (Context.Proof ctxt) 
(Consts.space_of (consts_of ctxt)) (c, pos) - |> Completion.reported_text) - |> implode - |> Markup.markup_report; - -fun check_const {proper, strict} ctxt (c, ps) = - let - val _ = - Name.reject_internal (c, ps) handle ERROR msg => - error (msg ^ consts_completion_message ctxt (c, ps)); - fun err msg = error (msg ^ Position.here_list ps); - val consts = consts_of ctxt; - val fixed = if proper then NONE else Variable.lookup_fixed ctxt c; - val (t, reports) = - (case (fixed, Variable.lookup_const ctxt c) of - (SOME x, NONE) => - let - val reports = ps - |> filter (Context_Position.is_reported ctxt) - |> map (fn pos => - (pos, Markup.name x (if Name.is_skolem x then Markup.skolem else Markup.free))); - in (Free (x, infer_type ctxt (x, dummyT)), reports) end - | (_, SOME d) => - let - val T = Consts.type_scheme consts d handle TYPE (msg, _, _) => err msg; - val reports = ps - |> filter (Context_Position.is_reported ctxt) - |> map (fn pos => (pos, Name_Space.markup (Consts.space_of consts) d)); - in (Const (d, T), reports) end - | _ => Consts.check_const (Context.Proof ctxt) consts (c, ps)); - val _ = - (case (strict, t) of - (true, Const (d, _)) => - (ignore (Consts.the_const consts d) handle TYPE (msg, _, _) => err msg) - | _ => ()); - in (t, reports) end; - -fun read_const ctxt flags text = - let - val (xname, pos) = Symbol_Pos.source_content (Syntax.read_token text); - val (t, reports) = check_const ctxt flags (xname, [pos]); - val _ = Position.reports reports; - in t end; - - -(* type arities *) - -local - -fun prep_arity prep_tycon prep_sort ctxt (t, Ss, S) = - let val arity = (prep_tycon ctxt t, map (prep_sort ctxt) Ss, prep_sort ctxt S) - in Type.add_arity (Context.pretty ctxt) arity (tsig_of ctxt); arity end; - -in - -val read_arity = - prep_arity ((#1 o dest_Type) oo read_type_name {proper = true, strict = true}) Syntax.read_sort; - -val cert_arity = prep_arity (K I) (Type.cert_sort o tsig_of); - -end; - - -(* read_term *) - -fun read_term_mode mode ctxt = Syntax.read_term (set_mode mode ctxt); - -val read_term_pattern = read_term_mode mode_pattern; -val read_term_schematic = read_term_mode mode_schematic; -val read_term_abbrev = read_term_mode mode_abbrev; - - -(* local abbreviations *) - -local - -fun certify_consts ctxt = Consts.certify (Context.pretty ctxt) (tsig_of ctxt) - (not (abbrev_mode ctxt)) (consts_of ctxt); - -fun expand_binds ctxt = - let - val Mode {pattern, schematic, ...} = get_mode ctxt; - - fun reject_schematic (t as Var _) = - error ("Unbound schematic variable: " ^ Syntax.string_of_term ctxt t) - | reject_schematic (Abs (_, _, t)) = reject_schematic t - | reject_schematic (t $ u) = (reject_schematic t; reject_schematic u) - | reject_schematic _ = (); - in - if pattern then I - else Variable.expand_binds ctxt #> (if schematic then I else tap reject_schematic) - end; - -in - -fun expand_abbrevs ctxt = certify_consts ctxt #> expand_binds ctxt; - -end; - -val show_abbrevs_raw = Config.declare ("show_abbrevs", @{here}) (fn _ => Config.Bool true); -val show_abbrevs = Config.bool show_abbrevs_raw; - -fun contract_abbrevs ctxt t = - let - val thy = theory_of ctxt; - val consts = consts_of ctxt; - val Mode {abbrev, ...} = get_mode ctxt; - val retrieve = Consts.retrieve_abbrevs consts (print_mode_value () @ [""]); - fun match_abbrev u = Option.map #1 (get_first (Pattern.match_rew thy u) (retrieve u)); - in - if abbrev orelse not (Config.get ctxt show_abbrevs) orelse not (can Term.type_of t) then t - else Pattern.rewrite_term_top thy [] [match_abbrev] t - end; - - -(* 
patterns *) - -fun prepare_patternT ctxt T = - let - val Mode {pattern, schematic, ...} = get_mode ctxt; - val _ = - pattern orelse schematic orelse - T |> Term.exists_subtype - (fn T as TVar (xi, _) => - not (Type_Infer.is_param xi) andalso - error ("Illegal schematic type variable: " ^ Syntax.string_of_typ ctxt T) - | _ => false) - in T end; - - -local - -val dummies = - Config.bool (Config.declare ("Proof_Context.dummies", @{here}) (K (Config.Bool false))); - -fun check_dummies ctxt t = - if Config.get ctxt dummies then t - else Term.no_dummy_patterns t handle TERM _ => error "Illegal dummy pattern(s) in term"; - -fun prepare_dummies ts = #1 (fold_map Term.replace_dummy_patterns ts 1); - -in - -val allow_dummies = Config.put dummies true; - -fun prepare_patterns ctxt = - let val Mode {pattern, ...} = get_mode ctxt in - Type_Infer.fixate ctxt #> - pattern ? Variable.polymorphic ctxt #> - (map o Term.map_types) (prepare_patternT ctxt) #> - (if pattern then prepare_dummies else map (check_dummies ctxt)) - end; - -end; - - -(* sort constraints *) - -local - -fun prepare_sorts_env ctxt tys = - let - val tsig = tsig_of ctxt; - val defaultS = Type.defaultS tsig; - - val dummy_var = ("'_dummy_", ~1); - - fun constraint (xi, raw_S) env = - let val (ps, S) = Term_Position.decode_positionS raw_S in - if xi = dummy_var orelse S = dummyS then env - else - Vartab.insert (op =) (xi, Type.minimize_sort tsig S) env - handle Vartab.DUP _ => - error ("Inconsistent sort constraints for type variable " ^ - quote (Term.string_of_vname' xi) ^ Position.here_list ps) - end; - - val env = - (fold o fold_atyps) - (fn TFree (x, S) => constraint ((x, ~1), S) - | TVar v => constraint v - | _ => I) tys Vartab.empty; - - fun get_sort xi raw_S = - if xi = dummy_var then - Type.minimize_sort tsig (#2 (Term_Position.decode_positionS raw_S)) - else - (case (Vartab.lookup env xi, Variable.def_sort ctxt xi) of - (NONE, NONE) => defaultS - | (NONE, SOME S) => S - | (SOME S, NONE) => S - | (SOME S, SOME S') => - if Type.eq_sort tsig (S, S') then S' - else - error ("Sort constraint " ^ Syntax.string_of_sort ctxt S ^ - " inconsistent with default " ^ Syntax.string_of_sort ctxt S' ^ - " for type variable " ^ quote (Term.string_of_vname' xi))); - - fun add_report S pos reports = - if Position.is_reported pos andalso not (AList.defined (op =) reports pos) then - (pos, Position.reported_text pos Markup.sorting (Syntax.string_of_sort ctxt S)) :: reports - else reports; - - fun get_sort_reports xi raw_S = - let - val ps = #1 (Term_Position.decode_positionS raw_S); - val S = get_sort xi raw_S handle ERROR msg => error (msg ^ Position.here_list ps); - in fold (add_report S) ps end; - - val reports = - (fold o fold_atyps) - (fn T => - if Term_Position.is_positionT T then I - else - (case T of - TFree (x, raw_S) => get_sort_reports (x, ~1) raw_S - | TVar (xi, raw_S) => get_sort_reports xi raw_S - | _ => I)) tys []; - - in (map #2 reports, get_sort) end; - -fun replace_sortsT get_sort = - map_atyps - (fn T => - if Term_Position.is_positionT T then T - else - (case T of - TFree (x, raw_S) => TFree (x, get_sort (x, ~1) raw_S) - | TVar (xi, raw_S) => TVar (xi, get_sort xi raw_S) - | _ => T)); - -in - -fun prepare_sortsT ctxt tys = - let val (sorting_report, get_sort) = prepare_sorts_env ctxt tys - in (sorting_report, map (replace_sortsT get_sort) tys) end; - -fun prepare_sorts ctxt tms = - let - val tys = rev ((fold o fold_types) cons tms []); - val (sorting_report, get_sort) = prepare_sorts_env ctxt tys; - in (sorting_report, (map o 
map_types) (replace_sortsT get_sort) tms) end; - -fun check_tfree ctxt v = - let - val (sorting_report, [TFree a]) = prepare_sortsT ctxt [TFree v]; - val _ = if Context_Position.is_visible ctxt then Output.report sorting_report else (); - in a end; - -end; - - -(* certify terms *) - -local - -fun gen_cert prop ctxt t = - t - |> expand_abbrevs ctxt - |> (fn t' => - #1 (Sign.certify' prop (Context.pretty ctxt) false (consts_of ctxt) (theory_of ctxt) t') - handle TYPE (msg, _, _) => error msg | TERM (msg, _) => error msg); - -in - -val cert_term = gen_cert false; -val cert_prop = gen_cert true; - -end; - - -(* check/uncheck *) - -fun def_type ctxt = - let val Mode {pattern, ...} = get_mode ctxt - in Variable.def_type ctxt pattern end; - -fun standard_typ_check ctxt = - map (cert_typ_mode (Type.get_mode ctxt) ctxt #> prepare_patternT ctxt); - -val standard_term_check_finish = prepare_patterns; - -fun standard_term_uncheck ctxt = map (contract_abbrevs ctxt); - - - -(** export results **) - -fun common_export is_goal inner outer = - map (Assumption.export is_goal inner outer) #> - Variable.export inner outer; - -val goal_export = common_export true; -val export = common_export false; - -fun export_morphism inner outer = - Assumption.export_morphism inner outer $> - Variable.export_morphism inner outer; - -fun norm_export_morphism inner outer = - export_morphism inner outer $> - Morphism.thm_morphism "Proof_Context.norm_export" (Goal.norm_result outer); - - - -(** term bindings **) - -(* simult_matches *) - -fun simult_matches ctxt (t, pats) = - (case Seq.pull (Unify.matchers (theory_of ctxt) (map (rpair t) pats)) of - NONE => error "Pattern match failed!" - | SOME (env, _) => Vartab.fold (fn (v, (_, t)) => cons (v, t)) (Envir.term_env env) []); - - -(* bind_terms *) - -val bind_terms = fold (fn (xi, t) => fn ctxt => - ctxt - |> Variable.bind_term (xi, Option.map (cert_term (set_mode mode_default ctxt)) t)); - - -(* auto_bind *) - -fun drop_schematic (b as (xi, SOME t)) = if Term.exists_subterm is_Var t then (xi, NONE) else b - | drop_schematic b = b; - -fun auto_bind f ts ctxt = ctxt |> bind_terms (map drop_schematic (f (theory_of ctxt) ts)); - -val auto_bind_goal = auto_bind Auto_Bind.goal; -val auto_bind_facts = auto_bind Auto_Bind.facts; - - -(* match_bind(_i) *) - -local - -fun gen_bind prep_terms gen raw_binds ctxt = - let - fun prep_bind (raw_pats, t) ctxt1 = - let - val T = Term.fastype_of t; - val ctxt2 = Variable.declare_term t ctxt1; - val pats = prep_terms (set_mode mode_pattern ctxt2) T raw_pats; - val binds = simult_matches ctxt2 (t, pats); - in (binds, ctxt2) end; - - val ts = prep_terms ctxt dummyT (map snd raw_binds); - val (binds, ctxt') = apfst flat (fold_map prep_bind (map fst raw_binds ~~ ts) ctxt); - val binds' = - if gen then map #1 binds ~~ Variable.exportT_terms ctxt' ctxt (map #2 binds) - else binds; - val binds'' = map (apsnd SOME) binds'; - val ctxt'' = - tap (Variable.warn_extra_tfrees ctxt) - (if gen then - ctxt (*sic!*) |> fold Variable.declare_term (map #2 binds') |> bind_terms binds'' - else ctxt' |> bind_terms binds''); - in (ts, ctxt'') end; - -in - -fun read_terms ctxt T = - map (Syntax.parse_term ctxt #> Type.constraint T) #> Syntax.check_terms ctxt; - -val match_bind = gen_bind read_terms; -val match_bind_i = gen_bind (fn ctxt => fn _ => map (cert_term ctxt)); - -end; - - -(* propositions with patterns *) - -local - -fun prep_propp mode prep_props args context = - let - fun prep (_, raw_pats) (ctxt, prop :: props) = - let val ctxt' = Variable.declare_term 
prop ctxt - in ((prop, prep_props (set_mode mode_pattern ctxt') raw_pats), (ctxt', props)) end; - - val (propp, (context', _)) = - (fold_map o fold_map) prep args - (context, prep_props (set_mode mode context) (maps (map fst) args)); - in (propp, context') end; - -fun gen_bind_propp mode parse_prop raw_args ctxt = - let - val (args, ctxt') = prep_propp mode parse_prop raw_args ctxt; - val binds = flat (flat (map (map (simult_matches ctxt')) args)); - val propss = map (map #1) args; - fun gen_binds ctxt0 = ctxt0 - |> bind_terms (map #1 binds ~~ - map (SOME o Term.close_schematic_term) (Variable.export_terms ctxt' ctxt0 (map #2 binds))); - in ((propss, gen_binds), ctxt' |> bind_terms (map (apsnd SOME) binds)) end; - -in - -val read_propp = prep_propp mode_default Syntax.read_props; -val cert_propp = prep_propp mode_default (map o cert_prop); -val read_propp_schematic = prep_propp mode_schematic Syntax.read_props; -val cert_propp_schematic = prep_propp mode_schematic (map o cert_prop); - -val bind_propp = gen_bind_propp mode_default Syntax.read_props; -val bind_propp_i = gen_bind_propp mode_default (map o cert_prop); -val bind_propp_schematic = gen_bind_propp mode_schematic Syntax.read_props; -val bind_propp_schematic_i = gen_bind_propp mode_schematic (map o cert_prop); - -end; - - - -(** theorems **) - -(* fact_tac *) - -local - -fun comp_hhf_tac th i st = - PRIMSEQ (Thm.bicompose {flatten = true, match = false, incremented = true} - (false, Drule.lift_all (Thm.cprem_of st i) th, 0) i) st; - -fun comp_incr_tac [] _ = no_tac - | comp_incr_tac (th :: ths) i = - (fn st => comp_hhf_tac (Drule.incr_indexes st th) i st) APPEND comp_incr_tac ths i; - -in - -fun fact_tac ctxt facts = Goal.norm_hhf_tac ctxt THEN' comp_incr_tac facts; - -fun potential_facts ctxt prop = - Facts.could_unify (facts_of ctxt) (Term.strip_all_body prop); - -fun some_fact_tac ctxt = SUBGOAL (fn (goal, i) => fact_tac ctxt (potential_facts ctxt goal) i); - -end; - - -(* get facts *) - -local - -fun retrieve_global context = - Facts.retrieve context (Global_Theory.facts_of (Context.theory_of context)); - -fun retrieve_generic (context as Context.Proof ctxt) arg = - (Facts.retrieve context (facts_of ctxt) arg handle ERROR local_msg => - (retrieve_global context arg handle ERROR _ => error local_msg)) - | retrieve_generic context arg = retrieve_global context arg; - -fun retrieve pick context (Facts.Fact s) = - let - val ctxt = Context.the_proof context; - val pos = Syntax.read_token_pos s; - val prop = - Syntax.read_prop (ctxt |> set_mode mode_default |> allow_dummies) s - |> singleton (Variable.polymorphic ctxt); - fun err msg = error (msg ^ Position.here pos ^ ":\n" ^ Syntax.string_of_term ctxt prop); - - val (prop', _) = Term.replace_dummy_patterns prop (Variable.maxidx_of ctxt + 1); - fun prove_fact th = - Goal.prove ctxt [] [] prop' (K (ALLGOALS (fact_tac ctxt [th]))); - val results = map_filter (try prove_fact) (potential_facts ctxt prop'); - val thm = - (case distinct Thm.eq_thm_prop results of - [thm] => thm - | [] => err "Failed to retrieve literal fact" - | _ => err "Ambiguous specification of literal fact"); - in pick ("", Position.none) [thm] end - | retrieve pick context (Facts.Named ((xname, pos), ivs)) = - let - val thy = Context.theory_of context; - val (name, thms) = - (case xname of - "" => (xname, [Thm.transfer thy Drule.dummy_thm]) - | "_" => (xname, [Thm.transfer thy Drule.asm_rl]) - | _ => retrieve_generic context (xname, pos)); - in pick (name, pos) (Facts.select (Facts.Named ((name, pos), ivs)) thms) end; 
- -in - -val get_fact_generic = retrieve (K I); -val get_fact = retrieve (K I) o Context.Proof; -val get_fact_single = retrieve Facts.the_single o Context.Proof; - -fun get_thms ctxt = get_fact ctxt o Facts.named; -fun get_thm ctxt = get_fact_single ctxt o Facts.named; - -end; - - -(* facts *) - -local - -fun update_thms _ (b, NONE) ctxt = ctxt |> map_facts (Facts.del (full_name ctxt b)) - | update_thms flags (b, SOME ths) ctxt = ctxt |> map_facts - (Facts.add_static (Context.Proof ctxt) flags (b, ths) #> snd); - -in - -fun note_thmss kind = fold_map (fn ((b, more_atts), raw_facts) => fn ctxt => - let - val name = full_name ctxt b; - val facts = Global_Theory.name_thmss false name raw_facts; - fun app (ths, atts) = - fold_map (Thm.proof_attributes (surround (Thm.kind kind) (atts @ more_atts))) ths; - val (res, ctxt') = fold_map app facts ctxt; - val thms = Global_Theory.name_thms false false name (flat res); - val Mode {stmt, ...} = get_mode ctxt; - in ((name, thms), ctxt' |> update_thms {strict = false, index = stmt} (b, SOME thms)) end); - -fun put_thms index thms ctxt = ctxt - |> map_naming (K Name_Space.local_naming) - |> Context_Position.set_visible false - |> update_thms {strict = false, index = index} (apfst Binding.name thms) - |> Context_Position.restore_visible ctxt - |> restore_naming ctxt; - -end; - - - -(** basic logical entities **) - -(* variables *) - -fun declare_var (x, opt_T, mx) ctxt = - let val T = (case opt_T of SOME T => T | NONE => Mixfix.mixfixT mx) - in ((x, T, mx), ctxt |> Variable.declare_constraints (Free (x, T))) end; - -fun check_var internal b = - let - val x = Variable.check_name b; - val check = if internal then Name.reject_skolem else Name.reject_internal; - val _ = - if can check (x, []) andalso Symbol_Pos.is_identifier x then () - else error ("Bad name: " ^ Binding.print b); - in x end; - -local - -fun prep_vars prep_typ internal = - fold_map (fn (b, raw_T, mx) => fn ctxt => - let - val x = check_var internal b; - fun cond_tvars T = - if internal then T - else Type.no_tvars T handle TYPE (msg, _, _) => error msg; - val opt_T = Option.map (cond_tvars o cert_typ ctxt o prep_typ ctxt) raw_T; - val (_, ctxt') = ctxt |> declare_var (x, opt_T, mx); - in ((b, opt_T, mx), ctxt') end); - -in - -val read_vars = prep_vars Syntax.read_typ false; -val cert_vars = prep_vars (K I) true; - -end; - - -(* notation *) - -local - -fun type_syntax (Type (c, args), mx) = - SOME (Local_Syntax.Type, (Lexicon.mark_type c, Mixfix.make_type (length args), mx)) - | type_syntax _ = NONE; - -fun const_syntax _ (Free (x, T), mx) = SOME (Local_Syntax.Fixed, (x, T, mx)) - | const_syntax ctxt (Const (c, _), mx) = - (case try (Consts.type_scheme (consts_of ctxt)) c of - SOME T => SOME (Local_Syntax.Const, (Lexicon.mark_const c, T, mx)) - | NONE => NONE) - | const_syntax _ _ = NONE; - -fun gen_notation syntax add mode args ctxt = - ctxt |> map_syntax - (Local_Syntax.update_modesyntax (theory_of ctxt) add mode (map_filter (syntax ctxt) args)); - -in - -val type_notation = gen_notation (K type_syntax); -val notation = gen_notation const_syntax; - -fun generic_type_notation add mode args phi = - let - val args' = args |> map_filter (fn (T, mx) => - let - val T' = Morphism.typ phi T; - val similar = (case (T, T') of (Type (c, _), Type (c', _)) => c = c' | _ => false); - in if similar then SOME (T', mx) else NONE end); - in Context.mapping (Sign.type_notation add mode args') (type_notation add mode args') end; - -fun generic_notation add mode args phi = - let - val args' = args |> map_filter 
(fn (t, mx) => - let val t' = Morphism.term phi t - in if Term.aconv_untyped (t, t') then SOME (t', mx) else NONE end); - in Context.mapping (Sign.notation add mode args') (notation add mode args') end; - -end; - - -(* aliases *) - -fun class_alias b c ctxt = (map_tsig o apfst) (Type.class_alias (naming_of ctxt) b c) ctxt; -fun type_alias b c ctxt = (map_tsig o apfst) (Type.type_alias (naming_of ctxt) b c) ctxt; -fun const_alias b c ctxt = (map_consts o apfst) (Consts.alias (naming_of ctxt) b c) ctxt; - - -(* local constants *) - -fun add_const_constraint (c, opt_T) ctxt = - let - fun prepT raw_T = - let val T = cert_typ ctxt raw_T - in cert_term ctxt (Const (c, T)); T end; - in ctxt |> (map_consts o apfst) (Consts.constrain (c, Option.map prepT opt_T)) end; - -fun add_abbrev mode (b, raw_t) ctxt = - let - val t0 = cert_term (ctxt |> set_mode mode_abbrev) raw_t - handle ERROR msg => cat_error msg ("in constant abbreviation " ^ Binding.print b); - val [t] = Variable.exportT_terms (Variable.declare_term t0 ctxt) ctxt [t0]; - val ((lhs, rhs), consts') = consts_of ctxt - |> Consts.abbreviate (Context.Proof ctxt) (tsig_of ctxt) mode (b, t); - in - ctxt - |> (map_consts o apfst) (K consts') - |> Variable.declare_term rhs - |> pair (lhs, rhs) - end; - -fun revert_abbrev mode c = (map_consts o apfst) (Consts.revert_abbrev mode c); - -fun generic_add_abbrev mode arg = - Context.mapping_result (Sign.add_abbrev mode arg) (add_abbrev mode arg); - -fun generic_revert_abbrev mode arg = - Context.mapping (Sign.revert_abbrev mode arg) (revert_abbrev mode arg); - - -(* fixes *) - -fun add_fixes raw_vars ctxt = - let - val thy = theory_of ctxt; - val vars = #1 (cert_vars raw_vars ctxt); - in - ctxt - |> Variable.add_fixes_binding (map #1 vars) - |-> (fn xs => - fold_map declare_var (map2 (fn x => fn (_, T, mx) => (x, T, mx)) xs vars) - #-> (map_syntax o Local_Syntax.add_syntax thy o map (pair Local_Syntax.Fixed)) - #> pair xs) - end; - - - -(** assumptions **) - -local - -fun gen_assms prepp exp args ctxt = - let - val cert = Thm.cterm_of (theory_of ctxt); - val ((propss, _), ctxt1) = prepp (map snd args) ctxt; - val _ = Variable.warn_extra_tfrees ctxt ctxt1; - val (premss, ctxt2) = fold_burrow (Assumption.add_assms exp o map cert) propss ctxt1; - in - ctxt2 - |> auto_bind_facts (flat propss) - |> note_thmss "" (map fst args ~~ map (map (fn th => ([th], []))) premss) - end; - -in - -val add_assms = gen_assms bind_propp; -val add_assms_i = gen_assms bind_propp_i; - -end; - - - -(** cases **) - -fun dest_cases ctxt = - Name_Space.fold_table (fn (a, (c, i)) => cons (i, (a, c))) (cases_of ctxt) [] - |> sort (int_ord o pairself #1) |> map #2; - -local - -fun update_case _ _ ("", _) res = res - | update_case _ _ (name, NONE) (cases, index) = - (Name_Space.del_table name cases, index) - | update_case context is_proper (name, SOME c) (cases, index) = - let - val binding = - Binding.make (name, Position.none) - |> not is_proper ? 
Binding.conceal; - val (_, cases') = cases - |> Name_Space.define context false (binding, ((c, is_proper), index)); - val index' = index + 1; - in (cases', index') end; - -fun fix (b, T) ctxt = - let val ([x], ctxt') = add_fixes [(b, SOME T, NoSyn)] ctxt - in (Free (x, T), ctxt') end; - -in - -fun update_cases is_proper args ctxt = - let - val context = Context.Proof ctxt |> Name_Space.map_naming (K Name_Space.default_naming); - val cases = cases_of ctxt; - val index = Name_Space.fold_table (fn _ => Integer.add 1) cases 0; - val (cases', _) = fold (update_case context is_proper) args (cases, index); - in map_cases (K cases') ctxt end; - -fun case_result c ctxt = - let - val Rule_Cases.Case {fixes, ...} = c; - val (ts, ctxt') = ctxt |> fold_map fix fixes; - val Rule_Cases.Case {assumes, binds, cases, ...} = Rule_Cases.apply ts c; - in - ctxt' - |> bind_terms (map drop_schematic binds) - |> update_cases true (map (apsnd SOME) cases) - |> pair (assumes, (binds, cases)) - end; - -val apply_case = apfst fst oo case_result; - -fun check_case ctxt internal (name, pos) fxs = - let - val (_, ((Rule_Cases.Case {fixes, assumes, binds, cases}, is_proper), _)) = - Name_Space.check (Context.Proof ctxt) (cases_of ctxt) (name, pos); - val _ = if is_proper then () else Context_Position.report ctxt pos Markup.improper; - - val _ = List.app (fn NONE => () | SOME b => ignore (check_var internal b)) fxs; - fun replace (opt_x :: xs) ((y, T) :: ys) = (the_default y opt_x, T) :: replace xs ys - | replace [] ys = ys - | replace (_ :: _) [] = - error ("Too many parameters for case " ^ quote name ^ Position.here pos); - val fixes' = replace fxs fixes; - val binds' = map drop_schematic binds; - in - if null (fold (Term.add_tvarsT o snd) fixes []) andalso - null (fold (fold Term.add_vars o snd) assumes []) then - Rule_Cases.Case {fixes = fixes', assumes = assumes, binds = binds', cases = cases} - else error ("Illegal schematic variable(s) in case " ^ quote name ^ Position.here pos) - end; - -end; - - - -(** print context information **) - -(* local syntax *) - -val print_syntax = Syntax.print_syntax o syn_of; - - -(* abbreviations *) - -fun pretty_abbrevs show_globals ctxt = - let - val space = const_space ctxt; - val (constants, global_constants) = - pairself (#constants o Consts.dest) (#consts (rep_data ctxt)); - val globals = Symtab.make global_constants; - fun add_abbr (_, (_, NONE)) = I - | add_abbr (c, (T, SOME t)) = - if not show_globals andalso Symtab.defined globals c then I - else cons (c, Logic.mk_equals (Const (c, T), t)); - val abbrevs = Name_Space.markup_entries ctxt space (fold add_abbr constants []); - in - if null abbrevs then [] - else [Pretty.big_list "abbreviations:" (map (pretty_term_abbrev ctxt o #2) abbrevs)] - end; - -val print_abbrevs = Pretty.writeln_chunks o pretty_abbrevs true; - - -(* term bindings *) - -fun pretty_term_bindings ctxt = - let - val binds = Variable.binds_of ctxt; - fun prt_bind (xi, (T, t)) = pretty_term_abbrev ctxt (Logic.mk_equals (Var (xi, T), t)); - in - if Vartab.is_empty binds then [] - else [Pretty.big_list "term bindings:" (map prt_bind (Vartab.dest binds))] - end; - - -(* local facts *) - -fun pretty_local_facts ctxt verbose = - let - val facts = facts_of ctxt; - val props = Facts.props facts; - val local_facts = - (if null props then [] else [("", props)]) @ - Facts.dest_static verbose [Global_Theory.facts_of (theory_of ctxt)] facts; - in - if null local_facts then [] - else - [Pretty.big_list "local facts:" - (map #1 (sort_wrt (#1 o #2) (map (`(pretty_fact ctxt)) 
local_facts)))] - end; - -fun print_local_facts ctxt verbose = - Pretty.writeln_chunks (pretty_local_facts ctxt verbose); - - -(* local contexts *) - -local - -fun pretty_case (name, (fixes, ((asms, (lets, cs)), ctxt))) = - let - val prt_term = Syntax.pretty_term ctxt; - - fun prt_let (xi, t) = Pretty.block - [Pretty.quote (prt_term (Var (xi, Term.fastype_of t))), Pretty.str " =", Pretty.brk 1, - Pretty.quote (prt_term t)]; - - fun prt_asm (b, ts) = Pretty.block (Pretty.breaks - ((if Binding.is_empty b then [] - else [Binding.pretty b, Pretty.str ":"]) @ map (Pretty.quote o prt_term) ts)); - - fun prt_sect _ _ _ [] = [] - | prt_sect head sep prt xs = - [Pretty.block (Pretty.breaks (head :: - flat (separate sep (map (single o prt) xs))))]; - in - Pretty.block (Pretty.fbreaks - (Pretty.str (name ^ ":") :: - prt_sect (Pretty.keyword1 "fix") [] (Pretty.str o Binding.name_of o fst) fixes @ - prt_sect (Pretty.keyword1 "let") [Pretty.keyword2 "and"] prt_let - (map_filter (fn (xi, SOME t) => SOME (xi, t) | _ => NONE) lets) @ - (if forall (null o #2) asms then [] - else prt_sect (Pretty.keyword1 "assume") [Pretty.keyword2 "and"] prt_asm asms) @ - prt_sect (Pretty.str "subcases:") [] (Pretty.str o fst) cs)) - end; - -in - -fun pretty_cases ctxt = - let - fun mk_case (_, (_, false)) = NONE - | mk_case (name, (c as Rule_Cases.Case {fixes, ...}, true)) = - SOME (name, (fixes, case_result c ctxt)); - val cases = dest_cases ctxt |> map_filter mk_case; - in - if null cases then [] - else [Pretty.big_list "cases:" (map pretty_case cases)] - end; - -end; - - -(* core context *) - -val debug = - Config.bool (Config.declare ("Proof_Context.debug", @{here}) (K (Config.Bool false))); - -val verbose = - Config.bool (Config.declare ("Proof_Context.verbose", @{here}) (K (Config.Bool false))); - -fun pretty_ctxt ctxt = - if not (Config.get ctxt debug) then [] - else - let - val prt_term = Syntax.pretty_term ctxt; - - (*structures*) - val {structs, ...} = Local_Syntax.idents_of (syntax_of ctxt); - val prt_structs = - if null structs then [] - else [Pretty.block (Pretty.str "structures:" :: Pretty.brk 1 :: - Pretty.commas (map Pretty.str structs))]; - - (*fixes*) - fun prt_fix (x, x') = - if x = x' then Pretty.str x - else Pretty.block [Pretty.str x, Pretty.str " =", Pretty.brk 1, prt_term (Syntax.free x')]; - val fixes = - filter_out ((Name.is_internal orf member (op =) structs) o #1) - (Variable.dest_fixes ctxt); - val prt_fixes = - if null fixes then [] - else [Pretty.block (Pretty.str "fixed variables:" :: Pretty.brk 1 :: - Pretty.commas (map prt_fix fixes))]; - - (*prems*) - val prt_prems = - (case Assumption.all_prems_of ctxt of - [] => [] - | prems => [Pretty.big_list "prems:" [pretty_fact ctxt ("", prems)]]); - in prt_structs @ prt_fixes @ prt_prems end; - - -(* main context *) - -fun pretty_context ctxt = - let - val verbose = Config.get ctxt verbose; - fun verb f x = if verbose then f (x ()) else []; - - val prt_term = Syntax.pretty_term ctxt; - val prt_typ = Syntax.pretty_typ ctxt; - val prt_sort = Syntax.pretty_sort ctxt; - - (*theory*) - val pretty_thy = Pretty.block - [Pretty.str "theory:", Pretty.brk 1, Context.pretty_thy (theory_of ctxt)]; - - (*defaults*) - fun prt_atom prt prtT (x, X) = Pretty.block - [prt x, Pretty.str " ::", Pretty.brk 1, prtT X]; - - fun prt_var (x, ~1) = prt_term (Syntax.free x) - | prt_var xi = prt_term (Syntax.var xi); - - fun prt_varT (x, ~1) = prt_typ (TFree (x, [])) - | prt_varT xi = prt_typ (TVar (xi, [])); - - val prt_defT = prt_atom prt_var prt_typ; - val prt_defS = 
prt_atom prt_varT prt_sort; - - val (types, sorts) = Variable.constraints_of ctxt; - in - verb single (K pretty_thy) @ - pretty_ctxt ctxt @ - verb (pretty_abbrevs false) (K ctxt) @ - verb pretty_term_bindings (K ctxt) @ - verb (pretty_local_facts ctxt) (K true) @ - verb pretty_cases (K ctxt) @ - verb single (fn () => Pretty.big_list "type constraints:" (map prt_defT (Vartab.dest types))) @ - verb single (fn () => Pretty.big_list "default sorts:" (map prt_defS (Vartab.dest sorts))) - end; - -end; - -val show_abbrevs = Proof_Context.show_abbrevs; - diff --git a/core/Pure/Isar/proof_display.ML b/core/Pure/Isar/proof_display.ML deleted file mode 100644 index d05ef3c6..00000000 --- a/core/Pure/Isar/proof_display.ML +++ /dev/null @@ -1,189 +0,0 @@ -(* Title: Pure/Isar/proof_display.ML - Author: Makarius - -Printing of theorems, goals, results etc. -*) - -signature PROOF_DISPLAY = -sig - val pp_context: Proof.context -> Pretty.T - val pp_thm: thm -> Pretty.T - val pp_typ: theory -> typ -> Pretty.T - val pp_term: theory -> term -> Pretty.T - val pp_ctyp: ctyp -> Pretty.T - val pp_cterm: cterm -> Pretty.T - val pretty_theorems_diff: bool -> theory list -> theory -> Pretty.T list - val pretty_theorems: bool -> theory -> Pretty.T list - val pretty_full_theory: bool -> theory -> Pretty.T - val print_theory: theory -> unit - val string_of_rule: Proof.context -> string -> thm -> string - val pretty_goal_header: thm -> Pretty.T - val string_of_goal: Proof.context -> thm -> string - val pretty_goal_facts: Proof.context -> string -> thm list -> Pretty.T - val method_error: string -> Position.T -> - {context: Proof.context, facts: thm list, goal: thm} -> 'a Seq.result - val print_results: bool -> Position.T -> Proof.context -> - ((string * string) * (string * thm list) list) -> unit - val print_consts: bool -> Position.T -> Proof.context -> - (string * typ -> bool) -> (string * typ) list -> unit -end - -structure Proof_Display: PROOF_DISPLAY = -struct - -(* toplevel pretty printing *) - -fun pp_context ctxt = - (if Config.get ctxt Proof_Context.debug then - Pretty.quote (Pretty.big_list "proof context:" (Proof_Context.pretty_context ctxt)) - else Pretty.str ""); - -fun default_context thy0 = - (case Context.thread_data () of - SOME (Context.Proof ctxt) => ctxt - | SOME (Context.Theory thy) => - (case try Syntax.init_pretty_global thy of - SOME ctxt => ctxt - | NONE => Syntax.init_pretty_global thy0) - | NONE => Syntax.init_pretty_global thy0); - -fun pp_thm th = - let val ctxt = default_context (Thm.theory_of_thm th); - in Display.pretty_thm_raw ctxt {quote = true, show_hyps = false} th end; - -fun pp_typ thy T = Pretty.quote (Syntax.pretty_typ (default_context thy) T); -fun pp_term thy t = Pretty.quote (Syntax.pretty_term (default_context thy) t); - -fun pp_ctyp cT = pp_typ (Thm.theory_of_ctyp cT) (Thm.typ_of cT); -fun pp_cterm ct = pp_term (Thm.theory_of_cterm ct) (Thm.term_of ct); - - -(* theorems and theory *) - -fun pretty_theorems_diff verbose prev_thys thy = - let - val pretty_fact = Proof_Context.pretty_fact (Proof_Context.init_global thy); - val facts = Global_Theory.facts_of thy; - val thmss = Facts.dest_static verbose (map Global_Theory.facts_of prev_thys) facts; - val prts = map #1 (sort_wrt (#1 o #2) (map (`pretty_fact) thmss)); - in if null prts then [] else [Pretty.big_list "theorems:" prts] end; - -fun pretty_theorems verbose thy = - pretty_theorems_diff verbose (Theory.parents_of thy) thy; - -fun pretty_full_theory verbose thy = - Pretty.chunks (Display.pretty_full_theory verbose thy @ 
pretty_theorems verbose thy); - -val print_theory = Pretty.writeln o pretty_full_theory false; - - -(* refinement rule *) - -fun pretty_rule ctxt s thm = - Pretty.block [Pretty.str (s ^ " attempt to solve goal by exported rule:"), - Pretty.fbrk, Display.pretty_thm ctxt thm]; - -val string_of_rule = Pretty.string_of ooo pretty_rule; - - -(* goals *) - -local - -fun subgoals 0 = [] - | subgoals 1 = [Pretty.brk 1, Pretty.str "(1 subgoal)"] - | subgoals n = [Pretty.brk 1, Pretty.str ("(" ^ string_of_int n ^ " subgoals)")]; - -in - -fun pretty_goal_header goal = - Pretty.block ([Pretty.keyword1 "goal"] @ subgoals (Thm.nprems_of goal) @ [Pretty.str ":"]); - -end; - -fun string_of_goal ctxt goal = - Pretty.string_of (Pretty.chunks [pretty_goal_header goal, Goal_Display.pretty_goal ctxt goal]); - - -(* goal facts *) - -fun pretty_goal_facts ctxt s ths = - (Pretty.block o Pretty.fbreaks) - [if s = "" then Pretty.str "this:" - else Pretty.block [Pretty.keyword1 s, Pretty.brk 1, Pretty.str "this:"], - Proof_Context.pretty_fact ctxt ("", ths)]; - - -(* method_error *) - -fun method_error kind pos {context = ctxt, facts, goal} = Seq.Error (fn () => - let - val pr_header = - "Failed to apply " ^ (if kind = "" then "" else kind ^ " ") ^ - "proof method" ^ Position.here pos ^ ":\n"; - val pr_facts = - if null facts then "" - else Pretty.string_of (pretty_goal_facts ctxt "using" facts) ^ "\n"; - val pr_goal = string_of_goal ctxt goal; - in pr_header ^ pr_facts ^ pr_goal end); - - -(* results *) - -fun position_markup pos = Pretty.mark (Position.markup pos Markup.position); - -local - -fun pretty_fact_name pos (kind, "") = position_markup pos (Pretty.keyword1 kind) - | pretty_fact_name pos (kind, name) = - Pretty.block [position_markup pos (Pretty.keyword1 kind), Pretty.brk 1, - Pretty.str (Long_Name.base_name name), Pretty.str ":"]; - -fun pretty_facts ctxt = - flat o (separate [Pretty.fbrk, Pretty.keyword2 "and", Pretty.str " "]) o - map (single o Proof_Context.pretty_fact ctxt); - -in - -fun print_results do_print pos ctxt ((kind, name), facts) = - if not do_print orelse kind = "" then () - else if name = "" then - (Pretty.writeln o Pretty.mark Markup.state) - (Pretty.block (position_markup pos (Pretty.keyword1 kind) :: Pretty.brk 1 :: - pretty_facts ctxt facts)) - else - (Pretty.writeln o Pretty.mark Markup.state) - (case facts of - [fact] => Pretty.blk (1, [pretty_fact_name pos (kind, name), Pretty.fbrk, - Proof_Context.pretty_fact ctxt fact]) - | _ => Pretty.blk (1, [pretty_fact_name pos (kind, name), Pretty.fbrk, - Pretty.blk (1, Pretty.str "(" :: pretty_facts ctxt facts @ [Pretty.str ")"])])); - -end; - - -(* consts *) - -local - -fun pretty_var ctxt (x, T) = - Pretty.block [Pretty.str x, Pretty.str " ::", Pretty.brk 1, - Pretty.quote (Syntax.pretty_typ ctxt T)]; - -fun pretty_vars pos ctxt kind vs = - Pretty.block (Pretty.fbreaks (position_markup pos (Pretty.str kind) :: map (pretty_var ctxt) vs)); - -fun pretty_consts pos ctxt pred cs = - (case filter pred (#1 (Proof_Context.inferred_fixes ctxt)) of - [] => pretty_vars pos ctxt "constants" cs - | ps => Pretty.chunks [pretty_vars pos ctxt "parameters" ps, pretty_vars pos ctxt "constants" cs]); - -in - -fun print_consts do_print pos ctxt pred cs = - if not do_print orelse null cs then () - else Pretty.writeln (Pretty.mark Markup.state (pretty_consts pos ctxt pred cs)); - -end; - -end; diff --git a/core/Pure/Isar/proof_node.ML b/core/Pure/Isar/proof_node.ML deleted file mode 100644 index 5e844466..00000000 --- a/core/Pure/Isar/proof_node.ML +++ 
/dev/null @@ -1,49 +0,0 @@ -(* Title: Pure/Isar/proof_node.ML - Author: Makarius - -Proof nodes with linear position and backtracking. -*) - -signature PROOF_NODE = -sig - type T - val init: Proof.state -> T - val current: T -> Proof.state - val position: T -> int - val back: T -> T - val applys: (Proof.state -> Proof.state Seq.result Seq.seq) -> T -> T - val apply: (Proof.state -> Proof.state) -> T -> T -end; - -structure Proof_Node: PROOF_NODE = -struct - -(* datatype *) - -datatype T = Proof_Node of - (Proof.state * (*first result*) - Proof.state Seq.seq) * (*alternative results*) - int; (*linear proof position*) - -fun init st = Proof_Node ((st, Seq.empty), 0); - -fun current (Proof_Node ((st, _), _)) = st; -fun position (Proof_Node (_, n)) = n; - - -(* backtracking *) - -fun back (Proof_Node ((_, stq), n)) = - (case Seq.pull stq of - NONE => error "back: no alternatives" - | SOME res => Proof_Node (res, n)); - - -(* apply transformer *) - -fun applys f (Proof_Node ((st, _), n)) = - Proof_Node (Seq.first_result "Empty result sequence -- proof command failed" (f st), n + 1); - -fun apply f = applys (Seq.single o Seq.Result o f); - -end; diff --git a/core/Pure/Isar/rule_cases.ML b/core/Pure/Isar/rule_cases.ML deleted file mode 100644 index 4dfe4ab0..00000000 --- a/core/Pure/Isar/rule_cases.ML +++ /dev/null @@ -1,469 +0,0 @@ -(* Title: Pure/Isar/rule_cases.ML - Author: Markus Wenzel, TU Muenchen - -Annotations and local contexts of rules. -*) - -infix 1 THEN_ALL_NEW_CASES; - -signature BASIC_RULE_CASES = -sig - type cases - type cases_tactic - val CASES: cases -> tactic -> cases_tactic - val NO_CASES: tactic -> cases_tactic - val SUBGOAL_CASES: ((term * int * thm) -> cases_tactic) -> int -> cases_tactic - val THEN_ALL_NEW_CASES: (int -> cases_tactic) * (int -> tactic) -> int -> cases_tactic -end - -signature RULE_CASES = -sig - include BASIC_RULE_CASES - datatype T = Case of - {fixes: (binding * typ) list, - assumes: (binding * term list) list, - binds: (indexname * term option) list, - cases: (string * T) list} - val case_hypsN: string - val strip_params: term -> (string * typ) list - val make_common: theory * term -> - ((string * string list) * string list) list -> cases - val make_nested: term -> theory * term -> - ((string * string list) * string list) list -> cases - val apply: term list -> T -> T - val consume: Proof.context -> thm list -> thm list -> ('a * int) * thm -> - (('a * (int * thm list)) * thm) Seq.seq - val get_consumes: thm -> int - val put_consumes: int option -> thm -> thm - val add_consumes: int -> thm -> thm - val default_consumes: int -> thm -> thm - val consumes: int -> attribute - val get_constraints: thm -> int - val constraints: int -> attribute - val name: string list -> thm -> thm - val case_names: string list -> attribute - val cases_hyp_names: string list -> string list list -> attribute - val case_conclusion: string * string list -> attribute - val is_inner_rule: thm -> bool - val inner_rule: attribute - val save: thm -> thm -> thm - val get: thm -> ((string * string list) * string list) list * int - val rename_params: string list list -> thm -> thm - val params: string list list -> attribute - val internalize_params: thm -> thm - val mutual_rule: Proof.context -> thm list -> (int list * thm) option - val strict_mutual_rule: Proof.context -> thm list -> int list * thm -end; - -structure Rule_Cases: RULE_CASES = -struct - -(** cases **) - -datatype T = Case of - {fixes: (binding * typ) list, - assumes: (binding * term list) list, - binds: (indexname * term 
option) list, - cases: (string * T) list}; - -type cases = (string * T option) list; - -val case_conclN = "case"; -val case_hypsN = "hyps"; -val case_premsN = "prems"; - -val strip_params = map (apfst (perhaps (try Name.dest_skolem))) o Logic.strip_params; - -local - -fun app us t = Term.betapplys (t, us); - -fun dest_binops cs tm = - let - val n = length cs; - fun dest 0 _ = [] - | dest 1 t = [t] - | dest k (_ $ t $ u) = t :: dest (k - 1) u - | dest _ _ = raise TERM ("Expected " ^ string_of_int n ^ " binop arguments", [tm]); - in cs ~~ dest n tm end; - -fun extract_fixes NONE prop = (strip_params prop, []) - | extract_fixes (SOME outline) prop = - chop (length (Logic.strip_params outline)) (strip_params prop); - -fun extract_assumes _ _ NONE prop = ([(Binding.empty, Logic.strip_assums_hyp prop)], []) - | extract_assumes qualifier hyp_names (SOME outline) prop = - let - val qual = Binding.qualify true qualifier o Binding.name; - val (hyps, prems) = - chop (length (Logic.strip_assums_hyp outline)) (Logic.strip_assums_hyp prop) - fun extract ((hyp_name, hyp) :: rest) = (qual hyp_name, hyp :: map snd rest); - val (hyps1, hyps2) = chop (length hyp_names) hyps; - val pairs1 = if null hyps1 then [] else hyp_names ~~ hyps1; - val pairs = pairs1 @ map (pair case_hypsN) hyps2; - val named_hyps = map extract (partition_eq (eq_fst op =) pairs); - in (named_hyps, [(qual case_premsN, prems)]) end; - -fun bindings args = map (apfst Binding.name) args; - -fun extract_case thy (case_outline, raw_prop) name hyp_names concls = - let - val props = Logic.dest_conjunctions (Drule.norm_hhf thy raw_prop); - val len = length props; - val nested = is_some case_outline andalso len > 1; - - fun extract prop = - let - val (fixes1, fixes2) = extract_fixes case_outline prop; - val abs_fixes = fold_rev Term.abs (fixes1 @ fixes2); - fun abs_fixes1 t = - if not nested then abs_fixes t - else - fold_rev Term.abs fixes1 - (app (map (Term.dummy_pattern o #2) fixes2) (fold_rev Term.abs fixes2 t)); - val (assumes1, assumes2) = - extract_assumes name hyp_names case_outline prop - |> pairself (map (apsnd (maps Logic.dest_conjunctions))); - - val concl = Object_Logic.drop_judgment thy (Logic.strip_assums_concl prop); - val binds = - (case_conclN, concl) :: dest_binops concls concl - |> map (fn (x, t) => ((x, 0), SOME (abs_fixes t))); - in - ((fixes1, map (apsnd (map abs_fixes1)) assumes1), - ((fixes2, map (apsnd (map abs_fixes)) assumes2), binds)) - end; - - val cases = map extract props; - - fun common_case ((fixes1, assumes1), ((fixes2, assumes2), binds)) = - Case {fixes = bindings (fixes1 @ fixes2), - assumes = assumes1 @ assumes2, binds = binds, cases = []}; - fun inner_case (_, ((fixes2, assumes2), binds)) = - Case {fixes = bindings fixes2, assumes = assumes2, binds = binds, cases = []}; - fun nested_case ((fixes1, assumes1), _) = - Case {fixes = bindings fixes1, assumes = assumes1, binds = [], - cases = map string_of_int (1 upto len) ~~ map inner_case cases}; - in - if len = 0 then NONE - else if len = 1 then SOME (common_case (hd cases)) - else if is_none case_outline orelse length (distinct (op =) (map fst cases)) > 1 then NONE - else SOME (nested_case (hd cases)) - end; - -fun make rule_struct (thy, prop) cases = - let - val n = length cases; - val nprems = Logic.count_prems prop; - fun add_case ((name, hyp_names), concls) (cs, i) = - ((case try (fn () => - (Option.map (curry Logic.nth_prem i) rule_struct, Logic.nth_prem (i, prop))) () of - NONE => (name, NONE) - | SOME p => (name, extract_case thy p name hyp_names 
concls)) :: cs, i - 1); - in fold_rev add_case (drop (Int.max (n - nprems, 0)) cases) ([], n) |> #1 end; - -in - -val make_common = make NONE; -fun make_nested rule_struct = make (SOME rule_struct); - -fun apply args = - let - fun appl (Case {fixes, assumes, binds, cases}) = - let - val assumes' = map (apsnd (map (app args))) assumes; - val binds' = map (apsnd (Option.map (app args))) binds; - val cases' = map (apsnd appl) cases; - in Case {fixes = fixes, assumes = assumes', binds = binds', cases = cases'} end; - in appl end; - -end; - - - -(** tactics with cases **) - -type cases_tactic = thm -> (cases * thm) Seq.seq; - -fun CASES cases tac st = Seq.map (pair cases) (tac st); -fun NO_CASES tac = CASES [] tac; - -fun SUBGOAL_CASES tac i st = - (case try Logic.nth_prem (i, Thm.prop_of st) of - SOME goal => tac (goal, i, st) st - | NONE => Seq.empty); - -fun (tac1 THEN_ALL_NEW_CASES tac2) i st = - st |> tac1 i |> Seq.maps (fn (cases, st') => - CASES cases (Seq.INTERVAL tac2 i (i + nprems_of st' - nprems_of st)) st'); - - - -(** consume facts **) - -local - -fun unfold_prems ctxt n defs th = - if null defs then th - else Conv.fconv_rule (Conv.prems_conv n (Raw_Simplifier.rewrite ctxt true defs)) th; - -fun unfold_prems_concls ctxt defs th = - if null defs orelse not (can Logic.dest_conjunction (Thm.concl_of th)) then th - else - Conv.fconv_rule - (Conv.concl_conv ~1 (Conjunction.convs - (Conv.prems_conv ~1 (Raw_Simplifier.rewrite ctxt true defs)))) th; - -in - -fun consume ctxt defs facts ((xx, n), th) = - let val m = Int.min (length facts, n) in - th - |> unfold_prems ctxt n defs - |> unfold_prems_concls ctxt defs - |> Drule.multi_resolve (take m facts) - |> Seq.map (pair (xx, (n - m, drop m facts))) - end; - -end; - -val consumes_tagN = "consumes"; - -fun lookup_consumes th = - (case AList.lookup (op =) (Thm.get_tags th) consumes_tagN of - NONE => NONE - | SOME s => - (case Lexicon.read_nat s of - SOME n => SOME n - | _ => raise THM ("Malformed 'consumes' tag of theorem", 0, [th]))); - -fun get_consumes th = the_default 0 (lookup_consumes th); - -fun put_consumes NONE th = th - | put_consumes (SOME n) th = th - |> Thm.untag_rule consumes_tagN - |> Thm.tag_rule (consumes_tagN, string_of_int (if n < 0 then Thm.nprems_of th + n else n)); - -fun add_consumes k th = put_consumes (SOME (k + get_consumes th)) th; - -fun default_consumes n th = - if is_some (lookup_consumes th) then th else put_consumes (SOME n) th; - -val save_consumes = put_consumes o lookup_consumes; - -fun consumes n = Thm.mixed_attribute (apsnd (put_consumes (SOME n))); - - - -(** equality constraints **) - -val constraints_tagN = "constraints"; - -fun lookup_constraints th = - (case AList.lookup (op =) (Thm.get_tags th) constraints_tagN of - NONE => NONE - | SOME s => - (case Lexicon.read_nat s of - SOME n => SOME n - | _ => raise THM ("Malformed 'constraints' tag of theorem", 0, [th]))); - -fun get_constraints th = the_default 0 (lookup_constraints th); - -fun put_constraints NONE th = th - | put_constraints (SOME n) th = th - |> Thm.untag_rule constraints_tagN - |> Thm.tag_rule (constraints_tagN, string_of_int (if n < 0 then 0 else n)); - -val save_constraints = put_constraints o lookup_constraints; - -fun constraints n = Thm.mixed_attribute (apsnd (put_constraints (SOME n))); - - - -(** case names **) - -val implode_args = space_implode ";"; -val explode_args = space_explode ";"; - -val case_names_tagN = "case_names"; - -fun add_case_names NONE = I - | add_case_names (SOME names) = - Thm.untag_rule case_names_tagN - #> 
Thm.tag_rule (case_names_tagN, implode_args names); - -fun lookup_case_names th = - AList.lookup (op =) (Thm.get_tags th) case_names_tagN - |> Option.map explode_args; - -val save_case_names = add_case_names o lookup_case_names; -val name = add_case_names o SOME; -fun case_names cs = Thm.mixed_attribute (apsnd (name cs)); - - - -(** hyp names **) - -val implode_hyps = implode_args o map (suffix "," o space_implode ","); -val explode_hyps = map (space_explode "," o unsuffix ",") o explode_args; - -val cases_hyp_names_tagN = "cases_hyp_names"; - -fun add_cases_hyp_names NONE = I - | add_cases_hyp_names (SOME namess) = - Thm.untag_rule cases_hyp_names_tagN - #> Thm.tag_rule (cases_hyp_names_tagN, implode_hyps namess); - -fun lookup_cases_hyp_names th = - AList.lookup (op =) (Thm.get_tags th) cases_hyp_names_tagN - |> Option.map explode_hyps; - -val save_cases_hyp_names = add_cases_hyp_names o lookup_cases_hyp_names; -fun cases_hyp_names cs hs = - Thm.mixed_attribute (apsnd (name cs #> add_cases_hyp_names (SOME hs))); - - - -(** case conclusions **) - -val case_concl_tagN = "case_conclusion"; - -fun get_case_concl name (a, b) = - if a = case_concl_tagN then - (case explode_args b of c :: cs => if c = name then SOME cs else NONE) - else NONE; - -fun add_case_concl (name, cs) = Thm.map_tags (fn tags => - filter_out (is_some o get_case_concl name) tags @ - [(case_concl_tagN, implode_args (name :: cs))]); - -fun get_case_concls th name = - these (get_first (get_case_concl name) (Thm.get_tags th)); - -fun save_case_concls th = - let val concls = Thm.get_tags th |> map_filter - (fn (a, b) => - if a = case_concl_tagN - then (case explode_args b of c :: cs => SOME (c, cs) | _ => NONE) - else NONE) - in fold add_case_concl concls end; - -fun case_conclusion concl = Thm.mixed_attribute (apsnd (add_case_concl concl)); - - - -(** inner rule **) - -val inner_rule_tagN = "inner_rule"; - -fun is_inner_rule th = - AList.defined (op =) (Thm.get_tags th) inner_rule_tagN; - -fun put_inner_rule inner = - Thm.untag_rule inner_rule_tagN - #> inner ? 
Thm.tag_rule (inner_rule_tagN, ""); - -val save_inner_rule = put_inner_rule o is_inner_rule; - -val inner_rule = Thm.mixed_attribute (apsnd (put_inner_rule true)); - - - -(** case declarations **) - -(* access hints *) - -fun save th = - save_consumes th #> - save_constraints th #> - save_case_names th #> - save_cases_hyp_names th #> - save_case_concls th #> - save_inner_rule th; - -fun get th = - let - val n = get_consumes th; - val cases = - (case lookup_case_names th of - NONE => map (rpair [] o Library.string_of_int) (1 upto (Thm.nprems_of th - n)) - | SOME names => map (fn name => (name, get_case_concls th name)) names); - val cases_hyps = - (case lookup_cases_hyp_names th of - NONE => replicate (length cases) [] - | SOME names => names); - fun regroup ((name,concls),hyps) = ((name,hyps),concls) - in (map regroup (cases ~~ cases_hyps), n) end; - - -(* params *) - -fun rename_params xss th = - th - |> fold_index (fn (i, xs) => Thm.rename_params_rule (xs, i + 1)) xss - |> save th; - -fun params xss = Thm.rule_attribute (K (rename_params xss)); - - -(* internalize parameter names *) - -fun internalize_params rule = - let - fun rename prem = - let val xs = - map (Name.internal o Name.clean o fst) (Logic.strip_params prem) - in Logic.list_rename_params xs prem end; - fun rename_prems prop = - let val (As, C) = Logic.strip_horn prop - in Logic.list_implies (map rename As, C) end; - in Thm.equal_elim (Thm.reflexive (Drule.cterm_fun rename_prems (Thm.cprop_of rule))) rule end; - - - -(** mutual_rule **) - -local - -fun equal_cterms ts us = - is_equal (list_ord (Term_Ord.fast_term_ord o pairself Thm.term_of) (ts, us)); - -fun prep_rule n th = - let - val th' = Thm.permute_prems 0 n th; - val prems = take (Thm.nprems_of th' - n) (Drule.cprems_of th'); - val th'' = Drule.implies_elim_list th' (map Thm.assume prems); - in (prems, (n, th'')) end; - -in - -fun mutual_rule _ [] = NONE - | mutual_rule _ [th] = SOME ([0], th) - | mutual_rule ctxt (ths as th :: _) = - let - val ((_, ths'), ctxt') = Variable.import true ths ctxt; - val rules as (prems, _) :: _ = map (prep_rule (get_consumes th)) ths'; - val (ns, rls) = split_list (map #2 rules); - in - if not (forall (equal_cterms prems o #1) rules) then NONE - else - SOME (ns, - rls - |> Conjunction.intr_balanced - |> Drule.implies_intr_list prems - |> singleton (Variable.export ctxt' ctxt) - |> save th - |> put_consumes (SOME 0)) - end; - -end; - -fun strict_mutual_rule ctxt ths = - (case mutual_rule ctxt ths of - NONE => error "Failed to join given rules into one mutual rule" - | SOME res => res); - -end; - -structure Basic_Rule_Cases: BASIC_RULE_CASES = Rule_Cases; -open Basic_Rule_Cases; diff --git a/core/Pure/Isar/runtime.ML b/core/Pure/Isar/runtime.ML deleted file mode 100644 index 09c8c06d..00000000 --- a/core/Pure/Isar/runtime.ML +++ /dev/null @@ -1,179 +0,0 @@ -(* Title: Pure/Isar/runtime.ML - Author: Makarius - -Isar toplevel runtime support. 
-*) - -signature RUNTIME = -sig - exception UNDEF - exception TERMINATE - exception EXCURSION_FAIL of exn * string - exception TOPLEVEL_ERROR - val exn_context: Proof.context option -> exn -> exn - type error = ((serial * string) * string option) - val exn_messages_ids: exn -> error list - val exn_messages: exn -> (serial * string) list - val exn_message: exn -> string - val exn_error_message: exn -> unit - val exn_system_message: exn -> unit - val exn_trace: (unit -> 'a) -> 'a - val debugging: Context.generic option -> ('a -> 'b) -> 'a -> 'b - val controlled_execution: Context.generic option -> ('a -> 'b) -> 'a -> 'b - val toplevel_error: (exn -> unit) -> ('a -> 'b) -> 'a -> 'b - val toplevel_program: (unit -> 'a) -> 'a - val thread: bool -> (unit -> unit) -> Thread.thread -end; - -structure Runtime: RUNTIME = -struct - -(** exceptions **) - -exception UNDEF; -exception TERMINATE; -exception EXCURSION_FAIL of exn * string; -exception TOPLEVEL_ERROR; - - -(* exn_context *) - -exception CONTEXT of Proof.context * exn; - -fun exn_context NONE exn = exn - | exn_context (SOME ctxt) exn = if Exn.is_interrupt exn then exn else CONTEXT (ctxt, exn); - - -(* exn_message *) - -type error = ((serial * string) * string option); - -local - -fun robust f x = - (case try f x of - SOME s => s - | NONE => Markup.markup Markup.intensify ""); - -fun robust2 f x y = robust (fn () => f x y) (); - -fun robust_context NONE _ _ = [] - | robust_context (SOME ctxt) f xs = map (robust2 f ctxt) xs; - -fun identify exn = - let - val exn' = Par_Exn.identify [] exn; - val exec_id = Properties.get (Exn_Properties.get exn') Markup.exec_idN; - val i = Par_Exn.the_serial exn' handle Option.Option => serial (); - in ((i, exn'), exec_id) end; - -fun flatten _ (CONTEXT (ctxt, exn)) = flatten (SOME ctxt) exn - | flatten context (Exn.EXCEPTIONS exns) = maps (flatten context) exns - | flatten context exn = - (case Par_Exn.dest exn of - SOME exns => maps (flatten context) exns - | NONE => [(context, identify exn)]); - -in - -fun exn_messages_ids e = - let - fun raised exn name msgs = - let val pos = Position.here (Exn_Output.position exn) in - (case msgs of - [] => "exception " ^ name ^ " raised" ^ pos - | [msg] => "exception " ^ name ^ " raised" ^ pos ^ ": " ^ msg - | _ => - cat_lines (("exception " ^ name ^ " raised" ^ pos ^ ":") :: - map (Markup.markup Markup.item) msgs)) - end; - - fun exn_msgs (context, ((i, exn), id)) = - (case exn of - EXCURSION_FAIL (exn, loc) => - map (fn ((i, msg), id) => ((i, msg ^ Markup.markup Markup.no_report ("\n" ^ loc)), id)) - (sorted_msgs context exn) - | _ => - let - val msg = - (case exn of - TERMINATE => "Exit" - | TimeLimit.TimeOut => "Timeout" - | TOPLEVEL_ERROR => "Error" - | ERROR "" => "Error" - | ERROR msg => msg - | Fail msg => raised exn "Fail" [msg] - | THEORY (msg, thys) => - raised exn "THEORY" (msg :: map (robust Context.str_of_thy) thys) - | Ast.AST (msg, asts) => - raised exn "AST" (msg :: map (robust (Pretty.string_of o Ast.pretty_ast)) asts) - | TYPE (msg, Ts, ts) => - raised exn "TYPE" (msg :: - (robust_context context Syntax.string_of_typ Ts @ - robust_context context Syntax.string_of_term ts)) - | TERM (msg, ts) => - raised exn "TERM" (msg :: robust_context context Syntax.string_of_term ts) - | CTERM (msg, cts) => - raised exn "CTERM" - (msg :: robust_context context Syntax.string_of_term (map term_of cts)) - | THM (msg, i, thms) => - raised exn ("THM " ^ string_of_int i) - (msg :: robust_context context Display.string_of_thm thms) - | _ => raised exn (robust 
(Pretty.string_of o Exn_Output.pretty) exn) []); - in [((i, msg), id)] end) - and sorted_msgs context exn = - sort_distinct (int_ord o pairself (fst o fst)) (maps exn_msgs (flatten context exn)); - - in sorted_msgs NONE e end; - -end; - -fun exn_messages exn = map #1 (exn_messages_ids exn); - -fun exn_message exn = - (case exn_messages exn of - [] => "Interrupt" - | msgs => cat_lines (map snd msgs)); - -val exn_error_message = Output.error_message o exn_message; -val exn_system_message = Output.system_message o exn_message; -fun exn_trace e = print_exception_trace exn_message e; - - - -(** controlled execution **) - -fun debugging opt_context f x = - if ML_Options.exception_trace_enabled opt_context - then print_exception_trace exn_message (fn () => f x) - else f x; - -fun controlled_execution opt_context f x = - (f |> debugging opt_context |> Future.interruptible_task) x; - -fun toplevel_error output_exn f x = f x - handle exn => - if Exn.is_interrupt exn then reraise exn - else - let - val opt_ctxt = - (case Context.thread_data () of - NONE => NONE - | SOME context => try Context.proof_of context); - val _ = output_exn (exn_context opt_ctxt exn); - in raise TOPLEVEL_ERROR end; - -fun toplevel_program body = - (body |> controlled_execution NONE |> toplevel_error exn_error_message) (); - -(*Proof General legacy*) -fun thread interrupts body = - Thread.fork - (fn () => - debugging NONE body () handle exn => - if Exn.is_interrupt exn then () - else Output.urgent_message ("## INTERNAL ERROR ##\n" ^ exn_message exn), - Simple_Thread.attributes interrupts); - -end; - diff --git a/core/Pure/Isar/spec_rules.ML b/core/Pure/Isar/spec_rules.ML deleted file mode 100644 index a2e0de06..00000000 --- a/core/Pure/Isar/spec_rules.ML +++ /dev/null @@ -1,66 +0,0 @@ -(* Title: Pure/Isar/spec_rules.ML - Author: Makarius - -Rules that characterize specifications, with rough classification. -NB: In the face of arbitrary morphisms, the original shape of -specifications may get lost. 
-*) - -signature SPEC_RULES = -sig - datatype rough_classification = Unknown | Equational | Inductive | Co_Inductive - type spec = rough_classification * (term list * thm list) - val get: Proof.context -> spec list - val get_global: theory -> spec list - val retrieve: Proof.context -> term -> spec list - val retrieve_global: theory -> term -> spec list - val add: rough_classification -> term list * thm list -> local_theory -> local_theory - val add_global: rough_classification -> term list * thm list -> theory -> theory -end; - -structure Spec_Rules: SPEC_RULES = -struct - -(* maintain rules *) - -datatype rough_classification = Unknown | Equational | Inductive | Co_Inductive; -type spec = rough_classification * (term list * thm list); - -structure Rules = Generic_Data -( - type T = spec Item_Net.T; - val empty : T = - Item_Net.init - (fn ((class1, (ts1, ths1)), (class2, (ts2, ths2))) => - class1 = class2 andalso - eq_list (op aconv) (ts1, ts2) andalso - eq_list Thm.eq_thm_prop (ths1, ths2)) - (#1 o #2); - val extend = I; - val merge = Item_Net.merge; -); - -val get = Item_Net.content o Rules.get o Context.Proof; -val get_global = Item_Net.content o Rules.get o Context.Theory; - -val retrieve = Item_Net.retrieve o Rules.get o Context.Proof; -val retrieve_global = Item_Net.retrieve o Rules.get o Context.Theory; - -fun add class (ts, ths) lthy = - let - val cts = map (Thm.cterm_of (Proof_Context.theory_of lthy)) ts; - in - lthy |> Local_Theory.declaration {syntax = false, pervasive = true} - (fn phi => - let - val (ts', ths') = - Morphism.fact phi (map Drule.mk_term cts @ ths) - |> chop (length cts) - |>> map (Thm.term_of o Drule.dest_term); - in Rules.map (Item_Net.update (class, (ts', ths'))) end) - end; - -fun add_global class spec = - Context.theory_map (Rules.map (Item_Net.update (class, spec))); - -end; diff --git a/core/Pure/Isar/specification.ML b/core/Pure/Isar/specification.ML deleted file mode 100644 index a0108ebf..00000000 --- a/core/Pure/Isar/specification.ML +++ /dev/null @@ -1,441 +0,0 @@ -(* Title: Pure/Isar/specification.ML - Author: Makarius - -Derived local theory specifications --- with type-inference and -toplevel polymorphism. 
-*) - -signature SPECIFICATION = -sig - val check_spec: - (binding * typ option * mixfix) list -> (Attrib.binding * term) list -> Proof.context -> - (((binding * typ) * mixfix) list * (Attrib.binding * term) list) * Proof.context - val read_spec: - (binding * string option * mixfix) list -> (Attrib.binding * string) list -> Proof.context -> - (((binding * typ) * mixfix) list * (Attrib.binding * term) list) * Proof.context - val check_free_spec: - (binding * typ option * mixfix) list -> (Attrib.binding * term) list -> Proof.context -> - (((binding * typ) * mixfix) list * (Attrib.binding * term) list) * Proof.context - val read_free_spec: - (binding * string option * mixfix) list -> (Attrib.binding * string) list -> Proof.context -> - (((binding * typ) * mixfix) list * (Attrib.binding * term) list) * Proof.context - val check_specification: (binding * typ option * mixfix) list -> - (Attrib.binding * term list) list -> Proof.context -> - (((binding * typ) * mixfix) list * (Attrib.binding * term list) list) * Proof.context - val read_specification: (binding * string option * mixfix) list -> - (Attrib.binding * string list) list -> Proof.context -> - (((binding * typ) * mixfix) list * (Attrib.binding * term list) list) * Proof.context - val axiomatization: (binding * typ option * mixfix) list -> - (Attrib.binding * term list) list -> theory -> - (term list * thm list list) * theory - val axiomatization_cmd: (binding * string option * mixfix) list -> - (Attrib.binding * string list) list -> theory -> - (term list * thm list list) * theory - val axiom: Attrib.binding * term -> theory -> thm * theory - val definition: - (binding * typ option * mixfix) option * (Attrib.binding * term) -> - local_theory -> (term * (string * thm)) * local_theory - val definition': - (binding * typ option * mixfix) option * (Attrib.binding * term) -> - bool -> local_theory -> (term * (string * thm)) * local_theory - val definition_cmd: - (binding * string option * mixfix) option * (Attrib.binding * string) -> - bool -> local_theory -> (term * (string * thm)) * local_theory - val abbreviation: Syntax.mode -> (binding * typ option * mixfix) option * term -> - bool -> local_theory -> local_theory - val abbreviation_cmd: Syntax.mode -> (binding * string option * mixfix) option * string -> - bool -> local_theory -> local_theory - val type_notation: bool -> Syntax.mode -> (typ * mixfix) list -> local_theory -> local_theory - val type_notation_cmd: bool -> Syntax.mode -> (string * mixfix) list -> - local_theory -> local_theory - val notation: bool -> Syntax.mode -> (term * mixfix) list -> local_theory -> local_theory - val notation_cmd: bool -> Syntax.mode -> (string * mixfix) list -> local_theory -> local_theory - val theorems: string -> - (Attrib.binding * (thm list * Attrib.src list) list) list -> - (binding * typ option * mixfix) list -> - bool -> local_theory -> (string * thm list) list * local_theory - val theorems_cmd: string -> - (Attrib.binding * (Facts.ref * Attrib.src list) list) list -> - (binding * string option * mixfix) list -> - bool -> local_theory -> (string * thm list) list * local_theory - val theorem: string -> Method.text option -> - (thm list list -> local_theory -> local_theory) -> Attrib.binding -> - string list -> Element.context_i list -> Element.statement_i -> - bool -> local_theory -> Proof.state - val theorem_cmd: string -> Method.text option -> - (thm list list -> local_theory -> local_theory) -> Attrib.binding -> - (xstring * Position.T) list -> Element.context list -> Element.statement -> - 
bool -> local_theory -> Proof.state - val schematic_theorem: string -> Method.text option -> - (thm list list -> local_theory -> local_theory) -> Attrib.binding -> - string list -> Element.context_i list -> Element.statement_i -> - bool -> local_theory -> Proof.state - val schematic_theorem_cmd: string -> Method.text option -> - (thm list list -> local_theory -> local_theory) -> Attrib.binding -> - (xstring * Position.T) list -> Element.context list -> Element.statement -> - bool -> local_theory -> Proof.state - val add_theorem_hook: (bool -> Proof.state -> Proof.state) -> Context.generic -> Context.generic -end; - -structure Specification: SPECIFICATION = -struct - -(* prepare specification *) - -local - -fun close_forms ctxt i xs As = - let - val commons = map #1 xs; - val _ = - (case duplicates (op =) commons of [] => () - | dups => error ("Duplicate local variables " ^ commas_quote dups)); - val frees = rev (fold (Variable.add_free_names ctxt) As (rev commons)); - val types = - map (Type_Infer.param i o rpair []) (Name.invent Name.context Name.aT (length frees)); - val uniform_typing = the o AList.lookup (op =) (frees ~~ types); - - fun abs_body lev y (Abs (x, T, b)) = Abs (x, T, abs_body (lev + 1) y b) - | abs_body lev y (t $ u) = abs_body lev y t $ abs_body lev y u - | abs_body lev y (t as Free (x, T)) = - if x = y then Type.constraint (uniform_typing x) (Type.constraint T (Bound lev)) - else t - | abs_body _ _ a = a; - fun close (y, U) B = - let val B' = abs_body 0 y (Term.incr_boundvars 1 B) - in if Term.is_dependent B' then Logic.all_const dummyT $ Abs (y, U, B') else B end; - fun close_form A = - let - val occ_frees = rev (Variable.add_free_names ctxt A []); - val bounds = xs @ map (rpair dummyT) (subtract (op =) commons occ_frees); - in fold_rev close bounds A end; - in map close_form As end; - -fun prepare prep_vars parse_prop prep_att do_close raw_vars raw_specss ctxt = - let - val (vars, vars_ctxt) = ctxt |> prep_vars raw_vars; - val (xs, params_ctxt) = vars_ctxt |> Proof_Context.add_fixes vars; - - val Asss = - (map o map) snd raw_specss - |> (burrow o burrow) - (grouped 10 (Par_List.map_name "Specification.parse_prop") (parse_prop params_ctxt)); - val names = Variable.names_of (params_ctxt |> (fold o fold o fold) Variable.declare_term Asss) - |> fold Name.declare xs; - val Asss' = #1 ((fold_map o fold_map o fold_map) Term.free_dummy_patterns Asss names); - val idx = (fold o fold o fold) Term.maxidx_term Asss' ~1 + 1; - val specs = - (if do_close then - #1 (fold_map - (fn Ass => fn i => (burrow (close_forms params_ctxt i []) Ass, i + 1)) Asss' idx) - else Asss') - |> flat |> burrow (Syntax.check_props params_ctxt); - val specs_ctxt = params_ctxt |> (fold o fold) Variable.declare_term specs; - - val Ts = specs_ctxt |> fold_map Proof_Context.inferred_param xs |> fst; - val params = map2 (fn (b, _, mx) => fn T => ((b, T), mx)) vars Ts; - val name_atts = map (fn ((name, atts), _) => (name, map (prep_att ctxt) atts)) (flat raw_specss); - in ((params, name_atts ~~ specs), specs_ctxt) end; - - -fun single_spec (a, prop) = [(a, [prop])]; -fun the_spec (a, [prop]) = (a, prop); - -fun prep_spec prep_vars parse_prop prep_att do_close vars specs = - prepare prep_vars parse_prop prep_att do_close - vars (map single_spec specs) #>> apsnd (map the_spec); - -in - -fun check_spec x = prep_spec Proof_Context.cert_vars (K I) (K I) true x; -fun read_spec x = prep_spec Proof_Context.read_vars Syntax.parse_prop Attrib.check_src true x; - -fun check_free_spec x = prep_spec Proof_Context.cert_vars 
(K I) (K I) false x; -fun read_free_spec x = prep_spec Proof_Context.read_vars Syntax.parse_prop Attrib.check_src false x; - -fun check_specification vars specs = - prepare Proof_Context.cert_vars (K I) (K I) true vars [specs]; - -fun read_specification vars specs = - prepare Proof_Context.read_vars Syntax.parse_prop Attrib.check_src true vars [specs]; - -end; - - -(* axiomatization -- within global theory *) - -fun gen_axioms prep raw_vars raw_specs thy = - let - val ((vars, specs), _) = prep raw_vars raw_specs (Proof_Context.init_global thy); - val xs = map (fn ((b, T), _) => (Variable.check_name b, T)) vars; - - (*consts*) - val (consts, consts_thy) = thy |> fold_map Theory.specify_const vars; - val subst = Term.subst_atomic (map Free xs ~~ consts); - - (*axioms*) - val (axioms, axioms_thy) = (specs, consts_thy) |-> fold_map (fn ((b, atts), props) => - fold_map Thm.add_axiom_global - (map (apfst (fn a => Binding.map_name (K a) b)) - (Global_Theory.name_multi (Binding.name_of b) (map subst props))) - #>> (fn ths => ((b, atts), [(map #2 ths, [])]))); - - (*facts*) - val (facts, facts_lthy) = axioms_thy - |> Named_Target.theory_init - |> Spec_Rules.add Spec_Rules.Unknown (consts, maps (maps #1 o #2) axioms) - |> Local_Theory.notes axioms; - - in ((consts, map #2 facts), Local_Theory.exit_global facts_lthy) end; - -val axiomatization = gen_axioms check_specification; -val axiomatization_cmd = gen_axioms read_specification; - -fun axiom (b, ax) = axiomatization [] [(b, [ax])] #>> (hd o hd o snd); - - -(* definition *) - -fun gen_def prep (raw_var, raw_spec) int lthy = - let - val (vars, [((raw_name, atts), prop)]) = fst (prep (the_list raw_var) [raw_spec] lthy); - val (((x, T), rhs), prove) = Local_Defs.derived_def lthy true prop; - val _ = Name.reject_internal (x, []); - val var as (b, _) = - (case vars of - [] => (Binding.name x, NoSyn) - | [((b, _), mx)] => - let - val y = Variable.check_name b; - val _ = x = y orelse - error ("Head of definition " ^ quote x ^ " differs from declaration " ^ quote y ^ - Position.here (Binding.pos_of b)); - in (b, mx) end); - val name = Thm.def_binding_optional b raw_name; - val ((lhs, (_, raw_th)), lthy2) = lthy - |> Local_Theory.define_internal (var, ((Binding.suffix_name "_raw" name, []), rhs)); - - val th = prove lthy2 raw_th; - val lthy3 = lthy2 |> Spec_Rules.add Spec_Rules.Equational ([lhs], [th]); - - val ([(def_name, [th'])], lthy4) = lthy3 - |> Local_Theory.notes [((name, Code.add_default_eqn_attrib :: atts), [([th], [])])]; - - val lhs' = Morphism.term (Local_Theory.target_morphism lthy4) lhs; - - val _ = - Proof_Display.print_consts int (Position.thread_data ()) lthy4 - (member (op =) (Term.add_frees lhs' [])) [(x, T)]; - in ((lhs, (def_name, th')), lthy4) end; - -val definition' = gen_def check_free_spec; -fun definition spec = definition' spec false; -val definition_cmd = gen_def read_free_spec; - - -(* abbreviation *) - -fun gen_abbrev prep mode (raw_var, raw_prop) int lthy = - let - val lthy1 = lthy - |> Proof_Context.set_syntax_mode mode; - val ((vars, [(_, prop)]), _) = - prep (the_list raw_var) [(Attrib.empty_binding, raw_prop)] - (lthy1 |> Proof_Context.set_mode Proof_Context.mode_abbrev); - val ((x, T), rhs) = Local_Defs.abs_def (#2 (Local_Defs.cert_def lthy1 prop)); - val _ = Name.reject_internal (x, []); - val var = - (case vars of - [] => (Binding.name x, NoSyn) - | [((b, _), mx)] => - let - val y = Variable.check_name b; - val _ = x = y orelse - error ("Head of abbreviation " ^ quote x ^ " differs from declaration " ^ quote y ^ - 
Position.here (Binding.pos_of b)); - in (b, mx) end); - val lthy2 = lthy1 - |> Local_Theory.abbrev mode (var, rhs) |> snd - |> Proof_Context.restore_syntax_mode lthy; - - val _ = Proof_Display.print_consts int (Position.thread_data ()) lthy2 (K false) [(x, T)]; - in lthy2 end; - -val abbreviation = gen_abbrev check_free_spec; -val abbreviation_cmd = gen_abbrev read_free_spec; - - -(* notation *) - -local - -fun gen_type_notation prep_type add mode args lthy = - lthy |> Local_Theory.type_notation add mode (map (apfst (prep_type lthy)) args); - -fun gen_notation prep_const add mode args lthy = - lthy |> Local_Theory.notation add mode (map (apfst (prep_const lthy)) args); - -in - -val type_notation = gen_type_notation (K I); -val type_notation_cmd = - gen_type_notation (Proof_Context.read_type_name {proper = true, strict = false}); - -val notation = gen_notation (K I); -val notation_cmd = gen_notation (Proof_Context.read_const {proper = false, strict = false}); - -end; - - -(* fact statements *) - -local - -fun gen_theorems prep_fact prep_att prep_vars - kind raw_facts raw_fixes int lthy = - let - val facts = raw_facts |> map (fn ((name, atts), bs) => - ((name, map (prep_att lthy) atts), - bs |> map (fn (b, more_atts) => (prep_fact lthy b, map (prep_att lthy) more_atts)))); - val (_, ctxt') = lthy |> prep_vars raw_fixes |-> Proof_Context.add_fixes; - - val facts' = facts - |> Attrib.partial_evaluation ctxt' - |> Element.transform_facts (Proof_Context.export_morphism ctxt' lthy); - val (res, lthy') = lthy |> Local_Theory.notes_kind kind facts'; - val _ = Proof_Display.print_results int (Position.thread_data ()) lthy' ((kind, ""), res); - in (res, lthy') end; - -in - -val theorems = gen_theorems (K I) (K I) Proof_Context.cert_vars; -val theorems_cmd = gen_theorems Proof_Context.get_fact Attrib.check_src Proof_Context.read_vars; - -end; - - -(* complex goal statements *) - -local - -fun prep_statement prep_att prep_stmt elems concl ctxt = - (case concl of - Element.Shows shows => - let - val (propp, elems_ctxt) = prep_stmt elems (map snd shows) ctxt; - val prems = Assumption.local_prems_of elems_ctxt ctxt; - val stmt = Attrib.map_specs (map prep_att) (map fst shows ~~ propp); - val goal_ctxt = (fold o fold) (Variable.auto_fixes o fst) propp elems_ctxt; - in (([], prems, stmt, NONE), goal_ctxt) end - | Element.Obtains obtains => - let - val case_names = obtains |> map_index (fn (i, (b, _)) => - if Binding.is_empty b then string_of_int (i + 1) else Name_Space.base_name b); - val constraints = obtains |> map (fn (_, (vars, _)) => - Element.Constrains - (vars |> map_filter (fn (x, SOME T) => SOME (Variable.check_name x, T) | _ => NONE))); - - val raw_propp = obtains |> map (fn (_, (_, props)) => map (rpair []) props); - val (propp, elems_ctxt) = prep_stmt (elems @ constraints) raw_propp ctxt; - - val thesis = Object_Logic.fixed_judgment (Proof_Context.theory_of ctxt) Auto_Bind.thesisN; - - fun assume_case ((name, (vars, _)), asms) ctxt' = - let - val bs = map fst vars; - val xs = map Variable.check_name bs; - val props = map fst asms; - val (Ts, _) = ctxt' - |> fold Variable.declare_term props - |> fold_map Proof_Context.inferred_param xs; - val params = map Free (xs ~~ Ts); - val asm = fold_rev Logic.all params (Logic.list_implies (props, thesis)); - val _ = ctxt' |> Proof_Context.add_fixes (map (fn b => (b, NONE, NoSyn)) bs); - in - ctxt' - |> Variable.auto_fixes asm - |> Proof_Context.add_assms_i Assumption.assume_export - [((name, [Context_Rules.intro_query NONE]), [(asm, [])])] - |>> (fn [(_, 
[th])] => th) - end; - - val more_atts = map (Attrib.internal o K) - [Rule_Cases.consumes (~ (length obtains)), Rule_Cases.case_names case_names]; - val prems = Assumption.local_prems_of elems_ctxt ctxt; - val stmt = [((Binding.empty, []), [(thesis, [])])]; - - val (facts, goal_ctxt) = elems_ctxt - |> (snd o Proof_Context.add_fixes [(Binding.name Auto_Bind.thesisN, NONE, NoSyn)]) - |> fold_map assume_case (obtains ~~ propp) - |-> (fn ths => - Proof_Context.note_thmss "" [((Binding.name Obtain.thatN, []), [(ths, [])])] #> - #2 #> pair ths); - in ((more_atts, prems, stmt, SOME facts), goal_ctxt) end); - -structure Theorem_Hook = Generic_Data -( - type T = ((bool -> Proof.state -> Proof.state) * stamp) list; - val empty = []; - val extend = I; - fun merge data : T = Library.merge (eq_snd op =) data; -); - -fun gen_theorem schematic bundle_includes prep_att prep_stmt - kind before_qed after_qed (name, raw_atts) raw_includes raw_elems raw_concl int lthy = - let - val _ = Local_Theory.assert lthy; - - val elems = raw_elems |> map (Element.map_ctxt_attrib (prep_att lthy)); - val ((more_atts, prems, stmt, facts), goal_ctxt) = lthy - |> bundle_includes raw_includes - |> prep_statement (prep_att lthy) prep_stmt elems raw_concl; - val atts = more_atts @ map (prep_att lthy) raw_atts; - - val pos = Position.thread_data (); - fun after_qed' results goal_ctxt' = - let - val results' = - burrow (map (Goal.norm_result lthy) o Proof_Context.export goal_ctxt' lthy) results; - val (res, lthy') = - if forall (Attrib.is_empty_binding o fst) stmt then (map (pair "") results', lthy) - else - Local_Theory.notes_kind kind - (map2 (fn (b, _) => fn ths => (b, [(ths, [])])) stmt results') lthy; - val lthy'' = - if Attrib.is_empty_binding (name, atts) then - (Proof_Display.print_results int pos lthy' ((kind, ""), res); lthy') - else - let - val ([(res_name, _)], lthy'') = - Local_Theory.notes_kind kind [((name, atts), [(maps #2 res, [])])] lthy'; - val _ = Proof_Display.print_results int pos lthy' ((kind, res_name), res); - in lthy'' end; - in after_qed results' lthy'' end; - in - goal_ctxt - |> Proof_Context.note_thmss "" [((Binding.name Auto_Bind.assmsN, []), [(prems, [])])] - |> snd - |> Proof.theorem before_qed after_qed' (map snd stmt) - |> (case facts of NONE => I | SOME ths => Proof.refine_insert ths) - |> tap (fn state => not schematic andalso Proof.schematic_goal state andalso - error "Illegal schematic goal statement") - |> fold_rev (fn (f, _) => f int) (Theorem_Hook.get (Context.Proof goal_ctxt)) - end; - -in - -val theorem = - gen_theorem false Bundle.includes (K I) Expression.cert_statement; -val theorem_cmd = - gen_theorem false Bundle.includes_cmd Attrib.check_src Expression.read_statement; - -val schematic_theorem = - gen_theorem true Bundle.includes (K I) Expression.cert_statement; -val schematic_theorem_cmd = - gen_theorem true Bundle.includes_cmd Attrib.check_src Expression.read_statement; - -fun add_theorem_hook f = Theorem_Hook.map (cons (f, stamp ())); - -end; - -end; diff --git a/core/Pure/Isar/token.ML b/core/Pure/Isar/token.ML deleted file mode 100644 index e22b5afe..00000000 --- a/core/Pure/Isar/token.ML +++ /dev/null @@ -1,510 +0,0 @@ -(* Title: Pure/Isar/token.ML - Author: Markus Wenzel, TU Muenchen - -Outer token syntax for Isabelle/Isar. 
-*) - -signature TOKEN = -sig - datatype kind = - Command | Keyword | Ident | LongIdent | SymIdent | Var | TypeIdent | TypeVar | - Nat | Float | String | AltString | Verbatim | Cartouche | Space | Comment | InternalValue | - Error of string | Sync | EOF - type file = {src_path: Path.T, lines: string list, digest: SHA1.digest, pos: Position.T} - datatype value = - Literal of bool * Markup.T | Text of string | Typ of typ | Term of term | Fact of thm list | - Attribute of morphism -> attribute | Files of file Exn.result list - type T - val str_of_kind: kind -> string - val pos_of: T -> Position.T - val range_of: T list -> Position.range - val eof: T - val is_eof: T -> bool - val not_eof: T -> bool - val not_sync: T -> bool - val stopper: T Scan.stopper - val kind_of: T -> kind - val is_kind: kind -> T -> bool - val keyword_with: (string -> bool) -> T -> bool - val ident_with: (string -> bool) -> T -> bool - val is_command: T -> bool - val is_name: T -> bool - val is_proper: T -> bool - val is_improper: T -> bool - val is_semicolon: T -> bool - val is_comment: T -> bool - val is_begin_ignore: T -> bool - val is_end_ignore: T -> bool - val is_error: T -> bool - val is_space: T -> bool - val is_blank: T -> bool - val is_newline: T -> bool - val inner_syntax_of: T -> string - val source_position_of: T -> Symbol_Pos.source - val content_of: T -> string - val keyword_markup: bool * Markup.T -> string -> Markup.T - val completion_report: T -> Position.report_text list - val report: T -> Position.report_text - val markup: T -> Markup.T - val unparse: T -> string - val print: T -> string - val text_of: T -> string * string - val get_files: T -> file Exn.result list - val put_files: file Exn.result list -> T -> T - val get_value: T -> value option - val map_value: (value -> value) -> T -> T - val reports_of_value: T -> Position.report list - val mk_text: string -> T - val mk_typ: typ -> T - val mk_term: term -> T - val mk_fact: thm list -> T - val mk_attribute: (morphism -> attribute) -> T - val init_assignable: T -> T - val assign: value option -> T -> unit - val closure: T -> T - val ident_or_symbolic: string -> bool - val source_proper: (T, 'a) Source.source -> (T, (T, 'a) Source.source) Source.source - val source': {do_recover: bool option} -> (unit -> Scan.lexicon * Scan.lexicon) -> - (Symbol_Pos.T, 'a) Source.source -> (T, (Symbol_Pos.T, 'a) Source.source) Source.source - val source: {do_recover: bool option} -> (unit -> Scan.lexicon * Scan.lexicon) -> - Position.T -> (Symbol.symbol, 'a) Source.source -> (T, - (Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source) Source.source - val read_antiq: Scan.lexicon -> (T list -> 'a * T list) -> Symbol_Pos.T list * Position.T -> 'a -end; - -structure Token: TOKEN = -struct - -(** tokens **) - -(* token values *) - -(*The value slot assigns an (optional) internal value to a token, - usually as a side-effect of special scanner setup (see also - args.ML). 
Note that an assignable ref designates an intermediate - state of internalization -- it is NOT meant to persist.*) - -type file = {src_path: Path.T, lines: string list, digest: SHA1.digest, pos: Position.T}; - -datatype value = - Literal of bool * Markup.T | - Text of string | - Typ of typ | - Term of term | - Fact of thm list | - Attribute of morphism -> attribute | - Files of file Exn.result list; - -datatype slot = - Slot | - Value of value option | - Assignable of value option Unsynchronized.ref; - - -(* datatype token *) - -datatype kind = - Command | Keyword | Ident | LongIdent | SymIdent | Var | TypeIdent | TypeVar | - Nat | Float | String | AltString | Verbatim | Cartouche | Space | Comment | InternalValue | - Error of string | Sync | EOF; - -datatype T = Token of (Symbol_Pos.text * Position.range) * (kind * string) * slot; - -val str_of_kind = - fn Command => "command" - | Keyword => "keyword" - | Ident => "identifier" - | LongIdent => "long identifier" - | SymIdent => "symbolic identifier" - | Var => "schematic variable" - | TypeIdent => "type variable" - | TypeVar => "schematic type variable" - | Nat => "natural number" - | Float => "floating-point number" - | String => "quoted string" - | AltString => "back-quoted string" - | Verbatim => "verbatim text" - | Cartouche => "text cartouche" - | Space => "white space" - | Comment => "comment text" - | InternalValue => "internal value" - | Error _ => "bad input" - | Sync => "sync marker" - | EOF => "end-of-input"; - -val delimited_kind = member (op =) [String, AltString, Verbatim, Cartouche, Comment]; - - -(* position *) - -fun pos_of (Token ((_, (pos, _)), _, _)) = pos; -fun end_pos_of (Token ((_, (_, pos)), _, _)) = pos; - -fun range_of (toks as tok :: _) = - let val pos' = end_pos_of (List.last toks) - in Position.range (pos_of tok) pos' end - | range_of [] = Position.no_range; - - -(* control tokens *) - -fun mk_eof pos = Token (("", (pos, Position.none)), (EOF, ""), Slot); -val eof = mk_eof Position.none; - -fun is_eof (Token (_, (EOF, _), _)) = true - | is_eof _ = false; - -val not_eof = not o is_eof; - -fun not_sync (Token (_, (Sync, _), _)) = false - | not_sync _ = true; - -val stopper = - Scan.stopper (fn [] => eof | toks => mk_eof (end_pos_of (List.last toks))) is_eof; - - -(* kind of token *) - -fun kind_of (Token (_, (k, _), _)) = k; -fun is_kind k (Token (_, (k', _), _)) = k = k'; - -val is_command = is_kind Command; -val is_name = is_kind Ident orf is_kind SymIdent orf is_kind String orf is_kind Nat; - -fun keyword_with pred (Token (_, (Keyword, x), _)) = pred x - | keyword_with _ _ = false; - -fun ident_with pred (Token (_, (Ident, x), _)) = pred x - | ident_with _ _ = false; - -fun is_proper (Token (_, (Space, _), _)) = false - | is_proper (Token (_, (Comment, _), _)) = false - | is_proper _ = true; - -val is_improper = not o is_proper; - -fun is_semicolon (Token (_, (Keyword, ";"), _)) = true - | is_semicolon _ = false; - -fun is_comment (Token (_, (Comment, _), _)) = true - | is_comment _ = false; - -fun is_begin_ignore (Token (_, (Comment, "<"), _)) = true - | is_begin_ignore _ = false; - -fun is_end_ignore (Token (_, (Comment, ">"), _)) = true - | is_end_ignore _ = false; - -fun is_error (Token (_, (Error _, _), _)) = true - | is_error _ = false; - - -(* blanks and newlines -- space tokens obey lines *) - -fun is_space (Token (_, (Space, _), _)) = true - | is_space _ = false; - -fun is_blank (Token (_, (Space, x), _)) = not (String.isSuffix "\n" x) - | is_blank _ = false; - -fun is_newline (Token (_, (Space, x), 
_)) = String.isSuffix "\n" x - | is_newline _ = false; - - -(* token content *) - -fun inner_syntax_of (Token ((source, (pos, _)), (kind, x), _)) = - if YXML.detect x then x - else - let - val delimited = delimited_kind kind; - val tree = XML.Elem (Markup.token delimited (Position.properties_of pos), [XML.Text source]); - in YXML.string_of tree end; - -fun source_position_of (Token ((source, (pos, _)), (kind, _), _)) = - {delimited = delimited_kind kind, text = source, pos = pos}; - -fun content_of (Token (_, (_, x), _)) = x; - - -(* markup reports *) - -local - -val token_kind_markup = - fn Command => (Markup.command, "") - | Keyword => (Markup.keyword2, "") - | Ident => (Markup.empty, "") - | LongIdent => (Markup.empty, "") - | SymIdent => (Markup.empty, "") - | Var => (Markup.var, "") - | TypeIdent => (Markup.tfree, "") - | TypeVar => (Markup.tvar, "") - | Nat => (Markup.empty, "") - | Float => (Markup.empty, "") - | String => (Markup.string, "") - | AltString => (Markup.altstring, "") - | Verbatim => (Markup.verbatim, "") - | Cartouche => (Markup.cartouche, "") - | Space => (Markup.empty, "") - | Comment => (Markup.comment, "") - | InternalValue => (Markup.empty, "") - | Error msg => (Markup.bad, msg) - | Sync => (Markup.control, "") - | EOF => (Markup.control, ""); - -in - -fun keyword_markup (important, keyword) x = - if important orelse Symbol.is_ascii_identifier x then keyword else Markup.delimiter; - -fun completion_report tok = - if is_kind Keyword tok - then map (fn m => ((pos_of tok, m), "")) (Completion.suppress_abbrevs (content_of tok)) - else []; - -fun report tok = - if is_kind Keyword tok then - ((pos_of tok, keyword_markup (false, Markup.keyword2) (content_of tok)), "") - else - let val (m, text) = token_kind_markup (kind_of tok) - in ((pos_of tok, m), text) end; - -val markup = #2 o #1 o report; - -end; - - -(* unparse *) - -fun unparse (Token (_, (kind, x), _)) = - (case kind of - String => Symbol_Pos.quote_string_qq x - | AltString => Symbol_Pos.quote_string_bq x - | Verbatim => enclose "{*" "*}" x - | Cartouche => cartouche x - | Comment => enclose "(*" "*)" x - | Sync => "" - | EOF => "" - | _ => x); - -fun print tok = Markup.markup (markup tok) (unparse tok); - -fun text_of tok = - if is_semicolon tok then ("terminator", "") - else - let - val k = str_of_kind (kind_of tok); - val m = markup tok; - val s = unparse tok; - in - if s = "" then (k, "") - else if size s < 40 andalso not (exists_string (fn c => c = "\n") s) - then (k ^ " " ^ Markup.markup m s, "") - else (k, Markup.markup m s) - end; - - - -(** associated values **) - -(* inlined file content *) - -fun get_files (Token (_, _, Value (SOME (Files files)))) = files - | get_files _ = []; - -fun put_files [] tok = tok - | put_files files (Token (x, y, Slot)) = Token (x, y, Value (SOME (Files files))) - | put_files _ tok = raise Fail ("Cannot put inlined files here" ^ Position.here (pos_of tok)); - - -(* access values *) - -fun get_value (Token (_, _, Value v)) = v - | get_value _ = NONE; - -fun map_value f (Token (x, y, Value (SOME v))) = Token (x, y, Value (SOME (f v))) - | map_value _ tok = tok; - -fun reports_of_value tok = - (case get_value tok of - SOME (Literal markup) => - let - val pos = pos_of tok; - val x = content_of tok; - in - if Position.is_reported pos then - map (pair pos) (keyword_markup markup x :: Completion.suppress_abbrevs x) - else [] - end - | _ => []); - - -(* make values *) - -fun mk_value k v = Token ((k, Position.no_range), (InternalValue, k), Value (SOME v)); - -val mk_text = 
mk_value "" o Text; -val mk_typ = mk_value "" o Typ; -val mk_term = mk_value "" o Term; -val mk_fact = mk_value "" o Fact; -val mk_attribute = mk_value "" o Attribute; - - -(* static binding *) - -(*1st stage: initialize assignable slots*) -fun init_assignable (Token (x, y, Slot)) = Token (x, y, Assignable (Unsynchronized.ref NONE)) - | init_assignable (tok as Token (_, _, Assignable r)) = (r := NONE; tok) - | init_assignable tok = tok; - -(*2nd stage: assign values as side-effect of scanning*) -fun assign v (Token (_, _, Assignable r)) = r := v - | assign _ _ = (); - -(*3rd stage: static closure of final values*) -fun closure (Token (x, y, Assignable (Unsynchronized.ref v))) = Token (x, y, Value v) - | closure tok = tok; - - - -(** scanners **) - -open Basic_Symbol_Pos; - -val err_prefix = "Outer lexical error: "; - -fun !!! msg = Symbol_Pos.!!! (fn () => err_prefix ^ msg); - - -(* scan symbolic idents *) - -val scan_symid = - Scan.many1 (Symbol.is_symbolic_char o Symbol_Pos.symbol) || - Scan.one (Symbol.is_symbolic o Symbol_Pos.symbol) >> single; - -fun is_symid str = - (case try Symbol.explode str of - SOME [s] => Symbol.is_symbolic s orelse Symbol.is_symbolic_char s - | SOME ss => forall Symbol.is_symbolic_char ss - | _ => false); - -fun ident_or_symbolic "begin" = false - | ident_or_symbolic ":" = true - | ident_or_symbolic "::" = true - | ident_or_symbolic s = Symbol_Pos.is_identifier s orelse is_symid s; - - -(* scan verbatim text *) - -val scan_verb = - $$$ "*" --| Scan.ahead (~$$ "}") || - Scan.one (fn (s, _) => s <> "*" andalso Symbol.is_regular s) >> single; - -val scan_verbatim = - Scan.ahead ($$ "{" -- $$ "*") |-- - !!! "unclosed verbatim text" - ((Symbol_Pos.scan_pos --| $$ "{" --| $$ "*") -- - Symbol_Pos.change_prompt - ((Scan.repeat scan_verb >> flat) -- ($$ "*" |-- $$ "}" |-- Symbol_Pos.scan_pos))); - -val recover_verbatim = - $$$ "{" @@@ $$$ "*" @@@ (Scan.repeat scan_verb >> flat); - - -(* scan cartouche *) - -val scan_cartouche = - Symbol_Pos.scan_pos -- - ((Symbol_Pos.scan_cartouche err_prefix >> Symbol_Pos.cartouche_content) -- Symbol_Pos.scan_pos); - - -(* scan space *) - -fun space_symbol (s, _) = Symbol.is_blank s andalso s <> "\n"; - -val scan_space = - Scan.many1 space_symbol @@@ Scan.optional ($$$ "\n") [] || - Scan.many space_symbol @@@ $$$ "\n"; - - -(* scan comment *) - -val scan_comment = - Symbol_Pos.scan_pos -- (Symbol_Pos.scan_comment_body err_prefix -- Symbol_Pos.scan_pos); - - - -(** token sources **) - -fun source_proper src = src |> Source.filter is_proper; - -local - -fun token_leq ((_, syms1), (_, syms2)) = length syms1 <= length syms2; - -fun token k ss = - Token ((Symbol_Pos.implode ss, Symbol_Pos.range ss), (k, Symbol_Pos.content ss), Slot); - -fun token_range k (pos1, (ss, pos2)) = - Token (Symbol_Pos.implode_range pos1 pos2 ss, (k, Symbol_Pos.content ss), Slot); - -fun scan (lex1, lex2) = !!! 
"bad input" - (Symbol_Pos.scan_string_qq err_prefix >> token_range String || - Symbol_Pos.scan_string_bq err_prefix >> token_range AltString || - scan_verbatim >> token_range Verbatim || - scan_cartouche >> token_range Cartouche || - scan_comment >> token_range Comment || - scan_space >> token Space || - Scan.one (Symbol.is_sync o Symbol_Pos.symbol) >> (token Sync o single) || - (Scan.max token_leq - (Scan.max token_leq - (Scan.literal lex2 >> pair Command) - (Scan.literal lex1 >> pair Keyword)) - (Lexicon.scan_longid >> pair LongIdent || - Lexicon.scan_id >> pair Ident || - Lexicon.scan_var >> pair Var || - Lexicon.scan_tid >> pair TypeIdent || - Lexicon.scan_tvar >> pair TypeVar || - Lexicon.scan_float >> pair Float || - Lexicon.scan_nat >> pair Nat || - scan_symid >> pair SymIdent) >> uncurry token)); - -fun recover msg = - (Symbol_Pos.recover_string_qq || - Symbol_Pos.recover_string_bq || - recover_verbatim || - Symbol_Pos.recover_cartouche || - Symbol_Pos.recover_comment || - Scan.one (Symbol.is_regular o Symbol_Pos.symbol) >> single) - >> (single o token (Error msg)); - -in - -fun source' {do_recover} get_lex = - Source.source Symbol_Pos.stopper (Scan.bulk (fn xs => scan (get_lex ()) xs)) - (Option.map (rpair recover) do_recover); - -fun source do_recover get_lex pos src = - Symbol_Pos.source pos src - |> source' do_recover get_lex; - -end; - - -(* read_antiq *) - -fun read_antiq lex scan (syms, pos) = - let - fun err msg = cat_error msg ("Malformed antiquotation" ^ Position.here pos ^ ":\n" ^ - "@{" ^ Symbol_Pos.content syms ^ "}"); - - val res = - Source.of_list syms - |> source' {do_recover = NONE} (K (lex, Scan.empty_lexicon)) - |> source_proper - |> Source.source stopper (Scan.error (Scan.bulk scan)) NONE - |> Source.exhaust; - in (case res of [x] => x | _ => err "") handle ERROR msg => err msg end; - -end; diff --git a/core/Pure/Isar/token.scala b/core/Pure/Isar/token.scala deleted file mode 100644 index 847123b3..00000000 --- a/core/Pure/Isar/token.scala +++ /dev/null @@ -1,207 +0,0 @@ -/* Title: Pure/Isar/token.scala - Author: Makarius - -Outer token syntax for Isabelle/Isar. 
-*/ - -package isabelle - - -object Token -{ - /* tokens */ - - object Kind extends Enumeration - { - val COMMAND = Value("command") - val KEYWORD = Value("keyword") - val IDENT = Value("identifier") - val LONG_IDENT = Value("long identifier") - val SYM_IDENT = Value("symbolic identifier") - val VAR = Value("schematic variable") - val TYPE_IDENT = Value("type variable") - val TYPE_VAR = Value("schematic type variable") - val NAT = Value("natural number") - val FLOAT = Value("floating-point number") - val STRING = Value("string") - val ALT_STRING = Value("back-quoted string") - val VERBATIM = Value("verbatim text") - val CARTOUCHE = Value("text cartouche") - val SPACE = Value("white space") - val COMMENT = Value("comment text") - val ERROR = Value("bad input") - val UNPARSED = Value("unparsed input") - } - - - /* parsers */ - - object Parsers extends Parsers - - trait Parsers extends Scan.Parsers - { - private def delimited_token: Parser[Token] = - { - val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x)) - val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x)) - val verb = verbatim ^^ (x => Token(Token.Kind.VERBATIM, x)) - val cart = cartouche ^^ (x => Token(Token.Kind.CARTOUCHE, x)) - val cmt = comment ^^ (x => Token(Token.Kind.COMMENT, x)) - - string | (alt_string | (verb | (cart | cmt))) - } - - private def other_token(lexicon: Scan.Lexicon, is_command: String => Boolean) - : Parser[Token] = - { - val letdigs1 = many1(Symbol.is_letdig) - val sub = one(s => s == Symbol.sub_decoded || s == "\\<^sub>") - val id = - one(Symbol.is_letter) ~ - (rep(letdigs1 | (sub ~ letdigs1 ^^ { case x ~ y => x + y })) ^^ (_.mkString)) ^^ - { case x ~ y => x + y } - - val nat = many1(Symbol.is_digit) - val natdot = nat ~ "." ~ nat ^^ { case x ~ y ~ z => x + y + z } - val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x } - - val ident = id ~ rep("." ~> id) ^^ - { case x ~ Nil => Token(Token.Kind.IDENT, x) - case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) } - - val var_ = "?" 
~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) } - val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) } - val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) } - val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x)) - val float = - ("-" ~ natdot ^^ { case x ~ y => x + y } | natdot) ^^ (x => Token(Token.Kind.FLOAT, x)) - - val sym_ident = - (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^ - (x => Token(Token.Kind.SYM_IDENT, x)) - - val command_keyword = - literal(lexicon) ^^ - (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x)) - - val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x)) - - val recover_delimited = - (recover_quoted("\"") | - (recover_quoted("`") | - (recover_verbatim | - (recover_cartouche | recover_comment)))) ^^ (x => Token(Token.Kind.ERROR, x)) - - val bad = one(_ => true) ^^ (x => Token(Token.Kind.ERROR, x)) - - space | (recover_delimited | - (((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) ||| - command_keyword) | bad)) - } - - def token(lexicon: Scan.Lexicon, is_command: String => Boolean): Parser[Token] = - delimited_token | other_token(lexicon, is_command) - - def token_line(lexicon: Scan.Lexicon, is_command: String => Boolean, ctxt: Scan.Line_Context) - : Parser[(Token, Scan.Line_Context)] = - { - val string = - quoted_line("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) } - val alt_string = - quoted_line("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) } - val verb = verbatim_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) } - val cart = cartouche_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) } - val cmt = comment_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) } - val other = other_token(lexicon, is_command) ^^ { case x => (x, Scan.Finished) } - - string | (alt_string | (verb | (cart | (cmt | other)))) - } - } - - - /* token reader */ - - object Pos - { - val none: Pos = new Pos(0, "") - } - - final class Pos private[Token](val line: Int, val file: String) - extends scala.util.parsing.input.Position - { - def column = 0 - def lineContents = "" - - def advance(token: Token): Pos = - { - var n = 0 - for (c <- token.content if c == '\n') n += 1 - if (n == 0) this else new Pos(line + n, file) - } - - def position: Position.T = Position.Line_File(line, file) - override def toString: String = Position.here_undelimited(position) - } - - abstract class Reader extends scala.util.parsing.input.Reader[Token] - - private class Token_Reader(tokens: List[Token], val pos: Pos) extends Reader - { - def first = tokens.head - def rest = new Token_Reader(tokens.tail, pos.advance(first)) - def atEnd = tokens.isEmpty - } - - def reader(tokens: List[Token], file: String = ""): Reader = - new Token_Reader(tokens, new Pos(1, file)) -} - - -sealed case class Token(val kind: Token.Kind.Value, val source: String) -{ - def is_command: Boolean = kind == Token.Kind.COMMAND - def is_keyword: Boolean = kind == Token.Kind.KEYWORD - def is_delimiter: Boolean = is_keyword && !Symbol.is_ascii_identifier(source) - def is_ident: Boolean = kind == Token.Kind.IDENT - def is_sym_ident: Boolean = kind == Token.Kind.SYM_IDENT - def is_string: Boolean = kind == Token.Kind.STRING - def is_nat: Boolean = kind == Token.Kind.NAT - def is_float: Boolean = kind == Token.Kind.FLOAT - def is_name: Boolean = - kind == Token.Kind.IDENT || - kind == 
Token.Kind.SYM_IDENT || - kind == Token.Kind.STRING || - kind == Token.Kind.NAT - def is_xname: Boolean = is_name || kind == Token.Kind.LONG_IDENT - def is_text: Boolean = is_xname || kind == Token.Kind.VERBATIM || kind == Token.Kind.CARTOUCHE - def is_space: Boolean = kind == Token.Kind.SPACE - def is_comment: Boolean = kind == Token.Kind.COMMENT - def is_improper: Boolean = is_space || is_comment - def is_proper: Boolean = !is_space && !is_comment - def is_error: Boolean = kind == Token.Kind.ERROR - def is_unparsed: Boolean = kind == Token.Kind.UNPARSED - - def is_unfinished: Boolean = is_error && - (source.startsWith("\"") || - source.startsWith("`") || - source.startsWith("{*") || - source.startsWith("(*") || - source.startsWith(Symbol.open) || - source.startsWith(Symbol.open_decoded)) - - def is_begin: Boolean = is_keyword && source == "begin" - def is_end: Boolean = is_keyword && source == "end" - - def content: String = - if (kind == Token.Kind.STRING) Scan.Parsers.quoted_content("\"", source) - else if (kind == Token.Kind.ALT_STRING) Scan.Parsers.quoted_content("`", source) - else if (kind == Token.Kind.VERBATIM) Scan.Parsers.verbatim_content(source) - else if (kind == Token.Kind.CARTOUCHE) Scan.Parsers.cartouche_content(source) - else if (kind == Token.Kind.COMMENT) Scan.Parsers.comment_content(source) - else source - - def text: (String, String) = - if (is_keyword && source == ";") ("terminator", "") - else (kind.toString, source) -} - diff --git a/core/Pure/Isar/toplevel.ML b/core/Pure/Isar/toplevel.ML deleted file mode 100644 index 70c09725..00000000 --- a/core/Pure/Isar/toplevel.ML +++ /dev/null @@ -1,762 +0,0 @@ -(* Title: Pure/Isar/toplevel.ML - Author: Markus Wenzel, TU Muenchen - -Isabelle/Isar toplevel transactions. -*) - -signature TOPLEVEL = -sig - exception UNDEF - type state - val toplevel: state - val is_toplevel: state -> bool - val is_theory: state -> bool - val is_proof: state -> bool - val is_skipped_proof: state -> bool - val level: state -> int - val presentation_context_of: state -> Proof.context - val previous_context_of: state -> Proof.context option - val context_of: state -> Proof.context - val generic_theory_of: state -> generic_theory - val theory_of: state -> theory - val proof_of: state -> Proof.state - val proof_position_of: state -> int - val end_theory: Position.T -> state -> theory - val pretty_context: state -> Pretty.T list - val pretty_state: state -> Pretty.T list - val print_state: state -> unit - val pretty_abstract: state -> Pretty.T - val quiet: bool Unsynchronized.ref - val interact: bool Unsynchronized.ref - val timing: bool Unsynchronized.ref - val profiling: int Unsynchronized.ref - type transition - val empty: transition - val name_of: transition -> string - val pos_of: transition -> Position.T - val type_error: transition -> state -> string - val name: string -> transition -> transition - val position: Position.T -> transition -> transition - val interactive: bool -> transition -> transition - val init_theory: (unit -> theory) -> transition -> transition - val is_init: transition -> bool - val modify_init: (unit -> theory) -> transition -> transition - val exit: transition -> transition - val keep: (state -> unit) -> transition -> transition - val keep': (bool -> state -> unit) -> transition -> transition - val imperative: (unit -> unit) -> transition -> transition - val ignored: Position.T -> transition - val is_ignored: transition -> bool - val malformed: Position.T -> string -> transition - val is_malformed: transition -> bool - 
val generic_theory: (generic_theory -> generic_theory) -> transition -> transition - val theory': (bool -> theory -> theory) -> transition -> transition - val theory: (theory -> theory) -> transition -> transition - val begin_local_theory: bool -> (theory -> local_theory) -> transition -> transition - val end_local_theory: transition -> transition - val open_target: (generic_theory -> local_theory) -> transition -> transition - val close_target: transition -> transition - val local_theory': (xstring * Position.T) option -> (bool -> local_theory -> local_theory) -> - transition -> transition - val local_theory: (xstring * Position.T) option -> (local_theory -> local_theory) -> - transition -> transition - val present_local_theory: (xstring * Position.T) option -> (state -> unit) -> - transition -> transition - val local_theory_to_proof': (xstring * Position.T) option -> - (bool -> local_theory -> Proof.state) -> transition -> transition - val local_theory_to_proof: (xstring * Position.T) option -> - (local_theory -> Proof.state) -> transition -> transition - val theory_to_proof: (theory -> Proof.state) -> transition -> transition - val end_proof: (bool -> Proof.state -> Proof.context) -> transition -> transition - val forget_proof: transition -> transition - val present_proof: (state -> unit) -> transition -> transition - val proofs': (bool -> Proof.state -> Proof.state Seq.result Seq.seq) -> transition -> transition - val proof': (bool -> Proof.state -> Proof.state) -> transition -> transition - val proofs: (Proof.state -> Proof.state Seq.result Seq.seq) -> transition -> transition - val proof: (Proof.state -> Proof.state) -> transition -> transition - val actual_proof: (Proof_Node.T -> Proof_Node.T) -> transition -> transition - val skip_proof: (int -> int) -> transition -> transition - val skip_proof_to_theory: (int -> bool) -> transition -> transition - val exec_id: Document_ID.exec -> transition -> transition - val unknown_theory: transition -> transition - val unknown_proof: transition -> transition - val unknown_context: transition -> transition - val setmp_thread_position: transition -> ('a -> 'b) -> 'a -> 'b - val add_hook: (transition -> state -> state -> unit) -> unit - val get_timing: transition -> Time.time option - val put_timing: Time.time option -> transition -> transition - val transition: bool -> transition -> state -> (state * (exn * string) option) option - val command_errors: bool -> transition -> state -> Runtime.error list * state option - val command_exception: bool -> transition -> state -> state - val reset_theory: state -> state option - val reset_proof: state -> state option - type result - val join_results: result -> (transition * state) list - val element_result: transition Thy_Syntax.element -> state -> result * state -end; - -structure Toplevel: TOPLEVEL = -struct - -(** toplevel state **) - -exception UNDEF = Runtime.UNDEF; - - -(* datatype node *) - -datatype node = - Theory of generic_theory * Proof.context option - (*theory with presentation context*) | - Proof of Proof_Node.T * ((Proof.context -> generic_theory) * generic_theory) - (*proof node, finish, original theory*) | - Skipped_Proof of int * (generic_theory * generic_theory); - (*proof depth, resulting theory, original theory*) - -val theory_node = fn Theory (gthy, _) => SOME gthy | _ => NONE; -val proof_node = fn Proof (prf, _) => SOME prf | _ => NONE; -val skipped_proof_node = fn Skipped_Proof _ => true | _ => false; - -fun cases_node f _ (Theory (gthy, _)) = f gthy - | cases_node _ g (Proof 
(prf, _)) = g (Proof_Node.current prf) - | cases_node f _ (Skipped_Proof (_, (gthy, _))) = f gthy; - -val context_node = cases_node Context.proof_of Proof.context_of; - - -(* datatype state *) - -datatype state = State of node option * node option; (*current, previous*) - -val toplevel = State (NONE, NONE); - -fun is_toplevel (State (NONE, _)) = true - | is_toplevel _ = false; - -fun level (State (NONE, _)) = 0 - | level (State (SOME (Theory _), _)) = 0 - | level (State (SOME (Proof (prf, _)), _)) = Proof.level (Proof_Node.current prf) - | level (State (SOME (Skipped_Proof (d, _)), _)) = d + 1; (*different notion of proof depth!*) - -fun str_of_state (State (NONE, SOME (Theory (Context.Theory thy, _)))) = - "at top level, result theory " ^ quote (Context.theory_name thy) - | str_of_state (State (NONE, _)) = "at top level" - | str_of_state (State (SOME (Theory (Context.Theory _, _)), _)) = "in theory mode" - | str_of_state (State (SOME (Theory (Context.Proof _, _)), _)) = "in local theory mode" - | str_of_state (State (SOME (Proof _), _)) = "in proof mode" - | str_of_state (State (SOME (Skipped_Proof _), _)) = "in skipped proof mode"; - - -(* current node *) - -fun node_of (State (NONE, _)) = raise UNDEF - | node_of (State (SOME node, _)) = node; - -fun is_theory state = not (is_toplevel state) andalso is_some (theory_node (node_of state)); -fun is_proof state = not (is_toplevel state) andalso is_some (proof_node (node_of state)); -fun is_skipped_proof state = not (is_toplevel state) andalso skipped_proof_node (node_of state); - -fun node_case f g state = cases_node f g (node_of state); - -fun presentation_context_of state = - (case try node_of state of - SOME (Theory (_, SOME ctxt)) => ctxt - | SOME node => context_node node - | NONE => raise UNDEF); - -fun previous_context_of (State (_, NONE)) = NONE - | previous_context_of (State (_, SOME prev)) = SOME (context_node prev); - -val context_of = node_case Context.proof_of Proof.context_of; -val generic_theory_of = node_case I (Context.Proof o Proof.context_of); -val theory_of = node_case Context.theory_of Proof.theory_of; -val proof_of = node_case (fn _ => raise UNDEF) I; - -fun proof_position_of state = - (case node_of state of - Proof (prf, _) => Proof_Node.position prf - | _ => raise UNDEF); - -fun end_theory _ (State (NONE, SOME (Theory (Context.Theory thy, _)))) = thy - | end_theory pos (State (NONE, _)) = error ("Bad theory" ^ Position.here pos) - | end_theory pos (State (SOME _, _)) = error ("Unfinished theory" ^ Position.here pos); - - -(* print state *) - -fun pretty_context state = - (case try node_of state of - NONE => [] - | SOME node => - let - val gthy = - (case node of - Theory (gthy, _) => gthy - | Proof (_, (_, gthy)) => gthy - | Skipped_Proof (_, (gthy, _)) => gthy); - val lthy = Context.cases (Named_Target.theory_init) I gthy; - in Local_Theory.pretty lthy end); - -fun pretty_state state = - (case try node_of state of - NONE => [] - | SOME (Theory _) => [] - | SOME (Proof (prf, _)) => - Proof.pretty_state (Proof_Node.position prf) (Proof_Node.current prf) - | SOME (Skipped_Proof (d, _)) => [Pretty.str ("skipped proof: depth " ^ string_of_int d)]); - -val print_state = pretty_state #> Pretty.markup_chunks Markup.state #> Pretty.writeln; - -fun pretty_abstract state = Pretty.str (""); - - - -(** toplevel transitions **) - -val quiet = Unsynchronized.ref false; (*Proof General legacy*) -val interact = Unsynchronized.ref false; (*Proof General legacy*) -val timing = Unsynchronized.ref false; (*Proof General legacy*) -val 
profiling = Unsynchronized.ref 0; - - -(* node transactions -- maintaining stable checkpoints *) - -exception FAILURE of state * exn; - -local - -fun reset_presentation (Theory (gthy, _)) = Theory (gthy, NONE) - | reset_presentation node = node; - -fun map_theory f (Theory (gthy, ctxt)) = - Theory (Context.mapping f (Local_Theory.raw_theory f) gthy, ctxt) - | map_theory _ node = node; - -in - -fun apply_transaction f g node = - let - val cont_node = reset_presentation node; - val context = cases_node I (Context.Proof o Proof.context_of) cont_node; - fun state_error e nd = (State (SOME nd, SOME node), e); - - val (result, err) = - cont_node - |> Runtime.controlled_execution (SOME context) f - |> state_error NONE - handle exn => state_error (SOME exn) cont_node; - in - (case err of - NONE => tap g result - | SOME exn => raise FAILURE (result, exn)) - end; - -val exit_transaction = - apply_transaction - (fn Theory (Context.Theory thy, _) => Theory (Context.Theory (Theory.end_theory thy), NONE) - | node => node) (K ()) - #> (fn State (node', _) => State (NONE, node')); - -end; - - -(* primitive transitions *) - -datatype trans = - Init of unit -> theory | (*init theory*) - Exit | (*formal exit of theory*) - Keep of bool -> state -> unit | (*peek at state*) - Transaction of (bool -> node -> node) * (state -> unit); (*node transaction and presentation*) - -local - -fun apply_tr _ (Init f) (State (NONE, _)) = - State (SOME (Theory (Context.Theory (Runtime.controlled_execution NONE f ()), NONE)), NONE) - | apply_tr _ Exit (State (SOME (state as Theory (Context.Theory _, _)), _)) = - exit_transaction state - | apply_tr int (Keep f) state = - Runtime.controlled_execution (try generic_theory_of state) (fn x => tap (f int) x) state - | apply_tr int (Transaction (f, g)) (State (SOME state, _)) = - apply_transaction (fn x => f int x) g state - | apply_tr _ _ _ = raise UNDEF; - -fun apply_union _ [] state = raise FAILURE (state, UNDEF) - | apply_union int (tr :: trs) state = - apply_union int trs state - handle Runtime.UNDEF => apply_tr int tr state - | FAILURE (alt_state, UNDEF) => apply_tr int tr alt_state - | exn as FAILURE _ => raise exn - | exn => raise FAILURE (state, exn); - -in - -fun apply_trans int trs state = (apply_union int trs state, NONE) - handle FAILURE (alt_state, exn) => (alt_state, SOME exn) | exn => (state, SOME exn); - -end; - - -(* datatype transition *) - -datatype transition = Transition of - {name: string, (*command name*) - pos: Position.T, (*source position*) - int_only: bool, (*interactive-only*) (* TTY / Proof General legacy*) - timing: Time.time option, (*prescient timing information*) - trans: trans list}; (*primitive transitions (union)*) - -fun make_transition (name, pos, int_only, timing, trans) = - Transition {name = name, pos = pos, int_only = int_only, - timing = timing, trans = trans}; - -fun map_transition f (Transition {name, pos, int_only, timing, trans}) = - make_transition (f (name, pos, int_only, timing, trans)); - -val empty = make_transition ("", Position.none, false, NONE, []); - - -(* diagnostics *) - -fun name_of (Transition {name, ...}) = name; -fun pos_of (Transition {pos, ...}) = pos; - -fun command_msg msg tr = msg ^ "command " ^ quote (name_of tr) ^ Position.here (pos_of tr); -fun at_command tr = command_msg "At " tr; - -fun type_error tr state = - command_msg "Illegal application of " tr ^ " " ^ str_of_state state; - - -(* modify transitions *) - -fun name name = map_transition (fn (_, pos, int_only, timing, trans) => - (name, pos, int_only, timing, 
trans)); - -fun position pos = map_transition (fn (name, _, int_only, timing, trans) => - (name, pos, int_only, timing, trans)); - -fun interactive int_only = map_transition (fn (name, pos, _, timing, trans) => - (name, pos, int_only, timing, trans)); - -fun add_trans tr = map_transition (fn (name, pos, int_only, timing, trans) => - (name, pos, int_only, timing, tr :: trans)); - -val reset_trans = map_transition (fn (name, pos, int_only, timing, _) => - (name, pos, int_only, timing, [])); - - -(* basic transitions *) - -fun init_theory f = add_trans (Init f); - -fun is_init (Transition {trans = [Init _], ...}) = true - | is_init _ = false; - -fun modify_init f tr = if is_init tr then init_theory f (reset_trans tr) else tr; - -val exit = add_trans Exit; -val keep' = add_trans o Keep; - -fun present_transaction f g = add_trans (Transaction (f, g)); -fun transaction f = present_transaction f (K ()); - -fun keep f = add_trans (Keep (fn _ => f)); -fun imperative f = keep (fn _ => f ()); - -fun ignored pos = empty |> name "" |> position pos |> imperative I; -fun is_ignored tr = name_of tr = ""; - -val malformed_name = ""; -fun malformed pos msg = - empty |> name malformed_name |> position pos |> imperative (fn () => error msg); -fun is_malformed tr = name_of tr = malformed_name; - -val unknown_theory = imperative (fn () => warning "Unknown theory context"); -val unknown_proof = imperative (fn () => warning "Unknown proof context"); -val unknown_context = imperative (fn () => warning "Unknown context"); - - -(* theory transitions *) - -fun generic_theory f = transaction (fn _ => - (fn Theory (gthy, _) => Theory (f gthy, NONE) - | _ => raise UNDEF)); - -fun theory' f = transaction (fn int => - (fn Theory (Context.Theory thy, _) => - let val thy' = thy - |> Sign.new_group - |> f int - |> Sign.reset_group; - in Theory (Context.Theory thy', NONE) end - | _ => raise UNDEF)); - -fun theory f = theory' (K f); - -fun begin_local_theory begin f = transaction (fn _ => - (fn Theory (Context.Theory thy, _) => - let - val lthy = f thy; - val gthy = if begin then Context.Proof lthy else Context.Theory (Named_Target.exit lthy); - val _ = - if begin then - Pretty.writeln (Pretty.mark Markup.state (Pretty.chunks (Local_Theory.pretty lthy))) - else (); - in Theory (gthy, SOME lthy) end - | _ => raise UNDEF)); - -val end_local_theory = transaction (fn _ => - (fn Theory (Context.Proof lthy, _) => Theory (Context.Theory (Named_Target.exit lthy), SOME lthy) - | _ => raise UNDEF)); - -fun open_target f = transaction (fn _ => - (fn Theory (gthy, _) => - let val lthy = f gthy - in Theory (Context.Proof lthy, SOME lthy) end - | _ => raise UNDEF)); - -val close_target = transaction (fn _ => - (fn Theory (Context.Proof lthy, _) => - (case try Local_Theory.close_target lthy of - SOME ctxt' => - let - val gthy' = - if can Local_Theory.assert ctxt' - then Context.Proof ctxt' - else Context.Theory (Proof_Context.theory_of ctxt'); - in Theory (gthy', SOME lthy) end - | NONE => raise UNDEF) - | _ => raise UNDEF)); - - -local - -fun local_theory_presentation loc f = present_transaction (fn int => - (fn Theory (gthy, _) => - let - val (finish, lthy) = Named_Target.switch loc gthy; - val lthy' = lthy - |> Local_Theory.new_group - |> f int - |> Local_Theory.reset_group; - in Theory (finish lthy', SOME lthy') end - | _ => raise UNDEF)); - -in - -fun local_theory' loc f = local_theory_presentation loc f (K ()); -fun local_theory loc f = local_theory' loc (K f); -fun present_local_theory loc = local_theory_presentation loc (K I); - 
-end; - - -(* proof transitions *) - -fun end_proof f = transaction (fn int => - (fn Proof (prf, (finish, _)) => - let val state = Proof_Node.current prf in - if can (Proof.assert_bottom true) state then - let - val ctxt' = f int state; - val gthy' = finish ctxt'; - in Theory (gthy', SOME ctxt') end - else raise UNDEF - end - | Skipped_Proof (0, (gthy, _)) => Theory (gthy, NONE) - | _ => raise UNDEF)); - -local - -fun begin_proof init = transaction (fn int => - (fn Theory (gthy, _) => - let - val (finish, prf) = init int gthy; - val skip = Goal.skip_proofs_enabled (); - val (is_goal, no_skip) = - (true, Proof.schematic_goal prf) handle ERROR _ => (false, true); - val _ = - if is_goal andalso skip andalso no_skip then - warning "Cannot skip proof of schematic goal statement" - else (); - in - if skip andalso not no_skip then - Skipped_Proof (0, (finish (Proof.global_skip_proof true prf), gthy)) - else Proof (Proof_Node.init prf, (finish, gthy)) - end - | _ => raise UNDEF)); - -in - -fun local_theory_to_proof' loc f = begin_proof - (fn int => fn gthy => - let val (finish, lthy) = Named_Target.switch loc gthy - in (finish o Local_Theory.reset_group, f int (Local_Theory.new_group lthy)) end); - -fun local_theory_to_proof loc f = local_theory_to_proof' loc (K f); - -fun theory_to_proof f = begin_proof - (fn _ => fn gthy => - (Context.Theory o Sign.reset_group o Sign.change_check o Proof_Context.theory_of, - (case gthy of - Context.Theory thy => f (Sign.new_group thy) - | _ => raise UNDEF))); - -end; - -val forget_proof = transaction (fn _ => - (fn Proof (_, (_, orig_gthy)) => Theory (orig_gthy, NONE) - | Skipped_Proof (_, (_, orig_gthy)) => Theory (orig_gthy, NONE) - | _ => raise UNDEF)); - -val present_proof = present_transaction (fn _ => - (fn Proof (prf, x) => Proof (Proof_Node.apply I prf, x) - | skip as Skipped_Proof _ => skip - | _ => raise UNDEF)); - -fun proofs' f = transaction (fn int => - (fn Proof (prf, x) => Proof (Proof_Node.applys (f int) prf, x) - | skip as Skipped_Proof _ => skip - | _ => raise UNDEF)); - -fun proof' f = proofs' ((Seq.single o Seq.Result) oo f); -val proofs = proofs' o K; -val proof = proof' o K; - -fun actual_proof f = transaction (fn _ => - (fn Proof (prf, x) => Proof (f prf, x) - | _ => raise UNDEF)); - -(*Proof General legacy*) -fun skip_proof f = transaction (fn _ => - (fn Skipped_Proof (h, x) => Skipped_Proof (f h, x) - | _ => raise UNDEF)); - -(*Proof General legacy*) -fun skip_proof_to_theory pred = transaction (fn _ => - (fn Skipped_Proof (d, (gthy, _)) => if pred d then Theory (gthy, NONE) else raise UNDEF - | _ => raise UNDEF)); - - - -(** toplevel transactions **) - -(* runtime position *) - -fun exec_id id (tr as Transition {pos, ...}) = - position (Position.put_id (Document_ID.print id) pos) tr; - -fun setmp_thread_position (Transition {pos, ...}) f x = - Position.setmp_thread_data pos f x; - - -(* post-transition hooks *) - -local - val hooks = - Synchronized.var "Toplevel.hooks" ([]: (transition -> state -> state -> unit) list); -in - -fun add_hook hook = Synchronized.change hooks (cons hook); -fun get_hooks () = Synchronized.value hooks; - -end; - - -(* apply transitions *) - -fun get_timing (Transition {timing, ...}) = timing; -fun put_timing timing = map_transition (fn (name, pos, int_only, _, trans) => - (name, pos, int_only, timing, trans)); - -local - -fun app int (tr as Transition {name, trans, ...}) = - setmp_thread_position tr (fn state => - let - val timing_start = Timing.start (); - - val (result, opt_err) = - state |> (apply_trans int 
trans |> ! profiling > 0 ? profile (! profiling)); - - val _ = - if int andalso not (! quiet) andalso Keyword.is_printed name - then print_state result else (); - - val timing_result = Timing.result timing_start; - val timing_props = - Markup.command_timing :: (Markup.nameN, name_of tr) :: Position.properties_of (pos_of tr); - val _ = Timing.protocol_message timing_props timing_result; - in - (result, Option.map (fn UNDEF => ERROR (type_error tr state) | exn => exn) opt_err) - end); - -in - -fun transition int tr st = - let - val hooks = get_hooks (); - fun apply_hooks st' = hooks |> List.app (fn f => (try (fn () => f tr st st') (); ())); - - val ctxt = try context_of st; - val res = - (case app int tr st of - (_, SOME Runtime.TERMINATE) => NONE - | (st', SOME (Runtime.EXCURSION_FAIL exn_info)) => SOME (st', SOME exn_info) - | (st', SOME exn) => SOME (st', SOME (Runtime.exn_context ctxt exn, at_command tr)) - | (st', NONE) => SOME (st', NONE)); - val _ = (case res of SOME (st', NONE) => apply_hooks st' | _ => ()); - in res end; - -end; - - -(* managed commands *) - -fun command_errors int tr st = - (case transition int tr st of - SOME (st', NONE) => ([], SOME st') - | SOME (_, SOME (exn, _)) => (Runtime.exn_messages_ids exn, NONE) - | NONE => (Runtime.exn_messages_ids Runtime.TERMINATE, NONE)); - -fun command_exception int tr st = - (case transition int tr st of - SOME (st', NONE) => st' - | SOME (_, SOME (exn, info)) => - if Exn.is_interrupt exn then reraise exn else raise Runtime.EXCURSION_FAIL (exn, info) - | NONE => raise Runtime.EXCURSION_FAIL (Runtime.TERMINATE, at_command tr)); - -fun command tr = command_exception (! interact) tr; - - -(* reset state *) - -local - -fun reset_state check trans st = - if check st then NONE - else #2 (command_errors false (trans empty) st); - -in - -val reset_theory = reset_state is_theory forget_proof; - -val reset_proof = - reset_state is_proof - (transaction (fn _ => - (fn Theory (gthy, _) => Skipped_Proof (0, (gthy, gthy)) - | _ => raise UNDEF))); - -end; - - -(* scheduled proof result *) - -datatype result = - Result of transition * state | - Result_List of result list | - Result_Future of result future; - -fun join_results (Result x) = [x] - | join_results (Result_List xs) = maps join_results xs - | join_results (Result_Future x) = join_results (Future.join x); - -local - -structure Result = Proof_Data -( - type T = result; - val empty: T = Result_List []; - fun init _ = empty; -); - -val get_result = Result.get o Proof.context_of; -val put_result = Proof.map_context o Result.put; - -fun timing_estimate include_head elem = - let - val trs = Thy_Syntax.flat_element elem |> not include_head ? 
tl; - val timings = map get_timing trs; - in - if forall is_some timings then - SOME (fold (curry Time.+ o the) timings Time.zeroTime) - else NONE - end; - -fun priority NONE = ~1 - | priority (SOME estimate) = - Int.min (Real.floor (Real.max (Math.log10 (Time.toReal estimate), ~3.0)) - 3, ~1); - -fun proof_future_enabled estimate st = - (case try proof_of st of - NONE => false - | SOME state => - not (Proof.is_relevant state) andalso - (if can (Proof.assert_bottom true) state - then Goal.future_enabled 1 - else - (case estimate of - NONE => Goal.future_enabled 2 - | SOME t => Goal.future_enabled_timing t))); - -fun atom_result tr st = - let - val st' = - if Goal.future_enabled 1 andalso Keyword.is_diag (name_of tr) then - (Execution.fork - {name = "Toplevel.diag", pos = pos_of tr, - pri = priority (timing_estimate true (Thy_Syntax.atom tr))} - (fn () => command tr st); st) - else command tr st; - in (Result (tr, st'), st') end; - -in - -fun element_result (Thy_Syntax.Element (tr, NONE)) st = atom_result tr st - | element_result (elem as Thy_Syntax.Element (head_tr, SOME element_rest)) st = - let - val (head_result, st') = atom_result head_tr st; - val (body_elems, end_tr) = element_rest; - val estimate = timing_estimate false elem; - in - if not (proof_future_enabled estimate st') - then - let - val proof_trs = maps Thy_Syntax.flat_element body_elems @ [end_tr]; - val (proof_results, st'') = fold_map atom_result proof_trs st'; - in (Result_List (head_result :: proof_results), st'') end - else - let - val finish = Context.Theory o Proof_Context.theory_of; - - val future_proof = - Proof.future_proof (fn state => - Execution.fork - {name = "Toplevel.future_proof", pos = pos_of head_tr, pri = priority estimate} - (fn () => - let - val State (SOME (Proof (prf, (_, orig_gthy))), prev) = st'; - val prf' = Proof_Node.apply (K state) prf; - val (result, result_state) = - State (SOME (Proof (prf', (finish, orig_gthy))), prev) - |> fold_map element_result body_elems ||> command end_tr; - in (Result_List result, presentation_context_of result_state) end)) - #> (fn (res, state') => state' |> put_result (Result_Future res)); - - val forked_proof = - proof (future_proof #> - (fn state => state |> Proof.local_done_proof |> put_result (get_result state))) o - end_proof (fn _ => future_proof #> - (fn state => state |> Proof.global_done_proof |> Result.put (get_result state))); - - val st'' = st' - |> command (head_tr |> reset_trans |> forked_proof); - val end_result = Result (end_tr, st''); - val result = - Result_List [head_result, Result.get (presentation_context_of st''), end_result]; - in (result, st'') end - end; - -end; - -end; diff --git a/core/Pure/Isar/typedecl.ML b/core/Pure/Isar/typedecl.ML deleted file mode 100644 index 618d93b3..00000000 --- a/core/Pure/Isar/typedecl.ML +++ /dev/null @@ -1,126 +0,0 @@ -(* Title: Pure/Isar/typedecl.ML - Author: Makarius - -Type declarations (with object-logic arities) and type abbreviations. 
-*) - -signature TYPEDECL = -sig - val read_constraint: Proof.context -> string option -> sort - val basic_typedecl: binding * int * mixfix -> local_theory -> string * local_theory - val typedecl: binding * (string * sort) list * mixfix -> local_theory -> typ * local_theory - val typedecl_global: binding * (string * sort) list * mixfix -> theory -> typ * theory - val abbrev: binding * string list * mixfix -> typ -> local_theory -> string * local_theory - val abbrev_cmd: binding * string list * mixfix -> string -> local_theory -> string * local_theory - val abbrev_global: binding * string list * mixfix -> typ -> theory -> string * theory -end; - -structure Typedecl: TYPEDECL = -struct - -(* constraints *) - -fun read_constraint _ NONE = dummyS - | read_constraint ctxt (SOME s) = Syntax.read_sort ctxt s; - - -(* primitives *) - -fun object_logic_arity name thy = - (case Object_Logic.get_base_sort thy of - NONE => thy - | SOME S => Axclass.arity_axiomatization (name, replicate (Sign.arity_number thy name) S, S) thy); - -fun basic_decl decl (b, n, mx) lthy = - let val name = Local_Theory.full_name lthy b in - lthy - |> Local_Theory.background_theory (decl name) - |> Local_Theory.type_notation true Syntax.mode_default [(Type (name, replicate n dummyT), mx)] - |> Local_Theory.type_alias b name - |> pair name - end; - -fun basic_typedecl (b, n, mx) lthy = - basic_decl (fn name => Sign.add_type lthy (b, n, NoSyn) #> object_logic_arity name) - (b, n, mx) lthy; - - -(* global type -- without dependencies on type parameters of the context *) - -fun global_type lthy (b, raw_args) = - let - fun err msg = error (msg ^ " in type declaration " ^ Binding.print b); - - val _ = has_duplicates (eq_fst op =) raw_args andalso err "Duplicate parameters"; - val args = map (TFree o Proof_Context.check_tfree lthy) raw_args; - val T = Type (Local_Theory.full_name lthy b, args); - - val bad_args = - #2 (Term.dest_Type (Logic.type_map (singleton (Variable.polymorphic lthy)) T)) - |> filter_out Term.is_TVar; - val _ = null bad_args orelse - err ("Locally fixed type arguments " ^ - commas_quote (map (Syntax.string_of_typ lthy) bad_args)); - in T end; - - -(* type declarations *) - -fun typedecl (b, raw_args, mx) lthy = - let val T = global_type lthy (b, raw_args) in - lthy - |> basic_typedecl (b, length raw_args, mx) - |> snd - |> Variable.declare_typ T - |> pair T - end; - -fun typedecl_global decl = - Named_Target.theory_init - #> typedecl decl - #> Local_Theory.exit_result_global Morphism.typ; - - -(* type abbreviations *) - -local - -fun gen_abbrev prep_typ (b, vs, mx) raw_rhs lthy = - let - val Type (name, _) = global_type lthy (b, map (rpair dummyS) vs); - val rhs = prep_typ b lthy raw_rhs - handle ERROR msg => cat_error msg ("in type abbreviation " ^ Binding.print b); - in - lthy - |> basic_decl (fn _ => Sign.add_type_abbrev lthy (b, vs, rhs)) (b, length vs, mx) - |> snd - |> pair name - end; - -fun read_abbrev b ctxt raw_rhs = - let - val rhs = Proof_Context.read_typ_syntax (ctxt |> Proof_Context.set_defsort []) raw_rhs; - val ignored = Term.fold_atyps_sorts (fn (_, []) => I | (T, _) => insert (op =) T) rhs []; - val _ = - if not (null ignored) andalso Context_Position.is_visible ctxt then - warning - ("Ignoring sort constraints in type variables(s): " ^ - commas_quote (map (Syntax.string_of_typ ctxt) (rev ignored)) ^ - "\nin type abbreviation " ^ Binding.print b) - else (); - in rhs end; - -in - -val abbrev = gen_abbrev (K Proof_Context.cert_typ_syntax); -val abbrev_cmd = gen_abbrev read_abbrev; - -end; - -fun 
abbrev_global decl rhs = - Named_Target.theory_init - #> abbrev decl rhs - #> Local_Theory.exit_result_global (K I); - -end; - diff --git a/core/Pure/ML-Systems/compiler_polyml.ML b/core/Pure/ML-Systems/compiler_polyml.ML deleted file mode 100644 index 0e674277..00000000 --- a/core/Pure/ML-Systems/compiler_polyml.ML +++ /dev/null @@ -1,58 +0,0 @@ -(* Title: Pure/ML-Systems/compiler_polyml.ML - -Basic runtime compilation for Poly/ML (cf. Pure/ML/ml_compiler_polyml.ML). -*) - -local - -fun drop_newline s = - if String.isSuffix "\n" s then String.substring (s, 0, size s - 1) - else s; - -in - -fun use_text ({tune_source, name_space, str_of_pos, print, error, ...}: use_context) - (start_line, name) verbose txt = - let - val line = Unsynchronized.ref start_line; - val in_buffer = - Unsynchronized.ref (String.explode (tune_source (ml_positions start_line name txt))); - val out_buffer = Unsynchronized.ref ([]: string list); - fun output () = drop_newline (implode (rev (! out_buffer))); - - fun get () = - (case ! in_buffer of - [] => NONE - | c :: cs => (in_buffer := cs; if c = #"\n" then line := ! line + 1 else (); SOME c)); - fun put s = out_buffer := s :: ! out_buffer; - fun put_message {message = msg1, hard, location = {startLine = line, ...}, context} = - (put (if hard then "Error: " else "Warning: "); - PolyML.prettyPrint (put, 76) msg1; - (case context of NONE => () | SOME msg2 => PolyML.prettyPrint (put, 76) msg2); - put ("At" ^ str_of_pos line name ^ "\n")); - - val parameters = - [PolyML.Compiler.CPOutStream put, - PolyML.Compiler.CPNameSpace name_space, - PolyML.Compiler.CPErrorMessageProc put_message, - PolyML.Compiler.CPLineNo (fn () => ! line), - PolyML.Compiler.CPFileName name, - PolyML.Compiler.CPPrintInAlphabeticalOrder false]; - val _ = - (while not (List.null (! in_buffer)) do - PolyML.compiler (get, parameters) ()) - handle exn => - if Exn.is_interrupt exn then reraise exn - else - (put ("Exception- " ^ General.exnMessage exn ^ " raised"); - error (output ()); reraise exn); - in if verbose then print (output ()) else () end; - -fun use_file context verbose name = - let - val instream = TextIO.openIn name; - val txt = Exn.release (Exn.capture TextIO.inputAll instream before TextIO.closeIn instream); - in use_text context (1, name) verbose txt end; - -end; - diff --git a/core/Pure/ML-Systems/ml_name_space.ML b/core/Pure/ML-Systems/ml_name_space.ML deleted file mode 100644 index f0daf2a5..00000000 --- a/core/Pure/ML-Systems/ml_name_space.ML +++ /dev/null @@ -1,64 +0,0 @@ -(* Title: Pure/ML-Systems/ml_name_space.ML - Author: Makarius - -ML name space -- dummy version of Poly/ML 5.2 facility. 
-*) - -structure ML_Name_Space = -struct - -type valueVal = unit; -type typeVal = unit; -type fixityVal = unit; -type structureVal = unit; -type signatureVal = unit; -type functorVal = unit; - -type T = - {lookupVal: string -> valueVal option, - lookupType: string -> typeVal option, - lookupFix: string -> fixityVal option, - lookupStruct: string -> structureVal option, - lookupSig: string -> signatureVal option, - lookupFunct: string -> functorVal option, - enterVal: string * valueVal -> unit, - enterType: string * typeVal -> unit, - enterFix: string * fixityVal -> unit, - enterStruct: string * structureVal -> unit, - enterSig: string * signatureVal -> unit, - enterFunct: string * functorVal -> unit, - allVal: unit -> (string * valueVal) list, - allType: unit -> (string * typeVal) list, - allFix: unit -> (string * fixityVal) list, - allStruct: unit -> (string * structureVal) list, - allSig: unit -> (string * signatureVal) list, - allFunct: unit -> (string * functorVal) list}; - -val global: T = - {lookupVal = fn _ => NONE, - lookupType = fn _ => NONE, - lookupFix = fn _ => NONE, - lookupStruct = fn _ => NONE, - lookupSig = fn _ => NONE, - lookupFunct = fn _ => NONE, - enterVal = fn _ => (), - enterType = fn _ => (), - enterFix = fn _ => (), - enterStruct = fn _ => (), - enterSig = fn _ => (), - enterFunct = fn _ => (), - allVal = fn _ => [], - allType = fn _ => [], - allFix = fn _ => [], - allStruct = fn _ => [], - allSig = fn _ => [], - allFunct = fn _ => []}; - -val initial_val : (string * valueVal) list = []; -val initial_type : (string * typeVal) list = []; -val initial_fixity : (string * fixityVal) list = []; -val initial_structure : (string * structureVal) list = []; -val initial_signature : (string * signatureVal) list = []; -val initial_functor : (string * functorVal) list = []; - -end; diff --git a/core/Pure/ML-Systems/ml_positions.ML b/core/Pure/ML-Systems/ml_positions.ML deleted file mode 100644 index 1ba74c75..00000000 --- a/core/Pure/ML-Systems/ml_positions.ML +++ /dev/null @@ -1,16 +0,0 @@ -(* Title: Pure/ML-Systems/ml_positions.ML - Author: Makarius - -Approximate ML antiquotation @{here} for Isabelle/Pure bootstrap. -*) - -fun ml_positions start_line name txt = - let - fun positions line (#"@" :: #"{" :: #"h" :: #"e" :: #"r" :: #"e" :: #"}" :: cs) res = - let val s = "(Position.line_file_only " ^ Int.toString line ^ " \"" ^ name ^ "\")" - in positions line cs (s :: res) end - | positions line (c :: cs) res = - positions (if c = #"\n" then line + 1 else line) cs (str c :: res) - | positions _ [] res = rev res; - in String.concat (positions start_line (String.explode txt) []) end; - diff --git a/core/Pure/ML-Systems/ml_pretty.ML b/core/Pure/ML-Systems/ml_pretty.ML deleted file mode 100644 index 319a20b3..00000000 --- a/core/Pure/ML-Systems/ml_pretty.ML +++ /dev/null @@ -1,31 +0,0 @@ -(* Title: Pure/ML-Systems/ml_pretty.ML - Author: Makarius - -Minimal support for raw ML pretty printing -- for boot-strapping only. 
-*) - -structure ML_Pretty = -struct - -datatype pretty = - Block of (string * string) * pretty list * int | - String of string * int | - Break of bool * int; - -fun block prts = Block (("", ""), prts, 2); -fun str s = String (s, size s); -fun brk wd = Break (false, wd); - -fun pair pretty1 pretty2 ((x, y), depth: int) = - block [str "(", pretty1 (x, depth), str ",", brk 1, pretty2 (y, depth - 1), str ")"]; - -fun enum sep lpar rpar pretty (args, depth) = - let - fun elems _ [] = [] - | elems 0 _ = [str "..."] - | elems d [x] = [pretty (x, d)] - | elems d (x :: xs) = pretty (x, d) :: str sep :: brk 1 :: elems (d - 1) xs; - in block (str lpar :: (elems (Int.max (depth, 0)) args @ [str rpar])) end; - -end; - diff --git a/core/Pure/ML-Systems/ml_system.ML b/core/Pure/ML-Systems/ml_system.ML deleted file mode 100644 index 25a44257..00000000 --- a/core/Pure/ML-Systems/ml_system.ML +++ /dev/null @@ -1,32 +0,0 @@ -(* Title: Pure/ML-Systems/ml_system.ML - Author: Makarius - -ML system and platform operations. -*) - -signature ML_SYSTEM = -sig - val name: string - val is_polyml: bool - val is_smlnj: bool - val platform: string - val platform_is_cygwin: bool - val share_common_data: unit -> unit - val save_state: string -> unit -end; - -structure ML_System: ML_SYSTEM = -struct - -val name = case OS.Process.getEnv "ML_SYSTEM" of SOME s => s | NONE => "polyml"; -val is_polyml = String.isPrefix "polyml" name; -val is_smlnj = String.isPrefix "smlnj" name; - -val platform = case OS.Process.getEnv "ML_PLATFORM" of SOME p => p | NONE => "unknown"; -val platform_is_cygwin = String.isSuffix "cygwin" platform; - -fun share_common_data () = (); -fun save_state _ = raise Fail "Cannot save state -- undefined operation"; - -end; - diff --git a/core/Pure/ML-Systems/multithreading.ML b/core/Pure/ML-Systems/multithreading.ML deleted file mode 100644 index ef2df2f4..00000000 --- a/core/Pure/ML-Systems/multithreading.ML +++ /dev/null @@ -1,86 +0,0 @@ -(* Title: Pure/ML-Systems/multithreading.ML - Author: Makarius - -Dummy implementation of multithreading setup. 
-*) - -signature BASIC_MULTITHREADING = -sig - val NAMED_CRITICAL: string -> (unit -> 'a) -> 'a - val CRITICAL: (unit -> 'a) -> 'a -end; - -signature MULTITHREADING = -sig - include BASIC_MULTITHREADING - val available: bool - val max_threads_value: unit -> int - val max_threads_update: int -> unit - val max_threads_setmp: int -> ('a -> 'b) -> 'a -> 'b - val enabled: unit -> bool - val no_interrupts: Thread.threadAttribute list - val public_interrupts: Thread.threadAttribute list - val private_interrupts: Thread.threadAttribute list - val sync_interrupts: Thread.threadAttribute list -> Thread.threadAttribute list - val interrupted: unit -> unit (*exception Interrupt*) - val with_attributes: Thread.threadAttribute list -> (Thread.threadAttribute list -> 'a) -> 'a - val sync_wait: Thread.threadAttribute list option -> Time.time option -> - ConditionVar.conditionVar -> Mutex.mutex -> bool Exn.result - val trace: int ref - val tracing: int -> (unit -> string) -> unit - val tracing_time: bool -> Time.time -> (unit -> string) -> unit - val real_time: ('a -> unit) -> 'a -> Time.time - val self_critical: unit -> bool - val serial: unit -> int -end; - -structure Multithreading: MULTITHREADING = -struct - -(* options *) - -val available = false; -fun max_threads_value () = 1: int; -fun max_threads_update _ = (); -fun max_threads_setmp _ f x = f x; -fun enabled () = false; - - -(* attributes *) - -val no_interrupts = []; -val public_interrupts = []; -val private_interrupts = []; -fun sync_interrupts _ = []; - -fun interrupted () = (); - -fun with_attributes _ e = e []; - -fun sync_wait _ _ _ _ = Exn.Res true; - - -(* tracing *) - -val trace = ref (0: int); -fun tracing _ _ = (); -fun tracing_time _ _ _ = (); -fun real_time f x = (f x; Time.zeroTime); - - -(* critical section *) - -fun self_critical () = false; -fun NAMED_CRITICAL _ e = e (); -fun CRITICAL e = e (); - - -(* serial numbers *) - -local val count = ref (0: int) -in fun serial () = (count := ! count + 1; ! count) end; - -end; - -structure Basic_Multithreading: BASIC_MULTITHREADING = Multithreading; -open Basic_Multithreading; diff --git a/core/Pure/ML-Systems/multithreading_polyml.ML b/core/Pure/ML-Systems/multithreading_polyml.ML deleted file mode 100644 index 1ccebde1..00000000 --- a/core/Pure/ML-Systems/multithreading_polyml.ML +++ /dev/null @@ -1,210 +0,0 @@ -(* Title: Pure/ML-Systems/multithreading_polyml.ML - Author: Makarius - -Multithreading in Poly/ML (cf. polyml/basis/Thread.sml). 
-*) - -signature MULTITHREADING_POLYML = -sig - val interruptible: ('a -> 'b) -> 'a -> 'b - val uninterruptible: ((('c -> 'd) -> 'c -> 'd) -> 'a -> 'b) -> 'a -> 'b -end; - -signature BASIC_MULTITHREADING = -sig - include BASIC_MULTITHREADING - include MULTITHREADING_POLYML -end; - -signature MULTITHREADING = -sig - include MULTITHREADING - include MULTITHREADING_POLYML -end; - -structure Multithreading: MULTITHREADING = -struct - -(* thread attributes *) - -val no_interrupts = - [Thread.EnableBroadcastInterrupt false, Thread.InterruptState Thread.InterruptDefer]; - -val test_interrupts = - [Thread.EnableBroadcastInterrupt false, Thread.InterruptState Thread.InterruptSynch]; - -val public_interrupts = - [Thread.EnableBroadcastInterrupt true, Thread.InterruptState Thread.InterruptAsynchOnce]; - -val private_interrupts = - [Thread.EnableBroadcastInterrupt false, Thread.InterruptState Thread.InterruptAsynchOnce]; - -val sync_interrupts = map - (fn x as Thread.InterruptState Thread.InterruptDefer => x - | Thread.InterruptState _ => Thread.InterruptState Thread.InterruptSynch - | x => x); - -val safe_interrupts = map - (fn Thread.InterruptState Thread.InterruptAsynch => - Thread.InterruptState Thread.InterruptAsynchOnce - | x => x); - -fun interrupted () = - let - val orig_atts = safe_interrupts (Thread.getAttributes ()); - val _ = Thread.setAttributes test_interrupts; - val test = Exn.capture Thread.testInterrupt (); - val _ = Thread.setAttributes orig_atts; - in Exn.release test end; - -fun with_attributes new_atts e = - let - val orig_atts = safe_interrupts (Thread.getAttributes ()); - val result = Exn.capture (fn () => - (Thread.setAttributes (safe_interrupts new_atts); e orig_atts)) (); - val _ = Thread.setAttributes orig_atts; - in Exn.release result end; - - -(* portable wrappers *) - -fun interruptible f x = with_attributes public_interrupts (fn _ => f x); - -fun uninterruptible f x = - with_attributes no_interrupts (fn atts => - f (fn g => fn y => with_attributes atts (fn _ => g y)) x); - - -(* options *) - -val available = true; - -fun max_threads_result m = - if m > 0 then m - else Int.min (Int.max (Thread.numProcessors (), 1), 8); - -val max_threads = ref 1; - -fun max_threads_value () = ! max_threads; - -fun max_threads_update m = max_threads := max_threads_result m; - -fun max_threads_setmp m f x = - uninterruptible (fn restore_attributes => fn () => - let - val max_threads_orig = ! max_threads; - val _ = max_threads_update m; - val result = Exn.capture (restore_attributes f) x; - val _ = max_threads := max_threads_orig; - in Exn.release result end) (); - -fun enabled () = max_threads_value () > 1; - - -(* synchronous wait *) - -fun sync_wait opt_atts time cond lock = - with_attributes - (sync_interrupts (case opt_atts of SOME atts => atts | NONE => Thread.getAttributes ())) - (fn _ => - (case time of - SOME t => Exn.Res (ConditionVar.waitUntil (cond, lock, t)) - | NONE => (ConditionVar.wait (cond, lock); Exn.Res true)) - handle exn => Exn.Exn exn); - - -(* tracing *) - -val trace = ref 0; - -fun tracing level msg = - if level > ! 
trace then () - else uninterruptible (fn _ => fn () => - (TextIO.output (TextIO.stdErr, (">>> " ^ msg () ^ "\n")); TextIO.flushOut TextIO.stdErr) - handle _ (*sic*) => ()) (); - -fun tracing_time detailed time = - tracing - (if not detailed then 5 - else if Time.>= (time, seconds 1.0) then 1 - else if Time.>= (time, seconds 0.1) then 2 - else if Time.>= (time, seconds 0.01) then 3 - else if Time.>= (time, seconds 0.001) then 4 else 5); - -fun real_time f x = - let - val timer = Timer.startRealTimer (); - val () = f x; - val time = Timer.checkRealTimer timer; - in time end; - - -(* critical section -- may be nested within the same thread *) - -local - -val critical_lock = Mutex.mutex (); -val critical_thread = ref (NONE: Thread.thread option); -val critical_name = ref ""; - -fun show "" = "" | show name = " " ^ name; -fun show' "" = "" | show' name = " [" ^ name ^ "]"; - -in - -fun self_critical () = - (case ! critical_thread of - NONE => false - | SOME t => Thread.equal (t, Thread.self ())); - -fun NAMED_CRITICAL name e = - if self_critical () then e () - else - Exn.release (uninterruptible (fn restore_attributes => fn () => - let - val name' = ! critical_name; - val _ = - if Mutex.trylock critical_lock then () - else - let - val _ = tracing 5 (fn () => "CRITICAL" ^ show name ^ show' name' ^ ": waiting"); - val time = real_time Mutex.lock critical_lock; - val _ = tracing_time true time (fn () => - "CRITICAL" ^ show name ^ show' name' ^ ": passed after " ^ Time.toString time); - in () end; - val _ = critical_thread := SOME (Thread.self ()); - val _ = critical_name := name; - val result = Exn.capture (restore_attributes e) (); - val _ = critical_name := ""; - val _ = critical_thread := NONE; - val _ = Mutex.unlock critical_lock; - in result end) ()); - -fun CRITICAL e = NAMED_CRITICAL "" e; - -end; - - -(* serial numbers *) - -local - -val serial_lock = Mutex.mutex (); -val serial_count = ref 0; - -in - -val serial = uninterruptible (fn _ => fn () => - let - val _ = Mutex.lock serial_lock; - val _ = serial_count := ! serial_count + 1; - val res = ! serial_count; - val _ = Mutex.unlock serial_lock; - in res end); - -end; - -end; - -structure Basic_Multithreading: BASIC_MULTITHREADING = Multithreading; -open Basic_Multithreading; diff --git a/core/Pure/ML-Systems/overloading_smlnj.ML b/core/Pure/ML-Systems/overloading_smlnj.ML deleted file mode 100644 index fa7200ea..00000000 --- a/core/Pure/ML-Systems/overloading_smlnj.ML +++ /dev/null @@ -1,41 +0,0 @@ -(* Title: Pure/ML-Systems/overloading_smlnj.ML - Author: Makarius - -Overloading in SML/NJ (cf. smlnj/base/system/smlnj/init/pervasive.sml). 
-*) - -Control.overloadKW := true; - -overload ~ : ('a -> 'a) as - IntInf.~ and Int31.~ and Int32.~ and Int64.~ and - Word.~ and Word8.~ and Word32.~ and Word64.~ and Real.~; -overload + : ('a * 'a -> 'a) as - IntInf.+ and Int31.+ and Int32.+ and Int64.+ and - Word.+ and Word8.+ and Word32.+ and Word64.+ and Real.+; -overload - : ('a * 'a -> 'a) as - IntInf.- and Int31.- and Int32.- and Int64.- and - Word.- and Word8.- and Word32.- and Word64.- and Real.-; -overload * : ('a * 'a -> 'a) as - IntInf.* and Int31.* and Int32.* and Int64.* and - Word.* and Word8.* and Word32.* and Word64.* and Real.*; -overload div: ('a * 'a -> 'a) as - IntInf.div and Int31.div and Int32.div and Int64.div and - Word.div and Word8.div and Word32.div and Word64.div; -overload mod: ('a * 'a -> 'a) as - IntInf.mod and Int31.mod and Int32.mod and Int64.mod and - Word.mod and Word8.mod and Word32.mod and Word64.mod; -overload < : ('a * 'a -> bool) as - IntInf.< and Int31.< and Int32.< and Int64.< and Real.< and - Word.< and Word8.< and Word32.< and Word64.< and Char.< and String.<; -overload <= : ('a * 'a -> bool) as - IntInf.<= and Int31.<= and Int32.<= and Int64.<= and Real.<= and - Word.<= and Word8.<= and Word32.<= and Word64.<= and Char.<= and String.<=; -overload > : ('a * 'a -> bool) as - IntInf.> and Int31.> and Int32.> and Int64.> and Real.> and - Word.> and Word8.> and Word32.> and Word64.> and Char.> and String.>; -overload >= : ('a * 'a -> bool) as - IntInf.>= and Int31.>= and Int32.>= and Int64.>= and Real.>= and - Word.>= and Word8.>= and Word32.>= and Word64.>= and Char.>= and String.>=; -overload abs: ('a -> 'a) as IntInf.abs and Int31.abs and Int32.abs and Int64.abs and Real.abs; - -Control.overloadKW := false; diff --git a/core/Pure/ML-Systems/polyml-5.5.2.ML b/core/Pure/ML-Systems/polyml-5.5.2.ML deleted file mode 100644 index 05984ea8..00000000 --- a/core/Pure/ML-Systems/polyml-5.5.2.ML +++ /dev/null @@ -1,23 +0,0 @@ -(* Title: Pure/ML-Systems/polyml-5.5.2.ML - Author: Makarius - -Compatibility wrapper for Poly/ML 5.5.2. -*) - -structure Thread = -struct - open Thread; - - structure Thread = - struct - open Thread; - - fun numProcessors () = - (case Thread.numPhysicalProcessors () of - SOME n => n - | NONE => Thread.numProcessors ()); - end; -end; - -use "ML-Systems/polyml.ML"; - diff --git a/core/Pure/ML-Systems/polyml.ML b/core/Pure/ML-Systems/polyml.ML deleted file mode 100644 index 1a43ac85..00000000 --- a/core/Pure/ML-Systems/polyml.ML +++ /dev/null @@ -1,178 +0,0 @@ -(* Title: Pure/ML-Systems/polyml.ML - Author: Makarius - -Compatibility wrapper for Poly/ML. 
-*) - -(* ML name space *) - -structure ML_Name_Space = -struct - open PolyML.NameSpace; - type T = PolyML.NameSpace.nameSpace; - val global = PolyML.globalNameSpace; - val initial_val = - List.filter (fn (a, _) => a <> "use" andalso a <> "exit" andalso a <> "commit") - (#allVal global ()); - val initial_type = #allType global (); - val initial_fixity = #allFix global (); - val initial_structure = #allStruct global (); - val initial_signature = #allSig global (); - val initial_functor = #allFunct global (); -end; - - -(* ML system operations *) - -use "ML-Systems/ml_system.ML"; - -if ML_System.name = "polyml-5.3.0" -then use "ML-Systems/share_common_data_polyml-5.3.0.ML" -else (); - -structure ML_System = -struct - -open ML_System; - -fun share_common_data () = PolyML.shareCommonData PolyML.rootFunction; -val save_state = PolyML.SaveState.saveState; - -end; - - -(* exceptions *) - -fun reraise exn = - (case PolyML.exceptionLocation exn of - NONE => raise exn - | SOME location => PolyML.raiseWithLocation (exn, location)); - -exception Interrupt = SML90.Interrupt; - -use "General/exn.ML"; - - -(* multithreading *) - -val seconds = Time.fromReal; - -if List.exists (fn s => s = "SingleAssignment") (PolyML.Compiler.structureNames ()) -then () -else use "ML-Systems/single_assignment_polyml.ML"; - -open Thread; -use "ML-Systems/multithreading.ML"; -use "ML-Systems/multithreading_polyml.ML"; - -use "ML-Systems/unsynchronized.ML"; -val _ = PolyML.Compiler.forgetValue "ref"; -val _ = PolyML.Compiler.forgetType "ref"; - - -(* pervasive environment *) - -val _ = PolyML.Compiler.forgetValue "isSome"; -val _ = PolyML.Compiler.forgetValue "getOpt"; -val _ = PolyML.Compiler.forgetValue "valOf"; -val _ = PolyML.Compiler.forgetValue "foldl"; -val _ = PolyML.Compiler.forgetValue "foldr"; -val _ = PolyML.Compiler.forgetValue "print"; -val _ = PolyML.Compiler.forgetValue "explode"; -val _ = PolyML.Compiler.forgetValue "concat"; - -val ord = SML90.ord; -val chr = SML90.chr; -val raw_explode = SML90.explode; -val implode = SML90.implode; - -val io_buffer_size = 4096; - -fun quit () = exit 0; - - -(* ML runtime system *) - -fun print_exception_trace (_: exn -> string) = PolyML.exception_trace; -val timing = PolyML.timing; -val profiling = PolyML.profiling; - -fun profile 0 f x = f x - | profile n f x = - let - val _ = RunCall.run_call1 RuntimeCalls.POLY_SYS_profiler n; - val res = Exn.capture f x; - val _ = RunCall.run_call1 RuntimeCalls.POLY_SYS_profiler 0; - in Exn.release res end; - -val pointer_eq = PolyML.pointerEq; - - -(* ML compiler *) - -use "ML-Systems/use_context.ML"; -use "ML-Systems/ml_positions.ML"; -use "ML-Systems/compiler_polyml.ML"; - -PolyML.Compiler.reportUnreferencedIds := true; -PolyML.Compiler.printInAlphabeticalOrder := false; -PolyML.Compiler.maxInlineSize := 80; - -fun ml_prompts p1 p2 = (PolyML.Compiler.prompt1 := p1; PolyML.Compiler.prompt2 := p2); - - -(* ML toplevel pretty printing *) - -use "ML-Systems/ml_pretty.ML"; - -local - val depth = Unsynchronized.ref 10; -in - fun get_default_print_depth () = ! 
depth; - fun default_print_depth n = (depth := n; PolyML.print_depth n); - val _ = default_print_depth 10; -end; - -val error_depth = PolyML.error_depth; - -val pretty_ml = - let - fun convert _ (PolyML.PrettyBreak (wd, _)) = ML_Pretty.Break (false, wd) - | convert _ (PolyML.PrettyBlock (_, _, - [PolyML.ContextProperty ("fbrk", _)], [PolyML.PrettyString " "])) = - ML_Pretty.Break (true, 1) - | convert len (PolyML.PrettyBlock (ind, _, context, prts)) = - let - fun property name default = - (case List.find (fn PolyML.ContextProperty (a, _) => name = a | _ => false) context of - SOME (PolyML.ContextProperty (_, b)) => b - | _ => default); - val bg = property "begin" ""; - val en = property "end" ""; - val len' = property "length" len; - in ML_Pretty.Block ((bg, en), map (convert len') prts, ind) end - | convert len (PolyML.PrettyString s) = - ML_Pretty.String (s, case Int.fromString len of SOME i => i | NONE => size s) - in convert "" end; - -fun ml_pretty (ML_Pretty.Break (false, wd)) = PolyML.PrettyBreak (wd, 0) - | ml_pretty (ML_Pretty.Break (true, _)) = - PolyML.PrettyBlock (0, false, [PolyML.ContextProperty ("fbrk", "")], - [PolyML.PrettyString " "]) - | ml_pretty (ML_Pretty.Block ((bg, en), prts, ind)) = - let val context = - (if bg = "" then [] else [PolyML.ContextProperty ("begin", bg)]) @ - (if en = "" then [] else [PolyML.ContextProperty ("end", en)]) - in PolyML.PrettyBlock (ind, false, context, map ml_pretty prts) end - | ml_pretty (ML_Pretty.String (s, len)) = - if len = size s then PolyML.PrettyString s - else PolyML.PrettyBlock - (0, false, [PolyML.ContextProperty ("length", Int.toString len)], [PolyML.PrettyString s]); - -fun toplevel_pp context (_: string list) pp = - use_text context (1, "pp") false - ("PolyML.addPrettyPrinter (fn _ => fn _ => ml_pretty o Pretty.to_ML o (" ^ pp ^ "))"); - -val ml_make_string = - "(fn x => Pretty.string_of (Pretty.from_ML (pretty_ml (PolyML.prettyRepresentation (x, Isabelle.ML_print_depth ())))))"; - diff --git a/core/Pure/ML-Systems/pp_dummy.ML b/core/Pure/ML-Systems/pp_dummy.ML deleted file mode 100644 index b35c0e8d..00000000 --- a/core/Pure/ML-Systems/pp_dummy.ML +++ /dev/null @@ -1,16 +0,0 @@ -(* Title: Pure/ML-Systems/pp_dummy.ML - -Dummy setup for toplevel pretty printing. -*) - -fun ml_pretty _ = raise Fail "ml_pretty dummy"; -fun pretty_ml _ = raise Fail "pretty_ml dummy"; - -structure PolyML = -struct - fun addPrettyPrinter _ = (); - fun prettyRepresentation _ = - raise Fail "PolyML.prettyRepresentation dummy"; - open PolyML; -end; - diff --git a/core/Pure/ML-Systems/proper_int.ML b/core/Pure/ML-Systems/proper_int.ML deleted file mode 100644 index 86d6b6e8..00000000 --- a/core/Pure/ML-Systems/proper_int.ML +++ /dev/null @@ -1,289 +0,0 @@ -(* Title: Pure/ML-Systems/proper_int.ML - Author: Makarius - -SML basis with type int representing proper integers, not machine -words. 
-*) - -val mk_int = IntInf.fromInt: Int.int -> IntInf.int; -val dest_int = IntInf.toInt: IntInf.int -> Int.int; - - -(* Int *) - -type int = IntInf.int; - -structure IntInf = -struct - open IntInf; - fun fromInt (a: int) = a; - fun toInt (a: int) = a; - val log2 = mk_int o IntInf.log2; - val sign = mk_int o IntInf.sign; -end; - -structure Int = IntInf; - - -(* List *) - -structure List = -struct - open List; - fun length a = mk_int (List.length a); - fun nth (a, b) = List.nth (a, dest_int b); - fun take (a, b) = List.take (a, dest_int b); - fun drop (a, b) = List.drop (a, dest_int b); - fun tabulate (a, b) = List.tabulate (dest_int a, b o mk_int); -end; - -val length = List.length; - - -(* Array *) - -structure Array = -struct - open Array; - val maxLen = mk_int Array.maxLen; - fun array (a, b) = Array.array (dest_int a, b); - fun tabulate (a, b) = Array.tabulate (dest_int a, b o mk_int); - fun length a = mk_int (Array.length a); - fun sub (a, b) = Array.sub (a, dest_int b); - fun update (a, b, c) = Array.update (a, dest_int b, c); - fun copy {src, dst, di} = Array.copy {src = src, dst = dst, di = dest_int di}; - fun copyVec {src, dst, di} = Array.copyVec {src = src, dst = dst, di = dest_int di}; - fun appi a b = Array.appi (fn (x, y) => a (mk_int x, y)) b; - fun modifyi a b = Array.modifyi (fn (x, y) => a (mk_int x, y)) b; - fun foldli a b c = Array.foldli (fn (x, y, z) => a (mk_int x, y, z)) b c; - fun foldri a b c = Array.foldri (fn (x, y, z) => a (mk_int x, y, z)) b c; - fun findi a b = - (case Array.findi (fn (x, y) => a (mk_int x, y)) b of - NONE => NONE - | SOME (c, d) => SOME (mk_int c, d)); -end; - - -(* Vector *) - -structure Vector = -struct - open Vector; - val maxLen = mk_int Vector.maxLen; - fun tabulate (a, b) = Vector.tabulate (dest_int a, b o mk_int); - fun length a = mk_int (Vector.length a); - fun sub (a, b) = Vector.sub (a, dest_int b); - fun update (a, b, c) = Vector.update (a, dest_int b, c); - fun appi a b = Vector.appi (fn (x, y) => a (mk_int x, y)) b; - fun mapi a b = Vector.mapi (fn (x, y) => a (mk_int x, y)) b; - fun foldli a b c = Vector.foldli (fn (x, y, z) => a (mk_int x, y, z)) b c; - fun foldri a b c = Vector.foldri (fn (x, y, z) => a (mk_int x, y, z)) b c; - fun findi a b = - (case Vector.findi (fn (x, y) => a (mk_int x, y)) b of - NONE => NONE - | SOME (c, d) => SOME (mk_int c, d)); -end; - - -(* CharVector *) - -structure CharVector = -struct - open CharVector; - fun tabulate (a, b) = CharVector.tabulate (dest_int a, b o mk_int); -end; - - -(* Word8VectorSlice *) - -structure Word8VectorSlice = -struct - open Word8VectorSlice; - val length = mk_int o Word8VectorSlice.length; - fun subslice (a, b, c) = Word8VectorSlice.subslice (a, dest_int b, Option.map dest_int c); -end; - - -(* Char *) - -structure Char = -struct - open Char; - val maxOrd = mk_int Char.maxOrd; - val chr = Char.chr o dest_int; - val ord = mk_int o Char.ord; -end; - -val chr = Char.chr; -val ord = Char.ord; - - -(* String *) - -structure String = -struct - open String; - val maxSize = mk_int String.maxSize; - val size = mk_int o String.size; - fun sub (a, b) = String.sub (a, dest_int b); - fun extract (a, b, c) = String.extract (a, dest_int b, Option.map dest_int c); - fun substring (a, b, c) = String.substring (a, dest_int b, dest_int c); -end; - -val size = String.size; -val substring = String.substring; - - -(* Substring *) - -structure Substring = -struct - open Substring; - fun sub (a, b) = Substring.sub (a, dest_int b); - val size = mk_int o Substring.size; - fun base a = let val 
(b, c, d) = Substring.base a in (b, mk_int c, mk_int d) end; - fun extract (a, b, c) = Substring.extract (a, dest_int b, Option.map dest_int c); - fun substring (a, b, c) = Substring.substring (a, dest_int b, dest_int c); - fun triml a b = Substring.triml (dest_int a) b; - fun trimr a b = Substring.trimr (dest_int a) b; - fun slice (a, b, c) = Substring.slice (a, dest_int b, Option.map dest_int c); - fun splitAt (a, b) = Substring.splitAt (a, dest_int b); -end; - - -(* StringCvt *) - -structure StringCvt = -struct - open StringCvt; - datatype realfmt = EXACT | FIX of int option | GEN of int option | SCI of int option; - fun realfmt fmt = Real.fmt - (case fmt of - EXACT => StringCvt.EXACT - | FIX NONE => StringCvt.FIX NONE - | FIX (SOME b) => StringCvt.FIX (SOME (dest_int b)) - | GEN NONE => StringCvt.GEN NONE - | GEN (SOME b) => StringCvt.GEN (SOME (dest_int b)) - | SCI NONE => StringCvt.SCI NONE - | SCI (SOME b) => StringCvt.SCI (SOME (dest_int b))); - fun padRight a b c = StringCvt.padRight a (dest_int b) c; - fun padLeft a b c = StringCvt.padLeft a (dest_int b) c; -end; - - -(* Word *) - -structure Word = -struct - open Word; - val wordSize = mk_int Word.wordSize; - val toInt = Word.toLargeInt; - val toIntX = Word.toLargeIntX; - val fromInt = Word.fromLargeInt; -end; - -structure Word8 = -struct - open Word8; - val wordSize = mk_int Word8.wordSize; - val toInt = Word8.toLargeInt; - val toIntX = Word8.toLargeIntX; - val fromInt = Word8.fromLargeInt; -end; - -structure Word32 = -struct - open Word32; - val wordSize = mk_int Word32.wordSize; - val toInt = Word32.toLargeInt; - val toIntX = Word32.toLargeIntX; - val fromInt = Word32.fromLargeInt; -end; - -structure LargeWord = -struct - open LargeWord; - val wordSize = mk_int LargeWord.wordSize; - val toInt = LargeWord.toLargeInt; - val toIntX = LargeWord.toLargeIntX; - val fromInt = LargeWord.fromLargeInt; -end; - - -(* Real *) - -structure Real = -struct - open Real; - val radix = mk_int Real.radix; - val precision = mk_int Real.precision; - fun sign a = mk_int (Real.sign a); - fun toManExp a = let val {man, exp} = Real.toManExp a in {man = man, exp = mk_int exp} end; - fun fromManExp {man, exp} = Real.fromManExp {man = man, exp = dest_int exp}; - val ceil = mk_int o Real.ceil; - val floor = mk_int o Real.floor; - val real = Real.fromInt o dest_int; - val round = mk_int o Real.round; - val trunc = mk_int o Real.trunc; - fun toInt a b = mk_int (Real.toInt a b); - fun fromInt a = Real.fromInt (dest_int a); - val fmt = StringCvt.realfmt; -end; - -val ceil = Real.ceil; -val floor = Real.floor; -val real = Real.real; -val round = Real.round; -val trunc = Real.trunc; - - -(* TextIO *) - -structure TextIO = -struct - open TextIO; - fun inputN (a, b) = TextIO.inputN (a, dest_int b); - fun canInput (a, b) = Option.map mk_int (TextIO.canInput (a, dest_int b)); -end; - - -(* BinIO *) - -structure BinIO = -struct - open BinIO; - fun inputN (a, b) = BinIO.inputN (a, dest_int b); - fun canInput (a, b) = Option.map mk_int (BinIO.canInput (a, dest_int b)); -end; - - -(* Time *) - -structure Time = -struct - open Time; - fun fmt a b = Time.fmt (dest_int a) b; -end; - - -(* Sockets *) - -structure INetSock = -struct - open INetSock; - fun toAddr (a, b) = INetSock.toAddr (a, dest_int b); - fun fromAddr adr = let val (a, b) = INetSock.fromAddr adr in (a, mk_int b) end; -end; - - -(* OS.FileSys *) - -structure OS = -struct - open OS; - structure FileSys = - struct - open FileSys; - fun fileSize a = mk_int (FileSys.fileSize a); - end; -end; diff --git 
a/core/Pure/ML-Systems/share_common_data_polyml-5.3.0.ML b/core/Pure/ML-Systems/share_common_data_polyml-5.3.0.ML deleted file mode 100644 index 4091e4a7..00000000 --- a/core/Pure/ML-Systems/share_common_data_polyml-5.3.0.ML +++ /dev/null @@ -1,11 +0,0 @@ -(* Title: Pure/ML-Systems/share_common_data_polyml-5.3.0.ML - -Dummy for Poly/ML 5.3.0, which cannot share the massive heap of HOL -anymore. -*) - -structure PolyML = -struct - open PolyML; - fun shareCommonData _ = (); -end; diff --git a/core/Pure/ML-Systems/single_assignment.ML b/core/Pure/ML-Systems/single_assignment.ML deleted file mode 100644 index 6abbe443..00000000 --- a/core/Pure/ML-Systems/single_assignment.ML +++ /dev/null @@ -1,33 +0,0 @@ -(* Title: Pure/ML-Systems/single_assignment.ML - Author: Makarius - -References with single assignment. Unsynchronized! -*) - -signature SINGLE_ASSIGNMENT = -sig - type 'a saref - exception Locked - val saref: unit -> 'a saref - val savalue: 'a saref -> 'a option - val saset: 'a saref * 'a -> unit -end; - -structure SingleAssignment: SINGLE_ASSIGNMENT = -struct - -exception Locked; - -abstype 'a saref = SARef of 'a option ref -with - -fun saref () = SARef (ref NONE); - -fun savalue (SARef r) = ! r; - -fun saset (SARef (r as ref NONE), x) = r := SOME x - | saset _ = raise Locked; - -end; - -end; diff --git a/core/Pure/ML-Systems/single_assignment_polyml.ML b/core/Pure/ML-Systems/single_assignment_polyml.ML deleted file mode 100644 index 4367dbea..00000000 --- a/core/Pure/ML-Systems/single_assignment_polyml.ML +++ /dev/null @@ -1,35 +0,0 @@ -(* Title: Pure/ML-Systems/single_assignment_polyml.ML - Author: Makarius - -References with single assignment. Unsynchronized! Emulates -structure SingleAssignment from Poly/ML 5.4. -*) - -signature SINGLE_ASSIGNMENT = -sig - type 'a saref - exception Locked - val saref: unit -> 'a saref - val savalue: 'a saref -> 'a option - val saset: 'a saref * 'a -> unit -end; - -structure SingleAssignment: SINGLE_ASSIGNMENT = -struct - -exception Locked; - -abstype 'a saref = SARef of 'a option ref -with - -fun saref () = SARef (ref NONE); - -fun savalue (SARef r) = ! r; - -fun saset (SARef (r as ref NONE), x) = - (r := SOME x; RunCall.run_call1 RuntimeCalls.POLY_SYS_lockseg r) - | saset _ = raise Locked; - -end; - -end; diff --git a/core/Pure/ML-Systems/smlnj.ML b/core/Pure/ML-Systems/smlnj.ML deleted file mode 100644 index 0c9887d3..00000000 --- a/core/Pure/ML-Systems/smlnj.ML +++ /dev/null @@ -1,180 +0,0 @@ -(* Title: Pure/ML-Systems/smlnj.ML - -Compatibility file for Standard ML of New Jersey. 
-*) - -val io_buffer_size = 4096; -use "ML-Systems/proper_int.ML"; - -exception Interrupt; -fun reraise exn = raise exn; - -fun exit rc = Posix.Process.exit (Word8.fromInt rc); -fun quit () = exit 0; - -use "ML-Systems/overloading_smlnj.ML"; -use "General/exn.ML"; -use "ML-Systems/single_assignment.ML"; -use "ML-Systems/universal.ML"; -use "ML-Systems/thread_dummy.ML"; -use "ML-Systems/multithreading.ML"; -use "ML-Systems/ml_name_space.ML"; -use "ML-Systems/ml_pretty.ML"; -structure PolyML = struct end; -use "ML-Systems/pp_dummy.ML"; -use "ML-Systems/use_context.ML"; -use "ML-Systems/ml_positions.ML"; - - -val seconds = Time.fromReal; - -(*low-level pointer equality*) -CM.autoload "$smlnj/init/init.cmi"; -val pointer_eq = InlineT.ptreql; - - -(* restore old-style character / string functions *) - -val ord = mk_int o SML90.ord; -val chr = SML90.chr o dest_int; -val raw_explode = SML90.explode; -val implode = SML90.implode; - - -(* New Jersey ML parameters *) - -val _ = - (Control.Print.printLength := 1000; - Control.Print.printDepth := 350; - Control.Print.stringDepth := 250; - Control.Print.signatures := 2; - Control.MC.matchRedundantError := false); - - -(* Poly/ML emulation *) - -(*limit the printing depth -- divided by 2 for comparibility with Poly/ML*) -local - val depth = ref (10: int); -in - fun get_default_print_depth () = ! depth; - fun default_print_depth n = - (depth := n; - Control.Print.printDepth := dest_int n div 2; - Control.Print.printLength := dest_int n); - val _ = default_print_depth 10; -end; - -val ml_make_string = "(fn _ => \"?\")"; - - -(*prompts*) -fun ml_prompts p1 p2 = - (Control.primaryPrompt := p1; Control.secondaryPrompt := p2); - -(*dummy implementation*) -fun profile (n: int) f x = f x; - -(*dummy implementation*) -fun print_exception_trace (_: exn -> string) f = f (); - - -(* ML command execution *) - -fun use_text ({tune_source, print, error, ...}: use_context) (line, name) verbose txt = - let - val ref out_orig = Control.Print.out; - - val out_buffer = ref ([]: string list); - val out = {say = (fn s => out_buffer := s :: ! out_buffer), flush = (fn () => ())}; - fun output () = - let val str = implode (rev (! 
out_buffer)) - in String.substring (str, 0, Int.max (0, size str - 1)) end; - in - Control.Print.out := out; - Backend.Interact.useStream (TextIO.openString (tune_source (ml_positions line name txt))) - handle exn => - (Control.Print.out := out_orig; - error ((if name = "" then "" else "Error in " ^ name ^ "\n") ^ output ()); raise exn); - Control.Print.out := out_orig; - if verbose then print (output ()) else () - end; - -fun use_file context verbose name = - let - val instream = TextIO.openIn name; - val txt = Exn.release (Exn.capture TextIO.inputAll instream before TextIO.closeIn instream); - in use_text context (1, name) verbose txt end; - - -(* toplevel pretty printing *) - -fun ml_pprint pps = - let - fun str "" = () - | str s = PrettyPrint.string pps s; - fun pprint (ML_Pretty.Block ((bg, en), prts, ind)) = - (str bg; PrettyPrint.openHOVBox pps (PrettyPrint.Rel (dest_int ind)); - List.app pprint prts; PrettyPrint.closeBox pps; str en) - | pprint (ML_Pretty.String (s, _)) = str s - | pprint (ML_Pretty.Break (false, wd)) = PrettyPrint.break pps {nsp = dest_int wd, offset = 0} - | pprint (ML_Pretty.Break (true, _)) = PrettyPrint.newline pps; - in pprint end; - -fun toplevel_pp context path pp = - use_text context (1, "pp") false - ("CompilerPPTable.install_pp [" ^ String.concatWith "," (map (fn s => "\"" ^ s ^ "\"") path) ^ - "] (fn pps => ml_pprint pps o Pretty.to_ML o (" ^ pp ^ "))"); - - - -(** interrupts **) - -local - -fun change_signal new_handler f x = - let - val old_handler = Signals.setHandler (Signals.sigINT, new_handler); - val result = Exn.capture (f old_handler) x; - val _ = Signals.setHandler (Signals.sigINT, old_handler); - in Exn.release result end; - -in - -fun interruptible (f: 'a -> 'b) x = - let - val result = ref (Exn.interrupt_exn: 'b Exn.result); - val old_handler = Signals.inqHandler Signals.sigINT; - in - SMLofNJ.Cont.callcc (fn cont => - (Signals.setHandler (Signals.sigINT, Signals.HANDLER (fn _ => cont)); - result := Exn.capture f x)); - Signals.setHandler (Signals.sigINT, old_handler); - Exn.release (! result) - end; - -fun uninterruptible f = - change_signal Signals.IGNORE - (fn old_handler => f (fn g => change_signal old_handler (fn _ => g))); - -end; - - -use "ML-Systems/unsynchronized.ML"; - - -(* ML system operations *) - -use "ML-Systems/ml_system.ML"; - -structure ML_System = -struct - -open ML_System; - -fun save_state name = - if SMLofNJ.exportML name then () - else OS.FileSys.rename {old = name ^ "." ^ platform, new = name}; - -end; - diff --git a/core/Pure/ML-Systems/thread_dummy.ML b/core/Pure/ML-Systems/thread_dummy.ML deleted file mode 100644 index ebccf444..00000000 --- a/core/Pure/ML-Systems/thread_dummy.ML +++ /dev/null @@ -1,82 +0,0 @@ -(* Title: Pure/ML-Systems/thread_dummy.ML - Author: Makarius - -Default (mostly dummy) implementation of thread structures -(cf. polyml/basis/Thread.sml). 
-*) - -exception Thread of string; - -local fun fail _ = raise Thread "No multithreading support on this ML platform" in - -structure Mutex = -struct - type mutex = unit; - fun mutex _ = (); - fun lock _ = fail (); - fun unlock _ = fail (); - fun trylock _ = fail (); -end; - -structure ConditionVar = -struct - type conditionVar = unit; - fun conditionVar _ = (); - fun wait _ = fail (); - fun waitUntil _ = fail (); - fun signal _ = fail (); - fun broadcast _ = fail (); -end; - -structure Thread = -struct - type thread = unit; - - datatype threadAttribute = EnableBroadcastInterrupt of bool | InterruptState of interruptState - and interruptState = InterruptDefer | InterruptSynch | InterruptAsynch | InterruptAsynchOnce; - - fun unavailable () = fail (); - - fun fork _ = fail (); - fun exit _ = fail (); - fun isActive _ = fail (); - - fun equal _ = fail (); - fun self _ = fail (); - - fun interrupt _ = fail (); - fun broadcastInterrupt _ = fail (); - fun testInterrupt _ = fail (); - - fun kill _ = fail (); - - fun setAttributes _ = fail (); - fun getAttributes _ = fail (); - - fun numProcessors _ = fail (); - - -(* thread data *) - -local - -val data = ref ([]: Universal.universal ref list); - -fun find_data tag = - let - fun find (r :: rs) = if Universal.tagIs tag (! r) then SOME r else find rs - | find [] = NONE; - in find (! data) end; - -in - -fun getLocal tag = Option.map (Universal.tagProject tag o !) (find_data tag); - -fun setLocal (tag, x) = - (case find_data tag of - SOME r => r := Universal.tagInject tag x - | NONE => data := ref (Universal.tagInject tag x) :: ! data); - -end; -end; -end; diff --git a/core/Pure/ML-Systems/universal.ML b/core/Pure/ML-Systems/universal.ML deleted file mode 100644 index 7a4d9a42..00000000 --- a/core/Pure/ML-Systems/universal.ML +++ /dev/null @@ -1,41 +0,0 @@ -(* Title: Pure/ML-Systems/universal.ML - Author: Makarius - -Universal values via tagged union. Emulates structure Universal -from Poly/ML 5.1. -*) - -signature UNIVERSAL = -sig - type universal - type 'a tag - val tag: unit -> 'a tag - val tagIs: 'a tag -> universal -> bool - val tagInject: 'a tag -> 'a -> universal - val tagProject: 'a tag -> universal -> 'a -end; - -structure Universal: UNIVERSAL = -struct - -type universal = exn; - -datatype 'a tag = Tag of - {is: universal -> bool, - inject: 'a -> universal, - project: universal -> 'a}; - -fun tag () = - let exception Universal of 'a in - Tag { - is = fn Universal _ => true | _ => false, - inject = Universal, - project = fn Universal x => x} - end; - -fun tagIs (Tag {is, ...}) = is; -fun tagInject (Tag {inject, ...}) = inject; -fun tagProject (Tag {project, ...}) = project; - -end; - diff --git a/core/Pure/ML-Systems/unsynchronized.ML b/core/Pure/ML-Systems/unsynchronized.ML deleted file mode 100644 index 44136851..00000000 --- a/core/Pure/ML-Systems/unsynchronized.ML +++ /dev/null @@ -1,30 +0,0 @@ -(* Title: Pure/ML-Systems/unsynchronized.ML - Author: Makarius - -Raw ML references as unsynchronized state variables. -*) - -structure Unsynchronized = -struct - -datatype ref = datatype ref; - -val op := = op :=; -val ! = !; - -fun change r f = r := f (! r); -fun change_result r f = let val (x, y) = f (! r) in r := y; x end; - -fun inc i = (i := ! i + (1: int); ! i); -fun dec i = (i := ! i - (1: int); ! i); - -fun setmp flag value f x = - uninterruptible (fn restore_attributes => fn () => - let - val orig_value = ! 
flag; - val _ = flag := value; - val result = Exn.capture (restore_attributes f) x; - val _ = flag := orig_value; - in Exn.release result end) (); - -end; diff --git a/core/Pure/ML-Systems/use_context.ML b/core/Pure/ML-Systems/use_context.ML deleted file mode 100644 index 4e2ead39..00000000 --- a/core/Pure/ML-Systems/use_context.ML +++ /dev/null @@ -1,13 +0,0 @@ -(* Title: Pure/ML-Systems/use_context.ML - Author: Makarius - -Common context for "use" operations (compiler invocation). -*) - -type use_context = - {tune_source: string -> string, - name_space: ML_Name_Space.T, - str_of_pos: int -> string -> string, - print: string -> unit, - error: string -> unit}; - diff --git a/core/Pure/ML/exn_output.ML b/core/Pure/ML/exn_output.ML deleted file mode 100644 index 549d75f3..00000000 --- a/core/Pure/ML/exn_output.ML +++ /dev/null @@ -1,20 +0,0 @@ -(* Title: Pure/ML/exn_output.ML - Author: Makarius - -Auxiliary operations for exception output -- generic version. -*) - -signature EXN_OUTPUT = -sig - val position: exn -> Position.T - val pretty: exn -> Pretty.T -end - -structure Exn_Output: EXN_OUTPUT = -struct - -fun position (_: exn) = Position.none -val pretty = Pretty.str o General.exnMessage; - -end; - diff --git a/core/Pure/ML/exn_output_polyml.ML b/core/Pure/ML/exn_output_polyml.ML deleted file mode 100644 index 1b67c037..00000000 --- a/core/Pure/ML/exn_output_polyml.ML +++ /dev/null @@ -1,19 +0,0 @@ -(* Title: Pure/ML/exn_output_polyml.ML - Author: Makarius - -Auxiliary operations for exception output -- Poly/ML version. -*) - -structure Exn_Output: EXN_OUTPUT = -struct - -fun position exn = - (case PolyML.exceptionLocation exn of - NONE => Position.none - | SOME loc => Exn_Properties.position_of loc); - -fun pretty (exn: exn) = - Pretty.from_ML (pretty_ml (PolyML.prettyRepresentation (exn, ML_Options.get_print_depth ()))); - -end; - diff --git a/core/Pure/ML/exn_properties_dummy.ML b/core/Pure/ML/exn_properties_dummy.ML deleted file mode 100644 index 8212b165..00000000 --- a/core/Pure/ML/exn_properties_dummy.ML +++ /dev/null @@ -1,20 +0,0 @@ -(* Title: Pure/ML/exn_properties_dummy.ML - Author: Makarius - -Exception properties -- dummy version. -*) - -signature EXN_PROPERTIES = -sig - val get: exn -> Properties.T - val update: Properties.entry list -> exn -> exn -end; - -structure Exn_Properties: EXN_PROPERTIES = -struct - -fun get _ = []; -fun update _ exn = exn; - -end; - diff --git a/core/Pure/ML/exn_properties_polyml.ML b/core/Pure/ML/exn_properties_polyml.ML deleted file mode 100644 index d24064a5..00000000 --- a/core/Pure/ML/exn_properties_polyml.ML +++ /dev/null @@ -1,62 +0,0 @@ -(* Title: Pure/ML/exn_properties_polyml.ML - Author: Makarius - -Exception properties for Poly/ML. 
-*) - -signature EXN_PROPERTIES = -sig - val position_of: PolyML.location -> Position.T - val get: exn -> Properties.T - val update: Properties.entry list -> exn -> exn -end; - -structure Exn_Properties: EXN_PROPERTIES = -struct - -(* source locations *) - -fun props_of (loc: PolyML.location) = - (case YXML.parse_body (#file loc) of - [] => [] - | [XML.Text file] => [(Markup.fileN, file)] - | body => XML.Decode.properties body); - -fun position_of loc = - Position.make - {line = #startLine loc, - offset = #startPosition loc, - end_offset = #endPosition loc, - props = props_of loc}; - - -(* exception properties *) - -fun get exn = - (case PolyML.exceptionLocation exn of - NONE => [] - | SOME loc => props_of loc); - -fun update entries exn = - let - val loc = - the_default {file = "", startLine = 0, endLine = 0, startPosition = 0, endPosition = 0} - (PolyML.exceptionLocation exn); - val props = props_of loc; - val props' = fold Properties.put entries props; - in - if props = props' then exn - else - let - val loc' = - {file = YXML.string_of_body (XML.Encode.properties props'), - startLine = #startLine loc, endLine = #endLine loc, - startPosition = #startPosition loc, endPosition = #endPosition loc}; - in - uninterruptible (fn _ => fn () => PolyML.raiseWithLocation (exn, loc')) () - handle exn' => exn' - end - end; - -end; - diff --git a/core/Pure/ML/exn_trace_polyml-5.5.1.ML b/core/Pure/ML/exn_trace_polyml-5.5.1.ML deleted file mode 100644 index a0ce1624..00000000 --- a/core/Pure/ML/exn_trace_polyml-5.5.1.ML +++ /dev/null @@ -1,16 +0,0 @@ -(* Title: Pure/ML/exn_trace_polyml-5.5.1.ML - Author: Makarius - -Exception trace for Poly/ML 5.5.1, using regular Isabelle output. -*) - -fun print_exception_trace exn_message e = - PolyML.Exception.traceException - (e, fn (trace, exn) => - let - val title = "Exception trace - " ^ exn_message exn; - val _ = tracing (cat_lines (title :: trace)); - in reraise exn end); - -PolyML.Compiler.reportExhaustiveHandlers := true; - diff --git a/core/Pure/ML/install_pp_polyml.ML b/core/Pure/ML/install_pp_polyml.ML deleted file mode 100644 index 8dcfa50d..00000000 --- a/core/Pure/ML/install_pp_polyml.ML +++ /dev/null @@ -1,110 +0,0 @@ -(* Title: Pure/ML/install_pp_polyml.ML - Author: Makarius - -Extra toplevel pretty-printing for Poly/ML. -*) - -PolyML.addPrettyPrinter (fn depth => fn _ => fn str => - ml_pretty (Pretty.to_ML (ML_Syntax.pretty_string (depth * 100) str))); - -PolyML.addPrettyPrinter (fn depth => fn _ => fn tree => - ml_pretty (Pretty.to_ML (XML.pretty depth tree))); - -PolyML.addPrettyPrinter (fn depth => fn pretty => fn var => - pretty (Synchronized.value var, depth)); - -PolyML.addPrettyPrinter (fn depth => fn pretty => fn x => - (case Future.peek x of - NONE => PolyML.PrettyString "" - | SOME (Exn.Exn _) => PolyML.PrettyString "" - | SOME (Exn.Res y) => pretty (y, depth))); - -PolyML.addPrettyPrinter (fn depth => fn pretty => fn x => - (case Lazy.peek x of - NONE => PolyML.PrettyString "" - | SOME (Exn.Exn _) => PolyML.PrettyString "" - | SOME (Exn.Res y) => pretty (y, depth))); - - -local - -open PolyML; -val from_ML = Pretty.from_ML o pretty_ml; -fun prt_app name prt = Pretty.block [Pretty.str (name ^ " "), prt]; -fun prt_apps name = Pretty.enum "," (name ^ " (") ")"; - -fun prt_term parens dp t = - if dp <= 0 then Pretty.str "..." 
- else - (case t of - _ $ _ => - op :: (strip_comb t) - |> map_index (fn (i, u) => prt_term true (dp - i - 1) u) - |> Pretty.separate " $" - |> (if parens then Pretty.enclose "(" ")" else Pretty.block) - | Abs (a, T, b) => - prt_apps "Abs" - [from_ML (prettyRepresentation (a, dp - 1)), - from_ML (prettyRepresentation (T, dp - 2)), - prt_term false (dp - 3) b] - | Const a => prt_app "Const" (from_ML (prettyRepresentation (a, dp - 1))) - | Free a => prt_app "Free" (from_ML (prettyRepresentation (a, dp - 1))) - | Var a => prt_app "Var" (from_ML (prettyRepresentation (a, dp - 1))) - | Bound a => prt_app "Bound" (from_ML (prettyRepresentation (a, dp - 1)))); - -in - -val _ = - PolyML.addPrettyPrinter (fn depth => fn _ => fn t => - ml_pretty (Pretty.to_ML (prt_term false depth t))); - -local - -fun prt_proof parens dp prf = - if dp <= 0 then Pretty.str "..." - else - (case prf of - _ % _ => prt_proofs parens dp prf - | _ %% _ => prt_proofs parens dp prf - | Abst (a, T, b) => - prt_apps "Abst" - [from_ML (prettyRepresentation (a, dp - 1)), - from_ML (prettyRepresentation (T, dp - 2)), - prt_proof false (dp - 3) b] - | AbsP (a, t, b) => - prt_apps "AbsP" - [from_ML (prettyRepresentation (a, dp - 1)), - from_ML (prettyRepresentation (t, dp - 2)), - prt_proof false (dp - 3) b] - | Hyp t => prt_app "Hyp" (prt_term true (dp - 1) t) - | MinProof => Pretty.str "MinProof" - | PBound a => prt_app "PBound" (from_ML (prettyRepresentation (a, dp - 1))) - | PAxm a => prt_app "PAxm" (from_ML (prettyRepresentation (a, dp - 1))) - | OfClass a => prt_app "OfClass" (from_ML (prettyRepresentation (a, dp - 1))) - | Oracle a => prt_app "Oracle" (from_ML (prettyRepresentation (a, dp - 1))) - | Promise a => prt_app "Promise" (from_ML (prettyRepresentation (a, dp - 1))) - | PThm a => prt_app "PThm" (from_ML (prettyRepresentation (a, dp - 1)))) - -and prt_proofs parens dp prf = - let - val (head, args) = strip_proof prf []; - val prts = head (dp - 1) :: flat (map_index (fn (i, prt) => prt (dp - i - 2)) args); - in if parens then Pretty.enclose "(" ")" prts else Pretty.block prts end - -and strip_proof (p % t) res = - strip_proof p - ((fn d => [Pretty.str " %", Pretty.brk 1, from_ML (prettyRepresentation (t, d))]) :: res) - | strip_proof (p %% q) res = - strip_proof p ((fn d => [Pretty.str " %%", Pretty.brk 1, prt_proof true d q]) :: res) - | strip_proof p res = (fn d => prt_proof true d p, res); - -in - -val _ = - PolyML.addPrettyPrinter (fn depth => fn _ => fn prf => - ml_pretty (Pretty.to_ML (prt_proof false depth prf))); - -end; - -end; - diff --git a/core/Pure/ML/ml_antiquotation.ML b/core/Pure/ML/ml_antiquotation.ML deleted file mode 100644 index 7cb8065e..00000000 --- a/core/Pure/ML/ml_antiquotation.ML +++ /dev/null @@ -1,74 +0,0 @@ -(* Title: Pure/ML/ml_antiquotation.ML - Author: Makarius - -ML antiquotations. 
-*) - -signature ML_ANTIQUOTATION = -sig - val variant: string -> Proof.context -> string * Proof.context - val declaration: binding -> 'a context_parser -> - (Args.src -> 'a -> Proof.context -> ML_Context.decl * Proof.context) -> - theory -> theory - val inline: binding -> string context_parser -> theory -> theory - val value: binding -> string context_parser -> theory -> theory -end; - -structure ML_Antiquotation: ML_ANTIQUOTATION = -struct - -(* unique names *) - -val init_context = ML_Syntax.reserved |> fold Name.declare ["ML_context", "ML_print_depth"]; - -structure Names = Proof_Data -( - type T = Name.context; - fun init _ = init_context; -); - -fun variant a ctxt = - let - val names = Names.get ctxt; - val (b, names') = Name.variant (Name.desymbolize (SOME false) a) names; - val ctxt' = Names.put names' ctxt; - in (b, ctxt') end; - - -(* define antiquotations *) - -fun declaration name scan body = - ML_Context.add_antiquotation name - (fn src => fn orig_ctxt => - let val (x, _) = Args.syntax scan src orig_ctxt - in body src x orig_ctxt end); - -fun inline name scan = - declaration name scan (fn _ => fn s => fn ctxt => (K ("", s), ctxt)); - -fun value name scan = - declaration name scan (fn _ => fn s => fn ctxt => - let - val (a, ctxt') = variant (Binding.name_of name) ctxt; - val env = "val " ^ a ^ " = " ^ s ^ ";\n"; - val body = "Isabelle." ^ a; - in (K (env, body), ctxt') end); - - -(* basic antiquotations *) - -val _ = Theory.setup - (declaration (Binding.make ("here", @{here})) (Scan.succeed ()) - (fn src => fn () => fn ctxt => - let - val (a, ctxt') = variant "position" ctxt; - val (_, pos) = Args.name_of_src src; - val env = "val " ^ a ^ " = " ^ ML_Syntax.print_position pos ^ ";\n"; - val body = "Isabelle." ^ a; - in (K (env, body), ctxt') end) #> - - value (Binding.make ("binding", @{here})) - (Scan.lift (Parse.position Args.name) >> ML_Syntax.make_binding)); - -end; - diff --git a/core/Pure/ML/ml_antiquotations.ML b/core/Pure/ML/ml_antiquotations.ML deleted file mode 100644 index f32d5852..00000000 --- a/core/Pure/ML/ml_antiquotations.ML +++ /dev/null @@ -1,186 +0,0 @@ -(* Title: Pure/ML/ml_antiquotations.ML - Author: Makarius - -Miscellaneous ML antiquotations. 
-*) - -structure ML_Antiquotations: sig end = -struct - -val _ = Theory.setup - (ML_Antiquotation.value @{binding system_option} - (Args.context -- Scan.lift (Parse.position Args.name) >> (fn (ctxt, (name, pos)) => - (Context_Position.report ctxt pos (Options.default_markup (name, pos)); - ML_Syntax.print_string name))) #> - - ML_Antiquotation.value @{binding theory} - (Args.context -- Scan.lift (Parse.position Args.name) >> (fn (ctxt, (name, pos)) => - (Context_Position.report ctxt pos - (Theory.get_markup (Context.get_theory (Proof_Context.theory_of ctxt) name)); - "Context.get_theory (Proof_Context.theory_of ML_context) " ^ ML_Syntax.print_string name)) - || Scan.succeed "Proof_Context.theory_of ML_context") #> - - ML_Antiquotation.value @{binding theory_context} - (Args.context -- Scan.lift (Parse.position Args.name) >> (fn (ctxt, (name, pos)) => - (Context_Position.report ctxt pos - (Theory.get_markup (Context.get_theory (Proof_Context.theory_of ctxt) name)); - "Proof_Context.get_global (Proof_Context.theory_of ML_context) " ^ - ML_Syntax.print_string name))) #> - - ML_Antiquotation.inline @{binding context} (Scan.succeed "Isabelle.ML_context") #> - - ML_Antiquotation.inline @{binding typ} (Args.typ >> (ML_Syntax.atomic o ML_Syntax.print_typ)) #> - ML_Antiquotation.inline @{binding term} (Args.term >> (ML_Syntax.atomic o ML_Syntax.print_term)) #> - ML_Antiquotation.inline @{binding prop} (Args.prop >> (ML_Syntax.atomic o ML_Syntax.print_term)) #> - - ML_Antiquotation.value @{binding ctyp} (Args.typ >> (fn T => - "Thm.ctyp_of (Proof_Context.theory_of ML_context) " ^ - ML_Syntax.atomic (ML_Syntax.print_typ T))) #> - - ML_Antiquotation.value @{binding cterm} (Args.term >> (fn t => - "Thm.cterm_of (Proof_Context.theory_of ML_context) " ^ - ML_Syntax.atomic (ML_Syntax.print_term t))) #> - - ML_Antiquotation.value @{binding cprop} (Args.prop >> (fn t => - "Thm.cterm_of (Proof_Context.theory_of ML_context) " ^ - ML_Syntax.atomic (ML_Syntax.print_term t))) #> - - ML_Antiquotation.value @{binding cpat} - (Args.context -- - Scan.lift Args.name_inner_syntax >> uncurry Proof_Context.read_term_pattern >> (fn t => - "Thm.cterm_of (Proof_Context.theory_of ML_context) " ^ - ML_Syntax.atomic (ML_Syntax.print_term t)))); - - -(* ML support *) - -val _ = Theory.setup - (ML_Antiquotation.inline @{binding assert} - (Scan.succeed "(fn b => if b then () else raise General.Fail \"Assertion failed\")") #> - - ML_Antiquotation.inline @{binding make_string} (Scan.succeed ml_make_string) #> - - ML_Antiquotation.declaration @{binding print} - (Scan.lift (Scan.optional Args.name "Output.writeln")) - (fn src => fn output => fn ctxt => - let - val (_, pos) = Args.name_of_src src; - val (a, ctxt') = ML_Antiquotation.variant "output" ctxt; - val env = - "val " ^ a ^ ": string -> unit =\n\ - \ (" ^ output ^ ") o (fn s => s ^ Position.here (" ^ - ML_Syntax.print_position pos ^ "));\n"; - val body = - "(fn x => (Isabelle." ^ a ^ " (" ^ ml_make_string ^ " x); x))"; - in (K (env, body), ctxt') end)); - - -(* type classes *) - -fun class syn = Args.context -- Scan.lift Args.name_inner_syntax >> (fn (ctxt, s) => - Proof_Context.read_class ctxt s - |> syn ? 
Lexicon.mark_class - |> ML_Syntax.print_string); - -val _ = Theory.setup - (ML_Antiquotation.inline @{binding class} (class false) #> - ML_Antiquotation.inline @{binding class_syntax} (class true) #> - - ML_Antiquotation.inline @{binding sort} - (Args.context -- Scan.lift Args.name_inner_syntax >> (fn (ctxt, s) => - ML_Syntax.atomic (ML_Syntax.print_sort (Syntax.read_sort ctxt s))))); - - -(* type constructors *) - -fun type_name kind check = Args.context -- Scan.lift (Parse.position Args.name_inner_syntax) - >> (fn (ctxt, (s, pos)) => - let - val Type (c, _) = Proof_Context.read_type_name {proper = true, strict = false} ctxt s; - val decl = Type.the_decl (Proof_Context.tsig_of ctxt) (c, pos); - val res = - (case try check (c, decl) of - SOME res => res - | NONE => error ("Not a " ^ kind ^ ": " ^ quote c ^ Position.here pos)); - in ML_Syntax.print_string res end); - -val _ = Theory.setup - (ML_Antiquotation.inline @{binding type_name} - (type_name "logical type" (fn (c, Type.LogicalType _) => c)) #> - ML_Antiquotation.inline @{binding type_abbrev} - (type_name "type abbreviation" (fn (c, Type.Abbreviation _) => c)) #> - ML_Antiquotation.inline @{binding nonterminal} - (type_name "nonterminal" (fn (c, Type.Nonterminal) => c)) #> - ML_Antiquotation.inline @{binding type_syntax} - (type_name "type" (fn (c, _) => Lexicon.mark_type c))); - - -(* constants *) - -fun const_name check = Args.context -- Scan.lift (Parse.position Args.name_inner_syntax) - >> (fn (ctxt, (s, pos)) => - let - val Const (c, _) = Proof_Context.read_const {proper = true, strict = false} ctxt s; - val res = check (Proof_Context.consts_of ctxt, c) - handle TYPE (msg, _, _) => error (msg ^ Position.here pos); - in ML_Syntax.print_string res end); - -val _ = Theory.setup - (ML_Antiquotation.inline @{binding const_name} - (const_name (fn (consts, c) => (Consts.the_const consts c; c))) #> - ML_Antiquotation.inline @{binding const_abbrev} - (const_name (fn (consts, c) => (Consts.the_abbreviation consts c; c))) #> - ML_Antiquotation.inline @{binding const_syntax} - (const_name (fn (_, c) => Lexicon.mark_const c)) #> - - ML_Antiquotation.inline @{binding syntax_const} - (Args.context -- Scan.lift (Parse.position Args.name) >> (fn (ctxt, (c, pos)) => - if is_some (Syntax.lookup_const (Proof_Context.syn_of ctxt) c) - then ML_Syntax.print_string c - else error ("Unknown syntax const: " ^ quote c ^ Position.here pos))) #> - - ML_Antiquotation.inline @{binding const} - (Args.context -- Scan.lift (Parse.position Args.name_inner_syntax) -- Scan.optional - (Scan.lift (Args.$$$ "(") |-- Parse.enum1' "," Args.typ --| Scan.lift (Args.$$$ ")")) [] - >> (fn ((ctxt, (raw_c, pos)), Ts) => - let - val Const (c, _) = - Proof_Context.read_const {proper = true, strict = true} ctxt raw_c; - val consts = Proof_Context.consts_of ctxt; - val n = length (Consts.typargs consts (c, Consts.type_scheme consts c)); - val _ = length Ts <> n andalso - error ("Constant requires " ^ string_of_int n ^ " type argument(s): " ^ - quote c ^ enclose "(" ")" (commas (replicate n "_")) ^ Position.here pos); - val const = Const (c, Consts.instance consts (c, Ts)); - in ML_Syntax.atomic (ML_Syntax.print_term const) end))); - - -(* outer syntax *) - -fun with_keyword f = - Args.theory -- Scan.lift (Parse.position Parse.string) >> (fn (thy, (name, pos)) => - (f ((name, Thy_Header.the_keyword thy name), pos) - handle ERROR msg => error (msg ^ Position.here pos))); - -val _ = Theory.setup - (ML_Antiquotation.value @{binding keyword} - (with_keyword - (fn ((name, NONE), _) => 
"Parse.$$$ " ^ ML_Syntax.print_string name - | ((name, SOME _), pos) => - error ("Expected minor keyword " ^ quote name ^ Position.here pos))) #> - ML_Antiquotation.value @{binding command_spec} - (with_keyword - (fn ((name, SOME kind), pos) => - "Keyword.command_spec " ^ ML_Syntax.atomic - ((ML_Syntax.print_pair - (ML_Syntax.print_pair ML_Syntax.print_string - (ML_Syntax.print_pair - (ML_Syntax.print_pair ML_Syntax.print_string - (ML_Syntax.print_list ML_Syntax.print_string)) - (ML_Syntax.print_list ML_Syntax.print_string))) - ML_Syntax.print_position) ((name, kind), pos)) - | ((name, NONE), pos) => - error ("Expected command keyword " ^ quote name ^ Position.here pos)))); - -end; - diff --git a/core/Pure/ML/ml_compiler.ML b/core/Pure/ML/ml_compiler.ML deleted file mode 100644 index d2d940cb..00000000 --- a/core/Pure/ML/ml_compiler.ML +++ /dev/null @@ -1,33 +0,0 @@ -(* Title: Pure/ML/ml_compiler.ML - Author: Makarius - -Runtime compilation and evaluation -- generic version. -*) - -signature ML_COMPILER = -sig - type flags = {SML: bool, exchange: bool, redirect: bool, verbose: bool} - val flags: flags - val verbose: bool -> flags -> flags - val eval: flags -> Position.T -> ML_Lex.token list -> unit -end - -structure ML_Compiler: ML_COMPILER = -struct - -type flags = {SML: bool, exchange: bool, redirect: bool, verbose: bool}; -val flags = {SML = false, exchange = false, redirect = false, verbose = false}; - -fun verbose b (flags: flags) = - {SML = #SML flags, exchange = #exchange flags, redirect = #redirect flags, verbose = b}; - -fun eval (flags: flags) pos toks = - let - val _ = if #SML flags then error ("Standard ML is unsupported on " ^ ML_System.name) else (); - val line = the_default 1 (Position.line_of pos); - val file = the_default "ML" (Position.file_of pos); - val text = ML_Lex.flatten toks; - in Secure.use_text ML_Env.local_context (line, file) (#verbose flags) text end; - -end; - diff --git a/core/Pure/ML/ml_compiler_polyml.ML b/core/Pure/ML/ml_compiler_polyml.ML deleted file mode 100644 index 4693b8ad..00000000 --- a/core/Pure/ML/ml_compiler_polyml.ML +++ /dev/null @@ -1,202 +0,0 @@ -(* Title: Pure/ML/ml_compiler_polyml.ML - Author: Makarius - -Runtime compilation and evaluation -- Poly/ML version. -*) - -structure ML_Compiler: ML_COMPILER = -struct - -open ML_Compiler; - - -(* parse trees *) - -fun report_parse_tree redirect depth space parse_tree = - let - val is_visible = - (case Context.thread_data () of - SOME context => Context_Position.is_visible_generic context - | NONE => true); - fun is_reported pos = is_visible andalso Position.is_reported pos; - - fun reported_types loc types = - let val pos = Exn_Properties.position_of loc in - is_reported pos ? - let - val xml = - PolyML.NameSpace.displayTypeExpression (types, depth, space) - |> pretty_ml |> Pretty.from_ML |> Pretty.string_of - |> Output.output |> YXML.parse_body; - in cons (pos, fn () => Markup.ML_typing, fn () => YXML.string_of_body xml) end - end; - - fun reported_entity kind loc decl = - let val pos = Exn_Properties.position_of loc in - is_reported pos ? 
- let - val def_pos = Exn_Properties.position_of decl; - fun markup () = - (Markup.entityN, (Markup.kindN, kind) :: Position.def_properties_of def_pos); - in cons (pos, markup, fn () => "") end - end; - - fun reported loc (PolyML.PTtype types) = reported_types loc types - | reported loc (PolyML.PTdeclaredAt decl) = reported_entity Markup.ML_defN loc decl - | reported loc (PolyML.PTopenedAt decl) = reported_entity Markup.ML_openN loc decl - | reported loc (PolyML.PTstructureAt decl) = reported_entity Markup.ML_structureN loc decl - | reported _ (PolyML.PTnextSibling tree) = reported_tree (tree ()) - | reported _ (PolyML.PTfirstChild tree) = reported_tree (tree ()) - | reported _ _ = I - and reported_tree (loc, props) = fold (reported loc) props; - - val persistent_reports = reported_tree parse_tree []; - - fun output () = - persistent_reports - |> map (fn (pos, markup, text) => Position.reported_text pos (markup ()) (text ())) - |> Output.report; - in - if not (null persistent_reports) andalso redirect andalso Multithreading.enabled () - then - Execution.print - {name = "ML_Compiler.report", pos = Position.thread_data (), pri = 1} output - else output () - end; - - -(* eval ML source tokens *) - -fun eval (flags: flags) pos toks = - let - val _ = Secure.secure_mltext (); - val space = ML_Env.name_space {SML = #SML flags, exchange = #exchange flags} - val opt_context = Context.thread_data (); - - - (* input *) - - val location_props = op ^ (YXML.output_markup (":", #props (Position.dest pos))); - - val input_buffer = - Unsynchronized.ref (toks |> map - (`(maps (String.explode o Symbol.esc) o Symbol.explode o ML_Lex.check_content_of))); - - fun get () = - (case ! input_buffer of - (c :: cs, tok) :: rest => (input_buffer := (cs, tok) :: rest; SOME c) - | ([], _) :: rest => (input_buffer := rest; SOME #" ") - | [] => NONE); - - fun get_pos () = - (case ! input_buffer of - (_ :: _, tok) :: _ => ML_Lex.pos_of tok - | ([], tok) :: _ => ML_Lex.end_pos_of tok - | [] => Position.none); - - - (* output channels *) - - val writeln_buffer = Unsynchronized.ref Buffer.empty; - fun write s = Unsynchronized.change writeln_buffer (Buffer.add s); - fun output_writeln () = writeln (trim_line (Buffer.content (! writeln_buffer))); - - val warnings = Unsynchronized.ref ([]: string list); - fun warn msg = Unsynchronized.change warnings (cons msg); - fun output_warnings () = List.app warning (rev (! warnings)); - - val error_buffer = Unsynchronized.ref Buffer.empty; - fun err msg = Unsynchronized.change error_buffer (Buffer.add msg #> Buffer.add "\n"); - fun flush_error () = writeln (Buffer.content (! error_buffer)); - fun raise_error msg = error (Buffer.content (Buffer.add msg (! 
error_buffer))); - - fun message {message = msg, hard, location = loc, context = _} = - let - val pos = Exn_Properties.position_of loc; - val txt = - (if hard then "ML error" else "ML warning") ^ Position.here pos ^ ":\n" ^ - Pretty.string_of (Pretty.from_ML (pretty_ml msg)); - in if hard then err txt else warn txt end; - - - (* results *) - - val depth = ML_Options.get_print_depth (); - - fun apply_result {fixes, types, signatures, structures, functors, values} = - let - fun display disp x = - if depth > 0 then - (disp x |> pretty_ml |> Pretty.from_ML |> Pretty.string_of |> write; write "\n") - else (); - - fun apply_fix (a, b) = - (#enterFix space (a, b); display PolyML.NameSpace.displayFix (a, b)); - fun apply_type (a, b) = - (#enterType space (a, b); display PolyML.NameSpace.displayType (b, depth, space)); - fun apply_sig (a, b) = - (#enterSig space (a, b); display PolyML.NameSpace.displaySig (b, depth, space)); - fun apply_struct (a, b) = - (#enterStruct space (a, b); display PolyML.NameSpace.displayStruct (b, depth, space)); - fun apply_funct (a, b) = - (#enterFunct space (a, b); display PolyML.NameSpace.displayFunct (b, depth, space)); - fun apply_val (a, b) = - (#enterVal space (a, b); display PolyML.NameSpace.displayVal (b, depth, space)); - in - List.app apply_fix fixes; - List.app apply_type types; - List.app apply_sig signatures; - List.app apply_struct structures; - List.app apply_funct functors; - List.app apply_val values - end; - - exception STATIC_ERRORS of unit; - - fun result_fun (phase1, phase2) () = - ((case phase1 of - NONE => () - | SOME parse_tree => report_parse_tree (#redirect flags) depth space parse_tree); - (case phase2 of - NONE => raise STATIC_ERRORS () - | SOME code => - apply_result - ((code - |> Runtime.debugging opt_context - |> Runtime.toplevel_error (err o Runtime.exn_message)) ()))); - - - (* compiler invocation *) - - val parameters = - [PolyML.Compiler.CPOutStream write, - PolyML.Compiler.CPNameSpace space, - PolyML.Compiler.CPErrorMessageProc message, - PolyML.Compiler.CPLineNo (the_default 0 o Position.line_of o get_pos), - PolyML.Compiler.CPLineOffset (the_default 0 o Position.offset_of o get_pos), - PolyML.Compiler.CPFileName location_props, - PolyML.Compiler.CPPrintDepth ML_Options.get_print_depth, - PolyML.Compiler.CPCompilerResultFun result_fun, - PolyML.Compiler.CPPrintInAlphabeticalOrder false]; - val _ = - (while not (List.null (! input_buffer)) do - PolyML.compiler (get, parameters) ()) - handle exn => - if Exn.is_interrupt exn then reraise exn - else - let - val exn_msg = - (case exn of - STATIC_ERRORS () => "" - | Runtime.TOPLEVEL_ERROR => "" - | _ => "Exception- " ^ Pretty.string_of (Exn_Output.pretty exn) ^ " raised"); - val _ = output_warnings (); - val _ = output_writeln (); - in raise_error exn_msg end; - in - if #verbose flags then (output_warnings (); flush_error (); output_writeln ()) - else () - end; - -end; - diff --git a/core/Pure/ML/ml_context.ML b/core/Pure/ML/ml_context.ML deleted file mode 100644 index 54ad493a..00000000 --- a/core/Pure/ML/ml_context.ML +++ /dev/null @@ -1,193 +0,0 @@ -(* Title: Pure/ML/ml_context.ML - Author: Makarius - -ML context and antiquotations. 
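The mechanism named here is carried by eval_antiquotes further down: ML source is split into literal text and antiquotations, each antiquotation contributes a binding in a generated "Isabelle" environment structure, and the compiled body refers back to those bindings. A minimal self-contained sketch of that shape in plain SML follows; the piece values and generated names such as Isabelle.here0 are hypothetical and not the deleted module's actual API.

datatype piece = Text of string | Antiq of string * string;  (* name, value *)

fun expand pieces =
  let
    fun bind i (name, value) =
      "val " ^ name ^ Int.toString i ^ " = " ^ value ^ ";\n"
    fun go _ [] = ("", "")
      | go i (Text s :: rest) =
          let val (env, body) = go i rest in (env, s ^ body) end
      | go i (Antiq (name, value) :: rest) =
          let val (env, body) = go (i + 1) rest in
            (bind i (name, value) ^ env,
             "Isabelle." ^ name ^ Int.toString i ^ body)
          end
    val (env, body) = go 0 pieces
  in
    ("structure Isabelle =\nstruct\n" ^ env ^ "end;\n", body)
  end;

(* expand [Text "val p = ", Antiq ("here", "Position.none"), Text ";"]
   yields an environment declaring Isabelle.here0 together with the
   body text "val p = Isabelle.here0;" *)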
-*) - -signature ML_CONTEXT = -sig - val the_generic_context: unit -> Context.generic - val the_global_context: unit -> theory - val the_local_context: unit -> Proof.context - val thm: xstring -> thm - val thms: xstring -> thm list - val exec: (unit -> unit) -> Context.generic -> Context.generic - val check_antiquotation: Proof.context -> xstring * Position.T -> string - type decl = Proof.context -> string * string - val add_antiquotation: binding -> (Args.src -> Proof.context -> decl * Proof.context) -> - theory -> theory - val print_antiquotations: Proof.context -> unit - val eval_antiquotes: ML_Lex.token Antiquote.antiquote list * Position.T -> - Context.generic option -> (ML_Lex.token list * ML_Lex.token list) * Context.generic option - val eval: ML_Compiler.flags -> Position.T -> ML_Lex.token Antiquote.antiquote list -> unit - val eval_file: ML_Compiler.flags -> Path.T -> unit - val eval_source: ML_Compiler.flags -> Symbol_Pos.source -> unit - val eval_in: Proof.context option -> ML_Compiler.flags -> Position.T -> - ML_Lex.token Antiquote.antiquote list -> unit - val eval_source_in: Proof.context option -> ML_Compiler.flags -> Symbol_Pos.source -> unit - val expression: Position.T -> string -> string -> ML_Lex.token Antiquote.antiquote list -> - Context.generic -> Context.generic -end - -structure ML_Context: ML_CONTEXT = -struct - -(** implicit ML context **) - -val the_generic_context = Context.the_thread_data; -val the_global_context = Context.theory_of o the_generic_context; -val the_local_context = Context.proof_of o the_generic_context; - -fun thm name = Proof_Context.get_thm (the_local_context ()) name; -fun thms name = Proof_Context.get_thms (the_local_context ()) name; - -fun exec (e: unit -> unit) context = - (case Context.setmp_thread_data (SOME context) (fn () => (e (); Context.thread_data ())) () of - SOME context' => context' - | NONE => error "Missing context after execution"); - - - -(** ML antiquotations **) - -(* theory data *) - -type decl = Proof.context -> string * string; (*final context -> ML env, ML body*) -structure Antiquotations = Theory_Data -( - type T = (Args.src -> Proof.context -> decl * Proof.context) Name_Space.table; - val empty : T = Name_Space.empty_table Markup.ML_antiquotationN; - val extend = I; - fun merge data : T = Name_Space.merge_tables data; -); - -val get_antiquotations = Antiquotations.get o Proof_Context.theory_of; - -fun check_antiquotation ctxt = - #1 o Name_Space.check (Context.Proof ctxt) (get_antiquotations ctxt); - -fun add_antiquotation name f thy = thy - |> Antiquotations.map (Name_Space.define (Context.Theory thy) true (name, f) #> snd); - -fun print_antiquotations ctxt = - Pretty.big_list "ML antiquotations:" - (map (Pretty.mark_str o #1) (Name_Space.markup_table ctxt (get_antiquotations ctxt))) - |> Pretty.writeln; - -fun apply_antiquotation src ctxt = - let val (src', f) = Args.check_src ctxt (get_antiquotations ctxt) src - in f src' ctxt end; - - -(* parsing and evaluation *) - -local - -val antiq = - Parse.!!! 
(Parse.position Parse.xname -- Parse.args --| Scan.ahead Parse.eof) - >> uncurry Args.src; - -val begin_env0 = ML_Lex.tokenize "structure Isabelle =\nstruct\n"; - -fun begin_env visible = - ML_Lex.tokenize - ("structure Isabelle =\nstruct\n\ - \val ML_context = Context_Position.set_visible " ^ Bool.toString visible ^ - " (ML_Context.the_local_context ());\n\ - \val ML_print_depth =\n\ - \ let val default = ML_Options.get_print_depth ()\n\ - \ in fn () => ML_Options.get_print_depth_default default end;\n"); - -val end_env = ML_Lex.tokenize "end;"; -val reset_env = ML_Lex.tokenize "structure Isabelle = struct end"; - -in - -fun eval_antiquotes (ants, pos) opt_context = - let - val visible = - (case opt_context of - SOME (Context.Proof ctxt) => Context_Position.is_visible ctxt - | _ => true); - val opt_ctxt = Option.map (Context.Proof o Context.proof_of) opt_context; - - val ((ml_env, ml_body), opt_ctxt') = - if forall Antiquote.is_text ants - then ((begin_env0, map (fn Antiquote.Text tok => tok) ants), opt_ctxt) - else - let - val lex = #1 (Keyword.get_lexicons ()); - fun no_decl _ = ([], []); - - fun expand (Antiquote.Text tok) ctxt = (K ([], [tok]), ctxt) - | expand (Antiquote.Antiq (ss, {range, ...})) ctxt = - let - val (decl, ctxt') = - apply_antiquotation (Token.read_antiq lex antiq (ss, #1 range)) ctxt; - val decl' = decl #> pairself (ML_Lex.tokenize #> map (ML_Lex.set_range range)); - in (decl', ctxt') end; - - val ctxt = - (case opt_ctxt of - NONE => error ("No context -- cannot expand ML antiquotations" ^ Position.here pos) - | SOME ctxt => Context.proof_of ctxt); - - val (decls, ctxt') = fold_map expand ants ctxt; - val (ml_env, ml_body) = - decls |> map (fn decl => decl ctxt') |> split_list |> pairself flat; - in ((begin_env visible @ ml_env, ml_body), SOME (Context.Proof ctxt')) end; - in ((ml_env @ end_env, ml_body), opt_ctxt') end; - -fun eval flags pos ants = - let - val non_verbose = ML_Compiler.verbose false flags; - - (*prepare source text*) - val ((env, body), env_ctxt) = eval_antiquotes (ants, pos) (Context.thread_data ()); - val _ = - (case Option.map Context.proof_of env_ctxt of - SOME ctxt => - if Config.get ctxt ML_Options.source_trace andalso Context_Position.is_visible ctxt - then tracing (cat_lines [ML_Lex.flatten env, ML_Lex.flatten body]) - else () - | NONE => ()); - - (*prepare static ML environment*) - val _ = - Context.setmp_thread_data - (Option.map (Context.mapping I (Context_Position.set_visible false)) env_ctxt) - (fn () => (ML_Compiler.eval non_verbose Position.none env; Context.thread_data ())) () - |> (fn NONE => () | SOME context' => Context.>> (ML_Env.inherit context')); - - val _ = ML_Compiler.eval flags pos body; - val _ = ML_Compiler.eval non_verbose Position.none reset_env; - in () end; - -end; - - -(* derived versions *) - -fun eval_file flags path = - let val pos = Path.position path - in eval flags pos (ML_Lex.read pos (File.read path)) end; - -fun eval_source flags source = - eval flags (#pos source) (ML_Lex.read_source (#SML flags) source); - -fun eval_in ctxt flags pos ants = - Context.setmp_thread_data (Option.map Context.Proof ctxt) - (fn () => eval flags pos ants) (); - -fun eval_source_in ctxt flags source = - Context.setmp_thread_data (Option.map Context.Proof ctxt) - (fn () => eval_source flags source) (); - -fun expression pos bind body ants = - exec (fn () => - eval ML_Compiler.flags pos - (ML_Lex.read Position.none ("Context.set_thread_data (SOME (let " ^ bind ^ " = ") @ ants @ - ML_Lex.read Position.none (" in " ^ body ^ " end 
(ML_Context.the_generic_context ())));"))); - -end; - diff --git a/core/Pure/ML/ml_env.ML b/core/Pure/ML/ml_env.ML deleted file mode 100644 index a15bb409..00000000 --- a/core/Pure/ML/ml_env.ML +++ /dev/null @@ -1,139 +0,0 @@ -(* Title: Pure/ML/ml_env.ML - Author: Makarius - -Toplevel environment for Standard ML and Isabelle/ML within the -implicit context. -*) - -signature ML_ENV = -sig - val inherit: Context.generic -> Context.generic -> Context.generic - val name_space: {SML: bool, exchange: bool} -> ML_Name_Space.T - val local_context: use_context - val local_name_space: ML_Name_Space.T - val check_functor: string -> unit -end - -structure ML_Env: ML_ENV = -struct - -(* context data *) - -type tables = - ML_Name_Space.valueVal Symtab.table * - ML_Name_Space.typeVal Symtab.table * - ML_Name_Space.fixityVal Symtab.table * - ML_Name_Space.structureVal Symtab.table * - ML_Name_Space.signatureVal Symtab.table * - ML_Name_Space.functorVal Symtab.table; - -fun merge_tables - ((val1, type1, fixity1, structure1, signature1, functor1), - (val2, type2, fixity2, structure2, signature2, functor2)) : tables = - (Symtab.merge (K true) (val1, val2), - Symtab.merge (K true) (type1, type2), - Symtab.merge (K true) (fixity1, fixity2), - Symtab.merge (K true) (structure1, structure2), - Symtab.merge (K true) (signature1, signature2), - Symtab.merge (K true) (functor1, functor2)); - -type data = {bootstrap: bool, tables: tables, sml_tables: tables}; - -fun make_data (bootstrap, tables, sml_tables) : data = - {bootstrap = bootstrap, tables = tables, sml_tables = sml_tables}; - -structure Env = Generic_Data -( - type T = data - val empty = - make_data (true, - (Symtab.empty, Symtab.empty, Symtab.empty, Symtab.empty, Symtab.empty, Symtab.empty), - (Symtab.make ML_Name_Space.initial_val, - Symtab.make ML_Name_Space.initial_type, - Symtab.make ML_Name_Space.initial_fixity, - Symtab.make ML_Name_Space.initial_structure, - Symtab.make ML_Name_Space.initial_signature, - Symtab.make ML_Name_Space.initial_functor)); - fun extend (data : T) = make_data (false, #tables data, #sml_tables data); - fun merge (data : T * T) = - make_data (false, - merge_tables (pairself #tables data), - merge_tables (pairself #sml_tables data)); -); - -val inherit = Env.put o Env.get; - - -(* name space *) - -fun name_space {SML, exchange} : ML_Name_Space.T = - let - fun lookup sel1 sel2 name = - if SML then - Context.the_thread_data () - |> (fn context => Symtab.lookup (sel1 (#sml_tables (Env.get context))) name) - else - Context.thread_data () - |> (fn NONE => NONE | SOME context => Symtab.lookup (sel1 (#tables (Env.get context))) name) - |> (fn NONE => sel2 ML_Name_Space.global name | some => some); - - fun all sel1 sel2 () = - (if SML then - Context.the_thread_data () - |> (fn context => Symtab.dest (sel1 (#sml_tables (Env.get context)))) - else - Context.thread_data () - |> (fn NONE => [] | SOME context => Symtab.dest (sel1 (#tables (Env.get context)))) - |> append (sel2 ML_Name_Space.global ())) - |> sort_distinct (string_ord o pairself #1); - - fun enter ap1 sel2 entry = - if SML <> exchange then - Context.>> (Env.map (fn {bootstrap, tables, sml_tables} => - let val sml_tables' = ap1 (Symtab.update entry) sml_tables - in make_data (bootstrap, tables, sml_tables') end)) - else if is_some (Context.thread_data ()) then - Context.>> (Env.map (fn {bootstrap, tables, sml_tables} => - let - val _ = if bootstrap then sel2 ML_Name_Space.global entry else (); - val tables' = ap1 (Symtab.update entry) tables; - in make_data (bootstrap, 
tables', sml_tables) end)) - else sel2 ML_Name_Space.global entry; - in - {lookupVal = lookup #1 #lookupVal, - lookupType = lookup #2 #lookupType, - lookupFix = lookup #3 #lookupFix, - lookupStruct = lookup #4 #lookupStruct, - lookupSig = lookup #5 #lookupSig, - lookupFunct = lookup #6 #lookupFunct, - enterVal = enter (fn h => fn (a, b, c, d, e, f) => (h a, b, c, d, e, f)) #enterVal, - enterType = enter (fn h => fn (a, b, c, d, e, f) => (a, h b, c, d, e, f)) #enterType, - enterFix = enter (fn h => fn (a, b, c, d, e, f) => (a, b, h c, d, e, f)) #enterFix, - enterStruct = enter (fn h => fn (a, b, c, d, e, f) => (a, b, c, h d, e, f)) #enterStruct, - enterSig = enter (fn h => fn (a, b, c, d, e, f) => (a, b, c, d, h e, f)) #enterSig, - enterFunct = enter (fn h => fn (a, b, c, d, e, f) => (a, b, c, d, e, h f)) #enterFunct, - allVal = all #1 #allVal, - allType = all #2 #allType, - allFix = all #3 #allFix, - allStruct = all #4 #allStruct, - allSig = all #5 #allSig, - allFunct = all #6 #allFunct} - end; - -val local_context: use_context = - {tune_source = ML_Parse.fix_ints, - name_space = name_space {SML = false, exchange = false}, - str_of_pos = Position.here oo Position.line_file, - print = writeln, - error = error}; - -val local_name_space = #name_space local_context; - -val is_functor = is_some o #lookupFunct local_name_space; - -fun check_functor name = - if not (is_functor "Table") (*mask dummy version of name_space*) orelse is_functor name then () - else error ("Unknown ML functor: " ^ quote name); - -end; - diff --git a/core/Pure/ML/ml_lex.ML b/core/Pure/ML/ml_lex.ML deleted file mode 100644 index 1fd75166..00000000 --- a/core/Pure/ML/ml_lex.ML +++ /dev/null @@ -1,338 +0,0 @@ -(* Title: Pure/ML/ml_lex.ML - Author: Makarius - -Lexical syntax for SML. 
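A minimal, self-contained sketch in plain SML of the kind of token classification this lexer performs; the keyword list is a hypothetical subset of the ML_Lex.keywords table defined below, and the real scanner of course works over positioned symbols rather than plain strings.

val sml_keywords = ["fun", "val", "let", "in", "end", "fn", "=>", "="];

datatype token_kind = Keyword | Ident | Int | Error of string;

fun classify s =
  if List.exists (fn k => k = s) sml_keywords then Keyword
  else if s <> "" andalso List.all Char.isDigit (String.explode s) then Int
  else if s <> "" andalso Char.isAlpha (String.sub (s, 0)) andalso
          List.all (fn c => Char.isAlphaNum c orelse c = #"_" orelse c = #"'")
            (String.explode s)
       then Ident
  else Error ("bad token: " ^ s);

(* classify "val" = Keyword;  classify "x1'" = Ident;  classify "42" = Int *)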
-*) - -signature ML_LEX = -sig - val keywords: string list - datatype token_kind = - Keyword | Ident | LongIdent | TypeVar | Word | Int | Real | Char | String | - Space | Comment | Error of string | EOF - eqtype token - val stopper: token Scan.stopper - val is_regular: token -> bool - val is_improper: token -> bool - val set_range: Position.range -> token -> token - val pos_of: token -> Position.T - val end_pos_of: token -> Position.T - val kind_of: token -> token_kind - val content_of: token -> string - val check_content_of: token -> string - val flatten: token list -> string - val source: (Symbol.symbol, 'a) Source.source -> - (token, (Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source) - Source.source) Source.source - val tokenize: string -> token list - val read: Position.T -> Symbol_Pos.text -> token Antiquote.antiquote list - val read_source: bool -> Symbol_Pos.source -> token Antiquote.antiquote list -end; - -structure ML_Lex: ML_LEX = -struct - -(** keywords **) - -val keywords = - ["#", "(", ")", ",", "->", "...", ":", ":>", ";", "=", "=>", "[", - "]", "_", "{", "|", "}", "abstype", "and", "andalso", "as", "case", - "datatype", "do", "else", "end", "eqtype", "exception", "fn", "fun", - "functor", "handle", "if", "in", "include", "infix", "infixr", - "let", "local", "nonfix", "of", "op", "open", "orelse", "raise", - "rec", "sharing", "sig", "signature", "struct", "structure", "then", - "type", "val", "where", "while", "with", "withtype"]; - -val keywords2 = - ["case", "do", "else", "end", "if", "in", "let", "local", "of", - "sig", "struct", "then", "while", "with"]; - -val keywords3 = - ["handle", "open", "raise"]; - -val lexicon = Scan.make_lexicon (map raw_explode keywords); - - - -(** tokens **) - -(* datatype token *) - -datatype token_kind = - Keyword | Ident | LongIdent | TypeVar | Word | Int | Real | Char | String | - Space | Comment | Error of string | EOF; - -datatype token = Token of Position.range * (token_kind * string); - - -(* position *) - -fun set_range range (Token (_, x)) = Token (range, x); - -fun pos_of (Token ((pos, _), _)) = pos; -fun end_pos_of (Token ((_, pos), _)) = pos; - - -(* control tokens *) - -fun mk_eof pos = Token ((pos, Position.none), (EOF, "")); -val eof = mk_eof Position.none; - -fun is_eof (Token (_, (EOF, _))) = true - | is_eof _ = false; - -val stopper = - Scan.stopper (fn [] => eof | toks => mk_eof (end_pos_of (List.last toks))) is_eof; - - -(* token content *) - -fun kind_of (Token (_, (k, _))) = k; - -fun content_of (Token (_, (_, x))) = x; -fun token_leq (tok, tok') = content_of tok <= content_of tok'; - -fun is_keyword (Token (_, (Keyword, _))) = true - | is_keyword _ = false; - -fun is_delimiter (Token (_, (Keyword, x))) = not (Symbol.is_ascii_identifier x) - | is_delimiter _ = false; - -fun is_regular (Token (_, (Error _, _))) = false - | is_regular (Token (_, (EOF, _))) = false - | is_regular _ = true; - -fun is_improper (Token (_, (Space, _))) = true - | is_improper (Token (_, (Comment, _))) = true - | is_improper _ = false; - -fun warn tok = - (case tok of - Token (_, (Keyword, ":>")) => - warning ("Opaque signature matching (:>) fails to work with ML pretty printing --\n\ - \prefer non-opaque matching (:) possibly with abstype" ^ - Position.here (pos_of tok)) - | _ => ()); - -fun check_content_of tok = - (case kind_of tok of - Error msg => error msg - | _ => content_of tok); - -fun flatten_content (tok :: (toks as tok' :: _)) = - Symbol.escape (check_content_of tok) :: - (if is_improper tok orelse is_improper tok' then 
flatten_content toks - else Symbol.space :: flatten_content toks) - | flatten_content toks = map (Symbol.escape o check_content_of) toks; - -val flatten = implode o flatten_content; - - -(* markup *) - -local - -fun token_kind_markup SML = - fn Keyword => (Markup.empty, "") - | Ident => (Markup.empty, "") - | LongIdent => (Markup.empty, "") - | TypeVar => (Markup.ML_tvar, "") - | Word => (Markup.ML_numeral, "") - | Int => (Markup.ML_numeral, "") - | Real => (Markup.ML_numeral, "") - | Char => (Markup.ML_char, "") - | String => (if SML then Markup.SML_string else Markup.ML_string, "") - | Space => (Markup.empty, "") - | Comment => (if SML then Markup.SML_comment else Markup.ML_comment, "") - | Error msg => (Markup.bad, msg) - | EOF => (Markup.empty, ""); - -in - -fun report_of_token SML (tok as Token ((pos, _), (kind, x))) = - let - val (markup, txt) = - if not (is_keyword tok) then token_kind_markup SML kind - else if is_delimiter tok then (Markup.ML_delimiter, "") - else if member (op =) keywords2 x then (Markup.ML_keyword2, "") - else if member (op =) keywords3 x then (Markup.ML_keyword3, "") - else (Markup.ML_keyword1, ""); - in ((pos, markup), txt) end; - -end; - - - -(** scanners **) - -open Basic_Symbol_Pos; - -val err_prefix = "SML lexical error: "; - -fun !!! msg = Symbol_Pos.!!! (fn () => err_prefix ^ msg); - - -(* identifiers *) - -local - -val scan_letdigs = - Scan.many (Symbol.is_ascii_letdig o Symbol_Pos.symbol); - -val scan_alphanumeric = - Scan.one (Symbol.is_ascii_letter o Symbol_Pos.symbol) -- scan_letdigs >> op ::; - -val scan_symbolic = - Scan.many1 (member (op =) (raw_explode "!#$%&*+-/:<=>?@\\^`|~") o Symbol_Pos.symbol); - -in - -val scan_ident = scan_alphanumeric || scan_symbolic; - -val scan_long_ident = - (Scan.repeat1 (scan_alphanumeric @@@ $$$ ".") >> flat) @@@ (scan_ident || $$$ "="); - -val scan_type_var = $$$ "'" @@@ scan_letdigs; - -end; - - -(* numerals *) - -local - -val scan_dec = Scan.many1 (Symbol.is_ascii_digit o Symbol_Pos.symbol); -val scan_hex = Scan.many1 (Symbol.is_ascii_hex o Symbol_Pos.symbol); -val scan_sign = Scan.optional ($$$ "~") []; -val scan_decint = scan_sign @@@ scan_dec; -val scan_exp = ($$$ "E" || $$$ "e") @@@ scan_decint; - -in - -val scan_word = - $$$ "0" @@@ $$$ "w" @@@ $$$ "x" @@@ scan_hex || - $$$ "0" @@@ $$$ "w" @@@ scan_dec; - -val scan_int = scan_sign @@@ ($$$ "0" @@@ $$$ "x" @@@ scan_hex || scan_dec); - -val scan_real = - scan_decint @@@ $$$ "." @@@ scan_dec @@@ Scan.optional scan_exp [] || - scan_decint @@@ scan_exp; - -end; - - -(* chars and strings *) - -val scan_blanks1 = Scan.many1 (Symbol.is_ascii_blank o Symbol_Pos.symbol); - -local - -val scan_escape = - Scan.one (member (op =) (raw_explode "\"\\abtnvfr") o Symbol_Pos.symbol) >> single || - $$$ "^" @@@ (Scan.one (fn (s, _) => ord "@" <= ord s andalso ord s <= ord "_") >> single) || - Scan.one (Symbol.is_ascii_digit o Symbol_Pos.symbol) -- - Scan.one (Symbol.is_ascii_digit o Symbol_Pos.symbol) -- - Scan.one (Symbol.is_ascii_digit o Symbol_Pos.symbol) >> (fn ((a, b), c) => [a, b, c]); - -val scan_str = - Scan.one (fn (s, _) => Symbol.is_regular s andalso s <> "\"" andalso s <> "\\" andalso - (not (Symbol.is_char s) orelse Symbol.is_printable s)) >> single || - $$$ "\\" @@@ !!! 
"bad escape character in string" scan_escape; - -val scan_gap = $$$ "\\" @@@ scan_blanks1 @@@ $$$ "\\"; -val scan_gaps = Scan.repeat scan_gap >> flat; - -in - -val scan_char = - $$$ "#" @@@ $$$ "\"" @@@ scan_gaps @@@ scan_str @@@ scan_gaps @@@ $$$ "\""; - -val recover_char = - $$$ "#" @@@ $$$ "\"" @@@ scan_gaps @@@ Scan.optional (Scan.permissive scan_str @@@ scan_gaps) []; - -val scan_string = - Scan.ahead ($$ "\"") |-- - !!! "unclosed string literal" - ($$$ "\"" @@@ (Scan.repeat (scan_gap || scan_str) >> flat) @@@ $$$ "\""); - -val recover_string = - $$$ "\"" @@@ (Scan.repeat (scan_gap || Scan.permissive scan_str) >> flat); - -end; - - -(* scan tokens *) - -local - -fun token k ss = Token (Symbol_Pos.range ss, (k, Symbol_Pos.content ss)); - -val scan_ml = - (scan_char >> token Char || - scan_string >> token String || - scan_blanks1 >> token Space || - Symbol_Pos.scan_comment err_prefix >> token Comment || - Scan.max token_leq - (Scan.literal lexicon >> token Keyword) - (scan_word >> token Word || - scan_real >> token Real || - scan_int >> token Int || - scan_long_ident >> token LongIdent || - scan_ident >> token Ident || - scan_type_var >> token TypeVar)); - -val scan_antiq = Antiquote.scan_antiq >> Antiquote.Antiq || scan_ml >> Antiquote.Text; - -fun recover msg = - (recover_char || - recover_string || - Symbol_Pos.recover_comment || - Scan.one (Symbol.not_eof o Symbol_Pos.symbol) >> single) - >> (single o token (Error msg)); - -fun gen_read SML pos text = - let - val syms = Symbol_Pos.explode (text, pos); - val termination = - if null syms then [] - else - let - val pos1 = List.last syms |-> Position.advance; - val pos2 = Position.advance Symbol.space pos1; - in [Antiquote.Text (Token (Position.range pos1 pos2, (Space, Symbol.space)))] end; - - val scan = if SML then scan_ml >> Antiquote.Text else scan_antiq; - fun check (Antiquote.Text tok) = (check_content_of tok; if SML then () else warn tok) - | check _ = (); - val input = - Source.of_list syms - |> Source.source Symbol_Pos.stopper (Scan.bulk (!!! "bad input" scan)) - (SOME (false, fn msg => recover msg >> map Antiquote.Text)) - |> Source.exhaust - |> tap (Position.reports_text o Antiquote.antiquote_reports (single o report_of_token SML)) - |> tap (List.app check); - in input @ termination end; - -in - -fun source src = - Symbol_Pos.source (Position.line 1) src - |> Source.source Symbol_Pos.stopper (Scan.bulk (!!! "bad input" scan_ml)) (SOME (false, recover)); - -val tokenize = Source.of_string #> Symbol.source #> source #> Source.exhaust; - -val read = gen_read false; - -fun read_source SML {delimited, text, pos} = - let - val language = if SML then Markup.language_SML else Markup.language_ML; - val _ = - if Position.is_reported_range pos then Position.report pos (language delimited) - else (); - in gen_read SML pos text end; - -end; - -end; - diff --git a/core/Pure/ML/ml_lex.scala b/core/Pure/ML/ml_lex.scala deleted file mode 100644 index 06464419..00000000 --- a/core/Pure/ML/ml_lex.scala +++ /dev/null @@ -1,281 +0,0 @@ -/* Title: Pure/ML/ml_lex.scala - Author: Makarius - -Lexical syntax for SML. 
-*/ - -package isabelle - - -import scala.collection.mutable -import scala.util.parsing.input.{Reader, CharSequenceReader} - - -object ML_Lex -{ - /** keywords **/ - - val keywords: Set[String] = - Set("#", "(", ")", ",", "->", "...", ":", ":>", ";", "=", "=>", - "[", "]", "_", "{", "|", "}", "abstype", "and", "andalso", "as", - "case", "datatype", "do", "else", "end", "eqtype", "exception", - "fn", "fun", "functor", "handle", "if", "in", "include", - "infix", "infixr", "let", "local", "nonfix", "of", "op", "open", - "orelse", "raise", "rec", "sharing", "sig", "signature", - "struct", "structure", "then", "type", "val", "where", "while", - "with", "withtype") - - val keywords2: Set[String] = - Set("case", "do", "else", "end", "if", "in", "let", "local", "of", - "sig", "struct", "then", "while", "with") - - val keywords3: Set[String] = - Set("handle", "open", "raise") - - private val lexicon: Scan.Lexicon = Scan.Lexicon(keywords.toList: _*) - - - - /** tokens **/ - - object Kind extends Enumeration - { - val KEYWORD = Value("keyword") - val IDENT = Value("identifier") - val LONG_IDENT = Value("long identifier") - val TYPE_VAR = Value("type variable") - val WORD = Value("word") - val INT = Value("integer") - val REAL = Value("real") - val CHAR = Value("character") - val STRING = Value("quoted string") - val SPACE = Value("white space") - val COMMENT = Value("comment text") - val ANTIQ = Value("antiquotation") - val ANTIQ_START = Value("antiquotation: start") - val ANTIQ_STOP = Value("antiquotation: stop") - val ANTIQ_OTHER = Value("antiquotation: other") - val ANTIQ_STRING = Value("antiquotation: quoted string") - val ANTIQ_ALT_STRING = Value("antiquotation: back-quoted string") - val ANTIQ_CARTOUCHE = Value("antiquotation: text cartouche") - val ERROR = Value("bad input") - } - - sealed case class Token(val kind: Kind.Value, val source: String) - { - def is_keyword: Boolean = kind == Kind.KEYWORD - def is_delimiter: Boolean = is_keyword && !Symbol.is_ascii_identifier(source) - } - - - - /** parsers **/ - - case object ML_String extends Scan.Line_Context - case class Antiq(ctxt: Scan.Line_Context) extends Scan.Line_Context - - private object Parsers extends Scan.Parsers with Antiquote.Parsers - { - /* string material */ - - private val blanks = many(character(Symbol.is_ascii_blank)) - private val blanks1 = many1(character(Symbol.is_ascii_blank)) - - private val gap = "\\" ~ blanks1 ~ "\\" ^^ { case x ~ y ~ z => x + y + z } - private val gap_start = "\\" ~ blanks ~ """\z""".r ^^ { case x ~ y ~ _ => x + y } - - private val escape = - one(character("\"\\abtnvfr".contains(_))) | - "^" ~ one(character(c => '@' <= c && c <= '_')) ^^ { case x ~ y => x + y } | - repeated(character(Symbol.is_ascii_digit), 3, 3) - - private val str = - one(Symbol.is_symbolic) | - one(character(c => c != '"' && c != '\\' && ' ' <= c && c <= '~')) | - "\\" ~ escape ^^ { case x ~ y => x + y } - - - /* ML char -- without gaps */ - - private val ml_char: Parser[Token] = - "#\"" ~ str ~ "\"" ^^ { case x ~ y ~ z => Token(Kind.CHAR, x + y + z) } - - private val recover_ml_char: Parser[String] = - "#\"" ~ opt(str) ^^ { case x ~ Some(y) => x + y case x ~ None => x } - - - /* ML string */ - - private val ml_string_body: Parser[String] = - rep(gap | str) ^^ (_.mkString) - - private val recover_ml_string: Parser[String] = - "\"" ~ ml_string_body ^^ { case x ~ y => x + y } - - private val ml_string: Parser[Token] = - "\"" ~ ml_string_body ~ "\"" ^^ { case x ~ y ~ z => Token(Kind.STRING, x + y + z) } - - private def 
ml_string_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] = - { - def result(x: String, c: Scan.Line_Context) = (Token(Kind.STRING, x), c) - - ctxt match { - case Scan.Finished => - "\"" ~ ml_string_body ~ ("\"" | gap_start) ^^ - { case x ~ y ~ z => result(x + y + z, if (z == "\"") Scan.Finished else ML_String) } - case ML_String => - blanks ~ opt_term("\\" ~ ml_string_body ~ ("\"" | gap_start)) ^^ - { case x ~ Some(y ~ z ~ w) => - result(x + y + z + w, if (w == "\"") Scan.Finished else ML_String) - case x ~ None => result(x, ML_String) } - case _ => failure("") - } - } - - - /* ML comment */ - - private val ml_comment: Parser[Token] = - comment ^^ (x => Token(Kind.COMMENT, x)) - - private def ml_comment_line(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] = - comment_line(ctxt) ^^ { case (x, c) => (Token(Kind.COMMENT, x), c) } - - - /* delimited token */ - - private def delimited_token: Parser[Token] = - ml_char | (ml_string | ml_comment) - - private val recover_delimited: Parser[Token] = - (recover_ml_char | (recover_ml_string | recover_comment)) ^^ (x => Token(Kind.ERROR, x)) - - - private def other_token: Parser[Token] = - { - /* identifiers */ - - val letdigs = many(character(Symbol.is_ascii_letdig)) - - val alphanumeric = - one(character(Symbol.is_ascii_letter)) ~ letdigs ^^ { case x ~ y => x + y } - - val symbolic = many1(character("!#$%&*+-/:<=>?@\\^`|~".contains(_))) - - val ident = (alphanumeric | symbolic) ^^ (x => Token(Kind.IDENT, x)) - - val long_ident = - rep1(alphanumeric ~ "." ^^ { case x ~ y => x + y }) ~ - (alphanumeric | (symbolic | "=")) ^^ - { case x ~ y => Token(Kind.LONG_IDENT, x.mkString + y) } - - val type_var = "'" ~ letdigs ^^ { case x ~ y => Token(Kind.TYPE_VAR, x + y) } - - - /* numerals */ - - val dec = many1(character(Symbol.is_ascii_digit)) - val hex = many1(character(Symbol.is_ascii_hex)) - val sign = opt("~") ^^ { case Some(x) => x case None => "" } - val decint = sign ~ dec ^^ { case x ~ y => x + y } - val exp = ("E" | "e") ~ decint ^^ { case x ~ y => x + y } - - val word = - ("0wx" ~ hex ^^ { case x ~ y => x + y } | "0w" ~ dec ^^ { case x ~ y => x + y }) ^^ - (x => Token(Kind.WORD, x)) - - val int = - sign ~ ("0x" ~ hex ^^ { case x ~ y => x + y } | dec) ^^ - { case x ~ y => Token(Kind.INT, x + y) } - - val real = - (decint ~ "." 
~ dec ~ (opt(exp) ^^ { case Some(x) => x case None => "" }) ^^ - { case x ~ y ~ z ~ w => x + y + z + w } | - decint ~ exp ^^ { case x ~ y => x + y }) ^^ (x => Token(Kind.REAL, x)) - - - /* main */ - - val space = blanks1 ^^ (x => Token(Kind.SPACE, x)) - - val keyword = literal(lexicon) ^^ (x => Token(Kind.KEYWORD, x)) - - val ml_antiq = antiq ^^ (x => Token(Kind.ANTIQ, x)) - - val bad = one(_ => true) ^^ (x => Token(Kind.ERROR, x)) - - space | (recover_delimited | (ml_antiq | - (((word | (real | (int | (long_ident | (ident | type_var))))) ||| keyword) | bad))) - } - - - /* antiquotations (line-oriented) */ - - def ml_antiq_start(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] = - ctxt match { - case Scan.Finished => "@{" ^^ (x => (Token(Kind.ANTIQ_START, x), Antiq(Scan.Finished))) - case _ => failure("") - } - - def ml_antiq_stop(ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] = - ctxt match { - case Antiq(Scan.Finished) => "}" ^^ (x => (Token(Kind.ANTIQ_STOP, x), Scan.Finished)) - case _ => failure("") - } - - def ml_antiq_body(context: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] = - context match { - case Antiq(ctxt) => - (if (ctxt == Scan.Finished) antiq_other ^^ (x => (Token(Kind.ANTIQ_OTHER, x), context)) - else failure("")) | - quoted_line("\"", ctxt) ^^ { case (x, c) => (Token(Kind.ANTIQ_STRING, x), Antiq(c)) } | - quoted_line("`", ctxt) ^^ { case (x, c) => (Token(Kind.ANTIQ_ALT_STRING, x), Antiq(c)) } | - cartouche_line(ctxt) ^^ { case (x, c) => (Token(Kind.ANTIQ_CARTOUCHE, x), Antiq(c)) } - case _ => failure("") - } - - - /* token */ - - def token: Parser[Token] = delimited_token | other_token - - def token_line(SML: Boolean, ctxt: Scan.Line_Context): Parser[(Token, Scan.Line_Context)] = - { - val other = (ml_char | other_token) ^^ (x => (x, Scan.Finished)) - - if (SML) ml_string_line(ctxt) | (ml_comment_line(ctxt) | other) - else - ml_string_line(ctxt) | - (ml_comment_line(ctxt) | - (ml_antiq_start(ctxt) | (ml_antiq_stop(ctxt) | (ml_antiq_body(ctxt) | other)))) - } - } - - - /* tokenize */ - - def tokenize(input: CharSequence): List[Token] = - { - Parsers.parseAll(Parsers.rep(Parsers.token), new CharSequenceReader(input)) match { - case Parsers.Success(tokens, _) => tokens - case _ => error("Unexpected failure of tokenizing input:\n" + input.toString) - } - } - - def tokenize_line(SML: Boolean, input: CharSequence, context: Scan.Line_Context) - : (List[Token], Scan.Line_Context) = - { - var in: Reader[Char] = new CharSequenceReader(input) - val toks = new mutable.ListBuffer[Token] - var ctxt = context - while (!in.atEnd) { - Parsers.parse(Parsers.token_line(SML, ctxt), in) match { - case Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest } - case Parsers.NoSuccess(_, rest) => - error("Unexpected failure of tokenizing input:\n" + rest.source.toString) - } - } - (toks.toList, ctxt) - } -} - diff --git a/core/Pure/ML/ml_options.ML b/core/Pure/ML/ml_options.ML deleted file mode 100644 index a0d7b5f9..00000000 --- a/core/Pure/ML/ml_options.ML +++ /dev/null @@ -1,56 +0,0 @@ -(* Title: Pure/ML/ml_options.ML - Author: Makarius - -ML configuration options. 
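A small sketch, assuming a simplified stand-in for the Config/Options machinery, of the declare / get-with-default pattern this module uses; the option names are reused from below, but the association-list representation is an illustrative assumption only.

datatype option_value = Bool of bool | Int of int;

val options : (string * option_value) list ref =
  ref [("ML_source_trace", Bool false), ("ML_print_depth", Int 10)];

fun declare name value = options := (name, value) :: ! options;

fun get_int name default =
  (case List.find (fn (k, _) => k = name) (! options) of
    SOME (_, Int i) => i
  | _ => default);

(* declare "ML_exception_trace" (Bool false);
   get_int "ML_print_depth" 20 = 10;  get_int "unknown_option" 20 = 20 *)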
-*) - -signature ML_OPTIONS = -sig - val source_trace_raw: Config.raw - val source_trace: bool Config.T - val exception_trace_raw: Config.raw - val exception_trace: bool Config.T - val exception_trace_enabled: Context.generic option -> bool - val print_depth_raw: Config.raw - val print_depth: int Config.T - val get_print_depth: unit -> int - val get_print_depth_default: int -> int -end; - -structure ML_Options: ML_OPTIONS = -struct - -(* source trace *) - -val source_trace_raw = - Config.declare ("ML_source_trace", @{here}) (fn _ => Config.Bool false); -val source_trace = Config.bool source_trace_raw; - - -(* exception trace *) - -val exception_trace_raw = Config.declare_option ("ML_exception_trace", @{here}); -val exception_trace = Config.bool exception_trace_raw; - -fun exception_trace_enabled NONE = - (Options.default_bool (Config.name_of exception_trace_raw) handle ERROR _ => false) - | exception_trace_enabled (SOME context) = Config.get_generic context exception_trace; - - -(* print depth *) - -val print_depth_raw = - Config.declare ("ML_print_depth", @{here}) (fn _ => Config.Int (get_default_print_depth ())); -val print_depth = Config.int print_depth_raw; - -fun get_print_depth () = - (case Context.thread_data () of - NONE => get_default_print_depth () - | SOME context => Config.get_generic context print_depth); - -fun get_print_depth_default default = - (case Context.thread_data () of - NONE => default - | SOME context => Config.get_generic context print_depth); - -end; diff --git a/core/Pure/ML/ml_parse.ML b/core/Pure/ML/ml_parse.ML deleted file mode 100644 index be8971da..00000000 --- a/core/Pure/ML/ml_parse.ML +++ /dev/null @@ -1,74 +0,0 @@ -(* Title: Pure/ML/ml_parse.ML - Author: Makarius - -Minimal parsing for SML -- fixing integer numerals. -*) - -signature ML_PARSE = -sig - val fix_ints: string -> string - val global_context: use_context -end; - -structure ML_Parse: ML_PARSE = -struct - -(** error handling **) - -fun !!! scan = - let - fun get_pos [] = " (end-of-input)" - | get_pos (tok :: _) = Position.here (ML_Lex.pos_of tok); - - fun err (toks, NONE) = (fn () => "SML syntax error" ^ get_pos toks) - | err (toks, SOME msg) = (fn () => "SML syntax error" ^ get_pos toks ^ ": " ^ msg ()); - in Scan.!! err scan end; - -fun bad_input x = - (Scan.some (fn tok => (case ML_Lex.kind_of tok of ML_Lex.Error msg => SOME msg | _ => NONE)) :|-- - (fn msg => Scan.fail_with (K (fn () => msg)))) x; - - -(** basic parsers **) - -fun $$$ x = - Scan.one (fn tok => ML_Lex.kind_of tok = ML_Lex.Keyword andalso ML_Lex.content_of tok = x) - >> ML_Lex.content_of; - -val int = Scan.one (fn tok => ML_Lex.kind_of tok = ML_Lex.Int) >> ML_Lex.content_of; - -val regular = Scan.one ML_Lex.is_regular >> ML_Lex.content_of; -val improper = Scan.one ML_Lex.is_improper >> ML_Lex.content_of; - -val blanks = Scan.repeat improper >> implode; - - -(* fix_ints *) - -(*approximation only -- corrupts numeric record field patterns *) -val fix_int = - $$$ "#" ^^ blanks ^^ int || - ($$$ "infix" || $$$ "infixr") ^^ blanks ^^ int || - int >> (fn x => "(" ^ x ^ ":int)") || - regular || - bad_input; - -val fix_ints = - ML_System.is_smlnj ? - (Source.of_string #> - ML_Lex.source #> - Source.source ML_Lex.stopper (Scan.bulk (!!! 
fix_int)) NONE #> - Source.exhaust #> - implode); - - -(* global use_context *) - -val global_context: use_context = - {tune_source = fix_ints, - name_space = ML_Name_Space.global, - str_of_pos = Position.here oo Position.line_file, - print = writeln, - error = error}; - -end; diff --git a/core/Pure/ML/ml_statistics_dummy.ML b/core/Pure/ML/ml_statistics_dummy.ML deleted file mode 100644 index c855223f..00000000 --- a/core/Pure/ML/ml_statistics_dummy.ML +++ /dev/null @@ -1,18 +0,0 @@ -(* Title: Pure/ML/ml_statistics_dummy.ML - Author: Makarius - -ML runtime statistics -- dummy version. -*) - -signature ML_STATISTICS = -sig - val get: unit -> Properties.T -end; - -structure ML_Statistics: ML_STATISTICS = -struct - -fun get () = []; - -end; - diff --git a/core/Pure/ML/ml_statistics_polyml-5.5.0.ML b/core/Pure/ML/ml_statistics_polyml-5.5.0.ML deleted file mode 100644 index 435c9b4f..00000000 --- a/core/Pure/ML/ml_statistics_polyml-5.5.0.ML +++ /dev/null @@ -1,60 +0,0 @@ -(* Title: Pure/ML/ml_statistics_polyml-5.5.0.ML - Author: Makarius - -ML runtime statistics for Poly/ML 5.5.0. -*) - -signature ML_STATISTICS = -sig - val get: unit -> Properties.T -end; - -structure ML_Statistics: ML_STATISTICS = -struct - -fun get () = - let - val - {gcFullGCs, - gcPartialGCs, - sizeAllocation, - sizeAllocationFree, - sizeHeap, - sizeHeapFreeLastFullGC, - sizeHeapFreeLastGC, - threadsInML, - threadsTotal, - threadsWaitCondVar, - threadsWaitIO, - threadsWaitMutex, - threadsWaitSignal, - timeGCSystem, - timeGCUser, - timeNonGCSystem, - timeNonGCUser, - userCounters} = PolyML.Statistics.getLocalStats (); - val user_counters = - Vector.foldri - (fn (i, j, res) => ("user_counter" ^ Markup.print_int i, Markup.print_int j) :: res) - [] userCounters; - in - [("full_GCs", Markup.print_int gcFullGCs), - ("partial_GCs", Markup.print_int gcPartialGCs), - ("size_allocation", Markup.print_int sizeAllocation), - ("size_allocation_free", Markup.print_int sizeAllocationFree), - ("size_heap", Markup.print_int sizeHeap), - ("size_heap_free_last_full_GC", Markup.print_int sizeHeapFreeLastFullGC), - ("size_heap_free_last_GC", Markup.print_int sizeHeapFreeLastGC), - ("threads_in_ML", Markup.print_int threadsInML), - ("threads_total", Markup.print_int threadsTotal), - ("threads_wait_condvar", Markup.print_int threadsWaitCondVar), - ("threads_wait_IO", Markup.print_int threadsWaitIO), - ("threads_wait_mutex", Markup.print_int threadsWaitMutex), - ("threads_wait_signal", Markup.print_int threadsWaitSignal), - ("time_CPU", Markup.print_real (Time.toReal timeNonGCSystem + Time.toReal timeNonGCUser)), - ("time_GC", Markup.print_real (Time.toReal timeGCSystem + Time.toReal timeGCUser))] @ - user_counters - end; - -end; - diff --git a/core/Pure/ML/ml_syntax.ML b/core/Pure/ML/ml_syntax.ML deleted file mode 100644 index a820b2f6..00000000 --- a/core/Pure/ML/ml_syntax.ML +++ /dev/null @@ -1,113 +0,0 @@ -(* Title: Pure/ML/ml_syntax.ML - Author: Makarius - -Basic ML syntax operations. 
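A minimal sketch in plain SML of the printer-combinator idea used throughout this file: each print_* function renders a value as ML source text, and combinators compose them for pairs and lists. This is a simplified stand-in; the deleted file builds the same shape from enclose and commas in the Isabelle library.

fun atomic s = "(" ^ s ^ ")";
val print_int = Int.toString;
fun print_pair f1 f2 (x, y) = "(" ^ f1 x ^ ", " ^ f2 y ^ ")";
fun print_list f xs = "[" ^ String.concatWith ", " (map f xs) ^ "]";

(* print_pair print_int (print_list print_int) (1, [2, 3]) = "(1, [2, 3])",
   i.e. ML source text that re-evaluates to the original value. *)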
-*) - -signature ML_SYNTAX = -sig - val reserved_names: string list - val reserved: Name.context - val is_reserved: string -> bool - val is_identifier: string -> bool - val atomic: string -> string - val print_int: int -> string - val print_pair: ('a -> string) -> ('b -> string) -> 'a * 'b -> string - val print_list: ('a -> string) -> 'a list -> string - val print_option: ('a -> string) -> 'a option -> string - val print_char: string -> string - val print_string: string -> string - val print_strings: string list -> string - val print_properties: Properties.T -> string - val print_position: Position.T -> string - val make_binding: string * Position.T -> string - val print_indexname: indexname -> string - val print_class: class -> string - val print_sort: sort -> string - val print_typ: typ -> string - val print_term: term -> string - val pretty_string: int -> string -> Pretty.T -end; - -structure ML_Syntax: ML_SYNTAX = -struct - -(* reserved words *) - -val reserved_names = filter Symbol.is_ascii_identifier ML_Lex.keywords; -val reserved = Name.make_context reserved_names; -val is_reserved = Name.is_declared reserved; - - -(* identifiers *) - -fun is_identifier name = - not (is_reserved name) andalso Symbol.is_ascii_identifier name; - - -(* literal output -- unformatted *) - -val atomic = enclose "(" ")"; - -val print_int = string_of_int; - -fun print_pair f1 f2 (x, y) = "(" ^ f1 x ^ ", " ^ f2 y ^ ")"; - -fun print_list f = enclose "[" "]" o commas o map f; - -fun print_option f NONE = "NONE" - | print_option f (SOME x) = "SOME (" ^ f x ^ ")"; - -fun print_char s = - if not (Symbol.is_char s) then s - else if s = "\"" then "\\\"" - else if s = "\\" then "\\\\" - else if s = "\t" then "\\t" - else if s = "\n" then "\\n" - else if s = "\f" then "\\f" - else if s = "\r" then "\\r" - else - let val c = ord s in - if c < 32 then "\\^" ^ chr (c + ord "@") - else if c < 127 then s - else "\\" ^ string_of_int c - end; - -val print_string = quote o implode o map print_char o Symbol.explode; -val print_strings = print_list print_string; - -val print_properties = print_list (print_pair print_string print_string); -fun print_position pos = "Position.of_properties " ^ print_properties (Position.properties_of pos); -fun make_binding (name, pos) = "Binding.make " ^ print_pair print_string print_position (name, pos); - -val print_indexname = print_pair print_string print_int; - -val print_class = print_string; -val print_sort = print_list print_class; - -fun print_typ (Type arg) = "Term.Type " ^ print_pair print_string (print_list print_typ) arg - | print_typ (TFree arg) = "Term.TFree " ^ print_pair print_string print_sort arg - | print_typ (TVar arg) = "Term.TVar " ^ print_pair print_indexname print_sort arg; - -fun print_term (Const arg) = "Term.Const " ^ print_pair print_string print_typ arg - | print_term (Free arg) = "Term.Free " ^ print_pair print_string print_typ arg - | print_term (Var arg) = "Term.Var " ^ print_pair print_indexname print_typ arg - | print_term (Bound i) = "Term.Bound " ^ print_int i - | print_term (Abs (s, T, t)) = - "Term.Abs (" ^ print_string s ^ ", " ^ print_typ T ^ ", " ^ print_term t ^ ")" - | print_term (t1 $ t2) = "Term.$ " ^ print_pair print_term print_term (t1, t2); - - -(* toplevel pretty printing *) - -fun pretty_string max_len str = - let - val body = - maps (fn XML.Elem _ => [""] | XML.Text s => Symbol.explode s) (YXML.parse_body str) - handle Fail _ => Symbol.explode str; - val body' = - if length body <= max_len then body - else take (Int.max (max_len, 0)) body @ 
["..."]; - in Pretty.str (quote (implode (map print_char body'))) end; - -end; diff --git a/core/Pure/ML/ml_thms.ML b/core/Pure/ML/ml_thms.ML deleted file mode 100644 index 08fb095b..00000000 --- a/core/Pure/ML/ml_thms.ML +++ /dev/null @@ -1,145 +0,0 @@ -(* Title: Pure/ML/ml_thms.ML - Author: Makarius - -Attribute source and theorem values within ML. -*) - -signature ML_THMS = -sig - val the_attributes: Proof.context -> int -> Args.src list - val the_thmss: Proof.context -> thm list list - val get_stored_thms: unit -> thm list - val get_stored_thm: unit -> thm - val store_thms: string * thm list -> unit - val store_thm: string * thm -> unit - val bind_thm: string * thm -> unit - val bind_thms: string * thm list -> unit -end; - -structure ML_Thms: ML_THMS = -struct - -(* auxiliary data *) - -type thms = (string * bool) * thm list; (*name, single, value*) - -structure Data = Proof_Data -( - type T = Args.src list Inttab.table * thms list; - fun init _ = (Inttab.empty, []); -); - -val put_attributes = Data.map o apfst o Inttab.update; -fun the_attributes ctxt name = the (Inttab.lookup (fst (Data.get ctxt)) name); - -val get_thmss = snd o Data.get; -val the_thmss = map snd o get_thmss; -val cons_thms = Data.map o apsnd o cons; - - - -(* attribute source *) - -val _ = Theory.setup - (ML_Antiquotation.declaration @{binding attributes} (Scan.lift Parse_Spec.attribs) - (fn _ => fn raw_srcs => fn ctxt => - let - val i = serial (); - val srcs = map (Attrib.check_src ctxt) raw_srcs; - val _ = map (Attrib.attribute ctxt) srcs; - val (a, ctxt') = ctxt - |> ML_Antiquotation.variant "attributes" ||> put_attributes (i, srcs); - val ml = - ("val " ^ a ^ " = ML_Thms.the_attributes ML_context " ^ - string_of_int i ^ ";\n", "Isabelle." ^ a); - in (K ml, ctxt') end)); - - -(* fact references *) - -fun thm_binding kind is_single thms ctxt = - let - val initial = null (get_thmss ctxt); - val (name, ctxt') = ML_Antiquotation.variant kind ctxt; - val ctxt'' = cons_thms ((name, is_single), thms) ctxt'; - - val ml_body = "Isabelle." ^ name; - fun decl final_ctxt = - if initial then - let - val binds = get_thmss final_ctxt |> map (fn ((a, b), _) => (b ? enclose "[" "]") a); - val ml_env = "val [" ^ commas binds ^ "] = ML_Thms.the_thmss ML_context;\n"; - in (ml_env, ml_body) end - else ("", ml_body); - in (decl, ctxt'') end; - -val _ = Theory.setup - (ML_Antiquotation.declaration @{binding thm} (Attrib.thm >> single) (K (thm_binding "thm" true)) #> - ML_Antiquotation.declaration @{binding thms} Attrib.thms (K (thm_binding "thms" false))); - - -(* ad-hoc goals *) - -val and_ = Args.$$$ "and"; -val by = Args.$$$ "by"; -val goal = Scan.unless (by || and_) Args.name_inner_syntax; - -val _ = Theory.setup - (ML_Antiquotation.declaration @{binding lemma} - (Scan.lift (Args.mode "open" -- Parse.enum1 "and" (Scan.repeat1 goal) -- - (by |-- Method.parse -- Scan.option Method.parse))) - (fn _ => fn ((is_open, raw_propss), (m1, m2)) => fn ctxt => - let - val _ = Context_Position.reports ctxt (maps Method.reports_of (m1 :: the_list m2)); - - val propss = burrow (map (rpair []) o Syntax.read_props ctxt) raw_propss; - val prep_result = Goal.norm_result ctxt #> not is_open ? 
Thm.close_derivation; - fun after_qed res goal_ctxt = - Proof_Context.put_thms false (Auto_Bind.thisN, - SOME (map prep_result (Proof_Context.export goal_ctxt ctxt (flat res)))) goal_ctxt; - - val ctxt' = ctxt - |> Proof.theorem NONE after_qed propss - |> Proof.global_terminal_proof (m1, m2); - val thms = - Proof_Context.get_fact ctxt' - (Facts.named (Proof_Context.full_name ctxt' (Binding.name Auto_Bind.thisN))); - in thm_binding "lemma" (length (flat propss) = 1) thms ctxt end)); - - -(* old-style theorem bindings *) - -structure Stored_Thms = Theory_Data -( - type T = thm list; - val empty = []; - fun extend _ = []; - fun merge _ = []; -); - -fun get_stored_thms () = Stored_Thms.get (ML_Context.the_global_context ()); -val get_stored_thm = hd o get_stored_thms; - -fun ml_store get (name, ths) = - let - val ths' = Context.>>> (Context.map_theory_result - (Global_Theory.store_thms (Binding.name name, ths))); - val _ = Theory.setup (Stored_Thms.put ths'); - val _ = - if name = "" then () - else if not (ML_Syntax.is_identifier name) then - error ("Cannot bind theorem(s) " ^ quote name ^ " as ML value") - else - ML_Compiler.eval (ML_Compiler.verbose true ML_Compiler.flags) Position.none - (ML_Lex.tokenize ("val " ^ name ^ " = " ^ get ^ " ();")); - val _ = Theory.setup (Stored_Thms.put []); - in () end; - -val store_thms = ml_store "ML_Thms.get_stored_thms"; -fun store_thm (name, th) = ml_store "ML_Thms.get_stored_thm" (name, [th]); - -fun bind_thm (name, thm) = store_thm (name, Drule.export_without_context thm); -fun bind_thms (name, thms) = store_thms (name, map Drule.export_without_context thms); - -end; - diff --git a/core/Pure/PIDE/active.ML b/core/Pure/PIDE/active.ML deleted file mode 100644 index 1a9bff5d..00000000 --- a/core/Pure/PIDE/active.ML +++ /dev/null @@ -1,63 +0,0 @@ -(* Title: Pure/PIDE/active.ML - Author: Makarius - -Active areas within the document (see also Tools/jEdit/src/active.scala). -*) - -signature ACTIVE = -sig - val make_markup: string -> {implicit: bool, properties: Properties.T} -> Markup.T - val markup_implicit: string -> string -> string - val markup: string -> string -> string - val sendback_markup: Properties.T -> string -> string - val dialog: unit -> (string -> Markup.T) * string future - val dialog_text: unit -> (string -> string) * string future - val dialog_result: serial -> string -> unit -end; - -structure Active: ACTIVE = -struct - -(* active markup *) - -fun explicit_id () = - (case Position.get_id (Position.thread_data ()) of - SOME id => [(Markup.idN, id)] - | NONE => []); - -fun make_markup name {implicit, properties} = - (name, []) - |> not implicit ? 
(fn markup => Markup.properties (explicit_id ()) markup) - |> Markup.properties properties; - -fun markup_implicit name s = Markup.markup (make_markup name {implicit = true, properties = []}) s; -fun markup name s = Markup.markup (make_markup name {implicit = false, properties = []}) s; - -fun sendback_markup props = - Markup.markup (make_markup Markup.sendbackN {implicit = false, properties = props}); - - -(* dialog via document content *) - -val dialogs = Synchronized.var "Active.dialogs" (Inttab.empty: string future Inttab.table); - -fun dialog () = - let - val i = serial (); - fun abort () = Synchronized.change dialogs (Inttab.delete_safe i); - val promise = Future.promise abort : string future; - val _ = Synchronized.change dialogs (Inttab.update_new (i, promise)); - fun result_markup result = Markup.properties (explicit_id ()) (Markup.dialog i result); - in (result_markup, promise) end; - -fun dialog_text () = - let val (markup, promise) = dialog () - in (fn s => Markup.markup (markup s) s, promise) end; - -fun dialog_result i result = - Synchronized.change_result dialogs - (fn tab => (Inttab.lookup tab i, Inttab.delete_safe i tab)) - |> (fn NONE => () | SOME promise => Future.fulfill promise result); - -end; - diff --git a/core/Pure/PIDE/command.ML b/core/Pure/PIDE/command.ML deleted file mode 100644 index cba2b64c..00000000 --- a/core/Pure/PIDE/command.ML +++ /dev/null @@ -1,432 +0,0 @@ -(* Title: Pure/PIDE/command.ML - Author: Makarius - -Prover command execution: read -- eval -- print. -*) - -signature COMMAND = -sig - type blob = (string * (SHA1.digest * string list) option) Exn.result - val read_file: Path.T -> Position.T -> Path.T -> Token.file - val read: (unit -> theory) -> Path.T -> blob list -> Token.T list -> Toplevel.transition - type eval - val eval_eq: eval * eval -> bool - val eval_running: eval -> bool - val eval_finished: eval -> bool - val eval_result_state: eval -> Toplevel.state - val eval: (unit -> theory) -> Path.T -> blob list -> Token.T list -> eval -> eval - type print - val print: bool -> (string * string list) list -> string -> - eval -> print list -> print list option - type print_fn = Toplevel.transition -> Toplevel.state -> unit - type print_function = - {command_name: string, args: string list, exec_id: Document_ID.exec} -> - {delay: Time.time option, pri: int, persistent: bool, strict: bool, print_fn: print_fn} option - val print_function: string -> print_function -> unit - val no_print_function: string -> unit - type exec = eval * print list - val no_exec: exec - val exec_ids: exec option -> Document_ID.exec list - val exec: Document_ID.execution -> exec -> unit -end; - -structure Command: COMMAND = -struct - -(** memo results **) - -datatype 'a expr = - Expr of Document_ID.exec * (unit -> 'a) | - Result of 'a Exn.result; - -abstype 'a memo = Memo of 'a expr Synchronized.var -with - -fun memo exec_id e = Memo (Synchronized.var "Command.memo" (Expr (exec_id, e))); -fun memo_value a = Memo (Synchronized.var "Command.memo" (Result (Exn.Res a))); - -fun memo_result (Memo v) = - (case Synchronized.value v of - Expr (exec_id, _) => error ("Unfinished execution result: " ^ Document_ID.print exec_id) - | Result res => Exn.release res); - -fun memo_finished (Memo v) = - (case Synchronized.value v of Expr _ => false | Result _ => true); - -fun memo_exec execution_id (Memo v) = - Synchronized.timed_access v (K (SOME Time.zeroTime)) - (fn expr => - (case expr of - Expr (exec_id, body) => - uninterruptible (fn restore_attributes => fn () => - let val group = 
Future.worker_subgroup () in - if Execution.running execution_id exec_id [group] then - let - val res = - (body - |> restore_attributes - |> Future.task_context "Command.memo_exec" group - |> Exn.interruptible_capture) (); - in SOME ((), Result res) end - else SOME ((), expr) - end) () - | Result _ => SOME ((), expr))) - |> (fn NONE => error "Conflicting command execution" | _ => ()); - -fun memo_fork params execution_id (Memo v) = - (case Synchronized.value v of - Result _ => () - | _ => ignore ((singleton o Future.forks) params (fn () => memo_exec execution_id (Memo v)))); - -end; - - - -(** main phases of execution **) - -(* read *) - -type blob = - (string * (SHA1.digest * string list) option) Exn.result; (*file node name, digest, lines*) - -fun read_file_node file_node master_dir pos src_path = - let - val _ = Position.report pos Markup.language_path; - val _ = - (case try Url.explode file_node of - NONE => () - | SOME (Url.File _) => () - | _ => - (Position.report pos (Markup.path file_node); - error ("Prover cannot load remote file " ^ - Markup.markup (Markup.path file_node) (quote file_node) ^ Position.here pos))); - val full_path = File.check_file (File.full_path master_dir src_path); - val _ = Position.report pos (Markup.path (Path.implode full_path)); - val text = File.read full_path; - val lines = split_lines text; - val digest = SHA1.digest text; - in {src_path = src_path, lines = lines, digest = digest, pos = Path.position full_path} end; - -val read_file = read_file_node ""; - -local - -fun blob_file src_path lines digest file_node = - let - val file_pos = - Position.file file_node |> - (case Position.get_id (Position.thread_data ()) of - NONE => I - | SOME exec_id => Position.put_id exec_id); - in {src_path = src_path, lines = lines, digest = digest, pos = file_pos} end - -fun resolve_files master_dir blobs toks = - (case Thy_Syntax.parse_spans toks of - [span] => span - |> Thy_Syntax.resolve_files (fn cmd => fn (path, pos) => - let - fun make_file src_path (Exn.Res (file_node, NONE)) = - Exn.interruptible_capture (fn () => - read_file_node file_node master_dir pos src_path) () - | make_file src_path (Exn.Res (file_node, SOME (digest, lines))) = - (Position.reports [(pos, Markup.language_path), (pos, Markup.path file_node)]; - Exn.Res (blob_file src_path lines digest file_node)) - | make_file _ (Exn.Exn e) = Exn.Exn e; - val src_paths = Keyword.command_files cmd path; - in - if null blobs then - map2 make_file src_paths (map (K (Exn.Res ("", NONE))) src_paths) - else if length src_paths = length blobs then - map2 make_file src_paths blobs - else error ("Misalignment of inlined files" ^ Position.here pos) - end) - |> Thy_Syntax.span_content - | _ => toks); - -in - -fun read init master_dir blobs span = - let - val outer_syntax = #2 (Outer_Syntax.get_syntax ()); - val command_reports = Outer_Syntax.command_reports outer_syntax; - - val proper_range = Token.range_of (#1 (take_suffix Token.is_improper span)); - val pos = - (case find_first Token.is_command span of - SOME tok => Token.pos_of tok - | NONE => #1 proper_range); - - val (is_malformed, token_reports) = Thy_Syntax.reports_of_tokens span; - val _ = Position.reports_text (token_reports @ maps command_reports span); - in - if is_malformed then Toplevel.malformed pos "Malformed command syntax" - else - (case Outer_Syntax.read_spans outer_syntax (resolve_files master_dir blobs span) of - [tr] => - if Keyword.is_control (Toplevel.name_of tr) then - Toplevel.malformed pos "Illegal control command" - else Toplevel.modify_init 
init tr - | [] => Toplevel.ignored (#1 (Token.range_of span)) - | _ => Toplevel.malformed (#1 proper_range) "Exactly one command expected") - handle ERROR msg => Toplevel.malformed (#1 proper_range) msg - end; - -end; - - -(* eval *) - -type eval_state = - {failed: bool, malformed: bool, command: Toplevel.transition, state: Toplevel.state}; -val init_eval_state = - {failed = false, malformed = false, command = Toplevel.empty, state = Toplevel.toplevel}; - -datatype eval = Eval of {exec_id: Document_ID.exec, eval_process: eval_state memo}; - -fun eval_exec_id (Eval {exec_id, ...}) = exec_id; -val eval_eq = op = o pairself eval_exec_id; - -val eval_running = Execution.is_running_exec o eval_exec_id; -fun eval_finished (Eval {eval_process, ...}) = memo_finished eval_process; - -fun eval_result (Eval {eval_process, ...}) = memo_result eval_process; -val eval_result_state = #state o eval_result; - -local - -fun reset_state tr st0 = Toplevel.setmp_thread_position tr (fn () => - let - val name = Toplevel.name_of tr; - val res = - if Keyword.is_theory_body name then Toplevel.reset_theory st0 - else if Keyword.is_proof name then Toplevel.reset_proof st0 - else NONE; - in - (case res of - NONE => st0 - | SOME st => (Output.error_message (Toplevel.type_error tr st0 ^ " -- using reset state"); st)) - end) (); - -fun run int tr st = - if Goal.future_enabled 1 andalso Keyword.is_diag (Toplevel.name_of tr) then - (Execution.fork {name = "Toplevel.diag", pos = Toplevel.pos_of tr, pri = ~1} - (fn () => Toplevel.command_exception int tr st); ([], SOME st)) - else Toplevel.command_errors int tr st; - -fun check_cmts span tr st' = - Toplevel.setmp_thread_position tr - (fn () => - Outer_Syntax.side_comments span |> maps (fn cmt => - (Thy_Output.check_text (Token.source_position_of cmt) st'; []) - handle exn => - if Exn.is_interrupt exn then reraise exn - else Runtime.exn_messages_ids exn)) (); - -fun report tr m = - Toplevel.setmp_thread_position tr (fn () => Output.report [Markup.markup_only m]) (); - -fun status tr m = - Toplevel.setmp_thread_position tr (fn () => Output.status (Markup.markup_only m)) (); - -fun proof_status tr st = - (case try Toplevel.proof_of st of - SOME prf => status tr (Proof.status_markup prf) - | NONE => ()); - -fun eval_state span tr ({malformed, state, ...}: eval_state) = - if malformed then - {failed = true, malformed = malformed, command = tr, state = Toplevel.toplevel} - else - let - val _ = Multithreading.interrupted (); - - val malformed' = Toplevel.is_malformed tr; - val st = reset_state tr state; - - val _ = status tr Markup.running; - val (errs1, result) = run true tr st; - val errs2 = (case result of NONE => [] | SOME st' => check_cmts span tr st'); - val errs = errs1 @ errs2; - val _ = List.app (Future.error_message (Toplevel.pos_of tr)) errs; - in - (case result of - NONE => - let - val _ = status tr Markup.failed; - val _ = status tr Markup.finished; - val _ = if null errs then (report tr Markup.bad; Exn.interrupt ()) else (); - in {failed = true, malformed = malformed', command = tr, state = st} end - | SOME st' => - let - val _ = proof_status tr st'; - val _ = status tr Markup.finished; - in {failed = false, malformed = malformed', command = tr, state = st'} end) - end; - -in - -fun eval init master_dir blobs span eval0 = - let - val exec_id = Document_ID.make (); - fun process () = - let - val tr = - Position.setmp_thread_data (Position.id_only (Document_ID.print exec_id)) - (fn () => read init master_dir blobs span |> Toplevel.exec_id exec_id) (); - in eval_state 
span tr (eval_result eval0) end; - in Eval {exec_id = exec_id, eval_process = memo exec_id process} end; - -end; - - -(* print *) - -datatype print = Print of - {name: string, args: string list, delay: Time.time option, pri: int, persistent: bool, - exec_id: Document_ID.exec, print_process: unit memo}; - -fun print_exec_id (Print {exec_id, ...}) = exec_id; -val print_eq = op = o pairself print_exec_id; - -type print_fn = Toplevel.transition -> Toplevel.state -> unit; - -type print_function = - {command_name: string, args: string list, exec_id: Document_ID.exec} -> - {delay: Time.time option, pri: int, persistent: bool, strict: bool, print_fn: print_fn} option; - -local - -val print_functions = - Synchronized.var "Command.print_functions" ([]: (string * print_function) list); - -fun print_error tr opt_context e = - (Toplevel.setmp_thread_position tr o Runtime.controlled_execution opt_context) e () - handle exn => - if Exn.is_interrupt exn then reraise exn - else List.app (Future.error_message (Toplevel.pos_of tr)) (Runtime.exn_messages_ids exn); - -fun print_finished (Print {print_process, ...}) = memo_finished print_process; - -fun print_persistent (Print {persistent, ...}) = persistent; - -val overlay_ord = prod_ord string_ord (list_ord string_ord); - -in - -fun print command_visible command_overlays command_name eval old_prints = - let - val print_functions = Synchronized.value print_functions; - - fun make_print name args {delay, pri, persistent, strict, print_fn} = - let - val exec_id = Document_ID.make (); - fun process () = - let - val {failed, command, state = st', ...} = eval_result eval; - val tr = Toplevel.exec_id exec_id command; - val opt_context = try Toplevel.generic_theory_of st'; - in - if failed andalso strict then () - else print_error tr opt_context (fn () => print_fn tr st') - end; - in - Print { - name = name, args = args, delay = delay, pri = pri, persistent = persistent, - exec_id = exec_id, print_process = memo exec_id process} - end; - - fun bad_print name args exn = - make_print name args {delay = NONE, pri = 0, persistent = false, - strict = false, print_fn = fn _ => fn _ => reraise exn}; - - fun new_print name args get_pr = - let - val params = {command_name = command_name, args = args, exec_id = eval_exec_id eval}; - in - (case Exn.capture (Runtime.controlled_execution NONE get_pr) params of - Exn.Res NONE => NONE - | Exn.Res (SOME pr) => SOME (make_print name args pr) - | Exn.Exn exn => SOME (bad_print name args exn)) - end; - - fun get_print (a, b) = - (case find_first (fn Print {name, args, ...} => name = a andalso args = b) old_prints of - NONE => - (case AList.lookup (op =) print_functions a of - NONE => SOME (bad_print a b (ERROR ("Missing print function " ^ quote a))) - | SOME get_pr => new_print a b get_pr) - | some => some); - - val new_prints = - if command_visible then - fold (fn (a, _) => cons (a, [])) print_functions command_overlays - |> sort_distinct overlay_ord - |> map_filter get_print - else filter (fn print => print_finished print andalso print_persistent print) old_prints; - in - if eq_list print_eq (old_prints, new_prints) then NONE else SOME new_prints - end; - -fun print_function name f = - Synchronized.change print_functions (fn funs => - (if not (AList.defined (op =) funs name) then () - else warning ("Redefining command print function: " ^ quote name); - AList.update (op =) (name, f) funs)); - -fun no_print_function name = - Synchronized.change print_functions (filter_out (equal name o #1)); - -end; - -val _ = - print_function 
"Execution.print" - (fn {args, exec_id, ...} => - if null args then - SOME {delay = NONE, pri = 1, persistent = false, strict = false, - print_fn = fn _ => fn _ => Execution.fork_prints exec_id} - else NONE); - -val _ = - print_function "print_state" - (fn {command_name, ...} => - if Keyword.is_printed command_name then - SOME {delay = NONE, pri = 1, persistent = false, strict = true, - print_fn = fn _ => fn st => if Toplevel.is_proof st then Toplevel.print_state st else ()} - else NONE); - - -(* combined execution *) - -type exec = eval * print list; -val no_exec: exec = - (Eval {exec_id = Document_ID.none, eval_process = memo_value init_eval_state}, []); - -fun exec_ids NONE = [] - | exec_ids (SOME (eval, prints)) = eval_exec_id eval :: map print_exec_id prints; - -local - -fun run_print execution_id (Print {name, delay, pri, print_process, ...}) = - if pri <= 0 orelse (Multithreading.enabled () andalso Options.default_bool "parallel_print") - then - let - val group = Future.worker_subgroup (); - fun fork () = - memo_fork {name = name, group = SOME group, deps = [], pri = pri, interrupts = true} - execution_id print_process; - in - (case delay of - NONE => fork () - | SOME d => ignore (Event_Timer.request (Time.+ (Time.now (), d)) fork)) - end - else memo_exec execution_id print_process; - -in - -fun exec execution_id (Eval {eval_process, ...}, prints) = - (memo_exec execution_id eval_process; List.app (run_print execution_id) prints); - -end; - -end; - diff --git a/core/Pure/PIDE/command.scala b/core/Pure/PIDE/command.scala deleted file mode 100644 index 825018ad..00000000 --- a/core/Pure/PIDE/command.scala +++ /dev/null @@ -1,393 +0,0 @@ -/* Title: Pure/PIDE/command.scala - Author: Fabian Immler, TU Munich - Author: Makarius - -Prover commands with accumulated results from execution. 
-*/ - -package isabelle - - -import scala.collection.mutable -import scala.collection.immutable.SortedMap - - -object Command -{ - type Edit = (Option[Command], Option[Command]) - type Blob = Exn.Result[(Document.Node.Name, Option[(SHA1.Digest, Symbol.Text_Chunk)])] - - - - /** accumulated results from prover **/ - - /* results */ - - object Results - { - type Entry = (Long, XML.Tree) - val empty = new Results(SortedMap.empty) - def make(es: List[Results.Entry]): Results = (empty /: es)(_ + _) - def merge(rs: List[Results]): Results = (empty /: rs)(_ ++ _) - } - - final class Results private(private val rep: SortedMap[Long, XML.Tree]) - { - def defined(serial: Long): Boolean = rep.isDefinedAt(serial) - def get(serial: Long): Option[XML.Tree] = rep.get(serial) - def iterator: Iterator[Results.Entry] = rep.iterator - - def + (entry: Results.Entry): Results = - if (defined(entry._1)) this - else new Results(rep + entry) - - def ++ (other: Results): Results = - if (this eq other) this - else if (rep.isEmpty) other - else (this /: other.iterator)(_ + _) - - override def hashCode: Int = rep.hashCode - override def equals(that: Any): Boolean = - that match { - case other: Results => rep == other.rep - case _ => false - } - override def toString: String = iterator.mkString("Results(", ", ", ")") - } - - - /* markup */ - - object Markup_Index - { - val markup: Markup_Index = Markup_Index(false, Symbol.Text_Chunk.Default) - } - - sealed case class Markup_Index(status: Boolean, chunk_name: Symbol.Text_Chunk.Name) - - object Markups - { - val empty: Markups = new Markups(Map.empty) - - def init(markup: Markup_Tree): Markups = - new Markups(Map(Markup_Index.markup -> markup)) - } - - final class Markups private(private val rep: Map[Markup_Index, Markup_Tree]) - { - def is_empty: Boolean = rep.isEmpty - - def apply(index: Markup_Index): Markup_Tree = - rep.getOrElse(index, Markup_Tree.empty) - - def add(index: Markup_Index, markup: Text.Markup): Markups = - new Markups(rep + (index -> (this(index) + markup))) - - def redirection_iterator: Iterator[Document_ID.Generic] = - for (Markup_Index(_, Symbol.Text_Chunk.Id(id)) <- rep.keysIterator) - yield id - - def redirect(other_id: Document_ID.Generic): Markups = - { - val rep1 = - (for { - (Markup_Index(status, Symbol.Text_Chunk.Id(id)), markup) <- rep.iterator - if other_id == id - } yield (Markup_Index(status, Symbol.Text_Chunk.Default), markup)).toMap - if (rep1.isEmpty) Markups.empty else new Markups(rep1) - } - - override def hashCode: Int = rep.hashCode - override def equals(that: Any): Boolean = - that match { - case other: Markups => rep == other.rep - case _ => false - } - override def toString: String = rep.iterator.mkString("Markups(", ", ", ")") - } - - - /* state */ - - object State - { - def merge_results(states: List[State]): Command.Results = - Results.merge(states.map(_.results)) - - def merge_markup(states: List[State], index: Markup_Index, - range: Text.Range, elements: Markup.Elements): Markup_Tree = - Markup_Tree.merge(states.map(_.markup(index)), range, elements) - } - - sealed case class State( - command: Command, - status: List[Markup] = Nil, - results: Results = Results.empty, - markups: Markups = Markups.empty) - { - lazy val protocol_status: Protocol.Status = - { - val warnings = - if (results.iterator.exists(p => Protocol.is_warning(p._2))) - List(Markup(Markup.WARNING, Nil)) - else Nil - val errors = - if (results.iterator.exists(p => Protocol.is_error(p._2))) - List(Markup(Markup.ERROR, Nil)) - else Nil - 
Protocol.Status.make((warnings ::: errors ::: status).iterator) - } - - def markup(index: Markup_Index): Markup_Tree = markups(index) - - def redirect(other_command: Command): Option[State] = - { - val markups1 = markups.redirect(other_command.id) - if (markups1.is_empty) None - else Some(new State(other_command, Nil, Results.empty, markups1)) - } - - def eq_content(other: State): Boolean = - command.source == other.command.source && - status == other.status && - results == other.results && - markups == other.markups - - private def add_status(st: Markup): State = - copy(status = st :: status) - - private def add_markup( - status: Boolean, chunk_name: Symbol.Text_Chunk.Name, m: Text.Markup): State = - { - val markups1 = - if (status || Protocol.liberal_status_elements(m.info.name)) - markups.add(Markup_Index(true, chunk_name), m) - else markups - copy(markups = markups1.add(Markup_Index(false, chunk_name), m)) - } - - def accumulate( - self_id: Document_ID.Generic => Boolean, - other_id: Document_ID.Generic => Option[(Symbol.Text_Chunk.Id, Symbol.Text_Chunk)], - message: XML.Elem): State = - message match { - case XML.Elem(Markup(Markup.STATUS, _), msgs) => - (this /: msgs)((state, msg) => - msg match { - case elem @ XML.Elem(markup, Nil) => - state. - add_status(markup). - add_markup(true, Symbol.Text_Chunk.Default, Text.Info(command.proper_range, elem)) - case _ => - Output.warning("Ignored status message: " + msg) - state - }) - - case XML.Elem(Markup(Markup.REPORT, _), msgs) => - (this /: msgs)((state, msg) => - { - def bad(): Unit = Output.warning("Ignored report message: " + msg) - - msg match { - case XML.Elem( - Markup(name, atts @ Position.Reported(id, chunk_name, symbol_range)), args) => - - val target = - if (self_id(id) && command.chunks.isDefinedAt(chunk_name)) - Some((chunk_name, command.chunks(chunk_name))) - else if (chunk_name == Symbol.Text_Chunk.Default) other_id(id) - else None - - target match { - case Some((target_name, target_chunk)) => - target_chunk.incorporate(symbol_range) match { - case Some(range) => - val props = Position.purge(atts) - val info = Text.Info(range, XML.Elem(Markup(name, props), args)) - state.add_markup(false, target_name, info) - case None => bad(); state - } - case None => - // silently ignore excessive reports - state - } - - case XML.Elem(Markup(name, atts), args) - if !atts.exists({ case (a, _) => Markup.POSITION_PROPERTIES(a) }) => - val range = command.proper_range - val props = Position.purge(atts) - val info: Text.Markup = Text.Info(range, XML.Elem(Markup(name, props), args)) - state.add_markup(false, Symbol.Text_Chunk.Default, info) - - case _ => bad(); state - } - }) - case XML.Elem(Markup(name, props), body) => - props match { - case Markup.Serial(i) => - val message1 = XML.Elem(Markup(Markup.message(name), props), body) - val message2 = XML.Elem(Markup(name, props), body) - - var st = copy(results = results + (i -> message1)) - if (Protocol.is_inlined(message)) { - for { - (chunk_name, chunk) <- command.chunks.iterator - range <- Protocol.message_positions(self_id, chunk_name, chunk, message) - } st = st.add_markup(false, chunk_name, Text.Info(range, message2)) - } - st - - case _ => - Output.warning("Ignored message without serial number: " + message) - this - } - } - } - - - - /** static content **/ - - /* make commands */ - - def name(span: List[Token]): String = - span.find(_.is_command) match { case Some(tok) => tok.source case _ => "" } - - private def source_span(span: List[Token]): (String, List[Token]) = - { - val source: 
String = - span match { - case List(tok) => tok.source - case _ => span.map(_.source).mkString - } - - val span1 = new mutable.ListBuffer[Token] - var i = 0 - for (Token(kind, s) <- span) { - val n = s.length - val s1 = source.substring(i, i + n) - span1 += Token(kind, s1) - i += n - } - (source, span1.toList) - } - - def apply( - id: Document_ID.Command, - node_name: Document.Node.Name, - blobs: List[Blob], - span: List[Token]): Command = - { - val (source, span1) = source_span(span) - new Command(id, node_name, blobs, span1, source, Results.empty, Markup_Tree.empty) - } - - val empty: Command = Command(Document_ID.none, Document.Node.Name.empty, Nil, Nil) - - def unparsed( - id: Document_ID.Command, - source: String, - results: Results, - markup: Markup_Tree): Command = - { - val (source1, span) = source_span(List(Token(Token.Kind.UNPARSED, source))) - new Command(id, Document.Node.Name.empty, Nil, span, source1, results, markup) - } - - def unparsed(source: String): Command = - unparsed(Document_ID.none, source, Results.empty, Markup_Tree.empty) - - def rich_text(id: Document_ID.Command, results: Results, body: XML.Body): Command = - { - val text = XML.content(body) - val markup = Markup_Tree.from_XML(body) - unparsed(id, text, results, markup) - } - - - /* perspective */ - - object Perspective - { - val empty: Perspective = Perspective(Nil) - } - - sealed case class Perspective(commands: List[Command]) // visible commands in canonical order - { - def is_empty: Boolean = commands.isEmpty - - def same(that: Perspective): Boolean = - { - val cmds1 = this.commands - val cmds2 = that.commands - require(!cmds1.exists(_.is_undefined)) - require(!cmds2.exists(_.is_undefined)) - cmds1.length == cmds2.length && - (cmds1.iterator zip cmds2.iterator).forall({ case (c1, c2) => c1.id == c2.id }) - } - } -} - - -final class Command private( - val id: Document_ID.Command, - val node_name: Document.Node.Name, - val blobs: List[Command.Blob], - val span: List[Token], - val source: String, - val init_results: Command.Results, - val init_markup: Markup_Tree) -{ - /* classification */ - - def is_undefined: Boolean = id == Document_ID.none - val is_unparsed: Boolean = span.exists(_.is_unparsed) - val is_unfinished: Boolean = span.exists(_.is_unfinished) - - val is_ignored: Boolean = !span.exists(_.is_proper) - val is_malformed: Boolean = !is_ignored && (!span.head.is_command || span.exists(_.is_error)) - def is_command: Boolean = !is_ignored && !is_malformed - - def name: String = Command.name(span) - - override def toString = - id + "/" + (if (is_command) name else if (is_ignored) "IGNORED" else "MALFORMED") - - - /* blobs */ - - def blobs_names: List[Document.Node.Name] = - for (Exn.Res((name, _)) <- blobs) yield name - - def blobs_defined: List[(Document.Node.Name, SHA1.Digest)] = - for (Exn.Res((name, Some((digest, _)))) <- blobs) yield (name, digest) - - def blobs_changed(doc_blobs: Document.Blobs): Boolean = - blobs.exists({ case Exn.Res((name, _)) => doc_blobs.changed(name) case _ => false }) - - - /* source chunks */ - - val chunk: Symbol.Text_Chunk = Symbol.Text_Chunk(source) - - val chunks: Map[Symbol.Text_Chunk.Name, Symbol.Text_Chunk] = - ((Symbol.Text_Chunk.Default -> chunk) :: - (for (Exn.Res((name, Some((_, file)))) <- blobs) - yield (Symbol.Text_Chunk.File(name.node) -> file))).toMap - - def length: Int = source.length - def range: Text.Range = chunk.range - - val proper_range: Text.Range = - Text.Range(0, (length /: span.reverse.iterator.takeWhile(_.is_improper))(_ - _.source.length)) - - 
def source(range: Text.Range): String = source.substring(range.start, range.stop) - - - /* accumulated results */ - - val init_state: Command.State = - Command.State(this, results = init_results, markups = Command.Markups.init(init_markup)) - - val empty_state: Command.State = Command.State(this) -} diff --git a/core/Pure/PIDE/document.ML b/core/Pure/PIDE/document.ML deleted file mode 100644 index b06d4864..00000000 --- a/core/Pure/PIDE/document.ML +++ /dev/null @@ -1,636 +0,0 @@ -(* Title: Pure/PIDE/document.ML - Author: Makarius - -Document as collection of named nodes, each consisting of an editable -list of commands, associated with asynchronous execution process. -*) - -signature DOCUMENT = -sig - val timing: bool Unsynchronized.ref - type node_header = string * Thy_Header.header * string list - type overlay = Document_ID.command * (string * string list) - datatype node_edit = - Edits of (Document_ID.command option * Document_ID.command option) list | - Deps of node_header | - Perspective of bool * Document_ID.command list * overlay list - type edit = string * node_edit - type state - val init_state: state - val define_blob: string -> string -> state -> state - type blob_digest = (string * string option) Exn.result - val define_command: Document_ID.command -> string -> blob_digest list -> string -> - state -> state - val remove_versions: Document_ID.version list -> state -> state - val start_execution: state -> state - val update: Document_ID.version -> Document_ID.version -> edit list -> state -> - Document_ID.exec list * (Document_ID.command * Document_ID.exec list) list * state - val state: unit -> state - val change_state: (state -> state) -> unit -end; - -structure Document: DOCUMENT = -struct - -val timing = Unsynchronized.ref false; -fun timeit msg e = cond_timeit (! timing) msg e; - - - -(** document structure **) - -fun err_dup kind id = error ("Duplicate " ^ kind ^ ": " ^ Document_ID.print id); -fun err_undef kind id = error ("Undefined " ^ kind ^ ": " ^ Document_ID.print id); - -type node_header = string * Thy_Header.header * string list; - -type perspective = - {required: bool, (*required node*) - visible: Inttab.set, (*visible commands*) - visible_last: Document_ID.command option, (*last visible command*) - overlays: (string * string list) list Inttab.table}; (*command id -> print functions with args*) - -structure Entries = Linear_Set(type key = Document_ID.command val ord = int_ord); - -abstype node = Node of - {header: node_header, (*master directory, theory header, errors*) - perspective: perspective, (*command perspective*) - entries: Command.exec option Entries.T, (*command entries with excecutions*) - result: Command.eval option} (*result of last execution*) -and version = Version of node String_Graph.T (*development graph wrt. 
static imports*) -with - -fun make_node (header, perspective, entries, result) = - Node {header = header, perspective = perspective, entries = entries, result = result}; - -fun map_node f (Node {header, perspective, entries, result}) = - make_node (f (header, perspective, entries, result)); - -fun make_perspective (required, command_ids, overlays) : perspective = - {required = required, - visible = Inttab.make_set command_ids, - visible_last = try List.last command_ids, - overlays = Inttab.make_list overlays}; - -val no_header: node_header = ("", Thy_Header.make ("", Position.none) [] [], []); -val no_perspective = make_perspective (false, [], []); - -val empty_node = make_node (no_header, no_perspective, Entries.empty, NONE); - -fun is_no_perspective ({required, visible, visible_last, overlays}: perspective) = - not required andalso - Inttab.is_empty visible andalso - is_none visible_last andalso - Inttab.is_empty overlays; - -fun is_empty_node (Node {header, perspective, entries, result}) = - header = no_header andalso - is_no_perspective perspective andalso - Entries.is_empty entries andalso - is_none result; - - -(* basic components *) - -fun master_directory (Node {header = (master, _, _), ...}) = - (case try Url.explode master of - SOME (Url.File path) => path - | _ => Path.current); - -fun set_header header = - map_node (fn (_, perspective, entries, result) => (header, perspective, entries, result)); - -fun get_header (Node {header = (master, header, errors), ...}) = - if null errors then (master, header) - else error (cat_lines errors); - -fun read_header node span = - let - val {name = (name, _), imports, keywords} = #2 (get_header node); - val {name = (_, pos), imports = imports', ...} = Thy_Header.read_tokens span; - in Thy_Header.make (name, pos) (map #1 imports ~~ map #2 imports') keywords end; - -fun get_perspective (Node {perspective, ...}) = perspective; -fun set_perspective args = - map_node (fn (header, _, entries, result) => (header, make_perspective args, entries, result)); - -val required_node = #required o get_perspective; -val visible_command = Inttab.defined o #visible o get_perspective; -val visible_last = #visible_last o get_perspective; -val visible_node = is_some o visible_last -val overlays = Inttab.lookup_list o #overlays o get_perspective; - -fun map_entries f = - map_node (fn (header, perspective, entries, result) => (header, perspective, f entries, result)); -fun get_entries (Node {entries, ...}) = entries; - -fun iterate_entries f = Entries.iterate NONE f o get_entries; -fun iterate_entries_after start f (Node {entries, ...}) = - (case Entries.get_after entries start of - NONE => I - | SOME id => Entries.iterate (SOME id) f entries); - -fun get_result (Node {result, ...}) = result; -fun set_result result = - map_node (fn (header, perspective, entries, _) => (header, perspective, entries, result)); - -fun changed_result node node' = - (case (get_result node, get_result node') of - (SOME eval, SOME eval') => not (Command.eval_eq (eval, eval')) - | (NONE, NONE) => false - | _ => true); - -fun pending_result node = - (case get_result node of - SOME eval => not (Command.eval_finished eval) - | NONE => false); - -fun get_node nodes name = String_Graph.get_node nodes name - handle String_Graph.UNDEF _ => empty_node; -fun default_node name = String_Graph.default_node (name, empty_node); -fun update_node name f = default_node name #> String_Graph.map_node name f; - - -(* node edits and associated executions *) - -type overlay = Document_ID.command * (string * 
string list); - -datatype node_edit = - Edits of (Document_ID.command option * Document_ID.command option) list | - Deps of node_header | - Perspective of bool * Document_ID.command list * overlay list; - -type edit = string * node_edit; - -val after_entry = Entries.get_after o get_entries; - -fun lookup_entry node id = - (case Entries.lookup (get_entries node) id of - NONE => NONE - | SOME (exec, _) => exec); - -fun the_entry node id = - (case Entries.lookup (get_entries node) id of - NONE => err_undef "command entry" id - | SOME (exec, _) => exec); - -fun the_default_entry node (SOME id) = (id, the_default Command.no_exec (the_entry node id)) - | the_default_entry _ NONE = (Document_ID.none, Command.no_exec); - -fun assign_entry (command_id, exec) node = - if is_none (Entries.lookup (get_entries node) command_id) then node - else map_entries (Entries.update (command_id, exec)) node; - -fun reset_after id entries = - (case Entries.get_after entries id of - NONE => entries - | SOME next => Entries.update (next, NONE) entries); - -val edit_node = map_entries o fold - (fn (id, SOME id2) => Entries.insert_after id (id2, NONE) - | (id, NONE) => Entries.delete_after id #> reset_after id); - - -(* version operations *) - -val empty_version = Version String_Graph.empty; - -fun nodes_of (Version nodes) = nodes; -val node_of = get_node o nodes_of; - -fun cycle_msg names = "Cyclic dependency of " ^ space_implode " via " (map quote names); - -fun edit_nodes (name, node_edit) (Version nodes) = - Version - (case node_edit of - Edits edits => update_node name (edit_node edits) nodes - | Deps (master, header, errors) => - let - val imports = map fst (#imports header); - val errors1 = - (Thy_Header.define_keywords header; errors) - handle ERROR msg => errors @ [msg]; - val nodes1 = nodes - |> default_node name - |> fold default_node imports; - val nodes2 = nodes1 - |> String_Graph.Keys.fold - (fn dep => String_Graph.del_edge (dep, name)) (String_Graph.imm_preds nodes1 name); - val (nodes3, errors2) = - (String_Graph.add_deps_acyclic (name, imports) nodes2, errors1) - handle String_Graph.CYCLES cs => (nodes2, errors1 @ map cycle_msg cs); - in String_Graph.map_node name (set_header (master, header, errors2)) nodes3 end - | Perspective perspective => update_node name (set_perspective perspective) nodes); - -fun put_node (name, node) (Version nodes) = - let - val nodes1 = update_node name (K node) nodes; - val nodes2 = - if String_Graph.is_maximal nodes1 name andalso is_empty_node node - then String_Graph.del_node name nodes1 - else nodes1; - in Version nodes2 end; - -end; - - - -(** main state -- document structure and execution process **) - -type blob_digest = (string * string option) Exn.result; (*file node name, raw digest*) - -type execution = - {version_id: Document_ID.version, (*static version id*) - execution_id: Document_ID.execution, (*dynamic execution id*) - delay_request: unit future, (*pending event timer request*) - frontier: Future.task Symtab.table}; (*node name -> running execution task*) - -val no_execution: execution = - {version_id = Document_ID.none, execution_id = Document_ID.none, - delay_request = Future.value (), frontier = Symtab.empty}; - -fun new_execution version_id delay_request frontier : execution = - {version_id = version_id, execution_id = Execution.start (), - delay_request = delay_request, frontier = frontier}; - -abstype state = State of - {versions: version Inttab.table, (*version id -> document content*) - blobs: (SHA1.digest * string list) Symtab.table, (*raw digest 
-> digest, lines*) - commands: (string * blob_digest list * Token.T list lazy) Inttab.table, - (*command id -> name, inlined files, command span*) - execution: execution} (*current execution process*) -with - -fun make_state (versions, blobs, commands, execution) = - State {versions = versions, blobs = blobs, commands = commands, execution = execution}; - -fun map_state f (State {versions, blobs, commands, execution}) = - make_state (f (versions, blobs, commands, execution)); - -val init_state = - make_state (Inttab.make [(Document_ID.none, empty_version)], - Symtab.empty, Inttab.empty, no_execution); - - -(* document versions *) - -fun define_version version_id version = - map_state (fn (versions, blobs, commands, {delay_request, frontier, ...}) => - let - val versions' = Inttab.update_new (version_id, version) versions - handle Inttab.DUP dup => err_dup "document version" dup; - val execution' = new_execution version_id delay_request frontier; - in (versions', blobs, commands, execution') end); - -fun the_version (State {versions, ...}) version_id = - (case Inttab.lookup versions version_id of - NONE => err_undef "document version" version_id - | SOME version => version); - -fun delete_version version_id versions = - Inttab.delete version_id versions - handle Inttab.UNDEF _ => err_undef "document version" version_id; - - -(* inlined files *) - -fun define_blob digest text = - map_state (fn (versions, blobs, commands, execution) => - let val blobs' = Symtab.update (digest, (SHA1.fake digest, split_lines text)) blobs - in (versions, blobs', commands, execution) end); - -fun the_blob (State {blobs, ...}) digest = - (case Symtab.lookup blobs digest of - NONE => error ("Undefined blob: " ^ digest) - | SOME content => content); - -fun resolve_blob state (blob_digest: blob_digest) = - blob_digest |> Exn.map_result (fn (file_node, raw_digest) => - (file_node, Option.map (the_blob state) raw_digest)); - - -(* commands *) - -fun define_command command_id name command_blobs text = - map_state (fn (versions, blobs, commands, execution) => - let - val id = Document_ID.print command_id; - val span = - Lazy.lazy (fn () => - Position.setmp_thread_data (Position.id_only id) - (fn () => Thy_Syntax.parse_tokens (Keyword.get_lexicons ()) (Position.id id) text) ()); - val _ = - Position.setmp_thread_data (Position.id_only id) - (fn () => Output.status (Markup.markup_only Markup.accepted)) (); - val commands' = - Inttab.update_new (command_id, (name, command_blobs, span)) commands - handle Inttab.DUP dup => err_dup "command" dup; - in (versions, blobs, commands', execution) end); - -fun the_command (State {commands, ...}) command_id = - (case Inttab.lookup commands command_id of - NONE => err_undef "command" command_id - | SOME command => command); - -val the_command_name = #1 oo the_command; - -end; - - -(* remove_versions *) - -fun remove_versions version_ids state = state |> map_state (fn (versions, _, _, execution) => - let - val _ = - member (op =) version_ids (#version_id execution) andalso - error ("Attempt to remove execution version " ^ Document_ID.print (#version_id execution)); - - val versions' = fold delete_version version_ids versions; - val commands' = - (versions', Inttab.empty) |-> - Inttab.fold (fn (_, version) => nodes_of version |> - String_Graph.fold (fn (_, (node, _)) => node |> - iterate_entries (fn ((_, command_id), _) => - SOME o Inttab.insert (K true) (command_id, the_command state command_id)))); - val blobs' = - (commands', Symtab.empty) |-> - Inttab.fold (fn (_, (_, blobs, _)) => 
blobs |> - fold (fn Exn.Res (_, SOME b) => Symtab.update (b, the_blob state b) | _ => I)); - - in (versions', blobs', commands', execution) end); - - -(* document execution *) - -fun start_execution state = state |> map_state (fn (versions, blobs, commands, execution) => - timeit "Document.start_execution" (fn () => - let - val {version_id, execution_id, delay_request, frontier} = execution; - - val delay = seconds (Options.default_real "editor_execution_delay"); - - val _ = Future.cancel delay_request; - val delay_request' = Event_Timer.future (Time.+ (Time.now (), delay)); - - val new_tasks = - nodes_of (the_version state version_id) |> String_Graph.schedule - (fn deps => fn (name, node) => - if visible_node node orelse pending_result node then - let - val more_deps = - Future.task_of delay_request' :: the_list (Symtab.lookup frontier name); - fun body () = - iterate_entries (fn (_, opt_exec) => fn () => - (case opt_exec of - SOME exec => - if Execution.is_running execution_id - then SOME (Command.exec execution_id exec) - else NONE - | NONE => NONE)) node () - handle exn => if Exn.is_interrupt exn then () (*sic!*) else reraise exn; - val future = - (singleton o Future.forks) - {name = "theory:" ^ name, group = SOME (Future.new_group NONE), - deps = more_deps @ map #2 (maps #2 deps), - pri = 0, interrupts = false} body; - in [(name, Future.task_of future)] end - else []); - val frontier' = (fold o fold) Symtab.update new_tasks frontier; - val execution' = - {version_id = version_id, execution_id = execution_id, - delay_request = delay_request', frontier = frontier'}; - in (versions, blobs, commands, execution') end)); - - - -(** document update **) - -(* exec state assignment *) - -type assign_update = Command.exec option Inttab.table; (*command id -> exec*) - -val assign_update_empty: assign_update = Inttab.empty; -fun assign_update_defined (tab: assign_update) command_id = Inttab.defined tab command_id; -fun assign_update_apply (tab: assign_update) node = Inttab.fold assign_entry tab node; - -fun assign_update_new upd (tab: assign_update) = - Inttab.update_new upd tab - handle Inttab.DUP dup => err_dup "exec state assignment" dup; - -fun assign_update_result (tab: assign_update) = - Inttab.fold (fn (command_id, exec) => cons (command_id, Command.exec_ids exec)) tab []; - - -(* update *) - -local - -fun make_required nodes = - let - fun all_preds P = - String_Graph.fold (fn (a, (node, _)) => P node ? cons a) nodes [] - |> String_Graph.all_preds nodes - |> Symtab.make_set; - - val all_visible = all_preds visible_node; - val all_required = all_preds required_node; - in - Symtab.fold (fn (a, ()) => - exists (Symtab.defined all_visible) (String_Graph.immediate_succs nodes a) ? 
- Symtab.update (a, ())) all_visible all_required - end; - -fun loaded_theory name = - (case try (unsuffix ".thy") name of - SOME a => get_first Thy_Info.lookup_theory [a, Long_Name.base_name a] - | NONE => NONE); - -fun init_theory deps node span = - let - val master_dir = master_directory node; - val header = read_header node span; - val imports = #imports header; - val parents = - imports |> map (fn (import, _) => - (case loaded_theory import of - SOME thy => thy - | NONE => - Toplevel.end_theory (Position.file_only import) - (case get_result (snd (the (AList.lookup (op =) deps import))) of - NONE => Toplevel.toplevel - | SOME eval => Command.eval_result_state eval))); - val _ = Position.reports (map #2 imports ~~ map Theory.get_markup parents); - in Resources.begin_theory master_dir header parents end; - -fun check_theory full name node = - is_some (loaded_theory name) orelse - can get_header node andalso (not full orelse is_some (get_result node)); - -fun last_common state node_required node0 node = - let - fun update_flags prev (visible, initial) = - let - val visible' = visible andalso prev <> visible_last node; - val initial' = initial andalso - (case prev of - NONE => true - | SOME command_id => not (Keyword.is_theory_begin (the_command_name state command_id))); - in (visible', initial') end; - - fun get_common ((prev, command_id), opt_exec) (_, ok, flags, assign_update) = - if ok then - let - val flags' as (visible', _) = update_flags prev flags; - val ok' = - (case (lookup_entry node0 command_id, opt_exec) of - (SOME (eval0, _), SOME (eval, _)) => - Command.eval_eq (eval0, eval) andalso - (visible' orelse node_required orelse Command.eval_running eval) - | _ => false); - val assign_update' = assign_update |> ok' ? - (case opt_exec of - SOME (eval, prints) => - let - val command_visible = visible_command node command_id; - val command_overlays = overlays node command_id; - val command_name = the_command_name state command_id; - in - (case Command.print command_visible command_overlays command_name eval prints of - SOME prints' => assign_update_new (command_id, SOME (eval, prints')) - | NONE => I) - end - | NONE => I); - in SOME (prev, ok', flags', assign_update') end - else NONE; - val (common, ok, flags, assign_update') = - iterate_entries get_common node (NONE, true, (true, true), assign_update_empty); - val (common', flags') = - if ok then - let val last = Entries.get_after (get_entries node) common - in (last, update_flags last flags) end - else (common, flags); - in (assign_update', common', flags') end; - -fun illegal_init _ = error "Illegal theory header after end of theory"; - -fun new_exec state node proper_init command_id' (assign_update, command_exec, init) = - if not proper_init andalso is_none init then NONE - else - let - val (_, (eval, _)) = command_exec; - - val command_visible = visible_command node command_id'; - val command_overlays = overlays node command_id'; - val (command_name, blob_digests, span0) = the_command state command_id'; - val blobs = map (resolve_blob state) blob_digests; - val span = Lazy.force span0; - - val eval' = - Command.eval (fn () => the_default illegal_init init span) - (master_directory node) blobs span eval; - val prints' = perhaps (Command.print command_visible command_overlays command_name eval') []; - val exec' = (eval', prints'); - - val assign_update' = assign_update_new (command_id', SOME exec') assign_update; - val init' = if Keyword.is_theory_begin command_name then NONE else init; - in SOME (assign_update', (command_id', 
(eval', prints')), init') end; - -fun removed_execs node0 (command_id, exec_ids) = - subtract (op =) exec_ids (Command.exec_ids (lookup_entry node0 command_id)); - -in - -fun update old_version_id new_version_id edits state = - let - val old_version = the_version state old_version_id; - val new_version = timeit "Document.edit_nodes" (fn () => fold edit_nodes edits old_version); - - val nodes = nodes_of new_version; - val required = make_required nodes; - val required0 = make_required (nodes_of old_version); - val edited = fold (fn (name, _) => Symtab.update (name, ())) edits Symtab.empty; - - val updated = timeit "Document.update" (fn () => - nodes |> String_Graph.schedule - (fn deps => fn (name, node) => - (singleton o Future.forks) - {name = "Document.update", group = NONE, - deps = map (Future.task_of o #2) deps, pri = 1, interrupts = false} - (fn () => timeit ("Document.update " ^ name) (fn () => - let - val imports = map (apsnd Future.join) deps; - val imports_result_changed = exists (#4 o #1 o #2) imports; - val node_required = Symtab.defined required name; - in - if Symtab.defined edited name orelse visible_node node orelse - imports_result_changed orelse Symtab.defined required0 name <> node_required - then - let - val node0 = node_of old_version name; - val init = init_theory imports node; - val proper_init = - check_theory false name node andalso - forall (fn (name, (_, node)) => check_theory true name node) imports; - - val (print_execs, common, (still_visible, initial)) = - if imports_result_changed then (assign_update_empty, NONE, (true, true)) - else last_common state node_required node0 node; - val common_command_exec = the_default_entry node common; - - val (updated_execs, (command_id', (eval', _)), _) = - (print_execs, common_command_exec, if initial then SOME init else NONE) - |> (still_visible orelse node_required) ? 
- iterate_entries_after common - (fn ((prev, id), _) => fn res => - if not node_required andalso prev = visible_last node then NONE - else new_exec state node proper_init id res) node; - - val assigned_execs = - (node0, updated_execs) |-> iterate_entries_after common - (fn ((_, command_id0), exec0) => fn res => - if is_none exec0 then NONE - else if assign_update_defined updated_execs command_id0 then SOME res - else SOME (assign_update_new (command_id0, NONE) res)); - - val last_exec = - if command_id' = Document_ID.none then NONE else SOME command_id'; - val result = - if is_none last_exec orelse is_some (after_entry node last_exec) then NONE - else SOME eval'; - - val assign_update = assign_update_result assigned_execs; - val removed = maps (removed_execs node0) assign_update; - val _ = List.app Execution.cancel removed; - - val node' = node - |> assign_update_apply assigned_execs - |> set_result result; - val assigned_node = SOME (name, node'); - val result_changed = changed_result node0 node'; - in ((removed, assign_update, assigned_node, result_changed), node') end - else (([], [], NONE, false), node) - end))) - |> Future.joins |> map #1); - - val removed = maps #1 updated; - val assign_update = maps #2 updated; - val assigned_nodes = map_filter #3 updated; - - val state' = state - |> define_version new_version_id (fold put_node assigned_nodes new_version); - - in (removed, assign_update, state') end; - -end; - - - -(** global state **) - -val global_state = Synchronized.var "Document.global_state" init_state; - -fun state () = Synchronized.value global_state; -val change_state = Synchronized.change global_state; - -end; - diff --git a/core/Pure/PIDE/document.scala b/core/Pure/PIDE/document.scala deleted file mode 100644 index 3ad17093..00000000 --- a/core/Pure/PIDE/document.scala +++ /dev/null @@ -1,852 +0,0 @@ -/* Title: Pure/PIDE/document.scala - Author: Makarius - -Document as collection of named nodes, each consisting of an editable -list of commands, associated with asynchronous execution process. 
-*/ - -package isabelle - - -import scala.collection.mutable - - -object Document -{ - /** document structure **/ - - /* overlays -- print functions with arguments */ - - object Overlays - { - val empty = new Overlays(Map.empty) - } - - final class Overlays private(rep: Map[Node.Name, Node.Overlays]) - { - def apply(name: Document.Node.Name): Node.Overlays = - rep.getOrElse(name, Node.Overlays.empty) - - private def update(name: Node.Name, f: Node.Overlays => Node.Overlays): Overlays = - { - val node_overlays = f(apply(name)) - new Overlays(if (node_overlays.is_empty) rep - name else rep + (name -> node_overlays)) - } - - def insert(command: Command, fn: String, args: List[String]): Overlays = - update(command.node_name, _.insert(command, fn, args)) - - def remove(command: Command, fn: String, args: List[String]): Overlays = - update(command.node_name, _.remove(command, fn, args)) - - override def toString: String = rep.mkString("Overlays(", ",", ")") - } - - - /* document blobs: auxiliary files */ - - sealed case class Blob(bytes: Bytes, chunk: Symbol.Text_Chunk, changed: Boolean) - { - def unchanged: Blob = copy(changed = false) - } - - object Blobs - { - def apply(blobs: Map[Node.Name, Blob]): Blobs = new Blobs(blobs) - val empty: Blobs = apply(Map.empty) - } - - final class Blobs private(blobs: Map[Node.Name, Blob]) - { - def get(name: Node.Name): Option[Blob] = blobs.get(name) - - def changed(name: Node.Name): Boolean = - get(name) match { - case Some(blob) => blob.changed - case None => false - } - - override def toString: String = blobs.mkString("Blobs(", ",", ")") - } - - - /* document nodes: theories and auxiliary files */ - - type Edit[A, B] = (Node.Name, Node.Edit[A, B]) - type Edit_Text = Edit[Text.Edit, Text.Perspective] - type Edit_Command = Edit[Command.Edit, Command.Perspective] - - object Node - { - /* header and name */ - - sealed case class Header( - imports: List[Name], - keywords: Thy_Header.Keywords, - errors: List[String]) - { - def error(msg: String): Header = copy(errors = errors ::: List(msg)) - - def cat_errors(msg2: String): Header = - copy(errors = errors.map(msg1 => Library.cat_message(msg1, msg2))) - } - - val no_header = Header(Nil, Nil, Nil) - def bad_header(msg: String): Header = Header(Nil, Nil, List(msg)) - - object Name - { - val empty = Name("") - - object Ordering extends scala.math.Ordering[Name] - { - def compare(name1: Name, name2: Name): Int = name1.node compare name2.node - } - } - - sealed case class Name(node: String, master_dir: String = "", theory: String = "") - { - override def hashCode: Int = node.hashCode - override def equals(that: Any): Boolean = - that match { - case other: Name => node == other.node - case _ => false - } - - def is_theory: Boolean = !theory.isEmpty - override def toString: String = if (is_theory) theory else node - - def map(f: String => String): Name = copy(f(node), f(master_dir), theory) - } - - - /* node overlays */ - - object Overlays - { - val empty = new Overlays(Multi_Map.empty) - } - - final class Overlays private(rep: Multi_Map[Command, (String, List[String])]) - { - def commands: Set[Command] = rep.keySet - def is_empty: Boolean = rep.isEmpty - def dest: List[(Command, (String, List[String]))] = rep.iterator.toList - def insert(cmd: Command, fn: String, args: List[String]): Overlays = - new Overlays(rep.insert(cmd, (fn, args))) - def remove(cmd: Command, fn: String, args: List[String]): Overlays = - new Overlays(rep.remove(cmd, (fn, args))) - - override def toString: String = rep.mkString("Node.Overlays(", 
",", ")") - } - - - /* edits */ - - sealed abstract class Edit[A, B] - { - def foreach(f: A => Unit) - { - this match { - case Edits(es) => es.foreach(f) - case _ => - } - } - - def is_void: Boolean = - this match { - case Edits(Nil) => true - case _ => false - } - } - case class Clear[A, B]() extends Edit[A, B] - case class Blob[A, B](blob: Document.Blob) extends Edit[A, B] - - case class Edits[A, B](edits: List[A]) extends Edit[A, B] - case class Deps[A, B](header: Header) extends Edit[A, B] - case class Perspective[A, B](required: Boolean, visible: B, overlays: Overlays) extends Edit[A, B] - - - /* perspective */ - - type Perspective_Text = Perspective[Text.Edit, Text.Perspective] - type Perspective_Command = Perspective[Command.Edit, Command.Perspective] - - val no_perspective_text: Perspective_Text = - Perspective(false, Text.Perspective.empty, Overlays.empty) - - val no_perspective_command: Perspective_Command = - Perspective(false, Command.Perspective.empty, Overlays.empty) - - def is_no_perspective_command(perspective: Perspective_Command): Boolean = - !perspective.required && - perspective.visible.is_empty && - perspective.overlays.is_empty - - - /* commands */ - - object Commands - { - def apply(commands: Linear_Set[Command]): Commands = new Commands(commands) - val empty: Commands = apply(Linear_Set.empty) - - def starts(commands: Iterator[Command], offset: Text.Offset = 0) - : Iterator[(Command, Text.Offset)] = - { - var i = offset - for (command <- commands) yield { - val start = i - i += command.length - (command, start) - } - } - - private val block_size = 256 - } - - final class Commands private(val commands: Linear_Set[Command]) - { - lazy val load_commands: List[Command] = - commands.iterator.filter(cmd => !cmd.blobs.isEmpty).toList - - private lazy val full_index: (Array[(Command, Text.Offset)], Text.Range) = - { - val blocks = new mutable.ListBuffer[(Command, Text.Offset)] - var next_block = 0 - var last_stop = 0 - for ((command, start) <- Commands.starts(commands.iterator)) { - last_stop = start + command.length - while (last_stop + 1 > next_block) { - blocks += (command -> start) - next_block += Commands.block_size - } - } - (blocks.toArray, Text.Range(0, last_stop)) - } - - private def full_range: Text.Range = full_index._2 - - def iterator(i: Text.Offset = 0): Iterator[(Command, Text.Offset)] = - { - if (!commands.isEmpty && full_range.contains(i)) { - val (cmd0, start0) = full_index._1(i / Commands.block_size) - Node.Commands.starts(commands.iterator(cmd0), start0) dropWhile { - case (cmd, start) => start + cmd.length <= i } - } - else Iterator.empty - } - } - - val empty: Node = new Node() - } - - final class Node private( - val get_blob: Option[Document.Blob] = None, - val header: Node.Header = Node.no_header, - val perspective: Node.Perspective_Command = Node.no_perspective_command, - _commands: Node.Commands = Node.Commands.empty) - { - def is_empty: Boolean = - get_blob.isEmpty && - header == Node.no_header && - Node.is_no_perspective_command(perspective) && - commands.isEmpty - - def commands: Linear_Set[Command] = _commands.commands - def load_commands: List[Command] = _commands.load_commands - - def clear: Node = new Node(header = header) - - def init_blob(blob: Document.Blob): Node = new Node(Some(blob.unchanged)) - - def update_header(new_header: Node.Header): Node = - new Node(get_blob, new_header, perspective, _commands) - - def update_perspective(new_perspective: Node.Perspective_Command): Node = - new Node(get_blob, header, new_perspective, _commands) 
- - def same_perspective(other_perspective: Node.Perspective_Command): Boolean = - perspective.required == other_perspective.required && - perspective.visible.same(other_perspective.visible) && - perspective.overlays == other_perspective.overlays - - def update_commands(new_commands: Linear_Set[Command]): Node = - if (new_commands eq _commands.commands) this - else new Node(get_blob, header, perspective, Node.Commands(new_commands)) - - def command_iterator(i: Text.Offset = 0): Iterator[(Command, Text.Offset)] = - _commands.iterator(i) - - def command_iterator(range: Text.Range): Iterator[(Command, Text.Offset)] = - command_iterator(range.start) takeWhile { case (_, start) => start < range.stop } - - def command_start(cmd: Command): Option[Text.Offset] = - Node.Commands.starts(commands.iterator).find(_._1 == cmd).map(_._2) - } - - - /* development graph */ - - object Nodes - { - val empty: Nodes = new Nodes(Graph.empty(Node.Name.Ordering)) - } - - final class Nodes private(graph: Graph[Node.Name, Node]) - { - def apply(name: Node.Name): Node = - graph.default_node(name, Node.empty).get_node(name) - - def is_hidden(name: Node.Name): Boolean = - { - val graph1 = graph.default_node(name, Node.empty) - graph1.is_maximal(name) && graph1.get_node(name).is_empty - } - - def + (entry: (Node.Name, Node)): Nodes = - { - val (name, node) = entry - val imports = node.header.imports - val graph1 = - (graph.default_node(name, Node.empty) /: imports)((g, p) => g.default_node(p, Node.empty)) - val graph2 = (graph1 /: graph1.imm_preds(name))((g, dep) => g.del_edge(dep, name)) - val graph3 = (graph2 /: imports)((g, dep) => g.add_edge(dep, name)) - new Nodes( - if (graph3.is_maximal(name) && node.is_empty) graph3.del_node(name) - else graph3.map_node(name, _ => node) - ) - } - - def iterator: Iterator[(Node.Name, Node)] = - graph.iterator.map({ case (name, (node, _)) => (name, node) }) - - def load_commands(file_name: Node.Name): List[Command] = - (for { - (_, node) <- iterator - cmd <- node.load_commands.iterator - name <- cmd.blobs_names.iterator - if name == file_name - } yield cmd).toList - - def descendants(names: List[Node.Name]): List[Node.Name] = graph.all_succs(names) - def topological_order: List[Node.Name] = graph.topological_order - - override def toString: String = topological_order.mkString("Nodes(", ",", ")") - } - - - - /** versioning **/ - - /* particular document versions */ - - object Version - { - val init: Version = new Version() - - def make(syntax: Option[Prover.Syntax], nodes: Nodes): Version = - new Version(Document_ID.make(), syntax, nodes) - } - - final class Version private( - val id: Document_ID.Version = Document_ID.none, - val syntax: Option[Prover.Syntax] = None, - val nodes: Nodes = Nodes.empty) - { - override def toString: String = "Version(" + id + ")" - } - - - /* changes of plain text, eventually resulting in document edits */ - - object Change - { - val init: Change = new Change() - - def make(previous: Future[Version], edits: List[Edit_Text], version: Future[Version]): Change = - new Change(Some(previous), edits.reverse, version) - } - - final class Change private( - val previous: Option[Future[Version]] = Some(Future.value(Version.init)), - val rev_edits: List[Edit_Text] = Nil, - val version: Future[Version] = Future.value(Version.init)) - { - def is_finished: Boolean = - (previous match { case None => true case Some(future) => future.is_finished }) && - version.is_finished - - def truncate: Change = new Change(None, Nil, version) - } - - - /* history navigation */ - - 
object History - { - val init: History = new History() - } - - final class History private( - val undo_list: List[Change] = List(Change.init)) // non-empty list - { - def tip: Change = undo_list.head - def + (change: Change): History = new History(change :: undo_list) - - def prune(check: Change => Boolean, retain: Int): Option[(List[Change], History)] = - { - val n = undo_list.iterator.zipWithIndex.find(p => check(p._1)).get._2 + 1 - val (retained, dropped) = undo_list.splitAt(n max retain) - - retained.splitAt(retained.length - 1) match { - case (prefix, List(last)) => Some(dropped, new History(prefix ::: List(last.truncate))) - case _ => None - } - } - } - - - /* snapshot */ - - object Snapshot - { - val init = State.init.snapshot() - } - - abstract class Snapshot - { - val state: State - val version: Version - val is_outdated: Boolean - - def convert(i: Text.Offset): Text.Offset - def revert(i: Text.Offset): Text.Offset - def convert(range: Text.Range): Text.Range - def revert(range: Text.Range): Text.Range - - val node_name: Node.Name - val node: Node - val load_commands: List[Command] - def is_loaded: Boolean - def eq_content(other: Snapshot): Boolean - - def cumulate[A]( - range: Text.Range, - info: A, - elements: Markup.Elements, - result: List[Command.State] => (A, Text.Markup) => Option[A], - status: Boolean = false): List[Text.Info[A]] - - def select[A]( - range: Text.Range, - elements: Markup.Elements, - result: List[Command.State] => Text.Markup => Option[A], - status: Boolean = false): List[Text.Info[A]] - } - - - - /** global state -- document structure, execution process, editing history **/ - - type Assign_Update = - List[(Document_ID.Command, List[Document_ID.Exec])] // update of exec state assignment - - object State - { - class Fail(state: State) extends Exception - - object Assignment - { - val init: Assignment = new Assignment() - } - - final class Assignment private( - val command_execs: Map[Document_ID.Command, List[Document_ID.Exec]] = Map.empty, - val is_finished: Boolean = false) - { - def check_finished: Assignment = { require(is_finished); this } - def unfinished: Assignment = new Assignment(command_execs, false) - - def assign(update: Assign_Update): Assignment = - { - require(!is_finished) - val command_execs1 = - (command_execs /: update) { - case (res, (command_id, exec_ids)) => - if (exec_ids.isEmpty) res - command_id - else res + (command_id -> exec_ids) - } - new Assignment(command_execs1, true) - } - } - - val init: State = - State().define_version(Version.init, Assignment.init).assign(Version.init.id, Nil)._2 - } - - final case class State private( - /*reachable versions*/ - val versions: Map[Document_ID.Version, Version] = Map.empty, - /*inlined auxiliary files*/ - val blobs: Set[SHA1.Digest] = Set.empty, - /*static markup from define_command*/ - val commands: Map[Document_ID.Command, Command.State] = Map.empty, - /*dynamic markup from execution*/ - val execs: Map[Document_ID.Exec, Command.State] = Map.empty, - /*command-exec assignment for each version*/ - val assignments: Map[Document_ID.Version, State.Assignment] = Map.empty, - /*commands with markup produced by other commands (imm_succs)*/ - val commands_redirection: Graph[Document_ID.Command, Unit] = Graph.long, - /*explicit (linear) history*/ - val history: History = History.init, - /*intermediate state between remove_versions/removed_versions*/ - val removing_versions: Boolean = false) - { - private def fail[A]: A = throw new State.Fail(this) - - def define_version(version: Version, assignment: 
State.Assignment): State = - { - val id = version.id - copy(versions = versions + (id -> version), - assignments = assignments + (id -> assignment.unfinished)) - } - - def define_blob(digest: SHA1.Digest): State = copy(blobs = blobs + digest) - def defined_blob(digest: SHA1.Digest): Boolean = blobs.contains(digest) - - def define_command(command: Command): State = - { - val id = command.id - copy(commands = commands + (id -> command.init_state)) - } - - def defined_command(id: Document_ID.Command): Boolean = commands.isDefinedAt(id) - - def find_command(version: Version, id: Document_ID.Generic): Option[(Node, Command)] = - commands.get(id) orElse execs.get(id) match { - case None => None - case Some(st) => - val command = st.command - val node = version.nodes(command.node_name) - if (node.commands.contains(command)) Some((node, command)) else None - } - - def the_version(id: Document_ID.Version): Version = versions.getOrElse(id, fail) - def the_static_state(id: Document_ID.Command): Command.State = commands.getOrElse(id, fail) - def the_dynamic_state(id: Document_ID.Exec): Command.State = execs.getOrElse(id, fail) - def the_assignment(version: Version): State.Assignment = assignments.getOrElse(version.id, fail) - - private def self_id(st: Command.State)(id: Document_ID.Generic): Boolean = - id == st.command.id || - (execs.get(id) match { case Some(st1) => st1.command.id == st.command.id case None => false }) - - private def other_id(id: Document_ID.Generic) - : Option[(Symbol.Text_Chunk.Id, Symbol.Text_Chunk)] = None - /* FIXME - (execs.get(id) orElse commands.get(id)).map(st => - ((Symbol.Text_Chunk.Id(st.command.id), st.command.chunk))) - */ - - private def redirection(st: Command.State): Graph[Document_ID.Command, Unit] = - (commands_redirection /: st.markups.redirection_iterator)({ case (graph, id) => - graph.default_node(id, ()).default_node(st.command.id, ()).add_edge(id, st.command.id) }) - - def accumulate(id: Document_ID.Generic, message: XML.Elem): (Command.State, State) = - { - execs.get(id) match { - case Some(st) => - val new_st = st.accumulate(self_id(st), other_id _, message) - val execs1 = execs + (id -> new_st) - (new_st, copy(execs = execs1, commands_redirection = redirection(new_st))) - case None => - commands.get(id) match { - case Some(st) => - val new_st = st.accumulate(self_id(st), other_id _, message) - val commands1 = commands + (id -> new_st) - (new_st, copy(commands = commands1, commands_redirection = redirection(new_st))) - case None => fail - } - } - } - - def assign(id: Document_ID.Version, update: Assign_Update): (List[Command], State) = - { - val version = the_version(id) - - def upd(exec_id: Document_ID.Exec, st: Command.State) - : Option[(Document_ID.Exec, Command.State)] = - if (execs.isDefinedAt(exec_id)) None else Some(exec_id -> st) - - val (changed_commands, new_execs) = - ((Nil: List[Command], execs) /: update) { - case ((commands1, execs1), (command_id, exec)) => - val st = the_static_state(command_id) - val command = st.command - val commands2 = command :: commands1 - val execs2 = - exec match { - case Nil => execs1 - case eval_id :: print_ids => - execs1 ++ upd(eval_id, st) ++ - (for (id <- print_ids; up <- upd(id, command.empty_state)) yield up) - } - (commands2, execs2) - } - val new_assignment = the_assignment(version).assign(update) - val new_state = copy(assignments = assignments + (id -> new_assignment), execs = new_execs) - - (changed_commands, new_state) - } - - def is_assigned(version: Version): Boolean = - assignments.get(version.id) 
match { - case Some(assgn) => assgn.is_finished - case None => false - } - - def is_stable(change: Change): Boolean = - change.is_finished && is_assigned(change.version.get_finished) - - def recent_finished: Change = history.undo_list.find(_.is_finished) getOrElse fail - def recent_stable: Change = history.undo_list.find(is_stable) getOrElse fail - def tip_stable: Boolean = is_stable(history.tip) - def tip_version: Version = history.tip.version.get_finished - - def continue_history( - previous: Future[Version], - edits: List[Edit_Text], - version: Future[Version]): State = - { - val change = Change.make(previous, edits, version) - copy(history = history + change) - } - - def remove_versions(retain: Int = 0): (List[Version], State) = - { - history.prune(is_stable, retain) match { - case Some((dropped, history1)) => - val old_versions = dropped.map(change => change.version.get_finished) - val removing = !old_versions.isEmpty - val state1 = copy(history = history1, removing_versions = removing) - (old_versions, state1) - case None => fail - } - } - - def removed_versions(removed: List[Document_ID.Version]): State = - { - val versions1 = versions -- removed - val assignments1 = assignments -- removed - var blobs1 = Set.empty[SHA1.Digest] - var commands1 = Map.empty[Document_ID.Command, Command.State] - var execs1 = Map.empty[Document_ID.Exec, Command.State] - for { - (version_id, version) <- versions1.iterator - command_execs = assignments1(version_id).command_execs - (_, node) <- version.nodes.iterator - command <- node.commands.iterator - } { - for ((_, digest) <- command.blobs_defined; if !blobs1.contains(digest)) - blobs1 += digest - - if (!commands1.isDefinedAt(command.id)) - commands.get(command.id).foreach(st => commands1 += (command.id -> st)) - - for (exec_id <- command_execs.getOrElse(command.id, Nil)) { - if (!execs1.isDefinedAt(exec_id)) - execs.get(exec_id).foreach(st => execs1 += (exec_id -> st)) - } - } - copy( - versions = versions1, - blobs = blobs1, - commands = commands1, - execs = execs1, - commands_redirection = commands_redirection.restrict(commands1.isDefinedAt(_)), - assignments = assignments1, - removing_versions = false) - } - - private def command_states_self(version: Version, command: Command) - : List[(Document_ID.Generic, Command.State)] = - { - require(is_assigned(version)) - try { - the_assignment(version).check_finished.command_execs.getOrElse(command.id, Nil) - .map(id => id -> the_dynamic_state(id)) match { - case Nil => fail - case res => res - } - } - catch { - case _: State.Fail => - try { List(command.id -> the_static_state(command.id)) } - catch { case _: State.Fail => List(command.id -> command.init_state) } - } - } - - def command_states(version: Version, command: Command): List[Command.State] = - { - val self = command_states_self(version, command) - val others = - if (commands_redirection.defined(command.id)) { - (for { - command_id <- commands_redirection.imm_succs(command.id).iterator - (id, st) <- command_states_self(version, the_static_state(command_id).command) - if !self.exists(_._1 == id) - } yield (id, st)).toMap.valuesIterator.toList - } - else Nil - self.map(_._2) ::: others.flatMap(_.redirect(command)) - } - - def command_results(version: Version, command: Command): Command.Results = - Command.State.merge_results(command_states(version, command)) - - def command_markup(version: Version, command: Command, index: Command.Markup_Index, - range: Text.Range, elements: Markup.Elements): Markup_Tree = - 
Command.State.merge_markup(command_states(version, command), index, range, elements) - - def markup_to_XML(version: Version, node: Node, elements: Markup.Elements): XML.Body = - (for { - command <- node.commands.iterator - markup = - command_markup(version, command, Command.Markup_Index.markup, command.range, elements) - tree <- markup.to_XML(command.range, command.source, elements) - } yield tree).toList - - // persistent user-view - def snapshot(name: Node.Name = Node.Name.empty, pending_edits: List[Text.Edit] = Nil) - : Snapshot = - { - val stable = recent_stable - val latest = history.tip - - - /* pending edits and unstable changes */ - - val rev_pending_changes = - for { - change <- history.undo_list.takeWhile(_ != stable) - (a, edits) <- change.rev_edits - if a == name - } yield edits - - val is_cleared = rev_pending_changes.exists({ case Node.Clear() => true case _ => false }) - val edits = - if (is_cleared) Nil - else - (pending_edits /: rev_pending_changes)({ - case (edits, Node.Edits(es)) => es ::: edits - case (edits, _) => edits - }) - lazy val reverse_edits = edits.reverse - - new Snapshot - { - /* global information */ - - val state = State.this - val version = stable.version.get_finished - val is_outdated = !(pending_edits.isEmpty && latest == stable) - - - /* local node content */ - - def convert(offset: Text.Offset) = - if (is_cleared) 0 else (offset /: edits)((i, edit) => edit.convert(i)) - def revert(offset: Text.Offset) = - if (is_cleared) 0 else (offset /: reverse_edits)((i, edit) => edit.revert(i)) - - def convert(range: Text.Range) = range.map(convert(_)) - def revert(range: Text.Range) = range.map(revert(_)) - - val node_name = name - val node = version.nodes(name) - - val load_commands: List[Command] = - if (node_name.is_theory) Nil - else version.nodes.load_commands(node_name) - - val is_loaded: Boolean = node_name.is_theory || !load_commands.isEmpty - - def eq_content(other: Snapshot): Boolean = - { - def eq_commands(commands: (Command, Command)): Boolean = - { - val states1 = state.command_states(version, commands._1) - val states2 = other.state.command_states(other.version, commands._2) - states1.length == states2.length && - (states1 zip states2).forall({ case (st1, st2) => st1 eq_content st2 }) - } - - !is_outdated && !other.is_outdated && - node.commands.size == other.node.commands.size && - (node.commands.iterator zip other.node.commands.iterator).forall(eq_commands) && - load_commands.length == other.load_commands.length && - (load_commands zip other.load_commands).forall(eq_commands) - } - - - /* cumulate markup */ - - def cumulate[A]( - range: Text.Range, - info: A, - elements: Markup.Elements, - result: List[Command.State] => (A, Text.Markup) => Option[A], - status: Boolean = false): List[Text.Info[A]] = - { - val former_range = revert(range).inflate_singularity - val (chunk_name, command_iterator) = - load_commands match { - case command :: _ => (Symbol.Text_Chunk.File(node_name.node), Iterator((command, 0))) - case _ => (Symbol.Text_Chunk.Default, node.command_iterator(former_range)) - } - val markup_index = Command.Markup_Index(status, chunk_name) - (for { - (command, command_start) <- command_iterator - chunk <- command.chunks.get(chunk_name).iterator - states = state.command_states(version, command) - res = result(states) - markup_range <- (former_range - command_start).try_restrict(chunk.range).iterator - markup = Command.State.merge_markup(states, markup_index, markup_range, elements) - Text.Info(r0, a) <- markup.cumulate[A](markup_range, info, 
elements, - { - case (a, Text.Info(r0, b)) => res(a, Text.Info(convert(r0 + command_start), b)) - }).iterator - r1 <- convert(r0 + command_start).try_restrict(range).iterator - } yield Text.Info(r1, a)).toList - } - - def select[A]( - range: Text.Range, - elements: Markup.Elements, - result: List[Command.State] => Text.Markup => Option[A], - status: Boolean = false): List[Text.Info[A]] = - { - def result1(states: List[Command.State]): (Option[A], Text.Markup) => Option[Option[A]] = - { - val res = result(states) - (_: Option[A], x: Text.Markup) => - res(x) match { - case None => None - case some => Some(some) - } - } - for (Text.Info(r, Some(x)) <- cumulate(range, None, elements, result1 _, status)) - yield Text.Info(r, x) - } - - - /* output */ - - override def toString: String = - "Snapshot(node = " + node_name.node + ", version = " + version.id + - (if (is_outdated) ", outdated" else "") + ")" - } - } - } -} - diff --git a/core/Pure/PIDE/document_id.ML b/core/Pure/PIDE/document_id.ML deleted file mode 100644 index 775f18f4..00000000 --- a/core/Pure/PIDE/document_id.ML +++ /dev/null @@ -1,38 +0,0 @@ -(* Title: Pure/PIDE/document_id.ML - Author: Makarius - -Unique identifiers for document structure. - -NB: ML ticks forwards > 0, JVM ticks backwards < 0. -*) - -signature DOCUMENT_ID = -sig - type generic = int - type version = generic - type command = generic - type exec = generic - type execution = generic - val none: generic - val make: unit -> generic - val parse: string -> generic - val print: generic -> string -end; - -structure Document_ID: DOCUMENT_ID = -struct - -type generic = int; -type version = generic; -type command = generic; -type exec = generic; -type execution = generic; - -val none = 0; -val make = Counter.make (); - -val parse = Markup.parse_int; -val print = Markup.print_int; - -end; - diff --git a/core/Pure/PIDE/document_id.scala b/core/Pure/PIDE/document_id.scala deleted file mode 100644 index 743a9fb0..00000000 --- a/core/Pure/PIDE/document_id.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* Title: Pure/PIDE/document_id.scala - Module: PIDE - Author: Makarius - -Unique identifiers for document structure. - -NB: ML ticks forwards > 0, JVM ticks backwards < 0. -*/ - -package isabelle - - -object Document_ID -{ - type Generic = Long - type Version = Generic - type Command = Generic - type Exec = Generic - - val none: Generic = 0 - val make = Counter.make() - - def apply(id: Generic): String = Properties.Value.Long.apply(id) - def unapply(s: String): Option[Generic] = Properties.Value.Long.unapply(s) -} - diff --git a/core/Pure/PIDE/editor.scala b/core/Pure/PIDE/editor.scala deleted file mode 100644 index 5e1b5ad0..00000000 --- a/core/Pure/PIDE/editor.scala +++ /dev/null @@ -1,32 +0,0 @@ -/* Title: Pure/PIDE/editor.scala - Author: Makarius - -General editor operations. 
-*/ - -package isabelle - - -abstract class Editor[Context] -{ - def session: Session - def flush(): Unit - def invoke(): Unit - def current_context: Context - def current_node(context: Context): Option[Document.Node.Name] - def current_node_snapshot(context: Context): Option[Document.Snapshot] - def node_snapshot(name: Document.Node.Name): Document.Snapshot - def current_command(context: Context, snapshot: Document.Snapshot): Option[Command] - - def node_overlays(name: Document.Node.Name): Document.Node.Overlays - def insert_overlay(command: Command, fn: String, args: List[String]): Unit - def remove_overlay(command: Command, fn: String, args: List[String]): Unit - - abstract class Hyperlink { - def external: Boolean - def follow(context: Context): Unit - } - def hyperlink_command( - snapshot: Document.Snapshot, command: Command, offset: Symbol.Offset = 0): Option[Hyperlink] -} - diff --git a/core/Pure/PIDE/execution.ML b/core/Pure/PIDE/execution.ML deleted file mode 100644 index a6a80a9d..00000000 --- a/core/Pure/PIDE/execution.ML +++ /dev/null @@ -1,188 +0,0 @@ -(* Title: Pure/PIDE/execution.ML - Author: Makarius - -Global management of execution. Unique running execution serves as -barrier for further exploration of forked command execs. -*) - -signature EXECUTION = -sig - val start: unit -> Document_ID.execution - val discontinue: unit -> unit - val is_running: Document_ID.execution -> bool - val is_running_exec: Document_ID.exec -> bool - val running: Document_ID.execution -> Document_ID.exec -> Future.group list -> bool - val peek: Document_ID.exec -> Future.group list - val cancel: Document_ID.exec -> unit - val terminate: Document_ID.exec -> unit - type params = {name: string, pos: Position.T, pri: int} - val fork: params -> (unit -> 'a) -> 'a future - val print: params -> (unit -> unit) -> unit - val fork_prints: Document_ID.exec -> unit - val purge: Document_ID.exec list -> unit - val reset: unit -> Future.group list - val shutdown: unit -> unit -end; - -structure Execution: EXECUTION = -struct - -(* global state *) - -type print = {name: string, pri: int, body: unit -> unit}; -type exec_state = Future.group list * print list; (*active forks, prints*) -type state = - Document_ID.execution * (*overall document execution*) - exec_state Inttab.table; (*running command execs*) - -val init_state: state = (Document_ID.none, Inttab.make [(Document_ID.none, ([], []))]); -val state = Synchronized.var "Execution.state" init_state; - -fun get_state () = Synchronized.value state; -fun change_state_result f = Synchronized.change_result state f; -fun change_state f = Synchronized.change state f; - -fun unregistered exec_id = "Unregistered execution: " ^ Document_ID.print exec_id; - - -(* unique running execution *) - -fun start () = - let - val execution_id = Document_ID.make (); - val _ = change_state (apfst (K execution_id)); - in execution_id end; - -fun discontinue () = change_state (apfst (K Document_ID.none)); - -fun is_running execution_id = execution_id = #1 (get_state ()); - - -(* execs *) - -fun is_running_exec exec_id = - Inttab.defined (#2 (get_state ())) exec_id; - -fun running execution_id exec_id groups = - change_state_result (fn (execution_id', execs) => - let - val continue = execution_id = execution_id'; - val execs' = - if continue then - Inttab.update_new (exec_id, (groups, [])) execs - handle Inttab.DUP dup => - raise Fail ("Execution already registered: " ^ Document_ID.print dup) - else execs; - in (continue, (execution_id', execs')) end); - -fun peek exec_id = - (case 
Inttab.lookup (#2 (get_state ())) exec_id of - SOME (groups, _) => groups - | NONE => []); - -fun cancel exec_id = List.app Future.cancel_group (peek exec_id); -fun terminate exec_id = List.app Future.terminate (peek exec_id); - - -(* fork *) - -fun status task markups = - let - val props = - if ! Multithreading.trace >= 2 - then [(Markup.taskN, Task_Queue.str_of_task task)] else []; - in Output.status (implode (map (Markup.markup_only o Markup.properties props) markups)) end; - -type params = {name: string, pos: Position.T, pri: int}; - -fun fork ({name, pos, pri}: params) e = - uninterruptible (fn _ => Position.setmp_thread_data pos (fn () => - let - val exec_id = the_default 0 (Position.parse_id pos); - val group = Future.worker_subgroup (); - val _ = change_state (apsnd (fn execs => - (case Inttab.lookup execs exec_id of - SOME (groups, prints) => - Inttab.update (exec_id, (group :: groups, prints)) execs - | NONE => raise Fail (unregistered exec_id)))); - - val future = - (singleton o Future.forks) - {name = name, group = SOME group, deps = [], pri = pri, interrupts = false} - (fn () => - let - val task = the (Future.worker_task ()); - val _ = status task [Markup.running]; - val result = - Exn.capture (Future.interruptible_task e) () - |> Future.identify_result pos; - val _ = status task [Markup.joined]; - val _ = - (case result of - Exn.Exn exn => - (status task [Markup.failed]; - status task [Markup.finished]; - Output.report [Markup.markup_only Markup.bad]; - if exec_id = 0 then () - else List.app (Future.error_message pos) (Runtime.exn_messages_ids exn)) - | Exn.Res _ => - status task [Markup.finished]) - in Exn.release result end); - - val _ = status (Future.task_of future) [Markup.forked]; - in future end)) (); - - -(* print *) - -fun print ({name, pos, pri}: params) e = - change_state (apsnd (fn execs => - let - val exec_id = the_default 0 (Position.parse_id pos); - val print = {name = name, pri = pri, body = e}; - in - (case Inttab.lookup execs exec_id of - SOME (groups, prints) => Inttab.update (exec_id, (groups, print :: prints)) execs - | NONE => raise Fail (unregistered exec_id)) - end)); - -fun fork_prints exec_id = - (case Inttab.lookup (#2 (get_state ())) exec_id of - SOME (_, prints) => - if null prints orelse null (tl prints) orelse not (Multithreading.enabled ()) - then List.app (fn {body, ...} => body ()) (rev prints) - else - let val pos = Position.thread_data () in - List.app (fn {name, pri, body} => - ignore (fork {name = name, pos = pos, pri = pri} body)) (rev prints) - end - | NONE => raise Fail (unregistered exec_id)); - - -(* cleanup *) - -fun purge exec_ids = - (change_state o apsnd) (fn execs => - let - val execs' = fold Inttab.delete_safe exec_ids execs; - val () = - (execs', ()) |-> Inttab.fold (fn (exec_id, (groups, _)) => fn () => - if Inttab.defined execs' exec_id then () - else groups |> List.app (fn group => - if Task_Queue.is_canceled group then () - else raise Fail ("Attempt to purge valid execution: " ^ Document_ID.print exec_id))); - in execs' end); - -fun reset () = - change_state_result (fn (_, execs) => - let val groups = Inttab.fold (append o #1 o #2) execs [] - in (groups, init_state) end); - -fun shutdown () = - (Future.shutdown (); - (case maps Task_Queue.group_status (reset ()) of - [] => () - | exns => raise Par_Exn.make exns)); - -end; - diff --git a/core/Pure/PIDE/markup.ML b/core/Pure/PIDE/markup.ML deleted file mode 100644 index 3c0b7faf..00000000 --- a/core/Pure/PIDE/markup.ML +++ /dev/null @@ -1,636 +0,0 @@ -(* Title: 
Pure/PIDE/markup.ML - Author: Makarius - -Quasi-abstract markup elements. -*) - -signature MARKUP = -sig - val parse_bool: string -> bool - val print_bool: bool -> string - val parse_int: string -> int - val print_int: int -> string - val parse_real: string -> real - val print_real: real -> string - type T = string * Properties.T - val empty: T - val is_empty: T -> bool - val properties: Properties.T -> T -> T - val nameN: string - val name: string -> T -> T - val kindN: string - val instanceN: string - val languageN: string - val symbolsN: string - val delimitedN: string - val is_delimited: Properties.T -> bool - val language: {name: string, symbols: bool, antiquotes: bool, delimited: bool} -> T - val language': {name: string, symbols: bool, antiquotes: bool} -> bool -> T - val language_method: T - val language_attribute: T - val language_sort: bool -> T - val language_type: bool -> T - val language_term: bool -> T - val language_prop: bool -> T - val language_ML: bool -> T - val language_SML: bool -> T - val language_document: bool -> T - val language_antiquotation: T - val language_text: bool -> T - val language_rail: T - val language_path: T - val bindingN: string val binding: T - val entityN: string val entity: string -> string -> T - val get_entity_kind: T -> string option - val defN: string - val refN: string - val completionN: string val completion: T - val no_completionN: string val no_completion: T - val lineN: string - val offsetN: string - val end_offsetN: string - val fileN: string - val idN: string - val position_properties': string list - val position_properties: string list - val positionN: string val position: T - val pathN: string val path: string -> T - val urlN: string val url: string -> T - val indentN: string - val blockN: string val block: int -> T - val widthN: string - val breakN: string val break: int -> T - val fbreakN: string val fbreak: T - val itemN: string val item: T - val wordsN: string val words: T - val hiddenN: string val hidden: T - val system_optionN: string - val theoryN: string - val classN: string - val type_nameN: string - val constantN: string - val fixedN: string val fixed: string -> T - val caseN: string val case_: string -> T - val dynamic_factN: string val dynamic_fact: string -> T - val tfreeN: string val tfree: T - val tvarN: string val tvar: T - val freeN: string val free: T - val skolemN: string val skolem: T - val boundN: string val bound: T - val varN: string val var: T - val numeralN: string val numeral: T - val literalN: string val literal: T - val delimiterN: string val delimiter: T - val inner_stringN: string val inner_string: T - val inner_cartoucheN: string val inner_cartouche: T - val inner_commentN: string val inner_comment: T - val token_rangeN: string val token_range: T - val sortingN: string val sorting: T - val typingN: string val typing: T - val ML_keyword1N: string val ML_keyword1: T - val ML_keyword2N: string val ML_keyword2: T - val ML_keyword3N: string val ML_keyword3: T - val ML_delimiterN: string val ML_delimiter: T - val ML_tvarN: string val ML_tvar: T - val ML_numeralN: string val ML_numeral: T - val ML_charN: string val ML_char: T - val ML_stringN: string val ML_string: T - val ML_commentN: string val ML_comment: T - val SML_stringN: string val SML_string: T - val SML_commentN: string val SML_comment: T - val ML_defN: string - val ML_openN: string - val ML_structureN: string - val ML_typingN: string val ML_typing: T - val antiquotedN: string val antiquoted: T - val antiquoteN: string val antiquote: T - val 
ML_antiquotationN: string - val document_antiquotationN: string - val document_antiquotation_optionN: string - val paragraphN: string val paragraph: T - val text_foldN: string val text_fold: T - val commandN: string val command: T - val stringN: string val string: T - val altstringN: string val altstring: T - val verbatimN: string val verbatim: T - val cartoucheN: string val cartouche: T - val commentN: string val comment: T - val controlN: string val control: T - val tokenN: string val token: bool -> Properties.T -> T - val keyword1N: string val keyword1: T - val keyword2N: string val keyword2: T - val keyword3N: string val keyword3: T - val quasi_keywordN: string val quasi_keyword: T - val improperN: string val improper: T - val operatorN: string val operator: T - val elapsedN: string - val cpuN: string - val gcN: string - val timing_properties: {elapsed: Time.time, cpu: Time.time, gc: Time.time} -> Properties.T - val parse_timing_properties: Properties.T -> {elapsed: Time.time, cpu: Time.time, gc: Time.time} - val command_timingN: string - val command_timing_properties: - {file: string, offset: int, name: string} -> Time.time -> Properties.T - val parse_command_timing_properties: - Properties.T -> ({file: string, offset: int, name: string} * Time.time) option - val timingN: string val timing: {elapsed: Time.time, cpu: Time.time, gc: Time.time} -> T - val subgoalsN: string - val proof_stateN: string val proof_state: int -> T - val stateN: string val state: T - val goalN: string val goal: T - val subgoalN: string val subgoal: string -> T - val taskN: string - val acceptedN: string val accepted: T - val forkedN: string val forked: T - val joinedN: string val joined: T - val runningN: string val running: T - val finishedN: string val finished: T - val failedN: string val failed: T - val serialN: string - val serial_properties: int -> Properties.T - val exec_idN: string - val initN: string - val statusN: string - val resultN: string - val writelnN: string - val tracingN: string - val warningN: string - val errorN: string - val systemN: string - val protocolN: string - val legacyN: string val legacy: T - val promptN: string val prompt: T - val reportN: string val report: T - val no_reportN: string val no_report: T - val badN: string val bad: T - val intensifyN: string val intensify: T - val informationN: string val information: T - val browserN: string - val graphviewN: string - val sendbackN: string - val paddingN: string - val padding_line: Properties.entry - val padding_command: Properties.entry - val dialogN: string val dialog: serial -> string -> T - val functionN: string - val assign_update: Properties.T - val removed_versions: Properties.T - val protocol_handler: string -> Properties.T - val invoke_scala: string -> string -> Properties.T - val cancel_scala: string -> Properties.T - val ML_statistics: Properties.entry - val task_statistics: Properties.entry - val command_timing: Properties.entry - val loading_theory: string -> Properties.T - val dest_loading_theory: Properties.T -> string option - val use_theories_result: string -> bool -> Properties.T - val print_operationsN: string - val print_operations: Properties.T - val simp_trace_panelN: string - val simp_trace_logN: string - val simp_trace_stepN: string - val simp_trace_recurseN: string - val simp_trace_hintN: string - val simp_trace_ignoreN: string - val simp_trace_cancel: serial -> Properties.T - val no_output: Output.output * Output.output - val default_output: T -> Output.output * Output.output - val add_mode: string -> (T 
-> Output.output * Output.output) -> unit - val output: T -> Output.output * Output.output - val enclose: T -> Output.output -> Output.output - val markup: T -> string -> string - val markup_only: T -> string - val markup_report: string -> string -end; - -structure Markup: MARKUP = -struct - -(** markup elements **) - -(* misc values *) - -fun parse_bool "true" = true - | parse_bool "false" = false - | parse_bool s = raise Fail ("Bad boolean: " ^ quote s); - -val print_bool = Bool.toString; - -fun parse_int s = - let val i = Int.fromString s in - if is_none i orelse String.isPrefix "~" s - then raise Fail ("Bad integer: " ^ quote s) - else the i - end; - -val print_int = signed_string_of_int; - -fun parse_real s = - (case Real.fromString s of - SOME x => x - | NONE => raise Fail ("Bad real: " ^ quote s)); - -fun print_real x = - let val s = signed_string_of_real x in - (case space_explode "." s of - [a, b] => if forall_string (fn c => c = "0") b then a else s - | _ => s) - end; - - -(* basic markup *) - -type T = string * Properties.T; - -val empty = ("", []); - -fun is_empty ("", _) = true - | is_empty _ = false; - - -fun properties more_props ((elem, props): T) = - (elem, fold_rev Properties.put more_props props); - -fun markup_elem name = (name, (name, []): T); -fun markup_string name prop = (name, fn s => (name, [(prop, s)]): T); -fun markup_int name prop = (name, fn i => (name, [(prop, print_int i)]): T); - - -(* misc properties *) - -val nameN = "name"; -fun name a = properties [(nameN, a)]; - -val kindN = "kind"; - -val instanceN = "instance"; - - -(* embedded languages *) - -val languageN = "language"; -val symbolsN = "symbols"; -val antiquotesN = "antiquotes"; -val delimitedN = "delimited" - -fun is_delimited props = - Properties.get props delimitedN = SOME "true"; - -fun language {name, symbols, antiquotes, delimited} = - (languageN, - [(nameN, name), - (symbolsN, print_bool symbols), - (antiquotesN, print_bool antiquotes), - (delimitedN, print_bool delimited)]); - -fun language' {name, symbols, antiquotes} delimited = - language {name = name, symbols = symbols, antiquotes = antiquotes, delimited = delimited}; - -val language_method = - language {name = "method", symbols = true, antiquotes = false, delimited = false}; -val language_attribute = - language {name = "attribute", symbols = true, antiquotes = false, delimited = false}; -val language_sort = language' {name = "sort", symbols = true, antiquotes = false}; -val language_type = language' {name = "type", symbols = true, antiquotes = false}; -val language_term = language' {name = "term", symbols = true, antiquotes = false}; -val language_prop = language' {name = "prop", symbols = true, antiquotes = false}; -val language_ML = language' {name = "ML", symbols = false, antiquotes = true}; -val language_SML = language' {name = "SML", symbols = false, antiquotes = false}; -val language_document = language' {name = "document", symbols = false, antiquotes = true}; -val language_antiquotation = - language {name = "antiquotation", symbols = true, antiquotes = false, delimited = true}; -val language_text = language' {name = "text", symbols = true, antiquotes = false}; -val language_rail = language {name = "rail", symbols = true, antiquotes = true, delimited = true}; -val language_path = language {name = "path", symbols = false, antiquotes = false, delimited = true}; - - -(* formal entities *) - -val (bindingN, binding) = markup_elem "binding"; - -val entityN = "entity"; -fun entity kind name = (entityN, [(nameN, name), (kindN, kind)]); - 
-fun get_entity_kind (name, props) = - if name = entityN then AList.lookup (op =) props kindN - else NONE; - -val defN = "def"; -val refN = "ref"; - - -(* completion *) - -val (completionN, completion) = markup_elem "completion"; -val (no_completionN, no_completion) = markup_elem "no_completion"; - - -(* position *) - -val lineN = "line"; -val offsetN = "offset"; -val end_offsetN = "end_offset"; -val fileN = "file"; -val idN = "id"; - -val position_properties' = [fileN, idN]; -val position_properties = [lineN, offsetN, end_offsetN] @ position_properties'; - -val (positionN, position) = markup_elem "position"; - - -(* external resources *) - -val (pathN, path) = markup_string "path" nameN; -val (urlN, url) = markup_string "url" nameN; - - -(* pretty printing *) - -val indentN = "indent"; -val (blockN, block) = markup_int "block" indentN; - -val widthN = "width"; -val (breakN, break) = markup_int "break" widthN; - -val (fbreakN, fbreak) = markup_elem "fbreak"; - -val (itemN, item) = markup_elem "item"; - - -(* text properties *) - -val (wordsN, words) = markup_elem "words"; - -val (hiddenN, hidden) = markup_elem "hidden"; - - -(* formal entities *) - -val system_optionN = "system_option"; - -val theoryN = "theory"; -val classN = "class"; -val type_nameN = "type_name"; -val constantN = "constant"; - -val (fixedN, fixed) = markup_string "fixed" nameN; -val (caseN, case_) = markup_string "case" nameN; -val (dynamic_factN, dynamic_fact) = markup_string "dynamic_fact" nameN; - - -(* inner syntax *) - -val (tfreeN, tfree) = markup_elem "tfree"; -val (tvarN, tvar) = markup_elem "tvar"; -val (freeN, free) = markup_elem "free"; -val (skolemN, skolem) = markup_elem "skolem"; -val (boundN, bound) = markup_elem "bound"; -val (varN, var) = markup_elem "var"; -val (numeralN, numeral) = markup_elem "numeral"; -val (literalN, literal) = markup_elem "literal"; -val (delimiterN, delimiter) = markup_elem "delimiter"; -val (inner_stringN, inner_string) = markup_elem "inner_string"; -val (inner_cartoucheN, inner_cartouche) = markup_elem "inner_cartouche"; -val (inner_commentN, inner_comment) = markup_elem "inner_comment"; - -val (token_rangeN, token_range) = markup_elem "token_range"; - -val (sortingN, sorting) = markup_elem "sorting"; -val (typingN, typing) = markup_elem "typing"; - - -(* ML syntax *) - -val (ML_keyword1N, ML_keyword1) = markup_elem "ML_keyword1"; -val (ML_keyword2N, ML_keyword2) = markup_elem "ML_keyword2"; -val (ML_keyword3N, ML_keyword3) = markup_elem "ML_keyword3"; -val (ML_delimiterN, ML_delimiter) = markup_elem "ML_delimiter"; -val (ML_tvarN, ML_tvar) = markup_elem "ML_tvar"; -val (ML_numeralN, ML_numeral) = markup_elem "ML_numeral"; -val (ML_charN, ML_char) = markup_elem "ML_char"; -val (ML_stringN, ML_string) = markup_elem "ML_string"; -val (ML_commentN, ML_comment) = markup_elem "ML_comment"; -val (SML_stringN, SML_string) = markup_elem "SML_string"; -val (SML_commentN, SML_comment) = markup_elem "SML_comment"; - -val ML_defN = "ML_def"; -val ML_openN = "ML_open"; -val ML_structureN = "ML_structure"; -val (ML_typingN, ML_typing) = markup_elem "ML_typing"; - - -(* antiquotations *) - -val (antiquotedN, antiquoted) = markup_elem "antiquoted"; -val (antiquoteN, antiquote) = markup_elem "antiquote"; - -val ML_antiquotationN = "ML_antiquotation"; -val document_antiquotationN = "document_antiquotation"; -val document_antiquotation_optionN = "document_antiquotation_option"; - - -(* text structure *) - -val (paragraphN, paragraph) = markup_elem "paragraph"; -val (text_foldN, text_fold) = 
markup_elem "text_fold"; - - -(* outer syntax *) - -val (commandN, command) = markup_elem "command"; -val (keyword1N, keyword1) = markup_elem "keyword1"; -val (keyword2N, keyword2) = markup_elem "keyword2"; -val (keyword3N, keyword3) = markup_elem "keyword3"; -val (quasi_keywordN, quasi_keyword) = markup_elem "quasi_keyword"; -val (improperN, improper) = markup_elem "improper"; -val (operatorN, operator) = markup_elem "operator"; -val (stringN, string) = markup_elem "string"; -val (altstringN, altstring) = markup_elem "altstring"; -val (verbatimN, verbatim) = markup_elem "verbatim"; -val (cartoucheN, cartouche) = markup_elem "cartouche"; -val (commentN, comment) = markup_elem "comment"; -val (controlN, control) = markup_elem "control"; - -val tokenN = "token"; -fun token delimited props = (tokenN, (delimitedN, print_bool delimited) :: props); - - -(* timing *) - -val elapsedN = "elapsed"; -val cpuN = "cpu"; -val gcN = "gc"; - -fun timing_properties {elapsed, cpu, gc} = - [(elapsedN, Time.toString elapsed), - (cpuN, Time.toString cpu), - (gcN, Time.toString gc)]; - -fun parse_timing_properties props = - {elapsed = Properties.seconds props elapsedN, - cpu = Properties.seconds props cpuN, - gc = Properties.seconds props gcN}; - -val timingN = "timing"; -fun timing t = (timingN, timing_properties t); - - -(* command timing *) - -val command_timingN = "command_timing"; - -fun command_timing_properties {file, offset, name} elapsed = - [(fileN, file), (offsetN, print_int offset), - (nameN, name), (elapsedN, Time.toString elapsed)]; - -fun parse_command_timing_properties props = - (case (Properties.get props fileN, Properties.get props offsetN, Properties.get props nameN) of - (SOME file, SOME offset, SOME name) => - SOME ({file = file, offset = parse_int offset, name = name}, - Properties.seconds props elapsedN) - | _ => NONE); - - -(* toplevel *) - -val subgoalsN = "subgoals"; -val (proof_stateN, proof_state) = markup_int "proof_state" subgoalsN; - -val (stateN, state) = markup_elem "state"; -val (goalN, goal) = markup_elem "goal"; -val (subgoalN, subgoal) = markup_string "subgoal" nameN; - - -(* command status *) - -val taskN = "task"; - -val (acceptedN, accepted) = markup_elem "accepted"; -val (forkedN, forked) = markup_elem "forked"; -val (joinedN, joined) = markup_elem "joined"; -val (runningN, running) = markup_elem "running"; -val (finishedN, finished) = markup_elem "finished"; -val (failedN, failed) = markup_elem "failed"; - - -(* messages *) - -val serialN = "serial"; -fun serial_properties i = [(serialN, print_int i)]; - -val exec_idN = "exec_id"; - -val initN = "init"; -val statusN = "status"; -val resultN = "result"; -val writelnN = "writeln"; -val tracingN = "tracing"; -val warningN = "warning"; -val errorN = "error"; -val systemN = "system"; -val protocolN = "protocol"; - -val (legacyN, legacy) = markup_elem "legacy"; -val (promptN, prompt) = markup_elem "prompt"; - -val (reportN, report) = markup_elem "report"; -val (no_reportN, no_report) = markup_elem "no_report"; - -val (badN, bad) = markup_elem "bad"; - -val (intensifyN, intensify) = markup_elem "intensify"; -val (informationN, information) = markup_elem "information"; - - -(* active areas *) - -val browserN = "browser" -val graphviewN = "graphview"; - -val sendbackN = "sendback"; -val paddingN = "padding"; -val padding_line = (paddingN, "line"); -val padding_command = (paddingN, "command"); - -val dialogN = "dialog"; -fun dialog i result = (dialogN, [(serialN, print_int i), (resultN, result)]); - - -(* protocol message 
functions *) - -val functionN = "function" - -val assign_update = [(functionN, "assign_update")]; -val removed_versions = [(functionN, "removed_versions")]; - -fun protocol_handler name = [(functionN, "protocol_handler"), (nameN, name)]; - -fun invoke_scala name id = [(functionN, "invoke_scala"), (nameN, name), (idN, id)]; -fun cancel_scala id = [(functionN, "cancel_scala"), (idN, id)]; - -val ML_statistics = (functionN, "ML_statistics"); - -val task_statistics = (functionN, "task_statistics"); - -val command_timing = (functionN, "command_timing"); - -fun loading_theory name = [("function", "loading_theory"), ("name", name)]; - -fun dest_loading_theory [("function", "loading_theory"), ("name", name)] = SOME name - | dest_loading_theory _ = NONE; - -fun use_theories_result id ok = - [("function", "use_theories_result"), ("id", id), ("ok", print_bool ok)]; - -val print_operationsN = "print_operations"; -val print_operations = [(functionN, print_operationsN)]; - - -(* simplifier trace *) - -val simp_trace_panelN = "simp_trace_panel"; - -val simp_trace_logN = "simp_trace_log"; -val simp_trace_stepN = "simp_trace_step"; -val simp_trace_recurseN = "simp_trace_recurse"; -val simp_trace_hintN = "simp_trace_hint"; -val simp_trace_ignoreN = "simp_trace_ignore"; - -fun simp_trace_cancel i = [(functionN, "simp_trace_cancel"), (serialN, print_int i)]; - - - -(** print mode operations **) - -val no_output = ("", ""); -fun default_output (_: T) = no_output; - -local - val default = {output = default_output}; - val modes = Synchronized.var "Markup.modes" (Symtab.make [("", default)]); -in - fun add_mode name output = - Synchronized.change modes (fn tab => - (if not (Symtab.defined tab name) then () - else warning ("Redefining markup mode " ^ quote name); - Symtab.update (name, {output = output}) tab)); - fun get_mode () = - the_default default - (Library.get_first (Symtab.lookup (Synchronized.value modes)) (print_mode_value ())); -end; - -fun output m = if is_empty m then no_output else #output (get_mode ()) m; - -val enclose = output #-> Library.enclose; - -fun markup m = - let val (bg, en) = output m - in Library.enclose (Output.escape bg) (Output.escape en) end; - -fun markup_only m = markup m ""; - -fun markup_report "" = "" - | markup_report txt = markup report txt; - -end; diff --git a/core/Pure/PIDE/markup.scala b/core/Pure/PIDE/markup.scala deleted file mode 100644 index 0783550d..00000000 --- a/core/Pure/PIDE/markup.scala +++ /dev/null @@ -1,488 +0,0 @@ -/* Title: Pure/PIDE/markup.scala - Module: PIDE - Author: Makarius - -Quasi-abstract markup elements. 
-*/ - -package isabelle - - -object Markup -{ - /* elements */ - - object Elements - { - def apply(elems: Set[String]): Elements = new Elements(elems) - def apply(elems: String*): Elements = apply(Set(elems: _*)) - val empty: Elements = apply() - val full: Elements = - new Elements(Set.empty) - { - override def apply(elem: String): Boolean = true - override def toString: String = "Elements.full" - } - } - - sealed class Elements private[Markup](private val rep: Set[String]) - { - def apply(elem: String): Boolean = rep.contains(elem) - def + (elem: String): Elements = new Elements(rep + elem) - def ++ (elems: Elements): Elements = new Elements(rep ++ elems.rep) - override def toString: String = rep.mkString("Elements(", ",", ")") - } - - - /* properties */ - - val NAME = "name" - val Name = new Properties.String(NAME) - - val KIND = "kind" - val Kind = new Properties.String(KIND) - - val INSTANCE = "instance" - val Instance = new Properties.String(INSTANCE) - - - /* basic markup */ - - val Empty = Markup("", Nil) - val Broken = Markup("broken", Nil) - - class Markup_String(val name: String, prop: String) - { - private val Prop = new Properties.String(prop) - - def apply(s: String): Markup = Markup(name, Prop(s)) - def unapply(markup: Markup): Option[String] = - if (markup.name == name) Prop.unapply(markup.properties) else None - } - - class Markup_Int(val name: String, prop: String) - { - private val Prop = new Properties.Int(prop) - - def apply(i: Int): Markup = Markup(name, Prop(i)) - def unapply(markup: Markup): Option[Int] = - if (markup.name == name) Prop.unapply(markup.properties) else None - } - - - /* formal entities */ - - val BINDING = "binding" - val ENTITY = "entity" - val DEF = "def" - val REF = "ref" - - object Entity - { - def unapply(markup: Markup): Option[(String, String)] = - markup match { - case Markup(ENTITY, props) => - (props, props) match { - case (Kind(kind), Name(name)) => Some((kind, name)) - case _ => None - } - case _ => None - } - } - - - /* completion */ - - val COMPLETION = "completion" - val NO_COMPLETION = "no_completion" - - - /* position */ - - val LINE = "line" - val OFFSET = "offset" - val END_OFFSET = "end_offset" - val FILE = "file" - val ID = "id" - - val DEF_LINE = "def_line" - val DEF_OFFSET = "def_offset" - val DEF_END_OFFSET = "def_end_offset" - val DEF_FILE = "def_file" - val DEF_ID = "def_id" - - val POSITION_PROPERTIES = Set(LINE, OFFSET, END_OFFSET, FILE, ID) - val POSITION = "position" - - - /* embedded languages */ - - val Symbols = new Properties.Boolean("symbols") - val Antiquotes = new Properties.Boolean("antiquotes") - val Delimited = new Properties.Boolean("delimited") - - val LANGUAGE = "language" - object Language - { - val ML = "ML" - val SML = "SML" - val PATH = "path" - val UNKNOWN = "unknown" - - def unapply(markup: Markup): Option[(String, Boolean, Boolean, Boolean)] = - markup match { - case Markup(LANGUAGE, props) => - (props, props, props, props) match { - case (Name(name), Symbols(symbols), Antiquotes(antiquotes), Delimited(delimited)) => - Some((name, symbols, antiquotes, delimited)) - case _ => None - } - case _ => None - } - } - - - /* external resources */ - - val PATH = "path" - val Path = new Markup_String(PATH, NAME) - - val URL = "url" - val Url = new Markup_String(URL, NAME) - - - /* pretty printing */ - - val Block = new Markup_Int("block", "indent") - val Break = new Markup_Int("break", "width") - - val ITEM = "item" - val BULLET = "bullet" - - val SEPARATOR = "separator" - - - /* text properties */ - - val WORDS 
= "words" - - val HIDDEN = "hidden" - - - /* logical entities */ - - val CLASS = "class" - val TYPE_NAME = "type_name" - val FIXED = "fixed" - val CASE = "case" - val CONSTANT = "constant" - val DYNAMIC_FACT = "dynamic_fact" - - - /* inner syntax */ - - val TFREE = "tfree" - val TVAR = "tvar" - val FREE = "free" - val SKOLEM = "skolem" - val BOUND = "bound" - val VAR = "var" - val NUMERAL = "numeral" - val LITERAL = "literal" - val DELIMITER = "delimiter" - val INNER_STRING = "inner_string" - val INNER_CARTOUCHE = "inner_cartouche" - val INNER_COMMENT = "inner_comment" - - val TOKEN_RANGE = "token_range" - - val SORTING = "sorting" - val TYPING = "typing" - - val ATTRIBUTE = "attribute" - val METHOD = "method" - - - /* antiquotations */ - - val ANTIQUOTED = "antiquoted" - val ANTIQUOTE = "antiquote" - - val ML_ANTIQUOTATION = "ML_antiquotation" - val DOCUMENT_ANTIQUOTATION = "document_antiquotation" - val DOCUMENT_ANTIQUOTATION_OPTION = "document_antiquotation_option" - - - /* text structure */ - - val PARAGRAPH = "paragraph" - val TEXT_FOLD = "text_fold" - - - /* ML syntax */ - - val ML_KEYWORD1 = "ML_keyword1" - val ML_KEYWORD2 = "ML_keyword2" - val ML_KEYWORD3 = "ML_keyword3" - val ML_DELIMITER = "ML_delimiter" - val ML_TVAR = "ML_tvar" - val ML_NUMERAL = "ML_numeral" - val ML_CHAR = "ML_char" - val ML_STRING = "ML_string" - val ML_COMMENT = "ML_comment" - val SML_STRING = "SML_string" - val SML_COMMENT = "SML_comment" - - val ML_DEF = "ML_def" - val ML_OPEN = "ML_open" - val ML_STRUCTURE = "ML_structure" - val ML_TYPING = "ML_typing" - - - /* outer syntax */ - - val COMMAND = "command" - val KEYWORD1 = "keyword1" - val KEYWORD2 = "keyword2" - val KEYWORD3 = "keyword3" - val QUASI_KEYWORD = "quasi_keyword" - val IMPROPER = "improper" - val OPERATOR = "operator" - val STRING = "string" - val ALTSTRING = "altstring" - val VERBATIM = "verbatim" - val CARTOUCHE = "cartouche" - val COMMENT = "comment" - val CONTROL = "control" - - - /* timing */ - - val Elapsed = new Properties.Double("elapsed") - val CPU = new Properties.Double("cpu") - val GC = new Properties.Double("gc") - - object Timing_Properties - { - def apply(timing: isabelle.Timing): Properties.T = - Elapsed(timing.elapsed.seconds) ::: CPU(timing.cpu.seconds) ::: GC(timing.gc.seconds) - - def unapply(props: Properties.T): Option[isabelle.Timing] = - (props, props, props) match { - case (Elapsed(elapsed), CPU(cpu), GC(gc)) => - Some(new isabelle.Timing(Time.seconds(elapsed), Time.seconds(cpu), Time.seconds(gc))) - case _ => None - } - } - - val TIMING = "timing" - - object Timing - { - def apply(timing: isabelle.Timing): Markup = Markup(TIMING, Timing_Properties(timing)) - - def unapply(markup: Markup): Option[isabelle.Timing] = - markup match { - case Markup(TIMING, Timing_Properties(timing)) => Some(timing) - case _ => None - } - } - - - /* command timing */ - - val COMMAND_TIMING = "command_timing" - - - /* toplevel */ - - val SUBGOALS = "subgoals" - val PROOF_STATE = "proof_state" - - val STATE = "state" - val GOAL = "goal" - val SUBGOAL = "subgoal" - - - /* command status */ - - val TASK = "task" - - val ACCEPTED = "accepted" - val FORKED = "forked" - val JOINED = "joined" - val RUNNING = "running" - val FINISHED = "finished" - val FAILED = "failed" - - - /* interactive documents */ - - val VERSION = "version" - val ASSIGN = "assign" - - - /* prover process */ - - val PROVER_COMMAND = "prover_command" - val PROVER_ARG = "prover_arg" - - - /* messages */ - - val SERIAL = "serial" - val Serial = new Properties.Long(SERIAL) - - 
val MESSAGE = "message" - - val INIT = "init" - val STATUS = "status" - val REPORT = "report" - val RESULT = "result" - val WRITELN = "writeln" - val TRACING = "tracing" - val WARNING = "warning" - val ERROR = "error" - val PROTOCOL = "protocol" - val SYSTEM = "system" - val STDOUT = "stdout" - val STDERR = "stderr" - val EXIT = "exit" - - val WRITELN_MESSAGE = "writeln_message" - val TRACING_MESSAGE = "tracing_message" - val WARNING_MESSAGE = "warning_message" - val ERROR_MESSAGE = "error_message" - - val messages = - Map(WRITELN -> WRITELN_MESSAGE, TRACING -> TRACING_MESSAGE, - WARNING -> WARNING_MESSAGE, ERROR -> ERROR_MESSAGE) - val message: String => String = messages.withDefault((s: String) => s) - - val Return_Code = new Properties.Int("return_code") - - val LEGACY = "legacy" - - val NO_REPORT = "no_report" - - val BAD = "bad" - - val INTENSIFY = "intensify" - val INFORMATION = "information" - - - /* active areas */ - - val BROWSER = "browser" - val GRAPHVIEW = "graphview" - - val SENDBACK = "sendback" - val PADDING = "padding" - val PADDING_LINE = (PADDING, "line") - val PADDING_COMMAND = (PADDING, "command") - - val DIALOG = "dialog" - val Result = new Properties.String(RESULT) - - - /* protocol message functions */ - - val FUNCTION = "function" - val Function = new Properties.String(FUNCTION) - - val Assign_Update: Properties.T = List((FUNCTION, "assign_update")) - val Removed_Versions: Properties.T = List((FUNCTION, "removed_versions")) - - object Protocol_Handler - { - def unapply(props: Properties.T): Option[(String)] = - props match { - case List((FUNCTION, "protocol_handler"), (NAME, name)) => Some(name) - case _ => None - } - } - - val INVOKE_SCALA = "invoke_scala" - object Invoke_Scala - { - def unapply(props: Properties.T): Option[(String, String)] = - props match { - case List((FUNCTION, INVOKE_SCALA), (NAME, name), (ID, id)) => Some((name, id)) - case _ => None - } - } - - val CANCEL_SCALA = "cancel_scala" - object Cancel_Scala - { - def unapply(props: Properties.T): Option[String] = - props match { - case List((FUNCTION, CANCEL_SCALA), (ID, id)) => Some(id) - case _ => None - } - } - - object ML_Statistics - { - def unapply(props: Properties.T): Option[Properties.T] = - props match { - case (FUNCTION, "ML_statistics") :: stats => Some(stats) - case _ => None - } - } - - object Task_Statistics - { - def unapply(props: Properties.T): Option[Properties.T] = - props match { - case (FUNCTION, "task_statistics") :: stats => Some(stats) - case _ => None - } - } - - object Loading_Theory - { - def unapply(props: Properties.T): Option[String] = - props match { - case List((FUNCTION, "loading_theory"), (NAME, name)) => Some(name) - case _ => None - } - } - - object Use_Theories_Result - { - def unapply(props: Properties.T): Option[(String, Boolean)] = - props match { - case List((FUNCTION, "use_theories_result"), - ("id", id), ("ok", Properties.Value.Boolean(ok))) => Some((id, ok)) - case _ => None - } - } - - val PRINT_OPERATIONS = "print_operations" - - - /* simplifier trace */ - - val SIMP_TRACE_PANEL = "simp_trace_panel" - - val SIMP_TRACE_LOG = "simp_trace_log" - val SIMP_TRACE_STEP = "simp_trace_step" - val SIMP_TRACE_RECURSE = "simp_trace_recurse" - val SIMP_TRACE_HINT = "simp_trace_hint" - val SIMP_TRACE_IGNORE = "simp_trace_ignore" - - val SIMP_TRACE_CANCEL = "simp_trace_cancel" - object Simp_Trace_Cancel - { - def unapply(props: Properties.T): Option[Long] = - props match { - case (FUNCTION, SIMP_TRACE_CANCEL) :: Serial(i) => Some(i) - case _ => None - } - } -} - - 
-sealed case class Markup(name: String, properties: Properties.T) - diff --git a/core/Pure/PIDE/markup_tree.scala b/core/Pure/PIDE/markup_tree.scala deleted file mode 100644 index 819f32ea..00000000 --- a/core/Pure/PIDE/markup_tree.scala +++ /dev/null @@ -1,270 +0,0 @@ -/* Title: Pure/PIDE/markup_tree.scala - Module: PIDE - Author: Fabian Immler, TU Munich - Author: Makarius - -Markup trees over nested / non-overlapping text ranges. -*/ - -package isabelle - - -import scala.collection.immutable.SortedMap -import scala.collection.mutable -import scala.annotation.tailrec - - -object Markup_Tree -{ - /* construct trees */ - - val empty: Markup_Tree = new Markup_Tree(Branches.empty) - - def merge(trees: List[Markup_Tree], range: Text.Range, elements: Markup.Elements): Markup_Tree = - (empty /: trees)(_.merge(_, range, elements)) - - def merge_disjoint(trees: List[Markup_Tree]): Markup_Tree = - trees match { - case Nil => empty - case head :: tail => - new Markup_Tree( - (head.branches /: tail) { - case (branches, tree) => - (branches /: tree.branches) { - case (bs, (r, entry)) => - require(!bs.isDefinedAt(r)) - bs + (r -> entry) - } - }) - } - - - /* tree building blocks */ - - object Entry - { - def apply(markup: Text.Markup, subtree: Markup_Tree): Entry = - Entry(markup.range, List(markup.info), subtree) - } - - sealed case class Entry( - range: Text.Range, - rev_markup: List[XML.Elem], - subtree: Markup_Tree) - { - def markup: List[XML.Elem] = rev_markup.reverse - - def filter_markup(elements: Markup.Elements): List[XML.Elem] = - { - var result: List[XML.Elem] = Nil - for { elem <- rev_markup; if (elements(elem.name)) } - result ::= elem - result.toList - } - - def + (markup: Text.Markup): Entry = copy(rev_markup = markup.info :: rev_markup) - def \ (markup: Text.Markup): Entry = copy(subtree = subtree + markup) - } - - object Branches - { - type T = SortedMap[Text.Range, Entry] - val empty: T = SortedMap.empty(Text.Range.Ordering) - } - - - /* XML representation */ - - @tailrec private def strip_elems( - elems: List[XML.Elem], body: XML.Body): (List[XML.Elem], XML.Body) = - body match { - case List(XML.Wrapped_Elem(markup1, body1, body2)) => - strip_elems(XML.Elem(markup1, body1) :: elems, body2) - case List(XML.Elem(markup1, body1)) => - strip_elems(XML.Elem(markup1, Nil) :: elems, body1) - case _ => (elems, body) - } - - private def make_trees(acc: (Int, List[Markup_Tree]), tree: XML.Tree): (Int, List[Markup_Tree]) = - { - val (offset, markup_trees) = acc - - strip_elems(Nil, List(tree)) match { - case (Nil, body) => - (offset + XML.text_length(body), markup_trees) - - case (elems, body) => - val (end_offset, subtrees) = ((offset, Nil: List[Markup_Tree]) /: body)(make_trees) - if (offset == end_offset) acc - else { - val range = Text.Range(offset, end_offset) - val entry = Entry(range, elems, merge_disjoint(subtrees)) - (end_offset, new Markup_Tree(Branches.empty, entry) :: markup_trees) - } - } - } - - def from_XML(body: XML.Body): Markup_Tree = - merge_disjoint(((0, Nil: List[Markup_Tree]) /: body)(make_trees)._2) -} - - -final class Markup_Tree private(val branches: Markup_Tree.Branches.T) -{ - import Markup_Tree._ - - private def this(branches: Markup_Tree.Branches.T, entry: Markup_Tree.Entry) = - this(branches + (entry.range -> entry)) - - private def overlapping(range: Text.Range): Branches.T = - if (branches.isEmpty || - (range.contains(branches.firstKey.start) && branches.lastKey.stop <= range.stop)) - branches - else { - val start = Text.Range(range.start) - val stop = 
Text.Range(range.stop) - val bs = branches.range(start, stop) - branches.get(stop) match { - case Some(end) if range overlaps end.range => bs + (end.range -> end) - case _ => bs - } - } - - def restrict(range: Text.Range): Markup_Tree = - new Markup_Tree(overlapping(range)) - - def is_empty: Boolean = branches.isEmpty - - def + (new_markup: Text.Markup): Markup_Tree = - { - val new_range = new_markup.range - - branches.get(new_range) match { - case None => new Markup_Tree(branches, Entry(new_markup, empty)) - case Some(entry) => - if (entry.range == new_range) - new Markup_Tree(branches, entry + new_markup) - else if (entry.range.contains(new_range)) - new Markup_Tree(branches, entry \ new_markup) - else if (new_range.contains(branches.head._1) && new_range.contains(branches.last._1)) - new Markup_Tree(Branches.empty, Entry(new_markup, this)) - else { - val body = overlapping(new_range) - if (body.forall(e => new_range.contains(e._1))) - new Markup_Tree(branches -- body.keys, Entry(new_markup, new Markup_Tree(body))) - else { - Output.warning("Ignored overlapping markup information: " + new_markup + - body.filter(e => !new_range.contains(e._1)).mkString("\n")) - this - } - } - } - } - - def merge(other: Markup_Tree, root_range: Text.Range, elements: Markup.Elements): Markup_Tree = - { - def merge_trees(tree1: Markup_Tree, tree2: Markup_Tree): Markup_Tree = - (tree1 /: tree2.branches)( - { case (tree, (range, entry)) => - if (!range.overlaps(root_range)) tree - else - (merge_trees(tree, entry.subtree) /: entry.filter_markup(elements))( - { case (t, elem) => t + Text.Info(range, elem) }) - }) - - if (this eq other) this - else { - val tree1 = this.restrict(root_range) - val tree2 = other.restrict(root_range) - if (tree1.is_empty) tree2 - else merge_trees(tree1, tree2) - } - } - - def cumulate[A](root_range: Text.Range, root_info: A, elements: Markup.Elements, - result: (A, Text.Markup) => Option[A]): List[Text.Info[A]] = - { - def results(x: A, entry: Entry): Option[A] = - { - var y = x - var changed = false - for { - elem <- entry.filter_markup(elements) - y1 <- result(y, Text.Info(entry.range, elem)) - } { y = y1; changed = true } - if (changed) Some(y) else None - } - - def traverse( - last: Text.Offset, - stack: List[(Text.Info[A], List[(Text.Range, Entry)])]): List[Text.Info[A]] = - { - stack match { - case (parent, (range, entry) :: more) :: rest => - val subrange = range.restrict(root_range) - val subtree = entry.subtree.overlapping(subrange).toList - val start = subrange.start - - results(parent.info, entry) match { - case Some(res) => - val next = Text.Info(subrange, res) - val nexts = traverse(start, (next, subtree) :: (parent, more) :: rest) - if (last < start) parent.restrict(Text.Range(last, start)) :: nexts - else nexts - case None => traverse(last, (parent, subtree ::: more) :: rest) - } - - case (parent, Nil) :: rest => - val stop = parent.range.stop - val nexts = traverse(stop, rest) - if (last < stop) parent.restrict(Text.Range(last, stop)) :: nexts - else nexts - - case Nil => - val stop = root_range.stop - if (last < stop) List(Text.Info(Text.Range(last, stop), root_info)) - else Nil - } - } - traverse(root_range.start, - List((Text.Info(root_range, root_info), overlapping(root_range).toList))) - } - - def to_XML(root_range: Text.Range, text: CharSequence, elements: Markup.Elements): XML.Body = - { - def make_text(start: Text.Offset, stop: Text.Offset): XML.Body = - if (start == stop) Nil - else List(XML.Text(text.subSequence(start, stop).toString)) - - def 
make_elems(rev_markups: List[XML.Elem], body: XML.Body): XML.Body = - (body /: rev_markups) { - case (b, elem) => - if (!elements(elem.name)) b - else if (elem.body.isEmpty) List(XML.Elem(elem.markup, b)) - else List(XML.Wrapped_Elem(elem.markup, elem.body, b)) - } - - def make_body(elem_range: Text.Range, elem_markup: List[XML.Elem], entries: Branches.T) - : XML.Body = - { - val body = new mutable.ListBuffer[XML.Tree] - var last = elem_range.start - for ((range, entry) <- entries) { - val subrange = range.restrict(elem_range) - body ++= make_text(last, subrange.start) - body ++= make_body(subrange, entry.rev_markup, entry.subtree.overlapping(subrange)) - last = subrange.stop - } - body ++= make_text(last, elem_range.stop) - make_elems(elem_markup, body.toList) - } - make_body(root_range, Nil, overlapping(root_range)) - } - - override def toString = - branches.toList.map(_._2) match { - case Nil => "Empty" - case list => list.mkString("Tree(", ",", ")") - } -} - diff --git a/core/Pure/PIDE/protocol.ML b/core/Pure/PIDE/protocol.ML deleted file mode 100644 index b98a03ec..00000000 --- a/core/Pure/PIDE/protocol.ML +++ /dev/null @@ -1,131 +0,0 @@ -(* Title: Pure/PIDE/protocol.ML - Author: Makarius - -Protocol message formats for interactive proof documents. -*) - -structure Protocol: sig end = -struct - -val _ = - Isabelle_Process.protocol_command "Prover.echo" - (fn args => List.app writeln args); - -val _ = - Isabelle_Process.protocol_command "Prover.options" - (fn [options_yxml] => - let val options = Options.decode (YXML.parse_body options_yxml) in - Options.set_default options; - Future.ML_statistics := true; - Multithreading.trace := Options.int options "threads_trace"; - Multithreading.max_threads_update (Options.int options "threads"); - Goal.parallel_proofs := (if Options.int options "parallel_proofs" > 0 then 3 else 0) - end); - -val _ = - Isabelle_Process.protocol_command "Document.define_blob" - (fn [digest, content] => Document.change_state (Document.define_blob digest content)); - -val _ = - Isabelle_Process.protocol_command "Document.define_command" - (fn [id, name, blobs_yxml, text] => - let - val blobs = - YXML.parse_body blobs_yxml |> - let open XML.Decode in - list (variant - [fn ([], a) => Exn.Res (pair string (option string) a), - fn ([], a) => Exn.Exn (ERROR (string a))]) - end; - in - Document.change_state (Document.define_command (Document_ID.parse id) name blobs text) - end); - -val _ = - Isabelle_Process.protocol_command "Document.discontinue_execution" - (fn [] => Execution.discontinue ()); - -val _ = - Isabelle_Process.protocol_command "Document.cancel_exec" - (fn [exec_id] => Execution.cancel (Document_ID.parse exec_id)); - -val _ = - Isabelle_Process.protocol_command "Document.update" - (fn [old_id_string, new_id_string, edits_yxml] => Document.change_state (fn state => - let - val _ = Execution.discontinue (); - - val old_id = Document_ID.parse old_id_string; - val new_id = Document_ID.parse new_id_string; - val edits = - YXML.parse_body edits_yxml |> - let open XML.Decode in - list (pair string - (variant - [fn ([], a) => Document.Edits (list (pair (option int) (option int)) a), - fn ([], a) => - let - val (master, (name, (imports, (keywords, errors)))) = - pair string (pair string (pair (list string) - (pair (list (pair string - (option (pair (pair string (list string)) (list string))))) - (list string)))) a; - val imports' = map (rpair Position.none) imports; - val header = Thy_Header.make (name, Position.none) imports' keywords; - in Document.Deps (master, 
header, errors) end, - fn (a :: b, c) => - Document.Perspective (bool_atom a, map int_atom b, - list (pair int (pair string (list string))) c)])) - end; - - val (removed, assign_update, state') = Document.update old_id new_id edits state; - val _ = List.app Execution.terminate removed; - val _ = Execution.purge removed; - val _ = List.app Isabelle_Process.reset_tracing removed; - - val _ = - Output.protocol_message Markup.assign_update - [(new_id, assign_update) |> - let open XML.Encode - in pair int (list (pair int (list int))) end - |> YXML.string_of_body]; - in Document.start_execution state' end)); - -val _ = - Isabelle_Process.protocol_command "Document.remove_versions" - (fn [versions_yxml] => Document.change_state (fn state => - let - val versions = - YXML.parse_body versions_yxml |> - let open XML.Decode in list int end; - val state1 = Document.remove_versions versions state; - val _ = Output.protocol_message Markup.removed_versions [versions_yxml]; - in state1 end)); - -val _ = - Isabelle_Process.protocol_command "Document.dialog_result" - (fn [serial, result] => - Active.dialog_result (Markup.parse_int serial) result - handle exn => if Exn.is_interrupt exn then () (*sic!*) else reraise exn); - -val _ = - Isabelle_Process.protocol_command "use_theories" - (fn id :: master_dir :: thys => - let - val result = - Exn.capture (fn () => - Thy_Info.use_theories - {document = false, last_timing = K NONE, master_dir = Path.explode master_dir} - (map (rpair Position.none) thys)) (); - val ok = - (case result of - Exn.Res _ => true - | Exn.Exn exn => (Runtime.exn_error_message exn; false)); - in Output.protocol_message (Markup.use_theories_result id ok) [] end); - -val _ = - Isabelle_Process.protocol_command "ML_System.share_common_data" - (fn [] => ML_System.share_common_data ()); - -end; - diff --git a/core/Pure/PIDE/protocol.scala b/core/Pure/PIDE/protocol.scala deleted file mode 100644 index 498e37c8..00000000 --- a/core/Pure/PIDE/protocol.scala +++ /dev/null @@ -1,438 +0,0 @@ -/* Title: Pure/PIDE/protocol.scala - Author: Makarius - -Protocol message formats for interactive proof documents. 
-*/ - -package isabelle - - -object Protocol -{ - /* document editing */ - - object Assign_Update - { - def unapply(text: String): Option[(Document_ID.Version, Document.Assign_Update)] = - try { - import XML.Decode._ - val body = YXML.parse_body(text) - Some(pair(long, list(pair(long, list(long))))(body)) - } - catch { - case ERROR(_) => None - case _: XML.Error => None - } - } - - object Removed - { - def unapply(text: String): Option[List[Document_ID.Version]] = - try { - import XML.Decode._ - Some(list(long)(YXML.parse_body(text))) - } - catch { - case ERROR(_) => None - case _: XML.Error => None - } - } - - - /* command status */ - - object Status - { - def make(markup_iterator: Iterator[Markup]): Status = - { - var touched = false - var accepted = false - var warned = false - var failed = false - var forks = 0 - var runs = 0 - for (markup <- markup_iterator) { - markup.name match { - case Markup.ACCEPTED => accepted = true - case Markup.FORKED => touched = true; forks += 1 - case Markup.JOINED => forks -= 1 - case Markup.RUNNING => touched = true; runs += 1 - case Markup.FINISHED => runs -= 1 - case Markup.WARNING => warned = true - case Markup.FAILED | Markup.ERROR => failed = true - case _ => - } - } - Status(touched, accepted, warned, failed, forks, runs) - } - - val empty = make(Iterator.empty) - - def merge(status_iterator: Iterator[Status]): Status = - if (status_iterator.hasNext) { - val status0 = status_iterator.next - (status0 /: status_iterator)(_ + _) - } - else empty - } - - sealed case class Status( - private val touched: Boolean, - private val accepted: Boolean, - private val warned: Boolean, - private val failed: Boolean, - forks: Int, - runs: Int) - { - def + (that: Status): Status = - Status( - touched || that.touched, - accepted || that.accepted, - warned || that.warned, - failed || that.failed, - forks + that.forks, - runs + that.runs) - - def is_unprocessed: Boolean = accepted && !failed && (!touched || (forks != 0 && runs == 0)) - def is_running: Boolean = runs != 0 - def is_warned: Boolean = warned - def is_failed: Boolean = failed - def is_finished: Boolean = !failed && touched && forks == 0 && runs == 0 - } - - val proper_status_elements = - Markup.Elements(Markup.ACCEPTED, Markup.FORKED, Markup.JOINED, Markup.RUNNING, - Markup.FINISHED, Markup.FAILED) - - val liberal_status_elements = - proper_status_elements + Markup.WARNING + Markup.ERROR - - - /* command timing */ - - object Command_Timing - { - def unapply(props: Properties.T): Option[(Document_ID.Generic, isabelle.Timing)] = - props match { - case (Markup.FUNCTION, Markup.COMMAND_TIMING) :: args => - (args, args) match { - case (Position.Id(id), Markup.Timing_Properties(timing)) => Some((id, timing)) - case _ => None - } - case _ => None - } - } - - - /* node status */ - - sealed case class Node_Status( - unprocessed: Int, running: Int, warned: Int, failed: Int, finished: Int) - { - def total: Int = unprocessed + running + warned + failed + finished - } - - def node_status( - state: Document.State, version: Document.Version, node: Document.Node): Node_Status = - { - var unprocessed = 0 - var running = 0 - var warned = 0 - var failed = 0 - var finished = 0 - for (command <- node.commands.iterator) { - val states = state.command_states(version, command) - val status = Status.merge(states.iterator.map(_.protocol_status)) - - if (status.is_running) running += 1 - else if (status.is_failed) failed += 1 - else if (status.is_warned) warned += 1 - else if (status.is_finished) finished += 1 - else unprocessed += 1 
- } - Node_Status(unprocessed, running, warned, failed, finished) - } - - - /* node timing */ - - sealed case class Node_Timing(total: Double, commands: Map[Command, Double]) - - val empty_node_timing = Node_Timing(0.0, Map.empty) - - def node_timing( - state: Document.State, - version: Document.Version, - node: Document.Node, - threshold: Double): Node_Timing = - { - var total = 0.0 - var commands = Map.empty[Command, Double] - for { - command <- node.commands.iterator - st <- state.command_states(version, command) - } { - val command_timing = - (0.0 /: st.status)({ - case (timing, Markup.Timing(t)) => timing + t.elapsed.seconds - case (timing, _) => timing - }) - total += command_timing - if (command_timing >= threshold) commands += (command -> command_timing) - } - Node_Timing(total, commands) - } - - - /* result messages */ - - private val clean_elements = - Markup.Elements(Markup.REPORT, Markup.NO_REPORT) - - def clean_message(body: XML.Body): XML.Body = - body filter { - case XML.Wrapped_Elem(Markup(name, _), _, _) => !clean_elements(name) - case XML.Elem(Markup(name, _), _) => !clean_elements(name) - case _ => true - } map { - case XML.Wrapped_Elem(markup, body, ts) => XML.Wrapped_Elem(markup, body, clean_message(ts)) - case XML.Elem(markup, ts) => XML.Elem(markup, clean_message(ts)) - case t => t - } - - def message_reports(props: Properties.T, body: XML.Body): List[XML.Elem] = - body flatMap { - case XML.Wrapped_Elem(Markup(Markup.REPORT, ps), body, ts) => - List(XML.Wrapped_Elem(Markup(Markup.REPORT, props ::: ps), body, ts)) - case XML.Elem(Markup(Markup.REPORT, ps), ts) => - List(XML.Elem(Markup(Markup.REPORT, props ::: ps), ts)) - case XML.Wrapped_Elem(_, _, ts) => message_reports(props, ts) - case XML.Elem(_, ts) => message_reports(props, ts) - case XML.Text(_) => Nil - } - - - /* specific messages */ - - def is_result(msg: XML.Tree): Boolean = - msg match { - case XML.Elem(Markup(Markup.RESULT, _), _) => true - case _ => false - } - - def is_tracing(msg: XML.Tree): Boolean = - msg match { - case XML.Elem(Markup(Markup.TRACING, _), _) => true - case XML.Elem(Markup(Markup.TRACING_MESSAGE, _), _) => true - case _ => false - } - - def is_writeln_markup(msg: XML.Tree, name: String): Boolean = - msg match { - case XML.Elem(Markup(Markup.WRITELN, _), - List(XML.Elem(markup, _))) => markup.name == name - case XML.Elem(Markup(Markup.WRITELN_MESSAGE, _), - List(XML.Elem(markup, _))) => markup.name == name - case _ => false - } - - def is_warning_markup(msg: XML.Tree, name: String): Boolean = - msg match { - case XML.Elem(Markup(Markup.WARNING, _), - List(XML.Elem(markup, _))) => markup.name == name - case XML.Elem(Markup(Markup.WARNING_MESSAGE, _), - List(XML.Elem(markup, _))) => markup.name == name - case _ => false - } - - def is_warning(msg: XML.Tree): Boolean = - msg match { - case XML.Elem(Markup(Markup.WARNING, _), _) => true - case XML.Elem(Markup(Markup.WARNING_MESSAGE, _), _) => true - case _ => false - } - - def is_error(msg: XML.Tree): Boolean = - msg match { - case XML.Elem(Markup(Markup.ERROR, _), _) => true - case XML.Elem(Markup(Markup.ERROR_MESSAGE, _), _) => true - case _ => false - } - - def is_state(msg: XML.Tree): Boolean = is_writeln_markup(msg, Markup.STATE) - def is_information(msg: XML.Tree): Boolean = is_writeln_markup(msg, Markup.INFORMATION) - def is_legacy(msg: XML.Tree): Boolean = is_warning_markup(msg, Markup.LEGACY) - - def is_inlined(msg: XML.Tree): Boolean = - !(is_result(msg) || is_tracing(msg) || is_state(msg)) - - - /* dialogs */ - - object 
Dialog_Args - { - def unapply(props: Properties.T): Option[(Document_ID.Generic, Long, String)] = - (props, props, props) match { - case (Position.Id(id), Markup.Serial(serial), Markup.Result(result)) => - Some((id, serial, result)) - case _ => None - } - } - - object Dialog - { - def unapply(tree: XML.Tree): Option[(Document_ID.Generic, Long, String)] = - tree match { - case XML.Elem(Markup(Markup.DIALOG, Dialog_Args(id, serial, result)), _) => - Some((id, serial, result)) - case _ => None - } - } - - object Dialog_Result - { - def apply(id: Document_ID.Generic, serial: Long, result: String): XML.Elem = - { - val props = Position.Id(id) ::: Markup.Serial(serial) - XML.Elem(Markup(Markup.RESULT, props), List(XML.Text(result))) - } - - def unapply(tree: XML.Tree): Option[String] = - tree match { - case XML.Elem(Markup(Markup.RESULT, _), List(XML.Text(result))) => Some(result) - case _ => None - } - } - - - /* reported positions */ - - private val position_elements = - Markup.Elements(Markup.BINDING, Markup.ENTITY, Markup.REPORT, Markup.POSITION) - - def message_positions( - self_id: Document_ID.Generic => Boolean, - chunk_name: Symbol.Text_Chunk.Name, - chunk: Symbol.Text_Chunk, - message: XML.Elem): Set[Text.Range] = - { - def elem_positions(props: Properties.T, set: Set[Text.Range]): Set[Text.Range] = - props match { - case Position.Reported(id, name, symbol_range) - if self_id(id) && name == chunk_name => - chunk.incorporate(symbol_range) match { - case Some(range) => set + range - case _ => set - } - case _ => set - } - - def positions(set: Set[Text.Range], tree: XML.Tree): Set[Text.Range] = - tree match { - case XML.Wrapped_Elem(Markup(name, props), _, body) => - body.foldLeft(if (position_elements(name)) elem_positions(props, set) else set)(positions) - case XML.Elem(Markup(name, props), body) => - body.foldLeft(if (position_elements(name)) elem_positions(props, set) else set)(positions) - case XML.Text(_) => set - } - - val set = positions(Set.empty, message) - if (set.isEmpty) elem_positions(message.markup.properties, set) - else set - } -} - - -trait Protocol extends Prover -{ - /* options */ - - def options(opts: Options): Unit = - protocol_command("Prover.options", YXML.string_of_body(opts.encode)) - - - /* interned items */ - - def define_blob(digest: SHA1.Digest, bytes: Bytes): Unit = - protocol_command_bytes("Document.define_blob", Bytes(digest.toString), bytes) - - def define_command(command: Command): Unit = - { - val blobs_yxml = - { import XML.Encode._ - val encode_blob: T[Command.Blob] = - variant(List( - { case Exn.Res((a, b)) => - (Nil, pair(string, option(string))((a.node, b.map(p => p._1.toString)))) }, - { case Exn.Exn(e) => (Nil, string(Exn.message(e))) })) - YXML.string_of_body(list(encode_blob)(command.blobs)) - } - protocol_command("Document.define_command", - Document_ID(command.id), encode(command.name), blobs_yxml, encode(command.source)) - } - - - /* execution */ - - def discontinue_execution(): Unit = - protocol_command("Document.discontinue_execution") - - def cancel_exec(id: Document_ID.Exec): Unit = - protocol_command("Document.cancel_exec", Document_ID(id)) - - - /* document versions */ - - def update(old_id: Document_ID.Version, new_id: Document_ID.Version, - edits: List[Document.Edit_Command]) - { - val edits_yxml = - { import XML.Encode._ - def id: T[Command] = (cmd => long(cmd.id)) - def encode_edit(name: Document.Node.Name) - : T[Document.Node.Edit[Command.Edit, Command.Perspective]] = - variant(List( - { case Document.Node.Edits(a) => (Nil, 
list(pair(option(id), option(id)))(a)) }, - { case Document.Node.Deps(header) => - val master_dir = Isabelle_System.posix_path_url(name.master_dir) - val theory = Long_Name.base_name(name.theory) - val imports = header.imports.map(_.node) - val keywords = header.keywords.map({ case (a, b, _) => (a, b) }) - (Nil, - pair(Encode.string, pair(Encode.string, pair(list(Encode.string), - pair(list(pair(Encode.string, - option(pair(pair(Encode.string, list(Encode.string)), list(Encode.string))))), - list(Encode.string)))))( - (master_dir, (theory, (imports, (keywords, header.errors)))))) }, - { case Document.Node.Perspective(a, b, c) => - (bool_atom(a) :: b.commands.map(cmd => long_atom(cmd.id)), - list(pair(id, pair(Encode.string, list(Encode.string))))(c.dest)) })) - def encode_edits: T[List[Document.Edit_Command]] = list((node_edit: Document.Edit_Command) => - { - val (name, edit) = node_edit - pair(string, encode_edit(name))(name.node, edit) - }) - YXML.string_of_body(encode_edits(edits)) } - protocol_command("Document.update", Document_ID(old_id), Document_ID(new_id), edits_yxml) - } - - def remove_versions(versions: List[Document.Version]) - { - val versions_yxml = - { import XML.Encode._ - YXML.string_of_body(list(long)(versions.map(_.id))) } - protocol_command("Document.remove_versions", versions_yxml) - } - - - /* dialog via document content */ - - def dialog_result(serial: Long, result: String): Unit = - protocol_command("Document.dialog_result", Properties.Value.Long(serial), result) - - - /* use_theories */ - - def use_theories(id: String, master_dir: Path, thys: List[Path]): Unit = - protocol_command("use_theories", (id :: master_dir.implode :: thys.map(_.implode)): _*) -} diff --git a/core/Pure/PIDE/prover.scala b/core/Pure/PIDE/prover.scala deleted file mode 100644 index d3c11c85..00000000 --- a/core/Pure/PIDE/prover.scala +++ /dev/null @@ -1,111 +0,0 @@ -/* Title: Pure/PIDE/prover.scala - Author: Makarius - -General prover operations. 
-*/ - -package isabelle - - -object Prover -{ - /* syntax */ - - trait Syntax - { - def add_keywords(keywords: Thy_Header.Keywords): Syntax - def scan(input: CharSequence): List[Token] - def load(span: List[Token]): Option[List[String]] - def load_commands_in(text: String): Boolean - } - - - /* messages */ - - sealed abstract class Message - - class Input(val name: String, val args: List[String]) extends Message - { - override def toString: String = - XML.Elem(Markup(Markup.PROVER_COMMAND, List((Markup.NAME, name))), - args.map(s => - List(XML.Text("\n"), XML.elem(Markup.PROVER_ARG, YXML.parse_body(s)))).flatten).toString - } - - class Output(val message: XML.Elem) extends Message - { - def kind: String = message.markup.name - def properties: Properties.T = message.markup.properties - def body: XML.Body = message.body - - def is_init = kind == Markup.INIT - def is_exit = kind == Markup.EXIT - def is_stdout = kind == Markup.STDOUT - def is_stderr = kind == Markup.STDERR - def is_system = kind == Markup.SYSTEM - def is_status = kind == Markup.STATUS - def is_report = kind == Markup.REPORT - def is_syslog = is_init || is_exit || is_system || is_stderr - - override def toString: String = - { - val res = - if (is_status || is_report) message.body.map(_.toString).mkString - else Pretty.string_of(message.body) - if (properties.isEmpty) - kind.toString + " [[" + res + "]]" - else - kind.toString + " " + - (for ((x, y) <- properties) yield x + "=" + y).mkString("{", ",", "}") + " [[" + res + "]]" - } - } - - class Protocol_Output(props: Properties.T, val bytes: Bytes) - extends Output(XML.Elem(Markup(Markup.PROTOCOL, props), Nil)) - { - lazy val text: String = bytes.toString - } -} - - -trait Prover -{ - /* text and tree data */ - - def encode(s: String): String - def decode(s: String): String - - object Encode - { - val string: XML.Encode.T[String] = (s => XML.Encode.string(encode(s))) - } - - def xml_cache: XML.Cache - - - /* process management */ - - def join(): Unit - def terminate(): Unit - - def protocol_command_bytes(name: String, args: Bytes*): Unit - def protocol_command(name: String, args: String*): Unit - - - /* PIDE protocol commands */ - - def options(opts: Options): Unit - - def define_blob(digest: SHA1.Digest, bytes: Bytes): Unit - def define_command(command: Command): Unit - - def discontinue_execution(): Unit - def cancel_exec(id: Document_ID.Exec): Unit - - def update(old_id: Document_ID.Version, new_id: Document_ID.Version, - edits: List[Document.Edit_Command]): Unit - def remove_versions(versions: List[Document.Version]): Unit - - def dialog_result(serial: Long, result: String): Unit -} - diff --git a/core/Pure/PIDE/query_operation.ML b/core/Pure/PIDE/query_operation.ML deleted file mode 100644 index 4d6fd34d..00000000 --- a/core/Pure/PIDE/query_operation.ML +++ /dev/null @@ -1,40 +0,0 @@ -(* Title: Pure/PIDE/query_operation.ML - Author: Makarius - -One-shot query operations via asynchronous print functions and temporary -document overlays. 
-*) - -signature QUERY_OPERATION = -sig - val register: string -> - ({state: Toplevel.state, args: string list, output_result: string -> unit} -> unit) -> unit -end; - -structure Query_Operation: QUERY_OPERATION = -struct - -fun register name f = - Command.print_function name - (fn {args = instance :: args, ...} => - SOME {delay = NONE, pri = 0, persistent = false, strict = false, - print_fn = fn _ => uninterruptible (fn restore_attributes => fn state => - let - fun result s = Output.result [(Markup.instanceN, instance)] [s]; - fun status m = result (Markup.markup_only m); - fun output_result s = result (Markup.markup (Markup.writelnN, []) s); - fun toplevel_error exn = - result (Markup.markup (Markup.errorN, []) (Runtime.exn_message exn)); - - val _ = status Markup.running; - fun run () = f {state = state, args = args, output_result = output_result}; - val _ = - (case Exn.capture (*sic!*) (restore_attributes run) () of - Exn.Res () => () - | Exn.Exn exn => toplevel_error exn); - val _ = status Markup.finished; - in () end)} - | _ => NONE); - -end; - diff --git a/core/Pure/PIDE/query_operation.scala b/core/Pure/PIDE/query_operation.scala deleted file mode 100644 index 265c9e76..00000000 --- a/core/Pure/PIDE/query_operation.scala +++ /dev/null @@ -1,235 +0,0 @@ -/* Title: Pure/PIDE/query_operation.scala - Author: Makarius - -One-shot query operations via asynchronous print functions and temporary -document overlays. -*/ - -package isabelle - - -object Query_Operation -{ - object Status extends Enumeration - { - val WAITING = Value("waiting") - val RUNNING = Value("running") - val FINISHED = Value("finished") - } -} - -class Query_Operation[Editor_Context]( - editor: Editor[Editor_Context], - editor_context: Editor_Context, - operation_name: String, - consume_status: Query_Operation.Status.Value => Unit, - consume_output: (Document.Snapshot, Command.Results, XML.Body) => Unit) -{ - private val instance = Document_ID.make().toString - - - /* implicit state -- owned by GUI thread */ - - @volatile private var current_location: Option[Command] = None - @volatile private var current_query: List[String] = Nil - @volatile private var current_update_pending = false - @volatile private var current_output: List[XML.Tree] = Nil - @volatile private var current_status = Query_Operation.Status.FINISHED - @volatile private var current_exec_id = Document_ID.none - - private def reset_state() - { - current_location = None - current_query = Nil - current_update_pending = false - current_output = Nil - current_status = Query_Operation.Status.FINISHED - current_exec_id = Document_ID.none - } - - private def remove_overlay() - { - current_location match { - case None => - case Some(command) => - editor.remove_overlay(command, operation_name, instance :: current_query) - editor.flush() - } - } - - - /* content update */ - - private def content_update() - { - GUI_Thread.require {} - - - /* snapshot */ - - val (snapshot, command_results, removed) = - current_location match { - case Some(cmd) => - val snapshot = editor.node_snapshot(cmd.node_name) - val command_results = snapshot.state.command_results(snapshot.version, cmd) - val removed = !snapshot.version.nodes(cmd.node_name).commands.contains(cmd) - (snapshot, command_results, removed) - case None => - (Document.Snapshot.init, Command.Results.empty, true) - } - - val results = - (for { - (_, elem @ XML.Elem(Markup(Markup.RESULT, props), _)) <- command_results.iterator - if props.contains((Markup.INSTANCE, instance)) - } yield elem).toList - - - /* resolve 
sendback: static command id */ - - def resolve_sendback(body: XML.Body): XML.Body = - { - current_location match { - case None => body - case Some(command) => - def resolve(body: XML.Body): XML.Body = - body map { - case XML.Wrapped_Elem(m, b1, b2) => XML.Wrapped_Elem(m, resolve(b1), resolve(b2)) - case XML.Elem(Markup(Markup.SENDBACK, props), b) => - val props1 = - props.map({ - case (Markup.ID, Properties.Value.Long(id)) if id == current_exec_id => - (Markup.ID, Properties.Value.Long(command.id)) - case p => p - }) - XML.Elem(Markup(Markup.SENDBACK, props1), resolve(b)) - case XML.Elem(m, b) => XML.Elem(m, resolve(b)) - case t => t - } - resolve(body) - } - } - - - /* output */ - - val new_output = - for { - XML.Elem(_, List(XML.Elem(markup, body))) <- results - if Markup.messages.contains(markup.name) - body1 = resolve_sendback(body) - } yield XML.Elem(Markup(Markup.message(markup.name), markup.properties), body1) - - - /* status */ - - def get_status(name: String, status: Query_Operation.Status.Value) - : Option[Query_Operation.Status.Value] = - results.collectFirst({ case XML.Elem(_, List(elem: XML.Elem)) if elem.name == name => status }) - - val new_status = - if (removed) Query_Operation.Status.FINISHED - else - get_status(Markup.FINISHED, Query_Operation.Status.FINISHED) orElse - get_status(Markup.RUNNING, Query_Operation.Status.RUNNING) getOrElse - Query_Operation.Status.WAITING - - if (new_status == Query_Operation.Status.RUNNING) - results.collectFirst( - { - case XML.Elem(Markup(_, Position.Id(id)), List(elem: XML.Elem)) - if elem.name == Markup.RUNNING => id - }).foreach(id => current_exec_id = id) - - - /* state update */ - - if (current_output != new_output || current_status != new_status) { - if (snapshot.is_outdated) - current_update_pending = true - else { - current_update_pending = false - if (current_output != new_output && !removed) { - current_output = new_output - consume_output(snapshot, command_results, new_output) - } - if (current_status != new_status) { - current_status = new_status - consume_status(new_status) - if (new_status == Query_Operation.Status.FINISHED) - remove_overlay() - } - } - } - } - - - /* query operations */ - - def cancel_query(): Unit = - GUI_Thread.require { editor.session.cancel_exec(current_exec_id) } - - def apply_query(query: List[String]) - { - GUI_Thread.require {} - - editor.current_node_snapshot(editor_context) match { - case Some(snapshot) => - remove_overlay() - reset_state() - consume_output(Document.Snapshot.init, Command.Results.empty, Nil) - if (!snapshot.is_outdated) { - editor.current_command(editor_context, snapshot) match { - case Some(command) => - current_location = Some(command) - current_query = query - current_status = Query_Operation.Status.WAITING - editor.insert_overlay(command, operation_name, instance :: query) - case None => - } - } - consume_status(current_status) - editor.flush() - case None => - } - } - - def locate_query() - { - GUI_Thread.require {} - - for { - command <- current_location - snapshot = editor.node_snapshot(command.node_name) - link <- editor.hyperlink_command(snapshot, command) - } link.follow(editor_context) - } - - - /* main */ - - private val main = - Session.Consumer[Session.Commands_Changed](getClass.getName) { - case changed => - current_location match { - case Some(command) - if current_update_pending || - (current_status != Query_Operation.Status.FINISHED && - changed.commands.contains(command)) => - GUI_Thread.later { content_update() } - case _ => - } - } - - def activate() { - 
editor.session.commands_changed += main - } - - def deactivate() { - editor.session.commands_changed -= main - remove_overlay() - reset_state() - consume_output(Document.Snapshot.init, Command.Results.empty, Nil) - consume_status(current_status) - } -} diff --git a/core/Pure/PIDE/resources.ML b/core/Pure/PIDE/resources.ML deleted file mode 100644 index a8bc149d..00000000 --- a/core/Pure/PIDE/resources.ML +++ /dev/null @@ -1,243 +0,0 @@ -(* Title: Pure/PIDE/resources.ML - Author: Makarius - -Resources for theories and auxiliary files. -*) - -signature RESOURCES = -sig - val master_directory: theory -> Path.T - val imports_of: theory -> (string * Position.T) list - val thy_path: Path.T -> Path.T - val check_thy: Path.T -> string -> - {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T, - imports: (string * Position.T) list, keywords: Thy_Header.keywords} - val parse_files: string -> (theory -> Token.file list) parser - val provide: Path.T * SHA1.digest -> theory -> theory - val provide_parse_files: string -> (theory -> Token.file list * theory) parser - val loaded_files: theory -> Path.T list - val loaded_files_current: theory -> bool - val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory - val load_thy: bool -> (Toplevel.transition -> Time.time option) -> int -> Path.T -> - Thy_Header.header -> Position.T -> string -> theory list -> theory * (unit -> unit) * int -end; - -structure Resources: RESOURCES = -struct - -(* manage source files *) - -type files = - {master_dir: Path.T, (*master directory of theory source*) - imports: (string * Position.T) list, (*source specification of imports*) - provided: (Path.T * SHA1.digest) list}; (*source path, digest*) - -fun make_files (master_dir, imports, provided): files = - {master_dir = master_dir, imports = imports, provided = provided}; - -structure Files = Theory_Data -( - type T = files; - val empty = make_files (Path.current, [], []); - fun extend _ = empty; - fun merge _ = empty; -); - -fun map_files f = - Files.map (fn {master_dir, imports, provided} => - make_files (f (master_dir, imports, provided))); - - -val master_directory = #master_dir o Files.get; -val imports_of = #imports o Files.get; - -fun put_deps master_dir imports = map_files (fn _ => (master_dir, imports, [])); - - -(* theory files *) - -val thy_path = Path.ext "thy"; - -fun check_file dir file = File.check_file (File.full_path dir file); - -fun check_thy dir thy_name = - let - val path = thy_path (Path.basic thy_name); - val master_file = check_file dir path; - val text = File.read master_file; - - val {name = (name, pos), imports, keywords} = - Thy_Header.read (Path.position master_file) text; - val _ = thy_name <> name andalso - error ("Bad file name " ^ Path.print path ^ " for theory " ^ quote name ^ Position.here pos); - in - {master = (master_file, SHA1.digest text), text = text, theory_pos = pos, - imports = imports, keywords = keywords} - end; - - -(* load files *) - -fun parse_files cmd = - Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy => - (case Token.get_files tok of - [] => - let - val master_dir = master_directory thy; - val pos = Token.pos_of tok; - val src_paths = Keyword.command_files cmd (Path.explode name); - in map (Command.read_file master_dir pos) src_paths end - | files => map Exn.release files)); - -fun provide (src_path, id) = - map_files (fn (master_dir, imports, provided) => - if AList.defined (op =) provided src_path then - error ("Duplicate use of source file: " ^ Path.print src_path) - else 
(master_dir, imports, (src_path, id) :: provided)); - -fun provide_parse_files cmd = - parse_files cmd >> (fn files => fn thy => - let - val fs = files thy; - val thy' = fold (fn {src_path, digest, ...} => provide (src_path, digest)) fs thy; - in (fs, thy') end); - -fun load_file thy src_path = - let - val full_path = check_file (master_directory thy) src_path; - val text = File.read full_path; - val id = SHA1.digest text; - in ((full_path, id), text) end; - -fun loaded_files_current thy = - #provided (Files.get thy) |> - forall (fn (src_path, id) => - (case try (load_file thy) src_path of - NONE => false - | SOME ((_, id'), _) => id = id')); - -(*Proof General legacy*) -fun loaded_files thy = - let val {master_dir, provided, ...} = Files.get thy - in map (File.full_path master_dir o #1) provided end; - - -(* load theory *) - -fun begin_theory master_dir {name, imports, keywords} parents = - Theory.begin_theory name parents - |> put_deps master_dir imports - |> fold Thy_Header.declare_keyword keywords; - -fun excursion master_dir last_timing init elements = - let - fun prepare_span span = - Thy_Syntax.span_content span - |> Command.read init master_dir [] - |> (fn tr => Toplevel.put_timing (last_timing tr) tr); - - fun element_result span_elem (st, _) = - let - val elem = Thy_Syntax.map_element prepare_span span_elem; - val (results, st') = Toplevel.element_result elem st; - val pos' = Toplevel.pos_of (Thy_Syntax.last_element elem); - in (results, (st', pos')) end; - - val (results, (end_state, end_pos)) = - fold_map element_result elements (Toplevel.toplevel, Position.none); - - val thy = Toplevel.end_theory end_pos end_state; - in (results, thy) end; - -fun load_thy document last_timing update_time master_dir header text_pos text parents = - let - val time = ! 
Toplevel.timing; - - val {name = (name, _), ...} = header; - val _ = Thy_Header.define_keywords header; - - val lexs = Keyword.get_lexicons (); - val toks = Thy_Syntax.parse_tokens lexs text_pos text; - val spans = Thy_Syntax.parse_spans toks; - val elements = Thy_Syntax.parse_elements spans; - - fun init () = - begin_theory master_dir header parents - |> Present.begin_theory update_time - (fn () => HTML.html_mode (implode o map Thy_Syntax.present_span) spans); - - val _ = if time then writeln ("\n**** Starting theory " ^ quote name ^ " ****") else (); - val (results, thy) = - cond_timeit time "" (fn () => excursion master_dir last_timing init elements); - val _ = if time then writeln ("**** Finished theory " ^ quote name ^ " ****\n") else (); - - fun present () = - let - val res = filter_out (Toplevel.is_ignored o #1) (maps Toplevel.join_results results); - val ((minor, _), outer_syntax) = Outer_Syntax.get_syntax (); - in - if exists (Toplevel.is_skipped_proof o #2) res then - warning ("Cannot present theory with skipped proofs: " ^ quote name) - else - let val tex_source = - Thy_Output.present_thy minor Keyword.command_tags - (Outer_Syntax.is_markup outer_syntax) res toks - |> Buffer.content; - in if document then Present.theory_output name tex_source else () end - end; - - in (thy, present, size text) end; - - -(* antiquotations *) - -local - -fun check_path strict ctxt dir (name, pos) = - let - val _ = Context_Position.report ctxt pos Markup.language_path; - - val path = Path.append dir (Path.explode name) - handle ERROR msg => error (msg ^ Position.here pos); - - val _ = Context_Position.report ctxt pos (Markup.path (Path.smart_implode path)); - val _ = - if can Path.expand path andalso File.exists path then () - else - let - val path' = perhaps (try Path.expand) path; - val msg = "Bad file: " ^ Path.print path' ^ Position.here pos; - in - if strict then error msg - else if Context_Position.is_visible ctxt then - Output.report - [Markup.markup (Markup.bad |> Markup.properties (Position.properties_of pos)) msg] - else () - end; - in path end; - -fun file_antiq strict ctxt (name, pos) = - let - val dir = master_directory (Proof_Context.theory_of ctxt); - val _ = check_path strict ctxt dir (name, pos); - in - space_explode "/" name - |> map Thy_Output.verb_text - |> space_implode (Thy_Output.verb_text "/" ^ "\\discretionary{}{}{}") - end; - -in - -val _ = Theory.setup - (Thy_Output.antiquotation @{binding file} (Scan.lift (Parse.position Parse.path)) - (file_antiq true o #context) #> - Thy_Output.antiquotation @{binding file_unchecked} (Scan.lift (Parse.position Parse.path)) - (file_antiq false o #context) #> - ML_Antiquotation.value @{binding path} - (Args.context -- Scan.lift (Parse.position Parse.path) >> (fn (ctxt, arg) => - let val path = check_path true ctxt Path.current arg - in "Path.explode " ^ ML_Syntax.print_string (Path.implode path) end))); - -end; - -end; diff --git a/core/Pure/PIDE/resources.scala b/core/Pure/PIDE/resources.scala deleted file mode 100644 index 6402421d..00000000 --- a/core/Pure/PIDE/resources.scala +++ /dev/null @@ -1,131 +0,0 @@ -/* Title: Pure/PIDE/resources.scala - Author: Makarius - -Resources for theories and auxiliary files. 
-*/ - -package isabelle - - -import scala.annotation.tailrec -import scala.util.parsing.input.Reader - -import java.io.{File => JFile} - - -object Resources -{ - def thy_path(path: Path): Path = path.ext("thy") -} - - -class Resources( - val loaded_theories: Set[String], - val known_theories: Map[String, Document.Node.Name], - val base_syntax: Prover.Syntax) -{ - /* document node names */ - - def node_name(qualifier: String, raw_path: Path): Document.Node.Name = - { - val no_qualifier = "" // FIXME - val path = raw_path.expand - val node = path.implode - val theory = Long_Name.qualify(no_qualifier, Thy_Header.thy_name(node).getOrElse("")) - val master_dir = if (theory == "") "" else path.dir.implode - Document.Node.Name(node, master_dir, theory) - } - - - /* file-system operations */ - - def append(dir: String, source_path: Path): String = - (Path.explode(dir) + source_path).expand.implode - - def with_thy_reader[A](name: Document.Node.Name, f: Reader[Char] => A): A = - { - val path = Path.explode(name.node) - if (!path.is_file) error("No such file: " + path.toString) - - val reader = Scan.byte_reader(path.file) - try { f(reader) } finally { reader.close } - } - - - /* theory files */ - - def loaded_files(syntax: Prover.Syntax, text: String): List[String] = - if (syntax.load_commands_in(text)) { - val spans = Thy_Syntax.parse_spans(syntax.scan(text)) - spans.iterator.map(Thy_Syntax.span_files(syntax, _)).flatten.toList - } - else Nil - - private def dummy_name(theory: String): Document.Node.Name = - Document.Node.Name(theory + ".thy", "", theory) - - def import_name(qualifier: String, master: Document.Node.Name, s: String): Document.Node.Name = - { - val no_qualifier = "" // FIXME - val thy1 = Thy_Header.base_name(s) - val thy2 = if (Long_Name.is_qualified(thy1)) thy1 else Long_Name.qualify(no_qualifier, thy1) - (known_theories.get(thy1) orElse - known_theories.get(thy2) orElse - known_theories.get(Long_Name.base_name(thy1))) match { - case Some(name) if loaded_theories(name.theory) => dummy_name(name.theory) - case Some(name) => name - case None => - val path = Path.explode(s) - val theory = path.base.implode - if (Long_Name.is_qualified(theory)) dummy_name(theory) - else { - val node = append(master.master_dir, Resources.thy_path(path)) - val master_dir = append(master.master_dir, path.dir) - Document.Node.Name(node, master_dir, Long_Name.qualify(no_qualifier, theory)) - } - } - } - - def check_thy_reader(qualifier: String, name: Document.Node.Name, reader: Reader[Char]) - : Document.Node.Header = - { - if (reader.source.length > 0) { - try { - val header = Thy_Header.read(reader).decode_symbols - - val base_name = Long_Name.base_name(name.theory) - val name1 = header.name - if (base_name != name1) - error("Bad file name " + Resources.thy_path(Path.basic(base_name)) + - " for theory " + quote(name1)) - - val imports = header.imports.map(import_name(qualifier, name, _)) - Document.Node.Header(imports, header.keywords, Nil) - } - catch { case exn: Throwable => Document.Node.bad_header(Exn.message(exn)) } - } - else Document.Node.no_header - } - - def check_thy(qualifier: String, name: Document.Node.Name): Document.Node.Header = - with_thy_reader(name, check_thy_reader(qualifier, name, _)) - - - /* document changes */ - - def parse_change( - reparse_limit: Int, - previous: Document.Version, - doc_blobs: Document.Blobs, - edits: List[Document.Edit_Text]): Session.Change = - Thy_Syntax.parse_change(this, reparse_limit, previous, doc_blobs, edits) - - def commit(change: Session.Change) { } - - 
- /* prover process */ - - def start_prover(receiver: Prover.Message => Unit, name: String, args: List[String]): Prover = - new Isabelle_Process(receiver, args) with Protocol -} - diff --git a/core/Pure/PIDE/session.ML b/core/Pure/PIDE/session.ML deleted file mode 100644 index 931d0d23..00000000 --- a/core/Pure/PIDE/session.ML +++ /dev/null @@ -1,80 +0,0 @@ -(* Title: Pure/PIDE/session.ML - Author: Makarius - -Prover session: persistent state of logic image. -*) - -signature SESSION = -sig - val name: unit -> string - val welcome: unit -> string - val init: bool -> bool -> Path.T -> string -> bool -> string -> (string * string) list -> - (Path.T * Path.T) list -> string -> string * string -> bool -> unit - val finish: unit -> unit - val protocol_handler: string -> unit - val init_protocol_handlers: unit -> unit -end; - -structure Session: SESSION = -struct - -(** session identification -- not thread-safe **) - -val session = Unsynchronized.ref {chapter = "Pure", name = "Pure"}; -val session_finished = Unsynchronized.ref false; - -fun name () = "Isabelle/" ^ #name (! session); - -fun welcome () = - if Distribution.is_identified then - "Welcome to " ^ name () ^ " (" ^ Distribution.version ^ ")" - else "Unofficial version of " ^ name () ^ " (" ^ Distribution.version ^ ")"; - - -(* init *) - -fun init build info info_path doc doc_graph doc_output doc_variants doc_files - parent (chapter, name) verbose = - if #name (! session) <> parent orelse not (! session_finished) then - error ("Unfinished parent session " ^ quote parent ^ " for " ^ quote name) - else - let - val _ = session := {chapter = chapter, name = name}; - val _ = session_finished := false; - in - Present.init build info info_path (if doc = "false" then "" else doc) - doc_graph doc_output doc_variants doc_files (chapter, name) - verbose (map Thy_Info.get_theory (Thy_Info.get_names ())) - end; - - -(* finish *) - -fun finish () = - (Execution.shutdown (); - Thy_Info.finish (); - Present.finish (); - Outer_Syntax.check_syntax (); - Future.shutdown (); - Event_Timer.shutdown (); - Future.shutdown (); - session_finished := true); - - - -(** protocol handlers **) - -val protocol_handlers = Synchronized.var "protocol_handlers" ([]: string list); - -fun protocol_handler name = - Synchronized.change protocol_handlers (fn handlers => - (Output.try_protocol_message (Markup.protocol_handler name) []; - if not (member (op =) handlers name) then () - else warning ("Redefining protocol handler: " ^ quote name); - update (op =) name handlers)); - -fun init_protocol_handlers () = - Synchronized.value protocol_handlers - |> List.app (fn name => Output.try_protocol_message (Markup.protocol_handler name) []); - -end; diff --git a/core/Pure/PIDE/session.scala b/core/Pure/PIDE/session.scala deleted file mode 100644 index 85a00592..00000000 --- a/core/Pure/PIDE/session.scala +++ /dev/null @@ -1,615 +0,0 @@ -/* Title: Pure/PIDE/session.scala - Author: Makarius - Options: :folding=explicit:collapseFolds=1: - -PIDE editor session, potentially with running prover process. 
-*/ - -package isabelle - - -import scala.collection.immutable.Queue - - -object Session -{ - /* outlets */ - - object Consumer - { - def apply[A](name: String)(consume: A => Unit): Consumer[A] = - new Consumer[A](name, consume) - } - final class Consumer[-A] private(val name: String, val consume: A => Unit) - - class Outlet[A](dispatcher: Consumer_Thread[() => Unit]) - { - private val consumers = Synchronized(List.empty[Consumer[A]]) - - def += (c: Consumer[A]) { consumers.change(Library.update(c)) } - def -= (c: Consumer[A]) { consumers.change(Library.remove(c)) } - - def post(a: A) - { - for (c <- consumers.value.iterator) { - dispatcher.send(() => - try { c.consume(a) } - catch { - case exn: Throwable => - Output.error_message("Consumer failed: " + quote(c.name) + "\n" + Exn.message(exn)) - }) - } - } - } - - - /* change */ - - sealed case class Change( - previous: Document.Version, - syntax_changed: Boolean, - deps_changed: Boolean, - doc_edits: List[Document.Edit_Command], - version: Document.Version) - - case object Change_Flush - - - /* events */ - - //{{{ - case class Statistics(props: Properties.T) - case class Global_Options(options: Options) - case object Caret_Focus - case class Raw_Edits(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text]) - case class Dialog_Result(id: Document_ID.Generic, serial: Long, result: String) - case class Commands_Changed( - assignment: Boolean, nodes: Set[Document.Node.Name], commands: Set[Command]) - - sealed abstract class Phase - case object Inactive extends Phase - case object Startup extends Phase // transient - case object Failed extends Phase - case object Ready extends Phase - case object Shutdown extends Phase // transient - //}}} - - - /* syslog */ - - private[Session] class Syslog(limit: Int) - { - private var queue = Queue.empty[XML.Elem] - private var length = 0 - - def += (msg: XML.Elem): Unit = synchronized { - queue = queue.enqueue(msg) - length += 1 - if (length > limit) queue = queue.dequeue._2 - } - - def content: String = synchronized { - cat_lines(queue.iterator.map(XML.content)) + - (if (length > limit) "\n(A total of " + length + " messages...)" else "") - } - } - - - /* protocol handlers */ - - abstract class Protocol_Handler - { - def start(prover: Prover): Unit = {} - def stop(prover: Prover): Unit = {} - val functions: Map[String, (Prover, Prover.Protocol_Output) => Boolean] - } - - class Protocol_Handlers( - handlers: Map[String, Session.Protocol_Handler] = Map.empty, - functions: Map[String, Prover.Protocol_Output => Boolean] = Map.empty) - { - def get(name: String): Option[Protocol_Handler] = handlers.get(name) - - def add(prover: Prover, name: String): Protocol_Handlers = - { - val (handlers1, functions1) = - handlers.get(name) match { - case Some(old_handler) => - Output.warning("Redefining protocol handler: " + name) - old_handler.stop(prover) - (handlers - name, functions -- old_handler.functions.keys) - case None => (handlers, functions) - } - - val (handlers2, functions2) = - try { - val new_handler = Class.forName(name).newInstance.asInstanceOf[Protocol_Handler] - new_handler.start(prover) - - val new_functions = - for ((a, f) <- new_handler.functions.toList) yield - (a, (msg: Prover.Protocol_Output) => f(prover, msg)) - - val dups = for ((a, _) <- new_functions if functions1.isDefinedAt(a)) yield a - if (!dups.isEmpty) error("Duplicate protocol functions: " + commas_quote(dups)) - - (handlers1 + (name -> new_handler), functions1 ++ new_functions) - } - catch { - case exn: Throwable => - 
Output.error_message( - "Failed to initialize protocol handler: " + quote(name) + "\n" + Exn.message(exn)) - (handlers1, functions1) - } - - new Protocol_Handlers(handlers2, functions2) - } - - def invoke(msg: Prover.Protocol_Output): Boolean = - msg.properties match { - case Markup.Function(a) if functions.isDefinedAt(a) => - try { functions(a)(msg) } - catch { - case exn: Throwable => - Output.error_message( - "Failed invocation of protocol function: " + quote(a) + "\n" + Exn.message(exn)) - false - } - case _ => false - } - - def stop(prover: Prover): Protocol_Handlers = - { - for ((_, handler) <- handlers) handler.stop(prover) - new Protocol_Handlers() - } - } -} - - -class Session(val resources: Resources) -{ - /* global flags */ - - @volatile var timing: Boolean = false - @volatile var verbose: Boolean = false - - - /* tuning parameters */ - - def output_delay: Time = Time.seconds(0.1) // prover output (markup, common messages) - def prune_delay: Time = Time.seconds(60.0) // prune history (delete old versions) - def prune_size: Int = 0 // size of retained history - def syslog_limit: Int = 100 - def reparse_limit: Int = 0 - - - /* outlets */ - - private val dispatcher = - Consumer_Thread.fork[() => Unit]("Session.dispatcher", daemon = true) { case e => e(); true } - - val statistics = new Session.Outlet[Session.Statistics](dispatcher) - val global_options = new Session.Outlet[Session.Global_Options](dispatcher) - val caret_focus = new Session.Outlet[Session.Caret_Focus.type](dispatcher) - val raw_edits = new Session.Outlet[Session.Raw_Edits](dispatcher) - val commands_changed = new Session.Outlet[Session.Commands_Changed](dispatcher) - val phase_changed = new Session.Outlet[Session.Phase](dispatcher) - val syslog_messages = new Session.Outlet[Prover.Output](dispatcher) - val raw_output_messages = new Session.Outlet[Prover.Output](dispatcher) - val all_messages = new Session.Outlet[Prover.Message](dispatcher) // potential bottle-neck - val trace_events = new Session.Outlet[Simplifier_Trace.Event.type](dispatcher) - - - - /** main protocol manager **/ - - /* internal messages */ - - private case class Start(name: String, args: List[String]) - private case object Stop - private case class Cancel_Exec(exec_id: Document_ID.Exec) - private case class Protocol_Command(name: String, args: List[String]) - private case class Update_Options(options: Options) - private case object Prune_History - - - /* global state */ - - private val syslog = new Session.Syslog(syslog_limit) - def syslog_content(): String = syslog.content - - @volatile private var _phase: Session.Phase = Session.Inactive - private def phase_=(new_phase: Session.Phase) - { - _phase = new_phase - phase_changed.post(new_phase) - } - def phase = _phase - def is_ready: Boolean = phase == Session.Ready - - private val global_state = Synchronized(Document.State.init) - def current_state(): Document.State = global_state.value - - def recent_syntax(): Prover.Syntax = - { - val version = current_state().recent_finished.version.get_finished - version.syntax getOrElse resources.base_syntax - } - - - /* protocol handlers */ - - @volatile private var _protocol_handlers = new Session.Protocol_Handlers() - - def protocol_handler(name: String): Option[Session.Protocol_Handler] = - _protocol_handlers.get(name) - - - /* theory files */ - - def header_edit(name: Document.Node.Name, header: Document.Node.Header): Document.Edit_Text = - { - val header1 = - if (resources.loaded_theories(name.theory)) - header.error("Cannot update finished theory " + 
quote(name.theory)) - else header - (name, Document.Node.Deps(header1)) - } - - - /* pipelined change parsing */ - - private case class Text_Edits( - previous: Future[Document.Version], - doc_blobs: Document.Blobs, - text_edits: List[Document.Edit_Text], - version_result: Promise[Document.Version]) - - private val change_parser = Consumer_Thread.fork[Text_Edits]("change_parser", daemon = true) - { - case Text_Edits(previous, doc_blobs, text_edits, version_result) => - val prev = previous.get_finished - val change = - Timing.timeit("parse_change", timing) { - resources.parse_change(reparse_limit, prev, doc_blobs, text_edits) - } - version_result.fulfill(change.version) - manager.send(change) - true - } - - - /* buffered changes */ - - private object change_buffer - { - private var assignment: Boolean = false - private var nodes: Set[Document.Node.Name] = Set.empty - private var commands: Set[Command] = Set.empty - - def flush(): Unit = synchronized { - if (assignment || !nodes.isEmpty || !commands.isEmpty) - commands_changed.post(Session.Commands_Changed(assignment, nodes, commands)) - assignment = false - nodes = Set.empty - commands = Set.empty - } - private val delay_flush = Simple_Thread.delay_first(output_delay) { flush() } - - def invoke(assign: Boolean, cmds: List[Command]): Unit = synchronized { - assignment |= assign - for (command <- cmds) { - nodes += command.node_name - commands += command - } - delay_flush.invoke() - } - - def shutdown() - { - delay_flush.revoke() - flush() - } - } - - - /* postponed changes */ - - private object postponed_changes - { - private var postponed: List[Session.Change] = Nil - - def store(change: Session.Change): Unit = synchronized { postponed ::= change } - - def flush(state: Document.State): List[Session.Change] = synchronized { - val (assigned, unassigned) = postponed.partition(change => state.is_assigned(change.previous)) - postponed = unassigned - assigned.reverse - } - } - - - /* prover process */ - - private object prover - { - private val variable = Synchronized(None: Option[Prover]) - - def defined: Boolean = variable.value.isDefined - def get: Prover = variable.value.get - def set(p: Prover) { variable.change(_ => Some(p)) } - def reset { variable.change(_ => None) } - def await_reset() { variable.guarded_access({ case None => Some((), None) case _ => None }) } - } - - - /* manager thread */ - - private val delay_prune = Simple_Thread.delay_first(prune_delay) { manager.send(Prune_History) } - - private val manager: Consumer_Thread[Any] = - { - /* raw edits */ - - def handle_raw_edits(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text]) - //{{{ - { - require(prover.defined) - - prover.get.discontinue_execution() - - val previous = global_state.value.history.tip.version - val version = Future.promise[Document.Version] - global_state.change(_.continue_history(previous, edits, version)) - - raw_edits.post(Session.Raw_Edits(doc_blobs, edits)) - change_parser.send(Text_Edits(previous, doc_blobs, edits, version)) - } - //}}} - - - /* resulting changes */ - - def handle_change(change: Session.Change) - //{{{ - { - require(prover.defined) - - def id_command(command: Command) - { - for { - (name, digest) <- command.blobs_defined - if !global_state.value.defined_blob(digest) - } { - change.version.nodes(name).get_blob match { - case Some(blob) => - global_state.change(_.define_blob(digest)) - prover.get.define_blob(digest, blob.bytes) - case None => - Output.error_message("Missing blob " + quote(name.toString)) - } - } - - if 
(!global_state.value.defined_command(command.id)) { - global_state.change(_.define_command(command)) - prover.get.define_command(command) - } - } - change.doc_edits foreach { - case (_, edit) => - edit foreach { case (c1, c2) => c1 foreach id_command; c2 foreach id_command } - } - - val assignment = global_state.value.the_assignment(change.previous).check_finished - global_state.change(_.define_version(change.version, assignment)) - prover.get.update(change.previous.id, change.version.id, change.doc_edits) - resources.commit(change) - } - //}}} - - - /* prover output */ - - def handle_output(output: Prover.Output) - //{{{ - { - def bad_output() - { - if (verbose) - Output.warning("Ignoring bad prover output: " + output.message.toString) - } - - def accumulate(state_id: Document_ID.Generic, message: XML.Elem) - { - try { - val st = global_state.change_result(_.accumulate(state_id, message)) - change_buffer.invoke(false, List(st.command)) - } - catch { - case _: Document.State.Fail => bad_output() - } - } - - output match { - case msg: Prover.Protocol_Output => - val handled = _protocol_handlers.invoke(msg) - if (!handled) { - msg.properties match { - case Markup.Protocol_Handler(name) if prover.defined => - _protocol_handlers = _protocol_handlers.add(prover.get, name) - - case Protocol.Command_Timing(state_id, timing) if prover.defined => - val message = XML.elem(Markup.STATUS, List(XML.Elem(Markup.Timing(timing), Nil))) - accumulate(state_id, prover.get.xml_cache.elem(message)) - - case Markup.Assign_Update => - msg.text match { - case Protocol.Assign_Update(id, update) => - try { - val cmds = global_state.change_result(_.assign(id, update)) - change_buffer.invoke(true, cmds) - manager.send(Session.Change_Flush) - } - catch { case _: Document.State.Fail => bad_output() } - case _ => bad_output() - } - delay_prune.invoke() - - case Markup.Removed_Versions => - msg.text match { - case Protocol.Removed(removed) => - try { - global_state.change(_.removed_versions(removed)) - manager.send(Session.Change_Flush) - } - catch { case _: Document.State.Fail => bad_output() } - case _ => bad_output() - } - - case Markup.ML_Statistics(props) => - statistics.post(Session.Statistics(props)) - - case Markup.Task_Statistics(props) => - // FIXME - - case _ => bad_output() - } - } - case _ => - output.properties match { - case Position.Id(state_id) => - accumulate(state_id, output.message) - - case _ if output.is_init => - phase = Session.Ready - - case Markup.Return_Code(rc) if output.is_exit => - if (rc == 0) phase = Session.Inactive - else phase = Session.Failed - prover.reset - - case _ => raw_output_messages.post(output) - } - } - } - //}}} - - - /* main thread */ - - Consumer_Thread.fork[Any]("Session.manager", daemon = true) - { - case arg: Any => - //{{{ - arg match { - case output: Prover.Output => - if (output.is_stdout || output.is_stderr) - raw_output_messages.post(output) - else handle_output(output) - - if (output.is_syslog) { - syslog += output.message - syslog_messages.post(output) - } - - all_messages.post(output) - - case input: Prover.Input => - all_messages.post(input) - - case Start(name, args) if !prover.defined => - if (phase == Session.Inactive || phase == Session.Failed) { - phase = Session.Startup - prover.set(resources.start_prover(manager.send(_), name, args)) - } - - case Stop => - if (prover.defined && is_ready) { - _protocol_handlers = _protocol_handlers.stop(prover.get) - global_state.change(_ => Document.State.init) - phase = Session.Shutdown - prover.get.terminate - } - - 
case Prune_History => - if (prover.defined) { - val old_versions = global_state.change_result(_.remove_versions(prune_size)) - if (!old_versions.isEmpty) prover.get.remove_versions(old_versions) - } - - case Update_Options(options) => - if (prover.defined && is_ready) { - prover.get.options(options) - handle_raw_edits(Document.Blobs.empty, Nil) - } - global_options.post(Session.Global_Options(options)) - - case Cancel_Exec(exec_id) if prover.defined => - prover.get.cancel_exec(exec_id) - - case Session.Raw_Edits(doc_blobs, edits) if prover.defined => - handle_raw_edits(doc_blobs, edits) - - case Session.Dialog_Result(id, serial, result) if prover.defined => - prover.get.dialog_result(serial, result) - handle_output(new Prover.Output(Protocol.Dialog_Result(id, serial, result))) - - case Protocol_Command(name, args) if prover.defined => - prover.get.protocol_command(name, args:_*) - - case change: Session.Change if prover.defined => - val state = global_state.value - if (!state.removing_versions && state.is_assigned(change.previous)) - handle_change(change) - else postponed_changes.store(change) - - case Session.Change_Flush if prover.defined => - val state = global_state.value - if (!state.removing_versions) - postponed_changes.flush(state).foreach(handle_change(_)) - - case bad => - if (verbose) Output.warning("Ignoring bad message: " + bad.toString) - } - true - //}}} - } - } - - - /* main operations */ - - def snapshot(name: Document.Node.Name = Document.Node.Name.empty, - pending_edits: List[Text.Edit] = Nil): Document.Snapshot = - global_state.value.snapshot(name, pending_edits) - - def start(name: String, args: List[String]) - { manager.send(Start(name, args)) } - - def stop() - { - manager.send_wait(Stop) - prover.await_reset() - change_parser.shutdown() - change_buffer.shutdown() - delay_prune.revoke() - manager.shutdown() - dispatcher.shutdown() - } - - def protocol_command(name: String, args: String*) - { manager.send(Protocol_Command(name, args.toList)) } - - def cancel_exec(exec_id: Document_ID.Exec) - { manager.send(Cancel_Exec(exec_id)) } - - def update(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text]) - { if (!edits.isEmpty) manager.send_wait(Session.Raw_Edits(doc_blobs, edits)) } - - def update_options(options: Options) - { manager.send_wait(Update_Options(options)) } - - def dialog_result(id: Document_ID.Generic, serial: Long, result: String) - { manager.send(Session.Dialog_Result(id, serial, result)) } -} diff --git a/core/Pure/PIDE/text.scala b/core/Pure/PIDE/text.scala deleted file mode 100644 index b0d62e8f..00000000 --- a/core/Pure/PIDE/text.scala +++ /dev/null @@ -1,182 +0,0 @@ -/* Title: Pure/PIDE/text.scala - Module: PIDE - Author: Fabian Immler, TU Munich - Author: Makarius - -Basic operations on plain text. -*/ - -package isabelle - - -import scala.collection.mutable -import scala.util.Sorting - - -object Text -{ - /* offset */ - - type Offset = Int - - - /* range -- with total quasi-ordering */ - - object Range - { - def apply(start: Offset): Range = Range(start, start) - - val offside: Range = apply(-1) - - object Ordering extends scala.math.Ordering[Text.Range] - { - def compare(r1: Text.Range, r2: Text.Range): Int = r1 compare r2 - } - } - - sealed case class Range(val start: Offset, val stop: Offset) - { - // denotation: {start} Un {i. 
start < i & i < stop} - if (start > stop) - error("Bad range: [" + start.toString + ":" + stop.toString + "]") - - override def toString = "[" + start.toString + ":" + stop.toString + "]" - - def length: Int = stop - start - - def map(f: Offset => Offset): Range = Range(f(start), f(stop)) - def +(i: Offset): Range = if (i == 0) this else map(_ + i) - def -(i: Offset): Range = if (i == 0) this else map(_ - i) - - def is_singularity: Boolean = start == stop - def inflate_singularity: Range = if (is_singularity) Range(start, start + 1) else this - - def contains(i: Offset): Boolean = start == i || start < i && i < stop - def contains(that: Range): Boolean = this.contains(that.start) && that.stop <= this.stop - def overlaps(that: Range): Boolean = this.contains(that.start) || that.contains(this.start) - def compare(that: Range): Int = if (overlaps(that)) 0 else this.start compare that.start - - def apart(that: Range): Boolean = - (this.start max that.start) > (this.stop min that.stop) - - def restrict(that: Range): Range = - Range(this.start max that.start, this.stop min that.stop) - - def try_restrict(that: Range): Option[Range] = - if (this apart that) None - else Some(restrict(that)) - - def try_join(that: Range): Option[Range] = - if (this apart that) None - else Some(Range(this.start min that.start, this.stop max that.stop)) - } - - - /* perspective */ - - object Perspective - { - val empty: Perspective = Perspective(Nil) - - def full: Perspective = Perspective(List(Range(0, Integer.MAX_VALUE / 2))) - - def apply(ranges: Seq[Range]): Perspective = - { - val result = new mutable.ListBuffer[Text.Range] - var last: Option[Text.Range] = None - def ship(next: Option[Range]) { result ++= last; last = next } - - for (range <- ranges.sortBy(_.start)) - { - last match { - case None => ship(Some(range)) - case Some(last_range) => - last_range.try_join(range) match { - case None => ship(Some(range)) - case joined => last = joined - } - } - } - ship(None) - new Perspective(result.toList) - } - } - - final class Perspective private( - val ranges: List[Range]) // visible text partitioning in canonical order - { - def is_empty: Boolean = ranges.isEmpty - def range: Range = - if (is_empty) Range(0) - else Range(ranges.head.start, ranges.last.stop) - - override def hashCode: Int = ranges.hashCode - override def equals(that: Any): Boolean = - that match { - case other: Perspective => ranges == other.ranges - case _ => false - } - override def toString = ranges.toString - } - - - /* information associated with text range */ - - sealed case class Info[A](val range: Text.Range, val info: A) - { - def restrict(r: Text.Range): Info[A] = Info(range.restrict(r), info) - def try_restrict(r: Text.Range): Option[Info[A]] = range.try_restrict(r).map(Info(_, info)) - } - - type Markup = Info[XML.Elem] - - - /* editing */ - - object Edit - { - def insert(start: Offset, text: String): Edit = new Edit(true, start, text) - def remove(start: Offset, text: String): Edit = new Edit(false, start, text) - } - - final class Edit private(val is_insert: Boolean, val start: Offset, val text: String) - { - override def toString = - (if (is_insert) "Insert(" else "Remove(") + (start, text).toString + ")" - - - /* transform offsets */ - - private def transform(do_insert: Boolean, i: Offset): Offset = - if (i < start) i - else if (do_insert) i + text.length - else (i - text.length) max start - - def convert(i: Offset): Offset = transform(is_insert, i) - def revert(i: Offset): Offset = transform(!is_insert, i) - - - /* edit strings */ 
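    // Editor's note (not part of the deleted file): a small worked example of the
    // offset transformation defined above, for the hypothetical edit
    // Edit.insert(3, "ab") (start = 3, text.length = 2):
    //
    //   Edit.insert(3, "ab").convert(2) == 2   // offsets before `start` are unchanged
    //   Edit.insert(3, "ab").convert(5) == 7   // offsets at or after `start` shift by text.length
    //   Edit.insert(3, "ab").revert(7)  == 5   // revert applies the inverse shift, clamped at `start`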
- - private def insert(i: Offset, string: String): String = - string.substring(0, i) + text + string.substring(i) - - private def remove(i: Offset, count: Int, string: String): String = - string.substring(0, i) + string.substring(i + count) - - def can_edit(string: String, shift: Int): Boolean = - shift <= start && start < shift + string.length - - def edit(string: String, shift: Int): (Option[Edit], String) = - if (!can_edit(string, shift)) (Some(this), string) - else if (is_insert) (None, insert(start - shift, string)) - else { - val i = start - shift - val count = text.length min (string.length - i) - val rest = - if (count == text.length) None - else Some(Edit.remove(start, text.substring(count))) - (rest, remove(i, count, string)) - } - } -} diff --git a/core/Pure/PIDE/xml.ML b/core/Pure/PIDE/xml.ML deleted file mode 100644 index 8ad87613..00000000 --- a/core/Pure/PIDE/xml.ML +++ /dev/null @@ -1,404 +0,0 @@ -(* Title: Pure/PIDE/xml.ML - Author: David Aspinall - Author: Stefan Berghofer - Author: Makarius - -Untyped XML trees and representation of ML values. -*) - -signature XML_DATA_OPS = -sig - type 'a A - type 'a T - type 'a V - val int_atom: int A - val bool_atom: bool A - val unit_atom: unit A - val properties: Properties.T T - val string: string T - val int: int T - val bool: bool T - val unit: unit T - val pair: 'a T -> 'b T -> ('a * 'b) T - val triple: 'a T -> 'b T -> 'c T -> ('a * 'b * 'c) T - val list: 'a T -> 'a list T - val option: 'a T -> 'a option T - val variant: 'a V list -> 'a T -end; - -signature XML = -sig - type attributes = (string * string) list - datatype tree = - Elem of (string * attributes) * tree list - | Text of string - type body = tree list - val wrap_elem: ((string * attributes) * tree list) * tree list -> tree - val unwrap_elem: tree -> (((string * attributes) * tree list) * tree list) option - val add_content: tree -> Buffer.T -> Buffer.T - val content_of: body -> string - val trim_blanks: body -> body - val header: string - val text: string -> string - val element: string -> attributes -> string list -> string - val output_markup: Markup.T -> Output.output * Output.output - val string_of: tree -> string - val pretty: int -> tree -> Pretty.T - val output: tree -> TextIO.outstream -> unit - val parse_comments: string list -> unit * string list - val parse_string : string -> string option - val parse_element: string list -> tree * string list - val parse_document: string list -> tree * string list - val parse: string -> tree - exception XML_ATOM of string - exception XML_BODY of body - structure Encode: XML_DATA_OPS - structure Decode: XML_DATA_OPS -end; - -structure XML: XML = -struct - -(** XML trees **) - -type attributes = (string * string) list; - -datatype tree = - Elem of (string * attributes) * tree list - | Text of string; - -type body = tree list; - - -(* wrapped elements *) - -val xml_elemN = "xml_elem"; -val xml_nameN = "xml_name"; -val xml_bodyN = "xml_body"; - -fun wrap_elem (((a, atts), body1), body2) = - Elem ((xml_elemN, (xml_nameN, a) :: atts), Elem ((xml_bodyN, []), body1) :: body2); - -fun unwrap_elem (Elem ((name, (n, a) :: atts), Elem ((name', atts'), body1) :: body2)) = - if name = xml_elemN andalso n = xml_nameN andalso name' = xml_bodyN andalso null atts' - then SOME (((a, atts), body1), body2) else NONE - | unwrap_elem _ = NONE; - - -(* text context *) - -fun add_content tree = - (case unwrap_elem tree of - SOME (_, ts) => fold add_content ts - | NONE => - (case tree of - Elem (_, ts) => fold add_content ts - | Text s => 
Buffer.add s)); - -fun content_of body = Buffer.empty |> fold add_content body |> Buffer.content; - - -(* trim blanks *) - -fun trim_blanks trees = - trees |> maps - (fn Elem (markup, body) => [Elem (markup, trim_blanks body)] - | Text s => - let - val s' = s - |> raw_explode - |> take_prefix Symbol.is_blank |> #2 - |> take_suffix Symbol.is_blank |> #1 - |> implode; - in if s' = "" then [] else [Text s'] end); - - - -(** string representation **) - -val header = "\n"; - - -(* escaped text *) - -fun decode "<" = "<" - | decode ">" = ">" - | decode "&" = "&" - | decode "'" = "'" - | decode """ = "\"" - | decode c = c; - -fun encode "<" = "<" - | encode ">" = ">" - | encode "&" = "&" - | encode "'" = "'" - | encode "\"" = """ - | encode c = c; - -val text = translate_string encode; - - -(* elements *) - -fun elem name atts = - space_implode " " (name :: map (fn (a, x) => a ^ "=\"" ^ text x ^ "\"") atts); - -fun element name atts body = - let val b = implode body in - if b = "" then enclose "<" "/>" (elem name atts) - else enclose "<" ">" (elem name atts) ^ b ^ enclose "" name - end; - -fun output_markup (markup as (name, atts)) = - if Markup.is_empty markup then Markup.no_output - else (enclose "<" ">" (elem name atts), enclose "" name); - - -(* output *) - -fun buffer_of depth tree = - let - fun traverse _ (Elem ((name, atts), [])) = - Buffer.add "<" #> Buffer.add (elem name atts) #> Buffer.add "/>" - | traverse d (Elem ((name, atts), ts)) = - Buffer.add "<" #> Buffer.add (elem name atts) #> Buffer.add ">" #> - traverse_body d ts #> - Buffer.add " Buffer.add name #> Buffer.add ">" - | traverse _ (Text s) = Buffer.add (text s) - and traverse_body 0 _ = Buffer.add "..." - | traverse_body d ts = fold (traverse (d - 1)) ts; - in Buffer.empty |> traverse depth tree end; - -val string_of = Buffer.content o buffer_of ~1; -val output = Buffer.output o buffer_of ~1; - -fun pretty depth tree = - Pretty.str (Buffer.content (buffer_of (Int.max (0, depth)) tree)); - - - -(** XML parsing **) - -local - -fun err msg (xs, _) = - fn () => "XML parsing error: " ^ msg () ^ "\nfound: " ^ quote (Symbol.beginning 100 xs); - -fun ignored _ = []; - -fun name_start_char c = Symbol.is_ascii_letter c orelse c = ":" orelse c = "_"; -fun name_char c = name_start_char c orelse Symbol.is_ascii_digit c orelse c = "-" orelse c = "."; -val parse_name = Scan.one name_start_char ::: Scan.many name_char; - -val blanks = Scan.many Symbol.is_blank; -val special = $$ "&" ^^ (parse_name >> implode) ^^ $$ ";" >> decode; -val regular = Scan.one Symbol.is_regular; -fun regular_except x = Scan.one (fn c => Symbol.is_regular c andalso c <> x); - -val parse_chars = Scan.repeat1 (special || regular_except "<") >> implode; - -val parse_cdata = - Scan.this_string "") regular) >> implode) --| - Scan.this_string "]]>"; - -val parse_att = - ((parse_name >> implode) --| (blanks -- $$ "=" -- blanks)) -- - (($$ "\"" || $$ "'") :|-- (fn s => - (Scan.repeat (special || regular_except s) >> implode) --| $$ s)); - -val parse_comment = - Scan.this_string "") regular) -- - Scan.this_string "-->" >> ignored; - -val parse_processing_instruction = - Scan.this_string "") regular) -- - Scan.this_string "?>" >> ignored; - -val parse_doctype = - Scan.this_string "") regular) -- - $$ ">" >> ignored; - -val parse_misc = - Scan.one Symbol.is_blank >> ignored || - parse_processing_instruction || - parse_comment; - -val parse_optional_text = - Scan.optional (parse_chars >> (single o Text)) []; - -in - -val parse_comments = - blanks -- Scan.repeat (parse_comment -- 
blanks >> K ()) >> K (); - -val parse_string = Scan.read Symbol.stopper parse_chars o raw_explode; - -fun parse_content xs = - (parse_optional_text @@@ - (Scan.repeat - ((parse_element >> single || - parse_cdata >> (single o Text) || - parse_processing_instruction || - parse_comment) - @@@ parse_optional_text) >> flat)) xs - -and parse_element xs = - ($$ "<" |-- parse_name -- Scan.repeat (blanks |-- parse_att) --| blanks :-- - (fn (name, _) => - !! (err (fn () => "Expected > or />")) - ($$ "/" -- $$ ">" >> ignored || - $$ ">" |-- parse_content --| - !! (err (fn () => "Expected ")) - ($$ "<" -- $$ "/" -- Scan.this name -- blanks -- $$ ">"))) - >> (fn ((name, atts), body) => Elem ((implode name, atts), body))) xs; - -val parse_document = - (Scan.repeat parse_misc -- Scan.option parse_doctype -- Scan.repeat parse_misc) - |-- parse_element; - -fun parse s = - (case Scan.finite Symbol.stopper (Scan.error (!! (err (fn () => "Malformed element")) - (blanks |-- parse_document --| blanks))) (raw_explode s) of - (x, []) => x - | (_, ys) => error ("XML parsing error: unprocessed input\n" ^ Symbol.beginning 100 ys)); - -end; - - - -(** XML as data representation language **) - -exception XML_ATOM of string; -exception XML_BODY of tree list; - - -structure Encode = -struct - -type 'a A = 'a -> string; -type 'a T = 'a -> body; -type 'a V = 'a -> string list * body; - - -(* atomic values *) - -fun int_atom i = signed_string_of_int i; - -fun bool_atom false = "0" - | bool_atom true = "1"; - -fun unit_atom () = ""; - - -(* structural nodes *) - -fun node ts = Elem ((":", []), ts); - -fun vector xs = map_index (fn (i, x) => (int_atom i, x)) xs; - -fun tagged (tag, (xs, ts)) = Elem ((int_atom tag, vector xs), ts); - - -(* representation of standard types *) - -fun properties props = [Elem ((":", props), [])]; - -fun string "" = [] - | string s = [Text s]; - -val int = string o int_atom; - -val bool = string o bool_atom; - -val unit = string o unit_atom; - -fun pair f g (x, y) = [node (f x), node (g y)]; - -fun triple f g h (x, y, z) = [node (f x), node (g y), node (h z)]; - -fun list f xs = map (node o f) xs; - -fun option _ NONE = [] - | option f (SOME x) = [node (f x)]; - -fun variant fs x = - [tagged (the (get_index (fn f => SOME (f x) handle General.Match => NONE) fs))]; - -end; - - -structure Decode = -struct - -type 'a A = string -> 'a; -type 'a T = body -> 'a; -type 'a V = string list * body -> 'a; - - -(* atomic values *) - -fun int_atom s = - Markup.parse_int s - handle Fail _ => raise XML_ATOM s; - -fun bool_atom "0" = false - | bool_atom "1" = true - | bool_atom s = raise XML_ATOM s; - -fun unit_atom "" = () - | unit_atom s = raise XML_ATOM s; - - -(* structural nodes *) - -fun node (Elem ((":", []), ts)) = ts - | node t = raise XML_BODY [t]; - -fun vector atts = - map_index (fn (i, (a, x)) => if int_atom a = i then x else raise XML_ATOM a) atts; - -fun tagged (Elem ((name, atts), ts)) = (int_atom name, (vector atts, ts)) - | tagged t = raise XML_BODY [t]; - - -(* representation of standard types *) - -fun properties [Elem ((":", props), [])] = props - | properties ts = raise XML_BODY ts; - -fun string [] = "" - | string [Text s] = s - | string ts = raise XML_BODY ts; - -val int = int_atom o string; - -val bool = bool_atom o string; - -val unit = unit_atom o string; - -fun pair f g [t1, t2] = (f (node t1), g (node t2)) - | pair _ _ ts = raise XML_BODY ts; - -fun triple f g h [t1, t2, t3] = (f (node t1), g (node t2), h (node t3)) - | triple _ _ _ ts = raise XML_BODY ts; - -fun list f ts = map (f o 
node) ts; - -fun option _ [] = NONE - | option f [t] = SOME (f (node t)) - | option _ ts = raise XML_BODY ts; - -fun variant fs [t] = - let - val (tag, (xs, ts)) = tagged t; - val f = nth fs tag handle General.Subscript => raise XML_BODY [t]; - in f (xs, ts) end - | variant _ ts = raise XML_BODY ts; - -end; - -end; diff --git a/core/Pure/PIDE/xml.scala b/core/Pure/PIDE/xml.scala deleted file mode 100644 index e6a58d77..00000000 --- a/core/Pure/PIDE/xml.scala +++ /dev/null @@ -1,375 +0,0 @@ -/* Title: Pure/PIDE/xml.scala - Module: PIDE - Author: Makarius - -Untyped XML trees and basic data representation. -*/ - -package isabelle - - -import java.util.WeakHashMap -import java.lang.ref.WeakReference -import javax.xml.parsers.DocumentBuilderFactory - - -object XML -{ - /** XML trees **/ - - /* datatype representation */ - - type Attributes = Properties.T - - sealed abstract class Tree { override def toString = string_of_tree(this) } - case class Elem(markup: Markup, body: List[Tree]) extends Tree - { - def name: String = markup.name - } - case class Text(content: String) extends Tree - - def elem(name: String, body: List[Tree]) = Elem(Markup(name, Nil), body) - def elem(name: String) = Elem(Markup(name, Nil), Nil) - - type Body = List[Tree] - - - /* wrapped elements */ - - val XML_ELEM = "xml_elem"; - val XML_NAME = "xml_name"; - val XML_BODY = "xml_body"; - - object Wrapped_Elem - { - def apply(markup: Markup, body1: Body, body2: Body): XML.Elem = - Elem(Markup(XML_ELEM, (XML_NAME, markup.name) :: markup.properties), - Elem(Markup(XML_BODY, Nil), body1) :: body2) - - def unapply(tree: Tree): Option[(Markup, Body, Body)] = - tree match { - case - Elem(Markup(XML_ELEM, (XML_NAME, name) :: props), - Elem(Markup(XML_BODY, Nil), body1) :: body2) => - Some(Markup(name, props), body1, body2) - case _ => None - } - } - - - /* traverse text */ - - def traverse_text[A](body: Body)(a: A)(op: (A, String) => A): A = - { - def traverse(x: A, t: Tree): A = - t match { - case Wrapped_Elem(_, _, ts) => (x /: ts)(traverse) - case Elem(_, ts) => (x /: ts)(traverse) - case Text(s) => op(x, s) - } - (a /: body)(traverse) - } - - def text_length(body: Body): Int = traverse_text(body)(0) { case (n, s) => n + s.length } - - - /* text content */ - - def content(body: Body): String = - { - val text = new StringBuilder(text_length(body)) - traverse_text(body)(()) { case (_, s) => text.append(s) } - text.toString - } - - def content(tree: Tree): String = content(List(tree)) - - - - /** string representation **/ - - def string_of_body(body: Body): String = - { - val s = new StringBuilder - - def text(txt: String) { - if (txt == null) s ++= txt - else { - for (c <- txt.iterator) c match { - case '<' => s ++= "<" - case '>' => s ++= ">" - case '&' => s ++= "&" - case '"' => s ++= """ - case '\'' => s ++= "'" - case _ => s += c - } - } - } - def attrib(p: (String, String)) { s ++= " "; s ++= p._1; s ++= "=\""; text(p._2); s ++= "\"" } - def elem(markup: Markup) { s ++= markup.name; markup.properties.foreach(attrib) } - def tree(t: Tree): Unit = - t match { - case Elem(markup, Nil) => - s ++= "<"; elem(markup); s ++= "/>" - case Elem(markup, ts) => - s ++= "<"; elem(markup); s ++= ">" - ts.foreach(tree) - s ++= "" - case Text(txt) => text(txt) - } - body.foreach(tree) - s.toString - } - - def string_of_tree(tree: XML.Tree): String = string_of_body(List(tree)) - - - - /** cache for partial sharing (weak table) **/ - - class Cache(initial_size: Int = 131071, max_string: Int = 100) - { - private var table = new WeakHashMap[Any, 
WeakReference[Any]](initial_size) - - private def lookup[A](x: A): Option[A] = - { - val ref = table.get(x) - if (ref == null) None - else { - val y = ref.asInstanceOf[WeakReference[A]].get - if (y == null) None - else Some(y) - } - } - private def store[A](x: A): A = - { - table.put(x, new WeakReference[Any](x)) - x - } - - private def trim_bytes(s: String): String = new String(s.toCharArray) - - private def cache_string(x: String): String = - lookup(x) match { - case Some(y) => y - case None => - val z = trim_bytes(x) - if (z.length > max_string) z else store(z) - } - private def cache_props(x: Properties.T): Properties.T = - if (x.isEmpty) x - else - lookup(x) match { - case Some(y) => y - case None => store(x.map(p => (trim_bytes(p._1).intern, cache_string(p._2)))) - } - private def cache_markup(x: Markup): Markup = - lookup(x) match { - case Some(y) => y - case None => - x match { - case Markup(name, props) => - store(Markup(cache_string(name), cache_props(props))) - } - } - private def cache_tree(x: XML.Tree): XML.Tree = - lookup(x) match { - case Some(y) => y - case None => - x match { - case XML.Elem(markup, body) => - store(XML.Elem(cache_markup(markup), cache_body(body))) - case XML.Text(text) => store(XML.Text(cache_string(text))) - } - } - private def cache_body(x: XML.Body): XML.Body = - if (x.isEmpty) x - else - lookup(x) match { - case Some(y) => y - case None => x.map(cache_tree(_)) - } - - // main methods - def string(x: String): String = synchronized { cache_string(x) } - def props(x: Properties.T): Properties.T = synchronized { cache_props(x) } - def markup(x: Markup): Markup = synchronized { cache_markup(x) } - def tree(x: XML.Tree): XML.Tree = synchronized { cache_tree(x) } - def body(x: XML.Body): XML.Body = synchronized { cache_body(x) } - def elem(x: XML.Elem): XML.Elem = synchronized { cache_tree(x).asInstanceOf[XML.Elem] } - } - - - - /** XML as data representation language **/ - - abstract class Error(s: String) extends Exception(s) - class XML_Atom(s: String) extends Error(s) - class XML_Body(body: XML.Body) extends Error("") - - object Encode - { - type T[A] = A => XML.Body - - - /* atomic values */ - - def long_atom(i: Long): String = i.toString - - def int_atom(i: Int): String = i.toString - - def bool_atom(b: Boolean): String = if (b) "1" else "0" - - def unit_atom(u: Unit) = "" - - - /* structural nodes */ - - private def node(ts: XML.Body): XML.Tree = XML.Elem(Markup(":", Nil), ts) - - private def vector(xs: List[String]): XML.Attributes = - xs.zipWithIndex.map({ case (x, i) => (int_atom(i), x) }) - - private def tagged(tag: Int, data: (List[String], XML.Body)): XML.Tree = - XML.Elem(Markup(int_atom(tag), vector(data._1)), data._2) - - - /* representation of standard types */ - - val properties: T[Properties.T] = - (props => List(XML.Elem(Markup(":", props), Nil))) - - val string: T[String] = (s => if (s.isEmpty) Nil else List(XML.Text(s))) - - val long: T[Long] = (x => string(long_atom(x))) - - val int: T[Int] = (x => string(int_atom(x))) - - val bool: T[Boolean] = (x => string(bool_atom(x))) - - val unit: T[Unit] = (x => string(unit_atom(x))) - - def pair[A, B](f: T[A], g: T[B]): T[(A, B)] = - (x => List(node(f(x._1)), node(g(x._2)))) - - def triple[A, B, C](f: T[A], g: T[B], h: T[C]): T[(A, B, C)] = - (x => List(node(f(x._1)), node(g(x._2)), node(h(x._3)))) - - def list[A](f: T[A]): T[List[A]] = - (xs => xs.map((x: A) => node(f(x)))) - - def option[A](f: T[A]): T[Option[A]] = - { - case None => Nil - case Some(x) => List(node(f(x))) - } - - def 
variant[A](fs: List[PartialFunction[A, (List[String], XML.Body)]]): T[A] = - { - case x => - val (f, tag) = fs.iterator.zipWithIndex.find(p => p._1.isDefinedAt(x)).get - List(tagged(tag, f(x))) - } - } - - object Decode - { - type T[A] = XML.Body => A - type V[A] = (List[String], XML.Body) => A - - - /* atomic values */ - - def long_atom(s: String): Long = - try { java.lang.Long.parseLong(s) } - catch { case e: NumberFormatException => throw new XML_Atom(s) } - - def int_atom(s: String): Int = - try { Integer.parseInt(s) } - catch { case e: NumberFormatException => throw new XML_Atom(s) } - - def bool_atom(s: String): Boolean = - if (s == "1") true - else if (s == "0") false - else throw new XML_Atom(s) - - def unit_atom(s: String): Unit = - if (s == "") () else throw new XML_Atom(s) - - - /* structural nodes */ - - private def node(t: XML.Tree): XML.Body = - t match { - case XML.Elem(Markup(":", Nil), ts) => ts - case _ => throw new XML_Body(List(t)) - } - - private def vector(atts: XML.Attributes): List[String] = - atts.iterator.zipWithIndex.map( - { case ((a, x), i) => if (int_atom(a) == i) x else throw new XML_Atom(a) }).toList - - private def tagged(t: XML.Tree): (Int, (List[String], XML.Body)) = - t match { - case XML.Elem(Markup(name, atts), ts) => (int_atom(name), (vector(atts), ts)) - case _ => throw new XML_Body(List(t)) - } - - - /* representation of standard types */ - - val properties: T[Properties.T] = - { - case List(XML.Elem(Markup(":", props), Nil)) => props - case ts => throw new XML_Body(ts) - } - - val string: T[String] = - { - case Nil => "" - case List(XML.Text(s)) => s - case ts => throw new XML_Body(ts) - } - - val long: T[Long] = (x => long_atom(string(x))) - - val int: T[Int] = (x => int_atom(string(x))) - - val bool: T[Boolean] = (x => bool_atom(string(x))) - - val unit: T[Unit] = (x => unit_atom(string(x))) - - def pair[A, B](f: T[A], g: T[B]): T[(A, B)] = - { - case List(t1, t2) => (f(node(t1)), g(node(t2))) - case ts => throw new XML_Body(ts) - } - - def triple[A, B, C](f: T[A], g: T[B], h: T[C]): T[(A, B, C)] = - { - case List(t1, t2, t3) => (f(node(t1)), g(node(t2)), h(node(t3))) - case ts => throw new XML_Body(ts) - } - - def list[A](f: T[A]): T[List[A]] = - (ts => ts.map(t => f(node(t)))) - - def option[A](f: T[A]): T[Option[A]] = - { - case Nil => None - case List(t) => Some(f(node(t))) - case ts => throw new XML_Body(ts) - } - - def variant[A](fs: List[V[A]]): T[A] = - { - case List(t) => - val (tag, (xs, ts)) = tagged(t) - val f = - try { fs(tag) } - catch { case _: IndexOutOfBoundsException => throw new XML_Body(List(t)) } - f(xs, ts) - case ts => throw new XML_Body(ts) - } - } -} diff --git a/core/Pure/PIDE/yxml.ML b/core/Pure/PIDE/yxml.ML deleted file mode 100644 index 5d037160..00000000 --- a/core/Pure/PIDE/yxml.ML +++ /dev/null @@ -1,152 +0,0 @@ -(* Title: Pure/PIDE/yxml.ML - Author: Makarius - -Efficient text representation of XML trees using extra characters X -and Y -- no escaping, may nest marked text verbatim. Suitable for -direct inlining into plain text. - -Markup ...body... is encoded as: - - X Y name Y att=val ... X - ... - body - ... 
- X Y X -*) - -signature YXML = -sig - val X: Symbol.symbol - val Y: Symbol.symbol - val embed_controls: string -> string - val detect: string -> bool - val output_markup: Markup.T -> string * string - val string_of_body: XML.body -> string - val string_of: XML.tree -> string - val output_markup_elem: Markup.T -> (string * string) * string - val parse_body: string -> XML.body - val parse: string -> XML.tree -end; - -structure YXML: YXML = -struct - -(** string representation **) - -(* idempotent recoding of certain low ASCII control characters *) - -fun pseudo_utf8 c = - if Symbol.is_ascii_control c - then chr 192 ^ chr (128 + ord c) - else c; - -fun embed_controls str = - if exists_string Symbol.is_ascii_control str - then translate_string pseudo_utf8 str - else str; - - -(* markers *) - -val X = chr 5; -val Y = chr 6; -val XY = X ^ Y; -val XYX = XY ^ X; - -val detect = exists_string (fn s => s = X orelse s = Y); - - -(* output *) - -fun output_markup (markup as (name, atts)) = - if Markup.is_empty markup then Markup.no_output - else (XY ^ name ^ implode (map (fn (a, x) => Y ^ a ^ "=" ^ x) atts) ^ X, XYX); - -fun string_of_body body = - let - fun attrib (a, x) = Buffer.add Y #> Buffer.add a #> Buffer.add "=" #> Buffer.add x; - fun tree (XML.Elem ((name, atts), ts)) = - Buffer.add XY #> Buffer.add name #> fold attrib atts #> Buffer.add X #> - trees ts #> - Buffer.add XYX - | tree (XML.Text s) = Buffer.add s - and trees ts = fold tree ts; - in Buffer.empty |> trees body |> Buffer.content end; - -val string_of = string_of_body o single; - - -(* wrapped elements *) - -val Z = chr 0; -val Z_text = [XML.Text Z]; - -fun output_markup_elem markup = - let val [bg1, bg2, en] = space_explode Z (string_of (XML.wrap_elem ((markup, Z_text), Z_text))) - in ((bg1, bg2), en) end; - - - -(** efficient YXML parsing **) - -local - -(* splitting *) - -fun is_char s c = ord s = Char.ord c; - -val split_string = - Substring.full #> - Substring.tokens (is_char X) #> - map (Substring.fields (is_char Y) #> map Substring.string); - - -(* structural errors *) - -fun err msg = raise Fail ("Malformed YXML: " ^ msg); -fun err_attribute () = err "bad attribute"; -fun err_element () = err "bad element"; -fun err_unbalanced "" = err "unbalanced element" - | err_unbalanced name = err ("unbalanced element " ^ quote name); - - -(* stack operations *) - -fun add x ((elem, body) :: pending) = (elem, x :: body) :: pending; - -fun push "" _ _ = err_element () - | push name atts pending = ((name, atts), []) :: pending; - -fun pop ((("", _), _) :: _) = err_unbalanced "" - | pop ((markup, body) :: pending) = add (XML.Elem (markup, rev body)) pending; - - -(* parsing *) - -fun parse_attrib s = - (case first_field "=" s of - NONE => err_attribute () - | SOME ("", _) => err_attribute () - | SOME att => att); - -fun parse_chunk ["", ""] = pop - | parse_chunk ("" :: name :: atts) = push name (map parse_attrib atts) - | parse_chunk txts = fold (add o XML.Text) txts; - -in - -fun parse_body source = - (case fold parse_chunk (split_string source) [(("", []), [])] of - [(("", _), result)] => rev result - | ((name, _), _) :: _ => err_unbalanced name); - -fun parse source = - (case parse_body source of - [result] => result - | [] => XML.Text "" - | _ => err "multiple results"); - -end; - -end; - diff --git a/core/Pure/PIDE/yxml.scala b/core/Pure/PIDE/yxml.scala deleted file mode 100644 index 74169f97..00000000 --- a/core/Pure/PIDE/yxml.scala +++ /dev/null @@ -1,140 +0,0 @@ -/* Title: Pure/PIDE/yxml.scala - Module: PIDE - Author: Makarius - 
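   Editor's note (not part of the deleted files): a concrete instance of the X/Y
   encoding described in yxml.ML above, using the markers X = '\u0005' and
   Y = '\u0006' defined below. For the tree

     XML.Elem(Markup("a", List(("b", "c"))), List(XML.Text("txt")))

   string_of_body produces

     "\u0005\u0006a\u0006b=c\u0005" + "txt" + "\u0005\u0006\u0005"

   i.e. "X Y name Y att=val X", then the body verbatim (no escaping), then "X Y X".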
-Efficient text representation of XML trees. Suitable for direct -inlining into plain text. -*/ - -package isabelle - - -import scala.collection.mutable - - -object YXML -{ - /* chunk markers */ - - val X = '\u0005' - val Y = '\u0006' - - val is_X = (c: Char) => c == X - val is_Y = (c: Char) => c == Y - - val X_string = X.toString - val Y_string = Y.toString - - def detect(s: String): Boolean = s.exists(c => c == X || c == Y) - - - /* string representation */ // FIXME byte array version with pseudo-utf-8 (!?) - - def string_of_body(body: XML.Body): String = - { - val s = new StringBuilder - def attrib(p: (String, String)) { s += Y; s ++= p._1; s += '='; s ++= p._2 } - def tree(t: XML.Tree): Unit = - t match { - case XML.Elem(Markup(name, atts), ts) => - s += X; s += Y; s ++= name; atts.foreach(attrib); s += X - ts.foreach(tree) - s += X; s += Y; s += X - case XML.Text(text) => s ++= text - } - body.foreach(tree) - s.toString - } - - def string_of_tree(tree: XML.Tree): String = string_of_body(List(tree)) - - - /* parsing */ - - private def err(msg: String) = error("Malformed YXML: " + msg) - private def err_attribute() = err("bad attribute") - private def err_element() = err("bad element") - private def err_unbalanced(name: String) = - if (name == "") err("unbalanced element") - else err("unbalanced element " + quote(name)) - - private def parse_attrib(source: CharSequence) = { - val s = source.toString - val i = s.indexOf('=') - if (i <= 0) err_attribute() - (s.substring(0, i), s.substring(i + 1)) - } - - - def parse_body(source: CharSequence): XML.Body = - { - /* stack operations */ - - def buffer(): mutable.ListBuffer[XML.Tree] = new mutable.ListBuffer[XML.Tree] - var stack: List[(Markup, mutable.ListBuffer[XML.Tree])] = List((Markup.Empty, buffer())) - - def add(x: XML.Tree) - { - (stack: @unchecked) match { case ((_, body) :: _) => body += x } - } - - def push(name: String, atts: XML.Attributes) - { - if (name == "") err_element() - else stack = (Markup(name, atts), buffer()) :: stack - } - - def pop() - { - (stack: @unchecked) match { - case ((Markup.Empty, _) :: _) => err_unbalanced("") - case ((markup, body) :: pending) => - stack = pending - add(XML.Elem(markup, body.toList)) - } - } - - - /* parse chunks */ - - for (chunk <- Library.separated_chunks(is_X, source) if chunk.length != 0) { - if (chunk.length == 1 && chunk.charAt(0) == Y) pop() - else { - Library.separated_chunks(is_Y, chunk).toList match { - case ch :: name :: atts if ch.length == 0 => - push(name.toString, atts.map(parse_attrib)) - case txts => for (txt <- txts) add(XML.Text(txt.toString)) - } - } - } - (stack: @unchecked) match { - case List((Markup.Empty, body)) => body.toList - case (Markup(name, _), _) :: _ => err_unbalanced(name) - } - } - - def parse(source: CharSequence): XML.Tree = - parse_body(source) match { - case List(result) => result - case Nil => XML.Text("") - case _ => err("multiple results") - } - - - /* failsafe parsing */ - - private def markup_broken(source: CharSequence) = - XML.Elem(Markup.Broken, List(XML.Text(source.toString))) - - def parse_body_failsafe(source: CharSequence): XML.Body = - { - try { parse_body(source) } - catch { case ERROR(_) => List(markup_broken(source)) } - } - - def parse_failsafe(source: CharSequence): XML.Tree = - { - try { parse(source) } - catch { case ERROR(_) => markup_broken(source) } - } -} diff --git a/core/Pure/Proof/extraction.ML b/core/Pure/Proof/extraction.ML deleted file mode 100644 index 61b5b6d4..00000000 --- a/core/Pure/Proof/extraction.ML +++ 
/dev/null @@ -1,821 +0,0 @@ -(* Title: Pure/Proof/extraction.ML - Author: Stefan Berghofer, TU Muenchen - -Extraction of programs from proofs. -*) - -signature EXTRACTION = -sig - val set_preprocessor : (theory -> Proofterm.proof -> Proofterm.proof) -> theory -> theory - val add_realizes_eqns_i : ((term * term) list * (term * term)) list -> theory -> theory - val add_realizes_eqns : string list -> theory -> theory - val add_typeof_eqns_i : ((term * term) list * (term * term)) list -> theory -> theory - val add_typeof_eqns : string list -> theory -> theory - val add_realizers_i : (string * (string list * term * Proofterm.proof)) list - -> theory -> theory - val add_realizers : (thm * (string list * string * string)) list - -> theory -> theory - val add_expand_thm : bool -> thm -> theory -> theory - val add_types : (xstring * ((term -> term option) list * - (term -> typ -> term -> typ -> term) option)) list -> theory -> theory - val extract : (thm * string list) list -> theory -> theory - val nullT : typ - val nullt : term - val mk_typ : typ -> term - val etype_of : theory -> string list -> typ list -> term -> typ - val realizes_of: theory -> string list -> term -> term -> term - val abs_corr_shyps: theory -> thm -> string list -> term list -> Proofterm.proof -> Proofterm.proof -end; - -structure Extraction : EXTRACTION = -struct - -(**** tools ****) - -val typ = Simple_Syntax.read_typ; - -val add_syntax = - Sign.root_path - #> Sign.add_types_global - [(Binding.make ("Type", @{here}), 0, NoSyn), - (Binding.make ("Null", @{here}), 0, NoSyn)] - #> Sign.add_consts - [(Binding.make ("typeof", @{here}), typ "'b => Type", NoSyn), - (Binding.make ("Type", @{here}), typ "'a itself => Type", NoSyn), - (Binding.make ("Null", @{here}), typ "Null", NoSyn), - (Binding.make ("realizes", @{here}), typ "'a => 'b => 'b", NoSyn)]; - -val nullT = Type ("Null", []); -val nullt = Const ("Null", nullT); - -fun mk_typ T = - Const ("Type", Term.itselfT T --> Type ("Type", [])) $ Logic.mk_type T; - -fun typeof_proc defaultS vs (Const ("typeof", _) $ u) = - SOME (mk_typ (case strip_comb u of - (Var ((a, i), _), _) => - if member (op =) vs a then TFree ("'" ^ a ^ ":" ^ string_of_int i, defaultS) - else nullT - | (Free (a, _), _) => - if member (op =) vs a then TFree ("'" ^ a, defaultS) else nullT - | _ => nullT)) - | typeof_proc _ _ _ = NONE; - -fun rlz_proc (Const ("realizes", Type (_, [Type ("Null", []), _])) $ _ $ t) = SOME t - | rlz_proc (Const ("realizes", Type (_, [T, _])) $ r $ t) = - (case strip_comb t of - (Var (ixn, U), ts) => SOME (list_comb (Var (ixn, T --> U), r :: ts)) - | (Free (s, U), ts) => SOME (list_comb (Free (s, T --> U), r :: ts)) - | _ => NONE) - | rlz_proc _ = NONE; - -val unpack_ixn = apfst implode o apsnd (fst o read_int o tl) o - take_prefix (fn s => s <> ":") o raw_explode; - -type rules = - {next: int, rs: ((term * term) list * (term * term)) list, - net: (int * ((term * term) list * (term * term))) Net.net}; - -val empty_rules : rules = {next = 0, rs = [], net = Net.empty}; - -fun add_rule (r as (_, (lhs, _))) ({next, rs, net} : rules) = - {next = next - 1, rs = r :: rs, net = Net.insert_term (K false) - (Envir.eta_contract lhs, (next, r)) net}; - -fun merge_rules ({next, rs = rs1, net} : rules) ({rs = rs2, ...} : rules) = - fold_rev add_rule (subtract (op =) rs1 rs2) {next = next, rs = rs1, net = net}; - -fun condrew thy rules procs = - let - fun rew tm = - Pattern.rewrite_term thy [] (condrew' :: procs) tm - and condrew' tm = - let - val cache = Unsynchronized.ref ([] : (term * term) 
list); - fun lookup f x = (case AList.lookup (op =) (!cache) x of - NONE => - let val y = f x - in (cache := (x, y) :: !cache; y) end - | SOME y => y); - in - get_first (fn (_, (prems, (tm1, tm2))) => - let - fun ren t = the_default t (Term.rename_abs tm1 tm t); - val inc = Logic.incr_indexes ([], maxidx_of_term tm + 1); - val env as (Tenv, tenv) = Pattern.match thy (inc tm1, tm) (Vartab.empty, Vartab.empty); - val prems' = map (pairself (Envir.subst_term env o inc o ren)) prems; - val env' = Envir.Envir - {maxidx = fold (fn (t, u) => Term.maxidx_term t #> Term.maxidx_term u) prems' ~1, - tenv = tenv, tyenv = Tenv}; - val env'' = fold (Pattern.unify thy o pairself (lookup rew)) prems' env'; - in SOME (Envir.norm_term env'' (inc (ren tm2))) - end handle Pattern.MATCH => NONE | Pattern.Unif => NONE) - (sort (int_ord o pairself fst) - (Net.match_term rules (Envir.eta_contract tm))) - end; - - in rew end; - -val chtype = Proofterm.change_type o SOME; - -fun extr_name s vs = Long_Name.append "extr" (space_implode "_" (s :: vs)); -fun corr_name s vs = extr_name s vs ^ "_correctness"; - -fun msg d s = Output.urgent_message (Pretty.spaces d ^ s); - -fun vars_of t = map Var (rev (Term.add_vars t [])); -fun frees_of t = map Free (rev (Term.add_frees t [])); -fun vfs_of t = vars_of t @ frees_of t; - -val mkabs = fold_rev (fn v => fn t => Abs ("x", fastype_of v, abstract_over (v, t))); - -val mkabsp = fold_rev (fn t => fn prf => AbsP ("H", SOME t, prf)); - -fun strip_abs 0 t = t - | strip_abs n (Abs (_, _, t)) = strip_abs (n-1) t - | strip_abs _ _ = error "strip_abs: not an abstraction"; - -val prf_subst_TVars = Proofterm.map_proof_types o typ_subst_TVars; - -fun relevant_vars types prop = - List.foldr - (fn (Var ((a, _), T), vs) => - (case body_type T of - Type (s, _) => if member (op =) types s then a :: vs else vs - | _ => vs) - | (_, vs) => vs) [] (vars_of prop); - -fun tname_of (Type (s, _)) = s - | tname_of _ = ""; - -fun get_var_type t = - let - val vs = Term.add_vars t []; - val fs = Term.add_frees t []; - in - fn Var (ixn, _) => - (case AList.lookup (op =) vs ixn of - NONE => error "get_var_type: no such variable in term" - | SOME T => Var (ixn, T)) - | Free (s, _) => - (case AList.lookup (op =) fs s of - NONE => error "get_var_type: no such variable in term" - | SOME T => Free (s, T)) - | _ => error "get_var_type: not a variable" - end; - -fun read_term thy T s = - let - val ctxt = Proof_Context.init_global thy - |> Config.put Type_Infer_Context.const_sorts false - |> Proof_Context.set_defsort []; - val parse = if T = propT then Syntax.parse_prop else Syntax.parse_term; - in parse ctxt s |> Type.constraint T |> Syntax.check_term ctxt end; - - -(**** theory data ****) - -(* theory data *) - -structure ExtractionData = Theory_Data -( - type T = - {realizes_eqns : rules, - typeof_eqns : rules, - types : (string * ((term -> term option) list * - (term -> typ -> term -> typ -> term) option)) list, - realizers : (string list * (term * proof)) list Symtab.table, - defs : thm list, - expand : string list, - prep : (theory -> proof -> proof) option} - - val empty = - {realizes_eqns = empty_rules, - typeof_eqns = empty_rules, - types = [], - realizers = Symtab.empty, - defs = [], - expand = [], - prep = NONE}; - val extend = I; - - fun merge - ({realizes_eqns = realizes_eqns1, typeof_eqns = typeof_eqns1, types = types1, - realizers = realizers1, defs = defs1, expand = expand1, prep = prep1}, - {realizes_eqns = realizes_eqns2, typeof_eqns = typeof_eqns2, types = types2, - realizers = realizers2, defs = 
defs2, expand = expand2, prep = prep2}) : T = - {realizes_eqns = merge_rules realizes_eqns1 realizes_eqns2, - typeof_eqns = merge_rules typeof_eqns1 typeof_eqns2, - types = AList.merge (op =) (K true) (types1, types2), - realizers = Symtab.merge_list (eq_set (op =) o pairself #1) (realizers1, realizers2), - defs = Library.merge Thm.eq_thm (defs1, defs2), - expand = Library.merge (op =) (expand1, expand2), - prep = if is_some prep1 then prep1 else prep2}; -); - -fun read_condeq thy = - let val thy' = add_syntax thy - in fn s => - let val t = Logic.varify_global (read_term thy' propT s) - in - (map Logic.dest_equals (Logic.strip_imp_prems t), - Logic.dest_equals (Logic.strip_imp_concl t)) - handle TERM _ => error ("Not a (conditional) meta equality:\n" ^ s) - end - end; - -(** preprocessor **) - -fun set_preprocessor prep thy = - let val {realizes_eqns, typeof_eqns, types, realizers, - defs, expand, ...} = ExtractionData.get thy - in - ExtractionData.put - {realizes_eqns = realizes_eqns, typeof_eqns = typeof_eqns, types = types, - realizers = realizers, defs = defs, expand = expand, prep = SOME prep} thy - end; - -(** equations characterizing realizability **) - -fun gen_add_realizes_eqns prep_eq eqns thy = - let val {realizes_eqns, typeof_eqns, types, realizers, - defs, expand, prep} = ExtractionData.get thy; - in - ExtractionData.put - {realizes_eqns = fold_rev add_rule (map (prep_eq thy) eqns) realizes_eqns, - typeof_eqns = typeof_eqns, types = types, realizers = realizers, - defs = defs, expand = expand, prep = prep} thy - end - -val add_realizes_eqns_i = gen_add_realizes_eqns (K I); -val add_realizes_eqns = gen_add_realizes_eqns read_condeq; - -(** equations characterizing type of extracted program **) - -fun gen_add_typeof_eqns prep_eq eqns thy = - let - val {realizes_eqns, typeof_eqns, types, realizers, - defs, expand, prep} = ExtractionData.get thy; - val eqns' = map (prep_eq thy) eqns - in - ExtractionData.put - {realizes_eqns = realizes_eqns, realizers = realizers, - typeof_eqns = fold_rev add_rule eqns' typeof_eqns, - types = types, defs = defs, expand = expand, prep = prep} thy - end - -val add_typeof_eqns_i = gen_add_typeof_eqns (K I); -val add_typeof_eqns = gen_add_typeof_eqns read_condeq; - -fun thaw (T as TFree (a, S)) = - if exists_string (fn s => s = ":") a then TVar (unpack_ixn a, S) else T - | thaw (Type (a, Ts)) = Type (a, map thaw Ts) - | thaw T = T; - -fun freeze (TVar ((a, i), S)) = TFree (a ^ ":" ^ string_of_int i, S) - | freeze (Type (a, Ts)) = Type (a, map freeze Ts) - | freeze T = T; - -fun freeze_thaw f x = - map_types thaw (f (map_types freeze x)); - -fun etype_of thy vs Ts t = - let - val {typeof_eqns, ...} = ExtractionData.get thy; - fun err () = error ("Unable to determine type of extracted program for\n" ^ - Syntax.string_of_term_global thy t) - in - (case - strip_abs_body - (freeze_thaw (condrew thy (#net typeof_eqns) [typeof_proc [] vs]) - (fold (Term.abs o pair "x") Ts - (Const ("typeof", fastype_of1 (Ts, t) --> Type ("Type", [])) $ t))) of - Const ("Type", _) $ u => (Logic.dest_type u handle TERM _ => err ()) - | _ => err ()) - end; - -(** realizers for axioms / theorems, together with correctness proofs **) - -fun gen_add_realizers prep_rlz rs thy = - let val {realizes_eqns, typeof_eqns, types, realizers, - defs, expand, prep} = ExtractionData.get thy - in - ExtractionData.put - {realizes_eqns = realizes_eqns, typeof_eqns = typeof_eqns, types = types, - realizers = fold (Symtab.cons_list o prep_rlz thy) rs realizers, - defs = defs, expand = expand, prep 
= prep} thy - end - -fun prep_realizer thy = - let - val {realizes_eqns, typeof_eqns, defs, types, ...} = - ExtractionData.get thy; - val procs = maps (fst o snd) types; - val rtypes = map fst types; - val eqns = Net.merge (K false) (#net realizes_eqns, #net typeof_eqns); - val thy' = add_syntax thy; - val rd = Proof_Syntax.read_proof thy' true false; - in fn (thm, (vs, s1, s2)) => - let - val name = Thm.derivation_name thm; - val _ = name <> "" orelse error "add_realizers: unnamed theorem"; - val prop = Thm.unconstrainT thm |> prop_of |> - Pattern.rewrite_term thy' (map (Logic.dest_equals o prop_of) defs) []; - val vars = vars_of prop; - val vars' = filter_out (fn v => - member (op =) rtypes (tname_of (body_type (fastype_of v)))) vars; - val shyps = maps (fn Var ((x, i), _) => - if member (op =) vs x then Logic.mk_of_sort - (TVar (("'" ^ x, i), []), Sign.defaultS thy') - else []) vars; - val T = etype_of thy' vs [] prop; - val (T', thw) = Type.legacy_freeze_thaw_type - (if T = nullT then nullT else map fastype_of vars' ---> T); - val t = map_types thw (read_term thy' T' s1); - val r' = freeze_thaw (condrew thy' eqns - (procs @ [typeof_proc [] vs, rlz_proc])) - (Const ("realizes", T --> propT --> propT) $ - (if T = nullT then t else list_comb (t, vars')) $ prop); - val r = Logic.list_implies (shyps, - fold_rev Logic.all (map (get_var_type r') vars) r'); - val prf = Reconstruct.reconstruct_proof thy' r (rd s2); - in (name, (vs, (t, prf))) end - end; - -val add_realizers_i = gen_add_realizers - (fn _ => fn (name, (vs, t, prf)) => (name, (vs, (t, prf)))); -val add_realizers = gen_add_realizers prep_realizer; - -fun realizes_of thy vs t prop = - let - val thy' = add_syntax thy; - val {realizes_eqns, typeof_eqns, defs, types, ...} = - ExtractionData.get thy'; - val procs = maps (rev o fst o snd) types; - val eqns = Net.merge (K false) (#net realizes_eqns, #net typeof_eqns); - val prop' = Pattern.rewrite_term thy' - (map (Logic.dest_equals o prop_of) defs) [] prop; - in freeze_thaw (condrew thy' eqns - (procs @ [typeof_proc [] vs, rlz_proc])) - (Const ("realizes", fastype_of t --> propT --> propT) $ t $ prop') - end; - -fun abs_corr_shyps thy thm vs xs prf = - let - val S = Sign.defaultS thy; - val ((atyp_map, constraints, _), prop') = - Logic.unconstrainT (#shyps (rep_thm thm)) (prop_of thm); - val atyps = fold_types (fold_atyps (insert (op =))) (prop_of thm) []; - val Ts = map_filter (fn ((v, i), _) => if member (op =) vs v then - SOME (TVar (("'" ^ v, i), [])) else NONE) - (rev (Term.add_vars prop' [])); - val cs = maps (fn T => map (pair T) S) Ts; - val constraints' = map Logic.mk_of_class cs; - fun typ_map T = Type.strip_sorts - (map_atyps (fn U => if member (op =) atyps U then atyp_map U else U) T); - fun mk_hyp (T, c) = Hyp (Logic.mk_of_class (typ_map T, c)); - val xs' = map (map_types typ_map) xs - in - prf |> - Same.commit (Proofterm.map_proof_same (map_types typ_map) typ_map mk_hyp) |> - fold_rev Proofterm.implies_intr_proof' (map snd constraints) |> - fold_rev Proofterm.forall_intr_proof' xs' |> - fold_rev Proofterm.implies_intr_proof' constraints' - end; - -(** expanding theorems / definitions **) - -fun add_expand_thm is_def thm thy = - let - val {realizes_eqns, typeof_eqns, types, realizers, - defs, expand, prep} = ExtractionData.get thy; - - val name = Thm.derivation_name thm; - val _ = name <> "" orelse error "add_expand_thm: unnamed theorem"; - in - thy |> ExtractionData.put - (if is_def then - {realizes_eqns = realizes_eqns, - typeof_eqns = add_rule ([], Logic.dest_equals 
(map_types - Type.strip_sorts (prop_of (Drule.abs_def thm)))) typeof_eqns, - types = types, - realizers = realizers, defs = insert Thm.eq_thm thm defs, - expand = expand, prep = prep} - else - {realizes_eqns = realizes_eqns, typeof_eqns = typeof_eqns, types = types, - realizers = realizers, defs = defs, - expand = insert (op =) name expand, prep = prep}) - end; - -fun extraction_expand is_def = - Thm.declaration_attribute (fn th => Context.mapping (add_expand_thm is_def th) I); - - -(** types with computational content **) - -fun add_types tys thy = - ExtractionData.map - (fn {realizes_eqns, typeof_eqns, types, realizers, defs, expand, prep} => - {realizes_eqns = realizes_eqns, typeof_eqns = typeof_eqns, - types = fold (AList.update (op =) o apfst (Sign.intern_type thy)) tys types, - realizers = realizers, defs = defs, expand = expand, prep = prep}) - thy; - - -(** Pure setup **) - -val _ = Theory.setup - (add_types [("prop", ([], NONE))] #> - - add_typeof_eqns - ["(typeof (PROP P)) == (Type (TYPE(Null))) ==> \ - \ (typeof (PROP Q)) == (Type (TYPE('Q))) ==> \ - \ (typeof (PROP P ==> PROP Q)) == (Type (TYPE('Q)))", - - "(typeof (PROP Q)) == (Type (TYPE(Null))) ==> \ - \ (typeof (PROP P ==> PROP Q)) == (Type (TYPE(Null)))", - - "(typeof (PROP P)) == (Type (TYPE('P))) ==> \ - \ (typeof (PROP Q)) == (Type (TYPE('Q))) ==> \ - \ (typeof (PROP P ==> PROP Q)) == (Type (TYPE('P => 'Q)))", - - "(%x. typeof (PROP P (x))) == (%x. Type (TYPE(Null))) ==> \ - \ (typeof (!!x. PROP P (x))) == (Type (TYPE(Null)))", - - "(%x. typeof (PROP P (x))) == (%x. Type (TYPE('P))) ==> \ - \ (typeof (!!x::'a. PROP P (x))) == (Type (TYPE('a => 'P)))", - - "(%x. typeof (f (x))) == (%x. Type (TYPE('f))) ==> \ - \ (typeof (f)) == (Type (TYPE('f)))"] #> - - add_realizes_eqns - ["(typeof (PROP P)) == (Type (TYPE(Null))) ==> \ - \ (realizes (r) (PROP P ==> PROP Q)) == \ - \ (PROP realizes (Null) (PROP P) ==> PROP realizes (r) (PROP Q))", - - "(typeof (PROP P)) == (Type (TYPE('P))) ==> \ - \ (typeof (PROP Q)) == (Type (TYPE(Null))) ==> \ - \ (realizes (r) (PROP P ==> PROP Q)) == \ - \ (!!x::'P. PROP realizes (x) (PROP P) ==> PROP realizes (Null) (PROP Q))", - - "(realizes (r) (PROP P ==> PROP Q)) == \ - \ (!!x. PROP realizes (x) (PROP P) ==> PROP realizes (r (x)) (PROP Q))", - - "(%x. typeof (PROP P (x))) == (%x. Type (TYPE(Null))) ==> \ - \ (realizes (r) (!!x. PROP P (x))) == \ - \ (!!x. PROP realizes (Null) (PROP P (x)))", - - "(realizes (r) (!!x. PROP P (x))) == \ - \ (!!x. 
PROP realizes (r (x)) (PROP P (x)))"] #> - - Attrib.setup @{binding extraction_expand} (Scan.succeed (extraction_expand false)) - "specify theorems to be expanded during extraction" #> - Attrib.setup @{binding extraction_expand_def} (Scan.succeed (extraction_expand true)) - "specify definitions to be expanded during extraction"); - - -(**** extract program ****) - -val dummyt = Const ("dummy", dummyT); - -fun extract thms thy = - let - val thy' = add_syntax thy; - val {realizes_eqns, typeof_eqns, types, realizers, defs, expand, prep} = - ExtractionData.get thy; - val procs = maps (rev o fst o snd) types; - val rtypes = map fst types; - val typroc = typeof_proc []; - val prep = the_default (K I) prep thy' o ProofRewriteRules.elim_defs thy' false defs o - Reconstruct.expand_proof thy' (map (rpair NONE) ("" :: expand)); - val rrews = Net.merge (K false) (#net realizes_eqns, #net typeof_eqns); - - fun find_inst prop Ts ts vs = - let - val rvs = relevant_vars rtypes prop; - val vars = vars_of prop; - val n = Int.min (length vars, length ts); - - fun add_args (Var ((a, i), _), t) (vs', tye) = - if member (op =) rvs a then - let val T = etype_of thy' vs Ts t - in if T = nullT then (vs', tye) - else (a :: vs', (("'" ^ a, i), T) :: tye) - end - else (vs', tye) - - in fold_rev add_args (take n vars ~~ take n ts) ([], []) end; - - fun mk_shyps tye = maps (fn (ixn, _) => - Logic.mk_of_sort (TVar (ixn, []), Sign.defaultS thy)) tye; - - fun mk_sprfs cs tye = maps (fn (_, T) => - ProofRewriteRules.mk_of_sort_proof thy (map SOME cs) - (T, Sign.defaultS thy)) tye; - - fun find (vs: string list) = Option.map snd o find_first (curry (eq_set (op =)) vs o fst); - fun find' (s: string) = map_filter (fn (s', x) => if s = s' then SOME x else NONE); - - fun app_rlz_rews Ts vs t = - strip_abs (length Ts) - (freeze_thaw (condrew thy' rrews (procs @ [typroc vs, rlz_proc])) - (fold (Term.abs o pair "x") Ts t)); - - fun realizes_null vs prop = app_rlz_rews [] vs - (Const ("realizes", nullT --> propT --> propT) $ nullt $ prop); - - fun corr d vs ts Ts hs cs _ (PBound i) _ defs = (PBound i, defs) - - | corr d vs ts Ts hs cs t (Abst (s, SOME T, prf)) (Abst (_, _, prf')) defs = - let val (corr_prf, defs') = corr d vs [] (T :: Ts) - (dummyt :: hs) cs (case t of SOME (Abs (_, _, u)) => SOME u | _ => NONE) - prf (Proofterm.incr_pboundvars 1 0 prf') defs - in (Abst (s, SOME T, corr_prf), defs') end - - | corr d vs ts Ts hs cs t (AbsP (s, SOME prop, prf)) (AbsP (_, _, prf')) defs = - let - val T = etype_of thy' vs Ts prop; - val u = if T = nullT then - (case t of SOME u => SOME (incr_boundvars 1 u) | NONE => NONE) - else (case t of SOME (Abs (_, _, u)) => SOME u | _ => NONE); - val (corr_prf, defs') = - corr d vs [] (T :: Ts) (prop :: hs) - (prop :: cs) u (Proofterm.incr_pboundvars 0 1 prf) - (Proofterm.incr_pboundvars 0 1 prf') defs; - val rlz = Const ("realizes", T --> propT --> propT) - in ( - if T = nullT then AbsP ("R", - SOME (app_rlz_rews Ts vs (rlz $ nullt $ prop)), - Proofterm.prf_subst_bounds [nullt] corr_prf) - else Abst (s, SOME T, AbsP ("R", - SOME (app_rlz_rews (T :: Ts) vs - (rlz $ Bound 0 $ incr_boundvars 1 prop)), corr_prf)), defs') - end - - | corr d vs ts Ts hs cs t' (prf % SOME t) (prf' % _) defs = - let - val (Us, T) = strip_type (fastype_of1 (Ts, t)); - val (corr_prf, defs') = corr d vs (t :: ts) Ts hs cs - (if member (op =) rtypes (tname_of T) then t' - else (case t' of SOME (u $ _) => SOME u | _ => NONE)) - prf prf' defs; - val u = if not (member (op =) rtypes (tname_of T)) then t else - let - val eT = 
etype_of thy' vs Ts t; - val (r, Us') = if eT = nullT then (nullt, Us) else - (Bound (length Us), eT :: Us); - val u = list_comb (incr_boundvars (length Us') t, - map Bound (length Us - 1 downto 0)); - val u' = (case AList.lookup (op =) types (tname_of T) of - SOME ((_, SOME f)) => f r eT u T - | _ => Const ("realizes", eT --> T --> T) $ r $ u) - in app_rlz_rews Ts vs (fold_rev (Term.abs o pair "x") Us' u') end - in (corr_prf % SOME u, defs') end - - | corr d vs ts Ts hs cs t (prf1 %% prf2) (prf1' %% prf2') defs = - let - val prop = Reconstruct.prop_of' hs prf2'; - val T = etype_of thy' vs Ts prop; - val (f, u, defs1) = if T = nullT then (t, NONE, defs) else - (case t of - SOME (f $ u) => (SOME f, SOME u, defs) - | _ => - let val (u, defs1) = extr d vs [] Ts hs prf2' defs - in (NONE, SOME u, defs1) end) - val ((corr_prf1, corr_prf2), defs2) = - defs1 - |> corr d vs [] Ts hs cs f prf1 prf1' - ||>> corr d vs [] Ts hs cs u prf2 prf2'; - in - if T = nullT then (corr_prf1 %% corr_prf2, defs2) else - (corr_prf1 % u %% corr_prf2, defs2) - end - - | corr d vs ts Ts hs cs _ (prf0 as PThm (_, ((name, prop, SOME Ts'), body))) _ defs = - let - val prf = Proofterm.join_proof body; - val (vs', tye) = find_inst prop Ts ts vs; - val shyps = mk_shyps tye; - val sprfs = mk_sprfs cs tye; - val tye' = (map fst (Term.add_tvars prop [] |> rev) ~~ Ts') @ tye; - val T = etype_of thy' vs' [] prop; - val defs' = if T = nullT then defs - else snd (extr d vs ts Ts hs prf0 defs) - in - if T = nullT andalso realizes_null vs' prop aconv prop then (prf0, defs) - else (case Symtab.lookup realizers name of - NONE => (case find vs' (find' name defs') of - NONE => - let - val _ = T = nullT orelse error "corr: internal error"; - val _ = msg d ("Building correctness proof for " ^ quote name ^ - (if null vs' then "" - else " (relevant variables: " ^ commas_quote vs' ^ ")")); - val prf' = prep (Reconstruct.reconstruct_proof thy' prop prf); - val (corr_prf0, defs'') = corr (d + 1) vs' [] [] [] - (rev shyps) NONE prf' prf' defs'; - val corr_prf = mkabsp shyps corr_prf0; - val corr_prop = Reconstruct.prop_of corr_prf; - val corr_prf' = - Proofterm.proof_combP (Proofterm.proof_combt - (PThm (serial (), - ((corr_name name vs', corr_prop, SOME (map TVar (Term.add_tvars corr_prop [] |> rev))), - Future.value (Proofterm.approximate_proof_body corr_prf))), - vfs_of corr_prop), - map PBound (length shyps - 1 downto 0)) |> - fold_rev Proofterm.forall_intr_proof' - (map (get_var_type corr_prop) (vfs_of prop)) |> - mkabsp shyps - in - (Proofterm.proof_combP (prf_subst_TVars tye' corr_prf', sprfs), - (name, (vs', ((nullt, nullt), (corr_prf, corr_prf')))) :: defs'') - end - | SOME (_, (_, prf')) => - (Proofterm.proof_combP (prf_subst_TVars tye' prf', sprfs), defs')) - | SOME rs => (case find vs' rs of - SOME (_, prf') => (Proofterm.proof_combP (prf_subst_TVars tye' prf', sprfs), defs') - | NONE => error ("corr: no realizer for instance of theorem " ^ - quote name ^ ":\n" ^ Syntax.string_of_term_global thy' (Envir.beta_norm - (Reconstruct.prop_of (Proofterm.proof_combt (prf0, ts))))))) - end - - | corr d vs ts Ts hs cs _ (prf0 as PAxm (s, prop, SOME Ts')) _ defs = - let - val (vs', tye) = find_inst prop Ts ts vs; - val tye' = (map fst (Term.add_tvars prop [] |> rev) ~~ Ts') @ tye - in - if etype_of thy' vs' [] prop = nullT andalso - realizes_null vs' prop aconv prop then (prf0, defs) - else case find vs' (Symtab.lookup_list realizers s) of - SOME (_, prf) => (Proofterm.proof_combP (prf_subst_TVars tye' prf, mk_sprfs cs tye), - defs) - | NONE => 
error ("corr: no realizer for instance of axiom " ^ - quote s ^ ":\n" ^ Syntax.string_of_term_global thy' (Envir.beta_norm - (Reconstruct.prop_of (Proofterm.proof_combt (prf0, ts))))) - end - - | corr d vs ts Ts hs _ _ _ _ defs = error "corr: bad proof" - - and extr d vs ts Ts hs (PBound i) defs = (Bound i, defs) - - | extr d vs ts Ts hs (Abst (s, SOME T, prf)) defs = - let val (t, defs') = extr d vs [] - (T :: Ts) (dummyt :: hs) (Proofterm.incr_pboundvars 1 0 prf) defs - in (Abs (s, T, t), defs') end - - | extr d vs ts Ts hs (AbsP (s, SOME t, prf)) defs = - let - val T = etype_of thy' vs Ts t; - val (t, defs') = - extr d vs [] (T :: Ts) (t :: hs) (Proofterm.incr_pboundvars 0 1 prf) defs - in - (if T = nullT then subst_bound (nullt, t) else Abs (s, T, t), defs') - end - - | extr d vs ts Ts hs (prf % SOME t) defs = - let val (u, defs') = extr d vs (t :: ts) Ts hs prf defs - in (if member (op =) rtypes (tname_of (body_type (fastype_of1 (Ts, t)))) then u - else u $ t, defs') - end - - | extr d vs ts Ts hs (prf1 %% prf2) defs = - let - val (f, defs') = extr d vs [] Ts hs prf1 defs; - val prop = Reconstruct.prop_of' hs prf2; - val T = etype_of thy' vs Ts prop - in - if T = nullT then (f, defs') else - let val (t, defs'') = extr d vs [] Ts hs prf2 defs' - in (f $ t, defs'') end - end - - | extr d vs ts Ts hs (prf0 as PThm (_, ((s, prop, SOME Ts'), body))) defs = - let - val prf = Proofterm.join_proof body; - val (vs', tye) = find_inst prop Ts ts vs; - val shyps = mk_shyps tye; - val tye' = (map fst (Term.add_tvars prop [] |> rev) ~~ Ts') @ tye - in - case Symtab.lookup realizers s of - NONE => (case find vs' (find' s defs) of - NONE => - let - val _ = msg d ("Extracting " ^ quote s ^ - (if null vs' then "" - else " (relevant variables: " ^ commas_quote vs' ^ ")")); - val prf' = prep (Reconstruct.reconstruct_proof thy' prop prf); - val (t, defs') = extr (d + 1) vs' [] [] [] prf' defs; - val (corr_prf, defs'') = corr (d + 1) vs' [] [] [] - (rev shyps) (SOME t) prf' prf' defs'; - - val nt = Envir.beta_norm t; - val args = filter_out (fn v => member (op =) rtypes - (tname_of (body_type (fastype_of v)))) (vfs_of prop); - val args' = filter (fn v => Logic.occs (v, nt)) args; - val t' = mkabs args' nt; - val T = fastype_of t'; - val cname = extr_name s vs'; - val c = Const (cname, T); - val u = mkabs args (list_comb (c, args')); - val eqn = Logic.mk_equals (c, t'); - val rlz = - Const ("realizes", fastype_of nt --> propT --> propT); - val lhs = app_rlz_rews [] vs' (rlz $ nt $ prop); - val rhs = app_rlz_rews [] vs' (rlz $ list_comb (c, args') $ prop); - val f = app_rlz_rews [] vs' - (Abs ("x", T, rlz $ list_comb (Bound 0, args') $ prop)); - - val corr_prf' = mkabsp shyps - (chtype [] Proofterm.equal_elim_axm %> lhs %> rhs %% - (chtype [propT] Proofterm.symmetric_axm %> rhs %> lhs %% - (chtype [T, propT] Proofterm.combination_axm %> f %> f %> c %> t' %% - (chtype [T --> propT] Proofterm.reflexive_axm %> f) %% - PAxm (Thm.def_name cname, eqn, - SOME (map TVar (Term.add_tvars eqn [] |> rev))))) %% corr_prf); - val corr_prop = Reconstruct.prop_of corr_prf'; - val corr_prf'' = - Proofterm.proof_combP (Proofterm.proof_combt - (PThm (serial (), - ((corr_name s vs', corr_prop, SOME (map TVar (Term.add_tvars corr_prop [] |> rev))), - Future.value (Proofterm.approximate_proof_body corr_prf'))), - vfs_of corr_prop), - map PBound (length shyps - 1 downto 0)) |> - fold_rev Proofterm.forall_intr_proof' - (map (get_var_type corr_prop) (vfs_of prop)) |> - mkabsp shyps - in - (subst_TVars tye' u, - (s, (vs', ((t', u), 
(corr_prf', corr_prf'')))) :: defs'') - end - | SOME ((_, u), _) => (subst_TVars tye' u, defs)) - | SOME rs => (case find vs' rs of - SOME (t, _) => (subst_TVars tye' t, defs) - | NONE => error ("extr: no realizer for instance of theorem " ^ - quote s ^ ":\n" ^ Syntax.string_of_term_global thy' (Envir.beta_norm - (Reconstruct.prop_of (Proofterm.proof_combt (prf0, ts)))))) - end - - | extr d vs ts Ts hs (prf0 as PAxm (s, prop, SOME Ts')) defs = - let - val (vs', tye) = find_inst prop Ts ts vs; - val tye' = (map fst (Term.add_tvars prop [] |> rev) ~~ Ts') @ tye - in - case find vs' (Symtab.lookup_list realizers s) of - SOME (t, _) => (subst_TVars tye' t, defs) - | NONE => error ("extr: no realizer for instance of axiom " ^ - quote s ^ ":\n" ^ Syntax.string_of_term_global thy' (Envir.beta_norm - (Reconstruct.prop_of (Proofterm.proof_combt (prf0, ts))))) - end - - | extr d vs ts Ts hs _ defs = error "extr: bad proof"; - - fun prep_thm (thm, vs) = - let - val thy = Thm.theory_of_thm thm; - val prop = Thm.prop_of thm; - val prf = Thm.proof_of thm; - val name = Thm.derivation_name thm; - val _ = name <> "" orelse error "extraction: unnamed theorem"; - val _ = etype_of thy' vs [] prop <> nullT orelse error ("theorem " ^ - quote name ^ " has no computational content") - in (Reconstruct.reconstruct_proof thy prop prf, vs) end; - - val defs = - fold (fn (prf, vs) => snd o extr 0 vs [] [] [] prf) - (map prep_thm thms) []; - - fun add_def (s, (vs, ((t, u), (prf, _)))) thy = - (case Sign.const_type thy (extr_name s vs) of - NONE => - let - val corr_prop = Reconstruct.prop_of prf; - val ft = Type.legacy_freeze t; - val fu = Type.legacy_freeze u; - val (def_thms, thy') = if t = nullt then ([], thy) else - thy - |> Sign.add_consts [(Binding.qualified_name (extr_name s vs), fastype_of ft, NoSyn)] - |> Global_Theory.add_defs false - [((Binding.qualified_name (Thm.def_name (extr_name s vs)), - Logic.mk_equals (head_of (strip_abs_body fu), ft)), [])] - in - thy' - |> Global_Theory.store_thm (Binding.qualified_name (corr_name s vs), - Thm.varifyT_global (funpow (length (vars_of corr_prop)) - (Thm.forall_elim_var 0) (Thm.forall_intr_frees - (Proof_Checker.thm_of_proof thy' - (fst (Proofterm.freeze_thaw_prf prf)))))) - |> snd - |> fold Code.add_default_eqn def_thms - end - | SOME _ => thy); - - in - thy - |> Sign.root_path - |> fold_rev add_def defs - |> Sign.restore_naming thy - end; - -val etype_of = etype_of o add_syntax; - -end; diff --git a/core/Pure/Proof/proof_checker.ML b/core/Pure/Proof/proof_checker.ML deleted file mode 100644 index 761ff40c..00000000 --- a/core/Pure/Proof/proof_checker.ML +++ /dev/null @@ -1,145 +0,0 @@ -(* Title: Pure/Proof/proof_checker.ML - Author: Stefan Berghofer, TU Muenchen - -Simple proof checker based only on the core inference rules -of Isabelle/Pure. 
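For orientation, a minimal usage sketch with placeholder names that are not part of this file: given some theorem some_thm whose full proof term is available,

    val prf  = Reconstruct.proof_of some_thm;
    val thm' = Proof_Checker.thm_of_proof (Thm.theory_of_thm some_thm) prf;

replays that proof term through the primitive inference rules and returns the corresponding thm.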
-*) - -signature PROOF_CHECKER = -sig - val thm_of_proof : theory -> Proofterm.proof -> thm -end; - -structure Proof_Checker : PROOF_CHECKER = -struct - -(***** construct a theorem out of a proof term *****) - -fun lookup_thm thy = - let val tab = fold_rev Symtab.update (Global_Theory.all_thms_of thy true) Symtab.empty in - fn s => - (case Symtab.lookup tab s of - NONE => error ("Unknown theorem " ^ quote s) - | SOME thm => thm) - end; - -val beta_eta_convert = - Conv.fconv_rule Drule.beta_eta_conversion; - -(* equality modulo renaming of type variables *) -fun is_equal t t' = - let - val atoms = fold_types (fold_atyps (insert (op =))) t []; - val atoms' = fold_types (fold_atyps (insert (op =))) t' [] - in - length atoms = length atoms' andalso - map_types (map_atyps (the o AList.lookup (op =) (atoms ~~ atoms'))) t aconv t' - end; - -fun pretty_prf thy vs Hs prf = - let val prf' = prf |> Proofterm.prf_subst_bounds (map Free vs) |> - Proofterm.prf_subst_pbounds (map (Hyp o prop_of) Hs) - in - (Proof_Syntax.pretty_proof (Syntax.init_pretty_global thy) prf', - Syntax.pretty_term_global thy (Reconstruct.prop_of prf')) - end; - -fun pretty_term thy vs _ t = - let val t' = subst_bounds (map Free vs, t) - in - (Syntax.pretty_term_global thy t', - Syntax.pretty_typ_global thy (fastype_of t')) - end; - -fun appl_error thy prt vs Hs s f a = - let - val (pp_f, pp_fT) = pretty_prf thy vs Hs f; - val (pp_a, pp_aT) = prt thy vs Hs a - in - error (cat_lines - [s, - "", - Pretty.string_of (Pretty.block - [Pretty.str "Operator:", Pretty.brk 2, pp_f, - Pretty.str " ::", Pretty.brk 1, pp_fT]), - Pretty.string_of (Pretty.block - [Pretty.str "Operand:", Pretty.brk 3, pp_a, - Pretty.str " ::", Pretty.brk 1, pp_aT]), - ""]) - end; - -fun thm_of_proof thy = - let val lookup = lookup_thm thy in - fn prf => - let - val prf_names = Proofterm.fold_proof_terms Term.declare_term_frees (K I) prf Name.context; - - fun thm_of_atom thm Ts = - let - val tvars = Term.add_tvars (Thm.full_prop_of thm) [] |> rev; - val (fmap, thm') = Thm.varifyT_global' [] thm; - val ctye = map (pairself (Thm.ctyp_of thy)) - (map TVar tvars @ map (fn ((_, S), ixn) => TVar (ixn, S)) fmap ~~ Ts) - in - Thm.instantiate (ctye, []) (forall_intr_vars (Thm.forall_intr_frees thm')) - end; - - fun thm_of _ _ (PThm (_, ((name, prop', SOME Ts), _))) = - let - val thm = Thm.unconstrainT (Drule.implies_intr_hyps (lookup name)); - val prop = Thm.prop_of thm; - val _ = - if is_equal prop prop' then () - else - error ("Duplicate use of theorem name " ^ quote name ^ "\n" ^ - Syntax.string_of_term_global thy prop ^ "\n\n" ^ - Syntax.string_of_term_global thy prop'); - in thm_of_atom thm Ts end - - | thm_of _ _ (PAxm (name, _, SOME Ts)) = - thm_of_atom (Thm.axiom thy name) Ts - - | thm_of _ Hs (PBound i) = nth Hs i - - | thm_of (vs, names) Hs (Abst (s, SOME T, prf)) = - let - val (x, names') = Name.variant s names; - val thm = thm_of ((x, T) :: vs, names') Hs prf - in - Thm.forall_intr (Thm.cterm_of thy (Free (x, T))) thm - end - - | thm_of (vs, names) Hs (prf % SOME t) = - let - val thm = thm_of (vs, names) Hs prf; - val ct = Thm.cterm_of thy (Term.subst_bounds (map Free vs, t)); - in - Thm.forall_elim ct thm - handle THM (s, _, _) => appl_error thy pretty_term vs Hs s prf t - end - - | thm_of (vs, names) Hs (AbsP (s, SOME t, prf)) = - let - val ct = Thm.cterm_of thy (Term.subst_bounds (map Free vs, t)); - val thm = thm_of (vs, names) (Thm.assume ct :: Hs) prf; - in - Thm.implies_intr ct thm - end - - | thm_of vars Hs (prf %% prf') = - let - val thm = 
beta_eta_convert (thm_of vars Hs prf); - val thm' = beta_eta_convert (thm_of vars Hs prf'); - in - Thm.implies_elim thm thm' - handle THM (s, _, _) => appl_error thy pretty_prf (fst vars) Hs s prf prf' - end - - | thm_of _ _ (Hyp t) = Thm.assume (Thm.cterm_of thy t) - - | thm_of _ _ _ = error "thm_of_proof: partial proof term"; - - in beta_eta_convert (thm_of ([], prf_names) [] prf) end - end; - -end; diff --git a/core/Pure/Proof/proof_rewrite_rules.ML b/core/Pure/Proof/proof_rewrite_rules.ML deleted file mode 100644 index 41534eef..00000000 --- a/core/Pure/Proof/proof_rewrite_rules.ML +++ /dev/null @@ -1,371 +0,0 @@ -(* Title: Pure/Proof/proof_rewrite_rules.ML - Author: Stefan Berghofer, TU Muenchen - -Simplification functions for proof terms involving meta level rules. -*) - -signature PROOF_REWRITE_RULES = -sig - val rew : bool -> typ list -> term option list -> Proofterm.proof -> (Proofterm.proof * Proofterm.proof) option - val rprocs : bool -> - (typ list -> term option list -> Proofterm.proof -> (Proofterm.proof * Proofterm.proof) option) list - val rewrite_terms : (term -> term) -> Proofterm.proof -> Proofterm.proof - val elim_defs : theory -> bool -> thm list -> Proofterm.proof -> Proofterm.proof - val elim_vars : (typ -> term) -> Proofterm.proof -> Proofterm.proof - val hhf_proof : term -> term -> Proofterm.proof -> Proofterm.proof - val un_hhf_proof : term -> term -> Proofterm.proof -> Proofterm.proof - val mk_of_sort_proof : theory -> term option list -> typ * sort -> Proofterm.proof list - val expand_of_class : theory -> typ list -> term option list -> Proofterm.proof -> - (Proofterm.proof * Proofterm.proof) option -end; - -structure ProofRewriteRules : PROOF_REWRITE_RULES = -struct - -fun rew b _ _ = - let - fun ?? x = if b then SOME x else NONE; - fun ax (prf as PAxm (s, prop, _)) Ts = - if b then PAxm (s, prop, SOME Ts) else prf; - fun ty T = if b then - let val Type (_, [Type (_, [U, _]), _]) = T - in SOME U end - else NONE; - val equal_intr_axm = ax Proofterm.equal_intr_axm []; - val equal_elim_axm = ax Proofterm.equal_elim_axm []; - val symmetric_axm = ax Proofterm.symmetric_axm [propT]; - - fun rew' (PThm (_, (("Pure.protectD", _, _), _)) % _ %% - (PThm (_, (("Pure.protectI", _, _), _)) % _ %% prf)) = SOME prf - | rew' (PThm (_, (("Pure.conjunctionD1", _, _), _)) % _ % _ %% - (PThm (_, (("Pure.conjunctionI", _, _), _)) % _ % _ %% prf %% _)) = SOME prf - | rew' (PThm (_, (("Pure.conjunctionD2", _, _), _)) % _ % _ %% - (PThm (_, (("Pure.conjunctionI", _, _), _)) % _ % _ %% _ %% prf)) = SOME prf - | rew' (PAxm ("Pure.equal_elim", _, _) % _ % _ %% - (PAxm ("Pure.equal_intr", _, _) % _ % _ %% prf %% _)) = SOME prf - | rew' (PAxm ("Pure.symmetric", _, _) % _ % _ %% - (PAxm ("Pure.equal_intr", _, _) % A % B %% prf1 %% prf2)) = - SOME (equal_intr_axm % B % A %% prf2 %% prf1) - - | rew' (PAxm ("Pure.equal_elim", _, _) % SOME (_ $ A) % SOME (_ $ B) %% - (PAxm ("Pure.combination", _, _) % SOME (Const ("Pure.prop", _)) % - _ % _ % _ %% (PAxm ("Pure.reflexive", _, _) % _) %% prf1) %% - ((tg as PThm (_, (("Pure.protectI", _, _), _))) % _ %% prf2)) = - SOME (tg %> B %% (equal_elim_axm %> A %> B %% prf1 %% prf2)) - - | rew' (PAxm ("Pure.equal_elim", _, _) % SOME (_ $ A) % SOME (_ $ B) %% - (PAxm ("Pure.symmetric", _, _) % _ % _ %% - (PAxm ("Pure.combination", _, _) % SOME (Const ("Pure.prop", _)) % - _ % _ % _ %% (PAxm ("Pure.reflexive", _, _) % _) %% prf1)) %% - ((tg as PThm (_, (("Pure.protectI", _, _), _))) % _ %% prf2)) = - SOME (tg %> B %% (equal_elim_axm %> A %> B %% - 
(symmetric_axm % ?? B % ?? A %% prf1) %% prf2)) - - | rew' (PAxm ("Pure.equal_elim", _, _) % SOME X % SOME Y %% - (PAxm ("Pure.combination", _, _) % _ % _ % _ % _ %% - (PAxm ("Pure.combination", _, _) % SOME (Const ("Pure.imp", _)) % _ % _ % _ %% - (PAxm ("Pure.reflexive", _, _) % _) %% prf1) %% prf2)) = - let - val _ $ A $ C = Envir.beta_norm X; - val _ $ B $ D = Envir.beta_norm Y - in SOME (AbsP ("H1", ?? X, AbsP ("H2", ?? B, - Proofterm.equal_elim_axm %> C %> D %% Proofterm.incr_pboundvars 2 0 prf2 %% - (PBound 1 %% (equal_elim_axm %> B %> A %% - (Proofterm.symmetric_axm % ?? A % ?? B %% Proofterm.incr_pboundvars 2 0 prf1) %% - PBound 0))))) - end - - | rew' (PAxm ("Pure.equal_elim", _, _) % SOME X % SOME Y %% - (PAxm ("Pure.symmetric", _, _) % _ % _ %% - (PAxm ("Pure.combination", _, _) % _ % _ % _ % _ %% - (PAxm ("Pure.combination", _, _) % SOME (Const ("Pure.imp", _)) % _ % _ % _ %% - (PAxm ("Pure.reflexive", _, _) % _) %% prf1) %% prf2))) = - let - val _ $ A $ C = Envir.beta_norm Y; - val _ $ B $ D = Envir.beta_norm X - in SOME (AbsP ("H1", ?? X, AbsP ("H2", ?? A, - equal_elim_axm %> D %> C %% - (symmetric_axm % ?? C % ?? D %% Proofterm.incr_pboundvars 2 0 prf2) %% - (PBound 1 %% - (equal_elim_axm %> A %> B %% Proofterm.incr_pboundvars 2 0 prf1 %% PBound 0))))) - end - - | rew' (PAxm ("Pure.equal_elim", _, _) % SOME X % SOME Y %% - (PAxm ("Pure.combination", _, _) % SOME (Const ("Pure.all", _)) % _ % _ % _ %% - (PAxm ("Pure.reflexive", _, _) % _) %% - (PAxm ("Pure.abstract_rule", _, _) % _ % _ %% prf))) = - let - val Const (_, T) $ P = Envir.beta_norm X; - val _ $ Q = Envir.beta_norm Y; - in SOME (AbsP ("H", ?? X, Abst ("x", ty T, - equal_elim_axm %> incr_boundvars 1 P $ Bound 0 %> incr_boundvars 1 Q $ Bound 0 %% - (Proofterm.incr_pboundvars 1 1 prf %> Bound 0) %% (PBound 0 %> Bound 0)))) - end - - | rew' (PAxm ("Pure.equal_elim", _, _) % SOME X % SOME Y %% - (PAxm ("Pure.symmetric", _, _) % _ % _ %% - (PAxm ("Pure.combination", _, _) % SOME (Const ("Pure.all", _)) % _ % _ % _ %% - (PAxm ("Pure.reflexive", _, _) % _) %% - (PAxm ("Pure.abstract_rule", _, _) % _ % _ %% prf)))) = - let - val Const (_, T) $ P = Envir.beta_norm X; - val _ $ Q = Envir.beta_norm Y; - val t = incr_boundvars 1 P $ Bound 0; - val u = incr_boundvars 1 Q $ Bound 0 - in SOME (AbsP ("H", ?? X, Abst ("x", ty T, - equal_elim_axm %> t %> u %% - (symmetric_axm % ?? u % ?? t %% (Proofterm.incr_pboundvars 1 1 prf %> Bound 0)) - %% (PBound 0 %> Bound 0)))) - end - - | rew' (PAxm ("Pure.equal_elim", _, _) % SOME A % SOME C %% - (PAxm ("Pure.transitive", _, _) % _ % SOME B % _ %% prf1 %% prf2) %% prf3) = - SOME (equal_elim_axm %> B %> C %% prf2 %% - (equal_elim_axm %> A %> B %% prf1 %% prf3)) - | rew' (PAxm ("Pure.equal_elim", _, _) % SOME A % SOME C %% - (PAxm ("Pure.symmetric", _, _) % _ % _ %% - (PAxm ("Pure.transitive", _, _) % _ % SOME B % _ %% prf1 %% prf2)) %% prf3) = - SOME (equal_elim_axm %> B %> C %% (symmetric_axm % ?? C % ?? B %% prf1) %% - (equal_elim_axm %> A %> B %% (symmetric_axm % ?? B % ?? 
A %% prf2) %% prf3)) - - | rew' (PAxm ("Pure.equal_elim", _, _) % _ % _ %% - (PAxm ("Pure.reflexive", _, _) % _) %% prf) = SOME prf - | rew' (PAxm ("Pure.equal_elim", _, _) % _ % _ %% - (PAxm ("Pure.symmetric", _, _) % _ % _ %% - (PAxm ("Pure.reflexive", _, _) % _)) %% prf) = SOME prf - - | rew' (PAxm ("Pure.symmetric", _, _) % _ % _ %% - (PAxm ("Pure.symmetric", _, _) % _ % _ %% prf)) = SOME prf - - | rew' (PAxm ("Pure.equal_elim", _, _) % _ % _ %% - (PAxm ("Pure.equal_elim", _, _) % SOME (_ $ A $ C) % SOME (_ $ B $ D) %% - (PAxm ("Pure.combination", _, _) % _ % _ % _ % _ %% - (PAxm ("Pure.combination", _, _) % SOME (Const ("Pure.eq", _)) % _ % _ % _ %% - (PAxm ("Pure.reflexive", _, _) % _) %% prf1) %% prf2) %% prf3) %% prf4) = - SOME (equal_elim_axm %> C %> D %% prf2 %% - (equal_elim_axm %> A %> C %% prf3 %% - (equal_elim_axm %> B %> A %% (symmetric_axm % ?? A % ?? B %% prf1) %% prf4))) - - | rew' (PAxm ("Pure.equal_elim", _, _) % _ % _ %% - (PAxm ("Pure.symmetric", _, _) % _ % _ %% - (PAxm ("Pure.equal_elim", _, _) % SOME (_ $ A $ C) % SOME (_ $ B $ D) %% - (PAxm ("Pure.combination", _, _) % _ % _ % _ % _ %% - (PAxm ("Pure.combination", _, _) % SOME (Const ("Pure.eq", _)) % _ % _ % _ %% - (PAxm ("Pure.reflexive", _, _) % _) %% prf1) %% prf2) %% prf3)) %% prf4) = - SOME (equal_elim_axm %> A %> B %% prf1 %% - (equal_elim_axm %> C %> A %% (symmetric_axm % ?? A % ?? C %% prf3) %% - (equal_elim_axm %> D %> C %% (symmetric_axm % ?? C % ?? D %% prf2) %% prf4))) - - | rew' (PAxm ("Pure.equal_elim", _, _) % _ % _ %% - (PAxm ("Pure.equal_elim", _, _) % SOME (_ $ B $ D) % SOME (_ $ A $ C) %% - (PAxm ("Pure.symmetric", _, _) % _ % _ %% - (PAxm ("Pure.combination", _, _) % _ % _ % _ % _ %% - (PAxm ("Pure.combination", _, _) % SOME (Const ("Pure.eq", _)) % _ % _ % _ %% - (PAxm ("Pure.reflexive", _, _) % _) %% prf1) %% prf2)) %% prf3) %% prf4) = - SOME (equal_elim_axm %> D %> C %% (symmetric_axm % ?? C % ?? D %% prf2) %% - (equal_elim_axm %> B %> D %% prf3 %% - (equal_elim_axm %> A %> B %% prf1 %% prf4))) - - | rew' (PAxm ("Pure.equal_elim", _, _) % _ % _ %% - (PAxm ("Pure.symmetric", _, _) % _ % _ %% - (PAxm ("Pure.equal_elim", _, _) % SOME (_ $ B $ D) % SOME (_ $ A $ C) %% - (PAxm ("Pure.symmetric", _, _) % _ % _ %% - (PAxm ("Pure.combination", _, _) % _ % _ % _ % _ %% - (PAxm ("Pure.combination", _, _) % SOME (Const ("Pure.eq", _)) % _ % _ % _ %% - (PAxm ("Pure.reflexive", _, _) % _) %% prf1) %% prf2)) %% prf3)) %% prf4) = - SOME (equal_elim_axm %> B %> A %% (symmetric_axm % ?? A % ?? B %% prf1) %% - (equal_elim_axm %> D %> B %% (symmetric_axm % ?? B % ?? D %% prf3) %% - (equal_elim_axm %> C %> D %% prf2 %% prf4))) - - | rew' ((prf as PAxm ("Pure.combination", _, _) % - SOME ((eq as Const ("Pure.eq", T)) $ t) % _ % _ % _) %% - (PAxm ("Pure.reflexive", _, _) % _)) = - let val (U, V) = (case T of - Type (_, [U, V]) => (U, V) | _ => (dummyT, dummyT)) - in SOME (prf %% (ax Proofterm.combination_axm [U, V] %> eq % ?? eq % ?? t % ?? t %% - (ax Proofterm.reflexive_axm [T] % ?? eq) %% (ax Proofterm.reflexive_axm [U] % ?? 
t))) - end - - | rew' _ = NONE; - in rew' #> Option.map (rpair Proofterm.no_skel) end; - -fun rprocs b = [rew b]; -val _ = Theory.setup (fold Proofterm.add_prf_rproc (rprocs false)); - - -(**** apply rewriting function to all terms in proof ****) - -fun rewrite_terms r = - let - fun rew_term Ts t = - let - val frees = - map Free (Name.invent (Term.declare_term_frees t Name.context) "xa" (length Ts) ~~ Ts); - val t' = r (subst_bounds (frees, t)); - fun strip [] t = t - | strip (_ :: xs) (Abs (_, _, t)) = strip xs t; - in - strip Ts (fold lambda frees t') - end; - - fun rew Ts (prf1 %% prf2) = rew Ts prf1 %% rew Ts prf2 - | rew Ts (prf % SOME t) = rew Ts prf % SOME (rew_term Ts t) - | rew Ts (Abst (s, SOME T, prf)) = Abst (s, SOME T, rew (T :: Ts) prf) - | rew Ts (AbsP (s, SOME t, prf)) = AbsP (s, SOME (rew_term Ts t), rew Ts prf) - | rew _ prf = prf - - in rew [] end; - - -(**** eliminate definitions in proof ****) - -fun vars_of t = rev (fold_aterms (fn v as Var _ => insert (op =) v | _ => I) t []); - -fun insert_refl defs Ts (prf1 %% prf2) = - let val (prf1', b) = insert_refl defs Ts prf1 - in - if b then (prf1', true) - else (prf1' %% fst (insert_refl defs Ts prf2), false) - end - | insert_refl defs Ts (Abst (s, SOME T, prf)) = - (Abst (s, SOME T, fst (insert_refl defs (T :: Ts) prf)), false) - | insert_refl defs Ts (AbsP (s, t, prf)) = - (AbsP (s, t, fst (insert_refl defs Ts prf)), false) - | insert_refl defs Ts prf = - (case Proofterm.strip_combt prf of - (PThm (_, ((s, prop, SOME Ts), _)), ts) => - if member (op =) defs s then - let - val vs = vars_of prop; - val tvars = Term.add_tvars prop [] |> rev; - val (_, rhs) = Logic.dest_equals (Logic.strip_imp_concl prop); - val rhs' = Term.betapplys (subst_TVars (map fst tvars ~~ Ts) - (fold_rev (fn x => fn b => Abs ("", dummyT, abstract_over (x, b))) vs rhs), - map the ts); - in - (Proofterm.change_type (SOME [fastype_of1 (Ts, rhs')]) - Proofterm.reflexive_axm %> rhs', true) - end - else (prf, false) - | (_, []) => (prf, false) - | (prf', ts) => (Proofterm.proof_combt' (fst (insert_refl defs Ts prf'), ts), false)); - -fun elim_defs thy r defs prf = - let - val defs' = map (Logic.dest_equals o - map_types Type.strip_sorts o prop_of o Drule.abs_def) defs; - val defnames = map Thm.derivation_name defs; - val f = if not r then I else - let - val cnames = map (fst o dest_Const o fst) defs'; - val thms = Proofterm.fold_proof_atoms true - (fn PThm (_, ((name, prop, _), _)) => - if member (op =) defnames name orelse - not (exists_Const (member (op =) cnames o #1) prop) - then I - else cons (name, SOME prop) - | _ => I) [prf] []; - in Reconstruct.expand_proof thy thms end; - in - rewrite_terms (Pattern.rewrite_term thy defs' []) - (fst (insert_refl defnames [] (f prf))) - end; - - -(**** eliminate all variables that don't occur in the proposition ****) - -fun elim_vars mk_default prf = - let - val prop = Reconstruct.prop_of prf; - val tv = Term.add_vars prop []; - val tf = Term.add_frees prop []; - - fun hidden_variable (Var v) = not (member (op =) tv v) - | hidden_variable (Free f) = not (member (op =) tf f) - | hidden_variable _ = false; - - fun mk_default' T = - fold_rev (Term.abs o pair "x") (binder_types T) (mk_default (body_type T)); - - fun elim_varst (t $ u) = elim_varst t $ elim_varst u - | elim_varst (Abs (s, T, t)) = Abs (s, T, elim_varst t) - | elim_varst (t as Free (x, T)) = if member (op =) tf (x, T) then t else mk_default' T - | elim_varst (t as Var (xi, T)) = if member (op =) tv (xi, T) then t else mk_default' T - | elim_varst t = t; 
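(* elim_varst rebuilds a term, replacing each Free or Var that does not occur in the proof's proposition by a default value of the corresponding type (mk_default' eta-expands mk_default over the binder types); the wrapper below applies it only to terms that actually contain such hidden variables. *)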
- in - Proofterm.map_proof_terms (fn t => - if Term.exists_subterm hidden_variable t then Envir.beta_norm (elim_varst t) else t) I prf - end; - - -(**** convert between hhf and non-hhf form ****) - -fun hhf_proof P Q prf = - let - val params = Logic.strip_params Q; - val Hs = Logic.strip_assums_hyp P; - val Hs' = Logic.strip_assums_hyp Q; - val k = length Hs; - val l = length params; - fun mk_prf i j Hs Hs' (Const ("Pure.all", _) $ Abs (_, _, P)) prf = - mk_prf i (j - 1) Hs Hs' P (prf %> Bound j) - | mk_prf i j (H :: Hs) (H' :: Hs') (Const ("Pure.imp", _) $ _ $ P) prf = - mk_prf (i - 1) j Hs Hs' P (prf %% un_hhf_proof H' H (PBound i)) - | mk_prf _ _ _ _ _ prf = prf - in - prf |> Proofterm.incr_pboundvars k l |> mk_prf (k - 1) (l - 1) Hs Hs' P |> - fold_rev (fn P => fn prf => AbsP ("H", SOME P, prf)) Hs' |> - fold_rev (fn (s, T) => fn prf => Abst (s, SOME T, prf)) params - end -and un_hhf_proof P Q prf = - let - val params = Logic.strip_params Q; - val Hs = Logic.strip_assums_hyp P; - val Hs' = Logic.strip_assums_hyp Q; - val k = length Hs; - val l = length params; - fun mk_prf (Const ("Pure.all", _) $ Abs (s, T, P)) prf = - Abst (s, SOME T, mk_prf P prf) - | mk_prf (Const ("Pure.imp", _) $ P $ Q) prf = - AbsP ("H", SOME P, mk_prf Q prf) - | mk_prf _ prf = prf - in - prf |> Proofterm.incr_pboundvars k l |> - fold (fn i => fn prf => prf %> Bound i) (l - 1 downto 0) |> - fold (fn ((H, H'), i) => fn prf => prf %% hhf_proof H' H (PBound i)) - (Hs ~~ Hs' ~~ (k - 1 downto 0)) |> - mk_prf Q - end; - - -(**** expand OfClass proofs ****) - -fun mk_of_sort_proof thy hs (T, S) = - let - val hs' = map - (fn SOME t => (SOME (Logic.dest_of_class t) handle TERM _ => NONE) - | NONE => NONE) hs; - val sorts = AList.coalesce (op =) (rev (map_filter I hs')); - fun get_sort T = the_default [] (AList.lookup (op =) sorts T); - val subst = map_atyps - (fn T as TVar (ixn, _) => TVar (ixn, get_sort T) - | T as TFree (s, _) => TFree (s, get_sort T)); - fun hyp T_c = case find_index (equal (SOME T_c)) hs' of - ~1 => error "expand_of_class: missing class hypothesis" - | i => PBound i; - fun reconstruct prf prop = prf |> - Reconstruct.reconstruct_proof thy prop |> - Reconstruct.expand_proof thy [("", NONE)] |> - Same.commit (Proofterm.map_proof_same Same.same Same.same hyp) - in - map2 reconstruct - (Proofterm.of_sort_proof thy (OfClass o apfst Type.strip_sorts) (subst T, S)) - (Logic.mk_of_sort (T, S)) - end; - -fun expand_of_class thy Ts hs (OfClass (T, c)) = - mk_of_sort_proof thy hs (T, [c]) |> - hd |> rpair Proofterm.no_skel |> SOME - | expand_of_class thy Ts hs _ = NONE; - -end; diff --git a/core/Pure/Proof/proof_syntax.ML b/core/Pure/Proof/proof_syntax.ML deleted file mode 100644 index 3cd2c6c4..00000000 --- a/core/Pure/Proof/proof_syntax.ML +++ /dev/null @@ -1,258 +0,0 @@ -(* Title: Pure/Proof/proof_syntax.ML - Author: Stefan Berghofer, TU Muenchen - -Function for parsing and printing proof terms. 
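As a rough orientation, with placeholder identifiers: add_proof_syntax extends a theory with a term-level encoding of proof terms (constants such as Appt, AppP, Abst, AbsP and Hyp of type proof), so that proofs can be parsed and printed like ordinary terms. Printing the full proof of a theorem thm in a context ctxt would then be something like

    Proof_Syntax.pretty_proof_of ctxt true thm

while read_proof maps the concrete proof syntax back to a Proofterm.proof.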
-*) - -signature PROOF_SYNTAX = -sig - val proofT: typ - val add_proof_syntax: theory -> theory - val proof_of_term: theory -> bool -> term -> Proofterm.proof - val term_of_proof: Proofterm.proof -> term - val cterm_of_proof: theory -> Proofterm.proof -> cterm * (cterm -> Proofterm.proof) - val read_term: theory -> bool -> typ -> string -> term - val read_proof: theory -> bool -> bool -> string -> Proofterm.proof - val proof_syntax: Proofterm.proof -> theory -> theory - val proof_of: bool -> thm -> Proofterm.proof - val pretty_proof: Proof.context -> Proofterm.proof -> Pretty.T - val pretty_proof_of: Proof.context -> bool -> thm -> Pretty.T -end; - -structure Proof_Syntax : PROOF_SYNTAX = -struct - -(**** add special syntax for embedding proof terms ****) - -val proofT = Type ("proof", []); -val paramT = Type ("param", []); -val paramsT = Type ("params", []); -val idtT = Type ("idt", []); -val aT = TFree (Name.aT, []); - -(** constants for theorems and axioms **) - -fun add_proof_atom_consts names thy = - thy - |> Sign.root_path - |> Sign.add_consts (map (fn name => (Binding.qualified_name name, proofT, NoSyn)) names); - -(** constants for application and abstraction **) - -fun add_proof_syntax thy = - thy - |> Sign.root_path - |> Sign.set_defsort [] - |> Sign.add_types_global - [(Binding.make ("proof", @{here}), 0, NoSyn)] - |> fold (snd oo Sign.declare_const_global) - [((Binding.make ("Appt", @{here}), [proofT, aT] ---> proofT), Mixfix ("(1_ %/ _)", [4, 5], 4)), - ((Binding.make ("AppP", @{here}), [proofT, proofT] ---> proofT), Mixfix ("(1_ %%/ _)", [4, 5], 4)), - ((Binding.make ("Abst", @{here}), (aT --> proofT) --> proofT), NoSyn), - ((Binding.make ("AbsP", @{here}), [propT, proofT --> proofT] ---> proofT), NoSyn), - ((Binding.make ("Hyp", @{here}), propT --> proofT), NoSyn), - ((Binding.make ("Oracle", @{here}), propT --> proofT), NoSyn), - ((Binding.make ("OfClass", @{here}), (Term.a_itselfT --> propT) --> proofT), NoSyn), - ((Binding.make ("MinProof", @{here}), proofT), Delimfix "?")] - |> Sign.add_nonterminals_global - [Binding.make ("param", @{here}), - Binding.make ("params", @{here})] - |> Sign.add_syntax Syntax.mode_default - [("_Lam", [paramsT, proofT] ---> proofT, Mixfix ("(1Lam _./ _)", [0, 3], 3)), - ("_Lam0", [paramT, paramsT] ---> paramsT, Mixfix ("_/ _", [1, 0], 0)), - ("_Lam0", [idtT, paramsT] ---> paramsT, Mixfix ("_/ _", [1, 0], 0)), - ("_Lam1", [idtT, propT] ---> paramT, Mixfix ("_: _", [0, 0], 0)), - ("", paramT --> paramT, Delimfix "'(_')"), - ("", idtT --> paramsT, Delimfix "_"), - ("", paramT --> paramsT, Delimfix "_")] - |> Sign.add_syntax (Symbol.xsymbolsN, true) - [("_Lam", [paramsT, proofT] ---> proofT, Mixfix ("(1\<^bold>\_./ _)", [0, 3], 3)), - (Lexicon.mark_const "Appt", [proofT, aT] ---> proofT, Mixfix ("(1_ \/ _)", [4, 5], 4)), - (Lexicon.mark_const "AppP", [proofT, proofT] ---> proofT, Mixfix ("(1_ \/ _)", [4, 5], 4))] - |> Sign.add_trrules (map Syntax.Parse_Print_Rule - [(Ast.mk_appl (Ast.Constant "_Lam") - [Ast.mk_appl (Ast.Constant "_Lam0") - [Ast.Variable "l", Ast.Variable "m"], Ast.Variable "A"], - Ast.mk_appl (Ast.Constant "_Lam") - [Ast.Variable "l", - Ast.mk_appl (Ast.Constant "_Lam") [Ast.Variable "m", Ast.Variable "A"]]), - (Ast.mk_appl (Ast.Constant "_Lam") - [Ast.mk_appl (Ast.Constant "_Lam1") - [Ast.Variable "x", Ast.Variable "A"], Ast.Variable "B"], - Ast.mk_appl (Ast.Constant (Lexicon.mark_const "AbsP")) [Ast.Variable "A", - (Ast.mk_appl (Ast.Constant "_abs") [Ast.Variable "x", Ast.Variable "B"])]), - (Ast.mk_appl (Ast.Constant "_Lam") 
[Ast.Variable "x", Ast.Variable "A"], - Ast.mk_appl (Ast.Constant (Lexicon.mark_const "Abst")) - [(Ast.mk_appl (Ast.Constant "_abs") [Ast.Variable "x", Ast.Variable "A"])])]); - - -(**** translation between proof terms and pure terms ****) - -fun proof_of_term thy ty = - let - val thms = Global_Theory.all_thms_of thy true; - val axms = Theory.all_axioms_of thy; - - fun mk_term t = (if ty then I else map_types (K dummyT)) - (Term.no_dummy_patterns t); - - fun prf_of [] (Bound i) = PBound i - | prf_of Ts (Const (s, Type ("proof", _))) = - Proofterm.change_type (if ty then SOME Ts else NONE) - (case Long_Name.explode s of - "axm" :: xs => - let - val name = Long_Name.implode xs; - val prop = (case AList.lookup (op =) axms name of - SOME prop => prop - | NONE => error ("Unknown axiom " ^ quote name)) - in PAxm (name, prop, NONE) end - | "thm" :: xs => - let val name = Long_Name.implode xs; - in (case AList.lookup (op =) thms name of - SOME thm => - fst (Proofterm.strip_combt (fst (Proofterm.strip_combP (Thm.proof_of thm)))) - | NONE => error ("Unknown theorem " ^ quote name)) - end - | _ => error ("Illegal proof constant name: " ^ quote s)) - | prf_of Ts (Const ("OfClass", _) $ Const (c_class, _)) = - (case try Logic.class_of_const c_class of - SOME c => - Proofterm.change_type (if ty then SOME Ts else NONE) - (OfClass (TVar ((Name.aT, 0), []), c)) - | NONE => error ("Bad class constant: " ^ quote c_class)) - | prf_of Ts (Const ("Hyp", _) $ prop) = Hyp prop - | prf_of Ts (v as Var ((_, Type ("proof", _)))) = Hyp v - | prf_of [] (Const ("Abst", _) $ Abs (s, T, prf)) = - if T = proofT then - error ("Term variable abstraction may not bind proof variable " ^ quote s) - else Abst (s, if ty then SOME T else NONE, - Proofterm.incr_pboundvars (~1) 0 (prf_of [] prf)) - | prf_of [] (Const ("AbsP", _) $ t $ Abs (s, _, prf)) = - AbsP (s, case t of - Const ("Pure.dummy_pattern", _) => NONE - | _ $ Const ("Pure.dummy_pattern", _) => NONE - | _ => SOME (mk_term t), - Proofterm.incr_pboundvars 0 (~1) (prf_of [] prf)) - | prf_of [] (Const ("AppP", _) $ prf1 $ prf2) = - prf_of [] prf1 %% prf_of [] prf2 - | prf_of Ts (Const ("Appt", _) $ prf $ Const ("Pure.type", Type (_, [T]))) = - prf_of (T::Ts) prf - | prf_of [] (Const ("Appt", _) $ prf $ t) = prf_of [] prf % - (case t of Const ("Pure.dummy_pattern", _) => NONE | _ => SOME (mk_term t)) - | prf_of _ t = error ("Not a proof term:\n" ^ - Syntax.string_of_term_global thy t) - - in prf_of [] end; - - -val AbsPt = Const ("AbsP", [propT, proofT --> proofT] ---> proofT); -val AppPt = Const ("AppP", [proofT, proofT] ---> proofT); -val Hypt = Const ("Hyp", propT --> proofT); -val Oraclet = Const ("Oracle", propT --> proofT); -val OfClasst = Const ("OfClass", (Term.itselfT dummyT --> propT) --> proofT); -val MinProoft = Const ("MinProof", proofT); - -val mk_tyapp = fold (fn T => fn prf => Const ("Appt", - [proofT, Term.itselfT T] ---> proofT) $ prf $ Logic.mk_type T); - -fun term_of _ (PThm (_, ((name, _, NONE), _))) = - Const (Long_Name.append "thm" name, proofT) - | term_of _ (PThm (_, ((name, _, SOME Ts), _))) = - mk_tyapp Ts (Const (Long_Name.append "thm" name, proofT)) - | term_of _ (PAxm (name, _, NONE)) = Const (Long_Name.append "axm" name, proofT) - | term_of _ (PAxm (name, _, SOME Ts)) = - mk_tyapp Ts (Const (Long_Name.append "axm" name, proofT)) - | term_of _ (OfClass (T, c)) = - mk_tyapp [T] (OfClasst $ Const (Logic.const_of_class c, Term.itselfT dummyT --> propT)) - | term_of _ (PBound i) = Bound i - | term_of Ts (Abst (s, opT, prf)) = - let val T = 
the_default dummyT opT - in Const ("Abst", (T --> proofT) --> proofT) $ - Abs (s, T, term_of (T::Ts) (Proofterm.incr_pboundvars 1 0 prf)) - end - | term_of Ts (AbsP (s, t, prf)) = - AbsPt $ the_default Term.dummy_prop t $ - Abs (s, proofT, term_of (proofT::Ts) (Proofterm.incr_pboundvars 0 1 prf)) - | term_of Ts (prf1 %% prf2) = - AppPt $ term_of Ts prf1 $ term_of Ts prf2 - | term_of Ts (prf % opt) = - let val t = the_default Term.dummy opt - in Const ("Appt", - [proofT, fastype_of1 (Ts, t) handle TERM _ => dummyT] ---> proofT) $ - term_of Ts prf $ t - end - | term_of Ts (Hyp t) = Hypt $ t - | term_of Ts (Oracle (_, t, _)) = Oraclet $ t - | term_of Ts MinProof = MinProoft; - -val term_of_proof = term_of []; - -fun cterm_of_proof thy prf = - let - val thm_names = map fst (Global_Theory.all_thms_of thy true); - val axm_names = map fst (Theory.all_axioms_of thy); - val thy' = thy - |> add_proof_syntax - |> add_proof_atom_consts - (map (Long_Name.append "axm") axm_names @ map (Long_Name.append "thm") thm_names); - in - (cterm_of thy' (term_of_proof prf), proof_of_term thy true o Thm.term_of) - end; - -fun read_term thy topsort = - let - val thm_names = filter_out (fn s => s = "") (map fst (Global_Theory.all_thms_of thy true)); - val axm_names = map fst (Theory.all_axioms_of thy); - val ctxt = thy - |> add_proof_syntax - |> add_proof_atom_consts - (map (Long_Name.append "axm") axm_names @ map (Long_Name.append "thm") thm_names) - |> Proof_Context.init_global - |> Proof_Context.allow_dummies - |> Proof_Context.set_mode Proof_Context.mode_schematic - |> topsort ? (Config.put Type_Infer_Context.const_sorts false #> Proof_Context.set_defsort []); - in - fn ty => fn s => - (if ty = propT then Syntax.parse_prop else Syntax.parse_term) ctxt s - |> Type.constraint ty |> Syntax.check_term ctxt - end; - -fun read_proof thy topsort = - let val rd = read_term thy topsort proofT - in fn ty => fn s => proof_of_term thy ty (Logic.varify_global (rd s)) end; - -fun proof_syntax prf = - let - val thm_names = Symtab.keys (Proofterm.fold_proof_atoms true - (fn PThm (_, ((name, _, _), _)) => if name <> "" then Symtab.update (name, ()) else I - | _ => I) [prf] Symtab.empty); - val axm_names = Symtab.keys (Proofterm.fold_proof_atoms true - (fn PAxm (name, _, _) => Symtab.update (name, ()) | _ => I) [prf] Symtab.empty); - in - add_proof_syntax #> - add_proof_atom_consts - (map (Long_Name.append "thm") thm_names @ map (Long_Name.append "axm") axm_names) - end; - -fun proof_of full thm = - let - val thy = Thm.theory_of_thm thm; - val prop = Thm.full_prop_of thm; - val prf = Thm.proof_of thm; - val prf' = - (case Proofterm.strip_combt (fst (Proofterm.strip_combP prf)) of - (PThm (_, ((_, prop', _), body)), _) => - if prop = prop' then Proofterm.join_proof body else prf - | _ => prf) - in if full then Reconstruct.reconstruct_proof thy prop prf' else prf' end; - -fun pretty_proof ctxt prf = - Proof_Context.pretty_term_abbrev - (Proof_Context.transfer (proof_syntax prf (Proof_Context.theory_of ctxt)) ctxt) - (term_of_proof prf); - -fun pretty_proof_of ctxt full th = - pretty_proof ctxt (proof_of full th); - -end; diff --git a/core/Pure/Proof/reconstruct.ML b/core/Pure/Proof/reconstruct.ML deleted file mode 100644 index bef30ad2..00000000 --- a/core/Pure/Proof/reconstruct.ML +++ /dev/null @@ -1,384 +0,0 @@ -(* Title: Pure/Proof/reconstruct.ML - Author: Stefan Berghofer, TU Muenchen - -Reconstruction of partial proof terms. 
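Proof terms recorded by the kernel may omit types and intermediate terms; reconstruct_proof fills these in by collecting unification constraints and solving them. A typical invocation, mirroring proof_of below (the theorem thm is a placeholder), is

    Reconstruct.reconstruct_proof (Thm.theory_of_thm thm) (Thm.prop_of thm)
      (Thm.proof_of thm)

and prop_of recovers the proposition proved by an already reconstructed proof term.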
-*) - -signature RECONSTRUCT = -sig - val quiet_mode : bool Unsynchronized.ref - val reconstruct_proof : theory -> term -> Proofterm.proof -> Proofterm.proof - val prop_of' : term list -> Proofterm.proof -> term - val prop_of : Proofterm.proof -> term - val proof_of : thm -> Proofterm.proof - val expand_proof : theory -> (string * term option) list -> - Proofterm.proof -> Proofterm.proof -end; - -structure Reconstruct : RECONSTRUCT = -struct - -val quiet_mode = Unsynchronized.ref true; -fun message s = if !quiet_mode then () else writeln s; - -fun vars_of t = map Var (rev (Term.add_vars t [])); -fun frees_of t = map Free (rev (Term.add_frees t [])); - -fun forall_intr_vfs prop = fold_rev Logic.all - (vars_of prop @ frees_of prop) prop; - -fun forall_intr_vfs_prf prop prf = fold_rev Proofterm.forall_intr_proof' - (vars_of prop @ frees_of prop) prf; - - -(**** generate constraints for proof term ****) - -fun mk_var env Ts T = - let val (env', v) = Envir.genvar "a" (env, rev Ts ---> T) - in (list_comb (v, map Bound (length Ts - 1 downto 0)), env') end; - -fun mk_tvar S (Envir.Envir {maxidx, tenv, tyenv}) = - (TVar (("'t", maxidx + 1), S), - Envir.Envir {maxidx = maxidx + 1, tenv = tenv, tyenv = tyenv}); - -val mk_abs = fold (fn T => fn u => Abs ("", T, u)); - -fun unifyT thy env T U = - let - val Envir.Envir {maxidx, tenv, tyenv} = env; - val (tyenv', maxidx') = Sign.typ_unify thy (T, U) (tyenv, maxidx); - in Envir.Envir {maxidx = maxidx', tenv = tenv, tyenv = tyenv'} end; - -fun chaseT env (T as TVar v) = - (case Type.lookup (Envir.type_env env) v of - NONE => T - | SOME T' => chaseT env T') - | chaseT _ T = T; - -fun infer_type thy (env as Envir.Envir {maxidx, tenv, tyenv}) Ts vTs - (t as Const (s, T)) = if T = dummyT then - (case Sign.const_type thy s of - NONE => error ("reconstruct_proof: No such constant: " ^ quote s) - | SOME T => - let val T' = Type.strip_sorts (Logic.incr_tvar (maxidx + 1) T) - in (Const (s, T'), T', vTs, - Envir.Envir {maxidx = maxidx + 1, tenv = tenv, tyenv = tyenv}) - end) - else (t, T, vTs, env) - | infer_type thy env Ts vTs (t as Free (s, T)) = - if T = dummyT then (case Symtab.lookup vTs s of - NONE => - let val (T, env') = mk_tvar [] env - in (Free (s, T), T, Symtab.update_new (s, T) vTs, env') end - | SOME T => (Free (s, T), T, vTs, env)) - else (t, T, vTs, env) - | infer_type thy env Ts vTs (Var _) = error "reconstruct_proof: internal error" - | infer_type thy env Ts vTs (Abs (s, T, t)) = - let - val (T', env') = if T = dummyT then mk_tvar [] env else (T, env); - val (t', U, vTs', env'') = infer_type thy env' (T' :: Ts) vTs t - in (Abs (s, T', t'), T' --> U, vTs', env'') end - | infer_type thy env Ts vTs (t $ u) = - let - val (t', T, vTs1, env1) = infer_type thy env Ts vTs t; - val (u', U, vTs2, env2) = infer_type thy env1 Ts vTs1 u; - in (case chaseT env2 T of - Type ("fun", [U', V]) => (t' $ u', V, vTs2, unifyT thy env2 U U') - | _ => - let val (V, env3) = mk_tvar [] env2 - in (t' $ u', V, vTs2, unifyT thy env3 T (U --> V)) end) - end - | infer_type thy env Ts vTs (t as Bound i) = ((t, nth Ts i, vTs, env) - handle General.Subscript => error ("infer_type: bad variable index " ^ string_of_int i)); - -fun cantunify thy (t, u) = error ("Non-unifiable terms:\n" ^ - Syntax.string_of_term_global thy t ^ "\n\n" ^ Syntax.string_of_term_global thy u); - -fun decompose thy Ts (p as (t, u)) env = - let - fun rigrig (a, T) (b, U) uT ts us = - if a <> b then cantunify thy p - else apfst flat (fold_map (decompose thy Ts) (ts ~~ us) (uT env T U)) - in - case pairself 
(strip_comb o Envir.head_norm env) p of - ((Const c, ts), (Const d, us)) => rigrig c d (unifyT thy) ts us - | ((Free c, ts), (Free d, us)) => rigrig c d (unifyT thy) ts us - | ((Bound i, ts), (Bound j, us)) => - rigrig (i, dummyT) (j, dummyT) (K o K) ts us - | ((Abs (_, T, t), []), (Abs (_, U, u), [])) => - decompose thy (T::Ts) (t, u) (unifyT thy env T U) - | ((Abs (_, T, t), []), _) => - decompose thy (T::Ts) (t, incr_boundvars 1 u $ Bound 0) env - | (_, (Abs (_, T, u), [])) => - decompose thy (T::Ts) (incr_boundvars 1 t $ Bound 0, u) env - | _ => ([(mk_abs Ts t, mk_abs Ts u)], env) - end; - -fun make_constraints_cprf thy env cprf = - let - fun add_cnstrt Ts prop prf cs env vTs (t, u) = - let - val t' = mk_abs Ts t; - val u' = mk_abs Ts u - in - (prop, prf, cs, Pattern.unify thy (t', u') env, vTs) - handle Pattern.Pattern => - let val (cs', env') = decompose thy [] (t', u') env - in (prop, prf, cs @ cs', env', vTs) end - | Pattern.Unif => - cantunify thy (Envir.norm_term env t', Envir.norm_term env u') - end; - - fun mk_cnstrts_atom env vTs prop opTs prf = - let - val tvars = Term.add_tvars prop [] |> rev; - val tfrees = Term.add_tfrees prop [] |> rev; - val (Ts, env') = - (case opTs of - NONE => fold_map mk_tvar (map snd tvars @ map snd tfrees) env - | SOME Ts => (Ts, env)); - val prop' = subst_atomic_types (map TVar tvars @ map TFree tfrees ~~ Ts) - (forall_intr_vfs prop) handle ListPair.UnequalLengths => - error ("Wrong number of type arguments for " ^ quote (Proofterm.guess_name prf)) - in (prop', Proofterm.change_type (SOME Ts) prf, [], env', vTs) end; - - fun head_norm (prop, prf, cnstrts, env, vTs) = - (Envir.head_norm env prop, prf, cnstrts, env, vTs); - - fun mk_cnstrts env _ Hs vTs (PBound i) = ((nth Hs i, PBound i, [], env, vTs) - handle General.Subscript => error ("mk_cnstrts: bad variable index " ^ string_of_int i)) - | mk_cnstrts env Ts Hs vTs (Abst (s, opT, cprf)) = - let - val (T, env') = - (case opT of - NONE => mk_tvar [] env - | SOME T => (T, env)); - val (t, prf, cnstrts, env'', vTs') = - mk_cnstrts env' (T::Ts) (map (incr_boundvars 1) Hs) vTs cprf; - in (Const ("Pure.all", (T --> propT) --> propT) $ Abs (s, T, t), Abst (s, SOME T, prf), - cnstrts, env'', vTs') - end - | mk_cnstrts env Ts Hs vTs (AbsP (s, SOME t, cprf)) = - let - val (t', _, vTs', env') = infer_type thy env Ts vTs t; - val (u, prf, cnstrts, env'', vTs'') = mk_cnstrts env' Ts (t'::Hs) vTs' cprf; - in (Logic.mk_implies (t', u), AbsP (s, SOME t', prf), cnstrts, env'', vTs'') - end - | mk_cnstrts env Ts Hs vTs (AbsP (s, NONE, cprf)) = - let - val (t, env') = mk_var env Ts propT; - val (u, prf, cnstrts, env'', vTs') = mk_cnstrts env' Ts (t::Hs) vTs cprf; - in (Logic.mk_implies (t, u), AbsP (s, SOME t, prf), cnstrts, env'', vTs') - end - | mk_cnstrts env Ts Hs vTs (cprf1 %% cprf2) = - let val (u, prf2, cnstrts, env', vTs') = mk_cnstrts env Ts Hs vTs cprf2 - in (case head_norm (mk_cnstrts env' Ts Hs vTs' cprf1) of - (Const ("Pure.imp", _) $ u' $ t', prf1, cnstrts', env'', vTs'') => - add_cnstrt Ts t' (prf1 %% prf2) (cnstrts' @ cnstrts) - env'' vTs'' (u, u') - | (t, prf1, cnstrts', env'', vTs'') => - let val (v, env''') = mk_var env'' Ts propT - in add_cnstrt Ts v (prf1 %% prf2) (cnstrts' @ cnstrts) - env''' vTs'' (t, Logic.mk_implies (u, v)) - end) - end - | mk_cnstrts env Ts Hs vTs (cprf % SOME t) = - let val (t', U, vTs1, env1) = infer_type thy env Ts vTs t - in (case head_norm (mk_cnstrts env1 Ts Hs vTs1 cprf) of - (Const ("Pure.all", Type ("fun", [Type ("fun", [T, _]), _])) $ f, - prf, cnstrts, env2, 
vTs2) => - let val env3 = unifyT thy env2 T U - in (betapply (f, t'), prf % SOME t', cnstrts, env3, vTs2) - end - | (u, prf, cnstrts, env2, vTs2) => - let val (v, env3) = mk_var env2 Ts (U --> propT); - in - add_cnstrt Ts (v $ t') (prf % SOME t') cnstrts env3 vTs2 - (u, Const ("Pure.all", (U --> propT) --> propT) $ v) - end) - end - | mk_cnstrts env Ts Hs vTs (cprf % NONE) = - (case head_norm (mk_cnstrts env Ts Hs vTs cprf) of - (Const ("Pure.all", Type ("fun", [Type ("fun", [T, _]), _])) $ f, - prf, cnstrts, env', vTs') => - let val (t, env'') = mk_var env' Ts T - in (betapply (f, t), prf % SOME t, cnstrts, env'', vTs') - end - | (u, prf, cnstrts, env', vTs') => - let - val (T, env1) = mk_tvar [] env'; - val (v, env2) = mk_var env1 Ts (T --> propT); - val (t, env3) = mk_var env2 Ts T - in - add_cnstrt Ts (v $ t) (prf % SOME t) cnstrts env3 vTs' - (u, Const ("Pure.all", (T --> propT) --> propT) $ v) - end) - | mk_cnstrts env _ _ vTs (prf as PThm (_, ((_, prop, opTs), _))) = - mk_cnstrts_atom env vTs prop opTs prf - | mk_cnstrts env _ _ vTs (prf as PAxm (_, prop, opTs)) = - mk_cnstrts_atom env vTs prop opTs prf - | mk_cnstrts env _ _ vTs (prf as OfClass (T, c)) = - mk_cnstrts_atom env vTs (Logic.mk_of_class (T, c)) NONE prf - | mk_cnstrts env _ _ vTs (prf as Oracle (_, prop, opTs)) = - mk_cnstrts_atom env vTs prop opTs prf - | mk_cnstrts env _ _ vTs (Hyp t) = (t, Hyp t, [], env, vTs) - | mk_cnstrts _ _ _ _ _ = error "reconstruct_proof: minimal proof object" - in mk_cnstrts env [] [] Symtab.empty cprf end; - - -(**** update list of free variables of constraints ****) - -fun upd_constrs env cs = - let - val tenv = Envir.term_env env; - val tyenv = Envir.type_env env; - val dom = [] - |> Vartab.fold (cons o #1) tenv - |> Vartab.fold (cons o #1) tyenv; - val vran = [] - |> Vartab.fold (Term.add_var_names o #2 o #2) tenv - |> Vartab.fold (Term.add_tvar_namesT o #2 o #2) tyenv; - fun check_cs [] = [] - | check_cs ((u, p, vs) :: ps) = - let val vs' = subtract (op =) dom vs in - if vs = vs' then (u, p, vs) :: check_cs ps - else (true, p, fold (insert op =) vs' vran) :: check_cs ps - end; - in check_cs cs end; - - -(**** solution of constraints ****) - -fun solve _ [] bigenv = bigenv - | solve thy cs bigenv = - let - fun search env [] = error ("Unsolvable constraints:\n" ^ - Pretty.string_of (Pretty.chunks (map (fn (_, p, _) => - Goal_Display.pretty_flexpair (Syntax.init_pretty_global thy) (pairself - (Envir.norm_term bigenv) p)) cs))) - | search env ((u, p as (t1, t2), vs)::ps) = - if u then - let - val tn1 = Envir.norm_term bigenv t1; - val tn2 = Envir.norm_term bigenv t2 - in - if Pattern.pattern tn1 andalso Pattern.pattern tn2 then - (Pattern.unify thy (tn1, tn2) env, ps) handle Pattern.Unif => - cantunify thy (tn1, tn2) - else - let val (cs', env') = decompose thy [] (tn1, tn2) env - in if cs' = [(tn1, tn2)] then - apsnd (cons (false, (tn1, tn2), vs)) (search env ps) - else search env' (map (fn q => (true, q, vs)) cs' @ ps) - end - end - else apsnd (cons (false, p, vs)) (search env ps); - val Envir.Envir {maxidx, ...} = bigenv; - val (env, cs') = search (Envir.empty maxidx) cs; - in - solve thy (upd_constrs env cs') (Envir.merge (bigenv, env)) - end; - - -(**** reconstruction of proofs ****) - -fun reconstruct_proof thy prop cprf = - let - val (cprf' % SOME prop', thawf) = Proofterm.freeze_thaw_prf (cprf % SOME prop); - val _ = message "Collecting constraints..."; - val (t, prf, cs, env, _) = make_constraints_cprf thy - (Envir.empty (Proofterm.maxidx_proof cprf ~1)) cprf'; - val cs' = - map 
(pairself (Envir.norm_term env)) ((t, prop') :: cs) - |> map (fn p => (true, p, Term.add_var_names (#1 p) (Term.add_var_names (#2 p) []))); - val _ = message ("Solving remaining constraints (" ^ string_of_int (length cs') ^ ") ..."); - val env' = solve thy cs' env - in - thawf (Proofterm.norm_proof env' prf) - end; - -fun prop_of_atom prop Ts = subst_atomic_types - (map TVar (Term.add_tvars prop [] |> rev) @ map TFree (Term.add_tfrees prop [] |> rev) ~~ Ts) - (forall_intr_vfs prop); - -val head_norm = Envir.head_norm (Envir.empty 0); - -fun prop_of0 Hs (PBound i) = nth Hs i - | prop_of0 Hs (Abst (s, SOME T, prf)) = - Logic.all_const T $ (Abs (s, T, prop_of0 Hs prf)) - | prop_of0 Hs (AbsP (s, SOME t, prf)) = - Logic.mk_implies (t, prop_of0 (t :: Hs) prf) - | prop_of0 Hs (prf % SOME t) = (case head_norm (prop_of0 Hs prf) of - Const ("Pure.all", _) $ f => f $ t - | _ => error "prop_of: all expected") - | prop_of0 Hs (prf1 %% prf2) = (case head_norm (prop_of0 Hs prf1) of - Const ("Pure.imp", _) $ P $ Q => Q - | _ => error "prop_of: ==> expected") - | prop_of0 Hs (Hyp t) = t - | prop_of0 Hs (PThm (_, ((_, prop, SOME Ts), _))) = prop_of_atom prop Ts - | prop_of0 Hs (PAxm (_, prop, SOME Ts)) = prop_of_atom prop Ts - | prop_of0 Hs (OfClass (T, c)) = Logic.mk_of_class (T, c) - | prop_of0 Hs (Oracle (_, prop, SOME Ts)) = prop_of_atom prop Ts - | prop_of0 _ _ = error "prop_of: partial proof object"; - -val prop_of' = Envir.beta_eta_contract oo prop_of0; -val prop_of = prop_of' []; - -fun proof_of thm = - reconstruct_proof (Thm.theory_of_thm thm) (Thm.prop_of thm) (Thm.proof_of thm); - - - -(**** expand and reconstruct subproofs ****) - -fun expand_proof thy thms prf = - let - fun expand maxidx prfs (AbsP (s, t, prf)) = - let val (maxidx', prfs', prf') = expand maxidx prfs prf - in (maxidx', prfs', AbsP (s, t, prf')) end - | expand maxidx prfs (Abst (s, T, prf)) = - let val (maxidx', prfs', prf') = expand maxidx prfs prf - in (maxidx', prfs', Abst (s, T, prf')) end - | expand maxidx prfs (prf1 %% prf2) = - let - val (maxidx', prfs', prf1') = expand maxidx prfs prf1; - val (maxidx'', prfs'', prf2') = expand maxidx' prfs' prf2; - in (maxidx'', prfs'', prf1' %% prf2') end - | expand maxidx prfs (prf % t) = - let val (maxidx', prfs', prf') = expand maxidx prfs prf - in (maxidx', prfs', prf' % t) end - | expand maxidx prfs (prf as PThm (_, ((a, prop, SOME Ts), body))) = - if not (exists - (fn (b, NONE) => a = b - | (b, SOME prop') => a = b andalso prop = prop') thms) - then (maxidx, prfs, prf) else - let - val (maxidx', prf, prfs') = - (case AList.lookup (op =) prfs (a, prop) of - NONE => - let - val _ = - message ("Reconstructing proof of " ^ a ^ "\n" ^ - Syntax.string_of_term_global thy prop); - val prf' = forall_intr_vfs_prf prop - (reconstruct_proof thy prop (Proofterm.join_proof body)); - val (maxidx', prfs', prf) = expand - (Proofterm.maxidx_proof prf' ~1) prfs prf' - in (maxidx' + maxidx + 1, Proofterm.incr_indexes (maxidx + 1) prf, - ((a, prop), (maxidx', prf)) :: prfs') - end - | SOME (maxidx', prf) => (maxidx' + maxidx + 1, - Proofterm.incr_indexes (maxidx + 1) prf, prfs)); - val tfrees = Term.add_tfrees prop [] |> rev; - val tye = map (fn ((s, j), _) => (s, maxidx + 1 + j)) - (Term.add_tvars prop [] |> rev) @ map (rpair ~1 o fst) tfrees ~~ Ts; - val varify = map_type_tfree (fn p as (a, S) => - if member (op =) tfrees p then TVar ((a, ~1), S) else TFree p) - in - (maxidx', prfs', Proofterm.map_proof_types (typ_subst_TVars tye o varify) prf) - end - | expand maxidx prfs prf = (maxidx, prfs, prf); 
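(* For every PThm node whose name (and, if given, proposition) matches an entry of thms, expand reconstructs that theorem's proof body, shifts its schematic indexes past the current maxidx, and splices the result in; already expanded proofs are cached in prfs. *)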
- - in #3 (expand (Proofterm.maxidx_proof prf ~1) [] prf) end; - -end; diff --git a/core/Pure/Pure.thy b/core/Pure/Pure.thy deleted file mode 100644 index 79356c0d..00000000 --- a/core/Pure/Pure.thy +++ /dev/null @@ -1,311 +0,0 @@ -(* Title: Pure/Pure.thy - Author: Makarius - -Final stage of bootstrapping Pure, based on implicit background theory. -*) - -theory Pure - keywords - "!!" "!" "%" "(" ")" "+" "," "--" ":" "::" ";" "<" "<=" "=" "==" - "=>" "?" "[" "\" "\" "\" - "\" "\" "]" "and" "assumes" - "attach" "begin" "binder" "constrains" "defines" "fixes" "for" - "identifier" "if" "imports" "in" "includes" "infix" "infixl" - "infixr" "is" "keywords" "notes" "obtains" "open" "output" - "overloaded" "pervasive" "shows" "structure" "unchecked" "where" "|" - and "theory" :: thy_begin % "theory" - and "header" :: diag - and "chapter" :: thy_heading1 - and "section" :: thy_heading2 - and "subsection" :: thy_heading3 - and "subsubsection" :: thy_heading4 - and "text" "text_raw" :: thy_decl - and "sect" :: prf_heading2 % "proof" - and "subsect" :: prf_heading3 % "proof" - and "subsubsect" :: prf_heading4 % "proof" - and "txt" "txt_raw" :: prf_decl % "proof" - and "default_sort" :: thy_decl == "" - and "typedecl" "type_synonym" "nonterminal" "judgment" - "consts" "syntax" "no_syntax" "translations" "no_translations" "defs" - "definition" "abbreviation" "type_notation" "no_type_notation" "notation" - "no_notation" "axiomatization" "theorems" "lemmas" "declare" - "hide_class" "hide_type" "hide_const" "hide_fact" :: thy_decl - and "SML_file" "ML_file" :: thy_load % "ML" - and "SML_import" "SML_export" :: thy_decl % "ML" - and "ML" :: thy_decl % "ML" - and "ML_prf" :: prf_decl % "proof" (* FIXME % "ML" ?? *) - and "ML_val" "ML_command" :: diag % "ML" - and "simproc_setup" :: thy_decl % "ML" == "" - and "setup" "local_setup" "attribute_setup" "method_setup" - "declaration" "syntax_declaration" - "parse_ast_translation" "parse_translation" "print_translation" - "typed_print_translation" "print_ast_translation" "oracle" :: thy_decl % "ML" - and "bundle" :: thy_decl - and "include" "including" :: prf_decl - and "print_bundles" :: diag - and "context" "locale" :: thy_decl - and "sublocale" "interpretation" :: thy_goal - and "interpret" :: prf_goal % "proof" - and "class" :: thy_decl - and "subclass" :: thy_goal - and "instantiation" :: thy_decl - and "instance" :: thy_goal - and "overloading" :: thy_decl - and "code_datatype" :: thy_decl - and "theorem" "lemma" "corollary" :: thy_goal - and "schematic_theorem" "schematic_lemma" "schematic_corollary" :: thy_goal - and "notepad" :: thy_decl - and "have" :: prf_goal % "proof" - and "hence" :: prf_goal % "proof" == "then have" - and "show" :: prf_asm_goal % "proof" - and "thus" :: prf_asm_goal % "proof" == "then show" - and "then" "from" "with" :: prf_chain % "proof" - and "note" "using" "unfolding" :: prf_decl % "proof" - and "fix" "assume" "presume" "def" :: prf_asm % "proof" - and "obtain" :: prf_asm_goal % "proof" - and "guess" :: prf_asm_goal_script % "proof" - and "let" "write" :: prf_decl % "proof" - and "case" :: prf_asm % "proof" - and "{" :: prf_open % "proof" - and "}" :: prf_close % "proof" - and "next" :: prf_block % "proof" - and "qed" :: qed_block % "proof" - and "by" ".." "." 
"sorry" :: "qed" % "proof" - and "done" :: "qed_script" % "proof" - and "oops" :: qed_global % "proof" - and "defer" "prefer" "apply" :: prf_script % "proof" - and "apply_end" :: prf_script % "proof" == "" - and "proof" :: prf_block % "proof" - and "also" "moreover" :: prf_decl % "proof" - and "finally" "ultimately" :: prf_chain % "proof" - and "back" :: prf_script % "proof" - and "Isabelle.command" :: control - and "help" "print_commands" "print_options" "print_context" - "print_theory" "print_syntax" "print_abbrevs" "print_defn_rules" - "print_theorems" "print_locales" "print_classes" "print_locale" - "print_interps" "print_dependencies" "print_attributes" - "print_simpset" "print_rules" "print_trans_rules" "print_methods" - "print_antiquotations" "print_ML_antiquotations" "thy_deps" - "locale_deps" "class_deps" "thm_deps" "print_binds" "print_term_bindings" - "print_facts" "print_cases" "print_statement" "thm" "prf" "full_prf" - "prop" "term" "typ" "print_codesetup" "unused_thms" :: diag - and "use_thy" "remove_thy" "kill_thy" :: control - and "display_drafts" "print_state" "pr" :: diag - and "pretty_setmargin" "disable_pr" "enable_pr" "commit" "quit" "exit" :: control - and "welcome" :: diag - and "init_toplevel" "linear_undo" "undo" "undos_proof" "cannot_undo" "kill" :: control - and "end" :: thy_end % "theory" - and "realizers" :: thy_decl == "" - and "realizability" :: thy_decl == "" - and "extract_type" "extract" :: thy_decl - and "find_theorems" "find_consts" :: diag - and "ProofGeneral.process_pgip" "ProofGeneral.pr" "ProofGeneral.undo" - "ProofGeneral.restart" "ProofGeneral.kill_proof" "ProofGeneral.inform_file_processed" - "ProofGeneral.inform_file_retracted" :: control -begin - -ML_file "ML/ml_antiquotations.ML" -ML_file "ML/ml_thms.ML" -ML_file "Tools/print_operation.ML" -ML_file "Isar/isar_syn.ML" -ML_file "Isar/calculation.ML" -ML_file "Tools/rail.ML" -ML_file "Tools/rule_insts.ML"; -ML_file "Tools/find_theorems.ML" -ML_file "Tools/find_consts.ML" -ML_file "Tools/proof_general_pure.ML" -ML_file "Tools/simplifier_trace.ML" - - -section {* Basic attributes *} - -attribute_setup tagged = - "Scan.lift (Args.name -- Args.name) >> Thm.tag" - "tagged theorem" - -attribute_setup untagged = - "Scan.lift Args.name >> Thm.untag" - "untagged theorem" - -attribute_setup kind = - "Scan.lift Args.name >> Thm.kind" - "theorem kind" - -attribute_setup THEN = - "Scan.lift (Scan.optional (Args.bracks Parse.nat) 1) -- Attrib.thm - >> (fn (i, B) => Thm.rule_attribute (fn _ => fn A => A RSN (i, B)))" - "resolution with rule" - -attribute_setup OF = - "Attrib.thms >> (fn Bs => Thm.rule_attribute (fn _ => fn A => A OF Bs))" - "rule resolved with facts" - -attribute_setup rename_abs = - "Scan.lift (Scan.repeat (Args.maybe Args.name)) >> (fn vs => - Thm.rule_attribute (K (Drule.rename_bvars' vs)))" - "rename bound variables in abstractions" - -attribute_setup unfolded = - "Attrib.thms >> (fn ths => - Thm.rule_attribute (fn context => Local_Defs.unfold (Context.proof_of context) ths))" - "unfolded definitions" - -attribute_setup folded = - "Attrib.thms >> (fn ths => - Thm.rule_attribute (fn context => Local_Defs.fold (Context.proof_of context) ths))" - "folded definitions" - -attribute_setup consumes = - "Scan.lift (Scan.optional Parse.int 1) >> Rule_Cases.consumes" - "number of consumed facts" - -attribute_setup constraints = - "Scan.lift Parse.nat >> Rule_Cases.constraints" - "number of equality constraints" - -attribute_setup case_names = {* - Scan.lift (Scan.repeat1 (Args.name -- - Scan.optional 
(@{keyword "["} |-- Scan.repeat1 (Args.maybe Args.name) --| @{keyword "]"}) [])) - >> (fn cs => - Rule_Cases.cases_hyp_names - (map #1 cs) - (map (map (the_default Rule_Cases.case_hypsN) o #2) cs)) -*} "named rule cases" - -attribute_setup case_conclusion = - "Scan.lift (Args.name -- Scan.repeat Args.name) >> Rule_Cases.case_conclusion" - "named conclusion of rule cases" - -attribute_setup params = - "Scan.lift (Parse.and_list1 (Scan.repeat Args.name)) >> Rule_Cases.params" - "named rule parameters" - -attribute_setup rule_format = {* - Scan.lift (Args.mode "no_asm") - >> (fn true => Object_Logic.rule_format_no_asm | false => Object_Logic.rule_format) -*} "result put into canonical rule format" - -attribute_setup elim_format = - "Scan.succeed (Thm.rule_attribute (K Tactic.make_elim))" - "destruct rule turned into elimination rule format" - -attribute_setup no_vars = {* - Scan.succeed (Thm.rule_attribute (fn context => fn th => - let - val ctxt = Variable.set_body false (Context.proof_of context); - val ((_, [th']), _) = Variable.import true [th] ctxt; - in th' end)) -*} "imported schematic variables" - -attribute_setup eta_long = - "Scan.succeed (Thm.rule_attribute (fn _ => Conv.fconv_rule Drule.eta_long_conversion))" - "put theorem into eta long beta normal form" - -attribute_setup atomize = - "Scan.succeed Object_Logic.declare_atomize" - "declaration of atomize rule" - -attribute_setup rulify = - "Scan.succeed Object_Logic.declare_rulify" - "declaration of rulify rule" - -attribute_setup rotated = - "Scan.lift (Scan.optional Parse.int 1 - >> (fn n => Thm.rule_attribute (fn _ => rotate_prems n)))" - "rotated theorem premises" - -attribute_setup defn = - "Attrib.add_del Local_Defs.defn_add Local_Defs.defn_del" - "declaration of definitional transformations" - -attribute_setup abs_def = - "Scan.succeed (Thm.rule_attribute (fn context => - Local_Defs.meta_rewrite_rule (Context.proof_of context) #> Drule.abs_def))" - "abstract over free variables of definitional theorem" - - -section {* Further content for the Pure theory *} - -subsection {* Meta-level connectives in assumptions *} - -lemma meta_mp: - assumes "PROP P ==> PROP Q" and "PROP P" - shows "PROP Q" - by (rule `PROP P ==> PROP Q` [OF `PROP P`]) - -lemmas meta_impE = meta_mp [elim_format] - -lemma meta_spec: - assumes "!!x. PROP P x" - shows "PROP P x" - by (rule `!!x. PROP P x`) - -lemmas meta_allE = meta_spec [elim_format] - -lemma swap_params: - "(!!x y. PROP P x y) == (!!y x. PROP P x y)" .. - - -subsection {* Meta-level conjunction *} - -lemma all_conjunction: - "(!!x. PROP A x &&& PROP B x) == ((!!x. PROP A x) &&& (!!x. PROP B x))" -proof - assume conj: "!!x. PROP A x &&& PROP B x" - show "(!!x. PROP A x) &&& (!!x. PROP B x)" - proof - - fix x - from conj show "PROP A x" by (rule conjunctionD1) - from conj show "PROP B x" by (rule conjunctionD2) - qed -next - assume conj: "(!!x. PROP A x) &&& (!!x. 
PROP B x)" - fix x - show "PROP A x &&& PROP B x" - proof - - show "PROP A x" by (rule conj [THEN conjunctionD1, rule_format]) - show "PROP B x" by (rule conj [THEN conjunctionD2, rule_format]) - qed -qed - -lemma imp_conjunction: - "(PROP A ==> PROP B &&& PROP C) == ((PROP A ==> PROP B) &&& (PROP A ==> PROP C))" -proof - assume conj: "PROP A ==> PROP B &&& PROP C" - show "(PROP A ==> PROP B) &&& (PROP A ==> PROP C)" - proof - - assume "PROP A" - from conj [OF `PROP A`] show "PROP B" by (rule conjunctionD1) - from conj [OF `PROP A`] show "PROP C" by (rule conjunctionD2) - qed -next - assume conj: "(PROP A ==> PROP B) &&& (PROP A ==> PROP C)" - assume "PROP A" - show "PROP B &&& PROP C" - proof - - from `PROP A` show "PROP B" by (rule conj [THEN conjunctionD1]) - from `PROP A` show "PROP C" by (rule conj [THEN conjunctionD2]) - qed -qed - -lemma conjunction_imp: - "(PROP A &&& PROP B ==> PROP C) == (PROP A ==> PROP B ==> PROP C)" -proof - assume r: "PROP A &&& PROP B ==> PROP C" - assume ab: "PROP A" "PROP B" - show "PROP C" - proof (rule r) - from ab show "PROP A &&& PROP B" . - qed -next - assume r: "PROP A ==> PROP B ==> PROP C" - assume conj: "PROP A &&& PROP B" - show "PROP C" - proof (rule r) - from conj show "PROP A" by (rule conjunctionD1) - from conj show "PROP B" by (rule conjunctionD2) - qed -qed - -end - diff --git a/core/Pure/README b/core/Pure/README deleted file mode 100644 index 0ae4d329..00000000 --- a/core/Pure/README +++ /dev/null @@ -1,16 +0,0 @@ - - Pure: The Pure Isabelle System - - -This directory contains the ML source files for Pure Isabelle, which -is the basis for all object-logics. Building the Isabelle/Pure heap -image in batch mode works as for any other session: - - isabelle build -b Pure - -To explore the bootstrap of Pure interactively, the raw ML console can -be used like this: - - isabelle console -l RAW - use "ROOT.ML"; - diff --git a/core/Pure/ROOT b/core/Pure/ROOT deleted file mode 100644 index dabd64db..00000000 --- a/core/Pure/ROOT +++ /dev/null @@ -1,262 +0,0 @@ -chapter Pure - -session RAW = - theories - files - "General/exn.ML" - "ML-Systems/compiler_polyml.ML" - "ML-Systems/ml_name_space.ML" - "ML-Systems/ml_positions.ML" - "ML-Systems/ml_pretty.ML" - "ML-Systems/ml_system.ML" - "ML-Systems/multithreading.ML" - "ML-Systems/multithreading_polyml.ML" - "ML-Systems/overloading_smlnj.ML" - "ML-Systems/polyml.ML" - "ML-Systems/polyml-5.5.2.ML" - "ML-Systems/pp_dummy.ML" - "ML-Systems/proper_int.ML" - "ML-Systems/single_assignment.ML" - "ML-Systems/single_assignment_polyml.ML" - "ML-Systems/share_common_data_polyml-5.3.0.ML" - "ML-Systems/smlnj.ML" - "ML-Systems/thread_dummy.ML" - "ML-Systems/universal.ML" - "ML-Systems/unsynchronized.ML" - "ML-Systems/use_context.ML" - -session Pure = - global_theories Pure - files - "General/exn.ML" - "ML-Systems/compiler_polyml.ML" - "ML-Systems/ml_name_space.ML" - "ML-Systems/ml_positions.ML" - "ML-Systems/ml_pretty.ML" - "ML-Systems/ml_system.ML" - "ML-Systems/multithreading.ML" - "ML-Systems/multithreading_polyml.ML" - "ML-Systems/overloading_smlnj.ML" - "ML-Systems/polyml.ML" - "ML-Systems/polyml-5.5.2.ML" - "ML-Systems/pp_dummy.ML" - "ML-Systems/proper_int.ML" - "ML-Systems/single_assignment.ML" - "ML-Systems/single_assignment_polyml.ML" - "ML-Systems/smlnj.ML" - "ML-Systems/thread_dummy.ML" - "ML-Systems/universal.ML" - "ML-Systems/unsynchronized.ML" - "ML-Systems/use_context.ML" - - "Concurrent/bash.ML" - "Concurrent/bash_sequential.ML" - "Concurrent/cache.ML" - "Concurrent/event_timer.ML" - 
"Concurrent/future.ML" - "Concurrent/lazy.ML" - "Concurrent/lazy_sequential.ML" - "Concurrent/mailbox.ML" - "Concurrent/par_exn.ML" - "Concurrent/par_list.ML" - "Concurrent/par_list_sequential.ML" - "Concurrent/simple_thread.ML" - "Concurrent/single_assignment.ML" - "Concurrent/single_assignment_sequential.ML" - "Concurrent/synchronized.ML" - "Concurrent/synchronized_sequential.ML" - "Concurrent/task_queue.ML" - "Concurrent/time_limit.ML" - "General/alist.ML" - "General/antiquote.ML" - "General/balanced_tree.ML" - "General/basics.ML" - "General/binding.ML" - "General/buffer.ML" - "General/change_table.ML" - "General/completion.ML" - "General/file.ML" - "General/graph.ML" - "General/graph_display.ML" - "General/heap.ML" - "General/integer.ML" - "General/linear_set.ML" - "General/long_name.ML" - "General/name_space.ML" - "General/ord_list.ML" - "General/output.ML" - "General/path.ML" - "General/position.ML" - "General/pretty.ML" - "General/print_mode.ML" - "General/properties.ML" - "General/queue.ML" - "General/same.ML" - "General/scan.ML" - "General/secure.ML" - "General/seq.ML" - "General/sha1.ML" - "General/sha1_polyml.ML" - "General/sha1_samples.ML" - "General/socket_io.ML" - "General/source.ML" - "General/stack.ML" - "General/symbol.ML" - "General/symbol_pos.ML" - "General/table.ML" - "General/timing.ML" - "General/url.ML" - "Isar/args.ML" - "Isar/attrib.ML" - "Isar/auto_bind.ML" - "Isar/bundle.ML" - "Isar/class.ML" - "Isar/class_declaration.ML" - "Isar/code.ML" - "Isar/context_rules.ML" - "Isar/element.ML" - "Isar/expression.ML" - "Isar/generic_target.ML" - "Isar/isar_cmd.ML" - "Isar/keyword.ML" - "Isar/local_defs.ML" - "Isar/local_theory.ML" - "Isar/locale.ML" - "Isar/method.ML" - "Isar/named_target.ML" - "Isar/object_logic.ML" - "Isar/obtain.ML" - "Isar/outer_syntax.ML" - "Isar/overloading.ML" - "Isar/parse.ML" - "Isar/parse_spec.ML" - "Isar/proof.ML" - "Isar/proof_context.ML" - "Isar/proof_display.ML" - "Isar/proof_node.ML" - "Isar/rule_cases.ML" - "Isar/runtime.ML" - "Isar/spec_rules.ML" - "Isar/specification.ML" - "Isar/token.ML" - "Isar/toplevel.ML" - "Isar/typedecl.ML" - "ML/exn_output.ML" - "ML/exn_output_polyml.ML" - "ML/exn_properties_dummy.ML" - "ML/exn_properties_polyml.ML" - "ML/exn_trace_polyml-5.5.1.ML" - "ML/install_pp_polyml.ML" - "ML/ml_antiquotation.ML" - "ML/ml_compiler.ML" - "ML/ml_compiler_polyml.ML" - "ML/ml_context.ML" - "ML/ml_env.ML" - "ML/ml_lex.ML" - "ML/ml_parse.ML" - "ML/ml_options.ML" - "ML/ml_statistics_dummy.ML" - "ML/ml_statistics_polyml-5.5.0.ML" - "ML/ml_syntax.ML" - "PIDE/active.ML" - "PIDE/command.ML" - "PIDE/document.ML" - "PIDE/document_id.ML" - "PIDE/execution.ML" - "PIDE/markup.ML" - "PIDE/protocol.ML" - "PIDE/query_operation.ML" - "PIDE/resources.ML" - "PIDE/session.ML" - "PIDE/xml.ML" - "PIDE/yxml.ML" - "Proof/extraction.ML" - "Proof/proof_checker.ML" - "Proof/proof_rewrite_rules.ML" - "Proof/proof_syntax.ML" - "Proof/reconstruct.ML" - "ROOT.ML" - "Syntax/ast.ML" - "Syntax/lexicon.ML" - "Syntax/local_syntax.ML" - "Syntax/mixfix.ML" - "Syntax/parser.ML" - "Syntax/printer.ML" - "Syntax/simple_syntax.ML" - "Syntax/syntax.ML" - "Syntax/syntax_ext.ML" - "Syntax/syntax_phases.ML" - "Syntax/syntax_trans.ML" - "Syntax/term_position.ML" - "Syntax/type_annotation.ML" - "System/command_line.ML" - "System/invoke_scala.ML" - "System/isabelle_process.ML" - "System/isabelle_system.ML" - "System/isar.ML" - "System/message_channel.ML" - "System/options.ML" - "System/system_channel.ML" - "Thy/html.ML" - "Thy/latex.ML" - "Thy/present.ML" - "Thy/term_style.ML" - 
"Thy/thm_deps.ML" - "Thy/thy_header.ML" - "Thy/thy_info.ML" - "Thy/thy_output.ML" - "Thy/thy_syntax.ML" - "Tools/build.ML" - "Tools/named_thms.ML" - "Tools/proof_general.ML" - "assumption.ML" - "axclass.ML" - "config.ML" - "conjunction.ML" - "consts.ML" - "context.ML" - "context_position.ML" - "conv.ML" - "defs.ML" - "display.ML" - "drule.ML" - "envir.ML" - "facts.ML" - "global_theory.ML" - "goal.ML" - "goal_display.ML" - "interpretation.ML" - "item_net.ML" - "library.ML" - "logic.ML" - "more_thm.ML" - "morphism.ML" - "name.ML" - "net.ML" - "pattern.ML" - "primitive_defs.ML" - "proofterm.ML" - "pure_syn.ML" - "pure_thy.ML" - "raw_simplifier.ML" - "search.ML" - "sign.ML" - "simplifier.ML" - "skip_proof.ML" - "sorts.ML" - "subgoal.ML" - "tactic.ML" - "tactical.ML" - "term.ML" - "term_ord.ML" - "term_sharing.ML" - "term_subst.ML" - "term_xml.ML" - "theory.ML" - "thm.ML" - "type.ML" - "type_infer.ML" - "type_infer_context.ML" - "unify.ML" - "variable.ML" - diff --git a/core/Pure/ROOT.ML b/core/Pure/ROOT.ML deleted file mode 100644 index e413f4b7..00000000 --- a/core/Pure/ROOT.ML +++ /dev/null @@ -1,380 +0,0 @@ -(*** Isabelle/Pure bootstrap from "RAW" environment ***) - -(** bootstrap phase 0: towards secure ML barrier *) - -structure Distribution = (*filled-in by makedist*) -struct - val version = "Isabelle2014: August 2014"; - val is_identified = true; - val is_official = true; -end; - - -(* library of general tools *) - -use "General/basics.ML"; -use "library.ML"; -use "General/print_mode.ML"; -use "General/alist.ML"; -use "General/table.ML"; - -use "Concurrent/simple_thread.ML"; - -use "Concurrent/synchronized.ML"; -if Multithreading.available then () -else use "Concurrent/synchronized_sequential.ML"; -use "Concurrent/counter.ML"; - -use "General/properties.ML"; -use "General/output.ML"; -use "PIDE/markup.ML"; -fun legacy_feature s = warning (Markup.markup Markup.legacy ("Legacy feature! 
" ^ s)); -use "General/scan.ML"; -use "General/source.ML"; -use "General/symbol.ML"; -use "General/position.ML"; -use "General/symbol_pos.ML"; -use "General/antiquote.ML"; -use "ML/ml_lex.ML"; -use "ML/ml_parse.ML"; -use "General/secure.ML"; - -val use_text = Secure.use_text; -val use_file = Secure.use_file; - -fun use s = - Position.setmp_thread_data (Position.file_only s) - (fn () => - Secure.use_file ML_Parse.global_context true s - handle ERROR msg => (writeln msg; error "ML error")) (); - -val toplevel_pp = Secure.toplevel_pp; - - -(** bootstrap phase 1: towards ML within Isar context *) - -(* library of general tools *) - -use "General/integer.ML"; -use "General/stack.ML"; -use "General/queue.ML"; -use "General/heap.ML"; -use "General/same.ML"; -use "General/ord_list.ML"; -use "General/balanced_tree.ML"; -use "General/linear_set.ML"; -use "General/buffer.ML"; -use "General/pretty.ML"; -use "General/path.ML"; -use "General/url.ML"; -use "General/file.ML"; -use "General/long_name.ML"; -use "General/binding.ML"; -use "General/socket_io.ML"; -use "General/seq.ML"; -use "General/timing.ML"; - -use "General/sha1.ML"; -if ML_System.is_polyml then use "General/sha1_polyml.ML" else (); -use "General/sha1_samples.ML"; - -use "PIDE/xml.ML"; -use "PIDE/yxml.ML"; -use "PIDE/document_id.ML"; - -use "General/change_table.ML"; -use "General/graph.ML"; - -use "System/options.ML"; - - -(* concurrency within the ML runtime *) - -if ML_System.is_polyml -then use "ML/exn_properties_polyml.ML" -else use "ML/exn_properties_dummy.ML"; - -if ML_System.name = "polyml-5.5.1" - orelse ML_System.name = "polyml-5.5.2" -then use "ML/exn_trace_polyml-5.5.1.ML" -else (); - -if ML_System.name = "polyml-5.5.0" - orelse ML_System.name = "polyml-5.5.1" - orelse ML_System.name = "polyml-5.5.2" -then use "ML/ml_statistics_polyml-5.5.0.ML" -else use "ML/ml_statistics_dummy.ML"; - -use "Concurrent/single_assignment.ML"; -if Multithreading.available then () -else use "Concurrent/single_assignment_sequential.ML"; - -if Multithreading.available -then use "Concurrent/bash.ML" -else use "Concurrent/bash_sequential.ML"; - -use "Concurrent/par_exn.ML"; -use "Concurrent/task_queue.ML"; -use "Concurrent/future.ML"; -use "Concurrent/event_timer.ML"; - -if ML_System.is_polyml then use "Concurrent/time_limit.ML" else (); - -use "Concurrent/lazy.ML"; -if Multithreading.available then () -else use "Concurrent/lazy_sequential.ML"; - -use "Concurrent/par_list.ML"; -if Multithreading.available then () -else use "Concurrent/par_list_sequential.ML"; - -use "Concurrent/mailbox.ML"; -use "Concurrent/cache.ML"; - -use "PIDE/active.ML"; - - -(* fundamental structures *) - -use "name.ML"; -use "term.ML"; -use "context.ML"; -use "context_position.ML"; -use "config.ML"; - - -(* inner syntax *) - -use "Syntax/type_annotation.ML"; -use "Syntax/term_position.ML"; -use "Syntax/lexicon.ML"; -use "Syntax/ast.ML"; -use "Syntax/syntax_ext.ML"; -use "Syntax/parser.ML"; -use "Syntax/syntax_trans.ML"; -use "Syntax/mixfix.ML"; -use "Syntax/printer.ML"; -use "Syntax/syntax.ML"; - - -(* core of tactical proof system *) - -use "term_ord.ML"; -use "term_subst.ML"; -use "term_xml.ML"; -use "General/completion.ML"; -use "General/name_space.ML"; -use "sorts.ML"; -use "type.ML"; -use "logic.ML"; -use "Syntax/simple_syntax.ML"; -use "net.ML"; -use "item_net.ML"; -use "envir.ML"; -use "consts.ML"; -use "primitive_defs.ML"; -use "defs.ML"; -use "sign.ML"; -use "term_sharing.ML"; -use "pattern.ML"; -use "unify.ML"; -use "theory.ML"; -use "interpretation.ML"; -use 
"proofterm.ML"; -use "thm.ML"; -use "more_thm.ML"; -use "facts.ML"; -use "global_theory.ML"; -use "pure_thy.ML"; -use "drule.ML"; -use "morphism.ML"; -use "variable.ML"; -use "conv.ML"; -use "goal_display.ML"; -use "tactical.ML"; -use "search.ML"; -use "tactic.ML"; -use "raw_simplifier.ML"; -use "conjunction.ML"; -use "assumption.ML"; -use "display.ML"; - - -(* Isar -- Intelligible Semi-Automated Reasoning *) - -(*ML support and global execution*) -use "ML/ml_syntax.ML"; -use "ML/ml_env.ML"; -use "ML/ml_options.ML"; -use "ML/exn_output.ML"; -if ML_System.is_polyml then use "ML/exn_output_polyml.ML" else (); -use "ML/ml_options.ML"; -use "Isar/runtime.ML"; -use "PIDE/execution.ML"; -use "ML/ml_compiler.ML"; -if ML_System.is_polyml then use "ML/ml_compiler_polyml.ML" else (); - -use "skip_proof.ML"; -use "goal.ML"; - -(*proof context*) -use "Isar/object_logic.ML"; -use "Isar/rule_cases.ML"; -use "Isar/auto_bind.ML"; -use "type_infer.ML"; -use "Syntax/local_syntax.ML"; -use "Isar/proof_context.ML"; -use "type_infer_context.ML"; -use "Syntax/syntax_phases.ML"; -use "Isar/local_defs.ML"; - -(*outer syntax*) -use "Isar/token.ML"; -use "Isar/keyword.ML"; -use "Isar/parse.ML"; -use "Isar/args.ML"; - -(*theory sources*) -use "Thy/thy_header.ML"; -use "Thy/thy_syntax.ML"; -use "Thy/html.ML"; -use "Thy/latex.ML"; - -(*ML with context and antiquotations*) -use "ML/ml_context.ML"; -use "ML/ml_antiquotation.ML"; - -fun use s = - ML_Context.eval_file (ML_Compiler.verbose true ML_Compiler.flags) (Path.explode s) - handle ERROR msg => (writeln msg; error "ML error"); - - - -(** bootstrap phase 2: towards Pure.thy and final ML toplevel setup *) - -(*basic proof engine*) -use "Isar/proof_display.ML"; -use "Isar/attrib.ML"; -use "Isar/context_rules.ML"; -use "Isar/method.ML"; -use "Isar/proof.ML"; -use "Isar/element.ML"; -use "Isar/obtain.ML"; - -(*local theories and targets*) -use "Isar/locale.ML"; -use "Isar/local_theory.ML"; -use "Isar/generic_target.ML"; -use "Isar/overloading.ML"; -use "axclass.ML"; -use "Isar/class.ML"; -use "Isar/named_target.ML"; -use "Isar/expression.ML"; -use "Isar/class_declaration.ML"; -use "Isar/bundle.ML"; - -use "simplifier.ML"; - -(*executable theory content*) -use "Isar/code.ML"; - -(*specifications*) -use "Isar/parse_spec.ML"; -use "Isar/spec_rules.ML"; -use "Isar/specification.ML"; -use "Isar/typedecl.ML"; - -(*toplevel transactions*) -use "Isar/proof_node.ML"; -use "Isar/toplevel.ML"; - -(*proof term operations*) -use "Proof/reconstruct.ML"; -use "Proof/proof_syntax.ML"; -use "Proof/proof_rewrite_rules.ML"; -use "Proof/proof_checker.ML"; -use "Proof/extraction.ML"; - -(*theory documents*) -use "System/isabelle_system.ML"; -use "Thy/term_style.ML"; -use "Thy/thy_output.ML"; -use "Isar/outer_syntax.ML"; -use "General/graph_display.ML"; -use "Thy/present.ML"; -use "PIDE/command.ML"; -use "PIDE/query_operation.ML"; -use "PIDE/resources.ML"; -use "Thy/thy_info.ML"; -use "PIDE/document.ML"; - -(*theory and proof operations*) -use "Thy/thm_deps.ML"; -use "Isar/isar_cmd.ML"; - -use "subgoal.ML"; - - -(* Isabelle/Isar system *) - -use "PIDE/session.ML"; -use "System/command_line.ML"; -use "System/system_channel.ML"; -use "System/message_channel.ML"; -use "System/isabelle_process.ML"; -use "System/invoke_scala.ML"; -use "PIDE/protocol.ML"; -use "System/isar.ML"; - - -(* miscellaneous tools and packages for Pure Isabelle *) - -use "Tools/build.ML"; -use "Tools/named_thms.ML"; -use "Tools/proof_general.ML"; - -structure Output: OUTPUT = Output; (*seal system channels!*) - - -(* ML 
toplevel pretty printing *) - -toplevel_pp ["Pretty", "T"] "(fn _: Pretty.T => Pretty.str \"\")"; -toplevel_pp ["Task_Queue", "task"] "Pretty.str o Task_Queue.str_of_task"; -toplevel_pp ["Task_Queue", "group"] "Pretty.str o Task_Queue.str_of_group"; -toplevel_pp ["Position", "T"] "Pretty.position"; -toplevel_pp ["Binding", "binding"] "Pretty.str o Binding.print"; -toplevel_pp ["Thm", "thm"] "Proof_Display.pp_thm"; -toplevel_pp ["Thm", "cterm"] "Proof_Display.pp_cterm"; -toplevel_pp ["Thm", "ctyp"] "Proof_Display.pp_ctyp"; -toplevel_pp ["Context", "theory"] "Context.pretty_thy"; -toplevel_pp ["Context", "Proof", "context"] "Proof_Display.pp_context"; -toplevel_pp ["Ast", "ast"] "Ast.pretty_ast"; -toplevel_pp ["Path", "T"] "Path.pretty"; -toplevel_pp ["SHA1", "digest"] "Pretty.str o quote o SHA1.rep"; -toplevel_pp ["Proof", "state"] "(fn _: Proof.state => Pretty.str \"\")"; -toplevel_pp ["Toplevel", "state"] "Toplevel.pretty_abstract"; -toplevel_pp ["Morphism", "morphism"] "Morphism.pretty"; - -if ML_System.is_polyml then use "ML/install_pp_polyml.ML" else (); - - -(* the Pure theory *) - -use "pure_syn.ML"; -Runtime.toplevel_program (fn () => Thy_Info.use_thy ("Pure", Position.none)); -Context.set_thread_data NONE; -structure Pure = struct val thy = Thy_Info.get_theory "Pure" end; - -toplevel_pp ["typ"] "Proof_Display.pp_typ Pure.thy"; - - -(* ML toplevel commands *) - -fun use_thys args = - Runtime.toplevel_program (fn () => Thy_Info.use_thys (map (rpair Position.none) args)); -val use_thy = use_thys o single; - -val cd = File.cd o Path.explode; - -Proofterm.proofs := 0; - diff --git a/core/Pure/ROOT.scala b/core/Pure/ROOT.scala deleted file mode 100644 index 77146af8..00000000 --- a/core/Pure/ROOT.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* Title: Pure/ROOT.scala - Module: PIDE - Author: Makarius - -Root of isabelle package. -*/ - -package object isabelle extends isabelle.Basic_Library -{ - object Distribution /*filled-in by makedist*/ - { - val version = "Isabelle2014: August 2014" - val is_identified = true - val is_official = true - } -} - diff --git a/core/Pure/Syntax/ast.ML b/core/Pure/Syntax/ast.ML deleted file mode 100644 index db810377..00000000 --- a/core/Pure/Syntax/ast.ML +++ /dev/null @@ -1,250 +0,0 @@ -(* Title: Pure/Syntax/ast.ML - Author: Markus Wenzel, TU Muenchen - -Abstract syntax trees, translation rules, matching and normalization of asts. 
-*) - -signature AST = -sig - datatype ast = - Constant of string | - Variable of string | - Appl of ast list - val mk_appl: ast -> ast list -> ast - exception AST of string * ast list - val pretty_ast: ast -> Pretty.T - val pretty_rule: ast * ast -> Pretty.T - val strip_positions: ast -> ast - val head_of_rule: ast * ast -> string - val rule_error: ast * ast -> string option - val fold_ast: string -> ast list -> ast - val fold_ast_p: string -> ast list * ast -> ast - val unfold_ast: string -> ast -> ast list - val unfold_ast_p: string -> ast -> ast list * ast - val trace_raw: Config.raw - val trace: bool Config.T - val stats_raw: Config.raw - val stats: bool Config.T - val normalize: Proof.context -> (string -> (ast * ast) list) -> ast -> ast -end; - -structure Ast: AST = -struct - -(** abstract syntax trees **) - -(*asts come in two flavours: - - ordinary asts representing terms and typs: Variables are (often) treated - like Constants; - - patterns used as lhs and rhs in rules: Variables are placeholders for - proper asts*) - -datatype ast = - Constant of string | (*"not", "_abs", "fun"*) - Variable of string | (*x, ?x, 'a, ?'a*) - Appl of ast list; (*(f x y z), ("fun" 'a 'b), ("_abs" x t)*) - -(*the list of subasts of an Appl node has to contain at least 2 elements, i.e. - there are no empty asts or nullary applications; use mk_appl for convenience*) -fun mk_appl f [] = f - | mk_appl f args = Appl (f :: args); - -(*exception for system errors involving asts*) -exception AST of string * ast list; - - - -(** print asts in a LISP-like style **) - -fun pretty_ast (Constant a) = Pretty.quote (Pretty.str a) - | pretty_ast (Variable x) = - (case Term_Position.decode x of - SOME pos => Term_Position.pretty pos - | NONE => Pretty.str x) - | pretty_ast (Appl asts) = Pretty.enclose "(" ")" (Pretty.breaks (map pretty_ast asts)); - -fun pretty_rule (lhs, rhs) = - Pretty.block [pretty_ast lhs, Pretty.str " ->", Pretty.brk 2, pretty_ast rhs]; - - -(* strip_positions *) - -fun strip_positions (Appl ((t as Constant c) :: u :: (v as Variable x) :: asts)) = - if member (op =) Term_Position.markers c andalso is_some (Term_Position.decode x) - then mk_appl (strip_positions u) (map strip_positions asts) - else Appl (map strip_positions (t :: u :: v :: asts)) - | strip_positions (Appl asts) = Appl (map strip_positions asts) - | strip_positions ast = ast; - - -(* head_of_ast and head_of_rule *) - -fun head_of_ast (Constant a) = a - | head_of_ast (Appl (Constant a :: _)) = a - | head_of_ast _ = ""; - -fun head_of_rule (lhs, _) = head_of_ast lhs; - - - -(** check translation rules **) - -fun rule_error (lhs, rhs) = - let - fun add_vars (Constant _) = I - | add_vars (Variable x) = cons x - | add_vars (Appl asts) = fold add_vars asts; - - val lvars = add_vars lhs []; - val rvars = add_vars rhs []; - in - if has_duplicates (op =) lvars then SOME "duplicate vars in lhs" - else if not (subset (op =) (rvars, lvars)) then SOME "rhs contains extra variables" - else NONE - end; - - - -(** ast translation utilities **) - -(* fold asts *) - -fun fold_ast _ [] = raise Match - | fold_ast _ [y] = y - | fold_ast c (x :: xs) = Appl [Constant c, x, fold_ast c xs]; - -fun fold_ast_p c = uncurry (fold_rev (fn x => fn xs => Appl [Constant c, x, xs])); - - -(* unfold asts *) - -fun unfold_ast c (y as Appl [Constant c', x, xs]) = - if c = c' then x :: unfold_ast c xs else [y] - | unfold_ast _ y = [y]; - -fun unfold_ast_p c (y as Appl [Constant c', x, xs]) = - if c = c' then apfst (cons x) (unfold_ast_p c xs) - else ([], y) - | 
unfold_ast_p _ y = ([], y); - - - -(** normalization of asts **) - -(* match *) - -fun match ast pat = - let - exception NO_MATCH; - - fun mtch (Constant a) (Constant b) env = - if a = b then env else raise NO_MATCH - | mtch (Variable a) (Constant b) env = - if a = b then env else raise NO_MATCH - | mtch ast (Variable x) env = Symtab.update (x, ast) env - | mtch (Appl asts) (Appl pats) env = mtch_lst asts pats env - | mtch _ _ _ = raise NO_MATCH - and mtch_lst (ast :: asts) (pat :: pats) env = - mtch_lst asts pats (mtch ast pat env) - | mtch_lst [] [] env = env - | mtch_lst _ _ _ = raise NO_MATCH; - - val (head, args) = - (case (ast, pat) of - (Appl asts, Appl pats) => - let val a = length asts and p = length pats in - if a > p then (Appl (take p asts), drop p asts) - else (ast, []) - end - | _ => (ast, [])); - in - SOME (mtch head pat Symtab.empty, args) handle NO_MATCH => NONE - end; - - -(* normalize *) - -val trace_raw = Config.declare ("syntax_ast_trace", @{here}) (fn _ => Config.Bool false); -val trace = Config.bool trace_raw; - -val stats_raw = Config.declare ("syntax_ast_stats", @{here}) (fn _ => Config.Bool false); -val stats = Config.bool stats_raw; - -fun message head body = - Pretty.string_of (Pretty.block [Pretty.str head, Pretty.brk 1, body]); - -(*the normalizer works yoyo-like: top-down, bottom-up, top-down, ...*) -fun normalize ctxt get_rules pre_ast = - let - val trace = Config.get ctxt trace; - val stats = Config.get ctxt stats; - - val passes = Unsynchronized.ref 0; - val failed_matches = Unsynchronized.ref 0; - val changes = Unsynchronized.ref 0; - - fun subst _ (ast as Constant _) = ast - | subst env (Variable x) = the (Symtab.lookup env x) - | subst env (Appl asts) = Appl (map (subst env) asts); - - fun try_rules ((lhs, rhs) :: pats) ast = - (case match ast lhs of - SOME (env, args) => - (Unsynchronized.inc changes; SOME (mk_appl (subst env rhs) args)) - | NONE => (Unsynchronized.inc failed_matches; try_rules pats ast)) - | try_rules [] _ = NONE; - val try_headless_rules = try_rules (get_rules ""); - - fun try ast a = - (case try_rules (get_rules a) ast of - NONE => try_headless_rules ast - | some => some); - - fun rewrite (ast as Constant a) = try ast a - | rewrite (ast as Variable a) = try ast a - | rewrite (ast as Appl (Constant a :: _)) = try ast a - | rewrite (ast as Appl (Variable a :: _)) = try ast a - | rewrite ast = try_headless_rules ast; - - fun rewrote old_ast new_ast = - if trace then tracing (message "rewrote:" (pretty_rule (old_ast, new_ast))) - else (); - - fun norm_root ast = - (case rewrite ast of - SOME new_ast => (rewrote ast new_ast; norm_root new_ast) - | NONE => ast); - - fun norm ast = - (case norm_root ast of - Appl sub_asts => - let - val old_changes = ! changes; - val new_ast = Appl (map norm sub_asts); - in - if old_changes = ! changes then new_ast else norm_root new_ast - end - | atomic_ast => atomic_ast); - - fun normal ast = - let - val old_changes = ! changes; - val new_ast = norm ast; - in - Unsynchronized.inc passes; - if old_changes = ! changes then new_ast else normal new_ast - end; - - - val _ = if trace then tracing (message "pre:" (pretty_ast pre_ast)) else (); - val post_ast = normal pre_ast; - val _ = - if trace orelse stats then - tracing (message "post:" (pretty_ast post_ast) ^ "\nnormalize: " ^ - string_of_int (! passes) ^ " passes, " ^ - string_of_int (! changes) ^ " changes, " ^ - string_of_int (! 
failed_matches) ^ " matches failed") - else (); - in post_ast end; - -end; diff --git a/core/Pure/Syntax/lexicon.ML b/core/Pure/Syntax/lexicon.ML deleted file mode 100644 index 572e0792..00000000 --- a/core/Pure/Syntax/lexicon.ML +++ /dev/null @@ -1,465 +0,0 @@ -(* Title: Pure/Syntax/lexicon.ML - Author: Tobias Nipkow and Markus Wenzel, TU Muenchen - -Lexer for the inner Isabelle syntax (terms and types). -*) - -signature LEXICON = -sig - structure Syntax: - sig - val const: string -> term - val free: string -> term - val var: indexname -> term - end - val scan_id: Symbol_Pos.T list -> Symbol_Pos.T list * Symbol_Pos.T list - val scan_longid: Symbol_Pos.T list -> Symbol_Pos.T list * Symbol_Pos.T list - val scan_tid: Symbol_Pos.T list -> Symbol_Pos.T list * Symbol_Pos.T list - val scan_nat: Symbol_Pos.T list -> Symbol_Pos.T list * Symbol_Pos.T list - val scan_int: Symbol_Pos.T list -> Symbol_Pos.T list * Symbol_Pos.T list - val scan_float: Symbol_Pos.T list -> Symbol_Pos.T list * Symbol_Pos.T list - val scan_hex: Symbol_Pos.T list -> Symbol_Pos.T list * Symbol_Pos.T list - val scan_bin: Symbol_Pos.T list -> Symbol_Pos.T list * Symbol_Pos.T list - val scan_var: Symbol_Pos.T list -> Symbol_Pos.T list * Symbol_Pos.T list - val scan_tvar: Symbol_Pos.T list -> Symbol_Pos.T list * Symbol_Pos.T list - val is_tid: string -> bool - datatype token_kind = - Literal | IdentSy | LongIdentSy | VarSy | TFreeSy | TVarSy | NumSy | - FloatSy | XNumSy | StrSy | StringSy | Cartouche | Space | Comment | EOF - datatype token = Token of token_kind * string * Position.range - val str_of_token: token -> string - val pos_of_token: token -> Position.T - val is_proper: token -> bool - val mk_eof: Position.T -> token - val eof: token - val is_eof: token -> bool - val stopper: token Scan.stopper - val terminals: string list - val is_terminal: string -> bool - val literal_markup: string -> Markup.T - val report_of_token: token -> Position.report - val reported_token_range: Proof.context -> token -> string - val matching_tokens: token * token -> bool - val valued_token: token -> bool - val predef_term: string -> token option - val implode_string: Symbol.symbol list -> string - val explode_string: string * Position.T -> Symbol_Pos.T list - val implode_str: Symbol.symbol list -> string - val explode_str: string * Position.T -> Symbol_Pos.T list - val tokenize: Scan.lexicon -> bool -> Symbol_Pos.T list -> token list - val read_indexname: string -> indexname - val read_var: string -> term - val read_variable: string -> indexname option - val read_nat: string -> int option - val read_int: string -> int option - val read_xnum: string -> {radix: int, leading_zeros: int, value: int} - val read_float: string -> {mant: int, exp: int} - val mark_class: string -> string val unmark_class: string -> string - val mark_type: string -> string val unmark_type: string -> string - val mark_const: string -> string val unmark_const: string -> string - val mark_fixed: string -> string val unmark_fixed: string -> string - val unmark: - {case_class: string -> 'a, - case_type: string -> 'a, - case_const: string -> 'a, - case_fixed: string -> 'a, - case_default: string -> 'a} -> string -> 'a - val is_marked: string -> bool - val dummy_type: term - val fun_type: term -end; - -structure Lexicon: LEXICON = -struct - -(** syntaxtic terms **) - -structure Syntax = -struct - -fun const c = Const (c, dummyT); -fun free x = Free (x, dummyT); -fun var xi = Var (xi, dummyT); - -end; - - - -(** basic scanners **) - -open Basic_Symbol_Pos; - -val err_prefix = 
"Inner lexical error: "; - -fun !!! msg = Symbol_Pos.!!! (fn () => err_prefix ^ msg); - -val scan_id = Symbol_Pos.scan_ident; -val scan_longid = scan_id @@@ (Scan.repeat1 ($$$ "." @@@ scan_id) >> flat); -val scan_tid = $$$ "'" @@@ scan_id; - -val scan_nat = Scan.many1 (Symbol.is_digit o Symbol_Pos.symbol); -val scan_int = $$$ "-" @@@ scan_nat || scan_nat; -val scan_natdot = scan_nat @@@ $$$ "." @@@ scan_nat; -val scan_float = $$$ "-" @@@ scan_natdot || scan_natdot; -val scan_hex = $$$ "0" @@@ $$$ "x" @@@ Scan.many1 (Symbol.is_ascii_hex o Symbol_Pos.symbol); -val scan_bin = $$$ "0" @@@ $$$ "b" @@@ Scan.many1 (fn (s, _) => s = "0" orelse s = "1"); - -val scan_id_nat = scan_id @@@ Scan.optional ($$$ "." @@@ scan_nat) []; -val scan_var = $$$ "?" @@@ scan_id_nat; -val scan_tvar = $$$ "?" @@@ $$$ "'" @@@ scan_id_nat; - -fun is_tid s = - (case try (unprefix "'") s of - SOME s' => Symbol_Pos.is_identifier s' - | NONE => false); - - - -(** datatype token **) - -datatype token_kind = - Literal | IdentSy | LongIdentSy | VarSy | TFreeSy | TVarSy | NumSy | - FloatSy | XNumSy | StrSy | StringSy | Cartouche | Space | Comment | EOF; - -datatype token = Token of token_kind * string * Position.range; - -fun str_of_token (Token (_, s, _)) = s; -fun pos_of_token (Token (_, _, (pos, _))) = pos; - -fun is_proper (Token (Space, _, _)) = false - | is_proper (Token (Comment, _, _)) = false - | is_proper _ = true; - - -(* stopper *) - -fun mk_eof pos = Token (EOF, "", (pos, Position.none)); -val eof = mk_eof Position.none; - -fun is_eof (Token (EOF, _, _)) = true - | is_eof _ = false; - -val stopper = Scan.stopper (K eof) is_eof; - - -(* terminal arguments *) - -val terminal_kinds = - [("id", IdentSy), - ("longid", LongIdentSy), - ("var", VarSy), - ("tid", TFreeSy), - ("tvar", TVarSy), - ("num_token", NumSy), - ("float_token", FloatSy), - ("xnum_token", XNumSy), - ("str_token", StrSy), - ("string_token", StringSy), - ("cartouche", Cartouche)]; - -val terminals = map #1 terminal_kinds; -val is_terminal = member (op =) terminals; - - -(* markup *) - -fun literal_markup s = - if Symbol.is_ascii_identifier s orelse exists Symbol.is_letter (Symbol.explode s) - then Markup.literal - else Markup.delimiter; - -val token_kind_markup = - fn TFreeSy => Markup.tfree - | TVarSy => Markup.tvar - | NumSy => Markup.numeral - | FloatSy => Markup.numeral - | XNumSy => Markup.numeral - | StrSy => Markup.inner_string - | StringSy => Markup.inner_string - | Cartouche => Markup.inner_cartouche - | Comment => Markup.inner_comment - | _ => Markup.empty; - -fun report_of_token (Token (kind, s, (pos, _))) = - let val markup = if kind = Literal then literal_markup s else token_kind_markup kind - in (pos, markup) end; - -fun reported_token_range ctxt tok = - if is_proper tok - then Context_Position.reported_text ctxt (pos_of_token tok) Markup.token_range "" - else ""; - - -(* matching_tokens *) - -fun matching_tokens (Token (Literal, x, _), Token (Literal, y, _)) = x = y - | matching_tokens (Token (k, _, _), Token (k', _, _)) = k = k'; - - -(* valued_token *) - -fun valued_token (Token (Literal, _, _)) = false - | valued_token (Token (EOF, _, _)) = false - | valued_token _ = true; - - -(* predef_term *) - -fun predef_term s = - (case AList.lookup (op =) terminal_kinds s of - SOME sy => SOME (Token (sy, s, Position.no_range)) - | NONE => NONE); - - - -(** string literals **) - -fun explode_literal scan_body (str, pos) = - (case Scan.read Symbol_Pos.stopper scan_body (Symbol_Pos.explode (str, pos)) of - SOME ss => ss - | _ => error (err_prefix 
^ "malformed string literal " ^ quote str ^ Position.here pos)); - - -(* string *) - -val scan_string = Scan.trace (Symbol_Pos.scan_string_qq err_prefix) >> #2; -val scan_string_body = Symbol_Pos.scan_string_qq err_prefix >> (#1 o #2); - -fun implode_string ss = quote (implode (map (fn "\"" => "\\\"" | s => s) ss)); -val explode_string = explode_literal scan_string_body; - - -(* str *) - -val scan_chr = - $$ "\\" |-- $$$ "'" || - Scan.one - ((fn s => s <> "\\" andalso s <> "'" andalso Symbol.is_regular s) o - Symbol_Pos.symbol) >> single || - $$$ "'" --| Scan.ahead (~$$ "'"); - -val scan_str = - Scan.ahead ($$ "'" -- $$ "'") |-- - !!! "unclosed string literal" - ($$$ "'" @@@ $$$ "'" @@@ (Scan.repeat scan_chr >> flat) @@@ $$$ "'" @@@ $$$ "'"); - -val scan_str_body = - Scan.ahead ($$ "'" |-- $$ "'") |-- - !!! "unclosed string literal" - ($$ "'" |-- $$ "'" |-- (Scan.repeat scan_chr >> flat) --| $$ "'" --| $$ "'"); - -fun implode_str ss = enclose "''" "''" (implode (map (fn "'" => "\\'" | s => s) ss)); -val explode_str = explode_literal scan_str_body; - - - -(** tokenize **) - -fun token_leq (Token (_, s1, _), Token (_, s2, _)) = s1 <= s2; -fun token kind ss = Token (kind, Symbol_Pos.content ss, Symbol_Pos.range ss); - -fun tokenize lex xids syms = - let - val scan_xid = - (if xids then $$$ "_" @@@ scan_id || scan_id else scan_id) || - $$$ "_" @@@ $$$ "_"; - - val scan_num = scan_hex || scan_bin || scan_int; - - val scan_val = - scan_tvar >> token TVarSy || - scan_var >> token VarSy || - scan_tid >> token TFreeSy || - scan_float >> token FloatSy || - scan_num >> token NumSy || - $$$ "#" @@@ scan_num >> token XNumSy || - scan_longid >> token LongIdentSy || - scan_xid >> token IdentSy; - - val scan_lit = Scan.literal lex >> token Literal; - - val scan_token = - Symbol_Pos.scan_cartouche err_prefix >> token Cartouche || - Symbol_Pos.scan_comment err_prefix >> token Comment || - Scan.max token_leq scan_lit scan_val || - scan_string >> token StringSy || - scan_str >> token StrSy || - Scan.many1 (Symbol.is_blank o Symbol_Pos.symbol) >> token Space; - in - (case Scan.error - (Scan.finite Symbol_Pos.stopper (Scan.repeat scan_token)) syms of - (toks, []) => toks - | (_, ss) => - error ("Inner lexical error" ^ Position.here (#1 (Symbol_Pos.range ss)) ^ - Markup.markup Markup.no_report ("\nat " ^ quote (Symbol_Pos.content ss)))) - - end; - - - -(** scan variables **) - -(* scan_indexname *) - -local - -val scan_vname = - let - fun nat n [] = n - | nat n (c :: cs) = nat (n * 10 + (ord c - ord "0")) cs; - - fun idxname cs ds = (implode (rev cs), nat 0 ds); - fun chop_idx [] ds = idxname [] ds - | chop_idx (cs as (_ :: "\\<^sub>" :: _)) ds = idxname cs ds - | chop_idx (c :: cs) ds = - if Symbol.is_digit c then chop_idx cs (c :: ds) - else idxname (c :: cs) ds; - - val scan = - (scan_id >> map Symbol_Pos.symbol) -- - Scan.optional ($$ "." |-- scan_nat >> (nat 0 o map Symbol_Pos.symbol)) ~1; - in - scan >> - (fn (cs, ~1) => chop_idx (rev cs) [] - | (cs, i) => (implode cs, i)) - end; - -in - -val scan_indexname = $$ "'" |-- scan_vname >> (fn (x, i) => ("'" ^ x, i)) || scan_vname; - -end; - - -(* indexname *) - -fun read_indexname s = - (case Scan.read Symbol_Pos.stopper scan_indexname (Symbol_Pos.explode (s, Position.none)) of - SOME xi => xi - | _ => error ("Lexical error in variable name: " ^ quote s)); - - -(* read_var *) - -fun read_var str = - let - val scan = - $$ "?" 
|-- scan_indexname --| Scan.ahead (Scan.one Symbol_Pos.is_eof) - >> Syntax.var || - Scan.many (Symbol.is_regular o Symbol_Pos.symbol) - >> (Syntax.free o implode o map Symbol_Pos.symbol); - in the (Scan.read Symbol_Pos.stopper scan (Symbol_Pos.explode (str, Position.none))) end; - - -(* read_variable *) - -fun read_variable str = - let val scan = $$ "?" |-- scan_indexname || scan_indexname - in Scan.read Symbol_Pos.stopper scan (Symbol_Pos.explode (str, Position.none)) end; - - -(* read numbers *) - -local - -fun nat cs = - Option.map (#1 o Library.read_int o map Symbol_Pos.symbol) - (Scan.read Symbol_Pos.stopper scan_nat cs); - -in - -fun read_nat s = nat (Symbol_Pos.explode (s, Position.none)); - -fun read_int s = - (case Symbol_Pos.explode (s, Position.none) of - ("-", _) :: cs => Option.map ~ (nat cs) - | cs => nat cs); - -end; - - -(* read_xnum: hex/bin/decimal *) - -local - -val ten = ord "0" + 10; -val a = ord "a"; -val A = ord "A"; -val _ = a > A orelse raise Fail "Bad ASCII"; - -fun remap_hex c = - let val x = ord c in - if x >= a then chr (x - a + ten) - else if x >= A then chr (x - A + ten) - else c - end; - -fun leading_zeros ["0"] = 0 - | leading_zeros ("0" :: cs) = 1 + leading_zeros cs - | leading_zeros _ = 0; - -in - -fun read_xnum str = - let - val (sign, radix, digs) = - (case Symbol.explode (perhaps (try (unprefix "#")) str) of - "0" :: "x" :: cs => (1, 16, map remap_hex cs) - | "0" :: "b" :: cs => (1, 2, cs) - | "-" :: cs => (~1, 10, cs) - | cs => (1, 10, cs)); - in - {radix = radix, - leading_zeros = leading_zeros digs, - value = sign * #1 (Library.read_radix_int radix digs)} - end; - -end; - -fun read_float str = - let - val (sign, cs) = - (case Symbol.explode str of - "-" :: cs => (~1, cs) - | cs => (1, cs)); - val (intpart, fracpart) = - (case take_prefix Symbol.is_digit cs of - (intpart, "." :: fracpart) => (intpart, fracpart) - | _ => raise Fail "read_float"); - in - {mant = sign * #1 (Library.read_int (intpart @ fracpart)), - exp = length fracpart} - end; - - -(* marked logical entities *) - -fun marker s = (prefix s, unprefix s); - -val (mark_class, unmark_class) = marker "\\<^class>"; -val (mark_type, unmark_type) = marker "\\<^type>"; -val (mark_const, unmark_const) = marker "\\<^const>"; -val (mark_fixed, unmark_fixed) = marker "\\<^fixed>"; - -fun unmark {case_class, case_type, case_const, case_fixed, case_default} s = - (case try unmark_class s of - SOME c => case_class c - | NONE => - (case try unmark_type s of - SOME c => case_type c - | NONE => - (case try unmark_const s of - SOME c => case_const c - | NONE => - (case try unmark_fixed s of - SOME c => case_fixed c - | NONE => case_default s)))); - -val is_marked = - unmark {case_class = K true, case_type = K true, case_const = K true, - case_fixed = K true, case_default = K false}; - -val dummy_type = Syntax.const (mark_type "dummy"); -val fun_type = Syntax.const (mark_type "fun"); - -end; diff --git a/core/Pure/Syntax/local_syntax.ML b/core/Pure/Syntax/local_syntax.ML deleted file mode 100644 index 6c4bb550..00000000 --- a/core/Pure/Syntax/local_syntax.ML +++ /dev/null @@ -1,124 +0,0 @@ -(* Title: Pure/Syntax/local_syntax.ML - Author: Makarius - -Local syntax depending on theory syntax, with special support for -implicit structure references. 
-*) - -signature LOCAL_SYNTAX = -sig - type T - val syn_of: T -> Syntax.syntax - val idents_of: T -> {structs: string list, fixes: string list} - val init: theory -> T - val rebuild: theory -> T -> T - datatype kind = Type | Const | Fixed - val add_syntax: theory -> (kind * (string * typ * mixfix)) list -> T -> T - val set_mode: Syntax.mode -> T -> T - val restore_mode: T -> T -> T - val update_modesyntax: theory -> bool -> Syntax.mode -> - (kind * (string * typ * mixfix)) list -> T -> T -end; - -structure Local_Syntax: LOCAL_SYNTAX = -struct - -(* datatype T *) - -type local_mixfix = - (string * bool) * (*name, fixed?*) - ((bool * bool * Syntax.mode) * (string * typ * mixfix)); (*type?, add?, mode, declaration*) - -datatype T = Syntax of - {thy_syntax: Syntax.syntax, - local_syntax: Syntax.syntax, - mode: Syntax.mode, - mixfixes: local_mixfix list, - idents: string list * string list}; - -fun make_syntax (thy_syntax, local_syntax, mode, mixfixes, idents) = - Syntax {thy_syntax = thy_syntax, local_syntax = local_syntax, - mode = mode, mixfixes = mixfixes, idents = idents}; - -fun map_syntax f (Syntax {thy_syntax, local_syntax, mode, mixfixes, idents}) = - make_syntax (f (thy_syntax, local_syntax, mode, mixfixes, idents)); - -fun is_consistent thy (Syntax {thy_syntax, ...}) = - Syntax.eq_syntax (Sign.syn_of thy, thy_syntax); - -fun syn_of (Syntax {local_syntax, ...}) = local_syntax; -fun idents_of (Syntax {idents = (structs, fixes), ...}) = {structs = structs, fixes = fixes}; - - -(* build syntax *) - -fun build_syntax thy mode mixfixes (idents as (structs, _)) = - let - val thy_syntax = Sign.syn_of thy; - fun update_gram ((true, add, m), decls) = Syntax.update_type_gram add m decls - | update_gram ((false, add, m), decls) = - Syntax.update_const_gram add (Sign.is_logtype thy) m decls; - - val local_syntax = thy_syntax - |> Syntax.update_trfuns - ([], [Syntax_Ext.mk_trfun (Syntax_Trans.struct_tr structs)], - [], [Syntax_Ext.mk_trfun (Syntax_Trans.struct_ast_tr' structs)]) - |> fold update_gram (AList.coalesce (op =) (rev (map snd mixfixes))); - in make_syntax (thy_syntax, local_syntax, mode, mixfixes, idents) end; - -fun init thy = build_syntax thy Syntax.mode_default [] ([], []); - -fun rebuild thy (syntax as Syntax {mode, mixfixes, idents, ...}) = - if is_consistent thy syntax then syntax - else build_syntax thy mode mixfixes idents; - - -(* mixfix declarations *) - -datatype kind = Type | Const | Fixed; - -local - -fun prep_mixfix _ _ (_, (_, _, Structure)) = NONE - | prep_mixfix add mode (Type, decl as (x, _, _)) = SOME ((x, false), ((true, add, mode), decl)) - | prep_mixfix add mode (Const, decl as (x, _, _)) = SOME ((x, false), ((false, add, mode), decl)) - | prep_mixfix add mode (Fixed, (x, T, mx)) = - SOME ((x, true), ((false, add, mode), (Lexicon.mark_fixed x, T, mx))); - -fun prep_struct (Fixed, (c, _, Structure)) = SOME c - | prep_struct (_, (c, _, Structure)) = error ("Bad mixfix declaration for " ^ quote c) - | prep_struct _ = NONE; - -in - -fun update_syntax add thy raw_decls - (syntax as (Syntax {mode, mixfixes, idents = (structs, _), ...})) = - (case filter_out (fn (_, (_, _, mx)) => mx = NoSyn) raw_decls of - [] => syntax - | decls => - let - val new_mixfixes = map_filter (prep_mixfix add mode) decls; - val new_structs = map_filter prep_struct decls; - val mixfixes' = rev new_mixfixes @ mixfixes; - val structs' = - if add then structs @ new_structs - else subtract (op =) new_structs structs; - val fixes' = fold (fn ((x, true), _) => cons x | _ => I) mixfixes' []; - in 
build_syntax thy mode mixfixes' (structs', fixes') end); - -val add_syntax = update_syntax true; - -end; - - -(* syntax mode *) - -fun set_mode mode = map_syntax (fn (thy_syntax, local_syntax, _, mixfixes, idents) => - (thy_syntax, local_syntax, mode, mixfixes, idents)); - -fun restore_mode (Syntax {mode, ...}) = set_mode mode; - -fun update_modesyntax thy add mode args syntax = - syntax |> set_mode mode |> update_syntax add thy args |> restore_mode syntax; - -end; diff --git a/core/Pure/Syntax/mixfix.ML b/core/Pure/Syntax/mixfix.ML deleted file mode 100644 index 0d8bf9ce..00000000 --- a/core/Pure/Syntax/mixfix.ML +++ /dev/null @@ -1,162 +0,0 @@ -(* Title: Pure/Syntax/mixfix.ML - Author: Tobias Nipkow and Markus Wenzel, TU Muenchen - -Mixfix declarations, infixes, binders. -*) - -signature BASIC_MIXFIX = -sig - datatype mixfix = - NoSyn | - Mixfix of string * int list * int | - Delimfix of string | - Infix of string * int | - Infixl of string * int | - Infixr of string * int | - Binder of string * int * int | - Structure -end; - -signature MIXFIX = -sig - include BASIC_MIXFIX - val pretty_mixfix: mixfix -> Pretty.T - val mixfix_args: mixfix -> int - val mixfixT: mixfix -> typ - val make_type: int -> typ - val binder_name: string -> string - val syn_ext_types: (string * typ * mixfix) list -> Syntax_Ext.syn_ext - val syn_ext_consts: (string -> bool) -> (string * typ * mixfix) list -> Syntax_Ext.syn_ext -end; - -structure Mixfix: MIXFIX = -struct - -(** mixfix declarations **) - -datatype mixfix = - NoSyn | - Mixfix of string * int list * int | - Delimfix of string | - Infix of string * int | - Infixl of string * int | - Infixr of string * int | - Binder of string * int * int | - Structure; - - -(* pretty_mixfix *) - -local - -val quoted = Pretty.quote o Pretty.str; -val keyword = Pretty.keyword2; -val parens = Pretty.enclose "(" ")"; -val brackets = Pretty.enclose "[" "]"; -val int = Pretty.str o string_of_int; - -in - -fun pretty_mixfix NoSyn = Pretty.str "" - | pretty_mixfix (Mixfix (s, ps, p)) = - parens (Pretty.breaks [quoted s, brackets (Pretty.commas (map int ps)), int p]) - | pretty_mixfix (Delimfix s) = parens [quoted s] - | pretty_mixfix (Infix (s, p)) = parens (Pretty.breaks [keyword "infix", quoted s, int p]) - | pretty_mixfix (Infixl (s, p)) = parens (Pretty.breaks [keyword "infixl", quoted s, int p]) - | pretty_mixfix (Infixr (s, p)) = parens (Pretty.breaks [keyword "infixl", quoted s, int p]) - | pretty_mixfix (Binder (s, p1, p2)) = - parens (Pretty.breaks [keyword "binder", quoted s, brackets [int p1], int p2]) - | pretty_mixfix Structure = parens [keyword "structure"]; - -end; - - -(* syntax specifications *) - -fun mixfix_args NoSyn = 0 - | mixfix_args (Mixfix (sy, _, _)) = Syntax_Ext.mfix_args sy - | mixfix_args (Delimfix sy) = Syntax_Ext.mfix_args sy - | mixfix_args (Infix (sy, _)) = 2 + Syntax_Ext.mfix_args sy - | mixfix_args (Infixl (sy, _)) = 2 + Syntax_Ext.mfix_args sy - | mixfix_args (Infixr (sy, _)) = 2 + Syntax_Ext.mfix_args sy - | mixfix_args (Binder _) = 1 - | mixfix_args Structure = 0; - -fun mixfixT (Binder _) = (dummyT --> dummyT) --> dummyT - | mixfixT mx = replicate (mixfix_args mx) dummyT ---> dummyT; - - -(* syn_ext_types *) - -val typeT = Type ("type", []); -fun make_type n = replicate n typeT ---> typeT; - -fun syn_ext_types type_decls = - let - fun mk_infix sy ty t p1 p2 p3 = Syntax_Ext.Mfix ("(_ " ^ sy ^ "/ _)", ty, t, [p1, p2], p3); - - fun mfix_of (_, _, NoSyn) = NONE - | mfix_of (t, ty, Mixfix (sy, ps, p)) = SOME (Syntax_Ext.Mfix (sy, ty, t, ps, p)) 
- | mfix_of (t, ty, Delimfix sy) = SOME (Syntax_Ext.Mfix (sy, ty, t, [], 1000)) - | mfix_of (t, ty, Infix (sy, p)) = SOME (mk_infix sy ty t (p + 1) (p + 1) p) - | mfix_of (t, ty, Infixl (sy, p)) = SOME (mk_infix sy ty t p (p + 1) p) - | mfix_of (t, ty, Infixr (sy, p)) = SOME (mk_infix sy ty t (p + 1) p p) - | mfix_of (t, _, _) = error ("Bad mixfix declaration for " ^ quote t); - - fun check_args (_, ty, _) (SOME (mfix as Syntax_Ext.Mfix (sy, _, _, _, _))) = - if length (Term.binder_types ty) = Syntax_Ext.mfix_args sy then () - else Syntax_Ext.err_in_mfix "Bad number of type constructor arguments" mfix - | check_args _ NONE = (); - - val mfix = map mfix_of type_decls; - val _ = map2 check_args type_decls mfix; - val consts = map (fn (t, _, _) => (t, "")) type_decls; - in Syntax_Ext.syn_ext (map_filter I mfix) consts ([], [], [], []) ([], []) end; - - -(* syn_ext_consts *) - -val binder_stamp = stamp (); -val binder_name = suffix "_binder"; - -fun syn_ext_consts is_logtype const_decls = - let - fun mk_infix sy ty c p1 p2 p3 = - [Syntax_Ext.Mfix ("op " ^ sy, ty, c, [], 1000), - Syntax_Ext.Mfix ("(_ " ^ sy ^ "/ _)", ty, c, [p1, p2], p3)]; - - fun binder_typ _ (Type ("fun", [Type ("fun", [_, ty2]), ty3])) = - [Type ("idts", []), ty2] ---> ty3 - | binder_typ c _ = error ("Bad type of binder: " ^ quote c); - - fun mfix_of (_, _, NoSyn) = [] - | mfix_of (c, ty, Mixfix (sy, ps, p)) = [Syntax_Ext.Mfix (sy, ty, c, ps, p)] - | mfix_of (c, ty, Delimfix sy) = [Syntax_Ext.Mfix (sy, ty, c, [], 1000)] - | mfix_of (c, ty, Infix (sy, p)) = mk_infix sy ty c (p + 1) (p + 1) p - | mfix_of (c, ty, Infixl (sy, p)) = mk_infix sy ty c p (p + 1) p - | mfix_of (c, ty, Infixr (sy, p)) = mk_infix sy ty c (p + 1) p p - | mfix_of (c, ty, Binder (sy, p, q)) = - [Syntax_Ext.Mfix ("(3" ^ sy ^ "_./ _)", binder_typ c ty, (binder_name c), [0, p], q)] - | mfix_of (c, _, _) = error ("Bad mixfix declaration for " ^ quote c); - - fun binder (c, _, Binder _) = SOME (binder_name c, c) - | binder _ = NONE; - - val mfix = maps mfix_of const_decls; - val binders = map_filter binder const_decls; - val binder_trs = binders - |> map (Syntax_Ext.stamp_trfun binder_stamp o Syntax_Trans.mk_binder_tr); - val binder_trs' = binders - |> map (Syntax_Ext.stamp_trfun binder_stamp o - apsnd Syntax_Trans.non_typed_tr' o Syntax_Trans.mk_binder_tr' o swap); - - val consts = binders @ map (fn (c, _, _) => (c, "")) const_decls; - in - Syntax_Ext.syn_ext' is_logtype mfix consts ([], binder_trs, binder_trs', []) ([], []) - end; - -end; - -structure Basic_Mixfix: BASIC_MIXFIX = Mixfix; -open Basic_Mixfix; - diff --git a/core/Pure/Syntax/parser.ML b/core/Pure/Syntax/parser.ML deleted file mode 100644 index 4f6d3c69..00000000 --- a/core/Pure/Syntax/parser.ML +++ /dev/null @@ -1,776 +0,0 @@ -(* Title: Pure/Syntax/parser.ML - Author: Carsten Clasohm, Sonia Mahjoub, and Markus Wenzel, TU Muenchen - -General context-free parser for the inner syntax of terms, types, etc. 
-*) - -signature PARSER = -sig - type gram - val empty_gram: gram - val extend_gram: Syntax_Ext.xprod list -> gram -> gram - val make_gram: Syntax_Ext.xprod list -> gram - val pretty_gram: gram -> Pretty.T list - datatype parsetree = - Node of string * parsetree list | - Tip of Lexicon.token - exception PARSETREE of parsetree - val pretty_parsetree: parsetree -> Pretty.T - val parse: gram -> string -> Lexicon.token list -> parsetree list - val guess_infix_lr: gram -> string -> (string * bool * bool * int) option - val branching_level: int Config.T -end; - -structure Parser: PARSER = -struct - -(** datatype gram **) - -(*production for the NTs are stored in a vector - so we can identify NTs by their index*) -type nt_tag = int; - -datatype symb = - Terminal of Lexicon.token -| Nonterminal of nt_tag * int; (*(tag, precedence)*) - -type nt_gram = - ((nt_tag list * Lexicon.token list) * - (Lexicon.token option * (symb list * string * int) list) list); - (*(([dependent_nts], [start_tokens]), [(start_token, [(rhs, name, prio)])])*) - (*depent_nts is a list of all NTs whose lookahead depends on this NT's lookahead*) - -datatype gram = - Gram of - {nt_count: int, - prod_count: int, - tags: nt_tag Symtab.table, - chains: (nt_tag * nt_tag list) list, (*[(to, [from])]*) - lambdas: nt_tag list, - prods: nt_gram Vector.vector}; - (*"tags" is used to map NT names (i.e. strings) to tags; - chain productions are not stored as normal productions - but instead as an entry in "chains"; - lambda productions are stored as normal productions - and also as an entry in "lambdas"*) - -val union_token = union Lexicon.matching_tokens; -val subtract_token = subtract Lexicon.matching_tokens; - -(*productions for which no starting token is - known yet are associated with this token*) -val unknown_start = Lexicon.eof; - -(*get all NTs that are connected with a list of NTs*) -fun connected_with _ ([]: nt_tag list) relatives = relatives - | connected_with chains (root :: roots) relatives = - let val branches = subtract (op =) relatives (these (AList.lookup (op =) chains root)); - in connected_with chains (branches @ roots) (branches @ relatives) end; - -(*convert productions to grammar; - N.B. 
that the chains parameter has the form [(from, [to])]; - prod_count is of type "int option" and is only updated if it is <> NONE*) -fun add_prods _ chains lambdas prod_count [] = (chains, lambdas, prod_count) - | add_prods prods chains lambdas prod_count ((lhs, new_prod as (rhs, name, pri)) :: ps) = - let - val chain_from = - (case (pri, rhs) of - (~1, [Nonterminal (id, ~1)]) => SOME id - | _ => NONE); - - (*store chain if it does not already exist*) - val (new_chain, chains') = - (case chain_from of - NONE => (NONE, chains) - | SOME from => - let val old_tos = these (AList.lookup (op =) chains from) in - if member (op =) old_tos lhs then (NONE, chains) - else (SOME from, AList.update (op =) (from, insert (op =) lhs old_tos) chains) - end); - - (*propagate new chain in lookahead and lambda lists; - added_starts is used later to associate existing - productions with new starting tokens*) - val (added_starts, lambdas') = - if is_none new_chain then ([], lambdas) - else - let (*lookahead of chain's source*) - val ((from_nts, from_tks), _) = Array.sub (prods, the new_chain); - - (*copy from's lookahead to chain's destinations*) - fun copy_lookahead [] added = added - | copy_lookahead (to :: tos) added = - let - val ((to_nts, to_tks), ps) = Array.sub (prods, to); - - val new_tks = subtract (op =) to_tks from_tks; (*added lookahead tokens*) - val _ = Array.update (prods, to, ((to_nts, to_tks @ new_tks), ps)); - in - copy_lookahead tos (if null new_tks then added else (to, new_tks) :: added) - end; - - val tos = connected_with chains' [lhs] [lhs]; - in - (copy_lookahead tos [], - union (op =) (if member (op =) lambdas lhs then tos else []) lambdas) - end; - - (*test if new production can produce lambda - (rhs must either be empty or only consist of lambda NTs)*) - val (new_lambda, lambdas') = - if forall - (fn Nonterminal (id, _) => member (op =) lambdas' id - | Terminal _ => false) rhs - then (true, union (op =) (connected_with chains' [lhs] [lhs]) lambdas') - else (false, lambdas'); - - (*list optional terminal and all nonterminals on which the lookahead - of a production depends*) - fun lookahead_dependency _ [] nts = (NONE, nts) - | lookahead_dependency _ (Terminal tk :: _) nts = (SOME tk, nts) - | lookahead_dependency lambdas (Nonterminal (nt, _) :: symbs) nts = - if member (op =) lambdas nt then - lookahead_dependency lambdas symbs (nt :: nts) - else (NONE, nt :: nts); - - (*get all known starting tokens for a nonterminal*) - fun starts_for_nt nt = snd (fst (Array.sub (prods, nt))); - - (*update prods, lookaheads, and lambdas according to new lambda NTs*) - val (added_starts', lambdas') = - let - (*propagate added lambda NT*) - fun propagate_lambda [] added_starts lambdas = (added_starts, lambdas) - | propagate_lambda (l :: ls) added_starts lambdas = - let - (*get lookahead for lambda NT*) - val ((dependent, l_starts), _) = Array.sub (prods, l); - - (*check productions whose lookahead may depend on lambda NT*) - fun examine_prods [] add_lambda nt_dependencies added_tks nt_prods = - (add_lambda, nt_dependencies, added_tks, nt_prods) - | examine_prods ((p as (rhs, _, _)) :: ps) add_lambda - nt_dependencies added_tks nt_prods = - let val (tk, nts) = lookahead_dependency lambdas rhs [] in - if member (op =) nts l then (*update production's lookahead*) - let - val new_lambda = is_none tk andalso subset (op =) (nts, lambdas); - - val new_tks = - (if is_some tk then [the tk] else []) - |> fold (union_token o starts_for_nt) nts - |> subtract (op =) l_starts; - - val added_tks' = union_token 
added_tks new_tks; - - val nt_dependencies' = union (op =) nts nt_dependencies; - - (*associate production with new starting tokens*) - fun copy ([]: Lexicon.token option list) nt_prods = nt_prods - | copy (tk :: tks) nt_prods = - let - val old_prods = these (AList.lookup (op =) nt_prods tk); - val prods' = p :: old_prods; - in - nt_prods - |> AList.update (op =) (tk, prods') - |> copy tks - end; - - val nt_prods' = - let val new_opt_tks = map SOME new_tks in - copy - ((if new_lambda then [NONE] else []) @ new_opt_tks) nt_prods - end; - in - examine_prods ps (add_lambda orelse new_lambda) - nt_dependencies' added_tks' nt_prods' - end - else (*skip production*) - examine_prods ps add_lambda nt_dependencies added_tks nt_prods - end; - - (*check each NT whose lookahead depends on new lambda NT*) - fun process_nts [] added_lambdas added_starts = - (added_lambdas, added_starts) - | process_nts (nt :: nts) added_lambdas added_starts = - let - val (lookahead as (old_nts, old_tks), nt_prods) = Array.sub (prods, nt); - - (*existing productions whose lookahead may depend on l*) - val tk_prods = - these - (AList.lookup (op =) nt_prods - (SOME (hd l_starts handle List.Empty => unknown_start))); - - (*add_lambda is true if an existing production of the nt - produces lambda due to the new lambda NT l*) - val (add_lambda, nt_dependencies, added_tks, nt_prods') = - examine_prods tk_prods false [] [] nt_prods; - - val added_nts = subtract (op =) old_nts nt_dependencies; - - val added_lambdas' = - if add_lambda then nt :: added_lambdas - else added_lambdas; - val _ = - Array.update - (prods, nt, ((added_nts @ old_nts, old_tks @ added_tks), nt_prods')); - (*N.B. that because the tks component - is used to access existing - productions we have to add new - tokens at the _end_ of the list*) - in - if null added_tks then - process_nts nts added_lambdas' added_starts - else - process_nts nts added_lambdas' ((nt, added_tks) :: added_starts) - end; - - val (added_lambdas, added_starts') = process_nts dependent [] added_starts; - val added_lambdas' = subtract (op =) lambdas added_lambdas; - in - propagate_lambda (ls @ added_lambdas') added_starts' (added_lambdas' @ lambdas) - end; - in propagate_lambda (subtract (op =) lambdas lambdas') added_starts lambdas' end; - - (*insert production into grammar*) - val (added_starts', prod_count') = - if is_some chain_from - then (added_starts', prod_count) (*don't store chain production*) - else - let - (*lookahead tokens of new production and on which - NTs lookahead depends*) - val (start_tk, start_nts) = lookahead_dependency lambdas' rhs []; - - val start_tks = - (if is_some start_tk then [the start_tk] else []) - |> fold (union_token o starts_for_nt) start_nts; - - val opt_starts = - (if new_lambda then [NONE] - else if null start_tks then [SOME unknown_start] - else []) @ map SOME start_tks; - - (*add lhs NT to list of dependent NTs in lookahead*) - fun add_nts [] = () - | add_nts (nt :: nts) = - let val ((old_nts, old_tks), ps) = Array.sub (prods, nt) in - if member (op =) old_nts lhs then () - else Array.update (prods, nt, ((lhs :: old_nts, old_tks), ps)) - end; - - (*add new start tokens to chained NTs' lookahead list; - also store new production for lhs NT*) - fun add_tks [] added prod_count = (added, prod_count) - | add_tks (nt :: nts) added prod_count = - let - val ((old_nts, old_tks), nt_prods) = Array.sub (prods, nt); - - val new_tks = subtract_token old_tks start_tks; - - (*store new production*) - fun store [] prods is_new = - (prods, - if is_some prod_count 
andalso is_new then - Option.map (fn x => x + 1) prod_count - else prod_count, is_new) - | store (tk :: tks) prods is_new = - let - val tk_prods = these (AList.lookup (op =) prods tk); - - (*if prod_count = NONE then we can assume that - grammar does not contain new production already*) - val (tk_prods', is_new') = - if is_some prod_count then - if member (op =) tk_prods new_prod then (tk_prods, false) - else (new_prod :: tk_prods, true) - else (new_prod :: tk_prods, true); - - val prods' = - if is_new' then - AList.update (op =) (tk: Lexicon.token option, tk_prods') prods - else prods; - in store tks prods' (is_new orelse is_new') end; - - val (nt_prods', prod_count', changed) = - if nt = lhs - then store opt_starts nt_prods false - else (nt_prods, prod_count, false); - val _ = - if not changed andalso null new_tks then () - else Array.update (prods, nt, ((old_nts, old_tks @ new_tks), nt_prods')); - in - add_tks nts - (if null new_tks then added else (nt, new_tks) :: added) prod_count' - end; - val _ = add_nts start_nts; - in - add_tks (connected_with chains' [lhs] [lhs]) [] prod_count - end; - - (*associate productions with new lookaheads*) - val _ = - let - (*propagate added start tokens*) - fun add_starts [] = () - | add_starts ((changed_nt, new_tks) :: starts) = - let - (*token under which old productions which - depend on changed_nt could be stored*) - val key = - (case find_first (not o member (op =) new_tks) (starts_for_nt changed_nt) of - NONE => SOME unknown_start - | t => t); - - (*copy productions whose lookahead depends on changed_nt; - if key = SOME unknown_start then tk_prods is used to hold - the productions not copied*) - fun update_prods [] result = result - | update_prods ((p as (rhs, _: string, _: nt_tag)) :: ps) - (tk_prods, nt_prods) = - let - (*lookahead dependency for production*) - val (tk, depends) = lookahead_dependency lambdas' rhs []; - - (*test if this production has to be copied*) - val update = member (op =) depends changed_nt; - - (*test if production could already be associated with - a member of new_tks*) - val lambda = - length depends > 1 orelse - not (null depends) andalso is_some tk - andalso member (op =) new_tks (the tk); - - (*associate production with new starting tokens*) - fun copy ([]: Lexicon.token list) nt_prods = nt_prods - | copy (tk :: tks) nt_prods = - let - val tk_prods = these (AList.lookup (op =) nt_prods (SOME tk)); - - val tk_prods' = - if not lambda then p :: tk_prods - else insert (op =) p tk_prods; - (*if production depends on lambda NT we - have to look for duplicates*) - in - nt_prods - |> AList.update (op =) (SOME tk, tk_prods') - |> copy tks - end; - val result = - if update then (tk_prods, copy new_tks nt_prods) - else if key = SOME unknown_start then (p :: tk_prods, nt_prods) - else (tk_prods, nt_prods); - in update_prods ps result end; - - (*copy existing productions for new starting tokens*) - fun process_nts [] added = added - | process_nts (nt :: nts) added = - let - val (lookahead as (old_nts, old_tks), nt_prods) = Array.sub (prods, nt); - - val tk_prods = these (AList.lookup (op =) nt_prods key); - - (*associate productions with new lookahead tokens*) - val (tk_prods', nt_prods') = update_prods tk_prods ([], nt_prods); - - val nt_prods'' = - if key = SOME unknown_start then - AList.update (op =) (key, tk_prods') nt_prods' - else nt_prods'; - - val added_tks = subtract_token old_tks new_tks; - in - if null added_tks then - (Array.update (prods, nt, (lookahead, nt_prods'')); - process_nts nts added) - else - 
(Array.update (prods, nt, ((old_nts, added_tks @ old_tks), nt_prods'')); - process_nts nts ((nt, added_tks) :: added)) - end; - - val ((dependent, _), _) = Array.sub (prods, changed_nt); - in add_starts (starts @ process_nts dependent []) end; - in add_starts added_starts' end; - in add_prods prods chains' lambdas' prod_count ps end; - - -(* pretty_gram *) - -fun pretty_gram (Gram {tags, prods, chains, ...}) = - let - fun pretty_name name = [Pretty.str (name ^ " =")]; - - val nt_name = the o Inttab.lookup (Inttab.make (map swap (Symtab.dest tags))); - - fun pretty_symb (Terminal (Lexicon.Token (Lexicon.Literal, s, _))) = Pretty.quote (Pretty.str s) - | pretty_symb (Terminal tok) = Pretty.str (Lexicon.str_of_token tok) - | pretty_symb (Nonterminal (tag, p)) = - Pretty.str (nt_name tag ^ "[" ^ signed_string_of_int p ^ "]"); - - fun pretty_const "" = [] - | pretty_const c = [Pretty.str ("=> " ^ quote c)]; - - fun pretty_pri p = [Pretty.str ("(" ^ signed_string_of_int p ^ ")")]; - - fun pretty_prod name (symbs, const, pri) = - Pretty.block (Pretty.breaks (pretty_name name @ - map pretty_symb symbs @ pretty_const const @ pretty_pri pri)); - - fun pretty_nt (name, tag) = - let - fun prod_of_chain from = ([Nonterminal (from, ~1)], "", ~1); - - val nt_prods = - fold (union (op =) o snd) (snd (Vector.sub (prods, tag))) [] @ - map prod_of_chain (these (AList.lookup (op =) chains tag)); - in map (pretty_prod name) nt_prods end; - - in maps pretty_nt (sort_wrt fst (Symtab.dest tags)) end; - - - -(** Operations on grammars **) - -val empty_gram = - Gram - {nt_count = 0, - prod_count = 0, - tags = Symtab.empty, chains = [], - lambdas = [], - prods = Vector.fromList [(([], []), [])]}; - - -(*Invert list of chain productions*) -fun inverse_chains [] result = result - | inverse_chains ((root, branches: nt_tag list) :: cs) result = - let - fun add ([]: nt_tag list) result = result - | add (id :: ids) result = - let val old = these (AList.lookup (op =) result id); - in add ids (AList.update (op =) (id, root :: old) result) end; - in inverse_chains cs (add branches result) end; - - -(*Add productions to a grammar*) -fun extend_gram [] gram = gram - | extend_gram xprods (Gram {nt_count, prod_count, tags, chains, lambdas, prods}) = - let - (*Get tag for existing nonterminal or create a new one*) - fun get_tag nt_count tags nt = - (case Symtab.lookup tags nt of - SOME tag => (nt_count, tags, tag) - | NONE => (nt_count + 1, Symtab.update_new (nt, nt_count) tags, nt_count)); - - (*Convert symbols to the form used by the parser; - delimiters and predefined terms are stored as terminals, - nonterminals are converted to integer tags*) - fun symb_of [] nt_count tags result = (nt_count, tags, rev result) - | symb_of (Syntax_Ext.Delim s :: ss) nt_count tags result = - symb_of ss nt_count tags - (Terminal (Lexicon.Token (Lexicon.Literal, s, Position.no_range)) :: result) - | symb_of (Syntax_Ext.Argument (s, p) :: ss) nt_count tags result = - let - val (nt_count', tags', new_symb) = - (case Lexicon.predef_term s of - NONE => - let val (nt_count', tags', s_tag) = get_tag nt_count tags s; - in (nt_count', tags', Nonterminal (s_tag, p)) end - | SOME tk => (nt_count, tags, Terminal tk)); - in symb_of ss nt_count' tags' (new_symb :: result) end - | symb_of (_ :: ss) nt_count tags result = symb_of ss nt_count tags result; - - (*Convert list of productions by invoking symb_of for each of them*) - fun prod_of [] nt_count prod_count tags result = - (nt_count, prod_count, tags, result) - | prod_of (Syntax_Ext.XProd (lhs, xsymbs,
const, pri) :: ps) - nt_count prod_count tags result = - let - val (nt_count', tags', lhs_tag) = get_tag nt_count tags lhs; - val (nt_count'', tags'', prods) = symb_of xsymbs nt_count' tags' []; - in - prod_of ps nt_count'' (prod_count + 1) tags'' - ((lhs_tag, (prods, const, pri)) :: result) - end; - - val (nt_count', prod_count', tags', xprods') = - prod_of xprods nt_count prod_count tags []; - - (*Copy array containing productions of old grammar; - this has to be done to preserve the old grammar while being able - to change the array's content*) - val prods' = - let - fun get_prod i = - if i < nt_count then Vector.sub (prods, i) - else (([], []), []); - in Array.tabulate (nt_count', get_prod) end; - - val fromto_chains = inverse_chains chains []; - - (*Add new productions to old ones*) - val (fromto_chains', lambdas', _) = - add_prods prods' fromto_chains lambdas NONE xprods'; - - val chains' = inverse_chains fromto_chains' []; - in - Gram - {nt_count = nt_count', - prod_count = prod_count', - tags = tags', - chains = chains', - lambdas = lambdas', - prods = Array.vector prods'} - end; - -fun make_gram xprods = extend_gram xprods empty_gram; - - - -(** parser **) - -(* parsetree *) - -datatype parsetree = - Node of string * parsetree list | - Tip of Lexicon.token; - -exception PARSETREE of parsetree; - -fun pretty_parsetree parsetree = - let - fun pretty (Node (c, pts)) = - [Pretty.enclose "(" ")" (Pretty.breaks (Pretty.quote (Pretty.str c) :: maps pretty pts))] - | pretty (Tip tok) = - if Lexicon.valued_token tok then [Pretty.str (Lexicon.str_of_token tok)] else []; - in (case pretty parsetree of [prt] => prt | _ => raise PARSETREE parsetree) end; - - -(* parser state *) - -type state = - nt_tag * int * (*identification and production precedence*) - parsetree list * (*already parsed nonterminals on rhs*) - symb list * (*rest of rhs*) - string * (*name of production*) - int; (*index for previous state list*) - - -(*Get all rhss with precedence >= min_prec*) -fun get_RHS min_prec = filter (fn (_, _, prec: int) => prec >= min_prec); - -(*Get all rhss with precedence >= min_prec and < max_prec*) -fun get_RHS' min_prec max_prec = - filter (fn (_, _, prec: int) => prec >= min_prec andalso prec < max_prec); - -(*Make states using a list of rhss*) -fun mk_states i min_prec lhs_ID rhss = - let fun mk_state (rhs, id, prod_prec) = (lhs_ID, prod_prec, [], rhs, id, i); - in map mk_state rhss end; - -(*Add parse tree to list and eliminate duplicates - saving the maximum precedence*) -fun conc (t: parsetree list, prec: int) [] = (NONE, [(t, prec)]) - | conc (t, prec) ((t', prec') :: ts) = - if t = t' then - (SOME prec', - if prec' >= prec then (t', prec') :: ts - else (t, prec) :: ts) - else - let val (n, ts') = conc (t, prec) ts - in (n, (t', prec') :: ts') end; - -(*Update entry in used*) -fun update_trees (A, t) used = - let - val (i, ts) = the (Inttab.lookup used A); - val (n, ts') = conc t ts; - in (n, Inttab.update (A, (i, ts')) used) end; - -(*Replace entry in used*) -fun update_prec (A, prec) = - Inttab.map_entry A (fn (_, ts) => (prec, ts)); - -fun getS A max_prec NONE Si = - filter - (fn (_, _, _, Nonterminal (B, prec) :: _, _, _) => A = B andalso prec <= max_prec - | _ => false) Si - | getS A max_prec (SOME min_prec) Si = - filter - (fn (_, _, _, Nonterminal (B, prec) :: _, _, _) => - A = B andalso prec > min_prec andalso prec <= max_prec - | _ => false) Si; - -fun get_states Estate i ii A max_prec = - filter - (fn (_, _, _, Nonterminal (B, prec) :: _, _, _) => A = B andalso prec <= 
max_prec - | _ => false) - (Array.sub (Estate, ii)); - - -fun movedot_term c (A, j, ts, Terminal a :: sa, id, i) = - if Lexicon.valued_token c orelse id <> "" - then (A, j, Tip c :: ts, sa, id, i) - else (A, j, ts, sa, id, i); - -fun movedot_nonterm tt (A, j, ts, Nonterminal _ :: sa, id, i) = - (A, j, tt @ ts, sa, id, i); - -fun movedot_lambda [] _ = [] - | movedot_lambda ((t, ki) :: ts) (state as (B, j, tss, Nonterminal (A, k) :: sa, id, i)) = - if k <= ki then (B, j, t @ tss, sa, id, i) :: movedot_lambda ts state - else movedot_lambda ts state; - - -(*trigger value for warnings*) -val branching_level = - Config.int (Config.declare ("syntax_branching_level", @{here}) (fn _ => Config.Int 600)); - -(*get all productions of a NT and NTs chained to it which can - be started by specified token*) -fun prods_for prods chains include_none tk nts = - let - fun token_assoc (list, key) = - let - fun assoc [] result = result - | assoc ((keyi, pi) :: pairs) result = - if is_some keyi andalso Lexicon.matching_tokens (the keyi, key) - orelse include_none andalso is_none keyi then - assoc pairs (pi @ result) - else assoc pairs result; - in assoc list [] end; - - fun get_prods [] result = result - | get_prods (nt :: nts) result = - let val nt_prods = snd (Vector.sub (prods, nt)); - in get_prods nts (token_assoc (nt_prods, tk) @ result) end; - in get_prods (connected_with chains nts nts) [] end; - - -fun PROCESSS prods chains Estate i c states = - let - fun all_prods_for nt = prods_for prods chains true c [nt]; - - fun processS used [] (Si, Sii) = (Si, Sii) - | processS used (S :: States) (Si, Sii) = - (case S of - (_, _, _, Nonterminal (nt, min_prec) :: _, _, _) => - let (*predictor operation*) - val (used', new_states) = - (case Inttab.lookup used nt of - SOME (used_prec, l) => (*nonterminal has been processed*) - if used_prec <= min_prec then - (*wanted precedence has been processed*) - (used, movedot_lambda l S) - else (*wanted precedence hasn't been parsed yet*) - let - val tk_prods = all_prods_for nt; - val States' = - mk_states i min_prec nt (get_RHS' min_prec used_prec tk_prods); - in (update_prec (nt, min_prec) used, movedot_lambda l S @ States') end - | NONE => (*nonterminal is parsed for the first time*) - let - val tk_prods = all_prods_for nt; - val States' = mk_states i min_prec nt (get_RHS min_prec tk_prods); - in (Inttab.update (nt, (min_prec, [])) used, States') end); - in - processS used' (new_states @ States) (S :: Si, Sii) - end - | (_, _, _, Terminal a :: _, _, _) => (*scanner operation*) - processS used States - (S :: Si, - if Lexicon.matching_tokens (a, c) then movedot_term c S :: Sii else Sii) - | (A, prec, ts, [], id, j) => (*completer operation*) - let val tt = if id = "" then ts else [Node (id, rev ts)] in - if j = i then (*lambda production?*) - let - val (prec', used') = update_trees (A, (tt, prec)) used; - val Slist = getS A prec prec' Si; - val States' = map (movedot_nonterm tt) Slist; - in processS used' (States' @ States) (S :: Si, Sii) end - else - let val Slist = get_states Estate i j A prec - in processS used (map (movedot_nonterm tt) Slist @ States) (S :: Si, Sii) end - end) - in processS Inttab.empty states ([], []) end; - - -fun produce prods tags chains stateset i indata prev_token = - (case Array.sub (stateset, i) of - [] => - let - val toks = if Lexicon.is_eof prev_token then indata else prev_token :: indata; - val pos = Position.here (Lexicon.pos_of_token prev_token); - in - if null toks then - error ("Inner syntax error: unexpected end of input" ^ pos) - else - 
error ("Inner syntax error" ^ pos ^ - Markup.markup Markup.no_report - ("\n" ^ Pretty.string_of - (Pretty.block [ - Pretty.str "at", Pretty.brk 1, - Pretty.block - (Pretty.str "\"" :: - Pretty.breaks (map (Pretty.str o Lexicon.str_of_token) (#1 (split_last toks))) @ - [Pretty.str "\""])]))) - end - | s => - (case indata of - [] => s - | c :: cs => - let - val (si, sii) = PROCESSS prods chains stateset i c s; - val _ = Array.update (stateset, i, si); - val _ = Array.update (stateset, i + 1, sii); - in produce prods tags chains stateset (i + 1) cs c end)); - - -fun get_trees states = map_filter (fn (_, _, [pt], _, _, _) => SOME pt | _ => NONE) states; - -fun earley prods tags chains startsymbol indata = - let - val start_tag = - (case Symtab.lookup tags startsymbol of - SOME tag => tag - | NONE => error ("Inner syntax: bad grammar root symbol " ^ quote startsymbol)); - val S0 = [(~1, 0, [], [Nonterminal (start_tag, 0), Terminal Lexicon.eof], "", 0)]; - val s = length indata + 1; - val Estate = Array.array (s, []); - val _ = Array.update (Estate, 0, S0); - in - get_trees (produce prods tags chains Estate 0 indata Lexicon.eof) - end; - - -fun parse (Gram {tags, prods, chains, ...}) start toks = - let - val end_pos = - (case try List.last toks of - NONE => Position.none - | SOME (Lexicon.Token (_, _, (_, end_pos))) => end_pos); - val r = - (case earley prods tags chains start (toks @ [Lexicon.mk_eof end_pos]) of - [] => raise Fail "Inner syntax: no parse trees" - | pts => pts); - in r end; - - -fun guess_infix_lr (Gram gram) c = (*based on educated guess*) - let - fun freeze a = map_range (curry Vector.sub a) (Vector.length a); - val prods = maps snd (maps snd (freeze (#prods gram))); - fun guess (SOME ([Nonterminal (_, k), - Terminal (Lexicon.Token (Lexicon.Literal, s, _)), Nonterminal (_, l)], _, j)) = - if k = j andalso l = j + 1 then SOME (s, true, false, j) - else if k = j + 1 then if l = j then SOME (s, false, true, j) - else if l = j + 1 then SOME (s, false, false, j) - else NONE - else NONE - | guess _ = NONE; - in guess (find_first (fn (_, s, _) => s = c) prods) end; - -end; diff --git a/core/Pure/Syntax/printer.ML b/core/Pure/Syntax/printer.ML deleted file mode 100644 index 4945564f..00000000 --- a/core/Pure/Syntax/printer.ML +++ /dev/null @@ -1,274 +0,0 @@ -(* Title: Pure/Syntax/printer.ML - Author: Tobias Nipkow and Markus Wenzel, TU Muenchen - -Pretty printing of asts, terms, types and print (ast) translation. 
-*) - -signature BASIC_PRINTER = -sig - val show_brackets: bool Config.T - val show_types: bool Config.T - val show_sorts: bool Config.T - val show_markup: bool Config.T - val show_structs: bool Config.T - val show_question_marks: bool Config.T - val pretty_priority: int Config.T -end; - -signature PRINTER = -sig - include BASIC_PRINTER - val show_brackets_raw: Config.raw - val show_types_raw: Config.raw - val show_sorts_raw: Config.raw - val show_markup_default: bool Unsynchronized.ref - val show_markup_raw: Config.raw - val show_structs_raw: Config.raw - val show_question_marks_raw: Config.raw - val show_type_emphasis: bool Config.T - val type_emphasis: Proof.context -> typ -> bool - type prtabs - val empty_prtabs: prtabs - val update_prtabs: string -> Syntax_Ext.xprod list -> prtabs -> prtabs - val remove_prtabs: string -> Syntax_Ext.xprod list -> prtabs -> prtabs - val merge_prtabs: prtabs -> prtabs -> prtabs - val pretty_term_ast: bool -> Proof.context -> prtabs -> - (string -> Proof.context -> Ast.ast list -> Ast.ast) -> - (string -> Ast.ast list -> Pretty.T option) -> - (string -> Markup.T list * string) -> - Ast.ast -> Pretty.T list - val pretty_typ_ast: Proof.context -> prtabs -> - (string -> Proof.context -> Ast.ast list -> Ast.ast) -> - (string -> Ast.ast list -> Pretty.T option) -> - (string -> Markup.T list * string) -> Ast.ast -> Pretty.T list -end; - -structure Printer: PRINTER = -struct - -(** options for printing **) - -val show_brackets_raw = Config.declare_option ("show_brackets", @{here}); -val show_brackets = Config.bool show_brackets_raw; - -val show_types_raw = Config.declare_option ("show_types", @{here}); -val show_types = Config.bool show_types_raw; - -val show_sorts_raw = Config.declare_option ("show_sorts", @{here}); -val show_sorts = Config.bool show_sorts_raw; - -val show_markup_default = Unsynchronized.ref false; -val show_markup_raw = - Config.declare ("show_markup", @{here}) (fn _ => Config.Bool (! 
show_markup_default)); -val show_markup = Config.bool show_markup_raw; - -val show_structs_raw = - Config.declare ("show_structs", @{here}) (fn _ => Config.Bool false); -val show_structs = Config.bool show_structs_raw; - -val show_question_marks_raw = Config.declare_option ("show_question_marks", @{here}); -val show_question_marks = Config.bool show_question_marks_raw; - -val show_type_emphasis = - Config.bool (Config.declare ("show_type_emphasis", @{here}) (fn _ => Config.Bool true)); - -fun type_emphasis ctxt T = - T <> dummyT andalso - (Config.get ctxt show_types orelse Config.get ctxt show_markup orelse - Config.get ctxt show_type_emphasis andalso not (can dest_Type T)); - - - -(** type prtabs **) - -datatype symb = - Arg of int | - TypArg of int | - String of bool * string | - Break of int | - Block of int * symb list; - -type prtabs = (string * ((symb list * int * int) list) Symtab.table) list; - -fun mode_tab prtabs mode = the_default Symtab.empty (AList.lookup (op =) prtabs mode); -fun mode_tabs prtabs modes = map_filter (AList.lookup (op =) prtabs) (modes @ [""]); - - -(* xprod_to_fmt *) - -fun xprod_to_fmt (Syntax_Ext.XProd (_, _, "", _)) = NONE - | xprod_to_fmt (Syntax_Ext.XProd (_, xsymbs, const, pri)) = - let - fun arg (s, p) = - (if s = "type" then TypArg else Arg) - (if Lexicon.is_terminal s then 1000 else p); - - fun xsyms_to_syms (Syntax_Ext.Delim s :: xsyms) = - apfst (cons (String (not (exists Symbol.is_block_ctrl (Symbol.explode s)), s))) - (xsyms_to_syms xsyms) - | xsyms_to_syms (Syntax_Ext.Argument s_p :: xsyms) = - apfst (cons (arg s_p)) (xsyms_to_syms xsyms) - | xsyms_to_syms (Syntax_Ext.Space s :: xsyms) = - apfst (cons (String (false, s))) (xsyms_to_syms xsyms) - | xsyms_to_syms (Syntax_Ext.Bg i :: xsyms) = - let - val (bsyms, xsyms') = xsyms_to_syms xsyms; - val (syms, xsyms'') = xsyms_to_syms xsyms'; - in - (Block (i, bsyms) :: syms, xsyms'') - end - | xsyms_to_syms (Syntax_Ext.Brk i :: xsyms) = - apfst (cons (Break i)) (xsyms_to_syms xsyms) - | xsyms_to_syms (Syntax_Ext.En :: xsyms) = ([], xsyms) - | xsyms_to_syms [] = ([], []); - - fun nargs (Arg _ :: syms) = nargs syms + 1 - | nargs (TypArg _ :: syms) = nargs syms + 1 - | nargs (String _ :: syms) = nargs syms - | nargs (Break _ :: syms) = nargs syms - | nargs (Block (_, bsyms) :: syms) = nargs syms + nargs bsyms - | nargs [] = 0; - in - (case xsyms_to_syms xsymbs of - (symbs, []) => SOME (const, (symbs, nargs symbs, pri)) - | _ => raise Fail "Unbalanced pretty-printing blocks") - end; - - -(* empty, extend, merge prtabs *) - -val empty_prtabs = []; - -fun update_prtabs mode xprods prtabs = - let - val fmts = map_filter xprod_to_fmt xprods; - val tab' = fold (Symtab.update_list (op =)) fmts (mode_tab prtabs mode); - in AList.update (op =) (mode, tab') prtabs end; - -fun remove_prtabs mode xprods prtabs = - let - val tab = mode_tab prtabs mode; - val fmts = map_filter (fn xprod as Syntax_Ext.XProd (_, _, c, _) => - if null (Symtab.lookup_list tab c) then NONE - else xprod_to_fmt xprod) xprods; - val tab' = fold (Symtab.remove_list (op =)) fmts tab; - in AList.update (op =) (mode, tab') prtabs end; - -fun merge_prtabs prtabs1 prtabs2 = - let - val modes = distinct (op =) (map fst (prtabs1 @ prtabs2)); - fun merge m = (m, Symtab.merge_list (op =) (mode_tab prtabs1 m, mode_tab prtabs2 m)); - in map merge modes end; - - - -(** pretty term or typ asts **) - -fun is_chain [Block (_, pr)] = is_chain pr - | is_chain [Arg _] = true - | is_chain _ = false; - -val pretty_priority = - Config.int (Config.declare 
("Syntax.pretty_priority", @{here}) (K (Config.Int 0))); - -fun pretty type_mode curried ctxt tabs trf markup_trans markup_extern ast0 = - let - val show_brackets = Config.get ctxt show_brackets; - - (*default applications: prefix / postfix*) - val appT = - if type_mode then Syntax_Trans.tappl_ast_tr' - else if curried then Syntax_Trans.applC_ast_tr' - else Syntax_Trans.appl_ast_tr'; - - fun synT _ ([], args) = ([], args) - | synT m (Arg p :: symbs, t :: args) = - let val (Ts, args') = synT m (symbs, args); - in (astT (t, p) @ Ts, args') end - | synT m (TypArg p :: symbs, t :: args) = - let - val (Ts, args') = synT m (symbs, args); - in - if type_mode then (astT (t, p) @ Ts, args') - else - (pretty true curried (Config.put pretty_priority p ctxt) - tabs trf markup_trans markup_extern t @ Ts, args') - end - | synT m (String (do_mark, s) :: symbs, args) = - let - val (Ts, args') = synT m (symbs, args); - val T = - if do_mark - then Pretty.marks_str (m @ [Lexicon.literal_markup s], s) - else Pretty.str s; - in (T :: Ts, args') end - | synT m (Block (i, bsymbs) :: symbs, args) = - let - val (bTs, args') = synT m (bsymbs, args); - val (Ts, args'') = synT m (symbs, args'); - val T = - if i < 0 then Pretty.unbreakable (Pretty.block bTs) - else Pretty.blk (i, bTs); - in (T :: Ts, args'') end - | synT m (Break i :: symbs, args) = - let - val (Ts, args') = synT m (symbs, args); - val T = if i < 0 then Pretty.fbrk else Pretty.brk i; - in (T :: Ts, args') end - - and parT m (pr, args, p, p': int) = #1 (synT m - (if p > p' orelse (show_brackets andalso p' <> 1000 andalso not (is_chain pr)) - then [Block (1, String (false, "(") :: pr @ [String (false, ")")])] - else pr, args)) - - and atomT a = Pretty.marks_str (markup_extern a) - - and prefixT (_, a, [], _) = [atomT a] - | prefixT (c, _, args, p) = astT (appT (c, args), p) - - and splitT 0 ([x], ys) = (x, ys) - | splitT 0 (rev_xs, ys) = (Ast.Appl (rev rev_xs), ys) - | splitT n (rev_xs, y :: ys) = splitT (n - 1) (y :: rev_xs, ys) - - and combT (tup as (c, a, args, p)) = - let - val nargs = length args; - - (*find matching table entry, or print as prefix / postfix*) - fun prnt ([], []) = prefixT tup - | prnt ([], tb :: tbs) = prnt (Symtab.lookup_list tb a, tbs) - | prnt ((pr, n, p') :: prnps, tbs) = - if nargs = n then parT (#1 (markup_extern a)) (pr, args, p, p') - else if nargs > n andalso not type_mode then - astT (appT (splitT n ([c], args)), p) - else prnt (prnps, tbs); - in - (case markup_trans a args of - SOME prt => [prt] - | NONE => astT (trf a ctxt args, p) handle Match => prnt ([], tabs)) - end - - and astT (c as Ast.Constant a, p) = combT (c, a, [], p) - | astT (ast as Ast.Variable _, _) = [Ast.pretty_ast ast] - | astT (Ast.Appl ((c as Ast.Constant a) :: (args as _ :: _)), p) = combT (c, a, args, p) - | astT (Ast.Appl (f :: (args as _ :: _)), p) = astT (appT (f, args), p) - | astT (ast as Ast.Appl _, _) = raise Ast.AST ("pretty: malformed ast", [ast]); - in astT (ast0, Config.get ctxt pretty_priority) end; - - -(* pretty_term_ast *) - -fun pretty_term_ast curried ctxt prtabs trf markup_trans extern ast = - pretty false curried ctxt (mode_tabs prtabs (print_mode_value ())) trf markup_trans extern ast; - - -(* pretty_typ_ast *) - -fun pretty_typ_ast ctxt prtabs trf markup_trans extern ast = - pretty true false ctxt (mode_tabs prtabs (print_mode_value ())) trf markup_trans extern ast; - -end; - -structure Basic_Printer: BASIC_PRINTER = Printer; -open Basic_Printer; - diff --git a/core/Pure/Syntax/simple_syntax.ML 
b/core/Pure/Syntax/simple_syntax.ML deleted file mode 100644 index 9c771811..00000000 --- a/core/Pure/Syntax/simple_syntax.ML +++ /dev/null @@ -1,145 +0,0 @@ -(* Title: Pure/Syntax/simple_syntax.ML - Author: Makarius - -Simple syntax for types and terms --- for bootstrapping Pure. -*) - -signature SIMPLE_SYNTAX = -sig - val read_typ: string -> typ - val read_term: string -> term - val read_prop: string -> term -end; - -structure Simple_Syntax: SIMPLE_SYNTAX = -struct - -(* scanning tokens *) - -val lexicon = Scan.make_lexicon - (map Symbol.explode ["!!", "%", "(", ")", ".", "::", "==", "==>", "=>", "&&&", "CONST"]); - -fun read scan s = - (case - Symbol_Pos.explode (s, Position.none) |> - Lexicon.tokenize lexicon false |> - filter Lexicon.is_proper |> - Scan.read Lexicon.stopper scan of - SOME x => x - | NONE => error ("Malformed input: " ^ quote s)); - - -(* basic scanners *) - -fun $$ s = Scan.some (fn Lexicon.Token (Lexicon.Literal, s', _) => - if s = s' then SOME s else NONE | _ => NONE); - -fun enum1 s scan = scan ::: Scan.repeat ($$ s |-- scan); -fun enum2 s scan = scan ::: Scan.repeat1 ($$ s |-- scan); - -val tfree = Scan.some (fn Lexicon.Token (Lexicon.TFreeSy, s, _) => SOME s | _ => NONE); -val ident = Scan.some (fn Lexicon.Token (Lexicon.IdentSy, s, _) => SOME s | _ => NONE); - -val var = Scan.some (fn Lexicon.Token (Lexicon.VarSy, s, _) => - SOME (Lexicon.read_indexname (unprefix "?" s)) | _ => NONE); - -val long_ident = Scan.some (fn Lexicon.Token (Lexicon.LongIdentSy, s, _) => SOME s | _ => NONE); -val const = long_ident || ident; - - -(* types *) - -(* - typ = typ1 => ... => typ1 - | typ1 - typ1 = typ2 const ... const - | typ2 - typ2 = tfree - | const - | ( typ ) -*) - -fun typ x = - (enum1 "=>" typ1 >> (op ---> o split_last)) x -and typ1 x = - (typ2 -- Scan.repeat const >> (fn (T, cs) => fold (fn c => fn U => Type (c, [U])) cs T)) x -and typ2 x = - (tfree >> (fn a => TFree (a, [])) || - const >> (fn c => Type (c, [])) || - $$ "(" |-- typ --| $$ ")") x; - -val read_typ = read typ; - - -(* terms *) - -(* - term = !!ident :: typ. term - | term1 - term1 = term2 ==> ... ==> term2 - | term2 - term2 = term3 == term2 - | term3 &&& term2 - | term3 - term3 = ident :: typ - | var :: typ - | CONST const :: typ - | %ident :: typ. term3 - | term4 - term4 = term5 ... term5 - | term5 - term5 = ident - | var - | CONST const - | ( term ) -*) - -local - -val constraint = $$ "::" |-- typ; -val idt = ident -- constraint; -val bind = idt --| $$ "."; - -fun term env T x = - ($$ "!!" 
|-- bind :|-- (fn v => term (v :: env) propT >> (Logic.all (Free v))) || - term1 env T) x -and term1 env T x = - (enum2 "==>" (term2 env propT) >> foldr1 Logic.mk_implies || - term2 env T) x -and term2 env T x = - (equal env || - term3 env propT -- ($$ "&&&" |-- term2 env propT) >> Logic.mk_conjunction || - term3 env T) x -and equal env x = - (term3 env dummyT -- ($$ "==" |-- term2 env dummyT) >> (fn (t, u) => - Const ("Pure.eq", Term.fastype_of t --> Term.fastype_of u --> propT) $ t $ u)) x -and term3 env T x = - (idt >> Free || - var -- constraint >> Var || - $$ "CONST" |-- const -- constraint >> Const || - $$ "%" |-- bind :|-- (fn v => term3 (v :: env) dummyT >> lambda (Free v)) || - term4 env T) x -and term4 env T x = - (term5 env dummyT -- Scan.repeat1 (term5 env dummyT) >> Term.list_comb || - term5 env T) x -and term5 env T x = - (ident >> (fn a => Free (a, the_default T (AList.lookup (op =) env a))) || - var >> (fn xi => Var (xi, T)) || - $$ "CONST" |-- const >> (fn c => Const (c, T)) || - $$ "(" |-- term env T --| $$ ")") x; - -fun read_tm T s = - let val t = read (term [] T) s in - if can (Term.map_types Term.no_dummyT) t then t - else error ("Unspecified types in input: " ^ quote s) - end; - -in - -val read_term = read_tm dummyT; -val read_prop = read_tm propT; - -end; - -end; - diff --git a/core/Pure/Syntax/syntax.ML b/core/Pure/Syntax/syntax.ML deleted file mode 100644 index 258c8006..00000000 --- a/core/Pure/Syntax/syntax.ML +++ /dev/null @@ -1,660 +0,0 @@ -(* Title: Pure/Syntax/syntax.ML - Author: Tobias Nipkow and Markus Wenzel, TU Muenchen - -Standard Isabelle syntax, based on arbitrary context-free grammars -(specified by mixfix declarations). -*) - -signature SYNTAX = -sig - type operations - val install_operations: operations -> unit - val root: string Config.T - val ambiguity_warning_raw: Config.raw - val ambiguity_warning: bool Config.T - val ambiguity_limit_raw: Config.raw - val ambiguity_limit: int Config.T - val read_token_pos: string -> Position.T - val read_token: string -> Symbol_Pos.source - val parse_token: Proof.context -> (XML.tree list -> 'a) -> - (bool -> Markup.T) -> (Symbol_Pos.T list * Position.T -> 'a) -> string -> 'a - val parse_sort: Proof.context -> string -> sort - val parse_typ: Proof.context -> string -> typ - val parse_term: Proof.context -> string -> term - val parse_prop: Proof.context -> string -> term - val unparse_sort: Proof.context -> sort -> Pretty.T - val unparse_classrel: Proof.context -> class list -> Pretty.T - val unparse_arity: Proof.context -> arity -> Pretty.T - val unparse_typ: Proof.context -> typ -> Pretty.T - val unparse_term: Proof.context -> term -> Pretty.T - val check_sort: Proof.context -> sort -> sort - val check_typ: Proof.context -> typ -> typ - val check_term: Proof.context -> term -> term - val check_prop: Proof.context -> term -> term - val check_typs: Proof.context -> typ list -> typ list - val check_terms: Proof.context -> term list -> term list - val check_props: Proof.context -> term list -> term list - val uncheck_sort: Proof.context -> sort -> sort - val uncheck_arity: Proof.context -> arity -> arity - val uncheck_classrel: Proof.context -> class list -> class list - val uncheck_typs: Proof.context -> typ list -> typ list - val uncheck_terms: Proof.context -> term list -> term list - val read_sort: Proof.context -> string -> sort - val read_typ: Proof.context -> string -> typ - val read_term: Proof.context -> string -> term - val read_prop: Proof.context -> string -> term - val read_typs: Proof.context -> 
string list -> typ list - val read_terms: Proof.context -> string list -> term list - val read_props: Proof.context -> string list -> term list - val read_sort_global: theory -> string -> sort - val read_typ_global: theory -> string -> typ - val read_term_global: theory -> string -> term - val read_prop_global: theory -> string -> term - val pretty_term: Proof.context -> term -> Pretty.T - val pretty_typ: Proof.context -> typ -> Pretty.T - val pretty_sort: Proof.context -> sort -> Pretty.T - val pretty_classrel: Proof.context -> class list -> Pretty.T - val pretty_arity: Proof.context -> arity -> Pretty.T - val string_of_term: Proof.context -> term -> string - val string_of_typ: Proof.context -> typ -> string - val string_of_sort: Proof.context -> sort -> string - val string_of_classrel: Proof.context -> class list -> string - val string_of_arity: Proof.context -> arity -> string - val is_pretty_global: Proof.context -> bool - val set_pretty_global: bool -> Proof.context -> Proof.context - val init_pretty_global: theory -> Proof.context - val init_pretty: Context.pretty -> Proof.context - val pretty_term_global: theory -> term -> Pretty.T - val pretty_typ_global: theory -> typ -> Pretty.T - val pretty_sort_global: theory -> sort -> Pretty.T - val string_of_term_global: theory -> term -> string - val string_of_typ_global: theory -> typ -> string - val string_of_sort_global: theory -> sort -> string - type syntax - val eq_syntax: syntax * syntax -> bool - val force_syntax: syntax -> unit - val lookup_const: syntax -> string -> string option - val is_keyword: syntax -> string -> bool - val tokenize: syntax -> bool -> Symbol_Pos.T list -> Lexicon.token list - val parse: syntax -> string -> Lexicon.token list -> Parser.parsetree list - val parse_ast_translation: syntax -> string -> (Proof.context -> Ast.ast list -> Ast.ast) option - val parse_rules: syntax -> string -> (Ast.ast * Ast.ast) list - val parse_translation: syntax -> string -> (Proof.context -> term list -> term) option - val print_translation: syntax -> string -> - Proof.context -> typ -> term list -> term (*exception Match*) - val print_rules: syntax -> string -> (Ast.ast * Ast.ast) list - val print_ast_translation: syntax -> string -> - Proof.context -> Ast.ast list -> Ast.ast (*exception Match*) - val prtabs: syntax -> Printer.prtabs - type mode - val mode_default: mode - val mode_input: mode - val empty_syntax: syntax - val merge_syntax: syntax * syntax -> syntax - val print_gram: syntax -> unit - val print_trans: syntax -> unit - val print_syntax: syntax -> unit - val guess_infix: syntax -> string -> mixfix option - datatype 'a trrule = - Parse_Rule of 'a * 'a | - Print_Rule of 'a * 'a | - Parse_Print_Rule of 'a * 'a - val map_trrule: ('a -> 'b) -> 'a trrule -> 'b trrule - val update_trfuns: - (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list * - (string * ((Proof.context -> term list -> term) * stamp)) list * - (string * ((Proof.context -> typ -> term list -> term) * stamp)) list * - (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list -> syntax -> syntax - val update_type_gram: bool -> mode -> (string * typ * mixfix) list -> syntax -> syntax - val update_const_gram: bool -> (string -> bool) -> - mode -> (string * typ * mixfix) list -> syntax -> syntax - val update_trrules: Ast.ast trrule list -> syntax -> syntax - val remove_trrules: Ast.ast trrule list -> syntax -> syntax - val const: string -> term - val free: string -> term - val var: indexname -> term -end; - -structure Syntax: SYNTAX = 
-struct - - -(** inner syntax operations **) - -(* back-patched operations *) - -type operations = - {parse_sort: Proof.context -> string -> sort, - parse_typ: Proof.context -> string -> typ, - parse_term: Proof.context -> string -> term, - parse_prop: Proof.context -> string -> term, - unparse_sort: Proof.context -> sort -> Pretty.T, - unparse_typ: Proof.context -> typ -> Pretty.T, - unparse_term: Proof.context -> term -> Pretty.T, - check_typs: Proof.context -> typ list -> typ list, - check_terms: Proof.context -> term list -> term list, - check_props: Proof.context -> term list -> term list, - uncheck_typs: Proof.context -> typ list -> typ list, - uncheck_terms: Proof.context -> term list -> term list}; - -val operations: operations Single_Assignment.var = Single_Assignment.var "Syntax.operations"; -fun install_operations ops = Single_Assignment.assign operations ops; - -fun operation which ctxt x = - (case Single_Assignment.peek operations of - NONE => raise Fail "Inner syntax operations not installed" - | SOME ops => which ops ctxt x); - - -(* configuration options *) - -val root = Config.string (Config.declare ("syntax_root", @{here}) (K (Config.String "any"))); - -val ambiguity_warning_raw = - Config.declare ("syntax_ambiguity_warning", @{here}) (fn _ => Config.Bool true); -val ambiguity_warning = Config.bool ambiguity_warning_raw; - -val ambiguity_limit_raw = - Config.declare ("syntax_ambiguity_limit", @{here}) (fn _ => Config.Int 10); -val ambiguity_limit = Config.int ambiguity_limit_raw; - - -(* outer syntax token -- with optional YXML content *) - -local - -fun token_position (XML.Elem ((name, props), _)) = - if name = Markup.tokenN - then (Markup.is_delimited props, Position.of_properties props) - else (false, Position.none) - | token_position (XML.Text _) = (false, Position.none); - -fun token_source tree = - let - val text = XML.content_of [tree]; - val (delimited, pos) = token_position tree; - in {delimited = delimited, text = text, pos = pos} end; - -in - -fun read_token_pos str = #2 (token_position (YXML.parse str handle Fail msg => error msg)); - -fun read_token str = token_source (YXML.parse str handle Fail msg => error msg); - -fun parse_token ctxt decode markup parse str = - let - fun parse_tree tree = - let - val {delimited, text, pos} = token_source tree; - val syms = Symbol_Pos.explode (text, pos); - val _ = Context_Position.report ctxt pos (markup delimited); - in parse (syms, pos) end; - in - (case YXML.parse_body str handle Fail msg => error msg of - body as [tree as XML.Elem ((name, _), _)] => - if name = Markup.tokenN then parse_tree tree else decode body - | [tree as XML.Text _] => parse_tree tree - | body => decode body) - end; - -end; - - -(* (un)parsing *) - -val parse_sort = operation #parse_sort; -val parse_typ = operation #parse_typ; -val parse_term = operation #parse_term; -val parse_prop = operation #parse_prop; -val unparse_sort = operation #unparse_sort; -val unparse_typ = operation #unparse_typ; -val unparse_term = operation #unparse_term; - - -(* (un)checking *) - -val check_typs = operation #check_typs; -val check_terms = operation #check_terms; -val check_props = operation #check_props; - -val check_typ = singleton o check_typs; -val check_term = singleton o check_terms; -val check_prop = singleton o check_props; - -val uncheck_typs = operation #uncheck_typs; -val uncheck_terms = operation #uncheck_terms; - - -(* derived operations for algebra of sorts *) - -local - -fun map_sort f S = - (case f (TFree ("", S)) of - TFree ("", S') => S' - | _ => 
raise TYPE ("map_sort", [TFree ("", S)], [])); - -in - -val check_sort = map_sort o check_typ; -val uncheck_sort = map_sort o singleton o uncheck_typs; - -end; - - -val uncheck_classrel = map o singleton o uncheck_sort; - -fun unparse_classrel ctxt cs = Pretty.block (flat - (separate [Pretty.str " <", Pretty.brk 1] (map (single o unparse_sort ctxt o single) cs))); - -fun uncheck_arity ctxt (a, Ss, S) = - let - val T = Type (a, replicate (length Ss) dummyT); - val a' = - (case singleton (uncheck_typs ctxt) T of - Type (a', _) => a' - | T => raise TYPE ("uncheck_arity", [T], [])); - val Ss' = map (uncheck_sort ctxt) Ss; - val S' = uncheck_sort ctxt S; - in (a', Ss', S') end; - -fun unparse_arity ctxt (a, Ss, S) = - let - val prtT = unparse_typ ctxt (Type (a, [])); - val dom = - if null Ss then [] - else [Pretty.list "(" ")" (map (unparse_sort ctxt) Ss), Pretty.brk 1]; - in Pretty.block ([prtT, Pretty.str " ::", Pretty.brk 1] @ dom @ [unparse_sort ctxt S]) end; - - -(* read = parse + check *) - -fun read_sort ctxt = parse_sort ctxt #> check_sort ctxt; - -fun read_typs ctxt = - grouped 10 (Par_List.map_name "Syntax.read_typs") (parse_typ ctxt) #> check_typs ctxt; - -fun read_terms ctxt = - grouped 10 (Par_List.map_name "Syntax.read_terms") (parse_term ctxt) #> check_terms ctxt; - -fun read_props ctxt = - grouped 10 (Par_List.map_name "Syntax.read_props") (parse_prop ctxt) #> check_props ctxt; - -val read_typ = singleton o read_typs; -val read_term = singleton o read_terms; -val read_prop = singleton o read_props; - -val read_sort_global = read_sort o Proof_Context.init_global; -val read_typ_global = read_typ o Proof_Context.init_global; -val read_term_global = read_term o Proof_Context.init_global; -val read_prop_global = read_prop o Proof_Context.init_global; - - -(* pretty = uncheck + unparse *) - -fun pretty_term ctxt = singleton (uncheck_terms ctxt) #> unparse_term ctxt; -fun pretty_typ ctxt = singleton (uncheck_typs ctxt) #> unparse_typ ctxt; -fun pretty_sort ctxt = uncheck_sort ctxt #> unparse_sort ctxt; -fun pretty_classrel ctxt = uncheck_classrel ctxt #> unparse_classrel ctxt; -fun pretty_arity ctxt = uncheck_arity ctxt #> unparse_arity ctxt; - -val string_of_term = Pretty.string_of oo pretty_term; -val string_of_typ = Pretty.string_of oo pretty_typ; -val string_of_sort = Pretty.string_of oo pretty_sort; -val string_of_classrel = Pretty.string_of oo pretty_classrel; -val string_of_arity = Pretty.string_of oo pretty_arity; - - -(* global pretty printing *) - -val pretty_global = - Config.bool (Config.declare ("Syntax.pretty_global", @{here}) (K (Config.Bool false))); -fun is_pretty_global ctxt = Config.get ctxt pretty_global; -val set_pretty_global = Config.put pretty_global; -val init_pretty_global = set_pretty_global true o Proof_Context.init_global; -val init_pretty = Context.pretty_context init_pretty_global; - -val pretty_term_global = pretty_term o init_pretty_global; -val pretty_typ_global = pretty_typ o init_pretty_global; -val pretty_sort_global = pretty_sort o init_pretty_global; - -val string_of_term_global = string_of_term o init_pretty_global; -val string_of_typ_global = string_of_typ o init_pretty_global; -val string_of_sort_global = string_of_sort o init_pretty_global; - - - -(** tables of translation functions **) - -(* parse (ast) translations *) - -fun err_dup_trfun name c = - error ("More than one " ^ name ^ " for " ^ quote c); - -fun lookup_tr tab c = Option.map fst (Symtab.lookup tab c); - -fun remove_trtab trfuns = fold (Symtab.remove Syntax_Ext.eq_trfun) trfuns; - 
-fun update_trtab name trfuns tab = fold Symtab.update_new trfuns (remove_trtab trfuns tab) - handle Symtab.DUP c => err_dup_trfun name c; - -fun merge_trtabs name tab1 tab2 = Symtab.merge Syntax_Ext.eq_trfun (tab1, tab2) - handle Symtab.DUP c => err_dup_trfun name c; - - -(* print (ast) translations *) - -fun apply_tr' tab c ctxt T args = - let - val fns = map fst (Symtab.lookup_list tab c); - fun app_first [] = raise Match - | app_first (f :: fs) = f ctxt T args handle Match => app_first fs; - in app_first fns end; - -fun apply_ast_tr' tab c ctxt args = - let - val fns = map fst (Symtab.lookup_list tab c); - fun app_first [] = raise Match - | app_first (f :: fs) = f ctxt args handle Match => app_first fs; - in app_first fns end; - -fun update_tr'tab trfuns = fold_rev (Symtab.update_list Syntax_Ext.eq_trfun) trfuns; -fun remove_tr'tab trfuns = fold (Symtab.remove_list Syntax_Ext.eq_trfun) trfuns; -fun merge_tr'tabs tab1 tab2 = Symtab.merge_list Syntax_Ext.eq_trfun (tab1, tab2); - - - -(** tables of translation rules **) - -type ruletab = (Ast.ast * Ast.ast) list Symtab.table; - -fun dest_ruletab tab = maps snd (Symtab.dest tab); - -val update_ruletab = fold_rev (fn r => Symtab.update_list (op =) (Ast.head_of_rule r, r)); -val remove_ruletab = fold (fn r => Symtab.remove_list (op =) (Ast.head_of_rule r, r)); -fun merge_ruletabs tab1 tab2 = Symtab.merge_list (op =) (tab1, tab2); - - - -(** datatype syntax **) - -datatype syntax = - Syntax of { - input: Syntax_Ext.xprod list, - lexicon: Scan.lexicon, - gram: Parser.gram lazy, - consts: string Symtab.table, - prmodes: string list, - parse_ast_trtab: ((Proof.context -> Ast.ast list -> Ast.ast) * stamp) Symtab.table, - parse_ruletab: ruletab, - parse_trtab: ((Proof.context -> term list -> term) * stamp) Symtab.table, - print_trtab: ((Proof.context -> typ -> term list -> term) * stamp) list Symtab.table, - print_ruletab: ruletab, - print_ast_trtab: ((Proof.context -> Ast.ast list -> Ast.ast) * stamp) list Symtab.table, - prtabs: Printer.prtabs} * stamp; - -fun eq_syntax (Syntax (_, s1), Syntax (_, s2)) = s1 = s2; - -fun force_syntax (Syntax ({gram, ...}, _)) = ignore (Lazy.force gram); - -fun lookup_const (Syntax ({consts, ...}, _)) = Symtab.lookup consts; -fun is_keyword (Syntax ({lexicon, ...}, _)) = Scan.is_literal lexicon o Symbol.explode; -fun tokenize (Syntax ({lexicon, ...}, _)) = Lexicon.tokenize lexicon; -fun parse (Syntax ({gram, ...}, _)) = Parser.parse (Lazy.force gram); - -fun parse_ast_translation (Syntax ({parse_ast_trtab, ...}, _)) = lookup_tr parse_ast_trtab; -fun parse_translation (Syntax ({parse_trtab, ...}, _)) = lookup_tr parse_trtab; -fun parse_rules (Syntax ({parse_ruletab, ...}, _)) = Symtab.lookup_list parse_ruletab; -fun print_translation (Syntax ({print_trtab, ...}, _)) = apply_tr' print_trtab; -fun print_rules (Syntax ({print_ruletab, ...}, _)) = Symtab.lookup_list print_ruletab; -fun print_ast_translation (Syntax ({print_ast_trtab, ...}, _)) = apply_ast_tr' print_ast_trtab; - -fun prtabs (Syntax ({prtabs, ...}, _)) = prtabs; - -type mode = string * bool; -val mode_default = ("", true); -val mode_input = (Print_Mode.input, true); - - -(* empty_syntax *) - -val empty_syntax = Syntax - ({input = [], - lexicon = Scan.empty_lexicon, - gram = Lazy.value Parser.empty_gram, - consts = Symtab.empty, - prmodes = [], - parse_ast_trtab = Symtab.empty, - parse_ruletab = Symtab.empty, - parse_trtab = Symtab.empty, - print_trtab = Symtab.empty, - print_ruletab = Symtab.empty, - print_ast_trtab = Symtab.empty, - prtabs = 
Printer.empty_prtabs}, stamp ()); - - -(* update_syntax *) - -fun update_const (c, b) tab = - if c = "" orelse (b = "" andalso (Lexicon.is_marked c orelse Symtab.defined tab c)) - then tab - else Symtab.update (c, b) tab; - -fun update_syntax (mode, inout) syn_ext (Syntax (tabs, _)) = - let - val {input, lexicon, gram, consts = consts1, prmodes, parse_ast_trtab, parse_ruletab, - parse_trtab, print_trtab, print_ruletab, print_ast_trtab, prtabs} = tabs; - val Syntax_Ext.Syn_Ext {xprods, consts = consts2, parse_ast_translation, - parse_rules, parse_translation, print_translation, print_rules, - print_ast_translation} = syn_ext; - val new_xprods = - if inout then distinct (op =) (filter_out (member (op =) input) xprods) else []; - fun if_inout xs = if inout then xs else []; - in - Syntax - ({input = new_xprods @ input, - lexicon = fold Scan.extend_lexicon (Syntax_Ext.delims_of new_xprods) lexicon, - gram = Lazy.value (Parser.extend_gram new_xprods (Lazy.force gram)), - consts = fold update_const consts2 consts1, - prmodes = insert (op =) mode prmodes, - parse_ast_trtab = - update_trtab "parse ast translation" (if_inout parse_ast_translation) parse_ast_trtab, - parse_ruletab = update_ruletab (if_inout parse_rules) parse_ruletab, - parse_trtab = update_trtab "parse translation" (if_inout parse_translation) parse_trtab, - print_trtab = update_tr'tab print_translation print_trtab, - print_ruletab = update_ruletab print_rules print_ruletab, - print_ast_trtab = update_tr'tab print_ast_translation print_ast_trtab, - prtabs = Printer.update_prtabs mode xprods prtabs}, stamp ()) - end; - - -(* remove_syntax *) - -fun remove_syntax (mode, inout) syn_ext (Syntax (tabs, _)) = - let - val Syntax_Ext.Syn_Ext {xprods, consts = _, parse_ast_translation, parse_rules, - parse_translation, print_translation, print_rules, print_ast_translation} = syn_ext; - val {input, lexicon, gram, consts, prmodes, parse_ast_trtab, parse_ruletab, - parse_trtab, print_trtab, print_ruletab, print_ast_trtab, prtabs} = tabs; - val input' = if inout then subtract (op =) xprods input else input; - val changed = length input <> length input'; - fun if_inout xs = if inout then xs else []; - in - Syntax - ({input = input', - lexicon = if changed then Scan.make_lexicon (Syntax_Ext.delims_of input') else lexicon, - gram = if changed then Lazy.value (Parser.make_gram input') else gram, - consts = consts, - prmodes = prmodes, - parse_ast_trtab = remove_trtab (if_inout parse_ast_translation) parse_ast_trtab, - parse_ruletab = remove_ruletab (if_inout parse_rules) parse_ruletab, - parse_trtab = remove_trtab (if_inout parse_translation) parse_trtab, - print_trtab = remove_tr'tab print_translation print_trtab, - print_ruletab = remove_ruletab print_rules print_ruletab, - print_ast_trtab = remove_tr'tab print_ast_translation print_ast_trtab, - prtabs = Printer.remove_prtabs mode xprods prtabs}, stamp ()) - end; - - -(* merge_syntax *) - -fun merge_syntax (Syntax (tabs1, _), Syntax (tabs2, _)) = - let - val {input = input1, lexicon = lexicon1, gram = gram1, consts = consts1, - prmodes = prmodes1, parse_ast_trtab = parse_ast_trtab1, parse_ruletab = parse_ruletab1, - parse_trtab = parse_trtab1, print_trtab = print_trtab1, print_ruletab = print_ruletab1, - print_ast_trtab = print_ast_trtab1, prtabs = prtabs1} = tabs1; - - val {input = input2, lexicon = lexicon2, gram = gram2, consts = consts2, - prmodes = prmodes2, parse_ast_trtab = parse_ast_trtab2, parse_ruletab = parse_ruletab2, - parse_trtab = parse_trtab2, print_trtab = print_trtab2, 
print_ruletab = print_ruletab2, - print_ast_trtab = print_ast_trtab2, prtabs = prtabs2} = tabs2; - - val (input', gram') = - (case subtract (op =) input1 input2 of - [] => (input1, gram1) - | new_xprods2 => - if subset (op =) (input1, input2) then (input2, gram2) - else - let - val input' = new_xprods2 @ input1; - val gram' = Lazy.lazy (fn () => Parser.make_gram input'); - in (input', gram') end); - in - Syntax - ({input = input', - lexicon = Scan.merge_lexicons (lexicon1, lexicon2), - gram = gram', - consts = Symtab.merge (K true) (consts1, consts2), - prmodes = Library.merge (op =) (prmodes1, prmodes2), - parse_ast_trtab = - merge_trtabs "parse ast translation" parse_ast_trtab1 parse_ast_trtab2, - parse_ruletab = merge_ruletabs parse_ruletab1 parse_ruletab2, - parse_trtab = merge_trtabs "parse translation" parse_trtab1 parse_trtab2, - print_trtab = merge_tr'tabs print_trtab1 print_trtab2, - print_ruletab = merge_ruletabs print_ruletab1 print_ruletab2, - print_ast_trtab = merge_tr'tabs print_ast_trtab1 print_ast_trtab2, - prtabs = Printer.merge_prtabs prtabs1 prtabs2}, stamp ()) - end; - - - -(** print syntax **) - -local - -fun pretty_strs_qs name strs = - Pretty.strs (name :: map quote (sort_strings strs)); - -fun pretty_gram (Syntax (tabs, _)) = - let - val {lexicon, prmodes, gram, ...} = tabs; - val prmodes' = sort_strings (filter_out (fn s => s = "") prmodes); - in - [pretty_strs_qs "lexicon:" (Scan.dest_lexicon lexicon), - Pretty.big_list "prods:" (Parser.pretty_gram (Lazy.force gram)), - pretty_strs_qs "print modes:" prmodes'] - end; - -fun pretty_trans (Syntax (tabs, _)) = - let - fun pretty_tab name tab = - pretty_strs_qs name (sort_strings (Symtab.keys tab)); - - fun pretty_ruletab name tab = - Pretty.big_list name (map (Pretty.item o single o Ast.pretty_rule) (dest_ruletab tab)); - - val {consts, parse_ast_trtab, parse_ruletab, parse_trtab, print_trtab, - print_ruletab, print_ast_trtab, ...} = tabs; - in - [pretty_tab "consts:" consts, - pretty_tab "parse_ast_translation:" parse_ast_trtab, - pretty_ruletab "parse_rules:" parse_ruletab, - pretty_tab "parse_translation:" parse_trtab, - pretty_tab "print_translation:" print_trtab, - pretty_ruletab "print_rules:" print_ruletab, - pretty_tab "print_ast_translation:" print_ast_trtab] - end; - -in - -fun print_gram syn = Pretty.writeln_chunks (pretty_gram syn); -fun print_trans syn = Pretty.writeln_chunks (pretty_trans syn); -fun print_syntax syn = Pretty.writeln_chunks (pretty_gram syn @ pretty_trans syn); - -end; - - -(* reconstructing infixes -- educated guessing *) - -fun guess_infix (Syntax ({gram, ...}, _)) c = - (case Parser.guess_infix_lr (Lazy.force gram) c of - SOME (s, l, r, j) => SOME - (if l then Mixfix.Infixl (s, j) - else if r then Mixfix.Infixr (s, j) - else Mixfix.Infix (s, j)) - | NONE => NONE); - - - -(** prepare translation rules **) - -(* rules *) - -datatype 'a trrule = - Parse_Rule of 'a * 'a | - Print_Rule of 'a * 'a | - Parse_Print_Rule of 'a * 'a; - -fun map_trrule f (Parse_Rule (x, y)) = Parse_Rule (f x, f y) - | map_trrule f (Print_Rule (x, y)) = Print_Rule (f x, f y) - | map_trrule f (Parse_Print_Rule (x, y)) = Parse_Print_Rule (f x, f y); - -fun parse_rule (Parse_Rule pats) = SOME pats - | parse_rule (Print_Rule _) = NONE - | parse_rule (Parse_Print_Rule pats) = SOME pats; - -fun print_rule (Parse_Rule _) = NONE - | print_rule (Print_Rule pats) = SOME (swap pats) - | print_rule (Parse_Print_Rule pats) = SOME (swap pats); - - -(* check_rules *) - -local - -fun check_rule rule = - (case Ast.rule_error rule 
of - SOME msg => - error ("Error in syntax translation rule: " ^ msg ^ "\n" ^ - Pretty.string_of (Ast.pretty_rule rule)) - | NONE => rule); - -in - -fun check_rules rules = - (map check_rule (map_filter parse_rule rules), - map check_rule (map_filter print_rule rules)); - -end; - - - -(** modify syntax **) - -val update_trfuns = update_syntax mode_default o Syntax_Ext.syn_ext_trfuns; - -fun update_type_gram add prmode decls = - (if add then update_syntax else remove_syntax) prmode (Mixfix.syn_ext_types decls); - -fun update_const_gram add is_logtype prmode decls = - (if add then update_syntax else remove_syntax) prmode (Mixfix.syn_ext_consts is_logtype decls); - -val update_trrules = update_syntax mode_default o Syntax_Ext.syn_ext_rules o check_rules; -val remove_trrules = remove_syntax mode_default o Syntax_Ext.syn_ext_rules o check_rules; - - -open Lexicon.Syntax; - -end; - diff --git a/core/Pure/Syntax/syntax_ext.ML b/core/Pure/Syntax/syntax_ext.ML deleted file mode 100644 index a5546b90..00000000 --- a/core/Pure/Syntax/syntax_ext.ML +++ /dev/null @@ -1,323 +0,0 @@ -(* Title: Pure/Syntax/syntax_ext.ML - Author: Markus Wenzel and Carsten Clasohm, TU Muenchen - -Syntax extension. -*) - -signature SYNTAX_EXT = -sig - val dddot_indexname: indexname - datatype mfix = Mfix of string * typ * string * int list * int - val err_in_mfix: string -> mfix -> 'a - val typ_to_nonterm: typ -> string - datatype xsymb = - Delim of string | - Argument of string * int | - Space of string | - Bg of int | Brk of int | En - datatype xprod = XProd of string * xsymb list * string * int - val chain_pri: int - val delims_of: xprod list -> string list list - datatype syn_ext = - Syn_Ext of { - xprods: xprod list, - consts: (string * string) list, - parse_ast_translation: (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list, - parse_rules: (Ast.ast * Ast.ast) list, - parse_translation: (string * ((Proof.context -> term list -> term) * stamp)) list, - print_translation: (string * ((Proof.context -> typ -> term list -> term) * stamp)) list, - print_rules: (Ast.ast * Ast.ast) list, - print_ast_translation: (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list} - val mfix_delims: string -> string list - val mfix_args: string -> int - val escape: string -> string - val syn_ext': (string -> bool) -> mfix list -> - (string * string) list -> (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list * - (string * ((Proof.context -> term list -> term) * stamp)) list * - (string * ((Proof.context -> typ -> term list -> term) * stamp)) list * - (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list -> - (Ast.ast * Ast.ast) list * (Ast.ast * Ast.ast) list -> syn_ext - val syn_ext: mfix list -> (string * string) list -> - (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list * - (string * ((Proof.context -> term list -> term) * stamp)) list * - (string * ((Proof.context -> typ -> term list -> term) * stamp)) list * - (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list -> - (Ast.ast * Ast.ast) list * (Ast.ast * Ast.ast) list -> syn_ext - val syn_ext_rules: (Ast.ast * Ast.ast) list * (Ast.ast * Ast.ast) list -> syn_ext - val syn_ext_trfuns: - (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list * - (string * ((Proof.context -> term list -> term) * stamp)) list * - (string * ((Proof.context -> typ -> term list -> term) * stamp)) list * - (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list -> syn_ext - val 
stamp_trfun: stamp -> string * 'a -> string * ('a * stamp) - val mk_trfun: string * 'a -> string * ('a * stamp) - val eq_trfun: ('a * stamp) * ('a * stamp) -> bool -end; - -structure Syntax_Ext: SYNTAX_EXT = -struct - - -(** misc definitions **) - -val dddot_indexname = ("dddot", 0); - - -(** datatype xprod **) - -(*Delim s: delimiter s - Argument (s, p): nonterminal s requiring priority >= p, or valued token - Space s: some white space for printing - Bg, Brk, En: blocks and breaks for pretty printing*) - -datatype xsymb = - Delim of string | - Argument of string * int | - Space of string | - Bg of int | Brk of int | En; - -fun is_delim (Delim _) = true - | is_delim _ = false; - -fun is_terminal (Delim _) = true - | is_terminal (Argument (s, _)) = Lexicon.is_terminal s - | is_terminal _ = false; - -fun is_argument (Argument _) = true - | is_argument _ = false; - -fun is_index (Argument ("index", _)) = true - | is_index _ = false; - -val index = Argument ("index", 1000); - - -(*XProd (lhs, syms, c, p): - lhs: name of nonterminal on the lhs of the production - syms: list of symbols on the rhs of the production - c: head of parse tree - p: priority of this production*) - -datatype xprod = XProd of string * xsymb list * string * int; - -val chain_pri = ~1; (*dummy for chain productions*) - -fun delims_of xprods = - fold (fn XProd (_, xsymbs, _, _) => - fold (fn Delim s => insert (op =) s | _ => I) xsymbs) xprods [] - |> map Symbol.explode; - - - -(** datatype mfix **) - -(*Mfix (sy, ty, c, ps, p): - sy: rhs of production as symbolic string - ty: type description of production - c: head of parse tree - ps: priorities of arguments in sy - p: priority of production*) - -datatype mfix = Mfix of string * typ * string * int list * int; - -fun err_in_mfix msg (Mfix (sy, _, const, _, _)) = - cat_error msg ("in mixfix annotation " ^ quote sy ^ " for " ^ quote const); - - -(* typ_to_nonterm *) - -fun typ_to_nt _ (Type (c, _)) = c - | typ_to_nt default _ = default; - -(*get nonterminal for rhs*) -val typ_to_nonterm = typ_to_nt "any"; - -(*get nonterminal for lhs*) -val typ_to_nonterm1 = typ_to_nt "logic"; - - -(* read mixfix annotations *) - -local - -val is_meta = member (op =) ["(", ")", "/", "_", "\\"]; - -val scan_delim_char = - $$ "'" |-- Scan.one ((not o Symbol.is_blank) andf Symbol.is_regular) || - Scan.one ((not o is_meta) andf (not o Symbol.is_blank) andf Symbol.is_regular); - -fun read_int ["0", "0"] = ~1 - | read_int cs = #1 (Library.read_int cs); - -val scan_sym = - $$ "_" >> K (Argument ("", 0)) || - $$ "\\" >> K index || - $$ "(" |-- Scan.many Symbol.is_digit >> (Bg o read_int) || - $$ ")" >> K En || - $$ "/" -- $$ "/" >> K (Brk ~1) || - $$ "/" |-- Scan.many Symbol.is_blank >> (Brk o length) || - Scan.many1 Symbol.is_blank >> (Space o implode) || - Scan.repeat1 scan_delim_char >> (Delim o implode); - -val scan_symb = - scan_sym >> SOME || - $$ "'" -- Scan.one Symbol.is_blank >> K NONE; - -val scan_symbs = Scan.repeat scan_symb --| Scan.ahead (~$$ "'"); -val read_symbs = map_filter I o the o Scan.read Symbol.stopper scan_symbs; - -fun unique_index xsymbs = - if length (filter is_index xsymbs) <= 1 then xsymbs - else error "Duplicate index arguments (\\)"; - -in - -val read_mfix = unique_index o read_symbs o Symbol.explode; - -fun mfix_delims sy = fold_rev (fn Delim s => cons s | _ => I) (read_mfix sy) []; -val mfix_args = length o filter is_argument o read_mfix; - -val escape = implode o map (fn s => if is_meta s then "'" ^ s else s) o Symbol.explode; - -end; - - -(* mfix_to_xprod *) - -fun 
mfix_to_xprod is_logtype (mfix as Mfix (sy, typ, const, pris, pri)) = - let - fun check_pri p = - if p >= 0 andalso p <= 1000 then () - else err_in_mfix ("Precedence out of range: " ^ string_of_int p) mfix; - - fun blocks_ok [] 0 = true - | blocks_ok [] _ = false - | blocks_ok (Bg _ :: syms) n = blocks_ok syms (n + 1) - | blocks_ok (En :: _) 0 = false - | blocks_ok (En :: syms) n = blocks_ok syms (n - 1) - | blocks_ok (_ :: syms) n = blocks_ok syms n; - - fun check_blocks syms = - if blocks_ok syms 0 then () - else err_in_mfix "Unbalanced block parentheses" mfix; - - - val cons_fst = apfst o cons; - - fun add_args [] ty [] = ([], typ_to_nonterm1 ty) - | add_args [] _ _ = err_in_mfix "Too many precedences" mfix - | add_args ((arg as Argument ("index", _)) :: syms) ty ps = - cons_fst arg (add_args syms ty ps) - | add_args (Argument _ :: syms) (Type ("fun", [ty, tys])) [] = - cons_fst (Argument (typ_to_nonterm ty, 0)) (add_args syms tys []) - | add_args (Argument _ :: syms) (Type ("fun", [ty, tys])) (p :: ps) = - cons_fst (Argument (typ_to_nonterm ty, p)) (add_args syms tys ps) - | add_args (Argument _ :: _) _ _ = - err_in_mfix "More arguments than in corresponding type" mfix - | add_args (sym :: syms) ty ps = cons_fst sym (add_args syms ty ps); - - fun rem_pri (Argument (s, _)) = Argument (s, chain_pri) - | rem_pri sym = sym; - - fun logify_types (a as (Argument (s, p))) = - if s <> "prop" andalso is_logtype s then Argument ("logic", p) else a - | logify_types a = a; - - - val raw_symbs = read_mfix sy handle ERROR msg => err_in_mfix msg mfix; - val args = filter (fn Argument _ => true | _ => false) raw_symbs; - val (const', typ', syntax_consts, parse_rules) = - if not (exists is_index args) then (const, typ, NONE, NONE) - else - let - val indexed_const = - if const <> "" then const ^ "_indexed" - else err_in_mfix "Missing constant name for indexed syntax" mfix; - val rangeT = Term.range_type typ handle Match => - err_in_mfix "Missing structure argument for indexed syntax" mfix; - - val xs = map Ast.Variable (Name.invent Name.context "xa" (length args - 1)); - val (xs1, xs2) = chop (find_index is_index args) xs; - val i = Ast.Variable "i"; - val lhs = Ast.mk_appl (Ast.Constant indexed_const) - (xs1 @ [Ast.mk_appl (Ast.Constant "_index") [i]] @ xs2); - val rhs = Ast.mk_appl (Ast.Constant const) (i :: xs); - in (indexed_const, rangeT, SOME (indexed_const, const), SOME (lhs, rhs)) end; - - val (symbs, lhs) = add_args raw_symbs typ' pris; - - val copy_prod = - (lhs = "prop" orelse lhs = "logic") - andalso const <> "" - andalso not (null symbs) - andalso not (exists is_delim symbs); - val lhs' = - if copy_prod orelse lhs = "prop" andalso symbs = [Argument ("prop'", 0)] then lhs - else if lhs = "prop" then "prop'" - else if is_logtype lhs then "logic" - else lhs; - val symbs' = map logify_types symbs; - val xprod = XProd (lhs', symbs', const', pri); - - val _ = (List.app check_pri pris; check_pri pri; check_blocks symbs'); - val xprod' = - if Lexicon.is_terminal lhs' then err_in_mfix ("Illegal lhs: " ^ lhs') mfix - else if const <> "" then xprod - else if length (filter is_argument symbs') <> 1 then - err_in_mfix "Copy production must have exactly one argument" mfix - else if exists is_terminal symbs' then xprod - else XProd (lhs', map rem_pri symbs', "", chain_pri); - - in (xprod', syntax_consts, parse_rules) end; - - - -(** datatype syn_ext **) - -datatype syn_ext = - Syn_Ext of { - xprods: xprod list, - consts: (string * string) list, - parse_ast_translation: (string * ((Proof.context -> Ast.ast 
list -> Ast.ast) * stamp)) list, - parse_rules: (Ast.ast * Ast.ast) list, - parse_translation: (string * ((Proof.context -> term list -> term) * stamp)) list, - print_translation: (string * ((Proof.context -> typ -> term list -> term) * stamp)) list, - print_rules: (Ast.ast * Ast.ast) list, - print_ast_translation: (string * ((Proof.context -> Ast.ast list -> Ast.ast) * stamp)) list}; - - -(* syn_ext *) - -fun syn_ext' is_logtype mfixes consts trfuns (parse_rules, print_rules) = - let - val (parse_ast_translation, parse_translation, print_translation, - print_ast_translation) = trfuns; - - val xprod_results = map (mfix_to_xprod is_logtype) mfixes; - val xprods = map #1 xprod_results; - val consts' = map_filter #2 xprod_results; - val parse_rules' = rev (map_filter #3 xprod_results); - val mfix_consts = map (fn Mfix x => (#3 x, "")) mfixes @ map (fn XProd x => (#3 x, "")) xprods; - in - Syn_Ext { - xprods = xprods, - consts = mfix_consts @ consts' @ consts, - parse_ast_translation = parse_ast_translation, - parse_rules = parse_rules' @ parse_rules, - parse_translation = parse_translation, - print_translation = print_translation, - print_rules = map swap parse_rules' @ print_rules, - print_ast_translation = print_ast_translation} - end; - - -val syn_ext = syn_ext' (K false); - -fun syn_ext_rules rules = syn_ext [] [] ([], [], [], []) rules; -fun syn_ext_trfuns trfuns = syn_ext [] [] trfuns ([], []); - -fun stamp_trfun s (c, f) = (c, (f, s)); -fun mk_trfun tr = stamp_trfun (stamp ()) tr; -fun eq_trfun ((_, s1: stamp), (_, s2)) = s1 = s2; - -end; diff --git a/core/Pure/Syntax/syntax_phases.ML b/core/Pure/Syntax/syntax_phases.ML deleted file mode 100644 index 5dded90b..00000000 --- a/core/Pure/Syntax/syntax_phases.ML +++ /dev/null @@ -1,994 +0,0 @@ -(* Title: Pure/Syntax/syntax_phases.ML - Author: Makarius - -Main phases of inner syntax processing, with standard implementations -of parse/unparse operations. 
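(* Editor's illustration (not part of the removed sources): a hedged sketch of
   the Syntax_Ext datatypes deleted above.  Assuming a hypothetical infix
   constant "plus" declared with the mixfix template "(_ +/ _)", argument
   priorities [65, 66] and result priority 65, read_mfix "(_ +/ _)" yields
   roughly

     [Bg 0, Argument ("", 0), Space " ", Delim "+", Brk 1, Argument ("", 0), En]

   and mfix_to_xprod then fills in nonterminals and priorities from the
   constant's type and the priority list, giving one XProd grammar production. *)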
-*) - -signature SYNTAX_PHASES = -sig - val decode_sort: term -> sort - val decode_typ: term -> typ - val decode_term: Proof.context -> - Position.report_text list * term Exn.result -> Position.report_text list * term Exn.result - val parse_ast_pattern: Proof.context -> string * string -> Ast.ast - val term_of_typ: Proof.context -> typ -> term - val print_checks: Proof.context -> unit - val typ_check: int -> string -> (Proof.context -> typ list -> typ list) -> - Context.generic -> Context.generic - val term_check: int -> string -> (Proof.context -> term list -> term list) -> - Context.generic -> Context.generic - val typ_uncheck: int -> string -> (Proof.context -> typ list -> typ list) -> - Context.generic -> Context.generic - val term_uncheck: int -> string -> (Proof.context -> term list -> term list) -> - Context.generic -> Context.generic - val typ_check': int -> string -> - (typ list -> Proof.context -> (typ list * Proof.context) option) -> - Context.generic -> Context.generic - val term_check': int -> string -> - (term list -> Proof.context -> (term list * Proof.context) option) -> - Context.generic -> Context.generic - val typ_uncheck': int -> string -> - (typ list -> Proof.context -> (typ list * Proof.context) option) -> - Context.generic -> Context.generic - val term_uncheck': int -> string -> - (term list -> Proof.context -> (term list * Proof.context) option) -> - Context.generic -> Context.generic -end - -structure Syntax_Phases: SYNTAX_PHASES = -struct - -(** markup logical entities **) - -fun markup_class ctxt c = - [Name_Space.markup (Type.class_space (Proof_Context.tsig_of ctxt)) c]; - -fun markup_type ctxt c = - [Name_Space.markup (Type.type_space (Proof_Context.tsig_of ctxt)) c]; - -fun markup_const ctxt c = - [Name_Space.markup (Consts.space_of (Proof_Context.consts_of ctxt)) c]; - -fun markup_free ctxt x = - [if Name.is_skolem x then Markup.skolem else Markup.free] @ - (if Variable.is_body ctxt orelse Variable.is_fixed ctxt x - then [Variable.markup_fixed ctxt x] - else []); - -fun markup_var xi = [Markup.name (Term.string_of_vname xi) Markup.var]; - -fun markup_bound def ps (name, id) = - let val entity = Markup.entity Markup.boundN name in - Markup.bound :: - map (fn pos => Markup.properties (Position.entity_properties_of def id pos) entity) ps - end; - -fun markup_entity ctxt c = - (case Syntax.lookup_const (Proof_Context.syn_of ctxt) c of - SOME "" => [] - | SOME b => markup_entity ctxt b - | NONE => c |> Lexicon.unmark - {case_class = markup_class ctxt, - case_type = markup_type ctxt, - case_const = markup_const ctxt, - case_fixed = markup_free ctxt, - case_default = K []}); - - - -(** decode parse trees **) - -(* decode_sort *) - -fun decode_sort tm = - let - fun err () = raise TERM ("decode_sort: bad encoding of classes", [tm]); - - fun class s = Lexicon.unmark_class s handle Fail _ => err (); - - fun classes (Const (s, _)) = [class s] - | classes (Const ("_classes", _) $ Const (s, _) $ cs) = class s :: classes cs - | classes _ = err (); - - fun sort (Const ("_topsort", _)) = [] - | sort (Const ("_sort", _) $ cs) = classes cs - | sort (Const (s, _)) = [class s] - | sort _ = err (); - in sort tm end; - - -(* decode_typ *) - -fun decode_pos (Free (s, _)) = - if is_some (Term_Position.decode s) then SOME s else NONE - | decode_pos _ = NONE; - -fun decode_typ tm = - let - fun err () = raise TERM ("decode_typ: bad encoding of type", [tm]); - - fun typ ps sort tm = - (case tm of - Const ("_tfree", _) $ t => typ ps sort t - | Const ("_tvar", _) $ t => typ ps sort t - | 
Const ("_ofsort", _) $ t $ s => - (case decode_pos s of - SOME p => typ (p :: ps) sort t - | NONE => - if is_none sort then typ ps (SOME (decode_sort s)) t - else err ()) - | Const ("_dummy_ofsort", _) $ s => TFree ("'_dummy_", decode_sort s) - | Free (x, _) => TFree (x, ps @ the_default dummyS sort) - | Var (xi, _) => TVar (xi, ps @ the_default dummyS sort) - | _ => - if null ps andalso is_none sort then - let - val (head, args) = Term.strip_comb tm; - val a = - (case head of - Const (c, _) => (Lexicon.unmark_type c handle Fail _ => err ()) - | _ => err ()); - in Type (a, map (typ [] NONE) args) end - else err ()); - in typ [] NONE tm end; - - -(* parsetree_to_ast *) - -fun parsetree_to_ast ctxt trf parsetree = - let - val reports = Unsynchronized.ref ([]: Position.report_text list); - fun report pos = Position.store_reports reports [pos]; - val append_reports = Position.append_reports reports; - - fun trans a args = - (case trf a of - NONE => Ast.mk_appl (Ast.Constant a) args - | SOME f => f ctxt args); - - fun asts_of_token tok = - if Lexicon.valued_token tok - then [Ast.Variable (Lexicon.str_of_token tok)] - else []; - - fun ast_of_position tok = - Ast.Variable (Term_Position.encode (Lexicon.pos_of_token tok)); - - fun ast_of_dummy a tok = - Ast.Appl [Ast.Constant "_constrain", Ast.Constant a, ast_of_position tok]; - - fun asts_of_position c tok = - [Ast.Appl [Ast.Constant c, ast_of (Parser.Tip tok), ast_of_position tok]] - - and asts_of (Parser.Node ("_class_name", [Parser.Tip tok])) = - let - val pos = Lexicon.pos_of_token tok; - val (c, rs) = Proof_Context.check_class ctxt (Lexicon.str_of_token tok, pos); - val _ = append_reports rs; - in [Ast.Constant (Lexicon.mark_class c)] end - | asts_of (Parser.Node ("_type_name", [Parser.Tip tok])) = - let - val pos = Lexicon.pos_of_token tok; - val (Type (c, _), rs) = - Proof_Context.check_type_name {proper = true, strict = false} ctxt - (Lexicon.str_of_token tok, pos); - val _ = append_reports rs; - in [Ast.Constant (Lexicon.mark_type c)] end - | asts_of (Parser.Node ("_position", [Parser.Tip tok])) = asts_of_position "_constrain" tok - | asts_of (Parser.Node ("_position_sort", [Parser.Tip tok])) = asts_of_position "_ofsort" tok - | asts_of (Parser.Node (a as "\\<^const>Pure.dummy_pattern", [Parser.Tip tok])) = - [ast_of_dummy a tok] - | asts_of (Parser.Node (a as "_idtdummy", [Parser.Tip tok])) = - [ast_of_dummy a tok] - | asts_of (Parser.Node ("_idtypdummy", pts as [Parser.Tip tok, _, _])) = - [Ast.Appl (Ast.Constant "_constrain" :: ast_of_dummy "_idtdummy" tok :: maps asts_of pts)] - | asts_of (Parser.Node (a, pts)) = - let - val _ = pts |> List.app - (fn Parser.Node _ => () | Parser.Tip tok => - if Lexicon.valued_token tok then () - else report (Lexicon.pos_of_token tok) (markup_entity ctxt) a); - in [trans a (maps asts_of pts)] end - | asts_of (Parser.Tip tok) = asts_of_token tok - - and ast_of pt = - (case asts_of pt of - [ast] => ast - | asts => raise Ast.AST ("parsetree_to_ast: malformed parsetree", asts)); - - val ast = Exn.interruptible_capture ast_of parsetree; - in (! 
reports, ast) end; - - -(* ast_to_term *) - -fun ast_to_term ctxt trf = - let - fun trans a args = - (case trf a of - NONE => Term.list_comb (Syntax.const a, args) - | SOME f => f ctxt args); - - fun term_of (Ast.Constant a) = trans a [] - | term_of (Ast.Variable x) = Lexicon.read_var x - | term_of (Ast.Appl (Ast.Constant a :: (asts as _ :: _))) = - trans a (map term_of asts) - | term_of (Ast.Appl (ast :: (asts as _ :: _))) = - Term.list_comb (term_of ast, map term_of asts) - | term_of (ast as Ast.Appl _) = raise Ast.AST ("ast_to_term: malformed ast", [ast]); - in term_of end; - - -(* decode_term -- transform parse tree into raw term *) - -fun decode_const ctxt (c, ps) = - let - val (Const (c', _), reports) = - Proof_Context.check_const {proper = true, strict = false} ctxt (c, ps); - in (c', reports) end; - -local - -fun get_free ctxt x = - let - val fixed = Variable.lookup_fixed ctxt x; - val is_const = can (decode_const ctxt) (x, []) orelse Long_Name.is_qualified x; - val is_declared = is_some (Variable.def_type ctxt false (x, ~1)); - in - if Variable.is_const ctxt x then NONE - else if is_some fixed then fixed - else if not is_const orelse is_declared then SOME x - else NONE - end; - -in - -fun decode_term _ (result as (_: Position.report_text list, Exn.Exn _)) = result - | decode_term ctxt (reports0, Exn.Res tm) = - let - val reports = Unsynchronized.ref reports0; - fun report ps = Position.store_reports reports ps; - val append_reports = Position.append_reports reports; - - fun decode ps qs bs (Const ("_constrain", _) $ t $ typ) = - (case Term_Position.decode_position typ of - SOME (p, T) => Type.constraint T (decode (p :: ps) qs bs t) - | NONE => Type.constraint (decode_typ typ) (decode ps qs bs t)) - | decode ps qs bs (Const ("_constrainAbs", _) $ t $ typ) = - (case Term_Position.decode_position typ of - SOME (q, T) => Type.constraint (T --> dummyT) (decode ps (q :: qs) bs t) - | NONE => Type.constraint (decode_typ typ --> dummyT) (decode ps qs bs t)) - | decode _ qs bs (Abs (x, T, t)) = - let - val id = serial (); - val _ = report qs (markup_bound true qs) (x, id); - in Abs (x, T, decode [] [] ((qs, (x, id)) :: bs) t) end - | decode _ _ bs (t $ u) = decode [] [] bs t $ decode [] [] bs u - | decode ps _ _ (Const (a, T)) = - (case try Lexicon.unmark_fixed a of - SOME x => (report ps (markup_free ctxt) x; Free (x, T)) - | NONE => - let - val c = - (case try Lexicon.unmark_const a of - SOME c => c - | NONE => #1 (decode_const ctxt (a, []))); - val _ = report ps (markup_const ctxt) c; - in Const (c, T) end) - | decode ps _ _ (Free (a, T)) = - ((Name.reject_internal (a, ps) handle ERROR msg => - error (msg ^ Proof_Context.consts_completion_message ctxt (a, ps))); - (case get_free ctxt a of - SOME x => (report ps (markup_free ctxt) x; Free (x, T)) - | NONE => - let - val (c, rs) = decode_const ctxt (a, ps); - val _ = append_reports rs; - in Const (c, T) end)) - | decode ps _ _ (Var (xi, T)) = (report ps markup_var xi; Var (xi, T)) - | decode ps _ bs (t as Bound i) = - (case try (nth bs) i of - SOME (qs, (x, id)) => (report ps (markup_bound false qs) (x, id); t) - | NONE => t); - - val tm' = Exn.interruptible_capture (fn () => decode [] [] [] tm) (); - in (! 
reports, tm') end; - -end; - - - -(** parse **) - -(* results *) - -fun proper_results results = map_filter (fn (y, Exn.Res x) => SOME (y, x) | _ => NONE) results; -fun failed_results results = map_filter (fn (y, Exn.Exn e) => SOME (y, e) | _ => NONE) results; - -fun report_result ctxt pos ambig_msgs results = - (case (proper_results results, failed_results results) of - ([], (reports, exn) :: _) => (Context_Position.reports_text ctxt reports; reraise exn) - | ([(reports, x)], _) => (Context_Position.reports_text ctxt reports; x) - | _ => - if null ambig_msgs then - error ("Parse error: ambiguous syntax" ^ Position.here pos) - else error (cat_lines ambig_msgs)); - - -(* parse raw asts *) - -fun parse_asts ctxt raw root (syms, pos) = - let - val syn = Proof_Context.syn_of ctxt; - val ast_tr = Syntax.parse_ast_translation syn; - - val toks = Syntax.tokenize syn raw syms; - val _ = Context_Position.reports ctxt (map Lexicon.report_of_token toks); - - val pts = Syntax.parse syn root (filter Lexicon.is_proper toks) - handle ERROR msg => - error (msg ^ Markup.markup_report (implode (map (Lexicon.reported_token_range ctxt) toks))); - val len = length pts; - - val limit = Config.get ctxt Syntax.ambiguity_limit; - val ambig_msgs = - if len <= 1 then [] - else - [cat_lines - (("Ambiguous input" ^ Position.here (Position.reset_range pos) ^ - " produces " ^ string_of_int len ^ " parse trees" ^ - (if len <= limit then "" else " (" ^ string_of_int limit ^ " displayed)") ^ ":") :: - map (Pretty.string_of o Pretty.item o single o Parser.pretty_parsetree) - (take limit pts))]; - - in (ambig_msgs, map (parsetree_to_ast ctxt ast_tr) pts) end; - -fun parse_tree ctxt root input = - let - val syn = Proof_Context.syn_of ctxt; - val tr = Syntax.parse_translation syn; - val parse_rules = Syntax.parse_rules syn; - val (ambig_msgs, asts) = parse_asts ctxt false root input; - val results = - (map o apsnd o Exn.maps_result) - (Ast.normalize ctxt parse_rules #> Exn.interruptible_capture (ast_to_term ctxt tr)) asts; - in (ambig_msgs, results) end; - - -(* parse logical entities *) - -fun parse_failed ctxt pos msg kind = - cat_error msg ("Failed to parse " ^ kind ^ - Markup.markup_report (Context_Position.reported_text ctxt pos Markup.bad "")); - -fun parse_sort ctxt = - Syntax.parse_token ctxt Term_XML.Decode.sort Markup.language_sort - (fn (syms, pos) => - parse_tree ctxt "sort" (syms, pos) - |> uncurry (report_result ctxt pos) - |> decode_sort - |> Type.minimize_sort (Proof_Context.tsig_of ctxt) - handle ERROR msg => parse_failed ctxt pos msg "sort"); - -fun parse_typ ctxt = - Syntax.parse_token ctxt Term_XML.Decode.typ Markup.language_type - (fn (syms, pos) => - parse_tree ctxt "type" (syms, pos) - |> uncurry (report_result ctxt pos) - |> decode_typ - handle ERROR msg => parse_failed ctxt pos msg "type"); - -fun parse_term is_prop ctxt = - let - val (markup, kind, root, constrain) = - if is_prop - then (Markup.language_prop, "prop", "prop", Type.constraint propT) - else (Markup.language_term, "term", Config.get ctxt Syntax.root, I); - val decode = constrain o Term_XML.Decode.term; - in - Syntax.parse_token ctxt decode markup - (fn (syms, pos) => - let - val (ambig_msgs, results) = parse_tree ctxt root (syms, pos) ||> map (decode_term ctxt); - val parsed_len = length (proper_results results); - - val ambiguity_warning = Config.get ctxt Syntax.ambiguity_warning; - val limit = Config.get ctxt Syntax.ambiguity_limit; - - (*brute-force disambiguation via type-inference*) - fun check t = (Syntax.check_term ctxt (constrain 
t); Exn.Res t) - handle exn as ERROR _ => Exn.Exn exn; - - val results' = - if parsed_len > 1 then - (grouped 10 (Par_List.map_name "Syntax_Phases.parse_term") o apsnd o Exn.maps_result) - check results - else results; - val reports' = fst (hd results'); - - val errs = map snd (failed_results results'); - val checked = map snd (proper_results results'); - val checked_len = length checked; - - val pretty_term = Syntax.pretty_term (Config.put Printer.show_brackets true ctxt); - in - if checked_len = 0 then - report_result ctxt pos [] - [(reports', Exn.Exn (Exn.EXCEPTIONS (map ERROR ambig_msgs @ errs)))] - else if checked_len = 1 then - (if not (null ambig_msgs) andalso ambiguity_warning andalso - Context_Position.is_visible ctxt then - warning - (cat_lines (ambig_msgs @ - ["Fortunately, only one parse tree is well-formed and type-correct,\n\ - \but you may still want to disambiguate your grammar or your input."])) - else (); report_result ctxt pos [] results') - else - report_result ctxt pos [] - [(reports', Exn.Exn (ERROR (cat_lines (ambig_msgs @ - (("Ambiguous input\n" ^ string_of_int checked_len ^ " terms are type correct" ^ - (if checked_len <= limit then "" - else " (" ^ string_of_int limit ^ " displayed)") ^ ":") :: - map (Pretty.string_of o Pretty.item o single o pretty_term) - (take limit checked))))))] - end handle ERROR msg => parse_failed ctxt pos msg kind) - end; - - -(* parse_ast_pattern *) - -fun parse_ast_pattern ctxt (root, str) = - let - val syn = Proof_Context.syn_of ctxt; - - val reports = Unsynchronized.ref ([]: Position.report_text list); - fun report ps = Position.store_reports reports ps; - - fun decode_const ps c = (report ps (markup_entity ctxt) c; Ast.Constant c); - fun decode_var ps x = (report ps (fn () => [Markup.name x Markup.free]) (); Ast.Variable x); - fun decode_appl ps asts = Ast.Appl (map (decode ps) asts) - and decode ps (Ast.Constant c) = decode_const ps c - | decode ps (Ast.Variable x) = - if is_some (Syntax.lookup_const syn x) orelse Long_Name.is_qualified x - then decode_const ps x - else decode_var ps x - | decode ps (Ast.Appl (asts as (Ast.Constant c :: ast :: Ast.Variable x :: args))) = - if member (op =) Term_Position.markers c then - (case Term_Position.decode x of - SOME p => Ast.mk_appl (decode (p :: ps) ast) (map (decode ps) args) - | NONE => decode_appl ps asts) - else decode_appl ps asts - | decode ps (Ast.Appl asts) = decode_appl ps asts; - - val {text, pos, ...} = Syntax.read_token str; - val syms = Symbol_Pos.explode (text, pos); - val ast = - parse_asts ctxt true root (syms, pos) - |> uncurry (report_result ctxt pos) - |> decode []; - val _ = Context_Position.reports_text ctxt (! 
reports); - in ast end; - - - -(** encode parse trees **) - -(* term_of_sort *) - -fun term_of_sort S = - let - val class = Syntax.const o Lexicon.mark_class; - - fun classes [c] = class c - | classes (c :: cs) = Syntax.const "_classes" $ class c $ classes cs; - in - (case S of - [] => Syntax.const "_topsort" - | [c] => class c - | cs => Syntax.const "_sort" $ classes cs) - end; - - -(* term_of_typ *) - -fun term_of_typ ctxt ty = - let - val show_sorts = Config.get ctxt show_sorts orelse Config.get ctxt show_markup; - - fun ofsort t raw_S = - if show_sorts then - let val S = #2 (Term_Position.decode_positionS raw_S) - in if S = dummyS then t else Syntax.const "_ofsort" $ t $ term_of_sort S end - else t; - - fun term_of (Type (a, Ts)) = - Term.list_comb (Syntax.const (Lexicon.mark_type a), map term_of Ts) - | term_of (TFree (x, S)) = - if is_some (Term_Position.decode x) then Syntax.free x - else ofsort (Syntax.const "_tfree" $ Syntax.free x) S - | term_of (TVar (xi, S)) = ofsort (Syntax.const "_tvar" $ Syntax.var xi) S; - in term_of ty end; - - -(* simple_ast_of *) - -fun simple_ast_of ctxt = - let - val tune_var = if Config.get ctxt show_question_marks then I else unprefix "?"; - fun ast_of (Const (c, _)) = Ast.Constant c - | ast_of (Free (x, _)) = Ast.Variable x - | ast_of (Var (xi, _)) = Ast.Variable (tune_var (Term.string_of_vname xi)) - | ast_of (t as _ $ _) = - let val (f, args) = strip_comb t - in Ast.mk_appl (ast_of f) (map ast_of args) end - | ast_of (Bound i) = Ast.Appl [Ast.Constant "_loose", Ast.Variable ("B." ^ string_of_int i)] - | ast_of (Abs _) = raise Fail "simple_ast_of: Abs"; - in ast_of end; - - -(* sort_to_ast and typ_to_ast *) - -fun ast_of_termT ctxt trf tm = - let - val ctxt' = Config.put show_sorts false ctxt; - fun ast_of (t as Const ("_tfree", _) $ Free _) = simple_ast_of ctxt t - | ast_of (t as Const ("_tvar", _) $ Var _) = simple_ast_of ctxt t - | ast_of (Const (a, _)) = trans a [] - | ast_of (t as _ $ _) = - (case strip_comb t of - (Const (a, _), args) => trans a args - | (f, args) => Ast.Appl (map ast_of (f :: args))) - | ast_of t = simple_ast_of ctxt t - and trans a args = ast_of (trf a ctxt' dummyT args) - handle Match => Ast.mk_appl (Ast.Constant a) (map ast_of args); - in ast_of tm end; - -fun sort_to_ast ctxt trf S = ast_of_termT ctxt trf (term_of_sort S); -fun typ_to_ast ctxt trf T = ast_of_termT ctxt trf (term_of_typ ctxt T); - - -(* term_to_ast *) - -local - -fun mark_aprop tm = - let - fun aprop t = Syntax.const "_aprop" $ t; - - fun is_prop Ts t = - Type_Annotation.clean (Type_Annotation.fastype_of Ts t) = propT - handle TERM _ => false; - - fun is_term (Const ("Pure.term", _) $ _) = true - | is_term _ = false; - - fun mark _ (t as Const _) = t - | mark Ts (t as Const ("_bound", _) $ u) = if is_prop Ts u then aprop t else t - | mark Ts (t as Free _) = if is_prop Ts t then aprop t else t - | mark Ts (t as Var _) = if is_prop Ts t then aprop t else t - | mark Ts (t as Bound _) = if is_prop Ts t then aprop t else t - | mark Ts (Abs (x, T, t)) = Abs (x, T, mark (T :: Ts) t) - | mark Ts (t as t1 $ (t2 as Const ("Pure.type", Type ("itself", [T])))) = - if is_prop Ts t andalso not (is_term t) then Const ("_type_prop", T) $ mark Ts t1 - else mark Ts t1 $ mark Ts t2 - | mark Ts (t as t1 $ t2) = - (if is_Const (Term.head_of t) orelse not (is_prop Ts t) then I else aprop) - (mark Ts t1 $ mark Ts t2); - in mark [] tm end; - -fun prune_types ctxt tm = - let - fun regard t t' seen = - if Type_Annotation.is_omitted (Type_Annotation.fastype_of [] t) then (t, seen) 
- else if member (op aconv) seen t then (t', seen) - else (t, t :: seen); - - fun prune (t as Const _, seen) = (t, seen) - | prune (t as Free (x, T), seen) = regard t (Free (x, Type_Annotation.ignore_type T)) seen - | prune (t as Var (xi, T), seen) = regard t (Var (xi, Type_Annotation.ignore_type T)) seen - | prune (t as Bound _, seen) = (t, seen) - | prune (Abs (x, T, t), seen) = - let val (t', seen') = prune (t, seen); - in (Abs (x, T, t'), seen') end - | prune (t1 $ t2, seen) = - let - val (t1', seen') = prune (t1, seen); - val (t2', seen'') = prune (t2, seen'); - in (t1' $ t2', seen'') end; - in #1 (prune (tm, [])) end; - -fun mark_atoms {structs, fixes} is_syntax_const ctxt tm = - let - val show_structs = Config.get ctxt show_structs; - - fun mark ((t as Const (c, _)) $ u) = - if member (op =) Pure_Thy.token_markers c - then t $ u else mark t $ mark u - | mark (t $ u) = mark t $ mark u - | mark (Abs (x, T, t)) = Abs (x, T, mark t) - | mark (t as Const (c, T)) = - if is_syntax_const c then t - else Const (Lexicon.mark_const c, T) - | mark (t as Free (x, T)) = - let val i = find_index (fn s => s = x) structs + 1 in - if i = 0 andalso member (op =) fixes x then - Const (Lexicon.mark_fixed x, T) - else if i = 1 andalso not show_structs then - Syntax.const "_struct" $ Syntax.const "_indexdefault" - else Syntax.const "_free" $ t - end - | mark (t as Var (xi, T)) = - if xi = Syntax_Ext.dddot_indexname then Const ("_DDDOT", T) - else Syntax.const "_var" $ t - | mark a = a; - in mark tm end; - -in - -fun term_to_ast idents is_syntax_const ctxt trf tm = - let - val show_types = Config.get ctxt show_types orelse Config.get ctxt show_sorts; - val show_markup = Config.get ctxt show_markup; - - fun ast_of tm = - (case strip_comb tm of - (t as Abs _, ts) => Ast.mk_appl (ast_of (Syntax_Trans.abs_tr' ctxt t)) (map ast_of ts) - | ((c as Const ("_free", _)), Free (x, T) :: ts) => - Ast.mk_appl (constrain (c $ Syntax.free x) T) (map ast_of ts) - | ((c as Const ("_var", _)), Var (xi, T) :: ts) => - Ast.mk_appl (constrain (c $ Syntax.var xi) T) (map ast_of ts) - | ((c as Const ("_bound", B)), Free (x, T) :: ts) => - let - val X = - if show_markup andalso not show_types orelse B <> dummyT then T - else dummyT; - in Ast.mk_appl (constrain (c $ Syntax.free x) X) (map ast_of ts) end - | (Const ("_idtdummy", T), ts) => - Ast.mk_appl (constrain (Syntax.const "_idtdummy") T) (map ast_of ts) - | (const as Const (c, T), ts) => trans c (Type_Annotation.smash T) ts - | (t, ts) => Ast.mk_appl (simple_ast_of ctxt t) (map ast_of ts)) - - and trans a T args = ast_of (trf a ctxt T args) - handle Match => Ast.mk_appl (Ast.Constant a) (map ast_of args) - - and constrain t T0 = - let - val T = - if show_markup andalso not show_types - then Type_Annotation.clean T0 - else Type_Annotation.smash T0; - in - if (show_types orelse show_markup) andalso T <> dummyT then - Ast.Appl [Ast.Constant "_constrain", simple_ast_of ctxt t, - ast_of_termT ctxt trf (term_of_typ ctxt T)] - else simple_ast_of ctxt t - end; - in - tm - |> mark_aprop - |> show_types ? 
prune_types ctxt - |> Variable.revert_bounds ctxt - |> mark_atoms idents is_syntax_const ctxt - |> ast_of - end; - -end; - - - -(** unparse **) - -local - -fun free_or_skolem ctxt x = - let - val m = - if Variable.is_fixed ctxt x orelse Syntax.is_pretty_global ctxt - then Markup.fixed x else Markup.intensify; - in - if Name.is_skolem x - then ([m, Markup.skolem], Variable.revert_fixed ctxt x) - else ([m, Markup.free], x) - end; - -fun var_or_skolem s = - (case Lexicon.read_variable s of - SOME (x, i) => - (case try Name.dest_skolem x of - NONE => (Markup.var, s) - | SOME x' => (Markup.skolem, Term.string_of_vname (x', i))) - | NONE => (Markup.var, s)); - -val typing_elem = YXML.output_markup_elem Markup.typing; -val sorting_elem = YXML.output_markup_elem Markup.sorting; - -fun unparse_t t_to_ast prt_t markup ctxt t = - let - val show_markup = Config.get ctxt show_markup; - val show_sorts = Config.get ctxt show_sorts; - val show_types = Config.get ctxt show_types orelse show_sorts; - - val syn = Proof_Context.syn_of ctxt; - val prtabs = Syntax.prtabs syn; - val trf = Syntax.print_ast_translation syn; - - fun markup_extern c = - (case Syntax.lookup_const syn c of - SOME "" => ([], c) - | SOME b => markup_extern b - | NONE => c |> Lexicon.unmark - {case_class = fn x => (markup_class ctxt x, Proof_Context.extern_class ctxt x), - case_type = fn x => (markup_type ctxt x, Proof_Context.extern_type ctxt x), - case_const = fn x => (markup_const ctxt x, Proof_Context.extern_const ctxt x), - case_fixed = fn x => free_or_skolem ctxt x, - case_default = fn x => ([], x)}); - - fun token_trans "_tfree" x = SOME (Pretty.mark_str (Markup.tfree, x)) - | token_trans "_tvar" x = SOME (Pretty.mark_str (Markup.tvar, x)) - | token_trans "_free" x = SOME (Pretty.marks_str (free_or_skolem ctxt x)) - | token_trans "_bound" x = SOME (Pretty.mark_str (Markup.bound, x)) - | token_trans "_loose" x = SOME (Pretty.mark_str (Markup.bad, x)) - | token_trans "_var" x = SOME (Pretty.mark_str (var_or_skolem x)) - | token_trans "_numeral" x = SOME (Pretty.mark_str (Markup.numeral, x)) - | token_trans "_inner_string" x = SOME (Pretty.mark_str (Markup.inner_string, x)) - | token_trans _ _ = NONE; - - fun markup_trans a [Ast.Variable x] = token_trans a x - | markup_trans "_constrain" [t, ty] = constrain_trans t ty - | markup_trans "_idtyp" [t, ty] = constrain_trans t ty - | markup_trans "_ofsort" [ty, s] = ofsort_trans ty s - | markup_trans _ _ = NONE - - and constrain_trans t ty = - if show_markup andalso not show_types then - let - val ((bg1, bg2), en) = typing_elem; - val bg = bg1 ^ Pretty.symbolic_output (pretty_typ_ast Markup.empty ty) ^ bg2; - in SOME (Pretty.raw_markup (bg, en) (0, [pretty_ast Markup.empty t])) end - else NONE - - and ofsort_trans ty s = - if show_markup andalso not show_sorts then - let - val ((bg1, bg2), en) = sorting_elem; - val bg = bg1 ^ Pretty.symbolic_output (pretty_typ_ast Markup.empty s) ^ bg2; - in SOME (Pretty.raw_markup (bg, en) (0, [pretty_typ_ast Markup.empty ty])) end - else NONE - - and pretty_typ_ast m ast = ast - |> Printer.pretty_typ_ast ctxt prtabs trf markup_trans markup_extern - |> Pretty.markup m - - and pretty_ast m ast = ast - |> prt_t ctxt prtabs trf markup_trans markup_extern - |> Pretty.markup m; - in - t_to_ast ctxt (Syntax.print_translation syn) t - |> Ast.normalize ctxt (Syntax.print_rules syn) - |> pretty_ast markup - end; - -in - -val unparse_sort = unparse_t sort_to_ast Printer.pretty_typ_ast (Markup.language_sort false); -val unparse_typ = unparse_t typ_to_ast 
Printer.pretty_typ_ast (Markup.language_type false); - -fun unparse_term ctxt = - let - val thy = Proof_Context.theory_of ctxt; - val syn = Proof_Context.syn_of ctxt; - val idents = Local_Syntax.idents_of (Proof_Context.syntax_of ctxt); - in - unparse_t (term_to_ast idents (is_some o Syntax.lookup_const syn)) - (Printer.pretty_term_ast (not (Pure_Thy.old_appl_syntax thy))) - (Markup.language_term false) ctxt - end; - -end; - - - -(** translations **) - -(* type propositions *) - -fun type_prop_tr' ctxt T [Const ("\\<^const>Pure.sort_constraint", _)] = - Syntax.const "_sort_constraint" $ term_of_typ (Config.put show_sorts true ctxt) T - | type_prop_tr' ctxt T [t] = - Syntax.const "_ofclass" $ term_of_typ ctxt T $ t - | type_prop_tr' _ T ts = raise TYPE ("type_prop_tr'", [T], ts); - - -(* type reflection *) - -fun type_tr' ctxt (Type ("itself", [T])) ts = - Term.list_comb (Syntax.const "_TYPE" $ term_of_typ ctxt T, ts) - | type_tr' _ _ _ = raise Match; - - -(* type constraints *) - -fun type_constraint_tr' ctxt (Type ("fun", [T, _])) (t :: ts) = - Term.list_comb (Syntax.const "_constrain" $ t $ term_of_typ ctxt T, ts) - | type_constraint_tr' _ _ _ = raise Match; - - -(* authentic syntax *) - -fun const_ast_tr intern ctxt asts = - (case asts of - [Ast.Appl [Ast.Constant "_constrain", Ast.Variable c, T as Ast.Variable p]] => - let - val pos = the_default Position.none (Term_Position.decode p); - val (c', _) = decode_const ctxt (c, [pos]); - val d = if intern then Lexicon.mark_const c' else c; - in Ast.Appl [Ast.Constant "_constrain", Ast.Constant d, T] end - | _ => raise Ast.AST ("const_ast_tr", asts)); - - -(* setup translations *) - -val _ = Theory.setup - (Sign.parse_ast_translation - [("_context_const", const_ast_tr true), - ("_context_xconst", const_ast_tr false)] #> - Sign.typed_print_translation - [("_type_prop", type_prop_tr'), - ("\\<^const>Pure.type", type_tr'), - ("_type_constraint_", type_constraint_tr')]); - - - -(** check/uncheck **) - -(* context-sensitive (un)checking *) - -type key = int * bool; - -structure Checks = Generic_Data -( - type 'a check = 'a list -> Proof.context -> ('a list * Proof.context) option; - type T = - ((key * ((string * typ check) * stamp) list) list * - (key * ((string * term check) * stamp) list) list); - val empty = ([], []); - val extend = I; - fun merge ((typ_checks1, term_checks1), (typ_checks2, term_checks2)) : T = - (AList.join (op =) (K (Library.merge (eq_snd (op =)))) (typ_checks1, typ_checks2), - AList.join (op =) (K (Library.merge (eq_snd (op =)))) (term_checks1, term_checks2)); -); - -fun print_checks ctxt = - let - fun split_checks checks = - List.partition (fn ((_, un), _) => not un) checks - |> pairself (map (fn ((i, _), fs) => (i, map (fst o fst) fs)) - #> sort (int_ord o pairself fst)); - fun pretty_checks kind checks = - checks |> map (fn (i, names) => Pretty.block - [Pretty.str (kind ^ " (stage " ^ signed_string_of_int i ^ "):"), - Pretty.brk 1, Pretty.strs names]); - - val (typs, terms) = Checks.get (Context.Proof ctxt); - val (typ_checks, typ_unchecks) = split_checks typs; - val (term_checks, term_unchecks) = split_checks terms; - in - pretty_checks "typ_checks" typ_checks @ - pretty_checks "term_checks" term_checks @ - pretty_checks "typ_unchecks" typ_unchecks @ - pretty_checks "term_unchecks" term_unchecks - end |> Pretty.writeln_chunks; - - -local - -fun context_check which (key: key) name f = - Checks.map (which (AList.map_default op = (key, []) (cons ((name, f), stamp ())))); - -fun simple_check eq f xs ctxt = - let val xs' = f 
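(* Editor's illustration (not part of the removed sources): a hypothetical use
   of the registration functions defined just below: a term check at stage 5
   that maps a user-supplied rewrite "my_rewrite" over all terms being checked:

     val _ = Context.>>
       (term_check 5 "my_check" (fn ctxt => map (my_rewrite ctxt)));

   simple_check wraps such a plain function so that a stage only reports
   progress when its result actually differs from the input. *)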
ctxt xs - in if eq_list eq (xs, xs') then NONE else SOME (xs', ctxt) end; - -in - -fun typ_check' stage = context_check apfst (stage, false); -fun term_check' stage = context_check apsnd (stage, false); -fun typ_uncheck' stage = context_check apfst (stage, true); -fun term_uncheck' stage = context_check apsnd (stage, true); - -fun typ_check key name f = typ_check' key name (simple_check (op =) f); -fun term_check key name f = term_check' key name (simple_check (op aconv) f); -fun typ_uncheck key name f = typ_uncheck' key name (simple_check (op =) f); -fun term_uncheck key name f = term_uncheck' key name (simple_check (op aconv) f); - -end; - - -local - -fun check_stage fs = perhaps_loop (perhaps_apply (map uncurry fs)); -fun check_all fs = perhaps_apply (map check_stage fs); - -fun check which uncheck ctxt0 xs0 = - let - val funs = which (Checks.get (Context.Proof ctxt0)) - |> map_filter (fn ((i, u), fs) => if uncheck = u then SOME (i, map (snd o fst) fs) else NONE) - |> Library.sort (int_ord o pairself fst) |> map snd - |> not uncheck ? map rev; - in #1 (perhaps (check_all funs) (xs0, ctxt0)) end; - -val apply_typ_check = check fst false; -val apply_term_check = check snd false; -val apply_typ_uncheck = check fst true; -val apply_term_uncheck = check snd true; - -in - -fun check_typs ctxt raw_tys = - let - val (sorting_report, tys) = Proof_Context.prepare_sortsT ctxt raw_tys; - val _ = if Context_Position.is_visible ctxt then Output.report sorting_report else (); - in - tys - |> apply_typ_check ctxt - |> Term_Sharing.typs (Proof_Context.theory_of ctxt) - end; - -fun check_terms ctxt raw_ts = - let - val (sorting_report, raw_ts') = Proof_Context.prepare_sorts ctxt raw_ts; - val (ts, ps) = Type_Infer_Context.prepare_positions ctxt raw_ts'; - - val tys = map (Logic.mk_type o snd) ps; - val (ts', tys') = ts @ tys - |> apply_term_check ctxt - |> chop (length ts); - val typing_report = - fold2 (fn (pos, _) => fn ty => - if Position.is_reported pos then - cons (Position.reported_text pos Markup.typing - (Syntax.string_of_typ ctxt (Logic.dest_type ty))) - else I) ps tys' []; - - val _ = - if Context_Position.is_visible ctxt then Output.report (sorting_report @ typing_report) - else (); - in Term_Sharing.terms (Proof_Context.theory_of ctxt) ts' end; - -fun check_props ctxt = map (Type.constraint propT) #> check_terms ctxt; - -val uncheck_typs = apply_typ_uncheck; -val uncheck_terms = apply_term_uncheck; - -end; - - -(* standard phases *) - -val _ = Context.>> - (typ_check 0 "standard" Proof_Context.standard_typ_check #> - term_check 0 "standard" - (fn ctxt => Type_Infer_Context.infer_types ctxt #> map (Proof_Context.expand_abbrevs ctxt)) #> - term_check 100 "standard_finish" Proof_Context.standard_term_check_finish #> - term_uncheck 0 "standard" Proof_Context.standard_term_uncheck); - - - -(** install operations **) - -val _ = Syntax.install_operations - {parse_sort = parse_sort, - parse_typ = parse_typ, - parse_term = parse_term false, - parse_prop = parse_term true, - unparse_sort = unparse_sort, - unparse_typ = unparse_typ, - unparse_term = unparse_term, - check_typs = check_typs, - check_terms = check_terms, - check_props = check_props, - uncheck_typs = uncheck_typs, - uncheck_terms = uncheck_terms}; - -end; diff --git a/core/Pure/Syntax/syntax_trans.ML b/core/Pure/Syntax/syntax_trans.ML deleted file mode 100644 index 7f6aae69..00000000 --- a/core/Pure/Syntax/syntax_trans.ML +++ /dev/null @@ -1,508 +0,0 @@ -(* Title: Pure/Syntax/syntax_trans.ML - Author: Tobias Nipkow and Markus Wenzel, TU 
Muenchen - -Syntax translation functions. -*) - -signature BASIC_SYNTAX_TRANS = -sig - val eta_contract: bool Config.T -end - -signature SYNTAX_TRANS = -sig - include BASIC_SYNTAX_TRANS - val bracketsN: string - val no_bracketsN: string - val no_brackets: unit -> bool - val type_bracketsN: string - val no_type_bracketsN: string - val no_type_brackets: unit -> bool - val abs_tr: term list -> term - val mk_binder_tr: string * string -> string * (Proof.context -> term list -> term) - val antiquote_tr: string -> term -> term - val quote_tr: string -> term -> term - val quote_antiquote_tr: string -> string -> string -> - string * (Proof.context -> term list -> term) - val non_typed_tr': (Proof.context -> term list -> term) -> - Proof.context -> typ -> term list -> term - val tappl_ast_tr': Ast.ast * Ast.ast list -> Ast.ast - val appl_ast_tr': Ast.ast * Ast.ast list -> Ast.ast - val applC_ast_tr': Ast.ast * Ast.ast list -> Ast.ast - val eta_contract_raw: Config.raw - val mark_bound_abs: string * typ -> term - val mark_bound_body: string * typ -> term - val bound_vars: (string * typ) list -> term -> term - val abs_tr': Proof.context -> term -> term - val atomic_abs_tr': string * typ * term -> term * term - val const_abs_tr': term -> term - val mk_binder_tr': string * string -> string * (Proof.context -> term list -> term) - val preserve_binder_abs_tr': string -> string -> string * (Proof.context -> term list -> term) - val preserve_binder_abs2_tr': string -> string -> string * (Proof.context -> term list -> term) - val variant_abs: string * typ * term -> string * term - val variant_abs': string * typ * term -> string * term - val dependent_tr': string * string -> term list -> term - val antiquote_tr': string -> term -> term - val quote_tr': string -> term -> term - val quote_antiquote_tr': string -> string -> string -> - string * (Proof.context -> term list -> term) - val update_name_tr': term -> term - val pure_parse_ast_translation: (string * (Proof.context -> Ast.ast list -> Ast.ast)) list - val pure_parse_translation: (string * (Proof.context -> term list -> term)) list - val pure_print_ast_translation: (string * (Proof.context -> Ast.ast list -> Ast.ast)) list - val struct_tr: string list -> string * (Proof.context -> term list -> term) - val struct_ast_tr': string list -> string * (Proof.context -> Ast.ast list -> Ast.ast) -end; - -structure Syntax_Trans: SYNTAX_TRANS = -struct - -structure Syntax = Lexicon.Syntax; - - -(* print mode *) - -val bracketsN = "brackets"; -val no_bracketsN = "no_brackets"; - -fun no_brackets () = - find_first (fn mode => mode = bracketsN orelse mode = no_bracketsN) - (print_mode_value ()) = SOME no_bracketsN; - -val type_bracketsN = "type_brackets"; -val no_type_bracketsN = "no_type_brackets"; - -fun no_type_brackets () = - find_first (fn mode => mode = type_bracketsN orelse mode = no_type_bracketsN) - (print_mode_value ()) <> SOME type_bracketsN; - - - -(** parse (ast) translations **) - -(* strip_positions *) - -fun strip_positions_ast_tr [ast] = Ast.strip_positions ast - | strip_positions_ast_tr asts = raise Ast.AST ("strip_positions_ast_tr", asts); - - -(* constify *) - -fun constify_ast_tr [Ast.Appl [c as Ast.Constant "_constrain", ast1, ast2]] = - Ast.Appl [c, constify_ast_tr [ast1], ast2] - | constify_ast_tr [Ast.Variable c] = Ast.Constant c - | constify_ast_tr asts = raise Ast.AST ("constify_ast_tr", asts); - - -(* type syntax *) - -fun tapp_ast_tr [ty, c] = Ast.Appl [c, ty] - | tapp_ast_tr asts = raise Ast.AST ("tapp_ast_tr", asts); - -fun tappl_ast_tr 
[ty, tys, c] = Ast.mk_appl c (ty :: Ast.unfold_ast "_types" tys) - | tappl_ast_tr asts = raise Ast.AST ("tappl_ast_tr", asts); - -fun bracket_ast_tr [dom, cod] = Ast.fold_ast_p "\\<^type>fun" (Ast.unfold_ast "_types" dom, cod) - | bracket_ast_tr asts = raise Ast.AST ("bracket_ast_tr", asts); - - -(* application *) - -fun appl_ast_tr [f, args] = Ast.Appl (f :: Ast.unfold_ast "_args" args) - | appl_ast_tr asts = raise Ast.AST ("appl_ast_tr", asts); - -fun applC_ast_tr [f, args] = Ast.Appl (f :: Ast.unfold_ast "_cargs" args) - | applC_ast_tr asts = raise Ast.AST ("applC_ast_tr", asts); - - -(* abstraction *) - -fun idtyp_ast_tr [x, ty] = Ast.Appl [Ast.Constant "_constrain", x, ty] - | idtyp_ast_tr asts = raise Ast.AST ("idtyp_ast_tr", asts); - -fun lambda_ast_tr [pats, body] = Ast.fold_ast_p "_abs" (Ast.unfold_ast "_pttrns" pats, body) - | lambda_ast_tr asts = raise Ast.AST ("lambda_ast_tr", asts); - -fun absfree_proper (x, T) t = - if Name.is_internal x - then error ("Illegal internal variable in abstraction: " ^ quote x) - else absfree (x, T) t; - -fun abs_tr [Free x, t] = absfree_proper x t - | abs_tr [Const ("_idtdummy", T), t] = absdummy T t - | abs_tr [Const ("_constrain", _) $ x $ tT, t] = - Syntax.const "_constrainAbs" $ abs_tr [x, t] $ tT - | abs_tr ts = raise TERM ("abs_tr", ts); - - -(* binder *) - -fun mk_binder_tr (syn, name) = - let - fun err ts = raise TERM ("binder_tr: " ^ syn, ts) - fun binder_tr [Const ("_idts", _) $ idt $ idts, t] = binder_tr [idt, binder_tr [idts, t]] - | binder_tr [x, t] = - let val abs = abs_tr [x, t] handle TERM _ => err [x, t] - in Syntax.const name $ abs end - | binder_tr ts = err ts; - in (syn, fn _ => binder_tr) end; - - -(* type propositions *) - -fun mk_type ty = - Syntax.const "_constrain" $ - Syntax.const "\\<^const>Pure.type" $ (Syntax.const "\\<^type>itself" $ ty); - -fun ofclass_tr [ty, cls] = cls $ mk_type ty - | ofclass_tr ts = raise TERM ("ofclass_tr", ts); - -fun sort_constraint_tr [ty] = Syntax.const "\\<^const>Pure.sort_constraint" $ mk_type ty - | sort_constraint_tr ts = raise TERM ("sort_constraint_tr", ts); - - -(* meta propositions *) - -fun aprop_tr [t] = Syntax.const "_constrain" $ t $ Syntax.const "\\<^type>prop" - | aprop_tr ts = raise TERM ("aprop_tr", ts); - - -(* meta implication *) - -fun bigimpl_ast_tr (asts as [asms, concl]) = - let val prems = - (case Ast.unfold_ast_p "_asms" asms of - (asms', Ast.Appl [Ast.Constant "_asm", asm']) => asms' @ [asm'] - | _ => raise Ast.AST ("bigimpl_ast_tr", asts)) - in Ast.fold_ast_p "\\<^const>Pure.imp" (prems, concl) end - | bigimpl_ast_tr asts = raise Ast.AST ("bigimpl_ast_tr", asts); - - -(* type/term reflection *) - -fun type_tr [ty] = mk_type ty - | type_tr ts = raise TERM ("type_tr", ts); - - -(* dddot *) - -fun dddot_tr ts = Term.list_comb (Syntax.var Syntax_Ext.dddot_indexname, ts); - - -(* quote / antiquote *) - -fun antiquote_tr name = - let - fun tr i ((t as Const (c, _)) $ u) = - if c = name then tr i u $ Bound i - else tr i t $ tr i u - | tr i (t $ u) = tr i t $ tr i u - | tr i (Abs (x, T, t)) = Abs (x, T, tr (i + 1) t) - | tr _ a = a; - in tr 0 end; - -fun quote_tr name t = Abs ("s", dummyT, antiquote_tr name (Term.incr_boundvars 1 t)); - -fun quote_antiquote_tr quoteN antiquoteN name = - let - fun tr [t] = Syntax.const name $ quote_tr antiquoteN t - | tr ts = raise TERM ("quote_tr", ts); - in (quoteN, fn _ => tr) end; - - -(* corresponding updates *) - -fun update_name_tr (Free (x, T) :: ts) = list_comb (Free (suffix "_update" x, T), ts) - | update_name_tr (Const (x, T) :: 
ts) = list_comb (Const (suffix "_update" x, T), ts) - | update_name_tr (((c as Const ("_constrain", _)) $ t $ ty) :: ts) = - if Term_Position.is_position ty then list_comb (c $ update_name_tr [t] $ ty, ts) - else - list_comb (c $ update_name_tr [t] $ - (Lexicon.fun_type $ - (Lexicon.fun_type $ Lexicon.dummy_type $ ty) $ Lexicon.dummy_type), ts) - | update_name_tr ts = raise TERM ("update_name_tr", ts); - - -(* indexed syntax *) - -fun indexdefault_ast_tr [] = - Ast.Appl [Ast.Constant "_index", - Ast.Appl [Ast.Constant "_struct", Ast.Constant "_indexdefault"]] - | indexdefault_ast_tr asts = raise Ast.AST ("indexdefault_ast_tr", asts); - -fun indexvar_ast_tr [] = Ast.Appl [Ast.Constant "_index", Ast.Variable "some_index"] - | indexvar_ast_tr asts = raise Ast.AST ("indexvar_ast_tr", asts); - -fun struct_ast_tr [Ast.Appl [Ast.Constant "_index", ast]] = ast - | struct_ast_tr asts = Ast.mk_appl (Ast.Constant "_struct") asts; - -fun index_tr [t] = t - | index_tr ts = raise TERM ("index_tr", ts); - -fun struct_tr structs = - ("_struct", fn _ => - (fn [Const ("_indexdefault", _)] => - (case structs of - x :: _ => Syntax.const (Lexicon.mark_fixed x) - | _ => error "Illegal reference to implicit structure") - | ts => raise TERM ("struct_tr", ts))); - - - -(** print (ast) translations **) - -(* types *) - -fun non_typed_tr' f ctxt _ ts = f ctxt ts; - - -(* type syntax *) - -fun tappl_ast_tr' (f, []) = raise Ast.AST ("tappl_ast_tr'", [f]) - | tappl_ast_tr' (f, [ty]) = Ast.Appl [Ast.Constant "_tapp", ty, f] - | tappl_ast_tr' (f, ty :: tys) = - Ast.Appl [Ast.Constant "_tappl", ty, Ast.fold_ast "_types" tys, f]; - -fun fun_ast_tr' asts = - if no_brackets () orelse no_type_brackets () then raise Match - else - (case Ast.unfold_ast_p "\\<^type>fun" (Ast.Appl (Ast.Constant "\\<^type>fun" :: asts)) of - (dom as _ :: _ :: _, cod) - => Ast.Appl [Ast.Constant "_bracket", Ast.fold_ast "_types" dom, cod] - | _ => raise Match); - - -(* application *) - -fun appl_ast_tr' (f, []) = raise Ast.AST ("appl_ast_tr'", [f]) - | appl_ast_tr' (f, args) = Ast.Appl [Ast.Constant "_appl", f, Ast.fold_ast "_args" args]; - -fun applC_ast_tr' (f, []) = raise Ast.AST ("applC_ast_tr'", [f]) - | applC_ast_tr' (f, args) = Ast.Appl [Ast.Constant "_applC", f, Ast.fold_ast "_cargs" args]; - - -(* partial eta-contraction before printing *) - -fun eta_abs (Abs (a, T, t)) = - (case eta_abs t of - t' as Const ("_aprop", _) $ _ => Abs (a, T, t') - | t' as f $ u => - (case eta_abs u of - Bound 0 => - if Term.is_dependent f then Abs (a, T, t') - else incr_boundvars ~1 f - | _ => Abs (a, T, t')) - | t' => Abs (a, T, t')) - | eta_abs t = t; - -val eta_contract_raw = Config.declare_option ("eta_contract", @{here}); -val eta_contract = Config.bool eta_contract_raw; - -fun eta_contr ctxt tm = - if Config.get ctxt eta_contract then eta_abs tm else tm; - - -(* abstraction *) - -fun mark_bound_abs (x, T) = Const ("_bound", T --> T) $ Free (x, T); -fun mark_bound_body (x, T) = Const ("_bound", dummyT) $ Free (x, T); - -fun bound_vars vars body = - subst_bounds (map mark_bound_abs (Term.rename_wrt_term body vars), body); - -fun strip_abss vars_of body_of tm = - let - val vars = vars_of tm; - val body = body_of tm; - val rev_new_vars = Term.rename_wrt_term body vars; - fun subst (x, T) b = - if Name.is_internal x andalso not (Term.is_dependent b) - then (Const ("_idtdummy", T), incr_boundvars ~1 b) - else (mark_bound_abs (x, T), Term.subst_bound (mark_bound_body (x, T), b)); - val (rev_vars', body') = fold_map subst rev_new_vars body; - in (rev rev_vars', 
body') end; - - -fun abs_tr' ctxt tm = - uncurry (fold_rev (fn x => fn t => Syntax.const "_abs" $ x $ t)) - (strip_abss strip_abs_vars strip_abs_body (eta_contr ctxt tm)); - -fun atomic_abs_tr' (x, T, t) = - let val [xT] = Term.rename_wrt_term t [(x, T)] - in (mark_bound_abs xT, subst_bound (mark_bound_body xT, t)) end; - -fun abs_ast_tr' asts = - (case Ast.unfold_ast_p "_abs" (Ast.Appl (Ast.Constant "_abs" :: asts)) of - ([], _) => raise Ast.AST ("abs_ast_tr'", asts) - | (xs, body) => Ast.Appl [Ast.Constant "_lambda", Ast.fold_ast "_pttrns" xs, body]); - -fun const_abs_tr' t = - (case eta_abs t of - Abs (_, _, t') => - if Term.is_dependent t' then raise Match - else incr_boundvars ~1 t' - | _ => raise Match); - - -(* binders *) - -fun mk_binder_tr' (name, syn) = - let - fun mk_idts [] = raise Match (*abort translation*) - | mk_idts [idt] = idt - | mk_idts (idt :: idts) = Syntax.const "_idts" $ idt $ mk_idts idts; - - fun tr' t = - let - val (xs, bd) = strip_abss (strip_qnt_vars name) (strip_qnt_body name) t; - in Syntax.const syn $ mk_idts xs $ bd end; - - fun binder_tr' (t :: ts) = Term.list_comb (tr' (Syntax.const name $ t), ts) - | binder_tr' [] = raise Match; - in (name, fn _ => binder_tr') end; - -fun preserve_binder_abs_tr' name syn = (name, fn _ => fn Abs abs :: ts => - let val (x, t) = atomic_abs_tr' abs - in list_comb (Syntax.const syn $ x $ t, ts) end); - -fun preserve_binder_abs2_tr' name syn = (name, fn _ => fn A :: Abs abs :: ts => - let val (x, t) = atomic_abs_tr' abs - in list_comb (Syntax.const syn $ x $ A $ t, ts) end); - - -(* idtyp constraints *) - -fun idtyp_ast_tr' a [Ast.Appl [Ast.Constant "_constrain", x, ty], xs] = - Ast.Appl [Ast.Constant a, Ast.Appl [Ast.Constant "_idtyp", x, ty], xs] - | idtyp_ast_tr' _ _ = raise Match; - - -(* meta implication *) - -fun impl_ast_tr' asts = - if no_brackets () then raise Match - else - (case Ast.unfold_ast_p "\\<^const>Pure.imp" - (Ast.Appl (Ast.Constant "\\<^const>Pure.imp" :: asts)) of - (prems as _ :: _ :: _, concl) => - let - val (asms, asm) = split_last prems; - val asms' = Ast.fold_ast_p "_asms" (asms, Ast.Appl [Ast.Constant "_asm", asm]); - in Ast.Appl [Ast.Constant "_bigimpl", asms', concl] end - | _ => raise Match); - - -(* dependent / nondependent quantifiers *) - -fun var_abs mark (x, T, b) = - let val (x', _) = Name.variant x (Term.declare_term_names b Name.context) - in (x', subst_bound (mark (x', T), b)) end; - -val variant_abs = var_abs Free; -val variant_abs' = var_abs mark_bound_abs; - -fun dependent_tr' (q, r) (A :: Abs (x, T, B) :: ts) = - if Term.is_dependent B then - let val (x', B') = variant_abs' (x, dummyT, B); - in Term.list_comb (Syntax.const q $ mark_bound_abs (x', T) $ A $ B', ts) end - else Term.list_comb (Syntax.const r $ A $ incr_boundvars ~1 B, ts) - | dependent_tr' _ _ = raise Match; - - -(* quote / antiquote *) - -fun antiquote_tr' name = - let - fun tr' i (t $ u) = - if u aconv Bound i then Syntax.const name $ tr' i t - else tr' i t $ tr' i u - | tr' i (Abs (x, T, t)) = Abs (x, T, tr' (i + 1) t) - | tr' i a = if a aconv Bound i then raise Match else a; - in tr' 0 end; - -fun quote_tr' name (Abs (_, _, t)) = Term.incr_boundvars ~1 (antiquote_tr' name t) - | quote_tr' _ _ = raise Match; - -fun quote_antiquote_tr' quoteN antiquoteN name = - let - fun tr' (t :: ts) = Term.list_comb (Syntax.const quoteN $ quote_tr' antiquoteN t, ts) - | tr' _ = raise Match; - in (name, fn _ => tr') end; - - -(* corresponding updates *) - -local - -fun upd_type (Type ("fun", [Type ("fun", [_, T]), _])) = T - | 
upd_type _ = dummyT; - -fun upd_tr' (x_upd, T) = - (case try (unsuffix "_update") x_upd of - SOME x => (x, upd_type T) - | NONE => raise Match); - -in - -fun update_name_tr' (Free x) = Free (upd_tr' x) - | update_name_tr' ((c as Const ("_free", _)) $ Free x) = c $ Free (upd_tr' x) - | update_name_tr' (Const x) = Const (upd_tr' x) - | update_name_tr' _ = raise Match; - -end; - - -(* indexed syntax *) - -fun index_ast_tr' [Ast.Appl [Ast.Constant "_struct", ast]] = ast - | index_ast_tr' _ = raise Match; - -fun struct_ast_tr' structs = - ("_struct", fn _ => - (fn [Ast.Constant "_indexdefault"] => - (case structs of - x :: _ => Ast.Appl [Ast.Constant "_free", Ast.Variable x] - | _ => raise Match) - | _ => raise Match)); - - - -(** Pure translations **) - -val pure_parse_ast_translation = - [("_strip_positions", fn _ => strip_positions_ast_tr), - ("_constify", fn _ => constify_ast_tr), - ("_tapp", fn _ => tapp_ast_tr), - ("_tappl", fn _ => tappl_ast_tr), - ("_bracket", fn _ => bracket_ast_tr), - ("_appl", fn _ => appl_ast_tr), - ("_applC", fn _ => applC_ast_tr), - ("_lambda", fn _ => lambda_ast_tr), - ("_idtyp", fn _ => idtyp_ast_tr), - ("_bigimpl", fn _ => bigimpl_ast_tr), - ("_indexdefault", fn _ => indexdefault_ast_tr), - ("_indexvar", fn _ => indexvar_ast_tr), - ("_struct", fn _ => struct_ast_tr)]; - -val pure_parse_translation = - [("_abs", fn _ => abs_tr), - ("_aprop", fn _ => aprop_tr), - ("_ofclass", fn _ => ofclass_tr), - ("_sort_constraint", fn _ => sort_constraint_tr), - ("_TYPE", fn _ => type_tr), - ("_DDDOT", fn _ => dddot_tr), - ("_update_name", fn _ => update_name_tr), - ("_index", fn _ => index_tr)]; - -val pure_print_ast_translation = - [("\\<^type>fun", fn _ => fun_ast_tr'), - ("_abs", fn _ => abs_ast_tr'), - ("_idts", fn _ => idtyp_ast_tr' "_idts"), - ("_pttrns", fn _ => idtyp_ast_tr' "_pttrns"), - ("\\<^const>Pure.imp", fn _ => impl_ast_tr'), - ("_index", fn _ => index_ast_tr')]; - -end; - -structure Basic_Syntax_Trans: BASIC_SYNTAX_TRANS = Syntax_Trans; -open Basic_Syntax_Trans; diff --git a/core/Pure/Syntax/term_position.ML b/core/Pure/Syntax/term_position.ML deleted file mode 100644 index f2895f0f..00000000 --- a/core/Pure/Syntax/term_position.ML +++ /dev/null @@ -1,72 +0,0 @@ -(* Title: Pure/Syntax/term_position.ML - Author: Makarius - -Encoded position within term syntax trees. 
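(* Editor's illustration (not part of the removed sources): a hedged sketch of
   how Term_Position smuggles source positions through the untyped syntax
   layers, assuming some position value "pos":

     val s = Term_Position.encode pos;         (* YXML-marked string *)
     val SOME pos' = Term_Position.decode s;   (* recovers the position *)

   Such strings travel inside parse trees as ordinary variable names, and the
   "_constrain"/"_constrainAbs"/"_ofsort" markers listed below attach them to
   the subterms they annotate. *)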
-*) - -signature TERM_POSITION = -sig - val pretty: Position.T -> Pretty.T - val encode: Position.T -> string - val decode: string -> Position.T option - val decode_position: term -> (Position.T * typ) option - val decode_positionT: typ -> Position.T option - val decode_positionS: sort -> Position.T list * sort - val is_position: term -> bool - val is_positionT: typ -> bool - val markers: string list - val strip_positions: term -> term -end; - -structure Term_Position: TERM_POSITION = -struct - -(* markup *) - -val position_dummy = ""; -val position_text = XML.Text position_dummy; - -fun pretty pos = - Pretty.markup (Position.markup pos Markup.position) [Pretty.str position_dummy]; - -fun encode pos = - YXML.string_of (XML.Elem (Position.markup pos Markup.position, [position_text])); - -fun decode str = - (case YXML.parse_body str handle Fail msg => error msg of - [XML.Elem ((name, props), [arg])] => - if name = Markup.positionN andalso arg = position_text - then SOME (Position.of_properties props) - else NONE - | _ => NONE); - - -(* positions within parse trees *) - -fun decode_position (Free (x, _)) = - (case decode x of - SOME pos => SOME (pos, TFree (x, dummyS)) - | NONE => NONE) - | decode_position _ = NONE; - -fun decode_positionT (TFree (x, _)) = decode x - | decode_positionT _ = NONE; - -fun decode_positionS cs = - let val (ps, sort) = List.partition (is_some o decode) cs - in (map (the o decode) ps, sort) end; - -val is_position = is_some o decode_position; -val is_positionT = is_some o decode_positionT; - -val markers = ["_constrain", "_constrainAbs", "_ofsort"]; - -fun strip_positions ((t as Const (c, _)) $ u $ v) = - if member (op =) markers c andalso is_position v - then strip_positions u - else t $ strip_positions u $ strip_positions v - | strip_positions (t $ u) = strip_positions t $ strip_positions u - | strip_positions (Abs (x, T, t)) = Abs (x, T, strip_positions t) - | strip_positions t = t; - -end; diff --git a/core/Pure/Syntax/type_annotation.ML b/core/Pure/Syntax/type_annotation.ML deleted file mode 100644 index d6694e9f..00000000 --- a/core/Pure/Syntax/type_annotation.ML +++ /dev/null @@ -1,65 +0,0 @@ -(* Title: Pure/Syntax/type_annotation.ML - Author: Makarius - -Type annotations within syntax trees, notably for pretty printing. -*) - -signature TYPE_ANNOTATION = -sig - val ignore_type: typ -> typ - val ignore_free_types: term -> term - val is_ignored: typ -> bool - val is_omitted: typ -> bool - val clean: typ -> typ - val smash: typ -> typ - val fastype_of: typ list -> term -> typ -end; - -structure Type_Annotation: TYPE_ANNOTATION = -struct - -(* annotations *) - -fun ignore_type T = Type ("_ignore_type", [T]); - -val ignore_free_types = Term.map_aterms (fn Free (x, T) => Free (x, ignore_type T) | a => a); - -fun is_ignored (Type ("_ignore_type", _)) = true - | is_ignored _ = false; - -fun is_omitted T = is_ignored T orelse T = dummyT; - -fun clean (Type ("_ignore_type", [T])) = clean T - | clean (Type (a, Ts)) = Type (a, map clean Ts) - | clean T = T; - -fun smash (Type ("_ignore_type", [_])) = dummyT - | smash (Type (a, Ts)) = Type (a, map smash Ts) - | smash T = T; - - -(* determine type -- propagate annotations *) - -local - -fun dest_fun ignored (Type ("fun", [_, T])) = SOME ((ignored ? 
ignore_type) T) - | dest_fun _ (Type ("_ignore_type", [T])) = dest_fun true T - | dest_fun _ _ = NONE; - -in - -fun fastype_of Ts (t $ u) = - (case dest_fun false (fastype_of Ts t) of - SOME T => T - | NONE => raise TERM ("fastype_of: expected function type", [t $ u])) - | fastype_of _ (Const (_, T)) = T - | fastype_of _ (Free (_, T)) = T - | fastype_of _ (Var (_, T)) = T - | fastype_of Ts (Bound i) = - (nth Ts i handle General.Subscript => raise TERM ("fastype_of: Bound", [Bound i])) - | fastype_of Ts (Abs (_, T, u)) = T --> fastype_of (T :: Ts) u; - -end; - -end; - diff --git a/core/Pure/System/command_line.ML b/core/Pure/System/command_line.ML deleted file mode 100644 index da47dfd8..00000000 --- a/core/Pure/System/command_line.ML +++ /dev/null @@ -1,27 +0,0 @@ -(* Title: Pure/System/command_line.ML - Author: Makarius - -Support for Isabelle/ML command line tools. -*) - -signature COMMAND_LINE = -sig - val tool: (unit -> int) -> unit - val tool0: (unit -> unit) -> unit -end; - -structure Command_Line: COMMAND_LINE = -struct - -fun tool body = - uninterruptible (fn restore_attributes => fn () => - let - val rc = - restore_attributes body () handle exn => - Exn.capture_exit 2 (fn () => (Runtime.exn_error_message exn; raise exn)) (); - in if rc = 0 then () else exit rc end) (); - -fun tool0 body = tool (fn () => (body (); 0)); - -end; - diff --git a/core/Pure/System/command_line.scala b/core/Pure/System/command_line.scala deleted file mode 100644 index a49abbab..00000000 --- a/core/Pure/System/command_line.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* Title: Pure/System/command_line.scala - Author: Makarius - -Support for Isabelle/Scala command line tools. -*/ - -package isabelle - - -object Command_Line -{ - object Chunks - { - private def chunks(list: List[String]): List[List[String]] = - list.indexWhere(_ == "\n") match { - case -1 => List(list) - case i => - val (chunk, rest) = list.splitAt(i) - chunk :: chunks(rest.tail) - } - def unapplySeq(list: List[String]): Option[List[List[String]]] = Some(chunks(list)) - } - - var debug = false - - def tool(body: => Int): Nothing = - { - val rc = - try { body } - catch { - case exn: Throwable => - if (debug) exn.printStackTrace - Output.error_message(Exn.message(exn)) - Exn.return_code(exn, 2) - } - sys.exit(rc) - } - - def tool0(body: => Unit): Nothing = tool { body; 0 } -} - diff --git a/core/Pure/System/invoke_scala.ML b/core/Pure/System/invoke_scala.ML deleted file mode 100644 index e287979f..00000000 --- a/core/Pure/System/invoke_scala.ML +++ /dev/null @@ -1,69 +0,0 @@ -(* Title: Pure/System/invoke_scala.ML - Author: Makarius - -JVM method invocation service via Isabelle/Scala. 
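
The removed ML module keeps a table of pending promises keyed by a freshly generated request id: promise_method registers a promise and emits a protocol message, and fulfill later resolves it according to a numeric tag. A compact Scala sketch of that same pattern, with invented names and a simplified tag scheme (only tag "1" treated as success), might look like this.

  import scala.collection.mutable
  import scala.concurrent.{Future, Promise}

  object PendingCalls {
    private val promises = mutable.Map.empty[String, Promise[String]]
    private var counter = 0L

    // Register a promise under a fresh id and hand the id to the other side.
    def request(send: String => Unit): Future[String] = synchronized {
      counter += 1
      val id = counter.toString
      val p = Promise[String]()
      promises += (id -> p)
      send(id)
      p.future
    }

    // Resolve (and forget) the pending promise for id, as success or failure
    // depending on the tag carried by the reply.
    def fulfill(id: String, tag: String, result: String): Unit = synchronized {
      promises.remove(id).foreach { p =>
        if (tag == "1") p.success(result)
        else p.failure(new RuntimeException(result))
      }
    }
  }
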
-*) - -signature INVOKE_SCALA = -sig - val method: string -> string -> string - val promise_method: string -> string -> string future - exception Null -end; - -structure Invoke_Scala: INVOKE_SCALA = -struct - -val _ = Session.protocol_handler "isabelle.Invoke_Scala"; - - -(* pending promises *) - -val new_id = string_of_int o Counter.make (); - -val promises = - Synchronized.var "Invoke_Scala.promises" (Symtab.empty: string future Symtab.table); - - -(* method invocation *) - -fun promise_method name arg = - let - val id = new_id (); - fun abort () = Output.protocol_message (Markup.cancel_scala id) []; - val promise = Future.promise abort : string future; - val _ = Synchronized.change promises (Symtab.update (id, promise)); - val _ = Output.protocol_message (Markup.invoke_scala name id) [arg]; - in promise end; - -fun method name arg = Future.join (promise_method name arg); - - -(* fulfill *) - -exception Null; - -fun fulfill id tag res = - let - val result = - (case tag of - "0" => Exn.Exn Null - | "1" => Exn.Res res - | "2" => Exn.Exn (ERROR res) - | "3" => Exn.Exn (Fail res) - | "4" => Exn.Exn Exn.Interrupt - | _ => raise Fail "Bad tag"); - val promise = - Synchronized.change_result promises - (fn tab => (the (Symtab.lookup tab id), Symtab.delete id tab)); - val _ = Future.fulfill_result promise result; - in () end; - -val _ = - Isabelle_Process.protocol_command "Invoke_Scala.fulfill" - (fn [id, tag, res] => - fulfill id tag res - handle exn => if Exn.is_interrupt exn then () else reraise exn); - -end; - diff --git a/core/Pure/System/invoke_scala.scala b/core/Pure/System/invoke_scala.scala deleted file mode 100644 index c4e5fcda..00000000 --- a/core/Pure/System/invoke_scala.scala +++ /dev/null @@ -1,129 +0,0 @@ -/* Title: Pure/System/invoke_scala.scala - Author: Makarius - -JVM method invocation service via Isabelle/Scala. 
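
On the Scala side, the removed handler resolves a name of the form Class.method to a static String => String method by reflection, checks that it is static and returns a String, and only then invokes it. A condensed, standalone version of that lookup (error messages and names simplified) is sketched here.

  import java.lang.reflect.Modifier

  object Reflect {
    def stringMethod(name: String): String => String = {
      val i = name.lastIndexOf('.')
      require(i > 0, s"Malformed method name: $name")
      val m = Class.forName(name.substring(0, i))
        .getMethod(name.substring(i + 1), classOf[String])
      require(Modifier.isStatic(m.getModifiers), s"Not a static method: $m")
      require(m.getReturnType == classOf[String], s"Bad method return type: $m")
      (arg: String) => m.invoke(null, arg).asInstanceOf[String]
    }
  }

  // For example: Reflect.stringMethod("java.lang.System.getProperty")("java.version")
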
-*/ - -package isabelle - - -import java.lang.reflect.{Method, Modifier, InvocationTargetException} -import java.util.concurrent.{Future => JFuture} - -import scala.util.matching.Regex - - -object Invoke_Scala -{ - /* method reflection */ - - private val Ext = new Regex("(.*)\\.([^.]*)") - private val STRING = Class.forName("java.lang.String") - - private def get_method(name: String): String => String = - name match { - case Ext(class_name, method_name) => - val m = - try { Class.forName(class_name).getMethod(method_name, STRING) } - catch { - case _: ClassNotFoundException | _: NoSuchMethodException => - error("No such method: " + quote(name)) - } - if (!Modifier.isStatic(m.getModifiers)) error("Not at static method: " + m.toString) - if (m.getReturnType != STRING) error("Bad method return type: " + m.toString) - - (arg: String) => { - try { m.invoke(null, arg).asInstanceOf[String] } - catch { - case e: InvocationTargetException if e.getCause != null => - throw e.getCause - } - } - case _ => error("Malformed method name: " + quote(name)) - } - - - /* method invocation */ - - object Tag extends Enumeration - { - val NULL = Value("0") - val OK = Value("1") - val ERROR = Value("2") - val FAIL = Value("3") - val INTERRUPT = Value("4") - } - - def method(name: String, arg: String): (Tag.Value, String) = - Exn.capture { get_method(name) } match { - case Exn.Res(f) => - Exn.capture { f(arg) } match { - case Exn.Res(null) => (Tag.NULL, "") - case Exn.Res(res) => (Tag.OK, res) - case Exn.Exn(Exn.Interrupt()) => (Tag.INTERRUPT, "") - case Exn.Exn(e) => (Tag.ERROR, Exn.message(e)) - } - case Exn.Exn(e) => (Tag.FAIL, Exn.message(e)) - } -} - - -/* protocol handler */ - -class Invoke_Scala extends Session.Protocol_Handler -{ - private var futures = Map.empty[String, JFuture[Unit]] - - private def fulfill(prover: Prover, id: String, tag: Invoke_Scala.Tag.Value, res: String): Unit = - synchronized - { - if (futures.isDefinedAt(id)) { - prover.protocol_command("Invoke_Scala.fulfill", id, tag.toString, res) - futures -= id - } - } - - private def cancel(prover: Prover, id: String, future: JFuture[Unit]) - { - future.cancel(true) - fulfill(prover, id, Invoke_Scala.Tag.INTERRUPT, "") - } - - private def invoke_scala(prover: Prover, msg: Prover.Protocol_Output): Boolean = synchronized - { - msg.properties match { - case Markup.Invoke_Scala(name, id) => - futures += (id -> - Simple_Thread.submit_task { - val (tag, result) = Invoke_Scala.method(name, msg.text) - fulfill(prover, id, tag, result) - }) - true - case _ => false - } - } - - private def cancel_scala(prover: Prover, msg: Prover.Protocol_Output): Boolean = synchronized - { - msg.properties match { - case Markup.Cancel_Scala(id) => - futures.get(id) match { - case Some(future) => cancel(prover, id, future) - case None => - } - true - case _ => false - } - } - - override def stop(prover: Prover): Unit = synchronized - { - for ((id, future) <- futures) cancel(prover, id, future) - futures = Map.empty - } - - val functions = Map( - Markup.INVOKE_SCALA -> invoke_scala _, - Markup.CANCEL_SCALA -> cancel_scala _) -} - diff --git a/core/Pure/System/isabelle_charset.scala b/core/Pure/System/isabelle_charset.scala deleted file mode 100644 index b26645b4..00000000 --- a/core/Pure/System/isabelle_charset.scala +++ /dev/null @@ -1,52 +0,0 @@ -/* Title: Pure/System/isabelle_charset.scala - Author: Makarius - -Charset for Isabelle symbols. 
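
The class removed here exists only to register an extra charset name while reusing the standard UTF-8 coders. The essence, as a standalone sketch with invented class and charset names, is a pass-through Charset.

  import java.nio.charset.{Charset, CharsetDecoder, CharsetEncoder}

  class PassThroughCharset(name: String) extends Charset(name, Array.empty[String]) {
    private val utf8 = Charset.forName("UTF-8")
    override def contains(cs: Charset): Boolean = utf8.contains(cs)
    override def newDecoder(): CharsetDecoder = utf8.newDecoder()
    override def newEncoder(): CharsetEncoder = utf8.newEncoder()
  }

  // e.g. new PassThroughCharset("X-UTF-8-Sketch")
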
-*/ - -package isabelle - - -import java.nio.Buffer -import java.nio.{ByteBuffer, CharBuffer} -import java.nio.charset.{Charset, CharsetDecoder, CharsetEncoder, CoderResult} -import java.nio.charset.spi.CharsetProvider - - -object Isabelle_Charset -{ - val name: String = "UTF-8-Isabelle-test" // FIXME - lazy val charset: Charset = new Isabelle_Charset -} - - -class Isabelle_Charset extends Charset(Isabelle_Charset.name, null) -{ - override def contains(cs: Charset): Boolean = - cs.name.equalsIgnoreCase(UTF8.charset_name) || UTF8.charset.contains(cs) - - override def newDecoder(): CharsetDecoder = UTF8.charset.newDecoder - - override def newEncoder(): CharsetEncoder = UTF8.charset.newEncoder -} - - -class Isabelle_Charset_Provider extends CharsetProvider -{ - override def charsetForName(name: String): Charset = - { - // FIXME inactive - // if (name.equalsIgnoreCase(Isabelle_Charset.name)) Isabelle_Charset.charset - // else null - null - } - - override def charsets(): java.util.Iterator[Charset] = - { - import scala.collection.JavaConversions._ - // FIXME inactive - // Iterator(Isabelle_Charset.charset) - Iterator() - } -} - diff --git a/core/Pure/System/isabelle_font.scala b/core/Pure/System/isabelle_font.scala deleted file mode 100644 index 8f66eafc..00000000 --- a/core/Pure/System/isabelle_font.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* Title: Pure/System/isabelle_font.scala - Author: Makarius - -Isabelle font support. -*/ - -package isabelle - - -import java.awt.{GraphicsEnvironment, Font} -import java.io.{FileInputStream, BufferedInputStream} -import javafx.scene.text.{Font => JFX_Font} - - -object Isabelle_Font -{ - def apply(family: String = "IsabelleText", size: Int = 1, bold: Boolean = false): Font = - new Font(family, if (bold) Font.BOLD else Font.PLAIN, size) - - def install_fonts() - { - val ge = GraphicsEnvironment.getLocalGraphicsEnvironment() - for (font <- Path.split(Isabelle_System.getenv_strict("ISABELLE_FONTS"))) - ge.registerFont(Font.createFont(Font.TRUETYPE_FONT, font.file)) - } - - def install_fonts_jfx() - { - for (font <- Path.split(Isabelle_System.getenv_strict("ISABELLE_FONTS"))) { - val stream = new BufferedInputStream(new FileInputStream(font.file)) - try { JFX_Font.loadFont(stream, 1.0) } - finally { stream.close } - } - } -} - diff --git a/core/Pure/System/isabelle_process.ML b/core/Pure/System/isabelle_process.ML deleted file mode 100644 index bbc558d9..00000000 --- a/core/Pure/System/isabelle_process.ML +++ /dev/null @@ -1,212 +0,0 @@ -(* Title: Pure/System/isabelle_process.ML - Author: Makarius - -Isabelle process wrapper, based on private fifos for maximum -robustness and performance, or local socket for maximum portability. 
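
Both ends of the removed process wrapper frame protocol commands the same way: one header line with comma-separated byte lengths, followed by the raw chunks, so the reader can recover message boundaries without any escaping. A self-contained Scala sketch of that framing (names invented; the real code works over the prover's System_Channel) follows.

  import java.io.{InputStream, OutputStream}
  import java.nio.charset.StandardCharsets.UTF_8

  object Chunked {
    // Header "len1,len2,...\n", then the chunk bodies themselves.
    def write(out: OutputStream, chunks: List[Array[Byte]]): Unit = {
      out.write(chunks.map(_.length).mkString("", ",", "\n").getBytes(UTF_8))
      chunks.foreach(c => out.write(c))
      out.flush()
    }

    def read(in: InputStream): List[Array[Byte]] = {
      val header = Iterator.continually(in.read())
        .takeWhile(c => c != -1 && c != '\n').map(_.toChar).mkString
      header.split(',').toList.filter(_.nonEmpty).map { len =>
        val buf = new Array[Byte](len.toInt)
        var i = 0
        while (i < buf.length) {
          val m = in.read(buf, i, buf.length - i)
          require(m != -1, s"bad chunk: unexpected EOF after $i of ${buf.length} bytes")
          i += m
        }
        buf
      }
    }
  }
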
-*) - -signature ISABELLE_PROCESS = -sig - val is_active: unit -> bool - val protocol_command: string -> (string list -> unit) -> unit - val reset_tracing: Document_ID.exec -> unit - val crashes: exn list Synchronized.var - val init_fifos: string -> string -> unit - val init_socket: string -> unit -end; - -structure Isabelle_Process: ISABELLE_PROCESS = -struct - -(* print mode *) - -val isabelle_processN = "isabelle_process"; - -fun is_active () = Print_Mode.print_mode_active isabelle_processN; - -val _ = Output.add_mode isabelle_processN Output.default_output Output.default_escape; -val _ = Markup.add_mode isabelle_processN YXML.output_markup; - - -(* protocol commands *) - -local - -val commands = - Synchronized.var "Isabelle_Process.commands" - (Symtab.empty: (string list -> unit) Symtab.table); - -in - -fun protocol_command name cmd = - Synchronized.change commands (fn cmds => - (if not (Symtab.defined cmds name) then () - else warning ("Redefining Isabelle protocol command " ^ quote name); - Symtab.update (name, cmd) cmds)); - -fun run_command name args = - (case Symtab.lookup (Synchronized.value commands) name of - NONE => error ("Undefined Isabelle protocol command " ^ quote name) - | SOME cmd => - (Runtime.debugging NONE cmd args handle exn => - error ("Isabelle protocol command failure: " ^ quote name ^ "\n" ^ - Runtime.exn_message exn))); - -end; - - -(* restricted tracing messages *) - -val tracing_messages = - Synchronized.var "tracing_messages" (Inttab.empty: int Inttab.table); - -fun reset_tracing exec_id = - Synchronized.change tracing_messages (Inttab.delete_safe exec_id); - -fun update_tracing () = - (case Position.parse_id (Position.thread_data ()) of - NONE => () - | SOME exec_id => - let - val ok = - Synchronized.change_result tracing_messages (fn tab => - let - val n = the_default 0 (Inttab.lookup tab exec_id) + 1; - val ok = n <= Options.default_int "editor_tracing_messages"; - in (ok, Inttab.update (exec_id, n) tab) end); - in - if ok then () - else - let - val (text, promise) = Active.dialog_text (); - val _ = - writeln ("Tracing paused. 
" ^ text "Stop" ^ ", or continue with next " ^ - text "100" ^ ", " ^ text "1000" ^ ", " ^ text "10000" ^ " messages?") - val m = Markup.parse_int (Future.join promise) - handle Fail _ => error "Stopped"; - in - Synchronized.change tracing_messages - (Inttab.map_default (exec_id, 0) (fn k => k - m)) - end - end); - - -(* output channels *) - -val serial_props = Markup.serial_properties o serial; - -fun init_channels channel = - let - val _ = TextIO.StreamIO.setBufferMode (TextIO.getOutstream TextIO.stdOut, IO.LINE_BUF); - val _ = TextIO.StreamIO.setBufferMode (TextIO.getOutstream TextIO.stdErr, IO.LINE_BUF); - - val msg_channel = Message_Channel.make channel; - - fun message name props body = - Message_Channel.send msg_channel (Message_Channel.message name props body); - - fun standard_message props name body = - if forall (fn s => s = "") body then () - else - message name - (fold_rev Properties.put props (Position.properties_of (Position.thread_data ()))) body; - in - Output.status_fn := standard_message [] Markup.statusN; - Output.report_fn := standard_message [] Markup.reportN; - Output.result_fn := - (fn props => fn s => standard_message (props @ serial_props ()) Markup.resultN s); - Output.writeln_fn := (fn s => standard_message (serial_props ()) Markup.writelnN s); - Output.tracing_fn := - (fn s => (update_tracing (); standard_message (serial_props ()) Markup.tracingN s)); - Output.warning_fn := (fn s => standard_message (serial_props ()) Markup.warningN s); - Output.error_message_fn := - (fn (i, s) => standard_message (Markup.serial_properties i) Markup.errorN s); - Output.system_message_fn := message Markup.systemN []; - Output.protocol_message_fn := message Markup.protocolN; - Output.urgent_message_fn := ! Output.writeln_fn; - Output.prompt_fn := ignore; - message Markup.initN [] [Session.welcome ()]; - msg_channel - end; - - -(* protocol loop -- uninterruptible *) - -val crashes = Synchronized.var "Isabelle_Process.crashes" ([]: exn list); - -local - -fun recover crash = - (Synchronized.change crashes (cons crash); - Output.physical_stderr - "Recovered from Isabelle process crash -- see also Isabelle_Process.crashes\n"); - -fun read_chunk channel len = - let - val n = - (case Int.fromString len of - SOME n => n - | NONE => error ("Isabelle process: malformed header " ^ quote len)); - val chunk = System_Channel.inputN channel n; - val i = size chunk; - in - if i <> n then - error ("Isabelle process: bad chunk (unexpected EOF after " ^ - string_of_int i ^ " of " ^ string_of_int n ^ " bytes)") - else chunk - end; - -fun read_command channel = - (case System_Channel.input_line channel of - NONE => raise Runtime.TERMINATE - | SOME line => map (read_chunk channel) (space_explode "," line)); - -fun task_context e = - Future.task_context "Isabelle_Process.loop" (Future.new_group NONE) e (); - -in - -fun loop channel = - let val continue = - (case read_command channel of - [] => (Output.system_message "Isabelle process: no input"; true) - | name :: args => (task_context (fn () => run_command name args); true)) - handle Runtime.TERMINATE => false - | exn => (Runtime.exn_system_message exn handle crash => recover crash; true); - in - if continue then loop channel - else (Future.shutdown (); Execution.reset (); ()) - end; - -end; - - -(* init *) - -val default_modes1 = - [Syntax_Trans.no_bracketsN, Syntax_Trans.no_type_bracketsN, Graph_Display.active_graphN]; -val default_modes2 = [Symbol.xsymbolsN, isabelle_processN, Pretty.symbolicN]; - -val init = uninterruptible (fn _ => fn rendezvous => 
- let - val _ = SHA1_Samples.test () - handle exn as Fail msg => (Output.physical_stderr (msg ^ "\n"); reraise exn); - val _ = Output.physical_stderr Symbol.STX; - - val _ = Printer.show_markup_default := true; - val _ = Context.set_thread_data NONE; - val _ = - Unsynchronized.change print_mode - (fn mode => (mode @ default_modes1) |> fold (update op =) default_modes2); - - val channel = rendezvous (); - val msg_channel = init_channels channel; - val _ = Session.init_protocol_handlers (); - val _ = (loop |> Unsynchronized.setmp Toplevel.quiet true) channel; - in Message_Channel.shutdown msg_channel end); - -fun init_fifos fifo1 fifo2 = init (fn () => System_Channel.fifo_rendezvous fifo1 fifo2); -fun init_socket name = init (fn () => System_Channel.socket_rendezvous name); - -end; - diff --git a/core/Pure/System/isabelle_process.scala b/core/Pure/System/isabelle_process.scala deleted file mode 100644 index 3d78611d..00000000 --- a/core/Pure/System/isabelle_process.scala +++ /dev/null @@ -1,318 +0,0 @@ -/* Title: Pure/System/isabelle_process.scala - Author: Makarius - Options: :folding=explicit:collapseFolds=1: - -Isabelle process management -- always reactive due to multi-threaded I/O. -*/ - -package isabelle - - -import java.io.{InputStream, OutputStream, BufferedOutputStream, IOException} - - -class Isabelle_Process( - receiver: Prover.Message => Unit = Console.println(_), - prover_args: List[String] = Nil) -{ - /* text and tree data */ - - def encode(s: String): String = Symbol.encode(s) - def decode(s: String): String = Symbol.decode(s) - - val xml_cache = new XML.Cache() - - - /* output */ - - private def system_output(text: String) - { - receiver(new Prover.Output(XML.Elem(Markup(Markup.SYSTEM, Nil), List(XML.Text(text))))) - } - - private def protocol_output(props: Properties.T, bytes: Bytes) - { - receiver(new Prover.Protocol_Output(props, bytes)) - } - - private def output(kind: String, props: Properties.T, body: XML.Body) - { - if (kind == Markup.INIT) system_channel.accepted() - - val main = XML.Elem(Markup(kind, props), Protocol.clean_message(body)) - val reports = Protocol.message_reports(props, body) - for (msg <- main :: reports) receiver(new Prover.Output(xml_cache.elem(msg))) - } - - private def exit_message(rc: Int) - { - output(Markup.EXIT, Markup.Return_Code(rc), List(XML.Text("Return code: " + rc.toString))) - } - - - - /** process manager **/ - - def command_line(channel: System_Channel, args: List[String]): List[String] = - Isabelle_System.getenv_strict("ISABELLE_PROCESS") :: (channel.isabelle_args ::: args) - - private val system_channel = System_Channel() - - private val process = - try { - val cmdline = command_line(system_channel, prover_args) - new Isabelle_System.Managed_Process(null, null, false, cmdline: _*) - } - catch { case e: IOException => system_channel.accepted(); throw(e) } - - private val (_, process_result) = - Simple_Thread.future("process_result") { process.join } - - private def terminate_process() - { - try { process.terminate } - catch { case e: IOException => system_output("Failed to terminate Isabelle: " + e.getMessage) } - } - - private val process_manager = Simple_Thread.fork("process_manager") - { - val (startup_failed, startup_errors) = - { - var finished: Option[Boolean] = None - val result = new StringBuilder(100) - while (finished.isEmpty && (process.stderr.ready || !process_result.is_finished)) { - while (finished.isEmpty && process.stderr.ready) { - try { - val c = process.stderr.read - if (c == 2) finished = Some(true) - else result 
+= c.toChar - } - catch { case _: IOException => finished = Some(false) } - } - Thread.sleep(10) - } - (finished.isEmpty || !finished.get, result.toString.trim) - } - if (startup_errors != "") system_output(startup_errors) - - process.stdin.close - if (startup_failed) { - terminate_process() - process_result.join - exit_message(127) - } - else { - val (command_stream, message_stream) = system_channel.rendezvous() - - command_input_init(command_stream) - val stdout = physical_output(false) - val stderr = physical_output(true) - val message = message_output(message_stream) - - val rc = process_result.join - system_output("process terminated") - command_input_close() - for (thread <- List(stdout, stderr, message)) thread.join - system_output("process_manager terminated") - exit_message(rc) - } - system_channel.accepted() - } - - - /* management methods */ - - def join() { process_manager.join() } - - def interrupt() - { - try { process.interrupt } - catch { case e: IOException => system_output("Failed to interrupt Isabelle: " + e.getMessage) } - } - - def terminate() - { - command_input_close() - system_output("Terminating Isabelle process") - terminate_process() - } - - - - /** process streams **/ - - /* command input */ - - private var command_input: Option[Consumer_Thread[List[Bytes]]] = None - - private def command_input_close(): Unit = command_input.foreach(_.shutdown) - - private def command_input_init(raw_stream: OutputStream) - { - val name = "command_input" - val stream = new BufferedOutputStream(raw_stream) - command_input = - Some( - Consumer_Thread.fork(name)( - consume = - { - case chunks => - try { - Bytes(chunks.map(_.length).mkString("", ",", "\n")).write(stream) - chunks.foreach(_.write(stream)) - stream.flush - true - } - catch { case e: IOException => system_output(name + ": " + e.getMessage); false } - }, - finish = { case () => stream.close; system_output(name + " terminated") } - ) - ) - } - - - /* physical output */ - - private def physical_output(err: Boolean): Thread = - { - val (name, reader, markup) = - if (err) ("standard_error", process.stderr, Markup.STDERR) - else ("standard_output", process.stdout, Markup.STDOUT) - - Simple_Thread.fork(name) { - try { - var result = new StringBuilder(100) - var finished = false - while (!finished) { - //{{{ - var c = -1 - var done = false - while (!done && (result.length == 0 || reader.ready)) { - c = reader.read - if (c >= 0) result.append(c.asInstanceOf[Char]) - else done = true - } - if (result.length > 0) { - output(markup, Nil, List(XML.Text(decode(result.toString)))) - result.length = 0 - } - else { - reader.close - finished = true - } - //}}} - } - } - catch { case e: IOException => system_output(name + ": " + e.getMessage) } - system_output(name + " terminated") - } - } - - - /* message output */ - - private def message_output(stream: InputStream): Thread = - { - class EOF extends Exception - class Protocol_Error(msg: String) extends Exception(msg) - - val name = "message_output" - Simple_Thread.fork(name) { - val default_buffer = new Array[Byte](65536) - var c = -1 - - def read_int(): Int = - //{{{ - { - var n = 0 - c = stream.read - if (c == -1) throw new EOF - while (48 <= c && c <= 57) { - n = 10 * n + (c - 48) - c = stream.read - } - if (c != 10) - throw new Protocol_Error("malformed header: expected integer followed by newline") - else n - } - //}}} - - def read_chunk_bytes(): (Array[Byte], Int) = - //{{{ - { - val n = read_int() - val buf = - if (n <= default_buffer.size) default_buffer - else new Array[Byte](n) 
- - var i = 0 - var m = 0 - do { - m = stream.read(buf, i, n - i) - if (m != -1) i += m - } - while (m != -1 && n > i) - - if (i != n) - throw new Protocol_Error("bad chunk (unexpected EOF after " + i + " of " + n + " bytes)") - - (buf, n) - } - //}}} - - def read_chunk(): XML.Body = - { - val (buf, n) = read_chunk_bytes() - YXML.parse_body_failsafe(UTF8.decode_chars(decode, buf, 0, n)) - } - - try { - do { - try { - val header = read_chunk() - header match { - case List(XML.Elem(Markup(name, props), Nil)) => - val kind = name.intern - if (kind == Markup.PROTOCOL) { - val (buf, n) = read_chunk_bytes() - protocol_output(props, Bytes(buf, 0, n)) - } - else { - val body = read_chunk() - output(kind, props, body) - } - case _ => - read_chunk() - throw new Protocol_Error("bad header: " + header.toString) - } - } - catch { case _: EOF => } - } - while (c != -1) - } - catch { - case e: IOException => system_output("Cannot read message:\n" + e.getMessage) - case e: Protocol_Error => system_output("Malformed message:\n" + e.getMessage) - } - stream.close - - system_output(name + " terminated") - } - } - - - - /** protocol commands **/ - - def protocol_command_bytes(name: String, args: Bytes*): Unit = - command_input match { - case Some(thread) => thread.send(Bytes(name) :: args.toList) - case None => error("Uninitialized command input thread") - } - - def protocol_command(name: String, args: String*) - { - receiver(new Prover.Input(name, args.toList)) - protocol_command_bytes(name, args.map(Bytes(_)): _*) - } -} diff --git a/core/Pure/System/isabelle_system.ML b/core/Pure/System/isabelle_system.ML deleted file mode 100644 index f91d2d2c..00000000 --- a/core/Pure/System/isabelle_system.ML +++ /dev/null @@ -1,128 +0,0 @@ -(* Title: Pure/System/isabelle_system.ML - Author: Makarius - -Isabelle system support. 
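
The removed ML support module mostly shells out for file operations; its one reusable idea is the tool lookup, which walks a colon-separated directory list and takes the first readable, executable file of the requested name. A Scala sketch of that lookup (object and function names are illustrative) is given here.

  import java.io.{File => JFile}

  object Tools {
    // First directory on a colon-separated search path containing a readable,
    // executable file with the given name.
    def findTool(searchPath: String, name: String): Option[JFile] =
      searchPath.split(':').iterator
        .map(dir => new JFile(dir, name))
        .find(f => f.isFile && f.canRead && f.canExecute)
  }

  // e.g. Tools.findTool(sys.env.getOrElse("PATH", ""), "bash")
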
-*) - -signature ISABELLE_SYSTEM = -sig - val isabelle_tool: string -> string -> int - val mkdirs: Path.T -> unit - val mkdir: Path.T -> unit - val copy_dir: Path.T -> Path.T -> unit - val copy_file: Path.T -> Path.T -> unit - val copy_file_base: Path.T * Path.T -> Path.T -> unit - val create_tmp_path: string -> string -> Path.T - val with_tmp_file: string -> string -> (Path.T -> 'a) -> 'a - val with_tmp_dir: string -> (Path.T -> 'a) -> 'a - val with_tmp_fifo: (Path.T -> 'a) -> 'a - val bash_output: string -> string * int - val bash: string -> int -end; - -structure Isabelle_System: ISABELLE_SYSTEM = -struct - -(* bash *) - -fun bash_output s = - let - val {out, err, rc, ...} = Bash.process s; - val _ = warning (trim_line err); - in (out, rc) end; - -fun bash s = - let - val (out, rc) = bash_output s; - val _ = writeln (trim_line out); - in rc end; - - -(* system commands *) - -fun isabelle_tool name args = - (case space_explode ":" (getenv "ISABELLE_TOOLS") |> get_first (fn dir => - let val path = File.platform_path (Path.append (Path.explode dir) (Path.basic name)) in - if can OS.FileSys.modTime path andalso - not (OS.FileSys.isDir path) andalso - OS.FileSys.access (path, [OS.FileSys.A_READ, OS.FileSys.A_EXEC]) - then SOME path - else NONE - end handle OS.SysErr _ => NONE) of - SOME path => bash (File.shell_quote path ^ " " ^ args) - | NONE => (warning ("Unknown Isabelle tool: " ^ name); 2)); - -fun system_command cmd = - if bash cmd <> 0 then error ("System command failed: " ^ cmd) - else (); - - -(* directory operations *) - -fun mkdirs path = system_command ("mkdir -p " ^ File.shell_path path); - -fun mkdir path = - if File.is_dir path then () else OS.FileSys.mkDir (File.platform_path path); - -fun copy_dir src dst = - if File.eq (src, dst) then () - else (system_command ("cp -p -R -f " ^ File.shell_path src ^ "/. 
" ^ File.shell_path dst); ()); - -fun copy_file src0 dst0 = - let - val src = Path.expand src0; - val dst = Path.expand dst0; - val target = if File.is_dir dst then Path.append dst (Path.base src) else dst; - in - if File.eq (src, target) then () - else - ignore (system_command ("cp -p -f " ^ File.shell_path src ^ " " ^ File.shell_path target)) - end; - -fun copy_file_base (base_dir, src0) target_dir = - let - val src = Path.expand src0; - val src_dir = Path.dir src; - val _ = - if Path.starts_basic src then () - else error ("Illegal path specification " ^ Path.print src ^ " beyond base directory"); - val _ = mkdirs (Path.append target_dir src_dir); - in copy_file (Path.append base_dir src) (Path.append target_dir src) end; - - -(* tmp files *) - -fun create_tmp_path name ext = - let - val path = File.tmp_path (Path.basic (name ^ serial_string ()) |> Path.ext ext); - val _ = File.exists path andalso - raise Fail ("Temporary file already exists: " ^ Path.print path); - in path end; - -fun with_tmp_file name ext f = - let val path = create_tmp_path name ext - in Exn.release (Exn.capture f path before ignore (try File.rm path)) end; - - -(* tmp dirs *) - -fun rm_tree path = system_command ("rm -r -f " ^ File.shell_path path); - -fun with_tmp_dir name f = - let - val path = create_tmp_path name ""; - val _ = mkdirs path; - in Exn.release (Exn.capture f path before ignore (try rm_tree path)) end; - - -(* fifo *) - -fun with_tmp_fifo f = - with_tmp_file "isabelle-fifo-" "" - (fn path => - (case bash_output ("mkfifo -m 600 " ^ File.shell_path path) of - (_, 0) => f path - | (out, _) => error (trim_line out))); - -end; - diff --git a/core/Pure/System/isabelle_system.scala b/core/Pure/System/isabelle_system.scala deleted file mode 100644 index 64de6a22..00000000 --- a/core/Pure/System/isabelle_system.scala +++ /dev/null @@ -1,564 +0,0 @@ -/* Title: Pure/System/isabelle_system.scala - Author: Makarius - -Fundamental Isabelle system environment: quasi-static module with -optional init operation. 
-*/ - -package isabelle - - -import java.util.regex.Pattern -import java.io.{File => JFile, BufferedReader, InputStreamReader, - BufferedWriter, OutputStreamWriter, IOException} -import java.nio.file.{Path => JPath, Files, SimpleFileVisitor, FileVisitResult} -import java.nio.file.attribute.BasicFileAttributes -import java.net.{URL, URLDecoder, MalformedURLException} - -import scala.util.matching.Regex - - -object Isabelle_System -{ - /** bootstrap information **/ - - def jdk_home(): String = - { - val java_home = System.getProperty("java.home", "") - val home = new JFile(java_home) - val parent = home.getParent - if (home.getName == "jre" && parent != null && - (new JFile(new JFile(parent, "bin"), "javac")).exists) parent - else java_home - } - - private def find_cygwin_root(cygwin_root0: String = ""): String = - { - require(Platform.is_windows) - - val cygwin_root1 = System.getenv("CYGWIN_ROOT") - val cygwin_root2 = System.getProperty("cygwin.root") - - if (cygwin_root0 != null && cygwin_root0 != "") cygwin_root0 - else if (cygwin_root1 != null && cygwin_root1 != "") cygwin_root1 - else if (cygwin_root2 != null && cygwin_root2 != "") cygwin_root2 - else error("Cannot determine Cygwin root directory") - } - - - - /** implicit settings environment **/ - - @volatile private var _settings: Option[Map[String, String]] = None - - def settings(): Map[String, String] = - { - if (_settings.isEmpty) init() // unsynchronized check - _settings.get - } - - /* - Isabelle home precedence: - (1) isabelle_home as explicit argument - (2) ISABELLE_HOME process environment variable (e.g. inherited from running isabelle tool) - (3) isabelle.home system property (e.g. via JVM application boot process) - */ - def init(isabelle_home: String = "", cygwin_root: String = ""): Unit = synchronized { - if (_settings.isEmpty) { - import scala.collection.JavaConversions._ - - def set_cygwin_root() - { - if (Platform.is_windows) - _settings = Some(_settings.getOrElse(Map.empty) + - ("CYGWIN_ROOT" -> find_cygwin_root(cygwin_root))) - } - - set_cygwin_root() - - val env = - { - val user_home = System.getProperty("user.home", "") - val isabelle_app = System.getProperty("isabelle.app", "") - - val env0 = sys.env + ("ISABELLE_JDK_HOME" -> posix_path(jdk_home())) - val env1 = - if (user_home == "" || env0.isDefinedAt("HOME")) env0 - else env0 + ("HOME" -> user_home) - val env2 = - if (isabelle_app == "") env1 - else env1 + ("ISABELLE_APP" -> "true") - - env2 - } - - val system_home = - if (isabelle_home != null && isabelle_home != "") isabelle_home - else - env.get("ISABELLE_HOME") match { - case None | Some("") => - val path = System.getProperty("isabelle.home", "") - if (path == "") error("Unknown Isabelle home directory") - else path - case Some(path) => path - } - - val settings = - { - val dump = JFile.createTempFile("settings", null) - dump.deleteOnExit - try { - val shell_prefix = - if (Platform.is_windows) List(find_cygwin_root(cygwin_root) + "\\bin\\bash", "-l") - else Nil - val cmdline = - shell_prefix ::: List(system_home + "/bin/isabelle", "getenv", "-d", dump.toString) - val (output, rc) = process_output(raw_execute(null, env, true, cmdline: _*)) - if (rc != 0) error(output) - - val entries = - (for (entry <- File.read(dump) split "\u0000" if entry != "") yield { - val i = entry.indexOf('=') - if (i <= 0) (entry -> "") - else (entry.substring(0, i) -> entry.substring(i + 1)) - }).toMap - entries + ("PATH" -> entries("PATH_JVM")) - "PATH_JVM" - } - finally { dump.delete } - } - _settings = Some(settings) - 
set_cygwin_root() - } - } - - - /* getenv */ - - def getenv(name: String): String = settings.getOrElse(name, "") - - def getenv_strict(name: String): String = - { - val value = getenv(name) - if (value != "") value else error("Undefined environment variable: " + name) - } - - def get_cygwin_root(): String = getenv_strict("CYGWIN_ROOT") - - - - /** file-system operations **/ - - /* jvm_path */ - - private val Cygdrive = new Regex("/cygdrive/([a-zA-Z])($|/.*)") - private val Named_Root = new Regex("//+([^/]*)(.*)") - - def jvm_path(posix_path: String): String = - if (Platform.is_windows) { - val result_path = new StringBuilder - val rest = - posix_path match { - case Cygdrive(drive, rest) => - result_path ++= (Word.uppercase(drive) + ":" + JFile.separator) - rest - case Named_Root(root, rest) => - result_path ++= JFile.separator - result_path ++= JFile.separator - result_path ++= root - rest - case path if path.startsWith("/") => - result_path ++= get_cygwin_root() - path - case path => path - } - for (p <- space_explode('/', rest) if p != "") { - val len = result_path.length - if (len > 0 && result_path(len - 1) != JFile.separatorChar) - result_path += JFile.separatorChar - result_path ++= p - } - result_path.toString - } - else posix_path - - - /* posix_path */ - - def posix_path(jvm_path: String): String = - if (Platform.is_windows) { - val Platform_Root = new Regex("(?i)" + - Pattern.quote(get_cygwin_root()) + """(?:\\+|\z)(.*)""") - val Drive = new Regex("""([a-zA-Z]):\\*(.*)""") - - jvm_path.replace('/', '\\') match { - case Platform_Root(rest) => "/" + rest.replace('\\', '/') - case Drive(letter, rest) => - "/cygdrive/" + Word.lowercase(letter) + - (if (rest == "") "" else "/" + rest.replace('\\', '/')) - case path => path.replace('\\', '/') - } - } - else jvm_path - - def posix_path(file: JFile): String = posix_path(file.getPath) - - def posix_path_url(name: String): String = - try { - val url = new URL(name) - if (url.getProtocol == "file") - posix_path(URLDecoder.decode(url.getPath, UTF8.charset_name)) - else name - } - catch { case _: MalformedURLException => posix_path(name) } - - - /* misc path specifications */ - - def standard_path(path: Path): String = path.expand.implode - - def platform_path(path: Path): String = jvm_path(standard_path(path)) - def platform_file(path: Path): JFile = new JFile(platform_path(path)) - - def platform_file_url(raw_path: Path): String = - { - val path = raw_path.expand - require(path.is_absolute) - val s = platform_path(path).replaceAll(" ", "%20") - if (!Platform.is_windows) "file://" + s - else if (s.startsWith("\\\\")) "file:" + s.replace('\\', '/') - else "file:///" + s.replace('\\', '/') - } - - def shell_path(path: Path): String = "'" + standard_path(path) + "'" - def shell_path(file: JFile): String = "'" + posix_path(file) + "'" - - - /* source files of Isabelle/ML bootstrap */ - - def source_file(path: Path): Option[Path] = - { - def check(p: Path): Option[Path] = if (p.is_file) Some(p) else None - - if (path.is_absolute || path.is_current) check(path) - else { - check(Path.explode("~~/src/Pure") + path) orElse - (if (getenv("ML_SOURCES") == "") None - else check(Path.explode("$ML_SOURCES") + path)) - } - } - - - /* mkdirs */ - - def mkdirs(path: Path) - { - path.file.mkdirs - if (!path.is_dir) error("Cannot create directory: " + quote(platform_path(path))) - } - - - - /** external processes **/ - - /* raw execute for bootstrapping */ - - def raw_execute(cwd: JFile, env: Map[String, String], redirect: Boolean, args: String*): Process = - 
{ - val cmdline = new java.util.LinkedList[String] - for (s <- args) cmdline.add(s) - - val proc = new ProcessBuilder(cmdline) - if (cwd != null) proc.directory(cwd) - if (env != null) { - proc.environment.clear - for ((x, y) <- env) proc.environment.put(x, y) - } - proc.redirectErrorStream(redirect) - proc.start - } - - private def process_output(proc: Process): (String, Int) = - { - proc.getOutputStream.close - val output = File.read_stream(proc.getInputStream) - val rc = - try { proc.waitFor } - finally { - proc.getInputStream.close - proc.getErrorStream.close - proc.destroy - Thread.interrupted - } - (output, rc) - } - - - /* plain execute */ - - def execute_env(cwd: JFile, env: Map[String, String], redirect: Boolean, args: String*): Process = - { - val cmdline = - if (Platform.is_windows) List(get_cygwin_root() + "\\bin\\env.exe") ::: args.toList - else args - val env1 = if (env == null) settings else settings ++ env - raw_execute(cwd, env1, redirect, cmdline: _*) - } - - def execute(redirect: Boolean, args: String*): Process = - execute_env(null, null, redirect, args: _*) - - - /* managed process */ - - class Managed_Process(cwd: JFile, env: Map[String, String], redirect: Boolean, args: String*) - { - private val params = - List(standard_path(Path.explode("~~/lib/scripts/process")), "group", "-", "no_script") - private val proc = execute_env(cwd, env, redirect, (params ::: args.toList):_*) - - - // channels - - val stdin: BufferedWriter = - new BufferedWriter(new OutputStreamWriter(proc.getOutputStream, UTF8.charset)) - - val stdout: BufferedReader = - new BufferedReader(new InputStreamReader(proc.getInputStream, UTF8.charset)) - - val stderr: BufferedReader = - new BufferedReader(new InputStreamReader(proc.getErrorStream, UTF8.charset)) - - - // signals - - private val pid = stdout.readLine - - private def kill_cmd(signal: String): Int = - execute(true, "/usr/bin/env", "bash", "-c", "kill -" + signal + " -" + pid).waitFor - - private def kill(signal: String): Boolean = - Exn.Interrupt.postpone { kill_cmd(signal); kill_cmd("0") == 0 } getOrElse true - - private def multi_kill(signal: String): Boolean = - { - var running = true - var count = 10 - while (running && count > 0) { - if (kill(signal)) { - Exn.Interrupt.postpone { - Thread.sleep(100) - count -= 1 - } - } - else running = false - } - running - } - - def interrupt() { multi_kill("INT") } - def terminate() { multi_kill("INT") && multi_kill("TERM") && kill("KILL"); proc.destroy } - - - // JVM shutdown hook - - private val shutdown_hook = new Thread { override def run = terminate() } - - try { Runtime.getRuntime.addShutdownHook(shutdown_hook) } - catch { case _: IllegalStateException => } - - private def cleanup() = - try { Runtime.getRuntime.removeShutdownHook(shutdown_hook) } - catch { case _: IllegalStateException => } - - - /* result */ - - def join: Int = { val rc = proc.waitFor; cleanup(); rc } - } - - - /* tmp files */ - - private def isabelle_tmp_prefix(): JFile = - { - val path = Path.explode("$ISABELLE_TMP_PREFIX") - mkdirs(path) - platform_file(path) - } - - def tmp_file[A](name: String, ext: String = ""): JFile = - { - val suffix = if (ext == "") "" else "." 
+ ext - val file = Files.createTempFile(isabelle_tmp_prefix().toPath, name, suffix).toFile - file.deleteOnExit - file - } - - def with_tmp_file[A](name: String, ext: String = "")(body: JFile => A): A = - { - val file = tmp_file(name, ext) - try { body(file) } finally { file.delete } - } - - - /* tmp dirs */ - - def rm_tree(root: JFile) - { - root.delete - if (root.isDirectory) { - Files.walkFileTree(root.toPath, - new SimpleFileVisitor[JPath] { - override def visitFile(file: JPath, attrs: BasicFileAttributes): FileVisitResult = - { - Files.delete(file) - FileVisitResult.CONTINUE - } - - override def postVisitDirectory(dir: JPath, e: IOException): FileVisitResult = - { - if (e == null) { - Files.delete(dir) - FileVisitResult.CONTINUE - } - else throw e - } - } - ) - } - } - - def tmp_dir(name: String): JFile = - { - val dir = Files.createTempDirectory(isabelle_tmp_prefix().toPath, name).toFile - dir.deleteOnExit - dir - } - - def with_tmp_dir[A](name: String)(body: JFile => A): A = - { - val dir = tmp_dir(name) - try { body(dir) } finally { rm_tree(dir) } - } - - - /* bash */ - - final case class Bash_Result(out_lines: List[String], err_lines: List[String], rc: Int) - { - def out: String = cat_lines(out_lines) - def err: String = cat_lines(err_lines) - def add_err(s: String): Bash_Result = copy(err_lines = err_lines ::: List(s)) - def set_rc(i: Int): Bash_Result = copy(rc = i) - - def check_error: Bash_Result = - if (rc == Exn.Interrupt.return_code) throw Exn.Interrupt() - else if (rc != 0) error(err) - else this - } - - private class Limited_Progress(proc: Managed_Process, progress_limit: Option[Long]) - { - private var count = 0L - def apply(progress: String => Unit)(line: String): Unit = synchronized { - progress(line) - count = count + line.length + 1 - progress_limit match { - case Some(limit) if count > limit => proc.terminate - case _ => - } - } - } - - def bash_env(cwd: JFile, env: Map[String, String], script: String, - progress_stdout: String => Unit = (_: String) => (), - progress_stderr: String => Unit = (_: String) => (), - progress_limit: Option[Long] = None, - strict: Boolean = true): Bash_Result = - { - with_tmp_file("isabelle_script") { script_file => - File.write(script_file, script) - val proc = new Managed_Process(cwd, env, false, "bash", posix_path(script_file)) - proc.stdin.close - - val limited = new Limited_Progress(proc, progress_limit) - val (_, stdout) = - Simple_Thread.future("bash_stdout") { - File.read_lines(proc.stdout, limited(progress_stdout)) - } - val (_, stderr) = - Simple_Thread.future("bash_stderr") { - File.read_lines(proc.stderr, limited(progress_stderr)) - } - - val rc = - try { proc.join } - catch { case Exn.Interrupt() => proc.terminate; Exn.Interrupt.return_code } - if (strict && rc == Exn.Interrupt.return_code) throw Exn.Interrupt() - - Bash_Result(stdout.join, stderr.join, rc) - } - } - - def bash(script: String): Bash_Result = bash_env(null, null, script) - - - /* system tools */ - - def isabelle_tool(name: String, args: String*): (String, Int) = - { - Path.split(getenv_strict("ISABELLE_TOOLS")).find { dir => - val file = (dir + Path.basic(name)).file - try { - file.isFile && file.canRead && file.canExecute && - !name.endsWith("~") && !name.endsWith(".orig") - } - catch { case _: SecurityException => false } - } match { - case Some(dir) => - val file = standard_path(dir + Path.basic(name)) - process_output(execute(true, (List(file) ::: args.toList): _*)) - case None => ("Unknown Isabelle tool: " + name, 2) - } - } - - def open(arg: String): Unit 
= - bash("exec \"$ISABELLE_OPEN\" '" + arg + "' >/dev/null 2>/dev/null &") - - def pdf_viewer(arg: Path): Unit = - bash("exec \"$PDF_VIEWER\" '" + standard_path(arg) + "' >/dev/null 2>/dev/null &") - - def hg(cmd_line: String, cwd: Path = Path.current): Bash_Result = - bash("cd " + shell_path(cwd) + " && \"${HG:-hg}\" " + cmd_line) - - - /** Isabelle resources **/ - - /* components */ - - def components(): List[Path] = - Path.split(getenv_strict("ISABELLE_COMPONENTS")) - - - /* logic images */ - - def find_logics_dirs(): List[Path] = - { - val ml_ident = Path.explode("$ML_IDENTIFIER").expand - Path.split(getenv_strict("ISABELLE_PATH")).map(_ + ml_ident) - } - - def find_logics(): List[String] = - (for { - dir <- find_logics_dirs() - files = dir.file.listFiles() if files != null - file <- files.toList if file.isFile } yield file.getName).sorted - - def default_logic(args: String*): String = - { - args.find(_ != "") match { - case Some(logic) => logic - case None => Isabelle_System.getenv_strict("ISABELLE_LOGIC") - } - } -} diff --git a/core/Pure/System/isar.ML b/core/Pure/System/isar.ML deleted file mode 100644 index c6cd997a..00000000 --- a/core/Pure/System/isar.ML +++ /dev/null @@ -1,174 +0,0 @@ -(* Title: Pure/System/isar.ML - Author: Makarius - -Global state of the raw Isar read-eval-print loop. -*) - -signature ISAR = -sig - val init: unit -> unit - val exn: unit -> (exn * string) option - val state: unit -> Toplevel.state - val goal: unit -> {context: Proof.context, facts: thm list, goal: thm} - val print: unit -> unit - val >> : Toplevel.transition -> bool - val >>> : Toplevel.transition list -> unit - val linear_undo: int -> unit - val undo: int -> unit - val kill: unit -> unit - val kill_proof: unit -> unit - val crashes: exn list Synchronized.var - val toplevel_loop: TextIO.instream -> - {init: bool, welcome: bool, sync: bool, secure: bool} -> unit - val loop: unit -> unit - val main: unit -> unit -end; - -structure Isar: ISAR = -struct - - -(** TTY model -- SINGLE-THREADED! **) - -(* the global state *) - -type history = (Toplevel.state * Toplevel.transition) list; - (*previous state, state transition -- regular commands only*) - -local - val global_history = Unsynchronized.ref ([]: history); - val global_state = Unsynchronized.ref Toplevel.toplevel; - val global_exn = Unsynchronized.ref (NONE: (exn * string) option); -in - -fun edit_history count f = NAMED_CRITICAL "Isar" (fn () => - let - fun edit 0 (st, hist) = (global_history := hist; global_state := st; global_exn := NONE) - | edit n (st, hist) = edit (n - 1) (f st hist); - in edit count (! global_state, ! global_history) end); - -fun state () = ! global_state; - -fun exn () = ! 
global_exn; -fun set_exn exn = global_exn := exn; - -end; - - -fun init () = edit_history 1 (K (K (Toplevel.toplevel, []))); - -fun goal () = Proof.goal (Toplevel.proof_of (state ())) - handle Toplevel.UNDEF => error "No goal present"; - -fun print () = Toplevel.print_state (state ()); - - -(* history navigation *) - -local - -fun find_and_undo _ [] = error "Undo history exhausted" - | find_and_undo which ((prev, tr) :: hist) = - if which (Toplevel.name_of tr) then (prev, hist) else find_and_undo which hist; - -in - -fun linear_undo n = edit_history n (K (find_and_undo (K true))); - -fun undo n = edit_history n (fn st => fn hist => - find_and_undo (if Toplevel.is_proof st then K true else Keyword.is_theory) hist); - -fun kill () = edit_history 1 (fn st => fn hist => - find_and_undo - (if Toplevel.is_proof st then Keyword.is_theory else Keyword.is_theory_begin) hist); - -fun kill_proof () = edit_history 1 (fn st => fn hist => - if Toplevel.is_proof st then find_and_undo Keyword.is_theory hist - else raise Toplevel.UNDEF); - -end; - - -(* interactive state transformations *) - -fun op >> tr = - (case Toplevel.transition true tr (state ()) of - NONE => false - | SOME (_, SOME exn_info) => - (set_exn (SOME exn_info); - Toplevel.setmp_thread_position tr - Runtime.exn_error_message (Runtime.EXCURSION_FAIL exn_info); - true) - | SOME (st', NONE) => - let - val name = Toplevel.name_of tr; - val _ = if Keyword.is_theory_begin name then init () else (); - val _ = - if Keyword.is_regular name - then edit_history 1 (fn st => fn hist => (st', (st, tr) :: hist)) else (); - in true end); - -fun op >>> [] = () - | op >>> (tr :: trs) = if op >> tr then op >>> trs else (); - - -(* toplevel loop -- uninterruptible *) - -val crashes = Synchronized.var "Isar.crashes" ([]: exn list); - -local - -fun protocol_message props output = - (case props of - function :: args => - if function = Markup.command_timing then - let - val name = the_default "" (Properties.get args Markup.nameN); - val pos = Position.of_properties args; - val timing = Markup.parse_timing_properties args; - in - if Timing.is_relevant timing andalso (! Toplevel.profiling > 0 orelse ! 
Toplevel.timing) - andalso name <> "" andalso not (Keyword.is_control name) - then tracing ("command " ^ quote name ^ Position.here pos ^ ": " ^ Timing.message timing) - else () - end - else raise Output.Protocol_Message props - | [] => raise Output.Protocol_Message props); - -fun raw_loop secure src = - let - fun check_secure () = - (if secure then warning "Secure loop -- cannot exit to ML" else (); secure); - in - (case Source.get_single (Source.set_prompt Source.default_prompt src) of - NONE => if secure then quit () else () - | SOME (tr, src') => if op >> tr orelse check_secure () then raw_loop secure src' else ()) - handle exn => - (Runtime.exn_error_message exn - handle crash => - (Synchronized.change crashes (cons crash); - warning "Recovering from Isar toplevel crash -- see also Isar.crashes"); - raw_loop secure src) - end; - -in - -fun toplevel_loop in_stream {init = do_init, welcome, sync, secure} = - (Context.set_thread_data NONE; - Multithreading.max_threads_update (Options.default_int "threads"); - if do_init then init () else (); - Output.protocol_message_fn := protocol_message; - if welcome then writeln (Session.welcome ()) else (); - uninterruptible (fn _ => fn () => raw_loop secure (Outer_Syntax.isar in_stream sync)) ()); - -end; - -fun loop () = - toplevel_loop TextIO.stdIn - {init = false, welcome = false, sync = false, secure = Secure.is_secure ()}; - -fun main () = - toplevel_loop TextIO.stdIn - {init = true, welcome = true, sync = false, secure = Secure.is_secure ()}; - -end; diff --git a/core/Pure/System/message_channel.ML b/core/Pure/System/message_channel.ML deleted file mode 100644 index 3683f4b0..00000000 --- a/core/Pure/System/message_channel.ML +++ /dev/null @@ -1,74 +0,0 @@ -(* Title: Pure/System/message_channel.ML - Author: Makarius - -Preferably asynchronous channel for Isabelle messages. 
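
When threading is available, the removed channel hands every message to a mailbox that a single writer thread drains, and shutdown is signalled by posting NONE. The same producer/consumer shape in Scala, using a blocking queue and an Option as the poison pill (all names invented), is sketched below.

  import java.util.concurrent.LinkedBlockingQueue

  class AsyncChannel(write: String => Unit) {
    private val queue = new LinkedBlockingQueue[Option[String]]()

    private val worker = new Thread(() => {
      // Drain messages until the None "poison pill" arrives.
      Iterator.continually(queue.take()).takeWhile(_.isDefined)
        .foreach(msg => write(msg.get))
    })
    worker.setDaemon(true)
    worker.start()

    def send(msg: String): Unit = queue.put(Some(msg))
    def shutdown(): Unit = { queue.put(None); worker.join() }
  }
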
-*) - -signature MESSAGE_CHANNEL = -sig - type message - val message: string -> Properties.T -> string list -> message - type T - val send: T -> message -> unit - val shutdown: T -> unit - val make: System_Channel.T -> T -end; - -structure Message_Channel: MESSAGE_CHANNEL = -struct - -(* message *) - -datatype message = Message of string list; - -fun chunk ss = - string_of_int (fold (Integer.add o size) ss 0) :: "\n" :: ss; - -fun message name raw_props body = - let - val robust_props = map (pairself YXML.embed_controls) raw_props; - val header = YXML.string_of (XML.Elem ((name, robust_props), [])); - in Message (chunk [header] @ chunk body) end; - -fun output_message channel (Message ss) = - List.app (System_Channel.output channel) ss; - - -(* channel *) - -datatype T = Message_Channel of {send: message -> unit, shutdown: unit -> unit}; - -fun send (Message_Channel {send, ...}) = send; -fun shutdown (Message_Channel {shutdown, ...}) = shutdown (); - -fun flush channel = ignore (try System_Channel.flush channel); -val flush_timeout = SOME (seconds 0.02); - -fun message_output mbox channel = - let - fun continue timeout = - (case Mailbox.receive timeout mbox of - [] => (flush channel; continue NONE) - | msgs => received timeout msgs) - and received _ (NONE :: _) = flush channel - | received timeout (SOME msg :: rest) = - (output_message channel msg; received flush_timeout rest) - | received timeout [] = continue timeout; - in fn () => continue NONE end; - -fun make channel = - if Multithreading.available then - let - val mbox = Mailbox.create (); - val thread = Simple_Thread.fork false (message_output mbox channel); - fun send msg = Mailbox.send mbox (SOME msg); - fun shutdown () = - (Mailbox.send mbox NONE; Mailbox.await_empty mbox; Simple_Thread.join thread); - in Message_Channel {send = send, shutdown = shutdown} end - else - let - fun send msg = (output_message channel msg; flush channel); - in Message_Channel {send = send, shutdown = fn () => ()} end; - -end; - diff --git a/core/Pure/System/options.ML b/core/Pure/System/options.ML deleted file mode 100644 index 61f9a847..00000000 --- a/core/Pure/System/options.ML +++ /dev/null @@ -1,218 +0,0 @@ -(* Title: Pure/System/options.ML - Author: Makarius - -System options with external string representation. 
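
Both the ML and the Scala implementations below keep options as a string table tagged with a declared type and funnel every access through a name/type check. A small Scala sketch of that check/get/put structure (types reduced to bool, int and string; names illustrative) follows.

  final case class Opt(typ: String, value: String)

  final class Opts(table: Map[String, Opt] = Map.empty) {
    private def check(name: String, typ: String): Opt = {
      val opt = table.getOrElse(name, sys.error(s"Unknown system option $name"))
      if (opt.typ == typ) opt
      else sys.error(s"Ill-typed system option $name : ${opt.typ} vs. $typ")
    }

    def bool(name: String): Boolean = check(name, "bool").value.toBoolean
    def int(name: String): Int = check(name, "int").value.toInt
    def string(name: String): String = check(name, "string").value

    def put(name: String, typ: String, value: String): Opts =
      new Opts(table + (name -> Opt(typ, value)))
  }
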
-*) - -signature OPTIONS = -sig - val boolT: string - val intT: string - val realT: string - val stringT: string - val unknownT: string - type T - val empty: T - val markup: T -> string * Position.T -> Markup.T - val typ: T -> string -> string - val bool: T -> string -> bool - val int: T -> string -> int - val real: T -> string -> real - val string: T -> string -> string - val put_bool: string -> bool -> T -> T - val put_int: string -> int -> T -> T - val put_real: string -> real -> T -> T - val put_string: string -> string -> T -> T - val declare: {pos: Position.T, name: string, typ: string, value: string} -> T -> T - val update: string -> string -> T -> T - val decode: XML.body -> T - val default: unit -> T - val default_markup: string * Position.T -> Markup.T - val default_typ: string -> string - val default_bool: string -> bool - val default_int: string -> int - val default_real: string -> real - val default_string: string -> string - val default_put_bool: string -> bool -> unit - val default_put_int: string -> int -> unit - val default_put_real: string -> real -> unit - val default_put_string: string -> string -> unit - val get_default: string -> string - val put_default: string -> string -> unit - val set_default: T -> unit - val reset_default: unit -> unit - val load_default: unit -> unit -end; - -structure Options: OPTIONS = -struct - -(* representation *) - -val boolT = "bool"; -val intT = "int"; -val realT = "real"; -val stringT = "string"; -val unknownT = "unknown"; - -datatype T = Options of {pos: Position.T, typ: string, value: string} Symtab.table; - -val empty = Options Symtab.empty; - - -(* check *) - -fun check_name (Options tab) name = - let val opt = Symtab.lookup tab name in - if is_some opt andalso #typ (the opt) <> unknownT then the opt - else error ("Unknown system option " ^ quote name) - end; - -fun check_type options name typ = - let val opt = check_name options name in - if #typ opt = typ then opt - else error ("Ill-typed system option " ^ quote name ^ " : " ^ #typ opt ^ " vs. 
" ^ typ) - end; - - -(* markup *) - -fun markup options (name, pos) = - let - val opt = - check_name options name - handle ERROR msg => error (msg ^ Position.here pos); - val props = Position.def_properties_of (#pos opt); - in Markup.properties props (Markup.entity Markup.system_optionN name) end; - - -(* typ *) - -fun typ options name = #typ (check_name options name); - - -(* basic operations *) - -fun put T print name x (options as Options tab) = - let val opt = check_type options name T - in Options (Symtab.update (name, {pos = #pos opt, typ = #typ opt, value = print x}) tab) end; - -fun get T parse options name = - let val opt = check_type options name T in - (case parse (#value opt) of - SOME x => x - | NONE => - error ("Malformed value for system option " ^ quote name ^ - " : " ^ T ^ " =\n" ^ quote (#value opt))) - end; - - -(* internal lookup and update *) - -val bool = get boolT (try Markup.parse_bool); -val int = get intT (try Markup.parse_int); -val real = get realT (try Markup.parse_real); -val string = get stringT SOME; - -val put_bool = put boolT Markup.print_bool; -val put_int = put intT Markup.print_int; -val put_real = put realT Markup.print_real; -val put_string = put stringT I; - - -(* external updates *) - -fun check_value options name = - let val opt = check_name options name in - if #typ opt = boolT then ignore (bool options name) - else if #typ opt = intT then ignore (int options name) - else if #typ opt = realT then ignore (real options name) - else if #typ opt = stringT then ignore (string options name) - else () - end; - -fun declare {pos, name, typ, value} (Options tab) = - let - val options' = - (case Symtab.lookup tab name of - SOME other => - error ("Duplicate declaration of system option " ^ quote name ^ Position.here pos ^ - Position.here (#pos other)) - | NONE => Options (Symtab.update (name, {pos = pos, typ = typ, value = value}) tab)); - val _ = - typ = boolT orelse typ = intT orelse typ = realT orelse typ = stringT orelse - error ("Unknown type for system option " ^ quote name ^ " : " ^ quote typ ^ - Position.here pos); - val _ = check_value options' name; - in options' end; - -fun update name value (options as Options tab) = - let - val opt = check_name options name; - val options' = - Options (Symtab.update (name, {pos = #pos opt, typ = #typ opt, value = value}) tab); - val _ = check_value options' name; - in options' end; - - -(* decode *) - -fun decode_opt body = - let open XML.Decode - in list (pair properties (pair string (pair string string))) end body - |> map (fn (props, (name, (typ, value))) => - {pos = Position.of_properties props, name = name, typ = typ, value = value}); - -fun decode body = fold declare (decode_opt body) empty; - - - -(** global default **) - -val global_default = Synchronized.var "Options.default" (NONE: T option); - -fun err_no_default () = error "Missing default for system options within Isabelle process"; - -fun change_default f x y = - Synchronized.change global_default - (fn SOME options => SOME (f x y options) - | NONE => err_no_default ()); - -fun default () = - (case Synchronized.value global_default of - SOME options => options - | NONE => err_no_default ()); - -fun default_markup arg = markup (default ()) arg; -fun default_typ name = typ (default ()) name; -fun default_bool name = bool (default ()) name; -fun default_int name = int (default ()) name; -fun default_real name = real (default ()) name; -fun default_string name = string (default ()) name; - -val default_put_bool = change_default put_bool; -val 
default_put_int = change_default put_int; -val default_put_real = change_default put_real; -val default_put_string = change_default put_string; - -fun get_default name = - let val options = default () in get (typ options name) SOME options name end; -val put_default = change_default update; - -fun set_default options = Synchronized.change global_default (K (SOME options)); -fun reset_default () = Synchronized.change global_default (K NONE); - -fun load_default () = - (case getenv "ISABELLE_PROCESS_OPTIONS" of - "" => () - | name => - let val path = Path.explode name in - (case try File.read path of - SOME s => (set_default (decode (YXML.parse_body s)); ignore (try File.rm path)) - | NONE => ()) - end); - -val _ = load_default (); - -end; - diff --git a/core/Pure/System/options.scala b/core/Pure/System/options.scala deleted file mode 100644 index 524ebfcc..00000000 --- a/core/Pure/System/options.scala +++ /dev/null @@ -1,432 +0,0 @@ -/* Title: Pure/System/options.scala - Author: Makarius - -System options with external string representation. -*/ - -package isabelle - - -import java.util.Calendar - - -object Options -{ - type Spec = (String, Option[String]) - - val empty: Options = new Options() - - - /* representation */ - - sealed abstract class Type - { - def print: String = Word.lowercase(toString) - } - case object Bool extends Type - case object Int extends Type - case object Real extends Type - case object String extends Type - case object Unknown extends Type - - case class Opt( - public: Boolean, - pos: Position.T, - name: String, - typ: Type, - value: String, - default_value: String, - description: String, - section: String) - { - private def print(default: Boolean): String = - { - val x = if (default) default_value else value - "option " + name + " : " + typ.print + " = " + - (if (typ == Options.String) quote(x) else x) + - (if (description == "") "" else "\n -- " + quote(description)) - } - - def print: String = print(false) - def print_default: String = print(true) - - def title(strip: String = ""): String = - { - val words = Word.explode('_', name) - val words1 = - words match { - case word :: rest if word == strip => rest - case _ => words - } - Word.implode(words1.map(Word.perhaps_capitalize(_))) - } - - def unknown: Boolean = typ == Unknown - } - - - /* parsing */ - - private val SECTION = "section" - private val PUBLIC = "public" - private val OPTION = "option" - private val OPTIONS = Path.explode("etc/options") - private val PREFS_DIR = Path.explode("$ISABELLE_HOME_USER/etc") - private val PREFS = PREFS_DIR + Path.basic("preferences") - - lazy val options_syntax = - Outer_Syntax.init() + ":" + "=" + "--" + - (SECTION, Keyword.THY_HEADING2) + (PUBLIC, Keyword.THY_DECL) + (OPTION, Keyword.THY_DECL) - - lazy val prefs_syntax = Outer_Syntax.init() + "=" - - object Parser extends Parse.Parser - { - val option_name = atom("option name", _.is_xname) - val option_type = atom("option type", _.is_ident) - val option_value = - opt(token("-", tok => tok.is_sym_ident && tok.content == "-")) ~ atom("nat", _.is_nat) ^^ - { case s ~ n => if (s.isDefined) "-" + n else n } | - atom("option value", tok => tok.is_name || tok.is_float) - - val option_entry: Parser[Options => Options] = - { - command(SECTION) ~! text ^^ - { case _ ~ a => (options: Options) => options.set_section(a) } | - opt(command(PUBLIC)) ~ command(OPTION) ~! (option_name ~ keyword(":") ~ option_type ~ - keyword("=") ~ option_value ~ (keyword("--") ~! 
text ^^ { case _ ~ x => x } | success(""))) ^^ - { case a ~ pos ~ (b ~ _ ~ c ~ _ ~ d ~ e) => - (options: Options) => options.declare(a.isDefined, pos, b, c, d, e) } - } - - val prefs_entry: Parser[Options => Options] = - { - option_name ~ (keyword("=") ~! option_value) ^^ - { case a ~ (_ ~ b) => (options: Options) => options.add_permissive(a, b) } - } - - def parse_file(syntax: Outer_Syntax, parser: Parser[Options => Options], - options: Options, file: Path): Options = - { - val toks = syntax.scan(File.read(file)) - val ops = - parse_all(rep(parser), Token.reader(toks, file.implode)) match { - case Success(result, _) => result - case bad => error(bad.toString) - } - try { (options.set_section("") /: ops) { case (opts, op) => op(opts) } } - catch { case ERROR(msg) => error(msg + Position.here(file.position)) } - } - } - - def init_defaults(): Options = - { - var options = empty - for { - dir <- Isabelle_System.components() - file = dir + OPTIONS if file.is_file - } { options = Parser.parse_file(options_syntax, Parser.option_entry, options, file) } - options - } - - def init(): Options = init_defaults().load_prefs() - - - /* encode */ - - val encode: XML.Encode.T[Options] = (options => options.encode) - - - /* command line entry point */ - - def main(args: Array[String]) - { - Command_Line.tool0 { - args.toList match { - case get_option :: export_file :: more_options => - val options = (Options.init() /: more_options)(_ + _) - - if (get_option != "") - Console.println(options.check_name(get_option).value) - - if (export_file != "") - File.write(Path.explode(export_file), YXML.string_of_body(options.encode)) - - if (get_option == "" && export_file == "") - Console.println(options.print) - - case _ => error("Bad arguments:\n" + cat_lines(args)) - } - } - } -} - - -final class Options private( - val options: Map[String, Options.Opt] = Map.empty, - val section: String = "") -{ - override def toString: String = options.iterator.mkString("Options (", ",", ")") - - private def print_opt(opt: Options.Opt): String = - if (opt.public) "public " + opt.print else opt.print - - def print: String = cat_lines(options.toList.sortBy(_._1).map(p => print_opt(p._2))) - - def description(name: String): String = check_name(name).description - - - /* check */ - - def check_name(name: String): Options.Opt = - options.get(name) match { - case Some(opt) if !opt.unknown => opt - case _ => error("Unknown option " + quote(name)) - } - - private def check_type(name: String, typ: Options.Type): Options.Opt = - { - val opt = check_name(name) - if (opt.typ == typ) opt - else error("Ill-typed option " + quote(name) + " : " + opt.typ.print + " vs. 
" + typ.print) - } - - - /* basic operations */ - - private def put[A](name: String, typ: Options.Type, value: String): Options = - { - val opt = check_type(name, typ) - new Options(options + (name -> opt.copy(value = value)), section) - } - - private def get[A](name: String, typ: Options.Type, parse: String => Option[A]): A = - { - val opt = check_type(name, typ) - parse(opt.value) match { - case Some(x) => x - case None => - error("Malformed value for option " + quote(name) + - " : " + typ.print + " =\n" + quote(opt.value)) - } - } - - - /* internal lookup and update */ - - class Bool_Access - { - def apply(name: String): Boolean = get(name, Options.Bool, Properties.Value.Boolean.unapply) - def update(name: String, x: Boolean): Options = - put(name, Options.Bool, Properties.Value.Boolean(x)) - } - val bool = new Bool_Access - - class Int_Access - { - def apply(name: String): Int = get(name, Options.Int, Properties.Value.Int.unapply) - def update(name: String, x: Int): Options = - put(name, Options.Int, Properties.Value.Int(x)) - } - val int = new Int_Access - - class Real_Access - { - def apply(name: String): Double = get(name, Options.Real, Properties.Value.Double.unapply) - def update(name: String, x: Double): Options = - put(name, Options.Real, Properties.Value.Double(x)) - } - val real = new Real_Access - - class String_Access - { - def apply(name: String): String = get(name, Options.String, s => Some(s)) - def update(name: String, x: String): Options = put(name, Options.String, x) - } - val string = new String_Access - - class Seconds_Access - { - def apply(name: String): Time = Time.seconds(real(name)) - } - val seconds = new Seconds_Access - - - /* external updates */ - - private def check_value(name: String): Options = - { - val opt = check_name(name) - opt.typ match { - case Options.Bool => bool(name); this - case Options.Int => int(name); this - case Options.Real => real(name); this - case Options.String => string(name); this - case Options.Unknown => this - } - } - - def declare( - public: Boolean, - pos: Position.T, - name: String, - typ_name: String, - value: String, - description: String): Options = - { - options.get(name) match { - case Some(other) => - error("Duplicate declaration of option " + quote(name) + Position.here(pos) + - Position.here(other.pos)) - case None => - val typ = - typ_name match { - case "bool" => Options.Bool - case "int" => Options.Int - case "real" => Options.Real - case "string" => Options.String - case _ => - error("Unknown type for option " + quote(name) + " : " + quote(typ_name) + - Position.here(pos)) - } - val opt = Options.Opt(public, pos, name, typ, value, value, description, section) - (new Options(options + (name -> opt), section)).check_value(name) - } - } - - def add_permissive(name: String, value: String): Options = - { - if (options.isDefinedAt(name)) this + (name, value) - else { - val opt = Options.Opt(false, Position.none, name, Options.Unknown, value, value, "", "") - new Options(options + (name -> opt), section) - } - } - - def + (name: String, value: String): Options = - { - val opt = check_name(name) - (new Options(options + (name -> opt.copy(value = value)), section)).check_value(name) - } - - def + (name: String, opt_value: Option[String]): Options = - { - val opt = check_name(name) - opt_value match { - case Some(value) => this + (name, value) - case None if opt.typ == Options.Bool => this + (name, "true") - case None => error("Missing value for option " + quote(name) + " : " + opt.typ.print) - } - } - - def + (str: String): 
Options = - { - str.indexOf('=') match { - case -1 => this + (str, None) - case i => this + (str.substring(0, i), str.substring(i + 1)) - } - } - - def ++ (specs: List[Options.Spec]): Options = - (this /: specs)({ case (x, (y, z)) => x + (y, z) }) - - - /* sections */ - - def set_section(new_section: String): Options = - new Options(options, new_section) - - def sections: List[(String, List[Options.Opt])] = - options.groupBy(_._2.section).toList.map({ case (a, opts) => (a, opts.toList.map(_._2)) }) - - - /* encode */ - - def encode: XML.Body = - { - val opts = - for ((_, opt) <- options.toList; if !opt.unknown) - yield (opt.pos, (opt.name, (opt.typ.print, opt.value))) - - import XML.Encode.{string => string_, _} - list(pair(properties, pair(string_, pair(string_, string_))))(opts) - } - - - /* user preferences */ - - def load_prefs(): Options = - if (Options.PREFS.is_file) - Options.Parser.parse_file( - Options.prefs_syntax, Options.Parser.prefs_entry, this, Options.PREFS) - else this - - def save_prefs() - { - val defaults = Options.init_defaults() - val changed = - (for { - (name, opt2) <- options.iterator - opt1 = defaults.options.get(name) - if (opt1.isEmpty || opt1.get.value != opt2.value) - } yield (name, opt2.value, if (opt1.isEmpty) " (* unknown *)" else "")).toList - - val prefs = - changed.sortBy(_._1) - .map({ case (x, y, z) => x + " = " + Outer_Syntax.quote_string(y) + z + "\n" }).mkString - - Isabelle_System.mkdirs(Options.PREFS_DIR) - File.write_backup(Options.PREFS, - "(* generated by Isabelle " + Calendar.getInstance.getTime + " *)\n\n" + prefs) - } -} - - -class Options_Variable -{ - private var options = Options.empty - - def value: Options = synchronized { options } - def update(new_options: Options): Unit = synchronized { options = new_options } - - def + (name: String, x: String): Unit = synchronized { options = options + (name, x) } - - class Bool_Access - { - def apply(name: String): Boolean = synchronized { options.bool(name) } - def update(name: String, x: Boolean): Unit = - synchronized { options = options.bool.update(name, x) } - } - val bool = new Bool_Access - - class Int_Access - { - def apply(name: String): Int = synchronized { options.int(name) } - def update(name: String, x: Int): Unit = - synchronized { options = options.int.update(name, x) } - } - val int = new Int_Access - - class Real_Access - { - def apply(name: String): Double = synchronized { options.real(name) } - def update(name: String, x: Double): Unit = - synchronized { options = options.real.update(name, x) } - } - val real = new Real_Access - - class String_Access - { - def apply(name: String): String = synchronized { options.string(name) } - def update(name: String, x: String): Unit = - synchronized { options = options.string.update(name, x) } - } - val string = new String_Access - - class Seconds_Access - { - def apply(name: String): Time = synchronized { options.seconds(name) } - } - val seconds = new Seconds_Access -} - diff --git a/core/Pure/System/platform.scala b/core/Pure/System/platform.scala deleted file mode 100644 index d400809b..00000000 --- a/core/Pure/System/platform.scala +++ /dev/null @@ -1,60 +0,0 @@ -/* Title: Pure/System/platform.scala - Module: PIDE - Author: Makarius - -Raw platform identification. 
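The object below derives a canonical platform identifier from the JVM
properties "os.arch" and "os.name".  Illustrative sketch only (not part of
the original file; the printed values are assumptions):

  // e.g. prints "x86_64-linux" followed by the JVM implementation name
  println(isabelle.Platform.jvm_platform)
  println(isabelle.Platform.jvm_name)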
-*/ - -package isabelle - - -import scala.util.matching.Regex - - -object Platform -{ - /* main OS variants */ - - val is_macos = System.getProperty("os.name", "") == "Mac OS X" - val is_windows = System.getProperty("os.name", "").startsWith("Windows") - - - /* Platform identifiers */ - - private val Solaris = new Regex("SunOS|Solaris") - private val Linux = new Regex("Linux") - private val Darwin = new Regex("Mac OS X") - private val Windows = new Regex("Windows.*") - - private val X86 = new Regex("i.86|x86") - private val X86_64 = new Regex("amd64|x86_64") - private val Sparc = new Regex("sparc") - private val PPC = new Regex("PowerPC|ppc") - - lazy val jvm_platform: String = - { - val arch = - System.getProperty("os.arch", "") match { - case X86() => "x86" - case X86_64() => "x86_64" - case Sparc() => "sparc" - case PPC() => "ppc" - case _ => error("Failed to determine CPU architecture") - } - val os = - System.getProperty("os.name", "") match { - case Solaris() => "solaris" - case Linux() => "linux" - case Darwin() => "darwin" - case Windows() => "windows" - case _ => error("Failed to determine operating system platform") - } - arch + "-" + os - } - - - /* JVM name */ - - val jvm_name: String = System.getProperty("java.vm.name", "") -} - diff --git a/core/Pure/System/posix_interrupt.scala b/core/Pure/System/posix_interrupt.scala deleted file mode 100644 index 0d0eb5fc..00000000 --- a/core/Pure/System/posix_interrupt.scala +++ /dev/null @@ -1,29 +0,0 @@ -/* Title: Pure/System/interrupt.scala - Author: Makarius - -Support for POSIX interrupts (bypassed on Windows). -*/ - -package isabelle - - -import sun.misc.{Signal, SignalHandler} - - -object POSIX_Interrupt -{ - def handler[A](h: => Unit)(e: => A): A = - { - val SIGINT = new Signal("INT") - val new_handler = new SignalHandler { def handle(s: Signal) { h } } - val old_handler = Signal.handle(SIGINT, new_handler) - try { e } finally { Signal.handle(SIGINT, old_handler) } - } - - def exception[A](e: => A): A = - { - val thread = Thread.currentThread - handler { thread.interrupt } { e } - } -} - diff --git a/core/Pure/System/system_channel.ML b/core/Pure/System/system_channel.ML deleted file mode 100644 index 6d73152a..00000000 --- a/core/Pure/System/system_channel.ML +++ /dev/null @@ -1,79 +0,0 @@ -(* Title: Pure/System/system_channel.ML - Author: Makarius - -Portable system channel for inter-process communication, based on -named pipes or sockets. 
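The structure below hides both transports behind a single record type T with
input_line, inputN, output and flush operations: fifo_rendezvous opens a pair
of named pipes (one per direction) with text I/O, while socket_rendezvous
connects via Socket_IO and uses binary I/O, including a line reader and an
inputN workaround for polyml-5.5.1 or earlier.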
-*) - -signature SYSTEM_CHANNEL = -sig - type T - val input_line: T -> string option - val inputN: T -> int -> string - val output: T -> string -> unit - val flush: T -> unit - val fifo_rendezvous: string -> string -> T - val socket_rendezvous: string -> T -end; - -structure System_Channel: SYSTEM_CHANNEL = -struct - -datatype T = System_Channel of - {input_line: unit -> string option, - inputN: int -> string, - output: string -> unit, - flush: unit -> unit}; - -fun input_line (System_Channel {input_line = f, ...}) = f (); -fun inputN (System_Channel {inputN = f, ...}) n = f n; -fun output (System_Channel {output = f, ...}) s = f s; -fun flush (System_Channel {flush = f, ...}) = f (); - - -(* named pipes *) - -fun fifo_rendezvous fifo1 fifo2 = - let - val in_stream = TextIO.openIn fifo1; - val out_stream = TextIO.openOut fifo2; - val _ = TextIO.StreamIO.setBufferMode (TextIO.getOutstream out_stream, IO.BLOCK_BUF); - in - System_Channel - {input_line = fn () => TextIO.inputLine in_stream, - inputN = fn n => TextIO.inputN (in_stream, n), - output = fn s => TextIO.output (out_stream, s), - flush = fn () => TextIO.flushOut out_stream} - end; - - -(* sockets *) - -fun read_line in_stream = - let - fun result cs = String.implode (rev (#"\n" :: cs)); - fun read cs = - (case BinIO.input1 in_stream of - NONE => if null cs then NONE else SOME (result cs) - | SOME b => - (case Byte.byteToChar b of - #"\n" => SOME (result cs) - | c => read (c :: cs))); - in read [] end; - -fun socket_rendezvous name = - let - val (in_stream, out_stream) = Socket_IO.open_streams name; - val _ = BinIO.StreamIO.setBufferMode (BinIO.getOutstream out_stream, IO.BLOCK_BUF); - in - System_Channel - {input_line = fn () => read_line in_stream, - inputN = fn n => - if n = 0 then "" (*workaround for polyml-5.5.1 or earlier*) - else Byte.bytesToString (BinIO.inputN (in_stream, n)), - output = fn s => BinIO.output (out_stream, Byte.stringToBytes s), - flush = fn () => BinIO.flushOut out_stream} - end; - -end; - diff --git a/core/Pure/System/system_channel.scala b/core/Pure/System/system_channel.scala deleted file mode 100644 index 44c18fac..00000000 --- a/core/Pure/System/system_channel.scala +++ /dev/null @@ -1,94 +0,0 @@ -/* Title: Pure/System/system_channel.scala - Author: Makarius - -Portable system channel for inter-process communication, based on -named pipes or sockets. 
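System_Channel() below selects the socket transport on Windows and named
pipes elsewhere.  Illustrative usage sketch (not from the original file;
variable names are assumptions):

  val channel = isabelle.System_Channel()
  val prover_args = channel.isabelle_args        // command-line arguments for the peer process
  val (out_stream, in_stream) = channel.rendezvous()
  channel.accepted()                             // clean up pipes / close the server socket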
-*/ - -package isabelle - - -import java.io.{InputStream, OutputStream, File => JFile, FileInputStream, - FileOutputStream, IOException} -import java.net.{ServerSocket, InetAddress} - - -object System_Channel -{ - def apply(): System_Channel = - if (Platform.is_windows) new Socket_Channel else new Fifo_Channel -} - -abstract class System_Channel -{ - def params: List[String] - def isabelle_args: List[String] - def rendezvous(): (OutputStream, InputStream) - def accepted(): Unit -} - - -/** named pipes **/ - -private object Fifo_Channel -{ - private val next_fifo = Counter.make() -} - -private class Fifo_Channel extends System_Channel -{ - require(!Platform.is_windows) - - private def mk_fifo(): String = - { - val i = Fifo_Channel.next_fifo() - val script = - "FIFO=\"/tmp/isabelle-fifo-${PPID}-$$" + i + "\"\n" + - "echo -n \"$FIFO\"\n" + - "mkfifo -m 600 \"$FIFO\"\n" - val result = Isabelle_System.bash(script) - if (result.rc == 0) result.out else error(result.err) - } - - private def rm_fifo(fifo: String): Boolean = (new JFile(fifo)).delete - - private def fifo_input_stream(fifo: String): InputStream = new FileInputStream(fifo) - private def fifo_output_stream(fifo: String): OutputStream = new FileOutputStream(fifo) - - private val fifo1 = mk_fifo() - private val fifo2 = mk_fifo() - - def params: List[String] = List(fifo1, fifo2) - - val isabelle_args: List[String] = List ("-W", fifo1 + ":" + fifo2) - - def rendezvous(): (OutputStream, InputStream) = - { - val output_stream = fifo_output_stream(fifo1) - val input_stream = fifo_input_stream(fifo2) - (output_stream, input_stream) - } - - def accepted() { rm_fifo(fifo1); rm_fifo(fifo2) } -} - - -/** sockets **/ - -private class Socket_Channel extends System_Channel -{ - private val server = new ServerSocket(0, 2, InetAddress.getByName("127.0.0.1")) - - def params: List[String] = List("127.0.0.1", server.getLocalPort.toString) - - def isabelle_args: List[String] = List("-T", "127.0.0.1:" + server.getLocalPort) - - def rendezvous(): (OutputStream, InputStream) = - { - val socket = server.accept - socket.setTcpNoDelay(true) - (socket.getOutputStream, socket.getInputStream) - } - - def accepted() { server.close } -} diff --git a/core/Pure/System/utf8.scala b/core/Pure/System/utf8.scala deleted file mode 100644 index 7727852b..00000000 --- a/core/Pure/System/utf8.scala +++ /dev/null @@ -1,90 +0,0 @@ -/* Title: Pure/System/utf8.scala - Module: PIDE - Author: Makarius - -Variations on UTF-8. 
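The decoder below reads a CharSequence whose characters hold raw byte values
and replaces malformed sequences with U+FFFD.  Illustrative sketch (not from
the original file; the sample bytes are an assumption):

  // UTF-8 bytes of "Aé", presented as one char per byte
  val raw = "A\u00C3\u00A9"
  isabelle.UTF8.decode_permissive(raw)   // yields "Aé"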
-*/ - -package isabelle - - -import java.nio.charset.Charset -import scala.io.Codec - - -object UTF8 -{ - /* charset */ - - val charset_name: String = "UTF-8" - val charset: Charset = Charset.forName(charset_name) - def codec(): Codec = Codec(charset) - - - /* permissive UTF-8 decoding */ - - // see also http://en.wikipedia.org/wiki/UTF-8#Description - // overlong encodings enable byte-stuffing of low-ASCII - - def decode_permissive(text: CharSequence): String = - { - val buf = new java.lang.StringBuilder(text.length) - var code = -1 - var rest = 0 - def flush() - { - if (code != -1) { - if (rest == 0 && Character.isValidCodePoint(code)) - buf.appendCodePoint(code) - else buf.append('\uFFFD') - code = -1 - rest = 0 - } - } - def init(x: Int, n: Int) - { - flush() - code = x - rest = n - } - def push(x: Int) - { - if (rest <= 0) init(x, -1) - else { - code <<= 6 - code += x - rest -= 1 - } - } - for (i <- 0 until text.length) { - val c = text.charAt(i) - if (c < 128) { flush(); buf.append(c) } - else if ((c & 0xC0) == 0x80) push(c & 0x3F) - else if ((c & 0xE0) == 0xC0) init(c & 0x1F, 1) - else if ((c & 0xF0) == 0xE0) init(c & 0x0F, 2) - else if ((c & 0xF8) == 0xF0) init(c & 0x07, 3) - } - flush() - buf.toString - } - - private class Decode_Chars(decode: String => String, - buffer: Array[Byte], start: Int, end: Int) extends CharSequence - { - def length: Int = end - start - def charAt(i: Int): Char = (buffer(start + i).asInstanceOf[Int] & 0xFF).asInstanceOf[Char] - def subSequence(i: Int, j: Int): CharSequence = - new Decode_Chars(decode, buffer, start + i, start + j) - - // toString with adhoc decoding: abuse of CharSequence interface - override def toString: String = decode(decode_permissive(this)) - } - - def decode_chars(decode: String => String, - buffer: Array[Byte], start: Int, end: Int): CharSequence = - { - require(0 <= start && start <= end && end <= buffer.length) - new Decode_Chars(decode, buffer, start, end) - } -} - diff --git a/core/Pure/Thy/html.ML b/core/Pure/Thy/html.ML deleted file mode 100644 index 5f4dc80b..00000000 --- a/core/Pure/Thy/html.ML +++ /dev/null @@ -1,332 +0,0 @@ -(* Title: Pure/Thy/html.ML - Author: Markus Wenzel and Stefan Berghofer, TU Muenchen - -HTML presentation elements. 
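The structure below registers an "HTML" print mode: output_width translates
Isabelle symbols into HTML text (with sub/superscripts and bold rendered via
hidden markup), and the combinators para, preform, href_*, begin_document,
begin_session_index, theory_entry and theory assemble the per-theory and
per-session pages.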
-*) - -signature HTML = -sig - val html_mode: ('a -> 'b) -> 'a -> 'b - type text = string - val plain: string -> text - val name: string -> text - val keyword: string -> text - val command: string -> text - val href_name: string -> text -> text - val href_path: Url.T -> text -> text - val href_opt_path: Url.T option -> text -> text - val para: text -> text - val preform: text -> text - val verbatim: string -> text - val begin_document: string -> text - val end_document: text - val begin_session_index: string -> (Url.T * string) list -> Url.T -> text - val applet_pages: string -> Url.T * string -> (string * string) list - val theory_entry: Url.T * string -> text - val theory: string -> (Url.T option * string) list -> text -> text -end; - -structure HTML: HTML = -struct - - -(** HTML print modes **) - -(* mode *) - -val htmlN = "HTML"; -fun html_mode f x = Print_Mode.with_modes [htmlN] f x; - - -(* common markup *) - -fun span class = ("", ""); - -val _ = Markup.add_mode htmlN (span o fst); - - -(* symbol output *) - -local - val hidden = span Markup.hiddenN |-> enclose; - - (* FIXME proper unicode -- produced on Scala side *) - val html_syms = Symtab.make - [("", (0, "")), - ("'", (1, "'")), - ("\\", (1, "¡")), - ("\\", (1, "¢")), - ("\\", (1, "£")), - ("\\", (1, "¤")), - ("\\", (1, "¥")), - ("\\", (1, "¦")), - ("\\
", (1, "§")), - ("\\", (1, "¨")), - ("\\", (1, "©")), - ("\\", (1, "ª")), - ("\\", (1, "«")), - ("\\", (1, "¬")), - ("\\", (1, "­")), - ("\\", (1, "®")), - ("\\", (1, "¯")), - ("\\", (1, "°")), - ("\\", (1, "±")), - ("\\", (1, "´")), - ("\\", (1, "¶")), - ("\\", (1, "·")), - ("\\", (1, "¸")), - ("\\", (1, "º")), - ("\\", (1, "»")), - ("\\", (1, "¼")), - ("\\", (1, "½")), - ("\\", (1, "¾")), - ("\\", (1, "¿")), - ("\\", (1, "×")), - ("\\
", (1, "÷")), - ("\\", (1, "o")), - ("\\", (1, "Α")), - ("\\", (1, "Β")), - ("\\", (1, "Γ")), - ("\\", (1, "Δ")), - ("\\", (1, "Ε")), - ("\\", (1, "Ζ")), - ("\\", (1, "Η")), - ("\\", (1, "Θ")), - ("\\", (1, "Ι")), - ("\\", (1, "Κ")), - ("\\", (1, "Λ")), - ("\\", (1, "Μ")), - ("\\", (1, "Ν")), - ("\\", (1, "Ξ")), - ("\\", (1, "Ο")), - ("\\", (1, "Π")), - ("\\", (1, "Ρ")), - ("\\", (1, "Σ")), - ("\\", (1, "Τ")), - ("\\", (1, "Υ")), - ("\\", (1, "Φ")), - ("\\", (1, "Χ")), - ("\\", (1, "Ψ")), - ("\\", (1, "Ω")), - ("\\", (1, "α")), - ("\\", (1, "β")), - ("\\", (1, "γ")), - ("\\", (1, "δ")), - ("\\", (1, "ε")), - ("\\", (1, "ζ")), - ("\\", (1, "η")), - ("\\", (1, "ϑ")), - ("\\", (1, "ι")), - ("\\", (1, "κ")), - ("\\", (1, "λ")), - ("\\", (1, "μ")), - ("\\", (1, "ν")), - ("\\", (1, "ξ")), - ("\\", (1, "ο")), - ("\\", (1, "π")), - ("\\", (1, "ρ")), - ("\\", (1, "σ")), - ("\\", (1, "τ")), - ("\\", (1, "υ")), - ("\\", (1, "φ")), - ("\\", (1, "χ")), - ("\\", (1, "ψ")), - ("\\", (1, "ω")), - ("\\", (1, "•")), - ("\\", (1, "…")), - ("\\", (1, "℘")), - ("\\", (1, "∀")), - ("\\", (1, "∂")), - ("\\", (1, "∃")), - ("\\", (1, "∅")), - ("\\", (1, "∇")), - ("\\", (1, "∈")), - ("\\", (1, "∉")), - ("\\", (1, "∏")), - ("\\", (1, "∑")), - ("\\", (1, "∗")), - ("\\", (1, "∝")), - ("\\", (1, "∞")), - ("\\", (1, "∠")), - ("\\", (1, "∧")), - ("\\", (1, "∨")), - ("\\", (1, "∩")), - ("\\", (1, "∪")), - ("\\", (1, "∼")), - ("\\", (1, "≅")), - ("\\", (1, "≈")), - ("\\", (1, "≠")), - ("\\", (1, "≡")), - ("\\", (1, "≤")), - ("\\", (1, "≥")), - ("\\", (1, "⊂")), - ("\\", (1, "⊃")), - ("\\", (1, "⊆")), - ("\\", (1, "⊇")), - ("\\", (1, "⊕")), - ("\\", (1, "⊗")), - ("\\", (1, "⊥")), - ("\\", (1, "⌈")), - ("\\", (1, "⌉")), - ("\\", (1, "⌊")), - ("\\", (1, "⌋")), - ("\\", (1, "⟨")), - ("\\", (1, "⟩")), - ("\\", (1, "◊")), - ("\\", (1, "♠")), - ("\\", (1, "♣")), - ("\\", (1, "♥")), - ("\\", (1, "♦")), - ("\\", (2, "[|")), - ("\\", (2, "|]")), - ("\\", (3, "==>")), - ("\\", (2, "=>")), - ("\\", (2, "!!")), - ("\\", (2, "::")), - ("\\", (2, "(|")), - ("\\", (2, "|)),")), - ("\\", (3, "<->")), - ("\\", (3, "-->")), - ("\\", (2, "->")), - ("\\", (1, "‹")), - ("\\", (1, "›")), - ("\\", (1, "⏎")), - ("\\<^bsub>", (0, hidden "⇘" ^ "")), - ("\\<^esub>", (0, hidden "⇙" ^ "")), - ("\\<^bsup>", (0, hidden "⇗" ^ "")), - ("\\<^esup>", (0, hidden "⇖" ^ ""))]; - - fun output_sym s = - if Symbol.is_raw s then (1, Symbol.decode_raw s) - else - (case Symtab.lookup html_syms s of - SOME x => x - | NONE => (size s, XML.text s)); - - fun output_markup (bg, en) s1 s2 = - let val (n, txt) = output_sym s2 - in (n, hidden s1 ^ enclose bg en txt) end; - - val output_sub = output_markup ("", ""); - val output_sup = output_markup ("", ""); - val output_bold = output_markup (span "bold"); - - fun output_syms [] (result, width) = (implode (rev result), width) - | output_syms (s1 :: rest) (result, width) = - let - val (s2, ss) = (case rest of [] => ("", []) | s2 :: ss => (s2, ss)); - val ((w, s), r) = - if s1 = "\\<^sub>" then (output_sub "⇩" s2, ss) - else if s1 = "\\<^sup>" then (output_sup "⇧" s2, ss) - else if s1 = "\\<^bold>" then (output_bold "❙" s2, ss) - else (output_sym s1, rest); - in output_syms r (s :: result, width + w) end; -in - -fun output_width str = output_syms (Symbol.explode str) ([], 0); -val output = #1 o output_width; - -val _ = Output.add_mode htmlN output_width Symbol.encode_raw; - -end; - - - -(** HTML markup **) - -type text = string; - - -(* atoms *) - -val plain = output; -val name = enclose "" "" o output; -val keyword = enclose 
"" "" o output; -val command = enclose "" "" o output; - - -(* misc *) - -fun href_name s txt = "" ^ txt ^ ""; -fun href_path path txt = href_name (Url.implode path) txt; - -fun href_opt_path NONE txt = txt - | href_opt_path (SOME p) txt = href_path p txt; - -fun para txt = "\n

" ^ txt ^ "

\n"; -fun preform txt = "
" ^ txt ^ "
"; -val verbatim = preform o output; - - -(* document *) - -fun begin_document title = - "\n\ - \\n\ - \\n\ - \\n\ - \\n\ - \" ^ plain (title ^ " (" ^ Distribution.version ^ ")") ^ "\n\ - \\n\ - \\n\ - \\n\ - \\n\ - \
\ - \

" ^ plain title ^ "

\n"; - -val end_document = "\n
\n\n\n"; - - -(* session index *) - -fun begin_session_index session docs graph = - begin_document ("Session " ^ plain session) ^ - para ("View " ^ href_path graph "theory dependencies" ^ - implode (map (fn (p, name) => "
\nView " ^ href_path p name) docs)) ^ - "\n
\n
\n

<h2>Theories</h2>

\n
    \n"; - -fun choice chs s = space_implode " " (map (fn (s', lnk) => - enclose "[" "]" (if s = s' then keyword s' else href_name lnk s')) chs); - -fun back_link (path, name) = para (href_path path "Back" ^ " to index of " ^ plain name); - -fun applet_pages session back = - let - val sizes = - [("small", "small.html", ("500", "400")), - ("medium", "medium.html", ("650", "520")), - ("large", "large.html", ("800", "640"))]; - - fun applet_page (size, name, (width, height)) = - let - val browser_size = "Set browser size: " ^ - choice (map (fn (y, z, _) => (y, z)) sizes) size; - in - (name, begin_document ("Theory dependencies of " ^ session) ^ - back_link back ^ - para browser_size ^ - "\n
\n
\n\ - \\n\ - \\n\ - \" ^ end_document) - end; - in map applet_page sizes end; - - -fun theory_entry (p, s) = "
  • " ^ href_path p (plain s) ^ "
  • \n"; - - -(* theory *) - -fun theory A Bs txt = - begin_document ("Theory " ^ A) ^ "\n" ^ - command "theory" ^ " " ^ name A ^ "
    \n" ^ - keyword "imports" ^ " " ^ space_implode " " (map (uncurry href_opt_path o apsnd name) Bs) ^ - "
    \n" ^ - enclose "\n
    \n
    \n
    " "
    \n" txt ^ - end_document; - -end; diff --git a/core/Pure/Thy/html.scala b/core/Pure/Thy/html.scala deleted file mode 100644 index b26f392e..00000000 --- a/core/Pure/Thy/html.scala +++ /dev/null @@ -1,72 +0,0 @@ -/* Title: Pure/Thy/html.scala - Module: PIDE - Author: Makarius - -HTML presentation elements. -*/ - -package isabelle - - -object HTML -{ - /* encode text */ - - def encode(text: String): String = - { - val s = new StringBuilder - for (c <- text.iterator) c match { - case '<' => s ++= "<" - case '>' => s ++= ">" - case '&' => s ++= "&" - case '"' => s ++= """ - case '\'' => s ++= "'" - case '\n' => s ++= "
    " - case _ => s += c - } - s.toString - } - - - /* document */ - - val end_document = "\n
    \n\n\n" - - def begin_document(title: String): String = - "\n" + - "\n" + - "\n" + - "\n" + - "\n" + - "" + encode(title) + "\n" + - "\n" + - "\n" + - "\n" + - "\n" + - "
    " + - "

    " + encode(title) + "

    \n" - - - /* common markup elements */ - - private def session_entry(entry: (String, String)): String = - { - val (name, description) = entry - val descr = - if (description == "") Nil - else List(XML.elem("br"), XML.elem("pre", List(XML.Text(description)))) - XML.string_of_tree( - XML.elem("li", - List(XML.Elem(Markup("a", List(("href", name + "/index.html"))), - List(XML.Text(name)))) ::: descr)) + "\n" - } - - def chapter_index(chapter: String, sessions: List[(String, String)]): String = - { - begin_document("Isabelle/" + chapter + " sessions") + - (if (sessions.isEmpty) "" - else "
      \n" + sessions.map(session_entry(_)).mkString + "
    ") + - end_document - } -} diff --git a/core/Pure/Thy/latex.ML b/core/Pure/Thy/latex.ML deleted file mode 100644 index 4dfaf232..00000000 --- a/core/Pure/Thy/latex.ML +++ /dev/null @@ -1,192 +0,0 @@ -(* Title: Pure/Thy/latex.ML - Author: Markus Wenzel, TU Muenchen - -LaTeX presentation elements -- based on outer lexical syntax. -*) - -signature LATEX = -sig - val output_known_symbols: (string -> bool) * (string -> bool) -> - Symbol.symbol list -> string - val output_symbols: Symbol.symbol list -> string - val output_basic: Token.T -> string - val output_markup: string -> string -> string - val output_markup_env: string -> string -> string - val output_verbatim: string -> string - val markup_true: string - val markup_false: string - val begin_delim: string -> string - val end_delim: string -> string - val begin_tag: string -> string - val end_tag: string -> string - val tex_trailer: string - val isabelle_file: string -> string -> string - val symbol_source: (string -> bool) * (string -> bool) -> - string -> Symbol.symbol list -> string - val theory_entry: string -> string - val modes: string list -end; - -structure Latex: LATEX = -struct - -(* symbol output *) - -local - -val char_table = - Symtab.make - [("!", "{\\isacharbang}"), - ("\"", "{\\isachardoublequote}"), - ("#", "{\\isacharhash}"), - ("$", "{\\isachardollar}"), - ("%", "{\\isacharpercent}"), - ("&", "{\\isacharampersand}"), - ("'", "{\\isacharprime}"), - ("(", "{\\isacharparenleft}"), - (")", "{\\isacharparenright}"), - ("*", "{\\isacharasterisk}"), - ("+", "{\\isacharplus}"), - (",", "{\\isacharcomma}"), - ("-", "{\\isacharminus}"), - (".", "{\\isachardot}"), - ("/", "{\\isacharslash}"), - (":", "{\\isacharcolon}"), - (";", "{\\isacharsemicolon}"), - ("<", "{\\isacharless}"), - ("=", "{\\isacharequal}"), - (">", "{\\isachargreater}"), - ("?", "{\\isacharquery}"), - ("@", "{\\isacharat}"), - ("[", "{\\isacharbrackleft}"), - ("\\", "{\\isacharbackslash}"), - ("]", "{\\isacharbrackright}"), - ("^", "{\\isacharcircum}"), - ("_", "{\\isacharunderscore}"), - ("`", "{\\isacharbackquote}"), - ("{", "{\\isacharbraceleft}"), - ("|", "{\\isacharbar}"), - ("}", "{\\isacharbraceright}"), - ("~", "{\\isachartilde}")]; - -fun output_chr " " = "\\ " - | output_chr "\t" = "\\ " - | output_chr "\n" = "\\isanewline\n" - | output_chr c = - (case Symtab.lookup char_table c of - SOME s => s - | NONE => if Symbol.is_ascii_digit c then enclose "{\\isadigit{" "}}" c else c); - -val output_chrs = translate_string output_chr; - -fun output_known_sym (known_sym, known_ctrl) sym = - (case Symbol.decode sym of - Symbol.Char s => output_chr s - | Symbol.UTF8 s => s - | Symbol.Sym s => if known_sym s then enclose "{\\isasym" "}" s else output_chrs sym - | Symbol.Ctrl s => if known_ctrl s then enclose "\\isactrl" " " s else output_chrs sym - | Symbol.Raw s => s - | Symbol.Malformed s => error (Symbol.malformed_msg s) - | Symbol.EOF => error "Bad EOF symbol"); - -in - -val output_known_symbols = implode oo (map o output_known_sym); -val output_symbols = output_known_symbols (K true, K true); -val output_syms = output_symbols o Symbol.explode; - -val output_syms_antiq = - (fn Antiquote.Text ss => output_symbols (map Symbol_Pos.symbol ss) - | Antiquote.Antiq (ss, _) => - enclose "%\n\\isaantiq\n" "{}%\n\\endisaantiq\n" - (output_symbols (map Symbol_Pos.symbol ss))); - -end; - - -(* token output *) - -val invisible_token = Token.keyword_with (fn s => s = ";") orf Token.is_kind Token.Comment; - -fun output_basic tok = - let val s = Token.content_of tok in - if 
invisible_token tok then "" - else if Token.is_command tok then - "\\isacommand{" ^ output_syms s ^ "}" - else if Token.is_kind Token.Keyword tok andalso Symbol.is_ascii_identifier s then - "\\isakeyword{" ^ output_syms s ^ "}" - else if Token.is_kind Token.String tok then - enclose "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}" (output_syms s) - else if Token.is_kind Token.AltString tok then - enclose "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}" (output_syms s) - else if Token.is_kind Token.Verbatim tok then - let - val {text, pos, ...} = Token.source_position_of tok; - val ants = Antiquote.read (Symbol_Pos.explode (text, pos), pos); - val out = implode (map output_syms_antiq ants); - in enclose "{\\isacharverbatimopen}" "{\\isacharverbatimclose}" out end - else if Token.is_kind Token.Cartouche tok then - enclose "{\\isacartoucheopen}" "{\\isacartoucheclose}" (output_syms s) - else output_syms s - end; - -fun output_markup cmd txt = "%\n\\isamarkup" ^ cmd ^ "{" ^ Symbol.strip_blanks txt ^ "%\n}\n"; - -fun output_markup_env cmd txt = - "%\n\\begin{isamarkup" ^ cmd ^ "}%\n" ^ - Symbol.strip_blanks txt ^ - "%\n\\end{isamarkup" ^ cmd ^ "}%\n"; - -fun output_verbatim txt = "%\n" ^ Symbol.strip_blanks txt ^ "\n"; - -val markup_true = "\\isamarkuptrue%\n"; -val markup_false = "\\isamarkupfalse%\n"; - -val begin_delim = enclose "%\n\\isadelim" "\n"; -val end_delim = enclose "%\n\\endisadelim" "\n"; -val begin_tag = enclose "%\n\\isatag" "\n"; -fun end_tag tg = enclose "%\n\\endisatag" "\n" tg ^ enclose "{\\isafold" "}%\n" tg; - - -(* theory presentation *) - -val tex_trailer = - "%%% Local Variables:\n\ - \%%% mode: latex\n\ - \%%% TeX-master: \"root\"\n\ - \%%% End:\n"; - -fun isabelle_file name txt = - "%\n\\begin{isabellebody}%\n\ - \\\def\\isabellecontext{" ^ output_syms name ^ "}%\n" ^ txt ^ - "\\end{isabellebody}%\n" ^ tex_trailer; - -fun symbol_source known name syms = isabelle_file name - ("\\isamarkupheader{" ^ output_known_symbols known (Symbol.explode name) ^ "}%\n" ^ - output_known_symbols known syms); - -fun theory_entry name = "\\input{" ^ name ^ ".tex}\n\n"; - - -(* print mode *) - -val latexN = "latex"; -val modes = [latexN, Symbol.xsymbolsN]; - -fun latex_output str = - let val syms = Symbol.explode str - in (output_symbols syms, Symbol.length syms) end; - -fun latex_markup (s, _) = - if s = Markup.commandN orelse s = Markup.keyword1N then ("\\isacommand{", "}") - else if s = Markup.keyword2N then ("\\isakeyword{", "}") - else Markup.no_output; - -fun latex_indent "" _ = "" - | latex_indent s _ = enclose "\\isaindent{" "}" s; - -val _ = Output.add_mode latexN latex_output Symbol.encode_raw; -val _ = Markup.add_mode latexN latex_markup; -val _ = Pretty.add_mode latexN latex_indent; - -end; diff --git a/core/Pure/Thy/present.ML b/core/Pure/Thy/present.ML deleted file mode 100644 index 83c8eb15..00000000 --- a/core/Pure/Thy/present.ML +++ /dev/null @@ -1,460 +0,0 @@ -(* Title: Pure/Thy/present.ML - Author: Markus Wenzel and Stefan Berghofer, TU Muenchen - -Theory presentation: HTML, graph files, (PDF)LaTeX documents. 
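The structure below keeps the per-session browser_info (theory sources, HTML
and LaTeX indexes, theory graph) in unsynchronized references that are filled
by init, begin_theory and theory_output and written out by finish, which is
why init and finish are marked as not thread-safe.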
-*) - -signature PRESENT = -sig - val session_name: theory -> string - val document_enabled: string -> bool - val document_variants: string -> (string * string) list - val init: bool -> bool -> Path.T -> string -> bool -> string -> (string * string) list -> - (Path.T * Path.T) list -> string * string -> bool -> theory list -> unit (*not thread-safe!*) - val finish: unit -> unit (*not thread-safe!*) - val theory_output: string -> string -> unit - val begin_theory: int -> (unit -> HTML.text) -> theory -> theory - val display_drafts: Path.T list -> int -end; - -structure Present: PRESENT = -struct - - -(** paths **) - -val tex_ext = Path.ext "tex"; -val tex_path = tex_ext o Path.basic; -val html_ext = Path.ext "html"; -val html_path = html_ext o Path.basic; -val index_path = Path.basic "index.html"; -val readme_html_path = Path.basic "README.html"; -val documentN = "document"; -val document_path = Path.basic documentN; -val doc_indexN = "session"; -val graph_path = Path.basic "session.graph"; -val graph_pdf_path = Path.basic "session_graph.pdf"; -val graph_eps_path = Path.basic "session_graph.eps"; - -fun show_path path = Path.implode (Path.expand (Path.append (File.pwd ()) path)); - - - -(** additional theory data **) - -structure Browser_Info = Theory_Data -( - type T = {chapter: string, name: string}; - val empty = {chapter = "Unsorted", name = "Unknown"}: T; - fun extend _ = empty; - fun merge _ = empty; -); - -val _ = Theory.setup - (Browser_Info.put {chapter = Context.PureN, name = Context.PureN}); - -val session_name = #name o Browser_Info.get; -val session_chapter_name = (fn {chapter, name} => [chapter, name]) o Browser_Info.get; - - - -(** graphs **) - -fun ID_of sess s = space_implode "/" (sess @ [s]); -fun ID_of_thy thy = ID_of (session_chapter_name thy) (Context.theory_name thy); - -fun theory_link (curr_chapter, curr_session) thy = - let - val {chapter, name = session} = Browser_Info.get thy; - val link = html_path (Context.theory_name thy); - in - if curr_session = session then SOME link - else if curr_chapter = chapter then - SOME (Path.appends [Path.parent, Path.basic session, link]) - else if chapter = Context.PureN then NONE - else SOME (Path.appends [Path.parent, Path.parent, Path.basic chapter, Path.basic session, link]) - end; - -(*retrieve graph data from initial collection of theories*) -fun init_graph (curr_chapter, curr_session) = rev o map (fn thy => - let - val {chapter, name = session_name} = Browser_Info.get thy; - val thy_name = Context.theory_name thy; - val path = - (case theory_link (curr_chapter, curr_session) thy of - NONE => "" - | SOME p => Path.implode p); - val entry = - {name = thy_name, - ID = ID_of [chapter, session_name] thy_name, - dir = session_name, - path = path, - unfold = false, - parents = map ID_of_thy (Theory.parents_of thy), - content = []}; - in (0, entry) end); - -fun ins_graph_entry (i, entry as {ID, ...}) (gr: (int * Graph_Display.node) list) = - (i, entry) :: filter_out (fn (_, entry') => #ID entry' = ID) gr; - - - -(** global browser info state **) - -(* type theory_info *) - -type theory_info = {tex_source: string, html_source: string}; - -fun make_theory_info (tex_source, html_source) = - {tex_source = tex_source, html_source = html_source}: theory_info; - -fun map_theory_info f {tex_source, html_source} = - make_theory_info (f (tex_source, html_source)); - - -(* type browser_info *) - -type browser_info = - {theories: theory_info Symtab.table, - tex_index: (int * string) list, - html_index: (int * string) list, - graph: (int * 
Graph_Display.node) list}; - -fun make_browser_info (theories, tex_index, html_index, graph) : browser_info = - {theories = theories, tex_index = tex_index, html_index = html_index, graph = graph}; - -val empty_browser_info = make_browser_info (Symtab.empty, [], [], []); - -fun init_browser_info session thys = - make_browser_info (Symtab.empty, [], [], init_graph session thys); - -fun map_browser_info f {theories, tex_index, html_index, graph} = - make_browser_info (f (theories, tex_index, html_index, graph)); - - -(* state *) - -val browser_info = Unsynchronized.ref empty_browser_info; -fun change_browser_info f = - CRITICAL (fn () => Unsynchronized.change browser_info (map_browser_info f)); - -fun init_theory_info name info = - change_browser_info (fn (theories, tex_index, html_index, graph) => - (Symtab.update (name, info) theories, tex_index, html_index, graph)); - -fun change_theory_info name f = - change_browser_info (fn (theories, tex_index, html_index, graph) => - (case Symtab.lookup theories name of - NONE => error ("Browser info: cannot access theory document " ^ quote name) - | SOME info => - (Symtab.update (name, map_theory_info f info) theories, tex_index, html_index, graph))); - - -fun add_tex_index txt = - change_browser_info (fn (theories, tex_index, html_index, graph) => - (theories, txt :: tex_index, html_index, graph)); - -fun add_html_index txt = - change_browser_info (fn (theories, tex_index, html_index, graph) => - (theories, tex_index, txt :: html_index, graph)); - -fun add_graph_entry entry = - change_browser_info (fn (theories, tex_index, html_index, graph) => - (theories, tex_index, html_index, ins_graph_entry entry graph)); - - - -(** global session state **) - -(* session_info *) - -type session_info = - {name: string, chapter: string, info_path: Path.T, info: bool, - doc_format: string, doc_graph: bool, doc_output: Path.T option, - doc_files: (Path.T * Path.T) list, documents: (string * string) list, - verbose: bool, readme: Path.T option}; - -fun make_session_info - (name, chapter, info_path, info, doc_format, doc_graph, doc_output, - doc_files, documents, verbose, readme) = - {name = name, chapter = chapter, info_path = info_path, info = info, - doc_format = doc_format, doc_graph = doc_graph, doc_output = doc_output, - doc_files = doc_files, documents = documents, verbose = verbose, - readme = readme}: session_info; - - -(* state *) - -val session_info = Unsynchronized.ref (NONE: session_info option); - -fun with_session_info x f = (case ! 
session_info of NONE => x | SOME info => f info); - - - -(** document preparation **) - -(* options *) - -fun document_enabled s = s <> "" andalso s <> "false"; - -fun document_variants str = - let - fun read_variant s = - (case space_explode "=" s of - [name] => (name, "") - | [name, tags] => (name, tags) - | _ => error ("Malformed document variant specification: " ^ quote s)); - val variants = map read_variant (space_explode ":" str); - val _ = - (case duplicates (op =) (map #1 variants) of - [] => () - | dups => error ("Duplicate document variants: " ^ commas_quote dups)); - in variants end; - - -(* init session *) - -fun init build info info_path doc doc_graph document_output doc_variants doc_files - (chapter, name) verbose thys = - if not build andalso not info andalso doc = "" then - (browser_info := empty_browser_info; session_info := NONE) - else - let - val doc_output = - if document_output = "" then NONE else SOME (Path.explode document_output); - - val documents = - if doc = "" then [] - else if null doc_files andalso not (can File.check_dir document_path) then - (if verbose then Output.physical_stderr "Warning: missing document directory\n" - else (); []) - else doc_variants; - - val readme = if File.exists readme_html_path then SOME readme_html_path else NONE; - - val docs = - (case readme of NONE => [] | SOME p => [(Url.File p, "README")]) @ - map (fn (name, _) => (Url.File (Path.ext doc (Path.basic name)), name)) documents; - in - session_info := - SOME (make_session_info (name, chapter, info_path, info, doc, - doc_graph, doc_output, doc_files, documents, verbose, readme)); - browser_info := init_browser_info (chapter, name) thys; - add_html_index (0, HTML.begin_session_index name docs (Url.explode "medium.html")) - end; - - -(* isabelle tool wrappers *) - -fun isabelle_document {verbose, purge} format name tags dir = - let - val s = "\"$ISABELLE_TOOL\" document" ^ (if purge then " -c" else "") ^ " -o '" ^ format ^ "' \ - \-n '" ^ name ^ "' -t '" ^ tags ^ "' " ^ File.shell_path dir ^ " 2>&1"; - val doc_path = Path.appends [dir, Path.parent, Path.basic name |> Path.ext format]; - val _ = if verbose then writeln s else (); - val (out, rc) = Isabelle_System.bash_output s; - val _ = - if not (File.exists doc_path) orelse rc <> 0 then - cat_error out ("Failed to build document " ^ quote (show_path doc_path)) - else if verbose then writeln out - else (); - in doc_path end; - -fun isabelle_browser graph = Isabelle_System.with_tmp_dir "browser" (fn dir => - let - val pdf_path = Path.append dir graph_pdf_path; - val eps_path = Path.append dir graph_eps_path; - val graph_path = Path.append dir graph_path; - val _ = Graph_Display.write_graph_browser graph_path graph; - val args = "-o " ^ File.shell_path pdf_path ^ " " ^ File.shell_path graph_path; - in - if Isabelle_System.isabelle_tool "browser" args = 0 andalso - File.exists pdf_path andalso File.exists eps_path - then (File.read pdf_path, File.read eps_path) - else error "Failed to prepare dependency graph" - end); - - -(* finish session -- output all generated text *) - -fun sorted_index index = map snd (sort (int_ord o pairself fst) (rev index)); -fun index_buffer index = Buffer.add (implode (sorted_index index)) Buffer.empty; - -fun write_tex src name path = - File.write_buffer (Path.append path (tex_path name)) src; - -fun write_tex_index tex_index path = - write_tex (index_buffer tex_index |> Buffer.add Latex.tex_trailer) doc_indexN path; - -fun finish () = - with_session_info () (fn {name, chapter, info, info_path, doc_format, 
doc_graph, - doc_output, doc_files, documents, verbose, readme, ...} => - let - val {theories, tex_index, html_index, graph} = ! browser_info; - val thys = Symtab.dest theories; - - val chapter_prefix = Path.append info_path (Path.basic chapter); - val session_prefix = Path.append chapter_prefix (Path.basic name); - - fun finish_html (a, {html_source, ...}: theory_info) = - File.write (Path.append session_prefix (html_path a)) html_source; - - val sorted_graph = sorted_index graph; - val opt_graphs = - if doc_graph andalso not (null documents) then - SOME (isabelle_browser sorted_graph) - else NONE; - - val _ = - if info then - (Isabelle_System.mkdirs session_prefix; - File.write_buffer (Path.append session_prefix index_path) - (index_buffer html_index |> Buffer.add HTML.end_document); - (case readme of NONE => () | SOME path => Isabelle_System.copy_file path session_prefix); - Graph_Display.write_graph_browser (Path.append session_prefix graph_path) sorted_graph; - Isabelle_System.isabelle_tool "browser" "-b"; - Isabelle_System.copy_file (Path.explode "~~/lib/browser/GraphBrowser.jar") session_prefix; - List.app (fn (a, txt) => File.write (Path.append session_prefix (Path.basic a)) txt) - (HTML.applet_pages name (Url.File index_path, name)); - Isabelle_System.copy_file (Path.explode "~~/etc/isabelle.css") session_prefix; - List.app finish_html thys; - if verbose - then Output.physical_stderr ("Browser info at " ^ show_path session_prefix ^ "\n") - else ()) - else (); - - fun document_job doc_prefix backdrop (doc_name, tags) = - let - val doc_dir = Path.append doc_prefix (Path.basic doc_name); - val _ = Isabelle_System.mkdirs doc_dir; - val _ = - Isabelle_System.isabelle_tool "latex" - ("-o sty " ^ File.shell_path (Path.append doc_dir (Path.basic "root.tex"))); - val _ = - if null doc_files then Isabelle_System.copy_dir document_path doc_dir - else List.app (fn file => Isabelle_System.copy_file_base file doc_dir) doc_files; - val _ = - (case opt_graphs of - NONE => () - | SOME (pdf, eps) => - (File.write (Path.append doc_dir graph_pdf_path) pdf; - File.write (Path.append doc_dir graph_eps_path) eps)); - val _ = write_tex_index tex_index doc_dir; - val _ = - List.app (fn (a, {tex_source, ...}) => - write_tex (Buffer.add tex_source Buffer.empty) a doc_dir) thys; - in - fn () => - (isabelle_document {verbose = true, purge = backdrop} doc_format doc_name tags doc_dir, - fn doc => - if verbose orelse not backdrop then - Output.physical_stderr ("Document at " ^ show_path doc ^ "\n") - else ()) - end; - - val jobs = - (if info orelse is_none doc_output then - map (document_job session_prefix true) documents - else []) @ - (case doc_output of - NONE => [] - | SOME path => map (document_job path false) documents); - - val _ = - if not (null jobs) andalso null doc_files then - Output.physical_stderr ("### Legacy feature! 
Document preparation for session " ^ quote name ^ - " without 'document_files'\n") - else (); - - val _ = jobs |> Par_List.map (fn job => job ()) |> List.app (op |>); - in - browser_info := empty_browser_info; - session_info := NONE - end); - - -(* theory elements *) - -fun theory_output name s = - with_session_info () (fn _ => - change_theory_info name (fn (_, html_source) => (Latex.isabelle_file name s, html_source))); - -fun begin_theory update_time mk_text thy = - with_session_info thy (fn {name = session_name, chapter, ...} => - let - val name = Context.theory_name thy; - val parents = Theory.parents_of thy; - - val parent_specs = parents |> map (fn parent => - (Option.map Url.File (theory_link (chapter, session_name) parent), - (Context.theory_name parent))); - val html_source = HTML.theory name parent_specs (mk_text ()); - - val graph_entry = - {name = name, - ID = ID_of [chapter, session_name] name, - dir = session_name, - unfold = true, - path = Path.implode (html_path name), - parents = map ID_of_thy parents, - content = []}; - in - init_theory_info name (make_theory_info ("", html_source)); - add_graph_entry (update_time, graph_entry); - add_html_index (update_time, HTML.theory_entry (Url.File (html_path name), name)); - add_tex_index (update_time, Latex.theory_entry name); - Browser_Info.put {chapter = chapter, name = session_name} thy - end); - - - -(** draft document output **) - -fun display_drafts src_paths = Isabelle_System.with_tmp_dir "drafts" (fn dir => - let - fun prep_draft path i = - let - val base = Path.base path; - val name = - (case Path.implode (#1 (Path.split_ext base)) of - "" => "DUMMY" - | s => s) ^ serial_string (); - in - if File.exists path then - (((name, base, File.read path), (i, Latex.theory_entry name)), i + 1) - else error ("Bad file: " ^ Path.print path) - end; - val (srcs, tex_index) = split_list (fst (fold_map prep_draft src_paths 0)); - - val doc_path = Path.append dir document_path; - val _ = Isabelle_System.mkdirs doc_path; - val root_path = Path.append doc_path (Path.basic "root.tex"); - val _ = Isabelle_System.copy_file (Path.explode "~~/lib/texinputs/draft.tex") root_path; - val _ = Isabelle_System.isabelle_tool "latex" ("-o sty " ^ File.shell_path root_path); - val _ = Isabelle_System.isabelle_tool "latex" ("-o syms " ^ File.shell_path root_path); - - fun known name = - let val ss = split_lines (File.read (Path.append doc_path (Path.basic name))) - in member (op =) ss end; - val known_syms = known "syms.lst"; - val known_ctrls = known "ctrls.lst"; - - val _ = srcs |> List.app (fn (name, base, txt) => - Symbol.explode txt - |> Latex.symbol_source (known_syms, known_ctrls) (Path.implode base) - |> File.write (Path.append doc_path (tex_path name))); - val _ = write_tex_index tex_index doc_path; - - val result = - isabelle_document {verbose = false, purge = true} "pdf" documentN "" doc_path; - - val target_dir = Path.explode "$ISABELLE_HOME_USER/tmp"; - val target = Path.explode "$ISABELLE_HOME_USER/tmp/drafts.pdf" - val _ = Isabelle_System.mkdirs target_dir; - val _ = Isabelle_System.copy_file result target; - in - Isabelle_System.isabelle_tool "display" (File.shell_path target ^ " &") - end); - -end; - diff --git a/core/Pure/Thy/present.scala b/core/Pure/Thy/present.scala deleted file mode 100644 index 8fdeb928..00000000 --- a/core/Pure/Thy/present.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* Title: Pure/Thy/present.scala - Author: Makarius - -Theory presentation: HTML. 
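The object below maintains a per-chapter ".sessions" file and regenerates the
chapter index.html from it.  Illustrative sketch (not from the original file;
the path and session entries are assumptions):

  isabelle.Present.update_chapter_index(
    isabelle.Path.explode("browser_info"),
    "Unsorted",
    List(("Quantomatic", "example session description")))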
-*/ - -package isabelle - - -import scala.collection.immutable.SortedMap - - -object Present -{ - /* maintain chapter index -- NOT thread-safe */ - - private val index_path = Path.basic("index.html") - private val sessions_path = Path.basic(".sessions") - - private def read_sessions(dir: Path): List[(String, String)] = - { - val path = dir + sessions_path - if (path.is_file) { - import XML.Decode._ - list(pair(string, string))(YXML.parse_body(File.read(path))) - } - else Nil - } - - private def write_sessions(dir: Path, sessions: List[(String, String)]) - { - import XML.Encode._ - File.write(dir + sessions_path, YXML.string_of_body(list(pair(string, string))(sessions))) - } - - def update_chapter_index(info_path: Path, chapter: String, new_sessions: List[(String, String)]) - { - val dir = info_path + Path.basic(chapter) - Isabelle_System.mkdirs(dir) - - val sessions0 = - try { read_sessions(dir) } - catch { case _: XML.Error => Nil } - - val sessions = (SortedMap.empty[String, String] ++ sessions0 ++ new_sessions).toList - - write_sessions(dir, sessions) - File.write(dir + index_path, HTML.chapter_index(chapter, sessions)) - } -} - diff --git a/core/Pure/Thy/term_style.ML b/core/Pure/Thy/term_style.ML deleted file mode 100644 index 5fe5ca04..00000000 --- a/core/Pure/Thy/term_style.ML +++ /dev/null @@ -1,93 +0,0 @@ -(* Title: Pure/Thy/term_style.ML - Author: Florian Haftmann, TU Muenchen - -Styles for term printing. -*) - -signature TERM_STYLE = -sig - val setup: binding -> (Proof.context -> term -> term) parser -> theory -> theory - val parse: (term -> term) context_parser -end; - -structure Term_Style: TERM_STYLE = -struct - -(* theory data *) - -structure Data = Theory_Data -( - type T = (Proof.context -> term -> term) parser Name_Space.table; - val empty : T = Name_Space.empty_table "antiquotation_style"; - val extend = I; - fun merge data : T = Name_Space.merge_tables data; -); - -val get_data = Data.get o Proof_Context.theory_of; -val get_style = Name_Space.get o get_data; - -fun setup binding style thy = - Data.map (#2 o Name_Space.define (Context.Theory thy) true (binding, style)) thy; - - -(* style parsing *) - -fun parse_single ctxt = - Parse.position Parse.xname -- Parse.args >> (fn (name, args) => - let - val (src, parse) = Args.check_src ctxt (get_data ctxt) (Args.src name args); - val (f, _) = Args.syntax (Scan.lift parse) src ctxt; - in f ctxt end); - -val parse = Args.context :|-- (fn ctxt => Scan.lift - (Args.parens (parse_single ctxt ::: Scan.repeat (Args.$$$ "," |-- parse_single ctxt)) - >> fold I - || Scan.succeed I)); - - -(* predefined styles *) - -fun style_lhs_rhs proj = Scan.succeed (fn ctxt => fn t => - let - val concl = - Object_Logic.drop_judgment (Proof_Context.theory_of ctxt) (Logic.strip_imp_concl t) - in - (case concl of - _ $ l $ r => proj (l, r) - | _ => error ("Binary operator expected in term: " ^ Syntax.string_of_term ctxt concl)) - end); - -val style_prem = Parse.nat >> (fn i => fn ctxt => fn t => - let - val prems = Logic.strip_imp_prems t; - in - if i <= length prems then nth prems (i - 1) - else - error ("Not enough premises for prem " ^ string_of_int i ^ - " in propositon: " ^ Syntax.string_of_term ctxt t) - end); - -fun sub_symbols (d :: s :: ss) = - if Symbol.is_ascii_digit d andalso not (String.isPrefix ("\<^") s) - then d :: "\<^sub>" :: sub_symbols (s :: ss) - else d :: s :: ss - | sub_symbols cs = cs; - -val sub_name = implode o rev o sub_symbols o rev o Symbol.explode; - -fun sub_term (Free (n, T)) = Free (sub_name n, T) - | sub_term (Var 
((n, idx), T)) = - if idx <> 0 then Var ((sub_name (n ^ string_of_int idx), 0), T) - else Var ((sub_name n, 0), T) - | sub_term (t $ u) = sub_term t $ sub_term u - | sub_term (Abs (n, T, b)) = Abs (sub_name n, T, sub_term b) - | sub_term t = t; - -val _ = Theory.setup - (setup @{binding lhs} (style_lhs_rhs fst) #> - setup @{binding rhs} (style_lhs_rhs snd) #> - setup @{binding prem} style_prem #> - setup @{binding concl} (Scan.succeed (K Logic.strip_imp_concl)) #> - setup @{binding sub} (Scan.succeed (K sub_term))); - -end; diff --git a/core/Pure/Thy/thm_deps.ML b/core/Pure/Thy/thm_deps.ML deleted file mode 100644 index 6e67dc77..00000000 --- a/core/Pure/Thy/thm_deps.ML +++ /dev/null @@ -1,99 +0,0 @@ -(* Title: Pure/Thy/thm_deps.ML - Author: Stefan Berghofer, TU Muenchen - -Visualize dependencies of theorems. -*) - -signature THM_DEPS = -sig - val thm_deps: theory -> thm list -> unit - val unused_thms: theory list * theory list -> (string * thm) list -end; - -structure Thm_Deps: THM_DEPS = -struct - -(* thm_deps *) - -fun thm_deps thy thms = - let - fun add_dep ("", _, _) = I - | add_dep (name, _, PBody {thms = thms', ...}) = - let - val prefix = #1 (split_last (Long_Name.explode name)); - val session = - (case prefix of - a :: _ => - (case try (Context.get_theory thy) a of - SOME thy => - (case Present.session_name thy of - "" => [] - | session => [session]) - | NONE => []) - | _ => ["global"]); - val parents = filter_out (fn s => s = "") (map (#1 o #2) thms'); - val entry = - {name = Long_Name.base_name name, - ID = name, - dir = space_implode "/" (session @ prefix), - unfold = false, - path = "", - parents = parents, - content = []}; - in cons entry end; - val deps = Proofterm.fold_body_thms add_dep (Thm.proof_bodies_of thms) []; - in Graph_Display.display_graph (sort_wrt #ID deps) end; - - -(* unused_thms *) - -fun unused_thms (base_thys, thys) = - let - fun add_fact space (name, ths) = - if exists (fn thy => Global_Theory.defined_fact thy name) base_thys then I - else - let val {concealed, group, ...} = Name_Space.the_entry space name in - fold_rev (fn th => - (case Thm.derivation_name th of - "" => I - | a => cons (a, (th, concealed, group)))) ths - end; - fun add_facts facts = Facts.fold_static (add_fact (Facts.space_of facts)) facts; - - val new_thms = - fold (add_facts o Global_Theory.facts_of) thys [] - |> sort_distinct (string_ord o pairself #1); - - val used = - Proofterm.fold_body_thms - (fn (a, _, _) => a <> "" ? 
Symtab.update (a, ())) - (map Proofterm.strip_thm (Thm.proof_bodies_of (map (#1 o #2) new_thms))) - Symtab.empty; - - fun is_unused a = not (Symtab.defined used a); - - (* groups containing at least one used theorem *) - val used_groups = fold (fn (a, (_, _, group)) => - if is_unused a then I - else - (case group of - NONE => I - | SOME grp => Inttab.update (grp, ()))) new_thms Inttab.empty; - - val (thms', _) = fold (fn (a, (th, concealed, group)) => fn q as (thms, seen_groups) => - if not concealed andalso - (* FIXME replace by robust treatment of thm groups *) - member (op =) [Thm.theoremK, Thm.lemmaK, Thm.corollaryK] (Thm.legacy_get_kind th) andalso - is_unused a - then - (case group of - NONE => ((a, th) :: thms, seen_groups) - | SOME grp => - if Inttab.defined used_groups grp orelse - Inttab.defined seen_groups grp then q - else ((a, th) :: thms, Inttab.update (grp, ()) seen_groups)) - else q) new_thms ([], Inttab.empty); - in rev thms' end; - -end; - diff --git a/core/Pure/Thy/thy_header.ML b/core/Pure/Thy/thy_header.ML deleted file mode 100644 index f4ad24ab..00000000 --- a/core/Pure/Thy/thy_header.ML +++ /dev/null @@ -1,148 +0,0 @@ -(* Title: Pure/Thy/thy_header.ML - Author: Makarius - -Static theory header information. -*) - -signature THY_HEADER = -sig - type keywords = (string * Keyword.spec option) list - type header = - {name: string * Position.T, - imports: (string * Position.T) list, - keywords: keywords} - val make: string * Position.T -> (string * Position.T) list -> keywords -> header - val define_keywords: header -> unit - val declare_keyword: string * Keyword.spec option -> theory -> theory - val the_keyword: theory -> string -> Keyword.spec option - val args: header parser - val read: Position.T -> string -> header - val read_tokens: Token.T list -> header -end; - -structure Thy_Header: THY_HEADER = -struct - -type keywords = (string * Keyword.spec option) list; - -type header = - {name: string * Position.T, - imports: (string * Position.T) list, - keywords: keywords}; - -fun make name imports keywords : header = - {name = name, imports = imports, keywords = keywords}; - - - -(** keyword declarations **) - -fun define_keywords ({keywords, ...}: header) = - List.app (Keyword.define o apsnd (Option.map Keyword.spec)) keywords; - -fun err_dup name = error ("Duplicate declaration of outer syntax keyword " ^ quote name); - -structure Data = Theory_Data -( - type T = Keyword.spec option Symtab.table; - val empty = Symtab.empty; - val extend = I; - fun merge data : T = Symtab.merge (op =) data handle Symtab.DUP name => err_dup name; -); - -fun declare_keyword (name, spec) = - Data.map (fn data => - (Option.map Keyword.spec spec; - Symtab.update_new (name, spec) data handle Symtab.DUP dup => err_dup dup)); - -fun the_keyword thy name = - (case Symtab.lookup (Data.get thy) name of - SOME spec => spec - | NONE => error ("Undeclared outer syntax keyword " ^ quote name)); - - - -(** concrete syntax **) - -(* header keywords *) - -val headerN = "header"; -val theoryN = "theory"; -val importsN = "imports"; -val keywordsN = "keywords"; -val beginN = "begin"; - -val header_lexicons = - pairself (Scan.make_lexicon o map Symbol.explode) - (["%", "(", ")", ",", "::", ";", "==", "and", beginN, importsN, keywordsN], - [headerN, theoryN]); - - -(* header args *) - -local - -val theory_name = Parse.group (fn () => "theory name") (Parse.position Parse.name); -val theory_xname = Parse.group (fn () => "theory name reference") (Parse.position Parse.xname); - -val imports = Parse.$$$ importsN 
|-- Parse.!!! (Scan.repeat1 theory_xname); - -val opt_files = - Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Parse.list1 Parse.name) --| Parse.$$$ ")") []; - -val keyword_spec = - Parse.group (fn () => "outer syntax keyword specification") - (Parse.name -- opt_files -- Parse.tags); - -val keyword_compl = - Parse.group (fn () => "outer syntax keyword completion") Parse.name; - -val keyword_decl = - Scan.repeat1 Parse.string -- - Scan.option (Parse.$$$ "::" |-- Parse.!!! keyword_spec) -- - Scan.option (Parse.$$$ "==" |-- Parse.!!! keyword_compl) - >> (fn ((names, spec), _) => map (rpair spec) names); - -val keyword_decls = Parse.and_list1 keyword_decl >> flat; - -in - -val args = - theory_name :|-- (fn (name, pos) => - (if name = Context.PureN then Scan.succeed [] else imports) -- - Scan.optional (Parse.$$$ keywordsN |-- Parse.!!! keyword_decls) [] --| - Parse.$$$ beginN >> (fn (imports, keywords) => make (name, pos) imports keywords)); - -end; - - -(* read header *) - -val header = - (Parse.command_name headerN -- Parse.tags) |-- - (Parse.!!! (Parse.document_source -- Scan.repeat Parse.semicolon -- - (Parse.command_name theoryN -- Parse.tags) |-- args)) || - (Parse.command_name theoryN -- Parse.tags) |-- Parse.!!! args; - -fun token_source pos str = - str - |> Source.of_string_limited 8000 - |> Symbol.source - |> Token.source {do_recover = NONE} (K header_lexicons) pos; - -fun read_source pos source = - let val res = - source - |> Token.source_proper - |> Source.source Token.stopper (Scan.single (Scan.error (Parse.!!! header))) NONE - |> Source.get_single; - in - (case res of - SOME (h, _) => h - | NONE => error ("Unexpected end of input" ^ Position.here pos)) - end; - -fun read pos str = read_source pos (token_source pos str); -fun read_tokens toks = read_source Position.none (Source.of_list toks); - -end; diff --git a/core/Pure/Thy/thy_header.scala b/core/Pure/Thy/thy_header.scala deleted file mode 100644 index 9aeadf6b..00000000 --- a/core/Pure/Thy/thy_header.scala +++ /dev/null @@ -1,134 +0,0 @@ -/* Title: Pure/Thy/thy_header.scala - Author: Makarius - -Static theory header information. -*/ - -package isabelle - - -import scala.annotation.tailrec -import scala.collection.mutable -import scala.util.parsing.input.{Reader, CharSequenceReader} -import scala.util.matching.Regex - - -object Thy_Header extends Parse.Parser -{ - val HEADER = "header" - val THEORY = "theory" - val IMPORTS = "imports" - val KEYWORDS = "keywords" - val AND = "and" - val BEGIN = "begin" - - private val lexicon = - Scan.Lexicon("%", "(", ")", ",", "::", ";", "==", - AND, BEGIN, HEADER, IMPORTS, KEYWORDS, THEORY) - - - /* theory file name */ - - private val Base_Name = new Regex(""".*?([^/\\:]+)""") - private val Thy_Name = new Regex(""".*?([^/\\:]+)\.thy""") - - def base_name(s: String): String = - s match { case Base_Name(name) => name case _ => error("Malformed import: " + quote(s)) } - - def thy_name(s: String): Option[String] = - s match { case Thy_Name(name) => Some(name) case _ => None } - - - /* header */ - - val header: Parser[Thy_Header] = - { - val file_name = atom("file name", _.is_name) - - val opt_files = - keyword("(") ~! (rep1sep(name, keyword(",")) <~ keyword(")")) ^^ { case _ ~ x => x } | - success(Nil) - val keyword_spec = - atom("outer syntax keyword specification", _.is_name) ~ opt_files ~ tags ^^ - { case x ~ y ~ z => ((x, y), z) } - - val keyword_decl = - rep1(string) ~ - opt(keyword("::") ~! keyword_spec ^^ { case _ ~ x => x }) ~ - opt(keyword("==") ~! 
name ^^ { case _ ~ x => x }) ^^ - { case xs ~ y ~ z => xs.map((_, y, z)) } - val keyword_decls = - keyword_decl ~ rep(keyword(AND) ~! keyword_decl ^^ { case _ ~ x => x }) ^^ - { case xs ~ yss => (xs :: yss).flatten } - - val file = - keyword("(") ~! (file_name ~ keyword(")")) ^^ { case _ ~ (x ~ _) => (x, false) } | - file_name ^^ (x => (x, true)) - - val args = - theory_name ~ - (opt(keyword(IMPORTS) ~! (rep1(theory_xname))) ^^ - { case None => Nil case Some(_ ~ xs) => xs }) ~ - (opt(keyword(KEYWORDS) ~! keyword_decls) ^^ - { case None => Nil case Some(_ ~ xs) => xs }) ~ - keyword(BEGIN) ^^ - { case x ~ ys ~ zs ~ _ => Thy_Header(x, ys, zs) } - - (keyword(HEADER) ~ tags) ~! - ((document_source ~ rep(keyword(";")) ~ keyword(THEORY) ~ tags) ~> args) ^^ - { case _ ~ x => x } | - (keyword(THEORY) ~ tags) ~! args ^^ { case _ ~ x => x } - } - - - /* read -- lazy scanning */ - - def read(reader: Reader[Char]): Thy_Header = - { - val token = Token.Parsers.token(lexicon, _ => false) - val toks = new mutable.ListBuffer[Token] - - @tailrec def scan_to_begin(in: Reader[Char]) - { - token(in) match { - case Token.Parsers.Success(tok, rest) => - toks += tok - if (!tok.is_begin) scan_to_begin(rest) - case _ => - } - } - scan_to_begin(reader) - - parse(commit(header), Token.reader(toks.toList)) match { - case Success(result, _) => result - case bad => error(bad.toString) - } - } - - def read(source: CharSequence): Thy_Header = - read(new CharSequenceReader(source)) - - - /* keywords */ - - type Keywords = List[(String, Option[((String, List[String]), List[String])], Option[String])] -} - - -sealed case class Thy_Header( - name: String, - imports: List[String], - keywords: Thy_Header.Keywords) -{ - def map(f: String => String): Thy_Header = - Thy_Header(f(name), imports.map(f), keywords) - - def decode_symbols: Thy_Header = - { - val f = Symbol.decode _ - Thy_Header(f(name), imports.map(f), - keywords.map({ case (a, b, c) => - (f(a), b.map({ case ((x, y), z) => ((f(x), y.map(f)), z.map(f)) }), c.map(f)) })) - } -} - diff --git a/core/Pure/Thy/thy_info.ML b/core/Pure/Thy/thy_info.ML deleted file mode 100644 index 96ff5596..00000000 --- a/core/Pure/Thy/thy_info.ML +++ /dev/null @@ -1,417 +0,0 @@ -(* Title: Pure/Thy/thy_info.ML - Author: Markus Wenzel, TU Muenchen - -Global theory info database, with auto-loading according to theory and -file dependencies. 
-*) - -signature THY_INFO = -sig - datatype action = Update | Remove - val add_hook: (action -> string -> unit) -> unit - val get_names: unit -> string list - val lookup_theory: string -> theory option - val get_theory: string -> theory - val is_finished: string -> bool - val master_directory: string -> Path.T - val loaded_files: string -> Path.T list - val remove_thy: string -> unit - val kill_thy: string -> unit - val use_theories: - {document: bool, last_timing: Toplevel.transition -> Time.time option, master_dir: Path.T} -> - (string * Position.T) list -> unit - val use_thys: (string * Position.T) list -> unit - val use_thy: string * Position.T -> unit - val script_thy: Position.T -> string -> theory - val toplevel_begin_theory: Path.T -> Thy_Header.header -> theory - val register_thy: theory -> unit - val finish: unit -> unit -end; - -structure Thy_Info: THY_INFO = -struct - -(** theory loader actions and hooks **) - -datatype action = Update | Remove; - -local - val hooks = Synchronized.var "Thy_Info.hooks" ([]: (action -> string -> unit) list); -in - fun add_hook f = Synchronized.change hooks (cons f); - fun perform action name = - List.app (fn f => (try (fn () => f action name) (); ())) (Synchronized.value hooks); -end; - - - -(** thy database **) - -(* messages *) - -val show_path = space_implode " via " o map quote; - -fun cycle_msg names = "Cyclic dependency of " ^ show_path names; - - -(* derived graph operations *) - -fun add_deps name parents G = String_Graph.add_deps_acyclic (name, parents) G - handle String_Graph.CYCLES namess => error (cat_lines (map cycle_msg namess)); - -fun new_entry name parents entry = - String_Graph.new_node (name, entry) #> add_deps name parents; - - -(* thy database *) - -type deps = - {master: (Path.T * SHA1.digest), (*master dependencies for thy file*) - imports: (string * Position.T) list}; (*source specification of imports (partially qualified)*) - -fun make_deps master imports : deps = {master = master, imports = imports}; - -fun master_dir (d: deps option) = the_default Path.current (Option.map (Path.dir o #1 o #master) d); -fun base_name s = Path.implode (Path.base (Path.explode s)); - -local - val database = - Unsynchronized.ref (String_Graph.empty: (deps option * theory option) String_Graph.T); -in - fun get_thys () = ! 
database; - fun change_thys f = NAMED_CRITICAL "Thy_Info" (fn () => Unsynchronized.change database f); -end; - - -(* access thy graph *) - -fun thy_graph f x = f (get_thys ()) x; - -fun get_names () = String_Graph.topological_order (get_thys ()); - - -(* access thy *) - -fun lookup_thy name = - SOME (thy_graph String_Graph.get_node name) handle String_Graph.UNDEF _ => NONE; - -val known_thy = is_some o lookup_thy; - -fun get_thy name = - (case lookup_thy name of - SOME thy => thy - | NONE => error ("Theory loader: nothing known about theory " ^ quote name)); - - -(* access deps *) - -val lookup_deps = Option.map #1 o lookup_thy; -val get_deps = #1 o get_thy; - -val is_finished = is_none o get_deps; -val master_directory = master_dir o get_deps; - - -(* access theory *) - -fun lookup_theory name = - (case lookup_thy name of - SOME (_, SOME theory) => SOME theory - | _ => NONE); - -fun get_theory name = - (case lookup_theory name of - SOME theory => theory - | _ => error ("Theory loader: undefined entry for theory " ^ quote name)); - -val get_imports = Resources.imports_of o get_theory; - -(*Proof General legacy*) -fun loaded_files name = NAMED_CRITICAL "Thy_Info" (fn () => - (case get_deps name of - NONE => [] - | SOME {master = (thy_path, _), ...} => thy_path :: Resources.loaded_files (get_theory name))); - - - -(** thy operations **) - -(* main loader actions *) - -fun remove_thy name = NAMED_CRITICAL "Thy_Info" (fn () => - if is_finished name then error ("Cannot update finished theory " ^ quote name) - else - let - val succs = thy_graph String_Graph.all_succs [name]; - val _ = Output.urgent_message ("Theory loader: removing " ^ commas_quote succs); - val _ = List.app (perform Remove) succs; - val _ = change_thys (fold String_Graph.del_node succs); - in () end); - -fun kill_thy name = NAMED_CRITICAL "Thy_Info" (fn () => - if known_thy name then remove_thy name - else ()); - -fun update_thy deps theory = NAMED_CRITICAL "Thy_Info" (fn () => - let - val name = Context.theory_name theory; - val parents = map Context.theory_name (Theory.parents_of theory); - val _ = kill_thy name; - val _ = map get_theory parents; - val _ = change_thys (new_entry name parents (SOME deps, SOME theory)); - val _ = perform Update name; - in () end); - - -(* scheduling loader tasks *) - -datatype result = - Result of {theory: theory, exec_id: Document_ID.exec, - present: unit -> unit, commit: unit -> unit, weight: int}; - -fun theory_result theory = - Result {theory = theory, exec_id = Document_ID.none, present = I, commit = I, weight = 0}; - -fun result_theory (Result {theory, ...}) = theory; -fun result_present (Result {present, ...}) = present; -fun result_commit (Result {commit, ...}) = commit; -fun result_ord (Result {weight = i, ...}, Result {weight = j, ...}) = int_ord (j, i); - -fun join_theory (Result {theory, exec_id, ...}) = - let - (*toplevel proofs and diags*) - val _ = Future.join_tasks (maps Future.group_snapshot (Execution.peek exec_id)); - (*fully nested proofs*) - val res = Exn.capture Thm.join_theory_proofs theory; - in res :: map Exn.Exn (maps Task_Queue.group_status (Execution.peek exec_id)) end; - -datatype task = - Task of Path.T * string list * (theory list -> result) | - Finished of theory; - -fun task_finished (Task _) = false - | task_finished (Finished _) = true; - -fun task_parents deps (parents: string list) = map (the o AList.lookup (op =) deps) parents; - -local - -val schedule_seq = - String_Graph.schedule (fn deps => fn (_, task) => - (case task of - Task (_, parents, body) => - 
let - val result = body (task_parents deps parents); - val _ = Par_Exn.release_all (join_theory result); - val _ = result_present result (); - val _ = result_commit result (); - in result_theory result end - | Finished thy => thy)) #> ignore; - -val schedule_futures = uninterruptible (fn _ => fn tasks => - let - val futures = tasks - |> String_Graph.schedule (fn deps => fn (name, task) => - (case task of - Task (_, parents, body) => - (singleton o Future.forks) - {name = "theory:" ^ name, group = NONE, - deps = map (Future.task_of o #2) deps, pri = 0, interrupts = true} - (fn () => - (case filter (not o can Future.join o #2) deps of - [] => body (map (result_theory o Future.join) (task_parents deps parents)) - | bad => - error - ("Failed to load theory " ^ quote name ^ - " (unresolved " ^ commas_quote (map #1 bad) ^ ")"))) - | Finished theory => Future.value (theory_result theory))); - - val results1 = futures - |> maps (fn future => - (case Future.join_result future of - Exn.Res result => join_theory result - | Exn.Exn exn => [Exn.Exn exn])); - - val results2 = futures - |> map_filter (Exn.get_res o Future.join_result) - |> sort result_ord - |> Par_List.map (fn result => Exn.capture (result_present result) ()); - - (* FIXME more precise commit order (!?) *) - val results3 = futures - |> map (fn future => Exn.capture (fn () => result_commit (Future.join future) ()) ()); - - (* FIXME avoid global Execution.reset (!??) *) - val results4 = map Exn.Exn (maps Task_Queue.group_status (Execution.reset ())); - - val _ = Par_Exn.release_all (results1 @ results2 @ results3 @ results4); - in () end); - -in - -fun schedule_tasks tasks = - if not (Multithreading.enabled ()) then schedule_seq tasks - else if Multithreading.self_critical () then - (warning "Theory loader: no multithreading within critical section"; - schedule_seq tasks) - else schedule_futures tasks; - -end; - - -(* require_thy -- checking database entries wrt. 
the file-system *) - -local - -fun required_by _ [] = "" - | required_by s initiators = s ^ "(required by " ^ show_path (rev initiators) ^ ")"; - -fun load_thy document last_timing initiators update_time deps text (name, pos) keywords parents = - let - val _ = kill_thy name; - val _ = Output.urgent_message ("Loading theory " ^ quote name ^ required_by " " initiators); - val _ = Output.try_protocol_message (Markup.loading_theory name) []; - - val {master = (thy_path, _), imports} = deps; - val dir = Path.dir thy_path; - val header = Thy_Header.make (name, pos) imports keywords; - - val _ = Position.reports (map #2 imports ~~ map Theory.get_markup parents); - - val exec_id = Document_ID.make (); - val _ = - Execution.running Document_ID.none exec_id [] orelse - raise Fail ("Failed to register execution: " ^ Document_ID.print exec_id); - - val text_pos = Position.put_id (Document_ID.print exec_id) (Path.position thy_path); - val (theory, present, weight) = - Resources.load_thy document last_timing update_time dir header text_pos text - (if name = Context.PureN then [ML_Context.the_global_context ()] else parents); - fun commit () = update_thy deps theory; - in - Result {theory = theory, exec_id = exec_id, present = present, commit = commit, weight = weight} - end; - -fun check_deps dir name = - (case lookup_deps name of - SOME NONE => (true, NONE, Position.none, get_imports name, []) - | NONE => - let val {master, text, theory_pos, imports, keywords} = Resources.check_thy dir name - in (false, SOME (make_deps master imports, text), theory_pos, imports, keywords) end - | SOME (SOME {master, ...}) => - let - val {master = master', text = text', theory_pos = theory_pos', imports = imports', - keywords = keywords'} = Resources.check_thy dir name; - val deps' = SOME (make_deps master' imports', text'); - val current = - #2 master = #2 master' andalso - (case lookup_theory name of - NONE => false - | SOME theory => Resources.loaded_files_current theory); - in (current, deps', theory_pos', imports', keywords') end); - -in - -fun require_thys document last_timing initiators dir strs tasks = - fold_map (require_thy document last_timing initiators dir) strs tasks |>> forall I -and require_thy document last_timing initiators dir (str, require_pos) tasks = - let - val path = Path.expand (Path.explode str); - val name = Path.implode (Path.base path); - val node_name = File.full_path dir (Resources.thy_path path); - fun check_entry (Task (node_name', _, _)) = - if node_name = node_name' then () - else - error ("Incoherent imports for theory " ^ quote name ^ - Position.here require_pos ^ ":\n" ^ - " " ^ Path.print node_name ^ "\n" ^ - " " ^ Path.print node_name') - | check_entry _ = (); - in - (case try (String_Graph.get_node tasks) name of - SOME task => (check_entry task; (task_finished task, tasks)) - | NONE => - let - val dir' = Path.append dir (Path.dir path); - val _ = member (op =) initiators name andalso error (cycle_msg initiators); - - val (current, deps, theory_pos, imports, keywords) = check_deps dir' name - handle ERROR msg => - cat_error msg - ("The error(s) above occurred for theory " ^ quote name ^ - Position.here require_pos ^ required_by "\n" initiators); - - val parents = map (base_name o #1) imports; - val (parents_current, tasks') = - require_thys document last_timing (name :: initiators) - (Path.append dir (master_dir (Option.map #1 deps))) imports tasks; - - val all_current = current andalso parents_current; - val task = - if all_current then Finished (get_theory name) - else - (case 
deps of - NONE => raise Fail "Malformed deps" - | SOME (dep, text) => - let - val update_time = serial (); - val load = - load_thy document last_timing initiators update_time dep - text (name, theory_pos) keywords; - in Task (node_name, parents, load) end); - - val tasks'' = new_entry name parents task tasks'; - in (all_current, tasks'') end) - end; - -end; - - -(* use_thy *) - -fun use_theories {document, last_timing, master_dir} imports = - schedule_tasks (snd (require_thys document last_timing [] master_dir imports String_Graph.empty)); - -val use_thys = use_theories {document = false, last_timing = K NONE, master_dir = Path.current}; -val use_thy = use_thys o single; - - -(* toplevel scripting -- without maintaining database *) - -fun script_thy pos txt = - let - val trs = Outer_Syntax.parse pos txt; - val end_pos = if null trs then pos else Toplevel.pos_of (List.last trs); - val end_state = fold (Toplevel.command_exception true) trs Toplevel.toplevel; - in Toplevel.end_theory end_pos end_state end; - -fun toplevel_begin_theory master_dir (header: Thy_Header.header) = - let - val {name = (name, _), imports, ...} = header; - val _ = kill_thy name; - val _ = use_theories {document = false, last_timing = K NONE, master_dir = master_dir} imports; - val _ = Thy_Header.define_keywords header; - val parents = map (get_theory o base_name o fst) imports; - in Resources.begin_theory master_dir header parents end; - - -(* register theory *) - -fun register_thy theory = - let - val name = Context.theory_name theory; - val {master, ...} = Resources.check_thy (Resources.master_directory theory) name; - val imports = Resources.imports_of theory; - in - NAMED_CRITICAL "Thy_Info" (fn () => - (kill_thy name; - Output.urgent_message ("Registering theory " ^ quote name); - update_thy (make_deps master imports) theory)) - end; - - -(* finish all theories *) - -fun finish () = change_thys (String_Graph.map (fn _ => fn (_, entry) => (NONE, entry))); - -end; diff --git a/core/Pure/Thy/thy_info.scala b/core/Pure/Thy/thy_info.scala deleted file mode 100644 index dc8275f5..00000000 --- a/core/Pure/Thy/thy_info.scala +++ /dev/null @@ -1,133 +0,0 @@ -/* Title: Pure/Thy/thy_info.scala - Author: Makarius - -Theory and file dependencies. 
-*/ - -package isabelle - - -class Thy_Info(resources: Resources) -{ - /* messages */ - - private def show_path(names: List[Document.Node.Name]): String = - names.map(name => quote(name.theory)).mkString(" via ") - - private def cycle_msg(names: List[Document.Node.Name]): String = - "Cyclic dependency of " + show_path(names) - - private def required_by(initiators: List[Document.Node.Name]): String = - if (initiators.isEmpty) "" - else "\n(required by " + show_path(initiators.reverse) + ")" - - - /* dependencies */ - - sealed case class Dep( - name: Document.Node.Name, - header: Document.Node.Header) - { - def loaded_files(syntax: Prover.Syntax): List[String] = - { - val string = resources.with_thy_reader(name, reader => Symbol.decode(reader.source.toString)) - resources.loaded_files(syntax, string) - } - } - - object Dependencies - { - val empty = new Dependencies(Nil, Nil, Multi_Map.empty, Multi_Map.empty) - } - - final class Dependencies private( - rev_deps: List[Dep], - val keywords: Thy_Header.Keywords, - val seen_names: Multi_Map[String, Document.Node.Name], - val seen_positions: Multi_Map[String, Position.T]) - { - def :: (dep: Dep): Dependencies = - new Dependencies(dep :: rev_deps, dep.header.keywords ::: keywords, - seen_names, seen_positions) - - def + (thy: (Document.Node.Name, Position.T)): Dependencies = - { - val (name, pos) = thy - new Dependencies(rev_deps, keywords, - seen_names + (name.theory -> name), - seen_positions + (name.theory -> pos)) - } - - def deps: List[Dep] = rev_deps.reverse - - def errors: List[String] = - { - val header_errors = deps.flatMap(dep => dep.header.errors) - val import_errors = - (for { - (theory, names) <- seen_names.iterator_list - if !resources.loaded_theories(theory) - if names.length > 1 - } yield - "Incoherent imports for theory " + quote(theory) + ":\n" + - cat_lines(names.flatMap(name => - seen_positions.get_list(theory).map(pos => - " " + quote(name.node) + Position.here(pos)))) - ).toList - header_errors ::: import_errors - } - - lazy val syntax: Prover.Syntax = resources.base_syntax.add_keywords(keywords) - - def loaded_theories: Set[String] = - (resources.loaded_theories /: rev_deps) { case (loaded, dep) => loaded + dep.name.theory } - - def loaded_files: List[Path] = - { - val dep_files = - rev_deps.par.map(dep => - Exn.capture { - dep.loaded_files(syntax).map(a => Path.explode(dep.name.master_dir) + Path.explode(a)) - }).toList - ((Nil: List[Path]) /: dep_files) { - case (acc_files, files) => Exn.release(files) ::: acc_files - } - } - } - - private def require_thys(session: String, initiators: List[Document.Node.Name], - required: Dependencies, thys: List[(Document.Node.Name, Position.T)]): Dependencies = - (required /: thys)(require_thy(session, initiators, _, _)) - - private def require_thy(session: String, initiators: List[Document.Node.Name], - required: Dependencies, thy: (Document.Node.Name, Position.T)): Dependencies = - { - val (name, require_pos) = thy - val theory = name.theory - - def message: String = - "The error(s) above occurred for theory " + quote(theory) + - required_by(initiators) + Position.here(require_pos) - - val required1 = required + thy - if (required.seen_names.isDefinedAt(theory) || resources.loaded_theories(theory)) - required1 - else { - try { - if (initiators.contains(name)) error(cycle_msg(initiators)) - val header = - try { resources.check_thy(session, name).cat_errors(message) } - catch { case ERROR(msg) => cat_error(msg, message) } - val imports = header.imports.map((_, Position.File(name.node))) 
- Dep(name, header) :: require_thys(session, name :: initiators, required1, imports) - } - catch { - case e: Throwable => - Dep(name, Document.Node.bad_header(Exn.message(e))) :: required1 - } - } - } - - def dependencies(session: String, thys: List[(Document.Node.Name, Position.T)]): Dependencies = - require_thys(session, Nil, Dependencies.empty, thys) -} diff --git a/core/Pure/Thy/thy_output.ML b/core/Pure/Thy/thy_output.ML deleted file mode 100644 index af6752fd..00000000 --- a/core/Pure/Thy/thy_output.ML +++ /dev/null @@ -1,689 +0,0 @@ -(* Title: Pure/Thy/thy_output.ML - Author: Markus Wenzel, TU Muenchen - -Theory document output with antiquotations. -*) - -signature THY_OUTPUT = -sig - val display: bool Config.T - val quotes: bool Config.T - val indent: int Config.T - val source: bool Config.T - val break: bool Config.T - val modes: string Config.T - val add_wrapper: ((unit -> string) -> unit -> string) -> Proof.context -> Proof.context - val add_option: binding -> (string -> Proof.context -> Proof.context) -> theory -> theory - val check_command: Proof.context -> xstring * Position.T -> string - val check_option: Proof.context -> xstring * Position.T -> string - val print_antiquotations: Proof.context -> unit - val antiquotation: binding -> 'a context_parser -> - ({source: Args.src, state: Toplevel.state, context: Proof.context} -> 'a -> string) -> - theory -> theory - val boolean: string -> bool - val integer: string -> int - datatype markup = Markup | MarkupEnv | Verbatim - val eval_antiq: Scan.lexicon -> Toplevel.state -> Antiquote.antiq -> string - val check_text: Symbol_Pos.source -> Toplevel.state -> unit - val present_thy: Scan.lexicon -> (string -> string list) -> (markup -> string -> bool) -> - (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T - val pretty_text: Proof.context -> string -> Pretty.T - val pretty_term: Proof.context -> term -> Pretty.T - val pretty_thm: Proof.context -> thm -> Pretty.T - val str_of_source: Args.src -> string - val maybe_pretty_source: (Proof.context -> 'a -> Pretty.T) -> Proof.context -> - Args.src -> 'a list -> Pretty.T list - val output: Proof.context -> Pretty.T list -> string - val verb_text: string -> string -end; - -structure Thy_Output: THY_OUTPUT = -struct - -(** options **) - -val display = Attrib.setup_option_bool ("thy_output_display", @{here}); -val break = Attrib.setup_option_bool ("thy_output_break", @{here}); -val quotes = Attrib.setup_option_bool ("thy_output_quotes", @{here}); -val indent = Attrib.setup_option_int ("thy_output_indent", @{here}); -val source = Attrib.setup_option_bool ("thy_output_source", @{here}); -val modes = Attrib.setup_option_string ("thy_output_modes", @{here}); - - -structure Wrappers = Proof_Data -( - type T = ((unit -> string) -> unit -> string) list; - fun init _ = []; -); - -fun add_wrapper wrapper = Wrappers.map (cons wrapper); - -val wrap = Wrappers.get #> fold (fn wrapper => fn f => wrapper f); - - - -(** maintain global antiquotations **) - -structure Antiquotations = Theory_Data -( - type T = - (Args.src -> Toplevel.state -> Proof.context -> string) Name_Space.table * - (string -> Proof.context -> Proof.context) Name_Space.table; - val empty : T = - (Name_Space.empty_table Markup.document_antiquotationN, - Name_Space.empty_table Markup.document_antiquotation_optionN); - val extend = I; - fun merge ((commands1, options1), (commands2, options2)) : T = - (Name_Space.merge_tables (commands1, commands2), - Name_Space.merge_tables (options1, options2)); -); - -val 
get_antiquotations = Antiquotations.get o Proof_Context.theory_of; - -fun add_command name cmd thy = thy - |> Antiquotations.map (apfst (Name_Space.define (Context.Theory thy) true (name, cmd) #> snd)); - -fun add_option name opt thy = thy - |> Antiquotations.map (apsnd (Name_Space.define (Context.Theory thy) true (name, opt) #> snd)); - -fun check_command ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#1 (get_antiquotations ctxt)); - -fun check_option ctxt = #1 o Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)); - -fun command src state ctxt = - let val (src', f) = Args.check_src ctxt (#1 (get_antiquotations ctxt)) src - in f src' state ctxt end; - -fun option ((xname, pos), s) ctxt = - let - val (_, opt) = - Name_Space.check (Context.Proof ctxt) (#2 (get_antiquotations ctxt)) (xname, pos); - in opt s ctxt end; - -fun print_antiquotations ctxt = - let - val (commands, options) = get_antiquotations ctxt; - val command_names = map #1 (Name_Space.markup_table ctxt commands); - val option_names = map #1 (Name_Space.markup_table ctxt options); - in - [Pretty.big_list "document antiquotations:" (map Pretty.mark_str command_names), - Pretty.big_list "document antiquotation options:" (map Pretty.mark_str option_names)] - |> Pretty.writeln_chunks - end; - -fun antiquotation name scan body = - add_command name - (fn src => fn state => fn ctxt => - let val (x, ctxt') = Args.syntax scan src ctxt - in body {source = src, state = state, context = ctxt'} x end); - - - -(** syntax of antiquotations **) - -(* option values *) - -fun boolean "" = true - | boolean "true" = true - | boolean "false" = false - | boolean s = error ("Bad boolean value: " ^ quote s); - -fun integer s = - let - fun int ss = - (case Library.read_int ss of (i, []) => i - | _ => error ("Bad integer value: " ^ quote s)); - in (case Symbol.explode s of "-" :: ss => ~ (int ss) | ss => int ss) end; - - -(* outer syntax *) - -local - -val property = - Parse.position Parse.xname -- Scan.optional (Parse.$$$ "=" |-- Parse.!!! Parse.xname) ""; - -val properties = - Scan.optional (Parse.$$$ "[" |-- Parse.!!! (Parse.enum "," property --| Parse.$$$ "]")) []; - -in - -val antiq = - Parse.!!! 
- (Parse.position Parse.liberal_name -- properties -- Parse.args --| Scan.ahead Parse.eof) - >> (fn ((name, props), args) => (props, Args.src name args)); - -end; - - -(* eval_antiq *) - -fun eval_antiq lex state ((ss, {range = (pos, _), ...}): Antiquote.antiq) = - let - val (opts, src) = Token.read_antiq lex antiq (ss, pos); - fun cmd ctxt = wrap ctxt (fn () => command src state ctxt) (); - val preview_ctxt = fold option opts (Toplevel.presentation_context_of state); - val print_ctxt = Context_Position.set_visible false preview_ctxt; - val _ = cmd preview_ctxt; - val print_modes = space_explode "," (Config.get print_ctxt modes) @ Latex.modes; - in Print_Mode.with_modes print_modes (fn () => cmd print_ctxt) () end; - - -(* check_text *) - -fun eval_antiquote lex state (txt, pos) = - let - fun words (Antiquote.Text ss) = [(#1 (Symbol_Pos.range ss), Markup.words)] - | words (Antiquote.Antiq _) = []; - - fun expand (Antiquote.Text ss) = Symbol_Pos.content ss - | expand (Antiquote.Antiq antiq) = eval_antiq lex state antiq; - - val ants = Antiquote.read (Symbol_Pos.explode (txt, pos), pos); - val _ = Position.reports (maps words ants); - in - if Toplevel.is_toplevel state andalso not (forall Antiquote.is_text ants) then - error ("Unknown context -- cannot expand document antiquotations" ^ Position.here pos) - else implode (map expand ants) - end; - -fun check_text {delimited, text, pos} state = - (Position.report pos (Markup.language_document delimited); - if Toplevel.is_skipped_proof state then () - else ignore (eval_antiquote (#1 (Keyword.get_lexicons ())) state (text, pos))); - - - -(** present theory source **) - -(*NB: arranging white space around command spans is a black art.*) - -(* presentation tokens *) - -datatype token = - NoToken - | BasicToken of Token.T - | MarkupToken of string * (string * Position.T) - | MarkupEnvToken of string * (string * Position.T) - | VerbatimToken of string * Position.T; - -fun output_token lex state = - let val eval = eval_antiquote lex state in - fn NoToken => "" - | BasicToken tok => Latex.output_basic tok - | MarkupToken (cmd, txt) => Latex.output_markup cmd (eval txt) - | MarkupEnvToken (cmd, txt) => Latex.output_markup_env cmd (eval txt) - | VerbatimToken txt => Latex.output_verbatim (eval txt) - end; - -fun basic_token pred (BasicToken tok) = pred tok - | basic_token _ _ = false; - -val improper_token = basic_token Token.is_improper; -val comment_token = basic_token Token.is_comment; -val blank_token = basic_token Token.is_blank; -val newline_token = basic_token Token.is_newline; - - -(* command spans *) - -type command = string * Position.T * string list; (*name, position, tags*) -type source = (token * (string * int)) list; (*token, markup flag, meta-comment depth*) - -datatype span = Span of command * (source * source * source * source) * bool; - -fun make_span cmd src = - let - fun take_newline (tok :: toks) = - if newline_token (fst tok) then ([tok], toks, true) - else ([], tok :: toks, false) - | take_newline [] = ([], [], false); - val (((src_prefix, src_main), src_suffix1), (src_suffix2, src_appendix, newline)) = - src - |> take_prefix (improper_token o fst) - ||>> take_suffix (improper_token o fst) - ||>> take_prefix (comment_token o fst) - ||> take_newline; - in Span (cmd, (src_prefix, src_main, src_suffix1 @ src_suffix2, src_appendix), newline) end; - - -(* present spans *) - -local - -fun err_bad_nesting pos = - error ("Bad nesting of commands in presentation" ^ pos); - -fun edge which f (x: string option, y) = - if x = y then I - else 
(case which (x, y) of NONE => I | SOME txt => Buffer.add (f txt)); - -val begin_tag = edge #2 Latex.begin_tag; -val end_tag = edge #1 Latex.end_tag; -fun open_delim delim e = edge #2 Latex.begin_delim e #> delim #> edge #2 Latex.end_delim e; -fun close_delim delim e = edge #1 Latex.begin_delim e #> delim #> edge #1 Latex.end_delim e; - -in - -fun present_span lex default_tags span state state' - (tag_stack, active_tag, newline, buffer, present_cont) = - let - val present = fold (fn (tok, (flag, 0)) => - Buffer.add (output_token lex state' tok) - #> Buffer.add flag - | _ => I); - - val Span ((cmd_name, cmd_pos, cmd_tags), srcs, span_newline) = span; - - val (tag, tags) = tag_stack; - val tag' = try hd (fold (update (op =)) cmd_tags (the_list tag)); - - val active_tag' = - if is_some tag' then tag' - else if cmd_name = "end" andalso not (Toplevel.is_toplevel state') then NONE - else try hd (default_tags cmd_name); - val edge = (active_tag, active_tag'); - - val newline' = - if is_none active_tag' then span_newline else newline; - - val nesting = Toplevel.level state' - Toplevel.level state; - val tag_stack' = - if nesting = 0 andalso not (Toplevel.is_proof state) then tag_stack - else if nesting >= 0 then (tag', replicate nesting tag @ tags) - else - (case drop (~ nesting - 1) tags of - tgs :: tgss => (tgs, tgss) - | [] => err_bad_nesting (Position.here cmd_pos)); - - val buffer' = - buffer - |> end_tag edge - |> close_delim (fst present_cont) edge - |> snd present_cont - |> open_delim (present (#1 srcs)) edge - |> begin_tag edge - |> present (#2 srcs); - val present_cont' = - if newline then (present (#3 srcs), present (#4 srcs)) - else (I, present (#3 srcs) #> present (#4 srcs)); - in (tag_stack', active_tag', newline', buffer', present_cont') end; - -fun present_trailer ((_, tags), active_tag, _, buffer, present_cont) = - if not (null tags) then err_bad_nesting " at end of theory" - else - buffer - |> end_tag (active_tag, NONE) - |> close_delim (fst present_cont) (active_tag, NONE) - |> snd present_cont; - -end; - - -(* present_thy *) - -datatype markup = Markup | MarkupEnv | Verbatim; - -local - -val space_proper = - Scan.one Token.is_blank -- Scan.many Token.is_comment -- Scan.one Token.is_proper; - -val is_improper = not o (Token.is_proper orf Token.is_begin_ignore orf Token.is_end_ignore); -val improper = Scan.many is_improper; -val improper_end = Scan.repeat (Scan.unless space_proper (Scan.one is_improper)); -val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank)); - -val opt_newline = Scan.option (Scan.one Token.is_newline); - -val ignore = - Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore - >> pair (d + 1)) || - Scan.depend (fn d => Scan.one Token.is_end_ignore --| - (if d = 0 then Scan.fail_with (K (fn () => "Bad nesting of meta-comments")) else opt_newline) - >> pair (d - 1)); - -val tag = (improper -- Parse.$$$ "%" -- improper) |-- Parse.!!! (Parse.tag_name --| blank_end); - -val locale = - Scan.option ((Parse.$$$ "(" -- improper -- Parse.$$$ "in") |-- - Parse.!!! (improper |-- Parse.xname --| (improper -- Parse.$$$ ")"))); - -in - -fun present_thy lex default_tags is_markup command_results toks = - let - (* tokens *) - - val ignored = Scan.state --| ignore - >> (fn d => (NONE, (NoToken, ("", d)))); - - fun markup mark mk flag = Scan.peek (fn d => - improper |-- - Parse.position (Scan.one (Token.is_command andf is_markup mark o Token.content_of)) -- - Scan.repeat tag -- - Parse.!!!! 
((improper -- locale -- improper) |-- Parse.document_source --| improper_end) - >> (fn (((tok, pos'), tags), {text, pos, ...}) => - let val name = Token.content_of tok - in (SOME (name, pos', tags), (mk (name, (text, pos)), (flag, d))) end)); - - val command = Scan.peek (fn d => - Parse.position (Scan.one (Token.is_command)) -- - Scan.repeat tag - >> (fn ((tok, pos), tags) => - let val name = Token.content_of tok - in (SOME (name, pos, tags), (BasicToken tok, (Latex.markup_false, d))) end)); - - val cmt = Scan.peek (fn d => - Parse.$$$ "--" |-- Parse.!!!! (improper |-- Parse.document_source) - >> (fn {text, pos, ...} => (NONE, (MarkupToken ("cmt", (text, pos)), ("", d))))); - - val other = Scan.peek (fn d => - Parse.not_eof >> (fn tok => (NONE, (BasicToken tok, ("", d))))); - - val token = - ignored || - markup Markup MarkupToken Latex.markup_true || - markup MarkupEnv MarkupEnvToken Latex.markup_true || - markup Verbatim (VerbatimToken o #2) "" || - command || cmt || other; - - - (* spans *) - - val is_eof = fn (_, (BasicToken x, _)) => Token.is_eof x | _ => false; - val stopper = Scan.stopper (K (NONE, (BasicToken Token.eof, ("", 0)))) is_eof; - - val cmd = Scan.one (is_some o fst); - val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2; - - val comments = Scan.many (comment_token o fst o snd); - val blank = Scan.one (blank_token o fst o snd); - val newline = Scan.one (newline_token o fst o snd); - val before_cmd = - Scan.option (newline -- comments) -- - Scan.option (newline -- comments) -- - Scan.option (blank -- comments) -- cmd; - - val span = - Scan.repeat non_cmd -- cmd -- - Scan.repeat (Scan.unless before_cmd non_cmd) -- - Scan.option (newline >> (single o snd)) - >> (fn (((toks1, (cmd, tok2)), toks3), tok4) => - make_span (the cmd) (toks1 @ (tok2 :: (toks3 @ the_default [] tok4)))); - - val spans = toks - |> filter_out Token.is_semicolon - |> take_suffix Token.is_space |> #1 - |> Source.of_list - |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk token)) NONE - |> Source.source stopper (Scan.error (Scan.bulk span)) NONE - |> Source.exhaust; - - - (* present commands *) - - fun present_command tr span st st' = - Toplevel.setmp_thread_position tr (present_span lex default_tags span st st'); - - fun present _ [] = I - | present st (((tr, st'), span) :: rest) = present_command tr span st st' #> present st' rest; - in - if length command_results = length spans then - ((NONE, []), NONE, true, Buffer.empty, (I, I)) - |> present Toplevel.toplevel (command_results ~~ spans) - |> present_trailer - else error "Messed-up outer syntax for presentation" - end; - -end; - - - -(** setup default output **) - -(* options *) - -val _ = Theory.setup - (add_option @{binding show_types} (Config.put show_types o boolean) #> - add_option @{binding show_sorts} (Config.put show_sorts o boolean) #> - add_option @{binding show_structs} (Config.put show_structs o boolean) #> - add_option @{binding show_question_marks} (Config.put show_question_marks o boolean) #> - add_option @{binding show_abbrevs} (Config.put show_abbrevs o boolean) #> - add_option @{binding names_long} (Config.put Name_Space.names_long o boolean) #> - add_option @{binding names_short} (Config.put Name_Space.names_short o boolean) #> - add_option @{binding names_unique} (Config.put Name_Space.names_unique o boolean) #> - add_option @{binding eta_contract} (Config.put Syntax_Trans.eta_contract o boolean) #> - add_option @{binding display} (Config.put display o boolean) #> - add_option @{binding break} (Config.put break o 
boolean) #> - add_option @{binding quotes} (Config.put quotes o boolean) #> - add_option @{binding mode} (add_wrapper o Print_Mode.with_modes o single) #> - add_option @{binding margin} (add_wrapper o setmp_CRITICAL Pretty.margin_default o integer) #> - add_option @{binding indent} (Config.put indent o integer) #> - add_option @{binding source} (Config.put source o boolean) #> - add_option @{binding goals_limit} (Config.put Goal_Display.goals_limit o integer)); - - -(* basic pretty printing *) - -fun tweak_line ctxt s = - if Config.get ctxt display then s else Symbol.strip_blanks s; - -fun pretty_text ctxt = - Pretty.chunks o map Pretty.str o map (tweak_line ctxt) o split_lines; - -fun pretty_text_report ctxt source = - let - val {delimited, pos, ...} = source; - val _ = Context_Position.report ctxt pos (Markup.language_text delimited); - val (s, _) = Symbol_Pos.source_content source; - in pretty_text ctxt s end; - -fun pretty_term ctxt t = Syntax.pretty_term (Variable.auto_fixes t ctxt) t; - -fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of; - -fun pretty_term_style ctxt (style, t) = - pretty_term ctxt (style t); - -fun pretty_thm_style ctxt (style, th) = - pretty_term ctxt (style (Thm.full_prop_of th)); - -fun pretty_term_typ ctxt (style, t) = - let val t' = style t - in pretty_term ctxt (Type.constraint (Term.fastype_of t') t') end; - -fun pretty_term_typeof ctxt (style, t) = - Syntax.pretty_typ ctxt (Term.fastype_of (style t)); - -fun pretty_const ctxt c = - let - val t = Const (c, Consts.type_scheme (Proof_Context.consts_of ctxt) c) - handle TYPE (msg, _, _) => error msg; - val ([t'], _) = Variable.import_terms true [t] ctxt; - in pretty_term ctxt t' end; - -fun pretty_abbrev ctxt s = - let - val t = Syntax.read_term (Proof_Context.set_mode Proof_Context.mode_abbrev ctxt) s; - fun err () = error ("Abbreviated constant expected: " ^ Syntax.string_of_term ctxt t); - val (head, args) = Term.strip_comb t; - val (c, T) = Term.dest_Const head handle TERM _ => err (); - val (U, u) = Consts.the_abbreviation (Proof_Context.consts_of ctxt) c - handle TYPE _ => err (); - val t' = Term.betapplys (Envir.expand_atom T (U, u), args); - val eq = Logic.mk_equals (t, t'); - val ctxt' = Variable.auto_fixes eq ctxt; - in Proof_Context.pretty_term_abbrev ctxt' eq end; - -fun pretty_class ctxt = - Pretty.str o Proof_Context.extern_class ctxt o Proof_Context.read_class ctxt; - -fun pretty_type ctxt s = - let val Type (name, _) = Proof_Context.read_type_name {proper = true, strict = false} ctxt s - in Pretty.str (Proof_Context.extern_type ctxt name) end; - -fun pretty_prf full ctxt = Proof_Syntax.pretty_proof_of ctxt full; - -fun pretty_theory ctxt (name, pos) = - (case find_first (fn thy => Context.theory_name thy = name) - (Theory.nodes_of (Proof_Context.theory_of ctxt)) of - NONE => error ("No ancestor theory " ^ quote name ^ Position.here pos) - | SOME thy => (Context_Position.report ctxt pos (Theory.get_markup thy); Pretty.str name)); - - -(* default output *) - -val str_of_source = space_implode " " o Args.unparse_src; - -fun maybe_pretty_source pretty ctxt src xs = - map (pretty ctxt) xs (*always pretty in order to exhibit errors!*) - |> (if Config.get ctxt source then K [pretty_text ctxt (str_of_source src)] else I); - -fun output ctxt prts = - prts - |> (if Config.get ctxt quotes then map Pretty.quote else I) - |> (if Config.get ctxt display then - map (Output.output o Pretty.string_of o Pretty.indent (Config.get ctxt indent)) - #> space_implode "\\isasep\\isanewline%\n" - #> enclose 
"\\begin{isabelle}%\n" "%\n\\end{isabelle}" - else - map (Output.output o (if Config.get ctxt break then Pretty.string_of else Pretty.str_of)) - #> space_implode "\\isasep\\isanewline%\n" - #> enclose "\\isa{" "}"); - - - -(** concrete antiquotations **) - -(* basic entities *) - -local - -fun basic_entities name scan pretty = antiquotation name scan - (fn {source, context, ...} => output context o maybe_pretty_source pretty context source); - -fun basic_entities_style name scan pretty = antiquotation name scan - (fn {source, context, ...} => fn (style, xs) => - output context - (maybe_pretty_source (fn ctxt => fn x => pretty ctxt (style, x)) context source xs)); - -fun basic_entity name scan = basic_entities name (scan >> single); - -in - -val _ = Theory.setup - (basic_entities_style @{binding thm} (Term_Style.parse -- Attrib.thms) pretty_thm_style #> - basic_entity @{binding prop} (Term_Style.parse -- Args.prop) pretty_term_style #> - basic_entity @{binding term} (Term_Style.parse -- Args.term) pretty_term_style #> - basic_entity @{binding term_type} (Term_Style.parse -- Args.term) pretty_term_typ #> - basic_entity @{binding typeof} (Term_Style.parse -- Args.term) pretty_term_typeof #> - basic_entity @{binding const} (Args.const {proper = true, strict = false}) pretty_const #> - basic_entity @{binding abbrev} (Scan.lift Args.name_inner_syntax) pretty_abbrev #> - basic_entity @{binding typ} Args.typ_abbrev Syntax.pretty_typ #> - basic_entity @{binding class} (Scan.lift Args.name_inner_syntax) pretty_class #> - basic_entity @{binding type} (Scan.lift Args.name) pretty_type #> - basic_entity @{binding text} (Scan.lift Args.name_source_position) pretty_text_report #> - basic_entities @{binding prf} Attrib.thms (pretty_prf false) #> - basic_entities @{binding full_prf} Attrib.thms (pretty_prf true) #> - basic_entity @{binding theory} (Scan.lift (Parse.position Args.name)) pretty_theory); - -end; - - -(* goal state *) - -local - -fun proof_state state = - (case try (Proof.goal o Toplevel.proof_of) state of - SOME {goal, ...} => goal - | _ => error "No proof state"); - -fun goal_state name main = antiquotation name (Scan.succeed ()) - (fn {state, context = ctxt, ...} => fn () => output ctxt - [Goal_Display.pretty_goal - (Config.put Goal_Display.show_main_goal main ctxt) (proof_state state)]); - -in - -val _ = Theory.setup - (goal_state @{binding goals} true #> - goal_state @{binding subgoals} false); - -end; - - -(* embedded lemma *) - -val _ = Keyword.define ("by", NONE); (*overlap with command category*) - -val _ = Theory.setup - (antiquotation @{binding lemma} - (Scan.lift (Scan.ahead Parse.not_eof) -- Args.prop -- - Scan.lift (Parse.position (Args.$$$ "by") -- Method.parse -- Scan.option Method.parse)) - (fn {source, context = ctxt, ...} => fn ((prop_token, prop), (((_, by_pos), m1), m2)) => - let - val prop_src = Args.src (Args.name_of_src source) [prop_token]; - - val reports = (by_pos, Markup.keyword1) :: maps Method.reports_of (m1 :: the_list m2); - val _ = Context_Position.reports ctxt reports; - - (* FIXME check proof!? 
*) - val _ = ctxt - |> Proof.theorem NONE (K I) [[(prop, [])]] - |> Proof.global_terminal_proof (m1, m2); - in output ctxt (maybe_pretty_source pretty_term ctxt prop_src [prop]) end)); - - -(* ML text *) - -val verb_text = - split_lines - #> map (space_implode "\\verb,|," o map (enclose "\\verb|" "|") o space_explode "|") - #> space_implode "\\isasep\\isanewline%\n"; - -local - -fun ml_text name ml = antiquotation name (Scan.lift Args.name_source_position) - (fn {context, ...} => fn source => - (ML_Context.eval_in (SOME context) ML_Compiler.flags (#pos source) (ml source); - Symbol_Pos.source_content source - |> #1 - |> (if Config.get context quotes then quote else I) - |> (if Config.get context display then enclose "\\begin{verbatim}\n" "\n\\end{verbatim}" - else verb_text))); - -fun ml_enclose bg en source = - ML_Lex.read Position.none bg @ - ML_Lex.read_source false source @ - ML_Lex.read Position.none en; - -in - -val _ = Theory.setup - (ml_text @{binding ML} (ml_enclose "fn _ => (" ");") #> - ml_text @{binding ML_op} (ml_enclose "fn _ => (op " ");") #> - ml_text @{binding ML_type} (ml_enclose "val _ = NONE : (" ") option;") #> - ml_text @{binding ML_structure} - (ml_enclose "functor XXX() = struct structure XX = " " end;") #> - - ml_text @{binding ML_functor} (* FIXME formal treatment of functor name (!?) *) - (fn source => - ML_Lex.read Position.none ("ML_Env.check_functor " ^ - ML_Syntax.print_string (#1 (Symbol_Pos.source_content source)))) #> - - ml_text @{binding ML_text} (K [])); - -end; - - -(* URLs *) - -val _ = Theory.setup - (antiquotation @{binding url} (Scan.lift (Parse.position Parse.name)) - (fn {context = ctxt, ...} => fn (name, pos) => - (Context_Position.reports ctxt [(pos, Markup.language_path), (pos, Markup.url name)]; - enclose "\\url{" "}" name))); - -end; diff --git a/core/Pure/Thy/thy_syntax.ML b/core/Pure/Thy/thy_syntax.ML deleted file mode 100644 index 188d305d..00000000 --- a/core/Pure/Thy/thy_syntax.ML +++ /dev/null @@ -1,213 +0,0 @@ -(* Title: Pure/Thy/thy_syntax.ML - Author: Makarius - -Superficial theory syntax: tokens and spans. 
-*) - -signature THY_SYNTAX = -sig - val parse_tokens: Scan.lexicon * Scan.lexicon -> Position.T -> string -> Token.T list - val reports_of_tokens: Token.T list -> bool * Position.report_text list - val present_token: Token.T -> Output.output - datatype span_kind = Command of string * Position.T | Ignored | Malformed - datatype span = Span of span_kind * Token.T list - val span_kind: span -> span_kind - val span_content: span -> Token.T list - val present_span: span -> Output.output - val parse_spans: Token.T list -> span list - val resolve_files: (string -> Path.T * Position.T -> Token.file Exn.result list) -> span -> span - datatype 'a element = Element of 'a * ('a element list * 'a) option - val atom: 'a -> 'a element - val map_element: ('a -> 'b) -> 'a element -> 'b element - val flat_element: 'a element -> 'a list - val last_element: 'a element -> 'a - val parse_elements: span list -> span element list -end; - -structure Thy_Syntax: THY_SYNTAX = -struct - -(** tokens **) - -(* parse *) - -fun parse_tokens lexs pos = - Source.of_string #> - Symbol.source #> - Token.source {do_recover = SOME false} (K lexs) pos #> - Source.exhaust; - - -(* present *) - -local - -fun reports_of_token tok = - let - val {text, pos, ...} = Token.source_position_of tok; - val malformed_symbols = - Symbol_Pos.explode (text, pos) - |> map_filter (fn (sym, pos) => - if Symbol.is_malformed sym - then SOME ((pos, Markup.bad), "Malformed symbolic character") else NONE); - val is_malformed = Token.is_error tok orelse not (null malformed_symbols); - val reports = Token.report tok :: Token.completion_report tok @ malformed_symbols; - in (is_malformed, reports) end; - -in - -fun reports_of_tokens toks = - let val results = map reports_of_token toks - in (exists fst results, maps snd results) end; - -fun present_token tok = - Markup.enclose (Token.markup tok) (Output.output (Token.unparse tok)); - -end; - - - -(** spans **) - -(* type span *) - -datatype span_kind = Command of string * Position.T | Ignored | Malformed; -datatype span = Span of span_kind * Token.T list; - -fun span_kind (Span (k, _)) = k; -fun span_content (Span (_, toks)) = toks; - -val present_span = implode o map present_token o span_content; - - -(* parse *) - -local - -fun make_span toks = - if not (null toks) andalso Token.is_command (hd toks) then - Span (Command (Token.content_of (hd toks), Token.pos_of (hd toks)), toks) - else if forall Token.is_improper toks then Span (Ignored, toks) - else Span (Malformed, toks); - -fun flush (result, span, improper) = - result - |> not (null span) ? cons (rev span) - |> not (null improper) ? 
cons (rev improper); - -fun parse tok (result, span, improper) = - if Token.is_command tok then (flush (result, span, improper), [tok], []) - else if Token.is_improper tok then (result, span, tok :: improper) - else (result, tok :: (improper @ span), []); - -in - -fun parse_spans toks = - fold parse toks ([], [], []) - |> flush |> rev |> map make_span; - -end; - - -(* inlined files *) - -local - -fun clean ((i1, t1) :: (i2, t2) :: toks) = - if Token.keyword_with (fn s => s = "%" orelse s = "--") t1 then clean toks - else (i1, t1) :: clean ((i2, t2) :: toks) - | clean toks = toks; - -fun clean_tokens toks = - ((0 upto length toks - 1) ~~ toks) - |> filter (fn (_, tok) => Token.is_proper tok) - |> clean; - -fun find_file ((_, tok) :: toks) = - if Token.is_command tok then - toks |> get_first (fn (i, tok) => - if Token.is_name tok then - SOME (i, (Path.explode (Token.content_of tok), Token.pos_of tok)) - handle ERROR msg => error (msg ^ Position.here (Token.pos_of tok)) - else NONE) - else NONE - | find_file [] = NONE; - -in - -fun resolve_files read_files span = - (case span of - Span (Command (cmd, pos), toks) => - if Keyword.is_theory_load cmd then - (case find_file (clean_tokens toks) of - NONE => error ("Bad file argument of command " ^ quote cmd ^ Position.here pos) - | SOME (i, path) => - let - val toks' = toks |> map_index (fn (j, tok) => - if i = j then Token.put_files (read_files cmd path) tok - else tok); - in Span (Command (cmd, pos), toks') end) - else span - | _ => span); - -end; - - - -(** specification elements: commands with optional proof **) - -datatype 'a element = Element of 'a * ('a element list * 'a) option; - -fun element (a, b) = Element (a, SOME b); -fun atom a = Element (a, NONE); - -fun map_element f (Element (a, NONE)) = Element (f a, NONE) - | map_element f (Element (a, SOME (elems, b))) = - Element (f a, SOME ((map o map_element) f elems, f b)); - -fun flat_element (Element (a, NONE)) = [a] - | flat_element (Element (a, SOME (elems, b))) = a :: maps flat_element elems @ [b]; - -fun last_element (Element (a, NONE)) = a - | last_element (Element (_, SOME (_, b))) = b; - - -(* scanning spans *) - -val eof = Span (Command ("", Position.none), []); - -fun is_eof (Span (Command ("", _), _)) = true - | is_eof _ = false; - -val not_eof = not o is_eof; - -val stopper = Scan.stopper (K eof) is_eof; - - -(* parse *) - -local - -fun command_with pred = - Scan.one (fn (Span (Command (name, _), _)) => pred name | _ => false); - -val proof_atom = - Scan.one (fn (Span (Command (name, _), _)) => Keyword.is_proof_body name | _ => true) >> atom; - -fun proof_element x = (command_with Keyword.is_proof_goal -- proof_rest >> element || proof_atom) x -and proof_rest x = (Scan.repeat proof_element -- command_with Keyword.is_qed) x; - -val other_element = - command_with Keyword.is_theory_goal -- proof_rest >> element || - Scan.one not_eof >> atom; - -in - -val parse_elements = - Source.of_list #> - Source.source stopper (Scan.bulk other_element) NONE #> - Source.exhaust; - -end; - -end; diff --git a/core/Pure/Thy/thy_syntax.scala b/core/Pure/Thy/thy_syntax.scala deleted file mode 100644 index 5ec0e485..00000000 --- a/core/Pure/Thy/thy_syntax.scala +++ /dev/null @@ -1,495 +0,0 @@ -/* Title: Pure/Thy/thy_syntax.scala - Author: Makarius - -Superficial theory syntax: tokens and spans. 
-*/ - -package isabelle - - -import scala.collection.mutable -import scala.annotation.tailrec - - -object Thy_Syntax -{ - /** nested structure **/ - - object Structure - { - sealed abstract class Entry { def length: Int } - case class Block(val name: String, val body: List[Entry]) extends Entry - { - val length: Int = (0 /: body)(_ + _.length) - } - case class Atom(val command: Command) extends Entry - { - def length: Int = command.length - } - - def parse(syntax: Outer_Syntax, node_name: Document.Node.Name, text: CharSequence): Entry = - { - /* stack operations */ - - def buffer(): mutable.ListBuffer[Entry] = new mutable.ListBuffer[Entry] - var stack: List[(Int, String, mutable.ListBuffer[Entry])] = - List((0, node_name.toString, buffer())) - - @tailrec def close(level: Int => Boolean) - { - stack match { - case (lev, name, body) :: (_, _, body2) :: rest if level(lev) => - body2 += Block(name, body.toList) - stack = stack.tail - close(level) - case _ => - } - } - - def result(): Entry = - { - close(_ => true) - val (_, name, body) = stack.head - Block(name, body.toList) - } - - def add(command: Command) - { - syntax.heading_level(command) match { - case Some(i) => - close(_ > i) - stack = (i + 1, command.source, buffer()) :: stack - case None => - } - stack.head._3 += Atom(command) - } - - - /* result structure */ - - val spans = parse_spans(syntax.scan(text)) - spans.foreach(span => add(Command(Document_ID.none, node_name, Nil, span))) - result() - } - } - - - - /** parse spans **/ - - def parse_spans(toks: List[Token]): List[List[Token]] = - { - val result = new mutable.ListBuffer[List[Token]] - val span = new mutable.ListBuffer[Token] - val improper = new mutable.ListBuffer[Token] - - def flush() - { - if (!span.isEmpty) { result += span.toList; span.clear } - if (!improper.isEmpty) { result += improper.toList; improper.clear } - } - for (tok <- toks) { - if (tok.is_command) { flush(); span += tok } - else if (tok.is_improper) improper += tok - else { span ++= improper; improper.clear; span += tok } - } - flush() - - result.toList - } - - - - /** perspective **/ - - def command_perspective( - node: Document.Node, - perspective: Text.Perspective, - overlays: Document.Node.Overlays): (Command.Perspective, Command.Perspective) = - { - if (perspective.is_empty && overlays.is_empty) - (Command.Perspective.empty, Command.Perspective.empty) - else { - val has_overlay = overlays.commands - val visible = new mutable.ListBuffer[Command] - val visible_overlay = new mutable.ListBuffer[Command] - @tailrec - def check_ranges(ranges: List[Text.Range], commands: Stream[(Command, Text.Offset)]) - { - (ranges, commands) match { - case (range :: more_ranges, (command, offset) #:: more_commands) => - val command_range = command.range + offset - range compare command_range match { - case 0 => - visible += command - visible_overlay += command - check_ranges(ranges, more_commands) - case c => - if (has_overlay(command)) visible_overlay += command - - if (c < 0) check_ranges(more_ranges, commands) - else check_ranges(ranges, more_commands) - } - - case (Nil, (command, _) #:: more_commands) => - if (has_overlay(command)) visible_overlay += command - - check_ranges(Nil, more_commands) - - case _ => - } - } - - val commands = - (if (overlays.is_empty) node.command_iterator(perspective.range) - else node.command_iterator()).toStream - check_ranges(perspective.ranges, commands) - (Command.Perspective(visible.toList), Command.Perspective(visible_overlay.toList)) - } - } - - - - /** header edits: structure and outer 
syntax **/ - - private def header_edits( - resources: Resources, - previous: Document.Version, - edits: List[Document.Edit_Text]): - (Prover.Syntax, Boolean, Boolean, List[Document.Node.Name], Document.Nodes, - List[Document.Edit_Command]) = - { - var updated_imports = false - var updated_keywords = false - var nodes = previous.nodes - val doc_edits = new mutable.ListBuffer[Document.Edit_Command] - - edits foreach { - case (name, Document.Node.Deps(header)) => - val node = nodes(name) - val update_header = - !node.header.errors.isEmpty || !header.errors.isEmpty || node.header != header - if (update_header) { - val node1 = node.update_header(header) - updated_imports = updated_imports || (node.header.imports != node1.header.imports) - updated_keywords = updated_keywords || (node.header.keywords != node1.header.keywords) - nodes += (name -> node1) - doc_edits += (name -> Document.Node.Deps(header)) - } - case _ => - } - - val (syntax, syntax_changed) = - previous.syntax match { - case Some(syntax) if !updated_keywords => - (syntax, false) - case _ => - val syntax = - (resources.base_syntax /: nodes.iterator) { - case (syn, (_, node)) => syn.add_keywords(node.header.keywords) - } - (syntax, true) - } - - val reparse = - if (updated_imports || updated_keywords) - nodes.descendants(doc_edits.iterator.map(_._1).toList) - else Nil - - (syntax, syntax_changed, updated_imports, reparse, nodes, doc_edits.toList) - } - - - - /** text edits **/ - - /* edit individual command source */ - - @tailrec def edit_text(eds: List[Text.Edit], commands: Linear_Set[Command]): Linear_Set[Command] = - { - eds match { - case e :: es => - Document.Node.Commands.starts(commands.iterator).find { - case (cmd, cmd_start) => - e.can_edit(cmd.source, cmd_start) || - e.is_insert && e.start == cmd_start + cmd.length - } match { - case Some((cmd, cmd_start)) if e.can_edit(cmd.source, cmd_start) => - val (rest, text) = e.edit(cmd.source, cmd_start) - val new_commands = commands.insert_after(Some(cmd), Command.unparsed(text)) - cmd - edit_text(rest.toList ::: es, new_commands) - - case Some((cmd, cmd_start)) => - edit_text(es, commands.insert_after(Some(cmd), Command.unparsed(e.text))) - - case None => - require(e.is_insert && e.start == 0) - edit_text(es, commands.insert_after(None, Command.unparsed(e.text))) - } - case Nil => commands - } - } - - - /* inlined files */ - - private def find_file(tokens: List[Token]): Option[String] = - { - def clean(toks: List[Token]): List[Token] = - toks match { - case t :: _ :: ts if t.is_keyword && (t.source == "%" || t.source == "--") => clean(ts) - case t :: ts => t :: clean(ts) - case Nil => Nil - } - clean(tokens.filter(_.is_proper)) match { - case tok :: toks if tok.is_command => toks.find(_.is_name).map(_.content) - case _ => None - } - } - - def span_files(syntax: Prover.Syntax, span: List[Token]): List[String] = - syntax.load(span) match { - case Some(exts) => - find_file(span) match { - case Some(file) => - if (exts.isEmpty) List(file) - else exts.map(ext => file + "." 
+ ext) - case None => Nil - } - case None => Nil - } - - def resolve_files( - resources: Resources, - syntax: Prover.Syntax, - node_name: Document.Node.Name, - span: List[Token], - get_blob: Document.Node.Name => Option[Document.Blob]) - : List[Command.Blob] = - { - span_files(syntax, span).map(file_name => - Exn.capture { - val name = - Document.Node.Name(resources.append(node_name.master_dir, Path.explode(file_name))) - val blob = get_blob(name).map(blob => ((blob.bytes.sha1_digest, blob.chunk))) - (name, blob) - }) - } - - - /* reparse range of command spans */ - - @tailrec private def chop_common( - cmds: List[Command], - blobs_spans: List[(List[Command.Blob], List[Token])]) - : (List[Command], List[(List[Command.Blob], List[Token])]) = - { - (cmds, blobs_spans) match { - case (cmd :: cmds, (blobs, span) :: rest) if cmd.blobs == blobs && cmd.span == span => - chop_common(cmds, rest) - case _ => (cmds, blobs_spans) - } - } - - private def reparse_spans( - resources: Resources, - syntax: Prover.Syntax, - get_blob: Document.Node.Name => Option[Document.Blob], - name: Document.Node.Name, - commands: Linear_Set[Command], - first: Command, last: Command): Linear_Set[Command] = - { - val cmds0 = commands.iterator(first, last).toList - val blobs_spans0 = - parse_spans(syntax.scan(cmds0.iterator.map(_.source).mkString)). - map(span => (resolve_files(resources, syntax, name, span, get_blob), span)) - - val (cmds1, blobs_spans1) = chop_common(cmds0, blobs_spans0) - - val (rev_cmds2, rev_blobs_spans2) = chop_common(cmds1.reverse, blobs_spans1.reverse) - val cmds2 = rev_cmds2.reverse - val blobs_spans2 = rev_blobs_spans2.reverse - - cmds2 match { - case Nil => - assert(blobs_spans2.isEmpty) - commands - case cmd :: _ => - val hook = commands.prev(cmd) - val inserted = - blobs_spans2.map({ case (blobs, span) => Command(Document_ID.make(), name, blobs, span) }) - (commands /: cmds2)(_ - _).append_after(hook, inserted) - } - } - - - /* recover command spans after edits */ - - // FIXME somewhat slow - private def recover_spans( - resources: Resources, - syntax: Prover.Syntax, - get_blob: Document.Node.Name => Option[Document.Blob], - name: Document.Node.Name, - perspective: Command.Perspective, - commands: Linear_Set[Command]): Linear_Set[Command] = - { - val visible = perspective.commands.toSet - - def next_invisible_command(cmds: Linear_Set[Command], from: Command): Command = - cmds.iterator(from).dropWhile(cmd => !cmd.is_command || visible(cmd)) - .find(_.is_command) getOrElse cmds.last - - @tailrec def recover(cmds: Linear_Set[Command]): Linear_Set[Command] = - cmds.find(_.is_unparsed) match { - case Some(first_unparsed) => - val first = next_invisible_command(cmds.reverse, first_unparsed) - val last = next_invisible_command(cmds, first_unparsed) - recover(reparse_spans(resources, syntax, get_blob, name, cmds, first, last)) - case None => cmds - } - recover(commands) - } - - - /* consolidate unfinished spans */ - - private def consolidate_spans( - resources: Resources, - syntax: Prover.Syntax, - get_blob: Document.Node.Name => Option[Document.Blob], - reparse_limit: Int, - name: Document.Node.Name, - perspective: Command.Perspective, - commands: Linear_Set[Command]): Linear_Set[Command] = - { - if (perspective.commands.isEmpty) commands - else { - commands.find(_.is_unfinished) match { - case Some(first_unfinished) => - val visible = perspective.commands.toSet - commands.reverse.find(visible) match { - case Some(last_visible) => - val it = commands.iterator(last_visible) - var last = last_visible - 
var i = 0 - while (i < reparse_limit && it.hasNext) { - last = it.next - i += last.length - } - reparse_spans(resources, syntax, get_blob, name, commands, first_unfinished, last) - case None => commands - } - case None => commands - } - } - } - - - /* main */ - - def diff_commands(old_cmds: Linear_Set[Command], new_cmds: Linear_Set[Command]) - : List[Command.Edit] = - { - val removed = old_cmds.iterator.filter(!new_cmds.contains(_)).toList - val inserted = new_cmds.iterator.filter(!old_cmds.contains(_)).toList - - removed.reverse.map(cmd => (old_cmds.prev(cmd), None)) ::: - inserted.map(cmd => (new_cmds.prev(cmd), Some(cmd))) - } - - private def text_edit( - resources: Resources, - syntax: Prover.Syntax, - get_blob: Document.Node.Name => Option[Document.Blob], - reparse_limit: Int, - node: Document.Node, edit: Document.Edit_Text): Document.Node = - { - edit match { - case (_, Document.Node.Clear()) => node.clear - - case (_, Document.Node.Blob(blob)) => node.init_blob(blob) - - case (name, Document.Node.Edits(text_edits)) => - if (name.is_theory) { - val commands0 = node.commands - val commands1 = edit_text(text_edits, commands0) - val commands2 = - recover_spans(resources, syntax, get_blob, name, node.perspective.visible, commands1) - node.update_commands(commands2) - } - else node - - case (_, Document.Node.Deps(_)) => node - - case (name, Document.Node.Perspective(required, text_perspective, overlays)) => - val (visible, visible_overlay) = command_perspective(node, text_perspective, overlays) - val perspective: Document.Node.Perspective_Command = - Document.Node.Perspective(required, visible_overlay, overlays) - if (node.same_perspective(perspective)) node - else - node.update_perspective(perspective).update_commands( - consolidate_spans(resources, syntax, get_blob, reparse_limit, - name, visible, node.commands)) - } - } - - def parse_change( - resources: Resources, - reparse_limit: Int, - previous: Document.Version, - doc_blobs: Document.Blobs, - edits: List[Document.Edit_Text]): Session.Change = - { - def get_blob(name: Document.Node.Name) = - doc_blobs.get(name) orElse previous.nodes(name).get_blob - - val (syntax, syntax_changed, deps_changed, reparse0, nodes0, doc_edits0) = - header_edits(resources, previous, edits) - - val (doc_edits, version) = - if (edits.isEmpty) (Nil, Document.Version.make(Some(syntax), previous.nodes)) - else { - val reparse = - (reparse0 /: nodes0.iterator)({ - case (reparse, (name, node)) => - if (node.load_commands.exists(_.blobs_changed(doc_blobs))) - name :: reparse - else reparse - }) - val reparse_set = reparse.toSet - - var nodes = nodes0 - val doc_edits = new mutable.ListBuffer[Document.Edit_Command]; doc_edits ++= doc_edits0 - - val node_edits = - (edits ::: reparse.map((_, Document.Node.Edits(Nil)))).groupBy(_._1) - .asInstanceOf[Map[Document.Node.Name, List[Document.Edit_Text]]] // FIXME ??? 
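// The asInstanceOf above (flagged FIXME by the author) only adjusts the
// statically inferred result type of groupBy; at this point the step is
// simply bucketing the pending text edits by node name, roughly
// (simplified, hypothetical element types):
//   List(("A", 1), ("B", 2), ("A", 3)).groupBy(_._1)
//   // == Map("A" -> List(("A", 1), ("A", 3)), "B" -> List(("B", 2)))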
- - node_edits foreach { - case (name, edits) => - val node = nodes(name) - val commands = node.commands - - val node1 = - if (reparse_set(name) && !commands.isEmpty) - node.update_commands( - reparse_spans(resources, syntax, get_blob, - name, commands, commands.head, commands.last)) - else node - val node2 = - (node1 /: edits)(text_edit(resources, syntax, get_blob, reparse_limit, _, _)) - - if (!(node.same_perspective(node2.perspective))) - doc_edits += (name -> node2.perspective) - - doc_edits += (name -> Document.Node.Edits(diff_commands(commands, node2.commands))) - - nodes += (name -> node2) - } - (doc_edits.toList.filterNot(_._2.is_void), Document.Version.make(Some(syntax), nodes)) - } - - Session.Change(previous, syntax_changed, deps_changed, doc_edits, version) - } -} diff --git a/core/Pure/Tools/build.ML b/core/Pure/Tools/build.ML deleted file mode 100644 index 32c0c993..00000000 --- a/core/Pure/Tools/build.ML +++ /dev/null @@ -1,174 +0,0 @@ -(* Title: Pure/Tools/build.ML - Author: Makarius - -Build Isabelle sessions. -*) - -signature BUILD = -sig - val build: string -> unit -end; - -structure Build: BUILD = -struct - -(* command timings *) - -type timings = ((string * Time.time) Inttab.table) Symtab.table; (*file -> offset -> name, time*) - -val empty_timings: timings = Symtab.empty; - -fun update_timings props = - (case Markup.parse_command_timing_properties props of - SOME ({file, offset, name}, time) => - Symtab.map_default (file, Inttab.empty) - (Inttab.map_default (offset, (name, time)) (fn (_, t) => (name, Time.+ (t, time)))) - | NONE => I); - -fun approximative_id name pos = - (case (Position.file_of pos, Position.offset_of pos) of - (SOME file, SOME offset) => - if name = "" then NONE else SOME {file = file, offset = offset, name = name} - | _ => NONE); - -fun lookup_timings timings tr = - (case approximative_id (Toplevel.name_of tr) (Toplevel.pos_of tr) of - SOME {file, offset, name} => - (case Symtab.lookup timings file of - SOME offsets => - (case Inttab.lookup offsets offset of - SOME (name', time) => if name = name' then SOME time else NONE - | NONE => NONE) - | NONE => NONE) - | NONE => NONE); - - -(* session timing *) - -fun session_timing name verbose f x = - let - val start = Timing.start (); - val y = f x; - val timing = Timing.result start; - - val threads = string_of_int (Multithreading.max_threads_value ()); - val factor = Time.toReal (#cpu timing) / Time.toReal (#elapsed timing) - |> Real.fmt (StringCvt.FIX (SOME 2)); - - val timing_props = - [("threads", threads)] @ Markup.timing_properties timing @ [("factor", factor)]; - val _ = writeln ("\fTiming = " ^ YXML.string_of_body (XML.Encode.properties timing_props)); - val _ = - if verbose then - Output.physical_stderr ("Timing " ^ name ^ " (" ^ - threads ^ " threads, " ^ Timing.message timing ^ ", factor " ^ factor ^ ")\n") - else (); - in y end; - - -(* protocol messages *) - -fun inline_message a args = - writeln ("\f" ^ a ^ " = " ^ YXML.string_of_body (XML.Encode.properties args)); - -fun protocol_message props output = - (case props of - function :: args => - if function = Markup.ML_statistics orelse function = Markup.task_statistics then - inline_message (#2 function) args - else if function = Markup.command_timing then - let - val name = the_default "" (Properties.get args Markup.nameN); - val pos = Position.of_properties args; - val {elapsed, ...} = Markup.parse_timing_properties args; - in - (case approximative_id name pos of - SOME id => inline_message (#2 function) (Markup.command_timing_properties 
id elapsed) - | NONE => ()) - end - else - (case Markup.dest_loading_theory props of - SOME name => writeln ("\floading_theory = " ^ name) - | NONE => raise Output.Protocol_Message props) - | [] => raise Output.Protocol_Message props); - - -(* build *) - -local - -fun use_theories last_timing options = - Thy_Info.use_theories { - document = Present.document_enabled (Options.string options "document"), - last_timing = last_timing, - master_dir = Path.current} - |> Unsynchronized.setmp print_mode - (space_explode "," (Options.string options "print_mode") @ print_mode_value ()) - |> Unsynchronized.setmp Goal.parallel_proofs (Options.int options "parallel_proofs") - |> Unsynchronized.setmp Multithreading.trace (Options.int options "threads_trace") - |> Multithreading.max_threads_setmp (Options.int options "threads") - |> Unsynchronized.setmp Future.ML_statistics true - |> Unsynchronized.setmp Pretty.margin_default (Options.int options "pretty_margin") - |> Unsynchronized.setmp Toplevel.timing (Options.bool options "timing"); - -fun use_theories_condition last_timing (options, thys) = - let val condition = space_explode "," (Options.string options "condition") in - (case filter_out (can getenv_strict) condition of - [] => - (Options.set_default options; - use_theories last_timing options (map (rpair Position.none) thys)) - | conds => - Output.physical_stderr ("Skipping theories " ^ commas_quote thys ^ - " (undefined " ^ commas conds ^ ")\n")) - end; - -in - -fun build args_file = Command_Line.tool0 (fn () => - let - val _ = SHA1_Samples.test (); - - val (command_timings, (do_output, (options, (verbose, (browser_info, - (document_files, (parent_name, (chapter, (name, theories))))))))) = - File.read (Path.explode args_file) |> YXML.parse_body |> - let open XML.Decode in - pair (list properties) (pair bool (pair Options.decode (pair bool (pair string - (pair (list (pair string string)) (pair string (pair string (pair string - ((list (pair Options.decode (list string)))))))))))) - end; - - val _ = Options.set_default options; - - val _ = writeln ("\fSession.name = " ^ name); - val _ = - Session.init do_output - (Options.bool options "browser_info") - (Path.explode browser_info) - (Options.string options "document") - (Options.bool options "document_graph") - (Options.string options "document_output") - (Present.document_variants (Options.string options "document_variants")) - (map (pairself Path.explode) document_files) - parent_name (chapter, name) - verbose; - - val last_timing = lookup_timings (fold update_timings command_timings empty_timings); - - val res1 = - theories |> - (List.app (use_theories_condition last_timing) - |> session_timing name verbose - |> Unsynchronized.setmp Outer_Syntax.batch_mode true - |> Unsynchronized.setmp Output.protocol_message_fn protocol_message - |> Multithreading.max_threads_setmp (Options.int options "threads") - |> Exn.capture); - val res2 = Exn.capture Session.finish (); - val _ = Par_Exn.release_all [res1, res2]; - - val _ = Options.reset_default (); - val _ = if do_output then () else exit 0; - in () end); - -end; - -end; diff --git a/core/Pure/Tools/build.scala b/core/Pure/Tools/build.scala deleted file mode 100644 index d72ab239..00000000 --- a/core/Pure/Tools/build.scala +++ /dev/null @@ -1,1030 +0,0 @@ -/* Title: Pure/Tools/build.scala - Author: Makarius - Options: :folding=explicit:collapseFolds=1: - -Build and manage Isabelle sessions. 
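A minimal usage sketch of the entry points defined below, assuming the Isabelle/Scala classes are on the classpath; the object name, the "[build]" prefix and the session name "HOL" are placeholders, not part of this file.

import isabelle._

object Build_Sketch
{
  // a Progress that tags every message before delegating to the console
  class Tagged_Progress extends Build.Console_Progress(verbose = true)
  {
    override def echo(msg: String): Unit = super.echo("[build] " + msg)
  }

  def main(args: Array[String]): Unit =
  {
    val rc =
      Build.build(Options.init(),
        progress = new Tagged_Progress,
        sessions = List("HOL"))
    sys.exit(rc)
  }
}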
-*/ - -package isabelle - - -import java.io.{BufferedInputStream, FileInputStream, - BufferedReader, InputStreamReader, IOException} -import java.util.zip.GZIPInputStream - -import scala.collection.SortedSet -import scala.collection.mutable -import scala.annotation.tailrec - - -object Build -{ - /** progress context **/ - - class Progress - { - def echo(msg: String) {} - def theory(session: String, theory: String) {} - def stopped: Boolean = false - override def toString: String = if (stopped) "Progress(stopped)" else "Progress" - } - - object Ignore_Progress extends Progress - - class Console_Progress(verbose: Boolean = false) extends Progress - { - override def echo(msg: String) { Console.println(msg) } - override def theory(session: String, theory: String): Unit = - if (verbose) echo(session + ": theory " + theory) - - @volatile private var is_stopped = false - def interrupt_handler[A](e: => A): A = POSIX_Interrupt.handler { is_stopped = true } { e } - override def stopped: Boolean = - { - if (Thread.interrupted) is_stopped = true - is_stopped - } - } - - - - /** session information **/ - - // external version - abstract class Entry - sealed case class Chapter(name: String) extends Entry - sealed case class Session_Entry( - pos: Position.T, - name: String, - groups: List[String], - path: String, - parent: Option[String], - description: String, - options: List[Options.Spec], - theories: List[(Boolean, List[Options.Spec], List[String])], - files: List[String], - document_files: List[(String, String)]) extends Entry - - // internal version - sealed case class Session_Info( - chapter: String, - select: Boolean, - pos: Position.T, - groups: List[String], - dir: Path, - parent: Option[String], - description: String, - options: Options, - theories: List[(Boolean, Options, List[Path])], - files: List[Path], - document_files: List[(Path, Path)], - entry_digest: SHA1.Digest) - - def is_pure(name: String): Boolean = name == "RAW" || name == "Pure" - - def session_info(options: Options, select: Boolean, dir: Path, - chapter: String, entry: Session_Entry): (String, Session_Info) = - try { - val name = entry.name - - if (name == "") error("Bad session name") - if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session") - if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session") - - val session_options = options ++ entry.options - - val theories = - entry.theories.map({ case (global, opts, thys) => - (global, session_options ++ opts, thys.map(Path.explode(_))) }) - val files = entry.files.map(Path.explode(_)) - val document_files = - entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) }) - - val entry_digest = - SHA1.digest((chapter, name, entry.parent, entry.options, - entry.theories, entry.files, entry.document_files).toString) - - val info = - Session_Info(chapter, select, entry.pos, entry.groups, dir + Path.explode(entry.path), - entry.parent, entry.description, session_options, theories, files, - document_files, entry_digest) - - (name, info) - } - catch { - case ERROR(msg) => - error(msg + "\nThe error(s) above occurred in session entry " + - quote(entry.name) + Position.here(entry.pos)) - } - - - /* session tree */ - - object Session_Tree - { - def apply(infos: Seq[(String, Session_Info)]): Session_Tree = - { - val graph1 = - (Graph.string[Session_Info] /: infos) { - case (graph, (name, info)) => - if (graph.defined(name)) - error("Duplicate session " + quote(name) + Position.here(info.pos) + - 
Position.here(graph.get_node(name).pos)) - else graph.new_node(name, info) - } - val graph2 = - (graph1 /: graph1.iterator) { - case (graph, (name, (info, _))) => - info.parent match { - case None => graph - case Some(parent) => - if (!graph.defined(parent)) - error("Bad parent session " + quote(parent) + " for " + - quote(name) + Position.here(info.pos)) - - try { graph.add_edge_acyclic(parent, name) } - catch { - case exn: Graph.Cycles[_] => - error(cat_lines(exn.cycles.map(cycle => - "Cyclic session dependency of " + - cycle.map(c => quote(c.toString)).mkString(" via "))) + - Position.here(info.pos)) - } - } - } - new Session_Tree(graph2) - } - } - - final class Session_Tree private(val graph: Graph[String, Session_Info]) - extends PartialFunction[String, Session_Info] - { - def apply(name: String): Session_Info = graph.get_node(name) - def isDefinedAt(name: String): Boolean = graph.defined(name) - - def selection(requirements: Boolean, all_sessions: Boolean, - session_groups: List[String], sessions: List[String]): (List[String], Session_Tree) = - { - val bad_sessions = sessions.filterNot(isDefinedAt(_)) - if (!bad_sessions.isEmpty) error("Undefined session(s): " + commas_quote(bad_sessions)) - - val pre_selected = - { - if (all_sessions) graph.keys - else { - val select_group = session_groups.toSet - val select = sessions.toSet - (for { - (name, (info, _)) <- graph.iterator - if info.select || select(name) || apply(name).groups.exists(select_group) - } yield name).toList - } - } - val selected = - if (requirements) (graph.all_preds(pre_selected).toSet -- pre_selected).toList - else pre_selected - - val graph1 = graph.restrict(graph.all_preds(selected).toSet) - (selected, new Session_Tree(graph1)) - } - - def topological_order: List[(String, Session_Info)] = - graph.topological_order.map(name => (name, apply(name))) - - override def toString: String = graph.keys_iterator.mkString("Session_Tree(", ", ", ")") - } - - - /* parser */ - - val chapter_default = "Unsorted" - - private val CHAPTER = "chapter" - private val SESSION = "session" - private val IN = "in" - private val DESCRIPTION = "description" - private val OPTIONS = "options" - private val GLOBAL_THEORIES = "global_theories" - private val THEORIES = "theories" - private val FILES = "files" - private val DOCUMENT_FILES = "document_files" - - lazy val root_syntax = - Outer_Syntax.init() + "(" + ")" + "+" + "," + "=" + "[" + "]" + - (CHAPTER, Keyword.THY_DECL) + (SESSION, Keyword.THY_DECL) + - IN + DESCRIPTION + OPTIONS + GLOBAL_THEORIES + THEORIES + FILES + DOCUMENT_FILES - - object Parser extends Parse.Parser - { - private val chapter: Parser[Chapter] = - { - val chapter_name = atom("chapter name", _.is_name) - - command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter(a) } - } - - private val session_entry: Parser[Session_Entry] = - { - val session_name = atom("session name", _.is_name) - - val option = - name ~ opt(keyword("=") ~! name ^^ { case _ ~ x => x }) ^^ { case x ~ y => (x, y) } - val options = keyword("[") ~> rep1sep(option, keyword(",")) <~ keyword("]") - - val theories = - (keyword(GLOBAL_THEORIES) | keyword(THEORIES)) ~! - ((options | success(Nil)) ~ rep(theory_xname)) ^^ - { case x ~ (y ~ z) => (x == GLOBAL_THEORIES, y, z) } - - val document_files = - keyword(DOCUMENT_FILES) ~! - ((keyword("(") ~! (keyword(IN) ~! (path ~ keyword(")"))) ^^ - { case _ ~ (_ ~ (x ~ _)) => x } | success("document")) ~ - rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) } - - command(SESSION) ~! - (session_name ~ - ((keyword("(") ~! 
(rep1(name) <~ keyword(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~ - ((keyword(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~ - (keyword("=") ~! - (opt(session_name ~! keyword("+") ^^ { case x ~ _ => x }) ~ - ((keyword(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~ - ((keyword(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~ - rep1(theories) ~ - ((keyword(FILES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~ - (rep(document_files) ^^ (x => x.flatten))))) ^^ - { case pos ~ (a ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i))) => - Session_Entry(pos, a, b, c, d, e, f, g, h, i) } - } - - def parse_entries(root: Path): List[(String, Session_Entry)] = - { - val toks = root_syntax.scan(File.read(root)) - - parse_all(rep(chapter | session_entry), Token.reader(toks, root.implode)) match { - case Success(result, _) => - var chapter = chapter_default - val entries = new mutable.ListBuffer[(String, Session_Entry)] - result.foreach { - case Chapter(name) => chapter = name - case session_entry: Session_Entry => entries += ((chapter, session_entry)) - } - entries.toList - case bad => error(bad.toString) - } - } - } - - - /* find sessions within certain directories */ - - private val ROOT = Path.explode("ROOT") - private val ROOTS = Path.explode("ROOTS") - - private def is_session_dir(dir: Path): Boolean = - (dir + ROOT).is_file || (dir + ROOTS).is_file - - private def check_session_dir(dir: Path): Path = - if (is_session_dir(dir)) dir - else error("Bad session root directory: " + dir.toString) - - def find_sessions(options: Options, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil) - : Session_Tree = - { - def find_dir(select: Boolean, dir: Path): List[(String, Session_Info)] = - find_root(select, dir) ::: find_roots(select, dir) - - def find_root(select: Boolean, dir: Path): List[(String, Session_Info)] = - { - val root = dir + ROOT - if (root.is_file) - Parser.parse_entries(root).map(p => session_info(options, select, dir, p._1, p._2)) - else Nil - } - - def find_roots(select: Boolean, dir: Path): List[(String, Session_Info)] = - { - val roots = dir + ROOTS - if (roots.is_file) { - for { - line <- split_lines(File.read(roots)) - if !(line == "" || line.startsWith("#")) - dir1 = - try { check_session_dir(dir + Path.explode(line)) } - catch { - case ERROR(msg) => - error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString) - } - info <- find_dir(select, dir1) - } yield info - } - else Nil - } - - val default_dirs = Isabelle_System.components().filter(is_session_dir(_)) - dirs.foreach(check_session_dir(_)) - select_dirs.foreach(check_session_dir(_)) - - Session_Tree( - for { - (select, dir) <- (default_dirs ::: dirs).map((false, _)) ::: select_dirs.map((true, _)) - info <- find_dir(select, dir) - } yield info) - } - - - - /** build **/ - - /* queue */ - - object Queue - { - def apply(tree: Session_Tree, load_timings: String => (List[Properties.T], Double)): Queue = - { - val graph = tree.graph - val sessions = graph.keys - - val timings = - sessions.par.map((name: String) => - Exn.capture { (name, load_timings(name)) }).toList.map(Exn.release(_)) - val command_timings = - Map(timings.map({ case (name, (ts, _)) => (name, ts) }): _*).withDefaultValue(Nil) - val session_timing = - Map(timings.map({ case (name, (_, t)) => (name, t) }): _*).withDefaultValue(0.0) - - def outdegree(name: String): Int = graph.imm_succs(name).size - def timeout(name: String): Double = tree(name).options.real("timeout") - - object Ordering extends 
scala.math.Ordering[String] - { - def compare_timing(name1: String, name2: String): Int = - { - val t1 = session_timing(name1) - val t2 = session_timing(name2) - if (t1 == 0.0 || t2 == 0.0) 0 - else t1 compare t2 - } - - def compare(name1: String, name2: String): Int = - outdegree(name2) compare outdegree(name1) match { - case 0 => - compare_timing(name2, name1) match { - case 0 => - timeout(name2) compare timeout(name1) match { - case 0 => name1 compare name2 - case ord => ord - } - case ord => ord - } - case ord => ord - } - } - - new Queue(graph, SortedSet(sessions: _*)(Ordering), command_timings) - } - } - - final class Queue private( - graph: Graph[String, Session_Info], - order: SortedSet[String], - val command_timings: String => List[Properties.T]) - { - def is_inner(name: String): Boolean = !graph.is_maximal(name) - - def is_empty: Boolean = graph.is_empty - - def - (name: String): Queue = - new Queue(graph.del_node(name), - order - name, // FIXME scala-2.10.0 TreeSet problem!? - command_timings) - - def dequeue(skip: String => Boolean): Option[(String, Session_Info)] = - { - val it = order.iterator.dropWhile(name => - skip(name) - || !graph.defined(name) // FIXME scala-2.10.0 TreeSet problem!? - || !graph.is_minimal(name)) - if (it.hasNext) { val name = it.next; Some((name, graph.get_node(name))) } - else None - } - } - - - /* source dependencies and static content */ - - sealed case class Session_Content( - loaded_theories: Set[String], - known_theories: Map[String, Document.Node.Name], - keywords: Thy_Header.Keywords, - syntax: Outer_Syntax, - sources: List[(Path, SHA1.Digest)]) - - sealed case class Deps(deps: Map[String, Session_Content]) - { - def is_empty: Boolean = deps.isEmpty - def apply(name: String): Session_Content = deps(name) - def sources(name: String): List[SHA1.Digest] = deps(name).sources.map(_._2) - } - - def dependencies(progress: Progress, inlined_files: Boolean, - verbose: Boolean, list_files: Boolean, tree: Session_Tree): Deps = - Deps((Map.empty[String, Session_Content] /: tree.topological_order)( - { case (deps, (name, info)) => - if (progress.stopped) throw Exn.Interrupt() - - try { - val (loaded_theories0, known_theories0, syntax0) = - info.parent.map(deps(_)) match { - case None => - (Set.empty[String], Map.empty[String, Document.Node.Name], Outer_Syntax.init()) - case Some(parent) => - (parent.loaded_theories, parent.known_theories, parent.syntax) - } - val resources = new Resources(loaded_theories0, known_theories0, syntax0) - val thy_info = new Thy_Info(resources) - - if (verbose || list_files) { - val groups = - if (info.groups.isEmpty) "" - else info.groups.mkString(" (", " ", ")") - progress.echo("Session " + info.chapter + "/" + name + groups) - } - - val thy_deps = - { - val root_theories = - info.theories.flatMap({ - case (global, _, thys) => - thys.map(thy => - (resources.node_name( - if (global) "" else name, info.dir + Resources.thy_path(thy)), info.pos)) - }) - val thy_deps = thy_info.dependencies(name, root_theories) - - thy_deps.errors match { - case Nil => thy_deps - case errs => error(cat_lines(errs)) - } - } - - val known_theories = - (known_theories0 /: thy_deps.deps)({ case (known, dep) => - val name = dep.name - known.get(name.theory) match { - case Some(name1) if name != name1 => - error("Duplicate theory " + quote(name.node) + " vs. 
" + quote(name1.node)) - case _ => - known + (name.theory -> name) + (Long_Name.base_name(name.theory) -> name) - } - }) - - val loaded_theories = thy_deps.loaded_theories - val keywords = thy_deps.keywords - val syntax = thy_deps.syntax.asInstanceOf[Outer_Syntax] - - val loaded_files = if (inlined_files) thy_deps.loaded_files else Nil - - val all_files = - (thy_deps.deps.map(dep => Path.explode(dep.name.node)) ::: loaded_files ::: - info.files.map(file => info.dir + file) ::: - info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand) - - if (list_files) - progress.echo(cat_lines(all_files.map(_.implode).sorted.map(" " + _))) - - val sources = all_files.map(p => (p, SHA1.digest(p.file))) - - val content = - Session_Content(loaded_theories, known_theories, keywords, syntax, sources) - deps + (name -> content) - } - catch { - case ERROR(msg) => - cat_error(msg, "The error(s) above occurred in session " + - quote(name) + Position.here(info.pos)) - } - })) - - def session_dependencies( - options: Options, - inlined_files: Boolean, - dirs: List[Path], - sessions: List[String]): Deps = - { - val (_, tree) = find_sessions(options, dirs = dirs).selection(false, false, Nil, sessions) - dependencies(Ignore_Progress, inlined_files, false, false, tree) - } - - def session_content( - options: Options, - inlined_files: Boolean, - dirs: List[Path], - session: String): Session_Content = - { - session_dependencies(options, inlined_files, dirs, List(session))(session) - } - - def outer_syntax(options: Options, session: String): Outer_Syntax = - session_content(options, false, Nil, session).syntax - - - /* jobs */ - - private class Job(progress: Progress, - name: String, val info: Session_Info, output: Path, do_output: Boolean, - verbose: Boolean, browser_info: Path, command_timings: List[Properties.T]) - { - def output_path: Option[Path] = if (do_output) Some(output) else None - - private val parent = info.parent.getOrElse("") - - private val args_file = Isabelle_System.tmp_file("args") - File.write(args_file, YXML.string_of_body( - if (is_pure(name)) Options.encode(info.options) - else - { - val theories = info.theories.map(x => (x._2, x._3)) - import XML.Encode._ - pair(list(properties), pair(bool, pair(Options.encode, pair(bool, pair(Path.encode, - pair(list(pair(Path.encode, Path.encode)), pair(string, pair(string, pair(string, - list(pair(Options.encode, list(Path.encode))))))))))))( - (command_timings, (do_output, (info.options, (verbose, (browser_info, - (info.document_files, (parent, (info.chapter, (name, theories)))))))))) - })) - - private val env = - Map("INPUT" -> parent, "TARGET" -> name, "OUTPUT" -> Isabelle_System.standard_path(output), - (if (is_pure(name)) "ISABELLE_PROCESS_OPTIONS" else "ARGS_FILE") -> - Isabelle_System.posix_path(args_file)) - - private val script = - if (is_pure(name)) { - if (do_output) "./build " + name + " \"$OUTPUT\"" - else """ rm -f "$OUTPUT"; ./build """ + name - } - else { - """ - . "$ISABELLE_HOME/lib/scripts/timestart.bash" - """ + - (if (do_output) - """ - "$ISABELLE_PROCESS" -e "Build.build \"$ARGS_FILE\";" -q -w "$INPUT" "$OUTPUT" - """ - else - """ - rm -f "$OUTPUT"; "$ISABELLE_PROCESS" -e "Build.build \"$ARGS_FILE\";" -r -q "$INPUT" - """) + - """ - RC="$?" - - . 
"$ISABELLE_HOME/lib/scripts/timestop.bash" - - if [ "$RC" -eq 0 ]; then - echo "Finished $TARGET ($TIMES_REPORT)" >&2 - fi - - exit "$RC" - """ - } - - private val (thread, result) = - Simple_Thread.future("build") { - Isabelle_System.bash_env(info.dir.file, env, script, - progress_stdout = (line: String) => - Library.try_unprefix("\floading_theory = ", line) match { - case Some(theory) => progress.theory(name, theory) - case None => - }, - progress_limit = - info.options.int("process_output_limit") match { - case 0 => None - case m => Some(m * 1000000L) - }, - strict = false) - } - - def terminate: Unit = thread.interrupt - def is_finished: Boolean = result.is_finished - - @volatile private var was_timeout = false - private val timeout_request: Option[Event_Timer.Request] = - { - val timeout = info.options.seconds("timeout") - if (timeout > Time.zero) - Some(Event_Timer.request(Time.now() + timeout) { terminate; was_timeout = true }) - else None - } - - def join: Isabelle_System.Bash_Result = - { - val res = result.join - - args_file.delete - timeout_request.foreach(_.cancel) - - if (res.rc == Exn.Interrupt.return_code) { - if (was_timeout) res.add_err(Output.error_text("Timeout")).set_rc(1) - else res.add_err(Output.error_text("Interrupt")) - } - else res - } - } - - - /* inlined properties (YXML) */ - - object Props - { - def parse(text: String): Properties.T = XML.Decode.properties(YXML.parse_body(text)) - - def parse_lines(prefix: String, lines: List[String]): List[Properties.T] = - for (line <- lines; s <- Library.try_unprefix(prefix, line)) yield parse(s) - - def find_parse_line(prefix: String, lines: List[String]): Option[Properties.T] = - lines.find(_.startsWith(prefix)).map(line => parse(line.substring(prefix.length))) - } - - - /* log files */ - - private val LOG = Path.explode("log") - private def log(name: String): Path = LOG + Path.basic(name) - private def log_gz(name: String): Path = log(name).ext("gz") - - private val SESSION_NAME = "\fSession.name = " - - - sealed case class Log_Info( - name: String, - stats: List[Properties.T], - tasks: List[Properties.T], - command_timings: List[Properties.T], - session_timing: Properties.T) - - def parse_log(full_stats: Boolean, text: String): Log_Info = - { - val lines = split_lines(text) - val xml_cache = new XML.Cache() - def parse_lines(prfx: String): List[Properties.T] = - Props.parse_lines(prfx, lines).map(xml_cache.props(_)) - - val name = - lines.find(_.startsWith(SESSION_NAME)).map(_.substring(SESSION_NAME.length)) getOrElse "" - val stats = if (full_stats) parse_lines("\fML_statistics = ") else Nil - val tasks = if (full_stats) parse_lines("\ftask_statistics = ") else Nil - val command_timings = parse_lines("\fcommand_timing = ") - val session_timing = Props.find_parse_line("\fTiming = ", lines) getOrElse Nil - Log_Info(name, stats, tasks, command_timings, session_timing) - } - - - /* sources and heaps */ - - private def sources_stamp(digests: List[SHA1.Digest]): String = - digests.map(_.toString).sorted.mkString("sources: ", " ", "") - - private val no_heap: String = "heap: -" - - private def heap_stamp(heap: Option[Path]): String = - { - "heap: " + - (heap match { - case Some(path) => - val file = path.file - if (file.isFile) file.length.toString + " " + file.lastModified.toString - else "-" - case None => "-" - }) - } - - private def read_stamps(path: Path): Option[(String, String, String)] = - if (path.is_file) { - val stream = new GZIPInputStream (new BufferedInputStream(new FileInputStream(path.file))) - val reader = 
new BufferedReader(new InputStreamReader(stream, UTF8.charset)) - val (s, h1, h2) = - try { (reader.readLine, reader.readLine, reader.readLine) } - finally { reader.close } - if (s != null && s.startsWith("sources: ") && - h1 != null && h1.startsWith("heap: ") && - h2 != null && h2.startsWith("heap: ")) Some((s, h1, h2)) - else None - } - else None - - - /* build_results */ - - def build_results( - options: Options, - progress: Progress = Ignore_Progress, - requirements: Boolean = false, - all_sessions: Boolean = false, - build_heap: Boolean = false, - clean_build: Boolean = false, - dirs: List[Path] = Nil, - select_dirs: List[Path] = Nil, - session_groups: List[String] = Nil, - max_jobs: Int = 1, - list_files: Boolean = false, - no_build: Boolean = false, - system_mode: Boolean = false, - verbose: Boolean = false, - sessions: List[String] = Nil): Map[String, Int] = - { - /* session tree and dependencies */ - - val full_tree = find_sessions(options, dirs, select_dirs) - val (selected, selected_tree) = - full_tree.selection(requirements, all_sessions, session_groups, sessions) - - val deps = dependencies(progress, true, verbose, list_files, selected_tree) - - def make_stamp(name: String): String = - sources_stamp(selected_tree(name).entry_digest :: deps.sources(name)) - - - /* persistent information */ - - val (input_dirs, output_dir, browser_info) = - if (system_mode) { - val output_dir = Path.explode("~~/heaps/$ML_IDENTIFIER") - (List(output_dir), output_dir, Path.explode("~~/browser_info")) - } - else { - val output_dir = Path.explode("$ISABELLE_OUTPUT") - (output_dir :: Isabelle_System.find_logics_dirs(), output_dir, - Path.explode("$ISABELLE_BROWSER_INFO")) - } - - def find_log(name: String): Option[(Path, Path)] = - input_dirs.find(dir => (dir + log(name)).is_file).map(dir => (dir, dir + log(name))) - - - /* queue with scheduling information */ - - def load_timings(name: String): (List[Properties.T], Double) = - { - val (path, text) = - find_log(name + ".gz") match { - case Some((_, path)) => (path, File.read_gzip(path)) - case None => - find_log(name) match { - case Some((_, path)) => (path, File.read(path)) - case None => (Path.current, "") - } - } - - def ignore_error(msg: String): (List[Properties.T], Double) = - { - Output.warning("Ignoring bad log file: " + path + (if (msg == "") "" else "\n" + msg)) - (Nil, 0.0) - } - - try { - val info = parse_log(false, text) - val session_timing = Markup.Elapsed.unapply(info.session_timing) getOrElse 0.0 - (info.command_timings, session_timing) - } - catch { - case ERROR(msg) => ignore_error(msg) - case exn: java.lang.Error => ignore_error(Exn.message(exn)) - case _: XML.Error => ignore_error("") - } - } - - val queue = Queue(selected_tree, load_timings) - - - /* main build process */ - - // prepare log dir - Isabelle_System.mkdirs(output_dir + LOG) - - // optional cleanup - if (clean_build) { - for (name <- full_tree.graph.all_succs(selected)) { - val files = - List(Path.basic(name), log(name), log_gz(name)).map(output_dir + _).filter(_.is_file) - if (!files.isEmpty) progress.echo("Cleaning " + name + " ...") - if (!files.forall(p => p.file.delete)) progress.echo(name + " FAILED to delete") - } - } - - // scheduler loop - case class Result(current: Boolean, heap: String, rc: Int) - - def sleep() - { - try { Thread.sleep(500) } - catch { case Exn.Interrupt() => Exn.Interrupt.impose() } - } - - @tailrec def loop( - pending: Queue, - running: Map[String, (String, Job)], - results: Map[String, Result]): Map[String, Result] = - { - if 
(pending.is_empty) results - else { - if (progress.stopped) - for ((_, (_, job)) <- running) job.terminate - - running.find({ case (_, (_, job)) => job.is_finished }) match { - case Some((name, (parent_heap, job))) => - //{{{ finish job - - val res = job.join - progress.echo(res.err) - - val heap = - if (res.rc == 0) { - (output_dir + log(name)).file.delete - - val sources = make_stamp(name) - val heap = heap_stamp(job.output_path) - File.write_gzip(output_dir + log_gz(name), - Library.terminate_lines(sources :: parent_heap :: heap :: res.out_lines)) - - heap - } - else { - (output_dir + Path.basic(name)).file.delete - (output_dir + log_gz(name)).file.delete - - File.write(output_dir + log(name), Library.terminate_lines(res.out_lines)) - progress.echo(name + " FAILED") - if (res.rc != Exn.Interrupt.return_code) { - progress.echo("(see also " + (output_dir + log(name)).file.toString + ")") - val lines = res.out_lines.filterNot(_.startsWith("\f")) - val tail = lines.drop(lines.length - 20 max 0) - progress.echo("\n" + cat_lines(tail)) - } - - no_heap - } - loop(pending - name, running - name, - results + (name -> Result(false, heap, res.rc))) - //}}} - case None if (running.size < (max_jobs max 1)) => - //{{{ check/start next job - pending.dequeue(running.isDefinedAt(_)) match { - case Some((name, info)) => - val parent_result = - info.parent match { - case None => Result(true, no_heap, 0) - case Some(parent) => results(parent) - } - val output = output_dir + Path.basic(name) - val do_output = build_heap || queue.is_inner(name) - - val (current, heap) = - { - find_log(name + ".gz") match { - case Some((dir, path)) => - read_stamps(path) match { - case Some((s, h1, h2)) => - val heap = heap_stamp(Some(dir + Path.basic(name))) - (s == make_stamp(name) && h1 == parent_result.heap && h2 == heap && - !(do_output && heap == no_heap), heap) - case None => (false, no_heap) - } - case None => (false, no_heap) - } - } - val all_current = current && parent_result.current - - if (all_current) - loop(pending - name, running, results + (name -> Result(true, heap, 0))) - else if (no_build) { - if (verbose) progress.echo("Skipping " + name + " ...") - loop(pending - name, running, results + (name -> Result(false, heap, 1))) - } - else if (parent_result.rc == 0 && !progress.stopped) { - progress.echo((if (do_output) "Building " else "Running ") + name + " ...") - val job = - new Job(progress, name, info, output, do_output, verbose, browser_info, - queue.command_timings(name)) - loop(pending, running + (name -> (parent_result.heap, job)), results) - } - else { - progress.echo(name + " CANCELLED") - loop(pending - name, running, results + (name -> Result(false, heap, 1))) - } - case None => sleep(); loop(pending, running, results) - } - ///}}} - case None => sleep(); loop(pending, running, results) - } - } - } - - - /* build results */ - - val results = - if (deps.is_empty) { - progress.echo(Output.warning_text("Nothing to build")) - Map.empty[String, Result] - } - else loop(queue, Map.empty, Map.empty) - - - /* global browser info */ - - if (!no_build) { - val browser_chapters = - (for { - (name, result) <- results.iterator - if result.rc == 0 - info = full_tree(name) - if info.options.bool("browser_info") - } yield (info.chapter, (name, info.description))).toList.groupBy(_._1). 
- map({ case (chapter, es) => (chapter, es.map(_._2)) }).filterNot(_._2.isEmpty) - - for ((chapter, entries) <- browser_chapters) - Present.update_chapter_index(browser_info, chapter, entries) - - if (!browser_chapters.isEmpty && !(browser_info + Path.explode("index.html")).is_file) - { - Isabelle_System.mkdirs(browser_info) - File.copy(Path.explode("~~/lib/logo/isabelle.gif"), - browser_info + Path.explode("isabelle.gif")) - File.write(browser_info + Path.explode("index.html"), - File.read(Path.explode("~~/lib/html/library_index_header.template")) + - File.read(Path.explode("~~/lib/html/library_index_content.template")) + - File.read(Path.explode("~~/lib/html/library_index_footer.template"))) - } - } - - - /* results */ - - results.map({ case (name, result) => (name, result.rc) }) - } - - - /* build */ - - def build( - options: Options, - progress: Progress = Ignore_Progress, - requirements: Boolean = false, - all_sessions: Boolean = false, - build_heap: Boolean = false, - clean_build: Boolean = false, - dirs: List[Path] = Nil, - select_dirs: List[Path] = Nil, - session_groups: List[String] = Nil, - max_jobs: Int = 1, - list_files: Boolean = false, - no_build: Boolean = false, - system_mode: Boolean = false, - verbose: Boolean = false, - sessions: List[String] = Nil): Int = - { - val results = - build_results(options, progress, requirements, all_sessions, - build_heap, clean_build, dirs, select_dirs, session_groups, max_jobs, - list_files, no_build, system_mode, verbose, sessions) - - val rc = (0 /: results)({ case (rc1, (_, rc2)) => rc1 max rc2 }) - if (rc != 0 && (verbose || !no_build)) { - val unfinished = - (for ((name, r) <- results.iterator if r != 0) yield name).toList.sorted - progress.echo("Unfinished session(s): " + commas(unfinished)) - } - rc - } - - - /* command line entry point */ - - def main(args: Array[String]) - { - Command_Line.tool { - args.toList match { - case - Properties.Value.Boolean(requirements) :: - Properties.Value.Boolean(all_sessions) :: - Properties.Value.Boolean(build_heap) :: - Properties.Value.Boolean(clean_build) :: - Properties.Value.Int(max_jobs) :: - Properties.Value.Boolean(list_files) :: - Properties.Value.Boolean(no_build) :: - Properties.Value.Boolean(system_mode) :: - Properties.Value.Boolean(verbose) :: - Command_Line.Chunks(dirs, select_dirs, session_groups, build_options, sessions) => - val options = (Options.init() /: build_options)(_ + _) - val progress = new Console_Progress(verbose) - progress.interrupt_handler { - build(options, progress, requirements, all_sessions, build_heap, clean_build, - dirs.map(Path.explode(_)), select_dirs.map(Path.explode(_)), session_groups, - max_jobs, list_files, no_build, system_mode, verbose, sessions) - } - case _ => error("Bad arguments:\n" + cat_lines(args)) - } - } - } -} - diff --git a/core/Pure/Tools/build_console.scala b/core/Pure/Tools/build_console.scala deleted file mode 100644 index d8390e8a..00000000 --- a/core/Pure/Tools/build_console.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* Title: Pure/Tools/build_console.scala - Author: Makarius - -Check and build Isabelle session for console tool. 
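A minimal sketch of driving the check-then-build entry point defined below, again assuming the Isabelle/Scala classes are on the classpath; the object name and the session name "HOL" are placeholders.

import isabelle._

object Console_Check_Sketch
{
  def main(args: Array[String]): Unit =
  {
    // returns 0 straight away if the session heap is already up to date,
    // otherwise runs a full build of that single session
    val rc =
      Build_Console.build_console(
        Options.init(),
        progress = new Build.Console_Progress(verbose = true),
        session = "HOL")
    sys.exit(rc)
  }
}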
-*/ - -package isabelle - - -object Build_Console -{ - /* build_console */ - - def build_console( - options: Options, - progress: Build.Progress = Build.Ignore_Progress, - dirs: List[Path] = Nil, - no_build: Boolean = false, - system_mode: Boolean = false, - session: String): Int = - { - if (no_build || - Build.build(options = options, build_heap = true, no_build = true, - dirs = dirs, sessions = List(session)) == 0) 0 - else { - progress.echo("Build started for Isabelle/" + session + " ...") - Build.build(options = options, progress = progress, build_heap = true, - dirs = dirs, system_mode = system_mode, sessions = List(session)) - } - } - - - /* command line entry point */ - - def main(args: Array[String]) - { - Command_Line.tool { - args.toList match { - case - session :: - Properties.Value.Boolean(no_build) :: - Properties.Value.Boolean(system_mode) :: - options_file :: - Command_Line.Chunks(dirs, system_options) => - val options = (Options.init() /: system_options)(_ + _) - File.write(Path.explode(options_file), YXML.string_of_body(options.encode)) - - val progress = new Build.Console_Progress() - progress.interrupt_handler { - build_console(options, progress, - dirs.map(Path.explode(_)), no_build, system_mode, session) - } - case _ => error("Bad arguments:\n" + cat_lines(args)) - } - } - } -} - diff --git a/core/Pure/Tools/build_doc.scala b/core/Pure/Tools/build_doc.scala deleted file mode 100644 index 07dd358d..00000000 --- a/core/Pure/Tools/build_doc.scala +++ /dev/null @@ -1,91 +0,0 @@ -/* Title: Pure/Tools/build_doc.scala - Author: Makarius - -Build Isabelle documentation. -*/ - -package isabelle - - -import java.io.{File => JFile} - - -object Build_Doc -{ - /* build_doc */ - - def build_doc( - options: Options, - progress: Build.Progress = Build.Ignore_Progress, - all_docs: Boolean = false, - max_jobs: Int = 1, - system_mode: Boolean = false, - docs: List[String] = Nil): Int = - { - val selection = - for { - (name, info) <- Build.find_sessions(options).topological_order - if info.groups.contains("doc") - doc = info.options.string("document_variants") - if all_docs || docs.contains(doc) - } yield (doc, name) - - val selected_docs = selection.map(_._1) - val sessions = selection.map(_._2) - - docs.filter(doc => !selected_docs.contains(doc)) match { - case Nil => - case bad => error("No documentation session for " + commas_quote(bad)) - } - - progress.echo("Build started for documentation " + commas_quote(selected_docs)) - - val rc1 = - Build.build(options, progress, requirements = true, build_heap = true, - max_jobs = max_jobs, system_mode = system_mode, sessions = sessions) - if (rc1 == 0) { - Isabelle_System.with_tmp_dir("document_output")(output => - { - val rc2 = - Build.build( - options.bool.update("browser_info", false). - string.update("document", "pdf"). 
- string.update("document_output", Isabelle_System.posix_path(output)), - progress, clean_build = true, max_jobs = max_jobs, system_mode = system_mode, - sessions = sessions) - if (rc2 == 0) { - val doc_dir = Path.explode("$ISABELLE_HOME/doc").file - for (doc <- selected_docs) { - val name = doc + ".pdf" - File.copy(new JFile(output, name), new JFile(doc_dir, name)) - } - } - rc2 - }) - } - else rc1 - } - - - /* command line entry point */ - - def main(args: Array[String]) - { - Command_Line.tool { - args.toList match { - case - Properties.Value.Boolean(all_docs) :: - Properties.Value.Int(max_jobs) :: - Properties.Value.Boolean(system_mode) :: - Command_Line.Chunks(docs) => - val options = Options.init() - val progress = new Build.Console_Progress() - progress.interrupt_handler { - build_doc(options, progress, all_docs, max_jobs, system_mode, docs) - } - case _ => error("Bad arguments:\n" + cat_lines(args)) - } - } - } -} - diff --git a/core/Pure/Tools/check_source.scala b/core/Pure/Tools/check_source.scala deleted file mode 100644 index 22f9d143..00000000 --- a/core/Pure/Tools/check_source.scala +++ /dev/null @@ -1,61 +0,0 @@ -/* Title: Pure/Tools/check_source.scala - Author: Makarius - -Some sanity checks for Isabelle sources. -*/ - -package isabelle - - -object Check_Source -{ - def check_file(path: Path) - { - val file_name = path.implode - val file_pos = path.position - def line_pos(i: Int) = Position.Line_File(i + 1, file_name) - - val content = File.read(path) - - for { (line, i) <- split_lines(content).iterator.zipWithIndex } - { - try { - Symbol.decode_strict(line) - - for { c <- Word.codepoint_iterator(line); if c > 128 && !Character.isAlphabetic(c) } - { - Output.warning("Suspicious Unicode character " + quote(Word.codepoint(c)) + - Position.here(line_pos(i))) - } - } - catch { case ERROR(msg) => Output.error_message(msg + Position.here(line_pos(i))) } - - if (line.contains('\t')) - Output.warning("TAB character" + Position.here(line_pos(i))) - } - - if (content.contains('\r')) - Output.warning("CR character" + Position.here(file_pos)) - } - - def check_hg(root: Path) - { - Output.writeln("Checking " + root + " ...") - Isabelle_System.hg("--repository " + Isabelle_System.shell_path(root) + " root").check_error - for { - file <- Isabelle_System.hg("manifest", root).check_error.out_lines - if file.endsWith(".thy") || file.endsWith(".ML") - } check_file(root + Path.explode(file)) - } - - - /* command line entry point */ - - def main(args: Array[String]) - { - Command_Line.tool0 { - for (root <- args) check_hg(Path.explode(root)) - } - } -} - diff --git a/core/Pure/Tools/doc.scala b/core/Pure/Tools/doc.scala deleted file mode 100644 index b545eb54..00000000 --- a/core/Pure/Tools/doc.scala +++ /dev/null @@ -1,107 +0,0 @@ -/* Title: Pure/Tools/doc.scala - Author: Makarius - -Access to Isabelle documentation. 
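A minimal sketch against the two public entry points defined below (contents and view), assuming the Isabelle/Scala classes are on the classpath; the object name is a placeholder.

import isabelle._

object Doc_Sketch
{
  def main(args: Array[String]): Unit =
  {
    val entries = Doc.contents()

    // plain-text listing of the documentation catalogue
    entries.foreach {
      case Doc.Section(text, _) => println("== " + text)
      case Doc.Doc(name, title, _) => println("  " + name + ": " + title)
      case Doc.Text_File(name, _) => println("  " + name)
    }

    // show the first manual in the catalogue, if any (as text or via the PDF viewer)
    entries.collectFirst { case Doc.Doc(_, _, path) => path }.foreach(Doc.view)
  }
}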
-*/ - -package isabelle - - -import scala.util.matching.Regex - - -object Doc -{ - /* dirs */ - - def dirs(): List[Path] = - Path.split(Isabelle_System.getenv("ISABELLE_DOCS")).map(dir => - if (dir.is_dir) dir - else error("Bad documentation directory: " + dir)) - - - /* contents */ - - private def contents_lines(): List[(Path, String)] = - for { - dir <- dirs() - catalog = dir + Path.basic("Contents") - if catalog.is_file - line <- split_lines(Library.trim_line(File.read(catalog))) - } yield (dir, line) - - sealed abstract class Entry - case class Section(text: String, important: Boolean) extends Entry - case class Doc(name: String, title: String, path: Path) extends Entry - case class Text_File(name: String, path: Path) extends Entry - - def text_file(name: Path): Option[Text_File] = - { - val path = Path.variable("ISABELLE_HOME") + name - if (path.is_file) Some(Text_File(name.implode, path)) - else None - } - - private val Section_Entry = new Regex("""^(\S.*)\s*$""") - private val Doc_Entry = new Regex("""^\s+(\S+)\s+(.+)\s*$""") - - private def release_notes(): List[Entry] = - Section("Release notes", true) :: - Path.split(Isabelle_System.getenv_strict("ISABELLE_DOCS_RELEASE_NOTES")).flatMap(text_file(_)) - - private def examples(): List[Entry] = - Section("Examples", true) :: - Path.split(Isabelle_System.getenv_strict("ISABELLE_DOCS_EXAMPLES")).map(file => - text_file(file) match { - case Some(entry) => entry - case None => error("Bad entry in ISABELLE_DOCS_EXAMPLES: " + file) - }) - - def contents(): List[Entry] = - (for { - (dir, line) <- contents_lines() - entry <- - line match { - case Section_Entry(text) => - Library.try_unsuffix("!", text) match { - case None => Some(Section(text, false)) - case Some(txt) => Some(Section(txt, true)) - } - case Doc_Entry(name, title) => Some(Doc(name, title, dir + Path.basic(name))) - case _ => None - } - } yield entry) ::: release_notes() ::: examples() - - - /* view */ - - def view(path: Path) - { - if (path.is_file) Console.println(Library.trim_line(File.read(path))) - else { - val pdf = path.ext("pdf") - if (pdf.is_file) Isabelle_System.pdf_viewer(pdf) - else error("Bad Isabelle documentation file: " + pdf) - } - } - - - /* command line entry point */ - - def main(args: Array[String]) - { - Command_Line.tool0 { - val entries = contents() - if (args.isEmpty) Console.println(cat_lines(contents_lines().map(_._2))) - else { - args.foreach(arg => - entries.collectFirst { case Doc(name, _, path) if arg == name => path } match { - case Some(path) => view(path) - case None => error("No Isabelle documentation entry: " + quote(arg)) - } - ) - } - } - } -} - diff --git a/core/Pure/Tools/find_consts.ML b/core/Pure/Tools/find_consts.ML deleted file mode 100644 index 4cbe9470..00000000 --- a/core/Pure/Tools/find_consts.ML +++ /dev/null @@ -1,174 +0,0 @@ -(* Title: Pure/Tools/find_consts.ML - Author: Timothy Bourke and Gerwin Klein, NICTA - -Hoogle-like (http://www-users.cs.york.ac.uk/~ndm/hoogle) searching by -type over constants, but matching is not fuzzy. 
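The query grammar implemented below is a space-separated list of criteria, each optionally negated with a leading "-": a bare type pattern (loose match against any subtype of the constant's type), "strict:" followed by a type pattern (the whole type must be an instance), or "name:" followed by a name pattern. A hypothetical query in that syntax:

  find_consts "_ list => _ list" -name: "rev"

would list constants whose type contains an instance of the given function type while excluding any whose name matches "rev".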
-*) - -signature FIND_CONSTS = -sig - datatype criterion = - Strict of string - | Loose of string - | Name of string - val read_query: Position.T -> string -> (bool * criterion) list - val find_consts : Proof.context -> (bool * criterion) list -> unit -end; - -structure Find_Consts : FIND_CONSTS = -struct - -(* search criteria *) - -datatype criterion = - Strict of string - | Loose of string - | Name of string; - - -(* matching types/consts *) - -fun matches_subtype thy typat = - Term.exists_subtype (fn ty => Sign.typ_instance thy (ty, typat)); - -fun check_const pred (nm, (ty, _)) = - if pred (nm, ty) then SOME (Term.size_of_typ ty) else NONE; - -fun opt_not f (c as (_, (ty, _))) = - if is_some (f c) then NONE else SOME (Term.size_of_typ ty); - -fun filter_const _ _ NONE = NONE - | filter_const c f (SOME rank) = - (case f c of - NONE => NONE - | SOME i => SOME (Int.min (rank, i))); - - -(* pretty results *) - -fun pretty_criterion (b, c) = - let - fun prfx s = if b then s else "-" ^ s; - in - (case c of - Strict pat => Pretty.str (prfx "strict: " ^ quote pat) - | Loose pat => Pretty.str (prfx (quote pat)) - | Name name => Pretty.str (prfx "name: " ^ quote name)) - end; - -fun pretty_const ctxt (c, ty) = - let - val ty' = Logic.unvarifyT_global ty; - val const_space = Consts.space_of (Sign.consts_of (Proof_Context.theory_of ctxt)); - val markup = Name_Space.markup const_space c; - in - Pretty.block - [Pretty.mark markup (Pretty.str c), Pretty.str " ::", Pretty.brk 1, - Pretty.quote (Syntax.pretty_typ ctxt ty')] - end; - - -(* find_consts *) - -fun pretty_consts ctxt raw_criteria = - let - val thy = Proof_Context.theory_of ctxt; - val low_ranking = 10000; - - fun user_visible consts (nm, _) = - if Consts.is_concealed consts nm then NONE else SOME low_ranking; - - fun make_pattern crit = - let - val raw_T = Syntax.parse_typ ctxt crit; - val t = - Syntax.check_term - (Proof_Context.set_mode Proof_Context.mode_pattern ctxt) - (Term.dummy_pattern raw_T); - in Term.type_of t end; - - fun make_match (Strict arg) = - let val qty = make_pattern arg; in - fn (_, (ty, _)) => - let - val tye = Sign.typ_match thy (qty, ty) Vartab.empty; - val sub_size = - Vartab.fold (fn (_, (_, t)) => fn n => Term.size_of_typ t + n) tye 0; - in SOME sub_size end handle Type.TYPE_MATCH => NONE - end - | make_match (Loose arg) = - check_const (matches_subtype thy (make_pattern arg) o snd) - | make_match (Name arg) = check_const (match_string arg o fst); - - fun make_criterion (b, crit) = (if b then I else opt_not) (make_match crit); - val criteria = map make_criterion raw_criteria; - - val consts = Sign.consts_of thy; - val {constants, ...} = Consts.dest consts; - fun eval_entry c = - fold (filter_const c) (user_visible consts :: criteria) (SOME low_ranking); - - val matches = - fold (fn c => (case eval_entry c of NONE => I | SOME rank => cons (rank, c))) constants [] - |> sort (prod_ord (rev_order o int_ord) (string_ord o pairself fst)) - |> map (apsnd fst o snd); - - val position_markup = Position.markup (Position.thread_data ()) Markup.position; - in - Pretty.block - (Pretty.fbreaks - (Pretty.mark position_markup (Pretty.keyword1 "find_consts") :: - map pretty_criterion raw_criteria)) :: - Pretty.str "" :: - (if null matches then [Pretty.str "found nothing"] - else - Pretty.str ("found " ^ string_of_int (length matches) ^ " constant(s):") :: - grouped 10 Par_List.map (Pretty.item o single o pretty_const ctxt) matches) - end |> Pretty.fbreaks |> curry Pretty.blk 0; - -fun find_consts ctxt args = Pretty.writeln 
(pretty_consts ctxt args); - - -(* command syntax *) - -local - -val criterion = - Parse.reserved "strict" |-- Parse.!!! (Parse.$$$ ":" |-- Parse.xname) >> Strict || - Parse.reserved "name" |-- Parse.!!! (Parse.$$$ ":" |-- Parse.xname) >> Name || - Parse.xname >> Loose; - -val query = Scan.repeat ((Scan.option Parse.minus >> is_none) -- criterion); - -in - -fun read_query pos str = - Outer_Syntax.scan pos str - |> filter Token.is_proper - |> Scan.error (Scan.finite Token.stopper (Parse.!!! (query --| Scan.ahead Parse.eof))) - |> #1; - -val _ = - Outer_Syntax.improper_command @{command_spec "find_consts"} - "find constants by name / type patterns" - (query >> (fn spec => - Toplevel.keep (fn st => - Pretty.writeln (pretty_consts (Toplevel.context_of st) spec)))); - -end; - - -(* PIDE query operation *) - -val _ = - Query_Operation.register "find_consts" (fn {state, args, output_result} => - (case try Toplevel.context_of state of - SOME ctxt => - let - val [query_arg] = args; - val query = read_query Position.none query_arg; - in output_result (Pretty.string_of (pretty_consts ctxt query)) end - | NONE => error "Unknown context")); - -end; - diff --git a/core/Pure/Tools/find_theorems.ML b/core/Pure/Tools/find_theorems.ML deleted file mode 100644 index 933cad8b..00000000 --- a/core/Pure/Tools/find_theorems.ML +++ /dev/null @@ -1,561 +0,0 @@ -(* Title: Pure/Tools/find_theorems.ML - Author: Rafal Kolanski and Gerwin Klein, NICTA - Author: Lars Noschinski and Alexander Krauss, TU Muenchen - -Retrieve theorems from proof context. -*) - -signature FIND_THEOREMS = -sig - datatype 'term criterion = - Name of string | Intro | Elim | Dest | Solves | Simp of 'term | Pattern of 'term - type 'term query = { - goal: thm option, - limit: int option, - rem_dups: bool, - criteria: (bool * 'term criterion) list - } - val read_query: Position.T -> string -> (bool * string criterion) list - val find_theorems: Proof.context -> thm option -> int option -> bool -> - (bool * term criterion) list -> int option * (Facts.ref * thm) list - val find_theorems_cmd: Proof.context -> thm option -> int option -> bool -> - (bool * string criterion) list -> int option * (Facts.ref * thm) list - val pretty_thm: Proof.context -> Facts.ref * thm -> Pretty.T -end; - -structure Find_Theorems: FIND_THEOREMS = -struct - -(** search criteria **) - -datatype 'term criterion = - Name of string | Intro | Elim | Dest | Solves | Simp of 'term | Pattern of 'term; - -fun apply_dummies tm = - let - val (xs, _) = Term.strip_abs tm; - val tm' = Term.betapplys (tm, map (Term.dummy_pattern o #2) xs); - in #1 (Term.replace_dummy_patterns tm' 1) end; - -fun parse_pattern ctxt nm = - let - val consts = Proof_Context.consts_of ctxt; - val nm' = - (case Syntax.parse_term ctxt nm of - Const (c, _) => c - | _ => Consts.intern consts nm); - in - (case try (Consts.the_abbreviation consts) nm' of - SOME (_, rhs) => apply_dummies (Proof_Context.expand_abbrevs ctxt rhs) - | NONE => Proof_Context.read_term_pattern ctxt nm) - end; - -fun read_criterion _ (Name name) = Name name - | read_criterion _ Intro = Intro - | read_criterion _ Elim = Elim - | read_criterion _ Dest = Dest - | read_criterion _ Solves = Solves - | read_criterion ctxt (Simp str) = Simp (Proof_Context.read_term_pattern ctxt str) - | read_criterion ctxt (Pattern str) = Pattern (parse_pattern ctxt str); - -fun pretty_criterion ctxt (b, c) = - let - fun prfx s = if b then s else "-" ^ s; - in - (case c of - Name name => Pretty.str (prfx "name: " ^ quote name) - | Intro => Pretty.str (prfx "intro") 
- | Elim => Pretty.str (prfx "elim") - | Dest => Pretty.str (prfx "dest") - | Solves => Pretty.str (prfx "solves") - | Simp pat => Pretty.block [Pretty.str (prfx "simp:"), Pretty.brk 1, - Pretty.quote (Syntax.pretty_term ctxt (Term.show_dummy_patterns pat))] - | Pattern pat => Pretty.enclose (prfx "\"") "\"" - [Syntax.pretty_term ctxt (Term.show_dummy_patterns pat)]) - end; - - - -(** queries **) - -type 'term query = { - goal: thm option, - limit: int option, - rem_dups: bool, - criteria: (bool * 'term criterion) list -}; - -fun map_criteria f {goal, limit, rem_dups, criteria} = - {goal = goal, limit = limit, rem_dups = rem_dups, criteria = f criteria}; - - -(** search criterion filters **) - -(*generated filters are to be of the form - input: (Facts.ref * thm) - output: (p:int, s:int, t:int) option, where - NONE indicates no match - p is the primary sorting criterion - (eg. size of term) - s is the secondary sorting criterion - (eg. number of assumptions in the theorem) - t is the tertiary sorting criterion - (eg. size of the substitution for intro, elim and dest) - when applying a set of filters to a thm, fold results in: - (max p, max s, sum of all t) -*) - - -(* matching theorems *) - -fun is_nontrivial thy = Term.is_Const o Term.head_of o Object_Logic.drop_judgment thy; - -(*extract terms from term_src, refine them to the parts that concern us, - if po try match them against obj else vice versa. - trivial matches are ignored. - returns: smallest substitution size*) -fun is_matching_thm (extract_terms, refine_term) ctxt po obj term_src = - let - val thy = Proof_Context.theory_of ctxt; - - fun matches pat = - is_nontrivial thy pat andalso - Pattern.matches thy (if po then (pat, obj) else (obj, pat)); - - fun subst_size pat = - let val (_, subst) = - Pattern.match thy (if po then (pat, obj) else (obj, pat)) (Vartab.empty, Vartab.empty) - in Vartab.fold (fn (_, (_, t)) => fn n => size_of_term t + n) subst 0 end; - - fun best_match [] = NONE - | best_match xs = SOME (foldl1 Int.min xs); - - val match_thm = matches o refine_term; - in - map (subst_size o refine_term) (filter match_thm (extract_terms term_src)) - |> best_match - end; - - -(* filter_name *) - -fun filter_name str_pat (thmref, _) = - if match_string str_pat (Facts.name_of_ref thmref) - then SOME (0, 0, 0) else NONE; - - -(* filter intro/elim/dest/solves rules *) - -fun filter_dest ctxt goal (_, thm) = - let - val extract_dest = - (fn thm => if Thm.no_prems thm then [] else [Thm.full_prop_of thm], - hd o Logic.strip_imp_prems); - val prems = Logic.prems_of_goal goal 1; - - fun try_subst prem = is_matching_thm extract_dest ctxt true prem thm; - val successful = prems |> map_filter try_subst; - in - (*if possible, keep best substitution (one with smallest size)*) - (*dest rules always have assumptions, so a dest with one - assumption is as good as an intro rule with none*) - if not (null successful) then - SOME (size_of_term (Thm.prop_of thm), Thm.nprems_of thm - 1, foldl1 Int.min successful) - else NONE - end; - -fun filter_intro ctxt goal (_, thm) = - let - val extract_intro = (single o Thm.full_prop_of, Logic.strip_imp_concl); - val concl = Logic.concl_of_goal goal 1; - in - (case is_matching_thm extract_intro ctxt true concl thm of - SOME ss => SOME (size_of_term (Thm.prop_of thm), Thm.nprems_of thm, ss) - | NONE => NONE) - end; - -fun filter_elim ctxt goal (_, thm) = - if Thm.nprems_of thm > 0 then - let - val rule = Thm.full_prop_of thm; - val prems = Logic.prems_of_goal goal 1; - val goal_concl = Logic.concl_of_goal goal 
1; - val rule_mp = hd (Logic.strip_imp_prems rule); - val rule_concl = Logic.strip_imp_concl rule; - fun combine t1 t2 = Const ("*combine*", dummyT --> dummyT) $ (t1 $ t2); (* FIXME ?!? *) - val rule_tree = combine rule_mp rule_concl; - fun goal_tree prem = combine prem goal_concl; - fun try_subst prem = is_matching_thm (single, I) ctxt true (goal_tree prem) rule_tree; - val successful = prems |> map_filter try_subst; - in - (*elim rules always have assumptions, so an elim with one - assumption is as good as an intro rule with none*) - if is_nontrivial (Proof_Context.theory_of ctxt) (Thm.major_prem_of thm) - andalso not (null successful) then - SOME (size_of_term (Thm.prop_of thm), Thm.nprems_of thm - 1, foldl1 Int.min successful) - else NONE - end - else NONE; - -fun filter_solves ctxt goal = - let - val thy' = - Proof_Context.theory_of ctxt - |> Context_Position.set_visible_global (Context_Position.is_visible ctxt); - val ctxt' = Proof_Context.transfer thy' ctxt; - val goal' = Thm.transfer thy' goal; - - fun limited_etac thm i = - Seq.take (Options.default_int @{system_option find_theorems_tactic_limit}) o etac thm i; - fun try_thm thm = - if Thm.no_prems thm then rtac thm 1 goal' - else - (limited_etac thm THEN_ALL_NEW (Goal.norm_hhf_tac ctxt' THEN' Method.assm_tac ctxt')) - 1 goal'; - in - fn (_, thm) => - if is_some (Seq.pull (try_thm thm)) - then SOME (size_of_term (Thm.prop_of thm), Thm.nprems_of thm, 0) - else NONE - end; - - -(* filter_simp *) - -fun filter_simp ctxt t (_, thm) = - let - val mksimps = Simplifier.mksimps ctxt; - val extract_simp = - (map Thm.full_prop_of o mksimps, #1 o Logic.dest_equals o Logic.strip_imp_concl); - in - (case is_matching_thm extract_simp ctxt false t thm of - SOME ss => SOME (size_of_term (Thm.prop_of thm), Thm.nprems_of thm, ss) - | NONE => NONE) - end; - - -(* filter_pattern *) - -fun get_names t = Term.add_const_names t (Term.add_free_names t []); - -(*Including all constants and frees is only sound because matching - uses higher-order patterns. If full matching were used, then - constants that may be subject to beta-reduction after substitution - of frees should not be included for LHS set because they could be - thrown away by the substituted function. E.g. for (?F 1 2) do not - include 1 or 2, if it were possible for ?F to be (%x y. 3). 
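The comment before these filter definitions fixes their contract: each filter maps a theorem to an optional (p, s, t) ranking triple, and a set of filters folded over one theorem combines as (max p, max s, sum of t), with NONE meaning no match (opt_add in the ML source). The Scala sketch below shows just that combination rule; it omits the cached constant set that app_filters also threads through, and the example filters are invented.

object FilterScoreSketch {
  type Score = (Int, Int, Int) // (primary, secondary, tertiary) ranking criteria

  // Combination rule from the contract above: max of the first two
  // components, sum of the third; None means "no match" and is absorbing.
  def combine(a: Option[Score], b: Option[Score]): Option[Score] =
    (a, b) match {
      case (Some((p0, s0, t0)), Some((p1, s1, t1))) =>
        Some((p0 max p1, s0 max s1, t0 + t1))
      case _ => None
    }

  // Applying several filters to one theorem: fold from a zero score.
  def score[A](filters: List[A => Option[Score]], thm: A): Option[Score] =
    filters.foldLeft(Option((0, 0, 0))) { (acc, f) => combine(acc, f(thm)) }

  def main(args: Array[String]): Unit = {
    val filters: List[String => Option[Score]] =
      List(s => Some((s.length, 0, 0)), s => if (s.contains("x")) Some((0, 1, 2)) else None)
    println(score(filters, "x = x")) // Some((5,1,2))
    println(score(filters, "a = a")) // None: the second filter rejects it
  }
}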
The - largest possible set should always be included on the RHS.*) - -fun filter_pattern ctxt pat = - let - val pat_consts = get_names pat; - - fun check ((x, thm), NONE) = check ((x, thm), SOME (get_names (Thm.full_prop_of thm))) - | check ((_, thm), c as SOME thm_consts) = - (if subset (op =) (pat_consts, thm_consts) andalso - Pattern.matches_subterm (Proof_Context.theory_of ctxt) (pat, Thm.full_prop_of thm) - then SOME (size_of_term (Thm.prop_of thm), Thm.nprems_of thm, 0) else NONE, c); - in check end; - - -(* interpret criteria as filters *) - -local - -fun err_no_goal c = - error ("Current goal required for " ^ c ^ " search criterion"); - -fun filter_crit _ _ (Name name) = apfst (filter_name name) - | filter_crit _ NONE Intro = err_no_goal "intro" - | filter_crit _ NONE Elim = err_no_goal "elim" - | filter_crit _ NONE Dest = err_no_goal "dest" - | filter_crit _ NONE Solves = err_no_goal "solves" - | filter_crit ctxt (SOME goal) Intro = apfst (filter_intro ctxt (Thm.prop_of goal)) - | filter_crit ctxt (SOME goal) Elim = apfst (filter_elim ctxt (Thm.prop_of goal)) - | filter_crit ctxt (SOME goal) Dest = apfst (filter_dest ctxt (Thm.prop_of goal)) - | filter_crit ctxt (SOME goal) Solves = apfst (filter_solves ctxt goal) - | filter_crit ctxt _ (Simp pat) = apfst (filter_simp ctxt pat) - | filter_crit ctxt _ (Pattern pat) = filter_pattern ctxt pat; - -fun opt_not x = if is_some x then NONE else SOME (0, 0, 0); - -fun opt_add (SOME (a, c, x)) (SOME (b, d, y)) = SOME (Int.max (a,b), Int.max (c, d), x + y : int) - | opt_add _ _ = NONE; - -fun app_filters thm = - let - fun app (NONE, _, _) = NONE - | app (SOME v, _, []) = SOME (v, thm) - | app (r, consts, f :: fs) = - let val (r', consts') = f (thm, consts) - in app (opt_add r r', consts', fs) end; - in app end; - -in - -fun filter_criterion ctxt opt_goal (b, c) = - (if b then I else (apfst opt_not)) o filter_crit ctxt opt_goal c; - -fun sorted_filter filters thms = - let - fun eval_filters thm = app_filters thm (SOME (0, 0, 0), NONE, filters); - - (*filters return: (thm size, number of assumptions, substitution size) option, so - sort according to size of thm first, then number of assumptions, - then by the substitution size, then by term order *) - fun result_ord (((p0, s0, t0), (_, thm0)), ((p1, s1, t1), (_, thm1))) = - prod_ord int_ord (prod_ord int_ord (prod_ord int_ord Term_Ord.term_ord)) - ((p1, (s1, (t1, Thm.full_prop_of thm1))), (p0, (s0, (t0, Thm.full_prop_of thm0)))); - in - grouped 100 Par_List.map eval_filters thms - |> map_filter I |> sort result_ord |> map #2 - end; - -fun lazy_filter filters = - let - fun lazy_match thms = Seq.make (fn () => first_match thms) - and first_match [] = NONE - | first_match (thm :: thms) = - (case app_filters thm (SOME (0, 0, 0), NONE, filters) of - NONE => first_match thms - | SOME (_, t) => SOME (t, lazy_match thms)); - in lazy_match end; - -end; - - -(* removing duplicates, preferring nicer names, roughly O(n log n) *) - -local - -val index_ord = option_ord (K EQUAL); -val hidden_ord = bool_ord o pairself Long_Name.is_hidden; -val qual_ord = int_ord o pairself Long_Name.qualification; -val txt_ord = int_ord o pairself size; - -fun nicer_name (x, i) (y, j) = - (case hidden_ord (x, y) of EQUAL => - (case index_ord (i, j) of EQUAL => - (case qual_ord (x, y) of EQUAL => txt_ord (x, y) | ord => ord) - | ord => ord) - | ord => ord) <> GREATER; - -fun rem_cdups nicer xs = - let - fun rem_c rev_seen [] = rev rev_seen - | rem_c rev_seen [x] = rem_c (x :: rev_seen) [] - | rem_c rev_seen ((x as ((n, thm), 
_)) :: (y as ((n', thm'), _)) :: rest) = - if Thm.eq_thm_prop (thm, thm') - then rem_c rev_seen ((if nicer n n' then x else y) :: rest) - else rem_c (x :: rev_seen) (y :: rest); - in rem_c [] xs end; - -in - -fun nicer_shortest ctxt = - let - fun extern_shortest name = - Name_Space.extern_shortest ctxt - (Facts.space_of (Proof_Context.facts_of_fact ctxt name)) name; - - fun nicer (Facts.Named ((x, _), i)) (Facts.Named ((y, _), j)) = - nicer_name (extern_shortest x, i) (extern_shortest y, j) - | nicer (Facts.Fact _) (Facts.Named _) = true - | nicer (Facts.Named _) (Facts.Fact _) = false - | nicer (Facts.Fact _) (Facts.Fact _) = true; - in nicer end; - -fun rem_thm_dups nicer xs = - (xs ~~ (1 upto length xs)) - |> sort (Term_Ord.fast_term_ord o pairself (Thm.full_prop_of o #2 o #1)) - |> rem_cdups nicer - |> sort (int_ord o pairself #2) - |> map #1; - -end; - - - -(** main operations **) - -(* filter_theorems *) - -fun all_facts_of ctxt = - let - val local_facts = Proof_Context.facts_of ctxt; - val global_facts = Global_Theory.facts_of (Proof_Context.theory_of ctxt); - in - maps Facts.selections - (Facts.dest_static false [global_facts] local_facts @ - Facts.dest_static false [] global_facts) - end; - -fun filter_theorems ctxt theorems query = - let - val {goal = opt_goal, limit = opt_limit, rem_dups, criteria} = query; - val filters = map (filter_criterion ctxt opt_goal) criteria; - - fun find_all theorems = - let - val raw_matches = sorted_filter filters theorems; - - val matches = - if rem_dups - then rem_thm_dups (nicer_shortest ctxt) raw_matches - else raw_matches; - - val len = length matches; - val lim = the_default (Options.default_int @{system_option find_theorems_limit}) opt_limit; - in (SOME len, drop (Int.max (len - lim, 0)) matches) end; - - val find = - if rem_dups orelse is_none opt_limit - then find_all - else pair NONE o Seq.list_of o Seq.take (the opt_limit) o lazy_filter filters; - - in find theorems end; - -fun filter_theorems_cmd ctxt theorems raw_query = - filter_theorems ctxt theorems (map_criteria (map (apsnd (read_criterion ctxt))) raw_query); - - -(* find_theorems *) - -local - -fun gen_find_theorems filter ctxt opt_goal opt_limit rem_dups raw_criteria = - let - val assms = - Proof_Context.get_fact ctxt (Facts.named "local.assms") - handle ERROR _ => []; - val add_prems = Seq.hd o TRY (Method.insert_tac assms 1); - val opt_goal' = Option.map add_prems opt_goal; - in - filter ctxt (all_facts_of ctxt) - {goal = opt_goal', limit = opt_limit, rem_dups = rem_dups, criteria = raw_criteria} - end; - -in - -val find_theorems = gen_find_theorems filter_theorems; -val find_theorems_cmd = gen_find_theorems filter_theorems_cmd; - -end; - - -(* pretty_theorems *) - -local - -fun pretty_ref ctxt thmref = - let - val (name, sel) = - (case thmref of - Facts.Named ((name, _), sel) => (name, sel) - | Facts.Fact _ => raise Fail "Illegal literal fact"); - in - [Pretty.mark (#1 (Proof_Context.markup_extern_fact ctxt name)) (Pretty.str name), - Pretty.str (Facts.string_of_selection sel), Pretty.str ":", Pretty.brk 1] - end; - -in - -fun pretty_thm ctxt (thmref, thm) = - Pretty.block (pretty_ref ctxt thmref @ [Display.pretty_thm ctxt thm]); - -fun pretty_theorems state opt_limit rem_dups raw_criteria = - let - val ctxt = Proof.context_of state; - val opt_goal = try Proof.simple_goal state |> Option.map #goal; - val criteria = map (apsnd (read_criterion ctxt)) raw_criteria; - - val (opt_found, theorems) = - filter_theorems ctxt (all_facts_of ctxt) - {goal = opt_goal, limit = opt_limit, 
rem_dups = rem_dups, criteria = criteria}; - val returned = length theorems; - - val tally_msg = - (case opt_found of - NONE => "displaying " ^ string_of_int returned ^ " theorem(s)" - | SOME found => - "found " ^ string_of_int found ^ " theorem(s)" ^ - (if returned < found - then " (" ^ string_of_int returned ^ " displayed)" - else "")); - val position_markup = Position.markup (Position.thread_data ()) Markup.position; - in - Pretty.block - (Pretty.fbreaks - (Pretty.mark position_markup (Pretty.keyword1 "find_theorems") :: - map (pretty_criterion ctxt) criteria)) :: - Pretty.str "" :: - (if null theorems then [Pretty.str "found nothing"] - else - Pretty.str (tally_msg ^ ":") :: - grouped 10 Par_List.map (Pretty.item o single o pretty_thm ctxt) (rev theorems)) - end |> Pretty.fbreaks |> curry Pretty.blk 0; - -end; - - - -(** Isar command syntax **) - -fun proof_state st = - (case try Toplevel.proof_of st of - SOME state => state - | NONE => Proof.init (Toplevel.context_of st)); - -local - -val criterion = - Parse.reserved "name" |-- Parse.!!! (Parse.$$$ ":" |-- Parse.xname) >> Name || - Parse.reserved "intro" >> K Intro || - Parse.reserved "elim" >> K Elim || - Parse.reserved "dest" >> K Dest || - Parse.reserved "solves" >> K Solves || - Parse.reserved "simp" |-- Parse.!!! (Parse.$$$ ":" |-- Parse.term) >> Simp || - Parse.term >> Pattern; - -val options = - Scan.optional - (Parse.$$$ "(" |-- - Parse.!!! (Scan.option Parse.nat -- Scan.optional (Parse.reserved "with_dups" >> K false) true - --| Parse.$$$ ")")) (NONE, true); - -val query = Scan.repeat ((Scan.option Parse.minus >> is_none) -- criterion); - -in - -fun read_query pos str = - Outer_Syntax.scan pos str - |> filter Token.is_proper - |> Scan.error (Scan.finite Token.stopper (Parse.!!! (query --| Scan.ahead Parse.eof))) - |> #1; - -val _ = - Outer_Syntax.improper_command @{command_spec "find_theorems"} - "find theorems meeting specified criteria" - (options -- query >> (fn ((opt_lim, rem_dups), spec) => - Toplevel.keep (fn st => - Pretty.writeln (pretty_theorems (proof_state st) opt_lim rem_dups spec)))); - -end; - - - -(** PIDE query operation **) - -val _ = - Query_Operation.register "find_theorems" (fn {state = st, args, output_result} => - if can Toplevel.context_of st then - let - val [limit_arg, allow_dups_arg, query_arg] = args; - val state = proof_state st; - val opt_limit = Int.fromString limit_arg; - val rem_dups = allow_dups_arg = "false"; - val criteria = read_query Position.none query_arg; - in output_result (Pretty.string_of (pretty_theorems state opt_limit rem_dups criteria)) end - else error "Unknown context"); - -end; diff --git a/core/Pure/Tools/keywords.scala b/core/Pure/Tools/keywords.scala deleted file mode 100644 index 710e64fc..00000000 --- a/core/Pure/Tools/keywords.scala +++ /dev/null @@ -1,178 +0,0 @@ -/* Title: Pure/Tools/keywords.scala - Author: Makarius - -Generate keyword files for Emacs Proof General. 
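When a limit is in force, find_all above keeps only the last `limit` entries of the sorted match list (drop (Int.max (len - lim, 0))), and pretty_theorems reports "found N theorem(s) (M displayed)" when fewer are shown than were found. A small Scala sketch of just that bookkeeping, with invented example data:

object LimitSketch {
  // Keep the last `limit` entries of the sorted list and build the tally
  // message, mirroring find_all/tally_msg above.
  def takeBest[A](matches: List[A], limit: Int): (String, List[A]) = {
    val found = matches.length
    val kept = matches.drop((found - limit) max 0)
    val msg =
      s"found $found theorem(s)" +
        (if (kept.length < found) s" (${kept.length} displayed)" else "")
    (msg, kept)
  }

  def main(args: Array[String]): Unit = {
    println(takeBest(List("d", "c", "b", "a"), 2)) // (found 4 theorem(s) (2 displayed),List(b, a))
    println(takeBest(List("b", "a"), 10))          // (found 2 theorem(s),List(b, a))
  }
}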
-*/ - -/*Proof General legacy*/ - -package isabelle - - -import scala.collection.mutable - - -object Keywords -{ - /* keywords */ - - private val convert = Map( - "thy_begin" -> "theory-begin", - "thy_end" -> "theory-end", - "thy_heading1" -> "theory-heading", - "thy_heading2" -> "theory-heading", - "thy_heading3" -> "theory-heading", - "thy_heading4" -> "theory-heading", - "thy_load" -> "theory-decl", - "thy_decl" -> "theory-decl", - "thy_goal" -> "theory-goal", - "qed_script" -> "qed", - "qed_block" -> "qed-block", - "qed_global" -> "qed-global", - "prf_heading2" -> "proof-heading", - "prf_heading3" -> "proof-heading", - "prf_heading4" -> "proof-heading", - "prf_goal" -> "proof-goal", - "prf_block" -> "proof-block", - "prf_open" -> "proof-open", - "prf_close" -> "proof-close", - "prf_chain" -> "proof-chain", - "prf_decl" -> "proof-decl", - "prf_asm" -> "proof-asm", - "prf_asm_goal" -> "proof-asm-goal", - "prf_asm_goal_script" -> "proof-asm-goal", - "prf_script" -> "proof-script" - ).withDefault((s: String) => s) - - private val emacs_kinds = List( - "major", - "minor", - "control", - "diag", - "theory-begin", - "theory-switch", - "theory-end", - "theory-heading", - "theory-decl", - "theory-script", - "theory-goal", - "qed", - "qed-block", - "qed-global", - "proof-heading", - "proof-goal", - "proof-block", - "proof-open", - "proof-close", - "proof-chain", - "proof-decl", - "proof-asm", - "proof-asm-goal", - "proof-script") - - def keywords( - options: Options, - name: String = "", - dirs: List[Path] = Nil, - sessions: List[String] = Nil) - { - val relevant_sessions = - for { - (name, content) <- - Build.session_dependencies(options, false, dirs, sessions).deps.toList - keywords = content.keywords - if !keywords.isEmpty - } yield (name, keywords) - - val keywords_raw = - (Map.empty[String, Set[String]].withDefaultValue(Set.empty) /: relevant_sessions) { - case (map, (_, ks)) => - (map /: ks) { - case (m, (name, Some(((kind, _), _)), _)) => - m + (name -> (m(name) + convert(kind))) - case (m, (name, None, _)) => - m + (name -> (m(name) + "minor")) - } - } - - val keywords_unique = - for ((name, kinds) <- keywords_raw) yield { - kinds.toList match { - case List(kind) => (name, kind) - case _ => - (kinds - "minor").toList match { - case List(kind) => (name, kind) - case _ => - error("Inconsistent declaration of keyword " + quote(name) + ": " + - kinds.toList.sorted.mkString(" vs ")) - } - } - } - - val output = - { - val out = new mutable.StringBuilder - - out ++= ";;\n" - out ++= ";; Keyword classification tables for Isabelle/Isar.\n" - out ++= ";; Generated from " + relevant_sessions.map(_._1).sorted.mkString(" + ") + ".\n" - out ++= ";; *** DO NOT EDIT *** DO NOT EDIT *** DO NOT EDIT ***\n" - out ++= ";;\n" - - for (kind <- emacs_kinds) { - val names = - (for { - (name, k) <- keywords_unique.iterator - if (if (kind == "major") k != "minor" else k == kind) - if kind != "minor" || Symbol.is_ascii_identifier(name) - } yield name).toList.sorted - - out ++= "\n(defconst isar-keywords-" + kind - out ++= "\n '(" - out ++= - names.map(name => quote("""[\.\*\+\?\[\]\^\$]""".r replaceAllIn (name, """\\\\$0"""))) - .mkString("\n ") - out ++= "))\n" - } - - out ++= "\n(provide 'isar-keywords)\n" - - out.toString - } - - val file = if (name == "") "isar-keywords.el" else "isar-keywords-" + name + ".el" - Output.writeln(file) - File.write(Path.explode(file), output) - } - - - /* administrative update_keywords */ - - def update_keywords(options: Options) - { - val tree = Build.find_sessions(options) - - 
def chapter(ch: String): List[String] = - for ((name, info) <- tree.topological_order if info.chapter == ch) yield name - - keywords(options, sessions = chapter("HOL")) - keywords(options, name = "ZF", sessions = chapter("ZF")) - } - - - /* command line entry point */ - - def main(args: Array[String]) - { - Command_Line.tool0 { - args.toList match { - case "keywords" :: name :: Command_Line.Chunks(dirs, sessions) => - keywords(Options.init(), name, dirs.map(Path.explode), sessions) - case "update_keywords" :: Nil => - update_keywords(Options.init()) - case _ => error("Bad arguments:\n" + cat_lines(args)) - } - } - } -} - diff --git a/core/Pure/Tools/main.scala b/core/Pure/Tools/main.scala deleted file mode 100644 index 23f6afcc..00000000 --- a/core/Pure/Tools/main.scala +++ /dev/null @@ -1,271 +0,0 @@ -/* Title: Pure/Tools/main.scala - Author: Makarius - -Main Isabelle application entry point. -*/ - -package isabelle - - -import java.lang.{Class, ClassLoader} -import java.io.{File => JFile, BufferedReader, InputStreamReader} -import java.nio.file.Files - -import scala.annotation.tailrec - - -object Main -{ - /** main entry point **/ - - def main(args: Array[String]) - { - val system_dialog = new System_Dialog - - def exit_error(exn: Throwable): Nothing = - { - GUI.dialog(null, "Isabelle", GUI.scrollable_text(Exn.message(exn))) - system_dialog.return_code(Exn.return_code(exn, 2)) - system_dialog.join_exit - } - - def build - { - try { - GUI.init_laf() - Isabelle_System.init() - - val mode = Isabelle_System.getenv("JEDIT_BUILD_MODE") - if (mode == "none") - system_dialog.return_code(0) - else { - val options = Options.init() - val system_mode = mode == "" || mode == "system" - val dirs = Path.split(Isabelle_System.getenv("JEDIT_SESSION_DIRS")) - val session = Isabelle_System.default_logic( - Isabelle_System.getenv("JEDIT_LOGIC"), - options.string("jedit_logic")) - - if (Build.build(options = options, build_heap = true, no_build = true, - dirs = dirs, sessions = List(session)) == 0) - system_dialog.return_code(0) - else { - system_dialog.title("Isabelle build (" + Isabelle_System.getenv("ML_IDENTIFIER") + ")") - system_dialog.echo("Build started for Isabelle/" + session + " ...") - - val (out, rc) = - try { - ("", - Build.build(options = options, progress = system_dialog, build_heap = true, - dirs = dirs, system_mode = system_mode, sessions = List(session))) - } - catch { - case exn: Throwable => - (Output.error_text(Exn.message(exn)) + "\n", Exn.return_code(exn, 2)) - } - - system_dialog.echo(out + (if (rc == 0) "OK\n" else "Return code: " + rc + "\n")) - system_dialog.return_code(rc) - } - } - } - catch { case exn: Throwable => exit_error(exn) } - } - - def start - { - val do_start = - { - try { - /* settings directory */ - - val settings_dir = Path.explode("$JEDIT_SETTINGS") - Isabelle_System.mkdirs(settings_dir + Path.explode("DockableWindowManager")) - - if (!(settings_dir + Path.explode("perspective.xml")).is_file) { - File.write(settings_dir + Path.explode("DockableWindowManager/perspective-view0.xml"), - """""") - File.write(settings_dir + Path.explode("perspective.xml"), - """ - - - - - -""") - } - - - /* args */ - - val jedit_options = - Isabelle_System.getenv_strict("JEDIT_OPTIONS").split(" +") - - val jedit_settings = - Array("-settings=" + Isabelle_System.platform_path(Path.explode("$JEDIT_SETTINGS"))) - - val more_args = - if (args.isEmpty) - Array(Isabelle_System.platform_path(Path.explode("$USER_HOME/Scratch.thy"))) - else args - - - /* startup */ - - update_environment() - - 
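As a side note on keywords.scala, which ends just above: a keyword declared with several kinds is resolved by dropping "minor" and insisting on a unique remainder, and keyword names have regex metacharacters escaped (with a doubled backslash, because the result lands inside an elisp string literal) before being written into isar-keywords-*.el. A hedged Scala sketch of those two steps; resolveKind and escape are my names for logic that is inlined in the original.

object KeywordKindSketch {
  // Pick one Emacs keyword kind from the declared kinds, as in
  // keywords_unique above: a unique kind wins, otherwise drop "minor".
  def resolveKind(name: String, kinds: Set[String]): String =
    kinds.toList match {
      case List(kind) => kind
      case _ =>
        (kinds - "minor").toList match {
          case List(kind) => kind
          case _ =>
            sys.error("Inconsistent declaration of keyword " + name + ": " +
              kinds.toList.sorted.mkString(" vs "))
        }
    }

  // Escape regex metacharacters; the doubled backslash survives into the
  // generated elisp string literal.
  def escape(name: String): String =
    """[\.\*\+\?\[\]\^\$]""".r.replaceAllIn(name, """\\\\$0""")

  def main(args: Array[String]): Unit = {
    println(resolveKind("lemma", Set("minor", "theory-goal"))) // theory-goal
    println(escape("qed.")) // qed\\.
  }
}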
System.setProperty("jedit.home", - Isabelle_System.platform_path(Path.explode("$JEDIT_HOME/dist"))) - - System.setProperty("scala.home", - Isabelle_System.platform_path(Path.explode("$SCALA_HOME"))) - - val jedit = - Class.forName("org.gjt.sp.jedit.jEdit", true, ClassLoader.getSystemClassLoader) - val jedit_main = jedit.getDeclaredMethod("main", classOf[Array[String]]) - - () => jedit_main.invoke(null, jedit_options ++ jedit_settings ++ more_args) - } - catch { case exn: Throwable => exit_error(exn) } - } - do_start() - } - - if (Platform.is_windows) { - try { - GUI.init_laf() - - val isabelle_home0 = System.getenv("ISABELLE_HOME") - val isabelle_home = System.getProperty("isabelle.home") - - if (isabelle_home0 == null || isabelle_home0 == "") { - if (isabelle_home == null || isabelle_home == "") - error("Unknown Isabelle home directory") - if (!(new JFile(isabelle_home)).isDirectory) - error("Bad Isabelle home directory: " + quote(isabelle_home)) - - val cygwin_root = isabelle_home + "\\contrib\\cygwin" - if ((new JFile(cygwin_root)).isDirectory) - System.setProperty("cygwin.root", cygwin_root) - - val uninitialized_file = new JFile(cygwin_root, "isabelle\\uninitialized") - val uninitialized = uninitialized_file.isFile && uninitialized_file.delete - - if (uninitialized) cygwin_init(system_dialog, isabelle_home, cygwin_root) - } - } - catch { case exn: Throwable => exit_error(exn) } - - if (system_dialog.stopped) { - system_dialog.return_code(Exn.Interrupt.return_code) - system_dialog.join_exit - } - } - - build - val rc = system_dialog.join - if (rc == 0) start else sys.exit(rc) - } - - - - /** Cygwin init (e.g. after extraction via 7zip) **/ - - private def cygwin_init(system_dialog: System_Dialog, isabelle_home: String, cygwin_root: String) - { - system_dialog.title("Isabelle system initialization") - system_dialog.echo("Initializing Cygwin ...") - - def execute(args: String*): Int = - { - val cwd = new JFile(isabelle_home) - val env = Map("CYGWIN" -> "nodosfilewarning") - system_dialog.execute(cwd, env, args: _*) - } - - system_dialog.echo("symlinks ...") - val symlinks = - { - val path = (new JFile(cygwin_root + "\\isabelle\\symlinks")).toPath - Files.readAllLines(path, UTF8.charset).toArray.toList.asInstanceOf[List[String]] - } - @tailrec def recover_symlinks(list: List[String]): Unit = - { - list match { - case Nil | List("") => - case link :: content :: rest => - val path = (new JFile(isabelle_home, link)).toPath - - val writer = Files.newBufferedWriter(path, UTF8.charset) - try { writer.write("!" 
+ content + "\u0000") } - finally { writer.close } - - Files.setAttribute(path, "dos:system", true) - - recover_symlinks(rest) - case _ => error("Unbalanced symlinks list") - } - } - recover_symlinks(symlinks) - - system_dialog.echo("rebaseall ...") - execute(cygwin_root + "\\bin\\dash.exe", "/isabelle/rebaseall") - - system_dialog.echo("postinstall ...") - execute(cygwin_root + "\\bin\\bash.exe", "/isabelle/postinstall") - - system_dialog.echo("init ...") - Isabelle_System.init() - } - - - - /** adhoc update of JVM environment variables **/ - - def update_environment() - { - val update = - { - val isabelle_home = Isabelle_System.getenv("ISABELLE_HOME") - val isabelle_home_user = Isabelle_System.getenv("ISABELLE_HOME_USER") - val upd = - if (Platform.is_windows) - List( - "ISABELLE_HOME" -> Isabelle_System.jvm_path(isabelle_home), - "ISABELLE_HOME_USER" -> Isabelle_System.jvm_path(isabelle_home_user), - "INI_DIR" -> "") - else - List( - "ISABELLE_HOME" -> isabelle_home, - "ISABELLE_HOME_USER" -> isabelle_home_user) - - (env0: Any) => { - val env = env0.asInstanceOf[java.util.Map[String, String]] - upd.foreach { - case (x, "") => env.remove(x) - case (x, y) => env.put(x, y) - } - } - } - - classOf[java.util.Collections].getDeclaredClasses - .find(c => c.getName == "java.util.Collections$UnmodifiableMap") match - { - case Some(c) => - val m = c.getDeclaredField("m") - m.setAccessible(true) - update(m.get(System.getenv())) - - if (Platform.is_windows) { - val ProcessEnvironment = Class.forName("java.lang.ProcessEnvironment") - val field = ProcessEnvironment.getDeclaredField("theCaseInsensitiveEnvironment") - field.setAccessible(true) - update(field.get(null)) - } - - case None => - error("Failed to update JVM environment -- platform incompatibility") - } - } -} - diff --git a/core/Pure/Tools/ml_statistics.scala b/core/Pure/Tools/ml_statistics.scala deleted file mode 100644 index 62338380..00000000 --- a/core/Pure/Tools/ml_statistics.scala +++ /dev/null @@ -1,143 +0,0 @@ -/* Title: Pure/Tools/ml_statistics.scala - Author: Makarius - -ML runtime statistics. 
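The Cygwin initialization that ends above restores symlink stubs from a flat "symlinks" file: the lines are consumed two at a time as (link, target) pairs, each link gets a "!target" stub with the DOS system attribute set, an empty tail terminates the walk, and anything else is an "Unbalanced symlinks list" error. The sketch below reproduces only the pairing and termination logic; the writing step is replaced by a callback and the example entries are invented.

import scala.annotation.tailrec

object SymlinkListSketch {
  // Walk the list two elements at a time, as in recover_symlinks above.
  @tailrec
  def recover(list: List[String], write: (String, String) => Unit): Unit =
    list match {
      case Nil | List("") => ()
      case link :: content :: rest =>
        write(link, content)
        recover(rest, write)
      case _ => sys.error("Unbalanced symlinks list")
    }

  def main(args: Array[String]): Unit =
    recover(List("bin/isabelle", "lib/scripts/isabelle", ""),
      (link, content) => println(s"$link -> $content"))
}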
-*/ - -package isabelle - - -import scala.collection.mutable -import scala.collection.immutable.{SortedSet, SortedMap} -import scala.swing.{Frame, Component} - -import org.jfree.data.xy.{XYSeries, XYSeriesCollection} -import org.jfree.chart.{JFreeChart, ChartPanel, ChartFactory} -import org.jfree.chart.plot.PlotOrientation - - -object ML_Statistics -{ - /* content interpretation */ - - final case class Entry(time: Double, data: Map[String, Double]) - - def apply(name: String, stats: List[Properties.T]): ML_Statistics = - new ML_Statistics(name, stats) - - def apply(info: Build.Log_Info): ML_Statistics = - apply(info.name, info.stats) - - val empty = apply("", Nil) - - - /* standard fields */ - - val tasks_fields = - ("Future tasks", List("tasks_ready", "tasks_pending", "tasks_running", "tasks_passive")) - - val workers_fields = - ("Worker threads", List("workers_total", "workers_active", "workers_waiting")) - - val GC_fields = ("GCs", List("partial_GCs", "full_GCs")) - - val heap_fields = - ("Heap", List("size_heap", "size_allocation", "size_allocation_free", - "size_heap_free_last_full_GC", "size_heap_free_last_GC")) - - val threads_fields = - ("Threads", List("threads_total", "threads_in_ML", "threads_wait_condvar", - "threads_wait_IO", "threads_wait_mutex", "threads_wait_signal")) - - val time_fields = ("Time", List("time_CPU", "time_GC")) - - val speed_fields = ("Speed", List("speed_CPU", "speed_GC")) - - val standard_fields = - List(tasks_fields, workers_fields, GC_fields, heap_fields, threads_fields, - time_fields, speed_fields) -} - -final class ML_Statistics private(val name: String, val stats: List[Properties.T]) -{ - val Now = new Properties.Double("now") - def now(props: Properties.T): Double = Now.unapply(props).get - - require(stats.forall(props => Now.unapply(props).isDefined)) - - val time_start = if (stats.isEmpty) 0.0 else now(stats.head) - val duration = if (stats.isEmpty) 0.0 else now(stats.last) - time_start - - val fields: Set[String] = - SortedSet.empty[String] ++ - (for (props <- stats.iterator; (x, _) <- props.iterator if x != Now.name) - yield x) - - val content: List[ML_Statistics.Entry] = - { - var last_edge = Map.empty[String, (Double, Double, Double)] - val result = new mutable.ListBuffer[ML_Statistics.Entry] - for (props <- stats) { - val time = now(props) - time_start - require(time >= 0.0) - - // rising edges -- relative speed - val speeds = - for ((key, value) <- props; a <- Library.try_unprefix("time", key)) yield { - val (x0, y0, s0) = last_edge.getOrElse(a, (0.0, 0.0, 0.0)) - - val x1 = time - val y1 = java.lang.Double.parseDouble(value) - val s1 = if (x1 == x0) 0.0 else (y1 - y0) / (x1 - x0) - - val b = ("speed" + a).intern - if (y1 > y0) { last_edge += (a -> (x1, y1, s1)); (b, s1) } else (b, s0) - } - - val data = - SortedMap.empty[String, Double] ++ speeds ++ - (for ((x, y) <- props.iterator if x != Now.name) - yield (x, java.lang.Double.parseDouble(y))) - result += ML_Statistics.Entry(time, data) - } - result.toList - } - - - /* charts */ - - def update_data(data: XYSeriesCollection, selected_fields: Iterable[String]) - { - data.removeAllSeries - for { - field <- selected_fields.iterator - series = new XYSeries(field) - } { - content.foreach(entry => series.add(entry.time, entry.data(field))) - data.addSeries(series) - } - } - - def chart(title: String, selected_fields: Iterable[String]): JFreeChart = - { - val data = new XYSeriesCollection - update_data(data, selected_fields) - - ChartFactory.createXYLineChart(title, "time", "value", data, - 
PlotOrientation.VERTICAL, true, true, true) - } - - def chart(arg: (String, Iterable[String])): JFreeChart = chart(arg._1, arg._2) - - def show_standard_frames(): Unit = - ML_Statistics.standard_fields.map(chart(_)).foreach(c => - GUI_Thread.later { - new Frame { - iconImage = GUI.isabelle_image() - title = name - contents = Component.wrap(new ChartPanel(c)) - visible = true - } - }) -} - diff --git a/core/Pure/Tools/named_thms.ML b/core/Pure/Tools/named_thms.ML deleted file mode 100644 index 0b0f78df..00000000 --- a/core/Pure/Tools/named_thms.ML +++ /dev/null @@ -1,44 +0,0 @@ -(* Title: Pure/Tools/named_thms.ML - Author: Makarius - -Named collections of theorems in canonical order. -*) - -signature NAMED_THMS = -sig - val member: Proof.context -> thm -> bool - val get: Proof.context -> thm list - val add_thm: thm -> Context.generic -> Context.generic - val del_thm: thm -> Context.generic -> Context.generic - val add: attribute - val del: attribute - val setup: theory -> theory -end; - -functor Named_Thms(val name: binding val description: string): NAMED_THMS = -struct - -structure Data = Generic_Data -( - type T = thm Item_Net.T; - val empty = Thm.full_rules; - val extend = I; - val merge = Item_Net.merge; -); - -val member = Item_Net.member o Data.get o Context.Proof; - -val content = Item_Net.content o Data.get; -val get = content o Context.Proof; - -val add_thm = Data.map o Item_Net.update; -val del_thm = Data.map o Item_Net.remove; - -val add = Thm.declaration_attribute add_thm; -val del = Thm.declaration_attribute del_thm; - -val setup = - Attrib.setup name (Attrib.add_del add del) ("declaration of " ^ description) #> - Global_Theory.add_thms_dynamic (name, content); - -end; diff --git a/core/Pure/Tools/print_operation.ML b/core/Pure/Tools/print_operation.ML deleted file mode 100644 index 5b826ed8..00000000 --- a/core/Pure/Tools/print_operation.ML +++ /dev/null @@ -1,82 +0,0 @@ -(* Title: Pure/Tools/print_operation.ML - Author: Makarius - -Print operations as asynchronous query. 
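ML_Statistics above derives the speed_* series from the cumulative time_* counters: at each sample it computes (y1 - y0) / (x1 - x0) against the last point at which the counter rose, and falls back to the previous slope when the counter did not increase. A self-contained Scala sketch of that per-counter edge tracking; the sample values are invented.

object SpeedSketch {
  // Turn (time, value) samples of one cumulative counter into relative
  // speeds, keeping the last rising edge as in ML_Statistics.content above.
  def speeds(samples: List[(Double, Double)]): List[Double] = {
    var lastEdge = (0.0, 0.0, 0.0) // (x0, y0, s0): last rising edge and its slope
    samples.map { case (x1, y1) =>
      val (x0, y0, s0) = lastEdge
      val s1 = if (x1 == x0) 0.0 else (y1 - y0) / (x1 - x0)
      if (y1 > y0) { lastEdge = (x1, y1, s1); s1 } else s0
    }
  }

  def main(args: Array[String]): Unit =
    // e.g. a time_CPU counter sampled at t = 0, 2, 4, 6 seconds:
    println(speeds(List((0.0, 0.0), (2.0, 1.0), (4.0, 1.0), (6.0, 4.0))))
    // List(0.0, 0.5, 0.5, 0.75): flat between t = 2 and t = 4, then (4 - 1) / (6 - 2)
}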
-*) - - -signature PRINT_OPERATION = -sig - val register: string -> string -> (Toplevel.state -> Pretty.T list) -> unit -end; - -structure Print_Operation: PRINT_OPERATION = -struct - -(* maintain print operations *) - -local - -val print_operations = - Synchronized.var "print_operations" - ([]: (string * (string * (Toplevel.state -> Pretty.T list))) list); - -fun report () = - Output.try_protocol_message Markup.print_operations - let - val yxml = - Synchronized.value print_operations - |> map (fn (x, (y, _)) => (x, y)) |> rev - |> let open XML.Encode in list (pair string string) end - |> YXML.string_of_body; - in [yxml] end; - -val _ = Isabelle_Process.protocol_command "print_operations" (fn [] => report ()); - -val _ = Session.protocol_handler "isabelle.Print_Operation$Handler"; - -in - -fun register name description pr = - (Synchronized.change print_operations (fn tab => - (if not (AList.defined (op =) tab name) then () - else warning ("Redefining print operation: " ^ quote name); - AList.update (op =) (name, (description, pr)) tab)); - report ()); - -val _ = - Query_Operation.register "print_operation" (fn {state, args, output_result} => - let - val _ = Toplevel.context_of state handle Toplevel.UNDEF => error "Unknown context"; - fun err s = Pretty.mark_str (Markup.bad, s); - fun print name = - (case AList.lookup (op =) (Synchronized.value print_operations) name of - SOME (_, pr) => (pr state handle Toplevel.UNDEF => [err "Unknown context"]) - | NONE => [err ("Unknown print operation: " ^ quote name)]); - in output_result (Pretty.string_of (Pretty.chunks (maps print args))) end); - -end; - - -(* common print operations *) - -val _ = - register "context" "context of local theory target" Toplevel.pretty_context; - -val _ = - register "cases" "cases of proof context" - (Proof_Context.pretty_cases o Toplevel.context_of); - -val _ = - register "terms" "term bindings of proof context" - (Proof_Context.pretty_term_bindings o Toplevel.context_of); - -val _ = - register "theorems" "theorems of local theory or proof context" - (Isar_Cmd.pretty_theorems false); - -val _ = - register "state" "proof state" Toplevel.pretty_state; - -end; - diff --git a/core/Pure/Tools/print_operation.scala b/core/Pure/Tools/print_operation.scala deleted file mode 100644 index a93b07b8..00000000 --- a/core/Pure/Tools/print_operation.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* Title: Pure/System/print_operation.scala - Author: Makarius - -Print operations as asynchronous query. 
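register above keeps the available print operations in a synchronized association list of (name, (description, printer)) entries: re-registering a name warns ("Redefining print operation: ...") but still replaces the entry, and every change is re-reported to the front end as a list of (name, description) pairs. A much-simplified Scala sketch of that update-and-report cycle; it uses a plain mutable list instead of Synchronized, prints instead of sending a protocol message, and does not preserve the original entry order on redefinition.

object PrintOperationRegistrySketch {
  private var registry: List[(String, String)] = Nil // (name, description)

  def register(name: String, description: String): Unit = {
    if (registry.exists(_._1 == name))
      Console.err.println("Redefining print operation: " + name)
    registry = (name, description) :: registry.filterNot(_._1 == name)
    report()
  }

  // Stand-in for the protocol message carrying the list to the front end.
  private def report(): Unit =
    println(registry.reverse.map { case (n, d) => s"$n=$d" }.mkString("[", ", ", "]"))

  def main(args: Array[String]): Unit = {
    register("context", "context of local theory target")
    register("state", "proof state")
    register("state", "proof state") // triggers the redefinition warning
  }
}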
-*/ - -package isabelle - - -object Print_Operation -{ - def print_operations(session: Session): List[(String, String)] = - session.protocol_handler("isabelle.Print_Operation$Handler") match { - case Some(handler: Handler) => handler.get - case _ => Nil - } - - - /* protocol handler */ - - class Handler extends Session.Protocol_Handler - { - private val print_operations = Synchronized(Nil: List[(String, String)]) - - def get: List[(String, String)] = print_operations.value - - private def put(prover: Prover, msg: Prover.Protocol_Output): Boolean = - { - val ops = - { - import XML.Decode._ - list(pair(string, string))(YXML.parse_body(msg.text)) - } - print_operations.change(_ => ops) - true - } - - override def start(prover: Prover): Unit = - prover.protocol_command(Markup.PRINT_OPERATIONS) - - val functions = Map(Markup.PRINT_OPERATIONS -> put _) - } -} diff --git a/core/Pure/Tools/proof_general.ML b/core/Pure/Tools/proof_general.ML deleted file mode 100644 index 02c272f9..00000000 --- a/core/Pure/Tools/proof_general.ML +++ /dev/null @@ -1,443 +0,0 @@ -(* Title: Pure/Tools/proof_general.ML - Author: David Aspinall - Author: Makarius - -Isabelle/Isar configuration for Proof General / Emacs. -See also http://proofgeneral.inf.ed.ac.uk -*) - -(*Proof General legacy*) - -signature PROOF_GENERAL = -sig - type category = string - val category_display: category - val category_advanced_display: category - val category_tracing: category - val category_proof: category - type pgiptype = string - val pgipbool: pgiptype - val pgipint: pgiptype - val pgipfloat: pgiptype - val pgipstring: pgiptype - val preference: category -> string option -> - (unit -> string) -> (string -> unit) -> string -> string -> string -> unit - val preference_bool: category -> string option -> - bool Unsynchronized.ref -> string -> string -> unit - val preference_int: category -> string option -> - int Unsynchronized.ref -> string -> string -> unit - val preference_real: category -> string option -> - real Unsynchronized.ref -> string -> string -> unit - val preference_string: category -> string option -> - string Unsynchronized.ref -> string -> string -> unit - val preference_option: category -> string option -> string -> string -> string -> unit - val process_pgip: string -> unit - val tell_clear_goals: unit -> unit - val tell_clear_response: unit -> unit - val inform_file_processed: string -> unit - val inform_file_retracted: string -> unit - val master_path: Path.T Unsynchronized.ref - structure ThyLoad: sig val add_path: string -> unit end - val thm_deps: bool Unsynchronized.ref - val proof_generalN: string - val init: unit -> unit - val restart: unit -> unit -end; - -structure ProofGeneral: PROOF_GENERAL = -struct - -(** preferences **) - -(* type preference *) - -type category = string; -val category_display = "Display"; -val category_advanced_display = "Advanced Display"; -val category_tracing = "Tracing"; -val category_proof = "Proof"; - -type pgiptype = string; -val pgipbool = "pgipbool"; -val pgipint = "pgipint"; -val pgipfloat = "pgipint"; (*NB: PG 3.7.x and 4.0 lack pgipfloat, but accept floats as pgipint*) -val pgipstring = "pgipstring"; - -type preference = - {category: string, - override: string option, - descr: string, - pgiptype: pgiptype, - get: unit -> string, - set: string -> unit}; - - -(* global preferences *) - -local - val preferences = - Synchronized.var "ProofGeneral.preferences" ([]: (string * preference) list); -in - -fun add_preference name pref = - Synchronized.change preferences (fn prefs => - (if 
not (AList.defined (op =) prefs name) then () - else warning ("Redefining ProofGeneral preference: " ^ quote name); - AList.update (op =) (name, pref) prefs)); - -fun set_preference name value = - (case AList.lookup (op =) (Synchronized.value preferences) name of - SOME {set, ...} => set value - | NONE => error ("Unknown ProofGeneral preference: " ^ quote name)); - -fun all_preferences () = - rev (Synchronized.value preferences) - |> map (fn (name, {category, descr, pgiptype, get, ...}) => - (category, {name = name, descr = descr, default = get (), pgiptype = pgiptype})) - |> AList.group (op =); - -fun init_preferences () = - Synchronized.value preferences - |> List.app (fn (_, {set, override = SOME value, ...}) => set value | _ => ()); - -end; - - - -(* raw preferences *) - -fun preference category override get set typ name descr = - add_preference name - {category = category, override = override, descr = descr, pgiptype = typ, get = get, set = set}; - -fun preference_ref category override read write typ r = - preference category override (fn () => read (! r)) (fn x => r := write x) typ; - -fun preference_bool x y = preference_ref x y Markup.print_bool Markup.parse_bool pgipbool; -fun preference_int x y = preference_ref x y Markup.print_int Markup.parse_int pgipint; -fun preference_real x y = preference_ref x y Markup.print_real Markup.parse_real pgipfloat; -fun preference_string x y = preference_ref x y I I pgipstring; - - -(* system options *) - -fun preference_option category override option_name pgip_name descr = - let - val typ = Options.default_typ option_name; - val pgiptype = - if typ = Options.boolT then pgipbool - else if typ = Options.intT then pgipint - else if typ = Options.realT then pgipfloat - else pgipstring; - in - add_preference pgip_name - {category = category, - override = override, - descr = descr, - pgiptype = pgiptype, - get = fn () => Options.get_default option_name, - set = Options.put_default option_name} - end; - - -(* minimal PGIP support for , , *) - -local - -fun get_attr attrs name = - (case Properties.get attrs name of - SOME value => value - | NONE => raise Fail ("Missing attribute: " ^ quote name)); - -fun attr x y = [(x, y)] : XML.attributes; - -fun opt_attr _ NONE = [] - | opt_attr name (SOME value) = attr name value; - -val pgip_id = "dummy"; -val pgip_serial = Counter.make (); - -fun output_pgip refid refseq content = - XML.Elem (("pgip", - attr "tag" "Isabelle/Isar" @ - attr "id" pgip_id @ - opt_attr "destid" refid @ - attr "class" "pg" @ - opt_attr "refid" refid @ - attr "refseq" refseq @ - attr "seq" (string_of_int (pgip_serial ()))), content) - |> XML.string_of - |> Output.urgent_message; - - -fun invalid_pgip () = raise Fail "Invalid PGIP packet"; - -fun haspref {name, descr, default, pgiptype} = - XML.Elem (("haspref", [("name", name), ("descr", descr), ("default", default)]), - [XML.Elem ((pgiptype, []), [])]); - -fun process_element refid refseq (XML.Elem (("askprefs", _), _)) = - all_preferences () |> List.app (fn (category, prefs) => - output_pgip refid refseq - [XML.Elem (("hasprefs", [("prefcategory", category)]), map haspref prefs)]) - | process_element _ _ (XML.Elem (("setpref", attrs), data)) = - let - val name = - (case Properties.get attrs "name" of - SOME name => name - | NONE => invalid_pgip ()); - val value = XML.content_of data; - in set_preference name value end - | process_element _ _ _ = invalid_pgip (); - -in - -fun process_pgip str = - (case XML.parse str of - XML.Elem (("pgip", attrs), pgips) => - let - val class = get_attr 
attrs "class"; - val dest = Properties.get attrs "destid"; - val refid = Properties.get attrs "id"; - val refseq = get_attr attrs "seq"; - val processit = - (case dest of - NONE => class = "pa" - | SOME id => id = pgip_id); - in if processit then List.app (process_element refid refseq) pgips else () end - | _ => invalid_pgip ()) - handle Fail msg => raise Fail (msg ^ "\n" ^ str); - -end; - - -(** messages **) - -(* render markup *) - -fun special ch = chr 1 ^ ch; - -local - -fun render_trees ts = fold render_tree ts -and render_tree t = - (case XML.unwrap_elem t of - SOME (_, ts) => render_trees ts - | NONE => - (case t of - XML.Text s => Buffer.add s - | XML.Elem ((name, props), ts) => - let - val (bg, en) = - if null ts then Markup.no_output - else if name = Markup.stateN then (special "O" ^ "\n", "\n" ^ special "P") - else if name = Markup.sendbackN then (special "W", special "X") - else if name = Markup.intensifyN then (special "0", special "1") - else if name = Markup.informationN then ("\n" ^ special "0", special "1") - else if name = Markup.tfreeN then (special "C", special "A") - else if name = Markup.tvarN then (special "D", special "A") - else if name = Markup.freeN then (special "E", special "A") - else if name = Markup.boundN then (special "F", special "A") - else if name = Markup.varN then (special "G", special "A") - else if name = Markup.skolemN then (special "H", special "A") - else - (case Markup.get_entity_kind (name, props) of - SOME kind => - if kind = Markup.classN then (special "B", special "A") - else Markup.no_output - | NONE => Markup.no_output); - in Buffer.add bg #> render_trees ts #> Buffer.add en end)); - -in - -fun render text = - Buffer.content (render_trees (YXML.parse_body text) Buffer.empty); - -end; - - -(* hooks *) - -fun message bg en prfx body = - (case render (implode body) of - "" => () - | s => Output.physical_writeln (enclose bg en (prefix_lines prfx s))); - -fun setup_messages () = - (Output.writeln_fn := message "" "" ""; - Output.status_fn := (fn _ => ()); - Output.report_fn := (fn _ => ()); - Output.urgent_message_fn := message (special "I") (special "J") ""; - Output.tracing_fn := message (special "I" ^ special "V") (special "J") ""; - Output.warning_fn := message (special "K") (special "L") "### "; - Output.error_message_fn := (fn (_, s) => message (special "M") (special "N") "*** " s); - Output.prompt_fn := (fn s => Output.physical_stdout (render s ^ special "S"))); - - -(* notification *) - -fun emacs_notify s = message (special "I") (special "J") "" [s]; - -fun tell_clear_goals () = - emacs_notify "Proof General, please clear the goals buffer."; - -fun tell_clear_response () = - emacs_notify "Proof General, please clear the response buffer."; - -fun tell_file_loaded path = - emacs_notify ("Proof General, this file is loaded: " ^ quote (File.platform_path path)); - -fun tell_file_retracted path = - emacs_notify ("Proof General, you can unlock the file " ^ quote (File.platform_path path)); - - - -(** theory loader **) - -(* global master path *) - -val master_path = Unsynchronized.ref Path.current; - -(*fake old ThyLoad -- with new semantics*) -structure ThyLoad = -struct - fun add_path path = master_path := Path.explode path; -end; - - -(* actions *) - -local - -fun trace_action action name = - if action = Thy_Info.Update then - List.app tell_file_loaded (Thy_Info.loaded_files name) - else if action = Thy_Info.Remove then - List.app tell_file_retracted (Thy_Info.loaded_files name) - else (); - -in - fun setup_thy_loader () = 
Thy_Info.add_hook trace_action; - fun sync_thy_loader () = List.app (trace_action Thy_Info.Update) (Thy_Info.get_names ()); -end; - - -(* get informed about files *) - -(*liberal low-level version*) -val thy_name = perhaps (try (unsuffix ".thy")) o List.last o space_explode "/"; - -val inform_file_retracted = Thy_Info.kill_thy o thy_name; - -fun inform_file_processed file = - let - val name = thy_name file; - val _ = name = "" andalso error ("Bad file name: " ^ quote file); - val _ = - Thy_Info.register_thy (Toplevel.end_theory Position.none (Isar.state ())) - handle ERROR msg => - (warning (cat_lines ["Failed to register theory: " ^ quote name, msg]); - tell_file_retracted (Resources.thy_path (Path.basic name))) - val _ = Isar.init (); - in () end; - - - -(** theorem dependencies **) - -(* thm_deps *) - -local - -fun add_proof_body (PBody {thms, ...}) = - thms |> fold (fn (_, (name, _, _)) => name <> "" ? Symtab.update (name, ())); - -fun add_thm th = - (case Thm.proof_body_of th of - PBody {proof = PThm (_, ((name, _, _), body)), ...} => - if Thm.has_name_hint th andalso Thm.get_name_hint th = name - then add_proof_body (Future.join body) - else I - | body => add_proof_body body); - -in - -fun get_thm_deps ths = - let - (* FIXME proper derivation names!? *) - val names = map Thm.get_name_hint (filter Thm.has_name_hint ths); - val deps = Symtab.keys (fold add_thm ths Symtab.empty); - in (names, deps) end; - -end; - - -(* report via hook *) - -val thm_deps = Unsynchronized.ref false; - -local - -val spaces_quote = space_implode " " o map quote; - -fun thm_deps_message (thms, deps) = - emacs_notify ("Proof General, theorem dependencies of " ^ thms ^ " are " ^ deps); - -in - -fun setup_present_hook () = Toplevel.add_hook (fn _ => fn state => fn state' => - if ! thm_deps andalso can Toplevel.theory_of state andalso Toplevel.is_theory state' - then - let - val prev_facts = Global_Theory.facts_of (Toplevel.theory_of state); - val facts = Global_Theory.facts_of (Toplevel.theory_of state'); - val (names, deps) = get_thm_deps (maps #2 (Facts.dest_static true [prev_facts] facts)); - in - if null names orelse null deps then () - else thm_deps_message (spaces_quote names, spaces_quote deps) - end - else ()); - -end; - - - -(** startup **) - -(* init *) - -val proof_generalN = "ProofGeneral"; - -val initialized = Unsynchronized.ref false; - -fun init () = - (if ! initialized then () - else - (Output.add_mode Symbol.xsymbolsN Symbol.output Output.default_escape; - Output.add_mode proof_generalN Output.default_output Output.default_escape; - Markup.add_mode proof_generalN YXML.output_markup; - setup_messages (); - setup_thy_loader (); - setup_present_hook (); - initialized := true); - init_preferences (); - sync_thy_loader (); - Unsynchronized.change print_mode (update (op =) proof_generalN); - Secure.PG_setup (); - Isar.toplevel_loop TextIO.stdIn - {init = true, welcome = true, sync = true, secure = Secure.is_secure ()}); - - -(* restart *) - -val welcome = Output.urgent_message o Session.welcome; - -fun restart () = - (sync_thy_loader (); - tell_clear_goals (); - tell_clear_response (); - Isar.init (); - welcome ()); - -end; - diff --git a/core/Pure/Tools/proof_general_pure.ML b/core/Pure/Tools/proof_general_pure.ML deleted file mode 100644 index 852336a2..00000000 --- a/core/Pure/Tools/proof_general_pure.ML +++ /dev/null @@ -1,227 +0,0 @@ -(* Title: Pure/Tools/proof_general_pure.ML - Author: David Aspinall - Author: Makarius - -Proof General setup within theory Pure. 
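The preference machinery that ends above stores one record per preference name — category, optional override, description, PGIP type and get/set thunks — in a synchronized association list; set_preference applies the stored setter and init_preferences forces every overridden preference to its override value at startup. A compact Scala sketch of that shape; the field names follow the ML record (with "override" renamed, since it is a Scala keyword), and everything else, including the sample option cell, is illustrative.

object PreferenceSketch {
  final case class Preference(
    category: String,
    overrideValue: Option[String], // "override" in the ML record
    descr: String,
    pgiptype: String,
    get: () => String,
    set: String => Unit)

  private var preferences: List[(String, Preference)] = Nil

  def addPreference(name: String, pref: Preference): Unit = {
    if (preferences.exists(_._1 == name))
      Console.err.println("Redefining ProofGeneral preference: " + name)
    preferences = preferences.filterNot(_._1 == name) :+ (name -> pref)
  }

  def setPreference(name: String, value: String): Unit =
    preferences.find(_._1 == name) match {
      case Some((_, pref)) => pref.set(value)
      case None => sys.error("Unknown ProofGeneral preference: " + name)
    }

  // Overridden preferences are forced to their override value at init time.
  def initPreferences(): Unit =
    for ((_, pref) <- preferences; value <- pref.overrideValue) pref.set(value)

  def main(args: Array[String]): Unit = {
    var showTypes = false // stand-in for a system option such as show_types
    addPreference("show-types",
      Preference("Display", None, "Include types in display of Isabelle terms",
        "pgipbool", () => showTypes.toString, s => showTypes = s.toBoolean))
    setPreference("show-types", "true")
    println(showTypes) // true
  }
}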
-*) - -(*Proof General legacy*) - -structure ProofGeneral_Pure: sig end = -struct - -(** preferences **) - -(* display *) - -val _ = - ProofGeneral.preference_option ProofGeneral.category_display - NONE - @{system_option show_types} - "show-types" - "Include types in display of Isabelle terms"; - -val _ = - ProofGeneral.preference_option ProofGeneral.category_display - NONE - @{system_option show_sorts} - "show-sorts" - "Include sorts in display of Isabelle types"; - -val _ = - ProofGeneral.preference_option ProofGeneral.category_display - NONE - @{system_option show_consts} - "show-consts" - "Show types of consts in Isabelle goal display"; - -val _ = - ProofGeneral.preference_option ProofGeneral.category_display - NONE - @{system_option names_long} - "long-names" - "Show fully qualified names in Isabelle terms"; - -val _ = - ProofGeneral.preference_option ProofGeneral.category_display - NONE - @{system_option show_brackets} - "show-brackets" - "Show full bracketing in Isabelle terms"; - -val _ = - ProofGeneral.preference_option ProofGeneral.category_display - NONE - @{system_option show_main_goal} - "show-main-goal" - "Show main goal in proof state display"; - -val _ = - ProofGeneral.preference_option ProofGeneral.category_display - NONE - @{system_option eta_contract} - "eta-contract" - "Print terms eta-contracted"; - - -(* advanced display *) - -val _ = - ProofGeneral.preference_option ProofGeneral.category_advanced_display - NONE - @{system_option goals_limit} - "goals-limit" - "Setting for maximum number of subgoals to be printed"; - -val _ = - ProofGeneral.preference ProofGeneral.category_advanced_display - NONE - (Markup.print_int o get_default_print_depth) - (default_print_depth o Markup.parse_int) - ProofGeneral.pgipint - "print-depth" - "Setting for the ML print depth"; - -val _ = - ProofGeneral.preference_option ProofGeneral.category_advanced_display - NONE - @{system_option show_question_marks} - "show-question-marks" - "Show leading question mark of variable name"; - - -(* tracing *) - -val _ = - ProofGeneral.preference_bool ProofGeneral.category_tracing - NONE - Raw_Simplifier.simp_trace_default - "trace-simplifier" - "Trace simplification rules"; - -val _ = - ProofGeneral.preference_int ProofGeneral.category_tracing - NONE - Raw_Simplifier.simp_trace_depth_limit_default - "trace-simplifier-depth" - "Trace simplifier depth limit"; - -val _ = - ProofGeneral.preference_bool ProofGeneral.category_tracing - NONE - Pattern.unify_trace_failure_default - "trace-unification" - "Output error diagnostics during unification"; - -val _ = - ProofGeneral.preference_bool ProofGeneral.category_tracing - NONE - Toplevel.timing - "global-timing" - "Whether to enable timing in Isabelle"; - -val _ = - ProofGeneral.preference_option ProofGeneral.category_tracing - NONE - @{system_option ML_exception_trace} - "debugging" - "Whether to enable exception trace for toplevel command execution"; - -val _ = - ProofGeneral.preference_bool ProofGeneral.category_tracing - NONE - ProofGeneral.thm_deps - "theorem-dependencies" - "Track theorem dependencies within Proof General"; - - -(* proof *) - -val _ = - ProofGeneral.preference_option ProofGeneral.category_proof - (SOME "true") - @{system_option quick_and_dirty} - "quick-and-dirty" - "Take a few short cuts"; - -val _ = - ProofGeneral.preference_option ProofGeneral.category_proof - NONE - @{system_option skip_proofs} - "skip-proofs" - "Skip over proofs"; - -val _ = - ProofGeneral.preference ProofGeneral.category_proof - NONE - (Markup.print_bool o 
Proofterm.proofs_enabled) - (fn s => Proofterm.proofs := (if Markup.parse_bool s then 2 else 0)) - ProofGeneral.pgipbool - "full-proofs" - "Record full proof objects internally"; - -val _ = - ProofGeneral.preference ProofGeneral.category_proof - NONE - (Markup.print_int o Multithreading.max_threads_value) - (Multithreading.max_threads_update o Markup.parse_int) - ProofGeneral.pgipint - "max-threads" - "Maximum number of threads"; - -val _ = - ProofGeneral.preference ProofGeneral.category_proof - NONE - (fn () => Markup.print_bool (! Goal.parallel_proofs >= 1)) - (fn s => Goal.parallel_proofs := (if Markup.parse_bool s then 1 else 0)) - ProofGeneral.pgipint - "parallel-proofs" - "Check proofs in parallel"; - - - -(** command syntax **) - -val _ = - Outer_Syntax.improper_command - @{command_spec "ProofGeneral.process_pgip"} "(internal)" - (Parse.text >> (fn str => Toplevel.imperative (fn () => ProofGeneral.process_pgip str))); - -val _ = - Outer_Syntax.improper_command - @{command_spec "ProofGeneral.pr"} "(internal)" - (Scan.succeed (Toplevel.keep (fn state => - if Toplevel.is_toplevel state orelse Toplevel.is_theory state - then ProofGeneral.tell_clear_goals () - else (Toplevel.quiet := false; Toplevel.print_state state)))); - -val _ = (*undo without output -- historical*) - Outer_Syntax.improper_command - @{command_spec "ProofGeneral.undo"} "(internal)" - (Scan.succeed (Toplevel.imperative (fn () => Isar.undo 1))); - -val _ = - Outer_Syntax.improper_command - @{command_spec "ProofGeneral.restart"} "(internal)" - (Parse.opt_unit >> (K (Toplevel.imperative ProofGeneral.restart))); - -val _ = - Outer_Syntax.improper_command - @{command_spec "ProofGeneral.kill_proof"} "(internal)" - (Scan.succeed (Toplevel.imperative (fn () => - (Isar.kill_proof (); ProofGeneral.tell_clear_goals ())))); - -val _ = - Outer_Syntax.improper_command - @{command_spec "ProofGeneral.inform_file_processed"} "(internal)" - (Parse.name >> (fn file => Toplevel.imperative (fn () => - ProofGeneral.inform_file_processed file))); - -val _ = - Outer_Syntax.improper_command - @{command_spec "ProofGeneral.inform_file_retracted"} "(internal)" - (Parse.name >> (fn file => Toplevel.imperative (fn () => - ProofGeneral.inform_file_retracted file))); - -end; - diff --git a/core/Pure/Tools/rail.ML b/core/Pure/Tools/rail.ML deleted file mode 100644 index c7b1789e..00000000 --- a/core/Pure/Tools/rail.ML +++ /dev/null @@ -1,302 +0,0 @@ -(* Title: Pure/Tools/rail.ML - Author: Michael Kerscher, TU München - Author: Makarius - -Railroad diagrams in LaTeX. 
-*) - -structure Rail: sig end = -struct - -(** lexical syntax **) - -(* singleton keywords *) - -val keywords = - Symtab.make [ - ("|", Markup.keyword3), - ("*", Markup.keyword3), - ("+", Markup.keyword3), - ("?", Markup.keyword3), - ("(", Markup.empty), - (")", Markup.empty), - ("\", Markup.keyword2), - (";", Markup.keyword2), - (":", Markup.keyword2), - ("@", Markup.keyword1)]; - - -(* datatype token *) - -datatype kind = - Keyword | Ident | String | Antiq of Antiquote.antiq | EOF; - -datatype token = Token of Position.range * (kind * string); - -fun pos_of (Token ((pos, _), _)) = pos; -fun end_pos_of (Token ((_, pos), _)) = pos; - -fun kind_of (Token (_, (k, _))) = k; -fun content_of (Token (_, (_, x))) = x; - - -(* diagnostics *) - -val print_kind = - fn Keyword => "rail keyword" - | Ident => "identifier" - | String => "single-quoted string" - | Antiq _ => "antiquotation" - | EOF => "end-of-input"; - -fun print (Token ((pos, _), (k, x))) = - (if k = EOF then print_kind k else print_kind k ^ " " ^ quote x) ^ - Position.here pos; - -fun print_keyword x = print_kind Keyword ^ " " ^ quote x; - -fun reports_of_token (Token ((pos, _), (String, _))) = [(pos, Markup.inner_string)] - | reports_of_token (Token ((pos, _), (Keyword, x))) = - map (pair pos) (the_list (Symtab.lookup keywords x) @ Completion.suppress_abbrevs x) - | reports_of_token (Token (_, (Antiq antiq, _))) = Antiquote.antiq_reports antiq - | reports_of_token _ = []; - - -(* stopper *) - -fun mk_eof pos = Token ((pos, Position.none), (EOF, "")); -val eof = mk_eof Position.none; - -fun is_eof (Token (_, (EOF, _))) = true - | is_eof _ = false; - -val stopper = - Scan.stopper (fn [] => eof | toks => mk_eof (end_pos_of (List.last toks))) is_eof; - - -(* tokenize *) - -local - -fun token k ss = [Token (Symbol_Pos.range ss, (k, Symbol_Pos.content ss))]; - -val scan_space = Scan.many1 (Symbol.is_blank o Symbol_Pos.symbol); - -val scan_keyword = - Scan.one (Symtab.defined keywords o Symbol_Pos.symbol); - -val err_prefix = "Rail lexical error: "; - -val scan_token = - scan_space >> K [] || - Antiquote.scan_antiq >> (fn antiq as (ss, _) => token (Antiq antiq) ss) || - scan_keyword >> (token Keyword o single) || - Lexicon.scan_id >> token Ident || - Symbol_Pos.scan_string_q err_prefix >> (fn (pos1, (ss, pos2)) => - [Token (Position.range pos1 pos2, (String, Symbol_Pos.content ss))]); - -val scan = - (Scan.repeat scan_token >> flat) --| - Symbol_Pos.!!! (fn () => err_prefix ^ "bad input") - (Scan.ahead (Scan.one Symbol_Pos.is_eof)); - -in - -val tokenize = #1 o Scan.error (Scan.finite Symbol_Pos.stopper scan); - -end; - - - -(** parsing **) - -fun !!! scan = - let - val prefix = "Rail syntax error"; - - fun get_pos [] = " (end-of-input)" - | get_pos (tok :: _) = Position.here (pos_of tok); - - fun err (toks, NONE) = (fn () => prefix ^ get_pos toks) - | err (toks, SOME msg) = - (fn () => - let val s = msg () in - if String.isPrefix prefix s then s - else prefix ^ get_pos toks ^ ": " ^ s - end); - in Scan.!! err scan end; - -fun $$$ x = - Scan.one (fn tok => kind_of tok = Keyword andalso content_of tok = x) || - Scan.fail_with - (fn [] => (fn () => print_keyword x ^ " expected,\nbut end-of-input was found") - | tok :: _ => (fn () => print_keyword x ^ " expected,\nbut " ^ print tok ^ " was found")); - -fun enum1 sep scan = scan ::: Scan.repeat ($$$ sep |-- !!! 
scan); -fun enum sep scan = enum1 sep scan || Scan.succeed []; - -val ident = Scan.some (fn tok => if kind_of tok = Ident then SOME (content_of tok) else NONE); -val string = Scan.some (fn tok => if kind_of tok = String then SOME (content_of tok) else NONE); - -val antiq = Scan.some (fn tok => (case kind_of tok of Antiq a => SOME a | _ => NONE)); - - - -(** rail expressions **) - -(* datatype *) - -datatype rails = - Cat of int * rail list -and rail = - Bar of rails list | - Plus of rails * rails | - Newline of int | - Nonterminal of string | - Terminal of bool * string | - Antiquote of bool * Antiquote.antiq; - -fun reverse_cat (Cat (y, rails)) = Cat (y, rev (map reverse rails)) -and reverse (Bar cats) = Bar (map reverse_cat cats) - | reverse (Plus (cat1, cat2)) = Plus (reverse_cat cat1, reverse_cat cat2) - | reverse x = x; - -fun cat rails = Cat (0, rails); - -val empty = cat []; -fun is_empty (Cat (_, [])) = true | is_empty _ = false; - -fun is_newline (Newline _) = true | is_newline _ = false; - -fun bar [Cat (_, [rail])] = rail - | bar cats = Bar cats; - -fun plus cat1 cat2 = Plus (cat1, reverse_cat cat2); - -fun star cat1 cat2 = - if is_empty cat2 then plus empty cat1 - else bar [empty, cat [plus cat1 cat2]]; - -fun maybe rail = bar [empty, cat [rail]]; - - -(* read *) - -local - -val at_mode = Scan.option ($$$ "@") >> (fn NONE => false | _ => true); - -fun body x = (enum1 "|" body1 >> bar) x -and body0 x = (enum "|" body1 >> bar) x -and body1 x = - (body2 :|-- (fn a => - $$$ "*" |-- !!! body4e >> (cat o single o star a) || - $$$ "+" |-- !!! body4e >> (cat o single o plus a) || - Scan.succeed a)) x -and body2 x = (Scan.repeat1 body3 >> cat) x -and body3 x = (body4 :|-- (fn a => $$$ "?" >> K (maybe a) || Scan.succeed a)) x -and body4 x = - ($$$ "(" |-- !!! (body0 --| $$$ ")") || - $$$ "\" >> K (Newline 0) || - ident >> Nonterminal || - at_mode -- string >> Terminal || - at_mode -- antiq >> Antiquote) x -and body4e x = (Scan.option body4 >> (cat o the_list)) x; - -val rule_name = ident >> Antiquote.Text || antiq >> Antiquote.Antiq; -val rule = rule_name -- ($$$ ":" |-- !!! body) || body >> pair (Antiquote.Text ""); -val rules = enum1 ";" (Scan.option rule) >> map_filter I; - -in - -fun read ctxt (source: Symbol_Pos.source) = - let - val {text, pos, ...} = source; - val _ = Context_Position.report ctxt pos Markup.language_rail; - val toks = tokenize (Symbol_Pos.explode (text, pos)); - val _ = Context_Position.reports ctxt (maps reports_of_token toks); - in #1 (Scan.error (Scan.finite stopper (rules --| !!! (Scan.ahead (Scan.one is_eof)))) toks) end; - -end; - - -(* latex output *) - -local - -fun vertical_range_cat (Cat (_, rails)) y = - let val (rails', (_, y')) = - fold_map (fn rail => fn (y0, y') => - if is_newline rail then (Newline (y' + 1), (y' + 1, y' + 2)) - else - let val (rail', y0') = vertical_range rail y0; - in (rail', (y0, Int.max (y0', y'))) end) rails (y, y + 1) - in (Cat (y, rails'), y') end - -and vertical_range (Bar cats) y = - let val (cats', y') = fold_map vertical_range_cat cats y - in (Bar cats', Int.max (y + 1, y')) end - | vertical_range (Plus (cat1, cat2)) y = - let val ([cat1', cat2'], y') = fold_map vertical_range_cat [cat1, cat2] y; - in (Plus (cat1', cat2'), Int.max (y + 1, y')) end - | vertical_range (Newline _) y = (Newline (y + 2), y + 3) - | vertical_range atom y = (atom, y + 1); - -fun output_rules state rules = - let - val output_antiq = Thy_Output.eval_antiq (#1 (Keyword.get_lexicons ())) state; - fun output_text b s = - Output.output s - |> b ? 
enclose "\\isakeyword{" "}" - |> enclose "\\isa{" "}"; - - fun output_cat c (Cat (_, rails)) = outputs c rails - and outputs c [rail] = output c rail - | outputs _ rails = implode (map (output "") rails) - and output _ (Bar []) = "" - | output c (Bar [cat]) = output_cat c cat - | output _ (Bar (cat :: cats)) = - "\\rail@bar\n" ^ output_cat "" cat ^ - implode (map (fn Cat (y, rails) => - "\\rail@nextbar{" ^ string_of_int y ^ "}\n" ^ outputs "" rails) cats) ^ - "\\rail@endbar\n" - | output c (Plus (cat, Cat (y, rails))) = - "\\rail@plus\n" ^ output_cat c cat ^ - "\\rail@nextplus{" ^ string_of_int y ^ "}\n" ^ outputs "c" rails ^ - "\\rail@endplus\n" - | output _ (Newline y) = "\\rail@cr{" ^ string_of_int y ^ "}\n" - | output c (Nonterminal s) = "\\rail@" ^ c ^ "nont{" ^ output_text false s ^ "}[]\n" - | output c (Terminal (b, s)) = "\\rail@" ^ c ^ "term{" ^ output_text b s ^ "}[]\n" - | output c (Antiquote (b, a)) = - "\\rail@" ^ c ^ (if b then "term{" else "nont{") ^ output_antiq a ^ "}[]\n"; - - fun output_rule (name, rail) = - let - val (rail', y') = vertical_range rail 0; - val out_name = - (case name of - Antiquote.Text "" => "" - | Antiquote.Text s => output_text false s - | Antiquote.Antiq a => output_antiq a); - in - "\\rail@begin{" ^ string_of_int y' ^ "}{" ^ out_name ^ "}\n" ^ - output "" rail' ^ - "\\rail@end\n" - end; - in - "\\begin{railoutput}\n" ^ - implode (map output_rule rules) ^ - "\\end{railoutput}\n" - end; - -in - -val _ = Theory.setup - (Thy_Output.antiquotation @{binding rail} - (Scan.lift (Parse.source_position (Parse.string || Parse.cartouche))) - (fn {state, context, ...} => output_rules state o read context)); - -end; - -end; - diff --git a/core/Pure/Tools/rule_insts.ML b/core/Pure/Tools/rule_insts.ML deleted file mode 100644 index 7be8306e..00000000 --- a/core/Pure/Tools/rule_insts.ML +++ /dev/null @@ -1,371 +0,0 @@ -(* Title: Pure/Tools/rule_insts.ML - Author: Makarius - -Rule instantiations -- operations within implicit rule / subgoal context. 
-*) - -signature BASIC_RULE_INSTS = -sig - val res_inst_tac: Proof.context -> (indexname * string) list -> thm -> int -> tactic - val eres_inst_tac: Proof.context -> (indexname * string) list -> thm -> int -> tactic - val cut_inst_tac: Proof.context -> (indexname * string) list -> thm -> int -> tactic - val forw_inst_tac: Proof.context -> (indexname * string) list -> thm -> int -> tactic - val dres_inst_tac: Proof.context -> (indexname * string) list -> thm -> int -> tactic - val thin_tac: Proof.context -> string -> int -> tactic - val subgoal_tac: Proof.context -> string -> int -> tactic -end; - -signature RULE_INSTS = -sig - include BASIC_RULE_INSTS - val where_rule: Proof.context -> (indexname * string) list -> - (binding * string option * mixfix) list -> thm -> thm - val of_rule: Proof.context -> string option list * string option list -> - (binding * string option * mixfix) list -> thm -> thm - val read_instantiate: Proof.context -> (indexname * string) list -> string list -> thm -> thm - val instantiate_tac: Proof.context -> (indexname * string) list -> string list -> tactic - val make_elim_preserve: thm -> thm - val method: (Proof.context -> (indexname * string) list -> thm -> int -> tactic) -> - (Proof.context -> thm list -> int -> tactic) -> (Proof.context -> Proof.method) context_parser -end; - -structure Rule_Insts: RULE_INSTS = -struct - -(** reading instantiations **) - -local - -fun error_var msg xi = error (msg ^ quote (Term.string_of_vname xi)); - -fun the_sort tvars (xi: indexname) = - (case AList.lookup (op =) tvars xi of - SOME S => S - | NONE => error_var "No such type variable in theorem: " xi); - -fun the_type vars (xi: indexname) = - (case AList.lookup (op =) vars xi of - SOME T => T - | NONE => error_var "No such variable in theorem: " xi); - -fun instantiate inst = - Term_Subst.instantiate ([], map (fn (xi, t) => ((xi, Term.fastype_of t), t)) inst) #> - Envir.beta_norm; - -fun make_instT f v = - let - val T = TVar v; - val T' = f T; - in if T = T' then NONE else SOME (T, T') end; - -fun make_inst f v = - let - val t = Var v; - val t' = f t; - in if t aconv t' then NONE else SOME (t, t') end; - -val add_used = - (Thm.fold_terms o fold_types o fold_atyps) - (fn TFree (a, _) => insert (op =) a - | TVar ((a, _), _) => insert (op =) a - | _ => I); - -in - -fun read_termTs ctxt ss Ts = - let - fun parse T = if T = propT then Syntax.parse_prop ctxt else Syntax.parse_term ctxt; - val ts = map2 parse Ts ss; - val ts' = - map2 (Type.constraint o Type_Infer.paramify_vars) Ts ts - |> Syntax.check_terms ctxt - |> Variable.polymorphic ctxt; - val Ts' = map Term.fastype_of ts'; - val tyenv = fold Type.raw_match (Ts ~~ Ts') Vartab.empty; - in (ts', map (apsnd snd) (Vartab.dest tyenv)) end; - -fun read_insts ctxt mixed_insts (tvars, vars) = - let - val thy = Proof_Context.theory_of ctxt; - val cert = Thm.cterm_of thy; - val certT = Thm.ctyp_of thy; - - val (type_insts, term_insts) = List.partition (String.isPrefix "'" o fst o fst) mixed_insts; - - - (* type instantiations *) - - fun readT (xi, s) = - let - val S = the_sort tvars xi; - val T = Syntax.read_typ ctxt s; - in - if Sign.of_sort thy (T, S) then ((xi, S), T) - else error_var "Incompatible sort for typ instantiation of " xi - end; - - val instT1 = Term_Subst.instantiateT (map readT type_insts); - val vars1 = map (apsnd instT1) vars; - - - (* term instantiations *) - - val (xs, ss) = split_list term_insts; - val Ts = map (the_type vars1) xs; - val (ts, inferred) = read_termTs ctxt ss Ts; - - val instT2 = Term.typ_subst_TVars 
inferred; - val vars2 = map (apsnd instT2) vars1; - val inst2 = instantiate (xs ~~ ts); - - - (* result *) - - val inst_tvars = map_filter (make_instT (instT2 o instT1)) tvars; - val inst_vars = map_filter (make_inst inst2) vars2; - in - (map (pairself certT) inst_tvars, map (pairself cert) inst_vars) - end; - -fun where_rule ctxt mixed_insts fixes thm = - let - val ctxt' = ctxt - |> Proof_Context.read_vars fixes |-> Proof_Context.add_fixes |> #2 - |> Variable.declare_thm thm - |> fold (fn a => Variable.declare_names (Logic.mk_type (TFree (a, dummyS)))) (add_used thm []); (* FIXME !? *) - val tvars = Thm.fold_terms Term.add_tvars thm []; - val vars = Thm.fold_terms Term.add_vars thm []; - val insts = read_insts ctxt' mixed_insts (tvars, vars); - in - Drule.instantiate_normalize insts thm - |> singleton (Proof_Context.export ctxt' ctxt) - |> Rule_Cases.save thm - end; - -fun of_rule ctxt (args, concl_args) fixes thm = - let - fun zip_vars _ [] = [] - | zip_vars (_ :: xs) (NONE :: rest) = zip_vars xs rest - | zip_vars ((x, _) :: xs) (SOME t :: rest) = (x, t) :: zip_vars xs rest - | zip_vars [] _ = error "More instantiations than variables in theorem"; - val insts = - zip_vars (rev (Term.add_vars (Thm.full_prop_of thm) [])) args @ - zip_vars (rev (Term.add_vars (Thm.concl_of thm) [])) concl_args; - in where_rule ctxt insts fixes thm end; - -end; - - -(* instantiation of rule or goal state *) - -fun read_instantiate ctxt insts xs = - where_rule ctxt insts (map (fn x => (Binding.name x, NONE, NoSyn)) xs); - -fun instantiate_tac ctxt insts fixes = - PRIMITIVE (read_instantiate ctxt insts fixes); - - - -(** attributes **) - -(* where: named instantiation *) - -val _ = Theory.setup - (Attrib.setup @{binding "where"} - (Scan.lift - (Parse.and_list (Args.var -- (Args.$$$ "=" |-- Args.name_inner_syntax)) -- Parse.for_fixes) >> - (fn (insts, fixes) => - Thm.rule_attribute (fn context => where_rule (Context.proof_of context) insts fixes))) - "named instantiation of theorem"); - - -(* of: positional instantiation (terms only) *) - -local - -val inst = Args.maybe Args.name_inner_syntax; -val concl = Args.$$$ "concl" -- Args.colon; - -val insts = - Scan.repeat (Scan.unless concl inst) -- - Scan.optional (concl |-- Scan.repeat inst) []; - -in - -val _ = Theory.setup - (Attrib.setup @{binding "of"} - (Scan.lift (insts -- Parse.for_fixes) >> (fn (args, fixes) => - Thm.rule_attribute (fn context => of_rule (Context.proof_of context) args fixes))) - "positional instantiation of theorem"); - -end; - - - -(** tactics **) - -(* resolution after lifting and instantation; may refer to parameters of the subgoal *) - -(* FIXME cleanup this mess!!! 
*) - -fun bires_inst_tac bires_flag ctxt insts thm = - let - val thy = Proof_Context.theory_of ctxt; - (* Separate type and term insts *) - fun has_type_var ((x, _), _) = - (case Symbol.explode x of "'" :: _ => true | _ => false); - val Tinsts = filter has_type_var insts; - val tinsts = filter_out has_type_var insts; - - (* Tactic *) - fun tac i st = CSUBGOAL (fn (cgoal, _) => - let - val goal = term_of cgoal; - val params = Logic.strip_params goal; (*params of subgoal i as string typ pairs*) - val params = rev (Term.rename_wrt_term goal params) - (*as they are printed: bound variables with*) - (*the same name are renamed during printing*) - - val (param_names, ctxt') = ctxt - |> Variable.declare_thm thm - |> Thm.fold_terms Variable.declare_constraints st - |> Proof_Context.add_fixes (map (fn (x, T) => (Binding.name x, SOME T, NoSyn)) params); - - (* Process type insts: Tinsts_env *) - fun absent xi = error - ("No such variable in theorem: " ^ Term.string_of_vname xi); - val (rtypes, rsorts) = Drule.types_sorts thm; - fun readT (xi, s) = - let val S = case rsorts xi of SOME S => S | NONE => absent xi; - val T = Syntax.read_typ ctxt' s; - val U = TVar (xi, S); - in if Sign.typ_instance thy (T, U) then (U, T) - else error ("Instantiation of " ^ Term.string_of_vname xi ^ " fails") - end; - val Tinsts_env = map readT Tinsts; - (* Preprocess rule: extract vars and their types, apply Tinsts *) - fun get_typ xi = - (case rtypes xi of - SOME T => typ_subst_atomic Tinsts_env T - | NONE => absent xi); - val (xis, ss) = Library.split_list tinsts; - val Ts = map get_typ xis; - - val (ts, envT) = - read_termTs (Proof_Context.set_mode Proof_Context.mode_schematic ctxt') ss Ts; - val envT' = map (fn (ixn, T) => - (TVar (ixn, the (rsorts ixn)), T)) envT @ Tinsts_env; - val cenv = - map - (fn (xi, t) => - pairself (Thm.cterm_of thy) (Var (xi, fastype_of t), t)) - (distinct - (fn ((x1, t1), (x2, t2)) => x1 = x2 andalso t1 aconv t2) - (xis ~~ ts)); - (* Lift and instantiate rule *) - val maxidx = Thm.maxidx_of st; - val paramTs = map #2 params - and inc = maxidx+1 - fun liftvar (Var ((a,j), T)) = - Var((a, j+inc), paramTs ---> Logic.incr_tvar inc T) - | liftvar t = raise TERM("Variable expected", [t]); - fun liftterm t = - fold_rev absfree (param_names ~~ paramTs) (Logic.incr_indexes (paramTs, inc) t); - fun liftpair (cv, ct) = (cterm_fun liftvar cv, cterm_fun liftterm ct); - val lifttvar = pairself (ctyp_of thy o Logic.incr_tvar inc); - val rule = Drule.instantiate_normalize - (map lifttvar envT', map liftpair cenv) - (Thm.lift_rule cgoal thm) - in - compose_tac (bires_flag, rule, nprems_of thm) i - end) i st; - in tac end; - -val res_inst_tac = bires_inst_tac false; -val eres_inst_tac = bires_inst_tac true; - - -(* forward resolution *) - -fun make_elim_preserve rl = - let - val cert = Thm.cterm_of (Thm.theory_of_thm rl); - val maxidx = Thm.maxidx_of rl; - fun cvar xi = cert (Var (xi, propT)); - val revcut_rl' = - Drule.instantiate_normalize ([], [(cvar ("V", 0), cvar ("V", maxidx + 1)), - (cvar ("W", 0), cvar ("W", maxidx + 1))]) Drule.revcut_rl; - in - (case Seq.list_of - (Thm.bicompose {flatten = true, match = false, incremented = false} - (false, rl, Thm.nprems_of rl) 1 revcut_rl') - of - [th] => th - | _ => raise THM ("make_elim_preserve", 1, [rl])) - end; - -(*instantiate and cut -- for atomic fact*) -fun cut_inst_tac ctxt insts rule = res_inst_tac ctxt insts (make_elim_preserve rule); - -(*forward tactic applies a rule to an assumption without deleting it*) -fun forw_inst_tac ctxt insts rule = 
cut_inst_tac ctxt insts rule THEN' assume_tac; - -(*dresolve tactic applies a rule to replace an assumption*) -fun dres_inst_tac ctxt insts rule = eres_inst_tac ctxt insts (make_elim_preserve rule); - - -(* derived tactics *) - -(*deletion of an assumption*) -fun thin_tac ctxt s = eres_inst_tac ctxt [(("V", 0), s)] Drule.thin_rl; - -(*Introduce the given proposition as lemma and subgoal*) -fun subgoal_tac ctxt A = DETERM o res_inst_tac ctxt [(("psi", 0), A)] cut_rl; - - - -(* method wrapper *) - -fun method inst_tac tac = - Args.goal_spec -- - Scan.optional (Scan.lift - (Parse.and_list1 (Args.var -- (Args.$$$ "=" |-- Parse.!!! Args.name_inner_syntax)) --| - Args.$$$ "in")) [] -- - Attrib.thms >> - (fn ((quant, insts), thms) => fn ctxt => METHOD (fn facts => - if null insts then quant (Method.insert_tac facts THEN' tac ctxt thms) - else - (case thms of - [thm] => quant (Method.insert_tac facts THEN' inst_tac ctxt insts thm) - | _ => error "Cannot have instantiations with multiple rules"))); - - -(* setup *) - -(*warning: rule_tac etc. refer to dynamic subgoal context!*) - -val _ = Theory.setup - (Method.setup @{binding rule_tac} (method res_inst_tac (K resolve_tac)) - "apply rule (dynamic instantiation)" #> - Method.setup @{binding erule_tac} (method eres_inst_tac (K eresolve_tac)) - "apply rule in elimination manner (dynamic instantiation)" #> - Method.setup @{binding drule_tac} (method dres_inst_tac (K dresolve_tac)) - "apply rule in destruct manner (dynamic instantiation)" #> - Method.setup @{binding frule_tac} (method forw_inst_tac (K forward_tac)) - "apply rule in forward manner (dynamic instantiation)" #> - Method.setup @{binding cut_tac} (method cut_inst_tac (K cut_rules_tac)) - "cut rule (dynamic instantiation)" #> - Method.setup @{binding subgoal_tac} - (Args.goal_spec -- Scan.lift (Scan.repeat1 Args.name_inner_syntax) >> - (fn (quant, props) => fn ctxt => - SIMPLE_METHOD'' quant (EVERY' (map (subgoal_tac ctxt) props)))) - "insert subgoal (dynamic instantiation)" #> - Method.setup @{binding thin_tac} - (Args.goal_spec -- Scan.lift Args.name_inner_syntax >> - (fn (quant, prop) => fn ctxt => SIMPLE_METHOD'' quant (thin_tac ctxt prop))) - "remove premise (dynamic instantiation)"); - -end; - -structure Basic_Rule_Insts: BASIC_RULE_INSTS = Rule_Insts; -open Basic_Rule_Insts; diff --git a/core/Pure/Tools/simplifier_trace.ML b/core/Pure/Tools/simplifier_trace.ML deleted file mode 100644 index 173f0f6e..00000000 --- a/core/Pure/Tools/simplifier_trace.ML +++ /dev/null @@ -1,439 +0,0 @@ -(* Title: Pure/Tools/simplifier_trace.ML - Author: Lars Hupel - -Interactive Simplifier trace. 
-*) - -signature SIMPLIFIER_TRACE = -sig - val add_term_breakpoint: term -> Context.generic -> Context.generic - val add_thm_breakpoint: thm -> Context.generic -> Context.generic -end - -structure Simplifier_Trace: SIMPLIFIER_TRACE = -struct - -(** context data **) - -datatype mode = Disabled | Normal | Full - -fun merge_modes Disabled m = m - | merge_modes Normal Full = Full - | merge_modes Normal _ = Normal - | merge_modes Full _ = Full - -val empty_breakpoints = - (Item_Net.init (op aconv) single, - Item_Net.init eq_rrule (single o Thm.full_prop_of o #thm)) - -fun merge_breakpoints ((term_bs1, thm_bs1), (term_bs2, thm_bs2)) = - (Item_Net.merge (term_bs1, term_bs2), - Item_Net.merge (thm_bs1, thm_bs2)) - -structure Data = Generic_Data -( - type T = - {max_depth: int, - mode: mode, - interactive: bool, - memory: bool, - parent: int, - breakpoints: term Item_Net.T * rrule Item_Net.T} - val empty = - {max_depth = 10, - mode = Disabled, - interactive = false, - memory = true, - parent = 0, - breakpoints = empty_breakpoints} - val extend = I - fun merge - ({max_depth = max_depth1, mode = mode1, interactive = interactive1, - memory = memory1, breakpoints = breakpoints1, ...}: T, - {max_depth = max_depth2, mode = mode2, interactive = interactive2, - memory = memory2, breakpoints = breakpoints2, ...}: T) = - {max_depth = Int.max (max_depth1, max_depth2), - mode = merge_modes mode1 mode2, - interactive = interactive1 orelse interactive2, - memory = memory1 andalso memory2, - parent = 0, - breakpoints = merge_breakpoints (breakpoints1, breakpoints2)}: T -) - -val get_data = Data.get o Context.Proof -val put_data = Context.proof_map o Data.put - -val get_breakpoints = #breakpoints o get_data - -fun map_breakpoints f = - Data.map - (fn {max_depth, mode, interactive, parent, memory, breakpoints} => - {max_depth = max_depth, - mode = mode, - interactive = interactive, - memory = memory, - parent = parent, - breakpoints = f breakpoints}) - -fun add_term_breakpoint term = - map_breakpoints (apfst (Item_Net.update term)) - -fun add_thm_breakpoint thm context = - let - val rrules = mk_rrules (Context.proof_of context) [thm] - in - map_breakpoints (apsnd (fold Item_Net.update rrules)) context - end - -fun check_breakpoint (term, rrule) ctxt = - let - val thy = Proof_Context.theory_of ctxt - val (term_bs, thm_bs) = get_breakpoints ctxt - - val term_matches = - filter (fn pat => Pattern.matches thy (pat, term)) - (Item_Net.retrieve_matching term_bs term) - - val thm_matches = - exists (eq_rrule o pair rrule) - (Item_Net.retrieve_matching thm_bs (Thm.full_prop_of (#thm rrule))) - in - (term_matches, thm_matches) - end - - - -(** config and attributes **) - -fun config raw_mode interactive max_depth memory = - let - val mode = - (case raw_mode of - "normal" => Normal - | "full" => Full - | _ => error ("Simplifier_Trace.config: unknown mode " ^ raw_mode)) - - val update = Data.map (fn {parent, breakpoints, ...} => - {max_depth = max_depth, - mode = mode, - interactive = interactive, - memory = memory, - parent = parent, - breakpoints = breakpoints}) - in Thm.declaration_attribute (K update) end - -fun breakpoint terms = - Thm.declaration_attribute (fn thm => add_thm_breakpoint thm o fold add_term_breakpoint terms) - - - -(** tracing state **) - -val futures = - Synchronized.var "Simplifier_Trace.futures" (Inttab.empty: string future Inttab.table) - - - -(** markup **) - -fun output_result (id, data) = - Output.result (Markup.serial_properties id) [data] - -val parentN = "parent" -val textN = "text" -val memoryN 
= "memory" -val successN = "success" - -type payload = - {props: Properties.T, - pretty: Pretty.T} - -fun empty_payload () : payload = - {props = [], pretty = Pretty.str ""} - -fun mk_generic_result markup text triggered (payload : unit -> payload) ctxt = - let - val {mode, interactive, memory, parent, ...} = get_data ctxt - - val eligible = - (case mode of - Disabled => false - | Normal => triggered - | Full => true) - - val markup' = - if markup = Markup.simp_trace_stepN andalso not interactive - then Markup.simp_trace_logN - else markup - in - if not eligible then NONE - else - let - val {props = more_props, pretty} = payload () - val props = - [(textN, text), - (memoryN, Markup.print_bool memory), - (parentN, Markup.print_int parent)] - val data = - Pretty.string_of (Pretty.markup (markup', props @ more_props) [pretty]) - in - SOME (serial (), data) - end - end - - - -(** tracing output **) - -fun see_panel () = - let - val ((bg1, bg2), en) = - YXML.output_markup_elem - (Active.make_markup Markup.simp_trace_panelN {implicit = false, properties = []}) - in "See " ^ bg1 ^ bg2 ^ "simplifier trace" ^ en end - - -fun send_request (result_id, content) = - let - fun break () = - (Output.protocol_message (Markup.simp_trace_cancel result_id) []; - Synchronized.change futures (Inttab.delete_safe result_id)) - val promise = Future.promise break : string future - in - Synchronized.change futures (Inttab.update_new (result_id, promise)); - output_result (result_id, content); - promise - end - - -type data = {term: term, thm: thm, unconditional: bool, ctxt: Proof.context, rrule: rrule} - -fun step ({term, thm, unconditional, ctxt, rrule}: data) = - let - val (matching_terms, thm_triggered) = check_breakpoint (term, rrule) ctxt - - val {name, ...} = rrule - val term_triggered = not (null matching_terms) - - val text = - if unconditional then "Apply rewrite rule?" - else "Apply conditional rewrite rule?" 
- - fun payload () = - let - (* FIXME pretty printing via Proof_Context.pretty_fact *) - val pretty_thm = Pretty.block - [Pretty.str ("Instance of " ^ name ^ ":"), - Pretty.brk 1, - Syntax.pretty_term ctxt (Thm.prop_of thm)] - - val pretty_term = Pretty.block - [Pretty.str "Trying to rewrite:", - Pretty.brk 1, - Syntax.pretty_term ctxt term] - - val pretty_matchings = - let - val items = map (Pretty.item o single o Syntax.pretty_term ctxt) matching_terms - in - if not (null matching_terms) then - [Pretty.block (Pretty.fbreaks (Pretty.str "Matching terms:" :: items))] - else [] - end - - val pretty = - Pretty.chunks ([pretty_thm, pretty_term] @ pretty_matchings) - in - {props = [], pretty = pretty} - end - - val {max_depth, mode, interactive, memory, breakpoints, ...} = get_data ctxt - - fun mk_promise result = - let - val result_id = #1 result - - fun put mode' interactive' = put_data - {max_depth = max_depth, - mode = mode', - interactive = interactive', - memory = memory, - parent = result_id, - breakpoints = breakpoints} ctxt - - fun to_response "skip" = NONE - | to_response "continue" = SOME (put mode true) - | to_response "continue_trace" = SOME (put Full true) - | to_response "continue_passive" = SOME (put mode false) - | to_response "continue_disable" = SOME (put Disabled false) - | to_response _ = raise Fail "Simplifier_Trace.step: invalid message" - in - if not interactive then - (output_result result; Future.value (SOME (put mode false))) - else Future.map to_response (send_request result) - end - - in - (case mk_generic_result Markup.simp_trace_stepN text - (thm_triggered orelse term_triggered) payload ctxt of - NONE => Future.value (SOME ctxt) - | SOME res => mk_promise res) - end - -fun recurse text depth term ctxt = - let - fun payload () = - {props = [], - pretty = Syntax.pretty_term ctxt term} - - val {max_depth, mode, interactive, memory, breakpoints, ...} = get_data ctxt - - fun put result_id = put_data - {max_depth = max_depth, - mode = if depth >= max_depth then Disabled else mode, - interactive = interactive, - memory = memory, - parent = result_id, - breakpoints = breakpoints} ctxt - in - (case mk_generic_result Markup.simp_trace_recurseN text true payload ctxt of - NONE => put 0 - | SOME res => - (if depth = 1 then writeln (see_panel ()) else (); - output_result res; - put (#1 res))) - end - -fun indicate_failure ({term, ctxt, thm, rrule, ...}: data) ctxt' = - let - fun payload () = - let - val {name, ...} = rrule - val pretty_thm = - (* FIXME pretty printing via Proof_Context.pretty_fact *) - Pretty.block - [Pretty.str ("In an instance of " ^ name ^ ":"), - Pretty.brk 1, - Syntax.pretty_term ctxt (Thm.prop_of thm)] - - val pretty_term = - Pretty.block - [Pretty.str "Was trying to rewrite:", - Pretty.brk 1, - Syntax.pretty_term ctxt term] - - val pretty = - Pretty.chunks [pretty_thm, pretty_term] - in - {props = [(successN, "false")], pretty = pretty} - end - - val {interactive, ...} = get_data ctxt - - fun mk_promise result = - let - fun to_response "exit" = false - | to_response "redo" = - (Option.app output_result - (mk_generic_result Markup.simp_trace_ignoreN "Ignore" true empty_payload ctxt'); - true) - | to_response _ = raise Fail "Simplifier_Trace.indicate_failure: invalid message" - in - if not interactive then - (output_result result; Future.value false) - else Future.map to_response (send_request result) - end - in - (case mk_generic_result Markup.simp_trace_hintN "Step failed" true payload ctxt' of - NONE => Future.value false - | SOME res => mk_promise 
res) - end - -fun indicate_success thm ctxt = - let - fun payload () = - {props = [(successN, "true")], - pretty = Syntax.pretty_term ctxt (Thm.prop_of thm)} - in - Option.app output_result - (mk_generic_result Markup.simp_trace_hintN "Successfully rewrote" true payload ctxt) - end - - - -(** setup **) - -fun simp_apply args ctxt cont = - let - val {unconditional: bool, term: term, thm: thm, rrule: rrule} = args - val data = - {term = term, - unconditional = unconditional, - ctxt = ctxt, - thm = thm, - rrule = rrule} - in - (case Future.join (step data) of - NONE => NONE - | SOME ctxt' => - let val res = cont ctxt' in - (case res of - NONE => - if Future.join (indicate_failure data ctxt') then - simp_apply args ctxt cont - else NONE - | SOME (thm, _) => (indicate_success thm ctxt'; res)) - end) - end - -val _ = Session.protocol_handler "isabelle.Simplifier_Trace$Handler" - -val _ = Theory.setup - (Simplifier.set_trace_ops - {trace_invoke = fn {depth, term} => recurse "Simplifier invoked" depth term, - trace_apply = simp_apply}) - -val _ = - Isabelle_Process.protocol_command "Simplifier_Trace.reply" - (fn [s, r] => - let - val serial = Markup.parse_int s - fun lookup_delete tab = - (Inttab.lookup tab serial, Inttab.delete_safe serial tab) - fun apply_result (SOME promise) = Future.fulfill promise r - | apply_result NONE = () (* FIXME handle protocol failure, just like in active.ML? *) - in - (Synchronized.change_result futures lookup_delete |> apply_result) - handle exn => if Exn.is_interrupt exn then () (*sic!*) else reraise exn - end) - - - -(** attributes **) - -val mode_parser = - Scan.optional - (Args.$$$ "mode" |-- Args.$$$ "=" |-- (Args.$$$ "normal" || Args.$$$ "full")) - "normal" - -val interactive_parser = - Scan.optional (Args.$$$ "interactive" >> K true) false - -val memory_parser = - Scan.optional (Args.$$$ "no_memory" >> K false) true - -val depth_parser = - Scan.optional (Args.$$$ "depth" |-- Args.$$$ "=" |-- Parse.nat) 10 - -val config_parser = - (interactive_parser -- mode_parser -- depth_parser -- memory_parser) >> - (fn (((interactive, mode), depth), memory) => config mode interactive depth memory) - -val _ = Theory.setup - (Attrib.setup @{binding simp_break} - (Scan.repeat Args.term_pattern >> breakpoint) - "declaration of a simplifier breakpoint" #> - Attrib.setup @{binding simp_trace_new} (Scan.lift config_parser) - "simplifier trace configuration") - -end diff --git a/core/Pure/Tools/simplifier_trace.scala b/core/Pure/Tools/simplifier_trace.scala deleted file mode 100644 index 74615636..00000000 --- a/core/Pure/Tools/simplifier_trace.scala +++ /dev/null @@ -1,310 +0,0 @@ -/* Title: Pure/Tools/simplifier_trace.scala - Author: Lars Hupel - -Interactive Simplifier trace. 
-*/ - -package isabelle - - -import scala.annotation.tailrec -import scala.collection.immutable.SortedMap - - -object Simplifier_Trace -{ - /* trace items from the prover */ - - val TEXT = "text" - val Text = new Properties.String(TEXT) - - val PARENT = "parent" - val Parent = new Properties.Long(PARENT) - - val SUCCESS = "success" - val Success = new Properties.Boolean(SUCCESS) - - val MEMORY = "memory" - val Memory = new Properties.Boolean(MEMORY) - - object Item - { - case class Data( - serial: Long, markup: String, text: String, - parent: Long, props: Properties.T, content: XML.Body) - { - def memory: Boolean = Memory.unapply(props) getOrElse true - } - - def unapply(tree: XML.Tree): Option[(String, Data)] = - tree match { - case XML.Elem(Markup(Markup.RESULT, Markup.Serial(serial)), - List(XML.Elem(Markup(markup, props), content))) - if markup.startsWith("simp_trace_") => // FIXME proper comparison of string constants - (props, props) match { - case (Text(text), Parent(parent)) => - Some((markup, Data(serial, markup, text, parent, props, content))) - case _ => None - } - case _ => None - } - } - - - /* replies to the prover */ - - case class Answer private[Simplifier_Trace](val name: String, val string: String) - - object Answer - { - object step - { - val skip = Answer("skip", "Skip") - val continue = Answer("continue", "Continue") - val continue_trace = Answer("continue_trace", "Continue (with full trace)") - val continue_passive = Answer("continue_passive", "Continue (without asking)") - val continue_disable = Answer("continue_disable", "Continue (without any trace)") - - val all = List(continue, continue_trace, continue_passive, continue_disable, skip) - } - - object hint_fail - { - val exit = Answer("exit", "Exit") - val redo = Answer("redo", "Redo") - - val all = List(redo, exit) - } - } - - val all_answers: List[Answer] = Answer.step.all ::: Answer.hint_fail.all - - - /* GUI interaction */ - - case object Event - - - /* manager thread */ - - private case class Handle_Results( - session: Session, id: Document_ID.Command, results: Command.Results, slot: Promise[Context]) - private case class Generate_Trace(results: Command.Results, slot: Promise[Trace]) - private case class Cancel(serial: Long) - private object Clear_Memory - case class Reply(session: Session, serial: Long, answer: Answer) - - case class Question(data: Item.Data, answers: List[Answer]) - - case class Context( - last_serial: Long = 0L, - questions: SortedMap[Long, Question] = SortedMap.empty) - { - def +(q: Question): Context = - copy(questions = questions + ((q.data.serial, q))) - - def -(s: Long): Context = - copy(questions = questions - s) - - def with_serial(s: Long): Context = - copy(last_serial = Math.max(last_serial, s)) - } - - case class Trace(entries: List[Item.Data]) - - case class Index(text: String, content: XML.Body) - - object Index - { - def of_data(data: Item.Data): Index = - Index(data.text, data.content) - } - - def handle_results(session: Session, id: Document_ID.Command, results: Command.Results): Context = - { - val slot = Future.promise[Context] - manager.send(Handle_Results(session, id, results, slot)) - slot.join - } - - def generate_trace(results: Command.Results): Trace = - { - val slot = Future.promise[Trace] - manager.send(Generate_Trace(results, slot)) - slot.join - } - - def clear_memory() = - manager.send(Clear_Memory) - - def send_reply(session: Session, serial: Long, answer: Answer) = - manager.send(Reply(session, serial, answer)) - - private lazy val manager: Consumer_Thread[Any] 
= - { - var contexts = Map.empty[Document_ID.Command, Context] - - var memory_children = Map.empty[Long, Set[Long]] - var memory = Map.empty[Index, Answer] - - def find_question(serial: Long): Option[(Document_ID.Command, Question)] = - contexts collectFirst { - case (id, context) if context.questions contains serial => - (id, context.questions(serial)) - } - - def do_cancel(serial: Long, id: Document_ID.Command) - { - // To save memory, we could try to remove empty contexts at this point. - // However, if a new serial gets attached to the same command_id after we deleted - // its context, its last_serial counter will start at 0 again, and we'll think the - // old serials are actually new - contexts += (id -> (contexts(id) - serial)) - } - - def do_reply(session: Session, serial: Long, answer: Answer) - { - session.protocol_command( - "Simplifier_Trace.reply", Properties.Value.Long(serial), answer.name) - } - - Consumer_Thread.fork[Any]("Simplifier_Trace.manager", daemon = true)( - consume = (arg: Any) => - { - arg match { - case Handle_Results(session, id, results, slot) => - var new_context = contexts.getOrElse(id, Context()) - var new_serial = new_context.last_serial - - for ((serial, result) <- results.iterator if serial > new_context.last_serial) - { - result match { - case Item(markup, data) => - memory_children += - (data.parent -> (memory_children.getOrElse(data.parent, Set.empty) + serial)) - - markup match { - - case Markup.SIMP_TRACE_STEP => - val index = Index.of_data(data) - memory.get(index) match { - case Some(answer) if data.memory => - do_reply(session, serial, answer) - case _ => - new_context += Question(data, Answer.step.all) - } - - case Markup.SIMP_TRACE_HINT => - data.props match { - case Success(false) => - results.get(data.parent) match { - case Some(Item(Markup.SIMP_TRACE_STEP, _)) => - new_context += Question(data, Answer.hint_fail.all) - case _ => - // unknown, better send a default reply - do_reply(session, data.serial, Answer.hint_fail.exit) - } - case _ => - } - - case Markup.SIMP_TRACE_IGNORE => - // At this point, we know that the parent of this 'IGNORE' entry is a 'STEP' - // entry, and that that 'STEP' entry is about to be replayed. Hence, we need - // to selectively purge the replies which have been memorized, going down from - // the parent to all leaves. - - @tailrec - def purge(queue: Vector[Long]): Unit = - queue match { - case s +: rest => - for (Item(Markup.SIMP_TRACE_STEP, data) <- results.get(s)) - memory -= Index.of_data(data) - val children = memory_children.getOrElse(s, Set.empty) - memory_children -= s - purge(rest ++ children.toVector) - case _ => - } - - purge(Vector(data.parent)) - - case _ => - } - - case _ => - } - - new_serial = serial - } - - new_context = new_context.with_serial(new_serial) - contexts += (id -> new_context) - slot.fulfill(new_context) - - case Generate_Trace(results, slot) => - // Since there are potentially lots of trace messages, we do not cache them here again. - // Instead, everytime the trace is being requested, we re-assemble it based on the - // current results. 
- - val items = - results.iterator.collect { case (_, Item(_, data)) => data }.toList - - slot.fulfill(Trace(items)) - - case Cancel(serial) => - find_question(serial) match { - case Some((id, _)) => - do_cancel(serial, id) - case None => - } - - case Clear_Memory => - memory_children = Map.empty - memory = Map.empty - - case Reply(session, serial, answer) => - find_question(serial) match { - case Some((id, Question(data, _))) => - if (data.markup == Markup.SIMP_TRACE_STEP && data.memory) - { - val index = Index.of_data(data) - memory += (index -> answer) - } - do_cancel(serial, id) - case None => - Output.warning("send_reply: unknown serial " + serial) - } - - do_reply(session, serial, answer) - session.trace_events.post(Event) - } - true - }, - finish = () => contexts = Map.empty - ) - } - - - /* protocol handler */ - - class Handler extends Session.Protocol_Handler - { - assert(manager.is_active) - - private def cancel(prover: Prover, msg: Prover.Protocol_Output): Boolean = - msg.properties match { - case Markup.Simp_Trace_Cancel(serial) => - manager.send(Cancel(serial)) - true - case _ => - false - } - - override def stop(prover: Prover) = - { - manager.send(Clear_Memory) - manager.shutdown() - } - - val functions = Map(Markup.SIMP_TRACE_CANCEL -> cancel _) - } -} diff --git a/core/Pure/Tools/task_statistics.scala b/core/Pure/Tools/task_statistics.scala deleted file mode 100644 index 82519d85..00000000 --- a/core/Pure/Tools/task_statistics.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* Title: Pure/Tools/task_statistics.scala - Author: Makarius - -Future task runtime statistics. -*/ - -package isabelle - - -import scala.swing.{Frame, Component} - -import org.jfree.data.statistics.HistogramDataset -import org.jfree.chart.{JFreeChart, ChartPanel, ChartFactory} -import org.jfree.chart.plot.{XYPlot, PlotOrientation} -import org.jfree.chart.renderer.xy.{XYBarRenderer, StandardXYBarPainter} - - -object Task_Statistics -{ - def apply(name: String, tasks: List[Properties.T]): Task_Statistics = - new Task_Statistics(name, tasks) - - def apply(info: Build.Log_Info): Task_Statistics = - apply(info.name, info.tasks) -} - -final class Task_Statistics private(val name: String, val tasks: List[Properties.T]) -{ - private val Task_Name = new Properties.String("task_name") - private val Run = new Properties.Int("run") - - def chart(bins: Int = 100): JFreeChart = - { - val values = new Array[Double](tasks.length) - for ((Run(x), i) <- tasks.iterator.zipWithIndex) - values(i) = java.lang.Math.log10((x max 1).toDouble / 1000000) - - val data = new HistogramDataset - data.addSeries("tasks", values, bins) - - val c = - ChartFactory.createHistogram("Task runtime distribution", - "log10(runtime / s)", "number of tasks", data, - PlotOrientation.VERTICAL, true, true, true) - - val renderer = c.getPlot.asInstanceOf[XYPlot].getRenderer.asInstanceOf[XYBarRenderer] - renderer.setMargin(0.1) - renderer.setBarPainter(new StandardXYBarPainter) - - c - } - - def show_frame(bins: Int = 100): Unit = - GUI_Thread.later { - new Frame { - iconImage = GUI.isabelle_image() - title = name - contents = Component.wrap(new ChartPanel(chart(bins))) - visible = true - } - } -} - diff --git a/core/Pure/assumption.ML b/core/Pure/assumption.ML deleted file mode 100644 index bbab9965..00000000 --- a/core/Pure/assumption.ML +++ /dev/null @@ -1,128 +0,0 @@ -(* Title: Pure/assumption.ML - Author: Makarius - -Context assumptions, parameterized by export rules. 
-*) - -signature ASSUMPTION = -sig - type export = bool -> cterm list -> (thm -> thm) * (term -> term) - val assume_export: export - val presume_export: export - val assume: Proof.context -> cterm -> thm - val assume_hyps: cterm -> Proof.context -> thm * Proof.context - val all_assms_of: Proof.context -> cterm list - val all_prems_of: Proof.context -> thm list - val local_assms_of: Proof.context -> Proof.context -> cterm list - val local_prems_of: Proof.context -> Proof.context -> thm list - val add_assms: export -> cterm list -> Proof.context -> thm list * Proof.context - val add_assumes: cterm list -> Proof.context -> thm list * Proof.context - val export: bool -> Proof.context -> Proof.context -> thm -> thm - val export_term: Proof.context -> Proof.context -> term -> term - val export_morphism: Proof.context -> Proof.context -> morphism -end; - -structure Assumption: ASSUMPTION = -struct - -(** basic rules **) - -type export = bool -> cterm list -> (thm -> thm) * (term -> term); - -(* - [A] - : - B - -------- - #A ==> B -*) -fun assume_export is_goal asms = - (if is_goal then Drule.implies_intr_protected asms else Drule.implies_intr_list asms, fn t => t); - -(* - [A] - : - B - ------- - A ==> B -*) -fun presume_export _ = assume_export false; - - -fun assume ctxt = Raw_Simplifier.norm_hhf ctxt o Thm.assume; - -fun assume_hyps ct ctxt = - let val (th, ctxt') = Thm.assume_hyps ct ctxt - in (Raw_Simplifier.norm_hhf ctxt' th, ctxt') end; - - - -(** local context data **) - -datatype data = Data of - {assms: (export * cterm list) list, (*assumes: A ==> _*) - prems: thm list}; (*prems: A |- norm_hhf A*) - -fun make_data (assms, prems) = Data {assms = assms, prems = prems}; - -structure Data = Proof_Data -( - type T = data; - fun init _ = make_data ([], []); -); - -fun map_data f = Data.map (fn Data {assms, prems} => make_data (f (assms, prems))); -fun rep_data ctxt = Data.get ctxt |> (fn Data rep => rep); - - -(* all assumptions *) - -val all_assumptions_of = #assms o rep_data; -val all_assms_of = maps #2 o all_assumptions_of; -val all_prems_of = #prems o rep_data; - - -(* local assumptions *) - -fun local_assumptions_of inner outer = - drop (length (all_assumptions_of outer)) (all_assumptions_of inner); - -val local_assms_of = maps #2 oo local_assumptions_of; - -fun local_prems_of inner outer = - drop (length (all_prems_of outer)) (all_prems_of inner); - - -(* add assumptions *) - -fun add_assms export new_asms ctxt = - let val (new_prems, ctxt') = fold_map assume_hyps new_asms ctxt in - ctxt' - |> map_data (fn (asms, prems) => (asms @ [(export, new_asms)], prems @ new_prems)) - |> pair new_prems - end; - -val add_assumes = add_assms assume_export; - - -(* export *) - -fun export is_goal inner outer = - Raw_Simplifier.norm_hhf_protect inner #> - fold_rev (fn (e, As) => #1 (e is_goal As)) (local_assumptions_of inner outer) #> - Raw_Simplifier.norm_hhf_protect outer; - -fun export_term inner outer = - fold_rev (fn (e, As) => #2 (e false As)) (local_assumptions_of inner outer); - -fun export_morphism inner outer = - let - val thm = export false inner outer; - val term = export_term inner outer; - val typ = Logic.type_map term; - in - Morphism.morphism "Assumption.export" - {binding = [], typ = [typ], term = [term], fact = [map thm]} - end; - -end; diff --git a/core/Pure/axclass.ML b/core/Pure/axclass.ML deleted file mode 100644 index cf2d6a60..00000000 --- a/core/Pure/axclass.ML +++ /dev/null @@ -1,622 +0,0 @@ -(* Title: Pure/axclass.ML - Author: Markus Wenzel, TU Muenchen - -Type classes 
defined as predicates, associated with a record of -parameters. Proven class relations and type arities. -*) - -signature AXCLASS = -sig - type info = {def: thm, intro: thm, axioms: thm list, params: (string * typ) list} - val get_info: theory -> class -> info - val class_of_param: theory -> string -> class option - val instance_name: string * class -> string - val thynames_of_arity: theory -> string * class -> string list - val param_of_inst: theory -> string * string -> string - val inst_of_param: theory -> string -> (string * string) option - val unoverload: theory -> thm -> thm - val overload: theory -> thm -> thm - val unoverload_conv: theory -> conv - val overload_conv: theory -> conv - val lookup_inst_param: Consts.T -> ((string * string) * 'a) list -> string * typ -> 'a option - val unoverload_const: theory -> string * typ -> string - val cert_classrel: theory -> class * class -> class * class - val read_classrel: theory -> xstring * xstring -> class * class - val declare_overloaded: string * typ -> theory -> term * theory - val define_overloaded: binding -> string * term -> theory -> thm * theory - val add_classrel: thm -> theory -> theory - val add_arity: thm -> theory -> theory - val prove_classrel: class * class -> (Proof.context -> tactic) -> theory -> theory - val prove_arity: string * sort list * sort -> (Proof.context -> tactic) -> theory -> theory - val define_class: binding * class list -> string list -> - (Thm.binding * term list) list -> theory -> class * theory - val classrel_axiomatization: (class * class) list -> theory -> theory - val arity_axiomatization: arity -> theory -> theory - val class_axiomatization: binding * class list -> theory -> theory -end; - -structure Axclass: AXCLASS = -struct - -(** theory data **) - -(* axclass info *) - -type info = - {def: thm, - intro: thm, - axioms: thm list, - params: (string * typ) list}; - -fun make_axclass (def, intro, axioms, params): info = - {def = def, intro = intro, axioms = axioms, params = params}; - - -(* class parameters (canonical order) *) - -type param = string * class; - -fun add_param ctxt ((x, c): param) params = - (case AList.lookup (op =) params x of - NONE => (x, c) :: params - | SOME c' => - error ("Duplicate class parameter " ^ quote x ^ " for " ^ Syntax.string_of_sort ctxt [c] ^ - (if c = c' then "" else " and " ^ Syntax.string_of_sort ctxt [c']))); - - -(* setup data *) - -datatype data = Data of - {axclasses: info Symtab.table, - params: param list, - proven_classrels: thm Symreltab.table, - proven_arities: ((class * sort list) * (thm * string)) list Symtab.table, - (*arity theorems with theory name*) - inst_params: - (string * thm) Symtab.table Symtab.table * - (*constant name ~> type constructor ~> (constant name, equation)*) - (string * string) Symtab.table (*constant name ~> (constant name, type constructor)*)}; - -fun make_data - (axclasses, params, proven_classrels, proven_arities, inst_params) = - Data {axclasses = axclasses, params = params, proven_classrels = proven_classrels, - proven_arities = proven_arities, inst_params = inst_params}; - -structure Data = Theory_Data_PP -( - type T = data; - val empty = - make_data (Symtab.empty, [], Symreltab.empty, Symtab.empty, (Symtab.empty, Symtab.empty)); - val extend = I; - fun merge pp - (Data {axclasses = axclasses1, params = params1, proven_classrels = proven_classrels1, - proven_arities = proven_arities1, inst_params = inst_params1}, - Data {axclasses = axclasses2, params = params2, proven_classrels = proven_classrels2, - proven_arities = 
proven_arities2, inst_params = inst_params2}) = - let - val ctxt = Syntax.init_pretty pp; - - val axclasses' = Symtab.merge (K true) (axclasses1, axclasses2); - val params' = - if null params1 then params2 - else - fold_rev (fn p => if member (op =) params1 p then I else add_param ctxt p) - params2 params1; - - (*see Theory.at_begin hook for transitive closure of classrels and arity completion*) - val proven_classrels' = Symreltab.merge (K true) (proven_classrels1, proven_classrels2); - val proven_arities' = Symtab.merge_list (eq_fst op =) (proven_arities1, proven_arities2); - - val inst_params' = - (Symtab.join (K (Symtab.merge (K true))) (#1 inst_params1, #1 inst_params2), - Symtab.merge (K true) (#2 inst_params1, #2 inst_params2)); - in - make_data (axclasses', params', proven_classrels', proven_arities', inst_params') - end; -); - -fun map_data f = - Data.map (fn Data {axclasses, params, proven_classrels, proven_arities, inst_params} => - make_data (f (axclasses, params, proven_classrels, proven_arities, inst_params))); - -fun map_axclasses f = - map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params) => - (f axclasses, params, proven_classrels, proven_arities, inst_params)); - -fun map_params f = - map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params) => - (axclasses, f params, proven_classrels, proven_arities, inst_params)); - -fun map_proven_classrels f = - map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params) => - (axclasses, params, f proven_classrels, proven_arities, inst_params)); - -fun map_proven_arities f = - map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params) => - (axclasses, params, proven_classrels, f proven_arities, inst_params)); - -fun map_inst_params f = - map_data (fn (axclasses, params, proven_classrels, proven_arities, inst_params) => - (axclasses, params, proven_classrels, proven_arities, f inst_params)); - -val rep_data = Data.get #> (fn Data args => args); - -val axclasses_of = #axclasses o rep_data; -val params_of = #params o rep_data; -val proven_classrels_of = #proven_classrels o rep_data; -val proven_arities_of = #proven_arities o rep_data; -val inst_params_of = #inst_params o rep_data; - - -(* axclasses with parameters *) - -fun get_info thy c = - (case Symtab.lookup (axclasses_of thy) c of - SOME info => info - | NONE => error ("No such axclass: " ^ quote c)); - -fun all_params_of thy S = - let val params = params_of thy; - in fold (fn (x, c) => if Sign.subsort thy (S, [c]) then cons x else I) params [] end; - -fun class_of_param thy = AList.lookup (op =) (params_of thy); - - -(* maintain instances *) - -val classrel_prefix = "classrel_"; -val arity_prefix = "arity_"; - -fun instance_name (a, c) = Long_Name.base_name c ^ "_" ^ Long_Name.base_name a; - - -val update_classrel = map_proven_classrels o Symreltab.update; - -val is_classrel = Symreltab.defined o proven_classrels_of; - -fun the_classrel thy (c1, c2) = - (case Symreltab.lookup (proven_classrels_of thy) (c1, c2) of - SOME thm => Thm.transfer thy thm - | NONE => error ("Unproven class relation " ^ - Syntax.string_of_classrel (Proof_Context.init_global thy) [c1, c2])); - -fun complete_classrels thy = - let - fun complete (c, (_, (all_preds, all_succs))) (finished1, thy1) = - let - fun compl c1 c2 (finished2, thy2) = - if is_classrel thy2 (c1, c2) then (finished2, thy2) - else - (false, - thy2 - |> update_classrel ((c1, c2), - (the_classrel thy2 (c1, c) RS the_classrel thy2 (c, c2)) - |> 
Drule.instantiate' [SOME (ctyp_of thy2 (TVar ((Name.aT, 0), [])))] [] - |> Thm.close_derivation)); - - val proven = is_classrel thy1; - val preds = Graph.Keys.fold (fn c1 => proven (c1, c) ? cons c1) all_preds []; - val succs = Graph.Keys.fold (fn c2 => proven (c, c2) ? cons c2) all_succs []; - in - fold_product compl preds succs (finished1, thy1) - end; - in - (case Graph.fold complete (Sorts.classes_of (Sign.classes_of thy)) (true, thy) of - (true, _) => NONE - | (_, thy') => SOME thy') - end; - - -fun the_arity thy (a, Ss, c) = - (case AList.lookup (op =) (Symtab.lookup_list (proven_arities_of thy) a) (c, Ss) of - SOME (thm, _) => Thm.transfer thy thm - | NONE => error ("Unproven type arity " ^ - Syntax.string_of_arity (Proof_Context.init_global thy) (a, Ss, [c]))); - -fun thynames_of_arity thy (a, c) = - Symtab.lookup_list (proven_arities_of thy) a - |> map_filter (fn ((c', _), (_, name)) => if c = c' then SOME name else NONE) - |> rev; - -fun insert_arity_completions thy t ((c, Ss), ((th, thy_name))) (finished, arities) = - let - val algebra = Sign.classes_of thy; - val ars = Symtab.lookup_list arities t; - val super_class_completions = - Sign.super_classes thy c - |> filter_out (fn c1 => exists (fn ((c2, Ss2), _) => - c1 = c2 andalso Sorts.sorts_le algebra (Ss2, Ss)) ars); - - val names = Name.invent Name.context Name.aT (length Ss); - val std_vars = map (fn a => SOME (ctyp_of thy (TVar ((a, 0), [])))) names; - - val completions = super_class_completions |> map (fn c1 => - let - val th1 = - (th RS the_classrel thy (c, c1)) - |> Drule.instantiate' std_vars [] - |> Thm.close_derivation; - in ((th1, thy_name), c1) end); - - val finished' = finished andalso null completions; - val arities' = fold (fn (th, c1) => Symtab.cons_list (t, ((c1, Ss), th))) completions arities; - in (finished', arities') end; - -fun put_arity_completion ((t, Ss, c), th) thy = - let val ar = ((c, Ss), (th, Context.theory_name thy)) in - thy - |> map_proven_arities - (Symtab.insert_list (eq_fst op =) (t, ar) #> - curry (insert_arity_completions thy t ar) true #> #2) - end; - -fun complete_arities thy = - let - val arities = proven_arities_of thy; - val (finished, arities') = - Symtab.fold (fn (t, ars) => fold (insert_arity_completions thy t) ars) - arities (true, arities); - in - if finished then NONE - else SOME (map_proven_arities (K arities') thy) - end; - -val _ = Theory.setup - (Theory.at_begin complete_classrels #> Theory.at_begin complete_arities); - -val _ = Proofterm.install_axclass_proofs - {classrel_proof = Thm.proof_of oo the_classrel, - arity_proof = Thm.proof_of oo the_arity}; - - -(* maintain instance parameters *) - -fun get_inst_param thy (c, tyco) = - (case Symtab.lookup (the_default Symtab.empty (Symtab.lookup (#1 (inst_params_of thy)) c)) tyco of - SOME c' => c' - | NONE => error ("No instance parameter for constant " ^ quote c ^ " on type " ^ quote tyco)); - -fun add_inst_param (c, tyco) inst = - (map_inst_params o apfst o Symtab.map_default (c, Symtab.empty)) (Symtab.update_new (tyco, inst)) - #> (map_inst_params o apsnd) (Symtab.update_new (#1 inst, (c, tyco))); - -val inst_of_param = Symtab.lookup o #2 o inst_params_of; -val param_of_inst = #1 oo get_inst_param; - -fun inst_thms thy = - Symtab.fold (Symtab.fold (cons o #2 o #2) o #2) (#1 (inst_params_of thy)) []; - -fun get_inst_tyco consts = try (#1 o dest_Type o the_single o Consts.typargs consts); - -fun unoverload thy = - rewrite_rule (Proof_Context.init_global thy) (inst_thms thy); - -fun overload thy = - rewrite_rule 
(Proof_Context.init_global thy) (map Thm.symmetric (inst_thms thy)); - -fun unoverload_conv thy = - Raw_Simplifier.rewrite (Proof_Context.init_global thy) true (inst_thms thy); - -fun overload_conv thy = - Raw_Simplifier.rewrite (Proof_Context.init_global thy) true (map Thm.symmetric (inst_thms thy)); - -fun lookup_inst_param consts params (c, T) = - (case get_inst_tyco consts (c, T) of - SOME tyco => AList.lookup (op =) params (c, tyco) - | NONE => NONE); - -fun unoverload_const thy (c_ty as (c, _)) = - if is_some (class_of_param thy c) then - (case get_inst_tyco (Sign.consts_of thy) c_ty of - SOME tyco => try (param_of_inst thy) (c, tyco) |> the_default c - | NONE => c) - else c; - - - -(** instances **) - -(* class relations *) - -fun cert_classrel thy raw_rel = - let - val string_of_sort = Syntax.string_of_sort_global thy; - val (c1, c2) = pairself (Sign.certify_class thy) raw_rel; - val _ = Sign.primitive_classrel (c1, c2) thy; - val _ = - (case subtract (op =) (all_params_of thy [c1]) (all_params_of thy [c2]) of - [] => () - | xs => raise TYPE ("Class " ^ string_of_sort [c1] ^ " lacks parameter(s) " ^ - commas_quote xs ^ " of " ^ string_of_sort [c2], [], [])); - in (c1, c2) end; - -fun read_classrel thy raw_rel = - cert_classrel thy (pairself (Proof_Context.read_class (Proof_Context.init_global thy)) raw_rel) - handle TYPE (msg, _, _) => error msg; - - -(* declaration and definition of instances of overloaded constants *) - -fun inst_tyco_of thy (c, T) = - (case get_inst_tyco (Sign.consts_of thy) (c, T) of - SOME tyco => tyco - | NONE => error ("Illegal type for instantiation of class parameter: " ^ - quote (c ^ " :: " ^ Syntax.string_of_typ_global thy T))); - -fun declare_overloaded (c, T) thy = - let - val class = - (case class_of_param thy c of - SOME class => class - | NONE => error ("Not a class parameter: " ^ quote c)); - val tyco = inst_tyco_of thy (c, T); - val name_inst = instance_name (tyco, class) ^ "_inst"; - val c' = instance_name (tyco, c); - val T' = Type.strip_sorts T; - in - thy - |> Sign.qualified_path true (Binding.name name_inst) - |> Sign.declare_const_global ((Binding.name c', T'), NoSyn) - |-> (fn const' as Const (c'', _) => - Thm.add_def_global false true - (Binding.name (Thm.def_name c'), Logic.mk_equals (Const (c, T'), const')) - #>> apsnd Thm.varifyT_global - #-> (fn (_, thm) => add_inst_param (c, tyco) (c'', thm) - #> Global_Theory.add_thm ((Binding.conceal (Binding.name c'), thm), []) - #> #2 - #> pair (Const (c, T)))) - ||> Sign.restore_naming thy - end; - -fun define_overloaded b (c, t) thy = - let - val T = Term.fastype_of t; - val tyco = inst_tyco_of thy (c, T); - val (c', eq) = get_inst_param thy (c, tyco); - val prop = Logic.mk_equals (Const (c', T), t); - val b' = Thm.def_binding_optional (Binding.name (instance_name (tyco, c))) b; - in - thy - |> Thm.add_def_global false false (b', prop) - |>> (fn (_, thm) => Drule.transitive_thm OF [eq, thm]) - end; - - -(* primitive rules *) - -fun add_classrel raw_th thy = - let - val th = Thm.strip_shyps (Thm.transfer thy raw_th); - val prop = Thm.plain_prop_of th; - fun err () = raise THM ("add_classrel: malformed class relation", 0, [th]); - val rel = Logic.dest_classrel prop handle TERM _ => err (); - val (c1, c2) = cert_classrel thy rel handle TYPE _ => err (); - val binding = - Binding.conceal (Binding.name (prefix classrel_prefix (Logic.name_classrel (c1, c2)))); - val (th', thy') = Global_Theory.store_thm (binding, th) thy; - val th'' = th' - |> Thm.unconstrainT - |> Drule.instantiate' [SOME (ctyp_of 
thy' (TVar ((Name.aT, 0), [])))] []; - in - thy' - |> Sign.primitive_classrel (c1, c2) - |> map_proven_classrels (Symreltab.update ((c1, c2), th'')) - |> perhaps complete_classrels - |> perhaps complete_arities - end; - -fun add_arity raw_th thy = - let - val th = Thm.strip_shyps (Thm.transfer thy raw_th); - val prop = Thm.plain_prop_of th; - fun err () = raise THM ("add_arity: malformed type arity", 0, [th]); - val arity as (t, Ss, c) = Logic.dest_arity prop handle TERM _ => err (); - - val binding = - Binding.conceal (Binding.name (prefix arity_prefix (Logic.name_arity arity))); - val (th', thy') = Global_Theory.store_thm (binding, th) thy; - - val args = Name.invent_names Name.context Name.aT Ss; - val T = Type (t, map TFree args); - val std_vars = map (fn (a, S) => SOME (ctyp_of thy' (TVar ((a, 0), [])))) args; - - val missing_params = Sign.complete_sort thy' [c] - |> maps (these o Option.map #params o try (get_info thy')) - |> filter_out (fn (const, _) => can (get_inst_param thy') (const, t)) - |> (map o apsnd o map_atyps) (K T); - val th'' = th' - |> Thm.unconstrainT - |> Drule.instantiate' std_vars []; - in - thy' - |> fold (#2 oo declare_overloaded) missing_params - |> Sign.primitive_arity (t, Ss, [c]) - |> put_arity_completion ((t, Ss, c), th'') - end; - - -(* tactical proofs *) - -fun prove_classrel raw_rel tac thy = - let - val ctxt = Proof_Context.init_global thy; - val (c1, c2) = cert_classrel thy raw_rel; - val th = - Goal.prove ctxt [] [] (Logic.mk_classrel (c1, c2)) (fn {context, ...} => tac context) - handle ERROR msg => - cat_error msg ("The error(s) above occurred while trying to prove class relation " ^ - quote (Syntax.string_of_classrel ctxt [c1, c2])); - in - thy |> add_classrel th - end; - -fun prove_arity raw_arity tac thy = - let - val ctxt = Proof_Context.init_global thy; - val arity = Proof_Context.cert_arity ctxt raw_arity; - val names = map (prefix arity_prefix) (Logic.name_arities arity); - val props = Logic.mk_arities arity; - val ths = - Goal.prove_multi ctxt [] [] props - (fn {context, ...} => Goal.precise_conjunction_tac (length props) 1 THEN tac context) - handle ERROR msg => - cat_error msg ("The error(s) above occurred while trying to prove type arity " ^ - quote (Syntax.string_of_arity ctxt arity)); - in - thy |> fold add_arity ths - end; - - - -(** class definitions **) - -fun split_defined n eq = - let - val intro = - (eq RS Drule.equal_elim_rule2) - |> Conjunction.curry_balanced n - |> n = 0 ? 
Thm.eq_assumption 1; - val dests = - if n = 0 then [] - else - (eq RS Drule.equal_elim_rule1) - |> Balanced_Tree.dest (fn th => - (th RS Conjunction.conjunctionD1, th RS Conjunction.conjunctionD2)) n; - in (intro, dests) end; - -fun define_class (bclass, raw_super) raw_params raw_specs thy = - let - val ctxt = Syntax.init_pretty_global thy; - - - (* class *) - - val bconst = Binding.map_name Logic.const_of_class bclass; - val class = Sign.full_name thy bclass; - val super = Sign.minimize_sort thy (Sign.certify_sort thy raw_super); - - fun check_constraint (a, S) = - if Sign.subsort thy (super, S) then () - else error ("Sort constraint of type variable " ^ - Syntax.string_of_typ (Config.put show_sorts true ctxt) (TFree (a, S)) ^ - " needs to be weaker than " ^ Syntax.string_of_sort ctxt super); - - - (* params *) - - val params = raw_params |> map (fn p => - let - val T = Sign.the_const_type thy p; - val _ = - (case Term.add_tvarsT T [] of - [((a, _), S)] => check_constraint (a, S) - | _ => error ("Exactly one type variable expected in class parameter " ^ quote p)); - val T' = Term.map_type_tvar (fn _ => TFree (Name.aT, [class])) T; - in (p, T') end); - - - (* axioms *) - - fun prep_axiom t = - (case Term.add_tfrees t [] of - [(a, S)] => check_constraint (a, S) - | [] => () - | _ => error ("Multiple type variables in class axiom:\n" ^ Syntax.string_of_term ctxt t); - t - |> Term.map_types (Term.map_atyps (fn TFree _ => Term.aT [] | U => U)) - |> Logic.close_form); - - val axiomss = map (map (prep_axiom o Sign.cert_prop thy) o snd) raw_specs; - val name_atts = map fst raw_specs; - - - (* definition *) - - val conjs = Logic.mk_of_sort (Term.aT [], super) @ flat axiomss; - val class_eq = - Logic.mk_equals (Logic.mk_of_class (Term.aT [], class), Logic.mk_conjunction_balanced conjs); - - val ([def], def_thy) = - thy - |> Sign.primitive_class (bclass, super) - |> Global_Theory.add_defs false [((Thm.def_binding bconst, class_eq), [])]; - val (raw_intro, (raw_classrel, raw_axioms)) = - split_defined (length conjs) def ||> chop (length super); - - - (* facts *) - - val class_triv = Thm.class_triv def_thy class; - val ([(_, [intro]), (_, classrel), (_, axioms)], facts_thy) = - def_thy - |> Sign.qualified_path true bconst - |> Global_Theory.note_thmss "" - [((Binding.name "intro", []), [([Drule.export_without_context raw_intro], [])]), - ((Binding.name "super", []), [(map Drule.export_without_context raw_classrel, [])]), - ((Binding.name "axioms", []), - [(map (fn th => Drule.export_without_context (class_triv RS th)) raw_axioms, [])])] - ||> Sign.restore_naming def_thy; - - - (* result *) - - val axclass = make_axclass (def, intro, axioms, params); - val result_thy = - facts_thy - |> map_proven_classrels - (fold2 (fn c => fn th => Symreltab.update ((class, c), th)) super classrel) - |> perhaps complete_classrels - |> Sign.qualified_path false bconst - |> Global_Theory.note_thmss "" (name_atts ~~ map Thm.simple_fact (unflat axiomss axioms)) - |> #2 - |> Sign.restore_naming facts_thy - |> map_axclasses (Symtab.update (class, axclass)) - |> map_params (fold (fn (x, _) => add_param ctxt (x, class)) params); - - in (class, result_thy) end; - - - -(** axiomatizations **) - -local - -(*old-style axioms*) -fun add_axioms prep mk name add raw_args thy = - let - val args = prep thy raw_args; - val specs = mk args; - val names = name args; - in - thy - |> fold_map Thm.add_axiom_global (map Binding.name names ~~ specs) - |-> fold (add o Drule.export_without_context o snd) - end; - -fun class_const c = - 
(Logic.const_of_class c, Term.itselfT (Term.aT []) --> propT); - -in - -val classrel_axiomatization = - add_axioms (map o cert_classrel) (map Logic.mk_classrel) - (map (prefix classrel_prefix o Logic.name_classrel)) add_classrel; - -val arity_axiomatization = - add_axioms (Proof_Context.cert_arity o Proof_Context.init_global) Logic.mk_arities - (map (prefix arity_prefix) o Logic.name_arities) add_arity; - -fun class_axiomatization (bclass, raw_super) thy = - let - val class = Sign.full_name thy bclass; - val super = map (Sign.certify_class thy) raw_super |> Sign.minimize_sort thy; - in - thy - |> Sign.primitive_class (bclass, super) - |> classrel_axiomatization (map (fn c => (class, c)) super) - |> Theory.add_deps_global "" (class_const class) (map class_const super) - end; - -end; - -end; diff --git a/core/Pure/build b/core/Pure/build deleted file mode 100755 index e7bfeb8a..00000000 --- a/core/Pure/build +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env bash -# -# Author: Makarius -# -# build - build Isabelle/ML -# -# Requires proper Isabelle settings environment. - - -## diagnostics - -function usage() -{ - echo - echo "Usage: $PRG TARGET [OUTPUT]" - echo - exit 1 -} - -function fail() -{ - echo "$1" >&2 - exit 2 -} - -[ -z "$ISABELLE_HOME" ] && fail "Missing Isabelle settings environment" - - -## process command line - -# args - -if [ "$#" -eq 1 ]; then - TARGET="$1"; shift - OUTPUT=""; shift -elif [ "$#" -eq 2 ]; then - TARGET="$1"; shift - OUTPUT="$1"; shift -else - usage -fi - - -## main - -# get compatibility file - -ML_SYSTEM_BASE=$(echo "$ML_SYSTEM" | cut -f1 -d-) -[ -z "$ML_SYSTEM" ] && fail "Missing ML_SYSTEM settings!" - -COMPAT="" -[ -f "ML-Systems/${ML_SYSTEM_BASE}.ML" ] && COMPAT="ML-Systems/${ML_SYSTEM_BASE}.ML" -[ -f "ML-Systems/${ML_SYSTEM}.ML" ] && COMPAT="ML-Systems/${ML_SYSTEM}.ML" -[ -z "$COMPAT" ] && fail "Missing compatibility file for ML system \"$ML_SYSTEM\"!" - - -# run isabelle - -. "$ISABELLE_HOME/lib/scripts/timestart.bash" - -if [ "$TARGET" = RAW ]; then - if [ -z "$OUTPUT" ]; then - "$ISABELLE_PROCESS" \ - -e "use\"$COMPAT\" handle _ => Posix.Process.exit 0w1;" \ - -q RAW_ML_SYSTEM - else - "$ISABELLE_PROCESS" \ - -e "use\"$COMPAT\" handle _ => Posix.Process.exit 0w1;" \ - -e "structure Isar = struct fun main () = () end;" \ - -e "ml_prompts \"ML> \" \"ML# \";" \ - -q -w RAW_ML_SYSTEM "$OUTPUT" - fi -else - if [ -z "$OUTPUT" ]; then - "$ISABELLE_PROCESS" \ - -e "(use\"$COMPAT\"; use\"ROOT.ML\") handle _ => Posix.Process.exit 0w1;" \ - -q RAW_ML_SYSTEM - else - "$ISABELLE_PROCESS" \ - -e "(use\"$COMPAT\"; use\"ROOT.ML\") handle _ => Posix.Process.exit 0w1;" \ - -e "ml_prompts \"ML> \" \"ML# \";" \ - -e "Command_Line.tool0 Session.finish;" \ - -e "Options.reset_default ();" \ - -q -w RAW_ML_SYSTEM "$OUTPUT" - fi -fi - -RC="$?" - -. "$ISABELLE_HOME/lib/scripts/timestop.bash" - -if [ "$RC" -eq 0 ]; then - echo "Finished $TARGET ($TIMES_REPORT)" >&2 -fi - -exit "$RC" diff --git a/core/Pure/build-jars b/core/Pure/build-jars deleted file mode 100755 index f6e993f2..00000000 --- a/core/Pure/build-jars +++ /dev/null @@ -1,256 +0,0 @@ -#!/usr/bin/env bash -# -# Author: Makarius -# -# build-jars - build Isabelle/Scala -# -# Requires proper Isabelle settings environment. 
- -## sources - -declare -a SOURCES=( - Concurrent/consumer_thread.scala - Concurrent/counter.scala - Concurrent/event_timer.scala - Concurrent/future.scala - Concurrent/mailbox.scala - Concurrent/simple_thread.scala - Concurrent/synchronized.scala - GUI/color_value.scala - GUI/gui.scala - GUI/gui_thread.scala - GUI/html5_panel.scala - GUI/jfx_thread.scala - GUI/popup.scala - GUI/system_dialog.scala - GUI/wrap_panel.scala - General/antiquote.scala - General/bytes.scala - General/completion.scala - General/exn.scala - General/file.scala - General/graph.scala - General/graphics_file.scala - General/linear_set.scala - General/long_name.scala - General/multi_map.scala - General/output.scala - General/path.scala - General/position.scala - General/pretty.scala - General/properties.scala - General/scan.scala - General/sha1.scala - General/symbol.scala - General/time.scala - General/timing.scala - General/untyped.scala - General/url.scala - General/word.scala - General/xz_file.scala - Isar/keyword.scala - Isar/outer_syntax.scala - Isar/parse.scala - Isar/token.scala - ML/ml_lex.scala - PIDE/command.scala - PIDE/document.scala - PIDE/document_id.scala - PIDE/editor.scala - PIDE/markup.scala - PIDE/markup_tree.scala - PIDE/protocol.scala - PIDE/prover.scala - PIDE/query_operation.scala - PIDE/resources.scala - PIDE/session.scala - PIDE/text.scala - PIDE/xml.scala - PIDE/yxml.scala - ROOT.scala - System/command_line.scala - System/invoke_scala.scala - System/isabelle_charset.scala - System/isabelle_font.scala - System/isabelle_process.scala - System/isabelle_system.scala - System/options.scala - System/platform.scala - System/posix_interrupt.scala - System/system_channel.scala - System/utf8.scala - Thy/html.scala - Thy/present.scala - Thy/thy_header.scala - Thy/thy_info.scala - Thy/thy_syntax.scala - Tools/build.scala - Tools/build_console.scala - Tools/build_doc.scala - Tools/check_source.scala - Tools/doc.scala - Tools/keywords.scala - Tools/main.scala - Tools/ml_statistics.scala - Tools/print_operation.scala - Tools/simplifier_trace.scala - Tools/task_statistics.scala - library.scala - term.scala - term_xml.scala - "../Tools/Graphview/src/graph_panel.scala" - "../Tools/Graphview/src/layout_pendulum.scala" - "../Tools/Graphview/src/main_panel.scala" - "../Tools/Graphview/src/model.scala" - "../Tools/Graphview/src/mutator_dialog.scala" - "../Tools/Graphview/src/mutator_event.scala" - "../Tools/Graphview/src/mutator.scala" - "../Tools/Graphview/src/popups.scala" - "../Tools/Graphview/src/shapes.scala" - "../Tools/Graphview/src/visualizer.scala" -) - - -## diagnostics - -PRG="$(basename "$0")" - -function usage() -{ - echo - echo "Usage: isabelle $PRG [OPTIONS]" - echo - echo " Options are:" - echo " -f fresh build" - echo " -t test separate compilation of PIDE" - echo - exit 1 -} - -function fail() -{ - echo "$1" >&2 - exit 2 -} - -[ -z "$ISABELLE_HOME" ] && fail "Missing Isabelle settings environment" - - -## process command line - -# options - -FRESH="" -TEST_PIDE="" - -while getopts "ft" OPT -do - case "$OPT" in - f) - FRESH=true - ;; - t) - TEST_PIDE=true - ;; - \?) 
- usage - ;; - esac -done - -shift $(($OPTIND - 1)) - - -# args - -[ "$#" -ne 0 ] && usage - - -## build - -TARGET_DIR="$ISABELLE_HOME/lib/classes" -TARGET="$TARGET_DIR/Pure.jar" - -declare -a PIDE_SOURCES=() -declare -a PURE_SOURCES=() - -for DEP in "${SOURCES[@]}" -do - if grep "Module:.*PIDE" "$DEP" >/dev/null - then - PIDE_SOURCES["${#PIDE_SOURCES[@]}"]="$DEP" - else - PURE_SOURCES["${#PURE_SOURCES[@]}"]="$DEP" - fi -done - -declare -a UPDATED=() - -if [ -n "$FRESH" ]; then - OUTDATED=true -else - OUTDATED=false - if [ ! -e "$TARGET" ]; then - OUTDATED=true - else - for DEP in "${SOURCES[@]}" - do - [ ! -e "$DEP" ] && fail "Missing file: $DEP" - [ "$DEP" -nt "$TARGET" ] && { - OUTDATED=true - UPDATED["${#UPDATED[@]}"]="$DEP" - } - done - fi -fi - -if [ "$OUTDATED" = true ] -then - echo "### Building Isabelle/Scala ..." - - [ "${#UPDATED[@]}" -gt 0 ] && { - echo "Changed files:" - for FILE in "${UPDATED[@]}" - do - echo " $FILE" - done - } - - rm -rf classes && mkdir classes - - SCALAC_OPTIONS="$ISABELLE_SCALA_BUILD_OPTIONS -d classes" - - ( - classpath "$ISABELLE_JDK_HOME/jre/lib/jfxrt.jar" - classpath classes - export CLASSPATH="$(jvmpath "$ISABELLE_CLASSPATH")" - - if [ "$TEST_PIDE" = true ]; then - isabelle_scala scalac $SCALAC_OPTIONS "${PIDE_SOURCES[@]}" || \ - fail "Failed to compile PIDE sources" - isabelle_scala scalac $SCALAC_OPTIONS "${PURE_SOURCES[@]}" || \ - fail "Failed to compile Pure sources" - else - isabelle_scala scalac $SCALAC_OPTIONS "${PIDE_SOURCES[@]}" "${PURE_SOURCES[@]}" || \ - fail "Failed to compile sources" - fi - ) || exit "$?" - - mkdir -p "$TARGET_DIR" || fail "Failed to create directory $TARGET_DIR" - - pushd classes >/dev/null - - CHARSET_SERVICE="META-INF/services/java.nio.charset.spi.CharsetProvider" - mkdir -p "$(dirname "$CHARSET_SERVICE")" - echo isabelle.Isabelle_Charset_Provider > "$CHARSET_SERVICE" - - cp "$ISABELLE_HOME/lib/logo/isabelle_transparent-32.gif" isabelle/. - cp "$ISABELLE_HOME/lib/logo/isabelle_transparent.gif" isabelle/. - - isabelle_jdk jar cfe "$(jvmpath "$TARGET")" isabelle.Main META-INF isabelle || \ - fail "Failed to produce $TARGET" - - popd >/dev/null - - rm -rf classes -fi diff --git a/core/Pure/config.ML b/core/Pure/config.ML deleted file mode 100644 index ecb390f1..00000000 --- a/core/Pure/config.ML +++ /dev/null @@ -1,171 +0,0 @@ -(* Title: Pure/config.ML - Author: Makarius - -Configuration options as values within the local context. 
-*) - -signature CONFIG = -sig - datatype value = Bool of bool | Int of int | Real of real | String of string - val print_value: value -> string - val print_type: value -> string - type 'a T - type raw = value T - val bool: raw -> bool T - val int: raw -> int T - val real: raw -> real T - val string: raw -> string T - val get: Proof.context -> 'a T -> 'a - val map: 'a T -> ('a -> 'a) -> Proof.context -> Proof.context - val put: 'a T -> 'a -> Proof.context -> Proof.context - val get_global: theory -> 'a T -> 'a - val map_global: 'a T -> ('a -> 'a) -> theory -> theory - val put_global: 'a T -> 'a -> theory -> theory - val get_generic: Context.generic -> 'a T -> 'a - val map_generic: 'a T -> ('a -> 'a) -> Context.generic -> Context.generic - val put_generic: 'a T -> 'a -> Context.generic -> Context.generic - val declare: string * Position.T -> (Context.generic -> value) -> raw - val declare_global: string * Position.T -> (Context.generic -> value) -> raw - val declare_option: string * Position.T -> raw - val declare_option_global: string * Position.T -> raw - val name_of: 'a T -> string - val pos_of: 'a T -> Position.T -end; - -structure Config: CONFIG = -struct - -(* simple values *) - -datatype value = - Bool of bool | - Int of int | - Real of real | - String of string; - -fun print_value (Bool true) = "true" - | print_value (Bool false) = "false" - | print_value (Int i) = signed_string_of_int i - | print_value (Real x) = Markup.print_real x - | print_value (String s) = quote s; - -fun print_type (Bool _) = "bool" - | print_type (Int _) = "int" - | print_type (Real _) = "real" - | print_type (String _) = "string"; - -fun same_type (Bool _) (Bool _) = true - | same_type (Int _) (Int _) = true - | same_type (Real _) (Real _) = true - | same_type (String _) (String _) = true - | same_type _ _ = false; - -fun type_check (name, pos) f value = - let - val value' = f value; - val _ = same_type value value' orelse - error ("Ill-typed configuration option " ^ quote name ^ Position.here pos ^ ": " ^ - print_type value ^ " expected,\nbut " ^ print_type value' ^ " was found"); - in value' end; - - -(* abstract configuration options *) - -datatype 'a T = Config of - {name: string, - pos: Position.T, - get_value: Context.generic -> 'a, - map_value: ('a -> 'a) -> Context.generic -> Context.generic}; - -type raw = value T; - -fun coerce make dest (Config {name, pos, get_value, map_value}) = Config - {name = name, - pos = pos, - get_value = dest o get_value, - map_value = fn f => map_value (make o f o dest)}; - -val bool = coerce Bool (fn Bool b => b); -val int = coerce Int (fn Int i => i); -val real = coerce Real (fn Real x => x); -val string = coerce String (fn String s => s); - -fun get_generic context (Config {get_value, ...}) = get_value context; -fun map_generic (Config {map_value, ...}) f context = map_value f context; -fun put_generic config value = map_generic config (K value); - -fun get_ctxt ctxt = get_generic (Context.Proof ctxt); -fun map_ctxt config f = Context.proof_map (map_generic config f); -fun put_ctxt config value = map_ctxt config (K value); - -fun get_global thy = get_generic (Context.Theory thy); -fun map_global config f = Context.theory_map (map_generic config f); -fun put_global config value = map_global config (K value); - - -(* context information *) - -structure Value = Generic_Data -( - type T = value Inttab.table; - val empty = Inttab.empty; - val extend = I; - fun merge data = Inttab.merge (K true) data; -); - -local - -fun declare_generic global (name, pos) default = - let - 
val id = serial (); - - fun get_value context = - (case Inttab.lookup (Value.get context) id of - SOME value => value - | NONE => default context); - - fun update_value f context = - Value.map (Inttab.update (id, type_check (name, pos) f (get_value context))) context; - - fun map_value f (context as Context.Proof ctxt) = - let val context' = update_value f context in - if global andalso - Context_Position.is_really_visible ctxt andalso - print_value (get_value (Context.Theory (Context.theory_of context'))) <> - print_value (get_value context') - then (warning ("Ignoring local change of global option " ^ quote name); context) - else context' - end - | map_value f context = update_value f context; - in Config {name = name, pos = pos, get_value = get_value, map_value = map_value} end; - -fun declare_option_generic global (name, pos) = - let - val typ = Options.default_typ name; - val default = - if typ = Options.boolT then fn _ => Bool (Options.default_bool name) - else if typ = Options.intT then fn _ => Int (Options.default_int name) - else if typ = Options.realT then fn _ => Real (Options.default_real name) - else if typ = Options.stringT then fn _ => String (Options.default_string name) - else error ("Unknown type for option " ^ quote name ^ Position.here pos ^ " : " ^ quote typ); - in declare_generic global (name, pos) default end; - -in - -val declare = declare_generic false; -val declare_global = declare_generic true; -val declare_option = declare_option_generic false; -val declare_option_global = declare_option_generic true; - -end; - -fun name_of (Config {name, ...}) = name; -fun pos_of (Config {pos, ...}) = pos; - - -(*final declarations of this structure!*) -val get = get_ctxt; -val map = map_ctxt; -val put = put_ctxt; - -end; diff --git a/core/Pure/conjunction.ML b/core/Pure/conjunction.ML deleted file mode 100644 index beef7180..00000000 --- a/core/Pure/conjunction.ML +++ /dev/null @@ -1,185 +0,0 @@ -(* Title: Pure/conjunction.ML - Author: Makarius - -Meta-level conjunction. 
-*) - -signature CONJUNCTION = -sig - val conjunction: cterm - val mk_conjunction: cterm * cterm -> cterm - val mk_conjunction_balanced: cterm list -> cterm - val dest_conjunction: cterm -> cterm * cterm - val dest_conjunctions: cterm -> cterm list - val cong: thm -> thm -> thm - val convs: (cterm -> thm) -> cterm -> thm - val conjunctionD1: thm - val conjunctionD2: thm - val conjunctionI: thm - val intr: thm -> thm -> thm - val intr_balanced: thm list -> thm - val elim: thm -> thm * thm - val elim_balanced: int -> thm -> thm list - val curry_balanced: int -> thm -> thm - val uncurry_balanced: int -> thm -> thm -end; - -structure Conjunction: CONJUNCTION = -struct - -(** abstract syntax **) - -fun certify t = Thm.cterm_of (Context.the_theory (Context.the_thread_data ())) t; -val read_prop = certify o Simple_Syntax.read_prop; - -val true_prop = certify Logic.true_prop; -val conjunction = certify Logic.conjunction; - -fun mk_conjunction (A, B) = Thm.apply (Thm.apply conjunction A) B; - -fun mk_conjunction_balanced [] = true_prop - | mk_conjunction_balanced ts = Balanced_Tree.make mk_conjunction ts; - -fun dest_conjunction ct = - (case Thm.term_of ct of - (Const ("Pure.conjunction", _) $ _ $ _) => Thm.dest_binop ct - | _ => raise TERM ("dest_conjunction", [Thm.term_of ct])); - -fun dest_conjunctions ct = - (case try dest_conjunction ct of - NONE => [ct] - | SOME (A, B) => dest_conjunctions A @ dest_conjunctions B); - - - -(** derived rules **) - -(* conversion *) - -val cong = Thm.combination o Thm.combination (Thm.reflexive conjunction); - -fun convs cv ct = - (case try dest_conjunction ct of - NONE => cv ct - | SOME (A, B) => cong (convs cv A) (convs cv B)); - - -(* intro/elim *) - -local - -val A = read_prop "A" and vA = read_prop "?A"; -val B = read_prop "B" and vB = read_prop "?B"; -val C = read_prop "C"; -val ABC = read_prop "A ==> B ==> C"; -val A_B = read_prop "A &&& B"; - -val conjunction_def = - Thm.unvarify_global - (Thm.axiom (Context.the_theory (Context.the_thread_data ())) "Pure.conjunction_def"); - -fun conjunctionD which = - Drule.implies_intr_list [A, B] (Thm.assume (which (A, B))) COMP - Thm.forall_elim_vars 0 (Thm.equal_elim conjunction_def (Thm.assume A_B)); - -in - -val conjunctionD1 = - Drule.store_standard_thm (Binding.make ("conjunctionD1", @{here})) (conjunctionD #1); - -val conjunctionD2 = - Drule.store_standard_thm (Binding.make ("conjunctionD2", @{here})) (conjunctionD #2); - -val conjunctionI = - Drule.store_standard_thm (Binding.make ("conjunctionI", @{here})) - (Drule.implies_intr_list [A, B] - (Thm.equal_elim - (Thm.symmetric conjunction_def) - (Thm.forall_intr C (Thm.implies_intr ABC - (Drule.implies_elim_list (Thm.assume ABC) [Thm.assume A, Thm.assume B]))))); - - -fun intr tha thb = - Thm.implies_elim - (Thm.implies_elim - (Thm.instantiate ([], [(vA, Thm.cprop_of tha), (vB, Thm.cprop_of thb)]) conjunctionI) - tha) - thb; - -fun elim th = - let - val (A, B) = dest_conjunction (Thm.cprop_of th) - handle TERM (msg, _) => raise THM (msg, 0, [th]); - val inst = Thm.instantiate ([], [(vA, A), (vB, B)]); - in - (Thm.implies_elim (inst conjunctionD1) th, - Thm.implies_elim (inst conjunctionD2) th) - end; - -end; - - -(* balanced conjuncts *) - -fun intr_balanced [] = asm_rl - | intr_balanced ths = Balanced_Tree.make (uncurry intr) ths; - -fun elim_balanced 0 _ = [] - | elim_balanced n th = Balanced_Tree.dest elim n th; - - -(* currying *) - -local - -fun conjs thy n = - let val As = map (fn A => Thm.cterm_of thy (Free (A, propT))) (Name.invent Name.context "A" n) - 
in (As, mk_conjunction_balanced As) end; - -val B = read_prop "B"; - -fun comp_rule th rule = - Thm.adjust_maxidx_thm ~1 (th COMP - (rule |> Thm.forall_intr_frees |> Thm.forall_elim_vars (Thm.maxidx_of th + 1))); - -in - -(* - A1 &&& ... &&& An ==> B - ----------------------- - A1 ==> ... ==> An ==> B -*) -fun curry_balanced n th = - if n < 2 then th - else - let - val thy = Thm.theory_of_thm th; - val (As, C) = conjs thy n; - val D = Drule.mk_implies (C, B); - in - comp_rule th - (Thm.implies_elim (Thm.assume D) (intr_balanced (map Thm.assume As)) - |> Drule.implies_intr_list (D :: As)) - end; - -(* - A1 ==> ... ==> An ==> B - ----------------------- - A1 &&& ... &&& An ==> B -*) -fun uncurry_balanced n th = - if n < 2 then th - else - let - val thy = Thm.theory_of_thm th; - val (As, C) = conjs thy n; - val D = Drule.list_implies (As, B); - in - comp_rule th - (Drule.implies_elim_list (Thm.assume D) (elim_balanced n (Thm.assume C)) - |> Drule.implies_intr_list [D, C]) - end; - -end; - -end; diff --git a/core/Pure/consts.ML b/core/Pure/consts.ML deleted file mode 100644 index 30949297..00000000 --- a/core/Pure/consts.ML +++ /dev/null @@ -1,328 +0,0 @@ -(* Title: Pure/consts.ML - Author: Makarius - -Polymorphic constants: declarations, abbreviations, additional type -constraints. -*) - -signature CONSTS = -sig - type T - val eq_consts: T * T -> bool - val change_base: bool -> T -> T - val change_ignore: T -> T - val retrieve_abbrevs: T -> string list -> term -> (term * term) list - val dest: T -> - {const_space: Name_Space.T, - constants: (string * (typ * term option)) list, - constraints: (string * typ) list} - val the_const: T -> string -> string * typ (*exception TYPE*) - val the_abbreviation: T -> string -> typ * term (*exception TYPE*) - val type_scheme: T -> string -> typ (*exception TYPE*) - val is_monomorphic: T -> string -> bool (*exception TYPE*) - val the_constraint: T -> string -> typ (*exception TYPE*) - val space_of: T -> Name_Space.T - val alias: Name_Space.naming -> binding -> string -> T -> T - val is_concealed: T -> string -> bool - val intern: T -> xstring -> string - val intern_syntax: T -> xstring -> string - val check_const: Context.generic -> T -> xstring * Position.T list -> term * Position.report list - val certify: Context.pretty -> Type.tsig -> bool -> T -> term -> term (*exception TYPE*) - val typargs: T -> string * typ -> typ list - val instance: T -> string * typ list -> typ - val declare: Context.generic -> binding * typ -> T -> T - val constrain: string * typ option -> T -> T - val abbreviate: Context.generic -> Type.tsig -> string -> binding * term -> T -> (term * term) * T - val revert_abbrev: string -> string -> T -> T - val hide: bool -> string -> T -> T - val empty: T - val merge: T * T -> T -end; - -structure Consts: CONSTS = -struct - -(** consts type **) - -(* datatype T *) - -type decl = {T: typ, typargs: int list list}; -type abbrev = {rhs: term, normal_rhs: term, force_expand: bool}; - -datatype T = Consts of - {decls: (decl * abbrev option) Name_Space.table, - constraints: typ Symtab.table, - rev_abbrevs: (term * term) Item_Net.T Symtab.table}; - -fun eq_consts - (Consts {decls = decls1, constraints = constraints1, rev_abbrevs = rev_abbrevs1}, - Consts {decls = decls2, constraints = constraints2, rev_abbrevs = rev_abbrevs2}) = - pointer_eq (decls1, decls2) andalso - pointer_eq (constraints1, constraints2) andalso - pointer_eq (rev_abbrevs1, rev_abbrevs2); - -fun make_consts (decls, constraints, rev_abbrevs) = - Consts {decls = decls, constraints = 
constraints, rev_abbrevs = rev_abbrevs}; - -fun map_consts f (Consts {decls, constraints, rev_abbrevs}) = - make_consts (f (decls, constraints, rev_abbrevs)); - -fun change_base begin = map_consts (fn (decls, constraints, rev_abbrevs) => - (Name_Space.change_base begin decls, constraints, rev_abbrevs)); - -val change_ignore = map_consts (fn (decls, constraints, rev_abbrevs) => - (Name_Space.change_ignore decls, constraints, rev_abbrevs)); - - -(* reverted abbrevs *) - -val empty_abbrevs = - Item_Net.init (fn ((t, u), (t', u')) => t aconv t' andalso u aconv u') (single o #1); - -fun update_abbrevs mode abbrs = - Symtab.map_default (mode, empty_abbrevs) (Item_Net.update abbrs); - -fun retrieve_abbrevs (Consts {rev_abbrevs, ...}) modes = - let val nets = map_filter (Symtab.lookup rev_abbrevs) modes - in fn t => maps (fn net => Item_Net.retrieve net t) nets end; - - -(* dest consts *) - -fun dest (Consts {decls, constraints, ...}) = - {const_space = Name_Space.space_of_table decls, - constants = - Name_Space.fold_table (fn (c, ({T, ...}, abbr)) => - cons (c, (T, Option.map #rhs abbr))) decls [], - constraints = Symtab.dest constraints}; - - -(* lookup consts *) - -fun the_entry (Consts {decls, ...}) c = - (case Name_Space.lookup_key decls c of - SOME entry => entry - | NONE => raise TYPE ("Unknown constant: " ^ quote c, [], [])); - -fun the_const consts c = - (case the_entry consts c of - (c', ({T, ...}, NONE)) => (c', T) - | _ => raise TYPE ("Not a logical constant: " ^ quote c, [], [])); - -fun the_abbreviation consts c = - (case the_entry consts c of - (_, ({T, ...}, SOME {rhs, ...})) => (T, rhs) - | _ => raise TYPE ("Not an abbreviated constant: " ^ quote c, [], [])); - -fun the_decl consts = #1 o #2 o the_entry consts; -val type_scheme = #T oo the_decl; -val type_arguments = #typargs oo the_decl; - -val is_monomorphic = null oo type_arguments; - -fun the_constraint (consts as Consts {constraints, ...}) c = - (case Symtab.lookup constraints c of - SOME T => T - | NONE => type_scheme consts c); - - -(* name space and syntax *) - -fun space_of (Consts {decls, ...}) = Name_Space.space_of_table decls; - -fun alias naming binding name = map_consts (fn (decls, constraints, rev_abbrevs) => - ((Name_Space.alias_table naming binding name decls), constraints, rev_abbrevs)); - -val is_concealed = Name_Space.is_concealed o space_of; - -val intern = Name_Space.intern o space_of; - -fun intern_syntax consts s = - (case try Lexicon.unmark_const s of - SOME c => c - | NONE => intern consts s); - - -(* check_const *) - -fun check_const context consts (xname, ps) = - let - val Consts {decls, ...} = consts; - val ((c, reports), _) = Name_Space.check_reports context decls (xname, ps); - val T = type_scheme consts c handle TYPE (msg, _, _) => error (msg ^ Position.here_list ps); - in (Const (c, T), reports) end; - - -(* certify *) - -fun certify pp tsig do_expand consts = - let - fun err msg (c, T) = - raise TYPE (msg ^ " " ^ quote c ^ " :: " ^ - Syntax.string_of_typ (Syntax.init_pretty pp) T, [], []); - val certT = Type.cert_typ tsig; - fun cert tm = - let - val (head, args) = Term.strip_comb tm; - val args' = map cert args; - fun comb head' = Term.list_comb (head', args'); - in - (case head of - Abs (x, T, t) => comb (Abs (x, certT T, cert t)) - | Const (c, T) => - let - val T' = certT T; - val (_, ({T = U, ...}, abbr)) = the_entry consts c; - fun expand u = - Term.betapplys (Envir.expand_atom T' (U, u) handle TYPE _ => - err "Illegal type for abbreviation" (c, T), args'); - in - if not (Type.raw_instance 
(T', U)) then - err "Illegal type for constant" (c, T) - else - (case abbr of - SOME {rhs, normal_rhs, force_expand} => - if do_expand then expand normal_rhs - else if force_expand then expand rhs - else comb head - | _ => comb head) - end - | _ => comb head) - end; - in cert end; - - -(* typargs -- view actual const type as instance of declaration *) - -local - -fun args_of (Type (_, Ts)) pos = args_of_list Ts 0 pos - | args_of (TVar v) pos = insert (eq_fst op =) (v, rev pos) - | args_of (TFree _) _ = I -and args_of_list (T :: Ts) i is = args_of T (i :: is) #> args_of_list Ts (i + 1) is - | args_of_list [] _ _ = I; - -fun subscript (Type (_, Ts)) (i :: is) = subscript (nth Ts i) is - | subscript T [] = T - | subscript _ _ = raise Subscript; - -in - -fun typargs_of T = map #2 (rev (args_of T [] [])); - -fun typargs consts (c, T) = map (subscript T) (type_arguments consts c); - -end; - -fun instance consts (c, Ts) = - let - val declT = type_scheme consts c; - val vars = map Term.dest_TVar (typargs consts (c, declT)); - val inst = vars ~~ Ts handle ListPair.UnequalLengths => - raise TYPE ("Consts.instance", Ts, [Const (c, dummyT)]); - in declT |> Term_Subst.instantiateT inst end; - - - -(** build consts **) - -(* name space *) - -fun hide fully c = map_consts (fn (decls, constraints, rev_abbrevs) => - (Name_Space.hide_table fully c decls, constraints, rev_abbrevs)); - - -(* declarations *) - -fun declare context (b, declT) = - map_consts (fn (decls, constraints, rev_abbrevs) => - let - val decl = {T = declT, typargs = typargs_of declT}; - val _ = Binding.check b; - val (_, decls') = decls |> Name_Space.define context true (b, (decl, NONE)); - in (decls', constraints, rev_abbrevs) end); - - -(* constraints *) - -fun constrain (c, C) consts = - consts |> map_consts (fn (decls, constraints, rev_abbrevs) => - (#2 (the_entry consts c) handle TYPE (msg, _, _) => error msg; - (decls, - constraints |> (case C of SOME T => Symtab.update (c, T) | NONE => Symtab.delete_safe c), - rev_abbrevs))); - - -(* abbreviations *) - -local - -fun strip_abss (t as Abs (x, T, b)) = - if Term.is_dependent b then strip_abss b |>> cons (x, T) (* FIXME decr!? 
*) - else ([], t) - | strip_abss t = ([], t); - -fun rev_abbrev lhs rhs = - let - val (xs, body) = strip_abss (Envir.beta_eta_contract rhs); - val vars = fold (fn (x, T) => cons (Var ((x, 0), T))) (Term.rename_wrt_term body xs) []; - in (Term.subst_bounds (rev vars, body), Term.list_comb (lhs, vars)) end; - -in - -fun abbreviate context tsig mode (b, raw_rhs) consts = - let - val pp = Context.pretty_generic context; - val cert_term = certify pp tsig false consts; - val expand_term = certify pp tsig true consts; - val force_expand = mode = Print_Mode.internal; - - val _ = Term.exists_subterm Term.is_Var raw_rhs andalso - error ("Illegal schematic variables on rhs of abbreviation " ^ Binding.print b); - - val rhs = raw_rhs - |> Term.map_types (Type.cert_typ tsig) - |> cert_term - |> Term.close_schematic_term; - val normal_rhs = expand_term rhs; - val T = Term.fastype_of rhs; - val lhs = Const (Name_Space.full_name (Name_Space.naming_of context) b, T); - in - consts |> map_consts (fn (decls, constraints, rev_abbrevs) => - let - val decl = {T = T, typargs = typargs_of T}; - val abbr = {rhs = rhs, normal_rhs = normal_rhs, force_expand = force_expand}; - val _ = Binding.check b; - val (_, decls') = decls - |> Name_Space.define context true (b, (decl, SOME abbr)); - val rev_abbrevs' = rev_abbrevs - |> update_abbrevs mode (rev_abbrev lhs rhs); - in (decls', constraints, rev_abbrevs') end) - |> pair (lhs, rhs) - end; - -fun revert_abbrev mode c consts = consts |> map_consts (fn (decls, constraints, rev_abbrevs) => - let - val (T, rhs) = the_abbreviation consts c; - val rev_abbrevs' = rev_abbrevs - |> update_abbrevs mode (rev_abbrev (Const (c, T)) rhs); - in (decls, constraints, rev_abbrevs') end); - -end; - - -(* empty and merge *) - -val empty = - make_consts (Name_Space.empty_table Markup.constantN, Symtab.empty, Symtab.empty); - -fun merge - (Consts {decls = decls1, constraints = constraints1, rev_abbrevs = rev_abbrevs1}, - Consts {decls = decls2, constraints = constraints2, rev_abbrevs = rev_abbrevs2}) = - let - val decls' = Name_Space.merge_tables (decls1, decls2); - val constraints' = Symtab.merge (K true) (constraints1, constraints2); - val rev_abbrevs' = Symtab.join (K Item_Net.merge) (rev_abbrevs1, rev_abbrevs2); - in make_consts (decls', constraints', rev_abbrevs') end; - -end; diff --git a/core/Pure/context.ML b/core/Pure/context.ML deleted file mode 100644 index f0722ef0..00000000 --- a/core/Pure/context.ML +++ /dev/null @@ -1,630 +0,0 @@ -(* Title: Pure/context.ML - Author: Markus Wenzel, TU Muenchen - -Generic theory contexts with unique identity, arbitrarily typed data, -monotonic development graph and history support. Generic proof -contexts with arbitrarily typed data. 
- -Firm naming conventions: - thy, thy', thy1, thy2: theory - ctxt, ctxt', ctxt1, ctxt2: Proof.context - context: Context.generic -*) - -signature BASIC_CONTEXT = -sig - type theory - exception THEORY of string * theory list - structure Proof: sig type context end - structure Proof_Context: - sig - val theory_of: Proof.context -> theory - val init_global: theory -> Proof.context - val get_global: theory -> string -> Proof.context - end -end; - -signature CONTEXT = -sig - include BASIC_CONTEXT - (*theory context*) - val timing: bool Unsynchronized.ref - type pretty - val parents_of: theory -> theory list - val ancestors_of: theory -> theory list - val theory_name: theory -> string - val PureN: string - val display_names: theory -> string list - val pretty_thy: theory -> Pretty.T - val string_of_thy: theory -> string - val pretty_abbrev_thy: theory -> Pretty.T - val str_of_thy: theory -> string - val get_theory: theory -> string -> theory - val this_theory: theory -> string -> theory - val eq_thy: theory * theory -> bool - val subthy: theory * theory -> bool - val merge: theory * theory -> theory - val finish_thy: theory -> theory - val begin_thy: (theory -> pretty) -> string -> theory list -> theory - (*proof context*) - val raw_transfer: theory -> Proof.context -> Proof.context - (*generic context*) - datatype generic = Theory of theory | Proof of Proof.context - val cases: (theory -> 'a) -> (Proof.context -> 'a) -> generic -> 'a - val mapping: (theory -> theory) -> (Proof.context -> Proof.context) -> generic -> generic - val mapping_result: (theory -> 'a * theory) -> (Proof.context -> 'a * Proof.context) -> - generic -> 'a * generic - val the_theory: generic -> theory - val the_proof: generic -> Proof.context - val map_theory: (theory -> theory) -> generic -> generic - val map_proof: (Proof.context -> Proof.context) -> generic -> generic - val map_theory_result: (theory -> 'a * theory) -> generic -> 'a * generic - val map_proof_result: (Proof.context -> 'a * Proof.context) -> generic -> 'a * generic - val theory_map: (generic -> generic) -> theory -> theory - val proof_map: (generic -> generic) -> Proof.context -> Proof.context - val theory_of: generic -> theory (*total*) - val proof_of: generic -> Proof.context (*total*) - (*pretty printing context*) - val pretty: Proof.context -> pretty - val pretty_global: theory -> pretty - val pretty_generic: generic -> pretty - val pretty_context: (theory -> Proof.context) -> pretty -> Proof.context - (*thread data*) - val thread_data: unit -> generic option - val the_thread_data: unit -> generic - val set_thread_data: generic option -> unit - val setmp_thread_data: generic option -> ('a -> 'b) -> 'a -> 'b - val >> : (generic -> generic) -> unit - val >>> : (generic -> 'a * generic) -> 'a -end; - -signature PRIVATE_CONTEXT = -sig - include CONTEXT - structure Theory_Data: - sig - val declare: Position.T -> Any.T -> (Any.T -> Any.T) -> - (pretty -> Any.T * Any.T -> Any.T) -> serial - val get: serial -> (Any.T -> 'a) -> theory -> 'a - val put: serial -> ('a -> Any.T) -> 'a -> theory -> theory - end - structure Proof_Data: - sig - val declare: (theory -> Any.T) -> serial - val get: serial -> (Any.T -> 'a) -> Proof.context -> 'a - val put: serial -> ('a -> Any.T) -> 'a -> Proof.context -> Proof.context - end -end; - -structure Context: PRIVATE_CONTEXT = -struct - -(*** theory context ***) - -(** theory data **) - -(* data kinds and access methods *) - -val timing = Unsynchronized.ref false; - -(*private copy avoids potential conflict of table 
exceptions*) -structure Datatab = Table(type key = int val ord = int_ord); - -datatype pretty = Pretty of Any.T; - -local - -type kind = - {pos: Position.T, - empty: Any.T, - extend: Any.T -> Any.T, - merge: pretty -> Any.T * Any.T -> Any.T}; - -val kinds = Synchronized.var "Theory_Data" (Datatab.empty: kind Datatab.table); - -fun invoke name f k x = - (case Datatab.lookup (Synchronized.value kinds) k of - SOME kind => - if ! timing andalso name <> "" then - Timing.cond_timeit true ("Theory_Data." ^ name ^ Position.here (#pos kind)) - (fn () => f kind x) - else f kind x - | NONE => raise Fail "Invalid theory data identifier"); - -in - -fun invoke_empty k = invoke "" (K o #empty) k (); -val invoke_extend = invoke "extend" #extend; -fun invoke_merge pp = invoke "merge" (fn kind => #merge kind pp); - -fun declare_theory_data pos empty extend merge = - let - val k = serial (); - val kind = {pos = pos, empty = empty, extend = extend, merge = merge}; - val _ = Synchronized.change kinds (Datatab.update (k, kind)); - in k end; - -val extend_data = Datatab.map invoke_extend; -fun merge_data pp = Datatab.join (invoke_merge pp) o pairself extend_data; - -end; - - - -(** datatype theory **) - -datatype theory = - Theory of - (*identity*) - {id: serial, (*identifier*) - ids: Inttab.set} * (*cumulative identifiers -- symbolic body content*) - (*data*) - Any.T Datatab.table * (*body content*) - (*ancestry*) - {parents: theory list, (*immediate predecessors*) - ancestors: theory list} * (*all predecessors -- canonical reverse order*) - (*history*) - {name: string, (*official theory name*) - stage: int}; (*counter for anonymous updates*) - -exception THEORY of string * theory list; - -fun rep_theory (Theory args) = args; - -val identity_of = #1 o rep_theory; -val data_of = #2 o rep_theory; -val ancestry_of = #3 o rep_theory; -val history_of = #4 o rep_theory; - -fun make_identity id ids = {id = id, ids = ids}; -fun make_ancestry parents ancestors = {parents = parents, ancestors = ancestors}; -fun make_history name stage = {name = name, stage = stage}; - -val parents_of = #parents o ancestry_of; -val ancestors_of = #ancestors o ancestry_of; -val theory_name = #name o history_of; - - -(* names *) - -val PureN = "Pure"; -val finished = ~1; - -fun display_names thy = - let - val {name, stage} = history_of thy; - val name' = - if stage = finished then name - else name ^ ":" ^ string_of_int stage; - val ancestor_names = map theory_name (ancestors_of thy); - in rev (name' :: ancestor_names) end; - -val pretty_thy = Pretty.str_list "{" "}" o display_names; -val string_of_thy = Pretty.string_of o pretty_thy; - -fun pretty_abbrev_thy thy = - let - val names = display_names thy; - val n = length names; - val abbrev = if n > 5 then "..." 
:: List.drop (names, n - 5) else names; - in Pretty.str_list "{" "}" abbrev end; - -val str_of_thy = Pretty.str_of o pretty_abbrev_thy; - -fun get_theory thy name = - if theory_name thy <> name then - (case find_first (fn thy' => theory_name thy' = name) (ancestors_of thy) of - SOME thy' => thy' - | NONE => error ("Unknown ancestor theory " ^ quote name)) - else if #stage (history_of thy) = finished then thy - else error ("Unfinished theory " ^ quote name); - -fun this_theory thy name = - if theory_name thy = name then thy - else get_theory thy name; - - -(* build ids *) - -fun insert_id id ids = Inttab.update (id, ()) ids; - -fun merge_ids - (Theory ({id = id1, ids = ids1, ...}, _, _, _)) - (Theory ({id = id2, ids = ids2, ...}, _, _, _)) = - Inttab.merge (K true) (ids1, ids2) - |> insert_id id1 - |> insert_id id2; - - -(* equality and inclusion *) - -val eq_thy = op = o pairself (#id o identity_of); - -fun proper_subthy (Theory ({id, ...}, _, _, _), Theory ({ids, ...}, _, _, _)) = - Inttab.defined ids id; - -fun subthy thys = eq_thy thys orelse proper_subthy thys; - - -(* consistent ancestors *) - -fun eq_thy_consistent (thy1, thy2) = - eq_thy (thy1, thy2) orelse - (theory_name thy1 = theory_name thy2 andalso - raise THEORY ("Duplicate theory name", [thy1, thy2])); - -fun extend_ancestors thy thys = - if member eq_thy_consistent thys thy then - raise THEORY ("Duplicate theory node", thy :: thys) - else thy :: thys; - -val merge_ancestors = merge eq_thy_consistent; - - -(* trivial merge *) - -fun merge (thy1, thy2) = - if eq_thy (thy1, thy2) then thy1 - else if proper_subthy (thy2, thy1) then thy1 - else if proper_subthy (thy1, thy2) then thy2 - else error (cat_lines ["Attempt to perform non-trivial merge of theories:", - str_of_thy thy1, str_of_thy thy2]); - - - -(** build theories **) - -(* primitives *) - -fun create_thy ids data ancestry history = - Theory (make_identity (serial ()) ids, data, ancestry, history); - -val pre_pure_thy = - create_thy Inttab.empty Datatab.empty (make_ancestry [] []) (make_history PureN 0); - -local - -fun change_thy finish f thy = - let - val Theory ({id, ids}, data, ancestry, {name, stage}) = thy; - val (data', ancestry') = - if stage = finished then - (extend_data data, make_ancestry [thy] (extend_ancestors thy (ancestors_of thy))) - else (data, ancestry); - val history' = {name = name, stage = if finish then finished else stage + 1}; - val ids' = insert_id id ids; - val data'' = f data'; - in create_thy ids' data'' ancestry' history' end; - -in - -val update_thy = change_thy false; -val extend_thy = update_thy I; -val finish_thy = change_thy true I; - -end; - - -(* named theory nodes *) - -fun merge_thys pp (thy1, thy2) = - let - val ids = merge_ids thy1 thy2; - val data = merge_data (pp thy1) (data_of thy1, data_of thy2); - val ancestry = make_ancestry [] []; - val history = make_history "" 0; - in create_thy ids data ancestry history end; - -fun maximal_thys thys = - thys |> filter_out (fn thy => exists (fn thy' => proper_subthy (thy, thy')) thys); - -fun begin_thy pp name imports = - if name = "" then error ("Bad theory name: " ^ quote name) - else - let - val parents = maximal_thys (distinct eq_thy imports); - val ancestors = - Library.foldl merge_ancestors ([], map ancestors_of parents) - |> fold extend_ancestors parents; - - val Theory ({ids, ...}, data, _, _) = - (case parents of - [] => error "Missing theory imports" - | [thy] => extend_thy thy - | thy :: thys => Library.foldl (merge_thys pp) (thy, thys)); - - val ancestry = make_ancestry 
parents ancestors; - val history = make_history name 0; - in create_thy ids data ancestry history end; - - -(* theory data *) - -structure Theory_Data = -struct - -val declare = declare_theory_data; - -fun get k dest thy = - (case Datatab.lookup (data_of thy) k of - SOME x => x - | NONE => invoke_empty k) |> dest; - -fun put k mk x = update_thy (Datatab.update (k, mk x)); - -end; - - - -(*** proof context ***) - -(* datatype Proof.context *) - -structure Proof = -struct - datatype context = Context of Any.T Datatab.table * theory; -end; - -fun theory_of_proof (Proof.Context (_, thy)) = thy; -fun data_of_proof (Proof.Context (data, _)) = data; -fun map_prf f (Proof.Context (data, thy)) = Proof.Context (f data, thy); - - -(* proof data kinds *) - -local - -val kinds = Synchronized.var "Proof_Data" (Datatab.empty: (theory -> Any.T) Datatab.table); - -fun invoke_init k = - (case Datatab.lookup (Synchronized.value kinds) k of - SOME init => init - | NONE => raise Fail "Invalid proof data identifier"); - -fun init_data thy = - Datatab.map (fn k => fn _ => invoke_init k thy) (Synchronized.value kinds); - -fun init_new_data data thy = - Datatab.merge (K true) (data, init_data thy); - -in - -fun raw_transfer thy' (Proof.Context (data, thy)) = - let - val _ = subthy (thy, thy') orelse error "Cannot transfer proof context: not a super theory"; - val data' = init_new_data data thy'; - in Proof.Context (data', thy') end; - -structure Proof_Context = -struct - val theory_of = theory_of_proof; - fun init_global thy = Proof.Context (init_data thy, thy); - fun get_global thy name = init_global (get_theory thy name); -end; - -structure Proof_Data = -struct - -fun declare init = - let - val k = serial (); - val _ = Synchronized.change kinds (Datatab.update (k, init)); - in k end; - -fun get k dest prf = - (case Datatab.lookup (data_of_proof prf) k of - SOME x => x - | NONE => invoke_init k (Proof_Context.theory_of prf)) |> dest; (*adhoc value for old theories*) - -fun put k mk x = map_prf (Datatab.update (k, mk x)); - -end; - -end; - - - -(*** generic context ***) - -datatype generic = Theory of theory | Proof of Proof.context; - -fun cases f _ (Theory thy) = f thy - | cases _ g (Proof prf) = g prf; - -fun mapping f g = cases (Theory o f) (Proof o g); -fun mapping_result f g = cases (apsnd Theory o f) (apsnd Proof o g); - -val the_theory = cases I (fn _ => error "Ill-typed context: theory expected"); -val the_proof = cases (fn _ => error "Ill-typed context: proof expected") I; - -fun map_theory f = Theory o f o the_theory; -fun map_proof f = Proof o f o the_proof; - -fun map_theory_result f = apsnd Theory o f o the_theory; -fun map_proof_result f = apsnd Proof o f o the_proof; - -fun theory_map f = the_theory o f o Theory; -fun proof_map f = the_proof o f o Proof; - -val theory_of = cases I Proof_Context.theory_of; -val proof_of = cases Proof_Context.init_global I; - - -(* pretty printing context *) - -exception PRETTY of generic; - -val pretty_generic = Pretty o PRETTY; -val pretty = pretty_generic o Proof; -val pretty_global = pretty_generic o Theory; - -fun pretty_context init (Pretty (PRETTY context)) = cases init I context; - - - -(** thread data **) - -local val tag = Universal.tag () : generic option Universal.tag in - -fun thread_data () = - (case Thread.getLocal tag of - SOME (SOME context) => SOME context - | _ => NONE); - -fun the_thread_data () = - (case thread_data () of - SOME context => context - | _ => error "Unknown context"); - -fun set_thread_data context = Thread.setLocal (tag, context); 
-fun setmp_thread_data context = Library.setmp_thread_data tag (thread_data ()) context; - -end; - -fun >>> f = - let - val (res, context') = f (the_thread_data ()); - val _ = set_thread_data (SOME context'); - in res end; - -nonfix >>; -fun >> f = >>> (fn context => ((), f context)); - -val _ = set_thread_data (SOME (Theory pre_pure_thy)); - -end; - -structure Basic_Context: BASIC_CONTEXT = Context; -open Basic_Context; - - - -(*** type-safe interfaces for data declarations ***) - -(** theory data **) - -signature THEORY_DATA_PP_ARGS = -sig - type T - val empty: T - val extend: T -> T - val merge: Context.pretty -> T * T -> T -end; - -signature THEORY_DATA_ARGS = -sig - type T - val empty: T - val extend: T -> T - val merge: T * T -> T -end; - -signature THEORY_DATA = -sig - type T - val get: theory -> T - val put: T -> theory -> theory - val map: (T -> T) -> theory -> theory -end; - -functor Theory_Data_PP(Data: THEORY_DATA_PP_ARGS): THEORY_DATA = -struct - -type T = Data.T; -exception Data of T; - -val kind = - Context.Theory_Data.declare - (Position.thread_data ()) - (Data Data.empty) - (fn Data x => Data (Data.extend x)) - (fn pp => fn (Data x1, Data x2) => Data (Data.merge pp (x1, x2))); - -val get = Context.Theory_Data.get kind (fn Data x => x); -val put = Context.Theory_Data.put kind Data; -fun map f thy = put (f (get thy)) thy; - -end; - -functor Theory_Data(Data: THEORY_DATA_ARGS): THEORY_DATA = - Theory_Data_PP - ( - type T = Data.T; - val empty = Data.empty; - val extend = Data.extend; - fun merge _ = Data.merge; - ); - - - -(** proof data **) - -signature PROOF_DATA_ARGS = -sig - type T - val init: theory -> T -end; - -signature PROOF_DATA = -sig - type T - val get: Proof.context -> T - val put: T -> Proof.context -> Proof.context - val map: (T -> T) -> Proof.context -> Proof.context -end; - -functor Proof_Data(Data: PROOF_DATA_ARGS): PROOF_DATA = -struct - -type T = Data.T; -exception Data of T; - -val kind = Context.Proof_Data.declare (Data o Data.init); - -val get = Context.Proof_Data.get kind (fn Data x => x); -val put = Context.Proof_Data.put kind Data; -fun map f prf = put (f (get prf)) prf; - -end; - - - -(** generic data **) - -signature GENERIC_DATA_ARGS = -sig - type T - val empty: T - val extend: T -> T - val merge: T * T -> T -end; - -signature GENERIC_DATA = -sig - type T - val get: Context.generic -> T - val put: T -> Context.generic -> Context.generic - val map: (T -> T) -> Context.generic -> Context.generic -end; - -functor Generic_Data(Data: GENERIC_DATA_ARGS): GENERIC_DATA = -struct - -structure Thy_Data = Theory_Data(Data); -structure Prf_Data = Proof_Data(type T = Data.T val init = Thy_Data.get); - -type T = Data.T; - -fun get (Context.Theory thy) = Thy_Data.get thy - | get (Context.Proof prf) = Prf_Data.get prf; - -fun put x (Context.Theory thy) = Context.Theory (Thy_Data.put x thy) - | put x (Context.Proof prf) = Context.Proof (Prf_Data.put x prf); - -fun map f ctxt = put (f (get ctxt)) ctxt; - -end; - -(*hide private interface*) -structure Context: CONTEXT = Context; - diff --git a/core/Pure/context_position.ML b/core/Pure/context_position.ML deleted file mode 100644 index 775e8bf0..00000000 --- a/core/Pure/context_position.ML +++ /dev/null @@ -1,70 +0,0 @@ -(* Title: Pure/context_position.ML - Author: Makarius - -Context position visibility flag. 
-*) - -signature CONTEXT_POSITION = -sig - val is_visible_generic: Context.generic -> bool - val is_visible: Proof.context -> bool - val is_visible_global: theory -> bool - val set_visible: bool -> Proof.context -> Proof.context - val set_visible_global: bool -> theory -> theory - val is_really_visible: Proof.context -> bool - val not_really: Proof.context -> Proof.context - val restore_visible: Proof.context -> Proof.context -> Proof.context - val restore_visible_global: theory -> theory -> theory - val is_reported_generic: Context.generic -> Position.T -> bool - val is_reported: Proof.context -> Position.T -> bool - val report_generic: Context.generic -> Position.T -> Markup.T -> unit - val reported_text: Proof.context -> Position.T -> Markup.T -> string -> string - val report_text: Proof.context -> Position.T -> Markup.T -> string -> unit - val report: Proof.context -> Position.T -> Markup.T -> unit - val reports_text: Proof.context -> Position.report_text list -> unit - val reports: Proof.context -> Position.report list -> unit -end; - -structure Context_Position: CONTEXT_POSITION = -struct - -structure Data = Generic_Data -( - type T = bool option * bool option; (*really, visible*) - val empty: T = (NONE, NONE); - val extend = I; - fun merge ((a, b), (a', b')) : T = (merge_options (a, a'), merge_options (b, b')); -); - -val is_visible_generic = the_default true o snd o Data.get; -val is_visible = is_visible_generic o Context.Proof; -val is_visible_global = is_visible_generic o Context.Theory; - -val set_visible = Context.proof_map o Data.map o apsnd o K o SOME; -val set_visible_global = Context.theory_map o Data.map o apsnd o K o SOME; - -val is_really = the_default true o fst o Data.get o Context.Proof; -fun is_really_visible ctxt = is_really ctxt andalso is_visible ctxt; -val not_really = Context.proof_map (Data.map (apfst (K (SOME false)))); - -val restore_visible = Context.proof_map o Data.put o Data.get o Context.Proof; -val restore_visible_global = Context.theory_map o Data.put o Data.get o Context.Theory; - -fun is_reported_generic context pos = is_visible_generic context andalso Position.is_reported pos; -fun is_reported ctxt pos = is_visible ctxt andalso Position.is_reported pos; - -fun report_generic context pos markup = - if is_reported_generic context pos then - Output.report [Position.reported_text pos markup ""] - else (); - -fun reported_text ctxt pos markup txt = - if is_reported ctxt pos then Position.reported_text pos markup txt else ""; - -fun report_text ctxt pos markup txt = Output.report [reported_text ctxt pos markup txt]; -fun report ctxt pos markup = report_text ctxt pos markup ""; - -fun reports_text ctxt reps = if is_visible ctxt then Position.reports_text reps else (); -fun reports ctxt reps = if is_visible ctxt then Position.reports reps else (); - -end; diff --git a/core/Pure/conv.ML b/core/Pure/conv.ML deleted file mode 100644 index 36d3dd61..00000000 --- a/core/Pure/conv.ML +++ /dev/null @@ -1,222 +0,0 @@ -(* Title: Pure/conv.ML - Author: Amine Chaieb, TU Muenchen - Author: Sascha Boehme, TU Muenchen - Author: Makarius - -Conversions: primitive equality reasoning. 
-*) - -infix 1 then_conv; -infix 0 else_conv; - -signature BASIC_CONV = -sig - val then_conv: conv * conv -> conv - val else_conv: conv * conv -> conv -end; - -signature CONV = -sig - include BASIC_CONV - val no_conv: conv - val all_conv: conv - val first_conv: conv list -> conv - val every_conv: conv list -> conv - val try_conv: conv -> conv - val repeat_conv: conv -> conv - val cache_conv: conv -> conv - val abs_conv: (cterm * Proof.context -> conv) -> Proof.context -> conv - val combination_conv: conv -> conv -> conv - val comb_conv: conv -> conv - val arg_conv: conv -> conv - val fun_conv: conv -> conv - val arg1_conv: conv -> conv - val fun2_conv: conv -> conv - val binop_conv: conv -> conv - val binder_conv: (cterm * Proof.context -> conv) -> Proof.context -> conv - val forall_conv: (cterm * Proof.context -> conv) -> Proof.context -> conv - val implies_conv: conv -> conv -> conv - val implies_concl_conv: conv -> conv - val rewr_conv: thm -> conv - val rewrs_conv: thm list -> conv - val sub_conv: (Proof.context -> conv) -> Proof.context -> conv - val bottom_conv: (Proof.context -> conv) -> Proof.context -> conv - val top_conv: (Proof.context -> conv) -> Proof.context -> conv - val top_sweep_conv: (Proof.context -> conv) -> Proof.context -> conv - val params_conv: int -> (Proof.context -> conv) -> Proof.context -> conv - val prems_conv: int -> conv -> conv - val concl_conv: int -> conv -> conv - val fconv_rule: conv -> thm -> thm - val gconv_rule: conv -> int -> thm -> thm -end; - -structure Conv: CONV = -struct - -(* basic conversionals *) - -fun no_conv _ = raise CTERM ("no conversion", []); -val all_conv = Thm.reflexive; - -fun (cv1 then_conv cv2) ct = - let - val eq1 = cv1 ct; - val eq2 = cv2 (Thm.rhs_of eq1); - in - if Thm.is_reflexive eq1 then eq2 - else if Thm.is_reflexive eq2 then eq1 - else Thm.transitive eq1 eq2 - end; - -fun (cv1 else_conv cv2) ct = - (cv1 ct - handle THM _ => cv2 ct - | CTERM _ => cv2 ct - | TERM _ => cv2 ct - | TYPE _ => cv2 ct); - -fun first_conv cvs = fold_rev (curry op else_conv) cvs no_conv; -fun every_conv cvs = fold_rev (curry op then_conv) cvs all_conv; - -fun try_conv cv = cv else_conv all_conv; -fun repeat_conv cv ct = try_conv (cv then_conv repeat_conv cv) ct; - -fun cache_conv (cv: conv) = Thm.cterm_cache cv; - - - -(** Pure conversions **) - -(* lambda terms *) - -fun abs_conv cv ctxt ct = - (case Thm.term_of ct of - Abs (x, _, _) => - let - val (u, ctxt') = yield_singleton Variable.variant_fixes Name.uu ctxt; - val (v, ct') = Thm.dest_abs (SOME u) ct; - val eq = cv (v, ctxt') ct'; - in if Thm.is_reflexive eq then all_conv ct else Thm.abstract_rule x v eq end - | _ => raise CTERM ("abs_conv", [ct])); - -fun combination_conv cv1 cv2 ct = - let val (ct1, ct2) = Thm.dest_comb ct - in Thm.combination (cv1 ct1) (cv2 ct2) end; - -fun comb_conv cv = combination_conv cv cv; -fun arg_conv cv = combination_conv all_conv cv; -fun fun_conv cv = combination_conv cv all_conv; - -val arg1_conv = fun_conv o arg_conv; -val fun2_conv = fun_conv o fun_conv; - -fun binop_conv cv = combination_conv (arg_conv cv) cv; - -fun binder_conv cv ctxt = arg_conv (abs_conv cv ctxt); - - -(* subterm structure *) - -(*cf. SUB_CONV in HOL*) -fun sub_conv conv ctxt = - comb_conv (conv ctxt) else_conv - abs_conv (conv o snd) ctxt else_conv - all_conv; - -(*cf. BOTTOM_CONV in HOL*) -fun bottom_conv conv ctxt ct = - (sub_conv (bottom_conv conv) ctxt then_conv conv ctxt) ct; - -(*cf. 
TOP_CONV in HOL*) -fun top_conv conv ctxt ct = - (conv ctxt then_conv sub_conv (top_conv conv) ctxt) ct; - -(*cf. TOP_SWEEP_CONV in HOL*) -fun top_sweep_conv conv ctxt ct = - (conv ctxt else_conv sub_conv (top_sweep_conv conv) ctxt) ct; - - -(* primitive logic *) - -fun forall_conv cv ctxt ct = - (case Thm.term_of ct of - Const ("Pure.all", _) $ Abs _ => arg_conv (abs_conv cv ctxt) ct - | _ => raise CTERM ("forall_conv", [ct])); - -fun implies_conv cv1 cv2 ct = - (case Thm.term_of ct of - Const ("Pure.imp", _) $ _ $ _ => combination_conv (arg_conv cv1) cv2 ct - | _ => raise CTERM ("implies_conv", [ct])); - -fun implies_concl_conv cv ct = - (case Thm.term_of ct of - Const ("Pure.imp", _) $ _ $ _ => arg_conv cv ct - | _ => raise CTERM ("implies_concl_conv", [ct])); - - -(* single rewrite step, cf. REWR_CONV in HOL *) - -fun rewr_conv rule ct = - let - val rule1 = Thm.incr_indexes (#maxidx (Thm.rep_cterm ct) + 1) rule; - val lhs = Thm.lhs_of rule1; - val rule2 = Thm.rename_boundvars (Thm.term_of lhs) (Thm.term_of ct) rule1; - val rule3 = - Thm.instantiate (Thm.match (lhs, ct)) rule2 - handle Pattern.MATCH => raise CTERM ("rewr_conv", [lhs, ct]); - val rule4 = - if Thm.lhs_of rule3 aconvc ct then rule3 - else - let val ceq = Thm.dest_fun2 (Thm.cprop_of rule3) - in rule3 COMP Thm.trivial (Thm.mk_binop ceq ct (Thm.rhs_of rule3)) end; - in Thm.transitive rule4 (Thm.beta_conversion true (Thm.rhs_of rule4)) end; - -fun rewrs_conv rules = first_conv (map rewr_conv rules); - - -(* conversions on HHF rules *) - -(*rewrite B in !!x1 ... xn. B*) -fun params_conv n cv ctxt ct = - if n <> 0 andalso Logic.is_all (Thm.term_of ct) - then arg_conv (abs_conv (params_conv (n - 1) cv o #2) ctxt) ct - else cv ctxt ct; - -(*rewrite the A's in A1 ==> ... ==> An ==> B*) -fun prems_conv 0 _ ct = all_conv ct - | prems_conv n cv ct = - (case try Thm.dest_implies ct of - NONE => all_conv ct - | SOME (A, B) => Drule.imp_cong_rule (cv A) (prems_conv (n - 1) cv B)); - -(*rewrite B in A1 ==> ... ==> An ==> B*) -fun concl_conv 0 cv ct = cv ct - | concl_conv n cv ct = - (case try Thm.dest_implies ct of - NONE => cv ct - | SOME (A, B) => Drule.imp_cong_rule (all_conv A) (concl_conv (n - 1) cv B)); - - -(* conversions as inference rules *) - -(*forward conversion, cf. FCONV_RULE in LCF*) -fun fconv_rule cv th = - let val eq = cv (Thm.cprop_of th) in - if Thm.is_reflexive eq then th - else Thm.equal_elim eq th - end; - -(*goal conversion*) -fun gconv_rule cv i th = - (case try (Thm.cprem_of th) i of - SOME ct => - let val eq = cv ct in - if Thm.is_reflexive eq then th - else Drule.with_subgoal i (fconv_rule (arg1_conv (K eq))) th - end - | NONE => raise THM ("gconv_rule", i, [th])); - -end; - -structure Basic_Conv: BASIC_CONV = Conv; -open Basic_Conv; diff --git a/core/Pure/defs.ML b/core/Pure/defs.ML deleted file mode 100644 index ea0c7903..00000000 --- a/core/Pure/defs.ML +++ /dev/null @@ -1,228 +0,0 @@ -(* Title: Pure/defs.ML - Author: Makarius - -Global well-formedness checks for constant definitions. Covers plain -definitions and simple sub-structural overloading. 
-*) - -signature DEFS = -sig - val pretty_const: Proof.context -> string * typ list -> Pretty.T - val plain_args: typ list -> bool - type T - type spec = - {def: string option, - description: string, - pos: Position.T, - lhs: typ list, - rhs: (string * typ list) list} - val all_specifications_of: T -> (string * spec list) list - val specifications_of: T -> string -> spec list - val dest: T -> - {restricts: ((string * typ list) * string) list, - reducts: ((string * typ list) * (string * typ list) list) list} - val empty: T - val merge: Proof.context -> T * T -> T - val define: Proof.context -> bool -> string option -> string -> - string * typ list -> (string * typ list) list -> T -> T -end - -structure Defs: DEFS = -struct - -(* type arguments *) - -type args = typ list; - -fun pretty_const ctxt (c, args) = - let - val prt_args = - if null args then [] - else [Pretty.list "(" ")" (map (Syntax.pretty_typ ctxt o Logic.unvarifyT_global) args)]; - in Pretty.block (Pretty.str c :: prt_args) end; - -fun plain_args args = - forall Term.is_TVar args andalso not (has_duplicates (op =) args); - -fun disjoint_args (Ts, Us) = - not (Type.could_unifys (Ts, Us)) orelse - ((Type.raw_unifys (Ts, map (Logic.incr_tvar (maxidx_of_typs Ts + 1)) Us) Vartab.empty; false) - handle Type.TUNIFY => true); - -fun match_args (Ts, Us) = - if Type.could_matches (Ts, Us) then - Option.map Envir.subst_type - (SOME (Type.raw_matches (Ts, Us) Vartab.empty) handle Type.TYPE_MATCH => NONE) - else NONE; - - -(* datatype defs *) - -type spec = - {def: string option, - description: string, - pos: Position.T, - lhs: args, - rhs: (string * args) list}; - -type def = - {specs: spec Inttab.table, (*source specifications*) - restricts: (args * string) list, (*global restrictions imposed by incomplete patterns*) - reducts: (args * (string * args) list) list}; (*specifications as reduction system*) - -fun make_def (specs, restricts, reducts) = - {specs = specs, restricts = restricts, reducts = reducts}: def; - -fun map_def c f = - Symtab.default (c, make_def (Inttab.empty, [], [])) #> - Symtab.map_entry c (fn {specs, restricts, reducts}: def => - make_def (f (specs, restricts, reducts))); - - -datatype T = Defs of def Symtab.table; - -fun lookup_list which defs c = - (case Symtab.lookup defs c of - SOME (def: def) => which def - | NONE => []); - -fun all_specifications_of (Defs defs) = - (map o apsnd) (map snd o Inttab.dest o #specs) (Symtab.dest defs); - -fun specifications_of (Defs defs) = lookup_list (map snd o Inttab.dest o #specs) defs; - -val restricts_of = lookup_list #restricts; -val reducts_of = lookup_list #reducts; - -fun dest (Defs defs) = - let - val restricts = Symtab.fold (fn (c, {restricts, ...}) => - fold (fn (args, description) => cons ((c, args), description)) restricts) defs []; - val reducts = Symtab.fold (fn (c, {reducts, ...}) => - fold (fn (args, deps) => cons ((c, args), deps)) reducts) defs []; - in {restricts = restricts, reducts = reducts} end; - -val empty = Defs Symtab.empty; - - -(* specifications *) - -fun disjoint_specs c (i, {description = a, pos = pos_a, lhs = Ts, ...}: spec) = - Inttab.forall (fn (j, {description = b, pos = pos_b, lhs = Us, ...}: spec) => - i = j orelse disjoint_args (Ts, Us) orelse - error ("Clash of specifications for constant " ^ quote c ^ ":\n" ^ - " " ^ quote a ^ Position.here pos_a ^ "\n" ^ - " " ^ quote b ^ Position.here pos_b)); - -fun join_specs c ({specs = specs1, restricts, reducts}, {specs = specs2, ...}: def) = - let - val specs' = - Inttab.fold (fn spec2 => 
(disjoint_specs c spec2 specs1; Inttab.update spec2)) specs2 specs1; - in make_def (specs', restricts, reducts) end; - -fun update_specs c spec = map_def c (fn (specs, restricts, reducts) => - (disjoint_specs c spec specs; (Inttab.update spec specs, restricts, reducts))); - - -(* normalized dependencies: reduction with well-formedness check *) - -local - -val prt = Pretty.string_of oo pretty_const; -fun err ctxt (c, args) (d, Us) s1 s2 = - error (s1 ^ " dependency of constant " ^ prt ctxt (c, args) ^ " -> " ^ prt ctxt (d, Us) ^ s2); - -fun contained (U as TVar _) (Type (_, Ts)) = exists (fn T => T = U orelse contained U T) Ts - | contained _ _ = false; - -fun acyclic ctxt (c, args) (d, Us) = - c <> d orelse - exists (fn U => exists (contained U) args) Us orelse - is_none (match_args (args, Us)) orelse - err ctxt (c, args) (d, Us) "Circular" ""; - -fun wellformed ctxt defs (c, args) (d, Us) = - plain_args Us orelse - (case find_first (fn (Ts, _) => not (disjoint_args (Ts, Us))) (restricts_of defs d) of - SOME (Ts, description) => - err ctxt (c, args) (d, Us) "Malformed" - ("\n(restriction " ^ prt ctxt (d, Ts) ^ " from " ^ quote description ^ ")") - | NONE => true); - -fun reduction ctxt defs const deps = - let - fun reduct Us (Ts, rhs) = - (case match_args (Ts, Us) of - NONE => NONE - | SOME subst => SOME (map (apsnd (map subst)) rhs)); - fun reducts (d, Us) = get_first (reduct Us) (reducts_of defs d); - - val reds = map (`reducts) deps; - val deps' = - if forall (is_none o #1) reds then NONE - else SOME (fold_rev - (fn (NONE, dp) => insert (op =) dp | (SOME dps, _) => fold (insert (op =)) dps) reds []); - val _ = forall (acyclic ctxt const) (the_default deps deps'); - in deps' end; - -in - -fun normalize ctxt = - let - fun norm_update (c, {reducts, ...}: def) (changed, defs) = - let - val reducts' = reducts |> map (fn (args, deps) => - (args, perhaps (reduction ctxt defs (c, args)) deps)); - in - if reducts = reducts' then (changed, defs) - else (true, defs |> map_def c (fn (specs, restricts, _) => (specs, restricts, reducts'))) - end; - fun norm_all defs = - (case Symtab.fold norm_update defs (false, defs) of - (true, defs') => norm_all defs' - | (false, _) => defs); - fun check defs (c, {reducts, ...}: def) = - reducts |> forall (fn (args, deps) => forall (wellformed ctxt defs (c, args)) deps); - in norm_all #> (fn defs => tap (Symtab.forall (check defs)) defs) end; - -fun dependencies ctxt (c, args) restr deps = - map_def c (fn (specs, restricts, reducts) => - let - val restricts' = Library.merge (op =) (restricts, restr); - val reducts' = insert (op =) (args, deps) reducts; - in (specs, restricts', reducts') end) - #> normalize ctxt; - -end; - - -(* merge *) - -fun merge ctxt (Defs defs1, Defs defs2) = - let - fun add_deps (c, args) restr deps defs = - if AList.defined (op =) (reducts_of defs c) args then defs - else dependencies ctxt (c, args) restr deps defs; - fun add_def (c, {restricts, reducts, ...}: def) = - fold (fn (args, deps) => add_deps (c, args) restricts deps) reducts; - in - Defs (Symtab.join join_specs (defs1, defs2) - |> normalize ctxt |> Symtab.fold add_def defs2) - end; - - -(* define *) - -fun define ctxt unchecked def description (c, args) deps (Defs defs) = - let - val pos = Position.thread_data (); - val restr = - if plain_args args orelse - (case args of [Type (_, rec_args)] => plain_args rec_args | _ => false) - then [] else [(args, description)]; - val spec = - (serial (), {def = def, description = description, pos = pos, lhs = args, rhs = deps}); - val defs' = 
defs |> update_specs c spec; - in Defs (defs' |> (if unchecked then I else dependencies ctxt (c, args) restr deps)) end; - -end; diff --git a/core/Pure/display.ML b/core/Pure/display.ML deleted file mode 100644 index 17f80be6..00000000 --- a/core/Pure/display.ML +++ /dev/null @@ -1,209 +0,0 @@ -(* Title: Pure/display.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - Author: Makarius - -Printing of theorems, results etc. -*) - -signature BASIC_DISPLAY = -sig - val show_consts: bool Config.T - val show_hyps_raw: Config.raw - val show_hyps: bool Config.T - val show_tags_raw: Config.raw - val show_tags: bool Config.T -end; - -signature DISPLAY = -sig - include BASIC_DISPLAY - val pretty_thm_raw: Proof.context -> {quote: bool, show_hyps: bool} -> thm -> Pretty.T - val pretty_thm: Proof.context -> thm -> Pretty.T - val pretty_thm_item: Proof.context -> thm -> Pretty.T - val pretty_thm_global: theory -> thm -> Pretty.T - val pretty_thm_without_context: thm -> Pretty.T - val string_of_thm: Proof.context -> thm -> string - val string_of_thm_global: theory -> thm -> string - val string_of_thm_without_context: thm -> string - val pretty_full_theory: bool -> theory -> Pretty.T list -end; - -structure Display: DISPLAY = -struct - -(** options **) - -val show_consts = Goal_Display.show_consts; - -val show_hyps_raw = Config.declare ("show_hyps", @{here}) (fn _ => Config.Bool false); -val show_hyps = Config.bool show_hyps_raw; - -val show_tags_raw = Config.declare ("show_tags", @{here}) (fn _ => Config.Bool false); -val show_tags = Config.bool show_tags_raw; - - - -(** print thm **) - -fun pretty_tag (name, arg) = Pretty.strs [name, quote arg]; -val pretty_tags = Pretty.list "[" "]" o map pretty_tag; - -fun pretty_thm_raw ctxt {quote, show_hyps = show_hyps'} raw_th = - let - val show_tags = Config.get ctxt show_tags; - val show_hyps = Config.get ctxt show_hyps; - - val th = Thm.strip_shyps raw_th; - val {hyps, tpairs, prop, ...} = Thm.rep_thm th; - val hyps' = if show_hyps then hyps else Thm.undeclared_hyps (Context.Proof ctxt) th; - val extra_shyps = Thm.extra_shyps th; - val tags = Thm.get_tags th; - - val q = if quote then Pretty.quote else I; - val prt_term = q o Syntax.pretty_term ctxt; - - - val hlen = length extra_shyps + length hyps' + length tpairs; - val hsymbs = - if hlen = 0 then [] - else if show_hyps orelse show_hyps' then - [Pretty.brk 2, Pretty.list "[" "]" - (map (q o Goal_Display.pretty_flexpair ctxt) tpairs @ map prt_term hyps' @ - map (Syntax.pretty_sort ctxt) extra_shyps)] - else [Pretty.brk 2, Pretty.str ("[" ^ replicate_string hlen "." 
^ "]")]; - val tsymbs = - if null tags orelse not show_tags then [] - else [Pretty.brk 1, pretty_tags tags]; - in Pretty.block (prt_term prop :: (hsymbs @ tsymbs)) end; - -fun pretty_thm ctxt = pretty_thm_raw ctxt {quote = false, show_hyps = true}; -fun pretty_thm_item ctxt th = Pretty.item [pretty_thm ctxt th]; - -fun pretty_thm_global thy = - pretty_thm_raw (Syntax.init_pretty_global thy) {quote = false, show_hyps = false}; - -fun pretty_thm_without_context th = pretty_thm_global (Thm.theory_of_thm th) th; - -val string_of_thm = Pretty.string_of oo pretty_thm; -val string_of_thm_global = Pretty.string_of oo pretty_thm_global; -val string_of_thm_without_context = Pretty.string_of o pretty_thm_without_context; - - - -(** print theory **) - -(* pretty_full_theory *) - -fun pretty_full_theory verbose thy = - let - val ctxt = Syntax.init_pretty_global thy; - - fun prt_cls c = Syntax.pretty_sort ctxt [c]; - fun prt_sort S = Syntax.pretty_sort ctxt S; - fun prt_arity t (c, Ss) = Syntax.pretty_arity ctxt (t, Ss, [c]); - fun prt_typ ty = Pretty.quote (Syntax.pretty_typ ctxt ty); - val prt_typ_no_tvars = prt_typ o Logic.unvarifyT_global; - fun prt_term t = Pretty.quote (Syntax.pretty_term ctxt t); - val prt_term_no_vars = prt_term o Logic.unvarify_global; - fun prt_const (c, ty) = [Pretty.mark_str c, Pretty.str " ::", Pretty.brk 1, prt_typ_no_tvars ty]; - val prt_const' = Defs.pretty_const ctxt; - - fun pretty_classrel (c, []) = prt_cls c - | pretty_classrel (c, cs) = Pretty.block - (prt_cls c :: Pretty.str " <" :: Pretty.brk 1 :: Pretty.commas (map prt_cls cs)); - - fun pretty_default S = Pretty.block - [Pretty.str "default sort:", Pretty.brk 1, prt_sort S]; - - val tfrees = map (fn v => TFree (v, [])); - fun pretty_type syn (t, (Type.LogicalType n)) = - if syn then NONE - else SOME (prt_typ (Type (t, tfrees (Name.invent Name.context Name.aT n)))) - | pretty_type syn (t, (Type.Abbreviation (vs, U, syn'))) = - if syn <> syn' then NONE - else SOME (Pretty.block - [prt_typ (Type (t, tfrees vs)), Pretty.str " =", Pretty.brk 1, prt_typ U]) - | pretty_type syn (t, Type.Nonterminal) = - if not syn then NONE - else SOME (prt_typ (Type (t, []))); - - val pretty_arities = maps (fn (t, ars) => map (prt_arity t) ars); - - fun pretty_abbrev (c, (ty, t)) = Pretty.block - (prt_const (c, ty) @ [Pretty.str " ==", Pretty.brk 1, prt_term_no_vars t]); - - fun pretty_axm (a, t) = - Pretty.block [Pretty.mark_str a, Pretty.str ":", Pretty.brk 1, prt_term_no_vars t]; - - fun pretty_finals reds = Pretty.block - (Pretty.str "final:" :: Pretty.brk 1 :: Pretty.commas (map (prt_const' o fst) reds)); - - fun pretty_reduct (lhs, rhs) = Pretty.block - ([prt_const' lhs, Pretty.str " ->", Pretty.brk 2] @ - Pretty.commas (map prt_const' (sort_wrt #1 rhs))); - - fun pretty_restrict (const, name) = - Pretty.block ([prt_const' const, Pretty.brk 2, Pretty.str ("(from " ^ quote name ^ ")")]); - - val defs = Theory.defs_of thy; - val {restricts, reducts} = Defs.dest defs; - val tsig = Sign.tsig_of thy; - val consts = Sign.consts_of thy; - val {const_space, constants, constraints} = Consts.dest consts; - val extern_const = Name_Space.extern ctxt const_space; - val {classes, default, types, ...} = Type.rep_tsig tsig; - val (class_space, class_algebra) = classes; - val classes = Sorts.classes_of class_algebra; - val arities = Sorts.arities_of class_algebra; - - val clsses = - Name_Space.extern_entries ctxt class_space - (map (fn ((c, _), cs) => (c, Sign.minimize_sort thy cs)) (Graph.dest classes)) - |> map (apfst #1); - val tdecls = 
Name_Space.extern_table ctxt types |> map (apfst #1); - val arties = - Name_Space.extern_entries ctxt (Type.type_space tsig) (Symtab.dest arities) - |> map (apfst #1); - - fun prune_const c = not verbose andalso Consts.is_concealed consts c; - val cnsts = - Name_Space.markup_entries ctxt const_space - (filter_out (prune_const o fst) constants); - - val log_cnsts = map_filter (fn (c, (ty, NONE)) => SOME (c, ty) | _ => NONE) cnsts; - val abbrevs = map_filter (fn (c, (ty, SOME t)) => SOME (c, (ty, t)) | _ => NONE) cnsts; - val cnstrs = Name_Space.markup_entries ctxt const_space constraints; - - val axms = Name_Space.markup_table ctxt (Theory.axiom_table thy); - - val (reds0, (reds1, reds2)) = filter_out (prune_const o fst o fst) reducts - |> map (fn (lhs, rhs) => - (apfst extern_const lhs, map (apfst extern_const) (filter_out (prune_const o fst) rhs))) - |> sort_wrt (#1 o #1) - |> List.partition (null o #2) - ||> List.partition (Defs.plain_args o #2 o #1); - val rests = restricts |> map (apfst (apfst extern_const)) |> sort_wrt (#1 o #1); - in - [Pretty.strs ("names:" :: Context.display_names thy)] @ - [Pretty.big_list "classes:" (map pretty_classrel clsses), - pretty_default default, - Pretty.big_list "syntactic types:" (map_filter (pretty_type true) tdecls), - Pretty.big_list "logical types:" (map_filter (pretty_type false) tdecls), - Pretty.big_list "type arities:" (pretty_arities arties), - Pretty.big_list "logical consts:" (map (Pretty.block o prt_const) log_cnsts), - Pretty.big_list "abbreviations:" (map pretty_abbrev abbrevs), - Pretty.big_list "constraints:" (map (Pretty.block o prt_const) cnstrs), - Pretty.big_list "axioms:" (map pretty_axm axms), - Pretty.block - (Pretty.breaks (Pretty.str "oracles:" :: map Pretty.mark_str (Thm.extern_oracles ctxt))), - Pretty.big_list "definitions:" - [pretty_finals reds0, - Pretty.big_list "non-overloaded:" (map pretty_reduct reds1), - Pretty.big_list "overloaded:" (map pretty_reduct reds2), - Pretty.big_list "pattern restrictions:" (map pretty_restrict rests)]] - end; - -end; - -structure Basic_Display: BASIC_DISPLAY = Display; -open Basic_Display; diff --git a/core/Pure/drule.ML b/core/Pure/drule.ML deleted file mode 100644 index 27ccdd31..00000000 --- a/core/Pure/drule.ML +++ /dev/null @@ -1,878 +0,0 @@ -(* Title: Pure/drule.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - -Derived rules and other operations on theorems. 
-*) - -infix 0 RS RSN RL RLN MRS OF COMP INCR_COMP COMP_INCR; - -signature BASIC_DRULE = -sig - val mk_implies: cterm * cterm -> cterm - val list_implies: cterm list * cterm -> cterm - val strip_imp_prems: cterm -> cterm list - val strip_imp_concl: cterm -> cterm - val cprems_of: thm -> cterm list - val cterm_fun: (term -> term) -> (cterm -> cterm) - val ctyp_fun: (typ -> typ) -> (ctyp -> ctyp) - val forall_intr_list: cterm list -> thm -> thm - val forall_intr_vars: thm -> thm - val forall_elim_list: cterm list -> thm -> thm - val gen_all: thm -> thm - val lift_all: cterm -> thm -> thm - val implies_elim_list: thm -> thm list -> thm - val implies_intr_list: cterm list -> thm -> thm - val instantiate_normalize: (ctyp * ctyp) list * (cterm * cterm) list -> thm -> thm - val zero_var_indexes_list: thm list -> thm list - val zero_var_indexes: thm -> thm - val implies_intr_hyps: thm -> thm - val rotate_prems: int -> thm -> thm - val rearrange_prems: int list -> thm -> thm - val RSN: thm * (int * thm) -> thm - val RS: thm * thm -> thm - val RLN: thm list * (int * thm list) -> thm list - val RL: thm list * thm list -> thm list - val MRS: thm list * thm -> thm - val OF: thm * thm list -> thm - val COMP: thm * thm -> thm - val INCR_COMP: thm * thm -> thm - val COMP_INCR: thm * thm -> thm - val cterm_instantiate: (cterm * cterm) list -> thm -> thm - val size_of_thm: thm -> int - val reflexive_thm: thm - val symmetric_thm: thm - val transitive_thm: thm - val extensional: thm -> thm - val asm_rl: thm - val cut_rl: thm - val revcut_rl: thm - val thin_rl: thm - val instantiate': ctyp option list -> cterm option list -> thm -> thm -end; - -signature DRULE = -sig - include BASIC_DRULE - val generalize: string list * string list -> thm -> thm - val list_comb: cterm * cterm list -> cterm - val strip_comb: cterm -> cterm * cterm list - val strip_type: ctyp -> ctyp list * ctyp - val beta_conv: cterm -> cterm -> cterm - val types_sorts: thm -> (indexname-> typ option) * (indexname-> sort option) - val flexflex_unique: thm -> thm - val export_without_context: thm -> thm - val export_without_context_open: thm -> thm - val store_thm: binding -> thm -> thm - val store_standard_thm: binding -> thm -> thm - val store_thm_open: binding -> thm -> thm - val store_standard_thm_open: binding -> thm -> thm - val multi_resolve: thm list -> thm -> thm Seq.seq - val multi_resolves: thm list -> thm list -> thm Seq.seq - val compose: thm * int * thm -> thm - val equals_cong: thm - val imp_cong: thm - val swap_prems_eq: thm - val imp_cong_rule: thm -> thm -> thm - val arg_cong_rule: cterm -> thm -> thm - val binop_cong_rule: cterm -> thm -> thm -> thm - val fun_cong_rule: thm -> cterm -> thm - val beta_eta_conversion: cterm -> thm - val eta_long_conversion: cterm -> thm - val eta_contraction_rule: thm -> thm - val norm_hhf_eq: thm - val norm_hhf_eqs: thm list - val is_norm_hhf: term -> bool - val norm_hhf: theory -> term -> term - val norm_hhf_cterm: cterm -> cterm - val protect: cterm -> cterm - val protectI: thm - val protectD: thm - val protect_cong: thm - val implies_intr_protected: cterm list -> thm -> thm - val termI: thm - val mk_term: cterm -> thm - val dest_term: thm -> cterm - val cterm_rule: (thm -> thm) -> cterm -> cterm - val dummy_thm: thm - val sort_constraintI: thm - val sort_constraint_eq: thm - val with_subgoal: int -> (thm -> thm) -> thm -> thm - val comp_no_flatten: thm * int -> int -> thm -> thm - val rename_bvars: (string * string) list -> thm -> thm - val rename_bvars': string option list -> thm -> thm - 
val incr_indexes: thm -> thm -> thm - val incr_indexes2: thm -> thm -> thm -> thm - val triv_forall_equality: thm - val distinct_prems_rl: thm - val equal_intr_rule: thm - val equal_elim_rule1: thm - val equal_elim_rule2: thm - val remdups_rl: thm - val abs_def: thm -> thm -end; - -structure Drule: DRULE = -struct - - -(** some cterm->cterm operations: faster than calling cterm_of! **) - -(* A1==>...An==>B goes to [A1,...,An], where B is not an implication *) -fun strip_imp_prems ct = - let val (cA, cB) = Thm.dest_implies ct - in cA :: strip_imp_prems cB end - handle TERM _ => []; - -(* A1==>...An==>B goes to B, where B is not an implication *) -fun strip_imp_concl ct = - (case Thm.term_of ct of - Const ("Pure.imp", _) $ _ $ _ => strip_imp_concl (Thm.dest_arg ct) - | _ => ct); - -(*The premises of a theorem, as a cterm list*) -val cprems_of = strip_imp_prems o cprop_of; - -fun cterm_fun f ct = Thm.cterm_of (Thm.theory_of_cterm ct) (f (Thm.term_of ct)); -fun ctyp_fun f cT = Thm.ctyp_of (Thm.theory_of_ctyp cT) (f (Thm.typ_of cT)); - -fun certify t = Thm.cterm_of (Context.the_theory (Context.the_thread_data ())) t; - -val implies = certify Logic.implies; -fun mk_implies (A, B) = Thm.apply (Thm.apply implies A) B; - -(*cterm version of list_implies: [A1,...,An], B goes to [|A1;==>;An|]==>B *) -fun list_implies([], B) = B - | list_implies(A::AS, B) = mk_implies (A, list_implies(AS,B)); - -(*cterm version of list_comb: maps (f, [t1,...,tn]) to f(t1,...,tn) *) -fun list_comb (f, []) = f - | list_comb (f, t::ts) = list_comb (Thm.apply f t, ts); - -(*cterm version of strip_comb: maps f(t1,...,tn) to (f, [t1,...,tn]) *) -fun strip_comb ct = - let - fun stripc (p as (ct, cts)) = - let val (ct1, ct2) = Thm.dest_comb ct - in stripc (ct1, ct2 :: cts) end handle CTERM _ => p - in stripc (ct, []) end; - -(* cterm version of strip_type: maps [T1,...,Tn]--->T to ([T1,T2,...,Tn], T) *) -fun strip_type cT = (case Thm.typ_of cT of - Type ("fun", _) => - let - val [cT1, cT2] = Thm.dest_ctyp cT; - val (cTs, cT') = strip_type cT2 - in (cT1 :: cTs, cT') end - | _ => ([], cT)); - -(*Beta-conversion for cterms, where x is an abstraction. Simply returns the rhs - of the meta-equality returned by the beta_conversion rule.*) -fun beta_conv x y = - Thm.dest_arg (cprop_of (Thm.beta_conversion false (Thm.apply x y))); - - - -(*** Find the type (sort) associated with a (T)Var or (T)Free in a term - Used for establishing default types (of variables) and sorts (of - type variables) when reading another term. - Index -1 indicates that a (T)Free rather than a (T)Var is wanted. 
-***) - -fun types_sorts thm = - let - val vars = Thm.fold_terms Term.add_vars thm []; - val frees = Thm.fold_terms Term.add_frees thm []; - val tvars = Thm.fold_terms Term.add_tvars thm []; - val tfrees = Thm.fold_terms Term.add_tfrees thm []; - fun types (a, i) = - if i < 0 then AList.lookup (op =) frees a else AList.lookup (op =) vars (a, i); - fun sorts (a, i) = - if i < 0 then AList.lookup (op =) tfrees a else AList.lookup (op =) tvars (a, i); - in (types, sorts) end; - - - - -(** Standardization of rules **) - -(*Generalization over a list of variables*) -val forall_intr_list = fold_rev Thm.forall_intr; - -(*Generalization over Vars -- canonical order*) -fun forall_intr_vars th = - fold Thm.forall_intr - (map (Thm.cterm_of (Thm.theory_of_thm th) o Var) (Thm.fold_terms Term.add_vars th [])) th; - -fun outer_params t = - let val vs = Term.strip_all_vars t - in Name.variant_list [] (map (Name.clean o #1) vs) ~~ map #2 vs end; - -(*generalize outermost parameters*) -fun gen_all th = - let - val thy = Thm.theory_of_thm th; - val {prop, maxidx, ...} = Thm.rep_thm th; - val cert = Thm.cterm_of thy; - fun elim (x, T) = Thm.forall_elim (cert (Var ((x, maxidx + 1), T))); - in fold elim (outer_params prop) th end; - -(*lift vars wrt. outermost goal parameters - -- reverses the effect of gen_all modulo higher-order unification*) -fun lift_all goal th = - let - val thy = Theory.merge (Thm.theory_of_cterm goal, Thm.theory_of_thm th); - val cert = Thm.cterm_of thy; - val maxidx = Thm.maxidx_of th; - val ps = outer_params (Thm.term_of goal) - |> map (fn (x, T) => Var ((x, maxidx + 1), Logic.incr_tvar (maxidx + 1) T)); - val Ts = map Term.fastype_of ps; - val inst = Thm.fold_terms Term.add_vars th [] |> map (fn (xi, T) => - (cert (Var (xi, T)), cert (Term.list_comb (Var (xi, Ts ---> T), ps)))); - in - th |> Thm.instantiate ([], inst) - |> fold_rev (Thm.forall_intr o cert) ps - end; - -(*direct generalization*) -fun generalize names th = Thm.generalize names (Thm.maxidx_of th + 1) th; - -(*specialization over a list of cterms*) -val forall_elim_list = fold Thm.forall_elim; - -(*maps A1,...,An |- B to [| A1;...;An |] ==> B*) -val implies_intr_list = fold_rev Thm.implies_intr; - -(*maps [| A1;...;An |] ==> B and [A1,...,An] to B*) -fun implies_elim_list impth ths = fold Thm.elim_implies ths impth; - -(*Reset Var indexes to zero, renaming to preserve distinctness*) -fun zero_var_indexes_list [] = [] - | zero_var_indexes_list ths = - let - val thy = Theory.merge_list (map Thm.theory_of_thm ths); - val certT = Thm.ctyp_of thy and cert = Thm.cterm_of thy; - val (instT, inst) = Term_Subst.zero_var_indexes_inst (map Thm.full_prop_of ths); - val cinstT = map (fn (v, T) => (certT (TVar v), certT T)) instT; - val cinst = map (fn (v, t) => (cert (Var v), cert t)) inst; - in map (Thm.adjust_maxidx_thm ~1 o Thm.instantiate (cinstT, cinst)) ths end; - -val zero_var_indexes = singleton zero_var_indexes_list; - - -(** Standard form of object-rule: no hypotheses, flexflex constraints, - Frees, or outer quantifiers; all generality expressed by Vars of index 0.**) - -(*Discharge all hypotheses.*) -fun implies_intr_hyps th = - fold Thm.implies_intr (#hyps (Thm.crep_thm th)) th; - -(*Squash a theorem's flexflex constraints provided it can be done uniquely. 
- This step can lose information.*) -fun flexflex_unique th = - if null (Thm.tpairs_of th) then th else - case distinct Thm.eq_thm (Seq.list_of (Thm.flexflex_rule th)) of - [th] => th - | [] => raise THM("flexflex_unique: impossible constraints", 0, [th]) - | _ => raise THM("flexflex_unique: multiple unifiers", 0, [th]); - - -(* old-style export without context *) - -val export_without_context_open = - implies_intr_hyps - #> Thm.forall_intr_frees - #> `Thm.maxidx_of - #-> (fn maxidx => - Thm.forall_elim_vars (maxidx + 1) - #> Thm.strip_shyps - #> zero_var_indexes - #> Thm.varifyT_global); - -val export_without_context = - flexflex_unique - #> export_without_context_open - #> Thm.close_derivation; - - -(*Rotates a rule's premises to the left by k*) -fun rotate_prems 0 = I - | rotate_prems k = Thm.permute_prems 0 k; - -fun with_subgoal i f = rotate_prems (i - 1) #> f #> rotate_prems (1 - i); - -(*Permute prems, where the i-th position in the argument list (counting from 0) - gives the position within the original thm to be transferred to position i. - Any remaining trailing positions are left unchanged.*) -val rearrange_prems = - let - fun rearr new [] thm = thm - | rearr new (p :: ps) thm = - rearr (new + 1) - (map (fn q => if new <= q andalso q < p then q + 1 else q) ps) - (Thm.permute_prems (new + 1) (new - p) (Thm.permute_prems new (p - new) thm)) - in rearr 0 end; - - -(*Resolution: multiple arguments, multiple results*) -local - fun res th i rule = - Thm.biresolution false [(false, th)] i rule handle THM _ => Seq.empty; - - fun multi_res _ [] rule = Seq.single rule - | multi_res i (th :: ths) rule = Seq.maps (res th i) (multi_res (i + 1) ths rule); -in - val multi_resolve = multi_res 1; - fun multi_resolves facts rules = Seq.maps (multi_resolve facts) (Seq.of_list rules); -end; - -(*Resolution: exactly one resolvent must be produced*) -fun tha RSN (i, thb) = - (case Seq.chop 2 (Thm.biresolution false [(false, tha)] i thb) of - ([th], _) => th - | ([], _) => raise THM ("RSN: no unifiers", i, [tha, thb]) - | _ => raise THM ("RSN: multiple unifiers", i, [tha, thb])); - -(*Resolution: P==>Q, Q==>R gives P==>R*) -fun tha RS thb = tha RSN (1,thb); - -(*For joining lists of rules*) -fun thas RLN (i, thbs) = - let val resolve = Thm.biresolution false (map (pair false) thas) i - fun resb thb = Seq.list_of (resolve thb) handle THM _ => [] - in maps resb thbs end; - -fun thas RL thbs = thas RLN (1, thbs); - -(*Isar-style multi-resolution*) -fun bottom_rl OF rls = - (case Seq.chop 2 (multi_resolve rls bottom_rl) of - ([th], _) => th - | ([], _) => raise THM ("OF: no unifiers", 0, bottom_rl :: rls) - | _ => raise THM ("OF: multiple unifiers", 0, bottom_rl :: rls)); - -(*Resolve a list of rules against bottom_rl from right to left; - makes proof trees*) -fun rls MRS bottom_rl = bottom_rl OF rls; - -(*compose Q and [...,Qi,Q(i+1),...]==>R to [...,Q(i+1),...]==>R - with no lifting or renaming! 
Q may contain ==> or meta-quants - ALWAYS deletes premise i *) -fun compose (tha, i, thb) = - Thm.bicompose {flatten = true, match = false, incremented = false} (false, tha, 0) i thb - |> Seq.list_of |> distinct Thm.eq_thm - |> (fn [th] => th | _ => raise THM ("compose: unique result expected", i, [tha, thb])); - - -(** theorem equality **) - -(*Useful "distance" function for BEST_FIRST*) -val size_of_thm = size_of_term o Thm.full_prop_of; - - - -(*** Meta-Rewriting Rules ***) - -val read_prop = certify o Simple_Syntax.read_prop; - -fun store_thm name th = - Context.>>> (Context.map_theory_result (Global_Theory.store_thm (name, th))); - -fun store_thm_open name th = - Context.>>> (Context.map_theory_result (Global_Theory.store_thm_open (name, th))); - -fun store_standard_thm name th = store_thm name (export_without_context th); -fun store_standard_thm_open name thm = store_thm_open name (export_without_context_open thm); - -val reflexive_thm = - let val cx = certify (Var(("x",0),TVar(("'a",0),[]))) - in store_standard_thm_open (Binding.make ("reflexive", @{here})) (Thm.reflexive cx) end; - -val symmetric_thm = - let - val xy = read_prop "x::'a == y::'a"; - val thm = Thm.implies_intr xy (Thm.symmetric (Thm.assume xy)); - in store_standard_thm_open (Binding.make ("symmetric", @{here})) thm end; - -val transitive_thm = - let - val xy = read_prop "x::'a == y::'a"; - val yz = read_prop "y::'a == z::'a"; - val xythm = Thm.assume xy; - val yzthm = Thm.assume yz; - val thm = Thm.implies_intr yz (Thm.transitive xythm yzthm); - in store_standard_thm_open (Binding.make ("transitive", @{here})) thm end; - -fun extensional eq = - let val eq' = - Thm.abstract_rule "x" (Thm.dest_arg (fst (Thm.dest_equals (cprop_of eq)))) eq - in Thm.equal_elim (Thm.eta_conversion (cprop_of eq')) eq' end; - -val equals_cong = - store_standard_thm_open (Binding.make ("equals_cong", @{here})) - (Thm.reflexive (read_prop "x::'a == y::'a")); - -val imp_cong = - let - val ABC = read_prop "A ==> B::prop == C::prop" - val AB = read_prop "A ==> B" - val AC = read_prop "A ==> C" - val A = read_prop "A" - in - store_standard_thm_open (Binding.make ("imp_cong", @{here})) - (Thm.implies_intr ABC (Thm.equal_intr - (Thm.implies_intr AB (Thm.implies_intr A - (Thm.equal_elim (Thm.implies_elim (Thm.assume ABC) (Thm.assume A)) - (Thm.implies_elim (Thm.assume AB) (Thm.assume A))))) - (Thm.implies_intr AC (Thm.implies_intr A - (Thm.equal_elim (Thm.symmetric (Thm.implies_elim (Thm.assume ABC) (Thm.assume A))) - (Thm.implies_elim (Thm.assume AC) (Thm.assume A))))))) - end; - -val swap_prems_eq = - let - val ABC = read_prop "A ==> B ==> C" - val BAC = read_prop "B ==> A ==> C" - val A = read_prop "A" - val B = read_prop "B" - in - store_standard_thm_open (Binding.make ("swap_prems_eq", @{here})) - (Thm.equal_intr - (Thm.implies_intr ABC (Thm.implies_intr B (Thm.implies_intr A - (Thm.implies_elim (Thm.implies_elim (Thm.assume ABC) (Thm.assume A)) (Thm.assume B))))) - (Thm.implies_intr BAC (Thm.implies_intr A (Thm.implies_intr B - (Thm.implies_elim (Thm.implies_elim (Thm.assume BAC) (Thm.assume B)) (Thm.assume A)))))) - end; - -val imp_cong_rule = Thm.combination o Thm.combination (Thm.reflexive implies); - -fun arg_cong_rule ct th = Thm.combination (Thm.reflexive ct) th; (*AP_TERM in LCF/HOL*) -fun fun_cong_rule th ct = Thm.combination th (Thm.reflexive ct); (*AP_THM in LCF/HOL*) -fun binop_cong_rule ct th1 th2 = Thm.combination (arg_cong_rule ct th1) th2; - -local - val dest_eq = Thm.dest_equals o cprop_of - val rhs_of = snd o dest_eq -in -fun 
beta_eta_conversion t = - let val thm = Thm.beta_conversion true t - in Thm.transitive thm (Thm.eta_conversion (rhs_of thm)) end -end; - -fun eta_long_conversion ct = - Thm.transitive - (beta_eta_conversion ct) - (Thm.symmetric (beta_eta_conversion (cterm_fun (Envir.eta_long []) ct))); - -(*Contract all eta-redexes in the theorem, lest they give rise to needless abstractions*) -fun eta_contraction_rule th = - Thm.equal_elim (Thm.eta_conversion (cprop_of th)) th; - - -(* abs_def *) - -(* - f ?x1 ... ?xn == u - -------------------- - f == %x1 ... xn. u -*) - -local - -fun contract_lhs th = - Thm.transitive (Thm.symmetric (beta_eta_conversion - (fst (Thm.dest_equals (cprop_of th))))) th; - -fun var_args ct = - (case try Thm.dest_comb ct of - SOME (f, arg) => - (case Thm.term_of arg of - Var ((x, _), _) => update (eq_snd (op aconvc)) (x, arg) (var_args f) - | _ => []) - | NONE => []); - -in - -fun abs_def th = - let - val th' = contract_lhs th; - val args = var_args (Thm.lhs_of th'); - in contract_lhs (fold (uncurry Thm.abstract_rule) args th') end; - -end; - - - -(*** Some useful meta-theorems ***) - -(*The rule V/V, obtains assumption solving for eresolve_tac*) -val asm_rl = - store_standard_thm_open (Binding.make ("asm_rl", @{here})) - (Thm.trivial (read_prop "?psi")); - -(*Meta-level cut rule: [| V==>W; V |] ==> W *) -val cut_rl = - store_standard_thm_open (Binding.make ("cut_rl", @{here})) - (Thm.trivial (read_prop "?psi ==> ?theta")); - -(*Generalized elim rule for one conclusion; cut_rl with reversed premises: - [| PROP V; PROP V ==> PROP W |] ==> PROP W *) -val revcut_rl = - let - val V = read_prop "V"; - val VW = read_prop "V ==> W"; - in - store_standard_thm_open (Binding.make ("revcut_rl", @{here})) - (Thm.implies_intr V - (Thm.implies_intr VW (Thm.implies_elim (Thm.assume VW) (Thm.assume V)))) - end; - -(*for deleting an unwanted assumption*) -val thin_rl = - let - val V = read_prop "V"; - val W = read_prop "W"; - val thm = Thm.implies_intr V (Thm.implies_intr W (Thm.assume W)); - in store_standard_thm_open (Binding.make ("thin_rl", @{here})) thm end; - -(* (!!x. PROP ?V) == PROP ?V Allows removal of redundant parameters*) -val triv_forall_equality = - let - val V = read_prop "V"; - val QV = read_prop "!!x::'a. V"; - val x = certify (Free ("x", Term.aT [])); - in - store_standard_thm_open (Binding.make ("triv_forall_equality", @{here})) - (Thm.equal_intr (Thm.implies_intr QV (Thm.forall_elim x (Thm.assume QV))) - (Thm.implies_intr V (Thm.forall_intr x (Thm.assume V)))) - end; - -(* (PROP ?Phi ==> PROP ?Phi ==> PROP ?Psi) ==> - (PROP ?Phi ==> PROP ?Psi) -*) -val distinct_prems_rl = - let - val AAB = read_prop "Phi ==> Phi ==> Psi"; - val A = read_prop "Phi"; - in - store_standard_thm_open (Binding.make ("distinct_prems_rl", @{here})) - (implies_intr_list [AAB, A] - (implies_elim_list (Thm.assume AAB) [Thm.assume A, Thm.assume A])) - end; - -(* [| PROP ?phi ==> PROP ?psi; PROP ?psi ==> PROP ?phi |] - ==> PROP ?phi == PROP ?psi - Introduction rule for == as a meta-theorem. 
-*) -val equal_intr_rule = - let - val PQ = read_prop "phi ==> psi"; - val QP = read_prop "psi ==> phi"; - in - store_standard_thm_open (Binding.make ("equal_intr_rule", @{here})) - (Thm.implies_intr PQ - (Thm.implies_intr QP (Thm.equal_intr (Thm.assume PQ) (Thm.assume QP)))) - end; - -(* PROP ?phi == PROP ?psi ==> PROP ?phi ==> PROP ?psi *) -val equal_elim_rule1 = - let - val eq = read_prop "phi::prop == psi::prop"; - val P = read_prop "phi"; - in - store_standard_thm_open (Binding.make ("equal_elim_rule1", @{here})) - (Thm.equal_elim (Thm.assume eq) (Thm.assume P) |> implies_intr_list [eq, P]) - end; - -(* PROP ?psi == PROP ?phi ==> PROP ?phi ==> PROP ?psi *) -val equal_elim_rule2 = - store_standard_thm_open (Binding.make ("equal_elim_rule2", @{here})) - (symmetric_thm RS equal_elim_rule1); - -(* PROP ?phi ==> PROP ?phi ==> PROP ?psi ==> PROP ?psi *) -val remdups_rl = - let - val P = read_prop "phi"; - val Q = read_prop "psi"; - val thm = implies_intr_list [P, P, Q] (Thm.assume Q); - in store_standard_thm_open (Binding.make ("remdups_rl", @{here})) thm end; - - - -(** embedded terms and types **) - -local - val A = certify (Free ("A", propT)); - val axiom = Thm.unvarify_global o Thm.axiom (Context.the_theory (Context.the_thread_data ())); - val prop_def = axiom "Pure.prop_def"; - val term_def = axiom "Pure.term_def"; - val sort_constraint_def = axiom "Pure.sort_constraint_def"; - val C = Thm.lhs_of sort_constraint_def; - val T = Thm.dest_arg C; - val CA = mk_implies (C, A); -in - -(* protect *) - -val protect = Thm.apply (certify Logic.protectC); - -val protectI = - store_standard_thm (Binding.conceal (Binding.make ("protectI", @{here}))) - (Thm.equal_elim (Thm.symmetric prop_def) (Thm.assume A)); - -val protectD = - store_standard_thm (Binding.conceal (Binding.make ("protectD", @{here}))) - (Thm.equal_elim prop_def (Thm.assume (protect A))); - -val protect_cong = - store_standard_thm_open (Binding.make ("protect_cong", @{here})) - (Thm.reflexive (protect A)); - -fun implies_intr_protected asms th = - let val asms' = map protect asms in - implies_elim_list - (implies_intr_list asms th) - (map (fn asm' => Thm.assume asm' RS protectD) asms') - |> implies_intr_list asms' - end; - - -(* term *) - -val termI = - store_standard_thm (Binding.conceal (Binding.make ("termI", @{here}))) - (Thm.equal_elim (Thm.symmetric term_def) (Thm.forall_intr A (Thm.trivial A))); - -fun mk_term ct = - let - val thy = Thm.theory_of_cterm ct; - val cert = Thm.cterm_of thy; - val certT = Thm.ctyp_of thy; - val T = Thm.typ_of (Thm.ctyp_of_term ct); - val a = certT (TVar (("'a", 0), [])); - val x = cert (Var (("x", 0), T)); - in Thm.instantiate ([(a, certT T)], [(x, ct)]) termI end; - -fun dest_term th = - let val cprop = strip_imp_concl (Thm.cprop_of th) in - if can Logic.dest_term (Thm.term_of cprop) then - Thm.dest_arg cprop - else raise THM ("dest_term", 0, [th]) - end; - -fun cterm_rule f = dest_term o f o mk_term; - -val dummy_thm = mk_term (certify Term.dummy_prop); - - -(* sort_constraint *) - -val sort_constraintI = - store_standard_thm (Binding.conceal (Binding.make ("sort_constraintI", @{here}))) - (Thm.equal_elim (Thm.symmetric sort_constraint_def) (mk_term T)); - -val sort_constraint_eq = - store_standard_thm (Binding.conceal (Binding.make ("sort_constraint_eq", @{here}))) - (Thm.equal_intr - (Thm.implies_intr CA (Thm.implies_elim (Thm.assume CA) - (Thm.unvarify_global sort_constraintI))) - (implies_intr_list [A, C] (Thm.assume A))); - -end; - - -(* HHF normalization *) - -(* (PROP ?phi ==> (!!x. 
PROP ?psi x)) == (!!x. PROP ?phi ==> PROP ?psi x) *) -val norm_hhf_eq = - let - val aT = TFree ("'a", []); - val x = Free ("x", aT); - val phi = Free ("phi", propT); - val psi = Free ("psi", aT --> propT); - - val cx = certify x; - val cphi = certify phi; - val lhs = certify (Logic.mk_implies (phi, Logic.all x (psi $ x))); - val rhs = certify (Logic.all x (Logic.mk_implies (phi, psi $ x))); - in - Thm.equal_intr - (Thm.implies_elim (Thm.assume lhs) (Thm.assume cphi) - |> Thm.forall_elim cx - |> Thm.implies_intr cphi - |> Thm.forall_intr cx - |> Thm.implies_intr lhs) - (Thm.implies_elim - (Thm.assume rhs |> Thm.forall_elim cx) (Thm.assume cphi) - |> Thm.forall_intr cx - |> Thm.implies_intr cphi - |> Thm.implies_intr rhs) - |> store_standard_thm_open (Binding.make ("norm_hhf_eq", @{here})) - end; - -val norm_hhf_prop = Logic.dest_equals (Thm.prop_of norm_hhf_eq); -val norm_hhf_eqs = [norm_hhf_eq, sort_constraint_eq]; - -fun is_norm_hhf (Const ("Pure.sort_constraint", _)) = false - | is_norm_hhf (Const ("Pure.imp", _) $ _ $ (Const ("Pure.all", _) $ _)) = false - | is_norm_hhf (Abs _ $ _) = false - | is_norm_hhf (t $ u) = is_norm_hhf t andalso is_norm_hhf u - | is_norm_hhf (Abs (_, _, t)) = is_norm_hhf t - | is_norm_hhf _ = true; - -fun norm_hhf thy t = - if is_norm_hhf t then t - else Pattern.rewrite_term thy [norm_hhf_prop] [] t; - -fun norm_hhf_cterm ct = - if is_norm_hhf (Thm.term_of ct) then ct - else cterm_fun (Pattern.rewrite_term (Thm.theory_of_cterm ct) [norm_hhf_prop] []) ct; - - -(* var indexes *) - -fun incr_indexes th = Thm.incr_indexes (Thm.maxidx_of th + 1); - -fun incr_indexes2 th1 th2 = - Thm.incr_indexes (Int.max (Thm.maxidx_of th1, Thm.maxidx_of th2) + 1); - -local - -(*compose Q and [Q1,Q2,...,Qk]==>R to [Q2,...,Qk]==>R getting unique result*) -fun comp incremented th1 th2 = - Thm.bicompose {flatten = true, match = false, incremented = incremented} (false, th1, 0) 1 th2 - |> Seq.list_of |> distinct Thm.eq_thm - |> (fn [th] => th | _ => raise THM ("COMP", 1, [th1, th2])); - -in - -fun th1 COMP th2 = comp false th1 th2; -fun th1 INCR_COMP th2 = comp true (incr_indexes th2 th1) th2; -fun th1 COMP_INCR th2 = comp true th1 (incr_indexes th1 th2); - -end; - -fun comp_no_flatten (th, n) i rule = - (case distinct Thm.eq_thm (Seq.list_of - (Thm.bicompose {flatten = false, match = false, incremented = true} - (false, th, n) i (incr_indexes th rule))) of - [th'] => th' - | [] => raise THM ("comp_no_flatten", i, [th, rule]) - | _ => raise THM ("comp_no_flatten: unique result expected", i, [th, rule])); - - - -(** variations on Thm.instantiate **) - -fun instantiate_normalize instpair th = - Thm.adjust_maxidx_thm ~1 (Thm.instantiate instpair th COMP_INCR asm_rl); - -(*Left-to-right replacements: tpairs = [..., (vi, ti), ...]. 
- Instantiates distinct Vars by terms, inferring type instantiations.*) -local - fun add_types (ct, cu) (thy, tye, maxidx) = - let - val {t, T, maxidx = maxt, ...} = Thm.rep_cterm ct; - val {t = u, T = U, maxidx = maxu, ...} = Thm.rep_cterm cu; - val maxi = Int.max (maxidx, Int.max (maxt, maxu)); - val thy' = Theory.merge (thy, Theory.merge (Thm.theory_of_cterm ct, Thm.theory_of_cterm cu)); - val (tye', maxi') = Sign.typ_unify thy' (T, U) (tye, maxi) - handle Type.TUNIFY => raise TYPE ("Ill-typed instantiation:\nType\n" ^ - Syntax.string_of_typ_global thy' (Envir.norm_type tye T) ^ - "\nof variable " ^ - Syntax.string_of_term_global thy' (Term.map_types (Envir.norm_type tye) t) ^ - "\ncannot be unified with type\n" ^ - Syntax.string_of_typ_global thy' (Envir.norm_type tye U) ^ "\nof term " ^ - Syntax.string_of_term_global thy' (Term.map_types (Envir.norm_type tye) u), - [T, U], [t, u]) - in (thy', tye', maxi') end; -in - -fun cterm_instantiate [] th = th - | cterm_instantiate ctpairs th = - let - val (thy, tye, _) = fold_rev add_types ctpairs (Thm.theory_of_thm th, Vartab.empty, 0); - val certT = ctyp_of thy; - val instT = - Vartab.fold (fn (xi, (S, T)) => - cons (certT (TVar (xi, S)), certT (Envir.norm_type tye T))) tye []; - val inst = map (pairself (Thm.instantiate_cterm (instT, []))) ctpairs; - in instantiate_normalize (instT, inst) th end - handle TERM (msg, _) => raise THM (msg, 0, [th]) - | TYPE (msg, _, _) => raise THM (msg, 0, [th]); -end; - - -(* instantiate by left-to-right occurrence of variables *) - -fun instantiate' cTs cts thm = - let - fun err msg = - raise TYPE ("instantiate': " ^ msg, - map_filter (Option.map Thm.typ_of) cTs, - map_filter (Option.map Thm.term_of) cts); - - fun inst_of (v, ct) = - (Thm.cterm_of (Thm.theory_of_cterm ct) (Var v), ct) - handle TYPE (msg, _, _) => err msg; - - fun tyinst_of (v, cT) = - (Thm.ctyp_of (Thm.theory_of_ctyp cT) (TVar v), cT) - handle TYPE (msg, _, _) => err msg; - - fun zip_vars xs ys = - zip_options xs ys handle ListPair.UnequalLengths => - err "more instantiations than variables in thm"; - - (*instantiate types first!*) - val thm' = - if forall is_none cTs then thm - else Thm.instantiate - (map tyinst_of (zip_vars (rev (Thm.fold_terms Term.add_tvars thm [])) cTs), []) thm; - val thm'' = - if forall is_none cts then thm' - else Thm.instantiate - ([], map inst_of (zip_vars (rev (Thm.fold_terms Term.add_vars thm' [])) cts)) thm'; - in thm'' end; - - - -(** renaming of bound variables **) - -(* replace bound variables x_i in thm by y_i *) -(* where vs = [(x_1, y_1), ..., (x_n, y_n)] *) - -fun rename_bvars [] thm = thm - | rename_bvars vs thm = - let - val cert = Thm.cterm_of (Thm.theory_of_thm thm); - fun ren (Abs (x, T, t)) = Abs (AList.lookup (op =) vs x |> the_default x, T, ren t) - | ren (t $ u) = ren t $ ren u - | ren t = t; - in Thm.equal_elim (Thm.reflexive (cert (ren (Thm.prop_of thm)))) thm end; - - -(* renaming in left-to-right order *) - -fun rename_bvars' xs thm = - let - val cert = Thm.cterm_of (Thm.theory_of_thm thm); - val prop = Thm.prop_of thm; - fun rename [] t = ([], t) - | rename (x' :: xs) (Abs (x, T, t)) = - let val (xs', t') = rename xs t - in (xs', Abs (the_default x x', T, t')) end - | rename xs (t $ u) = - let - val (xs', t') = rename xs t; - val (xs'', u') = rename xs' u - in (xs'', t' $ u') end - | rename xs t = (xs, t); - in case rename xs prop of - ([], prop') => Thm.equal_elim (Thm.reflexive (cert prop')) thm - | _ => error "More names than abstractions in theorem" - end; - -end; - -structure 
Basic_Drule: BASIC_DRULE = Drule; -open Basic_Drule; diff --git a/core/Pure/envir.ML b/core/Pure/envir.ML deleted file mode 100644 index b805c1cf..00000000 --- a/core/Pure/envir.ML +++ /dev/null @@ -1,422 +0,0 @@ -(* Title: Pure/envir.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - -Free-form environments. The type of a term variable / sort of a type variable is -part of its name. The lookup function must apply type substitutions, -since they may change the identity of a variable. -*) - -signature ENVIR = -sig - type tenv = (typ * term) Vartab.table - datatype env = Envir of {maxidx: int, tenv: tenv, tyenv: Type.tyenv} - val maxidx_of: env -> int - val term_env: env -> tenv - val type_env: env -> Type.tyenv - val is_empty: env -> bool - val empty: int -> env - val merge: env * env -> env - val insert_sorts: env -> sort list -> sort list - val genvars: string -> env * typ list -> env * term list - val genvar: string -> env * typ -> env * term - val lookup1: tenv -> indexname * typ -> term option - val lookup: env -> indexname * typ -> term option - val update: (indexname * typ) * term -> env -> env - val above: env -> int -> bool - val vupdate: (indexname * typ) * term -> env -> env - val norm_type_same: Type.tyenv -> typ Same.operation - val norm_types_same: Type.tyenv -> typ list Same.operation - val norm_type: Type.tyenv -> typ -> typ - val norm_term_same: env -> term Same.operation - val norm_term: env -> term -> term - val beta_norm: term -> term - val head_norm: env -> term -> term - val eta_long: typ list -> term -> term - val eta_contract: term -> term - val beta_eta_contract: term -> term - val aeconv: term * term -> bool - val body_type: env -> typ -> typ - val binder_types: env -> typ -> typ list - val strip_type: env -> typ -> typ list * typ - val fastype: env -> typ list -> term -> typ - val subst_type_same: Type.tyenv -> typ Same.operation - val subst_term_types_same: Type.tyenv -> term Same.operation - val subst_term_same: Type.tyenv * tenv -> term Same.operation - val subst_type: Type.tyenv -> typ -> typ - val subst_term_types: Type.tyenv -> term -> term - val subst_term: Type.tyenv * tenv -> term -> term - val expand_atom: typ -> typ * term -> term - val expand_term: (term -> (typ * term) option) -> term -> term - val expand_term_frees: ((string * typ) * term) list -> term -> term -end; - -structure Envir: ENVIR = -struct - -(** datatype env **) - -(*Updating can destroy environment in 2 ways! - (1) variables out of range - (2) circular assignments -*) - -type tenv = (typ * term) Vartab.table; - -datatype env = Envir of - {maxidx: int, (*upper bound of maximum index of vars*) - tenv: tenv, (*assignments to Vars*) - tyenv: Type.tyenv}; (*assignments to TVars*) - -fun make_env (maxidx, tenv, tyenv) = - Envir {maxidx = maxidx, tenv = tenv, tyenv = tyenv}; - -fun maxidx_of (Envir {maxidx, ...}) = maxidx; -fun term_env (Envir {tenv, ...}) = tenv; -fun type_env (Envir {tyenv, ...}) = tyenv; - -fun is_empty env = - Vartab.is_empty (term_env env) andalso - Vartab.is_empty (type_env env); - - -(* build env *) - -fun empty maxidx = make_env (maxidx, Vartab.empty, Vartab.empty); - -fun merge - (Envir {maxidx = maxidx1, tenv = tenv1, tyenv = tyenv1}, - Envir {maxidx = maxidx2, tenv = tenv2, tyenv = tyenv2}) = - make_env (Int.max (maxidx1, maxidx2), - Vartab.merge (op =) (tenv1, tenv2), - Vartab.merge (op =) (tyenv1, tyenv2)); - - -(*NB: type unification may invent new sorts*) (* FIXME tenv!? 
*) -val insert_sorts = Vartab.fold (fn (_, (_, T)) => Sorts.insert_typ T) o type_env; - -(*Generate a list of distinct variables. - Increments index to make them distinct from ALL present variables. *) -fun genvars name (Envir {maxidx, tenv, tyenv}, Ts) : env * term list = - let - fun genvs (_, [] : typ list) : term list = [] - | genvs (n, [T]) = [Var ((name, maxidx + 1), T)] - | genvs (n, T :: Ts) = - Var ((name ^ radixstring (26, "a" , n), maxidx + 1), T) - :: genvs (n + 1, Ts); - in (Envir {maxidx = maxidx + 1, tenv = tenv, tyenv = tyenv}, genvs (0, Ts)) end; - -(*Generate a variable.*) -fun genvar name (env, T) : env * term = - let val (env', [v]) = genvars name (env, [T]) - in (env', v) end; - -fun var_clash xi T T' = - raise TYPE ("Variable has two distinct types", [], [Var (xi, T'), Var (xi, T)]); - -fun lookup_check check tenv (xi, T) = - (case Vartab.lookup tenv xi of - NONE => NONE - | SOME (U, t) => if check (T, U) then SOME t else var_clash xi T U); - -(*When dealing with environments produced by matching instead - of unification, there is no need to chase assigned TVars. - In this case, we can simply ignore the type substitution - and use = instead of eq_type.*) -fun lookup1 tenv = lookup_check (op =) tenv; - -fun lookup2 (tyenv, tenv) = lookup_check (Type.eq_type tyenv) tenv; - -fun lookup (Envir {tenv, tyenv, ...}) = lookup2 (tyenv, tenv); - -fun update ((xi, T), t) (Envir {maxidx, tenv, tyenv}) = - Envir {maxidx = maxidx, tenv = Vartab.update_new (xi, (T, t)) tenv, tyenv = tyenv}; - -(*Determine if the least index updated exceeds lim*) -fun above (Envir {tenv, tyenv, ...}) lim = - (case Vartab.min tenv of SOME ((_, i), _) => i > lim | NONE => true) andalso - (case Vartab.min tyenv of SOME ((_, i), _) => i > lim | NONE => true); - -(*Update, checking Var-Var assignments: try to suppress higher indexes*) -fun vupdate (aU as (a, U), t) (env as Envir {tyenv, ...}) = - (case t of - Var (nT as (name', T)) => - if a = name' then env (*cycle!*) - else if Term_Ord.indexname_ord (a, name') = LESS then - (case lookup env nT of (*if already assigned, chase*) - NONE => update (nT, Var (a, T)) env - | SOME u => vupdate (aU, u) env) - else update (aU, t) env - | _ => update (aU, t) env); - - - -(** beta normalization wrt. environment **) - -(*Chases variables in env. 
Does not exploit sharing of variable bindings - Does not check types, so could loop.*) - -local - -fun norm_type0 tyenv : typ Same.operation = - let - fun norm (Type (a, Ts)) = Type (a, Same.map norm Ts) - | norm (TFree _) = raise Same.SAME - | norm (TVar v) = - (case Type.lookup tyenv v of - SOME U => Same.commit norm U - | NONE => raise Same.SAME); - in norm end; - -fun norm_term1 tenv : term Same.operation = - let - fun norm (Var v) = - (case lookup1 tenv v of - SOME u => Same.commit norm u - | NONE => raise Same.SAME) - | norm (Abs (a, T, body)) = Abs (a, T, norm body) - | norm (Abs (_, _, body) $ t) = Same.commit norm (subst_bound (t, body)) - | norm (f $ t) = - ((case norm f of - Abs (_, _, body) => Same.commit norm (subst_bound (t, body)) - | nf => nf $ Same.commit norm t) - handle Same.SAME => f $ norm t) - | norm _ = raise Same.SAME; - in norm end; - -fun norm_term2 tenv tyenv : term Same.operation = - let - val normT = norm_type0 tyenv; - fun norm (Const (a, T)) = Const (a, normT T) - | norm (Free (a, T)) = Free (a, normT T) - | norm (Var (xi, T)) = - (case lookup2 (tyenv, tenv) (xi, T) of - SOME u => Same.commit norm u - | NONE => Var (xi, normT T)) - | norm (Abs (a, T, body)) = - (Abs (a, normT T, Same.commit norm body) - handle Same.SAME => Abs (a, T, norm body)) - | norm (Abs (_, _, body) $ t) = Same.commit norm (subst_bound (t, body)) - | norm (f $ t) = - ((case norm f of - Abs (_, _, body) => Same.commit norm (subst_bound (t, body)) - | nf => nf $ Same.commit norm t) - handle Same.SAME => f $ norm t) - | norm _ = raise Same.SAME; - in norm end; - -in - -fun norm_type_same tyenv T = - if Vartab.is_empty tyenv then raise Same.SAME - else norm_type0 tyenv T; - -fun norm_types_same tyenv Ts = - if Vartab.is_empty tyenv then raise Same.SAME - else Same.map (norm_type0 tyenv) Ts; - -fun norm_type tyenv T = norm_type_same tyenv T handle Same.SAME => T; - -fun norm_term_same (Envir {tenv, tyenv, ...}) = - if Vartab.is_empty tyenv then norm_term1 tenv - else norm_term2 tenv tyenv; - -fun norm_term envir t = norm_term_same envir t handle Same.SAME => t; -fun beta_norm t = if Term.has_abs t then norm_term (empty 0) t else t; - -end; - - -(* head normal form for unification *) - -fun head_norm env = - let - fun norm (Var v) = - (case lookup env v of - SOME u => head_norm env u - | NONE => raise Same.SAME) - | norm (Abs (a, T, body)) = Abs (a, T, norm body) - | norm (Abs (_, _, body) $ t) = Same.commit norm (subst_bound (t, body)) - | norm (f $ t) = - (case norm f of - Abs (_, _, body) => Same.commit norm (subst_bound (t, body)) - | nf => nf $ t) - | norm _ = raise Same.SAME; - in Same.commit norm end; - - -(* eta-long beta-normal form *) - -fun eta_long Ts (Abs (s, T, t)) = Abs (s, T, eta_long (T :: Ts) t) - | eta_long Ts t = - (case strip_comb t of - (Abs _, _) => eta_long Ts (beta_norm t) - | (u, ts) => - let - val Us = binder_types (fastype_of1 (Ts, t)); - val i = length Us; - val long = eta_long (rev Us @ Ts); - in - fold_rev (Term.abs o pair "x") Us - (list_comb (incr_boundvars i u, - map (long o incr_boundvars i) ts @ map (long o Bound) (i - 1 downto 0))) - end); - - -(* full eta contraction *) - -local - -fun decr lev (Bound i) = if i >= lev then Bound (i - 1) else raise Same.SAME - | decr lev (Abs (a, T, body)) = Abs (a, T, decr (lev + 1) body) - | decr lev (t $ u) = (decr lev t $ decrh lev u handle Same.SAME => t $ decr lev u) - | decr _ _ = raise Same.SAME -and decrh lev t = (decr lev t handle Same.SAME => t); - -fun eta (Abs (a, T, body)) = - ((case eta body of - body' as (f 
$ Bound 0) => - if Term.is_dependent f then Abs (a, T, body') - else decrh 0 f - | body' => Abs (a, T, body')) handle Same.SAME => - (case body of - f $ Bound 0 => - if Term.is_dependent f then raise Same.SAME - else decrh 0 f - | _ => raise Same.SAME)) - | eta (t $ u) = (eta t $ Same.commit eta u handle Same.SAME => t $ eta u) - | eta _ = raise Same.SAME; - -in - -fun eta_contract t = - if Term.has_abs t then Same.commit eta t else t; - -end; - -val beta_eta_contract = eta_contract o beta_norm; - -fun aeconv (t, u) = t aconv u orelse eta_contract t aconv eta_contract u; - - -fun body_type env (Type ("fun", [_, T])) = body_type env T - | body_type env (T as TVar v) = - (case Type.lookup (type_env env) v of - NONE => T - | SOME T' => body_type env T') - | body_type _ T = T; - -fun binder_types env (Type ("fun", [T, U])) = T :: binder_types env U - | binder_types env (TVar v) = - (case Type.lookup (type_env env) v of - NONE => [] - | SOME T' => binder_types env T') - | binder_types _ _ = []; - -fun strip_type env T = (binder_types env T, body_type env T); - -(*finds type of term without checking that combinations are consistent - Ts holds types of bound variables*) -fun fastype (Envir {tyenv, ...}) = - let - val funerr = "fastype: expected function type"; - fun fast Ts (f $ u) = - (case Type.devar tyenv (fast Ts f) of - Type ("fun", [_, T]) => T - | TVar v => raise TERM (funerr, [f $ u]) - | _ => raise TERM (funerr, [f $ u])) - | fast Ts (Const (_, T)) = T - | fast Ts (Free (_, T)) = T - | fast Ts (Bound i) = - (nth Ts i handle General.Subscript => raise TERM ("fastype: Bound", [Bound i])) - | fast Ts (Var (_, T)) = T - | fast Ts (Abs (_, T, u)) = T --> fast (T :: Ts) u; - in fast end; - - -(** plain substitution -- without variable chasing **) - -local - -fun subst_type0 tyenv = Term_Subst.map_atypsT_same - (fn TVar v => - (case Type.lookup tyenv v of - SOME U => U - | NONE => raise Same.SAME) - | _ => raise Same.SAME); - -fun subst_term1 tenv = Term_Subst.map_aterms_same - (fn Var v => - (case lookup1 tenv v of - SOME u => u - | NONE => raise Same.SAME) - | _ => raise Same.SAME); - -fun subst_term2 tenv tyenv : term Same.operation = - let - val substT = subst_type0 tyenv; - fun subst (Const (a, T)) = Const (a, substT T) - | subst (Free (a, T)) = Free (a, substT T) - | subst (Var (xi, T)) = - (case lookup1 tenv (xi, T) of - SOME u => u - | NONE => Var (xi, substT T)) - | subst (Bound _) = raise Same.SAME - | subst (Abs (a, T, t)) = - (Abs (a, substT T, Same.commit subst t) - handle Same.SAME => Abs (a, T, subst t)) - | subst (t $ u) = (subst t $ Same.commit subst u handle Same.SAME => t $ subst u); - in subst end; - -in - -fun subst_type_same tyenv T = - if Vartab.is_empty tyenv then raise Same.SAME - else subst_type0 tyenv T; - -fun subst_term_types_same tyenv t = - if Vartab.is_empty tyenv then raise Same.SAME - else Term_Subst.map_types_same (subst_type0 tyenv) t; - -fun subst_term_same (tyenv, tenv) = - if Vartab.is_empty tenv then subst_term_types_same tyenv - else if Vartab.is_empty tyenv then subst_term1 tenv - else subst_term2 tenv tyenv; - -fun subst_type tyenv T = subst_type_same tyenv T handle Same.SAME => T; -fun subst_term_types tyenv t = subst_term_types_same tyenv t handle Same.SAME => t; -fun subst_term envs t = subst_term_same envs t handle Same.SAME => t; - -end; - - - -(** expand defined atoms -- with local beta reduction **) - -fun expand_atom T (U, u) = - subst_term_types (Type.raw_match (U, T) Vartab.empty) u - handle Type.TYPE_MATCH => raise TYPE ("expand_atom: 
ill-typed replacement", [T, U], [u]); - -fun expand_term get = - let - fun expand tm = - let - val (head, args) = Term.strip_comb tm; - val args' = map expand args; - fun comb head' = Term.list_comb (head', args'); - in - (case head of - Abs (x, T, t) => comb (Abs (x, T, expand t)) - | _ => - (case get head of - SOME def => Term.betapplys (expand_atom (Term.fastype_of head) def, args') - | NONE => comb head)) - end; - in expand end; - -fun expand_term_frees defs = - let - val eqs = map (fn ((x, U), u) => (x, (U, u))) defs; - val get = fn Free (x, _) => AList.lookup (op =) eqs x | _ => NONE; - in expand_term get end; - -end; diff --git a/core/Pure/facts.ML b/core/Pure/facts.ML deleted file mode 100644 index f29aafcb..00000000 --- a/core/Pure/facts.ML +++ /dev/null @@ -1,245 +0,0 @@ -(* Title: Pure/facts.ML - Author: Makarius - -Environment of named facts, optionally indexed by proposition. -*) - -signature FACTS = -sig - val the_single: string * Position.T -> thm list -> thm - datatype interval = FromTo of int * int | From of int | Single of int - datatype ref = - Named of (string * Position.T) * interval list option | - Fact of string - val named: string -> ref - val string_of_selection: interval list option -> string - val string_of_ref: ref -> string - val name_of_ref: ref -> string - val pos_of_ref: ref -> Position.T - val map_name_of_ref: (string -> string) -> ref -> ref - val select: ref -> thm list -> thm list - val selections: string * thm list -> (ref * thm) list - type T - val empty: T - val space_of: T -> Name_Space.T - val is_concealed: T -> string -> bool - val check: Context.generic -> T -> xstring * Position.T -> string - val intern: T -> xstring -> string - val extern: Proof.context -> T -> string -> xstring - val markup_extern: Proof.context -> T -> string -> Markup.T * xstring - val lookup: Context.generic -> T -> string -> (bool * thm list) option - val retrieve: Context.generic -> T -> xstring * Position.T -> string * thm list - val defined: T -> string -> bool - val fold_static: (string * thm list -> 'a -> 'a) -> T -> 'a -> 'a - val dest_static: bool -> T list -> T -> (string * thm list) list - val props: T -> thm list - val could_unify: T -> term -> thm list - val merge: T * T -> T - val add_static: Context.generic -> {strict: bool, index: bool} -> - binding * thm list -> T -> string * T - val add_dynamic: Context.generic -> binding * (Context.generic -> thm list) -> T -> string * T - val del: string -> T -> T - val hide: bool -> string -> T -> T -end; - -structure Facts: FACTS = -struct - -(** fact references **) - -fun the_single _ [th] : thm = th - | the_single (name, pos) ths = - error ("Expected singleton fact " ^ quote name ^ - " (length " ^ string_of_int (length ths) ^ ")" ^ Position.here pos); - - -(* datatype interval *) - -datatype interval = - FromTo of int * int | - From of int | - Single of int; - -fun string_of_interval (FromTo (i, j)) = string_of_int i ^ "-" ^ string_of_int j - | string_of_interval (From i) = string_of_int i ^ "-" - | string_of_interval (Single i) = string_of_int i; - -fun interval n iv = - let fun err () = raise Fail ("Bad interval specification " ^ string_of_interval iv) in - (case iv of - FromTo (i, j) => if i <= j then i upto j else err () - | From i => if i <= n then i upto n else err () - | Single i => [i]) - end; - - -(* datatype ref *) - -datatype ref = - Named of (string * Position.T) * interval list option | - Fact of string; - -fun named name = Named ((name, Position.none), NONE); - -fun name_of_ref (Named ((name, _), _)) = 
name - | name_of_ref (Fact _) = raise Fail "Illegal literal fact"; - -fun pos_of_ref (Named ((_, pos), _)) = pos - | pos_of_ref (Fact _) = Position.none; - -fun map_name_of_ref f (Named ((name, pos), is)) = Named ((f name, pos), is) - | map_name_of_ref _ r = r; - -fun string_of_selection NONE = "" - | string_of_selection (SOME is) = enclose "(" ")" (commas (map string_of_interval is)); - -fun string_of_ref (Named ((name, _), sel)) = name ^ string_of_selection sel - | string_of_ref (Fact _) = raise Fail "Illegal literal fact"; - - -(* select *) - -fun select (Fact _) ths = ths - | select (Named (_, NONE)) ths = ths - | select (Named ((name, pos), SOME ivs)) ths = - let - val n = length ths; - fun err msg = - error (msg ^ " for fact " ^ quote name ^ " (length " ^ string_of_int n ^ ")" ^ - Position.here pos); - fun sel i = - if i < 1 orelse i > n then err ("Bad subscript " ^ string_of_int i) - else nth ths (i - 1); - val is = maps (interval n) ivs handle Fail msg => err msg; - in map sel is end; - - -(* selections *) - -fun selections (name, [th]) = [(Named ((name, Position.none), NONE), th)] - | selections (name, ths) = map2 (fn i => fn th => - (Named ((name, Position.none), SOME [Single i]), th)) (1 upto length ths) ths; - - - -(** fact environment **) - -(* datatypes *) - -datatype fact = Static of thm list | Dynamic of Context.generic -> thm list; - -datatype T = Facts of - {facts: fact Name_Space.table, - props: thm Net.net}; - -fun make_facts facts props = Facts {facts = facts, props = props}; - -val empty = make_facts (Name_Space.empty_table "fact") Net.empty; - - -(* named facts *) - -fun facts_of (Facts {facts, ...}) = facts; - -val space_of = Name_Space.space_of_table o facts_of; - -val is_concealed = Name_Space.is_concealed o space_of; - -fun check context facts (xname, pos) = - let - val (name, fact) = Name_Space.check context (facts_of facts) (xname, pos); - val _ = - (case fact of - Static _ => () - | Dynamic _ => Context_Position.report_generic context pos (Markup.dynamic_fact name)); - in name end; - -val intern = Name_Space.intern o space_of; -fun extern ctxt = Name_Space.extern ctxt o space_of; -fun markup_extern ctxt = Name_Space.markup_extern ctxt o space_of - - -(* retrieve *) - -val defined = is_some oo (Name_Space.lookup_key o facts_of); - -fun lookup context facts name = - (case Name_Space.lookup_key (facts_of facts) name of - NONE => NONE - | SOME (_, Static ths) => SOME (true, ths) - | SOME (_, Dynamic f) => SOME (false, f context)); - -fun retrieve context facts (xname, pos) = - let - val name = check context facts (xname, pos); - val thms = - (case lookup context facts name of - SOME (static, thms) => - (if static then () - else Context_Position.report_generic context pos (Markup.dynamic_fact name); - thms) - | NONE => error ("Unknown fact " ^ quote name ^ Position.here pos)); - in (name, map (Thm.transfer (Context.theory_of context)) thms) end; - - -(* static content *) - -fun fold_static f = - Name_Space.fold_table (fn (name, Static ths) => f (name, ths) | _ => I) o facts_of; - -fun dest_static verbose prev_facts facts = - fold_static (fn (name, ths) => - if exists (fn prev => defined prev name) prev_facts orelse - not verbose andalso is_concealed facts name then I - else cons (name, ths)) facts [] - |> sort_wrt #1; - - -(* indexed props *) - -val prop_ord = Term_Ord.term_ord o pairself Thm.full_prop_of; - -fun props (Facts {props, ...}) = sort_distinct prop_ord (Net.content props); -fun could_unify (Facts {props, ...}) = Net.unify_term props; - - -(* merge facts 
*) - -fun merge (Facts {facts = facts1, props = props1}, Facts {facts = facts2, props = props2}) = - let - val facts' = Name_Space.merge_tables (facts1, facts2); - val props' = - if Net.is_empty props2 then props1 - else if Net.is_empty props1 then props2 - else Net.merge (is_equal o prop_ord) (props1, props2); (*beware of non-canonical merge*) - in make_facts facts' props' end; - - -(* add static entries *) - -fun add_static context {strict, index} (b, ths) (Facts {facts, props}) = - let - val (name, facts') = - if Binding.is_empty b then ("", facts) - else Name_Space.define context strict (b, Static ths) facts; - val props' = props - |> index ? fold (fn th => Net.insert_term (K false) (Thm.full_prop_of th, th)) ths; - in (name, make_facts facts' props') end; - - -(* add dynamic entries *) - -fun add_dynamic context (b, f) (Facts {facts, props}) = - let val (name, facts') = Name_Space.define context true (b, Dynamic f) facts; - in (name, make_facts facts' props) end; - - -(* remove entries *) - -fun del name (Facts {facts, props}) = - make_facts (Name_Space.del_table name facts) props; - -fun hide fully name (Facts {facts, props}) = - make_facts (Name_Space.hide_table fully name facts) props; - -end; diff --git a/core/Pure/global_theory.ML b/core/Pure/global_theory.ML deleted file mode 100644 index 4398e6da..00000000 --- a/core/Pure/global_theory.ML +++ /dev/null @@ -1,205 +0,0 @@ -(* Title: Pure/global_theory.ML - Author: Makarius - -Global theory content: stored facts. -*) - -signature GLOBAL_THEORY = -sig - val facts_of: theory -> Facts.T - val check_fact: theory -> xstring * Position.T -> string - val intern_fact: theory -> xstring -> string - val defined_fact: theory -> string -> bool - val hide_fact: bool -> string -> theory -> theory - val get_thms: theory -> xstring -> thm list - val get_thm: theory -> xstring -> thm - val all_thms_of: theory -> bool -> (string * thm) list - val map_facts: ('a -> 'b) -> ('c * ('a list * 'd) list) list -> ('c * ('b list * 'd) list) list - val burrow_fact: ('a list -> 'b list) -> ('a list * 'c) list -> ('b list * 'c) list - val burrow_facts: ('a list -> 'b list) -> - ('c * ('a list * 'd) list) list -> ('c * ('b list * 'd) list) list - val name_multi: string -> 'a list -> (string * 'a) list - val name_thm: bool -> bool -> string -> thm -> thm - val name_thms: bool -> bool -> string -> thm list -> thm list - val name_thmss: bool -> string -> (thm list * 'a) list -> (thm list * 'a) list - val store_thms: binding * thm list -> theory -> thm list * theory - val store_thm: binding * thm -> theory -> thm * theory - val store_thm_open: binding * thm -> theory -> thm * theory - val add_thms: ((binding * thm) * attribute list) list -> theory -> thm list * theory - val add_thm: (binding * thm) * attribute list -> theory -> thm * theory - val add_thmss: ((binding * thm list) * attribute list) list -> theory -> thm list list * theory - val add_thms_dynamic: binding * (Context.generic -> thm list) -> theory -> theory - val note_thmss: string -> (Thm.binding * (thm list * attribute list) list) list - -> theory -> (string * thm list) list * theory - val add_defs: bool -> ((binding * term) * attribute list) list -> - theory -> thm list * theory - val add_defs_unchecked: bool -> ((binding * term) * attribute list) list -> - theory -> thm list * theory - val add_defs_cmd: bool -> ((binding * string) * attribute list) list -> - theory -> thm list * theory - val add_defs_unchecked_cmd: bool -> ((binding * string) * attribute list) list -> - theory -> thm list * theory 
-end; - -structure Global_Theory: GLOBAL_THEORY = -struct - -(** theory data **) - -structure Data = Theory_Data -( - type T = Facts.T; - val empty = Facts.empty; - val extend = I; - val merge = Facts.merge; -); - -val facts_of = Data.get; - -fun check_fact thy = Facts.check (Context.Theory thy) (facts_of thy); -val intern_fact = Facts.intern o facts_of; -val defined_fact = Facts.defined o facts_of; - -fun hide_fact fully name = Data.map (Facts.hide fully name); - - -(* retrieve theorems *) - -fun get_thms thy xname = - #2 (Facts.retrieve (Context.Theory thy) (facts_of thy) (xname, Position.none)); - -fun get_thm thy xname = - Facts.the_single (xname, Position.none) (get_thms thy xname); - -fun all_thms_of thy verbose = - let - val facts = facts_of thy; - fun add (name, ths) = - if not verbose andalso Facts.is_concealed facts name then I - else append (map (`(Thm.get_name_hint)) ths); - in Facts.fold_static add facts [] end; - - - -(** store theorems **) - -(* fact specifications *) - -fun map_facts f = map (apsnd (map (apfst (map f)))); -fun burrow_fact f = split_list #>> burrow f #> op ~~; -fun burrow_facts f = split_list ##> burrow (burrow_fact f) #> op ~~; - - -(* naming *) - -fun name_multi name [x] = [(name, x)] - | name_multi "" xs = map (pair "") xs - | name_multi name xs = map_index (fn (i, x) => (name ^ "_" ^ string_of_int (i + 1), x)) xs; - -fun name_thm pre official name thm = thm - |> (if not official orelse pre andalso Thm.derivation_name thm <> "" then I - else Thm.name_derivation name) - |> (if name = "" orelse pre andalso Thm.has_name_hint thm then I - else Thm.put_name_hint name); - -fun name_thms pre official name xs = - map (uncurry (name_thm pre official)) (name_multi name xs); - -fun name_thmss official name fact = - burrow_fact (name_thms true official name) fact; - - -(* enter_thms *) - -fun register_proofs thms thy = (thms, Thm.register_proofs thms thy); - -fun enter_thms pre_name post_name app_att (b, thms) thy = - if Binding.is_empty b - then app_att thms thy |-> register_proofs - else - let - val name = Sign.full_name thy b; - val (thms', thy') = app_att (pre_name name thms) thy |>> post_name name |-> register_proofs; - val thms'' = map (Thm.transfer thy') thms'; - val thy'' = thy' |> Data.map - (Facts.add_static (Context.Theory thy') {strict = true, index = false} (b, thms'') #> snd); - in (thms'', thy'') end; - - -(* store_thm(s) *) - -fun store_thms (b, thms) = - enter_thms (name_thms true true) (name_thms false true) pair (b, thms); - -fun store_thm (b, th) = store_thms (b, [th]) #>> the_single; - -fun store_thm_open (b, th) = - enter_thms (name_thms true false) (name_thms false false) pair (b, [th]) #>> the_single; - - -(* add_thms(s) *) - -fun add_thms_atts pre_name ((b, thms), atts) = - enter_thms pre_name (name_thms false true) (fold_map (Thm.theory_attributes atts)) (b, thms); - -fun gen_add_thmss pre_name = - fold_map (add_thms_atts pre_name); - -fun gen_add_thms pre_name args = - apfst (map hd) o gen_add_thmss pre_name (map (apfst (apsnd single)) args); - -val add_thmss = gen_add_thmss (name_thms true true); -val add_thms = gen_add_thms (name_thms true true); -val add_thm = yield_singleton add_thms; - - -(* add_thms_dynamic *) - -fun add_thms_dynamic (b, f) thy = thy - |> Data.map (Facts.add_dynamic (Context.Theory thy) (b, f) #> snd); - - -(* note_thmss *) - -fun note_thmss kind = fold_map (fn ((b, more_atts), facts) => fn thy => - let - val name = Sign.full_name thy b; - fun app (ths, atts) = - fold_map (Thm.theory_attributes (surround (Thm.kind 
kind) (atts @ more_atts))) ths; - val (thms, thy') = - enter_thms (name_thmss true) (name_thms false true) (apfst flat oo fold_map app) - (b, facts) thy; - in ((name, thms), thy') end); - - -(* store axioms as theorems *) - -local - -fun no_read _ (_, t) = t; - -fun read ctxt (b, str) = - Syntax.read_prop ctxt str handle ERROR msg => - cat_error msg ("The error(s) above occurred in definition " ^ Binding.print b); - -fun add prep unchecked overloaded = fold_map (fn ((b, raw_prop), atts) => fn thy => - let - val ctxt = Syntax.init_pretty_global thy; - val prop = prep ctxt (b, raw_prop); - val ((_, def), thy') = Thm.add_def ctxt unchecked overloaded (b, prop) thy; - val thm = def - |> Thm.forall_intr_frees - |> Thm.forall_elim_vars 0 - |> Thm.varifyT_global; - in yield_singleton (gen_add_thms (K I)) ((b, thm), atts) thy' end); - -in - -val add_defs = add no_read false; -val add_defs_unchecked = add no_read true; -val add_defs_cmd = add read false; -val add_defs_unchecked_cmd = add read true; - -end; - -end; diff --git a/core/Pure/goal.ML b/core/Pure/goal.ML deleted file mode 100644 index 27fee75c..00000000 --- a/core/Pure/goal.ML +++ /dev/null @@ -1,390 +0,0 @@ -(* Title: Pure/goal.ML - Author: Makarius - -Goals in tactical theorem proving, with support for forked proofs. -*) - -signature BASIC_GOAL = -sig - val parallel_proofs: int Unsynchronized.ref - val SELECT_GOAL: tactic -> int -> tactic - val PREFER_GOAL: tactic -> int -> tactic - val CONJUNCTS: tactic -> int -> tactic - val PRECISE_CONJUNCTS: int -> tactic -> int -> tactic - val PARALLEL_CHOICE: tactic list -> tactic - val PARALLEL_GOALS: tactic -> tactic -end; - -signature GOAL = -sig - include BASIC_GOAL - val init: cterm -> thm - val protect: int -> thm -> thm - val conclude: thm -> thm - val check_finished: Proof.context -> thm -> thm - val finish: Proof.context -> thm -> thm - val norm_result: Proof.context -> thm -> thm - val skip_proofs_enabled: unit -> bool - val future_enabled: int -> bool - val future_enabled_timing: Time.time -> bool - val future_result: Proof.context -> thm future -> term -> thm - val prove_internal: Proof.context -> cterm list -> cterm -> (thm list -> tactic) -> thm - val is_schematic: term -> bool - val prove_multi: Proof.context -> string list -> term list -> term list -> - ({prems: thm list, context: Proof.context} -> tactic) -> thm list - val prove_future: Proof.context -> string list -> term list -> term -> - ({prems: thm list, context: Proof.context} -> tactic) -> thm - val prove: Proof.context -> string list -> term list -> term -> - ({prems: thm list, context: Proof.context} -> tactic) -> thm - val prove_global_future: theory -> string list -> term list -> term -> - ({prems: thm list, context: Proof.context} -> tactic) -> thm - val prove_global: theory -> string list -> term list -> term -> - ({prems: thm list, context: Proof.context} -> tactic) -> thm - val prove_sorry: Proof.context -> string list -> term list -> term -> - ({prems: thm list, context: Proof.context} -> tactic) -> thm - val prove_sorry_global: theory -> string list -> term list -> term -> - ({prems: thm list, context: Proof.context} -> tactic) -> thm - val restrict: int -> int -> thm -> thm - val unrestrict: int -> thm -> thm - val conjunction_tac: int -> tactic - val precise_conjunction_tac: int -> int -> tactic - val recover_conjunction_tac: tactic - val norm_hhf_tac: Proof.context -> int -> tactic - val assume_rule_tac: Proof.context -> int -> tactic -end; - -structure Goal: GOAL = -struct - -(** goals **) - -(* - -------- 
(init) - C ==> #C -*) -val init = - let val A = #1 (Thm.dest_implies (Thm.cprop_of Drule.protectI)) - in fn C => Thm.instantiate ([], [(A, C)]) Drule.protectI end; - -(* - A1 ==> ... ==> An ==> C - ------------------------ (protect n) - A1 ==> ... ==> An ==> #C -*) -fun protect n th = Drule.comp_no_flatten (th, n) 1 Drule.protectI; - -(* - A ==> ... ==> #C - ---------------- (conclude) - A ==> ... ==> C -*) -fun conclude th = Drule.comp_no_flatten (th, Thm.nprems_of th) 1 Drule.protectD; - -(* - #C - --- (finish) - C -*) -fun check_finished ctxt th = - if Thm.no_prems th then th - else - raise THM ("Proof failed.\n" ^ Pretty.string_of (Goal_Display.pretty_goal ctxt th), 0, [th]); - -fun finish ctxt = check_finished ctxt #> conclude; - - - -(** results **) - -(* normal form *) - -fun norm_result ctxt = - Drule.flexflex_unique - #> Raw_Simplifier.norm_hhf_protect ctxt - #> Thm.strip_shyps - #> Drule.zero_var_indexes; - - -(* scheduling parameters *) - -fun skip_proofs_enabled () = - let val skip = Options.default_bool "skip_proofs" in - if Proofterm.proofs_enabled () andalso skip then - (warning "Proof terms enabled -- cannot skip proofs"; false) - else skip - end; - -val parallel_proofs = Unsynchronized.ref 1; - -fun future_enabled n = - Multithreading.enabled () andalso ! parallel_proofs >= n andalso - is_some (Future.worker_task ()); - -fun future_enabled_timing t = - future_enabled 1 andalso - Time.toReal t >= Options.default_real "parallel_subproofs_threshold"; - - -(* future_result *) - -fun future_result ctxt result prop = - let - val thy = Proof_Context.theory_of ctxt; - val cert = Thm.cterm_of thy; - val certT = Thm.ctyp_of thy; - - val assms = Assumption.all_assms_of ctxt; - val As = map Thm.term_of assms; - - val xs = map Free (fold Term.add_frees (prop :: As) []); - val fixes = map cert xs; - - val tfrees = fold Term.add_tfrees (prop :: As) []; - val instT = map (fn (a, S) => (certT (TVar ((a, 0), S)), certT (TFree (a, S)))) tfrees; - - val global_prop = - cert (Logic.varify_types_global (fold_rev Logic.all xs (Logic.list_implies (As, prop)))) - |> Thm.weaken_sorts (Variable.sorts_of ctxt); - val global_result = result |> Future.map - (Drule.flexflex_unique #> - Thm.adjust_maxidx_thm ~1 #> - Drule.implies_intr_list assms #> - Drule.forall_intr_list fixes #> - Thm.generalize (map #1 tfrees, []) 0 #> - Thm.strip_shyps); - val local_result = - Thm.future global_result global_prop - |> Thm.close_derivation - |> Thm.instantiate (instT, []) - |> Drule.forall_elim_list fixes - |> fold (Thm.elim_implies o Thm.assume) assms; - in local_result end; - - - -(** tactical theorem proving **) - -(* prove_internal -- minimal checks, no normalization of result! 
*) - -fun prove_internal ctxt casms cprop tac = - (case SINGLE (tac (map (Assumption.assume ctxt) casms)) (init cprop) of - SOME th => Drule.implies_intr_list casms (finish ctxt th) - | NONE => error "Tactic failed"); - - -(* prove variations *) - -fun is_schematic t = - Term.exists_subterm Term.is_Var t orelse - Term.exists_type (Term.exists_subtype Term.is_TVar) t; - -fun prove_common immediate pri ctxt xs asms props tac = - let - val thy = Proof_Context.theory_of ctxt; - - val schematic = exists is_schematic props; - val future = future_enabled 1; - val skip = not immediate andalso not schematic andalso future andalso skip_proofs_enabled (); - - val pos = Position.thread_data (); - fun err msg = - cat_error msg - ("The error(s) above occurred for the goal statement" ^ Position.here pos ^ ":\n" ^ - Syntax.string_of_term ctxt (Logic.list_implies (asms, Logic.mk_conjunction_list props))); - - fun cert_safe t = Thm.cterm_of thy (Envir.beta_norm (Term.no_dummy_patterns t)) - handle TERM (msg, _) => err msg | TYPE (msg, _, _) => err msg; - val casms = map cert_safe asms; - val cprops = map cert_safe props; - - val (prems, ctxt') = ctxt - |> Variable.add_fixes_direct xs - |> fold Variable.declare_term (asms @ props) - |> Assumption.add_assumes casms - ||> Variable.set_body true; - val sorts = Variable.sorts_of ctxt'; - - val stmt = Thm.weaken_sorts sorts (Conjunction.mk_conjunction_balanced cprops); - - fun tac' args st = - if skip then ALLGOALS Skip_Proof.cheat_tac st before Skip_Proof.report ctxt - else tac args st; - fun result () = - (case SINGLE (tac' {prems = prems, context = ctxt'}) (init stmt) of - NONE => err "Tactic failed" - | SOME st => - let - val _ = - Theory.subthy (theory_of_thm st, thy) orelse err "Bad background theory of goal state"; - val res = - (finish ctxt' st - |> Drule.flexflex_unique - |> Thm.check_shyps sorts - |> Thm.check_hyps (Context.Proof ctxt')) - handle THM (msg, _, _) => err msg | ERROR msg => err msg; - in - if Unify.matches_list thy [Thm.term_of stmt] [Thm.prop_of res] then res - else err ("Proved a different theorem: " ^ Syntax.string_of_term ctxt' (Thm.prop_of res)) - end); - val res = - if immediate orelse schematic orelse not future orelse skip then result () - else - future_result ctxt' - (Execution.fork {name = "Goal.prove", pos = Position.thread_data (), pri = pri} result) - (Thm.term_of stmt); - in - Conjunction.elim_balanced (length props) res - |> map (Assumption.export false ctxt' ctxt) - |> Variable.export ctxt' ctxt - |> map Drule.zero_var_indexes - end; - -val prove_multi = prove_common true 0; - -fun prove_future_pri pri ctxt xs asms prop tac = - hd (prove_common false pri ctxt xs asms [prop] tac); - -val prove_future = prove_future_pri ~1; - -fun prove ctxt xs asms prop tac = hd (prove_multi ctxt xs asms [prop] tac); - -fun prove_global_future thy xs asms prop tac = - Drule.export_without_context (prove_future (Proof_Context.init_global thy) xs asms prop tac); - -fun prove_global thy xs asms prop tac = - Drule.export_without_context (prove (Proof_Context.init_global thy) xs asms prop tac); - -fun prove_sorry ctxt xs asms prop tac = - if Config.get ctxt quick_and_dirty then - prove ctxt xs asms prop (fn _ => ALLGOALS Skip_Proof.cheat_tac) - else (if future_enabled 1 then prove_future_pri ~2 else prove) ctxt xs asms prop tac; - -fun prove_sorry_global thy xs asms prop tac = - Drule.export_without_context - (prove_sorry (Proof_Context.init_global thy) xs asms prop tac); - - - -(** goal structure **) - -(* rearrange subgoals *) - -fun restrict i 
n st = - if i < 1 orelse n < 1 orelse i + n - 1 > Thm.nprems_of st - then raise THM ("Goal.restrict", i, [st]) - else rotate_prems (i - 1) st |> protect n; - -fun unrestrict i = conclude #> rotate_prems (1 - i); - -(*with structural marker*) -fun SELECT_GOAL tac i st = - if Thm.nprems_of st = 1 andalso i = 1 then tac st - else (PRIMITIVE (restrict i 1) THEN tac THEN PRIMITIVE (unrestrict i)) st; - -(*without structural marker*) -fun PREFER_GOAL tac i st = - if i < 1 orelse i > Thm.nprems_of st then Seq.empty - else (PRIMITIVE (rotate_prems (i - 1)) THEN tac THEN PRIMITIVE (rotate_prems (1 - i))) st; - - -(* multiple goals *) - -fun precise_conjunction_tac 0 i = eq_assume_tac i - | precise_conjunction_tac 1 i = SUBGOAL (K all_tac) i - | precise_conjunction_tac n i = PRIMITIVE (Drule.with_subgoal i (Conjunction.curry_balanced n)); - -val adhoc_conjunction_tac = REPEAT_ALL_NEW - (SUBGOAL (fn (goal, i) => - if can Logic.dest_conjunction goal then rtac Conjunction.conjunctionI i - else no_tac)); - -val conjunction_tac = SUBGOAL (fn (goal, i) => - precise_conjunction_tac (length (Logic.dest_conjunctions goal)) i ORELSE - TRY (adhoc_conjunction_tac i)); - -val recover_conjunction_tac = PRIMITIVE (fn th => - Conjunction.uncurry_balanced (Thm.nprems_of th) th); - -fun PRECISE_CONJUNCTS n tac = - SELECT_GOAL (precise_conjunction_tac n 1 - THEN tac - THEN recover_conjunction_tac); - -fun CONJUNCTS tac = - SELECT_GOAL (conjunction_tac 1 - THEN tac - THEN recover_conjunction_tac); - - -(* hhf normal form *) - -fun norm_hhf_tac ctxt = - rtac Drule.asm_rl (*cheap approximation -- thanks to builtin Logic.flatten_params*) - THEN' SUBGOAL (fn (t, i) => - if Drule.is_norm_hhf t then all_tac - else rewrite_goal_tac ctxt Drule.norm_hhf_eqs i); - - -(* non-atomic goal assumptions *) - -fun non_atomic (Const ("Pure.imp", _) $ _ $ _) = true - | non_atomic (Const ("Pure.all", _) $ _) = true - | non_atomic _ = false; - -fun assume_rule_tac ctxt = norm_hhf_tac ctxt THEN' CSUBGOAL (fn (goal, i) => - let - val ((_, goal'), ctxt') = Variable.focus_cterm goal ctxt; - val goal'' = Drule.cterm_rule (singleton (Variable.export ctxt' ctxt)) goal'; - val Rs = filter (non_atomic o Thm.term_of) (Drule.strip_imp_prems goal''); - val tacs = Rs |> map (fn R => - etac (Raw_Simplifier.norm_hhf ctxt (Thm.trivial R)) THEN_ALL_NEW assume_tac); - in fold_rev (curry op APPEND') tacs (K no_tac) i end); - - - -(** parallel tacticals **) - -(* parallel choice of single results *) - -fun PARALLEL_CHOICE tacs st = - (case Par_List.get_some (fn tac => SINGLE tac st) tacs of - NONE => Seq.empty - | SOME st' => Seq.single st'); - - -(* parallel refinement of non-schematic goal by single results *) - -local - -exception FAILED of unit; - -fun retrofit st' = - rotate_prems ~1 #> - Thm.bicompose {flatten = false, match = false, incremented = false} - (false, conclude st', Thm.nprems_of st') 1; - -in - -fun PARALLEL_GOALS tac = - Thm.adjust_maxidx_thm ~1 #> - (fn st => - if not (Multithreading.enabled ()) orelse Thm.maxidx_of st >= 0 orelse Thm.nprems_of st <= 1 - then DETERM tac st - else - let - fun try_tac g = - (case SINGLE tac g of - NONE => raise FAILED () - | SOME g' => g'); - - val goals = Drule.strip_imp_prems (Thm.cprop_of st); - val results = Par_List.map (try_tac o init) goals; - in EVERY (map retrofit (rev results)) st end - handle FAILED () => Seq.empty); - -end; - -end; - -structure Basic_Goal: BASIC_GOAL = Goal; -open Basic_Goal; diff --git a/core/Pure/goal_display.ML b/core/Pure/goal_display.ML deleted file mode 100644 index 
6ae49674..00000000 --- a/core/Pure/goal_display.ML +++ /dev/null @@ -1,139 +0,0 @@ -(* Title: Pure/goal_display.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - Author: Makarius - -Display tactical goal state. -*) - -signature GOAL_DISPLAY = -sig - val goals_limit_raw: Config.raw - val goals_limit: int Config.T - val show_main_goal_raw: Config.raw - val show_main_goal: bool Config.T - val show_consts_raw: Config.raw - val show_consts: bool Config.T - val pretty_flexpair: Proof.context -> term * term -> Pretty.T - val pretty_goals: Proof.context -> thm -> Pretty.T list - val pretty_goal: Proof.context -> thm -> Pretty.T - val string_of_goal: Proof.context -> thm -> string -end; - -structure Goal_Display: GOAL_DISPLAY = -struct - -val goals_limit_raw = Config.declare_option ("goals_limit", @{here}); -val goals_limit = Config.int goals_limit_raw; - -val show_main_goal_raw = Config.declare_option ("show_main_goal", @{here}); -val show_main_goal = Config.bool show_main_goal_raw; - -val show_consts_raw = Config.declare_option ("show_consts", @{here}); -val show_consts = Config.bool show_consts_raw; - -fun pretty_flexpair ctxt (t, u) = Pretty.block - [Syntax.pretty_term ctxt t, Pretty.str " =?=", Pretty.brk 1, Syntax.pretty_term ctxt u]; - - -(*print thm A1,...,An/B in "goal style" -- premises as numbered subgoals*) - -local - -fun ins_entry (x, y) = - AList.default (op =) (x, []) #> - AList.map_entry (op =) x (insert (op =) y); - -val add_consts = Term.fold_aterms - (fn Const (c, T) => ins_entry (T, (c, T)) - | _ => I); - -val add_vars = Term.fold_aterms - (fn Free (x, T) => ins_entry (T, (x, ~1)) - | Var (xi, T) => ins_entry (T, xi) - | _ => I); - -val add_varsT = Term.fold_atyps - (fn TFree (x, S) => ins_entry (S, (x, ~1)) - | TVar (xi, S) => ins_entry (S, xi) - | _ => I); - -fun sort_idxs vs = map (apsnd (sort (prod_ord string_ord int_ord))) vs; -fun sort_cnsts cs = map (apsnd (sort_wrt fst)) cs; - -fun consts_of t = sort_cnsts (add_consts t []); -fun vars_of t = sort_idxs (add_vars t []); -fun varsT_of t = rev (sort_idxs (Term.fold_types add_varsT t [])); - -in - -fun pretty_goals ctxt0 state = - let - val ctxt = ctxt0 - |> Config.put show_types (Config.get ctxt0 show_types orelse Config.get ctxt0 show_sorts) - |> Config.put show_sorts false; - - val show_sorts0 = Config.get ctxt0 show_sorts; - val show_types = Config.get ctxt show_types; - val show_consts = Config.get ctxt show_consts - val show_main_goal = Config.get ctxt show_main_goal; - val goals_limit = Config.get ctxt goals_limit; - - val prt_sort = Syntax.pretty_sort ctxt; - val prt_typ = Syntax.pretty_typ ctxt; - val prt_term = - singleton (Syntax.uncheck_terms ctxt) #> - Type_Annotation.ignore_free_types #> - Syntax.unparse_term ctxt; - - fun prt_atoms prt prtT (X, xs) = Pretty.block - [Pretty.block (Pretty.commas (map prt xs)), Pretty.str " ::", - Pretty.brk 1, prtT X]; - - fun prt_var (x, ~1) = prt_term (Syntax.free x) - | prt_var xi = prt_term (Syntax.var xi); - - fun prt_varT (x, ~1) = prt_typ (TFree (x, [])) - | prt_varT xi = prt_typ (TVar (xi, [])); - - val prt_consts = prt_atoms (prt_term o Const) prt_typ; - val prt_vars = prt_atoms prt_var prt_typ; - val prt_varsT = prt_atoms prt_varT prt_sort; - - - fun pretty_list _ _ [] = [] - | pretty_list name prt lst = [Pretty.big_list name (map prt lst)]; - - fun pretty_subgoal s A = - Pretty.markup (Markup.subgoal s) [Pretty.str (" " ^ s ^ ". 
"), prt_term A]; - val pretty_subgoals = map_index (fn (i, A) => pretty_subgoal (string_of_int (i + 1)) A); - - val pretty_ffpairs = pretty_list "flex-flex pairs:" (pretty_flexpair ctxt); - - val pretty_consts = pretty_list "constants:" prt_consts o consts_of; - val pretty_vars = pretty_list "variables:" prt_vars o vars_of; - val pretty_varsT = pretty_list "type variables:" prt_varsT o varsT_of; - - - val {prop, tpairs, ...} = Thm.rep_thm state; - val (As, B) = Logic.strip_horn prop; - val ngoals = length As; - in - (if show_main_goal then [Pretty.mark Markup.goal (prt_term B)] else []) @ - (if ngoals = 0 then [Pretty.str "No subgoals!"] - else if ngoals > goals_limit then - pretty_subgoals (take goals_limit As) @ - [Pretty.str ("A total of " ^ string_of_int ngoals ^ " subgoals...")] - else pretty_subgoals As) @ - pretty_ffpairs tpairs @ - (if show_consts then pretty_consts prop else []) @ - (if show_types then pretty_vars prop else []) @ - (if show_sorts0 then pretty_varsT prop else []) - end; - -val pretty_goal = Pretty.chunks oo pretty_goals; -val string_of_goal = Pretty.string_of oo pretty_goal; - -end; - -end; - diff --git a/core/Pure/interpretation.ML b/core/Pure/interpretation.ML deleted file mode 100644 index 42d5e5df..00000000 --- a/core/Pure/interpretation.ML +++ /dev/null @@ -1,50 +0,0 @@ -(* Title: Pure/interpretation.ML - Author: Florian Haftmann and Makarius - -Generic interpretation of theory data. -*) - -signature INTERPRETATION = -sig - type T - val result: theory -> T list - val interpretation: (T -> theory -> theory) -> theory -> theory - val data: T -> theory -> theory - val init: theory -> theory -end; - -functor Interpretation(type T val eq: T * T -> bool): INTERPRETATION = -struct - -type T = T; - -structure Interp = Theory_Data -( - type T = T list * (((T -> theory -> theory) * stamp) * T list) list; - val empty = ([], []); - val extend = I; - fun merge ((data1, interps1), (data2, interps2)) : T = - (Library.merge eq (data1, data2), - AList.join (eq_snd (op =)) (K (Library.merge eq)) (interps1, interps2)); -); - -val result = #1 o Interp.get; - -fun consolidate thy = - let - val (data, interps) = Interp.get thy; - val unfinished = interps |> map (fn ((f, _), xs) => - (f, if eq_list eq (xs, data) then [] else subtract eq xs data)); - val finished = interps |> map (fn (interp, _) => (interp, data)); - in - if forall (null o #2) unfinished then NONE - else SOME (thy |> fold_rev (uncurry fold_rev) unfinished |> Interp.put (data, finished)) - end; - -fun interpretation f = Interp.map (apsnd (cons ((f, stamp ()), []))) #> perhaps consolidate; -fun data x = Interp.map (apfst (cons x)) #> perhaps consolidate; - -val init = Theory.at_begin consolidate; - -end; - diff --git a/core/Pure/item_net.ML b/core/Pure/item_net.ML deleted file mode 100644 index df11acaf..00000000 --- a/core/Pure/item_net.ML +++ /dev/null @@ -1,72 +0,0 @@ -(* Title: Pure/item_net.ML - Author: Markus Wenzel, TU Muenchen - -Efficient storage of items indexed by terms; preserves order and -prefers later entries. 
-*) - -signature ITEM_NET = -sig - type 'a T - val init: ('a * 'a -> bool) -> ('a -> term list) -> 'a T - val content: 'a T -> 'a list - val length: 'a T -> int - val retrieve: 'a T -> term -> 'a list - val retrieve_matching: 'a T -> term -> 'a list - val member: 'a T -> 'a -> bool - val merge: 'a T * 'a T -> 'a T - val remove: 'a -> 'a T -> 'a T - val update: 'a -> 'a T -> 'a T -end; - -structure Item_Net: ITEM_NET = -struct - -(* datatype *) - -datatype 'a T = - Items of { - eq: 'a * 'a -> bool, - index: 'a -> term list, - content: 'a list, - next: int, - net: (int * 'a) Net.net}; - -fun mk_items eq index content next net = - Items {eq = eq, index = index, content = content, next = next, net = net}; - -fun init eq index = mk_items eq index [] ~1 Net.empty; - -fun content (Items {content, ...}) = content; -fun length items = List.length (content items); -fun retrieve (Items {net, ...}) = order_list o Net.unify_term net; -fun retrieve_matching (Items {net, ...}) = order_list o Net.match_term net; - - -(* standard operations *) - -fun member (Items {eq, index, content, net, ...}) x = - (case index x of - [] => Library.member eq content x - | t :: _ => exists (fn (_, y) => eq (x, y)) (Net.unify_term net t)); - -fun cons x (Items {eq, index, content, next, net}) = - mk_items eq index (x :: content) (next - 1) - (fold (fn t => Net.insert_term (K false) (t, (next, x))) (index x) net); - -fun merge - (items1 as Items {net = net1, ...}, - items2 as Items {net = net2, content = content2, ...}) = - if pointer_eq (net1, net2) then items1 - else if Net.is_empty net1 then items2 - else fold_rev (fn y => if member items1 y then I else cons y) content2 items1; - -fun remove x (items as Items {eq, index, content, next, net}) = - if member items x then - mk_items eq index (Library.remove eq x content) next - (fold (fn t => Net.delete_term_safe (eq o pairself #2) (t, (0, x))) (index x) net) - else items; - -fun update x items = cons x (remove x items); - -end; diff --git a/core/Pure/library.ML b/core/Pure/library.ML deleted file mode 100644 index 3f8129cf..00000000 --- a/core/Pure/library.ML +++ /dev/null @@ -1,1102 +0,0 @@ -(* Title: Pure/library.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - Author: Markus Wenzel, TU Muenchen - -Basic library: functions, pairs, booleans, lists, integers, -strings, lists as sets, orders, current directory, misc. - -See also General/basics.ML for the most fundamental concepts. -*) - -infix 2 ? -infix 3 o oo ooo oooo -infix 4 ~~ upto downto -infix orf andf - -signature BASIC_LIBRARY = -sig - (*functions*) - val undefined: 'a -> 'b - val I: 'a -> 'a - val K: 'a -> 'b -> 'a - val curry: ('a * 'b -> 'c) -> 'a -> 'b -> 'c - val uncurry: ('a -> 'b -> 'c) -> 'a * 'b -> 'c - val ? 
: bool * ('a -> 'a) -> 'a -> 'a - val oo: ('a -> 'b) * ('c -> 'd -> 'a) -> 'c -> 'd -> 'b - val ooo: ('a -> 'b) * ('c -> 'd -> 'e -> 'a) -> 'c -> 'd -> 'e -> 'b - val oooo: ('a -> 'b) * ('c -> 'd -> 'e -> 'f -> 'a) -> 'c -> 'd -> 'e -> 'f -> 'b - val funpow: int -> ('a -> 'a) -> 'a -> 'a - val funpow_yield: int -> ('a -> 'b * 'a) -> 'a -> 'b list * 'a - - (*user errors*) - exception ERROR of string - val error: string -> 'a - val cat_error: string -> string -> 'a - val assert_all: ('a -> bool) -> 'a list -> ('a -> string) -> unit - - (*pairs*) - val pair: 'a -> 'b -> 'a * 'b - val rpair: 'a -> 'b -> 'b * 'a - val fst: 'a * 'b -> 'a - val snd: 'a * 'b -> 'b - val eq_fst: ('a * 'c -> bool) -> ('a * 'b) * ('c * 'd) -> bool - val eq_snd: ('b * 'd -> bool) -> ('a * 'b) * ('c * 'd) -> bool - val eq_pair: ('a * 'c -> bool) -> ('b * 'd -> bool) -> ('a * 'b) * ('c * 'd) -> bool - val swap: 'a * 'b -> 'b * 'a - val apfst: ('a -> 'b) -> 'a * 'c -> 'b * 'c - val apsnd: ('a -> 'b) -> 'c * 'a -> 'c * 'b - val pairself: ('a -> 'b) -> 'a * 'a -> 'b * 'b - - (*booleans*) - val equal: ''a -> ''a -> bool - val not_equal: ''a -> ''a -> bool - val orf: ('a -> bool) * ('a -> bool) -> 'a -> bool - val andf: ('a -> bool) * ('a -> bool) -> 'a -> bool - val exists: ('a -> bool) -> 'a list -> bool - val forall: ('a -> bool) -> 'a list -> bool - val setmp_CRITICAL: 'a Unsynchronized.ref -> 'a -> ('b -> 'c) -> 'b -> 'c - val setmp_thread_data: 'a Universal.tag -> 'a -> 'a -> ('b -> 'c) -> 'b -> 'c - - (*lists*) - val single: 'a -> 'a list - val the_single: 'a list -> 'a - val singleton: ('a list -> 'b list) -> 'a -> 'b - val yield_singleton: ('a list -> 'c -> 'b list * 'c) -> 'a -> 'c -> 'b * 'c - val perhaps_apply: ('a -> 'a option) list -> 'a -> 'a option - val perhaps_loop: ('a -> 'a option) -> 'a -> 'a option - val foldl1: ('a * 'a -> 'a) -> 'a list -> 'a - val foldr1: ('a * 'a -> 'a) -> 'a list -> 'a - val eq_list: ('a * 'a -> bool) -> 'a list * 'a list -> bool - val maps: ('a -> 'b list) -> 'a list -> 'b list - val filter: ('a -> bool) -> 'a list -> 'a list - val filter_out: ('a -> bool) -> 'a list -> 'a list - val map_filter: ('a -> 'b option) -> 'a list -> 'b list - val take: int -> 'a list -> 'a list - val drop: int -> 'a list -> 'a list - val chop: int -> 'a list -> 'a list * 'a list - val chop_groups: int -> 'a list -> 'a list list - val nth: 'a list -> int -> 'a - val nth_list: 'a list list -> int -> 'a list - val nth_map: int -> ('a -> 'a) -> 'a list -> 'a list - val nth_drop: int -> 'a list -> 'a list - val map_index: (int * 'a -> 'b) -> 'a list -> 'b list - val fold_index: (int * 'a -> 'b -> 'b) -> 'a list -> 'b -> 'b - val map_range: (int -> 'a) -> int -> 'a list - val fold_range: (int -> 'a -> 'a) -> int -> 'a -> 'a - val split_last: 'a list -> 'a list * 'a - val find_first: ('a -> bool) -> 'a list -> 'a option - val find_index: ('a -> bool) -> 'a list -> int - val get_first: ('a -> 'b option) -> 'a list -> 'b option - val get_index: ('a -> 'b option) -> 'a list -> (int * 'b) option - val flat: 'a list list -> 'a list - val unflat: 'a list list -> 'b list -> 'b list list - val grouped: int -> (('a list -> 'b list) -> 'c list list -> 'd list list) -> - ('a -> 'b) -> 'c list -> 'd list - val burrow: ('a list -> 'b list) -> 'a list list -> 'b list list - val burrow_options: ('a list -> 'b list) -> 'a option list -> 'b option list - val fold_burrow: ('a list -> 'c -> 'b list * 'd) -> 'a list list -> 'c -> 'b list list * 'd - val separate: 'a -> 'a list -> 'a list - val surround: 'a -> 'a list -> 'a list - 
val replicate: int -> 'a -> 'a list - val map_product: ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list - val fold_product: ('a -> 'b -> 'c -> 'c) -> 'a list -> 'b list -> 'c -> 'c - val map2: ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list - val fold2: ('a -> 'b -> 'c -> 'c) -> 'a list -> 'b list -> 'c -> 'c - val fold_rev2: ('a -> 'b -> 'c -> 'c) -> 'a list -> 'b list -> 'c -> 'c - val forall2: ('a -> 'b -> bool) -> 'a list -> 'b list -> bool - val map_split: ('a -> 'b * 'c) -> 'a list -> 'b list * 'c list - val zip_options: 'a list -> 'b option list -> ('a * 'b) list - val ~~ : 'a list * 'b list -> ('a * 'b) list - val split_list: ('a * 'b) list -> 'a list * 'b list - val burrow_fst: ('a list -> 'b list) -> ('a * 'c) list -> ('b * 'c) list - val is_prefix: ('a * 'a -> bool) -> 'a list -> 'a list -> bool - val take_prefix: ('a -> bool) -> 'a list -> 'a list * 'a list - val chop_prefix: ('a * 'b -> bool) -> 'a list * 'b list -> 'a list * ('a list * 'b list) - val take_suffix: ('a -> bool) -> 'a list -> 'a list * 'a list - val prefixes1: 'a list -> 'a list list - val prefixes: 'a list -> 'a list list - val suffixes1: 'a list -> 'a list list - val suffixes: 'a list -> 'a list list - - (*integers*) - val upto: int * int -> int list - val downto: int * int -> int list - val radixpand: int * int -> int list - val radixstring: int * string * int -> string - val string_of_int: int -> string - val signed_string_of_int: int -> string - val string_of_indexname: string * int -> string - val read_radix_int: int -> string list -> int * string list - val read_int: string list -> int * string list - val oct_char: string -> string - - (*strings*) - val nth_string: string -> int -> string - val fold_string: (string -> 'a -> 'a) -> string -> 'a -> 'a - val exists_string: (string -> bool) -> string -> bool - val forall_string: (string -> bool) -> string -> bool - val first_field: string -> string -> (string * string) option - val enclose: string -> string -> string -> string - val unenclose: string -> string - val quote: string -> string - val cartouche: string -> string - val space_implode: string -> string list -> string - val commas: string list -> string - val commas_quote: string list -> string - val cat_lines: string list -> string - val space_explode: string -> string -> string list - val split_lines: string -> string list - val plain_words: string -> string - val prefix_lines: string -> string -> string - val prefix: string -> string -> string - val suffix: string -> string -> string - val unprefix: string -> string -> string - val unsuffix: string -> string -> string - val trim_line: string -> string - val replicate_string: int -> string -> string - val translate_string: (string -> string) -> string -> string - val match_string: string -> string -> bool - - (*reals*) - val string_of_real: real -> string - val signed_string_of_real: real -> string - - (*lists as sets -- see also Pure/General/ord_list.ML*) - val member: ('b * 'a -> bool) -> 'a list -> 'b -> bool - val insert: ('a * 'a -> bool) -> 'a -> 'a list -> 'a list - val remove: ('b * 'a -> bool) -> 'b -> 'a list -> 'a list - val update: ('a * 'a -> bool) -> 'a -> 'a list -> 'a list - val union: ('a * 'a -> bool) -> 'a list -> 'a list -> 'a list - val subtract: ('b * 'a -> bool) -> 'b list -> 'a list -> 'a list - val inter: ('a * 'b -> bool) -> 'b list -> 'a list -> 'a list - val merge: ('a * 'a -> bool) -> 'a list * 'a list -> 'a list - val subset: ('a * 'b -> bool) -> 'a list * 'b list -> bool - val eq_set: ('a * 'a -> bool) -> 'a list * 
'a list -> bool - val distinct: ('a * 'a -> bool) -> 'a list -> 'a list - val duplicates: ('a * 'a -> bool) -> 'a list -> 'a list - val has_duplicates: ('a * 'a -> bool) -> 'a list -> bool - val map_transpose: ('a list -> 'b) -> 'a list list -> 'b list - - (*lists as multisets*) - val remove1: ('b * 'a -> bool) -> 'b -> 'a list -> 'a list - val combine: ('a * 'a -> bool) -> 'a list -> 'a list -> 'a list - val submultiset: ('a * 'b -> bool) -> 'a list * 'b list -> bool - - (*orders*) - val is_equal: order -> bool - val rev_order: order -> order - val make_ord: ('a * 'a -> bool) -> 'a * 'a -> order - val bool_ord: bool * bool -> order - val int_ord: int * int -> order - val string_ord: string * string -> order - val fast_string_ord: string * string -> order - val option_ord: ('a * 'b -> order) -> 'a option * 'b option -> order - val prod_ord: ('a * 'b -> order) -> ('c * 'd -> order) -> ('a * 'c) * ('b * 'd) -> order - val dict_ord: ('a * 'b -> order) -> 'a list * 'b list -> order - val list_ord: ('a * 'b -> order) -> 'a list * 'b list -> order - val sort: ('a * 'a -> order) -> 'a list -> 'a list - val sort_distinct: ('a * 'a -> order) -> 'a list -> 'a list - val sort_strings: string list -> string list - val sort_wrt: ('a -> string) -> 'a list -> 'a list - val tag_list: int -> 'a list -> (int * 'a) list - val untag_list: (int * 'a) list -> 'a list - val order_list: (int * 'a) list -> 'a list - - (*random numbers*) - exception RANDOM - val random: unit -> real - val random_range: int -> int -> int - - (*misc*) - val divide_and_conquer: ('a -> 'a list * ('b list -> 'b)) -> 'a -> 'b - val divide_and_conquer': ('a -> 'b -> ('a list * ('c list * 'b -> 'c * 'b)) * 'b) -> - 'a -> 'b -> 'c * 'b - val partition_eq: ('a * 'a -> bool) -> 'a list -> 'a list list - val partition_list: (int -> 'a -> bool) -> int -> int -> 'a list -> 'a list list - type serial = int - val serial: unit -> serial - val serial_string: unit -> string - eqtype stamp - val stamp: unit -> stamp - structure Any: sig type T = exn end - val cd: string -> unit - val pwd: unit -> string - val getenv: string -> string - val getenv_strict: string -> string -end; - -signature LIBRARY = -sig - include BASIC_LIBRARY - val foldl: ('a * 'b -> 'a) -> 'a * 'b list -> 'a - val foldr: ('a * 'b -> 'b) -> 'a list * 'b -> 'b -end; - -structure Library: LIBRARY = -struct - -(* functions *) - -fun undefined _ = raise Match; - -fun I x = x; -fun K x = fn _ => x; -fun curry f x y = f (x, y); -fun uncurry f (x, y) = f x y; - -(*conditional application*) -fun b ? f = fn x => if b then f x else x; - -(*composition with multiple args*) -fun (f oo g) x y = f (g x y); -fun (f ooo g) x y z = f (g x y z); -fun (f oooo g) x y z w = f (g x y z w); - -(*function exponentiation: f (... (f x) ...) 
with n applications of f*) -fun funpow (0 : int) _ = I - | funpow n f = f #> funpow (n - 1) f; - -fun funpow_yield (0 : int) _ x = ([], x) - | funpow_yield n f x = x |> f ||>> funpow_yield (n - 1) f |>> op ::; - - -(* user errors *) - -exception ERROR of string; -fun error msg = raise ERROR msg; - -fun cat_error "" msg = error msg - | cat_error msg "" = error msg - | cat_error msg1 msg2 = error (msg1 ^ "\n" ^ msg2); - -fun assert_all pred list msg = - let - fun ass [] = () - | ass (x :: xs) = if pred x then ass xs else error (msg x); - in ass list end; - - -(* pairs *) - -fun pair x y = (x, y); -fun rpair x y = (y, x); - -fun fst (x, y) = x; -fun snd (x, y) = y; - -fun eq_fst eq ((x1, _), (x2, _)) = eq (x1, x2); -fun eq_snd eq ((_, y1), (_, y2)) = eq (y1, y2); -fun eq_pair eqx eqy ((x1, y1), (x2, y2)) = eqx (x1, x2) andalso eqy (y1, y2); - -fun swap (x, y) = (y, x); - -fun apfst f (x, y) = (f x, y); -fun apsnd f (x, y) = (x, f y); -fun pairself f (x, y) = (f x, f y); - - -(* booleans *) - -(*polymorphic equality*) -fun equal x y = x = y; -fun not_equal x y = x <> y; - -(*combining predicates*) -fun p orf q = fn x => p x orelse q x; -fun p andf q = fn x => p x andalso q x; - -val exists = List.exists; -val forall = List.all; - - -(* flags *) - -fun setmp_CRITICAL flag value f x = - NAMED_CRITICAL "setmp" (fn () => Unsynchronized.setmp flag value f x); - -fun setmp_thread_data tag orig_data data f x = - uninterruptible (fn restore_attributes => fn () => - let - val _ = Thread.setLocal (tag, data); - val result = Exn.capture (restore_attributes f) x; - val _ = Thread.setLocal (tag, orig_data); - in Exn.release result end) (); - - - -(** lists **) - -fun single x = [x]; - -fun the_single [x] = x - | the_single _ = raise List.Empty; - -fun singleton f x = the_single (f [x]); - -fun yield_singleton f x = f [x] #>> the_single; - -fun perhaps_apply funs arg = - let - fun app [] res = res - | app (f :: fs) (changed, x) = - (case f x of - NONE => app fs (changed, x) - | SOME x' => app fs (true, x')); - in (case app funs (false, arg) of (false, _) => NONE | (true, arg') => SOME arg') end; - -fun perhaps_loop f arg = - let - fun loop (changed, x) = - (case f x of - NONE => (changed, x) - | SOME x' => loop (true, x')); - in (case loop (false, arg) of (false, _) => NONE | (true, arg') => SOME arg') end; - - -(* fold -- old versions *) - -(*the following versions of fold are designed to fit nicely with infixes*) - -(* (op @) (e, [x1, ..., xn]) ===> ((e @ x1) @ x2) ... @ xn - for operators that associate to the left (TAIL RECURSIVE)*) -fun foldl (f: 'a * 'b -> 'a) : 'a * 'b list -> 'a = - let fun itl (e, []) = e - | itl (e, a::l) = itl (f(e, a), l) - in itl end; - -(* (op @) ([x1, ..., xn], e) ===> x1 @ (x2 ... @ (xn @ e)) - for operators that associate to the right (not tail recursive)*) -fun foldr f (l, e) = - let fun itr [] = e - | itr (a::l) = f(a, itr l) - in itr l end; - -(* (op @) [x1, ..., xn] ===> ((x1 @ x2) @ x3) ... @ xn - for operators that associate to the left (TAIL RECURSIVE)*) -fun foldl1 f [] = raise List.Empty - | foldl1 f (x :: xs) = foldl f (x, xs); - -(* (op @) [x1, ..., xn] ===> x1 @ (x2 ... 
@ (x[n-1] @ xn)) - for n > 0, operators that associate to the right (not tail recursive)*) -fun foldr1 f [] = raise List.Empty - | foldr1 f l = - let fun itr [x] = x - | itr (x::l) = f(x, itr l) - in itr l end; - - -(* basic list functions *) - -fun eq_list eq (list1, list2) = - pointer_eq (list1, list2) orelse - let - fun eq_lst (x :: xs, y :: ys) = eq (x, y) andalso eq_lst (xs, ys) - | eq_lst _ = true; - in length list1 = length list2 andalso eq_lst (list1, list2) end; - -fun maps f [] = [] - | maps f (x :: xs) = f x @ maps f xs; - -val filter = List.filter; -fun filter_out f = filter (not o f); -val map_filter = List.mapPartial; - -fun take (0: int) xs = [] - | take _ [] = [] - | take n (x :: xs) = x :: take (n - 1) xs; - -fun drop (0: int) xs = xs - | drop _ [] = [] - | drop n (x :: xs) = drop (n - 1) xs; - -fun chop (0: int) xs = ([], xs) - | chop _ [] = ([], []) - | chop n (x :: xs) = chop (n - 1) xs |>> cons x; - -fun chop_groups n list = - (case chop (Int.max (n, 1)) list of - ([], _) => [] - | (g, rest) => g :: chop_groups n rest); - - -(*return nth element of a list, where 0 designates the first element; - raise Subscript if list too short*) -fun nth xs i = List.nth (xs, i); - -fun nth_list xss i = nth xss i handle General.Subscript => []; - -fun nth_map 0 f (x :: xs) = f x :: xs - | nth_map n f (x :: xs) = x :: nth_map (n - 1) f xs - | nth_map (_: int) _ [] = raise Subscript; - -fun nth_drop n xs = - List.take (xs, n) @ List.drop (xs, n + 1); - -fun map_index f = - let - fun map_aux (_: int) [] = [] - | map_aux i (x :: xs) = f (i, x) :: map_aux (i + 1) xs - in map_aux 0 end; - -fun fold_index f = - let - fun fold_aux (_: int) [] y = y - | fold_aux i (x :: xs) y = fold_aux (i + 1) xs (f (i, x) y) - in fold_aux 0 end; - -fun map_range f i = - let - fun map_aux (k: int) = - if k < i then f k :: map_aux (k + 1) else [] - in map_aux 0 end; - -fun fold_range f i = - let - fun fold_aux (k: int) y = - if k < i then fold_aux (k + 1) (f k y) else y - in fold_aux 0 end; - - -(*rear decomposition*) -fun split_last [] = raise List.Empty - | split_last [x] = ([], x) - | split_last (x :: xs) = apfst (cons x) (split_last xs); - -(*find first element satisfying predicate*) -val find_first = List.find; - -(*find position of first element satisfying a predicate*) -fun find_index pred = - let fun find (_: int) [] = ~1 - | find n (x :: xs) = if pred x then n else find (n + 1) xs; - in find 0 end; - -(*get first element by lookup function*) -fun get_first _ [] = NONE - | get_first f (x :: xs) = - (case f x of - NONE => get_first f xs - | some => some); - -fun get_index f = - let - fun get (_: int) [] = NONE - | get i (x :: xs) = - (case f x of - NONE => get (i + 1) xs - | SOME y => SOME (i, y)) - in get 0 end; - -val flat = List.concat; - -fun unflat (xs :: xss) ys = - let val (ps, qs) = chop (length xs) ys - in ps :: unflat xss qs end - | unflat [] [] = [] - | unflat _ _ = raise ListPair.UnequalLengths; - -fun grouped n comb f = chop_groups n #> comb (map f) #> flat; - -fun burrow f xss = unflat xss (f (flat xss)); - -fun burrow_options f os = map (try hd) (burrow f (map the_list os)); - -fun fold_burrow f xss s = - apfst (unflat xss) (f (flat xss) s); - -(*separate s [x1, x2, ..., xn] ===> [x1, s, x2, s, ..., s, xn]*) -fun separate s (x :: (xs as _ :: _)) = x :: s :: separate s xs - | separate _ xs = xs; - -fun surround s (x :: xs) = s :: x :: surround s xs - | surround s [] = [s]; - -(*make the list [x, x, ..., x] of length n*) -fun replicate (n: int) x = - let fun rep (0, xs) = xs - | rep (n, xs) 
= rep (n - 1, x :: xs) - in - if n < 0 then raise Subscript - else rep (n, []) - end; - - -(* direct product *) - -fun map_product f _ [] = [] - | map_product f [] _ = [] - | map_product f (x :: xs) ys = map (f x) ys @ map_product f xs ys; - -fun fold_product f _ [] z = z - | fold_product f [] _ z = z - | fold_product f (x :: xs) ys z = z |> fold (f x) ys |> fold_product f xs ys; - - -(* lists of pairs *) - -fun map2 _ [] [] = [] - | map2 f (x :: xs) (y :: ys) = f x y :: map2 f xs ys - | map2 _ _ _ = raise ListPair.UnequalLengths; - -fun fold2 f [] [] z = z - | fold2 f (x :: xs) (y :: ys) z = fold2 f xs ys (f x y z) - | fold2 f _ _ _ = raise ListPair.UnequalLengths; - -fun fold_rev2 f [] [] z = z - | fold_rev2 f (x :: xs) (y :: ys) z = f x y (fold_rev2 f xs ys z) - | fold_rev2 f _ _ _ = raise ListPair.UnequalLengths; - -fun forall2 P [] [] = true - | forall2 P (x :: xs) (y :: ys) = P x y andalso forall2 P xs ys - | forall2 P _ _ = raise ListPair.UnequalLengths; - -fun map_split f [] = ([], []) - | map_split f (x :: xs) = - let - val (y, w) = f x; - val (ys, ws) = map_split f xs; - in (y :: ys, w :: ws) end; - -fun zip_options (x :: xs) (SOME y :: ys) = (x, y) :: zip_options xs ys - | zip_options (_ :: xs) (NONE :: ys) = zip_options xs ys - | zip_options _ [] = [] - | zip_options [] _ = raise ListPair.UnequalLengths; - -(*combine two lists forming a list of pairs: - [x1, ..., xn] ~~ [y1, ..., yn] ===> [(x1, y1), ..., (xn, yn)]*) -fun [] ~~ [] = [] - | (x :: xs) ~~ (y :: ys) = (x, y) :: (xs ~~ ys) - | _ ~~ _ = raise ListPair.UnequalLengths; - -(*inverse of ~~; the old 'split': - [(x1, y1), ..., (xn, yn)] ===> ([x1, ..., xn], [y1, ..., yn])*) -val split_list = ListPair.unzip; - -fun burrow_fst f xs = split_list xs |>> f |> op ~~; - - -(* prefixes, suffixes *) - -fun is_prefix _ [] _ = true - | is_prefix eq (x :: xs) (y :: ys) = eq (x, y) andalso is_prefix eq xs ys - | is_prefix eq _ _ = false; - -(* [x1, ..., xi, ..., xn] ---> ([x1, ..., x(i-1)], [xi, ..., xn]) - where xi is the first element that does not satisfy the predicate*) -fun take_prefix (pred : 'a -> bool) (xs: 'a list) : 'a list * 'a list = - let fun take (rxs, []) = (rev rxs, []) - | take (rxs, x :: xs) = - if pred x then take(x :: rxs, xs) else (rev rxs, x :: xs) - in take([], xs) end; - -fun chop_prefix eq ([], ys) = ([], ([], ys)) - | chop_prefix eq (xs, []) = ([], (xs, [])) - | chop_prefix eq (xs as x :: xs', ys as y :: ys') = - if eq (x, y) then - let val (ps', xys'') = chop_prefix eq (xs', ys') - in (x :: ps', xys'') end - else ([], (xs, ys)); - -(* [x1, ..., xi, ..., xn] ---> ([x1, ..., xi], [x(i+1), ..., xn]) - where xi is the last element that does not satisfy the predicate*) -fun take_suffix _ [] = ([], []) - | take_suffix pred (x :: xs) = - (case take_suffix pred xs of - ([], sffx) => if pred x then ([], x :: sffx) else ([x], sffx) - | (prfx, sffx) => (x :: prfx, sffx)); - -fun prefixes1 [] = [] - | prefixes1 (x :: xs) = map (cons x) ([] :: prefixes1 xs); - -fun prefixes xs = [] :: prefixes1 xs; - -fun suffixes1 xs = map rev (prefixes1 (rev xs)); -fun suffixes xs = [] :: suffixes1 xs; - - - -(** integers **) - -(* lists of integers *) - -(*make the list [from, from + 1, ..., to]*) -fun ((i: int) upto j) = - if i > j then [] else i :: (i + 1 upto j); - -(*make the list [from, from - 1, ..., to]*) -fun ((i: int) downto j) = - if i < j then [] else i :: (i - 1 downto j); - - -(* convert integers to strings *) - -(*expand the number in the given base; - example: radixpand (2, 8) gives [1, 0, 0, 0]*) -fun radixpand (base, 
num) : int list = - let - fun radix (n, tail) = - if n < base then n :: tail - else radix (n div base, (n mod base) :: tail) - in radix (num, []) end; - -(*expands a number into a string of characters starting from "zerochar"; - example: radixstring (2, "0", 8) gives "1000"*) -fun radixstring (base, zerochar, num) = - let val offset = ord zerochar; - fun chrof n = chr (offset + n) - in implode (map chrof (radixpand (base, num))) end; - - -local - val zero = ord "0"; - val small = 10000: int; - val small_table = Vector.tabulate (small, Int.toString); -in - -fun string_of_int i = - if i < 0 then Int.toString i - else if i < 10 then chr (zero + i) - else if i < small then Vector.sub (small_table, i) - else Int.toString i; - -end; - -fun signed_string_of_int i = - if i < 0 then "-" ^ string_of_int (~ i) else string_of_int i; - -fun string_of_indexname (a, 0) = a - | string_of_indexname (a, i) = a ^ "_" ^ string_of_int i; - - -(* read integers *) - -fun read_radix_int radix cs = - let - val zero = ord "0"; - val limit = zero + radix; - fun scan (num, []) = (num, []) - | scan (num, c :: cs) = - if zero <= ord c andalso ord c < limit then - scan (radix * num + (ord c - zero), cs) - else (num, c :: cs); - in scan (0, cs) end; - -val read_int = read_radix_int 10; - -fun oct_char s = chr (#1 (read_radix_int 8 (raw_explode s))); - - - -(** strings **) - -(* functions tuned for strings, avoiding explode *) - -fun nth_string str i = - (case try String.substring (str, i, 1) of - SOME s => s - | NONE => raise Subscript); - -fun fold_string f str x0 = - let - val n = size str; - fun iter (x, i) = - if i < n then iter (f (String.substring (str, i, 1)) x, i + 1) else x; - in iter (x0, 0) end; - -fun exists_string pred str = - let - val n = size str; - fun ex i = i < n andalso (pred (String.substring (str, i, 1)) orelse ex (i + 1)); - in ex 0 end; - -fun forall_string pred = not o exists_string (not o pred); - -fun first_field sep str = - let - val n = size sep; - val len = size str; - fun find i = - if i + n > len then NONE - else if String.substring (str, i, n) = sep then SOME i - else find (i + 1); - in - (case find 0 of - NONE => NONE - | SOME i => SOME (String.substring (str, 0, i), String.extract (str, i + n, NONE))) - end; - -(*enclose in brackets*) -fun enclose lpar rpar str = lpar ^ str ^ rpar; -fun unenclose str = String.substring (str, 1, size str - 2); - -(*simple quoting (does not escape special chars)*) -val quote = enclose "\"" "\""; - -val cartouche = enclose "\\" "\\"; - -fun space_implode a bs = implode (separate a bs); - -val commas = space_implode ", "; -val commas_quote = commas o map quote; - -val cat_lines = space_implode "\n"; - -(*space_explode "." 
"h.e..l.lo" = ["h", "e", "", "l", "lo"]*) -fun space_explode _ "" = [] - | space_explode sep s = String.fields (fn c => str c = sep) s; - -val split_lines = space_explode "\n"; - -fun plain_words s = space_explode "_" s |> space_implode " "; - -fun prefix_lines "" txt = txt - | prefix_lines prfx txt = txt |> split_lines |> map (fn s => prfx ^ s) |> cat_lines; - -fun prefix prfx s = prfx ^ s; -fun suffix sffx s = s ^ sffx; - -fun unprefix prfx s = - if String.isPrefix prfx s then String.substring (s, size prfx, size s - size prfx) - else raise Fail "unprefix"; - -fun unsuffix sffx s = - if String.isSuffix sffx s then String.substring (s, 0, size s - size sffx) - else raise Fail "unsuffix"; - -val trim_line = perhaps (try (unsuffix "\n")); - -fun replicate_string (0: int) _ = "" - | replicate_string 1 a = a - | replicate_string k a = - if k mod 2 = 0 then replicate_string (k div 2) (a ^ a) - else replicate_string (k div 2) (a ^ a) ^ a; - -fun translate_string f = String.translate (f o String.str); - -(*crude matching of str against simple glob pat*) -fun match_string pat str = - let - fun match [] _ = true - | match (p :: ps) s = - size p <= size s andalso - (case try (unprefix p) s of - SOME s' => match ps s' - | NONE => match (p :: ps) (String.substring (s, 1, size s - 1))); - in match (space_explode "*" pat) str end; - - -(** reals **) - -val string_of_real = Real.fmt (StringCvt.GEN NONE); - -fun signed_string_of_real x = - if x < 0.0 then "-" ^ string_of_real (~ x) else string_of_real x; - - - -(** lists as sets -- see also Pure/General/ord_list.ML **) - -(* canonical operations *) - -fun member eq list x = - let - fun memb [] = false - | memb (y :: ys) = eq (x, y) orelse memb ys; - in memb list end; - -fun insert eq x xs = if member eq xs x then xs else x :: xs; -fun remove eq x xs = if member eq xs x then filter_out (fn y => eq (x, y)) xs else xs; -fun update eq x xs = cons x (remove eq x xs); - -fun inter eq xs = filter (member eq xs); - -fun union eq = fold (insert eq); -fun subtract eq = fold (remove eq); - -fun merge eq (xs, ys) = - if pointer_eq (xs, ys) then xs - else if null xs then ys - else fold_rev (insert eq) ys xs; - - -(* subset and set equality *) - -fun subset eq (xs, ys) = forall (member eq ys) xs; - -fun eq_set eq (xs, ys) = - eq_list eq (xs, ys) orelse - (subset eq (xs, ys) andalso subset (eq o swap) (ys, xs)); - - -(*makes a list of the distinct members of the input; preserves order, takes - first of equal elements*) -fun distinct eq lst = - let - fun dist (rev_seen, []) = rev rev_seen - | dist (rev_seen, x :: xs) = - if member eq rev_seen x then dist (rev_seen, xs) - else dist (x :: rev_seen, xs); - in dist ([], lst) end; - -(*returns a list containing all repeated elements exactly once; preserves - order, takes first of equal elements*) -fun duplicates eq lst = - let - fun dups (rev_dups, []) = rev rev_dups - | dups (rev_dups, x :: xs) = - if member eq rev_dups x orelse not (member eq xs x) then - dups (rev_dups, xs) - else dups (x :: rev_dups, xs); - in dups ([], lst) end; - -fun has_duplicates eq = - let - fun dups [] = false - | dups (x :: xs) = member eq xs x orelse dups xs; - in dups end; - - -(* matrices *) - -fun map_transpose f xss = - let - val n = - (case distinct (op =) (map length xss) of - [] => 0 - | [n] => n - | _ => raise ListPair.UnequalLengths); - in map_range (fn m => f (map (fn xs => nth xs m) xss)) n end; - - - -(** lists as multisets **) - -fun remove1 eq x [] = [] - | remove1 eq x (y :: ys) = if eq (x, y) then ys else y :: remove1 eq x ys; - 
-fun combine eq xs ys = fold (remove1 eq) ys xs @ ys; - -fun submultiset _ ([], _) = true - | submultiset eq (x :: xs, ys) = member eq ys x andalso submultiset eq (xs, remove1 eq x ys); - - - -(** orders **) - -fun is_equal EQUAL = true - | is_equal _ = false; - -fun rev_order LESS = GREATER - | rev_order EQUAL = EQUAL - | rev_order GREATER = LESS; - -(*assume rel is a linear strict order*) -fun make_ord rel (x, y) = - if rel (x, y) then LESS - else if rel (y, x) then GREATER - else EQUAL; - -fun bool_ord (false, true) = LESS - | bool_ord (true, false) = GREATER - | bool_ord _ = EQUAL; - -val int_ord = Int.compare; -val string_ord = String.compare; - -fun fast_string_ord (s1, s2) = - if pointer_eq (s1, s2) then EQUAL - else (case int_ord (size s1, size s2) of EQUAL => string_ord (s1, s2) | ord => ord); - -fun option_ord ord (SOME x, SOME y) = ord (x, y) - | option_ord _ (NONE, NONE) = EQUAL - | option_ord _ (NONE, SOME _) = LESS - | option_ord _ (SOME _, NONE) = GREATER; - -(*lexicographic product*) -fun prod_ord a_ord b_ord ((x, y), (x', y')) = - (case a_ord (x, x') of EQUAL => b_ord (y, y') | ord => ord); - -(*dictionary order -- in general NOT well-founded!*) -fun dict_ord elem_ord (x :: xs, y :: ys) = - (case elem_ord (x, y) of EQUAL => dict_ord elem_ord (xs, ys) | ord => ord) - | dict_ord _ ([], []) = EQUAL - | dict_ord _ ([], _ :: _) = LESS - | dict_ord _ (_ :: _, []) = GREATER; - -(*lexicographic product of lists*) -fun list_ord elem_ord (xs, ys) = - (case int_ord (length xs, length ys) of EQUAL => dict_ord elem_ord (xs, ys) | ord => ord); - - -(* sorting *) - -(*stable mergesort -- preserves order of equal elements*) -fun mergesort unique ord = - let - fun merge (xs as x :: xs') (ys as y :: ys') = - (case ord (x, y) of - LESS => x :: merge xs' ys - | EQUAL => - if unique then merge xs ys' - else x :: merge xs' ys - | GREATER => y :: merge xs ys') - | merge [] ys = ys - | merge xs [] = xs; - - fun merge_all [xs] = xs - | merge_all xss = merge_all (merge_pairs xss) - and merge_pairs (xs :: ys :: xss) = merge xs ys :: merge_pairs xss - | merge_pairs xss = xss; - - fun runs (x :: y :: xs) = - (case ord (x, y) of - LESS => ascending y [x] xs - | EQUAL => - if unique then runs (x :: xs) - else ascending y [x] xs - | GREATER => descending y [x] xs) - | runs xs = [xs] - - and ascending x xs (zs as y :: ys) = - (case ord (x, y) of - LESS => ascending y (x :: xs) ys - | EQUAL => - if unique then ascending x xs ys - else ascending y (x :: xs) ys - | GREATER => rev (x :: xs) :: runs zs) - | ascending x xs [] = [rev (x :: xs)] - - and descending x xs (zs as y :: ys) = - (case ord (x, y) of - GREATER => descending y (x :: xs) ys - | EQUAL => - if unique then descending x xs ys - else (x :: xs) :: runs zs - | LESS => (x :: xs) :: runs zs) - | descending x xs [] = [x :: xs]; - - in merge_all o runs end; - -fun sort ord = mergesort false ord; -fun sort_distinct ord = mergesort true ord; - -val sort_strings = sort string_ord; -fun sort_wrt key xs = sort (string_ord o pairself key) xs; - - -(* items tagged by integer index *) - -(*insert tags*) -fun tag_list k [] = [] - | tag_list k (x :: xs) = (k:int, x) :: tag_list (k + 1) xs; - -(*remove tags and suppress duplicates -- list is assumed sorted!*) -fun untag_list [] = [] - | untag_list [(k: int, x)] = [x] - | untag_list ((k, x) :: (rest as (k', x') :: _)) = - if k = k' then untag_list rest - else x :: untag_list rest; - -(*return list elements in original order*) -fun order_list list = untag_list (sort (int_ord o pairself fst) list); - - - -(** 
random numbers **) - -exception RANDOM; - -fun rmod x y = x - y * Real.realFloor (x / y); - -local - val a = 16807.0; - val m = 2147483647.0; - val random_seed = Unsynchronized.ref 1.0; -in - -fun random () = CRITICAL (fn () => - let val r = rmod (a * ! random_seed) m - in (random_seed := r; r) end); - -end; - -fun random_range l h = - if h < l orelse l < 0 then raise RANDOM - else l + Real.floor (rmod (random ()) (real (h - l + 1))); - - - -(** misc **) - -fun divide_and_conquer decomp x = - let val (ys, recomb) = decomp x - in recomb (map (divide_and_conquer decomp) ys) end; - -fun divide_and_conquer' decomp x s = - let val ((ys, recomb), s') = decomp x s - in recomb (fold_map (divide_and_conquer' decomp) ys s') end; - - -(*Partition a list into buckets [ bi, b(i+1), ..., bj ] - putting x in bk if p(k)(x) holds. Preserve order of elements if possible.*) -fun partition_list p i j = - let - fun part (k: int) xs = - if k > j then - (case xs of - [] => [] - | _ => raise Fail "partition_list") - else - let val (ns, rest) = List.partition (p k) xs - in ns :: part (k + 1) rest end; - in part (i: int) end; - -fun partition_eq (eq: 'a * 'a -> bool) = - let - fun part [] = [] - | part (x :: ys) = - let val (xs, xs') = List.partition (fn y => eq (x, y)) ys - in (x :: xs) :: part xs' end; - in part end; - - -(* serial numbers and abstract stamps *) - -type serial = int; -val serial = Multithreading.serial; -val serial_string = string_of_int o serial; - -datatype stamp = Stamp of serial; -fun stamp () = Stamp (serial ()); - - -(* values of any type *) - -(*note that the builtin exception datatype may be extended by new - constructors at any time*) -structure Any = struct type T = exn end; - - -(* current directory *) - -val cd = OS.FileSys.chDir; -val pwd = OS.FileSys.getDir; - - -(* getenv *) - -fun getenv x = - (case OS.Process.getEnv x of - NONE => "" - | SOME y => y); - -fun getenv_strict x = - (case getenv x of - "" => error ("Undefined Isabelle environment variable: " ^ quote x) - | y => y); - -end; - -structure Basic_Library: BASIC_LIBRARY = Library; -open Basic_Library; - diff --git a/core/Pure/library.scala b/core/Pure/library.scala deleted file mode 100644 index ffe36ef0..00000000 --- a/core/Pure/library.scala +++ /dev/null @@ -1,181 +0,0 @@ -/* Title: Pure/library.scala - Module: PIDE - Author: Makarius - -Basic library. 
-*/ - -package isabelle - - -import scala.collection.mutable - - -object Library -{ - /* user errors */ - - object ERROR - { - def apply(message: String): Throwable = new RuntimeException(message) - def unapply(exn: Throwable): Option[String] = Exn.user_message(exn) - } - - def error(message: String): Nothing = throw ERROR(message) - - def cat_message(msg1: String, msg2: String): String = - if (msg1 == "") msg2 - else if (msg2 == "") msg1 - else msg1 + "\n" + msg2 - - def cat_error(msg1: String, msg2: String): Nothing = - error(cat_message(msg1, msg2)) - - - /* separated chunks */ - - def separate[A](s: A, list: List[A]): List[A] = - { - val result = new mutable.ListBuffer[A] - var first = true - for (x <- list) { - if (first) { - first = false - result += x - } - else { - result += s - result += x - } - } - result.toList - } - - def separated_chunks(sep: Char => Boolean, source: CharSequence): Iterator[CharSequence] = - new Iterator[CharSequence] { - private val end = source.length - private def next_chunk(i: Int): Option[(CharSequence, Int)] = - { - if (i < end) { - var j = i; do j += 1 while (j < end && !sep(source.charAt(j))) - Some((source.subSequence(i + 1, j), j)) - } - else None - } - private var state: Option[(CharSequence, Int)] = if (end == 0) None else next_chunk(-1) - - def hasNext(): Boolean = state.isDefined - def next(): CharSequence = - state match { - case Some((s, i)) => { state = next_chunk(i); s } - case None => Iterator.empty.next() - } - } - - def space_explode(sep: Char, str: String): List[String] = - separated_chunks(_ == sep, str).map(_.toString).toList - - - /* lines */ - - def terminate_lines(lines: Iterable[CharSequence]): Iterable[CharSequence] = - new Iterable[CharSequence] { - def iterator: Iterator[CharSequence] = - lines.iterator.map(line => new Line_Termination(line)) - } - - def cat_lines(lines: TraversableOnce[String]): String = lines.mkString("\n") - - def split_lines(str: String): List[String] = space_explode('\n', str) - - def first_line(source: CharSequence): String = - { - val lines = separated_chunks(_ == '\n', source) - if (lines.hasNext) lines.next.toString - else "" - } - - - /* strings */ - - def try_unprefix(prfx: String, s: String): Option[String] = - if (s.startsWith(prfx)) Some(s.substring(prfx.length)) else None - - def try_unsuffix(sffx: String, s: String): Option[String] = - if (s.endsWith(sffx)) Some(s.substring(0, s.length - sffx.length)) else None - - def trim_line(s: String): String = - if (s.endsWith("\r\n")) s.substring(0, s.length - 2) - else if (s.endsWith("\r") || s.endsWith("\n")) s.substring(0, s.length - 1) - else s - - - /* quote */ - - def quote(s: String): String = "\"" + s + "\"" - - def try_unquote(s: String): Option[String] = - if (s.startsWith("\"") && s.endsWith("\"")) Some(s.substring(1, s.length - 1)) - else None - - def commas(ss: Iterable[String]): String = ss.iterator.mkString(", ") - def commas_quote(ss: Iterable[String]): String = ss.iterator.map(quote).mkString(", ") - - - /* CharSequence */ - - class Reverse(text: CharSequence, start: Int, end: Int) extends CharSequence - { - require(0 <= start && start <= end && end <= text.length) - - def this(text: CharSequence) = this(text, 0, text.length) - - def length: Int = end - start - def charAt(i: Int): Char = text.charAt(end - i - 1) - - def subSequence(i: Int, j: Int): CharSequence = - if (0 <= i && i <= j && j <= length) new Reverse(text, end - j, end - i) - else throw new IndexOutOfBoundsException - - override def toString: String = - { - val buf = new 
StringBuilder(length) - for (i <- 0 until length) - buf.append(charAt(i)) - buf.toString - } - } - - class Line_Termination(text: CharSequence) extends CharSequence - { - def length: Int = text.length + 1 - def charAt(i: Int): Char = if (i == text.length) '\n' else text.charAt(i) - def subSequence(i: Int, j: Int): CharSequence = - if (j == text.length + 1) new Line_Termination(text.subSequence(i, j - 1)) - else text.subSequence(i, j) - override def toString: String = text.toString + "\n" - } - - - /* canonical list operations */ - - def member[A, B](xs: List[A])(x: B): Boolean = xs.exists(_ == x) - def insert[A](x: A)(xs: List[A]): List[A] = if (xs.contains(x)) xs else x :: xs - def remove[A, B](x: B)(xs: List[A]): List[A] = if (member(xs)(x)) xs.filterNot(_ == x) else xs - def update[A](x: A)(xs: List[A]): List[A] = x :: remove(x)(xs) -} - - -class Basic_Library -{ - val ERROR = Library.ERROR - val error = Library.error _ - val cat_error = Library.cat_error _ - - val space_explode = Library.space_explode _ - val split_lines = Library.split_lines _ - val cat_lines = Library.cat_lines _ - val quote = Library.quote _ - val commas = Library.commas _ - val commas_quote = Library.commas_quote _ -} diff --git a/core/Pure/logic.ML b/core/Pure/logic.ML deleted file mode 100644 index 8accb0c0..00000000 --- a/core/Pure/logic.ML +++ /dev/null @@ -1,581 +0,0 @@ -(* Title: Pure/logic.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - Author: Makarius - -Abstract syntax operations of the Pure meta-logic. -*) - -signature LOGIC = -sig - val all_const: typ -> term - val all: term -> term -> term - val is_all: term -> bool - val dest_all: term -> (string * typ) * term - val list_all: (string * typ) list * term -> term - val mk_equals: term * term -> term - val dest_equals: term -> term * term - val implies: term - val mk_implies: term * term -> term - val dest_implies: term -> term * term - val list_implies: term list * term -> term - val strip_imp_prems: term -> term list - val strip_imp_concl: term -> term - val strip_prems: int * term list * term -> term list * term - val count_prems: term -> int - val nth_prem: int * term -> term - val true_prop: term - val conjunction: term - val mk_conjunction: term * term -> term - val mk_conjunction_list: term list -> term - val mk_conjunction_balanced: term list -> term - val dest_conjunction: term -> term * term - val dest_conjunction_list: term -> term list - val dest_conjunction_balanced: int -> term -> term list - val dest_conjunctions: term -> term list - val strip_horn: term -> term list * term - val mk_type: typ -> term - val dest_type: term -> typ - val type_map: (term -> term) -> typ -> typ - val const_of_class: class -> string - val class_of_const: string -> class - val mk_of_class: typ * class -> term - val dest_of_class: term -> typ * class - val mk_of_sort: typ * sort -> term list - val name_classrel: string * string -> string - val mk_classrel: class * class -> term - val dest_classrel: term -> class * class - val name_arities: arity -> string list - val name_arity: string * sort list * class -> string - val mk_arities: arity -> term list - val dest_arity: term -> string * sort list * class - val unconstrainT: sort list -> term -> - ((typ -> typ) * ((typ * class) * term) list * (typ * class) list) * term - val protectC: term - val protect: term -> term - val unprotect: term -> term - val mk_term: term -> term - val dest_term: term -> term - val occs: term * term -> bool - val close_form: term -> term - val combound: term * 
int * int -> term - val rlist_abs: (string * typ) list * term -> term - val incr_tvar_same: int -> typ Same.operation - val incr_tvar: int -> typ -> typ - val incr_indexes_same: typ list * int -> term Same.operation - val incr_indexes: typ list * int -> term -> term - val lift_abs: int -> term -> term -> term - val lift_all: int -> term -> term -> term - val strip_assums_hyp: term -> term list - val strip_assums_concl: term -> term - val strip_params: term -> (string * typ) list - val has_meta_prems: term -> bool - val flatten_params: int -> term -> term - val list_rename_params: string list -> term -> term - val assum_pairs: int * term -> (term * term) list - val assum_problems: int * term -> (term -> term) * term list * term - val varifyT_global: typ -> typ - val unvarifyT_global: typ -> typ - val varify_types_global: term -> term - val unvarify_types_global: term -> term - val varify_global: term -> term - val unvarify_global: term -> term - val get_goal: term -> int -> term - val goal_params: term -> int -> term * term list - val prems_of_goal: term -> int -> term list - val concl_of_goal: term -> int -> term -end; - -structure Logic : LOGIC = -struct - - -(*** Abstract syntax operations on the meta-connectives ***) - -(** all **) - -fun all_const T = Const ("Pure.all", (T --> propT) --> propT); - -fun all v t = all_const (Term.fastype_of v) $ lambda v t; - -fun is_all (Const ("Pure.all", _) $ Abs _) = true - | is_all _ = false; - -fun dest_all (Const ("Pure.all", _) $ Abs (abs as (_, T, _))) = - let val (x, b) = Term.dest_abs abs (*potentially slow*) - in ((x, T), b) end - | dest_all t = raise TERM ("dest_all", [t]); - -fun list_all ([], t) = t - | list_all ((a, T) :: vars, t) = all_const T $ Abs (a, T, list_all (vars, t)); - - -(** equality **) - -fun mk_equals (t, u) = - let val T = Term.fastype_of t - in Const ("Pure.eq", T --> T --> propT) $ t $ u end; - -fun dest_equals (Const ("Pure.eq", _) $ t $ u) = (t, u) - | dest_equals t = raise TERM ("dest_equals", [t]); - - -(** implies **) - -val implies = Const ("Pure.imp", propT --> propT --> propT); - -fun mk_implies (A, B) = implies $ A $ B; - -fun dest_implies (Const ("Pure.imp", _) $ A $ B) = (A, B) - | dest_implies A = raise TERM ("dest_implies", [A]); - - -(** nested implications **) - -(* [A1,...,An], B goes to A1==>...An==>B *) -fun list_implies ([], B) = B - | list_implies (A::As, B) = implies $ A $ list_implies(As,B); - -(* A1==>...An==>B goes to [A1,...,An], where B is not an implication *) -fun strip_imp_prems (Const("Pure.imp", _) $ A $ B) = A :: strip_imp_prems B - | strip_imp_prems _ = []; - -(* A1==>...An==>B goes to B, where B is not an implication *) -fun strip_imp_concl (Const("Pure.imp", _) $ A $ B) = strip_imp_concl B - | strip_imp_concl A = A : term; - -(*Strip and return premises: (i, [], A1==>...Ai==>B) - goes to ([Ai, A(i-1),...,A1] , B) (REVERSED) - if i<0 or else i too big then raises TERM*) -fun strip_prems (0, As, B) = (As, B) - | strip_prems (i, As, Const("Pure.imp", _) $ A $ B) = - strip_prems (i-1, A::As, B) - | strip_prems (_, As, A) = raise TERM("strip_prems", A::As); - -(*Count premises -- quicker than (length o strip_prems) *) -fun count_prems (Const ("Pure.imp", _) $ _ $ B) = 1 + count_prems B - | count_prems _ = 0; - -(*Select Ai from A1 ==>...Ai==>B*) -fun nth_prem (1, Const ("Pure.imp", _) $ A $ _) = A - | nth_prem (i, Const ("Pure.imp", _) $ _ $ B) = nth_prem (i - 1, B) - | nth_prem (_, A) = raise TERM ("nth_prem", [A]); - -(*strip a proof state (Horn clause): - B1 ==> ... 
Bn ==> C goes to ([B1, ..., Bn], C) *) -fun strip_horn A = (strip_imp_prems A, strip_imp_concl A); - - - -(** conjunction **) - -val true_prop = all_const propT $ Abs ("dummy", propT, mk_implies (Bound 0, Bound 0)); -val conjunction = Const ("Pure.conjunction", propT --> propT --> propT); - - -(*A &&& B*) -fun mk_conjunction (A, B) = conjunction $ A $ B; - -(*A &&& B &&& C -- improper*) -fun mk_conjunction_list [] = true_prop - | mk_conjunction_list ts = foldr1 mk_conjunction ts; - -(*(A &&& B) &&& (C &&& D) -- balanced*) -fun mk_conjunction_balanced [] = true_prop - | mk_conjunction_balanced ts = Balanced_Tree.make mk_conjunction ts; - - -(*A &&& B*) -fun dest_conjunction (Const ("Pure.conjunction", _) $ A $ B) = (A, B) - | dest_conjunction t = raise TERM ("dest_conjunction", [t]); - -(*A &&& B &&& C -- improper*) -fun dest_conjunction_list t = - (case try dest_conjunction t of - NONE => [t] - | SOME (A, B) => A :: dest_conjunction_list B); - -(*(A &&& B) &&& (C &&& D) -- balanced*) -fun dest_conjunction_balanced 0 _ = [] - | dest_conjunction_balanced n t = Balanced_Tree.dest dest_conjunction n t; - -(*((A &&& B) &&& C) &&& D &&& E -- flat*) -fun dest_conjunctions t = - (case try dest_conjunction t of - NONE => [t] - | SOME (A, B) => dest_conjunctions A @ dest_conjunctions B); - - - -(** types as terms **) - -fun mk_type ty = Const ("Pure.type", Term.itselfT ty); - -fun dest_type (Const ("Pure.type", Type ("itself", [ty]))) = ty - | dest_type t = raise TERM ("dest_type", [t]); - -fun type_map f = dest_type o f o mk_type; - - - -(** type classes **) - -(* const names *) - -val classN = "_class"; - -val const_of_class = suffix classN; - -fun class_of_const c = unsuffix classN c - handle Fail _ => raise TERM ("class_of_const: bad name " ^ quote c, []); - - -(* class/sort membership *) - -fun mk_of_class (ty, c) = - Const (const_of_class c, Term.itselfT ty --> propT) $ mk_type ty; - -fun dest_of_class (Const (c_class, _) $ ty) = (dest_type ty, class_of_const c_class) - | dest_of_class t = raise TERM ("dest_of_class", [t]); - -fun mk_of_sort (ty, S) = map (fn c => mk_of_class (ty, c)) S; - - -(* class relations *) - -fun name_classrel (c1, c2) = - Long_Name.base_name c1 ^ "_" ^ Long_Name.base_name c2; - -fun mk_classrel (c1, c2) = mk_of_class (Term.aT [c1], c2); - -fun dest_classrel tm = - (case dest_of_class tm of - (TVar (_, [c1]), c2) => (c1, c2) - | _ => raise TERM ("dest_classrel", [tm])); - - -(* type arities *) - -fun name_arities (t, _, S) = - let val b = Long_Name.base_name t - in S |> map (fn c => Long_Name.base_name c ^ "_" ^ b) end; - -fun name_arity (t, dom, c) = hd (name_arities (t, dom, [c])); - -fun mk_arities (t, Ss, S) = - let val T = Type (t, ListPair.map TFree (Name.invent Name.context Name.aT (length Ss), Ss)) - in map (fn c => mk_of_class (T, c)) S end; - -fun dest_arity tm = - let - fun err () = raise TERM ("dest_arity", [tm]); - - val (ty, c) = dest_of_class tm; - val (t, tvars) = - (case ty of - Type (t, tys) => (t, map dest_TVar tys handle TYPE _ => err ()) - | _ => err ()); - val Ss = - if has_duplicates (eq_fst (op =)) tvars then err () - else map snd tvars; - in (t, Ss, c) end; - - -(* internalized sort constraints *) - -fun unconstrainT shyps prop = - let - val present = rev ((fold_types o fold_atyps_sorts) (insert (eq_fst op =)) prop []); - val extra = fold (Sorts.remove_sort o #2) present shyps; - - val n = length present; - val (names1, names2) = Name.invent Name.context Name.aT (n + length extra) |> chop n; - - val present_map = - map2 (fn (T, S) => fn a => 
(T, TVar ((a, 0), S))) present names1; - val constraints_map = - map2 (fn (_, S) => fn a => (S, TVar ((a, 0), S))) present names1 @ - map2 (fn S => fn a => (S, TVar ((a, 0), S))) extra names2; - - fun atyp_map T = - (case AList.lookup (op =) present_map T of - SOME U => U - | NONE => - (case AList.lookup (op =) constraints_map (Type.sort_of_atyp T) of - SOME U => U - | NONE => raise TYPE ("Dangling type variable", [T], []))); - - val constraints = - maps (fn (_, T as TVar (ai, S)) => - map (fn c => ((T, c), mk_of_class (TVar (ai, []), c))) S) - constraints_map; - - val outer_constraints = - maps (fn (T, S) => map (pair T) S) - (present @ map (fn S => (TFree ("'dummy", S), S)) extra); - - val prop' = - prop - |> (Term.map_types o Term.map_atyps) (Type.strip_sorts o atyp_map) - |> curry list_implies (map snd constraints); - in ((atyp_map, constraints, outer_constraints), prop') end; - - - -(** protected propositions and embedded terms **) - -val protectC = Const ("Pure.prop", propT --> propT); -fun protect t = protectC $ t; - -fun unprotect (Const ("Pure.prop", _) $ t) = t - | unprotect t = raise TERM ("unprotect", [t]); - - -fun mk_term t = Const ("Pure.term", Term.fastype_of t --> propT) $ t; - -fun dest_term (Const ("Pure.term", _) $ t) = t - | dest_term t = raise TERM ("dest_term", [t]); - - - -(*** Low-level term operations ***) - -(*Does t occur in u? Or is alpha-convertible to u? - The term t must contain no loose bound variables*) -fun occs (t, u) = exists_subterm (fn s => t aconv s) u; - -(*Close up a formula over all free variables by quantification*) -fun close_form A = fold (all o Free) (Term.add_frees A []) A; - - - -(*** Specialized operations for resolution... ***) - -(*computes t(Bound(n+k-1),...,Bound(n)) *) -fun combound (t, n, k) = - if k>0 then combound (t,n+1,k-1) $ (Bound n) else t; - -(* ([xn,...,x1], t) ======> (x1,...,xn)t *) -fun rlist_abs ([], body) = body - | rlist_abs ((a,T)::pairs, body) = rlist_abs(pairs, Abs(a, T, body)); - -fun incr_tvar_same 0 = Same.same - | incr_tvar_same k = Term_Subst.map_atypsT_same - (fn TVar ((a, i), S) => TVar ((a, i + k), S) - | _ => raise Same.SAME); - -fun incr_tvar k T = incr_tvar_same k T handle Same.SAME => T; - -(*For all variables in the term, increment indexnames and lift over the Us - result is ?Gidx(B.(lev+n-1),...,B.lev) where lev is abstraction level *) -fun incr_indexes_same ([], 0) = Same.same - | incr_indexes_same (Ts, k) = - let - val n = length Ts; - val incrT = incr_tvar_same k; - - fun incr lev (Var ((x, i), T)) = - combound (Var ((x, i + k), Ts ---> Same.commit incrT T), lev, n) - | incr lev (Abs (x, T, body)) = - (Abs (x, incrT T, incr (lev + 1) body handle Same.SAME => body) - handle Same.SAME => Abs (x, T, incr (lev + 1) body)) - | incr lev (t $ u) = - (incr lev t $ (incr lev u handle Same.SAME => u) - handle Same.SAME => t $ incr lev u) - | incr _ (Const (c, T)) = Const (c, incrT T) - | incr _ (Free (x, T)) = Free (x, incrT T) - | incr _ (Bound _) = raise Same.SAME; - in incr 0 end; - -fun incr_indexes arg t = incr_indexes_same arg t handle Same.SAME => t; - - -(* Lifting functions from subgoal and increment: - lift_abs operates on terms - lift_all operates on propositions *) - -fun lift_abs inc = - let - fun lift Ts (Const ("Pure.imp", _) $ _ $ B) t = lift Ts B t - | lift Ts (Const ("Pure.all", _) $ Abs (a, T, B)) t = Abs (a, T, lift (T :: Ts) B t) - | lift Ts _ t = incr_indexes (rev Ts, inc) t; - in lift [] end; - -fun lift_all inc = - let - fun lift Ts ((c as Const ("Pure.imp", _)) $ A $ B) t = c $ A $ 
lift Ts B t - | lift Ts ((c as Const ("Pure.all", _)) $ Abs (a, T, B)) t = c $ Abs (a, T, lift (T :: Ts) B t) - | lift Ts _ t = incr_indexes (rev Ts, inc) t; - in lift [] end; - -(*Strips assumptions in goal, yielding list of hypotheses. *) -fun strip_assums_hyp B = - let - fun strip Hs (Const ("Pure.imp", _) $ H $ B) = strip (H :: Hs) B - | strip Hs (Const ("Pure.all", _) $ Abs (a, T, t)) = - strip (map (incr_boundvars 1) Hs) t - | strip Hs B = rev Hs - in strip [] B end; - -(*Strips assumptions in goal, yielding conclusion. *) -fun strip_assums_concl (Const("Pure.imp", _) $ H $ B) = strip_assums_concl B - | strip_assums_concl (Const("Pure.all",_)$Abs(a,T,t)) = strip_assums_concl t - | strip_assums_concl B = B; - -(*Make a list of all the parameters in a subgoal, even if nested*) -fun strip_params (Const("Pure.imp", _) $ H $ B) = strip_params B - | strip_params (Const("Pure.all",_)$Abs(a,T,t)) = (a,T) :: strip_params t - | strip_params B = []; - -(*test for nested meta connectives in prems*) -val has_meta_prems = - let - fun is_meta (Const ("Pure.eq", _) $ _ $ _) = true - | is_meta (Const ("Pure.imp", _) $ _ $ _) = true - | is_meta (Const ("Pure.all", _) $ _) = true - | is_meta _ = false; - fun ex_meta (Const ("Pure.imp", _) $ A $ B) = is_meta A orelse ex_meta B - | ex_meta (Const ("Pure.all", _) $ Abs (_, _, B)) = ex_meta B - | ex_meta _ = false; - in ex_meta end; - -(*Removes the parameters from a subgoal and renumber bvars in hypotheses, - where j is the total number of parameters (precomputed) - If n>0 then deletes assumption n. *) -fun remove_params j n A = - if j=0 andalso n<=0 then A (*nothing left to do...*) - else case A of - Const("Pure.imp", _) $ H $ B => - if n=1 then (remove_params j (n-1) B) - else implies $ (incr_boundvars j H) $ (remove_params j (n-1) B) - | Const("Pure.all",_)$Abs(a,T,t) => remove_params (j-1) n t - | _ => if n>0 then raise TERM("remove_params", [A]) - else A; - -(*Move all parameters to the front of the subgoal, renaming them apart; - if n>0 then deletes assumption n. *) -fun flatten_params n A = - let val params = strip_params A; - val vars = ListPair.zip (Name.variant_list [] (map #1 params), - map #2 params) - in list_all (vars, remove_params (length vars) n A) end; - -(*Makes parameters in a goal have the names supplied by the list cs.*) -fun list_rename_params cs (Const ("Pure.imp", _) $ A $ B) = - implies $ A $ list_rename_params cs B - | list_rename_params (c :: cs) ((a as Const ("Pure.all", _)) $ Abs (_, T, t)) = - a $ Abs (c, T, list_rename_params cs t) - | list_rename_params cs B = B; - - - -(*** Treatment of "assume", "erule", etc. ***) - -(*Strips assumptions in goal yielding - HS = [Hn,...,H1], params = [xm,...,x1], and B, - where x1...xm are the parameters. This version (21.1.2005) REQUIRES - the the parameters to be flattened, but it allows erule to work on - assumptions of the form !!x. phi. Any !! after the outermost string - will be regarded as belonging to the conclusion, and left untouched. - Used ONLY by assum_pairs. 
- Unless nasms<0, it can terminate the recursion early; that allows - erule to work on assumptions of the form P==>Q.*) -fun strip_assums_imp (0, Hs, B) = (Hs, B) (*recursion terminated by nasms*) - | strip_assums_imp (nasms, Hs, Const("Pure.imp", _) $ H $ B) = - strip_assums_imp (nasms-1, H::Hs, B) - | strip_assums_imp (_, Hs, B) = (Hs, B); (*recursion terminated by B*) - -(*Strips OUTER parameters only.*) -fun strip_assums_all (params, Const("Pure.all",_)$Abs(a,T,t)) = - strip_assums_all ((a,T)::params, t) - | strip_assums_all (params, B) = (params, B); - -(*Produces disagreement pairs, one for each assumption proof, in order. - A is the first premise of the lifted rule, and thus has the form - H1 ==> ... Hk ==> B and the pairs are (H1,B),...,(Hk,B). - nasms is the number of assumptions in the original subgoal, needed when B - has the form B1 ==> B2: it stops B1 from being taken as an assumption. *) -fun assum_pairs(nasms,A) = - let val (params, A') = strip_assums_all ([],A) - val (Hs,B) = strip_assums_imp (nasms,[],A') - fun abspar t = rlist_abs(params, t) - val D = abspar B - fun pairrev ([], pairs) = pairs - | pairrev (H::Hs, pairs) = pairrev(Hs, (abspar H, D) :: pairs) - in pairrev (Hs,[]) - end; - -fun assum_problems (nasms, A) = - let - val (params, A') = strip_assums_all ([], A) - val (Hs, B) = strip_assums_imp (nasms, [], A') - fun abspar t = rlist_abs (params, t) - in (abspar, rev Hs, B) end; - - -(* global schematic variables *) - -fun bad_schematic xi = "Illegal schematic variable: " ^ quote (Term.string_of_vname xi); -fun bad_fixed x = "Illegal fixed variable: " ^ quote x; - -fun varifyT_global_same ty = ty - |> Term_Subst.map_atypsT_same - (fn TFree (a, S) => TVar ((a, 0), S) - | TVar (ai, _) => raise TYPE (bad_schematic ai, [ty], [])); - -fun unvarifyT_global_same ty = ty - |> Term_Subst.map_atypsT_same - (fn TVar ((a, 0), S) => TFree (a, S) - | TVar (ai, _) => raise TYPE (bad_schematic ai, [ty], []) - | TFree (a, _) => raise TYPE (bad_fixed a, [ty], [])); - -val varifyT_global = Same.commit varifyT_global_same; -val unvarifyT_global = Same.commit unvarifyT_global_same; - -fun varify_types_global tm = tm - |> Same.commit (Term_Subst.map_types_same varifyT_global_same) - handle TYPE (msg, _, _) => raise TERM (msg, [tm]); - -fun unvarify_types_global tm = tm - |> Same.commit (Term_Subst.map_types_same unvarifyT_global_same) - handle TYPE (msg, _, _) => raise TERM (msg, [tm]); - -fun varify_global tm = tm - |> Same.commit (Term_Subst.map_aterms_same - (fn Free (x, T) => Var ((x, 0), T) - | Var (xi, _) => raise TERM (bad_schematic xi, [tm]) - | _ => raise Same.SAME)) - |> varify_types_global; - -fun unvarify_global tm = tm - |> Same.commit (Term_Subst.map_aterms_same - (fn Var ((x, 0), T) => Free (x, T) - | Var (xi, _) => raise TERM (bad_schematic xi, [tm]) - | Free (x, _) => raise TERM (bad_fixed x, [tm]) - | _ => raise Same.SAME)) - |> unvarify_types_global; - - -(* goal states *) - -fun get_goal st i = - nth_prem (i, st) handle TERM _ => - error ("Subgoal number " ^ string_of_int i ^ " out of range (a total of " ^ - string_of_int (count_prems st) ^ " subgoals)"); - -(*reverses parameters for substitution*) -fun goal_params st i = - let val gi = get_goal st i - val rfrees = map Free (Term.rename_wrt_term gi (strip_params gi)) - in (gi, rfrees) end; - -fun concl_of_goal st i = - let val (gi, rfrees) = goal_params st i - val B = strip_assums_concl gi - in subst_bounds (rfrees, B) end; - -fun prems_of_goal st i = - let val (gi, rfrees) = goal_params st i - val As = 
strip_assums_hyp gi - in map (fn A => subst_bounds (rfrees, A)) As end; - -end; diff --git a/core/Pure/more_thm.ML b/core/Pure/more_thm.ML deleted file mode 100644 index ab8981ee..00000000 --- a/core/Pure/more_thm.ML +++ /dev/null @@ -1,541 +0,0 @@ -(* Title: Pure/more_thm.ML - Author: Makarius - -Further operations on type ctyp/cterm/thm, outside the inference kernel. -*) - -infix aconvc; - -signature BASIC_THM = -sig - include BASIC_THM - structure Ctermtab: TABLE - structure Thmtab: TABLE - val aconvc: cterm * cterm -> bool - type attribute = Context.generic * thm -> Context.generic option * thm option -end; - -signature THM = -sig - include THM - structure Ctermtab: TABLE - structure Thmtab: TABLE - val aconvc: cterm * cterm -> bool - val add_cterm_frees: cterm -> cterm list -> cterm list - val all_name: string * cterm -> cterm -> cterm - val all: cterm -> cterm -> cterm - val mk_binop: cterm -> cterm -> cterm -> cterm - val dest_binop: cterm -> cterm * cterm - val dest_implies: cterm -> cterm * cterm - val dest_equals: cterm -> cterm * cterm - val dest_equals_lhs: cterm -> cterm - val dest_equals_rhs: cterm -> cterm - val lhs_of: thm -> cterm - val rhs_of: thm -> cterm - val thm_ord: thm * thm -> order - val cterm_cache: (cterm -> 'a) -> cterm -> 'a - val thm_cache: (thm -> 'a) -> thm -> 'a - val is_reflexive: thm -> bool - val eq_thm: thm * thm -> bool - val eq_thm_prop: thm * thm -> bool - val eq_thm_strict: thm * thm -> bool - val equiv_thm: thm * thm -> bool - val class_triv: theory -> class -> thm - val of_sort: ctyp * sort -> thm list - val check_shyps: sort list -> thm -> thm - val is_dummy: thm -> bool - val plain_prop_of: thm -> term - val add_thm: thm -> thm list -> thm list - val del_thm: thm -> thm list -> thm list - val merge_thms: thm list * thm list -> thm list - val full_rules: thm Item_Net.T - val intro_rules: thm Item_Net.T - val elim_rules: thm Item_Net.T - val declare_hyps: cterm -> Proof.context -> Proof.context - val assume_hyps: cterm -> Proof.context -> thm * Proof.context - val unchecked_hyps: Proof.context -> Proof.context - val restore_hyps: Proof.context -> Proof.context -> Proof.context - val undeclared_hyps: Context.generic -> thm -> term list - val check_hyps: Context.generic -> thm -> thm - val elim_implies: thm -> thm -> thm - val forall_elim_var: int -> thm -> thm - val forall_elim_vars: int -> thm -> thm - val certify_inst: theory -> - ((indexname * sort) * typ) list * ((indexname * typ) * term) list -> - (ctyp * ctyp) list * (cterm * cterm) list - val certify_instantiate: - ((indexname * sort) * typ) list * ((indexname * typ) * term) list -> thm -> thm - val forall_intr_frees: thm -> thm - val unvarify_global: thm -> thm - val close_derivation: thm -> thm - val add_axiom: Proof.context -> binding * term -> theory -> (string * thm) * theory - val add_axiom_global: binding * term -> theory -> (string * thm) * theory - val add_def: Proof.context -> bool -> bool -> binding * term -> theory -> (string * thm) * theory - val add_def_global: bool -> bool -> binding * term -> theory -> (string * thm) * theory - type attribute = Context.generic * thm -> Context.generic option * thm option - type binding = binding * attribute list - val empty_binding: binding - val rule_attribute: (Context.generic -> thm -> thm) -> attribute - val declaration_attribute: (thm -> Context.generic -> Context.generic) -> attribute - val mixed_attribute: (Context.generic * thm -> Context.generic * thm) -> attribute - val apply_attribute: attribute -> thm -> Context.generic -> thm * 
Context.generic - val attribute_declaration: attribute -> thm -> Context.generic -> Context.generic - val theory_attributes: attribute list -> thm -> theory -> thm * theory - val proof_attributes: attribute list -> thm -> Proof.context -> thm * Proof.context - val no_attributes: 'a -> 'a * 'b list - val simple_fact: 'a -> ('a * 'b list) list - val tag_rule: string * string -> thm -> thm - val untag_rule: string -> thm -> thm - val tag: string * string -> attribute - val untag: string -> attribute - val def_name: string -> string - val def_name_optional: string -> string -> string - val def_binding: Binding.binding -> Binding.binding - val def_binding_optional: Binding.binding -> Binding.binding -> Binding.binding - val has_name_hint: thm -> bool - val get_name_hint: thm -> string - val put_name_hint: string -> thm -> thm - val theoremK: string - val lemmaK: string - val corollaryK: string - val legacy_get_kind: thm -> string - val kind_rule: string -> thm -> thm - val kind: string -> attribute - val register_proofs: thm list -> theory -> theory - val join_theory_proofs: theory -> unit -end; - -structure Thm: THM = -struct - -(** basic operations **) - -(* collecting cterms *) - -val op aconvc = op aconv o pairself Thm.term_of; - -fun add_cterm_frees ct = - let - val cert = Thm.cterm_of (Thm.theory_of_cterm ct); - val t = Thm.term_of ct; - in Term.fold_aterms (fn v as Free _ => insert (op aconvc) (cert v) | _ => I) t end; - - -(* cterm constructors and destructors *) - -fun all_name (x, t) A = - let - val cert = Thm.cterm_of (Thm.theory_of_cterm t); - val T = #T (Thm.rep_cterm t); - in Thm.apply (cert (Const ("Pure.all", (T --> propT) --> propT))) (Thm.lambda_name (x, t) A) end; - -fun all t A = all_name ("", t) A; - -fun mk_binop c a b = Thm.apply (Thm.apply c a) b; -fun dest_binop ct = (Thm.dest_arg1 ct, Thm.dest_arg ct); - -fun dest_implies ct = - (case Thm.term_of ct of - Const ("Pure.imp", _) $ _ $ _ => dest_binop ct - | _ => raise TERM ("dest_implies", [Thm.term_of ct])); - -fun dest_equals ct = - (case Thm.term_of ct of - Const ("Pure.eq", _) $ _ $ _ => dest_binop ct - | _ => raise TERM ("dest_equals", [Thm.term_of ct])); - -fun dest_equals_lhs ct = - (case Thm.term_of ct of - Const ("Pure.eq", _) $ _ $ _ => Thm.dest_arg1 ct - | _ => raise TERM ("dest_equals_lhs", [Thm.term_of ct])); - -fun dest_equals_rhs ct = - (case Thm.term_of ct of - Const ("Pure.eq", _) $ _ $ _ => Thm.dest_arg ct - | _ => raise TERM ("dest_equals_rhs", [Thm.term_of ct])); - -val lhs_of = dest_equals_lhs o Thm.cprop_of; -val rhs_of = dest_equals_rhs o Thm.cprop_of; - - -(* thm order: ignores theory context! 
*) - -fun thm_ord (th1, th2) = - let - val {shyps = shyps1, hyps = hyps1, tpairs = tpairs1, prop = prop1, ...} = Thm.rep_thm th1; - val {shyps = shyps2, hyps = hyps2, tpairs = tpairs2, prop = prop2, ...} = Thm.rep_thm th2; - in - (case Term_Ord.fast_term_ord (prop1, prop2) of - EQUAL => - (case list_ord (prod_ord Term_Ord.fast_term_ord Term_Ord.fast_term_ord) (tpairs1, tpairs2) of - EQUAL => - (case list_ord Term_Ord.fast_term_ord (hyps1, hyps2) of - EQUAL => list_ord Term_Ord.sort_ord (shyps1, shyps2) - | ord => ord) - | ord => ord) - | ord => ord) - end; - - -(* tables and caches *) - -structure Ctermtab = Table(type key = cterm val ord = Term_Ord.fast_term_ord o pairself Thm.term_of); -structure Thmtab = Table(type key = thm val ord = thm_ord); - -fun cterm_cache f = Cache.create Ctermtab.empty Ctermtab.lookup Ctermtab.update f; -fun thm_cache f = Cache.create Thmtab.empty Thmtab.lookup Thmtab.update f; - - -(* equality *) - -fun is_reflexive th = op aconv (Logic.dest_equals (Thm.prop_of th)) - handle TERM _ => false; - -val eq_thm = is_equal o thm_ord; - -val eq_thm_prop = op aconv o pairself Thm.full_prop_of; - -fun eq_thm_strict ths = - eq_thm ths andalso - let val (rep1, rep2) = pairself Thm.rep_thm ths in - Theory.eq_thy (#thy rep1, #thy rep2) andalso - #maxidx rep1 = #maxidx rep2 andalso - #tags rep1 = #tags rep2 - end; - - -(* pattern equivalence *) - -fun equiv_thm ths = - Pattern.equiv (Theory.merge (pairself Thm.theory_of_thm ths)) (pairself Thm.full_prop_of ths); - - -(* type classes and sorts *) - -fun class_triv thy c = - Thm.of_class (Thm.ctyp_of thy (TVar ((Name.aT, 0), [c])), c); - -fun of_sort (T, S) = map (fn c => Thm.of_class (T, c)) S; - -fun check_shyps sorts raw_th = - let - val th = Thm.strip_shyps raw_th; - val prt_sort = Syntax.pretty_sort_global (Thm.theory_of_thm th); - val pending = Sorts.subtract sorts (Thm.extra_shyps th); - in - if null pending then th - else error (Pretty.string_of (Pretty.block (Pretty.str "Pending sort hypotheses:" :: - Pretty.brk 1 :: Pretty.commas (map prt_sort pending)))) - end; - - -(* misc operations *) - -fun is_dummy thm = - (case try Logic.dest_term (Thm.concl_of thm) of - NONE => false - | SOME t => Term.is_dummy_pattern t); - -fun plain_prop_of raw_thm = - let - val thm = Thm.strip_shyps raw_thm; - fun err msg = raise THM ("plain_prop_of: " ^ msg, 0, [thm]); - val {hyps, prop, tpairs, ...} = Thm.rep_thm thm; - in - if not (null hyps) then - err "theorem may not contain hypotheses" - else if not (null (Thm.extra_shyps thm)) then - err "theorem may not contain sort hypotheses" - else if not (null tpairs) then - err "theorem may not contain flex-flex pairs" - else prop - end; - - -(* collections of theorems in canonical order *) - -val add_thm = update eq_thm_prop; -val del_thm = remove eq_thm_prop; -val merge_thms = merge eq_thm_prop; - -val full_rules = Item_Net.init eq_thm_prop (single o Thm.full_prop_of); -val intro_rules = Item_Net.init eq_thm_prop (single o Thm.concl_of); -val elim_rules = Item_Net.init eq_thm_prop (single o Thm.major_prem_of); - - - -(** declared hyps **) - -structure Hyps = Proof_Data -( - type T = Termtab.set * bool; - fun init _ : T = (Termtab.empty, true); -); - -fun declare_hyps ct ctxt = - if Theory.subthy (theory_of_cterm ct, Proof_Context.theory_of ctxt) then - (Hyps.map o apfst) (Termtab.update (term_of ct, ())) ctxt - else raise CTERM ("assume_hyps: bad background theory", [ct]); - -fun assume_hyps ct ctxt = (Thm.assume ct, declare_hyps ct ctxt); - -val unchecked_hyps = (Hyps.map o apsnd) (K 
false); -fun restore_hyps ctxt = (Hyps.map o apsnd) (K (#2 (Hyps.get ctxt))); - -fun undeclared_hyps context th = - Thm.hyps_of th - |> filter_out - (case context of - Context.Theory _ => K false - | Context.Proof ctxt => - (case Hyps.get ctxt of - (_, false) => K true - | (hyps, _) => Termtab.defined hyps)); - -fun check_hyps context th = - (case undeclared_hyps context th of - [] => th - | undeclared => - let - val ctxt = Context.cases Syntax.init_pretty_global I context; - in - error (Pretty.string_of (Pretty.big_list "Undeclared hyps:" - (map (Pretty.item o single o Syntax.pretty_term ctxt) undeclared))) - end); - - - -(** basic derived rules **) - -(*Elimination of implication - A A ==> B - ------------ - B -*) -fun elim_implies thA thAB = Thm.implies_elim thAB thA; - - -(* forall_elim_var(s) *) - -local - -fun forall_elim_vars_aux strip_vars i th = - let - val thy = Thm.theory_of_thm th; - val {tpairs, prop, ...} = Thm.rep_thm th; - val add_used = Term.fold_aterms - (fn Var ((x, j), _) => if i = j then insert (op =) x else I | _ => I); - val used = fold (fn (t, u) => add_used t o add_used u) tpairs (add_used prop []); - val vars = strip_vars prop; - val cvars = (Name.variant_list used (map #1 vars), vars) - |> ListPair.map (fn (x, (_, T)) => Thm.cterm_of thy (Var ((x, i), T))); - in fold Thm.forall_elim cvars th end; - -in - -val forall_elim_vars = forall_elim_vars_aux Term.strip_all_vars; - -fun forall_elim_var i th = - forall_elim_vars_aux - (fn Const ("Pure.all", _) $ Abs (a, T, _) => [(a, T)] - | _ => raise THM ("forall_elim_vars", i, [th])) i th; - -end; - - -(* certify_instantiate *) - -fun certify_inst thy (instT, inst) = - (map (fn (v, T) => (Thm.ctyp_of thy (TVar v), Thm.ctyp_of thy T)) instT, - map (fn (v, t) => (Thm.cterm_of thy (Var v), Thm.cterm_of thy t)) inst); - -fun certify_instantiate insts th = - Thm.instantiate (certify_inst (Thm.theory_of_thm th) insts) th; - - -(* forall_intr_frees: generalization over all suitable Free variables *) - -fun forall_intr_frees th = - let - val thy = Thm.theory_of_thm th; - val {prop, hyps, tpairs, ...} = Thm.rep_thm th; - val fixed = fold Term.add_frees (Thm.terms_of_tpairs tpairs @ hyps) []; - val frees = Term.fold_aterms (fn Free v => - if member (op =) fixed v then I else insert (op =) v | _ => I) prop []; - in fold (Thm.forall_intr o Thm.cterm_of thy o Free) frees th end; - - -(* unvarify_global: global schematic variables *) - -fun unvarify_global th = - let - val prop = Thm.full_prop_of th; - val _ = map Logic.unvarify_global (prop :: Thm.hyps_of th) - handle TERM (msg, _) => raise THM (msg, 0, [th]); - - val instT = rev (Term.add_tvars prop []) |> map (fn v as ((a, _), S) => (v, TFree (a, S))); - val inst = rev (Term.add_vars prop []) |> map (fn ((a, i), T) => - let val T' = Term_Subst.instantiateT instT T - in (((a, i), T'), Free ((a, T'))) end); - in certify_instantiate (instT, inst) th end; - - -(* close_derivation *) - -fun close_derivation thm = - if Thm.derivation_name thm = "" then Thm.name_derivation "" thm - else thm; - - - -(** specification primitives **) - -(* rules *) - -fun stripped_sorts thy t = - let - val tfrees = rev (map TFree (Term.add_tfrees t [])); - val tfrees' = map (fn a => TFree (a, [])) (Name.invent Name.context Name.aT (length tfrees)); - val strip = tfrees ~~ tfrees'; - val recover = map (pairself (Thm.ctyp_of thy o Logic.varifyT_global) o swap) strip; - val t' = Term.map_types (Term.map_atyps (perhaps (AList.lookup (op =) strip))) t; - in (strip, recover, t') end; - -fun add_axiom ctxt (b, prop) 
thy = - let - val _ = Sign.no_vars ctxt prop; - val (strip, recover, prop') = stripped_sorts thy prop; - val constraints = map (fn (TFree (_, S), T) => (T, S)) strip; - val of_sorts = maps (fn (T as TFree (_, S), _) => of_sort (Thm.ctyp_of thy T, S)) strip; - - val thy' = thy - |> Theory.add_axiom ctxt (b, Logic.list_implies (maps Logic.mk_of_sort constraints, prop')); - val axm_name = Sign.full_name thy' b; - val axm' = Thm.axiom thy' axm_name; - val thm = - Thm.instantiate (recover, []) axm' - |> unvarify_global - |> fold elim_implies of_sorts; - in ((axm_name, thm), thy') end; - -fun add_axiom_global arg thy = add_axiom (Syntax.init_pretty_global thy) arg thy; - -fun add_def ctxt unchecked overloaded (b, prop) thy = - let - val _ = Sign.no_vars ctxt prop; - val prems = map (Thm.cterm_of thy) (Logic.strip_imp_prems prop); - val (_, recover, concl') = stripped_sorts thy (Logic.strip_imp_concl prop); - - val thy' = Theory.add_def ctxt unchecked overloaded (b, concl') thy; - val axm_name = Sign.full_name thy' b; - val axm' = Thm.axiom thy' axm_name; - val thm = - Thm.instantiate (recover, []) axm' - |> unvarify_global - |> fold_rev Thm.implies_intr prems; - in ((axm_name, thm), thy') end; - -fun add_def_global unchecked overloaded arg thy = - add_def (Syntax.init_pretty_global thy) unchecked overloaded arg thy; - - - -(** attributes **) - -(*attributes subsume any kind of rules or context modifiers*) -type attribute = Context.generic * thm -> Context.generic option * thm option; - -type binding = binding * attribute list; -val empty_binding: binding = (Binding.empty, []); - -fun rule_attribute f (x, th) = (NONE, SOME (f x th)); -fun declaration_attribute f (x, th) = (SOME (f th x), NONE); -fun mixed_attribute f (x, th) = let val (x', th') = f (x, th) in (SOME x', SOME th') end; - -fun apply_attribute (att: attribute) th x = - let val (x', th') = att (x, check_hyps x (Thm.transfer (Context.theory_of x) th)) - in (the_default th th', the_default x x') end; - -fun attribute_declaration att th x = #2 (apply_attribute att th x); - -fun apply_attributes mk dest = - let - fun app [] th x = (th, x) - | app (att :: atts) th x = apply_attribute att th (mk x) ||> dest |-> app atts; - in app end; - -val theory_attributes = apply_attributes Context.Theory Context.the_theory; -val proof_attributes = apply_attributes Context.Proof Context.the_proof; - -fun no_attributes x = (x, []); -fun simple_fact x = [(x, [])]; - - - -(*** theorem tags ***) - -(* add / delete tags *) - -fun tag_rule tg = Thm.map_tags (insert (op =) tg); -fun untag_rule s = Thm.map_tags (filter_out (fn (s', _) => s = s')); - -fun tag tg = rule_attribute (K (tag_rule tg)); -fun untag s = rule_attribute (K (untag_rule s)); - - -(* def_name *) - -fun def_name c = c ^ "_def"; - -fun def_name_optional c "" = def_name c - | def_name_optional _ name = name; - -val def_binding = Binding.map_name def_name; - -fun def_binding_optional b name = - if Binding.is_empty name then def_binding b else name; - - -(* unofficial theorem names *) - -fun the_name_hint thm = the (AList.lookup (op =) (Thm.get_tags thm) Markup.nameN); - -val has_name_hint = can the_name_hint; -val get_name_hint = the_default "??.unknown" o try the_name_hint; - -fun put_name_hint name = untag_rule Markup.nameN #> tag_rule (Markup.nameN, name); - - -(* theorem kinds *) - -val theoremK = "theorem"; -val lemmaK = "lemma"; -val corollaryK = "corollary"; - -fun legacy_get_kind thm = the_default "" (Properties.get (Thm.get_tags thm) Markup.kindN); - -fun kind_rule k = tag_rule 
(Markup.kindN, k) o untag_rule Markup.kindN; -fun kind k = rule_attribute (K (k <> "" ? kind_rule k)); - - -(* forked proofs *) - -structure Proofs = Theory_Data -( - type T = thm list; - val empty = []; - fun extend _ = empty; - fun merge _ = empty; -); - -fun register_proofs more_thms = Proofs.map (fn thms => fold cons more_thms thms); -val join_theory_proofs = Thm.join_proofs o rev o Proofs.get; - - -open Thm; - -end; - -structure Basic_Thm: BASIC_THM = Thm; -open Basic_Thm; - diff --git a/core/Pure/morphism.ML b/core/Pure/morphism.ML deleted file mode 100644 index e90b2f18..00000000 --- a/core/Pure/morphism.ML +++ /dev/null @@ -1,118 +0,0 @@ -(* Title: Pure/morphism.ML - Author: Makarius - -Abstract morphisms on formal entities. -*) - -infix 1 $> - -signature BASIC_MORPHISM = -sig - type morphism - type declaration = morphism -> Context.generic -> Context.generic - val $> : morphism * morphism -> morphism -end - -signature MORPHISM = -sig - include BASIC_MORPHISM - exception MORPHISM of string * exn - val pretty: morphism -> Pretty.T - val binding: morphism -> binding -> binding - val typ: morphism -> typ -> typ - val term: morphism -> term -> term - val fact: morphism -> thm list -> thm list - val thm: morphism -> thm -> thm - val cterm: morphism -> cterm -> cterm - val morphism: string -> - {binding: (binding -> binding) list, - typ: (typ -> typ) list, - term: (term -> term) list, - fact: (thm list -> thm list) list} -> morphism - val binding_morphism: string -> (binding -> binding) -> morphism - val typ_morphism: string -> (typ -> typ) -> morphism - val term_morphism: string -> (term -> term) -> morphism - val fact_morphism: string -> (thm list -> thm list) -> morphism - val thm_morphism: string -> (thm -> thm) -> morphism - val transfer_morphism: theory -> morphism - val identity: morphism - val compose: morphism -> morphism -> morphism - val transform: morphism -> (morphism -> 'a) -> morphism -> 'a - val form: (morphism -> 'a) -> 'a -end; - -structure Morphism: MORPHISM = -struct - -(* named functions *) - -type 'a funs = (string * ('a -> 'a)) list; - -exception MORPHISM of string * exn; - -fun app (name, f) x = f x - handle exn => if Exn.is_interrupt exn then reraise exn else raise MORPHISM (name, exn); - -fun apply fs = fold_rev app fs; - - -(* type morphism *) - -datatype morphism = Morphism of - {names: string list, - binding: binding funs, - typ: typ funs, - term: term funs, - fact: thm list funs}; - -type declaration = morphism -> Context.generic -> Context.generic; - -fun pretty (Morphism {names, ...}) = Pretty.enum ";" "{" "}" (map Pretty.str (rev names)); - -fun binding (Morphism {binding, ...}) = apply binding; -fun typ (Morphism {typ, ...}) = apply typ; -fun term (Morphism {term, ...}) = apply term; -fun fact (Morphism {fact, ...}) = apply fact; -val thm = singleton o fact; -val cterm = Drule.cterm_rule o thm; - - -fun morphism a {binding, typ, term, fact} = - Morphism { - names = if a = "" then [] else [a], - binding = map (pair a) binding, - typ = map (pair a) typ, - term = map (pair a) term, - fact = map (pair a) fact}; - -fun binding_morphism a binding = morphism a {binding = [binding], typ = [], term = [], fact = []}; -fun typ_morphism a typ = morphism a {binding = [], typ = [typ], term = [], fact = []}; -fun term_morphism a term = morphism a {binding = [], typ = [], term = [term], fact = []}; -fun fact_morphism a fact = morphism a {binding = [], typ = [], term = [], fact = [fact]}; -fun thm_morphism a thm = morphism a {binding = [], typ = [], term = [], fact = 
[map thm]}; -val transfer_morphism = thm_morphism "transfer" o Thm.transfer; - -val identity = morphism "" {binding = [], typ = [], term = [], fact = []}; - - -(* morphism combinators *) - -fun compose - (Morphism {names = names1, binding = binding1, typ = typ1, term = term1, fact = fact1}) - (Morphism {names = names2, binding = binding2, typ = typ2, term = term2, fact = fact2}) = - Morphism { - names = names1 @ names2, - binding = binding1 @ binding2, - typ = typ1 @ typ2, - term = term1 @ term2, - fact = fact1 @ fact2}; - -fun phi1 $> phi2 = compose phi2 phi1; - -fun transform phi f = fn psi => f (phi $> psi); -fun form f = f identity; - -end; - -structure Basic_Morphism: BASIC_MORPHISM = Morphism; -open Basic_Morphism; diff --git a/core/Pure/name.ML b/core/Pure/name.ML deleted file mode 100644 index eb476e78..00000000 --- a/core/Pure/name.ML +++ /dev/null @@ -1,185 +0,0 @@ -(* Title: Pure/name.ML - Author: Makarius - -Names of basic logical entities (variables etc.). -*) - -signature NAME = -sig - val uu: string - val uu_: string - val aT: string - val bound: int -> string - val is_bound: string -> bool - val internal: string -> string - val dest_internal: string -> string - val is_internal: string -> bool - val reject_internal: string * Position.T list -> unit - val skolem: string -> string - val dest_skolem: string -> string - val is_skolem: string -> bool - val reject_skolem: string * Position.T list -> unit - val clean_index: string * int -> string * int - val clean: string -> string - type context - val context: context - val make_context: string list -> context - val declare: string -> context -> context - val is_declared: context -> string -> bool - val invent: context -> string -> int -> string list - val invent_names: context -> string -> 'a list -> (string * 'a) list - val invent_list: string list -> string -> int -> string list - val variant: string -> context -> string * context - val variant_list: string list -> string list -> string list - val enforce_case: bool -> string -> string - val desymbolize: bool option -> string -> string -end; - -structure Name: NAME = -struct - -(** common defaults **) - -val uu = "uu"; -val uu_ = "uu_"; -val aT = "'a"; - - - -(** special variable names **) - -(* encoded bounds *) - -(*names for numbered variables -- - preserves order wrt. int_ord vs. 
string_ord, avoids allocating new strings*) - -val small_int = Vector.tabulate (1000, fn i => - let val leading = if i < 10 then "00" else if i < 100 then "0" else "" - in ":" ^ leading ^ string_of_int i end); - -fun bound n = - if n < 1000 then Vector.sub (small_int, n) - else ":" ^ bound (n div 1000) ^ Vector.sub (small_int, n mod 1000); - -val is_bound = String.isPrefix ":"; - - -(* internal names -- NB: internal subsumes skolem *) - -val internal = suffix "_"; -val dest_internal = unsuffix "_"; -val is_internal = String.isSuffix "_"; -fun reject_internal (x, ps) = - if is_internal x then error ("Bad name: " ^ quote x ^ Position.here_list ps) else (); - -val skolem = suffix "__"; -val dest_skolem = unsuffix "__"; -val is_skolem = String.isSuffix "__"; -fun reject_skolem (x, ps) = - if is_skolem x then error ("Bad name: " ^ quote x ^ Position.here_list ps) else (); - -fun clean_index (x, i) = - (case try dest_internal x of - NONE => (x, i) - | SOME x' => clean_index (x', i + 1)); - -fun clean x = #1 (clean_index (x, 0)); - - - -(** generating fresh names **) - -(* context *) - -datatype context = - Context of string option Symtab.table; (*declared names with latest renaming*) - -fun declare x (Context tab) = - Context (Symtab.default (clean x, NONE) tab); - -fun declare_renaming (x, x') (Context tab) = - Context (Symtab.update (clean x, SOME (clean x')) tab); - -fun is_declared (Context tab) = Symtab.defined tab; -fun declared (Context tab) = Symtab.lookup tab; - -val context = Context Symtab.empty |> fold declare ["", "'"]; -fun make_context used = fold declare used context; - - -(* invent names *) - -fun invent ctxt = - let - fun invs _ 0 = [] - | invs x n = - let val x' = Symbol.bump_string x - in if is_declared ctxt x then invs x' n else x :: invs x' (n - 1) end; - in invs o clean end; - -fun invent_names ctxt x xs = invent ctxt x (length xs) ~~ xs; - -val invent_list = invent o make_context; - - -(* variants *) - -(*makes a variant of a name distinct from already used names in a - context; preserves a suffix of underscores "_"*) -fun variant name ctxt = - let - fun vary x = - (case declared ctxt x of - NONE => x - | SOME x' => vary (Symbol.bump_string (the_default x x'))); - - val (x, n) = clean_index (name, 0); - val (x', ctxt') = - if not (is_declared ctxt x) then (x, declare x ctxt) - else - let - val x0 = Symbol.bump_init x; - val x' = vary x0; - val ctxt' = ctxt - |> x0 <> x' ? 
declare_renaming (x0, x') - |> declare x'; - in (x', ctxt') end; - in (x' ^ replicate_string n "_", ctxt') end; - -fun variant_list used names = #1 (make_context used |> fold_map variant names); - - -(* names conforming to typical requirements of identifiers in the world outside *) - -fun enforce_case' false cs = - (if forall Symbol.is_ascii_upper cs then map else nth_map 0) Symbol.to_ascii_lower cs - | enforce_case' true cs = - nth_map 0 Symbol.to_ascii_upper cs; - -fun enforce_case upper = implode o enforce_case' upper o raw_explode; - -fun desymbolize perhaps_upper "" = - if the_default false perhaps_upper then "X" else "x" - | desymbolize perhaps_upper s = - let - val xs as (x :: _) = Symbol.explode s; - val ys = - if Symbol.is_ascii_letter x orelse Symbol.is_symbolic x then xs - else "x" :: xs; - fun is_valid x = - Symbol.is_ascii_letter x orelse Symbol.is_ascii_digit x; - fun sep [] = [] - | sep (xs as "_" :: _) = xs - | sep xs = "_" :: xs; - fun desep ("_" :: xs) = xs - | desep xs = xs; - fun desymb x xs = - if is_valid x then x :: xs - else - (case Symbol.decode x of - Symbol.Sym name => "_" :: raw_explode name @ sep xs - | _ => sep xs); - val upper_lower = Option.map enforce_case' perhaps_upper |> the_default I; - in fold_rev desymb ys [] |> desep |> upper_lower |> implode end; - -end; diff --git a/core/Pure/net.ML b/core/Pure/net.ML deleted file mode 100644 index 1da4999a..00000000 --- a/core/Pure/net.ML +++ /dev/null @@ -1,260 +0,0 @@ -(* Title: Pure/net.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - Copyright 1993 University of Cambridge - -Discrimination nets: a data structure for indexing items - -From the book - E. Charniak, C. K. Riesbeck, D. V. McDermott. - Artificial Intelligence Programming. - (Lawrence Erlbaum Associates, 1980). [Chapter 14] - -match_term no longer treats abstractions as wildcards; instead they match -only wildcards in patterns. Requires operands to be beta-eta-normal. -*) - -signature NET = -sig - type key - val key_of_term: term -> key list - val encode_type: typ -> term - type 'a net - val empty: 'a net - val is_empty: 'a net -> bool - exception INSERT - val insert: ('a * 'a -> bool) -> key list * 'a -> 'a net -> 'a net - val insert_term: ('a * 'a -> bool) -> term * 'a -> 'a net -> 'a net - val insert_safe: ('a * 'a -> bool) -> key list * 'a -> 'a net -> 'a net - val insert_term_safe: ('a * 'a -> bool) -> term * 'a -> 'a net -> 'a net - exception DELETE - val delete: ('b * 'a -> bool) -> key list * 'b -> 'a net -> 'a net - val delete_term: ('b * 'a -> bool) -> term * 'b -> 'a net -> 'a net - val delete_safe: ('b * 'a -> bool) -> key list * 'b -> 'a net -> 'a net - val delete_term_safe: ('b * 'a -> bool) -> term * 'b -> 'a net -> 'a net - val lookup: 'a net -> key list -> 'a list - val match_term: 'a net -> term -> 'a list - val unify_term: 'a net -> term -> 'a list - val entries: 'a net -> 'a list - val subtract: ('b * 'a -> bool) -> 'a net -> 'b net -> 'b list - val merge: ('a * 'a -> bool) -> 'a net * 'a net -> 'a net - val content: 'a net -> 'a list -end; - -structure Net: NET = -struct - -datatype key = CombK | VarK | AtomK of string; - -(*Keys are preorder lists of symbols -- Combinations, Vars, Atoms. - Any term whose head is a Var is regarded entirely as a Var. - Abstractions are also regarded as Vars; this covers eta-conversion - and "near" eta-conversions such as %x.?P(?f(x)). 
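For illustration (a sketch; "f", "x" and the types T, U below are placeholders): a compound headed by a constant is listed in preorder, while anything headed by a Var or Abs collapses to the single wildcard key, e.g. key_of_term (Const ("f", T) $ Free ("x", U)) gives [CombK, AtomK "f", AtomK "x"], whereas key_of_term (Var (("P", 0), T) $ Free ("x", U)) gives just [VarK].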
-*) -fun add_key_of_terms (t, cs) = - let fun rands (f$t, cs) = CombK :: rands (f, add_key_of_terms(t, cs)) - | rands (Const(c,_), cs) = AtomK c :: cs - | rands (Free(c,_), cs) = AtomK c :: cs - | rands (Bound i, cs) = AtomK (Name.bound i) :: cs - in case head_of t of - Var _ => VarK :: cs - | Abs _ => VarK :: cs - | _ => rands(t,cs) - end; - -(*convert a term to a list of keys*) -fun key_of_term t = add_key_of_terms (t, []); - -(*encode_type -- for indexing purposes*) -fun encode_type (Type (c, Ts)) = Term.list_comb (Const (c, dummyT), map encode_type Ts) - | encode_type (TFree (a, _)) = Free (a, dummyT) - | encode_type (TVar (a, _)) = Var (a, dummyT); - - -(*Trees indexed by key lists: each arc is labelled by a key. - Each node contains a list of items, and arcs to children. - The empty key addresses the entire net. - Lookup functions preserve order in items stored at same level. -*) -datatype 'a net = Leaf of 'a list - | Net of {comb: 'a net, - var: 'a net, - atoms: 'a net Symtab.table}; - -val empty = Leaf[]; -fun is_empty (Leaf []) = true | is_empty _ = false; -val emptynet = Net{comb=empty, var=empty, atoms=Symtab.empty}; - - -(*** Insertion into a discrimination net ***) - -exception INSERT; (*duplicate item in the net*) - - -(*Adds item x to the list at the node addressed by the keys. - Creates node if not already present. - eq is the equality test for items. - The empty list of keys generates a Leaf node, others a Net node. -*) -fun insert eq (keys,x) net = - let fun ins1 ([], Leaf xs) = - if member eq xs x then raise INSERT else Leaf(x::xs) - | ins1 (keys, Leaf[]) = ins1 (keys, emptynet) (*expand empty...*) - | ins1 (CombK :: keys, Net{comb,var,atoms}) = - Net{comb=ins1(keys,comb), var=var, atoms=atoms} - | ins1 (VarK :: keys, Net{comb,var,atoms}) = - Net{comb=comb, var=ins1(keys,var), atoms=atoms} - | ins1 (AtomK a :: keys, Net{comb,var,atoms}) = - let val atoms' = Symtab.map_default (a, empty) (fn net' => ins1 (keys, net')) atoms; - in Net{comb=comb, var=var, atoms=atoms'} end - in ins1 (keys,net) end; - -fun insert_term eq (t, x) = insert eq (key_of_term t, x); - -fun insert_safe eq entry net = insert eq entry net handle INSERT => net; -fun insert_term_safe eq entry net = insert_term eq entry net handle INSERT => net; - - -(*** Deletion from a discrimination net ***) - -exception DELETE; (*missing item in the net*) - -(*Create a new Net node if it would be nonempty*) -fun newnet (args as {comb,var,atoms}) = - if is_empty comb andalso is_empty var andalso Symtab.is_empty atoms - then empty else Net args; - -(*Deletes item x from the list at the node addressed by the keys. - Raises DELETE if absent. Collapses the net if possible. - eq is the equality test for items. 
*) -fun delete eq (keys, x) net = - let fun del1 ([], Leaf xs) = - if member eq xs x then Leaf (remove eq x xs) - else raise DELETE - | del1 (keys, Leaf[]) = raise DELETE - | del1 (CombK :: keys, Net{comb,var,atoms}) = - newnet{comb=del1(keys,comb), var=var, atoms=atoms} - | del1 (VarK :: keys, Net{comb,var,atoms}) = - newnet{comb=comb, var=del1(keys,var), atoms=atoms} - | del1 (AtomK a :: keys, Net{comb,var,atoms}) = - let val atoms' = - (case Symtab.lookup atoms a of - NONE => raise DELETE - | SOME net' => - (case del1 (keys, net') of - Leaf [] => Symtab.delete a atoms - | net'' => Symtab.update (a, net'') atoms)) - in newnet{comb=comb, var=var, atoms=atoms'} end - in del1 (keys,net) end; - -fun delete_term eq (t, x) = delete eq (key_of_term t, x); - -fun delete_safe eq entry net = delete eq entry net handle DELETE => net; -fun delete_term_safe eq entry net = delete_term eq entry net handle DELETE => net; - - -(*** Retrieval functions for discrimination nets ***) - -exception ABSENT; - -fun the_atom atoms a = - (case Symtab.lookup atoms a of - NONE => raise ABSENT - | SOME net => net); - -(*Return the list of items at the given node, [] if no such node*) -fun lookup (Leaf xs) [] = xs - | lookup (Leaf _) (_ :: _) = [] (*non-empty keys and empty net*) - | lookup (Net {comb, var, atoms}) (CombK :: keys) = lookup comb keys - | lookup (Net {comb, var, atoms}) (VarK :: keys) = lookup var keys - | lookup (Net {comb, var, atoms}) (AtomK a :: keys) = - lookup (the_atom atoms a) keys handle ABSENT => []; - - -(*Skipping a term in a net. Recursively skip 2 levels if a combination*) -fun net_skip (Leaf _) nets = nets - | net_skip (Net{comb,var,atoms}) nets = - fold_rev net_skip (net_skip comb []) (Symtab.fold (cons o #2) atoms (var::nets)); - - -(** Matching and Unification **) - -(*conses the linked net, if present, to nets*) -fun look1 (atoms, a) nets = - the_atom atoms a :: nets handle ABSENT => nets; - -(*Return the nodes accessible from the term (cons them before nets) - "unif" signifies retrieval for unification rather than matching. - Var in net matches any term. - Abs or Var in object: if "unif", regarded as wildcard, - else matches only a variable in net. 
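A small worked sketch (hypothetical net): suppose one item was stored via insert_term under the pattern f ?x, i.e. under the keys [CombK, AtomK "f", VarK]. Then match_term net (Const ("f", T) $ Free ("a", U)) finds it, since the VarK entry matches any argument; match_term net (Var (("y", 0), T')) misses it, because a Var object only reaches VarK entries; and unify_term net (Var (("y", 0), T')) finds it, because with "unif" the Var object is treated as a wildcard.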
-*) -fun matching unif t net nets = - let fun rands _ (Leaf _, nets) = nets - | rands t (Net{comb,atoms,...}, nets) = - case t of - f$t => fold_rev (matching unif t) (rands f (comb,[])) nets - | Const(c,_) => look1 (atoms, c) nets - | Free(c,_) => look1 (atoms, c) nets - | Bound i => look1 (atoms, Name.bound i) nets - | _ => nets - in - case net of - Leaf _ => nets - | Net{var,...} => - case head_of t of - Var _ => if unif then net_skip net nets - else var::nets (*only matches Var in net*) - (*If "unif" then a var instantiation in the abstraction could allow - an eta-reduction, so regard the abstraction as a wildcard.*) - | Abs _ => if unif then net_skip net nets - else var::nets (*only a Var can match*) - | _ => rands t (net, var::nets) (*var could match also*) - end; - -fun extract_leaves l = maps (fn Leaf xs => xs) l; - -(*return items whose key could match t, WHICH MUST BE BETA-ETA NORMAL*) -fun match_term net t = - extract_leaves (matching false t net []); - -(*return items whose key could unify with t*) -fun unify_term net t = - extract_leaves (matching true t net []); - - -(** operations on nets **) - -(*subtraction: collect entries of second net that are NOT present in first net*) -fun subtract eq net1 net2 = - let - fun subtr (Net _) (Leaf ys) = append ys - | subtr (Leaf xs) (Leaf ys) = - fold_rev (fn y => if member eq xs y then I else cons y) ys - | subtr (Leaf _) (net as Net _) = subtr emptynet net - | subtr (Net {comb = comb1, var = var1, atoms = atoms1}) - (Net {comb = comb2, var = var2, atoms = atoms2}) = - subtr comb1 comb2 - #> subtr var1 var2 - #> Symtab.fold (fn (a, net) => - subtr (the_default emptynet (Symtab.lookup atoms1 a)) net) atoms2 - in subtr net1 net2 [] end; - -fun entries net = subtract (K false) empty net; - - -(* merge *) - -fun cons_fst x (xs, y) = (x :: xs, y); - -fun dest (Leaf xs) = map (pair []) xs - | dest (Net {comb, var, atoms}) = - map (cons_fst CombK) (dest comb) @ - map (cons_fst VarK) (dest var) @ - maps (fn (a, net) => map (cons_fst (AtomK a)) (dest net)) (Symtab.dest atoms); - -fun merge eq (net1, net2) = - fold (insert_safe eq) (dest net2) net1; (* FIXME non-canonical merge order!?! *) - -fun content net = map #2 (dest net); - -end; diff --git a/core/Pure/pattern.ML b/core/Pure/pattern.ML deleted file mode 100644 index 0f9103e9..00000000 --- a/core/Pure/pattern.ML +++ /dev/null @@ -1,490 +0,0 @@ -(* Title: Pure/pattern.ML - Author: Tobias Nipkow, Christine Heinzelmann, and Stefan Berghofer, TU Muenchen - -Unification of Higher-Order Patterns. - -See also: -Tobias Nipkow. Functional Unification of Higher-Order Patterns. -In Proceedings of the 8th IEEE Symposium Logic in Computer Science, 1993. 
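Here a higher-order pattern is, roughly, a beta-normal term in which every occurrence of a schematic variable is applied to a list of distinct bound variables: ?F x y is a pattern, while ?F x x and ?F (g x) are not. For this fragment unification is decidable and most general unifiers exist; terms outside it raise the exception Pattern below (match, for instance, then falls back to first_order_match).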
- -TODO: optimize red by special-casing it -*) - -signature PATTERN = -sig - val unify_trace_failure_default: bool Unsynchronized.ref - val unify_trace_failure_raw: Config.raw - val unify_trace_failure: bool Config.T - val match: theory -> term * term -> Type.tyenv * Envir.tenv -> Type.tyenv * Envir.tenv - val first_order_match: theory -> term * term - -> Type.tyenv * Envir.tenv -> Type.tyenv * Envir.tenv - val matches: theory -> term * term -> bool - val matchess: theory -> term list * term list -> bool - val equiv: theory -> term * term -> bool - val matches_subterm: theory -> term * term -> bool - val unify_types: theory -> typ * typ -> Envir.env -> Envir.env - val unify: theory -> term * term -> Envir.env -> Envir.env - val first_order: term -> bool - val pattern: term -> bool - val match_rew: theory -> term -> term * term -> (term * term) option - val rewrite_term: theory -> (term * term) list -> (term -> term option) list -> term -> term - val rewrite_term_top: theory -> (term * term) list -> (term -> term option) list -> term -> term - exception Unif - exception MATCH - exception Pattern -end; - -structure Pattern: PATTERN = -struct - -exception Unif; -exception Pattern; - -val unify_trace_failure_default = Unsynchronized.ref false; -val unify_trace_failure_raw = - Config.declare_global ("unify_trace_failure", @{here}) - (fn _ => Config.Bool (! unify_trace_failure_default)); -val unify_trace_failure = Config.bool unify_trace_failure_raw; - -fun string_of_term thy env binders t = - Syntax.string_of_term_global thy - (Envir.norm_term env (subst_bounds (map Free binders, t))); - -fun bname binders i = fst (nth binders i); -fun bnames binders is = space_implode " " (map (bname binders) is); - -fun typ_clash thy (tye,T,U) = - if Config.get_global thy unify_trace_failure - then let val t = Syntax.string_of_typ_global thy (Envir.norm_type tye T) - and u = Syntax.string_of_typ_global thy (Envir.norm_type tye U) - in tracing("The following types do not unify:\n" ^ t ^ "\n" ^ u) end - else () - -fun clash thy a b = - if Config.get_global thy unify_trace_failure then tracing("Clash: " ^ a ^ " =/= " ^ b) else () - -fun boundVar binders i = - "bound variable " ^ bname binders i ^ " (depth " ^ string_of_int i ^ ")"; - -fun clashBB thy binders i j = - if Config.get_global thy unify_trace_failure - then clash thy (boundVar binders i) (boundVar binders j) - else () - -fun clashB thy binders i s = - if Config.get_global thy unify_trace_failure - then clash thy (boundVar binders i) s - else () - -fun proj_fail thy (env,binders,F,_,is,t) = - if Config.get_global thy unify_trace_failure - then let val f = Term.string_of_vname F - val xs = bnames binders is - val u = string_of_term thy env binders t - val ys = bnames binders (subtract (op =) is (loose_bnos t)) - in tracing("Cannot unify variable " ^ f ^ - " (depending on bound variables " ^ xs ^ ")\nwith term " ^ u ^ - "\nTerm contains additional bound variable(s) " ^ ys) - end - else () - -fun ocheck_fail thy (F,t,binders,env) = - if Config.get_global thy unify_trace_failure - then let val f = Term.string_of_vname F - val u = string_of_term thy env binders t - in tracing("Variable " ^ f ^ " occurs in term\n" ^ u ^ - "\nCannot unify!\n") - end - else () - -fun occurs(F,t,env) = - let fun occ(Var (G, T)) = (case Envir.lookup env (G, T) of - SOME(t) => occ t - | NONE => F=G) - | occ(t1$t2) = occ t1 orelse occ t2 - | occ(Abs(_,_,t)) = occ t - | occ _ = false - in occ t end; - - -fun mapbnd f = - let fun mpb d (Bound(i)) = if i < d then Bound(i) else 
Bound(f(i-d)+d) - | mpb d (Abs(s,T,t)) = Abs(s,T,mpb(d+1) t) - | mpb d ((u1 $ u2)) = (mpb d u1)$(mpb d u2) - | mpb _ atom = atom - in mpb 0 end; - -fun idx [] j = raise Unif - | idx(i::is) j = if (i:int) =j then length is else idx is j; - -fun mkabs (binders,is,t) = - let fun mk(i::is) = let val (x,T) = nth binders i - in Abs(x,T,mk is) end - | mk [] = t - in mk is end; - -val incr = mapbnd (fn i => i+1); - -fun ints_of [] = [] - | ints_of (Bound i ::bs) = - let val is = ints_of bs - in if member (op =) is i then raise Pattern else i::is end - | ints_of _ = raise Pattern; - -fun ints_of' env ts = ints_of (map (Envir.head_norm env) ts); - - -fun app (s,(i::is)) = app (s$Bound(i),is) - | app (s,[]) = s; - -fun red (Abs(_,_,s)) (i::is) js = red s is (i::js) - | red t [] [] = t - | red t is jn = app (mapbnd (nth jn) t,is); - - -(* split_type ([T1,....,Tn]---> T,n,[]) = ([Tn,...,T1],T) *) -fun split_type (T,0,Ts) = (Ts,T) - | split_type (Type ("fun",[T1,T2]),n,Ts) = split_type (T2,n-1,T1::Ts) - | split_type _ = raise Fail "split_type"; - -fun type_of_G env (T, n, is) = - let - val tyenv = Envir.type_env env; - val (Ts, U) = split_type (Envir.norm_type tyenv T, n, []); - in map (nth Ts) is ---> U end; - -fun mk_hnf (binders,is,G,js) = mkabs (binders, is, app(G,js)); - -fun mk_new_hnf(env,binders,is,F as (a,_),T,js) = - let val (env',G) = Envir.genvar a (env,type_of_G env (T,length is,js)) - in Envir.update ((F, T), mk_hnf (binders, is, G, js)) env' end; - - -(*predicate: downto0 (is, n) <=> is = [n, n - 1, ..., 0]*) -fun downto0 (i :: is, n) = i = n andalso downto0 (is, n - 1) - | downto0 ([], n) = n = ~1; - -(*mk_proj_list(is) = [ |is| - k | 1 <= k <= |is| and is[k] >= 0 ]*) -fun mk_proj_list is = - let fun mk(i::is,j) = if is_some i then j :: mk(is,j-1) else mk(is,j-1) - | mk([],_) = [] - in mk(is,length is - 1) end; - -fun proj(s,env,binders,is) = - let fun trans d i = if i<d then i else (idx is (i-d))+d; - fun pr(s,env,d,binders) = (case Envir.head_norm env s of - Abs(a,T,t) => let val (t',env') = pr(t,env,d+1,((a,T)::binders)) - in (Abs(a,T,t'),env') end - | t => (case strip_comb t of - (c as Const _,ts) => - let val (ts',env') = prs(ts,env,d,binders) - in (list_comb(c,ts'),env') end - | (f as Free _,ts) => - let val (ts',env') = prs(ts,env,d,binders) - in (list_comb(f,ts'),env') end - | (Bound(i),ts) => - let val j = trans d i - val (ts',env') = prs(ts,env,d,binders) - in (list_comb(Bound j,ts'),env') end - | (Var(F as (a,_),Fty),ts) => - let val js = ints_of' env ts; - val js' = map (try (trans d)) js; - val ks = mk_proj_list js'; - val ls = map_filter I js' - val Hty = type_of_G env (Fty,length js,ks) - val (env',H) = Envir.genvar a (env,Hty) - val env'' = - Envir.update ((F, Fty), mk_hnf (binders, js, H, ks)) env' - in (app(H,ls),env'') end - | _ => raise Pattern)) - and prs(s::ss,env,d,binders) = - let val (s',env1) = pr(s,env,d,binders) - val (ss',env2) = prs(ss,env1,d,binders) - in (s'::ss',env2) end - | prs([],env,_,_) = ([],env) - in if downto0(is,length binders - 1) then (s,env) - else pr(s,env,0,binders) - end; - - -(* mk_ff_list(is,js) = [ length(is) - k | 1 <= k <= |is| and is[k] = js[k] ] *) -fun mk_ff_list(is,js) = - let fun mk([],[],_) = [] - | mk(i::is,j::js, k) = if (i:int) = j then k :: mk(is,js,k-1) - else mk(is,js,k-1) - | mk _ = raise Fail "mk_ff_list" - in mk(is,js,length is-1) end; - -fun flexflex1(env,binders,F,Fty,is,js) = - if is=js then env - else let val ks = mk_ff_list(is,js) - in mk_new_hnf(env,binders,is,F,Fty,ks) end; - -fun flexflex2(env,binders,F,Fty,is,G,Gty,js) = - let fun ff(F,Fty,is,G as (a,_),Gty,js) = - if subset (op =) (js, is) - then let val t= 
mkabs(binders,is,app(Var(G,Gty),map (idx is) js)) - in Envir.update ((F, Fty), t) env end - else let val ks = inter (op =) js is - val Hty = type_of_G env (Fty,length is,map (idx is) ks) - val (env',H) = Envir.genvar a (env,Hty) - fun lam(is) = mkabs(binders,is,app(H,map (idx is) ks)); - in Envir.update ((G, Gty), lam js) (Envir.update ((F, Fty), lam is) env') - end; - in if Term_Ord.indexname_ord (G,F) = LESS then ff(F,Fty,is,G,Gty,js) else ff(G,Gty,js,F,Fty,is) end - -fun unify_types thy (T, U) (env as Envir.Envir {maxidx, tenv, tyenv}) = - if T = U then env - else - let val (tyenv', maxidx') = Sign.typ_unify thy (U, T) (tyenv, maxidx) - in Envir.Envir {maxidx = maxidx', tenv = tenv, tyenv = tyenv'} end - handle Type.TUNIFY => (typ_clash thy (tyenv, T, U); raise Unif); - -fun unif thy binders (s,t) env = case (Envir.head_norm env s, Envir.head_norm env t) of - (Abs(ns,Ts,ts),Abs(nt,Tt,tt)) => - let val name = if ns = "" then nt else ns - in unif thy ((name,Ts)::binders) (ts,tt) (unify_types thy (Ts, Tt) env) end - | (Abs(ns,Ts,ts),t) => unif thy ((ns,Ts)::binders) (ts,(incr t)$Bound(0)) env - | (t,Abs(nt,Tt,tt)) => unif thy ((nt,Tt)::binders) ((incr t)$Bound(0),tt) env - | p => cases thy (binders,env,p) - -and cases thy (binders,env,(s,t)) = case (strip_comb s,strip_comb t) of - ((Var(F,Fty),ss),(Var(G,Gty),ts)) => - if F = G then flexflex1(env,binders,F,Fty,ints_of' env ss,ints_of' env ts) - else flexflex2(env,binders,F,Fty,ints_of' env ss,G,Gty,ints_of' env ts) - | ((Var(F,Fty),ss),_) => flexrigid thy (env,binders,F,Fty,ints_of' env ss,t) - | (_,(Var(F,Fty),ts)) => flexrigid thy (env,binders,F,Fty,ints_of' env ts,s) - | ((Const c,ss),(Const d,ts)) => rigidrigid thy (env,binders,c,d,ss,ts) - | ((Free(f),ss),(Free(g),ts)) => rigidrigid thy (env,binders,f,g,ss,ts) - | ((Bound(i),ss),(Bound(j),ts)) => rigidrigidB thy (env,binders,i,j,ss,ts) - | ((Abs(_),_),_) => raise Pattern - | (_,(Abs(_),_)) => raise Pattern - | ((Const(c,_),_),(Free(f,_),_)) => (clash thy c f; raise Unif) - | ((Const(c,_),_),(Bound i,_)) => (clashB thy binders i c; raise Unif) - | ((Free(f,_),_),(Const(c,_),_)) => (clash thy f c; raise Unif) - | ((Free(f,_),_),(Bound i,_)) => (clashB thy binders i f; raise Unif) - | ((Bound i,_),(Const(c,_),_)) => (clashB thy binders i c; raise Unif) - | ((Bound i,_),(Free(f,_),_)) => (clashB thy binders i f; raise Unif) - - -and rigidrigid thy (env,binders,(a,Ta),(b,Tb),ss,ts) = - if a<>b then (clash thy a b; raise Unif) - else env |> unify_types thy (Ta,Tb) |> fold (unif thy binders) (ss~~ts) - -and rigidrigidB thy (env,binders,i,j,ss,ts) = - if i <> j then (clashBB thy binders i j; raise Unif) - else fold (unif thy binders) (ss~~ts) env - -and flexrigid thy (params as (env,binders,F,Fty,is,t)) = - if occurs(F,t,env) then (ocheck_fail thy (F,t,binders,env); raise Unif) - else (let val (u,env') = proj(t,env,binders,is) - in Envir.update ((F, Fty), mkabs (binders, is, u)) env' end - handle Unif => (proj_fail thy params; raise Unif)); - -fun unify thy = unif thy []; - - - -(*** Matching ***) - -exception MATCH; - -fun typ_match thy TU tyenv = Sign.typ_match thy TU tyenv - handle Type.TYPE_MATCH => raise MATCH; - -(*First-order matching; - The pattern and object may have variables in common. - Instantiation does not affect the object, so matching ?a with ?a+1 works. - Object is eta-contracted on the fly (by eta-expanding the pattern). - Precondition: the pattern is already eta-contracted! 
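For instance, matching the pattern ?f $ ?x against g $ a yields the instantiation ?f := g, ?x := a, and matching ?a against ?a + 1 yields ?a := ?a + 1, which is fine precisely because the object is never instantiated.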
- Types are matched on the fly*) -fun first_order_match thy = - let - fun mtch k (instsp as (tyinsts,insts)) = fn - (Var(ixn,T), t) => - if k > 0 andalso Term.is_open t then raise MATCH - else (case Envir.lookup1 insts (ixn, T) of - NONE => (typ_match thy (T, fastype_of t) tyinsts, - Vartab.update_new (ixn, (T, t)) insts) - | SOME u => if Envir.aeconv (t, u) then instsp else raise MATCH) - | (Free (a,T), Free (b,U)) => - if a=b then (typ_match thy (T,U) tyinsts, insts) else raise MATCH - | (Const (a,T), Const (b,U)) => - if a=b then (typ_match thy (T,U) tyinsts, insts) else raise MATCH - | (Bound i, Bound j) => if i=j then instsp else raise MATCH - | (Abs(_,T,t), Abs(_,U,u)) => - mtch (k + 1) (typ_match thy (T,U) tyinsts, insts) (t,u) - | (f$t, g$u) => mtch k (mtch k instsp (f,g)) (t, u) - | (t, Abs(_,U,u)) => mtch (k + 1) instsp ((incr t)$(Bound 0), u) - | _ => raise MATCH - in fn tu => fn env => mtch 0 env tu end; - - -(* Matching of higher-order patterns *) - -fun match_bind(itms,binders,ixn,T,is,t) = - let val js = loose_bnos t - in if null is - then if null js then Vartab.update_new (ixn, (T, t)) itms else raise MATCH - else if subset (op =) (js, is) - then let val t' = if downto0(is,length binders - 1) then t - else mapbnd (idx is) t - in Vartab.update_new (ixn, (T, mkabs (binders, is, t'))) itms end - else raise MATCH - end; - -fun match thy (po as (pat,obj)) envir = -let - (* Pre: pat and obj have same type *) - fun mtch binders (pat,obj) (env as (iTs,itms)) = - case pat of - Abs(ns,Ts,ts) => - (case obj of - Abs(nt,Tt,tt) => mtch ((nt,Tt)::binders) (ts,tt) env - | _ => let val Tt = Envir.subst_type iTs Ts - in mtch((ns,Tt)::binders) (ts,(incr obj)$Bound(0)) env end) - | _ => (case obj of - Abs(nt,Tt,tt) => - mtch((nt,Tt)::binders) ((incr pat)$Bound(0),tt) env - | _ => cases(binders,env,pat,obj)) - - and cases(binders,env as (iTs,itms),pat,obj) = - let val (ph,pargs) = strip_comb pat - fun rigrig1(iTs,oargs) = fold (mtch binders) (pargs~~oargs) (iTs,itms) - handle ListPair.UnequalLengths => raise MATCH - fun rigrig2((a:string,Ta),(b,Tb),oargs) = - if a <> b then raise MATCH - else rigrig1(typ_match thy (Ta,Tb) iTs, oargs) - in case ph of - Var(ixn,T) => - let val is = ints_of pargs - in case Envir.lookup1 itms (ixn, T) of - NONE => (iTs,match_bind(itms,binders,ixn,T,is,obj)) - | SOME u => if Envir.aeconv (obj, red u is []) then env - else raise MATCH - end - | _ => - let val (oh,oargs) = strip_comb obj - in case (ph,oh) of - (Const c,Const d) => rigrig2(c,d,oargs) - | (Free f,Free g) => rigrig2(f,g,oargs) - | (Bound i,Bound j) => if i<>j then raise MATCH - else rigrig1(iTs,oargs) - | (Abs _, _) => raise Pattern - | (_, Abs _) => raise Pattern - | _ => raise MATCH - end - end; - - val pT = fastype_of pat - and oT = fastype_of obj - val envir' = apfst (typ_match thy (pT, oT)) envir; -in mtch [] po envir' handle Pattern => first_order_match thy po envir' end; - -fun matches thy po = (match thy po (Vartab.empty, Vartab.empty); true) handle MATCH => false; - -fun matchess thy (ps, os) = - length ps = length os andalso - ((fold (match thy) (ps ~~ os) (Vartab.empty, Vartab.empty); true) handle MATCH => false); - -fun equiv thy (t, u) = matches thy (t, u) andalso matches thy (u, t); - - -(* Does pat match a subterm of obj? 
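For example, with pat = ?x + ?y and obj = f (a + b) the answer is yes, via the subterm a + b.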
*) -fun matches_subterm thy (pat, obj) = - let - fun msub bounds obj = matches thy (pat, obj) orelse - (case obj of - Abs (x, T, t) => msub (bounds + 1) (snd (Term.dest_abs (Name.bound bounds, T, t))) - | t $ u => msub bounds t orelse msub bounds u - | _ => false) - in msub 0 obj end; - -fun first_order(Abs(_,_,t)) = first_order t - | first_order(t $ u) = first_order t andalso first_order u andalso - not(is_Var t) - | first_order _ = true; - -fun pattern (Abs (_, _, t)) = pattern t - | pattern t = - let val (head, args) = strip_comb t in - if is_Var head then - forall is_Bound args andalso not (has_duplicates (op aconv) args) - else forall pattern args - end; - - -(* rewriting -- simple but fast *) - -fun match_rew thy tm (tm1, tm2) = - let val rtm = the_default tm2 (Term.rename_abs tm1 tm tm2) in - SOME (Envir.subst_term (match thy (tm1, tm) (Vartab.empty, Vartab.empty)) rtm, rtm) - handle MATCH => NONE - end; - -fun gen_rewrite_term bot thy rules procs tm = - let - val skel0 = Bound 0; - - fun variant_absfree bounds (x, T, t) = - let - val (x', t') = Term.dest_abs (Name.bound bounds, T, t); - fun abs u = Abs (x, T, abstract_over (Free (x', T), u)); - in (abs, t') end; - - fun rew (Abs (_, _, body) $ t) = SOME (subst_bound (t, body), skel0) - | rew tm = - (case get_first (match_rew thy tm) rules of - NONE => Option.map (rpair skel0) (get_first (fn p => p tm) procs) - | x => x); - - fun rew_sub r bounds skel (tm1 $ tm2) = (case tm1 of - Abs (_, _, body) => - let val tm' = subst_bound (tm2, body) - in SOME (the_default tm' (rew_sub r bounds skel0 tm')) end - | _ => - let val (skel1, skel2) = (case skel of - skel1 $ skel2 => (skel1, skel2) - | _ => (skel0, skel0)) - in case r bounds skel1 tm1 of - SOME tm1' => (case r bounds skel2 tm2 of - SOME tm2' => SOME (tm1' $ tm2') - | NONE => SOME (tm1' $ tm2)) - | NONE => (case r bounds skel2 tm2 of - SOME tm2' => SOME (tm1 $ tm2') - | NONE => NONE) - end) - | rew_sub r bounds skel (Abs body) = - let - val (abs, tm') = variant_absfree bounds body; - val skel' = (case skel of Abs (_, _, skel') => skel' | _ => skel0) - in case r (bounds + 1) skel' tm' of - SOME tm'' => SOME (abs tm'') - | NONE => NONE - end - | rew_sub _ _ _ _ = NONE; - - fun rew_bot bounds (Var _) _ = NONE - | rew_bot bounds skel tm = (case rew_sub rew_bot bounds skel tm of - SOME tm1 => (case rew tm1 of - SOME (tm2, skel') => SOME (the_default tm2 (rew_bot bounds skel' tm2)) - | NONE => SOME tm1) - | NONE => (case rew tm of - SOME (tm1, skel') => SOME (the_default tm1 (rew_bot bounds skel' tm1)) - | NONE => NONE)); - - fun rew_top bounds _ tm = (case rew tm of - SOME (tm1, _) => (case rew_sub rew_top bounds skel0 tm1 of - SOME tm2 => SOME (the_default tm2 (rew_top bounds skel0 tm2)) - | NONE => SOME tm1) - | NONE => (case rew_sub rew_top bounds skel0 tm of - SOME tm1 => SOME (the_default tm1 (rew_top bounds skel0 tm1)) - | NONE => NONE)); - - in the_default tm ((if bot then rew_bot else rew_top) 0 skel0 tm) end; - -val rewrite_term = gen_rewrite_term true; -val rewrite_term_top = gen_rewrite_term false; - -end; - diff --git a/core/Pure/primitive_defs.ML b/core/Pure/primitive_defs.ML deleted file mode 100644 index da773ff5..00000000 --- a/core/Pure/primitive_defs.ML +++ /dev/null @@ -1,80 +0,0 @@ -(* Title: Pure/primitive_defs.ML - Author: Makarius - -Primitive definition forms. 
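For example (a sketch, writing == for meta-equality): from a definition of the form f x == x + x, dest_def below extracts the pair (f x, x + x) together with the closed form !!x. f x == x + x, and abs_def turns !!x. f x == x + x back into f == (%x. x + x).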
-*) - -signature PRIMITIVE_DEFS = -sig - val dest_def: Proof.context -> (term -> bool) -> (string -> bool) -> (string -> bool) -> - term -> (term * term) * term - val abs_def: term -> term * term -end; - -structure Primitive_Defs: PRIMITIVE_DEFS = -struct - -fun term_kind (Const _) = "existing constant " - | term_kind (Free _) = "free variable " - | term_kind (Bound _) = "bound variable " - | term_kind _ = ""; - -(*c x == t[x] to !!x. c x == t[x]*) -fun dest_def ctxt check_head is_fixed is_fixedT eq = - let - fun err msg = raise TERM (msg, [eq]); - val eq_vars = Term.strip_all_vars eq; - val eq_body = Term.strip_all_body eq; - - val display_terms = - commas_quote o map (Syntax.string_of_term ctxt o Syntax_Trans.bound_vars eq_vars); - val display_types = commas_quote o map (Syntax.string_of_typ ctxt); - - val (raw_lhs, rhs) = Logic.dest_equals eq_body handle TERM _ => err "Not a meta-equality (==)"; - val lhs = Envir.beta_eta_contract raw_lhs; - val (head, args) = Term.strip_comb lhs; - val head_tfrees = Term.add_tfrees head []; - - fun check_arg (Bound _) = true - | check_arg (Free (x, _)) = not (is_fixed x) - | check_arg (Const ("Pure.type", Type ("itself", [TFree _]))) = true - | check_arg _ = false; - fun close_arg (Bound _) t = t - | close_arg x t = Logic.all x t; - - val lhs_bads = filter_out check_arg args; - val lhs_dups = duplicates (op aconv) args; - val rhs_extras = Term.fold_aterms (fn v as Free (x, _) => - if is_fixed x orelse member (op aconv) args v then I - else insert (op aconv) v | _ => I) rhs []; - val rhs_extrasT = Term.fold_aterms (Term.fold_types (fn v as TFree (a, S) => - if is_fixedT a orelse member (op =) head_tfrees (a, S) then I - else insert (op =) v | _ => I)) rhs []; - in - if not (check_head head) then - err ("Bad head of lhs: " ^ term_kind head ^ display_terms [head]) - else if not (null lhs_bads) then - err ("Bad arguments on lhs: " ^ display_terms lhs_bads) - else if not (null lhs_dups) then - err ("Duplicate arguments on lhs: " ^ display_terms lhs_dups) - else if not (null rhs_extras) then - err ("Extra variables on rhs: " ^ display_terms rhs_extras) - else if not (null rhs_extrasT) then - err ("Extra type variables on rhs: " ^ display_types rhs_extrasT) - else if exists_subterm (fn t => t aconv head) rhs then - err "Entity to be defined occurs on rhs" - else - ((lhs, rhs), fold_rev close_arg args (Logic.list_all (eq_vars, (Logic.mk_equals (lhs, rhs))))) - end; - -(*!!x. c x == t[x] to c == %x. t[x]*) -fun abs_def eq = - let - val body = Term.strip_all_body eq; - val vars = map Free (Term.rename_wrt_term body (Term.strip_all_vars eq)); - val (lhs, rhs) = Logic.dest_equals (Term.subst_bounds (vars, body)); - val (lhs', args) = Term.strip_comb lhs; - val rhs' = fold_rev (absfree o dest_Free) args rhs; - in (lhs', rhs') end; - -end; diff --git a/core/Pure/proofterm.ML b/core/Pure/proofterm.ML deleted file mode 100644 index e672d513..00000000 --- a/core/Pure/proofterm.ML +++ /dev/null @@ -1,1598 +0,0 @@ -(* Title: Pure/proofterm.ML - Author: Stefan Berghofer, TU Muenchen - -LF style proof terms. 
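Roughly: Abst binds a term variable (!!-introduction), AbsP binds a hypothesis (==>-introduction), prf % t applies a proof to a term (!!-elimination), prf1 %% prf2 applies a proof to a proof (==>-elimination), PBound is a de Bruijn reference to a bound hypothesis, PAxm, Oracle, Promise and PThm stand for axioms, oracle invocations, promised (still running) proofs and named theorems, and MinProof is a proof with all detail omitted.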
-*) - -infix 8 % %% %>; - -signature BASIC_PROOFTERM = -sig - val proofs: int Unsynchronized.ref - - datatype proof = - MinProof - | PBound of int - | Abst of string * typ option * proof - | AbsP of string * term option * proof - | op % of proof * term option - | op %% of proof * proof - | Hyp of term - | PAxm of string * term * typ list option - | OfClass of typ * class - | Oracle of string * term * typ list option - | Promise of serial * term * typ list - | PThm of serial * ((string * term * typ list option) * proof_body future) - and proof_body = PBody of - {oracles: (string * term) Ord_List.T, - thms: (serial * (string * term * proof_body future)) Ord_List.T, - proof: proof} - - val %> : proof * term -> proof -end; - -signature PROOFTERM = -sig - include BASIC_PROOFTERM - - type oracle = string * term - type pthm = serial * (string * term * proof_body future) - val proof_of: proof_body -> proof - val join_proof: proof_body future -> proof - val fold_proof_atoms: bool -> (proof -> 'a -> 'a) -> proof list -> 'a -> 'a - val fold_body_thms: (string * term * proof_body -> 'a -> 'a) -> proof_body list -> 'a -> 'a - val join_bodies: proof_body list -> unit - val peek_status: proof_body list -> {failed: bool, oracle: bool, unfinished: bool} - - val oracle_ord: oracle * oracle -> order - val thm_ord: pthm * pthm -> order - val unions_oracles: oracle Ord_List.T list -> oracle Ord_List.T - val unions_thms: pthm Ord_List.T list -> pthm Ord_List.T - val all_oracles_of: proof_body -> oracle Ord_List.T - val approximate_proof_body: proof -> proof_body - val no_proof_body: proof_body - val no_thm_proofs: proof -> proof - - val encode: proof XML.Encode.T - val encode_body: proof_body XML.Encode.T - val decode: proof XML.Decode.T - val decode_body: proof_body XML.Decode.T - - (** primitive operations **) - val proofs_enabled: unit -> bool - val proof_combt: proof * term list -> proof - val proof_combt': proof * term option list -> proof - val proof_combP: proof * proof list -> proof - val strip_combt: proof -> proof * term option list - val strip_combP: proof -> proof * proof list - val strip_thm: proof_body -> proof_body - val map_proof_same: term Same.operation -> typ Same.operation - -> (typ * class -> proof) -> proof Same.operation - val map_proof_terms_same: term Same.operation -> typ Same.operation -> proof Same.operation - val map_proof_types_same: typ Same.operation -> proof Same.operation - val map_proof_terms: (term -> term) -> (typ -> typ) -> proof -> proof - val map_proof_types: (typ -> typ) -> proof -> proof - val fold_proof_terms: (term -> 'a -> 'a) -> (typ -> 'a -> 'a) -> proof -> 'a -> 'a - val maxidx_proof: proof -> int -> int - val size_of_proof: proof -> int - val change_type: typ list option -> proof -> proof - val prf_abstract_over: term -> proof -> proof - val prf_incr_bv: int -> int -> int -> int -> proof -> proof - val incr_pboundvars: int -> int -> proof -> proof - val prf_loose_bvar1: proof -> int -> bool - val prf_loose_Pbvar1: proof -> int -> bool - val prf_add_loose_bnos: int -> int -> proof -> int list * int list -> int list * int list - val norm_proof: Envir.env -> proof -> proof - val norm_proof': Envir.env -> proof -> proof - val prf_subst_bounds: term list -> proof -> proof - val prf_subst_pbounds: proof list -> proof -> proof - val freeze_thaw_prf: proof -> proof * (proof -> proof) - - (** proof terms for specific inference rules **) - val implies_intr_proof: term -> proof -> proof - val implies_intr_proof': term -> proof -> proof - val forall_intr_proof: term -> 
string -> proof -> proof - val forall_intr_proof': term -> proof -> proof - val varify_proof: term -> (string * sort) list -> proof -> proof - val legacy_freezeT: term -> proof -> proof - val rotate_proof: term list -> term -> int -> proof -> proof - val permute_prems_proof: term list -> int -> int -> proof -> proof - val generalize: string list * string list -> int -> proof -> proof - val instantiate: ((indexname * sort) * typ) list * ((indexname * typ) * term) list - -> proof -> proof - val lift_proof: term -> int -> term -> proof -> proof - val incr_indexes: int -> proof -> proof - val assumption_proof: term list -> term -> int -> proof -> proof - val bicompose_proof: bool -> term list -> term list -> term list -> term option -> - int -> int -> proof -> proof -> proof - val equality_axms: (string * term) list - val reflexive_axm: proof - val symmetric_axm: proof - val transitive_axm: proof - val equal_intr_axm: proof - val equal_elim_axm: proof - val abstract_rule_axm: proof - val combination_axm: proof - val reflexive: proof - val symmetric: proof -> proof - val transitive: term -> typ -> proof -> proof -> proof - val abstract_rule: term -> string -> proof -> proof - val combination: term -> term -> term -> term -> typ -> proof -> proof -> proof - val equal_intr: term -> term -> proof -> proof -> proof - val equal_elim: term -> term -> proof -> proof -> proof - val strip_shyps_proof: Sorts.algebra -> (typ * sort) list -> (typ * sort) list -> - sort list -> proof -> proof - val classrel_proof: theory -> class * class -> proof - val arity_proof: theory -> string * sort list * class -> proof - val of_sort_proof: theory -> (typ * class -> proof) -> typ * sort -> proof list - val install_axclass_proofs: - {classrel_proof: theory -> class * class -> proof, - arity_proof: theory -> string * sort list * class -> proof} -> unit - val axm_proof: string -> term -> proof - val oracle_proof: string -> term -> oracle * proof - - (** rewriting on proof terms **) - val add_prf_rrule: proof * proof -> theory -> theory - val add_prf_rproc: (typ list -> term option list -> proof -> (proof * proof) option) -> theory -> theory - val no_skel: proof - val normal_skel: proof - val rewrite_proof: theory -> (proof * proof) list * - (typ list -> term option list -> proof -> (proof * proof) option) list -> proof -> proof - val rewrite_proof_notypes: (proof * proof) list * - (typ list -> term option list -> proof -> (proof * proof) option) list -> proof -> proof - val rew_proof: theory -> proof -> proof - - val promise_proof: theory -> serial -> term -> proof - val fulfill_norm_proof: theory -> (serial * proof_body) list -> proof_body -> proof_body - val thm_proof: theory -> string -> sort list -> term list -> term -> - (serial * proof_body future) list -> proof_body -> pthm * proof - val unconstrain_thm_proof: theory -> sort list -> term -> - (serial * proof_body future) list -> proof_body -> pthm * proof - val get_name: sort list -> term list -> term -> proof -> string - val guess_name: proof -> string -end - -structure Proofterm : PROOFTERM = -struct - -(***** datatype proof *****) - -datatype proof = - MinProof - | PBound of int - | Abst of string * typ option * proof - | AbsP of string * term option * proof - | op % of proof * term option - | op %% of proof * proof - | Hyp of term - | PAxm of string * term * typ list option - | OfClass of typ * class - | Oracle of string * term * typ list option - | Promise of serial * term * typ list - | PThm of serial * ((string * term * typ list option) * proof_body 
future) -and proof_body = PBody of - {oracles: (string * term) Ord_List.T, - thms: (serial * (string * term * proof_body future)) Ord_List.T, - proof: proof}; - -type oracle = string * term; -type pthm = serial * (string * term * proof_body future); - -fun proof_of (PBody {proof, ...}) = proof; -val join_proof = Future.join #> proof_of; - -fun join_thms (thms: pthm list) = ignore (Future.joins (map (#3 o #2) thms)); - - -(***** proof atoms *****) - -fun fold_proof_atoms all f = - let - fun app (Abst (_, _, prf)) = app prf - | app (AbsP (_, _, prf)) = app prf - | app (prf % _) = app prf - | app (prf1 %% prf2) = app prf1 #> app prf2 - | app (prf as PThm (i, (_, body))) = (fn (x, seen) => - if Inttab.defined seen i then (x, seen) - else - let val (x', seen') = - (if all then app (join_proof body) else I) (x, Inttab.update (i, ()) seen) - in (f prf x', seen') end) - | app prf = (fn (x, seen) => (f prf x, seen)); - in fn prfs => fn x => #1 (fold app prfs (x, Inttab.empty)) end; - -fun fold_body_thms f = - let - fun app (PBody {thms, ...}) = - tap join_thms thms |> fold (fn (i, (name, prop, body)) => fn (x, seen) => - if Inttab.defined seen i then (x, seen) - else - let - val body' = Future.join body; - val (x', seen') = app body' (x, Inttab.update (i, ()) seen); - in (f (name, prop, body') x', seen') end); - in fn bodies => fn x => #1 (fold app bodies (x, Inttab.empty)) end; - -fun join_bodies bodies = fold_body_thms (fn _ => fn () => ()) bodies (); - -fun peek_status bodies = - let - fun status (PBody {oracles, thms, ...}) x = - let - val ((oracle, unfinished, failed), seen) = - (thms, x) |-> fold (fn (i, (_, _, body)) => fn (st, seen) => - if Inttab.defined seen i then (st, seen) - else - let val seen' = Inttab.update (i, ()) seen in - (case Future.peek body of - SOME (Exn.Res body') => status body' (st, seen') - | SOME (Exn.Exn _) => - let val (oracle, unfinished, _) = st - in ((oracle, unfinished, true), seen') end - | NONE => - let val (oracle, _, failed) = st - in ((oracle, true, failed), seen') end) - end); - in ((oracle orelse not (null oracles), unfinished, failed), seen) end; - val (oracle, unfinished, failed) = - #1 (fold status bodies ((false, false, false), Inttab.empty)); - in {oracle = oracle, unfinished = unfinished, failed = failed} end; - - -(* proof body *) - -val oracle_ord = prod_ord fast_string_ord Term_Ord.fast_term_ord; -fun thm_ord ((i, _): pthm, (j, _)) = int_ord (j, i); - -val unions_oracles = Ord_List.unions oracle_ord; -val unions_thms = Ord_List.unions thm_ord; - -val all_oracles_of = - let - fun collect (PBody {oracles, thms, ...}) = - tap join_thms thms |> fold (fn (i, (_, _, body)) => fn (x, seen) => - if Inttab.defined seen i then (x, seen) - else - let - val body' = Future.join body; - val (x', seen') = collect body' (x, Inttab.update (i, ()) seen); - in (if null oracles then x' else oracles :: x', seen') end); - in fn body => unions_oracles (#1 (collect body ([], Inttab.empty))) end; - -fun approximate_proof_body prf = - let - val (oracles, thms) = fold_proof_atoms false - (fn Oracle (s, prop, _) => apfst (cons (s, prop)) - | PThm (i, ((name, prop, _), body)) => apsnd (cons (i, (name, prop, body))) - | _ => I) [prf] ([], []); - in - PBody - {oracles = Ord_List.make oracle_ord oracles, - thms = Ord_List.make thm_ord thms, - proof = prf} - end; - -val no_proof_body = PBody {oracles = [], thms = [], proof = MinProof}; -val no_body = Future.value no_proof_body; - -fun no_thm_proofs (PThm (i, (a, _))) = PThm (i, (a, no_body)) - | no_thm_proofs (Abst (x, T, prf)) = 
Abst (x, T, no_thm_proofs prf) - | no_thm_proofs (AbsP (x, t, prf)) = AbsP (x, t, no_thm_proofs prf) - | no_thm_proofs (prf % t) = no_thm_proofs prf % t - | no_thm_proofs (prf1 %% prf2) = no_thm_proofs prf1 %% no_thm_proofs prf2 - | no_thm_proofs a = a; - - -(***** XML data representation *****) - -(* encode *) - -local - -open XML.Encode Term_XML.Encode; - -fun proof prf = prf |> variant - [fn MinProof => ([], []), - fn PBound a => ([int_atom a], []), - fn Abst (a, b, c) => ([a], pair (option typ) proof (b, c)), - fn AbsP (a, b, c) => ([a], pair (option term) proof (b, c)), - fn a % b => ([], pair proof (option term) (a, b)), - fn a %% b => ([], pair proof proof (a, b)), - fn Hyp a => ([], term a), - fn PAxm (a, b, c) => ([a], pair term (option (list typ)) (b, c)), - fn OfClass (a, b) => ([b], typ a), - fn Oracle (a, b, c) => ([a], pair term (option (list typ)) (b, c)), - fn Promise (a, b, c) => ([int_atom a], pair term (list typ) (b, c)), - fn PThm (a, ((b, c, d), body)) => - ([int_atom a, b], triple term (option (list typ)) proof_body (c, d, Future.join body))] -and proof_body (PBody {oracles, thms, proof = prf}) = - triple (list (pair string term)) (list pthm) proof (oracles, thms, prf) -and pthm (a, (b, c, body)) = - pair int (triple string term proof_body) (a, (b, c, Future.join body)); - -in - -val encode = proof; -val encode_body = proof_body; - -end; - - -(* decode *) - -local - -open XML.Decode Term_XML.Decode; - -fun proof prf = prf |> variant - [fn ([], []) => MinProof, - fn ([a], []) => PBound (int_atom a), - fn ([a], b) => let val (c, d) = pair (option typ) proof b in Abst (a, c, d) end, - fn ([a], b) => let val (c, d) = pair (option term) proof b in AbsP (a, c, d) end, - fn ([], a) => op % (pair proof (option term) a), - fn ([], a) => op %% (pair proof proof a), - fn ([], a) => Hyp (term a), - fn ([a], b) => let val (c, d) = pair term (option (list typ)) b in PAxm (a, c, d) end, - fn ([b], a) => OfClass (typ a, b), - fn ([a], b) => let val (c, d) = pair term (option (list typ)) b in Oracle (a, c, d) end, - fn ([a], b) => let val (c, d) = pair term (list typ) b in Promise (int_atom a, c, d) end, - fn ([a, b], c) => - let val (d, e, f) = triple term (option (list typ)) proof_body c - in PThm (int_atom a, ((b, d, e), Future.value f)) end] -and proof_body x = - let val (a, b, c) = triple (list (pair string term)) (list pthm) proof x - in PBody {oracles = a, thms = b, proof = c} end -and pthm x = - let val (a, (b, c, d)) = pair int (triple string term proof_body) x - in (a, (b, c, Future.value d)) end; - -in - -val decode = proof; -val decode_body = proof_body; - -end; - - -(***** proof objects with different levels of detail *****) - -fun (prf %> t) = prf % SOME t; - -val proof_combt = Library.foldl (op %>); -val proof_combt' = Library.foldl (op %); -val proof_combP = Library.foldl (op %%); - -fun strip_combt prf = - let fun stripc (prf % t, ts) = stripc (prf, t::ts) - | stripc x = x - in stripc (prf, []) end; - -fun strip_combP prf = - let fun stripc (prf %% prf', prfs) = stripc (prf, prf'::prfs) - | stripc x = x - in stripc (prf, []) end; - -fun strip_thm (body as PBody {proof, ...}) = - (case strip_combt (fst (strip_combP proof)) of - (PThm (_, (_, body')), _) => Future.join body' - | _ => body); - -val mk_Abst = fold_rev (fn (s, T:typ) => fn prf => Abst (s, NONE, prf)); -fun mk_AbsP (i, prf) = funpow i (fn prf => AbsP ("H", NONE, prf)) prf; - -fun map_proof_same term typ ofclass = - let - val typs = Same.map typ; - - fun proof (Abst (s, T, prf)) = - (Abst (s, Same.map_option 
typ T, Same.commit proof prf) - handle Same.SAME => Abst (s, T, proof prf)) - | proof (AbsP (s, t, prf)) = - (AbsP (s, Same.map_option term t, Same.commit proof prf) - handle Same.SAME => AbsP (s, t, proof prf)) - | proof (prf % t) = - (proof prf % Same.commit (Same.map_option term) t - handle Same.SAME => prf % Same.map_option term t) - | proof (prf1 %% prf2) = - (proof prf1 %% Same.commit proof prf2 - handle Same.SAME => prf1 %% proof prf2) - | proof (PAxm (a, prop, SOME Ts)) = PAxm (a, prop, SOME (typs Ts)) - | proof (OfClass T_c) = ofclass T_c - | proof (Oracle (a, prop, SOME Ts)) = Oracle (a, prop, SOME (typs Ts)) - | proof (Promise (i, prop, Ts)) = Promise (i, prop, typs Ts) - | proof (PThm (i, ((a, prop, SOME Ts), body))) = - PThm (i, ((a, prop, SOME (typs Ts)), body)) - | proof _ = raise Same.SAME; - in proof end; - -fun map_proof_terms_same term typ = map_proof_same term typ (fn (T, c) => OfClass (typ T, c)); -fun map_proof_types_same typ = map_proof_terms_same (Term_Subst.map_types_same typ) typ; - -fun same eq f x = - let val x' = f x - in if eq (x, x') then raise Same.SAME else x' end; - -fun map_proof_terms f g = Same.commit (map_proof_terms_same (same (op =) f) (same (op =) g)); -fun map_proof_types f = Same.commit (map_proof_types_same (same (op =) f)); - -fun fold_proof_terms f g (Abst (_, SOME T, prf)) = g T #> fold_proof_terms f g prf - | fold_proof_terms f g (Abst (_, NONE, prf)) = fold_proof_terms f g prf - | fold_proof_terms f g (AbsP (_, SOME t, prf)) = f t #> fold_proof_terms f g prf - | fold_proof_terms f g (AbsP (_, NONE, prf)) = fold_proof_terms f g prf - | fold_proof_terms f g (prf % SOME t) = fold_proof_terms f g prf #> f t - | fold_proof_terms f g (prf % NONE) = fold_proof_terms f g prf - | fold_proof_terms f g (prf1 %% prf2) = - fold_proof_terms f g prf1 #> fold_proof_terms f g prf2 - | fold_proof_terms _ g (PAxm (_, _, SOME Ts)) = fold g Ts - | fold_proof_terms _ g (OfClass (T, _)) = g T - | fold_proof_terms _ g (Oracle (_, _, SOME Ts)) = fold g Ts - | fold_proof_terms _ g (Promise (_, _, Ts)) = fold g Ts - | fold_proof_terms _ g (PThm (_, ((_, _, SOME Ts), _))) = fold g Ts - | fold_proof_terms _ _ _ = I; - -fun maxidx_proof prf = fold_proof_terms Term.maxidx_term Term.maxidx_typ prf; - -fun size_of_proof (Abst (_, _, prf)) = 1 + size_of_proof prf - | size_of_proof (AbsP (_, t, prf)) = 1 + size_of_proof prf - | size_of_proof (prf % _) = 1 + size_of_proof prf - | size_of_proof (prf1 %% prf2) = size_of_proof prf1 + size_of_proof prf2 - | size_of_proof _ = 1; - -fun change_type opTs (PAxm (name, prop, _)) = PAxm (name, prop, opTs) - | change_type (SOME [T]) (OfClass (_, c)) = OfClass (T, c) - | change_type opTs (Oracle (name, prop, _)) = Oracle (name, prop, opTs) - | change_type opTs (Promise _) = raise Fail "change_type: unexpected promise" - | change_type opTs (PThm (i, ((name, prop, _), body))) = - PThm (i, ((name, prop, opTs), body)) - | change_type _ prf = prf; - - -(***** utilities *****) - -fun strip_abs (_::Ts) (Abs (_, _, t)) = strip_abs Ts t - | strip_abs _ t = t; - -fun mk_abs Ts t = Library.foldl (fn (t', T) => Abs ("", T, t')) (t, Ts); - - -(*Abstraction of a proof term over its occurrences of v, - which must contain no loose bound variables. 
- The resulting proof term is ready to become the body of an Abst.*) - -fun prf_abstract_over v = - let - fun abst' lev u = if v aconv u then Bound lev else - (case u of - Abs (a, T, t) => Abs (a, T, abst' (lev + 1) t) - | f $ t => (abst' lev f $ absth' lev t handle Same.SAME => f $ abst' lev t) - | _ => raise Same.SAME) - and absth' lev t = (abst' lev t handle Same.SAME => t); - - fun abst lev (AbsP (a, t, prf)) = - (AbsP (a, Same.map_option (abst' lev) t, absth lev prf) - handle Same.SAME => AbsP (a, t, abst lev prf)) - | abst lev (Abst (a, T, prf)) = Abst (a, T, abst (lev + 1) prf) - | abst lev (prf1 %% prf2) = (abst lev prf1 %% absth lev prf2 - handle Same.SAME => prf1 %% abst lev prf2) - | abst lev (prf % t) = (abst lev prf % Option.map (absth' lev) t - handle Same.SAME => prf % Same.map_option (abst' lev) t) - | abst _ _ = raise Same.SAME - and absth lev prf = (abst lev prf handle Same.SAME => prf); - - in absth 0 end; - - -(*increments a proof term's non-local bound variables - required when moving a proof term within abstractions - inc is increment for bound variables - lev is level at which a bound variable is considered 'loose'*) - -fun incr_bv' inct tlev t = incr_bv (inct, tlev, t); - -fun prf_incr_bv' incP inct Plev tlev (PBound i) = - if i >= Plev then PBound (i+incP) else raise Same.SAME - | prf_incr_bv' incP inct Plev tlev (AbsP (a, t, body)) = - (AbsP (a, Same.map_option (same (op =) (incr_bv' inct tlev)) t, - prf_incr_bv incP inct (Plev+1) tlev body) handle Same.SAME => - AbsP (a, t, prf_incr_bv' incP inct (Plev+1) tlev body)) - | prf_incr_bv' incP inct Plev tlev (Abst (a, T, body)) = - Abst (a, T, prf_incr_bv' incP inct Plev (tlev+1) body) - | prf_incr_bv' incP inct Plev tlev (prf %% prf') = - (prf_incr_bv' incP inct Plev tlev prf %% prf_incr_bv incP inct Plev tlev prf' - handle Same.SAME => prf %% prf_incr_bv' incP inct Plev tlev prf') - | prf_incr_bv' incP inct Plev tlev (prf % t) = - (prf_incr_bv' incP inct Plev tlev prf % Option.map (incr_bv' inct tlev) t - handle Same.SAME => prf % Same.map_option (same (op =) (incr_bv' inct tlev)) t) - | prf_incr_bv' _ _ _ _ _ = raise Same.SAME -and prf_incr_bv incP inct Plev tlev prf = - (prf_incr_bv' incP inct Plev tlev prf handle Same.SAME => prf); - -fun incr_pboundvars 0 0 prf = prf - | incr_pboundvars incP inct prf = prf_incr_bv incP inct 0 0 prf; - - -fun prf_loose_bvar1 (prf1 %% prf2) k = prf_loose_bvar1 prf1 k orelse prf_loose_bvar1 prf2 k - | prf_loose_bvar1 (prf % SOME t) k = prf_loose_bvar1 prf k orelse loose_bvar1 (t, k) - | prf_loose_bvar1 (_ % NONE) _ = true - | prf_loose_bvar1 (AbsP (_, SOME t, prf)) k = loose_bvar1 (t, k) orelse prf_loose_bvar1 prf k - | prf_loose_bvar1 (AbsP (_, NONE, _)) k = true - | prf_loose_bvar1 (Abst (_, _, prf)) k = prf_loose_bvar1 prf (k+1) - | prf_loose_bvar1 _ _ = false; - -fun prf_loose_Pbvar1 (PBound i) k = i = k - | prf_loose_Pbvar1 (prf1 %% prf2) k = prf_loose_Pbvar1 prf1 k orelse prf_loose_Pbvar1 prf2 k - | prf_loose_Pbvar1 (prf % _) k = prf_loose_Pbvar1 prf k - | prf_loose_Pbvar1 (AbsP (_, _, prf)) k = prf_loose_Pbvar1 prf (k+1) - | prf_loose_Pbvar1 (Abst (_, _, prf)) k = prf_loose_Pbvar1 prf k - | prf_loose_Pbvar1 _ _ = false; - -fun prf_add_loose_bnos plev tlev (PBound i) (is, js) = - if i < plev then (is, js) else (insert (op =) (i-plev) is, js) - | prf_add_loose_bnos plev tlev (prf1 %% prf2) p = - prf_add_loose_bnos plev tlev prf2 - (prf_add_loose_bnos plev tlev prf1 p) - | prf_add_loose_bnos plev tlev (prf % opt) (is, js) = - prf_add_loose_bnos plev tlev prf (case opt of - NONE 
=> (is, insert (op =) ~1 js) - | SOME t => (is, add_loose_bnos (t, tlev, js))) - | prf_add_loose_bnos plev tlev (AbsP (_, opt, prf)) (is, js) = - prf_add_loose_bnos (plev+1) tlev prf (case opt of - NONE => (is, insert (op =) ~1 js) - | SOME t => (is, add_loose_bnos (t, tlev, js))) - | prf_add_loose_bnos plev tlev (Abst (_, _, prf)) p = - prf_add_loose_bnos plev (tlev+1) prf p - | prf_add_loose_bnos _ _ _ _ = ([], []); - - -(**** substitutions ****) - -fun del_conflicting_tvars envT T = Term_Subst.instantiateT - (map_filter (fn ixnS as (_, S) => - (Type.lookup envT ixnS; NONE) handle TYPE _ => - SOME (ixnS, TFree ("'dummy", S))) (Term.add_tvarsT T [])) T; - -fun del_conflicting_vars env t = Term_Subst.instantiate - (map_filter (fn ixnS as (_, S) => - (Type.lookup (Envir.type_env env) ixnS; NONE) handle TYPE _ => - SOME (ixnS, TFree ("'dummy", S))) (Term.add_tvars t []), - map_filter (fn (ixnT as (_, T)) => - (Envir.lookup env ixnT; NONE) handle TYPE _ => - SOME (ixnT, Free ("dummy", T))) (Term.add_vars t [])) t; - -fun norm_proof env = - let - val envT = Envir.type_env env; - fun msg s = warning ("type conflict in norm_proof:\n" ^ s); - fun htype f t = f env t handle TYPE (s, _, _) => - (msg s; f env (del_conflicting_vars env t)); - fun htypeT f T = f envT T handle TYPE (s, _, _) => - (msg s; f envT (del_conflicting_tvars envT T)); - fun htypeTs f Ts = f envT Ts handle TYPE (s, _, _) => - (msg s; f envT (map (del_conflicting_tvars envT) Ts)); - - fun norm (Abst (s, T, prf)) = - (Abst (s, Same.map_option (htypeT Envir.norm_type_same) T, Same.commit norm prf) - handle Same.SAME => Abst (s, T, norm prf)) - | norm (AbsP (s, t, prf)) = - (AbsP (s, Same.map_option (htype Envir.norm_term_same) t, Same.commit norm prf) - handle Same.SAME => AbsP (s, t, norm prf)) - | norm (prf % t) = - (norm prf % Option.map (htype Envir.norm_term) t - handle Same.SAME => prf % Same.map_option (htype Envir.norm_term_same) t) - | norm (prf1 %% prf2) = - (norm prf1 %% Same.commit norm prf2 - handle Same.SAME => prf1 %% norm prf2) - | norm (PAxm (s, prop, Ts)) = - PAxm (s, prop, Same.map_option (htypeTs Envir.norm_types_same) Ts) - | norm (OfClass (T, c)) = - OfClass (htypeT Envir.norm_type_same T, c) - | norm (Oracle (s, prop, Ts)) = - Oracle (s, prop, Same.map_option (htypeTs Envir.norm_types_same) Ts) - | norm (Promise (i, prop, Ts)) = - Promise (i, prop, htypeTs Envir.norm_types_same Ts) - | norm (PThm (i, ((s, t, Ts), body))) = - PThm (i, ((s, t, Same.map_option (htypeTs Envir.norm_types_same) Ts), body)) - | norm _ = raise Same.SAME; - in Same.commit norm end; - - -(***** Remove some types in proof term (to save space) *****) - -fun remove_types (Abs (s, _, t)) = Abs (s, dummyT, remove_types t) - | remove_types (t $ u) = remove_types t $ remove_types u - | remove_types (Const (s, _)) = Const (s, dummyT) - | remove_types t = t; - -fun remove_types_env (Envir.Envir {maxidx, tenv, tyenv}) = - Envir.Envir {maxidx = maxidx, tenv = Vartab.map (K (apsnd remove_types)) tenv, tyenv = tyenv}; - -fun norm_proof' env prf = norm_proof (remove_types_env env) prf; - - -(**** substitution of bound variables ****) - -fun prf_subst_bounds args prf = - let - val n = length args; - fun subst' lev (Bound i) = - (if i Bound (i-n)) (*loose: change it*) - | subst' lev (Abs (a, T, body)) = Abs (a, T, subst' (lev+1) body) - | subst' lev (f $ t) = (subst' lev f $ substh' lev t - handle Same.SAME => f $ subst' lev t) - | subst' _ _ = raise Same.SAME - and substh' lev t = (subst' lev t handle Same.SAME => t); - - fun subst lev (AbsP (a, t, 
body)) = - (AbsP (a, Same.map_option (subst' lev) t, substh lev body) - handle Same.SAME => AbsP (a, t, subst lev body)) - | subst lev (Abst (a, T, body)) = Abst (a, T, subst (lev+1) body) - | subst lev (prf %% prf') = (subst lev prf %% substh lev prf' - handle Same.SAME => prf %% subst lev prf') - | subst lev (prf % t) = (subst lev prf % Option.map (substh' lev) t - handle Same.SAME => prf % Same.map_option (subst' lev) t) - | subst _ _ = raise Same.SAME - and substh lev prf = (subst lev prf handle Same.SAME => prf); - in case args of [] => prf | _ => substh 0 prf end; - -fun prf_subst_pbounds args prf = - let - val n = length args; - fun subst (PBound i) Plev tlev = - (if i < Plev then raise Same.SAME (*var is locally bound*) - else incr_pboundvars Plev tlev (nth args (i-Plev)) - handle General.Subscript => PBound (i-n) (*loose: change it*)) - | subst (AbsP (a, t, body)) Plev tlev = AbsP (a, t, subst body (Plev+1) tlev) - | subst (Abst (a, T, body)) Plev tlev = Abst (a, T, subst body Plev (tlev+1)) - | subst (prf %% prf') Plev tlev = (subst prf Plev tlev %% substh prf' Plev tlev - handle Same.SAME => prf %% subst prf' Plev tlev) - | subst (prf % t) Plev tlev = subst prf Plev tlev % t - | subst prf _ _ = raise Same.SAME - and substh prf Plev tlev = (subst prf Plev tlev handle Same.SAME => prf) - in case args of [] => prf | _ => substh prf 0 0 end; - - -(**** Freezing and thawing of variables in proof terms ****) - -local - -fun frzT names = - map_type_tvar (fn (ixn, S) => TFree (the (AList.lookup (op =) names ixn), S)); - -fun thawT names = - map_type_tfree (fn (a, S) => - (case AList.lookup (op =) names a of - NONE => TFree (a, S) - | SOME ixn => TVar (ixn, S))); - -fun freeze names names' (t $ u) = - freeze names names' t $ freeze names names' u - | freeze names names' (Abs (s, T, t)) = - Abs (s, frzT names' T, freeze names names' t) - | freeze names names' (Const (s, T)) = Const (s, frzT names' T) - | freeze names names' (Free (s, T)) = Free (s, frzT names' T) - | freeze names names' (Var (ixn, T)) = - Free (the (AList.lookup (op =) names ixn), frzT names' T) - | freeze names names' t = t; - -fun thaw names names' (t $ u) = - thaw names names' t $ thaw names names' u - | thaw names names' (Abs (s, T, t)) = - Abs (s, thawT names' T, thaw names names' t) - | thaw names names' (Const (s, T)) = Const (s, thawT names' T) - | thaw names names' (Free (s, T)) = - let val T' = thawT names' T in - (case AList.lookup (op =) names s of - NONE => Free (s, T') - | SOME ixn => Var (ixn, T')) - end - | thaw names names' (Var (ixn, T)) = Var (ixn, thawT names' T) - | thaw names names' t = t; - -in - -fun freeze_thaw_prf prf = - let - val (fs, Tfs, vs, Tvs) = fold_proof_terms - (fn t => fn (fs, Tfs, vs, Tvs) => - (Term.add_free_names t fs, Term.add_tfree_names t Tfs, - Term.add_var_names t vs, Term.add_tvar_names t Tvs)) - (fn T => fn (fs, Tfs, vs, Tvs) => - (fs, Term.add_tfree_namesT T Tfs, - vs, Term.add_tvar_namesT T Tvs)) - prf ([], [], [], []); - val names = vs ~~ Name.variant_list fs (map fst vs); - val names' = Tvs ~~ Name.variant_list Tfs (map fst Tvs); - val rnames = map swap names; - val rnames' = map swap names'; - in - (map_proof_terms (freeze names names') (frzT names') prf, - map_proof_terms (thaw rnames rnames') (thawT rnames')) - end; - -end; - - -(***** implication introduction *****) - -fun gen_implies_intr_proof f h prf = - let - fun abshyp i (Hyp t) = if h aconv t then PBound i else raise Same.SAME - | abshyp i (Abst (s, T, prf)) = Abst (s, T, abshyp i prf) - | abshyp i (AbsP (s, t, 
prf)) = AbsP (s, t, abshyp (i + 1) prf) - | abshyp i (prf % t) = abshyp i prf % t - | abshyp i (prf1 %% prf2) = - (abshyp i prf1 %% abshyph i prf2 - handle Same.SAME => prf1 %% abshyp i prf2) - | abshyp _ _ = raise Same.SAME - and abshyph i prf = (abshyp i prf handle Same.SAME => prf); - in - AbsP ("H", f h, abshyph 0 prf) - end; - -val implies_intr_proof = gen_implies_intr_proof (K NONE); -val implies_intr_proof' = gen_implies_intr_proof SOME; - - -(***** forall introduction *****) - -fun forall_intr_proof x a prf = Abst (a, NONE, prf_abstract_over x prf); - -fun forall_intr_proof' t prf = - let val (a, T) = (case t of Var ((a, _), T) => (a, T) | Free p => p) - in Abst (a, SOME T, prf_abstract_over t prf) end; - - -(***** varify *****) - -fun varify_proof t fixed prf = - let - val fs = Term.fold_types (Term.fold_atyps - (fn TFree v => if member (op =) fixed v then I else insert (op =) v | _ => I)) t []; - val used = Name.context - |> fold_types (fold_atyps (fn TVar ((a, _), _) => Name.declare a | _ => I)) t; - val fmap = fs ~~ #1 (fold_map Name.variant (map fst fs) used); - fun thaw (f as (a, S)) = - (case AList.lookup (op =) fmap f of - NONE => TFree f - | SOME b => TVar ((b, 0), S)); - in map_proof_terms (map_types (map_type_tfree thaw)) (map_type_tfree thaw) prf end; - - -local - -fun new_name ix (pairs, used) = - let val v = singleton (Name.variant_list used) (string_of_indexname ix) - in ((ix, v) :: pairs, v :: used) end; - -fun freeze_one alist (ix, sort) = - (case AList.lookup (op =) alist ix of - NONE => TVar (ix, sort) - | SOME name => TFree (name, sort)); - -in - -fun legacy_freezeT t prf = - let - val used = Term.add_tfree_names t []; - val (alist, _) = fold_rev new_name (map #1 (Term.add_tvars t [])) ([], used); - in - (case alist of - [] => prf (*nothing to do!*) - | _ => - let val frzT = map_type_tvar (freeze_one alist) - in map_proof_terms (map_types frzT) frzT prf end) - end; - -end; - - -(***** rotate assumptions *****) - -fun rotate_proof Bs Bi m prf = - let - val params = Term.strip_all_vars Bi; - val asms = Logic.strip_imp_prems (Term.strip_all_body Bi); - val i = length asms; - val j = length Bs; - in - mk_AbsP (j+1, proof_combP (prf, map PBound - (j downto 1) @ [mk_Abst params (mk_AbsP (i, - proof_combP (proof_combt (PBound i, map Bound ((length params - 1) downto 0)), - map PBound (((i-m-1) downto 0) @ ((i-1) downto (i-m))))))])) - end; - - -(***** permute premises *****) - -fun permute_prems_proof prems j k prf = - let val n = length prems - in mk_AbsP (n, proof_combP (prf, - map PBound ((n-1 downto n-j) @ (k-1 downto 0) @ (n-j-1 downto k)))) - end; - - -(***** generalization *****) - -fun generalize (tfrees, frees) idx = - Same.commit (map_proof_terms_same - (Term_Subst.generalize_same (tfrees, frees) idx) - (Term_Subst.generalizeT_same tfrees idx)); - - -(***** instantiation *****) - -fun instantiate (instT, inst) = - Same.commit (map_proof_terms_same - (Term_Subst.instantiate_same (instT, map (apsnd remove_types) inst)) - (Term_Subst.instantiateT_same instT)); - - -(***** lifting *****) - -fun lift_proof Bi inc prop prf = - let - fun lift'' Us Ts t = - strip_abs Ts (Logic.incr_indexes (Us, inc) (mk_abs Ts t)); - - fun lift' Us Ts (Abst (s, T, prf)) = - (Abst (s, Same.map_option (Logic.incr_tvar_same inc) T, lifth' Us (dummyT::Ts) prf) - handle Same.SAME => Abst (s, T, lift' Us (dummyT::Ts) prf)) - | lift' Us Ts (AbsP (s, t, prf)) = - (AbsP (s, Same.map_option (same (op =) (lift'' Us Ts)) t, lifth' Us Ts prf) - handle Same.SAME => AbsP (s, t, lift' Us Ts prf)) - 
| lift' Us Ts (prf % t) = (lift' Us Ts prf % Option.map (lift'' Us Ts) t - handle Same.SAME => prf % Same.map_option (same (op =) (lift'' Us Ts)) t) - | lift' Us Ts (prf1 %% prf2) = (lift' Us Ts prf1 %% lifth' Us Ts prf2 - handle Same.SAME => prf1 %% lift' Us Ts prf2) - | lift' _ _ (PAxm (s, prop, Ts)) = - PAxm (s, prop, (Same.map_option o Same.map) (Logic.incr_tvar_same inc) Ts) - | lift' _ _ (OfClass (T, c)) = - OfClass (Logic.incr_tvar_same inc T, c) - | lift' _ _ (Oracle (s, prop, Ts)) = - Oracle (s, prop, (Same.map_option o Same.map) (Logic.incr_tvar_same inc) Ts) - | lift' _ _ (Promise (i, prop, Ts)) = - Promise (i, prop, Same.map (Logic.incr_tvar_same inc) Ts) - | lift' _ _ (PThm (i, ((s, prop, Ts), body))) = - PThm (i, ((s, prop, (Same.map_option o Same.map) (Logic.incr_tvar inc) Ts), body)) - | lift' _ _ _ = raise Same.SAME - and lifth' Us Ts prf = (lift' Us Ts prf handle Same.SAME => prf); - - val ps = map (Logic.lift_all inc Bi) (Logic.strip_imp_prems prop); - val k = length ps; - - fun mk_app b (i, j, prf) = - if b then (i-1, j, prf %% PBound i) else (i, j-1, prf %> Bound j); - - fun lift Us bs i j (Const ("Pure.imp", _) $ A $ B) = - AbsP ("H", NONE (*A*), lift Us (true::bs) (i+1) j B) - | lift Us bs i j (Const ("Pure.all", _) $ Abs (a, T, t)) = - Abst (a, NONE (*T*), lift (T::Us) (false::bs) i (j+1) t) - | lift Us bs i j _ = proof_combP (lifth' (rev Us) [] prf, - map (fn k => (#3 (fold_rev mk_app bs (i-1, j-1, PBound k)))) - (i + k - 1 downto i)); - in - mk_AbsP (k, lift [] [] 0 0 Bi) - end; - -fun incr_indexes i = - Same.commit (map_proof_terms_same - (Logic.incr_indexes_same ([], i)) (Logic.incr_tvar_same i)); - - -(***** proof by assumption *****) - -fun mk_asm_prf t i m = - let - fun imp_prf _ i 0 = PBound i - | imp_prf (Const ("Pure.imp", _) $ A $ B) i m = AbsP ("H", NONE (*A*), imp_prf B (i+1) (m-1)) - | imp_prf _ i _ = PBound i; - fun all_prf (Const ("Pure.all", _) $ Abs (a, T, t)) = Abst (a, NONE (*T*), all_prf t) - | all_prf t = imp_prf t (~i) m - in all_prf t end; - -fun assumption_proof Bs Bi n prf = - mk_AbsP (length Bs, proof_combP (prf, - map PBound (length Bs - 1 downto 0) @ [mk_asm_prf Bi n ~1])); - - -(***** Composition of object rule with proof state *****) - -fun flatten_params_proof i j n (Const ("Pure.imp", _) $ A $ B, k) = - AbsP ("H", NONE (*A*), flatten_params_proof (i+1) j n (B, k)) - | flatten_params_proof i j n (Const ("Pure.all", _) $ Abs (a, T, t), k) = - Abst (a, NONE (*T*), flatten_params_proof i (j+1) n (t, k)) - | flatten_params_proof i j n (_, k) = proof_combP (proof_combt (PBound (k+i), - map Bound (j-1 downto 0)), map PBound (remove (op =) (i-n) (i-1 downto 0))); - -fun bicompose_proof flatten Bs oldAs newAs A n m rprf sprf = - let - val la = length newAs; - val lb = length Bs; - in - mk_AbsP (lb+la, proof_combP (sprf, - map PBound (lb + la - 1 downto la)) %% - proof_combP (rprf, (if n>0 then [mk_asm_prf (the A) n m] else []) @ - map (if flatten then flatten_params_proof 0 0 n else PBound o snd) - (oldAs ~~ (la - 1 downto 0)))) - end; - - -(***** axioms for equality *****) - -val aT = TFree ("'a", []); -val bT = TFree ("'b", []); -val x = Free ("x", aT); -val y = Free ("y", aT); -val z = Free ("z", aT); -val A = Free ("A", propT); -val B = Free ("B", propT); -val f = Free ("f", aT --> bT); -val g = Free ("g", aT --> bT); - -val equality_axms = - [("reflexive", Logic.mk_equals (x, x)), - ("symmetric", Logic.mk_implies (Logic.mk_equals (x, y), Logic.mk_equals (y, x))), - ("transitive", - Logic.list_implies ([Logic.mk_equals (x, y), 
Logic.mk_equals (y, z)], Logic.mk_equals (x, z))), - ("equal_intr", - Logic.list_implies ([Logic.mk_implies (A, B), Logic.mk_implies (B, A)], Logic.mk_equals (A, B))), - ("equal_elim", Logic.list_implies ([Logic.mk_equals (A, B), A], B)), - ("abstract_rule", - Logic.mk_implies - (Logic.all x - (Logic.mk_equals (f $ x, g $ x)), Logic.mk_equals (lambda x (f $ x), lambda x (g $ x)))), - ("combination", Logic.list_implies - ([Logic.mk_equals (f, g), Logic.mk_equals (x, y)], Logic.mk_equals (f $ x, g $ y)))]; - -val [reflexive_axm, symmetric_axm, transitive_axm, equal_intr_axm, - equal_elim_axm, abstract_rule_axm, combination_axm] = - map (fn (s, t) => PAxm ("Pure." ^ s, Logic.varify_global t, NONE)) equality_axms; - -val reflexive = reflexive_axm % NONE; - -fun symmetric (prf as PAxm ("Pure.reflexive", _, _) % _) = prf - | symmetric prf = symmetric_axm % NONE % NONE %% prf; - -fun transitive _ _ (PAxm ("Pure.reflexive", _, _) % _) prf2 = prf2 - | transitive _ _ prf1 (PAxm ("Pure.reflexive", _, _) % _) = prf1 - | transitive u (Type ("prop", [])) prf1 prf2 = - transitive_axm % NONE % SOME (remove_types u) % NONE %% prf1 %% prf2 - | transitive u T prf1 prf2 = - transitive_axm % NONE % NONE % NONE %% prf1 %% prf2; - -fun abstract_rule x a prf = - abstract_rule_axm % NONE % NONE %% forall_intr_proof x a prf; - -fun check_comb (PAxm ("Pure.combination", _, _) % f % g % _ % _ %% prf %% _) = - is_some f orelse check_comb prf - | check_comb (PAxm ("Pure.transitive", _, _) % _ % _ % _ %% prf1 %% prf2) = - check_comb prf1 andalso check_comb prf2 - | check_comb (PAxm ("Pure.symmetric", _, _) % _ % _ %% prf) = check_comb prf - | check_comb _ = false; - -fun combination f g t u (Type (_, [T, U])) prf1 prf2 = - let - val f = Envir.beta_norm f; - val g = Envir.beta_norm g; - val prf = - if check_comb prf1 then - combination_axm % NONE % NONE - else - (case prf1 of - PAxm ("Pure.reflexive", _, _) % _ => - combination_axm %> remove_types f % NONE - | _ => combination_axm %> remove_types f %> remove_types g) - in - (case T of - Type ("fun", _) => prf % - (case head_of f of - Abs _ => SOME (remove_types t) - | Var _ => SOME (remove_types t) - | _ => NONE) % - (case head_of g of - Abs _ => SOME (remove_types u) - | Var _ => SOME (remove_types u) - | _ => NONE) %% prf1 %% prf2 - | _ => prf % NONE % NONE %% prf1 %% prf2) - end; - -fun equal_intr A B prf1 prf2 = - equal_intr_axm %> remove_types A %> remove_types B %% prf1 %% prf2; - -fun equal_elim A B prf1 prf2 = - equal_elim_axm %> remove_types A %> remove_types B %% prf1 %% prf2; - - -(**** type classes ****) - -fun strip_shyps_proof algebra present witnessed extra_sorts prf = - let - fun get S2 (T, S1) = if Sorts.sort_le algebra (S1, S2) then SOME T else NONE; - val extra = map (fn S => (TFree ("'dummy", S), S)) extra_sorts; - val replacements = present @ extra @ witnessed; - fun replace T = - if exists (fn (T', _) => T' = T) present then raise Same.SAME - else - (case get_first (get (Type.sort_of_atyp T)) replacements of - SOME T' => T' - | NONE => raise Fail "strip_shyps_proof: bad type variable in proof term"); - in Same.commit (map_proof_types_same (Term_Subst.map_atypsT_same replace)) prf end; - - -local - -type axclass_proofs = - {classrel_proof: theory -> class * class -> proof, - arity_proof: theory -> string * sort list * class -> proof}; - -val axclass_proofs: axclass_proofs Single_Assignment.var = - Single_Assignment.var "Proofterm.axclass_proofs"; - -fun axclass_proof which thy x = - (case Single_Assignment.peek axclass_proofs of - NONE => raise Fail 
"Axclass proof operations not installed" - | SOME prfs => which prfs thy x); - -in - -val classrel_proof = axclass_proof #classrel_proof; -val arity_proof = axclass_proof #arity_proof; - -fun install_axclass_proofs prfs = Single_Assignment.assign axclass_proofs prfs; - -end; - - -local - -fun canonical_instance typs = - let - val names = Name.invent Name.context Name.aT (length typs); - val instT = map2 (fn a => fn T => (((a, 0), []), Type.strip_sorts T)) names typs; - in instantiate (instT, []) end; - -in - -fun of_sort_proof thy hyps = - Sorts.of_sort_derivation (Sign.classes_of thy) - {class_relation = fn typ => fn (prf, c1) => fn c2 => - if c1 = c2 then prf - else canonical_instance [typ] (classrel_proof thy (c1, c2)) %% prf, - type_constructor = fn (a, typs) => fn dom => fn c => - let val Ss = map (map snd) dom and prfs = maps (map fst) dom - in proof_combP (canonical_instance typs (arity_proof thy (a, Ss, c)), prfs) end, - type_variable = fn typ => map (fn c => (hyps (typ, c), c)) (Type.sort_of_atyp typ)}; - -end; - - -(***** axioms and theorems *****) - -val proofs = Unsynchronized.ref 2; -fun proofs_enabled () = ! proofs >= 2; - -fun vars_of t = map Var (rev (Term.add_vars t [])); -fun frees_of t = map Free (rev (Term.add_frees t [])); - -fun test_args _ [] = true - | test_args is (Bound i :: ts) = - not (member (op =) is i) andalso test_args (i :: is) ts - | test_args _ _ = false; - -fun is_fun (Type ("fun", _)) = true - | is_fun (TVar _) = true - | is_fun _ = false; - -fun add_funvars Ts (vs, t) = - if is_fun (fastype_of1 (Ts, t)) then - union (op =) vs (map_filter (fn Var (ixn, T) => - if is_fun T then SOME ixn else NONE | _ => NONE) (vars_of t)) - else vs; - -fun add_npvars q p Ts (vs, Const ("Pure.imp", _) $ t $ u) = - add_npvars q p Ts (add_npvars q (not p) Ts (vs, t), u) - | add_npvars q p Ts (vs, Const ("Pure.all", Type (_, [Type (_, [T, _]), _])) $ t) = - add_npvars q p Ts (vs, if p andalso q then betapply (t, Var (("",0), T)) else t) - | add_npvars q p Ts (vs, Abs (_, T, t)) = add_npvars q p (T::Ts) (vs, t) - | add_npvars _ _ Ts (vs, t) = add_npvars' Ts (vs, t) -and add_npvars' Ts (vs, t) = (case strip_comb t of - (Var (ixn, _), ts) => if test_args [] ts then vs - else Library.foldl (add_npvars' Ts) - (AList.update (op =) (ixn, - Library.foldl (add_funvars Ts) ((these ooo AList.lookup) (op =) vs ixn, ts)) vs, ts) - | (Abs (_, T, u), ts) => Library.foldl (add_npvars' (T::Ts)) (vs, u :: ts) - | (_, ts) => Library.foldl (add_npvars' Ts) (vs, ts)); - -fun prop_vars (Const ("Pure.imp", _) $ P $ Q) = union (op =) (prop_vars P) (prop_vars Q) - | prop_vars (Const ("Pure.all", _) $ Abs (_, _, t)) = prop_vars t - | prop_vars t = (case strip_comb t of - (Var (ixn, _), _) => [ixn] | _ => []); - -fun is_proj t = - let - fun is_p i t = (case strip_comb t of - (Bound j, []) => false - | (Bound j, ts) => j >= i orelse exists (is_p i) ts - | (Abs (_, _, u), _) => is_p (i+1) u - | (_, ts) => exists (is_p i) ts) - in (case strip_abs_body t of - Bound _ => true - | t' => is_p 0 t') - end; - -fun needed_vars prop = - union (op =) (Library.foldl (uncurry (union (op =))) - ([], map (uncurry (insert (op =))) (add_npvars true true [] ([], prop)))) - (prop_vars prop); - -fun gen_axm_proof c name prop = - let - val nvs = needed_vars prop; - val args = map (fn (v as Var (ixn, _)) => - if member (op =) nvs ixn then SOME v else NONE) (vars_of prop) @ - map SOME (frees_of prop); - in - proof_combt' (c (name, prop, NONE), args) - end; - -val axm_proof = gen_axm_proof PAxm; - -fun oracle_proof name prop 
= - if ! proofs = 0 then ((name, Term.dummy), Oracle (name, Term.dummy, NONE)) - else ((name, prop), gen_axm_proof Oracle name prop); - -fun shrink_proof thy = - let - fun shrink ls lev (prf as Abst (a, T, body)) = - let val (b, is, ch, body') = shrink ls (lev+1) body - in (b, is, ch, if ch then Abst (a, T, body') else prf) end - | shrink ls lev (prf as AbsP (a, t, body)) = - let val (b, is, ch, body') = shrink (lev::ls) lev body - in (b orelse member (op =) is 0, map_filter (fn 0 => NONE | i => SOME (i-1)) is, - ch, if ch then AbsP (a, t, body') else prf) - end - | shrink ls lev prf = - let val (is, ch, _, prf') = shrink' ls lev [] [] prf - in (false, is, ch, prf') end - and shrink' ls lev ts prfs (prf as prf1 %% prf2) = - let - val p as (_, is', ch', prf') = shrink ls lev prf2; - val (is, ch, ts', prf'') = shrink' ls lev ts (p::prfs) prf1 - in (union (op =) is is', ch orelse ch', ts', - if ch orelse ch' then prf'' %% prf' else prf) - end - | shrink' ls lev ts prfs (prf as prf1 % t) = - let val (is, ch, (ch', t')::ts', prf') = shrink' ls lev (t::ts) prfs prf1 - in (is, ch orelse ch', ts', - if ch orelse ch' then prf' % t' else prf) end - | shrink' ls lev ts prfs (prf as PBound i) = - (if exists (fn SOME (Bound j) => lev-j <= nth ls i | _ => true) ts - orelse has_duplicates (op =) - (Library.foldl (fn (js, SOME (Bound j)) => j :: js | (js, _) => js) ([], ts)) - orelse exists #1 prfs then [i] else [], false, map (pair false) ts, prf) - | shrink' ls lev ts prfs (Hyp t) = ([], false, map (pair false) ts, Hyp t) - | shrink' ls lev ts prfs (prf as MinProof) = ([], false, map (pair false) ts, prf) - | shrink' ls lev ts prfs (prf as OfClass _) = ([], false, map (pair false) ts, prf) - | shrink' ls lev ts prfs prf = - let - val prop = - (case prf of - PAxm (_, prop, _) => prop - | Oracle (_, prop, _) => prop - | Promise (_, prop, _) => prop - | PThm (_, ((_, prop, _), _)) => prop - | _ => raise Fail "shrink: proof not in normal form"); - val vs = vars_of prop; - val (ts', ts'') = chop (length vs) ts; - val insts = take (length ts') (map (fst o dest_Var) vs) ~~ ts'; - val nvs = Library.foldl (fn (ixns', (ixn, ixns)) => - insert (op =) ixn (case AList.lookup (op =) insts ixn of - SOME (SOME t) => if is_proj t then union (op =) ixns ixns' else ixns' - | _ => union (op =) ixns ixns')) - (needed prop ts'' prfs, add_npvars false true [] ([], prop)); - val insts' = map - (fn (ixn, x as SOME _) => if member (op =) nvs ixn then (false, x) else (true, NONE) - | (_, x) => (false, x)) insts - in ([], false, insts' @ map (pair false) ts'', prf) end - and needed (Const ("Pure.imp", _) $ t $ u) ts ((b, _, _, _)::prfs) = - union (op =) (if b then map (fst o dest_Var) (vars_of t) else []) (needed u ts prfs) - | needed (Var (ixn, _)) (_::_) _ = [ixn] - | needed _ _ _ = []; - in shrink end; - - -(**** Simple first order matching functions for terms and proofs ****) - -exception PMatch; - -(** see pattern.ML **) - -fun flt (i: int) = filter (fn n => n < i); - -fun fomatch Ts tymatch j instsp p = - let - fun mtch (instsp as (tyinsts, insts)) = fn - (Var (ixn, T), t) => - if j>0 andalso not (null (flt j (loose_bnos t))) - then raise PMatch - else (tymatch (tyinsts, fn () => (T, fastype_of1 (Ts, t))), - (ixn, t) :: insts) - | (Free (a, T), Free (b, U)) => - if a=b then (tymatch (tyinsts, K (T, U)), insts) else raise PMatch - | (Const (a, T), Const (b, U)) => - if a=b then (tymatch (tyinsts, K (T, U)), insts) else raise PMatch - | (f $ t, g $ u) => mtch (mtch instsp (f, g)) (t, u) - | (Bound i, Bound j) => if i=j then 
instsp else raise PMatch - | _ => raise PMatch - in mtch instsp (pairself Envir.beta_eta_contract p) end; - -fun match_proof Ts tymatch = - let - fun optmatch _ inst (NONE, _) = inst - | optmatch _ _ (SOME _, NONE) = raise PMatch - | optmatch mtch inst (SOME x, SOME y) = mtch inst (x, y) - - fun matcht Ts j (pinst, tinst) (t, u) = - (pinst, fomatch Ts tymatch j tinst (t, Envir.beta_norm u)); - fun matchT (pinst, (tyinsts, insts)) p = - (pinst, (tymatch (tyinsts, K p), insts)); - fun matchTs inst (Ts, Us) = Library.foldl (uncurry matchT) (inst, Ts ~~ Us); - - fun mtch Ts i j (pinst, tinst) (Hyp (Var (ixn, _)), prf) = - if i = 0 andalso j = 0 then ((ixn, prf) :: pinst, tinst) - else (case apfst (flt i) (apsnd (flt j) - (prf_add_loose_bnos 0 0 prf ([], []))) of - ([], []) => ((ixn, incr_pboundvars (~i) (~j) prf) :: pinst, tinst) - | ([], _) => if j = 0 then - ((ixn, incr_pboundvars (~i) (~j) prf) :: pinst, tinst) - else raise PMatch - | _ => raise PMatch) - | mtch Ts i j inst (prf1 % opt1, prf2 % opt2) = - optmatch (matcht Ts j) (mtch Ts i j inst (prf1, prf2)) (opt1, opt2) - | mtch Ts i j inst (prf1 %% prf2, prf1' %% prf2') = - mtch Ts i j (mtch Ts i j inst (prf1, prf1')) (prf2, prf2') - | mtch Ts i j inst (Abst (_, opT, prf1), Abst (_, opU, prf2)) = - mtch (the_default dummyT opU :: Ts) i (j+1) - (optmatch matchT inst (opT, opU)) (prf1, prf2) - | mtch Ts i j inst (prf1, Abst (_, opU, prf2)) = - mtch (the_default dummyT opU :: Ts) i (j+1) inst - (incr_pboundvars 0 1 prf1 %> Bound 0, prf2) - | mtch Ts i j inst (AbsP (_, opt, prf1), AbsP (_, opu, prf2)) = - mtch Ts (i+1) j (optmatch (matcht Ts j) inst (opt, opu)) (prf1, prf2) - | mtch Ts i j inst (prf1, AbsP (_, _, prf2)) = - mtch Ts (i+1) j inst (incr_pboundvars 1 0 prf1 %% PBound 0, prf2) - | mtch Ts i j inst (PAxm (s1, _, opTs), PAxm (s2, _, opUs)) = - if s1 = s2 then optmatch matchTs inst (opTs, opUs) - else raise PMatch - | mtch Ts i j inst (OfClass (T1, c1), OfClass (T2, c2)) = - if c1 = c2 then matchT inst (T1, T2) - else raise PMatch - | mtch Ts i j inst (PThm (_, ((name1, prop1, opTs), _)), PThm (_, ((name2, prop2, opUs), _))) = - if name1 = name2 andalso prop1 = prop2 then - optmatch matchTs inst (opTs, opUs) - else raise PMatch - | mtch _ _ _ inst (PBound i, PBound j) = if i = j then inst else raise PMatch - | mtch _ _ _ _ _ = raise PMatch - in mtch Ts 0 0 end; - -fun prf_subst (pinst, (tyinsts, insts)) = - let - val substT = Envir.subst_type_same tyinsts; - val substTs = Same.map substT; - - fun subst' lev (Var (xi, _)) = - (case AList.lookup (op =) insts xi of - NONE => raise Same.SAME - | SOME u => incr_boundvars lev u) - | subst' _ (Const (s, T)) = Const (s, substT T) - | subst' _ (Free (s, T)) = Free (s, substT T) - | subst' lev (Abs (a, T, body)) = - (Abs (a, substT T, Same.commit (subst' (lev + 1)) body) - handle Same.SAME => Abs (a, T, subst' (lev + 1) body)) - | subst' lev (f $ t) = - (subst' lev f $ Same.commit (subst' lev) t - handle Same.SAME => f $ subst' lev t) - | subst' _ _ = raise Same.SAME; - - fun subst plev tlev (AbsP (a, t, body)) = - (AbsP (a, Same.map_option (subst' tlev) t, Same.commit (subst (plev + 1) tlev) body) - handle Same.SAME => AbsP (a, t, subst (plev + 1) tlev body)) - | subst plev tlev (Abst (a, T, body)) = - (Abst (a, Same.map_option substT T, Same.commit (subst plev (tlev + 1)) body) - handle Same.SAME => Abst (a, T, subst plev (tlev + 1) body)) - | subst plev tlev (prf %% prf') = - (subst plev tlev prf %% Same.commit (subst plev tlev) prf' - handle Same.SAME => prf %% subst plev tlev prf') - | 
subst plev tlev (prf % t) = - (subst plev tlev prf % Same.commit (Same.map_option (subst' tlev)) t - handle Same.SAME => prf % Same.map_option (subst' tlev) t) - | subst plev tlev (Hyp (Var (xi, _))) = - (case AList.lookup (op =) pinst xi of - NONE => raise Same.SAME - | SOME prf' => incr_pboundvars plev tlev prf') - | subst _ _ (PAxm (id, prop, Ts)) = PAxm (id, prop, Same.map_option substTs Ts) - | subst _ _ (OfClass (T, c)) = OfClass (substT T, c) - | subst _ _ (Oracle (id, prop, Ts)) = Oracle (id, prop, Same.map_option substTs Ts) - | subst _ _ (Promise (i, prop, Ts)) = Promise (i, prop, substTs Ts) - | subst _ _ (PThm (i, ((id, prop, Ts), body))) = - PThm (i, ((id, prop, Same.map_option substTs Ts), body)) - | subst _ _ _ = raise Same.SAME; - in fn t => subst 0 0 t handle Same.SAME => t end; - -(*A fast unification filter: true unless the two terms cannot be unified. - Terms must be NORMAL. Treats all Vars as distinct. *) -fun could_unify prf1 prf2 = - let - fun matchrands (prf1 %% prf2) (prf1' %% prf2') = - could_unify prf2 prf2' andalso matchrands prf1 prf1' - | matchrands (prf % SOME t) (prf' % SOME t') = - Term.could_unify (t, t') andalso matchrands prf prf' - | matchrands (prf % _) (prf' % _) = matchrands prf prf' - | matchrands _ _ = true - - fun head_of (prf %% _) = head_of prf - | head_of (prf % _) = head_of prf - | head_of prf = prf - - in case (head_of prf1, head_of prf2) of - (_, Hyp (Var _)) => true - | (Hyp (Var _), _) => true - | (PAxm (a, _, _), PAxm (b, _, _)) => a = b andalso matchrands prf1 prf2 - | (OfClass (_, c), OfClass (_, d)) => c = d andalso matchrands prf1 prf2 - | (PThm (_, ((a, propa, _), _)), PThm (_, ((b, propb, _), _))) => - a = b andalso propa = propb andalso matchrands prf1 prf2 - | (PBound i, PBound j) => i = j andalso matchrands prf1 prf2 - | (AbsP _, _) => true (*because of possible eta equality*) - | (Abst _, _) => true - | (_, AbsP _) => true - | (_, Abst _) => true - | _ => false - end; - - -(**** rewriting on proof terms ****) - -val no_skel = PBound 0; -val normal_skel = Hyp (Var ((Name.uu, 0), propT)); - -fun rewrite_prf tymatch (rules, procs) prf = - let - fun rew _ _ (Abst (_, _, body) % SOME t) = SOME (prf_subst_bounds [t] body, no_skel) - | rew _ _ (AbsP (_, _, body) %% prf) = SOME (prf_subst_pbounds [prf] body, no_skel) - | rew Ts hs prf = - (case get_first (fn r => r Ts hs prf) procs of - NONE => get_first (fn (prf1, prf2) => SOME (prf_subst - (match_proof Ts tymatch ([], (Vartab.empty, [])) (prf1, prf)) prf2, prf2) - handle PMatch => NONE) (filter (could_unify prf o fst) rules) - | some => some); - - fun rew0 Ts hs (prf as AbsP (_, _, prf' %% PBound 0)) = - if prf_loose_Pbvar1 prf' 0 then rew Ts hs prf - else - let val prf'' = incr_pboundvars (~1) 0 prf' - in SOME (the_default (prf'', no_skel) (rew Ts hs prf'')) end - | rew0 Ts hs (prf as Abst (_, _, prf' % SOME (Bound 0))) = - if prf_loose_bvar1 prf' 0 then rew Ts hs prf - else - let val prf'' = incr_pboundvars 0 (~1) prf' - in SOME (the_default (prf'', no_skel) (rew Ts hs prf'')) end - | rew0 Ts hs prf = rew Ts hs prf; - - fun rew1 _ _ (Hyp (Var _)) _ = NONE - | rew1 Ts hs skel prf = (case rew2 Ts hs skel prf of - SOME prf1 => (case rew0 Ts hs prf1 of - SOME (prf2, skel') => SOME (the_default prf2 (rew1 Ts hs skel' prf2)) - | NONE => SOME prf1) - | NONE => (case rew0 Ts hs prf of - SOME (prf1, skel') => SOME (the_default prf1 (rew1 Ts hs skel' prf1)) - | NONE => NONE)) - - and rew2 Ts hs skel (prf % SOME t) = (case prf of - Abst (_, _, body) => - let val prf' = prf_subst_bounds [t] 
body - in SOME (the_default prf' (rew2 Ts hs no_skel prf')) end - | _ => (case rew1 Ts hs (case skel of skel' % _ => skel' | _ => no_skel) prf of - SOME prf' => SOME (prf' % SOME t) - | NONE => NONE)) - | rew2 Ts hs skel (prf % NONE) = Option.map (fn prf' => prf' % NONE) - (rew1 Ts hs (case skel of skel' % _ => skel' | _ => no_skel) prf) - | rew2 Ts hs skel (prf1 %% prf2) = (case prf1 of - AbsP (_, _, body) => - let val prf' = prf_subst_pbounds [prf2] body - in SOME (the_default prf' (rew2 Ts hs no_skel prf')) end - | _ => - let val (skel1, skel2) = (case skel of - skel1 %% skel2 => (skel1, skel2) - | _ => (no_skel, no_skel)) - in case rew1 Ts hs skel1 prf1 of - SOME prf1' => (case rew1 Ts hs skel2 prf2 of - SOME prf2' => SOME (prf1' %% prf2') - | NONE => SOME (prf1' %% prf2)) - | NONE => (case rew1 Ts hs skel2 prf2 of - SOME prf2' => SOME (prf1 %% prf2') - | NONE => NONE) - end) - | rew2 Ts hs skel (Abst (s, T, prf)) = (case rew1 (the_default dummyT T :: Ts) hs - (case skel of Abst (_, _, skel') => skel' | _ => no_skel) prf of - SOME prf' => SOME (Abst (s, T, prf')) - | NONE => NONE) - | rew2 Ts hs skel (AbsP (s, t, prf)) = (case rew1 Ts (t :: hs) - (case skel of AbsP (_, _, skel') => skel' | _ => no_skel) prf of - SOME prf' => SOME (AbsP (s, t, prf')) - | NONE => NONE) - | rew2 _ _ _ _ = NONE; - - in the_default prf (rew1 [] [] no_skel prf) end; - -fun rewrite_proof thy = rewrite_prf (fn (tyenv, f) => - Sign.typ_match thy (f ()) tyenv handle Type.TYPE_MATCH => raise PMatch); - -fun rewrite_proof_notypes rews = rewrite_prf fst rews; - - -(**** theory data ****) - -structure Data = Theory_Data -( - type T = - (stamp * (proof * proof)) list * - (stamp * (typ list -> term option list -> proof -> (proof * proof) option)) list; - - val empty = ([], []); - val extend = I; - fun merge ((rules1, procs1), (rules2, procs2)) : T = - (AList.merge (op =) (K true) (rules1, rules2), - AList.merge (op =) (K true) (procs1, procs2)); -); - -fun get_data thy = let val (rules, procs) = Data.get thy in (map #2 rules, map #2 procs) end; -fun rew_proof thy = rewrite_prf fst (get_data thy); - -fun add_prf_rrule r = (Data.map o apfst) (cons (stamp (), r)); -fun add_prf_rproc p = (Data.map o apsnd) (cons (stamp (), p)); - - -(***** promises *****) - -fun promise_proof thy i prop = - let - val _ = prop |> Term.exists_subterm (fn t => - (Term.is_Free t orelse Term.is_Var t) andalso - raise Fail ("promise_proof: illegal variable " ^ Syntax.string_of_term_global thy t)); - val _ = prop |> Term.exists_type (Term.exists_subtype - (fn TFree (a, _) => raise Fail ("promise_proof: illegal type variable " ^ quote a) - | _ => false)); - in Promise (i, prop, map TVar (Term.add_tvars prop [])) end; - -fun fulfill_norm_proof thy ps body0 = - let - val PBody {oracles = oracles0, thms = thms0, proof = proof0} = body0; - val oracles = - unions_oracles - (fold (fn (_, PBody {oracles, ...}) => not (null oracles) ? cons oracles) ps [oracles0]); - val thms = - unions_thms (fold (fn (_, PBody {thms, ...}) => not (null thms) ? 
cons thms) ps [thms0]); - val proofs = fold (fn (i, PBody {proof, ...}) => Inttab.update (i, proof)) ps Inttab.empty; - - fun fill (Promise (i, prop, Ts)) = - (case Inttab.lookup proofs i of - NONE => NONE - | SOME prf => SOME (instantiate (Term.add_tvars prop [] ~~ Ts, []) prf, normal_skel)) - | fill _ = NONE; - val (rules, procs) = get_data thy; - val proof = rewrite_prf fst (rules, K (K fill) :: procs) proof0; - in PBody {oracles = oracles, thms = thms, proof = proof} end; - -fun fulfill_proof_future thy promises postproc body = - let - fun fulfill () = - postproc (fulfill_norm_proof thy (map (apsnd Future.join) promises) (Future.join body)); - in - if null promises then Future.map postproc body - else if Future.is_finished body andalso length promises = 1 then - Future.map (fn _ => fulfill ()) (snd (hd promises)) - else - (singleton o Future.forks) - {name = "Proofterm.fulfill_proof_future", group = NONE, - deps = Future.task_of body :: map (Future.task_of o snd) promises, pri = 0, - interrupts = true} - fulfill - end; - - -(***** abstraction over sort constraints *****) - -fun unconstrainT_prf thy (atyp_map, constraints) = - let - fun hyp_map hyp = - (case AList.lookup (op =) constraints hyp of - SOME t => Hyp t - | NONE => raise Fail "unconstrainT_prf: missing constraint"); - - val typ = Term_Subst.map_atypsT_same (Type.strip_sorts o atyp_map); - fun ofclass (ty, c) = - let val ty' = Term.map_atyps atyp_map ty; - in the_single (of_sort_proof thy hyp_map (ty', [c])) end; - in - Same.commit (map_proof_same (Term_Subst.map_types_same typ) typ ofclass) - #> fold_rev (implies_intr_proof o snd) constraints - end; - -fun unconstrainT_body thy constrs (PBody {oracles, thms, proof}) = - PBody - {oracles = oracles, (* FIXME merge (!), unconstrain (!?!) *) - thms = thms, (* FIXME merge (!) 
*) - proof = unconstrainT_prf thy constrs proof}; - - -(***** theorems *****) - -fun prepare_thm_proof thy name shyps hyps concl promises body = - let - val PBody {oracles = oracles0, thms = thms0, proof = prf} = body; - val prop = Logic.list_implies (hyps, concl); - val nvs = needed_vars prop; - val args = map (fn (v as Var (ixn, _)) => - if member (op =) nvs ixn then SOME v else NONE) (vars_of prop) @ - map SOME (frees_of prop); - - val ((atyp_map, constraints, outer_constraints), prop1) = Logic.unconstrainT shyps prop; - val postproc = unconstrainT_body thy (atyp_map, constraints); - val args1 = - (map o Option.map o Term.map_types o Term.map_atyps) - (Type.strip_sorts o atyp_map) args; - val argsP = map OfClass outer_constraints @ map Hyp hyps; - - fun make_body0 proof0 = PBody {oracles = oracles0, thms = thms0, proof = proof0}; - val body0 = - if not (proofs_enabled ()) then Future.value (make_body0 MinProof) - else - (singleton o Future.cond_forks) - {name = "Proofterm.prepare_thm_proof", group = NONE, - deps = [], pri = 0, interrupts = true} - (fn () => - make_body0 - (#4 (shrink_proof thy [] 0 (rew_proof thy (fold_rev implies_intr_proof hyps prf))))); - - fun new_prf () = (serial (), fulfill_proof_future thy promises postproc body0); - val (i, body') = - (*non-deterministic, depends on unknown promises*) - (case strip_combt (fst (strip_combP prf)) of - (PThm (i, ((old_name, prop', NONE), body')), args') => - if (old_name = "" orelse old_name = name) andalso prop1 = prop' andalso args = args' - then (i, body') - else new_prf () - | _ => new_prf ()); - val head = PThm (i, ((name, prop1, NONE), body')); - in ((i, (name, prop1, body')), head, args, argsP, args1) end; - -fun thm_proof thy name shyps hyps concl promises body = - let val (pthm, head, args, argsP, _) = prepare_thm_proof thy name shyps hyps concl promises body - in (pthm, proof_combP (proof_combt' (head, args), argsP)) end; - -fun unconstrain_thm_proof thy shyps concl promises body = - let - val (pthm, head, _, _, args) = prepare_thm_proof thy "" shyps [] concl promises body - in (pthm, proof_combt' (head, args)) end; - - -fun get_name shyps hyps prop prf = - let val (_, prop) = Logic.unconstrainT shyps (Logic.list_implies (hyps, prop)) in - (case strip_combt (fst (strip_combP prf)) of - (PThm (_, ((name, prop', _), _)), _) => if prop = prop' then name else "" - | _ => "") - end; - -fun guess_name (PThm (_, ((name, _, _), _))) = name - | guess_name (prf %% Hyp _) = guess_name prf - | guess_name (prf %% OfClass _) = guess_name prf - | guess_name (prf % NONE) = guess_name prf - | guess_name (prf % SOME (Var _)) = guess_name prf - | guess_name _ = ""; - -end; - -structure Basic_Proofterm : BASIC_PROOFTERM = Proofterm; -open Basic_Proofterm; diff --git a/core/Pure/pure_syn.ML b/core/Pure/pure_syn.ML deleted file mode 100644 index 0ac39e9f..00000000 --- a/core/Pure/pure_syn.ML +++ /dev/null @@ -1,34 +0,0 @@ -(* Title: Pure/pure_syn.ML - Author: Makarius - -Minimal outer syntax for bootstrapping Isabelle/Pure. -*) - -structure Pure_Syn: sig end = -struct - -val _ = - Outer_Syntax.command - (("theory", Keyword.tag_theory Keyword.thy_begin), @{here}) "begin theory" - (Thy_Header.args >> (fn header => - Toplevel.init_theory - (fn () => Thy_Info.toplevel_begin_theory (! 
ProofGeneral.master_path) header))); - -val _ = - Outer_Syntax.command - (("ML_file", Keyword.tag_ml Keyword.thy_load), @{here}) "ML text from file" - (Resources.parse_files "ML_file" >> (fn files => Toplevel.generic_theory (fn gthy => - let - val [{src_path, lines, digest, pos}] = files (Context.theory_of gthy); - val provide = Resources.provide (src_path, digest); - val source = {delimited = true, text = cat_lines lines, pos = pos}; - val flags = {SML = false, exchange = false, redirect = true, verbose = true}; - in - gthy - |> ML_Context.exec (fn () => ML_Context.eval_source flags source) - |> Local_Theory.propagate_ml_env - |> Context.mapping provide (Local_Theory.background_theory provide) - end))); - -end; - diff --git a/core/Pure/pure_thy.ML b/core/Pure/pure_thy.ML deleted file mode 100644 index 07fce209..00000000 --- a/core/Pure/pure_thy.ML +++ /dev/null @@ -1,231 +0,0 @@ -(* Title: Pure/pure_thy.ML - Author: Markus Wenzel, TU Muenchen - -Pure theory syntax and further logical content. -*) - -signature PURE_THY = -sig - val old_appl_syntax: theory -> bool - val old_appl_syntax_setup: theory -> theory - val token_markers: string list -end; - -structure Pure_Thy: PURE_THY = -struct - -val typ = Simple_Syntax.read_typ; -val prop = Simple_Syntax.read_prop; - -val tycon = Lexicon.mark_type; -val const = Lexicon.mark_const; - -val qualify = Binding.qualify true Context.PureN; - - -(* application syntax variants *) - -val appl_syntax = - [("_appl", typ "('b => 'a) => args => logic", Mixfix ("(1_/(1'(_')))", [1000, 0], 1000)), - ("_appl", typ "('b => 'a) => args => aprop", Mixfix ("(1_/(1'(_')))", [1000, 0], 1000))]; - -val applC_syntax = - [("", typ "'a => cargs", Delimfix "_"), - ("_cargs", typ "'a => cargs => cargs", Mixfix ("_/ _", [1000, 1000], 1000)), - ("_applC", typ "('b => 'a) => cargs => logic", Mixfix ("(1_/ _)", [1000, 1000], 999)), - ("_applC", typ "('b => 'a) => cargs => aprop", Mixfix ("(1_/ _)", [1000, 1000], 999))]; - -structure Old_Appl_Syntax = Theory_Data -( - type T = bool; - val empty = false; - val extend = I; - fun merge (b1, b2) : T = - if b1 = b2 then b1 - else error "Cannot merge theories with different application syntax"; -); - -val old_appl_syntax = Old_Appl_Syntax.get; - -val old_appl_syntax_setup = - Old_Appl_Syntax.put true #> - Sign.del_syntax Syntax.mode_default applC_syntax #> - Sign.add_syntax Syntax.mode_default appl_syntax; - - -(* main content *) - -val token_markers = - ["_tfree", "_tvar", "_free", "_bound", "_loose", "_var", "_numeral", "_inner_string"]; - -val _ = Theory.setup - (Sign.map_naming (Name_Space.set_theory_name Context.PureN) #> - Old_Appl_Syntax.put false #> - Sign.add_types_global - [(Binding.make ("fun", @{here}), 2, NoSyn), - (Binding.make ("prop", @{here}), 0, NoSyn), - (Binding.make ("itself", @{here}), 1, NoSyn), - (Binding.make ("dummy", @{here}), 0, NoSyn)] - #> Sign.add_nonterminals_global - (map (fn name => Binding.make (name, @{here})) - (Lexicon.terminals @ ["logic", "type", "types", "sort", "classes", - "args", "cargs", "pttrn", "pttrns", "idt", "idts", "aprop", "asms", - "any", "prop'", "num_const", "float_const", "xnum_const", "num_position", - "float_position", "xnum_position", "index", "struct", "tid_position", - "tvar_position", "id_position", "longid_position", "var_position", - "str_position", "string_position", "cartouche_position", "type_name", - "class_name"])) - #> Sign.add_syntax Syntax.mode_default (map (fn x => (x, typ "'a", NoSyn)) token_markers) - #> Sign.add_syntax Syntax.mode_default - [("", typ "prop' 
=> prop", Delimfix "_"), - ("", typ "logic => any", Delimfix "_"), - ("", typ "prop' => any", Delimfix "_"), - ("", typ "logic => logic", Delimfix "'(_')"), - ("", typ "prop' => prop'", Delimfix "'(_')"), - ("_constrain", typ "logic => type => logic", Mixfix ("_::_", [4, 0], 3)), - ("_constrain", typ "prop' => type => prop'", Mixfix ("_::_", [4, 0], 3)), - ("_ignore_type", typ "'a", NoSyn), - ("", typ "tid_position => type", Delimfix "_"), - ("", typ "tvar_position => type", Delimfix "_"), - ("", typ "type_name => type", Delimfix "_"), - ("_type_name", typ "id => type_name", Delimfix "_"), - ("_type_name", typ "longid => type_name", Delimfix "_"), - ("_ofsort", typ "tid_position => sort => type", Mixfix ("_::_", [1000, 0], 1000)), - ("_ofsort", typ "tvar_position => sort => type", Mixfix ("_::_", [1000, 0], 1000)), - ("_dummy_ofsort", typ "sort => type", Mixfix ("'_()::_", [0], 1000)), - ("", typ "class_name => sort", Delimfix "_"), - ("_class_name", typ "id => class_name", Delimfix "_"), - ("_class_name", typ "longid => class_name", Delimfix "_"), - ("_topsort", typ "sort", Delimfix "{}"), - ("_sort", typ "classes => sort", Delimfix "{_}"), - ("", typ "class_name => classes", Delimfix "_"), - ("_classes", typ "class_name => classes => classes", Delimfix "_,_"), - ("_tapp", typ "type => type_name => type", Mixfix ("_ _", [1000, 0], 1000)), - ("_tappl", typ "type => types => type_name => type", Delimfix "((1'(_,/ _')) _)"), - ("", typ "type => types", Delimfix "_"), - ("_types", typ "type => types => types", Delimfix "_,/ _"), - ("\\<^type>fun", typ "type => type => type", Mixfix ("(_/ => _)", [1, 0], 0)), - ("_bracket", typ "types => type => type", Mixfix ("([_]/ => _)", [0, 0], 0)), - ("", typ "type => type", Delimfix "'(_')"), - ("\\<^type>dummy", typ "type", Delimfix "'_"), - ("_type_prop", typ "'a", NoSyn), - ("_lambda", typ "pttrns => 'a => logic", Mixfix ("(3%_./ _)", [0, 3], 3)), - ("_abs", typ "'a", NoSyn), - ("", typ "'a => args", Delimfix "_"), - ("_args", typ "'a => args => args", Delimfix "_,/ _"), - ("", typ "id_position => idt", Delimfix "_"), - ("_idtdummy", typ "idt", Delimfix "'_"), - ("_idtyp", typ "id_position => type => idt", Mixfix ("_::_", [], 0)), - ("_idtypdummy", typ "type => idt", Mixfix ("'_()::_", [], 0)), - ("", typ "idt => idt", Delimfix "'(_')"), - ("", typ "idt => idts", Delimfix "_"), - ("_idts", typ "idt => idts => idts", Mixfix ("_/ _", [1, 0], 0)), - ("", typ "idt => pttrn", Delimfix "_"), - ("", typ "pttrn => pttrns", Delimfix "_"), - ("_pttrns", typ "pttrn => pttrns => pttrns", Mixfix ("_/ _", [1, 0], 0)), - ("", typ "aprop => aprop", Delimfix "'(_')"), - ("", typ "id_position => aprop", Delimfix "_"), - ("", typ "longid_position => aprop", Delimfix "_"), - ("", typ "var_position => aprop", Delimfix "_"), - ("_DDDOT", typ "aprop", Delimfix "..."), - ("_aprop", typ "aprop => prop", Delimfix "PROP _"), - ("_asm", typ "prop => asms", Delimfix "_"), - ("_asms", typ "prop => asms => asms", Delimfix "_;/ _"), - ("_bigimpl", typ "asms => prop => prop", Mixfix ("((3[| _ |])/ ==> _)", [0, 1], 1)), - ("_ofclass", typ "type => logic => prop", Delimfix "(1OFCLASS/(1'(_,/ _')))"), - ("_mk_ofclass", typ "dummy", NoSyn), - ("_TYPE", typ "type => logic", Delimfix "(1TYPE/(1'(_')))"), - ("", typ "id_position => logic", Delimfix "_"), - ("", typ "longid_position => logic", Delimfix "_"), - ("", typ "var_position => logic", Delimfix "_"), - ("_DDDOT", typ "logic", Delimfix "..."), - ("_strip_positions", typ "'a", NoSyn), - ("_position", typ "num_token => num_position", 
Delimfix "_"), - ("_position", typ "float_token => float_position", Delimfix "_"), - ("_position", typ "xnum_token => xnum_position", Delimfix "_"), - ("_constify", typ "num_position => num_const", Delimfix "_"), - ("_constify", typ "float_position => float_const", Delimfix "_"), - ("_constify", typ "xnum_position => xnum_const", Delimfix "_"), - ("_index", typ "logic => index", Delimfix "(00\\<^bsub>_\\<^esub>)"), - ("_indexdefault", typ "index", Delimfix ""), - ("_indexvar", typ "index", Delimfix "'\\"), - ("_struct", typ "index => logic", Mixfix ("\\_", [1000], 1000)), - ("_update_name", typ "idt", NoSyn), - ("_constrainAbs", typ "'a", NoSyn), - ("_position_sort", typ "tid => tid_position", Delimfix "_"), - ("_position_sort", typ "tvar => tvar_position", Delimfix "_"), - ("_position", typ "id => id_position", Delimfix "_"), - ("_position", typ "longid => longid_position", Delimfix "_"), - ("_position", typ "var => var_position", Delimfix "_"), - ("_position", typ "str_token => str_position", Delimfix "_"), - ("_position", typ "string_token => string_position", Delimfix "_"), - ("_position", typ "cartouche => cartouche_position", Delimfix "_"), - ("_type_constraint_", typ "'a", NoSyn), - ("_context_const", typ "id_position => logic", Delimfix "CONST _"), - ("_context_const", typ "id_position => aprop", Delimfix "CONST _"), - ("_context_const", typ "longid_position => logic", Delimfix "CONST _"), - ("_context_const", typ "longid_position => aprop", Delimfix "CONST _"), - ("_context_xconst", typ "id_position => logic", Delimfix "XCONST _"), - ("_context_xconst", typ "id_position => aprop", Delimfix "XCONST _"), - ("_context_xconst", typ "longid_position => logic", Delimfix "XCONST _"), - ("_context_xconst", typ "longid_position => aprop", Delimfix "XCONST _"), - (const "Pure.imp", typ "prop => prop => prop", Delimfix "op ==>"), - (const "Pure.dummy_pattern", typ "aprop", Delimfix "'_"), - ("_sort_constraint", typ "type => prop", Delimfix "(1SORT'_CONSTRAINT/(1'(_')))"), - (const "Pure.term", typ "logic => prop", Delimfix "TERM _"), - (const "Pure.conjunction", typ "prop => prop => prop", Infixr ("&&&", 2))] - #> Sign.add_syntax Syntax.mode_default applC_syntax - #> Sign.add_syntax (Symbol.xsymbolsN, true) - [(tycon "fun", typ "type => type => type", Mixfix ("(_/ \\ _)", [1, 0], 0)), - ("_bracket", typ "types => type => type", Mixfix ("([_]/ \\ _)", [0, 0], 0)), - ("_ofsort", typ "tid_position => sort => type", Mixfix ("_\\_", [1000, 0], 1000)), - ("_constrain", typ "logic => type => logic", Mixfix ("_\\_", [4, 0], 3)), - ("_constrain", typ "prop' => type => prop'", Mixfix ("_\\_", [4, 0], 3)), - ("_idtyp", typ "id_position => type => idt", Mixfix ("_\\_", [], 0)), - ("_idtypdummy", typ "type => idt", Mixfix ("'_()\\_", [], 0)), - ("_lambda", typ "pttrns => 'a => logic", Mixfix ("(3\\_./ _)", [0, 3], 3)), - (const "Pure.eq", typ "'a => 'a => prop", Infix ("\\", 2)), - (const "Pure.all_binder", typ "idts => prop => prop", Mixfix ("(3\\_./ _)", [0, 0], 0)), - (const "Pure.imp", typ "prop => prop => prop", Infixr ("\\", 1)), - ("_DDDOT", typ "aprop", Delimfix "\\"), - ("_bigimpl", typ "asms => prop => prop", Mixfix ("((1\\_\\)/ \\ _)", [0, 1], 1)), - ("_DDDOT", typ "logic", Delimfix "\\")] - #> Sign.add_syntax ("", false) - [(const "Pure.prop", typ "prop => prop", Mixfix ("_", [0], 0))] - #> Sign.add_syntax ("HTML", false) - [("_lambda", typ "pttrns => 'a => logic", Mixfix ("(3\\_./ _)", [0, 3], 3))] - #> Sign.add_consts - [(qualify (Binding.make ("eq", @{here})), typ "'a => 'a => prop", Infix 
("==", 2)), - (qualify (Binding.make ("imp", @{here})), typ "prop => prop => prop", Mixfix ("(_/ ==> _)", [2, 1], 1)), - (qualify (Binding.make ("all", @{here})), typ "('a => prop) => prop", Binder ("!!", 0, 0)), - (qualify (Binding.make ("prop", @{here})), typ "prop => prop", NoSyn), - (qualify (Binding.make ("type", @{here})), typ "'a itself", NoSyn), - (qualify (Binding.make ("dummy_pattern", @{here})), typ "'a", Delimfix "'_")] - #> Theory.add_deps_global "Pure.eq" ("Pure.eq", typ "'a => 'a => prop") [] - #> Theory.add_deps_global "Pure.imp" ("Pure.imp", typ "prop => prop => prop") [] - #> Theory.add_deps_global "Pure.all" ("Pure.all", typ "('a => prop) => prop") [] - #> Theory.add_deps_global "Pure.type" ("Pure.type", typ "'a itself") [] - #> Theory.add_deps_global "Pure.dummy_pattern" ("Pure.dummy_pattern", typ "'a") [] - #> Sign.parse_ast_translation Syntax_Trans.pure_parse_ast_translation - #> Sign.parse_translation Syntax_Trans.pure_parse_translation - #> Sign.print_ast_translation Syntax_Trans.pure_print_ast_translation - #> Sign.add_consts - [(qualify (Binding.make ("term", @{here})), typ "'a => prop", NoSyn), - (qualify (Binding.make ("sort_constraint", @{here})), typ "'a itself => prop", NoSyn), - (qualify (Binding.make ("conjunction", @{here})), typ "prop => prop => prop", NoSyn)] - #> Sign.local_path - #> (Global_Theory.add_defs false o map Thm.no_attributes) - [(Binding.make ("prop_def", @{here}), - prop "(CONST Pure.prop :: prop => prop) (A::prop) == A::prop"), - (Binding.make ("term_def", @{here}), - prop "(CONST Pure.term :: 'a => prop) (x::'a) == (!!A::prop. A ==> A)"), - (Binding.make ("sort_constraint_def", @{here}), - prop "(CONST Pure.sort_constraint :: 'a itself => prop) (CONST Pure.type :: 'a itself) ==\ - \ (CONST Pure.term :: 'a itself => prop) (CONST Pure.type :: 'a itself)"), - (Binding.make ("conjunction_def", @{here}), - prop "(A &&& B) == (!!C::prop. (A ==> B ==> C) ==> C)")] #> snd - #> Global_Theory.add_thmss [((Binding.make ("nothing", @{here}), []), [])] #> snd - #> fold (fn (a, prop) => - snd o Thm.add_axiom_global (Binding.make (a, @{here}), prop)) Proofterm.equality_axms); - -end; diff --git a/core/Pure/raw_simplifier.ML b/core/Pure/raw_simplifier.ML deleted file mode 100644 index b428460d..00000000 --- a/core/Pure/raw_simplifier.ML +++ /dev/null @@ -1,1410 +0,0 @@ -(* Title: Pure/raw_simplifier.ML - Author: Tobias Nipkow and Stefan Berghofer, TU Muenchen - -Higher-order Simplification. 
-*) - -infix 4 - addsimps delsimps addsimprocs delsimprocs - setloop addloop delloop - setSSolver addSSolver setSolver addSolver; - -signature BASIC_RAW_SIMPLIFIER = -sig - val simp_depth_limit: int Config.T - val simp_trace_depth_limit: int Config.T - val simp_debug: bool Config.T - val simp_trace: bool Config.T - type cong_name = bool * string - type rrule - val mk_rrules: Proof.context -> thm list -> rrule list - val eq_rrule: rrule * rrule -> bool - type proc - type solver - val mk_solver: string -> (Proof.context -> int -> tactic) -> solver - type simpset - val empty_ss: simpset - val merge_ss: simpset * simpset -> simpset - val dest_ss: simpset -> - {simps: (string * thm) list, - procs: (string * cterm list) list, - congs: (cong_name * thm) list, - weak_congs: cong_name list, - loopers: string list, - unsafe_solvers: string list, - safe_solvers: string list} - type simproc - val eq_simproc: simproc * simproc -> bool - val transform_simproc: morphism -> simproc -> simproc - val make_simproc: {name: string, lhss: cterm list, - proc: morphism -> Proof.context -> cterm -> thm option, identifier: thm list} -> simproc - val mk_simproc: string -> cterm list -> (Proof.context -> term -> thm option) -> simproc - val simpset_of: Proof.context -> simpset - val put_simpset: simpset -> Proof.context -> Proof.context - val simpset_map: Proof.context -> (Proof.context -> Proof.context) -> simpset -> simpset - val map_theory_simpset: (Proof.context -> Proof.context) -> theory -> theory - val empty_simpset: Proof.context -> Proof.context - val clear_simpset: Proof.context -> Proof.context - val addsimps: Proof.context * thm list -> Proof.context - val delsimps: Proof.context * thm list -> Proof.context - val addsimprocs: Proof.context * simproc list -> Proof.context - val delsimprocs: Proof.context * simproc list -> Proof.context - val setloop: Proof.context * (Proof.context -> int -> tactic) -> Proof.context - val addloop: Proof.context * (string * (Proof.context -> int -> tactic)) -> Proof.context - val delloop: Proof.context * string -> Proof.context - val setSSolver: Proof.context * solver -> Proof.context - val addSSolver: Proof.context * solver -> Proof.context - val setSolver: Proof.context * solver -> Proof.context - val addSolver: Proof.context * solver -> Proof.context - - val rewrite_rule: Proof.context -> thm list -> thm -> thm - val rewrite_goals_rule: Proof.context -> thm list -> thm -> thm - val rewrite_goals_tac: Proof.context -> thm list -> tactic - val rewrite_goal_tac: Proof.context -> thm list -> int -> tactic - val prune_params_tac: Proof.context -> tactic - val fold_rule: Proof.context -> thm list -> thm -> thm - val fold_goals_tac: Proof.context -> thm list -> tactic - val norm_hhf: Proof.context -> thm -> thm - val norm_hhf_protect: Proof.context -> thm -> thm -end; - -signature RAW_SIMPLIFIER = -sig - include BASIC_RAW_SIMPLIFIER - exception SIMPLIFIER of string * thm list - type trace_ops - val set_trace_ops: trace_ops -> theory -> theory - val internal_ss: simpset -> - {congs: (cong_name * thm) list * cong_name list, - procs: proc Net.net, - mk_rews: - {mk: Proof.context -> thm -> thm list, - mk_cong: Proof.context -> thm -> thm, - mk_sym: Proof.context -> thm -> thm option, - mk_eq_True: Proof.context -> thm -> thm option, - reorient: Proof.context -> term list -> term -> term -> bool}, - termless: term * term -> bool, - subgoal_tac: Proof.context -> int -> tactic, - loop_tacs: (string * (Proof.context -> int -> tactic)) list, - solvers: solver list * solver list} - val 
map_ss: (Proof.context -> Proof.context) -> Context.generic -> Context.generic - val prems_of: Proof.context -> thm list - val add_simp: thm -> Proof.context -> Proof.context - val del_simp: thm -> Proof.context -> Proof.context - val add_eqcong: thm -> Proof.context -> Proof.context - val del_eqcong: thm -> Proof.context -> Proof.context - val add_cong: thm -> Proof.context -> Proof.context - val del_cong: thm -> Proof.context -> Proof.context - val mksimps: Proof.context -> thm -> thm list - val set_mksimps: (Proof.context -> thm -> thm list) -> Proof.context -> Proof.context - val set_mkcong: (Proof.context -> thm -> thm) -> Proof.context -> Proof.context - val set_mksym: (Proof.context -> thm -> thm option) -> Proof.context -> Proof.context - val set_mkeqTrue: (Proof.context -> thm -> thm option) -> Proof.context -> Proof.context - val set_termless: (term * term -> bool) -> Proof.context -> Proof.context - val set_subgoaler: (Proof.context -> int -> tactic) -> Proof.context -> Proof.context - val solver: Proof.context -> solver -> int -> tactic - val simp_depth_limit_raw: Config.raw - val default_mk_sym: Proof.context -> thm -> thm option - val simproc_global_i: theory -> string -> term list -> - (Proof.context -> term -> thm option) -> simproc - val simproc_global: theory -> string -> string list -> - (Proof.context -> term -> thm option) -> simproc - val simp_trace_depth_limit_raw: Config.raw - val simp_trace_depth_limit_default: int Unsynchronized.ref - val simp_trace_default: bool Unsynchronized.ref - val simp_trace_raw: Config.raw - val simp_debug_raw: Config.raw - val add_prems: thm list -> Proof.context -> Proof.context - val set_reorient: (Proof.context -> term list -> term -> term -> bool) -> - Proof.context -> Proof.context - val set_solvers: solver list -> Proof.context -> Proof.context - val rewrite_cterm: bool * bool * bool -> - (Proof.context -> thm -> thm option) -> Proof.context -> conv - val rewrite_term: theory -> thm list -> (term -> term option) list -> term -> term - val rewrite_thm: bool * bool * bool -> - (Proof.context -> thm -> thm option) -> Proof.context -> thm -> thm - val generic_rewrite_goal_tac: bool * bool * bool -> - (Proof.context -> tactic) -> Proof.context -> int -> tactic - val rewrite: Proof.context -> bool -> thm list -> conv -end; - -structure Raw_Simplifier: RAW_SIMPLIFIER = -struct - -(** datatype simpset **) - -(* congruence rules *) - -type cong_name = bool * string; - -fun cong_name (Const (a, _)) = SOME (true, a) - | cong_name (Free (a, _)) = SOME (false, a) - | cong_name _ = NONE; - - -(* rewrite rules *) - -type rrule = - {thm: thm, (*the rewrite rule*) - name: string, (*name of theorem from which rewrite rule was extracted*) - lhs: term, (*the left-hand side*) - elhs: cterm, (*the etac-contracted lhs*) - extra: bool, (*extra variables outside of elhs*) - fo: bool, (*use first-order matching*) - perm: bool}; (*the rewrite rule is permutative*) - -(* -Remarks: - - elhs is used for matching, - lhs only for preservation of bound variable names; - - fo is set iff - either elhs is first-order (no Var is applied), - in which case fo-matching is complete, - or elhs is not a pattern, - in which case there is nothing better to do; -*) - -fun eq_rrule ({thm = thm1, ...}: rrule, {thm = thm2, ...}: rrule) = - Thm.eq_thm_prop (thm1, thm2); - -(* FIXME: it seems that the conditions on extra variables are too liberal if -prems are nonempty: does solving the prems really guarantee instantiation of -all its Vars? 
Better: a dynamic check each time a rule is applied. -*) -fun rewrite_rule_extra_vars prems elhs erhs = - let - val elhss = elhs :: prems; - val tvars = fold Term.add_tvars elhss []; - val vars = fold Term.add_vars elhss []; - in - erhs |> Term.exists_type (Term.exists_subtype - (fn TVar v => not (member (op =) tvars v) | _ => false)) orelse - erhs |> Term.exists_subterm - (fn Var v => not (member (op =) vars v) | _ => false) - end; - -fun rrule_extra_vars elhs thm = - rewrite_rule_extra_vars [] (term_of elhs) (Thm.full_prop_of thm); - -fun mk_rrule2 {thm, name, lhs, elhs, perm} = - let - val t = term_of elhs; - val fo = Pattern.first_order t orelse not (Pattern.pattern t); - val extra = rrule_extra_vars elhs thm; - in {thm = thm, name = name, lhs = lhs, elhs = elhs, extra = extra, fo = fo, perm = perm} end; - -(*simple test for looping rewrite rules and stupid orientations*) -fun default_reorient ctxt prems lhs rhs = - rewrite_rule_extra_vars prems lhs rhs - orelse - is_Var (head_of lhs) - orelse -(* turns t = x around, which causes a headache if x is a local variable - - usually it is very useful :-( - is_Free rhs andalso not(is_Free lhs) andalso not(Logic.occs(rhs,lhs)) - andalso not(exists_subterm is_Var lhs) - orelse -*) - exists (fn t => Logic.occs (lhs, t)) (rhs :: prems) - orelse - null prems andalso Pattern.matches (Proof_Context.theory_of ctxt) (lhs, rhs) - (*the condition "null prems" is necessary because conditional rewrites - with extra variables in the conditions may terminate although - the rhs is an instance of the lhs; example: ?m < ?n ==> f(?n) == f(?m)*) - orelse - is_Const lhs andalso not (is_Const rhs); - - -(* simplification procedures *) - -datatype proc = - Proc of - {name: string, - lhs: cterm, - proc: Proof.context -> cterm -> thm option, - id: stamp * thm list}; - -fun eq_procid ((s1: stamp, ths1: thm list), (s2, ths2)) = - s1 = s2 andalso eq_list Thm.eq_thm (ths1, ths2); - -fun eq_proc (Proc {id = id1, ...}, Proc {id = id2, ...}) = eq_procid (id1, id2); - - -(* solvers *) - -datatype solver = - Solver of - {name: string, - solver: Proof.context -> int -> tactic, - id: stamp}; - -fun mk_solver name solver = Solver {name = name, solver = solver, id = stamp ()}; - -fun solver_name (Solver {name, ...}) = name; -fun solver ctxt (Solver {solver = tac, ...}) = tac ctxt; -fun eq_solver (Solver {id = id1, ...}, Solver {id = id2, ...}) = (id1 = id2); - - -(* simplification sets *) - -(*A simpset contains data required during conversion: - rules: discrimination net of rewrite rules; - prems: current premises; - depth: simp_depth and exceeded flag; - congs: association list of congruence rules and - a list of `weak' congruence constants. - A congruence is `weak' if it avoids normalization of some argument. 
- procs: discrimination net of simplification procedures - (functions that prove rewrite rules on the fly); - mk_rews: - mk: turn simplification thms into rewrite rules; - mk_cong: prepare congruence rules; - mk_sym: turn == around; - mk_eq_True: turn P into P == True; - termless: relation for ordered rewriting;*) - -datatype simpset = - Simpset of - {rules: rrule Net.net, - prems: thm list, - depth: int * bool Unsynchronized.ref} * - {congs: (cong_name * thm) list * cong_name list, - procs: proc Net.net, - mk_rews: - {mk: Proof.context -> thm -> thm list, - mk_cong: Proof.context -> thm -> thm, - mk_sym: Proof.context -> thm -> thm option, - mk_eq_True: Proof.context -> thm -> thm option, - reorient: Proof.context -> term list -> term -> term -> bool}, - termless: term * term -> bool, - subgoal_tac: Proof.context -> int -> tactic, - loop_tacs: (string * (Proof.context -> int -> tactic)) list, - solvers: solver list * solver list}; - -fun internal_ss (Simpset (_, ss2)) = ss2; - -fun make_ss1 (rules, prems, depth) = {rules = rules, prems = prems, depth = depth}; - -fun map_ss1 f {rules, prems, depth} = make_ss1 (f (rules, prems, depth)); - -fun make_ss2 (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers) = - {congs = congs, procs = procs, mk_rews = mk_rews, termless = termless, - subgoal_tac = subgoal_tac, loop_tacs = loop_tacs, solvers = solvers}; - -fun map_ss2 f {congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers} = - make_ss2 (f (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers)); - -fun make_simpset (args1, args2) = Simpset (make_ss1 args1, make_ss2 args2); - -fun dest_ss (Simpset ({rules, ...}, {congs, procs, loop_tacs, solvers, ...})) = - {simps = Net.entries rules - |> map (fn {name, thm, ...} => (name, thm)), - procs = Net.entries procs - |> map (fn Proc {name, lhs, id, ...} => ((name, lhs), id)) - |> partition_eq (eq_snd eq_procid) - |> map (fn ps => (fst (fst (hd ps)), map (snd o fst) ps)), - congs = #1 congs, - weak_congs = #2 congs, - loopers = map fst loop_tacs, - unsafe_solvers = map solver_name (#1 solvers), - safe_solvers = map solver_name (#2 solvers)}; - - -(* empty *) - -fun init_ss depth mk_rews termless subgoal_tac solvers = - make_simpset ((Net.empty, [], depth), - (([], []), Net.empty, mk_rews, termless, subgoal_tac, [], solvers)); - -fun default_mk_sym _ th = SOME (th RS Drule.symmetric_thm); - -val empty_ss = - init_ss (0, Unsynchronized.ref false) - {mk = fn _ => fn th => if can Logic.dest_equals (Thm.concl_of th) then [th] else [], - mk_cong = K I, - mk_sym = default_mk_sym, - mk_eq_True = K (K NONE), - reorient = default_reorient} - Term_Ord.termless (K (K no_tac)) ([], []); - - -(* merge *) (*NOTE: ignores some fields of 2nd simpset*) - -fun merge_ss (ss1, ss2) = - if pointer_eq (ss1, ss2) then ss1 - else - let - val Simpset ({rules = rules1, prems = prems1, depth = depth1}, - {congs = (congs1, weak1), procs = procs1, mk_rews, termless, subgoal_tac, - loop_tacs = loop_tacs1, solvers = (unsafe_solvers1, solvers1)}) = ss1; - val Simpset ({rules = rules2, prems = prems2, depth = depth2}, - {congs = (congs2, weak2), procs = procs2, mk_rews = _, termless = _, subgoal_tac = _, - loop_tacs = loop_tacs2, solvers = (unsafe_solvers2, solvers2)}) = ss2; - - val rules' = Net.merge eq_rrule (rules1, rules2); - val prems' = Thm.merge_thms (prems1, prems2); - val depth' = if #1 depth1 < #1 depth2 then depth2 else depth1; - val congs' = merge (Thm.eq_thm_prop o pairself #2) (congs1, congs2); - val weak' = merge (op =) (weak1, 
weak2); - val procs' = Net.merge eq_proc (procs1, procs2); - val loop_tacs' = AList.merge (op =) (K true) (loop_tacs1, loop_tacs2); - val unsafe_solvers' = merge eq_solver (unsafe_solvers1, unsafe_solvers2); - val solvers' = merge eq_solver (solvers1, solvers2); - in - make_simpset ((rules', prems', depth'), ((congs', weak'), procs', - mk_rews, termless, subgoal_tac, loop_tacs', (unsafe_solvers', solvers'))) - end; - - - -(** context data **) - -structure Simpset = Generic_Data -( - type T = simpset; - val empty = empty_ss; - val extend = I; - val merge = merge_ss; -); - -val simpset_of = Simpset.get o Context.Proof; - -fun map_simpset f = Context.proof_map (Simpset.map f); -fun map_simpset1 f = map_simpset (fn Simpset (ss1, ss2) => Simpset (map_ss1 f ss1, ss2)); -fun map_simpset2 f = map_simpset (fn Simpset (ss1, ss2) => Simpset (ss1, map_ss2 f ss2)); - -fun simpset_map ctxt f ss = ctxt |> map_simpset (K ss) |> f |> Context.Proof |> Simpset.get; - -fun put_simpset ss = map_simpset (K ss); - -val empty_simpset = put_simpset empty_ss; - -fun map_theory_simpset f thy = - let - val ctxt' = f (Proof_Context.init_global thy); - val thy' = Proof_Context.theory_of ctxt'; - in Context.theory_map (Simpset.map (K (simpset_of ctxt'))) thy' end; - -fun map_ss f = Context.mapping (map_theory_simpset (f o Context_Position.not_really)) f; - -val clear_simpset = - map_simpset (fn Simpset ({depth, ...}, {mk_rews, termless, subgoal_tac, solvers, ...}) => - init_ss depth mk_rews termless subgoal_tac solvers); - - -(* simp depth *) - -val simp_depth_limit_raw = Config.declare ("simp_depth_limit", @{here}) (K (Config.Int 100)); -val simp_depth_limit = Config.int simp_depth_limit_raw; - -val simp_trace_depth_limit_default = Unsynchronized.ref 1; -val simp_trace_depth_limit_raw = - Config.declare ("simp_trace_depth_limit", @{here}) - (fn _ => Config.Int (! simp_trace_depth_limit_default)); -val simp_trace_depth_limit = Config.int simp_trace_depth_limit_raw; - -fun inc_simp_depth ctxt = - ctxt |> map_simpset1 (fn (rules, prems, (depth, exceeded)) => - (rules, prems, - (depth + 1, - if depth = Config.get ctxt simp_trace_depth_limit - then Unsynchronized.ref false else exceeded))); - -fun simp_depth ctxt = - let val Simpset ({depth = (depth, _), ...}, _) = simpset_of ctxt - in depth end; - - -(* diagnostics *) - -exception SIMPLIFIER of string * thm list; - -val simp_debug_raw = Config.declare ("simp_debug", @{here}) (K (Config.Bool false)); -val simp_debug = Config.bool simp_debug_raw; - -val simp_trace_default = Unsynchronized.ref false; -val simp_trace_raw = - Config.declare ("simp_trace", @{here}) (fn _ => Config.Bool (! simp_trace_default)); -val simp_trace = Config.bool simp_trace_raw; - -fun cond_warning ctxt msg = - if Context_Position.is_really_visible ctxt then warning (msg ()) else (); - -fun cond_tracing' ctxt flag msg = - if Config.get ctxt flag then - let - val Simpset ({depth = (depth, exceeded), ...}, _) = simpset_of ctxt; - val depth_limit = Config.get ctxt simp_trace_depth_limit; - in - if depth > depth_limit then - if ! 
exceeded then () else (tracing "simp_trace_depth_limit exceeded!"; exceeded := true) - else (tracing (enclose "[" "]" (string_of_int depth) ^ msg ()); exceeded := false) - end - else (); - -fun cond_tracing ctxt = cond_tracing' ctxt simp_trace; - -fun print_term ctxt s t = - s ^ "\n" ^ Syntax.string_of_term ctxt t; - -fun print_thm ctxt s (name, th) = - print_term ctxt (if name = "" then s else s ^ " " ^ quote name ^ ":") (Thm.full_prop_of th); - - - -(** simpset operations **) - -(* prems *) - -fun prems_of ctxt = - let val Simpset ({prems, ...}, _) = simpset_of ctxt in prems end; - -fun add_prems ths = - map_simpset1 (fn (rules, prems, depth) => (rules, ths @ prems, depth)); - - -(* maintain simp rules *) - -fun del_rrule (rrule as {thm, elhs, ...}) ctxt = - ctxt |> map_simpset1 (fn (rules, prems, depth) => - (Net.delete_term eq_rrule (term_of elhs, rrule) rules, prems, depth)) - handle Net.DELETE => - (cond_warning ctxt (fn () => print_thm ctxt "Rewrite rule not in simpset:" ("", thm)); ctxt); - -fun insert_rrule (rrule as {thm, name, ...}) ctxt = - (cond_tracing ctxt (fn () => print_thm ctxt "Adding rewrite rule" (name, thm)); - ctxt |> map_simpset1 (fn (rules, prems, depth) => - let - val rrule2 as {elhs, ...} = mk_rrule2 rrule; - val rules' = Net.insert_term eq_rrule (term_of elhs, rrule2) rules; - in (rules', prems, depth) end) - handle Net.INSERT => - (cond_warning ctxt (fn () => print_thm ctxt "Ignoring duplicate rewrite rule:" ("", thm)); - ctxt)); - -local - -fun vperm (Var _, Var _) = true - | vperm (Abs (_, _, s), Abs (_, _, t)) = vperm (s, t) - | vperm (t1 $ t2, u1 $ u2) = vperm (t1, u1) andalso vperm (t2, u2) - | vperm (t, u) = (t = u); - -fun var_perm (t, u) = - vperm (t, u) andalso eq_set (op =) (Term.add_vars t [], Term.add_vars u []); - -in - -fun decomp_simp thm = - let - val prop = Thm.prop_of thm; - val prems = Logic.strip_imp_prems prop; - val concl = Drule.strip_imp_concl (Thm.cprop_of thm); - val (lhs, rhs) = Thm.dest_equals concl handle TERM _ => - raise SIMPLIFIER ("Rewrite rule not a meta-equality", [thm]); - val elhs = Thm.dest_arg (Thm.cprop_of (Thm.eta_conversion lhs)); - val erhs = Envir.eta_contract (term_of rhs); - val perm = - var_perm (term_of elhs, erhs) andalso - not (term_of elhs aconv erhs) andalso - not (is_Var (term_of elhs)); - in (prems, term_of lhs, elhs, term_of rhs, perm) end; - -end; - -fun decomp_simp' thm = - let val (_, lhs, _, rhs, _) = decomp_simp thm in - if Thm.nprems_of thm > 0 then raise SIMPLIFIER ("Bad conditional rewrite rule", [thm]) - else (lhs, rhs) - end; - -fun mk_eq_True ctxt (thm, name) = - let val Simpset (_, {mk_rews = {mk_eq_True, ...}, ...}) = simpset_of ctxt in - (case mk_eq_True ctxt thm of - NONE => [] - | SOME eq_True => - let val (_, lhs, elhs, _, _) = decomp_simp eq_True; - in [{thm = eq_True, name = name, lhs = lhs, elhs = elhs, perm = false}] end) - end; - -(*create the rewrite rule and possibly also the eq_True variant, - in case there are extra vars on the rhs*) -fun rrule_eq_True ctxt thm name lhs elhs rhs thm2 = - let val rrule = {thm = thm, name = name, lhs = lhs, elhs = elhs, perm = false} in - if rewrite_rule_extra_vars [] lhs rhs then - mk_eq_True ctxt (thm2, name) @ [rrule] - else [rrule] - end; - -fun mk_rrule ctxt (thm, name) = - let val (prems, lhs, elhs, rhs, perm) = decomp_simp thm in - if perm then [{thm = thm, name = name, lhs = lhs, elhs = elhs, perm = true}] - else - (*weak test for loops*) - if rewrite_rule_extra_vars prems lhs rhs orelse is_Var (term_of elhs) - then mk_eq_True ctxt (thm, 
name) - else rrule_eq_True ctxt thm name lhs elhs rhs thm - end; - -fun orient_rrule ctxt (thm, name) = - let - val (prems, lhs, elhs, rhs, perm) = decomp_simp thm; - val Simpset (_, {mk_rews = {reorient, mk_sym, ...}, ...}) = simpset_of ctxt; - in - if perm then [{thm = thm, name = name, lhs = lhs, elhs = elhs, perm = true}] - else if reorient ctxt prems lhs rhs then - if reorient ctxt prems rhs lhs - then mk_eq_True ctxt (thm, name) - else - (case mk_sym ctxt thm of - NONE => [] - | SOME thm' => - let val (_, lhs', elhs', rhs', _) = decomp_simp thm' - in rrule_eq_True ctxt thm' name lhs' elhs' rhs' thm end) - else rrule_eq_True ctxt thm name lhs elhs rhs thm - end; - -fun extract_rews ctxt thms = - let val Simpset (_, {mk_rews = {mk, ...}, ...}) = simpset_of ctxt - in maps (fn thm => map (rpair (Thm.get_name_hint thm)) (mk ctxt thm)) thms end; - -fun extract_safe_rrules ctxt thm = - maps (orient_rrule ctxt) (extract_rews ctxt [thm]); - -fun mk_rrules ctxt thms = - let - val rews = extract_rews ctxt thms - val raw_rrules = flat (map (mk_rrule ctxt) rews) - in map mk_rrule2 raw_rrules end - - -(* add/del rules explicitly *) - -fun comb_simps ctxt comb mk_rrule thms = - let - val rews = extract_rews ctxt thms; - in fold (fold comb o mk_rrule) rews ctxt end; - -fun ctxt addsimps thms = - comb_simps ctxt insert_rrule (mk_rrule ctxt) thms; - -fun ctxt delsimps thms = - comb_simps ctxt del_rrule (map mk_rrule2 o mk_rrule ctxt) thms; - -fun add_simp thm ctxt = ctxt addsimps [thm]; -fun del_simp thm ctxt = ctxt delsimps [thm]; - - -(* congs *) - -local - -fun is_full_cong_prems [] [] = true - | is_full_cong_prems [] _ = false - | is_full_cong_prems (p :: prems) varpairs = - (case Logic.strip_assums_concl p of - Const ("Pure.eq", _) $ lhs $ rhs => - let val (x, xs) = strip_comb lhs and (y, ys) = strip_comb rhs in - is_Var x andalso forall is_Bound xs andalso - not (has_duplicates (op =) xs) andalso xs = ys andalso - member (op =) varpairs (x, y) andalso - is_full_cong_prems prems (remove (op =) (x, y) varpairs) - end - | _ => false); - -fun is_full_cong thm = - let - val prems = Thm.prems_of thm and concl = Thm.concl_of thm; - val (lhs, rhs) = Logic.dest_equals concl; - val (f, xs) = strip_comb lhs and (g, ys) = strip_comb rhs; - in - f = g andalso not (has_duplicates (op =) (xs @ ys)) andalso length xs = length ys andalso - is_full_cong_prems prems (xs ~~ ys) - end; - -fun mk_cong ctxt = - let val Simpset (_, {mk_rews = {mk_cong = f, ...}, ...}) = simpset_of ctxt - in f ctxt end; - -in - -fun add_eqcong thm ctxt = ctxt |> map_simpset2 - (fn (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers) => - let - val (lhs, _) = Logic.dest_equals (Thm.concl_of thm) - handle TERM _ => raise SIMPLIFIER ("Congruence not a meta-equality", [thm]); - (*val lhs = Envir.eta_contract lhs;*) - val a = the (cong_name (head_of lhs)) handle Option.Option => - raise SIMPLIFIER ("Congruence must start with a constant or free variable", [thm]); - val (xs, weak) = congs; - val _ = - if AList.defined (op =) xs a then - cond_warning ctxt (fn () => "Overwriting congruence rule for " ^ quote (#2 a)) - else (); - val xs' = AList.update (op =) (a, thm) xs; - val weak' = if is_full_cong thm then weak else a :: weak; - in ((xs', weak'), procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers) end); - -fun del_eqcong thm ctxt = ctxt |> map_simpset2 - (fn (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers) => - let - val (lhs, _) = Logic.dest_equals (Thm.concl_of thm) - handle TERM _ => raise SIMPLIFIER 
("Congruence not a meta-equality", [thm]); - (*val lhs = Envir.eta_contract lhs;*) - val a = the (cong_name (head_of lhs)) handle Option.Option => - raise SIMPLIFIER ("Congruence must start with a constant", [thm]); - val (xs, _) = congs; - val xs' = filter_out (fn (x : cong_name, _) => x = a) xs; - val weak' = xs' |> map_filter (fn (a, thm) => - if is_full_cong thm then NONE else SOME a); - in ((xs', weak'), procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers) end); - -fun add_cong thm ctxt = add_eqcong (mk_cong ctxt thm) ctxt; -fun del_cong thm ctxt = del_eqcong (mk_cong ctxt thm) ctxt; - -end; - - -(* simprocs *) - -datatype simproc = - Simproc of - {name: string, - lhss: cterm list, - proc: morphism -> Proof.context -> cterm -> thm option, - id: stamp * thm list}; - -fun eq_simproc (Simproc {id = id1, ...}, Simproc {id = id2, ...}) = eq_procid (id1, id2); - -fun transform_simproc phi (Simproc {name, lhss, proc, id = (s, ths)}) = - Simproc - {name = name, - lhss = map (Morphism.cterm phi) lhss, - proc = Morphism.transform phi proc, - id = (s, Morphism.fact phi ths)}; - -fun make_simproc {name, lhss, proc, identifier} = - Simproc {name = name, lhss = lhss, proc = proc, id = (stamp (), identifier)}; - -fun mk_simproc name lhss proc = - make_simproc {name = name, lhss = lhss, proc = fn _ => fn ctxt => fn ct => - proc ctxt (term_of ct), identifier = []}; - -(* FIXME avoid global thy and Logic.varify_global *) -fun simproc_global_i thy name = mk_simproc name o map (Thm.cterm_of thy o Logic.varify_global); -fun simproc_global thy name = simproc_global_i thy name o map (Syntax.read_term_global thy); - - -local - -fun add_proc (proc as Proc {name, lhs, ...}) ctxt = - (cond_tracing ctxt (fn () => - print_term ctxt ("Adding simplification procedure " ^ quote name ^ " for") (term_of lhs)); - ctxt |> map_simpset2 - (fn (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers) => - (congs, Net.insert_term eq_proc (term_of lhs, proc) procs, - mk_rews, termless, subgoal_tac, loop_tacs, solvers)) - handle Net.INSERT => - (cond_warning ctxt (fn () => "Ignoring duplicate simplification procedure " ^ quote name); - ctxt)); - -fun del_proc (proc as Proc {name, lhs, ...}) ctxt = - ctxt |> map_simpset2 - (fn (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers) => - (congs, Net.delete_term eq_proc (term_of lhs, proc) procs, - mk_rews, termless, subgoal_tac, loop_tacs, solvers)) - handle Net.DELETE => - (cond_warning ctxt (fn () => "Simplification procedure " ^ quote name ^ " not in simpset"); - ctxt); - -fun prep_procs (Simproc {name, lhss, proc, id}) = - lhss |> map (fn lhs => Proc {name = name, lhs = lhs, proc = Morphism.form proc, id = id}); - -in - -fun ctxt addsimprocs ps = fold (fold add_proc o prep_procs) ps ctxt; -fun ctxt delsimprocs ps = fold (fold del_proc o prep_procs) ps ctxt; - -end; - - -(* mk_rews *) - -local - -fun map_mk_rews f = - map_simpset2 (fn (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers) => - let - val {mk, mk_cong, mk_sym, mk_eq_True, reorient} = mk_rews; - val (mk', mk_cong', mk_sym', mk_eq_True', reorient') = - f (mk, mk_cong, mk_sym, mk_eq_True, reorient); - val mk_rews' = {mk = mk', mk_cong = mk_cong', mk_sym = mk_sym', mk_eq_True = mk_eq_True', - reorient = reorient'}; - in (congs, procs, mk_rews', termless, subgoal_tac, loop_tacs, solvers) end); - -in - -fun mksimps ctxt = - let val Simpset (_, {mk_rews = {mk, ...}, ...}) = simpset_of ctxt - in mk ctxt end; - -fun set_mksimps mk = map_mk_rews (fn (_, mk_cong, mk_sym, mk_eq_True, 
reorient) => - (mk, mk_cong, mk_sym, mk_eq_True, reorient)); - -fun set_mkcong mk_cong = map_mk_rews (fn (mk, _, mk_sym, mk_eq_True, reorient) => - (mk, mk_cong, mk_sym, mk_eq_True, reorient)); - -fun set_mksym mk_sym = map_mk_rews (fn (mk, mk_cong, _, mk_eq_True, reorient) => - (mk, mk_cong, mk_sym, mk_eq_True, reorient)); - -fun set_mkeqTrue mk_eq_True = map_mk_rews (fn (mk, mk_cong, mk_sym, _, reorient) => - (mk, mk_cong, mk_sym, mk_eq_True, reorient)); - -fun set_reorient reorient = map_mk_rews (fn (mk, mk_cong, mk_sym, mk_eq_True, _) => - (mk, mk_cong, mk_sym, mk_eq_True, reorient)); - -end; - - -(* termless *) - -fun set_termless termless = - map_simpset2 (fn (congs, procs, mk_rews, _, subgoal_tac, loop_tacs, solvers) => - (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers)); - - -(* tactics *) - -fun set_subgoaler subgoal_tac = - map_simpset2 (fn (congs, procs, mk_rews, termless, _, loop_tacs, solvers) => - (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers)); - -fun ctxt setloop tac = ctxt |> - map_simpset2 (fn (congs, procs, mk_rews, termless, subgoal_tac, _, solvers) => - (congs, procs, mk_rews, termless, subgoal_tac, [("", tac)], solvers)); - -fun ctxt addloop (name, tac) = ctxt |> - map_simpset2 (fn (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers) => - (congs, procs, mk_rews, termless, subgoal_tac, - AList.update (op =) (name, tac) loop_tacs, solvers)); - -fun ctxt delloop name = ctxt |> - map_simpset2 (fn (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, solvers) => - (congs, procs, mk_rews, termless, subgoal_tac, - (if AList.defined (op =) loop_tacs name then () - else cond_warning ctxt (fn () => "No such looper in simpset: " ^ quote name); - AList.delete (op =) name loop_tacs), solvers)); - -fun ctxt setSSolver solver = ctxt |> map_simpset2 - (fn (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, (unsafe_solvers, _)) => - (congs, procs, mk_rews, termless, subgoal_tac, loop_tacs, (unsafe_solvers, [solver]))); - -fun ctxt addSSolver solver = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, termless, - subgoal_tac, loop_tacs, (unsafe_solvers, solvers)) => (congs, procs, mk_rews, termless, - subgoal_tac, loop_tacs, (unsafe_solvers, insert eq_solver solver solvers))); - -fun ctxt setSolver solver = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, termless, - subgoal_tac, loop_tacs, (_, solvers)) => (congs, procs, mk_rews, termless, - subgoal_tac, loop_tacs, ([solver], solvers))); - -fun ctxt addSolver solver = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, termless, - subgoal_tac, loop_tacs, (unsafe_solvers, solvers)) => (congs, procs, mk_rews, termless, - subgoal_tac, loop_tacs, (insert eq_solver solver unsafe_solvers, solvers))); - -fun set_solvers solvers = map_simpset2 (fn (congs, procs, mk_rews, termless, - subgoal_tac, loop_tacs, _) => (congs, procs, mk_rews, termless, - subgoal_tac, loop_tacs, (solvers, solvers))); - - -(* trace operations *) - -type trace_ops = - {trace_invoke: {depth: int, term: term} -> Proof.context -> Proof.context, - trace_apply: {unconditional: bool, term: term, thm: thm, rrule: rrule} -> - Proof.context -> (Proof.context -> (thm * term) option) -> (thm * term) option}; - -structure Trace_Ops = Theory_Data -( - type T = trace_ops; - val empty: T = - {trace_invoke = fn _ => fn ctxt => ctxt, - trace_apply = fn _ => fn ctxt => fn cont => cont ctxt}; - val extend = I; - fun merge (trace_ops, _) = trace_ops; -); - -val set_trace_ops = Trace_Ops.put; - -val trace_ops = Trace_Ops.get o 
Proof_Context.theory_of; -fun trace_invoke args ctxt = #trace_invoke (trace_ops ctxt) args ctxt; -fun trace_apply args ctxt = #trace_apply (trace_ops ctxt) args ctxt; - - - -(** rewriting **) - -(* - Uses conversions, see: - L C Paulson, A higher-order implementation of rewriting, - Science of Computer Programming 3 (1983), pages 119-149. -*) - -fun check_conv ctxt msg thm thm' = - let - val thm'' = Thm.transitive thm thm' handle THM _ => - Thm.transitive thm (Thm.transitive - (Thm.symmetric (Drule.beta_eta_conversion (Thm.lhs_of thm'))) thm') - val _ = - if msg then cond_tracing ctxt (fn () => print_thm ctxt "SUCCEEDED" ("", thm')) - else (); - in SOME thm'' end - handle THM _ => - let - val _ $ _ $ prop0 = Thm.prop_of thm; - val _ = - cond_tracing ctxt (fn () => - print_thm ctxt "Proved wrong theorem (bad subgoaler?)" ("", thm') ^ "\n" ^ - print_term ctxt "Should have proved:" prop0); - in NONE end; - - -(* mk_procrule *) - -fun mk_procrule ctxt thm = - let val (prems, lhs, elhs, rhs, _) = decomp_simp thm in - if rewrite_rule_extra_vars prems lhs rhs - then (cond_warning ctxt (fn () => print_thm ctxt "Extra vars on rhs:" ("", thm)); []) - else [mk_rrule2 {thm = thm, name = "", lhs = lhs, elhs = elhs, perm = false}] - end; - - -(* rewritec: conversion to apply the meta simpset to a term *) - -(*Since the rewriting strategy is bottom-up, we avoid re-normalizing already - normalized terms by carrying around the rhs of the rewrite rule just - applied. This is called the `skeleton'. It is decomposed in parallel - with the term. Once a Var is encountered, the corresponding term is - already in normal form. - skel0 is a dummy skeleton that is to enforce complete normalization.*) - -val skel0 = Bound 0; - -(*Use rhs as skeleton only if the lhs does not contain unnormalized bits. - The latter may happen iff there are weak congruence rules for constants - in the lhs.*) - -fun uncond_skel ((_, weak), (lhs, rhs)) = - if null weak then rhs (*optimization*) - else if exists_subterm - (fn Const (a, _) => member (op =) weak (true, a) - | Free (a, _) => member (op =) weak (false, a) - | _ => false) lhs then skel0 - else rhs; - -(*Behaves like unconditional rule if rhs does not contain vars not in the lhs. - Otherwise those vars may become instantiated with unnormalized terms - while the premises are solved.*) - -fun cond_skel (args as (_, (lhs, rhs))) = - if subset (op =) (Term.add_vars rhs [], Term.add_vars lhs []) then uncond_skel args - else skel0; - -(* - Rewriting -- we try in order: - (1) beta reduction - (2) unconditional rewrite rules - (3) conditional rewrite rules - (4) simplification procedures - - IMPORTANT: rewrite rules must not introduce new Vars or TVars! 
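(* A small sketch of how rules of classes (2)-(4) above reach this code,
   assuming a background theory thy, a context ctxt, a meta-equality theorem
   add_0, and a prover prove_eq : Proof.context -> term -> thm option (all
   hypothetical names).  Plain rewrite rules enter via addsimps, simplification
   procedures via addsimprocs:

     val my_proc =
       simproc_global thy "my_proc" ["f x"]
         (fn ctxt => fn t => prove_eq ctxt t);
     val ctxt' = ctxt addsimps [add_0] addsimprocs [my_proc];

   rewritec below tries matching rewrite rules first and falls back on the
   registered procedures. *)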
-*) - -fun rewritec (prover, maxt) ctxt t = - let - val Simpset ({rules, ...}, {congs, procs, termless, ...}) = simpset_of ctxt; - val eta_thm = Thm.eta_conversion t; - val eta_t' = Thm.rhs_of eta_thm; - val eta_t = term_of eta_t'; - fun rew rrule = - let - val {thm, name, lhs, elhs, extra, fo, perm} = rrule - val prop = Thm.prop_of thm; - val (rthm, elhs') = - if maxt = ~1 orelse not extra then (thm, elhs) - else (Thm.incr_indexes (maxt + 1) thm, Thm.incr_indexes_cterm (maxt + 1) elhs); - val insts = - if fo then Thm.first_order_match (elhs', eta_t') - else Thm.match (elhs', eta_t'); - val thm' = Thm.instantiate insts (Thm.rename_boundvars lhs eta_t rthm); - val prop' = Thm.prop_of thm'; - val unconditional = (Logic.count_prems prop' = 0); - val (lhs', rhs') = Logic.dest_equals (Logic.strip_imp_concl prop'); - val trace_args = {unconditional = unconditional, term = eta_t, thm = thm', rrule = rrule}; - in - if perm andalso not (termless (rhs', lhs')) - then - (cond_tracing ctxt (fn () => - print_thm ctxt "Cannot apply permutative rewrite rule" (name, thm) ^ "\n" ^ - print_thm ctxt "Term does not become smaller:" ("", thm')); - NONE) - else - (cond_tracing ctxt (fn () => - print_thm ctxt "Applying instance of rewrite rule" (name, thm)); - if unconditional - then - (cond_tracing ctxt (fn () => print_thm ctxt "Rewriting:" ("", thm')); - trace_apply trace_args ctxt (fn ctxt' => - let - val lr = Logic.dest_equals prop; - val SOME thm'' = check_conv ctxt' false eta_thm thm'; - in SOME (thm'', uncond_skel (congs, lr)) end)) - else - (cond_tracing ctxt (fn () => print_thm ctxt "Trying to rewrite:" ("", thm')); - if simp_depth ctxt > Config.get ctxt simp_depth_limit - then (cond_tracing ctxt (fn () => "simp_depth_limit exceeded - giving up"); NONE) - else - trace_apply trace_args ctxt (fn ctxt' => - (case prover ctxt' thm' of - NONE => (cond_tracing ctxt' (fn () => print_thm ctxt' "FAILED" ("", thm')); NONE) - | SOME thm2 => - (case check_conv ctxt' true eta_thm thm2 of - NONE => NONE - | SOME thm2' => - let - val concl = Logic.strip_imp_concl prop; - val lr = Logic.dest_equals concl; - in SOME (thm2', cond_skel (congs, lr)) end))))) - end; - - fun rews [] = NONE - | rews (rrule :: rrules) = - let val opt = rew rrule handle Pattern.MATCH => NONE - in (case opt of NONE => rews rrules | some => some) end; - - fun sort_rrules rrs = - let - fun is_simple ({thm, ...}: rrule) = - (case Thm.prop_of thm of - Const ("Pure.eq", _) $ _ $ _ => true - | _ => false); - fun sort [] (re1, re2) = re1 @ re2 - | sort (rr :: rrs) (re1, re2) = - if is_simple rr - then sort rrs (rr :: re1, re2) - else sort rrs (re1, rr :: re2); - in sort rrs ([], []) end; - - fun proc_rews [] = NONE - | proc_rews (Proc {name, proc, lhs, ...} :: ps) = - if Pattern.matches (Proof_Context.theory_of ctxt) (term_of lhs, term_of t) then - (cond_tracing' ctxt simp_debug (fn () => - print_term ctxt ("Trying procedure " ^ quote name ^ " on:") eta_t); - (case proc ctxt eta_t' of - NONE => (cond_tracing' ctxt simp_debug (fn () => "FAILED"); proc_rews ps) - | SOME raw_thm => - (cond_tracing ctxt (fn () => - print_thm ctxt ("Procedure " ^ quote name ^ " produced rewrite rule:") - ("", raw_thm)); - (case rews (mk_procrule ctxt raw_thm) of - NONE => - (cond_tracing ctxt (fn () => - print_term ctxt ("IGNORED result of simproc " ^ quote name ^ - " -- does not match") (Thm.term_of t)); - proc_rews ps) - | some => some)))) - else proc_rews ps; - in - (case eta_t of - Abs _ $ _ => SOME (Thm.transitive eta_thm (Thm.beta_conversion false eta_t'), skel0) - | 
_ => - (case rews (sort_rrules (Net.match_term rules eta_t)) of - NONE => proc_rews (Net.match_term procs eta_t) - | some => some)) - end; - - -(* conversion to apply a congruence rule to a term *) - -fun congc prover ctxt maxt cong t = - let - val rthm = Thm.incr_indexes (maxt + 1) cong; - val rlhs = fst (Thm.dest_equals (Drule.strip_imp_concl (cprop_of rthm))); - val insts = Thm.match (rlhs, t) - (* Thm.match can raise Pattern.MATCH; - is handled when congc is called *) - val thm' = Thm.instantiate insts (Thm.rename_boundvars (term_of rlhs) (term_of t) rthm); - val _ = - cond_tracing ctxt (fn () => print_thm ctxt "Applying congruence rule:" ("", thm')); - fun err (msg, thm) = (cond_tracing ctxt (fn () => print_thm ctxt msg ("", thm)); NONE); - in - (case prover thm' of - NONE => err ("Congruence proof failed. Could not prove", thm') - | SOME thm2 => - (case check_conv ctxt true (Drule.beta_eta_conversion t) thm2 of - NONE => err ("Congruence proof failed. Should not have proved", thm2) - | SOME thm2' => - if op aconv (pairself term_of (Thm.dest_equals (cprop_of thm2'))) - then NONE else SOME thm2')) - end; - -val (cA, (cB, cC)) = - apsnd Thm.dest_equals (Thm.dest_implies (hd (cprems_of Drule.imp_cong))); - -fun transitive1 NONE NONE = NONE - | transitive1 (SOME thm1) NONE = SOME thm1 - | transitive1 NONE (SOME thm2) = SOME thm2 - | transitive1 (SOME thm1) (SOME thm2) = SOME (Thm.transitive thm1 thm2); - -fun transitive2 thm = transitive1 (SOME thm); -fun transitive3 thm = transitive1 thm o SOME; - -fun bottomc ((simprem, useprem, mutsimp), prover, maxidx) = - let - fun botc skel ctxt t = - if is_Var skel then NONE - else - (case subc skel ctxt t of - some as SOME thm1 => - (case rewritec (prover, maxidx) ctxt (Thm.rhs_of thm1) of - SOME (thm2, skel2) => - transitive2 (Thm.transitive thm1 thm2) - (botc skel2 ctxt (Thm.rhs_of thm2)) - | NONE => some) - | NONE => - (case rewritec (prover, maxidx) ctxt t of - SOME (thm2, skel2) => transitive2 thm2 - (botc skel2 ctxt (Thm.rhs_of thm2)) - | NONE => NONE)) - - and try_botc ctxt t = - (case botc skel0 ctxt t of - SOME trec1 => trec1 - | NONE => Thm.reflexive t) - - and subc skel ctxt t0 = - let val Simpset (_, {congs, ...}) = simpset_of ctxt in - (case term_of t0 of - Abs (a, T, _) => - let - val (v, ctxt') = Variable.next_bound (a, T) ctxt; - val b = #1 (Term.dest_Free v); - val (v', t') = Thm.dest_abs (SOME b) t0; - val b' = #1 (Term.dest_Free (term_of v')); - val _ = - if b <> b' then - warning ("Bad Simplifier context: renamed bound variable " ^ - quote b ^ " to " ^ quote b' ^ Position.here (Position.thread_data ())) - else (); - val skel' = (case skel of Abs (_, _, sk) => sk | _ => skel0); - in - (case botc skel' ctxt' t' of - SOME thm => SOME (Thm.abstract_rule a v' thm) - | NONE => NONE) - end - | t $ _ => - (case t of - Const ("Pure.imp", _) $ _ => impc t0 ctxt - | Abs _ => - let val thm = Thm.beta_conversion false t0 - in - (case subc skel0 ctxt (Thm.rhs_of thm) of - NONE => SOME thm - | SOME thm' => SOME (Thm.transitive thm thm')) - end - | _ => - let - fun appc () = - let - val (tskel, uskel) = - (case skel of - tskel $ uskel => (tskel, uskel) - | _ => (skel0, skel0)); - val (ct, cu) = Thm.dest_comb t0; - in - (case botc tskel ctxt ct of - SOME thm1 => - (case botc uskel ctxt cu of - SOME thm2 => SOME (Thm.combination thm1 thm2) - | NONE => SOME (Thm.combination thm1 (Thm.reflexive cu))) - | NONE => - (case botc uskel ctxt cu of - SOME thm1 => SOME (Thm.combination (Thm.reflexive ct) thm1) - | NONE => NONE)) - end; - val (h, ts) = 
strip_comb t; - in - (case cong_name h of - SOME a => - (case AList.lookup (op =) (fst congs) a of - NONE => appc () - | SOME cong => - (*post processing: some partial applications h t1 ... tj, j <= length ts, - may be a redex. Example: map (%x. x) = (%xs. xs) wrt map_cong*) - (let - val thm = congc (prover ctxt) ctxt maxidx cong t0; - val t = the_default t0 (Option.map Thm.rhs_of thm); - val (cl, cr) = Thm.dest_comb t - val dVar = Var(("", 0), dummyT) - val skel = - list_comb (h, replicate (length ts) dVar) - in - (case botc skel ctxt cl of - NONE => thm - | SOME thm' => - transitive3 thm (Thm.combination thm' (Thm.reflexive cr))) - end handle Pattern.MATCH => appc ())) - | _ => appc ()) - end) - | _ => NONE) - end - and impc ct ctxt = - if mutsimp then mut_impc0 [] ct [] [] ctxt - else nonmut_impc ct ctxt - - and rules_of_prem prem ctxt = - if maxidx_of_term (term_of prem) <> ~1 - then - (cond_tracing ctxt (fn () => - print_term ctxt "Cannot add premise as rewrite rule because it contains (type) unknowns:" - (term_of prem)); - (([], NONE), ctxt)) - else - let val (asm, ctxt') = Thm.assume_hyps prem ctxt - in ((extract_safe_rrules ctxt' asm, SOME asm), ctxt') end - - and add_rrules (rrss, asms) ctxt = - (fold o fold) insert_rrule rrss ctxt |> add_prems (map_filter I asms) - - and disch r prem eq = - let - val (lhs, rhs) = Thm.dest_equals (Thm.cprop_of eq); - val eq' = - Thm.implies_elim - (Thm.instantiate ([], [(cA, prem), (cB, lhs), (cC, rhs)]) Drule.imp_cong) - (Thm.implies_intr prem eq); - in - if not r then eq' - else - let - val (prem', concl) = Thm.dest_implies lhs; - val (prem'', _) = Thm.dest_implies rhs; - in - Thm.transitive - (Thm.transitive - (Thm.instantiate ([], [(cA, prem'), (cB, prem), (cC, concl)]) Drule.swap_prems_eq) - eq') - (Thm.instantiate ([], [(cA, prem), (cB, prem''), (cC, concl)]) Drule.swap_prems_eq) - end - end - - and rebuild [] _ _ _ _ eq = eq - | rebuild (prem :: prems) concl (_ :: rrss) (_ :: asms) ctxt eq = - let - val ctxt' = add_rrules (rev rrss, rev asms) ctxt; - val concl' = - Drule.mk_implies (prem, the_default concl (Option.map Thm.rhs_of eq)); - val dprem = Option.map (disch false prem); - in - (case rewritec (prover, maxidx) ctxt' concl' of - NONE => rebuild prems concl' rrss asms ctxt (dprem eq) - | SOME (eq', _) => - transitive2 (fold (disch false) prems (the (transitive3 (dprem eq) eq'))) - (mut_impc0 (rev prems) (Thm.rhs_of eq') (rev rrss) (rev asms) ctxt)) - end - - and mut_impc0 prems concl rrss asms ctxt = - let - val prems' = strip_imp_prems concl; - val ((rrss', asms'), ctxt') = fold_map rules_of_prem prems' ctxt |>> split_list; - in - mut_impc (prems @ prems') (strip_imp_concl concl) (rrss @ rrss') - (asms @ asms') [] [] [] [] ctxt' ~1 ~1 - end - - and mut_impc [] concl [] [] prems' rrss' asms' eqns ctxt changed k = - transitive1 (fold (fn (eq1, prem) => fn eq2 => transitive1 eq1 - (Option.map (disch false prem) eq2)) (eqns ~~ prems') NONE) - (if changed > 0 then - mut_impc (rev prems') concl (rev rrss') (rev asms') - [] [] [] [] ctxt ~1 changed - else rebuild prems' concl rrss' asms' ctxt - (botc skel0 (add_rrules (rev rrss', rev asms') ctxt) concl)) - - | mut_impc (prem :: prems) concl (rrs :: rrss) (asm :: asms) - prems' rrss' asms' eqns ctxt changed k = - (case (if k = 0 then NONE else botc skel0 (add_rrules - (rev rrss' @ rrss, rev asms' @ asms) ctxt) prem) of - NONE => mut_impc prems concl rrss asms (prem :: prems') - (rrs :: rrss') (asm :: asms') (NONE :: eqns) ctxt changed - (if k = 0 then 0 else k - 1) - | SOME eqn => - let - val 
prem' = Thm.rhs_of eqn; - val tprems = map term_of prems; - val i = 1 + fold Integer.max (map (fn p => - find_index (fn q => q aconv p) tprems) (Thm.hyps_of eqn)) ~1; - val ((rrs', asm'), ctxt') = rules_of_prem prem' ctxt; - in - mut_impc prems concl rrss asms (prem' :: prems') - (rrs' :: rrss') (asm' :: asms') - (SOME (fold_rev (disch true) - (take i prems) - (Drule.imp_cong_rule eqn (Thm.reflexive (Drule.list_implies - (drop i prems, concl))))) :: eqns) - ctxt' (length prems') ~1 - end) - - (*legacy code -- only for backwards compatibility*) - and nonmut_impc ct ctxt = - let - val (prem, conc) = Thm.dest_implies ct; - val thm1 = if simprem then botc skel0 ctxt prem else NONE; - val prem1 = the_default prem (Option.map Thm.rhs_of thm1); - val ctxt1 = - if not useprem then ctxt - else - let val ((rrs, asm), ctxt') = rules_of_prem prem1 ctxt - in add_rrules ([rrs], [asm]) ctxt' end; - in - (case botc skel0 ctxt1 conc of - NONE => - (case thm1 of - NONE => NONE - | SOME thm1' => SOME (Drule.imp_cong_rule thm1' (Thm.reflexive conc))) - | SOME thm2 => - let val thm2' = disch false prem1 thm2 in - (case thm1 of - NONE => SOME thm2' - | SOME thm1' => - SOME (Thm.transitive (Drule.imp_cong_rule thm1' (Thm.reflexive conc)) thm2')) - end) - end; - - in try_botc end; - - -(* Meta-rewriting: rewrites t to u and returns the theorem t==u *) - -(* - Parameters: - mode = (simplify A, - use A in simplifying B, - use prems of B (if B is again a meta-impl.) to simplify A) - when simplifying A ==> B - prover: how to solve premises in conditional rewrites and congruences -*) - -fun rewrite_cterm mode prover raw_ctxt raw_ct = - let - val thy = Proof_Context.theory_of raw_ctxt; - - val ct = Thm.adjust_maxidx_cterm ~1 raw_ct; - val {maxidx, ...} = Thm.rep_cterm ct; - val _ = - Theory.subthy (theory_of_cterm ct, thy) orelse - raise CTERM ("rewrite_cterm: bad background theory", [ct]); - - val ctxt = - raw_ctxt - |> Context_Position.set_visible false - |> inc_simp_depth - |> (fn ctxt => trace_invoke {depth = simp_depth ctxt, term = term_of ct} ctxt); - - val _ = - cond_tracing ctxt (fn () => - print_term ctxt "SIMPLIFIER INVOKED ON THE FOLLOWING TERM:" (term_of ct)); - in bottomc (mode, Option.map Drule.flexflex_unique oo prover, maxidx) ctxt ct end; - -val simple_prover = - SINGLE o (fn ctxt => ALLGOALS (resolve_tac (prems_of ctxt))); - -fun rewrite _ _ [] = Thm.reflexive - | rewrite ctxt full thms = - rewrite_cterm (full, false, false) simple_prover - (empty_simpset ctxt addsimps thms); - -fun rewrite_rule ctxt = Conv.fconv_rule o rewrite ctxt true; - -(*simple term rewriting -- no proof*) -fun rewrite_term thy rules procs = - Pattern.rewrite_term thy (map decomp_simp' rules) procs; - -fun rewrite_thm mode prover ctxt = Conv.fconv_rule (rewrite_cterm mode prover ctxt); - -(*Rewrite the subgoals of a proof state (represented by a theorem)*) -fun rewrite_goals_rule ctxt thms th = - Conv.fconv_rule (Conv.prems_conv ~1 (rewrite_cterm (true, true, true) simple_prover - (empty_simpset ctxt addsimps thms))) th; - - -(** meta-rewriting tactics **) - -(*Rewrite all subgoals*) -fun rewrite_goals_tac ctxt defs = PRIMITIVE (rewrite_goals_rule ctxt defs); - -(*Rewrite one subgoal*) -fun generic_rewrite_goal_tac mode prover_tac ctxt i thm = - if 0 < i andalso i <= Thm.nprems_of thm then - Seq.single (Conv.gconv_rule (rewrite_cterm mode (SINGLE o prover_tac) ctxt) i thm) - else Seq.empty; - -fun rewrite_goal_tac ctxt rews = - generic_rewrite_goal_tac (true, false, false) (K no_tac) - (empty_simpset ctxt addsimps rews); - 
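(* Usage sketch for the entry points above, assuming a Proof.context ctxt, a
   list of definitional meta-equalities defs, and a theorem th (hypothetical
   names): rewrite_rule normalizes a theorem, rewrite_goals_tac rewrites all
   subgoals of a proof state, and rewrite_goal_tac rewrites a single subgoal.

     val th' = rewrite_rule ctxt defs th;
     val all_goals_tac = rewrite_goals_tac ctxt defs;
     val goal1_tac = rewrite_goal_tac ctxt defs 1;
*)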
-(*Prunes all redundant parameters from the proof state by rewriting.*) -fun prune_params_tac ctxt = rewrite_goals_tac ctxt [Drule.triv_forall_equality]; - - -(* for folding definitions, handling critical pairs *) - -(*The depth of nesting in a term*) -fun term_depth (Abs (_, _, t)) = 1 + term_depth t - | term_depth (f $ t) = 1 + Int.max (term_depth f, term_depth t) - | term_depth _ = 0; - -val lhs_of_thm = #1 o Logic.dest_equals o prop_of; - -(*folding should handle critical pairs! E.g. K == Inl(0), S == Inr(Inl(0)) - Returns longest lhs first to avoid folding its subexpressions.*) -fun sort_lhs_depths defs = - let val keylist = AList.make (term_depth o lhs_of_thm) defs - val keys = sort_distinct (rev_order o int_ord) (map #2 keylist) - in map (AList.find (op =) keylist) keys end; - -val rev_defs = sort_lhs_depths o map Thm.symmetric; - -fun fold_rule ctxt defs = fold (rewrite_rule ctxt) (rev_defs defs); -fun fold_goals_tac ctxt defs = EVERY (map (rewrite_goals_tac ctxt) (rev_defs defs)); - - -(* HHF normal form: !! before ==>, outermost !! generalized *) - -local - -fun gen_norm_hhf ss ctxt th = - (if Drule.is_norm_hhf (Thm.prop_of th) then th - else - Conv.fconv_rule - (rewrite_cterm (true, false, false) (K (K NONE)) (put_simpset ss ctxt)) th) - |> Thm.adjust_maxidx_thm ~1 - |> Drule.gen_all; - -val hhf_ss = - simpset_of (empty_simpset (Context.proof_of (Context.the_thread_data ())) - addsimps Drule.norm_hhf_eqs); - -val hhf_protect_ss = - simpset_of (empty_simpset (Context.proof_of (Context.the_thread_data ())) - addsimps Drule.norm_hhf_eqs |> add_eqcong Drule.protect_cong); - -in - -val norm_hhf = gen_norm_hhf hhf_ss; -val norm_hhf_protect = gen_norm_hhf hhf_protect_ss; - -end; - -end; - -structure Basic_Meta_Simplifier: BASIC_RAW_SIMPLIFIER = Raw_Simplifier; -open Basic_Meta_Simplifier; diff --git a/core/Pure/search.ML b/core/Pure/search.ML deleted file mode 100644 index 9fb02f3c..00000000 --- a/core/Pure/search.ML +++ /dev/null @@ -1,301 +0,0 @@ -(* Title: Pure/search.ML - Author: Lawrence C Paulson and Norbert Voelker - -Search tacticals. -*) - -infix 1 THEN_MAYBE THEN_MAYBE'; - -signature SEARCH = -sig - val trace_DEPTH_FIRST: bool Unsynchronized.ref - val DEPTH_FIRST: (thm -> bool) -> tactic -> tactic - val has_fewer_prems: int -> thm -> bool - val IF_UNSOLVED: tactic -> tactic - val SOLVE: tactic -> tactic - val THEN_MAYBE: tactic * tactic -> tactic - val THEN_MAYBE': ('a -> tactic) * ('a -> tactic) -> 'a -> tactic - val DEPTH_SOLVE: tactic -> tactic - val DEPTH_SOLVE_1: tactic -> tactic - val THEN_ITER_DEEPEN: int -> tactic -> (thm -> bool) -> (int -> tactic) -> tactic - val ITER_DEEPEN: int -> (thm -> bool) -> (int -> tactic) -> tactic - val trace_DEEPEN: bool Unsynchronized.ref - val DEEPEN: int * int -> (int -> int -> tactic) -> int -> int -> tactic - val trace_BEST_FIRST: bool Unsynchronized.ref - val THEN_BEST_FIRST: tactic -> (thm -> bool) * (thm -> int) -> tactic -> tactic - val BEST_FIRST: (thm -> bool) * (thm -> int) -> tactic -> tactic - val BREADTH_FIRST: (thm -> bool) -> tactic -> tactic - val QUIET_BREADTH_FIRST: (thm -> bool) -> tactic -> tactic - val trace_ASTAR: bool Unsynchronized.ref - val THEN_ASTAR: tactic -> (thm -> bool) * (int -> thm -> int) -> tactic -> tactic - val ASTAR: (thm -> bool) * (int -> thm -> int) -> tactic -> tactic -end; - -structure Search: SEARCH = -struct - -(**** Depth-first search ****) - -val trace_DEPTH_FIRST = Unsynchronized.ref false; - -(*Searches until "satp" reports proof tree as satisfied. 
- Suppresses duplicate solutions to minimize search space.*) -fun DEPTH_FIRST satp tac = - let val tac = tracify trace_DEPTH_FIRST tac - fun depth used [] = NONE - | depth used (q::qs) = - case Seq.pull q of - NONE => depth used qs - | SOME(st,stq) => - if satp st andalso not (member Thm.eq_thm used st) - then SOME(st, Seq.make - (fn()=> depth (st::used) (stq::qs))) - else depth used (tac st :: stq :: qs) - in traced_tac (fn st => depth [] [Seq.single st]) end; - - - -(*Predicate: Does the rule have fewer than n premises?*) -fun has_fewer_prems n rule = (nprems_of rule < n); - -(*Apply a tactic if subgoals remain, else do nothing.*) -val IF_UNSOLVED = COND (has_fewer_prems 1) all_tac; - -(*Force a tactic to solve its goal completely, otherwise fail *) -fun SOLVE tac = tac THEN COND (has_fewer_prems 1) all_tac no_tac; - -(*Execute tac1, but only execute tac2 if there are at least as many subgoals - as before. This ensures that tac2 is only applied to an outcome of tac1.*) -fun (tac1 THEN_MAYBE tac2) st = - (tac1 THEN COND (has_fewer_prems (nprems_of st)) all_tac tac2) st; - -fun (tac1 THEN_MAYBE' tac2) x = tac1 x THEN_MAYBE tac2 x; - -(*Tactical to reduce the number of premises by 1. - If no subgoals then it must fail! *) -fun DEPTH_SOLVE_1 tac st = st |> - (case nprems_of st of - 0 => no_tac - | n => DEPTH_FIRST (has_fewer_prems n) tac); - -(*Uses depth-first search to solve ALL subgoals*) -val DEPTH_SOLVE = DEPTH_FIRST (has_fewer_prems 1); - - - -(**** Iterative deepening with pruning ****) - -fun has_vars (Var _) = true - | has_vars (Abs (_,_,t)) = has_vars t - | has_vars (f$t) = has_vars f orelse has_vars t - | has_vars _ = false; - -(*Counting of primitive inferences is APPROXIMATE, as the step tactic - may perform >1 inference*) - -(*Pruning of rigid ancestor to prevent backtracking*) -fun prune (new as (k', np':int, rgd', stq), qs) = - let fun prune_aux (qs, []) = new::qs - | prune_aux (qs, (k,np,rgd,q)::rqs) = - if np'+1 = np andalso rgd then - (if !trace_DEPTH_FIRST then - tracing ("Pruning " ^ - string_of_int (1+length rqs) ^ " levels") - else (); - (*Use OLD k: zero-cost solution; see Stickel, p 365*) - (k, np', rgd', stq) :: qs) - else prune_aux ((k,np,rgd,q)::qs, rqs) - fun take ([], rqs) = ([], rqs) - | take (arg as ((k,np,rgd,stq)::qs, rqs)) = - if np' < np then take (qs, (k,np,rgd,stq)::rqs) - else arg - in prune_aux (take (qs, [])) end; - - -(*Depth-first iterative deepening search for a state that satisfies satp - tactic tac0 sets up the initial goal queue, while tac1 searches it. - The solution sequence is redundant: the cutoff heuristic makes it impossible - to suppress solutions arising from earlier searches, as the accumulated cost - (k) can be wrong.*) -fun THEN_ITER_DEEPEN lim tac0 satp tac1 = traced_tac (fn st => - let val countr = Unsynchronized.ref 0 - and tf = tracify trace_DEPTH_FIRST (tac1 1) - and qs0 = tac0 st - (*bnd = depth bound; inc = estimate of increment required next*) - fun depth (bnd,inc) [] = - if bnd > lim then - (if !trace_DEPTH_FIRST then - tracing (string_of_int (!countr) ^ - " inferences so far. Giving up at " ^ - string_of_int bnd) - else (); - NONE) - else - (if !trace_DEPTH_FIRST then - tracing (string_of_int (!countr) ^ - " inferences so far. 
Searching to depth " ^ - string_of_int bnd) - else (); - (*larger increments make it run slower for the hard problems*) - depth (bnd+inc, 10)) [(0, 1, false, qs0)] - | depth (bnd,inc) ((k,np,rgd,q)::qs) = - if k>=bnd then depth (bnd,inc) qs - else - case (Unsynchronized.inc countr; - if !trace_DEPTH_FIRST then - tracing (string_of_int np ^ implode (map (fn _ => "*") qs)) - else (); - Seq.pull q) of - NONE => depth (bnd,inc) qs - | SOME(st,stq) => - if satp st (*solution!*) - then SOME(st, Seq.make - (fn()=> depth (bnd,inc) ((k,np,rgd,stq)::qs))) - - else - let val np' = nprems_of st - (*rgd' calculation assumes tactic operates on subgoal 1*) - val rgd' = not (has_vars (hd (prems_of st))) - val k' = k+np'-np+1 (*difference in # of subgoals, +1*) - in if k'+np' >= bnd - then depth (bnd, Int.min(inc, k'+np'+1-bnd)) qs - else if np' < np (*solved a subgoal; prune rigid ancestors*) - then depth (bnd,inc) - (prune ((k', np', rgd', tf st), (k,np,rgd,stq) :: qs)) - else depth (bnd,inc) ((k', np', rgd', tf st) :: - (k,np,rgd,stq) :: qs) - end - in depth (0,5) [] end); - -fun ITER_DEEPEN lim = THEN_ITER_DEEPEN lim all_tac; - - -(*Simple iterative deepening tactical. It merely "deepens" any search tactic - using increment "inc" up to limit "lim". *) -val trace_DEEPEN = Unsynchronized.ref false; - -fun DEEPEN (inc, lim) tacf m i = - let - fun dpn m st = - st |> - (if has_fewer_prems i st then no_tac - else if m > lim then - (if !trace_DEEPEN then tracing "Search depth limit exceeded: giving up" else (); - no_tac) - else - (if !trace_DEEPEN then tracing ("Search depth = " ^ string_of_int m) else (); - tacf m i ORELSE dpn (m+inc))) - in dpn m end; - - -(*** Best-first search ***) - -(*total ordering on theorems, allowing duplicates to be found*) -structure Thm_Heap = Heap -( - type elem = int * thm; - val ord = prod_ord int_ord (Term_Ord.term_ord o pairself Thm.prop_of); -); - -val trace_BEST_FIRST = Unsynchronized.ref false; - -(*For creating output sequence*) -fun some_of_list [] = NONE - | some_of_list (x::l) = SOME (x, Seq.make (fn () => some_of_list l)); - -(*Check for and delete duplicate proof states*) -fun delete_all_min prf heap = - if Thm_Heap.is_empty heap then heap - else if Thm.eq_thm (prf, #2 (Thm_Heap.min heap)) - then delete_all_min prf (Thm_Heap.delete_min heap) - else heap; - -(*Best-first search for a state that satisfies satp (incl initial state) - Function sizef estimates size of problem remaining (smaller means better). - tactic tac0 sets up the initial priority queue, while tac1 searches it. 
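(* Usage sketch, assuming a single-step tactic step_tac (hypothetical name):
   best-first search from the current proof state, preferring states with fewer
   subgoals and succeeding as soon as no premises remain.

     val search_tac = BEST_FIRST (has_fewer_prems 1, nprems_of) step_tac;

   DEPTH_FIRST and ITER_DEEPEN above accept the same kind of satisfaction
   predicate but explore the search space in a different order. *)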
*)
-fun THEN_BEST_FIRST tac0 (satp, sizef) tac1 =
-  let val tac = tracify trace_BEST_FIRST tac1
-      fun pairsize th = (sizef th, th);
-      fun bfs (news,nprf_heap) =
-            (case List.partition satp news of
-                ([],nonsats) => next(fold_rev Thm_Heap.insert (map pairsize nonsats) nprf_heap)
-              | (sats,_) => some_of_list sats)
-      and next nprf_heap =
-            if Thm_Heap.is_empty nprf_heap then NONE
-            else
-            let val (n,prf) = Thm_Heap.min nprf_heap
-            in if !trace_BEST_FIRST
-               then tracing("state size = " ^ string_of_int n)
-               else ();
-               bfs (Seq.list_of (tac prf),
-                    delete_all_min prf (Thm_Heap.delete_min nprf_heap))
-            end
-      fun btac st = bfs (Seq.list_of (tac0 st), Thm_Heap.empty)
-  in traced_tac btac end;
-
-(*Ordinary best-first search, with no initial tactic*)
-val BEST_FIRST = THEN_BEST_FIRST all_tac;
-
-(*Breadth-first search to satisfy satpred (including initial state)
-  SLOW -- SHOULD NOT USE APPEND!*)
-fun gen_BREADTH_FIRST message satpred (tac:tactic) =
-  let val tacf = Seq.list_of o tac;
-      fun bfs prfs =
-        (case List.partition satpred prfs of
-            ([],[]) => []
-          | ([],nonsats) =>
-              (message("breadth=" ^ string_of_int(length nonsats));
-               bfs (maps tacf nonsats))
-          | (sats,_) => sats)
-  in (fn st => Seq.of_list (bfs [st])) end;
-
-val BREADTH_FIRST = gen_BREADTH_FIRST tracing;
-val QUIET_BREADTH_FIRST = gen_BREADTH_FIRST (K ());
-
-
-(*  Author:     Norbert Voelker, FernUniversitaet Hagen
-    Remarks:    Implementation of A*-like proof procedure by modification
-                of the existing code for BEST_FIRST and best_tac so that the
-                current level of search is taken into account.
-*)
-
-(*Insertion into priority queue of states, marked with level *)
-fun insert_with_level (lnth: int*int*thm) [] = [lnth]
-  | insert_with_level (l,m,th) ((l',n,th') :: nths) =
-      if n<m then (l',n,th') :: insert_with_level (l,m,th) nths
-      else if n=m andalso Thm.eq_thm(th,th')
-              then (l',n,th')::nths
-              else (l,m,th)::(l',n,th')::nths;
-
-(*For creating output sequence*)
-fun some_of_list [] = NONE
-  | some_of_list (x::l) = SOME (x, Seq.make (fn () => some_of_list l));
-
-val trace_ASTAR = Unsynchronized.ref false;
-
-fun THEN_ASTAR tac0 (satp, costf) tac1 =
-  let val tf = tracify trace_ASTAR tac1;
-      fun bfs (news,nprfs,level) =
-        let fun cost thm = (level, costf level thm, thm)
-        in (case List.partition satp news of
-              ([],nonsats)
-                 => next (fold_rev (insert_with_level o cost) nonsats nprfs)
-            | (sats,_) => some_of_list sats)
-        end and
-      next [] = NONE
-        | next ((level,n,prf)::nprfs) =
-            (if !trace_ASTAR
-               then tracing("level = " ^ string_of_int level ^
-                            " cost = " ^ string_of_int n ^
-                            " queue length =" ^ string_of_int (length nprfs))
-              else ();
-             bfs (Seq.list_of (tf prf), nprfs,level+1))
-      fun tf st = bfs (Seq.list_of (tac0 st), [], 0)
-  in traced_tac tf end;
-
-(*Ordinary ASTAR, with no initial tactic*)
-val ASTAR = THEN_ASTAR all_tac;
-
-end;
-
-open Search;
diff --git a/core/Pure/sign.ML b/core/Pure/sign.ML
deleted file mode 100644
index f45b8484..00000000
--- a/core/Pure/sign.ML
+++ /dev/null
@@ -1,520 +0,0 @@
-(*  Title:      Pure/sign.ML
-    Author:     Lawrence C Paulson and Markus Wenzel
-
-Logical signature content: naming conventions, concrete syntax, type
-signature, polymorphic constants.
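A minimal usage sketch (the constant name "my_const" is a placeholder and
propT --> propT an arbitrary example type) of extending a theory through this
interface:

  val (t, thy') =
    Sign.declare_const_global ((Binding.name "my_const", propT --> propT), NoSyn) thy;
  val c_name = Sign.full_bname thy' "my_const";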
-*) - -signature SIGN = -sig - val change_begin: theory -> theory - val change_end: theory -> theory - val change_end_local: Proof.context -> Proof.context - val change_check: theory -> theory - val syn_of: theory -> Syntax.syntax - val tsig_of: theory -> Type.tsig - val classes_of: theory -> Sorts.algebra - val all_classes: theory -> class list - val super_classes: theory -> class -> class list - val minimize_sort: theory -> sort -> sort - val complete_sort: theory -> sort -> sort - val set_defsort: sort -> theory -> theory - val defaultS: theory -> sort - val subsort: theory -> sort * sort -> bool - val of_sort: theory -> typ * sort -> bool - val inter_sort: theory -> sort * sort -> sort - val witness_sorts: theory -> (typ * sort) list -> sort list -> (typ * sort) list - val is_logtype: theory -> string -> bool - val typ_instance: theory -> typ * typ -> bool - val typ_equiv: theory -> typ * typ -> bool - val typ_match: theory -> typ * typ -> Type.tyenv -> Type.tyenv - val typ_unify: theory -> typ * typ -> Type.tyenv * int -> Type.tyenv * int - val consts_of: theory -> Consts.T - val the_const_constraint: theory -> string -> typ - val const_type: theory -> string -> typ option - val the_const_type: theory -> string -> typ - val declared_tyname: theory -> string -> bool - val declared_const: theory -> string -> bool - val naming_of: theory -> Name_Space.naming - val map_naming: (Name_Space.naming -> Name_Space.naming) -> theory -> theory - val restore_naming: theory -> theory -> theory - val inherit_naming: theory -> Proof.context -> Context.generic - val full_name: theory -> binding -> string - val full_name_path: theory -> string -> binding -> string - val full_bname: theory -> bstring -> string - val full_bname_path: theory -> string -> bstring -> string - val const_monomorphic: theory -> string -> bool - val const_typargs: theory -> string * typ -> typ list - val const_instance: theory -> string * typ list -> typ - val mk_const: theory -> string * typ list -> term - val class_space: theory -> Name_Space.T - val type_space: theory -> Name_Space.T - val const_space: theory -> Name_Space.T - val intern_class: theory -> xstring -> string - val intern_type: theory -> xstring -> string - val intern_const: theory -> xstring -> string - val class_alias: binding -> class -> theory -> theory - val type_alias: binding -> string -> theory -> theory - val const_alias: binding -> string -> theory -> theory - val arity_number: theory -> string -> int - val arity_sorts: theory -> string -> sort -> sort list - val certify_class: theory -> class -> class - val certify_sort: theory -> sort -> sort - val certify_typ: theory -> typ -> typ - val certify_typ_mode: Type.mode -> theory -> typ -> typ - val certify': bool -> Context.pretty -> bool -> Consts.T -> theory -> term -> term * typ * int - val certify_term: theory -> term -> term * typ * int - val cert_term: theory -> term -> term - val cert_prop: theory -> term -> term - val no_frees: Proof.context -> term -> term - val no_vars: Proof.context -> term -> term - val add_type: Proof.context -> binding * int * mixfix -> theory -> theory - val add_types_global: (binding * int * mixfix) list -> theory -> theory - val add_nonterminals: Proof.context -> binding list -> theory -> theory - val add_nonterminals_global: binding list -> theory -> theory - val add_type_abbrev: Proof.context -> binding * string list * typ -> theory -> theory - val add_syntax: Syntax.mode -> (string * typ * mixfix) list -> theory -> theory - val add_syntax_cmd: Syntax.mode -> (string 
* string * mixfix) list -> theory -> theory - val del_syntax: Syntax.mode -> (string * typ * mixfix) list -> theory -> theory - val del_syntax_cmd: Syntax.mode -> (string * string * mixfix) list -> theory -> theory - val type_notation: bool -> Syntax.mode -> (typ * mixfix) list -> theory -> theory - val notation: bool -> Syntax.mode -> (term * mixfix) list -> theory -> theory - val declare_const: Proof.context -> (binding * typ) * mixfix -> theory -> term * theory - val declare_const_global: (binding * typ) * mixfix -> theory -> term * theory - val add_consts: (binding * typ * mixfix) list -> theory -> theory - val add_consts_cmd: (binding * string * mixfix) list -> theory -> theory - val add_abbrev: string -> binding * term -> theory -> (term * term) * theory - val revert_abbrev: string -> string -> theory -> theory - val add_const_constraint: string * typ option -> theory -> theory - val primitive_class: binding * class list -> theory -> theory - val primitive_classrel: class * class -> theory -> theory - val primitive_arity: arity -> theory -> theory - val parse_ast_translation: - (string * (Proof.context -> Ast.ast list -> Ast.ast)) list -> theory -> theory - val parse_translation: - (string * (Proof.context -> term list -> term)) list -> theory -> theory - val print_translation: - (string * (Proof.context -> term list -> term)) list -> theory -> theory - val typed_print_translation: - (string * (Proof.context -> typ -> term list -> term)) list -> theory -> theory - val print_ast_translation: - (string * (Proof.context -> Ast.ast list -> Ast.ast)) list -> theory -> theory - val add_trrules: Ast.ast Syntax.trrule list -> theory -> theory - val del_trrules: Ast.ast Syntax.trrule list -> theory -> theory - val new_group: theory -> theory - val reset_group: theory -> theory - val add_path: string -> theory -> theory - val root_path: theory -> theory - val parent_path: theory -> theory - val mandatory_path: string -> theory -> theory - val qualified_path: bool -> binding -> theory -> theory - val local_path: theory -> theory - val hide_class: bool -> string -> theory -> theory - val hide_type: bool -> string -> theory -> theory - val hide_const: bool -> string -> theory -> theory -end - -structure Sign: SIGN = -struct - -(** datatype sign **) - -datatype sign = Sign of - {syn: Syntax.syntax, (*concrete syntax for terms, types, sorts*) - tsig: Type.tsig, (*order-sorted signature of types*) - consts: Consts.T}; (*polymorphic constants*) - -fun make_sign (syn, tsig, consts) = Sign {syn = syn, tsig = tsig, consts = consts}; - -structure Data = Theory_Data_PP -( - type T = sign; - fun extend (Sign {syn, tsig, consts, ...}) = make_sign (syn, tsig, consts); - - val empty = make_sign (Syntax.empty_syntax, Type.empty_tsig, Consts.empty); - - fun merge pp (sign1, sign2) = - let - val Sign {syn = syn1, tsig = tsig1, consts = consts1} = sign1; - val Sign {syn = syn2, tsig = tsig2, consts = consts2} = sign2; - - val syn = Syntax.merge_syntax (syn1, syn2); - val tsig = Type.merge_tsig pp (tsig1, tsig2); - val consts = Consts.merge (consts1, consts2); - in make_sign (syn, tsig, consts) end; -); - -fun rep_sg thy = Data.get thy |> (fn Sign args => args); - -fun map_sign f = Data.map (fn Sign {syn, tsig, consts} => make_sign (f (syn, tsig, consts))); - -fun map_syn f = map_sign (fn (syn, tsig, consts) => (f syn, tsig, consts)); -fun map_tsig f = map_sign (fn (syn, tsig, consts) => (syn, f tsig, consts)); -fun map_consts f = map_sign (fn (syn, tsig, consts) => (syn, tsig, f consts)); - - -(* linear change 
discipline *) - -fun change_base begin = map_sign (fn (syn, tsig, consts) => - (syn, Type.change_base begin tsig, Consts.change_base begin consts)); - -val change_begin = change_base true; -val change_end = change_base false; - -fun change_end_local ctxt = - Context.raw_transfer (change_end (Proof_Context.theory_of ctxt)) ctxt; - -fun change_check thy = - if can change_end thy - then raise Fail "Unfinished linear change of theory content" else thy; - - -(* syntax *) - -val syn_of = #syn o rep_sg; - - -(* type signature *) - -val tsig_of = #tsig o rep_sg; - -val classes_of = #2 o #classes o Type.rep_tsig o tsig_of; -val all_classes = Sorts.all_classes o classes_of; -val super_classes = Sorts.super_classes o classes_of; -val minimize_sort = Sorts.minimize_sort o classes_of; -val complete_sort = Sorts.complete_sort o classes_of; - -val set_defsort = map_tsig o Type.set_defsort; -val defaultS = Type.defaultS o tsig_of; -val subsort = Type.subsort o tsig_of; -val of_sort = Type.of_sort o tsig_of; -val inter_sort = Type.inter_sort o tsig_of; -val witness_sorts = Type.witness_sorts o tsig_of; -val is_logtype = member (op =) o Type.logical_types o tsig_of; - -val typ_instance = Type.typ_instance o tsig_of; -fun typ_equiv thy (T, U) = typ_instance thy (T, U) andalso typ_instance thy (U, T); -val typ_match = Type.typ_match o tsig_of; -val typ_unify = Type.unify o tsig_of; - - -(* polymorphic constants *) - -val consts_of = #consts o rep_sg; -val the_const_constraint = Consts.the_constraint o consts_of; -val the_const_type = #2 oo (Consts.the_const o consts_of); -val const_type = try o the_const_type; -val const_monomorphic = Consts.is_monomorphic o consts_of; -val const_typargs = Consts.typargs o consts_of; -val const_instance = Consts.instance o consts_of; - -fun mk_const thy (c, Ts) = Const (c, const_instance thy (c, Ts)); - -fun declared_tyname ctxt c = can (Type.the_decl (tsig_of ctxt)) (c, Position.none); -val declared_const = can o the_const_constraint; - - -(* naming *) - -val naming_of = Name_Space.naming_of o Context.Theory; -val map_naming = Context.theory_map o Name_Space.map_naming; -val restore_naming = map_naming o K o naming_of; -fun inherit_naming thy = Name_Space.map_naming (K (naming_of thy)) o Context.Proof; - -val full_name = Name_Space.full_name o naming_of; -fun full_name_path thy path = Name_Space.full_name (Name_Space.add_path path (naming_of thy)); - -fun full_bname thy = Name_Space.full_name (naming_of thy) o Binding.name; -fun full_bname_path thy path = full_name_path thy path o Binding.name; - - - -(** name spaces **) - -val class_space = Type.class_space o tsig_of; -val type_space = Type.type_space o tsig_of; -val const_space = Consts.space_of o consts_of; - -val intern_class = Name_Space.intern o class_space; -val intern_type = Name_Space.intern o type_space; -val intern_const = Name_Space.intern o const_space; - -fun class_alias b c thy = map_tsig (Type.class_alias (naming_of thy) b c) thy; -fun type_alias b c thy = map_tsig (Type.type_alias (naming_of thy) b c) thy; -fun const_alias b c thy = map_consts (Consts.alias (naming_of thy) b c) thy; - - - -(** certify entities **) (*exception TYPE*) - -(* certify wrt. 
type signature *) - -val arity_number = Type.arity_number o tsig_of; -fun arity_sorts thy = Type.arity_sorts (Context.pretty_global thy) (tsig_of thy); - -val certify_class = Type.cert_class o tsig_of; -val certify_sort = Type.cert_sort o tsig_of; -val certify_typ = Type.cert_typ o tsig_of; -fun certify_typ_mode mode = Type.cert_typ_mode mode o tsig_of; - - -(* certify term/prop *) - -local - -fun type_check pp tm = - let - fun err_appl bs t T u U = - let - val xs = map Free bs; (*we do not rename here*) - val t' = subst_bounds (xs, t); - val u' = subst_bounds (xs, u); - val msg = Type.appl_error (Syntax.init_pretty pp) t' T u' U; - in raise TYPE (msg, [T, U], [t', u']) end; - - fun typ_of (_, Const (_, T)) = T - | typ_of (_, Free (_, T)) = T - | typ_of (_, Var (_, T)) = T - | typ_of (bs, Bound i) = snd (nth bs i handle General.Subscript => - raise TYPE ("Loose bound variable: B." ^ string_of_int i, [], [Bound i])) - | typ_of (bs, Abs (x, T, body)) = T --> typ_of ((x, T) :: bs, body) - | typ_of (bs, t $ u) = - let val T = typ_of (bs, t) and U = typ_of (bs, u) in - (case T of - Type ("fun", [T1, T2]) => - if T1 = U then T2 else err_appl bs t T u U - | _ => err_appl bs t T u U) - end; - in typ_of ([], tm) end; - -fun err msg = raise TYPE (msg, [], []); - -fun check_vars (t $ u) = (check_vars t; check_vars u) - | check_vars (Abs (_, _, t)) = check_vars t - | check_vars (Free (x, _)) = - if Long_Name.is_qualified x then err ("Malformed variable: " ^ quote x) else () - | check_vars (Var (xi as (_, i), _)) = - if i < 0 then err ("Malformed variable: " ^ quote (Term.string_of_vname xi)) else () - | check_vars _ = (); - -in - -fun certify' prop pp do_expand consts thy tm = - let - val _ = check_vars tm; - val tm' = Term.map_types (certify_typ thy) tm; - val T = type_check pp tm'; - val _ = if prop andalso T <> propT then err "Term not of type prop" else (); - val tm'' = Consts.certify pp (tsig_of thy) do_expand consts tm'; - in (if tm = tm'' then tm else tm'', T, Term.maxidx_of_term tm'') end; - -fun certify_term thy = certify' false (Context.pretty_global thy) true (consts_of thy) thy; -fun cert_term_abbrev thy = - #1 o certify' false (Context.pretty_global thy) false (consts_of thy) thy; -val cert_term = #1 oo certify_term; -fun cert_prop thy = #1 o certify' true (Context.pretty_global thy) true (consts_of thy) thy; - -end; - - -(* specifications *) - -fun no_variables kind add addT mk mkT ctxt tm = - (case (add tm [], addT tm []) of - ([], []) => tm - | (frees, tfrees) => error (Pretty.string_of (Pretty.block - (Pretty.str ("Illegal " ^ kind ^ " variable(s) in term:") :: Pretty.brk 1 :: - Pretty.commas - (map (Syntax.pretty_term ctxt o mk) frees @ map (Syntax.pretty_typ ctxt o mkT) tfrees))))); - -val no_frees = no_variables "free" Term.add_frees Term.add_tfrees Free TFree; -val no_vars = no_variables "schematic" Term.add_vars Term.add_tvars Var TVar; - - - -(** signature extension functions **) (*exception ERROR/TYPE*) - -(* add type constructors *) - -fun add_type ctxt (b, n, mx) thy = thy |> map_sign (fn (syn, tsig, consts) => - let - val type_syntax = (Lexicon.mark_type (full_name thy b), Mixfix.make_type n, mx); - val syn' = Syntax.update_type_gram true Syntax.mode_default [type_syntax] syn; - val tsig' = Type.add_type (inherit_naming thy ctxt) (b, n) tsig; - in (syn', tsig', consts) end); - -fun add_types_global types thy = - fold (add_type (Syntax.init_pretty_global thy)) types thy; - - -(* add nonterminals *) - -fun add_nonterminals ctxt ns thy = thy |> map_sign (fn (syn, tsig, consts) 
=> - (syn, fold (Type.add_nonterminal (inherit_naming thy ctxt)) ns tsig, consts)); - -fun add_nonterminals_global ns thy = - add_nonterminals (Syntax.init_pretty_global thy) ns thy; - - -(* add type abbreviations *) - -fun add_type_abbrev ctxt abbr thy = thy |> map_sign (fn (syn, tsig, consts) => - (syn, Type.add_abbrev (inherit_naming thy ctxt) abbr tsig, consts)); - - -(* modify syntax *) - -fun gen_syntax change_gram parse_typ mode args thy = - let - val ctxt = Type.set_mode Type.mode_syntax (Proof_Context.init_global thy); - fun prep (c, T, mx) = (c, certify_typ_mode Type.mode_syntax thy (parse_typ ctxt T), mx) - handle ERROR msg => cat_error msg ("in syntax declaration " ^ quote c); - in thy |> map_syn (change_gram (is_logtype thy) mode (map prep args)) end; - -fun gen_add_syntax x = gen_syntax (Syntax.update_const_gram true) x; - -val add_syntax = gen_add_syntax (K I); -val add_syntax_cmd = gen_add_syntax Syntax.read_typ; -val del_syntax = gen_syntax (Syntax.update_const_gram false) (K I); -val del_syntax_cmd = gen_syntax (Syntax.update_const_gram false) Syntax.read_typ; - -fun type_notation add mode args = - let - fun type_syntax (Type (c, args), mx) = - SOME (Lexicon.mark_type c, Mixfix.make_type (length args), mx) - | type_syntax _ = NONE; - in map_syn (Syntax.update_type_gram add mode (map_filter type_syntax args)) end; - -fun notation add mode args thy = - let - fun const_syntax (Const (c, _), mx) = - (case try (Consts.type_scheme (consts_of thy)) c of - SOME T => SOME (Lexicon.mark_const c, T, mx) - | NONE => NONE) - | const_syntax _ = NONE; - in gen_syntax (Syntax.update_const_gram add) (K I) mode (map_filter const_syntax args) thy end; - - -(* add constants *) - -local - -fun gen_add_consts prep_typ ctxt raw_args thy = - let - val prepT = Type.no_tvars o Term.no_dummyT o certify_typ thy o prep_typ ctxt; - fun prep (b, raw_T, mx) = - let - val c = full_name thy b; - val T = (prepT raw_T handle TYPE (msg, _, _) => error msg) handle ERROR msg => - cat_error msg ("in declaration of constant " ^ Binding.print b); - val T' = Logic.varifyT_global T; - in ((b, T'), (Lexicon.mark_const c, T', mx), Const (c, T)) end; - val args = map prep raw_args; - in - thy - |> map_consts (fold (Consts.declare (inherit_naming thy ctxt) o #1) args) - |> add_syntax Syntax.mode_default (map #2 args) - |> pair (map #3 args) - end; - -in - -fun add_consts args thy = - #2 (gen_add_consts (K I) (Proof_Context.init_global thy) args thy); - -fun add_consts_cmd args thy = - #2 (gen_add_consts Syntax.read_typ (Proof_Context.init_global thy) args thy); - -fun declare_const ctxt ((b, T), mx) = yield_singleton (gen_add_consts (K I) ctxt) (b, T, mx); -fun declare_const_global arg thy = declare_const (Proof_Context.init_global thy) arg thy; - -end; - - -(* abbreviations *) - -fun add_abbrev mode (b, raw_t) thy = (* FIXME proper ctxt (?) 
*) - let - val ctxt = Syntax.init_pretty_global thy; - val prep_tm = no_frees ctxt o Term.no_dummy_patterns o cert_term_abbrev thy; - val t = (prep_tm raw_t handle TYPE (msg, _, _) => error msg | TERM (msg, _) => error msg) - handle ERROR msg => cat_error msg ("in constant abbreviation " ^ Binding.print b); - val (res, consts') = consts_of thy - |> Consts.abbreviate (inherit_naming thy ctxt) (tsig_of thy) mode (b, t); - in (res, thy |> map_consts (K consts')) end; - -fun revert_abbrev mode c = map_consts (Consts.revert_abbrev mode c); - - -(* add constraints *) - -fun add_const_constraint (c, opt_T) thy = - let - fun prepT raw_T = - let val T = Logic.varifyT_global (Type.no_tvars (Term.no_dummyT (certify_typ thy raw_T))) - in cert_term thy (Const (c, T)); T end - handle TYPE (msg, _, _) => error msg; - in thy |> map_consts (Consts.constrain (c, Option.map prepT opt_T)) end; - - -(* primitive classes and arities *) - -fun primitive_class (bclass, classes) thy = - thy - |> map_sign (fn (syn, tsig, consts) => - let val tsig' = Type.add_class (Context.Theory thy) (bclass, classes) tsig; - in (syn, tsig', consts) end) - |> add_consts [(Binding.map_name Logic.const_of_class bclass, Term.a_itselfT --> propT, NoSyn)]; - -fun primitive_classrel arg thy = - thy |> map_tsig (Type.add_classrel (Context.pretty_global thy) arg); - -fun primitive_arity arg thy = - thy |> map_tsig (Type.add_arity (Context.pretty_global thy) arg); - - -(* add translation functions *) - -local - -fun mk trs = map Syntax_Ext.mk_trfun trs; - -in - -fun parse_ast_translation atrs = map_syn (Syntax.update_trfuns (mk atrs, [], [], [])); -fun parse_translation trs = map_syn (Syntax.update_trfuns ([], mk trs, [], [])); -fun print_translation tr's = - map_syn (Syntax.update_trfuns ([], [], mk (map (apsnd Syntax_Trans.non_typed_tr') tr's), [])); -fun typed_print_translation tr's = map_syn (Syntax.update_trfuns ([], [], mk tr's, [])); -fun print_ast_translation atr's = map_syn (Syntax.update_trfuns ([], [], [], mk atr's)); - -end; - - -(* translation rules *) - -val add_trrules = map_syn o Syntax.update_trrules; -val del_trrules = map_syn o Syntax.remove_trrules; - - -(* naming *) - -val new_group = map_naming Name_Space.new_group; -val reset_group = map_naming Name_Space.reset_group; - -val add_path = map_naming o Name_Space.add_path; -val root_path = map_naming Name_Space.root_path; -val parent_path = map_naming Name_Space.parent_path; -val mandatory_path = map_naming o Name_Space.mandatory_path; -val qualified_path = map_naming oo Name_Space.qualified_path; - -fun local_path thy = thy |> root_path |> add_path (Context.theory_name thy); - - -(* hide names *) - -val hide_class = map_tsig oo Type.hide_class; -val hide_type = map_tsig oo Type.hide_type; -val hide_const = map_consts oo Consts.hide; - -end; diff --git a/core/Pure/simplifier.ML b/core/Pure/simplifier.ML deleted file mode 100644 index f2cc2e7d..00000000 --- a/core/Pure/simplifier.ML +++ /dev/null @@ -1,409 +0,0 @@ -(* Title: Pure/simplifier.ML - Author: Tobias Nipkow and Markus Wenzel, TU Muenchen - -Generic simplifier, suitable for most logics (see also -raw_simplifier.ML for the actual meta-level rewriting engine). 
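A minimal usage sketch (assuming a proof context ctxt and a caller-supplied
rule list extra_rules): the simpset lives in the proof context, so extra
rewrite rules are added locally before invoking a simplification tactic:

  fun my_simp_tac ctxt extra_rules =
    asm_full_simp_tac (ctxt addsimps extra_rules);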
-*) - -signature BASIC_SIMPLIFIER = -sig - include BASIC_RAW_SIMPLIFIER - val simp_tac: Proof.context -> int -> tactic - val asm_simp_tac: Proof.context -> int -> tactic - val full_simp_tac: Proof.context -> int -> tactic - val asm_lr_simp_tac: Proof.context -> int -> tactic - val asm_full_simp_tac: Proof.context -> int -> tactic - val safe_simp_tac: Proof.context -> int -> tactic - val safe_asm_simp_tac: Proof.context -> int -> tactic - val safe_full_simp_tac: Proof.context -> int -> tactic - val safe_asm_lr_simp_tac: Proof.context -> int -> tactic - val safe_asm_full_simp_tac: Proof.context -> int -> tactic - val simplify: Proof.context -> thm -> thm - val asm_simplify: Proof.context -> thm -> thm - val full_simplify: Proof.context -> thm -> thm - val asm_lr_simplify: Proof.context -> thm -> thm - val asm_full_simplify: Proof.context -> thm -> thm -end; - -signature SIMPLIFIER = -sig - include BASIC_SIMPLIFIER - val map_ss: (Proof.context -> Proof.context) -> Context.generic -> Context.generic - val attrib: (thm -> Proof.context -> Proof.context) -> attribute - val simp_add: attribute - val simp_del: attribute - val cong_add: attribute - val cong_del: attribute - val check_simproc: Proof.context -> xstring * Position.T -> string - val the_simproc: Proof.context -> string -> simproc - val def_simproc: {name: binding, lhss: term list, - proc: morphism -> Proof.context -> cterm -> thm option, identifier: thm list} -> - local_theory -> local_theory - val def_simproc_cmd: {name: binding, lhss: string list, - proc: morphism -> Proof.context -> cterm -> thm option, identifier: thm list} -> - local_theory -> local_theory - val pretty_simpset: Proof.context -> Pretty.T - val default_mk_sym: Proof.context -> thm -> thm option - val prems_of: Proof.context -> thm list - val add_simp: thm -> Proof.context -> Proof.context - val del_simp: thm -> Proof.context -> Proof.context - val add_eqcong: thm -> Proof.context -> Proof.context - val del_eqcong: thm -> Proof.context -> Proof.context - val add_cong: thm -> Proof.context -> Proof.context - val del_cong: thm -> Proof.context -> Proof.context - val add_prems: thm list -> Proof.context -> Proof.context - val mksimps: Proof.context -> thm -> thm list - val set_mksimps: (Proof.context -> thm -> thm list) -> Proof.context -> Proof.context - val set_mkcong: (Proof.context -> thm -> thm) -> Proof.context -> Proof.context - val set_mksym: (Proof.context -> thm -> thm option) -> Proof.context -> Proof.context - val set_mkeqTrue: (Proof.context -> thm -> thm option) -> Proof.context -> Proof.context - val set_termless: (term * term -> bool) -> Proof.context -> Proof.context - val set_subgoaler: (Proof.context -> int -> tactic) -> Proof.context -> Proof.context - type trace_ops - val set_trace_ops: trace_ops -> theory -> theory - val simproc_global_i: theory -> string -> term list -> - (Proof.context -> term -> thm option) -> simproc - val simproc_global: theory -> string -> string list -> - (Proof.context -> term -> thm option) -> simproc - val rewrite: Proof.context -> conv - val asm_rewrite: Proof.context -> conv - val full_rewrite: Proof.context -> conv - val asm_lr_rewrite: Proof.context -> conv - val asm_full_rewrite: Proof.context -> conv - val cong_modifiers: Method.modifier parser list - val simp_modifiers': Method.modifier parser list - val simp_modifiers: Method.modifier parser list - val method_setup: Method.modifier parser list -> theory -> theory - val easy_setup: thm -> thm list -> theory -> theory -end; - -structure Simplifier: SIMPLIFIER = 
-struct - -open Raw_Simplifier; - - -(** declarations **) - -(* attributes *) - -fun attrib f = Thm.declaration_attribute (map_ss o f); - -val simp_add = attrib add_simp; -val simp_del = attrib del_simp; -val cong_add = attrib add_cong; -val cong_del = attrib del_cong; - - -(** named simprocs **) - -structure Simprocs = Generic_Data -( - type T = simproc Name_Space.table; - val empty : T = Name_Space.empty_table "simproc"; - val extend = I; - fun merge data : T = Name_Space.merge_tables data; -); - - -(* get simprocs *) - -val get_simprocs = Simprocs.get o Context.Proof; - -fun check_simproc ctxt = Name_Space.check (Context.Proof ctxt) (get_simprocs ctxt) #> #1; -val the_simproc = Name_Space.get o get_simprocs; - -val _ = Theory.setup - (ML_Antiquotation.value @{binding simproc} - (Args.context -- Scan.lift (Parse.position Args.name) - >> (fn (ctxt, name) => - "Simplifier.the_simproc ML_context " ^ ML_Syntax.print_string (check_simproc ctxt name)))); - - -(* define simprocs *) - -local - -fun gen_simproc prep {name = b, lhss, proc, identifier} lthy = - let - val simproc = make_simproc - {name = Local_Theory.full_name lthy b, - lhss = - let - val lhss' = prep lthy lhss; - val ctxt' = fold Variable.auto_fixes lhss' lthy; - in Variable.export_terms ctxt' lthy lhss' end - |> map (Thm.cterm_of (Proof_Context.theory_of lthy)), - proc = proc, - identifier = identifier}; - in - lthy |> Local_Theory.declaration {syntax = false, pervasive = true} (fn phi => fn context => - let - val b' = Morphism.binding phi b; - val simproc' = transform_simproc phi simproc; - in - context - |> Simprocs.map (#2 o Name_Space.define context true (b', simproc')) - |> map_ss (fn ctxt => ctxt addsimprocs [simproc']) - end) - end; - -in - -val def_simproc = gen_simproc Syntax.check_terms; -val def_simproc_cmd = gen_simproc Syntax.read_terms; - -end; - - - -(** pretty_simpset **) - -fun pretty_simpset ctxt = - let - val pretty_term = Syntax.pretty_term ctxt; - val pretty_thm = Display.pretty_thm ctxt; - val pretty_thm_item = Display.pretty_thm_item ctxt; - - fun pretty_simproc (name, lhss) = - Pretty.block - (Pretty.mark_str name :: Pretty.str ":" :: Pretty.fbrk :: - Pretty.fbreaks (map (Pretty.item o single o pretty_term o Thm.term_of) lhss)); - - fun pretty_cong_name (const, name) = - pretty_term ((if const then Const else Free) (name, dummyT)); - fun pretty_cong (name, thm) = - Pretty.block [pretty_cong_name name, Pretty.str ":", Pretty.brk 1, pretty_thm thm]; - - val {simps, procs, congs, loopers, unsafe_solvers, safe_solvers, ...} = - dest_ss (simpset_of ctxt); - val simprocs = - Name_Space.markup_entries ctxt (Name_Space.space_of_table (get_simprocs ctxt)) procs; - in - [Pretty.big_list "simplification rules:" (map (pretty_thm_item o #2) simps), - Pretty.big_list "simplification procedures:" (map pretty_simproc simprocs), - Pretty.big_list "congruences:" (map pretty_cong congs), - Pretty.strs ("loopers:" :: map quote loopers), - Pretty.strs ("unsafe solvers:" :: map quote unsafe_solvers), - Pretty.strs ("safe solvers:" :: map quote safe_solvers)] - |> Pretty.chunks - end; - - - -(** simplification tactics and rules **) - -fun solve_all_tac solvers ctxt = - let - val {subgoal_tac, ...} = Raw_Simplifier.internal_ss (simpset_of ctxt); - val solve_tac = subgoal_tac (Raw_Simplifier.set_solvers solvers ctxt) THEN_ALL_NEW (K no_tac); - in DEPTH_SOLVE (solve_tac 1) end; - -(*NOTE: may instantiate unknowns that appear also in other subgoals*) -fun generic_simp_tac safe mode ctxt = - let - val {loop_tacs, solvers = 
(unsafe_solvers, solvers), ...} = - Raw_Simplifier.internal_ss (simpset_of ctxt); - val loop_tac = FIRST' (map (fn (_, tac) => tac ctxt) (rev loop_tacs)); - val solve_tac = FIRST' (map (Raw_Simplifier.solver ctxt) - (rev (if safe then solvers else unsafe_solvers))); - - fun simp_loop_tac i = - Raw_Simplifier.generic_rewrite_goal_tac mode (solve_all_tac unsafe_solvers) ctxt i THEN - (solve_tac i ORELSE TRY ((loop_tac THEN_ALL_NEW simp_loop_tac) i)); - in PREFER_GOAL (simp_loop_tac 1) end; - -local - -fun simp rew mode ctxt thm = - let - val {solvers = (unsafe_solvers, _), ...} = Raw_Simplifier.internal_ss (simpset_of ctxt); - val tacf = solve_all_tac (rev unsafe_solvers); - fun prover s th = Option.map #1 (Seq.pull (tacf s th)); - in rew mode prover ctxt thm end; - -in - -val simp_thm = simp Raw_Simplifier.rewrite_thm; -val simp_cterm = simp Raw_Simplifier.rewrite_cterm; - -end; - - -(* tactics *) - -val simp_tac = generic_simp_tac false (false, false, false); -val asm_simp_tac = generic_simp_tac false (false, true, false); -val full_simp_tac = generic_simp_tac false (true, false, false); -val asm_lr_simp_tac = generic_simp_tac false (true, true, false); -val asm_full_simp_tac = generic_simp_tac false (true, true, true); - -(*not totally safe: may instantiate unknowns that appear also in other subgoals*) -val safe_simp_tac = generic_simp_tac true (false, false, false); -val safe_asm_simp_tac = generic_simp_tac true (false, true, false); -val safe_full_simp_tac = generic_simp_tac true (true, false, false); -val safe_asm_lr_simp_tac = generic_simp_tac true (true, true, false); -val safe_asm_full_simp_tac = generic_simp_tac true (true, true, true); - - -(* conversions *) - -val simplify = simp_thm (false, false, false); -val asm_simplify = simp_thm (false, true, false); -val full_simplify = simp_thm (true, false, false); -val asm_lr_simplify = simp_thm (true, true, false); -val asm_full_simplify = simp_thm (true, true, true); - -val rewrite = simp_cterm (false, false, false); -val asm_rewrite = simp_cterm (false, true, false); -val full_rewrite = simp_cterm (true, false, false); -val asm_lr_rewrite = simp_cterm (true, true, false); -val asm_full_rewrite = simp_cterm (true, true, true); - - - -(** concrete syntax of attributes **) - -(* add / del *) - -val simpN = "simp"; -val congN = "cong"; -val onlyN = "only"; -val no_asmN = "no_asm"; -val no_asm_useN = "no_asm_use"; -val no_asm_simpN = "no_asm_simp"; -val asm_lrN = "asm_lr"; - - -(* simprocs *) - -local - -val add_del = - (Args.del -- Args.colon >> K (op delsimprocs) || - Scan.option (Args.add -- Args.colon) >> K (op addsimprocs)) - >> (fn f => fn simproc => fn phi => Thm.declaration_attribute - (K (Raw_Simplifier.map_ss (fn ctxt => f (ctxt, [transform_simproc phi simproc]))))); - -in - -val simproc_att = - (Args.context -- Scan.lift add_del) :|-- (fn (ctxt, decl) => - Scan.repeat1 (Scan.lift (Args.named_attribute (decl o the_simproc ctxt o check_simproc ctxt)))) - >> (fn atts => Thm.declaration_attribute (fn th => - fold (fn att => Thm.attribute_declaration (Morphism.form att) th) atts)); - -end; - - -(* conversions *) - -local - -fun conv_mode x = - ((Args.parens (Args.$$$ no_asmN) >> K simplify || - Args.parens (Args.$$$ no_asm_simpN) >> K asm_simplify || - Args.parens (Args.$$$ no_asm_useN) >> K full_simplify || - Scan.succeed asm_full_simplify) |> Scan.lift) x; - -in - -val simplified = conv_mode -- Attrib.thms >> - (fn (f, ths) => Thm.rule_attribute (fn context => - f ((if null ths then I else Raw_Simplifier.clear_simpset) - 
(Context.proof_of context) addsimps ths))); - -end; - - -(* setup attributes *) - -val _ = Theory.setup - (Attrib.setup @{binding simp} (Attrib.add_del simp_add simp_del) - "declaration of Simplifier rewrite rule" #> - Attrib.setup @{binding cong} (Attrib.add_del cong_add cong_del) - "declaration of Simplifier congruence rule" #> - Attrib.setup @{binding simproc} simproc_att - "declaration of simplification procedures" #> - Attrib.setup @{binding simplified} simplified "simplified rule"); - - - -(** method syntax **) - -val cong_modifiers = - [Args.$$$ congN -- Args.colon >> K ((I, cong_add): Method.modifier), - Args.$$$ congN -- Args.add -- Args.colon >> K (I, cong_add), - Args.$$$ congN -- Args.del -- Args.colon >> K (I, cong_del)]; - -val simp_modifiers = - [Args.$$$ simpN -- Args.colon >> K (I, simp_add), - Args.$$$ simpN -- Args.add -- Args.colon >> K (I, simp_add), - Args.$$$ simpN -- Args.del -- Args.colon >> K (I, simp_del), - Args.$$$ simpN -- Args.$$$ onlyN -- Args.colon >> K (Raw_Simplifier.clear_simpset, simp_add)] - @ cong_modifiers; - -val simp_modifiers' = - [Args.add -- Args.colon >> K (I, simp_add), - Args.del -- Args.colon >> K (I, simp_del), - Args.$$$ onlyN -- Args.colon >> K (Raw_Simplifier.clear_simpset, simp_add)] - @ cong_modifiers; - -val simp_options = - (Args.parens (Args.$$$ no_asmN) >> K simp_tac || - Args.parens (Args.$$$ no_asm_simpN) >> K asm_simp_tac || - Args.parens (Args.$$$ no_asm_useN) >> K full_simp_tac || - Args.parens (Args.$$$ asm_lrN) >> K asm_lr_simp_tac || - Scan.succeed asm_full_simp_tac); - -fun simp_method more_mods meth = - Scan.lift simp_options --| - Method.sections (more_mods @ simp_modifiers') >> - (fn tac => fn ctxt => METHOD (fn facts => meth ctxt tac facts)); - - - -(** setup **) - -fun method_setup more_mods = - Method.setup @{binding simp} - (simp_method more_mods (fn ctxt => fn tac => fn facts => - HEADGOAL (Method.insert_tac facts THEN' - (CHANGED_PROP oo tac) ctxt))) - "simplification" #> - Method.setup @{binding simp_all} - (simp_method more_mods (fn ctxt => fn tac => fn facts => - ALLGOALS (Method.insert_tac facts) THEN - (CHANGED_PROP o PARALLEL_GOALS o ALLGOALS o tac) ctxt)) - "simplification (all goals)"; - -fun easy_setup reflect trivs = method_setup [] #> Context.theory_map (map_ss (fn ctxt0 => - let - val trivialities = Drule.reflexive_thm :: trivs; - - fun unsafe_solver_tac ctxt = - FIRST' [resolve_tac (trivialities @ Raw_Simplifier.prems_of ctxt), assume_tac]; - val unsafe_solver = mk_solver "easy unsafe" unsafe_solver_tac; - - (*no premature instantiation of variables during simplification*) - fun safe_solver_tac ctxt = - FIRST' [match_tac (trivialities @ Raw_Simplifier.prems_of ctxt), eq_assume_tac]; - val safe_solver = mk_solver "easy safe" safe_solver_tac; - - fun mk_eq thm = - if can Logic.dest_equals (Thm.concl_of thm) then [thm] - else [thm RS reflect] handle THM _ => []; - - fun mksimps thm = mk_eq (Thm.forall_elim_vars (Thm.maxidx_of thm + 1) thm); - in - empty_simpset ctxt0 - setSSolver safe_solver - setSolver unsafe_solver - |> set_subgoaler asm_simp_tac - |> set_mksimps (K mksimps) - end)); - -end; - -structure Basic_Simplifier: BASIC_SIMPLIFIER = Simplifier; -open Basic_Simplifier; diff --git a/core/Pure/skip_proof.ML b/core/Pure/skip_proof.ML deleted file mode 100644 index 0b54f959..00000000 --- a/core/Pure/skip_proof.ML +++ /dev/null @@ -1,43 +0,0 @@ -(* Title: Pure/skip_proof.ML - Author: Makarius - -Skip proof via oracle invocation. 
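A usage sketch (hypothetical helper; t is a proposition and tac a finishing
tactic): when the quick_and_dirty option is set, the proof is faked via the
oracle instead of being run:

  fun prove_or_skip ctxt t tac =
    if Config.get ctxt quick_and_dirty
    then Skip_Proof.make_thm (Proof_Context.theory_of ctxt) t
    else Goal.prove ctxt [] [] t (K tac);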
-*) - -val quick_and_dirty_raw = Config.declare_option ("quick_and_dirty", @{here}); -val quick_and_dirty = Config.bool quick_and_dirty_raw; - -signature SKIP_PROOF = -sig - val report: Proof.context -> unit - val make_thm_cterm: cterm -> thm - val make_thm: theory -> term -> thm - val cheat_tac: int -> tactic -end; - -structure Skip_Proof: SKIP_PROOF = -struct - -(* report *) - -fun report ctxt = - if Context_Position.is_visible ctxt then - Output.report [Markup.markup Markup.bad "Skipped proof"] - else (); - - -(* oracle setup *) - -val (_, make_thm_cterm) = - Context.>>> - (Context.map_theory_result (Thm.add_oracle (Binding.make ("skip_proof", @{here}), I))); - -fun make_thm thy prop = make_thm_cterm (Thm.cterm_of thy prop); - - -(* cheat_tac *) - -fun cheat_tac i st = - rtac (make_thm (Thm.theory_of_thm st) (Var (("A", 0), propT))) i st; - -end; diff --git a/core/Pure/sorts.ML b/core/Pure/sorts.ML deleted file mode 100644 index 68052e3a..00000000 --- a/core/Pure/sorts.ML +++ /dev/null @@ -1,477 +0,0 @@ -(* Title: Pure/sorts.ML - Author: Markus Wenzel and Stefan Berghofer, TU Muenchen - -The order-sorted algebra of type classes. - -Classes denote (possibly empty) collections of types that are -partially ordered by class inclusion. They are represented -symbolically by strings. - -Sorts are intersections of finitely many classes. They are represented -by lists of classes. Normal forms of sorts are sorted lists of -minimal classes (wrt. current class inclusion). -*) - -signature SORTS = -sig - val make: sort list -> sort Ord_List.T - val subset: sort Ord_List.T * sort Ord_List.T -> bool - val union: sort Ord_List.T -> sort Ord_List.T -> sort Ord_List.T - val subtract: sort Ord_List.T -> sort Ord_List.T -> sort Ord_List.T - val remove_sort: sort -> sort Ord_List.T -> sort Ord_List.T - val insert_sort: sort -> sort Ord_List.T -> sort Ord_List.T - val insert_typ: typ -> sort Ord_List.T -> sort Ord_List.T - val insert_typs: typ list -> sort Ord_List.T -> sort Ord_List.T - val insert_term: term -> sort Ord_List.T -> sort Ord_List.T - val insert_terms: term list -> sort Ord_List.T -> sort Ord_List.T - type algebra - val classes_of: algebra -> serial Graph.T - val arities_of: algebra -> (class * sort list) list Symtab.table - val all_classes: algebra -> class list - val super_classes: algebra -> class -> class list - val class_less: algebra -> class * class -> bool - val class_le: algebra -> class * class -> bool - val sort_eq: algebra -> sort * sort -> bool - val sort_le: algebra -> sort * sort -> bool - val sorts_le: algebra -> sort list * sort list -> bool - val inter_sort: algebra -> sort * sort -> sort - val minimize_sort: algebra -> sort -> sort - val complete_sort: algebra -> sort -> sort - val minimal_sorts: algebra -> sort list -> sort Ord_List.T - val add_class: Context.pretty -> class * class list -> algebra -> algebra - val add_classrel: Context.pretty -> class * class -> algebra -> algebra - val add_arities: Context.pretty -> string * (class * sort list) list -> algebra -> algebra - val empty_algebra: algebra - val merge_algebra: Context.pretty -> algebra * algebra -> algebra - val subalgebra: Context.pretty -> (class -> bool) -> (class * string -> sort list option) - -> algebra -> (sort -> sort) * algebra - type class_error - val class_error: Context.pretty -> class_error -> string - exception CLASS_ERROR of class_error - val has_instance: algebra -> string -> sort -> bool - val mg_domain: algebra -> string -> sort -> sort list (*exception CLASS_ERROR*) - val meet_sort: algebra -> 
typ * sort - -> sort Vartab.table -> sort Vartab.table (*exception CLASS_ERROR*) - val meet_sort_typ: algebra -> typ * sort -> typ -> typ (*exception CLASS_ERROR*) - val of_sort: algebra -> typ * sort -> bool - val of_sort_derivation: algebra -> - {class_relation: typ -> 'a * class -> class -> 'a, - type_constructor: string * typ list -> ('a * class) list list -> class -> 'a, - type_variable: typ -> ('a * class) list} -> - typ * sort -> 'a list (*exception CLASS_ERROR*) - val classrel_derivation: algebra -> - ('a * class -> class -> 'a) -> 'a * class -> class -> 'a (*exception CLASS_ERROR*) - val witness_sorts: algebra -> string list -> (typ * sort) list -> sort list -> (typ * sort) list -end; - -structure Sorts: SORTS = -struct - - -(** ordered lists of sorts **) - -val make = Ord_List.make Term_Ord.sort_ord; -val subset = Ord_List.subset Term_Ord.sort_ord; -val union = Ord_List.union Term_Ord.sort_ord; -val subtract = Ord_List.subtract Term_Ord.sort_ord; - -val remove_sort = Ord_List.remove Term_Ord.sort_ord; -val insert_sort = Ord_List.insert Term_Ord.sort_ord; - -fun insert_typ (TFree (_, S)) Ss = insert_sort S Ss - | insert_typ (TVar (_, S)) Ss = insert_sort S Ss - | insert_typ (Type (_, Ts)) Ss = insert_typs Ts Ss -and insert_typs [] Ss = Ss - | insert_typs (T :: Ts) Ss = insert_typs Ts (insert_typ T Ss); - -fun insert_term (Const (_, T)) Ss = insert_typ T Ss - | insert_term (Free (_, T)) Ss = insert_typ T Ss - | insert_term (Var (_, T)) Ss = insert_typ T Ss - | insert_term (Bound _) Ss = Ss - | insert_term (Abs (_, T, t)) Ss = insert_term t (insert_typ T Ss) - | insert_term (t $ u) Ss = insert_term t (insert_term u Ss); - -fun insert_terms [] Ss = Ss - | insert_terms (t :: ts) Ss = insert_terms ts (insert_term t Ss); - - - -(** order-sorted algebra **) - -(* - classes: graph representing class declarations together with proper - subclass relation, which needs to be transitive and acyclic. - - arities: table of association lists of all type arities; (t, ars) - means that type constructor t has the arities ars; an element - (c, Ss) of ars represents the arity t::(Ss)c. "Coregularity" of - the arities structure requires that for any two declarations - t::(Ss1)c1 and t::(Ss2)c2 such that c1 <= c2 holds Ss1 <= Ss2. 
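A usage sketch (the class names "c1" and "c2" are placeholders, thy some
theory value): the algebra of a theory is obtained via Sign.classes_of and
queried with the operations defined below, e.g.

  val algebra = Sign.classes_of thy;
  val le = Sorts.sort_le algebra (["c1", "c2"], ["c1"]);
  val S = Sorts.inter_sort algebra (["c1"], ["c2"]);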
-*) - -datatype algebra = Algebra of - {classes: serial Graph.T, - arities: (class * sort list) list Symtab.table}; - -fun classes_of (Algebra {classes, ...}) = classes; -fun arities_of (Algebra {arities, ...}) = arities; - -fun make_algebra (classes, arities) = - Algebra {classes = classes, arities = arities}; - -fun map_classes f (Algebra {classes, arities}) = make_algebra (f classes, arities); -fun map_arities f (Algebra {classes, arities}) = make_algebra (classes, f arities); - - -(* classes *) - -fun all_classes (Algebra {classes, ...}) = Graph.all_preds classes (Graph.maximals classes); - -val super_classes = Graph.immediate_succs o classes_of; - - -(* class relations *) - -val class_less = Graph.is_edge o classes_of; -fun class_le algebra (c1, c2) = c1 = c2 orelse class_less algebra (c1, c2); - - -(* sort relations *) - -fun sort_le algebra (S1, S2) = - S1 = S2 orelse forall (fn c2 => exists (fn c1 => class_le algebra (c1, c2)) S1) S2; - -fun sorts_le algebra (Ss1, Ss2) = - ListPair.all (sort_le algebra) (Ss1, Ss2); - -fun sort_eq algebra (S1, S2) = - sort_le algebra (S1, S2) andalso sort_le algebra (S2, S1); - - -(* intersection *) - -fun inter_class algebra c S = - let - fun intr [] = [c] - | intr (S' as c' :: c's) = - if class_le algebra (c', c) then S' - else if class_le algebra (c, c') then intr c's - else c' :: intr c's - in intr S end; - -fun inter_sort algebra (S1, S2) = - sort_strings (fold (inter_class algebra) S1 S2); - - -(* normal forms *) - -fun minimize_sort _ [] = [] - | minimize_sort _ (S as [_]) = S - | minimize_sort algebra S = - filter (fn c => not (exists (fn c' => class_less algebra (c', c)) S)) S - |> sort_distinct string_ord; - -fun complete_sort algebra = - Graph.all_succs (classes_of algebra) o minimize_sort algebra; - -fun minimal_sorts algebra raw_sorts = - let - fun le S1 S2 = sort_le algebra (S1, S2); - val sorts = make (map (minimize_sort algebra) raw_sorts); - in sorts |> filter_out (fn S => exists (fn S' => le S' S andalso not (le S S')) sorts) end; - - - -(** build algebras **) - -(* classes *) - -fun err_dup_class c = error ("Duplicate declaration of class: " ^ quote c); - -fun err_cyclic_classes pp css = - error (cat_lines (map (fn cs => - "Cycle in class relation: " ^ Syntax.string_of_classrel (Syntax.init_pretty pp) cs) css)); - -fun add_class pp (c, cs) = map_classes (fn classes => - let - val classes' = classes |> Graph.new_node (c, serial ()) - handle Graph.DUP dup => err_dup_class dup; - val classes'' = classes' |> fold Graph.add_edge_trans_acyclic (map (pair c) cs) - handle Graph.CYCLES css => err_cyclic_classes pp css; - in classes'' end); - - -(* arities *) - -local - -fun for_classes _ NONE = "" - | for_classes ctxt (SOME (c1, c2)) = " for classes " ^ Syntax.string_of_classrel ctxt [c1, c2]; - -fun err_conflict pp t cc (c, Ss) (c', Ss') = - let val ctxt = Syntax.init_pretty pp in - error ("Conflict of type arities" ^ for_classes ctxt cc ^ ":\n " ^ - Syntax.string_of_arity ctxt (t, Ss, [c]) ^ " and\n " ^ - Syntax.string_of_arity ctxt (t, Ss', [c'])) - end; - -fun coregular pp algebra t (c, Ss) ars = - let - fun conflict (c', Ss') = - if class_le algebra (c, c') andalso not (sorts_le algebra (Ss, Ss')) then - SOME ((c, c'), (c', Ss')) - else if class_le algebra (c', c) andalso not (sorts_le algebra (Ss', Ss)) then - SOME ((c', c), (c', Ss')) - else NONE; - in - (case get_first conflict ars of - SOME ((c1, c2), (c', Ss')) => err_conflict pp t (SOME (c1, c2)) (c, Ss) (c', Ss') - | NONE => (c, Ss) :: ars) - end; - -fun complete algebra (c, 
Ss) = map (rpair Ss) (c :: super_classes algebra c); - -fun insert pp algebra t (c, Ss) ars = - (case AList.lookup (op =) ars c of - NONE => coregular pp algebra t (c, Ss) ars - | SOME Ss' => - if sorts_le algebra (Ss, Ss') then ars - else if sorts_le algebra (Ss', Ss) - then coregular pp algebra t (c, Ss) (remove (op =) (c, Ss') ars) - else err_conflict pp t NONE (c, Ss) (c, Ss')); - -in - -fun insert_ars pp algebra t = fold_rev (insert pp algebra t); - -fun insert_complete_ars pp algebra (t, ars) arities = - let val ars' = - Symtab.lookup_list arities t - |> fold_rev (insert_ars pp algebra t) (map (complete algebra) ars); - in Symtab.update (t, ars') arities end; - -fun add_arities pp arg algebra = - algebra |> map_arities (insert_complete_ars pp algebra arg); - -fun add_arities_table pp algebra = - Symtab.fold (fn (t, ars) => insert_complete_ars pp algebra (t, ars)); - -end; - - -(* classrel *) - -fun rebuild_arities pp algebra = algebra |> map_arities (fn arities => - Symtab.empty - |> add_arities_table pp algebra arities); - -fun add_classrel pp rel = rebuild_arities pp o map_classes (fn classes => - classes |> Graph.add_edge_trans_acyclic rel - handle Graph.CYCLES css => err_cyclic_classes pp css); - - -(* empty and merge *) - -val empty_algebra = make_algebra (Graph.empty, Symtab.empty); - -fun merge_algebra pp - (Algebra {classes = classes1, arities = arities1}, - Algebra {classes = classes2, arities = arities2}) = - let - val classes' = Graph.merge_trans_acyclic (op =) (classes1, classes2) - handle Graph.DUP c => err_dup_class c - | Graph.CYCLES css => err_cyclic_classes pp css; - val algebra0 = make_algebra (classes', Symtab.empty); - val arities' = - (case (pointer_eq (classes1, classes2), pointer_eq (arities1, arities2)) of - (true, true) => arities1 - | (true, false) => (*no completion*) - (arities1, arities2) |> Symtab.join (fn t => fn (ars1, ars2) => - if pointer_eq (ars1, ars2) then raise Symtab.SAME - else insert_ars pp algebra0 t ars2 ars1) - | (false, true) => (*unary completion*) - Symtab.empty - |> add_arities_table pp algebra0 arities1 - | (false, false) => (*binary completion*) - Symtab.empty - |> add_arities_table pp algebra0 arities1 - |> add_arities_table pp algebra0 arities2); - in make_algebra (classes', arities') end; - - -(* algebra projections *) (* FIXME potentially violates abstract type integrity *) - -fun subalgebra pp P sargs (algebra as Algebra {classes, arities}) = - let - val restrict_sort = minimize_sort algebra o filter P o Graph.all_succs classes; - fun restrict_arity t (c, Ss) = - if P c then - (case sargs (c, t) of - SOME sorts => - SOME (c, Ss |> map2 (curry (inter_sort algebra)) sorts |> map restrict_sort) - | NONE => NONE) - else NONE; - val classes' = classes |> Graph.restrict P; - val arities' = arities |> Symtab.map (map_filter o restrict_arity); - in (restrict_sort, rebuild_arities pp (make_algebra (classes', arities'))) end; - - - -(** sorts of types **) - -(* errors -- performance tuning via delayed message composition *) - -datatype class_error = - No_Classrel of class * class | - No_Arity of string * class | - No_Subsort of sort * sort; - -fun class_error pp = - let val ctxt = Syntax.init_pretty pp in - fn No_Classrel (c1, c2) => "No class relation " ^ Syntax.string_of_classrel ctxt [c1, c2] - | No_Arity (a, c) => "No type arity " ^ Syntax.string_of_arity ctxt (a, [], [c]) - | No_Subsort (S1, S2) => - "Cannot derive subsort relation " ^ - Syntax.string_of_sort ctxt S1 ^ " < " ^ Syntax.string_of_sort ctxt S2 - end; - -exception 
CLASS_ERROR of class_error; - - -(* instances *) - -fun has_instance algebra a = - forall (AList.defined (op =) (Symtab.lookup_list (arities_of algebra) a)); - -fun mg_domain algebra a S = - let - val ars = Symtab.lookup_list (arities_of algebra) a; - fun dom c = - (case AList.lookup (op =) ars c of - NONE => raise CLASS_ERROR (No_Arity (a, c)) - | SOME Ss => Ss); - fun dom_inter c Ss = ListPair.map (inter_sort algebra) (dom c, Ss); - in - (case S of - [] => raise Fail "Unknown domain of empty intersection" - | c :: cs => fold dom_inter cs (dom c)) - end; - - -(* meet_sort *) - -fun meet_sort algebra = - let - fun inters S S' = inter_sort algebra (S, S'); - fun meet _ [] = I - | meet (TFree (_, S)) S' = - if sort_le algebra (S, S') then I - else raise CLASS_ERROR (No_Subsort (S, S')) - | meet (TVar (v, S)) S' = - if sort_le algebra (S, S') then I - else Vartab.map_default (v, S) (inters S') - | meet (Type (a, Ts)) S = fold2 meet Ts (mg_domain algebra a S); - in uncurry meet end; - -fun meet_sort_typ algebra (T, S) = - let val tab = meet_sort algebra (T, S) Vartab.empty; - in Term.map_type_tvar (fn (v, _) => TVar (v, (the o Vartab.lookup tab) v)) end; - - -(* of_sort *) - -fun of_sort algebra = - let - fun ofS (_, []) = true - | ofS (TFree (_, S), S') = sort_le algebra (S, S') - | ofS (TVar (_, S), S') = sort_le algebra (S, S') - | ofS (Type (a, Ts), S) = - let val Ss = mg_domain algebra a S in - ListPair.all ofS (Ts, Ss) - end handle CLASS_ERROR _ => false; - in ofS end; - - -(* animating derivations *) - -fun of_sort_derivation algebra {class_relation, type_constructor, type_variable} = - let - val arities = arities_of algebra; - - fun weaken T D1 S2 = - let val S1 = map snd D1 in - if S1 = S2 then map fst D1 - else - S2 |> map (fn c2 => - (case D1 |> find_first (fn (_, c1) => class_le algebra (c1, c2)) of - SOME d1 => class_relation T d1 c2 - | NONE => raise CLASS_ERROR (No_Subsort (S1, S2)))) - end; - - fun derive (_, []) = [] - | derive (Type (a, Us), S) = - let - val Ss = mg_domain algebra a S; - val dom = map2 (fn U => fn S => derive (U, S) ~~ S) Us Ss; - in - S |> map (fn c => - let - val Ss' = the (AList.lookup (op =) (Symtab.lookup_list arities a) c); - val dom' = map (fn ((U, d), S') => weaken U d S' ~~ S') ((Us ~~ dom) ~~ Ss'); - in type_constructor (a, Us) dom' c end) - end - | derive (T, S) = weaken T (type_variable T) S; - in derive end; - -fun classrel_derivation algebra class_relation = - let - fun path (x, c1 :: c2 :: cs) = path (class_relation (x, c1) c2, c2 :: cs) - | path (x, _) = x; - in - fn (x, c1) => fn c2 => - (case Graph.irreducible_paths (classes_of algebra) (c1, c2) of - [] => raise CLASS_ERROR (No_Classrel (c1, c2)) - | cs :: _ => path (x, cs)) - end; - - -(* witness_sorts *) - -fun witness_sorts algebra types hyps sorts = - let - fun le S1 S2 = sort_le algebra (S1, S2); - fun get S2 (T, S1) = if le S1 S2 then SOME (T, S2) else NONE; - fun mg_dom t S = SOME (mg_domain algebra t S) handle CLASS_ERROR _ => NONE; - - fun witn_sort _ [] solved_failed = (SOME (propT, []), solved_failed) - | witn_sort path S (solved, failed) = - if exists (le S) failed then (NONE, (solved, failed)) - else - (case get_first (get S) solved of - SOME w => (SOME w, (solved, failed)) - | NONE => - (case get_first (get S) hyps of - SOME w => (SOME w, (w :: solved, failed)) - | NONE => witn_types path types S (solved, failed))) - - and witn_sorts path x = fold_map (witn_sort path) x - - and witn_types _ [] S (solved, failed) = (NONE, (solved, S :: failed)) - | witn_types path (t :: ts) S 
solved_failed = - (case mg_dom t S of - SOME SS => - (*do not descend into stronger args (achieving termination)*) - if exists (fn D => le D S orelse exists (le D) path) SS then - witn_types path ts S solved_failed - else - let val (ws, (solved', failed')) = witn_sorts (S :: path) SS solved_failed in - if forall is_some ws then - let val w = (Type (t, map (#1 o the) ws), S) - in (SOME w, (w :: solved', failed')) end - else witn_types path ts S (solved', failed') - end - | NONE => witn_types path ts S solved_failed); - - in map_filter I (#1 (witn_sorts [] sorts ([], []))) end; - -end; diff --git a/core/Pure/subgoal.ML b/core/Pure/subgoal.ML deleted file mode 100644 index 051ad21b..00000000 --- a/core/Pure/subgoal.ML +++ /dev/null @@ -1,151 +0,0 @@ -(* Title: Pure/subgoal.ML - Author: Makarius - -Tactical operations with explicit subgoal focus, based on canonical -proof decomposition. The "visible" part of the text within the -context is fixed, the remaining goal may be schematic. -*) - -signature SUBGOAL = -sig - type focus = {context: Proof.context, params: (string * cterm) list, prems: thm list, - asms: cterm list, concl: cterm, schematics: (ctyp * ctyp) list * (cterm * cterm) list} - val focus_params: Proof.context -> int -> thm -> focus * thm - val focus_prems: Proof.context -> int -> thm -> focus * thm - val focus: Proof.context -> int -> thm -> focus * thm - val retrofit: Proof.context -> Proof.context -> (string * cterm) list -> cterm list -> - int -> thm -> thm -> thm Seq.seq - val FOCUS_PARAMS: (focus -> tactic) -> Proof.context -> int -> tactic - val FOCUS_PREMS: (focus -> tactic) -> Proof.context -> int -> tactic - val FOCUS: (focus -> tactic) -> Proof.context -> int -> tactic - val SUBPROOF: (focus -> tactic) -> Proof.context -> int -> tactic -end; - -structure Subgoal: SUBGOAL = -struct - -(* focus *) - -type focus = {context: Proof.context, params: (string * cterm) list, prems: thm list, - asms: cterm list, concl: cterm, schematics: (ctyp * ctyp) list * (cterm * cterm) list}; - -fun gen_focus (do_prems, do_concl) ctxt i raw_st = - let - val st = Simplifier.norm_hhf_protect ctxt raw_st; - val ((schematic_types, [st']), ctxt1) = Variable.importT [st] ctxt; - val ((params, goal), ctxt2) = Variable.focus_cterm (Thm.cprem_of st' i) ctxt1; - - val (asms, concl) = - if do_prems then (Drule.strip_imp_prems goal, Drule.strip_imp_concl goal) - else ([], goal); - val text = asms @ (if do_concl then [concl] else []); - - val ((_, schematic_terms), ctxt3) = - Variable.import_inst true (map Thm.term_of text) ctxt2 - |>> Thm.certify_inst (Thm.theory_of_thm raw_st); - - val schematics = (schematic_types, schematic_terms); - val asms' = map (Thm.instantiate_cterm schematics) asms; - val concl' = Thm.instantiate_cterm schematics concl; - val (prems, context) = Assumption.add_assumes asms' ctxt3; - in - ({context = context, params = params, prems = prems, - asms = asms', concl = concl', schematics = schematics}, Goal.init concl') - end; - -val focus_params = gen_focus (false, false); -val focus_prems = gen_focus (true, false); -val focus = gen_focus (true, true); - - -(* lift and retrofit *) - -(* - B [?'b, ?y] - ---------------- - B ['b, y params] -*) -fun lift_import idx params th ctxt = - let - val cert = Thm.cterm_of (Proof_Context.theory_of ctxt); - val ((_, [th']), ctxt') = Variable.importT [th] ctxt; - - val Ts = map (#T o Thm.rep_cterm) params; - val ts = map Thm.term_of params; - - val prop = Thm.full_prop_of th'; - val concl_vars = Term.add_vars (Logic.strip_imp_concl prop) []; - val 
vars = rev (Term.add_vars prop []); - val (ys, ctxt'') = Variable.variant_fixes (map (Name.clean o #1 o #1) vars) ctxt'; - - fun var_inst v y = - let - val ((x, i), T) = v; - val (U, args) = - if member (op =) concl_vars v then (T, []) - else (Ts ---> T, ts); - val u = Free (y, U); - in ((Var v, list_comb (u, args)), (u, Var ((x, i + idx), U))) end; - val (inst1, inst2) = split_list (map (pairself (pairself cert)) (map2 var_inst vars ys)); - - val th'' = Thm.instantiate ([], inst1) th'; - in ((inst2, th''), ctxt'') end; - -(* - [x, A x] - : - B x ==> C - ------------------ - [!!x. A x ==> B x] - : - C -*) -fun lift_subgoals params asms th = - let - fun lift ct = fold_rev Thm.all_name params (Drule.list_implies (asms, ct)); - val unlift = - fold (Thm.elim_implies o Thm.assume) asms o - Drule.forall_elim_list (map #2 params) o Thm.assume; - val subgoals = map lift (Drule.strip_imp_prems (Thm.cprop_of th)); - val th' = fold (Thm.elim_implies o unlift) subgoals th; - in (subgoals, th') end; - -fun retrofit ctxt1 ctxt0 params asms i st1 st0 = - let - val idx = Thm.maxidx_of st0 + 1; - val ps = map #2 params; - val ((subgoal_inst, st2), ctxt2) = lift_import idx ps st1 ctxt1; - val (subgoals, st3) = lift_subgoals params asms st2; - val result = st3 - |> Goal.conclude - |> Drule.implies_intr_list asms - |> Drule.forall_intr_list ps - |> Drule.implies_intr_list subgoals - |> fold_rev (Thm.forall_intr o #1) subgoal_inst - |> fold (Thm.forall_elim o #2) subgoal_inst - |> Thm.adjust_maxidx_thm idx - |> singleton (Variable.export ctxt2 ctxt0); - in - Thm.bicompose {flatten = true, match = false, incremented = false} - (false, result, Thm.nprems_of st1) i st0 - end; - - -(* tacticals *) - -fun GEN_FOCUS flags tac ctxt i st = - if Thm.nprems_of st < i then Seq.empty - else - let val (args as {context = ctxt', params, asms, ...}, st') = gen_focus flags ctxt i st; - in Seq.lifts (retrofit ctxt' ctxt params asms i) (tac args st') st end; - -val FOCUS_PARAMS = GEN_FOCUS (false, false); -val FOCUS_PREMS = GEN_FOCUS (true, false); -val FOCUS = GEN_FOCUS (true, true); - -fun SUBPROOF tac ctxt = FOCUS (Seq.map (Goal.check_finished ctxt) oo tac) ctxt; - -end; - -val SUBPROOF = Subgoal.SUBPROOF; - diff --git a/core/Pure/tactic.ML b/core/Pure/tactic.ML deleted file mode 100644 index d4baec99..00000000 --- a/core/Pure/tactic.ML +++ /dev/null @@ -1,341 +0,0 @@ -(* Title: Pure/tactic.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - -Fundamental tactics. 
-*) - -signature BASIC_TACTIC = -sig - val trace_goalno_tac: (int -> tactic) -> int -> tactic - val rule_by_tactic: Proof.context -> tactic -> thm -> thm - val assume_tac: int -> tactic - val eq_assume_tac: int -> tactic - val compose_tac: (bool * thm * int) -> int -> tactic - val make_elim: thm -> thm - val biresolve_tac: (bool * thm) list -> int -> tactic - val resolve_tac: thm list -> int -> tactic - val eresolve_tac: thm list -> int -> tactic - val forward_tac: thm list -> int -> tactic - val dresolve_tac: thm list -> int -> tactic - val atac: int -> tactic - val rtac: thm -> int -> tactic - val dtac: thm -> int -> tactic - val etac: thm -> int -> tactic - val ftac: thm -> int -> tactic - val ares_tac: thm list -> int -> tactic - val solve_tac: thm list -> int -> tactic - val bimatch_tac: (bool * thm) list -> int -> tactic - val match_tac: thm list -> int -> tactic - val ematch_tac: thm list -> int -> tactic - val dmatch_tac: thm list -> int -> tactic - val flexflex_tac: tactic - val distinct_subgoal_tac: int -> tactic - val distinct_subgoals_tac: tactic - val cut_tac: thm -> int -> tactic - val cut_rules_tac: thm list -> int -> tactic - val cut_facts_tac: thm list -> int -> tactic - val filter_thms: (term * term -> bool) -> int * term * thm list -> thm list - val biresolution_from_nets_tac: ('a list -> (bool * thm) list) -> - bool -> 'a Net.net * 'a Net.net -> int -> tactic - val biresolve_from_nets_tac: (int * (bool * thm)) Net.net * (int * (bool * thm)) Net.net -> - int -> tactic - val bimatch_from_nets_tac: (int * (bool * thm)) Net.net * (int * (bool * thm)) Net.net -> - int -> tactic - val net_biresolve_tac: (bool * thm) list -> int -> tactic - val net_bimatch_tac: (bool * thm) list -> int -> tactic - val filt_resolve_tac: thm list -> int -> int -> tactic - val resolve_from_net_tac: (int * thm) Net.net -> int -> tactic - val match_from_net_tac: (int * thm) Net.net -> int -> tactic - val net_resolve_tac: thm list -> int -> tactic - val net_match_tac: thm list -> int -> tactic - val subgoals_of_brl: bool * thm -> int - val lessb: (bool * thm) * (bool * thm) -> bool - val rename_tac: string list -> int -> tactic - val rotate_tac: int -> int -> tactic - val defer_tac: int -> tactic - val prefer_tac: int -> tactic - val filter_prems_tac: (term -> bool) -> int -> tactic -end; - -signature TACTIC = -sig - include BASIC_TACTIC - val insert_tagged_brl: 'a * (bool * thm) -> - ('a * (bool * thm)) Net.net * ('a * (bool * thm)) Net.net -> - ('a * (bool * thm)) Net.net * ('a * (bool * thm)) Net.net - val build_netpair: (int * (bool * thm)) Net.net * (int * (bool * thm)) Net.net -> - (bool * thm) list -> (int * (bool * thm)) Net.net * (int * (bool * thm)) Net.net - val delete_tagged_brl: bool * thm -> - ('a * (bool * thm)) Net.net * ('a * (bool * thm)) Net.net -> - ('a * (bool * thm)) Net.net * ('a * (bool * thm)) Net.net - val eq_kbrl: ('a * (bool * thm)) * ('a * (bool * thm)) -> bool - val build_net: thm list -> (int * thm) Net.net -end; - -structure Tactic: TACTIC = -struct - -(*Discover which goal is chosen: SOMEGOAL(trace_goalno_tac tac) *) -fun trace_goalno_tac tac i st = - case Seq.pull(tac i st) of - NONE => Seq.empty - | seqcell => (tracing ("Subgoal " ^ string_of_int i ^ " selected"); - Seq.make(fn()=> seqcell)); - -(*Makes a rule by applying a tactic to an existing rule*) -fun rule_by_tactic ctxt tac rl = - let - val thy = Proof_Context.theory_of ctxt; - val ctxt' = Variable.declare_thm rl ctxt; - val ((_, [st]), ctxt'') = Variable.import true [Thm.transfer thy rl] ctxt'; - in - (case 
Seq.pull (tac st) of - NONE => raise THM ("rule_by_tactic", 0, [rl]) - | SOME (st', _) => zero_var_indexes (singleton (Variable.export ctxt'' ctxt') st')) - end; - - -(*** Basic tactics ***) - -(*** The following fail if the goal number is out of range: - thus (REPEAT (resolve_tac rules i)) stops once subgoal i disappears. *) - -(*Solve subgoal i by assumption*) -fun assume_tac i = PRIMSEQ (Thm.assumption i); - -(*Solve subgoal i by assumption, using no unification*) -fun eq_assume_tac i = PRIMITIVE (Thm.eq_assumption i); - - -(** Resolution/matching tactics **) - -(*The composition rule/state: no lifting or var renaming. - The arg = (bires_flg, orule, m); see Thm.bicompose for explanation.*) -fun compose_tac arg i = - PRIMSEQ (Thm.bicompose {flatten = true, match = false, incremented = false} arg i); - -(*Converts a "destruct" rule like P&Q==>P to an "elimination" rule - like [| P&Q; P==>R |] ==> R *) -fun make_elim rl = zero_var_indexes (rl RS revcut_rl); - -(*Attack subgoal i by resolution, using flags to indicate elimination rules*) -fun biresolve_tac brules i = PRIMSEQ (Thm.biresolution false brules i); - -(*Resolution: the simple case, works for introduction rules*) -fun resolve_tac rules = biresolve_tac (map (pair false) rules); - -(*Resolution with elimination rules only*) -fun eresolve_tac rules = biresolve_tac (map (pair true) rules); - -(*Forward reasoning using destruction rules.*) -fun forward_tac rls = resolve_tac (map make_elim rls) THEN' assume_tac; - -(*Like forward_tac, but deletes the assumption after use.*) -fun dresolve_tac rls = eresolve_tac (map make_elim rls); - -(*Shorthand versions: for resolution with a single theorem*) -val atac = assume_tac; -fun rtac rl = resolve_tac [rl]; -fun dtac rl = dresolve_tac [rl]; -fun etac rl = eresolve_tac [rl]; -fun ftac rl = forward_tac [rl]; - -(*Use an assumption or some rules ... A popular combination!*) -fun ares_tac rules = assume_tac ORELSE' resolve_tac rules; - -fun solve_tac rules = resolve_tac rules THEN_ALL_NEW assume_tac; - -(*Matching tactics -- as above, but forbid updating of state*) -fun bimatch_tac brules i = PRIMSEQ (Thm.biresolution true brules i); -fun match_tac rules = bimatch_tac (map (pair false) rules); -fun ematch_tac rules = bimatch_tac (map (pair true) rules); -fun dmatch_tac rls = ematch_tac (map make_elim rls); - -(*Smash all flex-flex disagreement pairs in the proof state.*) -val flexflex_tac = PRIMSEQ Thm.flexflex_rule; - -(*Remove duplicate subgoals.*) -val permute_tac = PRIMITIVE oo Thm.permute_prems; -fun distinct_tac (i, k) = - permute_tac 0 (i - 1) THEN - permute_tac 1 (k - 1) THEN - PRIMITIVE (fn st => Drule.comp_no_flatten (st, 0) 1 Drule.distinct_prems_rl) THEN - permute_tac 1 (1 - k) THEN - permute_tac 0 (1 - i); - -fun distinct_subgoal_tac i st = - (case drop (i - 1) (Thm.prems_of st) of - [] => no_tac st - | A :: Bs => - st |> EVERY (fold (fn (B, k) => - if A aconv B then cons (distinct_tac (i, k)) else I) (Bs ~~ (1 upto length Bs)) [])); - -fun distinct_subgoals_tac state = - let - val goals = Thm.prems_of state; - val dups = distinct (eq_fst (op aconv)) (goals ~~ (1 upto length goals)); - in EVERY (rev (map (distinct_subgoal_tac o snd) dups)) state end; - - -(*** Applications of cut_rl ***) - -(*The conclusion of the rule gets assumed in subgoal i, - while subgoal i+1,... are the premises of the rule.*) -fun cut_tac rule i = rtac cut_rl i THEN rtac rule (i + 1); - -(*"Cut" a list of rules into the goal. 
Their premises will become new - subgoals.*) -fun cut_rules_tac ths i = EVERY (map (fn th => cut_tac th i) ths); - -(*As above, but inserts only facts (unconditional theorems); - generates no additional subgoals. *) -fun cut_facts_tac ths = cut_rules_tac (filter Thm.no_prems ths); - - -(**** Indexing and filtering of theorems ****) - -(*Returns the list of potentially resolvable theorems for the goal "prem", - using the predicate could(subgoal,concl). - Resulting list is no longer than "limit"*) -fun filter_thms could (limit, prem, ths) = - let val pb = Logic.strip_assums_concl prem; (*delete assumptions*) - fun filtr (limit, []) = [] - | filtr (limit, th::ths) = - if limit=0 then [] - else if could(pb, concl_of th) then th :: filtr(limit-1, ths) - else filtr(limit,ths) - in filtr(limit,ths) end; - - -(*** biresolution and resolution using nets ***) - -(** To preserve the order of the rules, tag them with increasing integers **) - -(*insert one tagged brl into the pair of nets*) -fun insert_tagged_brl (kbrl as (k, (eres, th))) (inet, enet) = - if eres then - (case try Thm.major_prem_of th of - SOME prem => (inet, Net.insert_term (K false) (prem, kbrl) enet) - | NONE => error "insert_tagged_brl: elimination rule with no premises") - else (Net.insert_term (K false) (concl_of th, kbrl) inet, enet); - -(*build a pair of nets for biresolution*) -fun build_netpair netpair brls = - fold_rev insert_tagged_brl (tag_list 1 brls) netpair; - -(*delete one kbrl from the pair of nets*) -fun eq_kbrl ((_, (_, th)), (_, (_, th'))) = Thm.eq_thm_prop (th, th') - -fun delete_tagged_brl (brl as (eres, th)) (inet, enet) = - (if eres then - (case try Thm.major_prem_of th of - SOME prem => (inet, Net.delete_term eq_kbrl (prem, ((), brl)) enet) - | NONE => (inet, enet)) (*no major premise: ignore*) - else (Net.delete_term eq_kbrl (Thm.concl_of th, ((), brl)) inet, enet)) - handle Net.DELETE => (inet,enet); - - -(*biresolution using a pair of nets rather than rules. - function "order" must sort and possibly filter the list of brls. - boolean "match" indicates matching or unification.*) -fun biresolution_from_nets_tac order match (inet,enet) = - SUBGOAL - (fn (prem,i) => - let val hyps = Logic.strip_assums_hyp prem - and concl = Logic.strip_assums_concl prem - val kbrls = Net.unify_term inet concl @ maps (Net.unify_term enet) hyps - in PRIMSEQ (Thm.biresolution match (order kbrls) i) end); - -(*versions taking pre-built nets. 
No filtering of brls*) -val biresolve_from_nets_tac = biresolution_from_nets_tac order_list false; -val bimatch_from_nets_tac = biresolution_from_nets_tac order_list true; - -(*fast versions using nets internally*) -val net_biresolve_tac = - biresolve_from_nets_tac o build_netpair(Net.empty,Net.empty); - -val net_bimatch_tac = - bimatch_from_nets_tac o build_netpair(Net.empty,Net.empty); - -(*** Simpler version for resolve_tac -- only one net, and no hyps ***) - -(*insert one tagged rl into the net*) -fun insert_krl (krl as (k,th)) = - Net.insert_term (K false) (concl_of th, krl); - -(*build a net of rules for resolution*) -fun build_net rls = - fold_rev insert_krl (tag_list 1 rls) Net.empty; - -(*resolution using a net rather than rules; pred supports filt_resolve_tac*) -fun filt_resolution_from_net_tac match pred net = - SUBGOAL - (fn (prem,i) => - let val krls = Net.unify_term net (Logic.strip_assums_concl prem) - in - if pred krls - then PRIMSEQ - (Thm.biresolution match (map (pair false) (order_list krls)) i) - else no_tac - end); - -(*Resolve the subgoal using the rules (making a net) unless too flexible, - which means more than maxr rules are unifiable. *) -fun filt_resolve_tac rules maxr = - let fun pred krls = length krls <= maxr - in filt_resolution_from_net_tac false pred (build_net rules) end; - -(*versions taking pre-built nets*) -val resolve_from_net_tac = filt_resolution_from_net_tac false (K true); -val match_from_net_tac = filt_resolution_from_net_tac true (K true); - -(*fast versions using nets internally*) -val net_resolve_tac = resolve_from_net_tac o build_net; -val net_match_tac = match_from_net_tac o build_net; - - -(*** For Natural Deduction using (bires_flg, rule) pairs ***) - -(*The number of new subgoals produced by the brule*) -fun subgoals_of_brl (true,rule) = nprems_of rule - 1 - | subgoals_of_brl (false,rule) = nprems_of rule; - -(*Less-than test: for sorting to minimize number of new subgoals*) -fun lessb (brl1,brl2) = subgoals_of_brl brl1 < subgoals_of_brl brl2; - - -(*Renaming of parameters in a subgoal*) -fun rename_tac xs i = - case Library.find_first (not o Symbol_Pos.is_identifier) xs of - SOME x => error ("Not an identifier: " ^ x) - | NONE => PRIMITIVE (Thm.rename_params_rule (xs, i)); - -(*rotate_tac n i: rotate the assumptions of subgoal i by n positions, from - right to left if n is positive, and from left to right if n is negative.*) -fun rotate_tac 0 i = all_tac - | rotate_tac k i = PRIMITIVE (Thm.rotate_rule k i); - -(*Rotates the given subgoal to be the last.*) -fun defer_tac i = PRIMITIVE (Thm.permute_prems (i - 1) 1); - -(*Rotates the given subgoal to be the first.*) -fun prefer_tac i = PRIMITIVE (Thm.permute_prems (i - 1) 1 #> Thm.permute_prems 0 ~1); - -(* remove premises that do not satisfy p; fails if all prems satisfy p *) -fun filter_prems_tac p = - let fun Then NONE tac = SOME tac - | Then (SOME tac) tac' = SOME(tac THEN' tac'); - fun thins H (tac,n) = - if p H then (tac,n+1) - else (Then tac (rotate_tac n THEN' etac thin_rl),0); - in SUBGOAL(fn (subg,n) => - let val Hs = Logic.strip_assums_hyp subg - in case fst(fold thins Hs (NONE,0)) of - NONE => no_tac | SOME tac => tac n - end) - end; - -end; - -structure Basic_Tactic: BASIC_TACTIC = Tactic; -open Basic_Tactic; diff --git a/core/Pure/tactical.ML b/core/Pure/tactical.ML deleted file mode 100644 index fce1612f..00000000 --- a/core/Pure/tactical.ML +++ /dev/null @@ -1,378 +0,0 @@ -(* Title: Pure/tactical.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - 
-Tacticals. -*) - -infix 1 THEN THEN' THEN_ALL_NEW; -infix 0 ORELSE APPEND ORELSE' APPEND'; -infix 0 THEN_ELSE; - -signature TACTICAL = -sig - type tactic = thm -> thm Seq.seq - val THEN: tactic * tactic -> tactic - val ORELSE: tactic * tactic -> tactic - val APPEND: tactic * tactic -> tactic - val THEN_ELSE: tactic * (tactic*tactic) -> tactic - val THEN': ('a -> tactic) * ('a -> tactic) -> 'a -> tactic - val ORELSE': ('a -> tactic) * ('a -> tactic) -> 'a -> tactic - val APPEND': ('a -> tactic) * ('a -> tactic) -> 'a -> tactic - val all_tac: tactic - val no_tac: tactic - val DETERM: tactic -> tactic - val COND: (thm -> bool) -> tactic -> tactic -> tactic - val TRY: tactic -> tactic - val EVERY: tactic list -> tactic - val EVERY': ('a -> tactic) list -> 'a -> tactic - val EVERY1: (int -> tactic) list -> tactic - val FIRST: tactic list -> tactic - val FIRST': ('a -> tactic) list -> 'a -> tactic - val FIRST1: (int -> tactic) list -> tactic - val RANGE: (int -> tactic) list -> int -> tactic - val print_tac: Proof.context -> string -> tactic - val pause_tac: tactic - val trace_REPEAT: bool Unsynchronized.ref - val suppress_tracing: bool Unsynchronized.ref - val tracify: bool Unsynchronized.ref -> tactic -> tactic - val traced_tac: (thm -> (thm * thm Seq.seq) option) -> tactic - val REPEAT_DETERM_N: int -> tactic -> tactic - val REPEAT_DETERM: tactic -> tactic - val REPEAT: tactic -> tactic - val REPEAT_DETERM1: tactic -> tactic - val REPEAT1: tactic -> tactic - val FILTER: (thm -> bool) -> tactic -> tactic - val CHANGED: tactic -> tactic - val CHANGED_PROP: tactic -> tactic - val ALLGOALS: (int -> tactic) -> tactic - val SOMEGOAL: (int -> tactic) -> tactic - val FIRSTGOAL: (int -> tactic) -> tactic - val HEADGOAL: (int -> thm -> 'a Seq.seq) -> thm -> 'a Seq.seq - val REPEAT_SOME: (int -> tactic) -> tactic - val REPEAT_DETERM_SOME: (int -> tactic) -> tactic - val REPEAT_FIRST: (int -> tactic) -> tactic - val REPEAT_DETERM_FIRST: (int -> tactic) -> tactic - val TRYALL: (int -> tactic) -> tactic - val CSUBGOAL: ((cterm * int) -> tactic) -> int -> tactic - val SUBGOAL: ((term * int) -> tactic) -> int -> tactic - val ASSERT_SUBGOAL: (int -> tactic) -> int -> tactic - val CHANGED_GOAL: (int -> tactic) -> int -> tactic - val SOLVED': (int -> tactic) -> int -> tactic - val THEN_ALL_NEW: (int -> tactic) * (int -> tactic) -> int -> tactic - val REPEAT_ALL_NEW: (int -> tactic) -> int -> tactic - val PRIMSEQ: (thm -> thm Seq.seq) -> tactic - val PRIMITIVE: (thm -> thm) -> tactic - val SINGLE: tactic -> thm -> thm option - val CONVERSION: conv -> int -> tactic -end; - -structure Tactical : TACTICAL = -struct - -(**** Tactics ****) - -(*A tactic maps a proof tree to a sequence of proof trees: - if length of sequence = 0 then the tactic does not apply; - if length > 1 then backtracking on the alternatives can occur.*) - -type tactic = thm -> thm Seq.seq; - - -(*** LCF-style tacticals ***) - -(*the tactical THEN performs one tactic followed by another*) -fun (tac1 THEN tac2) st = Seq.maps tac2 (tac1 st); - - -(*The tactical ORELSE uses the first tactic that returns a nonempty sequence. - Like in LCF, ORELSE commits to either tac1 or tac2 immediately. - Does not backtrack to tac2 if tac1 was initially chosen. *) -fun (tac1 ORELSE tac2) st = - case Seq.pull(tac1 st) of - NONE => tac2 st - | sequencecell => Seq.make(fn()=> sequencecell); - - -(*The tactical APPEND combines the results of two tactics. - Like ORELSE, but allows backtracking on both tac1 and tac2. 
- The tactic tac2 is not applied until needed.*) -fun (tac1 APPEND tac2) st = - Seq.append (tac1 st) (Seq.make(fn()=> Seq.pull (tac2 st))); - -(*Conditional tactic. - tac1 ORELSE tac2 = tac1 THEN_ELSE (all_tac, tac2) - tac1 THEN tac2 = tac1 THEN_ELSE (tac2, no_tac) -*) -fun (tac THEN_ELSE (tac1, tac2)) st = - case Seq.pull(tac st) of - NONE => tac2 st (*failed; try tactic 2*) - | seqcell => Seq.maps tac1 (Seq.make(fn()=> seqcell)); (*succeeded; use tactic 1*) - - -(*Versions for combining tactic-valued functions, as in - SOMEGOAL (resolve_tac rls THEN' assume_tac) *) -fun (tac1 THEN' tac2) x = tac1 x THEN tac2 x; -fun (tac1 ORELSE' tac2) x = tac1 x ORELSE tac2 x; -fun (tac1 APPEND' tac2) x = tac1 x APPEND tac2 x; - -(*passes all proofs through unchanged; identity of THEN*) -fun all_tac st = Seq.single st; - -(*passes no proofs through; identity of ORELSE and APPEND*) -fun no_tac st = Seq.empty; - - -(*Make a tactic deterministic by chopping the tail of the proof sequence*) -fun DETERM tac = Seq.DETERM tac; - -(*Conditional tactical: testfun controls which tactic to use next. - Beware: due to eager evaluation, both thentac and elsetac are evaluated.*) -fun COND testfun thenf elsef = (fn prf => - if testfun prf then thenf prf else elsef prf); - -(*Do the tactic or else do nothing*) -fun TRY tac = tac ORELSE all_tac; - -(*** List-oriented tactics ***) - -local - (*This version of EVERY avoids backtracking over repeated states*) - - fun EVY (trail, []) st = - Seq.make (fn()=> SOME(st, - Seq.make (fn()=> Seq.pull (evyBack trail)))) - | EVY (trail, tac::tacs) st = - case Seq.pull(tac st) of - NONE => evyBack trail (*failed: backtrack*) - | SOME(st',q) => EVY ((st',q,tacs)::trail, tacs) st' - and evyBack [] = Seq.empty (*no alternatives*) - | evyBack ((st',q,tacs)::trail) = - case Seq.pull q of - NONE => evyBack trail - | SOME(st,q') => if Thm.eq_thm (st',st) - then evyBack ((st',q',tacs)::trail) - else EVY ((st,q',tacs)::trail, tacs) st -in - -(* EVERY [tac1,...,tacn] equals tac1 THEN ... THEN tacn *) -fun EVERY tacs = EVY ([], tacs); -end; - - -(* EVERY' [tac1,...,tacn] i equals tac1 i THEN ... THEN tacn i *) -fun EVERY' tacs i = EVERY (map (fn f => f i) tacs); - -(*Apply every tactic to 1*) -fun EVERY1 tacs = EVERY' tacs 1; - -(* FIRST [tac1,...,tacn] equals tac1 ORELSE ... ORELSE tacn *) -fun FIRST tacs = fold_rev (curry op ORELSE) tacs no_tac; - -(* FIRST' [tac1,...,tacn] i equals tac1 i ORELSE ... ORELSE tacn i *) -fun FIRST' tacs = fold_rev (curry op ORELSE') tacs (K no_tac); - -(*Apply first tactic to 1*) -fun FIRST1 tacs = FIRST' tacs 1; - -(*Apply tactics on consecutive subgoals*) -fun RANGE [] _ = all_tac - | RANGE (tac :: tacs) i = RANGE tacs (i + 1) THEN tac i; - - -(*** Tracing tactics ***) - -(*Print the current proof state and pass it on.*) -fun print_tac ctxt msg st = - (tracing (msg ^ "\n" ^ Pretty.string_of (Pretty.chunks (Goal_Display.pretty_goals ctxt st))); - Seq.single st); - -(*Pause until a line is typed -- if non-empty then fail. 
*) -fun pause_tac st = - (tracing "** Press RETURN to continue:"; - if TextIO.inputLine TextIO.stdIn = SOME "\n" then Seq.single st - else (tracing "Goodbye"; Seq.empty)); - -exception TRACE_EXIT of thm -and TRACE_QUIT; - -(*Tracing flags*) -val trace_REPEAT= Unsynchronized.ref false -and suppress_tracing = Unsynchronized.ref false; - -(*Handle all tracing commands for current state and tactic *) -fun exec_trace_command flag (tac, st) = - case TextIO.inputLine TextIO.stdIn of - SOME "\n" => tac st - | SOME "f\n" => Seq.empty - | SOME "o\n" => (flag := false; tac st) - | SOME "s\n" => (suppress_tracing := true; tac st) - | SOME "x\n" => (tracing "Exiting now"; raise (TRACE_EXIT st)) - | SOME "quit\n" => raise TRACE_QUIT - | _ => (tracing -"Type RETURN to continue or...\n\ -\ f - to fail here\n\ -\ o - to switch tracing off\n\ -\ s - to suppress tracing until next entry to a tactical\n\ -\ x - to exit at this point\n\ -\ quit - to abort this tracing run\n\ -\** Well? " ; exec_trace_command flag (tac, st)); - - -(*Extract from a tactic, a thm->thm seq function that handles tracing*) -fun tracify flag tac st = - if !flag andalso not (!suppress_tracing) then - (tracing (Pretty.string_of (Pretty.chunks - (Goal_Display.pretty_goals (Syntax.init_pretty_global (Thm.theory_of_thm st)) st @ - [Pretty.str "** Press RETURN to continue:"]))); - exec_trace_command flag (tac, st)) - else tac st; - -(*Create a tactic whose outcome is given by seqf, handling TRACE_EXIT*) -fun traced_tac seqf st = - (suppress_tracing := false; - Seq.make (fn()=> seqf st - handle TRACE_EXIT st' => SOME(st', Seq.empty))); - - -(*Deterministic REPEAT: only retains the first outcome; - uses less space than REPEAT; tail recursive. - If non-negative, n bounds the number of repetitions.*) -fun REPEAT_DETERM_N n tac = - let val tac = tracify trace_REPEAT tac - fun drep 0 st = SOME(st, Seq.empty) - | drep n st = - (case Seq.pull(tac st) of - NONE => SOME(st, Seq.empty) - | SOME(st',_) => drep (n-1) st') - in traced_tac (drep n) end; - -(*Allows any number of repetitions*) -val REPEAT_DETERM = REPEAT_DETERM_N ~1; - -(*General REPEAT: maintains a stack of alternatives; tail recursive*) -fun REPEAT tac = - let val tac = tracify trace_REPEAT tac - fun rep qs st = - case Seq.pull(tac st) of - NONE => SOME(st, Seq.make(fn()=> repq qs)) - | SOME(st',q) => rep (q::qs) st' - and repq [] = NONE - | repq(q::qs) = case Seq.pull q of - NONE => repq qs - | SOME(st,q) => rep (q::qs) st - in traced_tac (rep []) end; - -(*Repeat 1 or more times*) -fun REPEAT_DETERM1 tac = DETERM tac THEN REPEAT_DETERM tac; -fun REPEAT1 tac = tac THEN REPEAT tac; - - -(** Filtering tacticals **) - -fun FILTER pred tac st = Seq.filter pred (tac st); - -(*Accept only next states that change the theorem somehow*) -fun CHANGED tac st = - let fun diff st' = not (Thm.eq_thm (st, st')); - in Seq.filter diff (tac st) end; - -(*Accept only next states that change the theorem's prop field - (changes to signature, hyps, etc. don't count)*) -fun CHANGED_PROP tac st = - let fun diff st' = not (Thm.eq_thm_prop (st, st')); - in Seq.filter diff (tac st) end; - - -(*** Tacticals based on subgoal numbering ***) - -(*For n subgoals, performs tac(n) THEN ... THEN tac(1) - Essential to work backwards since tac(i) may add/delete subgoals at i. *) -fun ALLGOALS tac st = - let fun doall 0 = all_tac - | doall n = tac(n) THEN doall(n-1) - in doall(nprems_of st)st end; - -(*For n subgoals, performs tac(n) ORELSE ... 
ORELSE tac(1) *) -fun SOMEGOAL tac st = - let fun find 0 = no_tac - | find n = tac(n) ORELSE find(n-1) - in find(nprems_of st)st end; - -(*For n subgoals, performs tac(1) ORELSE ... ORELSE tac(n). - More appropriate than SOMEGOAL in some cases.*) -fun FIRSTGOAL tac st = - let fun find (i,n) = if i>n then no_tac else tac(i) ORELSE find (i+1,n) - in find(1, nprems_of st)st end; - -(*First subgoal only.*) -fun HEADGOAL tac = tac 1; - -(*Repeatedly solve some using tac. *) -fun REPEAT_SOME tac = REPEAT1 (SOMEGOAL (REPEAT1 o tac)); -fun REPEAT_DETERM_SOME tac = REPEAT_DETERM1 (SOMEGOAL (REPEAT_DETERM1 o tac)); - -(*Repeatedly solve the first possible subgoal using tac. *) -fun REPEAT_FIRST tac = REPEAT1 (FIRSTGOAL (REPEAT1 o tac)); -fun REPEAT_DETERM_FIRST tac = REPEAT_DETERM1 (FIRSTGOAL (REPEAT_DETERM1 o tac)); - -(*For n subgoals, tries to apply tac to n,...1 *) -fun TRYALL tac = ALLGOALS (TRY o tac); - - -(*Make a tactic for subgoal i, if there is one. *) -fun CSUBGOAL goalfun i st = - (case SOME (Thm.cprem_of st i) handle THM _ => NONE of - SOME goal => goalfun (goal, i) st - | NONE => Seq.empty); - -fun SUBGOAL goalfun = - CSUBGOAL (fn (goal, i) => goalfun (Thm.term_of goal, i)); - -fun ASSERT_SUBGOAL (tac: int -> tactic) i st = (Logic.get_goal (Thm.prop_of st) i; tac i st); - -(*Returns all states that have changed in subgoal i, counted from the LAST - subgoal. For stac, for example.*) -fun CHANGED_GOAL tac i st = - SUBGOAL (fn (t, _) => - let - val np = Thm.nprems_of st; - val d = np - i; (*distance from END*) - fun diff st' = - Thm.nprems_of st' - d <= 0 orelse (*the subgoal no longer exists*) - not (Envir.aeconv (t, Thm.term_of (Thm.cprem_of st' (Thm.nprems_of st' - d)))); - in Seq.filter diff o tac i end) i st; - -(*Returns all states where some subgoals have been solved. For - subgoal-based tactics this means subgoal i has been solved - altogether -- no new subgoals have emerged.*) -fun SOLVED' tac i st = - tac i st |> Seq.filter (fn st' => nprems_of st' < nprems_of st); - -(*Apply second tactic to all subgoals emerging from the first -- - following usual convention for subgoal-based tactics.*) -fun (tac1 THEN_ALL_NEW tac2) i st = - st |> (tac1 i THEN (fn st' => Seq.INTERVAL tac2 i (i + nprems_of st' - nprems_of st) st')); - -(*Repeatedly dig into any emerging subgoals.*) -fun REPEAT_ALL_NEW tac = - tac THEN_ALL_NEW (TRY o (fn i => REPEAT_ALL_NEW tac i)); - -(*Makes a tactic whose effect on a state is given by thmfun: thm->thm seq.*) -fun PRIMSEQ thmfun st = thmfun st handle THM _ => Seq.empty; - -(*Makes a tactic whose effect on a state is given by thmfun: thm->thm.*) -fun PRIMITIVE thmfun = PRIMSEQ (Seq.single o thmfun); - -(*Inverse (more or less) of PRIMITIVE*) -fun SINGLE tacf = Option.map fst o Seq.pull o tacf - -(*Conversions as tactics*) -fun CONVERSION cv i st = Seq.single (Conv.gconv_rule cv i st) - handle THM _ => Seq.empty - | CTERM _ => Seq.empty - | TERM _ => Seq.empty - | TYPE _ => Seq.empty; - -end; - -open Tactical; diff --git a/core/Pure/term.ML b/core/Pure/term.ML deleted file mode 100644 index 63d1b4ae..00000000 --- a/core/Pure/term.ML +++ /dev/null @@ -1,996 +0,0 @@ -(* Title: Pure/term.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - Author: Makarius - -Simply typed lambda-calculus: types, terms, and basic operations. 
-*) - -infix 9 $; -infixr 5 -->; -infixr --->; -infix aconv; - -signature BASIC_TERM = -sig - type indexname = string * int - type class = string - type sort = class list - type arity = string * sort list * sort - datatype typ = - Type of string * typ list | - TFree of string * sort | - TVar of indexname * sort - datatype term = - Const of string * typ | - Free of string * typ | - Var of indexname * typ | - Bound of int | - Abs of string * typ * term | - $ of term * term - exception TYPE of string * typ list * term list - exception TERM of string * term list - val dummyS: sort - val dummyT: typ - val no_dummyT: typ -> typ - val --> : typ * typ -> typ - val ---> : typ list * typ -> typ - val dest_Type: typ -> string * typ list - val dest_TVar: typ -> indexname * sort - val dest_TFree: typ -> string * sort - val is_Bound: term -> bool - val is_Const: term -> bool - val is_Free: term -> bool - val is_Var: term -> bool - val is_TVar: typ -> bool - val dest_Const: term -> string * typ - val dest_Free: term -> string * typ - val dest_Var: term -> indexname * typ - val dest_comb: term -> term * term - val domain_type: typ -> typ - val range_type: typ -> typ - val dest_funT: typ -> typ * typ - val binder_types: typ -> typ list - val body_type: typ -> typ - val strip_type: typ -> typ list * typ - val type_of1: typ list * term -> typ - val type_of: term -> typ - val fastype_of1: typ list * term -> typ - val fastype_of: term -> typ - val strip_abs: term -> (string * typ) list * term - val strip_abs_body: term -> term - val strip_abs_vars: term -> (string * typ) list - val strip_qnt_body: string -> term -> term - val strip_qnt_vars: string -> term -> (string * typ) list - val list_comb: term * term list -> term - val strip_comb: term -> term * term list - val head_of: term -> term - val size_of_term: term -> int - val size_of_typ: typ -> int - val map_atyps: (typ -> typ) -> typ -> typ - val map_aterms: (term -> term) -> term -> term - val map_type_tvar: (indexname * sort -> typ) -> typ -> typ - val map_type_tfree: (string * sort -> typ) -> typ -> typ - val map_types: (typ -> typ) -> term -> term - val fold_atyps: (typ -> 'a -> 'a) -> typ -> 'a -> 'a - val fold_atyps_sorts: (typ * sort -> 'a -> 'a) -> typ -> 'a -> 'a - val fold_aterms: (term -> 'a -> 'a) -> term -> 'a -> 'a - val fold_term_types: (term -> typ -> 'a -> 'a) -> term -> 'a -> 'a - val fold_types: (typ -> 'a -> 'a) -> term -> 'a -> 'a - val burrow_types: (typ list -> typ list) -> term list -> term list - val aconv: term * term -> bool - val propT: typ - val strip_all_body: term -> term - val strip_all_vars: term -> (string * typ) list - val incr_bv: int * int * term -> term - val incr_boundvars: int -> term -> term - val add_loose_bnos: term * int * int list -> int list - val loose_bnos: term -> int list - val loose_bvar: term * int -> bool - val loose_bvar1: term * int -> bool - val subst_bounds: term list * term -> term - val subst_bound: term * term -> term - val betapply: term * term -> term - val betapplys: term * term list -> term - val subst_free: (term * term) list -> term -> term - val abstract_over: term * term -> term - val lambda: term -> term -> term - val absfree: string * typ -> term -> term - val absdummy: typ -> term -> term - val subst_atomic: (term * term) list -> term -> term - val typ_subst_atomic: (typ * typ) list -> typ -> typ - val subst_atomic_types: (typ * typ) list -> term -> term - val typ_subst_TVars: (indexname * typ) list -> typ -> typ - val subst_TVars: (indexname * typ) list -> term -> term - val subst_Vars: 
(indexname * term) list -> term -> term - val subst_vars: (indexname * typ) list * (indexname * term) list -> term -> term - val is_first_order: string list -> term -> bool - val maxidx_of_typ: typ -> int - val maxidx_of_typs: typ list -> int - val maxidx_of_term: term -> int - val exists_subtype: (typ -> bool) -> typ -> bool - val exists_type: (typ -> bool) -> term -> bool - val exists_subterm: (term -> bool) -> term -> bool - val exists_Const: (string * typ -> bool) -> term -> bool -end; - -signature TERM = -sig - include BASIC_TERM - val aT: sort -> typ - val itselfT: typ -> typ - val a_itselfT: typ - val argument_type_of: term -> int -> typ - val abs: string * typ -> term -> term - val add_tvar_namesT: typ -> indexname list -> indexname list - val add_tvar_names: term -> indexname list -> indexname list - val add_tvarsT: typ -> (indexname * sort) list -> (indexname * sort) list - val add_tvars: term -> (indexname * sort) list -> (indexname * sort) list - val add_var_names: term -> indexname list -> indexname list - val add_vars: term -> (indexname * typ) list -> (indexname * typ) list - val add_tfree_namesT: typ -> string list -> string list - val add_tfree_names: term -> string list -> string list - val add_tfreesT: typ -> (string * sort) list -> (string * sort) list - val add_tfrees: term -> (string * sort) list -> (string * sort) list - val add_free_names: term -> string list -> string list - val add_frees: term -> (string * typ) list -> (string * typ) list - val add_const_names: term -> string list -> string list - val add_consts: term -> (string * typ) list -> (string * typ) list - val hidden_polymorphism: term -> (indexname * sort) list - val declare_typ_names: typ -> Name.context -> Name.context - val declare_term_names: term -> Name.context -> Name.context - val declare_term_frees: term -> Name.context -> Name.context - val variant_frees: term -> (string * 'a) list -> (string * 'a) list - val rename_wrt_term: term -> (string * 'a) list -> (string * 'a) list - val eq_ix: indexname * indexname -> bool - val eq_tvar: (indexname * sort) * (indexname * sort) -> bool - val eq_var: (indexname * typ) * (indexname * typ) -> bool - val aconv_untyped: term * term -> bool - val could_unify: term * term -> bool - val strip_abs_eta: int -> term -> (string * typ) list * term - val match_bvars: (term * term) -> (string * string) list -> (string * string) list - val map_abs_vars: (string -> string) -> term -> term - val rename_abs: term -> term -> term -> term option - val is_open: term -> bool - val is_dependent: term -> bool - val lambda_name: string * term -> term -> term - val close_schematic_term: term -> term - val maxidx_typ: typ -> int -> int - val maxidx_typs: typ list -> int -> int - val maxidx_term: term -> int -> int - val has_abs: term -> bool - val dest_abs: string * typ * term -> string * term - val dummy_pattern: typ -> term - val dummy: term - val dummy_prop: term - val is_dummy_pattern: term -> bool - val free_dummy_patterns: term -> Name.context -> term * Name.context - val no_dummy_patterns: term -> term - val replace_dummy_patterns: term -> int -> term * int - val is_replaced_dummy_pattern: indexname -> bool - val show_dummy_patterns: term -> term - val string_of_vname: indexname -> string - val string_of_vname': indexname -> string -end; - -structure Term: TERM = -struct - -(*Indexnames can be quickly renamed by adding an offset to the integer part, - for resolution.*) -type indexname = string * int; - -(* Types are classified by sorts. 
*) -type class = string; -type sort = class list; -type arity = string * sort list * sort; - -(* The sorts attached to TFrees and TVars specify the sort of that variable *) -datatype typ = Type of string * typ list - | TFree of string * sort - | TVar of indexname * sort; - -(*Terms. Bound variables are indicated by depth number. - Free variables, (scheme) variables and constants have names. - An term is "closed" if every bound variable of level "lev" - is enclosed by at least "lev" abstractions. - - It is possible to create meaningless terms containing loose bound vars - or type mismatches. But such terms are not allowed in rules. *) - -datatype term = - Const of string * typ - | Free of string * typ - | Var of indexname * typ - | Bound of int - | Abs of string*typ*term - | op $ of term*term; - -(*Errors involving type mismatches*) -exception TYPE of string * typ list * term list; - -(*Errors errors involving terms*) -exception TERM of string * term list; - -(*Note variable naming conventions! - a,b,c: string - f,g,h: functions (including terms of function type) - i,j,m,n: int - t,u: term - v,w: indexnames - x,y: any - A,B,C: term (denoting formulae) - T,U: typ -*) - - -(** Types **) - -(*dummies for type-inference etc.*) -val dummyS = [""]; -val dummyT = Type ("dummy", []); - -fun no_dummyT typ = - let - fun check (T as Type ("dummy", _)) = - raise TYPE ("Illegal occurrence of '_' dummy type", [T], []) - | check (Type (_, Ts)) = List.app check Ts - | check _ = (); - in check typ; typ end; - -fun S --> T = Type("fun",[S,T]); - -(*handy for multiple args: [T1,...,Tn]--->T gives T1-->(T2--> ... -->T)*) -val op ---> = Library.foldr (op -->); - -fun dest_Type (Type x) = x - | dest_Type T = raise TYPE ("dest_Type", [T], []); -fun dest_TVar (TVar x) = x - | dest_TVar T = raise TYPE ("dest_TVar", [T], []); -fun dest_TFree (TFree x) = x - | dest_TFree T = raise TYPE ("dest_TFree", [T], []); - - -(** Discriminators **) - -fun is_Bound (Bound _) = true - | is_Bound _ = false; - -fun is_Const (Const _) = true - | is_Const _ = false; - -fun is_Free (Free _) = true - | is_Free _ = false; - -fun is_Var (Var _) = true - | is_Var _ = false; - -fun is_TVar (TVar _) = true - | is_TVar _ = false; - - -(** Destructors **) - -fun dest_Const (Const x) = x - | dest_Const t = raise TERM("dest_Const", [t]); - -fun dest_Free (Free x) = x - | dest_Free t = raise TERM("dest_Free", [t]); - -fun dest_Var (Var x) = x - | dest_Var t = raise TERM("dest_Var", [t]); - -fun dest_comb (t1 $ t2) = (t1, t2) - | dest_comb t = raise TERM("dest_comb", [t]); - - -fun domain_type (Type ("fun", [T, _])) = T; - -fun range_type (Type ("fun", [_, U])) = U; - -fun dest_funT (Type ("fun", [T, U])) = (T, U) - | dest_funT T = raise TYPE ("dest_funT", [T], []); - - -(* maps [T1,...,Tn]--->T to the list [T1,T2,...,Tn]*) -fun binder_types (Type ("fun", [T, U])) = T :: binder_types U - | binder_types _ = []; - -(* maps [T1,...,Tn]--->T to T*) -fun body_type (Type ("fun", [_, U])) = body_type U - | body_type T = T; - -(* maps [T1,...,Tn]--->T to ([T1,T2,...,Tn], T) *) -fun strip_type T = (binder_types T, body_type T); - - -(*Compute the type of the term, checking that combinations are well-typed - Ts = [T0,T1,...] 
holds types of bound variables 0, 1, ...*) -fun type_of1 (Ts, Const (_,T)) = T - | type_of1 (Ts, Free (_,T)) = T - | type_of1 (Ts, Bound i) = (nth Ts i - handle General.Subscript => raise TYPE("type_of: bound variable", [], [Bound i])) - | type_of1 (Ts, Var (_,T)) = T - | type_of1 (Ts, Abs (_,T,body)) = T --> type_of1(T::Ts, body) - | type_of1 (Ts, f$u) = - let val U = type_of1(Ts,u) - and T = type_of1(Ts,f) - in case T of - Type("fun",[T1,T2]) => - if T1=U then T2 else raise TYPE - ("type_of: type mismatch in application", [T1,U], [f$u]) - | _ => raise TYPE - ("type_of: function type is expected in application", - [T,U], [f$u]) - end; - -fun type_of t : typ = type_of1 ([],t); - -(*Determines the type of a term, with minimal checking*) -fun fastype_of1 (Ts, f$u) = - (case fastype_of1 (Ts,f) of - Type("fun",[_,T]) => T - | _ => raise TERM("fastype_of: expected function type", [f$u])) - | fastype_of1 (_, Const (_,T)) = T - | fastype_of1 (_, Free (_,T)) = T - | fastype_of1 (Ts, Bound i) = (nth Ts i - handle General.Subscript => raise TERM("fastype_of: Bound", [Bound i])) - | fastype_of1 (_, Var (_,T)) = T - | fastype_of1 (Ts, Abs (_,T,u)) = T --> fastype_of1 (T::Ts, u); - -fun fastype_of t : typ = fastype_of1 ([],t); - -(*Determine the argument type of a function*) -fun argument_type_of tm k = - let - fun argT i (Type ("fun", [T, U])) = if i = 0 then T else argT (i - 1) U - | argT _ T = raise TYPE ("argument_type_of", [T], []); - - fun arg 0 _ (Abs (_, T, _)) = T - | arg i Ts (Abs (_, T, t)) = arg (i - 1) (T :: Ts) t - | arg i Ts (t $ _) = arg (i + 1) Ts t - | arg i Ts a = argT i (fastype_of1 (Ts, a)); - in arg k [] tm end; - - -fun abs (x, T) t = Abs (x, T, t); - -fun strip_abs (Abs (a, T, t)) = - let val (a', t') = strip_abs t - in ((a, T) :: a', t') end - | strip_abs t = ([], t); - -(* maps (x1,...,xn)t to t *) -fun strip_abs_body (Abs(_,_,t)) = strip_abs_body t - | strip_abs_body u = u; - -(* maps (x1,...,xn)t to [x1, ..., xn] *) -fun strip_abs_vars (Abs(a,T,t)) = (a,T) :: strip_abs_vars t - | strip_abs_vars u = [] : (string*typ) list; - - -fun strip_qnt_body qnt = -let fun strip(tm as Const(c,_)$Abs(_,_,t)) = if c=qnt then strip t else tm - | strip t = t -in strip end; - -fun strip_qnt_vars qnt = -let fun strip(Const(c,_)$Abs(a,T,t)) = if c=qnt then (a,T)::strip t else [] - | strip t = [] : (string*typ) list -in strip end; - - -(* maps (f, [t1,...,tn]) to f(t1,...,tn) *) -val list_comb : term * term list -> term = Library.foldl (op $); - - -(* maps f(t1,...,tn) to (f, [t1,...,tn]) ; naturally tail-recursive*) -fun strip_comb u : term * term list = - let fun stripc (f$t, ts) = stripc (f, t::ts) - | stripc x = x - in stripc(u,[]) end; - - -(* maps f(t1,...,tn) to f , which is never a combination *) -fun head_of (f$t) = head_of f - | head_of u = u; - -(*number of atoms and abstractions in a term*) -fun size_of_term tm = - let - fun add_size (t $ u) n = add_size t (add_size u n) - | add_size (Abs (_ ,_, t)) n = add_size t (n + 1) - | add_size _ n = n + 1; - in add_size tm 0 end; - -(*number of atoms and constructors in a type*) -fun size_of_typ ty = - let - fun add_size (Type (_, tys)) n = fold add_size tys (n + 1) - | add_size _ n = n + 1; - in add_size ty 0 end; - -fun map_atyps f (Type (a, Ts)) = Type (a, map (map_atyps f) Ts) - | map_atyps f T = f T; - -fun map_aterms f (t $ u) = map_aterms f t $ map_aterms f u - | map_aterms f (Abs (a, T, t)) = Abs (a, T, map_aterms f t) - | map_aterms f t = f t; - -fun map_type_tvar f = map_atyps (fn TVar x => f x | T => T); -fun map_type_tfree f = 
map_atyps (fn TFree x => f x | T => T); - -fun map_types f = - let - fun map_aux (Const (a, T)) = Const (a, f T) - | map_aux (Free (a, T)) = Free (a, f T) - | map_aux (Var (v, T)) = Var (v, f T) - | map_aux (Bound i) = Bound i - | map_aux (Abs (a, T, t)) = Abs (a, f T, map_aux t) - | map_aux (t $ u) = map_aux t $ map_aux u; - in map_aux end; - - -(* fold types and terms *) - -fun fold_atyps f (Type (_, Ts)) = fold (fold_atyps f) Ts - | fold_atyps f T = f T; - -fun fold_atyps_sorts f = - fold_atyps (fn T as TFree (_, S) => f (T, S) | T as TVar (_, S) => f (T, S)); - -fun fold_aterms f (t $ u) = fold_aterms f t #> fold_aterms f u - | fold_aterms f (Abs (_, _, t)) = fold_aterms f t - | fold_aterms f a = f a; - -fun fold_term_types f (t as Const (_, T)) = f t T - | fold_term_types f (t as Free (_, T)) = f t T - | fold_term_types f (t as Var (_, T)) = f t T - | fold_term_types f (Bound _) = I - | fold_term_types f (t as Abs (_, T, b)) = f t T #> fold_term_types f b - | fold_term_types f (t $ u) = fold_term_types f t #> fold_term_types f u; - -fun fold_types f = fold_term_types (K f); - -fun replace_types (Const (c, _)) (T :: Ts) = (Const (c, T), Ts) - | replace_types (Free (x, _)) (T :: Ts) = (Free (x, T), Ts) - | replace_types (Var (xi, _)) (T :: Ts) = (Var (xi, T), Ts) - | replace_types (Bound i) Ts = (Bound i, Ts) - | replace_types (Abs (x, _, b)) (T :: Ts) = - let val (b', Ts') = replace_types b Ts - in (Abs (x, T, b'), Ts') end - | replace_types (t $ u) Ts = - let - val (t', Ts') = replace_types t Ts; - val (u', Ts'') = replace_types u Ts'; - in (t' $ u', Ts'') end; - -fun burrow_types f ts = - let - val Ts = rev ((fold o fold_types) cons ts []); - val Ts' = f Ts; - val (ts', []) = fold_map replace_types ts Ts'; - in ts' end; - -(*collect variables*) -val add_tvar_namesT = fold_atyps (fn TVar (xi, _) => insert (op =) xi | _ => I); -val add_tvar_names = fold_types add_tvar_namesT; -val add_tvarsT = fold_atyps (fn TVar v => insert (op =) v | _ => I); -val add_tvars = fold_types add_tvarsT; -val add_var_names = fold_aterms (fn Var (xi, _) => insert (op =) xi | _ => I); -val add_vars = fold_aterms (fn Var v => insert (op =) v | _ => I); -val add_tfree_namesT = fold_atyps (fn TFree (a, _) => insert (op =) a | _ => I); -val add_tfree_names = fold_types add_tfree_namesT; -val add_tfreesT = fold_atyps (fn TFree v => insert (op =) v | _ => I); -val add_tfrees = fold_types add_tfreesT; -val add_free_names = fold_aterms (fn Free (x, _) => insert (op =) x | _ => I); -val add_frees = fold_aterms (fn Free v => insert (op =) v | _ => I); -val add_const_names = fold_aterms (fn Const (c, _) => insert (op =) c | _ => I); -val add_consts = fold_aterms (fn Const c => insert (op =) c | _ => I); - -(*extra type variables in a term, not covered by its type*) -fun hidden_polymorphism t = - let - val T = fastype_of t; - val tvarsT = add_tvarsT T []; - val extra_tvars = fold_types (fold_atyps - (fn TVar v => if member (op =) tvarsT v then I else insert (op =) v | _ => I)) t []; - in extra_tvars end; - - -(* renaming variables *) - -val declare_typ_names = fold_atyps (fn TFree (a, _) => Name.declare a | _ => I); - -fun declare_term_names tm = - fold_aterms - (fn Const (a, _) => Name.declare (Long_Name.base_name a) - | Free (a, _) => Name.declare a - | _ => I) tm #> - fold_types declare_typ_names tm; - -val declare_term_frees = fold_aterms (fn Free (x, _) => Name.declare x | _ => I); - -fun variant_frees t frees = - fst (fold_map Name.variant (map fst frees) (declare_term_names t Name.context)) ~~ - map snd frees; - 
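(For illustration only; the following block is an editorial sketch, not part of the deleted term.ML.) The renaming helpers above (declare_term_names, variant_frees) rest on one idea: pick a variant of each name that avoids every name already declared in the context. A minimal, self-contained Standard ML sketch of that idea, assuming only the Basis Library rather than Isabelle's Name module, with the "used" context represented as a plain list of taken names:

    (* prime a name until it clashes with nothing in the used list *)
    fun variant used x =
      let
        fun fresh y =
          if List.exists (fn z => z = y) used then fresh (y ^ "'") else y
      in fresh x end;

For example, variant ["x", "x'"] "x" evaluates to "x''", mirroring how variant_frees above avoids the names declared by the term before pairing the fresh names back with their types.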
-fun rename_wrt_term t frees = rev (variant_frees t frees); (*reversed result!*) - - - -(** Comparing terms **) - -(* variables *) - -fun eq_ix ((x, i): indexname, (y, j)) = i = j andalso x = y; - -fun eq_tvar ((xi, S: sort), (xi', S')) = eq_ix (xi, xi') andalso S = S'; -fun eq_var ((xi, T: typ), (xi', T')) = eq_ix (xi, xi') andalso T = T'; - - -(* alpha equivalence *) - -fun tm1 aconv tm2 = - pointer_eq (tm1, tm2) orelse - (case (tm1, tm2) of - (t1 $ u1, t2 $ u2) => t1 aconv t2 andalso u1 aconv u2 - | (Abs (_, T1, t1), Abs (_, T2, t2)) => t1 aconv t2 andalso T1 = T2 - | (a1, a2) => a1 = a2); - -fun aconv_untyped (tm1, tm2) = - pointer_eq (tm1, tm2) orelse - (case (tm1, tm2) of - (t1 $ u1, t2 $ u2) => aconv_untyped (t1, t2) andalso aconv_untyped (u1, u2) - | (Abs (_, _, t1), Abs (_, _, t2)) => aconv_untyped (t1, t2) - | (Const (a, _), Const (b, _)) => a = b - | (Free (x, _), Free (y, _)) => x = y - | (Var (xi, _), Var (yj, _)) => xi = yj - | (Bound i, Bound j) => i = j - | _ => false); - - -(*A fast unification filter: true unless the two terms cannot be unified. - Terms must be NORMAL. Treats all Vars as distinct. *) -fun could_unify (t, u) = - let - fun matchrands (f $ t) (g $ u) = could_unify (t, u) andalso matchrands f g - | matchrands _ _ = true; - in - case (head_of t, head_of u) of - (_, Var _) => true - | (Var _, _) => true - | (Const (a, _), Const (b, _)) => a = b andalso matchrands t u - | (Free (a, _), Free (b, _)) => a = b andalso matchrands t u - | (Bound i, Bound j) => i = j andalso matchrands t u - | (Abs _, _) => true (*because of possible eta equality*) - | (_, Abs _) => true - | _ => false - end; - - - -(** Connectives of higher order logic **) - -fun aT S = TFree (Name.aT, S); - -fun itselfT ty = Type ("itself", [ty]); -val a_itselfT = itselfT (TFree (Name.aT, [])); - -val propT : typ = Type ("prop",[]); - -(* maps !!x1...xn. t to t *) -fun strip_all_body (Const("Pure.all",_)$Abs(_,_,t)) = strip_all_body t - | strip_all_body t = t; - -(* maps !!x1...xn. 
t to [x1, ..., xn] *) -fun strip_all_vars (Const("Pure.all",_)$Abs(a,T,t)) = - (a,T) :: strip_all_vars t - | strip_all_vars t = [] : (string*typ) list; - -(*increments a term's non-local bound variables - required when moving a term within abstractions - inc is increment for bound variables - lev is level at which a bound variable is considered 'loose'*) -fun incr_bv (inc, lev, u as Bound i) = if i>=lev then Bound(i+inc) else u - | incr_bv (inc, lev, Abs(a,T,body)) = - Abs(a, T, incr_bv(inc,lev+1,body)) - | incr_bv (inc, lev, f$t) = - incr_bv(inc,lev,f) $ incr_bv(inc,lev,t) - | incr_bv (inc, lev, u) = u; - -fun incr_boundvars 0 t = t - | incr_boundvars inc t = incr_bv(inc,0,t); - -(*Scan a pair of terms; while they are similar, - accumulate corresponding bound vars in "al"*) -fun match_bvs(Abs(x,_,s),Abs(y,_,t), al) = - match_bvs(s, t, if x="" orelse y="" then al - else (x,y)::al) - | match_bvs(f$s, g$t, al) = match_bvs(f,g,match_bvs(s,t,al)) - | match_bvs(_,_,al) = al; - -(* strip abstractions created by parameters *) -fun match_bvars (s,t) al = match_bvs(strip_abs_body s, strip_abs_body t, al); - -fun map_abs_vars f (t $ u) = map_abs_vars f t $ map_abs_vars f u - | map_abs_vars f (Abs (a, T, t)) = Abs (f a, T, map_abs_vars f t) - | map_abs_vars f t = t; - -fun rename_abs pat obj t = - let - val ren = match_bvs (pat, obj, []); - fun ren_abs (Abs (x, T, b)) = - Abs (the_default x (AList.lookup (op =) ren x), T, ren_abs b) - | ren_abs (f $ t) = ren_abs f $ ren_abs t - | ren_abs t = t - in if null ren then NONE else SOME (ren_abs t) end; - -(*Accumulate all 'loose' bound vars referring to level 'lev' or beyond. - (Bound 0) is loose at level 0 *) -fun add_loose_bnos (Bound i, lev, js) = - if i= k - | loose_bvar(f$t, k) = loose_bvar(f,k) orelse loose_bvar(t,k) - | loose_bvar(Abs(_,_,t),k) = loose_bvar(t,k+1) - | loose_bvar _ = false; - -fun loose_bvar1(Bound i,k) = i = k - | loose_bvar1(f$t, k) = loose_bvar1(f,k) orelse loose_bvar1(t,k) - | loose_bvar1(Abs(_,_,t),k) = loose_bvar1(t,k+1) - | loose_bvar1 _ = false; - -fun is_open t = loose_bvar (t, 0); -fun is_dependent t = loose_bvar1 (t, 0); - -(*Substitute arguments for loose bound variables. - Beta-reduction of arg(n-1)...arg0 into t replacing (Bound i) with (argi). - Note that for ((%x y. c) a b), the bound vars in c are x=1 and y=0 - and the appropriate call is subst_bounds([b,a], c) . - Loose bound variables >=n are reduced by "n" to - compensate for the disappearance of lambdas. 
-*) -fun subst_bounds (args: term list, t) : term = - let - val n = length args; - fun subst (t as Bound i, lev) = - (if i < lev then raise Same.SAME (*var is locally bound*) - else incr_boundvars lev (nth args (i - lev)) - handle General.Subscript => Bound (i - n)) (*loose: change it*) - | subst (Abs (a, T, body), lev) = Abs (a, T, subst (body, lev + 1)) - | subst (f $ t, lev) = - (subst (f, lev) $ (subst (t, lev) handle Same.SAME => t) - handle Same.SAME => f $ subst (t, lev)) - | subst _ = raise Same.SAME; - in case args of [] => t | _ => (subst (t, 0) handle Same.SAME => t) end; - -(*Special case: one argument*) -fun subst_bound (arg, t) : term = - let - fun subst (Bound i, lev) = - if i < lev then raise Same.SAME (*var is locally bound*) - else if i = lev then incr_boundvars lev arg - else Bound (i - 1) (*loose: change it*) - | subst (Abs (a, T, body), lev) = Abs (a, T, subst (body, lev + 1)) - | subst (f $ t, lev) = - (subst (f, lev) $ (subst (t, lev) handle Same.SAME => t) - handle Same.SAME => f $ subst (t, lev)) - | subst _ = raise Same.SAME; - in subst (t, 0) handle Same.SAME => t end; - -(*beta-reduce if possible, else form application*) -fun betapply (Abs(_,_,t), u) = subst_bound (u,t) - | betapply (f,u) = f$u; - -val betapplys = Library.foldl betapply; - - -(*unfolding abstractions with substitution - of bound variables and implicit eta-expansion*) -fun strip_abs_eta k t = - let - val used = fold_aterms declare_term_frees t Name.context; - fun strip_abs t (0, used) = (([], t), (0, used)) - | strip_abs (Abs (v, T, t)) (k, used) = - let - val (v', used') = Name.variant v used; - val t' = subst_bound (Free (v', T), t); - val ((vs, t''), (k', used'')) = strip_abs t' (k - 1, used'); - in (((v', T) :: vs, t''), (k', used'')) end - | strip_abs t (k, used) = (([], t), (k, used)); - fun expand_eta [] t _ = ([], t) - | expand_eta (T::Ts) t used = - let - val (v, used') = Name.variant "" used; - val (vs, t') = expand_eta Ts (t $ Free (v, T)) used'; - in ((v, T) :: vs, t') end; - val ((vs1, t'), (k', used')) = strip_abs t (k, used); - val Ts = fst (chop k' (binder_types (fastype_of t'))); - val (vs2, t'') = expand_eta Ts t' used'; - in (vs1 @ vs2, t'') end; - - -(*Substitute new for free occurrences of old in a term*) -fun subst_free [] = I - | subst_free pairs = - let fun substf u = - case AList.lookup (op aconv) pairs u of - SOME u' => u' - | NONE => (case u of Abs(a,T,t) => Abs(a, T, substf t) - | t$u' => substf t $ substf u' - | _ => u) - in substf end; - -(*Abstraction of the term "body" over its occurrences of v, - which must contain no loose bound variables. - The resulting term is ready to become the body of an Abs.*) -fun abstract_over (v, body) = - let - fun abs lev tm = - if v aconv tm then Bound lev - else - (case tm of - Abs (a, T, t) => Abs (a, T, abs (lev + 1) t) - | t $ u => - (abs lev t $ (abs lev u handle Same.SAME => u) - handle Same.SAME => t $ abs lev u) - | _ => raise Same.SAME); - in abs 0 body handle Same.SAME => body end; - -fun term_name (Const (x, _)) = Long_Name.base_name x - | term_name (Free (x, _)) = x - | term_name (Var ((x, _), _)) = x - | term_name _ = Name.uu; - -fun lambda_name (x, v) t = - Abs (if x = "" then term_name v else x, fastype_of v, abstract_over (v, t)); - -fun lambda v t = lambda_name ("", v) t; - -fun absfree (a, T) body = Abs (a, T, abstract_over (Free (a, T), body)); -fun absdummy T body = Abs (Name.uu_, T, body); - -(*Replace the ATOMIC term ti by ui; inst = [(t1,u1), ..., (tn,un)]. 
- A simultaneous substitution: [ (a,b), (b,a) ] swaps a and b. *) -fun subst_atomic [] tm = tm - | subst_atomic inst tm = - let - fun subst (Abs (a, T, body)) = Abs (a, T, subst body) - | subst (t $ u) = subst t $ subst u - | subst t = the_default t (AList.lookup (op aconv) inst t); - in subst tm end; - -(*Replace the ATOMIC type Ti by Ui; inst = [(T1,U1), ..., (Tn,Un)].*) -fun typ_subst_atomic [] ty = ty - | typ_subst_atomic inst ty = - let - fun subst (Type (a, Ts)) = Type (a, map subst Ts) - | subst T = the_default T (AList.lookup (op = : typ * typ -> bool) inst T); - in subst ty end; - -fun subst_atomic_types [] tm = tm - | subst_atomic_types inst tm = map_types (typ_subst_atomic inst) tm; - -fun typ_subst_TVars [] ty = ty - | typ_subst_TVars inst ty = - let - fun subst (Type (a, Ts)) = Type (a, map subst Ts) - | subst (T as TVar (xi, _)) = the_default T (AList.lookup (op =) inst xi) - | subst T = T; - in subst ty end; - -fun subst_TVars [] tm = tm - | subst_TVars inst tm = map_types (typ_subst_TVars inst) tm; - -fun subst_Vars [] tm = tm - | subst_Vars inst tm = - let - fun subst (t as Var (xi, _)) = the_default t (AList.lookup (op =) inst xi) - | subst (Abs (a, T, t)) = Abs (a, T, subst t) - | subst (t $ u) = subst t $ subst u - | subst t = t; - in subst tm end; - -fun subst_vars ([], []) tm = tm - | subst_vars ([], inst) tm = subst_Vars inst tm - | subst_vars (instT, inst) tm = - let - fun subst (Const (a, T)) = Const (a, typ_subst_TVars instT T) - | subst (Free (a, T)) = Free (a, typ_subst_TVars instT T) - | subst (Var (xi, T)) = - (case AList.lookup (op =) inst xi of - NONE => Var (xi, typ_subst_TVars instT T) - | SOME t => t) - | subst (t as Bound _) = t - | subst (Abs (a, T, t)) = Abs (a, typ_subst_TVars instT T, subst t) - | subst (t $ u) = subst t $ subst u; - in subst tm end; - -fun close_schematic_term t = - let - val extra_types = map (fn v => Const ("Pure.type", itselfT (TVar v))) (hidden_polymorphism t); - val extra_terms = map Var (add_vars t []); - in fold lambda (extra_terms @ extra_types) t end; - - - -(** Identifying first-order terms **) - -(*Differs from proofterm/is_fun in its treatment of TVar*) -fun is_funtype (Type ("fun", [_, _])) = true - | is_funtype _ = false; - -(*Argument Ts is a reverse list of binder types, needed if term t contains Bound vars*) -fun has_not_funtype Ts t = not (is_funtype (fastype_of1 (Ts, t))); - -(*First order means in all terms of the form f(t1,...,tn) no argument has a - function type. 
The supplied quantifiers are excluded: their argument always - has a function type through a recursive call into its body.*) -fun is_first_order quants = - let fun first_order1 Ts (Abs (_,T,body)) = first_order1 (T::Ts) body - | first_order1 Ts (Const(q,_) $ Abs(a,T,body)) = - member (op =) quants q andalso (*it is a known quantifier*) - not (is_funtype T) andalso first_order1 (T::Ts) body - | first_order1 Ts t = - case strip_comb t of - (Var _, ts) => forall (first_order1 Ts andf has_not_funtype Ts) ts - | (Free _, ts) => forall (first_order1 Ts andf has_not_funtype Ts) ts - | (Const _, ts) => forall (first_order1 Ts andf has_not_funtype Ts) ts - | (Bound _, ts) => forall (first_order1 Ts andf has_not_funtype Ts) ts - | (Abs _, ts) => false (*not in beta-normal form*) - | _ => error "first_order: unexpected case" - in first_order1 [] end; - - -(* maximum index of typs and terms *) - -fun maxidx_typ (TVar ((_, j), _)) i = Int.max (i, j) - | maxidx_typ (Type (_, Ts)) i = maxidx_typs Ts i - | maxidx_typ (TFree _) i = i -and maxidx_typs [] i = i - | maxidx_typs (T :: Ts) i = maxidx_typs Ts (maxidx_typ T i); - -fun maxidx_term (Var ((_, j), T)) i = maxidx_typ T (Int.max (i, j)) - | maxidx_term (Const (_, T)) i = maxidx_typ T i - | maxidx_term (Free (_, T)) i = maxidx_typ T i - | maxidx_term (Bound _) i = i - | maxidx_term (Abs (_, T, t)) i = maxidx_term t (maxidx_typ T i) - | maxidx_term (t $ u) i = maxidx_term u (maxidx_term t i); - -fun maxidx_of_typ T = maxidx_typ T ~1; -fun maxidx_of_typs Ts = maxidx_typs Ts ~1; -fun maxidx_of_term t = maxidx_term t ~1; - - - -(** misc syntax operations **) - -(* substructure *) - -fun exists_subtype P = - let - fun ex ty = P ty orelse - (case ty of Type (_, Ts) => exists ex Ts | _ => false); - in ex end; - -fun exists_type P = - let - fun ex (Const (_, T)) = P T - | ex (Free (_, T)) = P T - | ex (Var (_, T)) = P T - | ex (Bound _) = false - | ex (Abs (_, T, t)) = P T orelse ex t - | ex (t $ u) = ex t orelse ex u; - in ex end; - -fun exists_subterm P = - let - fun ex tm = P tm orelse - (case tm of - t $ u => ex t orelse ex u - | Abs (_, _, t) => ex t - | _ => false); - in ex end; - -fun exists_Const P = exists_subterm (fn Const c => P c | _ => false); - -fun has_abs (Abs _) = true - | has_abs (t $ u) = has_abs t orelse has_abs u - | has_abs _ = false; - - -(* dest abstraction *) - -fun dest_abs (x, T, body) = - let - fun name_clash (Free (y, _)) = (x = y) - | name_clash (t $ u) = name_clash t orelse name_clash u - | name_clash (Abs (_, _, t)) = name_clash t - | name_clash _ = false; - in - if name_clash body then - dest_abs (singleton (Name.variant_list [x]) x, T, body) (*potentially slow*) - else (x, subst_bound (Free (x, T), body)) - end; - - -(* dummy patterns *) - -fun dummy_pattern T = Const ("Pure.dummy_pattern", T); -val dummy = dummy_pattern dummyT; -val dummy_prop = dummy_pattern propT; - -fun is_dummy_pattern (Const ("Pure.dummy_pattern", _)) = true - | is_dummy_pattern _ = false; - -fun no_dummy_patterns tm = - if not (fold_aterms (fn t => fn b => b orelse is_dummy_pattern t) tm false) then tm - else raise TERM ("Illegal occurrence of '_' dummy pattern", [tm]); - -fun free_dummy_patterns (Const ("Pure.dummy_pattern", T)) used = - let val [x] = Name.invent used Name.uu 1 - in (Free (Name.internal x, T), Name.declare x used) end - | free_dummy_patterns (Abs (x, T, b)) used = - let val (b', used') = free_dummy_patterns b used - in (Abs (x, T, b'), used') end - | free_dummy_patterns (t $ u) used = - let - val (t', used') = free_dummy_patterns t 
used; - val (u', used'') = free_dummy_patterns u used'; - in (t' $ u', used'') end - | free_dummy_patterns a used = (a, used); - -fun replace_dummy Ts (Const ("Pure.dummy_pattern", T)) i = - (list_comb (Var (("_dummy_", i), Ts ---> T), map_range Bound (length Ts)), i + 1) - | replace_dummy Ts (Abs (x, T, t)) i = - let val (t', i') = replace_dummy (T :: Ts) t i - in (Abs (x, T, t'), i') end - | replace_dummy Ts (t $ u) i = - let - val (t', i') = replace_dummy Ts t i; - val (u', i'') = replace_dummy Ts u i'; - in (t' $ u', i'') end - | replace_dummy _ a i = (a, i); - -val replace_dummy_patterns = replace_dummy []; - -fun is_replaced_dummy_pattern ("_dummy_", _) = true - | is_replaced_dummy_pattern _ = false; - -fun show_dummy_patterns (Var (("_dummy_", _), T)) = dummy_pattern T - | show_dummy_patterns (t $ u) = show_dummy_patterns t $ show_dummy_patterns u - | show_dummy_patterns (Abs (x, T, t)) = Abs (x, T, show_dummy_patterns t) - | show_dummy_patterns a = a; - - -(* display variables *) - -fun string_of_vname (x, i) = - let - val idx = string_of_int i; - val dot = - (case rev (Symbol.explode x) of - _ :: "\\<^sub>" :: _ => false - | c :: _ => Symbol.is_digit c - | _ => true); - in - if dot then "?" ^ x ^ "." ^ idx - else if i <> 0 then "?" ^ x ^ idx - else "?" ^ x - end; - -fun string_of_vname' (x, ~1) = x - | string_of_vname' xi = string_of_vname xi; - -end; - -structure Basic_Term: BASIC_TERM = Term; -open Basic_Term; diff --git a/core/Pure/term.scala b/core/Pure/term.scala deleted file mode 100644 index efa91e35..00000000 --- a/core/Pure/term.scala +++ /dev/null @@ -1,33 +0,0 @@ -/* Title: Pure/term.scala - Author: Makarius - -Lambda terms, types, sorts. - -Note: Isabelle/ML is the primary environment for logical operations. -*/ - -package isabelle - - -object Term -{ - type Indexname = (String, Int) - - type Sort = List[String] - val dummyS: Sort = List("") - - sealed abstract class Typ - case class Type(name: String, args: List[Typ] = Nil) extends Typ - case class TFree(name: String, sort: Sort = dummyS) extends Typ - case class TVar(name: Indexname, sort: Sort = dummyS) extends Typ - val dummyT = Type("dummy") - - sealed abstract class Term - case class Const(name: String, typ: Typ = dummyT) extends Term - case class Free(name: String, typ: Typ = dummyT) extends Term - case class Var(name: Indexname, typ: Typ = dummyT) extends Term - case class Bound(index: Int) extends Term - case class Abs(name: String, typ: Typ = dummyT, body: Term) extends Term - case class App(fun: Term, arg: Term) extends Term -} - diff --git a/core/Pure/term_ord.ML b/core/Pure/term_ord.ML deleted file mode 100644 index ca1a9656..00000000 --- a/core/Pure/term_ord.ML +++ /dev/null @@ -1,242 +0,0 @@ -(* Title: Pure/term_ord.ML - Author: Tobias Nipkow and Makarius, TU Muenchen - -Term orderings. 
-*) - -signature BASIC_TERM_ORD = -sig - structure Vartab: TABLE - structure Sorttab: TABLE - structure Typtab: TABLE - structure Termtab: TABLE -end; - -signature TERM_ORD = -sig - include BASIC_TERM_ORD - val fast_indexname_ord: indexname * indexname -> order - val sort_ord: sort * sort -> order - val typ_ord: typ * typ -> order - val fast_term_ord: term * term -> order - val syntax_term_ord: term * term -> order - val indexname_ord: indexname * indexname -> order - val tvar_ord: (indexname * sort) * (indexname * sort) -> order - val var_ord: (indexname * typ) * (indexname * typ) -> order - val term_ord: term * term -> order - val hd_ord: term * term -> order - val termless: term * term -> bool - val term_lpo: (term -> int) -> term * term -> order - val term_cache: (term -> 'a) -> term -> 'a -end; - -structure Term_Ord: TERM_ORD = -struct - -(* fast syntactic ordering -- tuned for inequalities *) - -fun fast_indexname_ord ((x, i), (y, j)) = - (case int_ord (i, j) of EQUAL => fast_string_ord (x, y) | ord => ord); - -fun sort_ord SS = - if pointer_eq SS then EQUAL - else dict_ord fast_string_ord SS; - -local - -fun cons_nr (TVar _) = 0 - | cons_nr (TFree _) = 1 - | cons_nr (Type _) = 2; - -in - -fun typ_ord TU = - if pointer_eq TU then EQUAL - else - (case TU of - (Type (a, Ts), Type (b, Us)) => - (case fast_string_ord (a, b) of EQUAL => dict_ord typ_ord (Ts, Us) | ord => ord) - | (TFree (a, S), TFree (b, S')) => - (case fast_string_ord (a, b) of EQUAL => sort_ord (S, S') | ord => ord) - | (TVar (xi, S), TVar (yj, S')) => - (case fast_indexname_ord (xi, yj) of EQUAL => sort_ord (S, S') | ord => ord) - | (T, U) => int_ord (cons_nr T, cons_nr U)); - -end; - -local - -fun cons_nr (Const _) = 0 - | cons_nr (Free _) = 1 - | cons_nr (Var _) = 2 - | cons_nr (Bound _) = 3 - | cons_nr (Abs _) = 4 - | cons_nr (_ $ _) = 5; - -fun struct_ord (Abs (_, _, t), Abs (_, _, u)) = struct_ord (t, u) - | struct_ord (t1 $ t2, u1 $ u2) = - (case struct_ord (t1, u1) of EQUAL => struct_ord (t2, u2) | ord => ord) - | struct_ord (t, u) = int_ord (cons_nr t, cons_nr u); - -fun atoms_ord (Abs (_, _, t), Abs (_, _, u)) = atoms_ord (t, u) - | atoms_ord (t1 $ t2, u1 $ u2) = - (case atoms_ord (t1, u1) of EQUAL => atoms_ord (t2, u2) | ord => ord) - | atoms_ord (Const (a, _), Const (b, _)) = fast_string_ord (a, b) - | atoms_ord (Free (x, _), Free (y, _)) = fast_string_ord (x, y) - | atoms_ord (Var (xi, _), Var (yj, _)) = fast_indexname_ord (xi, yj) - | atoms_ord (Bound i, Bound j) = int_ord (i, j) - | atoms_ord _ = EQUAL; - -fun types_ord (Abs (_, T, t), Abs (_, U, u)) = - (case typ_ord (T, U) of EQUAL => types_ord (t, u) | ord => ord) - | types_ord (t1 $ t2, u1 $ u2) = - (case types_ord (t1, u1) of EQUAL => types_ord (t2, u2) | ord => ord) - | types_ord (Const (_, T), Const (_, U)) = typ_ord (T, U) - | types_ord (Free (_, T), Free (_, U)) = typ_ord (T, U) - | types_ord (Var (_, T), Var (_, U)) = typ_ord (T, U) - | types_ord _ = EQUAL; - -fun comments_ord (Abs (x, _, t), Abs (y, _, u)) = - (case fast_string_ord (x, y) of EQUAL => comments_ord (t, u) | ord => ord) - | comments_ord (t1 $ t2, u1 $ u2) = - (case comments_ord (t1, u1) of EQUAL => comments_ord (t2, u2) | ord => ord) - | comments_ord _ = EQUAL; - -in - -fun fast_term_ord tu = - if pointer_eq tu then EQUAL - else - (case struct_ord tu of - EQUAL => (case atoms_ord tu of EQUAL => types_ord tu | ord => ord) - | ord => ord); - -fun syntax_term_ord tu = - (case fast_term_ord tu of EQUAL => comments_ord tu | ord => ord); - -end; - - -(* term_ord *) - -(*a linear 
well-founded AC-compatible ordering for terms: - s < t <=> 1. size(s) < size(t) or - 2. size(s) = size(t) and s=f(...) and t=g(...) and f string_ord (x, y) | ord => ord); - -val tvar_ord = prod_ord indexname_ord sort_ord; -val var_ord = prod_ord indexname_ord typ_ord; - - -local - -fun hd_depth (t $ _, n) = hd_depth (t, n + 1) - | hd_depth p = p; - -fun dest_hd (Const (a, T)) = (((a, 0), T), 0) - | dest_hd (Free (a, T)) = (((a, 0), T), 1) - | dest_hd (Var v) = (v, 2) - | dest_hd (Bound i) = ((("", i), dummyT), 3) - | dest_hd (Abs (_, T, _)) = ((("", 0), T), 4); - -in - -fun term_ord tu = - if pointer_eq tu then EQUAL - else - (case tu of - (Abs (_, T, t), Abs(_, U, u)) => - (case term_ord (t, u) of EQUAL => typ_ord (T, U) | ord => ord) - | (t, u) => - (case int_ord (size_of_term t, size_of_term u) of - EQUAL => - (case prod_ord hd_ord int_ord (hd_depth (t, 0), hd_depth (u, 0)) of - EQUAL => args_ord (t, u) | ord => ord) - | ord => ord)) -and hd_ord (f, g) = - prod_ord (prod_ord indexname_ord typ_ord) int_ord (dest_hd f, dest_hd g) -and args_ord (f $ t, g $ u) = - (case args_ord (f, g) of EQUAL => term_ord (t, u) | ord => ord) - | args_ord _ = EQUAL; - -fun termless tu = (term_ord tu = LESS); - -end; - - -(* Lexicographic path order on terms *) - -(* - See Baader & Nipkow, Term rewriting, CUP 1998. - Without variables. Const, Var, Bound, Free and Abs are treated all as - constants. - - f_ord maps terms to integers and serves two purposes: - - Predicate on constant symbols. Those that are not recognised by f_ord - must be mapped to ~1. - - Order on the recognised symbols. These must be mapped to distinct - integers >= 0. - The argument of f_ord is never an application. -*) - -local - -fun unrecognized (Const (a, T)) = ((1, ((a, 0), T)), 0) - | unrecognized (Free (a, T)) = ((1, ((a, 0), T)), 0) - | unrecognized (Var v) = ((1, v), 1) - | unrecognized (Bound i) = ((1, (("", i), dummyT)), 2) - | unrecognized (Abs (_, T, _)) = ((1, (("", 0), T)), 3); - -fun dest_hd f_ord t = - let val ord = f_ord t - in if ord = ~1 then unrecognized t else ((0, (("", ord), fastype_of t)), 0) end; - -fun term_lpo f_ord (s, t) = - let val (f, ss) = strip_comb s and (g, ts) = strip_comb t in - if forall (fn si => term_lpo f_ord (si, t) = LESS) ss - then case hd_ord f_ord (f, g) of - GREATER => - if forall (fn ti => term_lpo f_ord (s, ti) = GREATER) ts - then GREATER else LESS - | EQUAL => - if forall (fn ti => term_lpo f_ord (s, ti) = GREATER) ts - then list_ord (term_lpo f_ord) (ss, ts) - else LESS - | LESS => LESS - else GREATER - end -and hd_ord f_ord (f, g) = case (f, g) of - (Abs (_, T, t), Abs (_, U, u)) => - (case term_lpo f_ord (t, u) of EQUAL => typ_ord (T, U) | ord => ord) - | (_, _) => prod_ord (prod_ord int_ord - (prod_ord indexname_ord typ_ord)) int_ord - (dest_hd f_ord f, dest_hd f_ord g); - -in -val term_lpo = term_lpo -end; - - -(* tables and caches *) - -structure Vartab = Table(type key = indexname val ord = fast_indexname_ord); -structure Sorttab = Table(type key = sort val ord = sort_ord); -structure Typtab = Table(type key = typ val ord = typ_ord); -structure Termtab = Table(type key = term val ord = fast_term_ord); - -fun term_cache f = Cache.create Termtab.empty Termtab.lookup Termtab.update f; - -end; - -structure Basic_Term_Ord: BASIC_TERM_ORD = Term_Ord; -open Basic_Term_Ord; - -structure Var_Graph = Graph(type key = indexname val ord = Term_Ord.fast_indexname_ord); -structure Sort_Graph = Graph(type key = sort val ord = Term_Ord.sort_ord); -structure Typ_Graph = Graph(type key = typ val 
ord = Term_Ord.typ_ord); -structure Term_Graph = Graph(type key = term val ord = Term_Ord.fast_term_ord); - diff --git a/core/Pure/term_sharing.ML b/core/Pure/term_sharing.ML deleted file mode 100644 index 0890a24e..00000000 --- a/core/Pure/term_sharing.ML +++ /dev/null @@ -1,72 +0,0 @@ -(* Title: Pure/term_sharing.ML - Author: Makarius - -Local sharing of type/term sub-structure, with global interning of -formal entity names. -*) - -signature TERM_SHARING = -sig - val init: theory -> (typ -> typ) * (term -> term) - val typs: theory -> typ list -> typ list - val terms: theory -> term list -> term list -end; - -structure Term_Sharing: TERM_SHARING = -struct - -structure Syntax_Termtab = Table(type key = term val ord = Term_Ord.syntax_term_ord); - -fun init thy = - let - val {classes = (_, algebra), types, ...} = Type.rep_tsig (Sign.tsig_of thy); - - val class = perhaps (try (#1 o Graph.get_entry (Sorts.classes_of algebra))); - val tycon = perhaps (Option.map #1 o Name_Space.lookup_key types); - val const = perhaps (try (#1 o Consts.the_const (Sign.consts_of thy))); - - val typs = Unsynchronized.ref (Typtab.empty: unit Typtab.table); - val terms = Unsynchronized.ref (Syntax_Termtab.empty: unit Syntax_Termtab.table); - - fun typ T = - (case Typtab.lookup_key (! typs) T of - SOME (T', ()) => T' - | NONE => - let - val T' = - (case T of - Type (a, Ts) => Type (tycon a, map typ Ts) - | TFree (a, S) => TFree (a, map class S) - | TVar (a, S) => TVar (a, map class S)); - val _ = Unsynchronized.change typs (Typtab.update (T', ())); - in T' end); - - fun term tm = - (case Syntax_Termtab.lookup_key (! terms) tm of - SOME (tm', ()) => tm' - | NONE => - let - val tm' = - (case tm of - Const (c, T) => Const (const c, typ T) - | Free (x, T) => Free (x, typ T) - | Var (xi, T) => Var (xi, typ T) - | Bound i => Bound i - | Abs (x, T, t) => Abs (x, typ T, term t) - | t $ u => term t $ term u); - val _ = Unsynchronized.change terms (Syntax_Termtab.update (tm', ())); - in tm' end); - - fun check eq f x = - let val x' = f x in - if eq (x, x') then x' - else raise Fail "Something is utterly wrong" - end; - - in (check (op =) typ, check (op =) term) end; - -val typs = map o #1 o init; -val terms = map o #2 o init; - -end; - diff --git a/core/Pure/term_subst.ML b/core/Pure/term_subst.ML deleted file mode 100644 index ab14f8c0..00000000 --- a/core/Pure/term_subst.ML +++ /dev/null @@ -1,187 +0,0 @@ -(* Title: Pure/term_subst.ML - Author: Makarius - -Efficient type/term substitution. 
-*) - -signature TERM_SUBST = -sig - val map_atypsT_same: typ Same.operation -> typ Same.operation - val map_types_same: typ Same.operation -> term Same.operation - val map_aterms_same: term Same.operation -> term Same.operation - val generalizeT_same: string list -> int -> typ Same.operation - val generalize_same: string list * string list -> int -> term Same.operation - val generalizeT: string list -> int -> typ -> typ - val generalize: string list * string list -> int -> term -> term - val instantiateT_maxidx: ((indexname * sort) * (typ * int)) list -> typ -> int -> typ * int - val instantiate_maxidx: - ((indexname * sort) * (typ * int)) list * ((indexname * typ) * (term * int)) list -> - term -> int -> term * int - val instantiateT_same: ((indexname * sort) * typ) list -> typ Same.operation - val instantiate_same: ((indexname * sort) * typ) list * ((indexname * typ) * term) list -> - term Same.operation - val instantiateT: ((indexname * sort) * typ) list -> typ -> typ - val instantiate: ((indexname * sort) * typ) list * ((indexname * typ) * term) list -> - term -> term - val zero_var_indexes: term -> term - val zero_var_indexes_inst: term list -> - ((indexname * sort) * typ) list * ((indexname * typ) * term) list -end; - -structure Term_Subst: TERM_SUBST = -struct - -(* generic mapping *) - -fun map_atypsT_same f = - let - fun typ (Type (a, Ts)) = Type (a, Same.map typ Ts) - | typ T = f T; - in typ end; - -fun map_types_same f = - let - fun term (Const (a, T)) = Const (a, f T) - | term (Free (a, T)) = Free (a, f T) - | term (Var (v, T)) = Var (v, f T) - | term (Bound _) = raise Same.SAME - | term (Abs (x, T, t)) = - (Abs (x, f T, Same.commit term t) - handle Same.SAME => Abs (x, T, term t)) - | term (t $ u) = (term t $ Same.commit term u handle Same.SAME => t $ term u); - in term end; - -fun map_aterms_same f = - let - fun term (Abs (x, T, t)) = Abs (x, T, term t) - | term (t $ u) = (term t $ Same.commit term u handle Same.SAME => t $ term u) - | term a = f a; - in term end; - - -(* generalization of fixed variables *) - -fun generalizeT_same [] _ _ = raise Same.SAME - | generalizeT_same tfrees idx ty = - let - fun gen (Type (a, Ts)) = Type (a, Same.map gen Ts) - | gen (TFree (a, S)) = - if member (op =) tfrees a then TVar ((a, idx), S) - else raise Same.SAME - | gen _ = raise Same.SAME; - in gen ty end; - -fun generalize_same ([], []) _ _ = raise Same.SAME - | generalize_same (tfrees, frees) idx tm = - let - val genT = generalizeT_same tfrees idx; - fun gen (Free (x, T)) = - if member (op =) frees x then - Var (Name.clean_index (x, idx), Same.commit genT T) - else Free (x, genT T) - | gen (Var (xi, T)) = Var (xi, genT T) - | gen (Const (c, T)) = Const (c, genT T) - | gen (Bound _) = raise Same.SAME - | gen (Abs (x, T, t)) = - (Abs (x, genT T, Same.commit gen t) - handle Same.SAME => Abs (x, T, gen t)) - | gen (t $ u) = (gen t $ Same.commit gen u handle Same.SAME => t $ gen u); - in gen tm end; - -fun generalizeT names i ty = Same.commit (generalizeT_same names i) ty; -fun generalize names i tm = Same.commit (generalize_same names i) tm; - - -(* instantiation of schematic variables (types before terms) -- recomputes maxidx *) - -local - -fun no_index (x, y) = (x, (y, ~1)); -fun no_indexes1 inst = map no_index inst; -fun no_indexes2 (inst1, inst2) = (map no_index inst1, map no_index inst2); - -fun instT_same maxidx instT ty = - let - fun maxify i = if i > ! 
maxidx then maxidx := i else (); - - fun subst_typ (Type (a, Ts)) = Type (a, subst_typs Ts) - | subst_typ (TVar ((a, i), S)) = - (case AList.lookup Term.eq_tvar instT ((a, i), S) of - SOME (T, j) => (maxify j; T) - | NONE => (maxify i; raise Same.SAME)) - | subst_typ _ = raise Same.SAME - and subst_typs (T :: Ts) = - (subst_typ T :: Same.commit subst_typs Ts - handle Same.SAME => T :: subst_typs Ts) - | subst_typs [] = raise Same.SAME; - in subst_typ ty end; - -fun inst_same maxidx (instT, inst) tm = - let - fun maxify i = if i > ! maxidx then maxidx := i else (); - - val substT = instT_same maxidx instT; - fun subst (Const (c, T)) = Const (c, substT T) - | subst (Free (x, T)) = Free (x, substT T) - | subst (Var ((x, i), T)) = - let val (T', same) = (substT T, false) handle Same.SAME => (T, true) in - (case AList.lookup Term.eq_var inst ((x, i), T') of - SOME (t, j) => (maxify j; t) - | NONE => (maxify i; if same then raise Same.SAME else Var ((x, i), T'))) - end - | subst (Bound _) = raise Same.SAME - | subst (Abs (x, T, t)) = - (Abs (x, substT T, Same.commit subst t) - handle Same.SAME => Abs (x, T, subst t)) - | subst (t $ u) = (subst t $ Same.commit subst u handle Same.SAME => t $ subst u); - in subst tm end; - -in - -fun instantiateT_maxidx instT ty i = - let val maxidx = Unsynchronized.ref i - in (Same.commit (instT_same maxidx instT) ty, ! maxidx) end; - -fun instantiate_maxidx insts tm i = - let val maxidx = Unsynchronized.ref i - in (Same.commit (inst_same maxidx insts) tm, ! maxidx) end; - -fun instantiateT_same [] _ = raise Same.SAME - | instantiateT_same instT ty = instT_same (Unsynchronized.ref ~1) (no_indexes1 instT) ty; - -fun instantiate_same ([], []) _ = raise Same.SAME - | instantiate_same insts tm = inst_same (Unsynchronized.ref ~1) (no_indexes2 insts) tm; - -fun instantiateT instT ty = Same.commit (instantiateT_same instT) ty; -fun instantiate inst tm = Same.commit (instantiate_same inst) tm; - -end; - - -(* zero var indexes *) - -structure TVars = Table(type key = indexname * sort val ord = Term_Ord.tvar_ord); -structure Vars = Table(type key = indexname * typ val ord = Term_Ord.var_ord); - -fun zero_var_inst mk (v as ((x, i), X)) (inst, used) = - let - val (x', used') = Name.variant (if Name.is_bound x then "u" else x) used; - in if x = x' andalso i = 0 then (inst, used') else ((v, mk ((x', 0), X)) :: inst, used') end; - -fun zero_var_indexes_inst ts = - let - val (instT, _) = - TVars.fold (zero_var_inst TVar o #1) - ((fold o fold_types o fold_atyps) (fn TVar v => - TVars.insert (K true) (v, ()) | _ => I) ts TVars.empty) - ([], Name.context); - val (inst, _) = - Vars.fold (zero_var_inst Var o #1) - ((fold o fold_aterms) (fn Var (xi, T) => - Vars.insert (K true) ((xi, instantiateT instT T), ()) | _ => I) ts Vars.empty) - ([], Name.context); - in (instT, inst) end; - -fun zero_var_indexes t = instantiate (zero_var_indexes_inst [t]) t; - -end; diff --git a/core/Pure/term_xml.ML b/core/Pure/term_xml.ML deleted file mode 100644 index 43c676c7..00000000 --- a/core/Pure/term_xml.ML +++ /dev/null @@ -1,68 +0,0 @@ -(* Title: Pure/term_xml.ML - Author: Makarius - -XML data representation of lambda terms. 
-*) - -signature TERM_XML_OPS = -sig - type 'a T - val sort: sort T - val typ: typ T - val term: term T -end - -signature TERM_XML = -sig - structure Encode: TERM_XML_OPS - structure Decode: TERM_XML_OPS -end; - -structure Term_XML: TERM_XML = -struct - -structure Encode = -struct - -open XML.Encode; - -val sort = list string; - -fun typ T = T |> variant - [fn Type (a, b) => ([a], list typ b), - fn TFree (a, b) => ([a], sort b), - fn TVar ((a, b), c) => ([a, int_atom b], sort c)]; - -fun term t = t |> variant - [fn Const (a, b) => ([a], typ b), - fn Free (a, b) => ([a], typ b), - fn Var ((a, b), c) => ([a, int_atom b], typ c), - fn Bound a => ([int_atom a], []), - fn Abs (a, b, c) => ([a], pair typ term (b, c)), - fn op $ a => ([], pair term term a)]; - -end; - -structure Decode = -struct - -open XML.Decode; - -val sort = list string; - -fun typ T = T |> variant - [fn ([a], b) => Type (a, list typ b), - fn ([a], b) => TFree (a, sort b), - fn ([a, b], c) => TVar ((a, int_atom b), sort c)]; - -fun term t = t |> variant - [fn ([a], b) => Const (a, typ b), - fn ([a], b) => Free (a, typ b), - fn ([a, b], c) => Var ((a, int_atom b), typ c), - fn ([a], []) => Bound (int_atom a), - fn ([a], b) => let val (c, d) = pair typ term b in Abs (a, c, d) end, - fn ([], a) => op $ (pair term term a)]; - -end; - -end; diff --git a/core/Pure/term_xml.scala b/core/Pure/term_xml.scala deleted file mode 100644 index 196637c5..00000000 --- a/core/Pure/term_xml.scala +++ /dev/null @@ -1,57 +0,0 @@ -/* Title: Pure/term_xml.scala - Author: Makarius - -XML data representation of lambda terms. -*/ - -package isabelle - - -object Term_XML -{ - import Term._ - - object Encode - { - import XML.Encode._ - - val sort: T[Sort] = list(string) - - def typ: T[Typ] = - variant[Typ](List( - { case Type(a, b) => (List(a), list(typ)(b)) }, - { case TFree(a, b) => (List(a), sort(b)) }, - { case TVar((a, b), c) => (List(a, int_atom(b)), sort(c)) })) - - def term: T[Term] = - variant[Term](List( - { case Const(a, b) => (List(a), typ(b)) }, - { case Free(a, b) => (List(a), typ(b)) }, - { case Var((a, b), c) => (List(a, int_atom(b)), typ(c)) }, - { case Bound(a) => (List(int_atom(a)), Nil) }, - { case Abs(a, b, c) => (List(a), pair(typ, term)(b, c)) }, - { case App(a, b) => (Nil, pair(term, term)(a, b)) })) - } - - object Decode - { - import XML.Decode._ - - val sort: T[Sort] = list(string) - - def typ: T[Typ] = - variant[Typ](List( - { case (List(a), b) => Type(a, list(typ)(b)) }, - { case (List(a), b) => TFree(a, sort(b)) }, - { case (List(a, b), c) => TVar((a, int_atom(b)), sort(c)) })) - - def term: T[Term] = - variant[Term](List( - { case (List(a), b) => Const(a, typ(b)) }, - { case (List(a), b) => Free(a, typ(b)) }, - { case (List(a, b), c) => Var((a, int_atom(b)), typ(c)) }, - { case (List(a), Nil) => Bound(int_atom(a)) }, - { case (List(a), b) => val (c, d) = pair(typ, term)(b); Abs(a, c, d) }, - { case (Nil, a) => val (b, c) = pair(term, term)(a); App(b, c) })) - } -} diff --git a/core/Pure/theory.ML b/core/Pure/theory.ML deleted file mode 100644 index 176f6cfa..00000000 --- a/core/Pure/theory.ML +++ /dev/null @@ -1,298 +0,0 @@ -(* Title: Pure/theory.ML - Author: Lawrence C Paulson and Markus Wenzel - -Logical theory content: axioms, definitions, and begin/end wrappers. 
-*) - -signature THEORY = -sig - val eq_thy: theory * theory -> bool - val subthy: theory * theory -> bool - val assert_super: theory -> theory -> theory - val parents_of: theory -> theory list - val ancestors_of: theory -> theory list - val nodes_of: theory -> theory list - val merge: theory * theory -> theory - val merge_list: theory list -> theory - val requires: theory -> string -> string -> unit - val setup: (theory -> theory) -> unit - val get_markup: theory -> Markup.T - val axiom_table: theory -> term Name_Space.table - val axiom_space: theory -> Name_Space.T - val axioms_of: theory -> (string * term) list - val all_axioms_of: theory -> (string * term) list - val defs_of: theory -> Defs.T - val at_begin: (theory -> theory option) -> theory -> theory - val at_end: (theory -> theory option) -> theory -> theory - val begin_theory: string * Position.T -> theory list -> theory - val end_theory: theory -> theory - val add_axiom: Proof.context -> binding * term -> theory -> theory - val add_deps: Proof.context -> string -> string * typ -> (string * typ) list -> theory -> theory - val add_deps_global: string -> string * typ -> (string * typ) list -> theory -> theory - val add_def: Proof.context -> bool -> bool -> binding * term -> theory -> theory - val specify_const: (binding * typ) * mixfix -> theory -> term * theory - val check_overloading: Proof.context -> bool -> string * typ -> unit -end - -structure Theory: THEORY = -struct - - -(** theory context operations **) - -val eq_thy = Context.eq_thy; -val subthy = Context.subthy; - -fun assert_super thy1 thy2 = - if subthy (thy1, thy2) then thy2 - else raise THEORY ("Not a super theory", [thy1, thy2]); - -val parents_of = Context.parents_of; -val ancestors_of = Context.ancestors_of; -fun nodes_of thy = thy :: ancestors_of thy; - -val merge = Context.merge; - -fun merge_list [] = raise THEORY ("Empty merge of theories", []) - | merge_list (thy :: thys) = Library.foldl merge (thy, thys); - -fun requires thy name what = - if exists (fn thy' => Context.theory_name thy' = name) (nodes_of thy) then () - else error ("Require theory " ^ quote name ^ " as an ancestor for " ^ what); - -fun setup f = Context.>> (Context.map_theory f); - - - -(** datatype thy **) - -type wrapper = (theory -> theory option) * stamp; - -fun apply_wrappers (wrappers: wrapper list) = - perhaps (perhaps_loop (perhaps_apply (map fst wrappers))); - -datatype thy = Thy of - {pos: Position.T, - id: serial, - axioms: term Name_Space.table, - defs: Defs.T, - wrappers: wrapper list * wrapper list}; - -fun make_thy (pos, id, axioms, defs, wrappers) = - Thy {pos = pos, id = id, axioms = axioms, defs = defs, wrappers = wrappers}; - -structure Thy = Theory_Data_PP -( - type T = thy; - val empty_axioms = Name_Space.empty_table "axiom" : term Name_Space.table; - val empty = make_thy (Position.none, 0, empty_axioms, Defs.empty, ([], [])); - - fun extend (Thy {pos = _, id = _, axioms = _, defs, wrappers}) = - make_thy (Position.none, 0, empty_axioms, defs, wrappers); - - fun merge pp (thy1, thy2) = - let - val ctxt = Syntax.init_pretty pp; - val Thy {pos = _, id = _, axioms = _, defs = defs1, wrappers = (bgs1, ens1)} = thy1; - val Thy {pos = _, id = _, axioms = _, defs = defs2, wrappers = (bgs2, ens2)} = thy2; - - val axioms' = empty_axioms; - val defs' = Defs.merge ctxt (defs1, defs2); - val bgs' = Library.merge (eq_snd op =) (bgs1, bgs2); - val ens' = Library.merge (eq_snd op =) (ens1, ens2); - in make_thy (Position.none, 0, axioms', defs', (bgs', ens')) end; -); - -fun rep_theory thy 
= Thy.get thy |> (fn Thy args => args); - -fun map_thy f = Thy.map (fn (Thy {pos, id, axioms, defs, wrappers}) => - make_thy (f (pos, id, axioms, defs, wrappers))); - -fun map_axioms f = - map_thy (fn (pos, id, axioms, defs, wrappers) => (pos, id, f axioms, defs, wrappers)); - -fun map_defs f = - map_thy (fn (pos, id, axioms, defs, wrappers) => (pos, id, axioms, f defs, wrappers)); - -fun map_wrappers f = - map_thy (fn (pos, id, axioms, defs, wrappers) => (pos, id, axioms, defs, f wrappers)); - - -(* entity markup *) - -fun theory_markup def name id pos = - if id = 0 then Markup.empty - else - Markup.properties (Position.entity_properties_of def id pos) - (Markup.entity Markup.theoryN name); - -fun init_markup (name, pos) thy = - let - val id = serial (); - val _ = Position.report pos (theory_markup true name id pos); - in map_thy (fn (_, _, axioms, defs, wrappers) => (pos, id, axioms, defs, wrappers)) thy end; - -fun get_markup thy = - let val {pos, id, ...} = rep_theory thy - in theory_markup false (Context.theory_name thy) id pos end; - - -(* basic operations *) - -val axiom_table = #axioms o rep_theory; -val axiom_space = Name_Space.space_of_table o axiom_table; - -fun axioms_of thy = rev (Name_Space.fold_table cons (axiom_table thy) []); -fun all_axioms_of thy = maps axioms_of (nodes_of thy); - -val defs_of = #defs o rep_theory; - - -(* begin/end theory *) - -val begin_wrappers = rev o #1 o #wrappers o rep_theory; -val end_wrappers = rev o #2 o #wrappers o rep_theory; - -fun at_begin f = map_wrappers (apfst (cons (f, stamp ()))); -fun at_end f = map_wrappers (apsnd (cons (f, stamp ()))); - -fun begin_theory (name, pos) imports = - if name = Context.PureN then - (case imports of - [thy] => init_markup (name, pos) thy - | _ => error "Bad bootstrapping of theory Pure") - else - let - val thy = Context.begin_thy Context.pretty_global name imports; - val wrappers = begin_wrappers thy; - in - thy - |> init_markup (name, pos) - |> Sign.local_path - |> Sign.map_naming (Name_Space.set_theory_name name) - |> apply_wrappers wrappers - |> tap (Syntax.force_syntax o Sign.syn_of) - end; - -fun end_theory thy = - thy - |> apply_wrappers (end_wrappers thy) - |> Sign.change_check - |> Context.finish_thy; - - - -(** primitive specifications **) - -(* raw axioms *) - -fun cert_axm ctxt (b, raw_tm) = - let - val thy = Proof_Context.theory_of ctxt; - val t = Sign.cert_prop thy raw_tm - handle TYPE (msg, _, _) => error msg - | TERM (msg, _) => error msg; - val _ = Term.no_dummy_patterns t handle TERM (msg, _) => error msg; - - val bad_sorts = - rev ((fold_types o fold_atyps_sorts) (fn (_, []) => I | (T, _) => insert (op =) T) t []); - val _ = null bad_sorts orelse - error ("Illegal sort constraints in primitive specification: " ^ - commas (map (Syntax.string_of_typ (Config.put show_sorts true ctxt)) bad_sorts)); - in (b, Sign.no_vars ctxt t) end - handle ERROR msg => cat_error msg ("The error(s) above occurred in axiom " ^ Binding.print b); - -fun add_axiom ctxt raw_axm thy = thy |> map_axioms (fn axioms => - let - val axm = apsnd Logic.varify_global (cert_axm ctxt raw_axm); - val (_, axioms') = Name_Space.define (Sign.inherit_naming thy ctxt) true axm axioms; - in axioms' end); - - -(* dependencies *) - -fun dependencies ctxt unchecked def description lhs rhs = - let - val thy = Proof_Context.theory_of ctxt; - val consts = Sign.consts_of thy; - fun prep const = - let val Const (c, T) = Sign.no_vars ctxt (Const const) - in (c, Consts.typargs consts (c, Logic.varifyT_global T)) end; - - val lhs_vars = 
Term.add_tfreesT (#2 lhs) []; - val rhs_extras = fold (#2 #> Term.fold_atyps (fn TFree v => - if member (op =) lhs_vars v then I else insert (op =) v | _ => I)) rhs []; - val _ = - if null rhs_extras then () - else error ("Specification depends on extra type variables: " ^ - commas_quote (map (Syntax.string_of_typ ctxt o TFree) rhs_extras) ^ - "\nThe error(s) above occurred in " ^ quote description); - in Defs.define ctxt unchecked def description (prep lhs) (map prep rhs) end; - -fun add_deps ctxt a raw_lhs raw_rhs thy = - let - val lhs :: rhs = map (dest_Const o Sign.cert_term thy o Const) (raw_lhs :: raw_rhs); - val description = if a = "" then #1 lhs ^ " axiom" else a; - in thy |> map_defs (dependencies ctxt false NONE description lhs rhs) end; - -fun add_deps_global a x y thy = add_deps (Syntax.init_pretty_global thy) a x y thy; - -fun specify_const decl thy = - let val (t as Const const, thy') = Sign.declare_const_global decl thy; - in (t, add_deps_global "" const [] thy') end; - - -(* overloading *) - -fun check_overloading ctxt overloaded (c, T) = - let - val thy = Proof_Context.theory_of ctxt; - - val declT = Sign.the_const_constraint thy c - handle TYPE (msg, _, _) => error msg; - val T' = Logic.varifyT_global T; - - fun message sorts txt = - [Pretty.block [Pretty.str "Specification of constant ", - Pretty.str c, Pretty.str " ::", Pretty.brk 1, - Pretty.quote (Syntax.pretty_typ (Config.put show_sorts sorts ctxt) T)], - Pretty.str txt] |> Pretty.chunks |> Pretty.string_of; - in - if Sign.typ_instance thy (declT, T') then () - else if Type.raw_instance (declT, T') then - error (message true "imposes additional sort constraints on the constant declaration") - else if overloaded then () - else - error (message false "is strictly less general than the declared type (overloading required)") - end; - - -(* definitional axioms *) - -local - -fun check_def ctxt thy unchecked overloaded (b, tm) defs = - let - val name = Sign.full_name thy b; - val ((lhs, rhs), _) = Primitive_Defs.dest_def ctxt Term.is_Const (K false) (K false) tm - handle TERM (msg, _) => error msg; - val lhs_const = Term.dest_Const (Term.head_of lhs); - val rhs_consts = fold_aterms (fn Const const => insert (op =) const | _ => I) rhs []; - val _ = check_overloading ctxt overloaded lhs_const; - in defs |> dependencies ctxt unchecked (SOME name) name lhs_const rhs_consts end - handle ERROR msg => cat_error msg (Pretty.string_of (Pretty.block - [Pretty.str ("The error(s) above occurred in definition " ^ Binding.print b ^ ":"), - Pretty.fbrk, Pretty.quote (Syntax.pretty_term ctxt tm)])); - -in - -fun add_def ctxt unchecked overloaded raw_axm thy = - let val axm = cert_axm ctxt raw_axm in - thy - |> map_defs (check_def ctxt thy unchecked overloaded axm) - |> add_axiom ctxt axm - end; - -end; - -end; diff --git a/core/Pure/thm.ML b/core/Pure/thm.ML deleted file mode 100644 index fab07ed0..00000000 --- a/core/Pure/thm.ML +++ /dev/null @@ -1,1757 +0,0 @@ -(* Title: Pure/thm.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - Author: Makarius - -The very core of Isabelle's Meta Logic: certified types and terms, -derivations, theorems, framework rules (including lifting and -resolution), oracles. 
-*) - -signature BASIC_THM = - sig - (*certified types*) - type ctyp - val rep_ctyp: ctyp -> {thy: theory, T: typ, maxidx: int, sorts: sort Ord_List.T} - val theory_of_ctyp: ctyp -> theory - val typ_of: ctyp -> typ - val ctyp_of: theory -> typ -> ctyp - - (*certified terms*) - type cterm - exception CTERM of string * cterm list - val rep_cterm: cterm -> {thy: theory, t: term, T: typ, maxidx: int, sorts: sort Ord_List.T} - val crep_cterm: cterm -> {thy: theory, t: term, T: ctyp, maxidx: int, sorts: sort Ord_List.T} - val theory_of_cterm: cterm -> theory - val term_of: cterm -> term - val cterm_of: theory -> term -> cterm - val ctyp_of_term: cterm -> ctyp - - (*theorems*) - type thm - type conv = cterm -> thm - val rep_thm: thm -> - {thy: theory, - tags: Properties.T, - maxidx: int, - shyps: sort Ord_List.T, - hyps: term Ord_List.T, - tpairs: (term * term) list, - prop: term} - val crep_thm: thm -> - {thy: theory, - tags: Properties.T, - maxidx: int, - shyps: sort Ord_List.T, - hyps: cterm Ord_List.T, - tpairs: (cterm * cterm) list, - prop: cterm} - exception THM of string * int * thm list - val theory_of_thm: thm -> theory - val prop_of: thm -> term - val concl_of: thm -> term - val prems_of: thm -> term list - val nprems_of: thm -> int - val cprop_of: thm -> cterm - val cprem_of: thm -> int -> cterm -end; - -signature THM = -sig - include BASIC_THM - val dest_ctyp: ctyp -> ctyp list - val dest_comb: cterm -> cterm * cterm - val dest_fun: cterm -> cterm - val dest_arg: cterm -> cterm - val dest_fun2: cterm -> cterm - val dest_arg1: cterm -> cterm - val dest_abs: string option -> cterm -> cterm * cterm - val apply: cterm -> cterm -> cterm - val lambda_name: string * cterm -> cterm -> cterm - val lambda: cterm -> cterm -> cterm - val adjust_maxidx_cterm: int -> cterm -> cterm - val incr_indexes_cterm: int -> cterm -> cterm - val match: cterm * cterm -> (ctyp * ctyp) list * (cterm * cterm) list - val first_order_match: cterm * cterm -> (ctyp * ctyp) list * (cterm * cterm) list - val fold_terms: (term -> 'a -> 'a) -> thm -> 'a -> 'a - val terms_of_tpairs: (term * term) list -> term list - val full_prop_of: thm -> term - val maxidx_of: thm -> int - val maxidx_thm: thm -> int -> int - val hyps_of: thm -> term list - val tpairs_of: thm -> (term * term) list - val no_prems: thm -> bool - val major_prem_of: thm -> term - val transfer: theory -> thm -> thm - val weaken: cterm -> thm -> thm - val weaken_sorts: sort list -> cterm -> cterm - val extra_shyps: thm -> sort list - val proof_bodies_of: thm list -> proof_body list - val proof_body_of: thm -> proof_body - val proof_of: thm -> proof - val join_proofs: thm list -> unit - val peek_status: thm -> {oracle: bool, unfinished: bool, failed: bool} - val future: thm future -> cterm -> thm - val derivation_name: thm -> string - val name_derivation: string -> thm -> thm - val axiom: theory -> string -> thm - val axioms_of: theory -> (string * thm) list - val get_tags: thm -> Properties.T - val map_tags: (Properties.T -> Properties.T) -> thm -> thm - val norm_proof: thm -> thm - val adjust_maxidx_thm: int -> thm -> thm - (*meta rules*) - val assume: cterm -> thm - val implies_intr: cterm -> thm -> thm - val implies_elim: thm -> thm -> thm - val forall_intr: cterm -> thm -> thm - val forall_elim: cterm -> thm -> thm - val reflexive: cterm -> thm - val symmetric: thm -> thm - val transitive: thm -> thm -> thm - val beta_conversion: bool -> conv - val eta_conversion: conv - val eta_long_conversion: conv - val abstract_rule: string -> cterm -> thm -> thm - val 
combination: thm -> thm -> thm - val equal_intr: thm -> thm -> thm - val equal_elim: thm -> thm -> thm - val flexflex_rule: thm -> thm Seq.seq - val generalize: string list * string list -> int -> thm -> thm - val instantiate: (ctyp * ctyp) list * (cterm * cterm) list -> thm -> thm - val instantiate_cterm: (ctyp * ctyp) list * (cterm * cterm) list -> cterm -> cterm - val trivial: cterm -> thm - val of_class: ctyp * class -> thm - val strip_shyps: thm -> thm - val unconstrainT: thm -> thm - val varifyT_global': (string * sort) list -> thm -> ((string * sort) * indexname) list * thm - val varifyT_global: thm -> thm - val legacy_freezeT: thm -> thm - val lift_rule: cterm -> thm -> thm - val incr_indexes: int -> thm -> thm - val assumption: int -> thm -> thm Seq.seq - val eq_assumption: int -> thm -> thm - val rotate_rule: int -> int -> thm -> thm - val permute_prems: int -> int -> thm -> thm - val rename_params_rule: string list * int -> thm -> thm - val rename_boundvars: term -> term -> thm -> thm - val bicompose: {flatten: bool, match: bool, incremented: bool} -> - bool * thm * int -> int -> thm -> thm Seq.seq - val biresolution: bool -> (bool * thm) list -> int -> thm -> thm Seq.seq - val extern_oracles: Proof.context -> (Markup.T * xstring) list - val add_oracle: binding * ('a -> cterm) -> theory -> (string * ('a -> thm)) * theory -end; - -structure Thm: THM = -struct - -(*** Certified terms and types ***) - -(** certified types **) - -abstype ctyp = Ctyp of {thy: theory, T: typ, maxidx: int, sorts: sort Ord_List.T} -with - -fun rep_ctyp (Ctyp args) = args; -fun theory_of_ctyp (Ctyp {thy, ...}) = thy; -fun typ_of (Ctyp {T, ...}) = T; - -fun ctyp_of thy raw_T = - let - val T = Sign.certify_typ thy raw_T; - val maxidx = Term.maxidx_of_typ T; - val sorts = Sorts.insert_typ T []; - in Ctyp {thy = thy, T = T, maxidx = maxidx, sorts = sorts} end; - -fun dest_ctyp (Ctyp {thy, T = Type (_, Ts), maxidx, sorts}) = - map (fn T => Ctyp {thy = thy, T = T, maxidx = maxidx, sorts = sorts}) Ts - | dest_ctyp cT = raise TYPE ("dest_ctyp", [typ_of cT], []); - - - -(** certified terms **) - -(*certified terms with checked typ, maxidx, and sorts*) -abstype cterm = Cterm of {thy: theory, t: term, T: typ, maxidx: int, sorts: sort Ord_List.T} -with - -exception CTERM of string * cterm list; - -fun rep_cterm (Cterm args) = args; - -fun crep_cterm (Cterm {thy, t, T, maxidx, sorts}) = - {thy = thy, t = t, maxidx = maxidx, sorts = sorts, - T = Ctyp {thy = thy, T = T, maxidx = maxidx, sorts = sorts}}; - -fun theory_of_cterm (Cterm {thy, ...}) = thy; -fun term_of (Cterm {t, ...}) = t; - -fun ctyp_of_term (Cterm {thy, T, maxidx, sorts, ...}) = - Ctyp {thy = thy, T = T, maxidx = maxidx, sorts = sorts}; - -fun cterm_of thy tm = - let - val (t, T, maxidx) = Sign.certify_term thy tm; - val sorts = Sorts.insert_term t []; - in Cterm {thy = thy, t = t, T = T, maxidx = maxidx, sorts = sorts} end; - -fun merge_thys0 (Cterm {thy = thy1, ...}) (Cterm {thy = thy2, ...}) = - Theory.merge (thy1, thy2); - - -(* destructors *) - -fun dest_comb (Cterm {t = c $ a, T, thy, maxidx, sorts}) = - let val A = Term.argument_type_of c 0 in - (Cterm {t = c, T = A --> T, thy = thy, maxidx = maxidx, sorts = sorts}, - Cterm {t = a, T = A, thy = thy, maxidx = maxidx, sorts = sorts}) - end - | dest_comb ct = raise CTERM ("dest_comb", [ct]); - -fun dest_fun (Cterm {t = c $ _, T, thy, maxidx, sorts}) = - let val A = Term.argument_type_of c 0 - in Cterm {t = c, T = A --> T, thy = thy, maxidx = maxidx, sorts = sorts} end - | dest_fun ct = raise CTERM 
("dest_fun", [ct]); - -fun dest_arg (Cterm {t = c $ a, T = _, thy, maxidx, sorts}) = - let val A = Term.argument_type_of c 0 - in Cterm {t = a, T = A, thy = thy, maxidx = maxidx, sorts = sorts} end - | dest_arg ct = raise CTERM ("dest_arg", [ct]); - - -fun dest_fun2 (Cterm {t = c $ _ $ _, T, thy, maxidx, sorts}) = - let - val A = Term.argument_type_of c 0; - val B = Term.argument_type_of c 1; - in Cterm {t = c, T = A --> B --> T, thy = thy, maxidx = maxidx, sorts = sorts} end - | dest_fun2 ct = raise CTERM ("dest_fun2", [ct]); - -fun dest_arg1 (Cterm {t = c $ a $ _, T = _, thy, maxidx, sorts}) = - let val A = Term.argument_type_of c 0 - in Cterm {t = a, T = A, thy = thy, maxidx = maxidx, sorts = sorts} end - | dest_arg1 ct = raise CTERM ("dest_arg1", [ct]); - -fun dest_abs a (Cterm {t = Abs (x, T, t), T = Type ("fun", [_, U]), thy, maxidx, sorts}) = - let val (y', t') = Term.dest_abs (the_default x a, T, t) in - (Cterm {t = Free (y', T), T = T, thy = thy, maxidx = maxidx, sorts = sorts}, - Cterm {t = t', T = U, thy = thy, maxidx = maxidx, sorts = sorts}) - end - | dest_abs _ ct = raise CTERM ("dest_abs", [ct]); - - -(* constructors *) - -fun apply - (cf as Cterm {t = f, T = Type ("fun", [dty, rty]), maxidx = maxidx1, sorts = sorts1, ...}) - (cx as Cterm {t = x, T, maxidx = maxidx2, sorts = sorts2, ...}) = - if T = dty then - Cterm {thy = merge_thys0 cf cx, - t = f $ x, - T = rty, - maxidx = Int.max (maxidx1, maxidx2), - sorts = Sorts.union sorts1 sorts2} - else raise CTERM ("apply: types don't agree", [cf, cx]) - | apply cf cx = raise CTERM ("apply: first arg is not a function", [cf, cx]); - -fun lambda_name - (x, ct1 as Cterm {t = t1, T = T1, maxidx = maxidx1, sorts = sorts1, ...}) - (ct2 as Cterm {t = t2, T = T2, maxidx = maxidx2, sorts = sorts2, ...}) = - let val t = Term.lambda_name (x, t1) t2 in - Cterm {thy = merge_thys0 ct1 ct2, - t = t, T = T1 --> T2, - maxidx = Int.max (maxidx1, maxidx2), - sorts = Sorts.union sorts1 sorts2} - end; - -fun lambda t u = lambda_name ("", t) u; - - -(* indexes *) - -fun adjust_maxidx_cterm i (ct as Cterm {thy, t, T, maxidx, sorts}) = - if maxidx = i then ct - else if maxidx < i then - Cterm {maxidx = i, thy = thy, t = t, T = T, sorts = sorts} - else - Cterm {maxidx = Int.max (maxidx_of_term t, i), thy = thy, t = t, T = T, sorts = sorts}; - -fun incr_indexes_cterm i (ct as Cterm {thy, t, T, maxidx, sorts}) = - if i < 0 then raise CTERM ("negative increment", [ct]) - else if i = 0 then ct - else Cterm {thy = thy, t = Logic.incr_indexes ([], i) t, - T = Logic.incr_tvar i T, maxidx = maxidx + i, sorts = sorts}; - - -(* matching *) - -local - -fun gen_match match - (ct1 as Cterm {t = t1, sorts = sorts1, ...}, - ct2 as Cterm {t = t2, sorts = sorts2, maxidx = maxidx2, ...}) = - let - val thy = merge_thys0 ct1 ct2; - val (Tinsts, tinsts) = match thy (t1, t2) (Vartab.empty, Vartab.empty); - val sorts = Sorts.union sorts1 sorts2; - fun mk_cTinst ((a, i), (S, T)) = - (Ctyp {T = TVar ((a, i), S), thy = thy, maxidx = i, sorts = sorts}, - Ctyp {T = T, thy = thy, maxidx = maxidx2, sorts = sorts}); - fun mk_ctinst ((x, i), (T, t)) = - let val T = Envir.subst_type Tinsts T in - (Cterm {t = Var ((x, i), T), T = T, thy = thy, maxidx = i, sorts = sorts}, - Cterm {t = t, T = T, thy = thy, maxidx = maxidx2, sorts = sorts}) - end; - in (Vartab.fold (cons o mk_cTinst) Tinsts [], Vartab.fold (cons o mk_ctinst) tinsts []) end; - -in - -val match = gen_match Pattern.match; -val first_order_match = gen_match Pattern.first_order_match; - -end; - - - -(*** Derivations and Theorems 
***) - -abstype thm = Thm of - deriv * (*derivation*) - {thy: theory, (*background theory*) - tags: Properties.T, (*additional annotations/comments*) - maxidx: int, (*maximum index of any Var or TVar*) - shyps: sort Ord_List.T, (*sort hypotheses*) - hyps: term Ord_List.T, (*hypotheses*) - tpairs: (term * term) list, (*flex-flex pairs*) - prop: term} (*conclusion*) -and deriv = Deriv of - {promises: (serial * thm future) Ord_List.T, - body: Proofterm.proof_body} -with - -type conv = cterm -> thm; - -(*errors involving theorems*) -exception THM of string * int * thm list; - -fun rep_thm (Thm (_, args)) = args; - -fun crep_thm (Thm (_, {thy, tags, maxidx, shyps, hyps, tpairs, prop})) = - let fun cterm max t = Cterm {thy = thy, t = t, T = propT, maxidx = max, sorts = shyps} in - {thy = thy, tags = tags, maxidx = maxidx, shyps = shyps, - hyps = map (cterm ~1) hyps, - tpairs = map (pairself (cterm maxidx)) tpairs, - prop = cterm maxidx prop} - end; - -fun fold_terms f (Thm (_, {tpairs, prop, hyps, ...})) = - fold (fn (t, u) => f t #> f u) tpairs #> f prop #> fold f hyps; - -fun terms_of_tpairs tpairs = fold_rev (fn (t, u) => cons t o cons u) tpairs []; - -fun eq_tpairs ((t, u), (t', u')) = t aconv t' andalso u aconv u'; -fun union_tpairs ts us = Library.merge eq_tpairs (ts, us); -val maxidx_tpairs = fold (fn (t, u) => Term.maxidx_term t #> Term.maxidx_term u); - -fun attach_tpairs tpairs prop = - Logic.list_implies (map Logic.mk_equals tpairs, prop); - -fun full_prop_of (Thm (_, {tpairs, prop, ...})) = attach_tpairs tpairs prop; - -val union_hyps = Ord_List.union Term_Ord.fast_term_ord; -val insert_hyps = Ord_List.insert Term_Ord.fast_term_ord; -val remove_hyps = Ord_List.remove Term_Ord.fast_term_ord; - - -(* merge theories of cterms/thms -- trivial absorption only *) - -fun merge_thys1 (Cterm {thy = thy1, ...}) (Thm (_, {thy = thy2, ...})) = - Theory.merge (thy1, thy2); - -fun merge_thys2 (Thm (_, {thy = thy1, ...})) (Thm (_, {thy = thy2, ...})) = - Theory.merge (thy1, thy2); - - -(* basic components *) - -val theory_of_thm = #thy o rep_thm; -val maxidx_of = #maxidx o rep_thm; -fun maxidx_thm th i = Int.max (maxidx_of th, i); -val hyps_of = #hyps o rep_thm; -val prop_of = #prop o rep_thm; -val tpairs_of = #tpairs o rep_thm; - -val concl_of = Logic.strip_imp_concl o prop_of; -val prems_of = Logic.strip_imp_prems o prop_of; -val nprems_of = Logic.count_prems o prop_of; -fun no_prems th = nprems_of th = 0; - -fun major_prem_of th = - (case prems_of th of - prem :: _ => Logic.strip_assums_concl prem - | [] => raise THM ("major_prem_of: rule with no premises", 0, [th])); - -(*the statement of any thm is a cterm*) -fun cprop_of (Thm (_, {thy, maxidx, shyps, prop, ...})) = - Cterm {thy = thy, maxidx = maxidx, T = propT, t = prop, sorts = shyps}; - -fun cprem_of (th as Thm (_, {thy, maxidx, shyps, prop, ...})) i = - Cterm {thy = thy, maxidx = maxidx, T = propT, sorts = shyps, - t = Logic.nth_prem (i, prop) handle TERM _ => raise THM ("cprem_of", i, [th])}; - -(*explicit transfer to a super theory*) -fun transfer thy' thm = - let - val Thm (der, {thy, tags, maxidx, shyps, hyps, tpairs, prop}) = thm; - val _ = Theory.subthy (thy, thy') orelse raise THM ("transfer: not a super theory", 0, [thm]); - in - if Theory.eq_thy (thy, thy') then thm - else - Thm (der, - {thy = thy', - tags = tags, - maxidx = maxidx, - shyps = shyps, - hyps = hyps, - tpairs = tpairs, - prop = prop}) - end; - -(*explicit weakening: maps |- B to A |- B*) -fun weaken raw_ct th = - let - val ct as Cterm {t = A, T, sorts, maxidx = 
maxidxA, ...} = adjust_maxidx_cterm ~1 raw_ct; - val Thm (der, {tags, maxidx, shyps, hyps, tpairs, prop, ...}) = th; - in - if T <> propT then - raise THM ("weaken: assumptions must have type prop", 0, []) - else if maxidxA <> ~1 then - raise THM ("weaken: assumptions may not contain schematic variables", maxidxA, []) - else - Thm (der, - {thy = merge_thys1 ct th, - tags = tags, - maxidx = maxidx, - shyps = Sorts.union sorts shyps, - hyps = insert_hyps A hyps, - tpairs = tpairs, - prop = prop}) - end; - -fun weaken_sorts raw_sorts ct = - let - val Cterm {thy, t, T, maxidx, sorts} = ct; - val more_sorts = Sorts.make (map (Sign.certify_sort thy) raw_sorts); - val sorts' = Sorts.union sorts more_sorts; - in Cterm {thy = thy, t = t, T = T, maxidx = maxidx, sorts = sorts'} end; - -(*dangling sort constraints of a thm*) -fun extra_shyps (th as Thm (_, {shyps, ...})) = - Sorts.subtract (fold_terms Sorts.insert_term th []) shyps; - - - -(** derivations and promised proofs **) - -fun make_deriv promises oracles thms proof = - Deriv {promises = promises, body = PBody {oracles = oracles, thms = thms, proof = proof}}; - -val empty_deriv = make_deriv [] [] [] Proofterm.MinProof; - - -(* inference rules *) - -fun promise_ord ((i, _), (j, _)) = int_ord (j, i); - -fun deriv_rule2 f - (Deriv {promises = ps1, body = PBody {oracles = oras1, thms = thms1, proof = prf1}}) - (Deriv {promises = ps2, body = PBody {oracles = oras2, thms = thms2, proof = prf2}}) = - let - val ps = Ord_List.union promise_ord ps1 ps2; - val oras = Proofterm.unions_oracles [oras1, oras2]; - val thms = Proofterm.unions_thms [thms1, thms2]; - val prf = - (case ! Proofterm.proofs of - 2 => f prf1 prf2 - | 1 => MinProof - | 0 => MinProof - | i => error ("Illegal level of detail for proof objects: " ^ string_of_int i)); - in make_deriv ps oras thms prf end; - -fun deriv_rule1 f = deriv_rule2 (K f) empty_deriv; -fun deriv_rule0 prf = deriv_rule1 I (make_deriv [] [] [] prf); - -fun deriv_rule_unconditional f (Deriv {promises, body = PBody {oracles, thms, proof}}) = - make_deriv promises oracles thms (f proof); - - -(* fulfilled proofs *) - -fun raw_body_of (Thm (Deriv {body, ...}, _)) = body; -fun raw_promises_of (Thm (Deriv {promises, ...}, _)) = promises; - -fun join_promises [] = () - | join_promises promises = join_promises_of (Future.joins (map snd promises)) -and join_promises_of thms = join_promises (Ord_List.make promise_ord (maps raw_promises_of thms)); - -fun fulfill_body (Thm (Deriv {promises, body}, {thy, ...})) = - Proofterm.fulfill_norm_proof thy (fulfill_promises promises) body -and fulfill_promises promises = - map fst promises ~~ map fulfill_body (Future.joins (map snd promises)); - -fun proof_bodies_of thms = - let - val _ = join_promises_of thms; - val bodies = map fulfill_body thms; - val _ = Proofterm.join_bodies bodies; - in bodies end; - -val proof_body_of = singleton proof_bodies_of; -val proof_of = Proofterm.proof_of o proof_body_of; - -val join_proofs = ignore o proof_bodies_of; - - -(* derivation status *) - -fun peek_status (Thm (Deriv {promises, body}, _)) = - let - val ps = map (Future.peek o snd) promises; - val bodies = body :: - map_filter (fn SOME (Exn.Res th) => SOME (raw_body_of th) | _ => NONE) ps; - val {oracle, unfinished, failed} = Proofterm.peek_status bodies; - in - {oracle = oracle, - unfinished = unfinished orelse exists is_none ps, - failed = failed orelse exists (fn SOME (Exn.Exn _) => true | _ => false) ps} - end; - - -(* future rule *) - -fun future_result i orig_thy orig_shyps orig_prop thm = 
- let - fun err msg = raise THM ("future_result: " ^ msg, 0, [thm]); - val Thm (Deriv {promises, ...}, {thy, shyps, hyps, tpairs, prop, ...}) = thm; - - val _ = Theory.eq_thy (thy, orig_thy) orelse err "bad theory"; - val _ = prop aconv orig_prop orelse err "bad prop"; - val _ = null tpairs orelse err "bad tpairs"; - val _ = null hyps orelse err "bad hyps"; - val _ = Sorts.subset (shyps, orig_shyps) orelse err "bad shyps"; - val _ = forall (fn (j, _) => i <> j) promises orelse err "bad dependencies"; - val _ = join_promises promises; - in thm end; - -fun future future_thm ct = - let - val Cterm {thy = thy, t = prop, T, maxidx, sorts} = ct; - val _ = T <> propT andalso raise CTERM ("future: prop expected", [ct]); - - val i = serial (); - val future = future_thm |> Future.map (future_result i thy sorts prop); - in - Thm (make_deriv [(i, future)] [] [] (Proofterm.promise_proof thy i prop), - {thy = thy, - tags = [], - maxidx = maxidx, - shyps = sorts, - hyps = [], - tpairs = [], - prop = prop}) - end; - - -(* closed derivations with official name *) - -(*non-deterministic, depends on unknown promises*) -fun derivation_name (Thm (Deriv {body, ...}, {shyps, hyps, prop, ...})) = - Proofterm.get_name shyps hyps prop (Proofterm.proof_of body); - -fun name_derivation name (thm as Thm (der, args)) = - let - val Deriv {promises, body} = der; - val {thy, shyps, hyps, prop, tpairs, ...} = args; - val _ = null tpairs orelse raise THM ("put_name: unsolved flex-flex constraints", 0, [thm]); - - val ps = map (apsnd (Future.map fulfill_body)) promises; - val (pthm, proof) = Proofterm.thm_proof thy name shyps hyps prop ps body; - val der' = make_deriv [] [] [pthm] proof; - in Thm (der', args) end; - - - -(** Axioms **) - -fun axiom theory name = - let - fun get_ax thy = - Name_Space.lookup_key (Theory.axiom_table thy) name - |> Option.map (fn (_, prop) => - let - val der = deriv_rule0 (Proofterm.axm_proof name prop); - val maxidx = maxidx_of_term prop; - val shyps = Sorts.insert_term prop []; - in - Thm (der, {thy = thy, tags = [], - maxidx = maxidx, shyps = shyps, hyps = [], tpairs = [], prop = prop}) - end); - in - (case get_first get_ax (Theory.nodes_of theory) of - SOME thm => thm - | NONE => raise THEORY ("No axiom " ^ quote name, [theory])) - end; - -(*return additional axioms of this theory node*) -fun axioms_of thy = - map (fn (name, _) => (name, axiom thy name)) (Theory.axioms_of thy); - - -(* tags *) - -val get_tags = #tags o rep_thm; - -fun map_tags f (Thm (der, {thy, tags, maxidx, shyps, hyps, tpairs, prop})) = - Thm (der, {thy = thy, tags = f tags, maxidx = maxidx, - shyps = shyps, hyps = hyps, tpairs = tpairs, prop = prop}); - - -(* technical adjustments *) - -fun norm_proof (Thm (der, args as {thy, ...})) = - Thm (deriv_rule1 (Proofterm.rew_proof thy) der, args); - -fun adjust_maxidx_thm i (th as Thm (der, {thy, tags, maxidx, shyps, hyps, tpairs, prop})) = - if maxidx = i then th - else if maxidx < i then - Thm (der, {maxidx = i, thy = thy, tags = tags, shyps = shyps, - hyps = hyps, tpairs = tpairs, prop = prop}) - else - Thm (der, {maxidx = Int.max (maxidx_tpairs tpairs (maxidx_of_term prop), i), thy = thy, - tags = tags, shyps = shyps, hyps = hyps, tpairs = tpairs, prop = prop}); - - - -(*** Meta rules ***) - -(** primitive rules **) - -(*The assumption rule A |- A*) -fun assume raw_ct = - let val Cterm {thy, t = prop, T, maxidx, sorts} = adjust_maxidx_cterm ~1 raw_ct in - if T <> propT then - raise THM ("assume: prop", 0, []) - else if maxidx <> ~1 then - raise THM ("assume: variables", 
maxidx, []) - else Thm (deriv_rule0 (Proofterm.Hyp prop), - {thy = thy, - tags = [], - maxidx = ~1, - shyps = sorts, - hyps = [prop], - tpairs = [], - prop = prop}) - end; - -(*Implication introduction - [A] - : - B - ------- - A ==> B -*) -fun implies_intr - (ct as Cterm {t = A, T, maxidx = maxidxA, sorts, ...}) - (th as Thm (der, {maxidx, hyps, shyps, tpairs, prop, ...})) = - if T <> propT then - raise THM ("implies_intr: assumptions must have type prop", 0, [th]) - else - Thm (deriv_rule1 (Proofterm.implies_intr_proof A) der, - {thy = merge_thys1 ct th, - tags = [], - maxidx = Int.max (maxidxA, maxidx), - shyps = Sorts.union sorts shyps, - hyps = remove_hyps A hyps, - tpairs = tpairs, - prop = Logic.mk_implies (A, prop)}); - - -(*Implication elimination - A ==> B A - ------------ - B -*) -fun implies_elim thAB thA = - let - val Thm (derA, {maxidx = maxA, hyps = hypsA, shyps = shypsA, tpairs = tpairsA, - prop = propA, ...}) = thA - and Thm (der, {maxidx, hyps, shyps, tpairs, prop, ...}) = thAB; - fun err () = raise THM ("implies_elim: major premise", 0, [thAB, thA]); - in - case prop of - Const ("Pure.imp", _) $ A $ B => - if A aconv propA then - Thm (deriv_rule2 (curry Proofterm.%%) der derA, - {thy = merge_thys2 thAB thA, - tags = [], - maxidx = Int.max (maxA, maxidx), - shyps = Sorts.union shypsA shyps, - hyps = union_hyps hypsA hyps, - tpairs = union_tpairs tpairsA tpairs, - prop = B}) - else err () - | _ => err () - end; - -(*Forall introduction. The Free or Var x must not be free in the hypotheses. - [x] - : - A - ------ - !!x. A -*) -fun forall_intr - (ct as Cterm {t = x, T, sorts, ...}) - (th as Thm (der, {maxidx, shyps, hyps, tpairs, prop, ...})) = - let - fun result a = - Thm (deriv_rule1 (Proofterm.forall_intr_proof x a) der, - {thy = merge_thys1 ct th, - tags = [], - maxidx = maxidx, - shyps = Sorts.union sorts shyps, - hyps = hyps, - tpairs = tpairs, - prop = Logic.all_const T $ Abs (a, T, abstract_over (x, prop))}); - fun check_occs a x ts = - if exists (fn t => Logic.occs (x, t)) ts then - raise THM ("forall_intr: variable " ^ quote a ^ " free in assumptions", 0, [th]) - else (); - in - (case x of - Free (a, _) => (check_occs a x hyps; check_occs a x (terms_of_tpairs tpairs); result a) - | Var ((a, _), _) => (check_occs a x (terms_of_tpairs tpairs); result a) - | _ => raise THM ("forall_intr: not a variable", 0, [th])) - end; - -(*Forall elimination - !!x. 
A - ------ - A[t/x] -*) -fun forall_elim - (ct as Cterm {t, T, maxidx = maxt, sorts, ...}) - (th as Thm (der, {maxidx, shyps, hyps, tpairs, prop, ...})) = - (case prop of - Const ("Pure.all", Type ("fun", [Type ("fun", [qary, _]), _])) $ A => - if T <> qary then - raise THM ("forall_elim: type mismatch", 0, [th]) - else - Thm (deriv_rule1 (Proofterm.% o rpair (SOME t)) der, - {thy = merge_thys1 ct th, - tags = [], - maxidx = Int.max (maxidx, maxt), - shyps = Sorts.union sorts shyps, - hyps = hyps, - tpairs = tpairs, - prop = Term.betapply (A, t)}) - | _ => raise THM ("forall_elim: not quantified", 0, [th])); - - -(* Equality *) - -(*Reflexivity - t == t -*) -fun reflexive (Cterm {thy, t, T = _, maxidx, sorts}) = - Thm (deriv_rule0 Proofterm.reflexive, - {thy = thy, - tags = [], - maxidx = maxidx, - shyps = sorts, - hyps = [], - tpairs = [], - prop = Logic.mk_equals (t, t)}); - -(*Symmetry - t == u - ------ - u == t -*) -fun symmetric (th as Thm (der, {thy, maxidx, shyps, hyps, tpairs, prop, ...})) = - (case prop of - (eq as Const ("Pure.eq", _)) $ t $ u => - Thm (deriv_rule1 Proofterm.symmetric der, - {thy = thy, - tags = [], - maxidx = maxidx, - shyps = shyps, - hyps = hyps, - tpairs = tpairs, - prop = eq $ u $ t}) - | _ => raise THM ("symmetric", 0, [th])); - -(*Transitivity - t1 == u u == t2 - ------------------ - t1 == t2 -*) -fun transitive th1 th2 = - let - val Thm (der1, {maxidx = max1, hyps = hyps1, shyps = shyps1, tpairs = tpairs1, - prop = prop1, ...}) = th1 - and Thm (der2, {maxidx = max2, hyps = hyps2, shyps = shyps2, tpairs = tpairs2, - prop = prop2, ...}) = th2; - fun err msg = raise THM ("transitive: " ^ msg, 0, [th1, th2]); - in - case (prop1, prop2) of - ((eq as Const ("Pure.eq", Type (_, [T, _]))) $ t1 $ u, Const ("Pure.eq", _) $ u' $ t2) => - if not (u aconv u') then err "middle term" - else - Thm (deriv_rule2 (Proofterm.transitive u T) der1 der2, - {thy = merge_thys2 th1 th2, - tags = [], - maxidx = Int.max (max1, max2), - shyps = Sorts.union shyps1 shyps2, - hyps = union_hyps hyps1 hyps2, - tpairs = union_tpairs tpairs1 tpairs2, - prop = eq $ t1 $ t2}) - | _ => err "premises" - end; - -(*Beta-conversion - (%x. t)(u) == t[u/x] - fully beta-reduces the term if full = true -*) -fun beta_conversion full (Cterm {thy, t, T = _, maxidx, sorts}) = - let val t' = - if full then Envir.beta_norm t - else - (case t of Abs (_, _, bodt) $ u => subst_bound (u, bodt) - | _ => raise THM ("beta_conversion: not a redex", 0, [])); - in - Thm (deriv_rule0 Proofterm.reflexive, - {thy = thy, - tags = [], - maxidx = maxidx, - shyps = sorts, - hyps = [], - tpairs = [], - prop = Logic.mk_equals (t, t')}) - end; - -fun eta_conversion (Cterm {thy, t, T = _, maxidx, sorts}) = - Thm (deriv_rule0 Proofterm.reflexive, - {thy = thy, - tags = [], - maxidx = maxidx, - shyps = sorts, - hyps = [], - tpairs = [], - prop = Logic.mk_equals (t, Envir.eta_contract t)}); - -fun eta_long_conversion (Cterm {thy, t, T = _, maxidx, sorts}) = - Thm (deriv_rule0 Proofterm.reflexive, - {thy = thy, - tags = [], - maxidx = maxidx, - shyps = sorts, - hyps = [], - tpairs = [], - prop = Logic.mk_equals (t, Envir.eta_long [] t)}); - -(*The abstraction rule. The Free or Var x must not be free in the hypotheses. - The bound variable will be named "a" (since x will be something like x320) - t == u - -------------- - %x. t == %x. 
u -*) -fun abstract_rule a - (Cterm {t = x, T, sorts, ...}) - (th as Thm (der, {thy, maxidx, hyps, shyps, tpairs, prop, ...})) = - let - val (t, u) = Logic.dest_equals prop - handle TERM _ => raise THM ("abstract_rule: premise not an equality", 0, [th]); - val result = - Thm (deriv_rule1 (Proofterm.abstract_rule x a) der, - {thy = thy, - tags = [], - maxidx = maxidx, - shyps = Sorts.union sorts shyps, - hyps = hyps, - tpairs = tpairs, - prop = Logic.mk_equals - (Abs (a, T, abstract_over (x, t)), Abs (a, T, abstract_over (x, u)))}); - fun check_occs a x ts = - if exists (fn t => Logic.occs (x, t)) ts then - raise THM ("abstract_rule: variable " ^ quote a ^ " free in assumptions", 0, [th]) - else (); - in - (case x of - Free (a, _) => (check_occs a x hyps; check_occs a x (terms_of_tpairs tpairs); result) - | Var ((a, _), _) => (check_occs a x (terms_of_tpairs tpairs); result) - | _ => raise THM ("abstract_rule: not a variable", 0, [th])) - end; - -(*The combination rule - f == g t == u - -------------- - f t == g u -*) -fun combination th1 th2 = - let - val Thm (der1, {maxidx = max1, shyps = shyps1, hyps = hyps1, tpairs = tpairs1, - prop = prop1, ...}) = th1 - and Thm (der2, {maxidx = max2, shyps = shyps2, hyps = hyps2, tpairs = tpairs2, - prop = prop2, ...}) = th2; - fun chktypes fT tT = - (case fT of - Type ("fun", [T1, _]) => - if T1 <> tT then - raise THM ("combination: types", 0, [th1, th2]) - else () - | _ => raise THM ("combination: not function type", 0, [th1, th2])); - in - (case (prop1, prop2) of - (Const ("Pure.eq", Type ("fun", [fT, _])) $ f $ g, - Const ("Pure.eq", Type ("fun", [tT, _])) $ t $ u) => - (chktypes fT tT; - Thm (deriv_rule2 (Proofterm.combination f g t u fT) der1 der2, - {thy = merge_thys2 th1 th2, - tags = [], - maxidx = Int.max (max1, max2), - shyps = Sorts.union shyps1 shyps2, - hyps = union_hyps hyps1 hyps2, - tpairs = union_tpairs tpairs1 tpairs2, - prop = Logic.mk_equals (f $ t, g $ u)})) - | _ => raise THM ("combination: premises", 0, [th1, th2])) - end; - -(*Equality introduction - A ==> B B ==> A - ---------------- - A == B -*) -fun equal_intr th1 th2 = - let - val Thm (der1, {maxidx = max1, shyps = shyps1, hyps = hyps1, tpairs = tpairs1, - prop = prop1, ...}) = th1 - and Thm (der2, {maxidx = max2, shyps = shyps2, hyps = hyps2, tpairs = tpairs2, - prop = prop2, ...}) = th2; - fun err msg = raise THM ("equal_intr: " ^ msg, 0, [th1, th2]); - in - (case (prop1, prop2) of - (Const("Pure.imp", _) $ A $ B, Const("Pure.imp", _) $ B' $ A') => - if A aconv A' andalso B aconv B' then - Thm (deriv_rule2 (Proofterm.equal_intr A B) der1 der2, - {thy = merge_thys2 th1 th2, - tags = [], - maxidx = Int.max (max1, max2), - shyps = Sorts.union shyps1 shyps2, - hyps = union_hyps hyps1 hyps2, - tpairs = union_tpairs tpairs1 tpairs2, - prop = Logic.mk_equals (A, B)}) - else err "not equal" - | _ => err "premises") - end; - -(*The equal propositions rule - A == B A - --------- - B -*) -fun equal_elim th1 th2 = - let - val Thm (der1, {maxidx = max1, shyps = shyps1, hyps = hyps1, - tpairs = tpairs1, prop = prop1, ...}) = th1 - and Thm (der2, {maxidx = max2, shyps = shyps2, hyps = hyps2, - tpairs = tpairs2, prop = prop2, ...}) = th2; - fun err msg = raise THM ("equal_elim: " ^ msg, 0, [th1, th2]); - in - (case prop1 of - Const ("Pure.eq", _) $ A $ B => - if prop2 aconv A then - Thm (deriv_rule2 (Proofterm.equal_elim A B) der1 der2, - {thy = merge_thys2 th1 th2, - tags = [], - maxidx = Int.max (max1, max2), - shyps = Sorts.union shyps1 shyps2, - hyps = union_hyps hyps1 hyps2, - 
tpairs = union_tpairs tpairs1 tpairs2, - prop = B}) - else err "not equal" - | _ => err "major premise") - end; - - - -(**** Derived rules ****) - -(*Smash unifies the list of term pairs leaving no flex-flex pairs. - Instantiates the theorem and deletes trivial tpairs. Resulting - sequence may contain multiple elements if the tpairs are not all - flex-flex.*) -fun flexflex_rule (th as Thm (der, {thy, maxidx, shyps, hyps, tpairs, prop, ...})) = - Unify.smash_unifiers thy tpairs (Envir.empty maxidx) - |> Seq.map (fn env => - if Envir.is_empty env then th - else - let - val tpairs' = tpairs |> map (pairself (Envir.norm_term env)) - (*remove trivial tpairs, of the form t==t*) - |> filter_out (op aconv); - val der' = deriv_rule1 (Proofterm.norm_proof' env) der; - val prop' = Envir.norm_term env prop; - val maxidx = maxidx_tpairs tpairs' (maxidx_of_term prop'); - val shyps = Envir.insert_sorts env shyps; - in - Thm (der', {thy = thy, tags = [], maxidx = maxidx, - shyps = shyps, hyps = hyps, tpairs = tpairs', prop = prop'}) - end); - - -(*Generalization of fixed variables - A - -------------------- - A[?'a/'a, ?x/x, ...] -*) - -fun generalize ([], []) _ th = th - | generalize (tfrees, frees) idx th = - let - val Thm (der, {thy, maxidx, shyps, hyps, tpairs, prop, ...}) = th; - val _ = idx <= maxidx andalso raise THM ("generalize: bad index", idx, [th]); - - val bad_type = - if null tfrees then K false - else Term.exists_subtype (fn TFree (a, _) => member (op =) tfrees a | _ => false); - fun bad_term (Free (x, T)) = bad_type T orelse member (op =) frees x - | bad_term (Var (_, T)) = bad_type T - | bad_term (Const (_, T)) = bad_type T - | bad_term (Abs (_, T, t)) = bad_type T orelse bad_term t - | bad_term (t $ u) = bad_term t orelse bad_term u - | bad_term (Bound _) = false; - val _ = exists bad_term hyps andalso - raise THM ("generalize: variable free in assumptions", 0, [th]); - - val gen = Term_Subst.generalize (tfrees, frees) idx; - val prop' = gen prop; - val tpairs' = map (pairself gen) tpairs; - val maxidx' = maxidx_tpairs tpairs' (maxidx_of_term prop'); - in - Thm (deriv_rule1 (Proofterm.generalize (tfrees, frees) idx) der, - {thy = thy, - tags = [], - maxidx = maxidx', - shyps = shyps, - hyps = hyps, - tpairs = tpairs', - prop = prop'}) - end; - - -(*Instantiation of schematic variables - A - -------------------- - A[t1/v1, ..., tn/vn] -*) - -local - -fun pretty_typing thy t T = Pretty.block - [Syntax.pretty_term_global thy t, Pretty.str " ::", Pretty.brk 1, Syntax.pretty_typ_global thy T]; - -fun add_inst (ct, cu) (thy, sorts) = - let - val Cterm {t = t, T = T, ...} = ct; - val Cterm {t = u, T = U, sorts = sorts_u, maxidx = maxidx_u, ...} = cu; - val thy' = Theory.merge (thy, merge_thys0 ct cu); - val sorts' = Sorts.union sorts_u sorts; - in - (case t of Var v => - if T = U then ((v, (u, maxidx_u)), (thy', sorts')) - else raise TYPE (Pretty.string_of (Pretty.block - [Pretty.str "instantiate: type conflict", - Pretty.fbrk, pretty_typing thy' t T, - Pretty.fbrk, pretty_typing thy' u U]), [T, U], [t, u]) - | _ => raise TYPE (Pretty.string_of (Pretty.block - [Pretty.str "instantiate: not a variable", - Pretty.fbrk, Syntax.pretty_term_global thy' t]), [], [t])) - end; - -fun add_instT (cT, cU) (thy, sorts) = - let - val Ctyp {T, thy = thy1, ...} = cT - and Ctyp {T = U, thy = thy2, sorts = sorts_U, maxidx = maxidx_U, ...} = cU; - val thy' = Theory.merge (thy, Theory.merge (thy1, thy2)); - val sorts' = Sorts.union sorts_U sorts; - in - (case T of TVar (v as (_, S)) => - if Sign.of_sort thy' (U, 
S) then ((v, (U, maxidx_U)), (thy', sorts')) - else raise TYPE ("Type not of sort " ^ Syntax.string_of_sort_global thy' S, [U], []) - | _ => raise TYPE (Pretty.string_of (Pretty.block - [Pretty.str "instantiate: not a type variable", - Pretty.fbrk, Syntax.pretty_typ_global thy' T]), [T], [])) - end; - -in - -(*Left-to-right replacements: ctpairs = [..., (vi, ti), ...]. - Instantiates distinct Vars by terms of same type. - Does NOT normalize the resulting theorem!*) -fun instantiate ([], []) th = th - | instantiate (instT, inst) th = - let - val Thm (der, {thy, hyps, shyps, tpairs, prop, ...}) = th; - val (inst', (instT', (thy', shyps'))) = - (thy, shyps) |> fold_map add_inst inst ||> fold_map add_instT instT; - val subst = Term_Subst.instantiate_maxidx (instT', inst'); - val (prop', maxidx1) = subst prop ~1; - val (tpairs', maxidx') = - fold_map (fn (t, u) => fn i => subst t i ||>> subst u) tpairs maxidx1; - in - Thm (deriv_rule1 - (fn d => Proofterm.instantiate (map (apsnd #1) instT', map (apsnd #1) inst') d) der, - {thy = thy', - tags = [], - maxidx = maxidx', - shyps = shyps', - hyps = hyps, - tpairs = tpairs', - prop = prop'}) - end - handle TYPE (msg, _, _) => raise THM (msg, 0, [th]); - -fun instantiate_cterm ([], []) ct = ct - | instantiate_cterm (instT, inst) ct = - let - val Cterm {thy, t, T, sorts, ...} = ct; - val (inst', (instT', (thy', sorts'))) = - (thy, sorts) |> fold_map add_inst inst ||> fold_map add_instT instT; - val subst = Term_Subst.instantiate_maxidx (instT', inst'); - val substT = Term_Subst.instantiateT_maxidx instT'; - val (t', maxidx1) = subst t ~1; - val (T', maxidx') = substT T maxidx1; - in Cterm {thy = thy', t = t', T = T', sorts = sorts', maxidx = maxidx'} end - handle TYPE (msg, _, _) => raise CTERM (msg, [ct]); - -end; - - -(*The trivial implication A ==> A, justified by assume and forall rules. 
- A can contain Vars, not so for assume!*) -fun trivial (Cterm {thy, t = A, T, maxidx, sorts}) = - if T <> propT then - raise THM ("trivial: the term must have type prop", 0, []) - else - Thm (deriv_rule0 (Proofterm.AbsP ("H", NONE, Proofterm.PBound 0)), - {thy = thy, - tags = [], - maxidx = maxidx, - shyps = sorts, - hyps = [], - tpairs = [], - prop = Logic.mk_implies (A, A)}); - -(*Axiom-scheme reflecting signature contents - T :: c - ------------------- - OFCLASS(T, c_class) -*) -fun of_class (cT, raw_c) = - let - val Ctyp {thy, T, ...} = cT; - val c = Sign.certify_class thy raw_c; - val Cterm {t = prop, maxidx, sorts, ...} = cterm_of thy (Logic.mk_of_class (T, c)); - in - if Sign.of_sort thy (T, [c]) then - Thm (deriv_rule0 (Proofterm.OfClass (T, c)), - {thy = thy, - tags = [], - maxidx = maxidx, - shyps = sorts, - hyps = [], - tpairs = [], - prop = prop}) - else raise THM ("of_class: type not of class " ^ Syntax.string_of_sort_global thy [c], 0, []) - end; - -(*Remove extra sorts that are witnessed by type signature information*) -fun strip_shyps (thm as Thm (_, {shyps = [], ...})) = thm - | strip_shyps (thm as Thm (der, {thy, tags, maxidx, shyps, hyps, tpairs, prop})) = - let - val algebra = Sign.classes_of thy; - - val present = (fold_terms o fold_types o fold_atyps_sorts) (insert (eq_fst op =)) thm []; - val extra = fold (Sorts.remove_sort o #2) present shyps; - val witnessed = Sign.witness_sorts thy present extra; - val extra' = fold (Sorts.remove_sort o #2) witnessed extra - |> Sorts.minimal_sorts algebra; - val shyps' = fold (Sorts.insert_sort o #2) present extra'; - in - Thm (deriv_rule_unconditional - (Proofterm.strip_shyps_proof algebra present witnessed extra') der, - {thy = thy, tags = tags, maxidx = maxidx, - shyps = shyps', hyps = hyps, tpairs = tpairs, prop = prop}) - end; - -(*Internalize sort constraints of type variables*) -fun unconstrainT (thm as Thm (der, args)) = - let - val Deriv {promises, body} = der; - val {thy, shyps, hyps, tpairs, prop, ...} = args; - - fun err msg = raise THM ("unconstrainT: " ^ msg, 0, [thm]); - val _ = null hyps orelse err "illegal hyps"; - val _ = null tpairs orelse err "unsolved flex-flex constraints"; - val tfrees = rev (Term.add_tfree_names prop []); - val _ = null tfrees orelse err ("illegal free type variables " ^ commas_quote tfrees); - - val ps = map (apsnd (Future.map fulfill_body)) promises; - val (pthm as (_, (_, prop', _)), proof) = - Proofterm.unconstrain_thm_proof thy shyps prop ps body; - val der' = make_deriv [] [] [pthm] proof; - in - Thm (der', - {thy = thy, - tags = [], - maxidx = maxidx_of_term prop', - shyps = [[]], (*potentially redundant*) - hyps = [], - tpairs = [], - prop = prop'}) - end; - -(* Replace all TFrees not fixed or in the hyps by new TVars *) -fun varifyT_global' fixed (Thm (der, {thy, maxidx, shyps, hyps, tpairs, prop, ...})) = - let - val tfrees = fold Term.add_tfrees hyps fixed; - val prop1 = attach_tpairs tpairs prop; - val (al, prop2) = Type.varify_global tfrees prop1; - val (ts, prop3) = Logic.strip_prems (length tpairs, [], prop2); - in - (al, Thm (deriv_rule1 (Proofterm.varify_proof prop tfrees) der, - {thy = thy, - tags = [], - maxidx = Int.max (0, maxidx), - shyps = shyps, - hyps = hyps, - tpairs = rev (map Logic.dest_equals ts), - prop = prop3})) - end; - -val varifyT_global = #2 o varifyT_global' []; - -(* Replace all TVars by TFrees that are often new *) -fun legacy_freezeT (Thm (der, {thy, shyps, hyps, tpairs, prop, ...})) = - let - val prop1 = attach_tpairs tpairs prop; - val prop2 = 
Type.legacy_freeze prop1; - val (ts, prop3) = Logic.strip_prems (length tpairs, [], prop2); - in - Thm (deriv_rule1 (Proofterm.legacy_freezeT prop1) der, - {thy = thy, - tags = [], - maxidx = maxidx_of_term prop2, - shyps = shyps, - hyps = hyps, - tpairs = rev (map Logic.dest_equals ts), - prop = prop3}) - end; - - -(*** Inference rules for tactics ***) - -(*Destruct proof state into constraints, other goals, goal(i), rest *) -fun dest_state (state as Thm (_, {prop,tpairs,...}), i) = - (case Logic.strip_prems(i, [], prop) of - (B::rBs, C) => (tpairs, rev rBs, B, C) - | _ => raise THM("dest_state", i, [state])) - handle TERM _ => raise THM("dest_state", i, [state]); - -(*Prepare orule for resolution by lifting it over the parameters and -assumptions of goal.*) -fun lift_rule goal orule = - let - val Cterm {t = gprop, T, maxidx = gmax, sorts, ...} = goal; - val inc = gmax + 1; - val lift_abs = Logic.lift_abs inc gprop; - val lift_all = Logic.lift_all inc gprop; - val Thm (der, {maxidx, shyps, hyps, tpairs, prop, ...}) = orule; - val (As, B) = Logic.strip_horn prop; - in - if T <> propT then raise THM ("lift_rule: the term must have type prop", 0, []) - else - Thm (deriv_rule1 (Proofterm.lift_proof gprop inc prop) der, - {thy = merge_thys1 goal orule, - tags = [], - maxidx = maxidx + inc, - shyps = Sorts.union shyps sorts, (*sic!*) - hyps = hyps, - tpairs = map (pairself lift_abs) tpairs, - prop = Logic.list_implies (map lift_all As, lift_all B)}) - end; - -fun incr_indexes i (thm as Thm (der, {thy, maxidx, shyps, hyps, tpairs, prop, ...})) = - if i < 0 then raise THM ("negative increment", 0, [thm]) - else if i = 0 then thm - else - Thm (deriv_rule1 (Proofterm.incr_indexes i) der, - {thy = thy, - tags = [], - maxidx = maxidx + i, - shyps = shyps, - hyps = hyps, - tpairs = map (pairself (Logic.incr_indexes ([], i))) tpairs, - prop = Logic.incr_indexes ([], i) prop}); - -(*Solve subgoal Bi of proof state B1...Bn/C by assumption. *) -fun assumption i state = - let - val Thm (der, {thy, maxidx, shyps, hyps, ...}) = state; - val (tpairs, Bs, Bi, C) = dest_state (state, i); - fun newth n (env, tpairs) = - Thm (deriv_rule1 - ((if Envir.is_empty env then I else (Proofterm.norm_proof' env)) o - Proofterm.assumption_proof Bs Bi n) der, - {tags = [], - maxidx = Envir.maxidx_of env, - shyps = Envir.insert_sorts env shyps, - hyps = hyps, - tpairs = - if Envir.is_empty env then tpairs - else map (pairself (Envir.norm_term env)) tpairs, - prop = - if Envir.is_empty env then (*avoid wasted normalizations*) - Logic.list_implies (Bs, C) - else (*normalize the new rule fully*) - Envir.norm_term env (Logic.list_implies (Bs, C)), - thy = thy}); - - val (close, asms, concl) = Logic.assum_problems (~1, Bi); - val concl' = close concl; - fun addprfs [] _ = Seq.empty - | addprfs (asm :: rest) n = Seq.make (fn () => Seq.pull - (Seq.mapp (newth n) - (if Term.could_unify (asm, concl) then - (Unify.unifiers (thy, Envir.empty maxidx, (close asm, concl') :: tpairs)) - else Seq.empty) - (addprfs rest (n + 1)))) - in addprfs asms 1 end; - -(*Solve subgoal Bi of proof state B1...Bn/C by assumption. 
- Checks if Bi's conclusion is alpha/eta-convertible to one of its assumptions*) -fun eq_assumption i state = - let - val Thm (der, {thy, maxidx, shyps, hyps, ...}) = state; - val (tpairs, Bs, Bi, C) = dest_state (state, i); - val (_, asms, concl) = Logic.assum_problems (~1, Bi); - in - (case find_index (fn asm => Envir.aeconv (asm, concl)) asms of - ~1 => raise THM ("eq_assumption", 0, [state]) - | n => - Thm (deriv_rule1 (Proofterm.assumption_proof Bs Bi (n + 1)) der, - {thy = thy, - tags = [], - maxidx = maxidx, - shyps = shyps, - hyps = hyps, - tpairs = tpairs, - prop = Logic.list_implies (Bs, C)})) - end; - - -(*For rotate_tac: fast rotation of assumptions of subgoal i*) -fun rotate_rule k i state = - let - val Thm (der, {thy, maxidx, shyps, hyps, ...}) = state; - val (tpairs, Bs, Bi, C) = dest_state (state, i); - val params = Term.strip_all_vars Bi; - val rest = Term.strip_all_body Bi; - val asms = Logic.strip_imp_prems rest - val concl = Logic.strip_imp_concl rest; - val n = length asms; - val m = if k < 0 then n + k else k; - val Bi' = - if 0 = m orelse m = n then Bi - else if 0 < m andalso m < n then - let val (ps, qs) = chop m asms - in Logic.list_all (params, Logic.list_implies (qs @ ps, concl)) end - else raise THM ("rotate_rule", k, [state]); - in - Thm (deriv_rule1 (Proofterm.rotate_proof Bs Bi m) der, - {thy = thy, - tags = [], - maxidx = maxidx, - shyps = shyps, - hyps = hyps, - tpairs = tpairs, - prop = Logic.list_implies (Bs @ [Bi'], C)}) - end; - - -(*Rotates a rule's premises to the left by k, leaving the first j premises - unchanged. Does nothing if k=0 or if k equals n-j, where n is the - number of premises. Useful with etac and underlies defer_tac*) -fun permute_prems j k rl = - let - val Thm (der, {thy, maxidx, shyps, hyps, tpairs, prop, ...}) = rl; - val prems = Logic.strip_imp_prems prop - and concl = Logic.strip_imp_concl prop; - val moved_prems = List.drop (prems, j) - and fixed_prems = List.take (prems, j) - handle General.Subscript => raise THM ("permute_prems: j", j, [rl]); - val n_j = length moved_prems; - val m = if k < 0 then n_j + k else k; - val prop' = - if 0 = m orelse m = n_j then prop - else if 0 < m andalso m < n_j then - let val (ps, qs) = chop m moved_prems - in Logic.list_implies (fixed_prems @ qs @ ps, concl) end - else raise THM ("permute_prems: k", k, [rl]); - in - Thm (deriv_rule1 (Proofterm.permute_prems_proof prems j m) der, - {thy = thy, - tags = [], - maxidx = maxidx, - shyps = shyps, - hyps = hyps, - tpairs = tpairs, - prop = prop'}) - end; - - -(** User renaming of parameters in a subgoal **) - -(*Calls error rather than raising an exception because it is intended - for top-level use -- exception handling would not make sense here. - The names in cs, if distinct, are used for the innermost parameters; - preceding parameters may be renamed to make all params distinct.*) -fun rename_params_rule (cs, i) state = - let - val Thm (der, {thy, tags, maxidx, shyps, hyps, ...}) = state; - val (tpairs, Bs, Bi, C) = dest_state (state, i); - val iparams = map #1 (Logic.strip_params Bi); - val short = length iparams - length cs; - val newnames = - if short < 0 then error "More names than abstractions!" - else Name.variant_list cs (take short iparams) @ cs; - val freenames = Term.fold_aterms (fn Free (x, _) => insert (op =) x | _ => I) Bi []; - val newBi = Logic.list_rename_params newnames Bi; - in - (case duplicates (op =) cs of - a :: _ => (warning ("Can't rename. 
Bound variables not distinct: " ^ a); state) - | [] => - (case inter (op =) cs freenames of - a :: _ => (warning ("Can't rename. Bound/Free variable clash: " ^ a); state) - | [] => - Thm (der, - {thy = thy, - tags = tags, - maxidx = maxidx, - shyps = shyps, - hyps = hyps, - tpairs = tpairs, - prop = Logic.list_implies (Bs @ [newBi], C)}))) - end; - - -(*** Preservation of bound variable names ***) - -fun rename_boundvars pat obj (thm as Thm (der, {thy, tags, maxidx, shyps, hyps, tpairs, prop})) = - (case Term.rename_abs pat obj prop of - NONE => thm - | SOME prop' => Thm (der, - {thy = thy, - tags = tags, - maxidx = maxidx, - hyps = hyps, - shyps = shyps, - tpairs = tpairs, - prop = prop'})); - - -(* strip_apply f B A strips off all assumptions/parameters from A - introduced by lifting over B, and applies f to remaining part of A*) -fun strip_apply f = - let fun strip (Const ("Pure.imp", _) $ _ $ B1) - (Const ("Pure.imp", _) $ A2 $ B2) = Logic.mk_implies (A2, strip B1 B2) - | strip ((c as Const ("Pure.all", _)) $ Abs (_, _, t1)) - ( Const ("Pure.all", _) $ Abs (a, T, t2)) = c $ Abs (a, T, strip t1 t2) - | strip _ A = f A - in strip end; - -fun strip_lifted (Const ("Pure.imp", _) $ _ $ B1) - (Const ("Pure.imp", _) $ _ $ B2) = strip_lifted B1 B2 - | strip_lifted (Const ("Pure.all", _) $ Abs (_, _, t1)) - (Const ("Pure.all", _) $ Abs (_, _, t2)) = strip_lifted t1 t2 - | strip_lifted _ A = A; - -(*Use the alist to rename all bound variables and some unknowns in a term - dpairs = current disagreement pairs; tpairs = permanent ones (flexflex); - Preserves unknowns in tpairs and on lhs of dpairs. *) -fun rename_bvs [] _ _ _ _ = K I - | rename_bvs al dpairs tpairs B As = - let - val add_var = fold_aterms (fn Var ((x, _), _) => insert (op =) x | _ => I); - val vids = [] - |> fold (add_var o fst) dpairs - |> fold (add_var o fst) tpairs - |> fold (add_var o snd) tpairs; - val vids' = fold (add_var o strip_lifted B) As []; - (*unknowns appearing elsewhere be preserved!*) - val al' = distinct ((op =) o pairself fst) - (filter_out (fn (x, y) => - not (member (op =) vids' x) orelse - member (op =) vids x orelse member (op =) vids y) al); - val unchanged = filter_out (AList.defined (op =) al') vids'; - fun del_clashing clash xs _ [] qs = - if clash then del_clashing false xs xs qs [] else qs - | del_clashing clash xs ys ((p as (x, y)) :: ps) qs = - if member (op =) ys y - then del_clashing true (x :: xs) (x :: ys) ps qs - else del_clashing clash xs (y :: ys) ps (p :: qs); - val al'' = del_clashing false unchanged unchanged al' []; - fun rename (t as Var ((x, i), T)) = - (case AList.lookup (op =) al'' x of - SOME y => Var ((y, i), T) - | NONE => t) - | rename (Abs (x, T, t)) = - Abs (the_default x (AList.lookup (op =) al x), T, rename t) - | rename (f $ t) = rename f $ rename t - | rename t = t; - fun strip_ren f Ai = f rename B Ai - in strip_ren end; - -(*Function to rename bounds/unknowns in the argument, lifted over B*) -fun rename_bvars dpairs = - rename_bvs (fold_rev Term.match_bvars dpairs []) dpairs; - - -(*** RESOLUTION ***) - -(** Lifting optimizations **) - -(*strip off pairs of assumptions/parameters in parallel -- they are - identical because of lifting*) -fun strip_assums2 (Const("Pure.imp", _) $ _ $ B1, - Const("Pure.imp", _) $ _ $ B2) = strip_assums2 (B1,B2) - | strip_assums2 (Const("Pure.all",_)$Abs(a,T,t1), - Const("Pure.all",_)$Abs(_,_,t2)) = - let val (B1,B2) = strip_assums2 (t1,t2) - in (Abs(a,T,B1), Abs(a,T,B2)) end - | strip_assums2 BB = BB; - - -(*Faster normalization: skip 
assumptions that were lifted over*) -fun norm_term_skip env 0 t = Envir.norm_term env t - | norm_term_skip env n (Const ("Pure.all", _) $ Abs (a, T, t)) = - let - val T' = Envir.subst_type (Envir.type_env env) T - (*Must instantiate types of parameters because they are flattened; - this could be a NEW parameter*) - in Logic.all_const T' $ Abs (a, T', norm_term_skip env n t) end - | norm_term_skip env n (Const ("Pure.imp", _) $ A $ B) = - Logic.mk_implies (A, norm_term_skip env (n - 1) B) - | norm_term_skip _ _ _ = error "norm_term_skip: too few assumptions??"; - - -(*unify types of schematic variables (non-lifted case)*) -fun unify_var_types thy (th1, th2) env = - let - fun unify_vars (T :: Us) = fold (fn U => Pattern.unify_types thy (T, U)) Us - | unify_vars _ = I; - val add_vars = - full_prop_of #> - fold_aterms (fn Var v => Vartab.insert_list (op =) v | _ => I); - val vars = Vartab.empty |> add_vars th1 |> add_vars th2; - in SOME (Vartab.fold (unify_vars o #2) vars env) end - handle Pattern.Unif => NONE; - -(*Composition of object rule r=(A1...Am/B) with proof state s=(B1...Bn/C) - Unifies B with Bi, replacing subgoal i (1 <= i <= n) - If match then forbid instantiations in proof state - If lifted then shorten the dpair using strip_assums2. - If eres_flg then simultaneously proves A1 by assumption. - nsubgoal is the number of new subgoals (written m above). - Curried so that resolution calls dest_state only once. -*) -local exception COMPOSE -in -fun bicompose_aux {flatten, match, incremented} (state, (stpairs, Bs, Bi, C), lifted) - (eres_flg, orule, nsubgoal) = - let val Thm (sder, {maxidx=smax, shyps=sshyps, hyps=shyps, ...}) = state - and Thm (rder, {maxidx=rmax, shyps=rshyps, hyps=rhyps, - tpairs=rtpairs, prop=rprop,...}) = orule - (*How many hyps to skip over during normalization*) - and nlift = Logic.count_prems (strip_all_body Bi) + (if eres_flg then ~1 else 0) - val thy = merge_thys2 state orule; - (** Add new theorem with prop = '[| Bs; As |] ==> C' to thq **) - fun addth A (As, oldAs, rder', n) ((env, tpairs), thq) = - let val normt = Envir.norm_term env; - (*perform minimal copying here by examining env*) - val (ntpairs, normp) = - if Envir.is_empty env then (tpairs, (Bs @ As, C)) - else - let val ntps = map (pairself normt) tpairs - in if Envir.above env smax then - (*no assignments in state; normalize the rule only*) - if lifted - then (ntps, (Bs @ map (norm_term_skip env nlift) As, C)) - else (ntps, (Bs @ map normt As, C)) - else if match then raise COMPOSE - else (*normalize the new rule fully*) - (ntps, (map normt (Bs @ As), normt C)) - end - val th = - Thm (deriv_rule2 - ((if Envir.is_empty env then I - else if Envir.above env smax then - (fn f => fn der => f (Proofterm.norm_proof' env der)) - else - curry op oo (Proofterm.norm_proof' env)) - (Proofterm.bicompose_proof flatten Bs oldAs As A n (nlift+1))) rder' sder, - {tags = [], - maxidx = Envir.maxidx_of env, - shyps = Envir.insert_sorts env (Sorts.union rshyps sshyps), - hyps = union_hyps rhyps shyps, - tpairs = ntpairs, - prop = Logic.list_implies normp, - thy = thy}) - in Seq.cons th thq end handle COMPOSE => thq; - val (rAs,B) = Logic.strip_prems(nsubgoal, [], rprop) - handle TERM _ => raise THM("bicompose: rule", 0, [orule,state]); - (*Modify assumptions, deleting n-th if n>0 for e-resolution*) - fun newAs(As0, n, dpairs, tpairs) = - let val (As1, rder') = - if not lifted then (As0, rder) - else - let val rename = rename_bvars dpairs tpairs B As0 - in (map (rename strip_apply) As0, - deriv_rule1 
(Proofterm.map_proof_terms (rename K) I) rder) - end; - in (map (if flatten then (Logic.flatten_params n) else I) As1, As1, rder', n) - handle TERM _ => - raise THM("bicompose: 1st premise", 0, [orule]) - end; - val BBi = if lifted then strip_assums2(B,Bi) else (B,Bi); - val dpairs = BBi :: (rtpairs@stpairs); - - (*elim-resolution: try each assumption in turn*) - fun eres _ [] = raise THM ("bicompose: no premises", 0, [orule, state]) - | eres env (A1 :: As) = - let - val A = SOME A1; - val (close, asms, concl) = Logic.assum_problems (nlift + 1, A1); - val concl' = close concl; - fun tryasms [] _ = Seq.empty - | tryasms (asm :: rest) n = - if Term.could_unify (asm, concl) then - let val asm' = close asm in - (case Seq.pull (Unify.unifiers (thy, env, (asm', concl') :: dpairs)) of - NONE => tryasms rest (n + 1) - | cell as SOME ((_, tpairs), _) => - Seq.it_right (addth A (newAs (As, n, [BBi, (concl', asm')], tpairs))) - (Seq.make (fn () => cell), - Seq.make (fn () => Seq.pull (tryasms rest (n + 1))))) - end - else tryasms rest (n + 1); - in tryasms asms 1 end; - - (*ordinary resolution*) - fun res env = - (case Seq.pull (Unify.unifiers (thy, env, dpairs)) of - NONE => Seq.empty - | cell as SOME ((_, tpairs), _) => - Seq.it_right (addth NONE (newAs (rev rAs, 0, [BBi], tpairs))) - (Seq.make (fn () => cell), Seq.empty)); - - val env0 = Envir.empty (Int.max (rmax, smax)); - in - (case if incremented then SOME env0 else unify_var_types thy (state, orule) env0 of - NONE => Seq.empty - | SOME env => if eres_flg then eres env (rev rAs) else res env) - end; -end; - -fun bicompose flags arg i state = - bicompose_aux flags (state, dest_state (state,i), false) arg; - -(*Quick test whether rule is resolvable with the subgoal with hyps Hs - and conclusion B. If eres_flg then checks 1st premise of rule also*) -fun could_bires (Hs, B, eres_flg, rule) = - let fun could_reshyp (A1::_) = exists (fn H => Term.could_unify (A1, H)) Hs - | could_reshyp [] = false; (*no premise -- illegal*) - in Term.could_unify(concl_of rule, B) andalso - (not eres_flg orelse could_reshyp (prems_of rule)) - end; - -(*Bi-resolution of a state with a list of (flag,rule) pairs. - Puts the rule above: rule/state. Renames vars in the rules. 
*) -fun biresolution match brules i state = - let val (stpairs, Bs, Bi, C) = dest_state(state,i); - val lift = lift_rule (cprem_of state i); - val B = Logic.strip_assums_concl Bi; - val Hs = Logic.strip_assums_hyp Bi; - val compose = - bicompose_aux {flatten = true, match = match, incremented = true} - (state, (stpairs, Bs, Bi, C), true); - fun res [] = Seq.empty - | res ((eres_flg, rule)::brules) = - if Config.get_global (theory_of_thm state) Pattern.unify_trace_failure orelse - could_bires (Hs, B, eres_flg, rule) - then Seq.make (*delay processing remainder till needed*) - (fn()=> SOME(compose (eres_flg, lift rule, nprems_of rule), - res brules)) - else res brules - in Seq.flat (res brules) end; - - - -(*** Oracles ***) - -(* oracle rule *) - -fun invoke_oracle thy1 name oracle arg = - let val Cterm {thy = thy2, t = prop, T, maxidx, sorts} = oracle arg in - if T <> propT then - raise THM ("Oracle's result must have type prop: " ^ name, 0, []) - else - let val (ora, prf) = Proofterm.oracle_proof name prop in - Thm (make_deriv [] [ora] [] prf, - {thy = Theory.merge (thy1, thy2), - tags = [], - maxidx = maxidx, - shyps = sorts, - hyps = [], - tpairs = [], - prop = prop}) - end - end; - -end; -end; -end; - - -(* authentic derivation names *) - -structure Oracles = Theory_Data -( - type T = unit Name_Space.table; - val empty : T = Name_Space.empty_table "oracle"; - val extend = I; - fun merge data : T = Name_Space.merge_tables data; -); - -fun extern_oracles ctxt = - map #1 (Name_Space.markup_table ctxt (Oracles.get (Proof_Context.theory_of ctxt))); - -fun add_oracle (b, oracle) thy = - let - val (name, tab') = Name_Space.define (Context.Theory thy) true (b, ()) (Oracles.get thy); - val thy' = Oracles.put tab' thy; - in ((name, invoke_oracle thy' name oracle), thy') end; - -end; - -structure Basic_Thm: BASIC_THM = Thm; -open Basic_Thm; diff --git a/core/Pure/type.ML b/core/Pure/type.ML deleted file mode 100644 index 0990fade..00000000 --- a/core/Pure/type.ML +++ /dev/null @@ -1,719 +0,0 @@ -(* Title: Pure/type.ML - Author: Tobias Nipkow, Lawrence C Paulson, and Markus Wenzel - -Type signatures and certified types, special treatment of type vars, -matching and unification of types, extend and merge type signatures. 
-*) - -signature TYPE = -sig - (*constraints*) - val mark_polymorphic: typ -> typ - val constraint: typ -> term -> term - val constraint_type: Proof.context -> typ -> typ - val strip_constraints: term -> term - val appl_error: Proof.context -> term -> typ -> term -> typ -> string - (*type signatures and certified types*) - datatype decl = - LogicalType of int | - Abbreviation of string list * typ * bool | - Nonterminal - type tsig - val eq_tsig: tsig * tsig -> bool - val rep_tsig: tsig -> - {classes: Name_Space.T * Sorts.algebra, - default: sort, - types: decl Name_Space.table, - log_types: string list} - val change_base: bool -> tsig -> tsig - val change_ignore: tsig -> tsig - val empty_tsig: tsig - val class_space: tsig -> Name_Space.T - val class_alias: Name_Space.naming -> binding -> string -> tsig -> tsig - val defaultS: tsig -> sort - val logical_types: tsig -> string list - val eq_sort: tsig -> sort * sort -> bool - val subsort: tsig -> sort * sort -> bool - val of_sort: tsig -> typ * sort -> bool - val inter_sort: tsig -> sort * sort -> sort - val cert_class: tsig -> class -> class - val cert_sort: tsig -> sort -> sort - val minimize_sort: tsig -> sort -> sort - val witness_sorts: tsig -> (typ * sort) list -> sort list -> (typ * sort) list - type mode - val mode_default: mode - val mode_syntax: mode - val mode_abbrev: mode - val get_mode: Proof.context -> mode - val set_mode: mode -> Proof.context -> Proof.context - val restore_mode: Proof.context -> Proof.context -> Proof.context - val type_space: tsig -> Name_Space.T - val type_alias: Name_Space.naming -> binding -> string -> tsig -> tsig - val is_logtype: tsig -> string -> bool - val check_decl: Context.generic -> tsig -> - xstring * Position.T -> (string * Position.report list) * decl - val the_decl: tsig -> string * Position.T -> decl - val cert_typ_mode: mode -> tsig -> typ -> typ - val cert_typ: tsig -> typ -> typ - val arity_number: tsig -> string -> int - val arity_sorts: Context.pretty -> tsig -> string -> sort -> sort list - - (*special treatment of type vars*) - val sort_of_atyp: typ -> sort - val strip_sorts: typ -> typ - val strip_sorts_dummy: typ -> typ - val no_tvars: typ -> typ - val varify_global: (string * sort) list -> term -> ((string * sort) * indexname) list * term - val legacy_freeze_thaw_type: typ -> typ * (typ -> typ) - val legacy_freeze_type: typ -> typ - val legacy_freeze_thaw: term -> term * (term -> term) - val legacy_freeze: term -> term - - (*matching and unification*) - exception TYPE_MATCH - type tyenv = (sort * typ) Vartab.table - val lookup: tyenv -> indexname * sort -> typ option - val devar: tyenv -> typ -> typ - val typ_match: tsig -> typ * typ -> tyenv -> tyenv - val typ_instance: tsig -> typ * typ -> bool - val raw_match: typ * typ -> tyenv -> tyenv - val raw_matches: typ list * typ list -> tyenv -> tyenv - val could_match: typ * typ -> bool - val could_matches: typ list * typ list -> bool - val raw_instance: typ * typ -> bool - exception TUNIFY - val unify: tsig -> typ * typ -> tyenv * int -> tyenv * int - val raw_unify: typ * typ -> tyenv -> tyenv - val raw_unifys: typ list * typ list -> tyenv -> tyenv - val could_unify: typ * typ -> bool - val could_unifys: typ list * typ list -> bool - val eq_type: tyenv -> typ * typ -> bool - - (*extend and merge type signatures*) - val add_class: Context.generic -> binding * class list -> tsig -> tsig - val hide_class: bool -> string -> tsig -> tsig - val set_defsort: sort -> tsig -> tsig - val add_type: Context.generic -> binding * int -> tsig -> tsig 
- val add_abbrev: Context.generic -> binding * string list * typ -> tsig -> tsig - val add_nonterminal: Context.generic -> binding -> tsig -> tsig - val hide_type: bool -> string -> tsig -> tsig - val add_arity: Context.pretty -> arity -> tsig -> tsig - val add_classrel: Context.pretty -> class * class -> tsig -> tsig - val merge_tsig: Context.pretty -> tsig * tsig -> tsig -end; - -structure Type: TYPE = -struct - -(** constraints **) - -(*indicate polymorphic Vars*) -fun mark_polymorphic T = Type ("_polymorphic_", [T]); - -fun constraint T t = - if T = dummyT then t - else Const ("_type_constraint_", T --> T) $ t; - -fun constraint_type ctxt T = - let fun err () = error ("Malformed internal type constraint: " ^ Syntax.string_of_typ ctxt T); - in (case T of Type ("fun", [A, B]) => if A = B then A else err () | _ => err ()) end; - -fun strip_constraints (Const ("_type_constraint_", _) $ t) = strip_constraints t - | strip_constraints (t $ u) = strip_constraints t $ strip_constraints u - | strip_constraints (Abs (x, T, t)) = Abs (x, T, strip_constraints t) - | strip_constraints a = a; - -fun appl_error ctxt (Const ("_type_constraint_", Type ("fun", [T, _]))) _ u U = - cat_lines - ["Failed to meet type constraint:", "", - Pretty.string_of (Pretty.block - [Pretty.str "Term:", Pretty.brk 2, Syntax.pretty_term ctxt u, - Pretty.str " ::", Pretty.brk 1, Syntax.pretty_typ ctxt U]), - Pretty.string_of (Pretty.block - [Pretty.str "Type:", Pretty.brk 2, Syntax.pretty_typ ctxt T])] - | appl_error ctxt t T u U = - cat_lines - ["Type error in application: " ^ - (case T of - Type ("fun", _) => "incompatible operand type" - | _ => "operator not of function type"), - "", - Pretty.string_of (Pretty.block - [Pretty.str "Operator:", Pretty.brk 2, Syntax.pretty_term ctxt t, - Pretty.str " ::", Pretty.brk 1, Syntax.pretty_typ ctxt T]), - Pretty.string_of (Pretty.block - [Pretty.str "Operand:", Pretty.brk 3, Syntax.pretty_term ctxt u, - Pretty.str " ::", Pretty.brk 1, Syntax.pretty_typ ctxt U])]; - - - -(** type signatures and certified types **) - -(* type declarations *) - -datatype decl = - LogicalType of int | - Abbreviation of string list * typ * bool | - Nonterminal; - - -(* type tsig *) - -datatype tsig = - TSig of { - classes: Name_Space.T * Sorts.algebra, (*order-sorted algebra of type classes*) - default: sort, (*default sort on input*) - types: decl Name_Space.table, (*declared types*) - log_types: string list}; (*logical types sorted by number of arguments*) - -fun eq_tsig - (TSig {classes = classes1, default = default1, types = types1, log_types = _}, - TSig {classes = classes2, default = default2, types = types2, log_types = _}) = - pointer_eq (classes1, classes2) andalso - default1 = default2 andalso - pointer_eq (types1, types2); - -fun rep_tsig (TSig comps) = comps; - -fun make_tsig (classes, default, types, log_types) = - TSig {classes = classes, default = default, types = types, log_types = log_types}; - -fun change_base begin (TSig {classes, default, types, log_types}) = - make_tsig (classes, default, Name_Space.change_base begin types, log_types); - -fun change_ignore (TSig {classes, default, types, log_types}) = - make_tsig (classes, default, Name_Space.change_ignore types, log_types); - -fun build_tsig (classes, default, types) = - let - val log_types = - Name_Space.fold_table (fn (c, LogicalType n) => cons (c, n) | _ => I) types [] - |> Library.sort (int_ord o pairself snd) |> map fst; - in make_tsig (classes, default, types, log_types) end; - -fun map_tsig f (TSig {classes, default, types, 
log_types = _}) = - build_tsig (f (classes, default, types)); - -val empty_tsig = - build_tsig ((Name_Space.empty Markup.classN, Sorts.empty_algebra), [], - Name_Space.empty_table Markup.type_nameN); - - -(* classes and sorts *) - -val class_space = #1 o #classes o rep_tsig; - -fun class_alias naming binding name = map_tsig (fn ((space, classes), default, types) => - ((Name_Space.alias naming binding name space, classes), default, types)); - -fun defaultS (TSig {default, ...}) = default; -fun logical_types (TSig {log_types, ...}) = log_types; - -fun eq_sort (TSig {classes, ...}) = Sorts.sort_eq (#2 classes); -fun subsort (TSig {classes, ...}) = Sorts.sort_le (#2 classes); -fun of_sort (TSig {classes, ...}) = Sorts.of_sort (#2 classes); -fun inter_sort (TSig {classes, ...}) = Sorts.inter_sort (#2 classes); - -fun cert_class (TSig {classes = (_, algebra), ...}) c = - if can (Graph.get_entry (Sorts.classes_of algebra)) c then c - else raise TYPE ("Undeclared class: " ^ quote c, [], []); - -val cert_sort = map o cert_class; - -fun minimize_sort (TSig {classes, ...}) = Sorts.minimize_sort (#2 classes); - -fun witness_sorts (TSig {classes, log_types, ...}) = - Sorts.witness_sorts (#2 classes) log_types; - - -(* certification mode *) - -datatype mode = Mode of {normalize: bool, logical: bool}; - -val mode_default = Mode {normalize = true, logical = true}; -val mode_syntax = Mode {normalize = true, logical = false}; -val mode_abbrev = Mode {normalize = false, logical = false}; - -structure Mode = Proof_Data -( - type T = mode; - fun init _ = mode_default; -); - -val get_mode = Mode.get; -fun set_mode mode = Mode.map (K mode); -fun restore_mode ctxt = set_mode (get_mode ctxt); - - -(* types *) - -val type_space = Name_Space.space_of_table o #types o rep_tsig; - -fun type_alias naming binding name = map_tsig (fn (classes, default, types) => - (classes, default, (Name_Space.alias_table naming binding name types))); - -val is_logtype = member (op =) o logical_types; - - -fun undecl_type c = "Undeclared type constructor: " ^ quote c; - -fun lookup_type (TSig {types, ...}) = Option.map #2 o Name_Space.lookup_key types; - -fun check_decl context (TSig {types, ...}) (c, pos) = - Name_Space.check_reports context types (c, [pos]); - -fun the_decl tsig (c, pos) = - (case lookup_type tsig c of - NONE => error (undecl_type c ^ Position.here pos) - | SOME decl => decl); - - -(* certified types *) - -fun bad_nargs t = "Bad number of arguments for type constructor: " ^ quote t; - -local - -fun inst_typ env (Type (c, Ts)) = Type (c, map (inst_typ env) Ts) - | inst_typ env (T as TFree (x, _)) = the_default T (AList.lookup (op =) env x) - | inst_typ _ T = T; - -in - -fun cert_typ_mode (Mode {normalize, logical}) tsig ty = - let - fun err msg = raise TYPE (msg, [ty], []); - - val check_logical = - if logical then fn c => err ("Illegal occurrence of syntactic type: " ^ quote c) - else fn _ => (); - - fun cert (T as Type (c, Ts)) = - let - val Ts' = map cert Ts; - fun nargs n = if length Ts <> n then err (bad_nargs c) else (); - in - (case the_decl tsig (c, Position.none) of - LogicalType n => (nargs n; Type (c, Ts')) - | Abbreviation (vs, U, syn) => - (nargs (length vs); - if syn then check_logical c else (); - if normalize then inst_typ (vs ~~ Ts') U - else Type (c, Ts')) - | Nonterminal => (nargs 0; check_logical c; T)) - end - | cert (TFree (x, S)) = TFree (x, cert_sort tsig S) - | cert (TVar (xi as (_, i), S)) = - if i < 0 then - err ("Malformed type variable: " ^ quote (Term.string_of_vname xi)) - else TVar (xi, 
cert_sort tsig S); - - val ty' = cert ty; - in if ty = ty' then ty else ty' end; (*avoid copying of already normal type*) - -val cert_typ = cert_typ_mode mode_default; - -end; - - -(* type arities *) - -fun arity_number tsig a = - (case lookup_type tsig a of - SOME (LogicalType n) => n - | _ => error (undecl_type a)); - -fun arity_sorts _ tsig a [] = replicate (arity_number tsig a) [] - | arity_sorts pp (TSig {classes, ...}) a S = - Sorts.mg_domain (#2 classes) a S - handle Sorts.CLASS_ERROR err => error (Sorts.class_error pp err); - - - -(** special treatment of type vars **) - -(* sort_of_atyp *) - -fun sort_of_atyp (TFree (_, S)) = S - | sort_of_atyp (TVar (_, S)) = S - | sort_of_atyp T = raise TYPE ("sort_of_atyp", [T], []); - - -(* strip_sorts *) - -val strip_sorts = map_atyps - (fn TFree (x, _) => TFree (x, []) - | TVar (xi, _) => TVar (xi, [])); - -val strip_sorts_dummy = map_atyps - (fn TFree (x, _) => TFree (x, dummyS) - | TVar (xi, _) => TVar (xi, dummyS)); - - -(* no_tvars *) - -fun no_tvars T = - (case Term.add_tvarsT T [] of [] => T - | vs => raise TYPE ("Illegal schematic type variable(s): " ^ - commas_quote (map (Term.string_of_vname o #1) (rev vs)), [T], [])); - - -(* varify_global *) - -fun varify_global fixed t = - let - val fs = Term.fold_types (Term.fold_atyps - (fn TFree v => if member (op =) fixed v then I else insert (op =) v | _ => I)) t []; - val used = Name.context - |> fold_types (fold_atyps (fn TVar ((a, _), _) => Name.declare a | _ => I)) t; - val fmap = fs ~~ map (rpair 0) (#1 (fold_map Name.variant (map fst fs) used)); - fun thaw (f as (_, S)) = - (case AList.lookup (op =) fmap f of - NONE => TFree f - | SOME xi => TVar (xi, S)); - in (fmap, map_types (map_type_tfree thaw) t) end; - - -(* freeze_thaw: freeze TVars in a term; return the "thaw" inverse *) - -local - -fun new_name ix (pairs, used) = - let val v = singleton (Name.variant_list used) (string_of_indexname ix) - in ((ix, v) :: pairs, v :: used) end; - -fun freeze_one alist (ix, sort) = - TFree (the (AList.lookup (op =) alist ix), sort) - handle Option.Option => - raise TYPE ("Failure during freezing of ?" 
^ string_of_indexname ix, [], []); - -fun thaw_one alist (a, sort) = TVar (the (AList.lookup (op =) alist a), sort) - handle Option.Option => TFree (a, sort); - -in - -fun legacy_freeze_thaw_type T = - let - val used = Term.add_tfree_namesT T []; - val (alist, _) = fold_rev new_name (map #1 (Term.add_tvarsT T [])) ([], used); - in (map_type_tvar (freeze_one alist) T, map_type_tfree (thaw_one (map swap alist))) end; - -val legacy_freeze_type = #1 o legacy_freeze_thaw_type; - -fun legacy_freeze_thaw t = - let - val used = Term.add_tfree_names t []; - val (alist, _) = fold_rev new_name (map #1 (Term.add_tvars t [])) ([], used); - in - (case alist of - [] => (t, fn x => x) (*nothing to do!*) - | _ => (map_types (map_type_tvar (freeze_one alist)) t, - map_types (map_type_tfree (thaw_one (map swap alist))))) - end; - -val legacy_freeze = #1 o legacy_freeze_thaw; - -end; - - - -(** matching and unification of types **) - -type tyenv = (sort * typ) Vartab.table; - -fun tvar_clash ixn S S' = - raise TYPE ("Type variable has two distinct sorts", [TVar (ixn, S), TVar (ixn, S')], []); - -fun lookup tye (ixn, S) = - (case Vartab.lookup tye ixn of - NONE => NONE - | SOME (S', T) => if S = S' then SOME T else tvar_clash ixn S S'); - - -(* matching *) - -exception TYPE_MATCH; - -fun typ_match tsig = - let - fun match (V as TVar (v, S), T) subs = - (case lookup subs (v, S) of - NONE => - if V = T then subs - else if of_sort tsig (T, S) then Vartab.update_new (v, (S, T)) subs - else raise TYPE_MATCH - | SOME U => if U = T then subs else raise TYPE_MATCH) - | match (Type (a, Ts), Type (b, Us)) subs = - if a <> b then raise TYPE_MATCH - else matches (Ts, Us) subs - | match (TFree x, TFree y) subs = - if x = y then subs else raise TYPE_MATCH - | match _ _ = raise TYPE_MATCH - and matches (T :: Ts, U :: Us) subs = matches (Ts, Us) (match (T, U) subs) - | matches _ subs = subs; - in match end; - -fun typ_instance tsig (T, U) = - (typ_match tsig (U, T) Vartab.empty; true) handle TYPE_MATCH => false; - -(*purely structural matching*) -fun raw_match (V as TVar (v, S), T) subs = - (case lookup subs (v, S) of - NONE => if V = T then subs else Vartab.update_new (v, (S, T)) subs - | SOME U => if U = T then subs else raise TYPE_MATCH) - | raw_match (Type (a, Ts), Type (b, Us)) subs = - if a <> b then raise TYPE_MATCH - else raw_matches (Ts, Us) subs - | raw_match (TFree x, TFree y) subs = - if x = y then subs else raise TYPE_MATCH - | raw_match _ _ = raise TYPE_MATCH -and raw_matches (T :: Ts, U :: Us) subs = raw_matches (Ts, Us) (raw_match (T, U) subs) - | raw_matches ([], []) subs = subs - | raw_matches _ _ = raise TYPE_MATCH; - -(*fast matching filter*) -fun could_match (Type (a, Ts), Type (b, Us)) = a = b andalso could_matches (Ts, Us) - | could_match (TFree (a, _), TFree (b, _)) = a = b - | could_match (TVar _, _) = true - | could_match _ = false -and could_matches (T :: Ts, U :: Us) = could_match (T, U) andalso could_matches (Ts, Us) - | could_matches ([], []) = true - | could_matches _ = false; - -fun raw_instance (T, U) = - if could_match (U, T) then - (raw_match (U, T) Vartab.empty; true) handle TYPE_MATCH => false - else false; - - -(* unification *) - -exception TUNIFY; - -(*occurs check*) -fun occurs v tye = - let - fun occ (Type (_, Ts)) = exists occ Ts - | occ (TFree _) = false - | occ (TVar (w, S)) = - Term.eq_ix (v, w) orelse - (case lookup tye (w, S) of - NONE => false - | SOME U => occ U); - in occ end; - -(*chase variable assignments; if devar returns a type var then it must be unassigned*) -fun devar 
tye (T as TVar v) = - (case lookup tye v of - SOME U => devar tye U - | NONE => T) - | devar _ T = T; - -(*order-sorted unification*) -fun unify (TSig {classes = (_, classes), ...}) TU (tyenv, maxidx) = - let - val tyvar_count = Unsynchronized.ref maxidx; - fun gen_tyvar S = TVar ((Name.aT, Unsynchronized.inc tyvar_count), S); - - fun mg_domain a S = Sorts.mg_domain classes a S - handle Sorts.CLASS_ERROR _ => raise TUNIFY; - - fun meet (_, []) tye = tye - | meet (TVar (xi, S'), S) tye = - if Sorts.sort_le classes (S', S) then tye - else Vartab.update_new - (xi, (S', gen_tyvar (Sorts.inter_sort classes (S', S)))) tye - | meet (TFree (_, S'), S) tye = - if Sorts.sort_le classes (S', S) then tye - else raise TUNIFY - | meet (Type (a, Ts), S) tye = meets (Ts, mg_domain a S) tye - and meets (T :: Ts, S :: Ss) tye = meets (Ts, Ss) (meet (devar tye T, S) tye) - | meets _ tye = tye; - - fun unif (ty1, ty2) tye = - (case (devar tye ty1, devar tye ty2) of - (T as TVar (v, S1), U as TVar (w, S2)) => - if Term.eq_ix (v, w) then - if S1 = S2 then tye else tvar_clash v S1 S2 - else if Sorts.sort_le classes (S1, S2) then - Vartab.update_new (w, (S2, T)) tye - else if Sorts.sort_le classes (S2, S1) then - Vartab.update_new (v, (S1, U)) tye - else - let val S = gen_tyvar (Sorts.inter_sort classes (S1, S2)) in - Vartab.update_new (v, (S1, S)) (Vartab.update_new (w, (S2, S)) tye) - end - | (TVar (v, S), T) => - if occurs v tye T then raise TUNIFY - else meet (T, S) (Vartab.update_new (v, (S, T)) tye) - | (T, TVar (v, S)) => - if occurs v tye T then raise TUNIFY - else meet (T, S) (Vartab.update_new (v, (S, T)) tye) - | (Type (a, Ts), Type (b, Us)) => - if a <> b then raise TUNIFY - else unifs (Ts, Us) tye - | (T, U) => if T = U then tye else raise TUNIFY) - and unifs (T :: Ts, U :: Us) tye = unifs (Ts, Us) (unif (T, U) tye) - | unifs _ tye = tye; - in (unif TU tyenv, ! 
tyvar_count) end; - -(*purely structural unification*) -fun raw_unify (ty1, ty2) tye = - (case (devar tye ty1, devar tye ty2) of - (T as TVar (v, S1), TVar (w, S2)) => - if Term.eq_ix (v, w) then - if S1 = S2 then tye else tvar_clash v S1 S2 - else Vartab.update_new (w, (S2, T)) tye - | (TVar (v, S), T) => - if occurs v tye T then raise TUNIFY - else Vartab.update_new (v, (S, T)) tye - | (T, TVar (v, S)) => - if occurs v tye T then raise TUNIFY - else Vartab.update_new (v, (S, T)) tye - | (Type (a, Ts), Type (b, Us)) => - if a <> b then raise TUNIFY - else raw_unifys (Ts, Us) tye - | (T, U) => if T = U then tye else raise TUNIFY) -and raw_unifys (T :: Ts, U :: Us) tye = raw_unifys (Ts, Us) (raw_unify (T, U) tye) - | raw_unifys ([], []) tye = tye - | raw_unifys _ _ = raise TUNIFY; - -(*fast unification filter*) -fun could_unify (Type (a, Ts), Type (b, Us)) = a = b andalso could_unifys (Ts, Us) - | could_unify (TFree (a, _), TFree (b, _)) = a = b - | could_unify (TVar _, _) = true - | could_unify (_, TVar _) = true - | could_unify _ = false -and could_unifys (T :: Ts, U :: Us) = could_unify (T, U) andalso could_unifys (Ts, Us) - | could_unifys ([], []) = true - | could_unifys _ = false; - - -(*equality with respect to a type environment*) -fun equal_type tye (T, T') = - (case (devar tye T, devar tye T') of - (Type (s, Ts), Type (s', Ts')) => - s = s' andalso ListPair.all (equal_type tye) (Ts, Ts') - | (U, U') => U = U'); - -fun eq_type tye = - if Vartab.is_empty tye then op = else equal_type tye; - - - -(** extend and merge type signatures **) - -(* classes *) - -fun add_class context (c, cs) tsig = - tsig |> map_tsig (fn ((space, classes), default, types) => - let - val cs' = map (cert_class tsig) cs - handle TYPE (msg, _, _) => error msg; - val _ = Binding.check c; - val (c', space') = space |> Name_Space.declare context true c; - val classes' = classes |> Sorts.add_class (Context.pretty_generic context) (c', cs'); - in ((space', classes'), default, types) end); - -fun hide_class fully c = map_tsig (fn ((space, classes), default, types) => - ((Name_Space.hide fully c space, classes), default, types)); - - -(* arities *) - -fun add_arity pp (t, Ss, S) tsig = tsig |> map_tsig (fn ((space, classes), default, types) => - let - val _ = - (case lookup_type tsig t of - SOME (LogicalType n) => if length Ss <> n then error (bad_nargs t) else () - | SOME _ => error ("Logical type constructor expected: " ^ quote t) - | NONE => error (undecl_type t)); - val (Ss', S') = (map (cert_sort tsig) Ss, cert_sort tsig S) - handle TYPE (msg, _, _) => error msg; - val classes' = classes |> Sorts.add_arities pp ((t, map (fn c' => (c', Ss')) S')); - in ((space, classes'), default, types) end); - - -(* classrel *) - -fun add_classrel pp rel tsig = - tsig |> map_tsig (fn ((space, classes), default, types) => - let - val rel' = pairself (cert_class tsig) rel - handle TYPE (msg, _, _) => error msg; - val classes' = classes |> Sorts.add_classrel pp rel'; - in ((space, classes'), default, types) end); - - -(* default sort *) - -fun set_defsort S tsig = tsig |> map_tsig (fn (classes, _, types) => - (classes, cert_sort tsig S handle TYPE (msg, _, _) => error msg, types)); - - -(* types *) - -local - -fun new_decl context (c, decl) types = - (Binding.check c; #2 (Name_Space.define context true (c, decl) types)); - -fun map_types f = map_tsig (fn (classes, default, types) => - let - val types' = f types; - val _ = - Name_Space.intern (Name_Space.space_of_table types') "dummy" = "dummy" orelse - error "Illegal declaration of 
dummy type"; - in (classes, default, types') end); - -fun syntactic tsig (Type (c, Ts)) = - (case lookup_type tsig c of SOME Nonterminal => true | _ => false) - orelse exists (syntactic tsig) Ts - | syntactic _ _ = false; - -in - -fun add_type context (c, n) = - if n < 0 then error ("Bad type constructor declaration " ^ Binding.print c) - else map_types (new_decl context (c, LogicalType n)); - -fun add_abbrev context (a, vs, rhs) tsig = tsig |> map_types (fn types => - let - fun err msg = - cat_error msg ("The error(s) above occurred in type abbreviation " ^ Binding.print a); - val rhs' = strip_sorts (no_tvars (cert_typ_mode mode_syntax tsig rhs)) - handle TYPE (msg, _, _) => err msg; - val _ = - (case duplicates (op =) vs of - [] => [] - | dups => err ("Duplicate variables on lhs: " ^ commas_quote dups)); - val _ = - (case subtract (op =) vs (map #1 (Term.add_tfreesT rhs' [])) of - [] => [] - | extras => err ("Extra variables on rhs: " ^ commas_quote extras)); - in types |> new_decl context (a, Abbreviation (vs, rhs', syntactic tsig rhs')) end); - -fun add_nonterminal context = map_types o new_decl context o rpair Nonterminal; - -end; - -fun hide_type fully c = map_tsig (fn (classes, default, types) => - (classes, default, Name_Space.hide_table fully c types)); - - -(* merge type signatures *) - -fun merge_tsig pp (tsig1, tsig2) = - let - val (TSig {classes = (space1, classes1), default = default1, types = types1, - log_types = _}) = tsig1; - val (TSig {classes = (space2, classes2), default = default2, types = types2, - log_types = _}) = tsig2; - - val space' = Name_Space.merge (space1, space2); - val classes' = Sorts.merge_algebra pp (classes1, classes2); - val default' = Sorts.inter_sort classes' (default1, default2); - val types' = Name_Space.merge_tables (types1, types2); - in build_tsig ((space', classes'), default', types') end; - -end; diff --git a/core/Pure/type_infer.ML b/core/Pure/type_infer.ML deleted file mode 100644 index f4cabc23..00000000 --- a/core/Pure/type_infer.ML +++ /dev/null @@ -1,115 +0,0 @@ -(* Title: Pure/type_infer.ML - Author: Stefan Berghofer and Markus Wenzel, TU Muenchen - -Basic representation of type-inference problems. -*) - -signature TYPE_INFER = -sig - val is_param: indexname -> bool - val is_paramT: typ -> bool - val param_maxidx: term -> int -> int - val param_maxidx_of: term list -> int - val param: int -> string * sort -> typ - val mk_param: int -> sort -> typ - val anyT: sort -> typ - val paramify_vars: typ -> typ - val deref: typ Vartab.table -> typ -> typ - val finish: Proof.context -> typ Vartab.table -> typ list * term list -> typ list * term list - val fixate: Proof.context -> term list -> term list -end; - -structure Type_Infer: TYPE_INFER = -struct - -(** type parameters and constraints **) - -(* type inference parameters -- may get instantiated *) - -fun is_param (x, _: int) = String.isPrefix "?" x; - -fun is_paramT (TVar (xi, _)) = is_param xi - | is_paramT _ = false; - -val param_maxidx = - (Term.fold_types o Term.fold_atyps) - (fn (TVar (xi as (_, i), _)) => if is_param xi then Integer.max i else I | _ => I); - -fun param_maxidx_of ts = fold param_maxidx ts ~1; - -fun param i (x, S) = TVar (("?" 
^ x, i), S); - -fun mk_param i S = TVar (("?'a", i), S); - - -(* pre-stage parameters *) - -fun anyT S = TFree ("'_dummy_", S); - -val paramify_vars = - Same.commit - (Term_Subst.map_atypsT_same - (fn TVar ((x, i), S) => (param i (x, S)) | _ => raise Same.SAME)); - - - -(** results **) - -(* dereferenced views *) - -fun deref tye (T as TVar (xi, _)) = - (case Vartab.lookup tye xi of - NONE => T - | SOME U => deref tye U) - | deref _ T = T; - -fun add_parms tye T = - (case deref tye T of - Type (_, Ts) => fold (add_parms tye) Ts - | TVar (xi, _) => if is_param xi then insert (op =) xi else I - | _ => I); - -fun add_names tye T = - (case deref tye T of - Type (_, Ts) => fold (add_names tye) Ts - | TFree (x, _) => Name.declare x - | TVar ((x, i), _) => if is_param (x, i) then I else Name.declare x); - - -(* finish -- standardize remaining parameters *) - -fun finish ctxt tye (Ts, ts) = - let - val used = - (fold o fold_types) (add_names tye) ts (fold (add_names tye) Ts (Variable.names_of ctxt)); - val parms = rev ((fold o fold_types) (add_parms tye) ts (fold (add_parms tye) Ts [])); - val names = Name.invent used ("?" ^ Name.aT) (length parms); - val tab = Vartab.make (parms ~~ names); - - fun finish_typ T = - (case deref tye T of - Type (a, Ts) => Type (a, map finish_typ Ts) - | U as TFree _ => U - | U as TVar (xi, S) => - (case Vartab.lookup tab xi of - NONE => U - | SOME a => TVar ((a, 0), S))); - in (map finish_typ Ts, map (Type.strip_constraints o Term.map_types finish_typ) ts) end; - - -(* fixate -- introduce fresh type variables *) - -fun fixate ctxt ts = - let - fun subst_param (xi, S) (inst, used) = - if is_param xi then - let - val [a] = Name.invent used Name.aT 1; - val used' = Name.declare a used; - in (((xi, S), TFree (a, S)) :: inst, used') end - else (inst, used); - val used = (fold o fold_types) Term.declare_typ_names ts (Variable.names_of ctxt); - val (inst, _) = fold_rev subst_param (fold Term.add_tvars ts []) ([], used); - in (map o map_types) (Term_Subst.instantiateT inst) ts end; - -end; diff --git a/core/Pure/type_infer_context.ML b/core/Pure/type_infer_context.ML deleted file mode 100644 index b3b1d2f8..00000000 --- a/core/Pure/type_infer_context.ML +++ /dev/null @@ -1,308 +0,0 @@ -(* Title: Pure/type_infer_context.ML - Author: Stefan Berghofer and Markus Wenzel, TU Muenchen - -Type-inference preparation and standard type inference. -*) - -signature TYPE_INFER_CONTEXT = -sig - val const_sorts: bool Config.T - val const_type: Proof.context -> string -> typ option - val prepare_positions: Proof.context -> term list -> term list * (Position.T * typ) list - val prepare: Proof.context -> term list -> int * term list - val infer_types: Proof.context -> term list -> term list -end; - -structure Type_Infer_Context: TYPE_INFER_CONTEXT = -struct - -(** prepare types/terms: create inference parameters **) - -(* constraints *) - -val const_sorts = - Config.bool (Config.declare ("const_sorts", @{here}) (K (Config.Bool true))); - -fun const_type ctxt = - try ((not (Config.get ctxt const_sorts) ? 
Type.strip_sorts) o - Consts.the_constraint (Proof_Context.consts_of ctxt)); - -fun var_type ctxt = the_default dummyT o Proof_Context.def_type ctxt; - - -(* prepare_typ *) - -fun prepare_typ typ params_idx = - let - val (params', idx) = fold_atyps - (fn TVar (xi, S) => - (fn ps_idx as (ps, idx) => - if Type_Infer.is_param xi andalso not (Vartab.defined ps xi) - then (Vartab.update (xi, Type_Infer.mk_param idx S) ps, idx + 1) else ps_idx) - | _ => I) typ params_idx; - - fun prepare (T as Type (a, Ts)) idx = - if T = dummyT then (Type_Infer.mk_param idx [], idx + 1) - else - let val (Ts', idx') = fold_map prepare Ts idx - in (Type (a, Ts'), idx') end - | prepare (T as TVar (xi, _)) idx = - (case Vartab.lookup params' xi of - NONE => T - | SOME p => p, idx) - | prepare (TFree ("'_dummy_", S)) idx = (Type_Infer.mk_param idx S, idx + 1) - | prepare (T as TFree _) idx = (T, idx); - - val (typ', idx') = prepare typ idx; - in (typ', (params', idx')) end; - - -(* prepare_term *) - -fun prepare_term ctxt tm (vparams, params, idx) = - let - fun add_vparm xi (ps_idx as (ps, idx)) = - if not (Vartab.defined ps xi) then - (Vartab.update (xi, Type_Infer.mk_param idx []) ps, idx + 1) - else ps_idx; - - val (vparams', idx') = fold_aterms - (fn Var (_, Type ("_polymorphic_", _)) => I - | Var (xi, _) => add_vparm xi - | Free (x, _) => add_vparm (x, ~1) - | _ => I) - tm (vparams, idx); - fun var_param xi = the (Vartab.lookup vparams' xi); - - fun polyT_of T idx = - apsnd snd (prepare_typ (Type_Infer.paramify_vars T) (Vartab.empty, idx)); - - fun constraint T t ps = - if T = dummyT then (t, ps) - else - let val (T', ps') = prepare_typ T ps - in (Type.constraint T' t, ps') end; - - fun prepare (Const ("_type_constraint_", T) $ t) ps_idx = - let - val A = Type.constraint_type ctxt T; - val (A', ps_idx') = prepare_typ A ps_idx; - val (t', ps_idx'') = prepare t ps_idx'; - in (Const ("_type_constraint_", A' --> A') $ t', ps_idx'') end - | prepare (Const (c, T)) (ps, idx) = - (case const_type ctxt c of - SOME U => - let val (U', idx') = polyT_of U idx - in constraint T (Const (c, U')) (ps, idx') end - | NONE => error ("Undeclared constant: " ^ quote c)) - | prepare (Var (xi, Type ("_polymorphic_", [T]))) (ps, idx) = - let val (T', idx') = polyT_of T idx - in (Var (xi, T'), (ps, idx')) end - | prepare (Var (xi, T)) ps_idx = constraint T (Var (xi, var_param xi)) ps_idx - | prepare (Free (x, T)) ps_idx = constraint T (Free (x, var_param (x, ~1))) ps_idx - | prepare (Bound i) ps_idx = (Bound i, ps_idx) - | prepare (Abs (x, T, t)) ps_idx = - let - val (T', ps_idx') = prepare_typ T ps_idx; - val (t', ps_idx'') = prepare t ps_idx'; - in (Abs (x, T', t'), ps_idx'') end - | prepare (t $ u) ps_idx = - let - val (t', ps_idx') = prepare t ps_idx; - val (u', ps_idx'') = prepare u ps_idx'; - in (t' $ u', ps_idx'') end; - - val (tm', (params', idx'')) = prepare tm (params, idx'); - in (tm', (vparams', params', idx'')) end; - - -(* prepare_positions *) - -fun prepare_positions ctxt tms = - let - fun prepareT (Type (a, Ts)) ps_idx = - let val (Ts', ps_idx') = fold_map prepareT Ts ps_idx - in (Type (a, Ts'), ps_idx') end - | prepareT T (ps, idx) = - (case Term_Position.decode_positionT T of - SOME pos => - let val U = Type_Infer.mk_param idx [] - in (U, ((pos, U) :: ps, idx + 1)) end - | NONE => (T, (ps, idx))); - - fun prepare (Const ("_type_constraint_", T)) ps_idx = - let - val A = Type.constraint_type ctxt T; - val (A', ps_idx') = prepareT A ps_idx; - in (Const ("_type_constraint_", A' --> A'), ps_idx') end - | prepare (Const 
(c, T)) ps_idx = - let val (T', ps_idx') = prepareT T ps_idx - in (Const (c, T'), ps_idx') end - | prepare (Free (x, T)) ps_idx = - let val (T', ps_idx') = prepareT T ps_idx - in (Free (x, T'), ps_idx') end - | prepare (Var (xi, T)) ps_idx = - let val (T', ps_idx') = prepareT T ps_idx - in (Var (xi, T'), ps_idx') end - | prepare (t as Bound _) ps_idx = (t, ps_idx) - | prepare (Abs (x, T, t)) ps_idx = - let - val (T', ps_idx') = prepareT T ps_idx; - val (t', ps_idx'') = prepare t ps_idx'; - in (Abs (x, T', t'), ps_idx'') end - | prepare (t $ u) ps_idx = - let - val (t', ps_idx') = prepare t ps_idx; - val (u', ps_idx'') = prepare u ps_idx'; - in (t' $ u', ps_idx'') end; - - val idx = Type_Infer.param_maxidx_of tms + 1; - val (tms', (ps, _)) = fold_map prepare tms ([], idx); - in (tms', ps) end; - - - -(** order-sorted unification of types **) - -exception NO_UNIFIER of string * typ Vartab.table; - -fun unify ctxt = - let - val thy = Proof_Context.theory_of ctxt; - val arity_sorts = Type.arity_sorts (Context.pretty ctxt) (Sign.tsig_of thy); - - - (* adjust sorts of parameters *) - - fun not_of_sort x S' S = - "Variable " ^ x ^ "::" ^ Syntax.string_of_sort ctxt S' ^ " not of sort " ^ - Syntax.string_of_sort ctxt S; - - fun meet (_, []) tye_idx = tye_idx - | meet (Type (a, Ts), S) (tye_idx as (tye, _)) = - meets (Ts, arity_sorts a S handle ERROR msg => raise NO_UNIFIER (msg, tye)) tye_idx - | meet (TFree (x, S'), S) (tye_idx as (tye, _)) = - if Sign.subsort thy (S', S) then tye_idx - else raise NO_UNIFIER (not_of_sort x S' S, tye) - | meet (TVar (xi, S'), S) (tye_idx as (tye, idx)) = - if Sign.subsort thy (S', S) then tye_idx - else if Type_Infer.is_param xi then - (Vartab.update_new - (xi, Type_Infer.mk_param idx (Sign.inter_sort thy (S', S))) tye, idx + 1) - else raise NO_UNIFIER (not_of_sort (Term.string_of_vname xi) S' S, tye) - and meets (T :: Ts, S :: Ss) (tye_idx as (tye, _)) = - meets (Ts, Ss) (meet (Type_Infer.deref tye T, S) tye_idx) - | meets _ tye_idx = tye_idx; - - - (* occurs check and assignment *) - - fun occurs_check tye xi (TVar (xi', _)) = - if xi = xi' then raise NO_UNIFIER ("Occurs check!", tye) - else - (case Vartab.lookup tye xi' of - NONE => () - | SOME T => occurs_check tye xi T) - | occurs_check tye xi (Type (_, Ts)) = List.app (occurs_check tye xi) Ts - | occurs_check _ _ _ = (); - - fun assign xi (T as TVar (xi', _)) S env = - if xi = xi' then env - else env |> meet (T, S) |>> Vartab.update_new (xi, T) - | assign xi T S (env as (tye, _)) = - (occurs_check tye xi T; env |> meet (T, S) |>> Vartab.update_new (xi, T)); - - - (* unification *) - - fun show_tycon (a, Ts) = - quote (Syntax.string_of_typ ctxt (Type (a, replicate (length Ts) dummyT))); - - fun unif (T1, T2) (env as (tye, _)) = - (case pairself (`Type_Infer.is_paramT o Type_Infer.deref tye) (T1, T2) of - ((true, TVar (xi, S)), (_, T)) => assign xi T S env - | ((_, T), (true, TVar (xi, S))) => assign xi T S env - | ((_, Type (a, Ts)), (_, Type (b, Us))) => - if a <> b then - raise NO_UNIFIER - ("Clash of types " ^ show_tycon (a, Ts) ^ " and " ^ show_tycon (b, Us), tye) - else fold unif (Ts ~~ Us) env - | ((_, T), (_, U)) => if T = U then env else raise NO_UNIFIER ("", tye)); - - in unif end; - - - -(** simple type inference **) - -(* infer *) - -fun infer ctxt = - let - (* errors *) - - fun prep_output tye bs ts Ts = - let - val (Ts_bTs', ts') = Type_Infer.finish ctxt tye (Ts @ map snd bs, ts); - val (Ts', Ts'') = chop (length Ts) Ts_bTs'; - fun prep t = - let val xs = rev (Term.variant_frees t (rev (map fst bs 
~~ Ts''))) - in Term.subst_bounds (map Syntax_Trans.mark_bound_abs xs, t) end; - in (map prep ts', Ts') end; - - fun err_loose i = error ("Loose bound variable: B." ^ string_of_int i); - - fun unif_failed msg = - "Type unification failed" ^ (if msg = "" then "" else ": " ^ msg) ^ "\n\n"; - - fun err_appl msg tye bs t T u U = - let val ([t', u'], [T', U']) = prep_output tye bs [t, u] [T, U] - in error (unif_failed msg ^ Type.appl_error ctxt t' T' u' U' ^ "\n") end; - - - (* main *) - - fun inf _ (Const (_, T)) tye_idx = (T, tye_idx) - | inf _ (Free (_, T)) tye_idx = (T, tye_idx) - | inf _ (Var (_, T)) tye_idx = (T, tye_idx) - | inf bs (Bound i) tye_idx = - (snd (nth bs i handle General.Subscript => err_loose i), tye_idx) - | inf bs (Abs (x, T, t)) tye_idx = - let val (U, tye_idx') = inf ((x, T) :: bs) t tye_idx - in (T --> U, tye_idx') end - | inf bs (t $ u) tye_idx = - let - val (T, tye_idx') = inf bs t tye_idx; - val (U, (tye, idx)) = inf bs u tye_idx'; - val V = Type_Infer.mk_param idx []; - val tye_idx'' = unify ctxt (U --> V, T) (tye, idx + 1) - handle NO_UNIFIER (msg, tye') => err_appl msg tye' bs t T u U; - in (V, tye_idx'') end; - - in inf [] end; - - -(* main interfaces *) - -fun prepare ctxt raw_ts = - let - val constrain_vars = Term.map_aterms - (fn Free (x, T) => Type.constraint T (Free (x, var_type ctxt (x, ~1))) - | Var (xi, T) => Type.constraint T (Var (xi, var_type ctxt xi)) - | t => t); - - val ts = burrow_types (Syntax.check_typs ctxt) raw_ts; - val idx = Type_Infer.param_maxidx_of ts + 1; - val (ts', (_, _, idx')) = - fold_map (prepare_term ctxt o constrain_vars) ts - (Vartab.empty, Vartab.empty, idx); - in (idx', ts') end; - -fun infer_types ctxt raw_ts = - let - val (idx, ts) = prepare ctxt raw_ts; - val (tye, _) = fold (snd oo infer ctxt) ts (Vartab.empty, idx); - val (_, ts') = Type_Infer.finish ctxt tye ([], ts); - in ts' end; - -end; diff --git a/core/Pure/unify.ML b/core/Pure/unify.ML deleted file mode 100644 index 9aaebbd8..00000000 --- a/core/Pure/unify.ML +++ /dev/null @@ -1,729 +0,0 @@ -(* Title: Pure/unify.ML - Author: Lawrence C Paulson, Cambridge University Computer Laboratory - Copyright Cambridge University 1992 - -Higher-Order Unification. - -Types as well as terms are unified. The outermost functions assume -the terms to be unified already have the same type. In resolution, -this is assured because both have type "prop". 
-*) - -signature UNIFY = -sig - val trace_bound_raw: Config.raw - val trace_bound: int Config.T - val search_bound_raw: Config.raw - val search_bound: int Config.T - val trace_simp_raw: Config.raw - val trace_simp: bool Config.T - val trace_types_raw: Config.raw - val trace_types: bool Config.T - val hounifiers: theory * Envir.env * ((term * term) list) -> - (Envir.env * (term * term) list) Seq.seq - val unifiers: theory * Envir.env * ((term * term) list) -> - (Envir.env * (term * term) list) Seq.seq - val smash_unifiers: theory -> (term * term) list -> Envir.env -> Envir.env Seq.seq - val matchers: theory -> (term * term) list -> Envir.env Seq.seq - val matches_list: theory -> term list -> term list -> bool -end - -structure Unify : UNIFY = -struct - -(*Unification options*) - -(*tracing starts above this depth, 0 for full*) -val trace_bound_raw = Config.declare_global ("unify_trace_bound", @{here}) (K (Config.Int 50)); -val trace_bound = Config.int trace_bound_raw; - -(*unification quits above this depth*) -val search_bound_raw = Config.declare_global ("unify_search_bound", @{here}) (K (Config.Int 60)); -val search_bound = Config.int search_bound_raw; - -(*print dpairs before calling SIMPL*) -val trace_simp_raw = Config.declare_global ("unify_trace_simp", @{here}) (K (Config.Bool false)); -val trace_simp = Config.bool trace_simp_raw; - -(*announce potential incompleteness of type unification*) -val trace_types_raw = Config.declare_global ("unify_trace_types", @{here}) (K (Config.Bool false)); -val trace_types = Config.bool trace_types_raw; - - -type binderlist = (string * typ) list; - -type dpair = binderlist * term * term; - -fun fastype env (Ts, t) = Envir.fastype env (map snd Ts) t; - - -(* eta normal form *) - -fun eta_norm env = - let - val tyenv = Envir.type_env env; - fun etif (Type ("fun", [T, U]), t) = - Abs ("", T, etif (U, incr_boundvars 1 t $ Bound 0)) - | etif (TVar v, t) = - (case Type.lookup tyenv v of - NONE => t - | SOME T => etif (T, t)) - | etif (_, t) = t; - fun eta_nm (rbinder, Abs (a, T, body)) = - Abs (a, T, eta_nm ((a, T) :: rbinder, body)) - | eta_nm (rbinder, t) = etif (fastype env (rbinder, t), t); - in eta_nm end; - - -(*OCCURS CHECK - Does the uvar occur in the term t? - two forms of search, for whether there is a rigid path to the current term. - "seen" is list of variables passed thru, is a memo variable for sharing. - This version searches for nonrigid occurrence, returns true if found. - Since terms may contain variables with same name and different types, - the occurs check must ignore the types of variables. This avoids - that ?x::?'a is unified with f(?x::T), which may lead to a cyclic - substitution when ?'a is instantiated with T later. *) -fun occurs_terms (seen: indexname list Unsynchronized.ref, - env: Envir.env, v: indexname, ts: term list): bool = - let - fun occurs [] = false - | occurs (t :: ts) = occur t orelse occurs ts - and occur (Const _) = false - | occur (Bound _) = false - | occur (Free _) = false - | occur (Var (w, T)) = - if member (op =) (!seen) w then false - else if Term.eq_ix (v, w) then true - (*no need to lookup: v has no assignment*) - else - (seen := w :: !seen; - case Envir.lookup env (w, T) of - NONE => false - | SOME t => occur t) - | occur (Abs (_, _, body)) = occur body - | occur (f $ t) = occur t orelse occur f; - in occurs ts end; - - -(* f a1 ... 
an ----> f using the assignments*) -fun head_of_in env t = - (case t of - f $ _ => head_of_in env f - | Var vT => - (case Envir.lookup env vT of - SOME u => head_of_in env u - | NONE => t) - | _ => t); - - -datatype occ = NoOcc | Nonrigid | Rigid; - -(* Rigid occur check -Returns Rigid if it finds a rigid occurrence of the variable, - Nonrigid if it finds a nonrigid path to the variable. - NoOcc otherwise. - Continues searching for a rigid occurrence even if it finds a nonrigid one. - -Condition for detecting non-unifable terms: [ section 5.3 of Huet (1975) ] - a rigid path to the variable, appearing with no arguments. -Here completeness is sacrificed in order to reduce danger of divergence: - reject ALL rigid paths to the variable. -Could check for rigid paths to bound variables that are out of scope. -Not necessary because the assignment test looks at variable's ENTIRE rbinder. - -Treatment of head(arg1,...,argn): -If head is a variable then no rigid path, switch to nonrigid search -for arg1,...,argn. -If head is an abstraction then possibly no rigid path (head could be a - constant function) so again use nonrigid search. Happens only if - term is not in normal form. - -Warning: finds a rigid occurrence of ?f in ?f(t). - Should NOT be called in this case: there is a flex-flex unifier -*) -fun rigid_occurs_term (seen: indexname list Unsynchronized.ref, env, v: indexname, t) = - let - fun nonrigid t = - if occurs_terms (seen, env, v, [t]) then Nonrigid - else NoOcc - fun occurs [] = NoOcc - | occurs (t :: ts) = - (case occur t of - Rigid => Rigid - | oc => (case occurs ts of NoOcc => oc | oc2 => oc2)) - and occomb (f $ t) = - (case occur t of - Rigid => Rigid - | oc => (case occomb f of NoOcc => oc | oc2 => oc2)) - | occomb t = occur t - and occur (Const _) = NoOcc - | occur (Bound _) = NoOcc - | occur (Free _) = NoOcc - | occur (Var (w, T)) = - if member (op =) (!seen) w then NoOcc - else if Term.eq_ix (v, w) then Rigid - else - (seen := w :: !seen; - case Envir.lookup env (w, T) of - NONE => NoOcc - | SOME t => occur t) - | occur (Abs (_, _, body)) = occur body - | occur (t as f $ _) = (*switch to nonrigid search?*) - (case head_of_in env f of - Var (w,_) => (*w is not assigned*) - if Term.eq_ix (v, w) then Rigid - else nonrigid t - | Abs _ => nonrigid t (*not in normal form*) - | _ => occomb t) - in occur t end; - - -exception CANTUNIFY; (*Signals non-unifiability. Does not signal errors!*) -exception ASSIGN; (*Raised if not an assignment*) - - -fun unify_types thy TU env = - Pattern.unify_types thy TU env handle Pattern.Unif => raise CANTUNIFY; - -fun test_unify_types thy (T, U) env = - let - val str_of = Syntax.string_of_typ_global thy; - fun warn () = - if Context_Position.is_visible_global thy then - tracing ("Potential loss of completeness: " ^ str_of U ^ " = " ^ str_of T) - else (); - val env' = unify_types thy (T, U) env; - in if is_TVar T orelse is_TVar U then warn () else (); env' end; - -(*Is the term eta-convertible to a single variable with the given rbinder? - Examples: ?a ?f(B.0) ?g(B.1,B.0) - Result is var a for use in SIMPL. *) -fun get_eta_var ([], _, Var vT) = vT - | get_eta_var (_::rbinder, n, f $ Bound i) = - if n = i then get_eta_var (rbinder, n + 1, f) - else raise ASSIGN - | get_eta_var _ = raise ASSIGN; - - -(*Solve v=u by assignment -- "fixedpoint" to Huet -- if v not in u. - If v occurs rigidly then nonunifiable. - If v occurs nonrigidly then must use full algorithm. 
*) -fun assignment thy (rbinder, t, u) env = - let val vT as (v,T) = get_eta_var (rbinder, 0, t) in - (case rigid_occurs_term (Unsynchronized.ref [], env, v, u) of - NoOcc => - let val env = unify_types thy (Envir.body_type env T, fastype env (rbinder, u)) env - in Envir.update (vT, Logic.rlist_abs (rbinder, u)) env end - | Nonrigid => raise ASSIGN - | Rigid => raise CANTUNIFY) - end; - - -(*Extends an rbinder with a new disagreement pair, if both are abstractions. - Tries to unify types of the bound variables! - Checks that binders have same length, since terms should be eta-normal; - if not, raises TERM, probably indicating type mismatch. - Uses variable a (unless the null string) to preserve user's naming.*) -fun new_dpair thy (rbinder, Abs (a, T, body1), Abs (b, U, body2)) env = - let - val env' = unify_types thy (T, U) env; - val c = if a = "" then b else a; - in new_dpair thy ((c,T) :: rbinder, body1, body2) env' end - | new_dpair _ (_, Abs _, _) _ = raise TERM ("new_dpair", []) - | new_dpair _ (_, _, Abs _) _ = raise TERM ("new_dpair", []) - | new_dpair _ (rbinder, t1, t2) env = ((rbinder, t1, t2), env); - - -fun head_norm_dpair thy (env, (rbinder, t, u)) : dpair * Envir.env = - new_dpair thy (rbinder, - eta_norm env (rbinder, Envir.head_norm env t), - eta_norm env (rbinder, Envir.head_norm env u)) env; - - - -(*flexflex: the flex-flex pairs, flexrigid: the flex-rigid pairs - Does not perform assignments for flex-flex pairs: - may create nonrigid paths, which prevent other assignments. - Does not even identify Vars in dpairs such as ?a =?= ?b; an attempt to - do so caused numerous problems with no compensating advantage. -*) -fun SIMPL0 thy dp0 (env,flexflex,flexrigid) : Envir.env * dpair list * dpair list = - let - val (dp as (rbinder, t, u), env) = head_norm_dpair thy (env, dp0); - fun SIMRANDS (f $ t, g $ u, env) = - SIMPL0 thy (rbinder, t, u) (SIMRANDS (f, g, env)) - | SIMRANDS (t as _$_, _, _) = - raise TERM ("SIMPL: operands mismatch", [t, u]) - | SIMRANDS (t, u as _ $ _, _) = - raise TERM ("SIMPL: operands mismatch", [t, u]) - | SIMRANDS (_, _, env) = (env, flexflex, flexrigid); - in - (case (head_of t, head_of u) of - (Var (_, T), Var (_, U)) => - let - val T' = Envir.body_type env T and U' = Envir.body_type env U; - val env = unify_types thy (T', U') env; - in (env, dp :: flexflex, flexrigid) end - | (Var _, _) => - ((assignment thy (rbinder,t,u) env, flexflex, flexrigid) - handle ASSIGN => (env, flexflex, dp :: flexrigid)) - | (_, Var _) => - ((assignment thy (rbinder, u, t) env, flexflex, flexrigid) - handle ASSIGN => (env, flexflex, (rbinder, u, t) :: flexrigid)) - | (Const (a, T), Const (b, U)) => - if a = b then SIMRANDS (t, u, unify_types thy (T, U) env) - else raise CANTUNIFY - | (Bound i, Bound j) => - if i = j then SIMRANDS (t, u, env) else raise CANTUNIFY - | (Free (a, T), Free (b, U)) => - if a = b then SIMRANDS (t, u, unify_types thy (T, U) env) - else raise CANTUNIFY - | _ => raise CANTUNIFY) - end; - - -(* changed(env,t) checks whether the head of t is a variable assigned in env*) -fun changed env (f $ _) = changed env f - | changed env (Var v) = (case Envir.lookup env v of NONE => false | _ => true) - | changed _ _ = false; - - -(*Recursion needed if any of the 'head variables' have been updated - Clever would be to re-do just the affected dpairs*) -fun SIMPL thy (env,dpairs) : Envir.env * dpair list * dpair list = - let - val all as (env', flexflex, flexrigid) = fold_rev (SIMPL0 thy) dpairs (env, [], []); - val dps = flexrigid @ flexflex; - in - if exists 
(fn (_, t, u) => changed env' t orelse changed env' u) dps - then SIMPL thy (env', dps) else all - end; - - -(*Makes the terms E1,...,Em, where Ts = [T...Tm]. - Each Ei is ?Gi(B.(n-1),...,B.0), and has type Ti - The B.j are bound vars of binder. - The terms are not made in eta-normal-form, SIMPL does that later. - If done here, eta-expansion must be recursive in the arguments! *) -fun make_args _ (_, env, []) = (env, []) (*frequent case*) - | make_args name (binder: typ list, env, Ts) : Envir.env * term list = - let - fun funtype T = binder ---> T; - val (env', vars) = Envir.genvars name (env, map funtype Ts); - in (env', map (fn var => Logic.combound (var, 0, length binder)) vars) end; - - -(*Abstraction over a list of types*) -fun types_abs ([], u) = u - | types_abs (T :: Ts, u) = Abs ("", T, types_abs (Ts, u)); - -(*Abstraction over the binder of a type*) -fun type_abs (env, T, t) = types_abs (Envir.binder_types env T, t); - - -(*MATCH taking "big steps". - Copies u into the Var v, using projection on targs or imitation. - A projection is allowed unless SIMPL raises an exception. - Allocates new variables in projection on a higher-order argument, - or if u is a variable (flex-flex dpair). - Returns long sequence of every way of copying u, for backtracking - For example, projection in ?b'(?a) may be wrong if other dpairs constrain ?a. - The order for trying projections is crucial in ?b'(?a) - NB "vname" is only used in the call to make_args!! *) -fun matchcopy thy vname = - let - fun mc (rbinder, targs, u, ed as (env, dpairs)) : (term * (Envir.env * dpair list)) Seq.seq = - let - val trace_types = Config.get_global thy trace_types; - (*Produce copies of uarg and cons them in front of uargs*) - fun copycons uarg (uargs, (env, dpairs)) = - Seq.map (fn (uarg', ed') => (uarg' :: uargs, ed')) - (mc (rbinder, targs,eta_norm env (rbinder, Envir.head_norm env uarg), - (env, dpairs))); - (*Produce sequence of all possible ways of copying the arg list*) - fun copyargs [] = Seq.cons ([], ed) Seq.empty - | copyargs (uarg :: uargs) = Seq.maps (copycons uarg) (copyargs uargs); - val (uhead, uargs) = strip_comb u; - val base = Envir.body_type env (fastype env (rbinder, uhead)); - fun joinargs (uargs', ed') = (list_comb (uhead, uargs'), ed'); - (*attempt projection on argument with given typ*) - val Ts = map (curry (fastype env) rbinder) targs; - fun projenv (head, (Us, bary), targ, tail) = - let - val env = - if trace_types then test_unify_types thy (base, bary) env - else unify_types thy (base, bary) env - in - Seq.make (fn () => - let - val (env', args) = make_args vname (Ts, env, Us); - (*higher-order projection: plug in targs for bound vars*) - fun plugin arg = list_comb (head_of arg, targs); - val dp = (rbinder, list_comb (targ, map plugin args), u); - val (env2, frigid, fflex) = SIMPL thy (env', dp :: dpairs); - (*may raise exception CANTUNIFY*) - in - SOME ((list_comb (head, args), (env2, frigid @ fflex)), tail) - end handle CANTUNIFY => Seq.pull tail) - end handle CANTUNIFY => tail; - (*make a list of projections*) - fun make_projs (T::Ts, targ::targs) = - (Bound(length Ts), T, targ) :: make_projs (Ts,targs) - | make_projs ([],[]) = [] - | make_projs _ = raise TERM ("make_projs", u::targs); - (*try projections and imitation*) - fun matchfun ((bvar,T,targ)::projs) = - (projenv(bvar, Envir.strip_type env T, targ, matchfun projs)) - | matchfun [] = (*imitation last of all*) - (case uhead of - Const _ => Seq.map joinargs (copyargs uargs) - | Free _ => Seq.map joinargs (copyargs uargs) - | _ => 
Seq.empty) (*if Var, would be a loop!*) - in - (case uhead of - Abs (a, T, body) => - Seq.map (fn (body', ed') => (Abs (a, T, body'), ed')) - (mc ((a, T) :: rbinder, (map (incr_boundvars 1) targs) @ [Bound 0], body, ed)) - | Var (w, _) => - (*a flex-flex dpair: make variable for t*) - let - val (env', newhd) = Envir.genvar (#1 w) (env, Ts ---> base); - val tabs = Logic.combound (newhd, 0, length Ts); - val tsub = list_comb (newhd, targs); - in Seq.single (tabs, (env', (rbinder, tsub, u) :: dpairs)) end - | _ => matchfun (rev (make_projs (Ts, targs)))) - end; - in mc end; - - -(*Call matchcopy to produce assignments to the variable in the dpair*) -fun MATCH thy (env, (rbinder, t, u), dpairs) : (Envir.env * dpair list) Seq.seq = - let - val (Var (vT as (v, T)), targs) = strip_comb t; - val Ts = Envir.binder_types env T; - fun new_dset (u', (env', dpairs')) = - (*if v was updated to s, must unify s with u' *) - (case Envir.lookup env' vT of - NONE => (Envir.update (vT, types_abs (Ts, u')) env', dpairs') - | SOME s => (env', ([], s, types_abs (Ts, u')) :: dpairs')); - in - Seq.map new_dset (matchcopy thy (#1 v) (rbinder, targs, u, (env, dpairs))) - end; - - - -(**** Flex-flex processing ****) - -(*At end of unification, do flex-flex assignments like ?a -> ?f(?b) - Attempts to update t with u, raising ASSIGN if impossible*) -fun ff_assign thy (env, rbinder, t, u) : Envir.env = - let val vT as (v, T) = get_eta_var (rbinder, 0, t) in - if occurs_terms (Unsynchronized.ref [], env, v, [u]) then raise ASSIGN - else - let val env = unify_types thy (Envir.body_type env T, fastype env (rbinder, u)) env - in Envir.vupdate (vT, Logic.rlist_abs (rbinder, u)) env end - end; - - -(*If an argument contains a banned Bound, then it should be deleted. - But if the only path is flexible, this is difficult; the code gives up! - In %x y.?a(x) =?= %x y.?b(?c(y)) should we instantiate ?b or ?c *) -exception CHANGE_FAIL; (*flexible occurrence of banned variable, or other reason to quit*) - - -(*Flex argument: a term, its type, and the index that refers to it.*) -type flarg = {t: term, T: typ, j: int}; - -(*Form the arguments into records for deletion/sorting.*) -fun flexargs ([], [], []) = [] : flarg list - | flexargs (j :: js, t :: ts, T :: Ts) = {j = j, t = t, T = T} :: flexargs (js, ts, Ts) - | flexargs _ = raise CHANGE_FAIL; -(*We give up if we see a variable of function type not applied to a full list of - arguments (remember, this code assumes that terms are fully eta-expanded). This situation - can occur if a type variable is instantiated with a function type. 
-*) - -(*Check whether the 'banned' bound var indices occur rigidly in t*) -fun rigid_bound (lev, banned) t = - let val (head,args) = strip_comb t in - (case head of - Bound i => - member (op =) banned (i - lev) orelse exists (rigid_bound (lev, banned)) args - | Var _ => false (*no rigid occurrences here!*) - | Abs (_, _, u) => - rigid_bound (lev + 1, banned) u orelse - exists (rigid_bound (lev, banned)) args - | _ => exists (rigid_bound (lev, banned)) args) - end; - -(*Squash down indices at level >=lev to delete the banned from a term.*) -fun change_bnos banned = - let - fun change lev (Bound i) = - if i < lev then Bound i - else if member (op =) banned (i - lev) then - raise CHANGE_FAIL (**flexible occurrence: give up**) - else Bound (i - length (filter (fn j => j < i - lev) banned)) - | change lev (Abs (a, T, t)) = Abs (a, T, change(lev + 1) t) - | change lev (t $ u) = change lev t $ change lev u - | change lev t = t; - in change 0 end; - -(*Change indices, delete the argument if it contains a banned Bound*) -fun change_arg banned {j, t, T} args : flarg list = - if rigid_bound (0, banned) t then args (*delete argument!*) - else {j = j, t = change_bnos banned t, T = T} :: args; - - -(*Sort the arguments to create assignments if possible: - create eta-terms like ?g B.1 B.0*) -local - fun less_arg ({t = Bound i1, ...}, {t = Bound i2, ...}) = (i2 < i1) - | less_arg (_: flarg, _: flarg) = false; - - fun ins_arg x [] = [x] - | ins_arg x (y :: ys) = - if less_arg (y, x) then y :: ins_arg x ys else x :: y :: ys; -in - fun sort_args [] = [] - | sort_args (x :: xs) = ins_arg x (sort_args xs); -end; - -(*Test whether the new term would be eta-equivalent to a variable -- - if so then there is no point in creating a new variable*) -fun decreasing n ([]: flarg list) = (n = 0) - | decreasing n ({j, ...} :: args) = j = n - 1 andalso decreasing (n - 1) args; - -(*Delete banned indices in the term, simplifying it. - Force an assignment, if possible, by sorting the arguments. - Update its head; squash indices in arguments. *) -fun clean_term banned (env,t) = - let - val (Var (v, T), ts) = strip_comb t; - val (Ts, U) = Envir.strip_type env T - and js = length ts - 1 downto 0; - val args = sort_args (fold_rev (change_arg banned) (flexargs (js, ts, Ts)) []) - val ts' = map #t args; - in - if decreasing (length Ts) args then (env, (list_comb (Var (v, T), ts'))) - else - let - val (env', v') = Envir.genvar (#1 v) (env, map #T args ---> U); - val body = list_comb (v', map (Bound o #j) args); - val env2 = Envir.vupdate ((v, T), types_abs (Ts, body)) env'; - (*the vupdate affects ts' if they contain v*) - in (env2, Envir.norm_term env2 (list_comb (v', ts'))) end - end; - - -(*Add tpair if not trivial or already there. - Should check for swapped pairs??*) -fun add_tpair (rbinder, (t0, u0), tpairs) : (term * term) list = - if t0 aconv u0 then tpairs - else - let - val t = Logic.rlist_abs (rbinder, t0) - and u = Logic.rlist_abs (rbinder, u0); - fun same (t', u') = (t aconv t') andalso (u aconv u') - in if exists same tpairs then tpairs else (t, u) :: tpairs end; - - -(*Simplify both terms and check for assignments. 
- Bound vars in the binder are "banned" unless used in both t AND u *) -fun clean_ffpair thy ((rbinder, t, u), (env, tpairs)) = - let - val loot = loose_bnos t and loou = loose_bnos u - fun add_index (j, (a, T)) (bnos, newbinder) = - if member (op =) loot j andalso member (op =) loou j - then (bnos, (a, T) :: newbinder) (*needed by both: keep*) - else (j :: bnos, newbinder); (*remove*) - val (banned, rbin') = fold_rev add_index ((0 upto (length rbinder - 1)) ~~ rbinder) ([], []); - val (env', t') = clean_term banned (env, t); - val (env'',u') = clean_term banned (env',u); - in - (ff_assign thy (env'', rbin', t', u'), tpairs) - handle ASSIGN => - (ff_assign thy (env'', rbin', u', t'), tpairs) - handle ASSIGN => (env'', add_tpair (rbin', (t', u'), tpairs)) - end - handle CHANGE_FAIL => (env, add_tpair (rbinder, (t, u), tpairs)); - - -(*IF the flex-flex dpair is an assignment THEN do it ELSE put in tpairs - eliminates trivial tpairs like t=t, as well as repeated ones - trivial tpairs can easily escape SIMPL: ?A=t, ?A=?B, ?B=t gives t=t - Resulting tpairs MAY NOT be in normal form: assignments may occur here.*) -fun add_ffpair thy (rbinder,t0,u0) (env,tpairs) : Envir.env * (term * term) list = - let - val t = Envir.norm_term env t0 - and u = Envir.norm_term env u0; - in - (case (head_of t, head_of u) of - (Var (v, T), Var (w, U)) => (*Check for identical variables...*) - if Term.eq_ix (v, w) then (*...occur check would falsely return true!*) - if T = U then (env, add_tpair (rbinder, (t, u), tpairs)) - else raise TERM ("add_ffpair: Var name confusion", [t, u]) - else if Term_Ord.indexname_ord (v, w) = LESS then (*prefer to update the LARGER variable*) - clean_ffpair thy ((rbinder, u, t), (env, tpairs)) - else clean_ffpair thy ((rbinder, t, u), (env, tpairs)) - | _ => raise TERM ("add_ffpair: Vars expected", [t, u])) - end; - - -(*Print a tracing message + list of dpairs. - In t==u print u first because it may be rigid or flexible -- - t is always flexible.*) -fun print_dpairs thy msg (env, dpairs) = - if Context_Position.is_visible_global thy then - let - fun pdp (rbinder, t, u) = - let - fun termT t = - Syntax.pretty_term_global thy (Envir.norm_term env (Logic.rlist_abs (rbinder, t))); - val bsymbs = [termT u, Pretty.str " =?=", Pretty.brk 1, termT t]; - in tracing (Pretty.string_of (Pretty.blk (0, bsymbs))) end; - in tracing msg; List.app pdp dpairs end - else (); - - -(*Unify the dpairs in the environment. - Returns flex-flex disagreement pairs NOT IN normal form. - SIMPL may raise exception CANTUNIFY. 
*) -fun hounifiers (thy, env, tus : (term * term) list) : (Envir.env * (term * term) list) Seq.seq = - let - val trace_bound = Config.get_global thy trace_bound; - val search_bound = Config.get_global thy search_bound; - val trace_simp = Config.get_global thy trace_simp; - fun add_unify tdepth ((env, dpairs), reseq) = - Seq.make (fn () => - let - val (env', flexflex, flexrigid) = - (if tdepth > trace_bound andalso trace_simp - then print_dpairs thy "Enter SIMPL" (env, dpairs) else (); - SIMPL thy (env, dpairs)); - in - (case flexrigid of - [] => SOME (fold_rev (add_ffpair thy) flexflex (env', []), reseq) - | dp :: frigid' => - if tdepth > search_bound then - (if Context_Position.is_visible_global thy - then warning "Unification bound exceeded" else (); Seq.pull reseq) - else - (if tdepth > trace_bound then - print_dpairs thy "Enter MATCH" (env',flexrigid@flexflex) - else (); - Seq.pull (Seq.it_right - (add_unify (tdepth + 1)) (MATCH thy (env',dp, frigid'@flexflex), reseq)))) - end - handle CANTUNIFY => - (if tdepth > trace_bound andalso Context_Position.is_visible_global thy - then tracing "Failure node" - else (); Seq.pull reseq)); - val dps = map (fn (t, u) => ([], t, u)) tus; - in add_unify 1 ((env, dps), Seq.empty) end; - -fun unifiers (params as (thy, env, tus)) = - Seq.cons (fold (Pattern.unify thy) tus env, []) Seq.empty - handle Pattern.Unif => Seq.empty - | Pattern.Pattern => hounifiers params; - - -(*For smash_flexflex1*) -fun var_head_of (env,t) : indexname * typ = - (case head_of (strip_abs_body (Envir.norm_term env t)) of - Var (v, T) => (v, T) - | _ => raise CANTUNIFY); (*not flexible, cannot use trivial substitution*) - - -(*Eliminate a flex-flex pair by the trivial substitution, see Huet (1975) - Unifies ?f(t1...rm) with ?g(u1...un) by ?f -> %x1...xm.?a, ?g -> %x1...xn.?a - Unfortunately, unifies ?f(t,u) with ?g(t,u) by ?f, ?g -> %(x,y)?a, - though just ?g->?f is a more general unifier. - Unlike Huet (1975), does not smash together all variables of same type -- - requires more work yet gives a less general unifier (fewer variables). - Handles ?f(t1...rm) with ?f(u1...um) to avoid multiple updates. *) -fun smash_flexflex1 (t, u) env : Envir.env = - let - val vT as (v, T) = var_head_of (env, t) - and wU as (w, U) = var_head_of (env, u); - val (env', var) = Envir.genvar (#1 v) (env, Envir.body_type env T); - val env'' = Envir.vupdate (wU, type_abs (env', U, var)) env'; - in - if vT = wU then env'' (*the other update would be identical*) - else Envir.vupdate (vT, type_abs (env', T, var)) env'' - end; - - -(*Smash all flex-flexpairs. 
Should allow selection of pairs by a predicate?*) -fun smash_flexflex (env, tpairs) : Envir.env = - fold_rev smash_flexflex1 tpairs env; - -(*Returns unifiers with no remaining disagreement pairs*) -fun smash_unifiers thy tus env = - Seq.map smash_flexflex (unifiers (thy, env, tus)); - - -(*Pattern matching*) -fun first_order_matchers thy pairs (Envir.Envir {maxidx, tenv, tyenv}) = - let val (tyenv', tenv') = fold (Pattern.first_order_match thy) pairs (tyenv, tenv) - in Seq.single (Envir.Envir {maxidx = maxidx, tenv = tenv', tyenv = tyenv'}) end - handle Pattern.MATCH => Seq.empty; - -(*General matching -- keep variables disjoint*) -fun matchers _ [] = Seq.single (Envir.empty ~1) - | matchers thy pairs = - let - val maxidx = fold (Term.maxidx_term o #2) pairs ~1; - val offset = maxidx + 1; - val pairs' = map (apfst (Logic.incr_indexes ([], offset))) pairs; - val maxidx' = fold (fn (t, u) => Term.maxidx_term t #> Term.maxidx_term u) pairs' ~1; - - val pat_tvars = fold (Term.add_tvars o #1) pairs' []; - val pat_vars = fold (Term.add_vars o #1) pairs' []; - - val decr_indexesT = - Term.map_atyps (fn T as TVar ((x, i), S) => - if i > maxidx then TVar ((x, i - offset), S) else T | T => T); - val decr_indexes = - Term.map_types decr_indexesT #> - Term.map_aterms (fn t as Var ((x, i), T) => - if i > maxidx then Var ((x, i - offset), T) else t | t => t); - - fun norm_tvar env ((x, i), S) = - let - val tyenv = Envir.type_env env; - val T' = Envir.norm_type tyenv (TVar ((x, i), S)); - in - if (case T' of TVar (v, _) => v = (x, i) | _ => false) then NONE - else SOME ((x, i - offset), (S, decr_indexesT T')) - end; - - fun norm_var env ((x, i), T) = - let - val tyenv = Envir.type_env env; - val T' = Envir.norm_type tyenv T; - val t' = Envir.norm_term env (Var ((x, i), T')); - in - if (case t' of Var (v, _) => v = (x, i) | _ => false) then NONE - else SOME ((x, i - offset), (decr_indexesT T', decr_indexes t')) - end; - - fun result env = - if Envir.above env maxidx then (* FIXME proper handling of generated vars!? *) - SOME (Envir.Envir {maxidx = maxidx, - tyenv = Vartab.make (map_filter (norm_tvar env) pat_tvars), - tenv = Vartab.make (map_filter (norm_var env) pat_vars)}) - else NONE; - - val empty = Envir.empty maxidx'; - in - Seq.append - (Seq.map_filter result (smash_unifiers thy pairs' empty)) - (first_order_matchers thy pairs empty) - end; - -fun matches_list thy ps os = - length ps = length os andalso is_some (Seq.pull (matchers thy (ps ~~ os))); - -end; diff --git a/core/Pure/variable.ML b/core/Pure/variable.ML deleted file mode 100644 index b9a44a70..00000000 --- a/core/Pure/variable.ML +++ /dev/null @@ -1,651 +0,0 @@ -(* Title: Pure/variable.ML - Author: Makarius - -Fixed type/term variables and polymorphic term abbreviations. 
-*) - -signature VARIABLE = -sig - val is_body: Proof.context -> bool - val set_body: bool -> Proof.context -> Proof.context - val restore_body: Proof.context -> Proof.context -> Proof.context - val names_of: Proof.context -> Name.context - val binds_of: Proof.context -> (typ * term) Vartab.table - val maxidx_of: Proof.context -> int - val sorts_of: Proof.context -> sort list - val constraints_of: Proof.context -> typ Vartab.table * sort Vartab.table - val is_declared: Proof.context -> string -> bool - val check_name: binding -> string - val default_type: Proof.context -> string -> typ option - val def_type: Proof.context -> bool -> indexname -> typ option - val def_sort: Proof.context -> indexname -> sort option - val declare_names: term -> Proof.context -> Proof.context - val declare_constraints: term -> Proof.context -> Proof.context - val declare_term: term -> Proof.context -> Proof.context - val declare_typ: typ -> Proof.context -> Proof.context - val declare_prf: Proofterm.proof -> Proof.context -> Proof.context - val declare_thm: thm -> Proof.context -> Proof.context - val global_thm_context: thm -> Proof.context - val variant_frees: Proof.context -> term list -> (string * 'a) list -> (string * 'a) list - val bind_term: indexname * term option -> Proof.context -> Proof.context - val expand_binds: Proof.context -> term -> term - val lookup_const: Proof.context -> string -> string option - val is_const: Proof.context -> string -> bool - val declare_const: string * string -> Proof.context -> Proof.context - val next_bound: string * typ -> Proof.context -> term * Proof.context - val revert_bounds: Proof.context -> term -> term - val is_fixed: Proof.context -> string -> bool - val newly_fixed: Proof.context -> Proof.context -> string -> bool - val fixed_ord: Proof.context -> string * string -> order - val intern_fixed: Proof.context -> string -> string - val markup_fixed: Proof.context -> string -> Markup.T - val lookup_fixed: Proof.context -> string -> string option - val revert_fixed: Proof.context -> string -> string - val add_fixed_names: Proof.context -> term -> string list -> string list - val add_fixed: Proof.context -> term -> (string * typ) list -> (string * typ) list - val add_free_names: Proof.context -> term -> string list -> string list - val add_frees: Proof.context -> term -> (string * typ) list -> (string * typ) list - val add_fixes_binding: binding list -> Proof.context -> string list * Proof.context - val add_fixes: string list -> Proof.context -> string list * Proof.context - val add_fixes_direct: string list -> Proof.context -> Proof.context - val auto_fixes: term -> Proof.context -> Proof.context - val variant_fixes: string list -> Proof.context -> string list * Proof.context - val dest_fixes: Proof.context -> (string * string) list - val invent_types: sort list -> Proof.context -> (string * sort) list * Proof.context - val export_terms: Proof.context -> Proof.context -> term list -> term list - val exportT_terms: Proof.context -> Proof.context -> term list -> term list - val exportT: Proof.context -> Proof.context -> thm list -> thm list - val export_prf: Proof.context -> Proof.context -> Proofterm.proof -> Proofterm.proof - val export: Proof.context -> Proof.context -> thm list -> thm list - val export_morphism: Proof.context -> Proof.context -> morphism - val importT_inst: term list -> Proof.context -> ((indexname * sort) * typ) list * Proof.context - val import_inst: bool -> term list -> Proof.context -> - (((indexname * sort) * typ) list * ((indexname * typ) * 
term) list) * Proof.context - val importT_terms: term list -> Proof.context -> term list * Proof.context - val import_terms: bool -> term list -> Proof.context -> term list * Proof.context - val importT: thm list -> Proof.context -> ((ctyp * ctyp) list * thm list) * Proof.context - val import_prf: bool -> Proofterm.proof -> Proof.context -> Proofterm.proof * Proof.context - val import: bool -> thm list -> Proof.context -> - (((ctyp * ctyp) list * (cterm * cterm) list) * thm list) * Proof.context - val tradeT: (Proof.context -> thm list -> thm list) -> Proof.context -> thm list -> thm list - val trade: (Proof.context -> thm list -> thm list) -> Proof.context -> thm list -> thm list - val focus: term -> Proof.context -> ((string * (string * typ)) list * term) * Proof.context - val focus_cterm: cterm -> Proof.context -> ((string * cterm) list * cterm) * Proof.context - val focus_subgoal: int -> thm -> Proof.context -> ((string * cterm) list * cterm) * Proof.context - val warn_extra_tfrees: Proof.context -> Proof.context -> unit - val polymorphic_types: Proof.context -> term list -> (indexname * sort) list * term list - val polymorphic: Proof.context -> term list -> term list -end; - -structure Variable: VARIABLE = -struct - -(** local context data **) - -type fixes = string Name_Space.table; -val empty_fixes: fixes = Name_Space.empty_table Markup.fixedN; - -datatype data = Data of - {is_body: bool, (*inner body mode*) - names: Name.context, (*type/term variable names*) - consts: string Symtab.table, (*consts within the local scope*) - bounds: int * ((string * typ) * string) list, (*next index, internal name, type, external name*) - fixes: fixes, (*term fixes -- global name space, intern ~> extern*) - binds: (typ * term) Vartab.table, (*term bindings*) - type_occs: string list Symtab.table, (*type variables -- possibly within term variables*) - maxidx: int, (*maximum var index*) - sorts: sort Ord_List.T, (*declared sort occurrences*) - constraints: - typ Vartab.table * (*type constraints*) - sort Vartab.table}; (*default sorts*) - -fun make_data - (is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints) = - Data {is_body = is_body, names = names, consts = consts, bounds = bounds, fixes = fixes, - binds = binds, type_occs = type_occs, maxidx = maxidx, sorts = sorts, constraints = constraints}; - -structure Data = Proof_Data -( - type T = data; - fun init _ = - make_data (false, Name.context, Symtab.empty, (0, []), empty_fixes, Vartab.empty, - Symtab.empty, ~1, [], (Vartab.empty, Vartab.empty)); -); - -fun map_data f = - Data.map (fn - Data {is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints} => - make_data - (f (is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints))); - -fun map_names f = - map_data (fn - (is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints) => - (is_body, f names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints)); - -fun map_consts f = - map_data (fn - (is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints) => - (is_body, names, f consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints)); - -fun map_bounds f = - map_data (fn - (is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints) => - (is_body, names, consts, f bounds, fixes, binds, type_occs, maxidx, sorts, constraints)); - -fun map_fixes f = - map_data (fn - (is_body, names, consts, bounds, fixes, binds, 
type_occs, maxidx, sorts, constraints) => - (is_body, names, consts, bounds, f fixes, binds, type_occs, maxidx, sorts, constraints)); - -fun map_binds f = - map_data (fn - (is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints) => - (is_body, names, consts, bounds, fixes, f binds, type_occs, maxidx, sorts, constraints)); - -fun map_type_occs f = - map_data (fn - (is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints) => - (is_body, names, consts, bounds, fixes, binds, f type_occs, maxidx, sorts, constraints)); - -fun map_maxidx f = - map_data (fn - (is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints) => - (is_body, names, consts, bounds, fixes, binds, type_occs, f maxidx, sorts, constraints)); - -fun map_sorts f = - map_data (fn - (is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints) => - (is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, f sorts, constraints)); - -fun map_constraints f = - map_data (fn - (is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints) => - (is_body, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, f constraints)); - -fun rep_data ctxt = Data.get ctxt |> (fn Data rep => rep); - -val is_body = #is_body o rep_data; - -fun set_body b = - map_data (fn (_, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints) => - (b, names, consts, bounds, fixes, binds, type_occs, maxidx, sorts, constraints)); - -fun restore_body ctxt = set_body (is_body ctxt); - -val names_of = #names o rep_data; -val fixes_of = #fixes o rep_data; -val fixes_space = Name_Space.space_of_table o fixes_of; -val binds_of = #binds o rep_data; -val type_occs_of = #type_occs o rep_data; -val maxidx_of = #maxidx o rep_data; -val sorts_of = #sorts o rep_data; -val constraints_of = #constraints o rep_data; - -val is_declared = Name.is_declared o names_of; - -val check_name = Name_Space.base_name o tap Binding.check; - - - -(** declarations **) - -(* default sorts and types *) - -fun default_type ctxt x = Vartab.lookup (#1 (constraints_of ctxt)) (x, ~1); - -fun def_type ctxt pattern xi = - let val {binds, constraints = (types, _), ...} = rep_data ctxt in - (case Vartab.lookup types xi of - NONE => - if pattern then NONE - else Vartab.lookup binds xi |> Option.map (Type.mark_polymorphic o #1) - | some => some) - end; - -val def_sort = Vartab.lookup o #2 o constraints_of; - - -(* names *) - -fun declare_type_names t = - map_names (fold_types (fold_atyps Term.declare_typ_names) t) #> - map_maxidx (fold_types Term.maxidx_typ t); - -fun declare_names t = - declare_type_names t #> - map_names (fold_aterms Term.declare_term_frees t) #> - map_maxidx (Term.maxidx_term t); - - -(* type occurrences *) - -fun decl_type_occsT T = fold_atyps (fn TFree (a, _) => Symtab.default (a, []) | _ => I) T; - -val decl_type_occs = fold_term_types - (fn Free (x, _) => fold_atyps (fn TFree (a, _) => Symtab.insert_list (op =) (a, x) | _ => I) - | _ => decl_type_occsT); - -val declare_type_occsT = map_type_occs o fold_types decl_type_occsT; -val declare_type_occs = map_type_occs o decl_type_occs; - - -(* constraints *) - -fun constrain_tvar (xi, raw_S) = - let val S = #2 (Term_Position.decode_positionS raw_S) - in if S = dummyS then Vartab.delete_safe xi else Vartab.update (xi, S) end; - -fun declare_constraints t = map_constraints (fn (types, sorts) => - let - val types' = fold_aterms - (fn Free (x, T) => Vartab.update ((x, ~1), T) - | 
Var v => Vartab.update v - | _ => I) t types; - val sorts' = (fold_types o fold_atyps) - (fn TFree (x, S) => constrain_tvar ((x, ~1), S) - | TVar v => constrain_tvar v - | _ => I) t sorts; - in (types', sorts') end) - #> declare_type_occsT t - #> declare_type_names t; - - -(* common declarations *) - -fun declare_internal t = - declare_names t #> - declare_type_occs t #> - map_sorts (Sorts.insert_term t); - -fun declare_term t = - declare_internal t #> - declare_constraints t; - -val declare_typ = declare_term o Logic.mk_type; - -val declare_prf = Proofterm.fold_proof_terms declare_internal (declare_internal o Logic.mk_type); - -val declare_thm = Thm.fold_terms declare_internal; -fun global_thm_context th = declare_thm th (Proof_Context.init_global (Thm.theory_of_thm th)); - - -(* renaming term/type frees *) - -fun variant_frees ctxt ts frees = - let - val names = names_of (fold declare_names ts ctxt); - val xs = fst (fold_map Name.variant (map #1 frees) names); - in xs ~~ map snd frees end; - - - -(** term bindings **) - -fun bind_term (xi, NONE) = map_binds (Vartab.delete_safe xi) - | bind_term ((x, i), SOME t) = - let - val u = Term.close_schematic_term t; - val U = Term.fastype_of u; - in declare_term u #> map_binds (Vartab.update ((x, i), (U, u))) end; - -fun expand_binds ctxt = - let - val binds = binds_of ctxt; - val get = fn Var (xi, _) => Vartab.lookup binds xi | _ => NONE; - in Envir.beta_norm o Envir.expand_term get end; - - - -(** consts **) - -val lookup_const = Symtab.lookup o #consts o rep_data; -val is_const = is_some oo lookup_const; - -val declare_fixed = map_consts o Symtab.delete_safe; -val declare_const = map_consts o Symtab.update; - - - -(** bounds **) - -fun next_bound (a, T) ctxt = - let - val b = Name.bound (#1 (#bounds (rep_data ctxt))); - val ctxt' = ctxt |> map_bounds (fn (next, bounds) => (next + 1, ((b, T), a) :: bounds)); - in (Free (b, T), ctxt') end; - -fun revert_bounds ctxt t = - (case #2 (#bounds (rep_data ctxt)) of - [] => t - | bounds => - let - val names = Term.declare_term_names t (names_of ctxt); - val xs = rev (#1 (fold_map Name.variant (rev (map #2 bounds)) names)); - fun subst ((b, T), _) x' = (Free (b, T), Syntax_Trans.mark_bound_abs (x', T)); - in Term.subst_atomic (map2 subst bounds xs) t end); - - - -(** fixes **) - -(* specialized name space *) - -val is_fixed = Name_Space.defined_entry o fixes_space; -fun newly_fixed inner outer = is_fixed inner andf (not o is_fixed outer); - -val fixed_ord = Name_Space.entry_ord o fixes_space; -val intern_fixed = Name_Space.intern o fixes_space; - -fun lookup_fixed ctxt x = - let val x' = intern_fixed ctxt x - in if is_fixed ctxt x' then SOME x' else NONE end; - -fun revert_fixed ctxt x = - (case Name_Space.lookup_key (fixes_of ctxt) x of - SOME (_, x') => if intern_fixed ctxt x' = x then x' else x - | NONE => x); - -fun markup_fixed ctxt x = - Name_Space.markup (fixes_space ctxt) x - |> Markup.name (revert_fixed ctxt x); - -fun dest_fixes ctxt = - Name_Space.fold_table (fn (x, y) => cons (y, x)) (fixes_of ctxt) [] - |> sort (Name_Space.entry_ord (fixes_space ctxt) o pairself #2); - - -(* collect variables *) - -fun add_free_names ctxt = - fold_aterms (fn Free (x, _) => not (is_fixed ctxt x) ? insert (op =) x | _ => I); - -fun add_frees ctxt = - fold_aterms (fn Free (x, T) => not (is_fixed ctxt x) ? insert (op =) (x, T) | _ => I); - -fun add_fixed_names ctxt = - fold_aterms (fn Free (x, _) => is_fixed ctxt x ? 
insert (op =) x | _ => I); - -fun add_fixed ctxt = - fold_aterms (fn Free (x, T) => is_fixed ctxt x ? insert (op =) (x, T) | _ => I); - - -(* declarations *) - -local - -fun err_dups dups = - error ("Duplicate fixed variable(s): " ^ commas (map Binding.print dups)); - -fun new_fixed ((x, x'), pos) ctxt = - if is_some (lookup_fixed ctxt x') then err_dups [Binding.make (x, pos)] - else - let val context = Context.Proof ctxt |> Name_Space.map_naming (K Name_Space.default_naming) in - ctxt - |> map_fixes - (Name_Space.define context true (Binding.make (x', pos), x) #> snd #> - Name_Space.alias_table Name_Space.default_naming (Binding.make (x, pos)) x') - |> declare_fixed x - |> declare_constraints (Syntax.free x') - end; - -fun new_fixes names' xs xs' ps = - map_names (K names') #> - fold new_fixed ((xs ~~ xs') ~~ ps) #> - pair xs'; - -in - -fun add_fixes_binding bs ctxt = - let - val _ = - (case filter (Name.is_skolem o Binding.name_of) bs of - [] => () - | bads => error ("Illegal internal Skolem constant(s): " ^ commas (map Binding.print bads))); - val _ = - (case duplicates (op = o pairself Binding.name_of) bs of - [] => () - | dups => err_dups dups); - - val xs = map check_name bs; - val names = names_of ctxt; - val (xs', names') = - if is_body ctxt then fold_map Name.variant xs names |>> map Name.skolem - else (xs, fold Name.declare xs names); - in ctxt |> new_fixes names' xs xs' (map Binding.pos_of bs) end; - -fun variant_fixes raw_xs ctxt = - let - val names = names_of ctxt; - val xs = map (fn x => Name.clean x |> Name.is_internal x ? Name.internal) raw_xs; - val (xs', names') = fold_map Name.variant xs names |>> (is_body ctxt ? map Name.skolem); - in ctxt |> new_fixes names' xs xs' (replicate (length xs) Position.none) end; - -end; - -val add_fixes = add_fixes_binding o map Binding.name; - -fun add_fixes_direct xs ctxt = ctxt - |> set_body false - |> (snd o add_fixes xs) - |> restore_body ctxt; - -fun auto_fixes t ctxt = ctxt - |> not (is_body ctxt) ? add_fixes_direct (rev (add_free_names ctxt t [])) - |> declare_term t; - -fun invent_types Ss ctxt = - let - val tfrees = Name.invent (names_of ctxt) Name.aT (length Ss) ~~ Ss; - val ctxt' = fold (declare_constraints o Logic.mk_type o TFree) tfrees ctxt; - in (tfrees, ctxt') end; - - - -(** export -- generalize type/term variables (beware of closure sizes) **) - -fun export_inst inner outer = - let - val declared_outer = is_declared outer; - val still_fixed = not o newly_fixed inner outer; - - val gen_fixes = - Name_Space.fold_table (fn (y, _) => not (is_fixed outer y) ? 
cons y) - (fixes_of inner) []; - - val type_occs_inner = type_occs_of inner; - fun gen_fixesT ts = - Symtab.fold (fn (a, xs) => - if declared_outer a orelse exists still_fixed xs - then I else cons a) (fold decl_type_occs ts type_occs_inner) []; - in (gen_fixesT, gen_fixes) end; - -fun exportT_inst inner outer = #1 (export_inst inner outer); - -fun exportT_terms inner outer = - let val mk_tfrees = exportT_inst inner outer in - fn ts => ts |> map - (Term_Subst.generalize (mk_tfrees ts, []) - (fold (Term.fold_types Term.maxidx_typ) ts ~1 + 1)) - end; - -fun export_terms inner outer = - let val (mk_tfrees, tfrees) = export_inst inner outer in - fn ts => ts |> map - (Term_Subst.generalize (mk_tfrees ts, tfrees) - (fold Term.maxidx_term ts ~1 + 1)) - end; - -fun export_prf inner outer prf = - let - val (mk_tfrees, frees) = export_inst (declare_prf prf inner) outer; - val tfrees = mk_tfrees []; - val idx = Proofterm.maxidx_proof prf ~1 + 1; - val gen_term = Term_Subst.generalize_same (tfrees, frees) idx; - val gen_typ = Term_Subst.generalizeT_same tfrees idx; - in Same.commit (Proofterm.map_proof_terms_same gen_term gen_typ) prf end; - - -fun gen_export (mk_tfrees, frees) ths = - let - val tfrees = mk_tfrees (map Thm.full_prop_of ths); - val maxidx = fold Thm.maxidx_thm ths ~1; - in map (Thm.generalize (tfrees, frees) (maxidx + 1)) ths end; - -fun exportT inner outer = gen_export (exportT_inst inner outer, []); -fun export inner outer = gen_export (export_inst inner outer); - -fun export_morphism inner outer = - let - val fact = export inner outer; - val term = singleton (export_terms inner outer); - val typ = Logic.type_map term; - in - Morphism.morphism "Variable.export" {binding = [], typ = [typ], term = [term], fact = [fact]} - end; - - - -(** import -- fix schematic type/term variables **) - -fun importT_inst ts ctxt = - let - val tvars = rev (fold Term.add_tvars ts []); - val (tfrees, ctxt') = invent_types (map #2 tvars) ctxt; - in (tvars ~~ map TFree tfrees, ctxt') end; - -fun import_inst is_open ts ctxt = - let - val ren = Name.clean #> (if is_open then I else Name.internal); - val (instT, ctxt') = importT_inst ts ctxt; - val vars = map (apsnd (Term_Subst.instantiateT instT)) (rev (fold Term.add_vars ts [])); - val (xs, ctxt'') = variant_fixes (map (ren o #1 o #1) vars) ctxt'; - val inst = vars ~~ map Free (xs ~~ map #2 vars); - in ((instT, inst), ctxt'') end; - -fun importT_terms ts ctxt = - let val (instT, ctxt') = importT_inst ts ctxt - in (map (Term_Subst.instantiate (instT, [])) ts, ctxt') end; - -fun import_terms is_open ts ctxt = - let val (inst, ctxt') = import_inst is_open ts ctxt - in (map (Term_Subst.instantiate inst) ts, ctxt') end; - -fun importT ths ctxt = - let - val thy = Proof_Context.theory_of ctxt; - val (instT, ctxt') = importT_inst (map Thm.full_prop_of ths) ctxt; - val insts' as (instT', _) = Thm.certify_inst thy (instT, []); - val ths' = map (Thm.instantiate insts') ths; - in ((instT', ths'), ctxt') end; - -fun import_prf is_open prf ctxt = - let - val ts = rev (Proofterm.fold_proof_terms cons (cons o Logic.mk_type) prf []); - val (insts, ctxt') = import_inst is_open ts ctxt; - in (Proofterm.instantiate insts prf, ctxt') end; - -fun import is_open ths ctxt = - let - val thy = Proof_Context.theory_of ctxt; - val (insts, ctxt') = import_inst is_open (map Thm.full_prop_of ths) ctxt; - val insts' = Thm.certify_inst thy insts; - val ths' = map (Thm.instantiate insts') ths; - in ((insts', ths'), ctxt') end; - - -(* import/export *) - -fun gen_trade imp exp f ctxt ths = - 
let val ((_, ths'), ctxt') = imp ths ctxt - in exp ctxt' ctxt (f ctxt' ths') end; - -val tradeT = gen_trade importT exportT; -val trade = gen_trade (import true) export; - - -(* focus on outermost parameters: !!x y z. B *) - -fun focus_params t ctxt = - let - val (xs, Ts) = - split_list (Term.variant_frees t (Term.strip_all_vars t)); (*as they are printed :-*) - val (xs', ctxt') = variant_fixes xs ctxt; - val ps = xs' ~~ Ts; - val ctxt'' = ctxt' |> fold (declare_constraints o Free) ps; - in ((xs, ps), ctxt'') end; - -fun focus t ctxt = - let - val ((xs, ps), ctxt') = focus_params t ctxt; - val t' = Term.subst_bounds (rev (map Free ps), Term.strip_all_body t); - in (((xs ~~ ps), t'), ctxt') end; - -fun forall_elim_prop t prop = - Thm.beta_conversion false (Thm.apply (Thm.dest_arg prop) t) - |> Thm.cprop_of |> Thm.dest_arg; - -fun focus_cterm goal ctxt = - let - val cert = Thm.cterm_of (Thm.theory_of_cterm goal); - val ((xs, ps), ctxt') = focus_params (Thm.term_of goal) ctxt; - val ps' = map (cert o Free) ps; - val goal' = fold forall_elim_prop ps' goal; - in ((xs ~~ ps', goal'), ctxt') end; - -fun focus_subgoal i st = - let - val all_vars = Thm.fold_terms Term.add_vars st []; - val no_binds = map (fn (xi, _) => (xi, NONE)) all_vars; - in - fold bind_term no_binds #> - fold (declare_constraints o Var) all_vars #> - focus_cterm (Thm.cprem_of st i) - end; - - - -(** implicit polymorphism **) - -(* warn_extra_tfrees *) - -fun warn_extra_tfrees ctxt1 ctxt2 = - let - fun occs_typ a = Term.exists_subtype (fn TFree (b, _) => a = b | _ => false); - fun occs_free a x = - (case def_type ctxt1 false (x, ~1) of - SOME T => if occs_typ a T then I else cons (a, x) - | NONE => cons (a, x)); - - val occs1 = type_occs_of ctxt1; - val occs2 = type_occs_of ctxt2; - val extras = Symtab.fold (fn (a, xs) => - if Symtab.defined occs1 a then I else fold (occs_free a) xs) occs2 []; - val tfrees = map #1 extras |> sort_distinct string_ord; - val frees = map #2 extras |> sort_distinct string_ord; - in - if null extras orelse not (Context_Position.is_visible ctxt2) then () - else warning ("Introduced fixed type variable(s): " ^ commas tfrees ^ " in " ^ - space_implode " or " (map quote frees)) - end; - - -(* polymorphic terms *) - -fun polymorphic_types ctxt ts = - let - val ctxt' = fold declare_term ts ctxt; - val occs = type_occs_of ctxt; - val occs' = type_occs_of ctxt'; - val types = Symtab.fold (fn (a, _) => if Symtab.defined occs a then I else cons a) occs' []; - val idx = maxidx_of ctxt' + 1; - val Ts' = (fold o fold_types o fold_atyps) - (fn T as TFree _ => - (case Term_Subst.generalizeT types idx T of TVar v => insert (op =) v | _ => I) - | _ => I) ts []; - val ts' = map (Term_Subst.generalize (types, []) idx) ts; - in (rev Ts', ts') end; - -fun polymorphic ctxt ts = snd (polymorphic_types ctxt ts); - -end; diff --git a/core/ROOT.ML b/core/ROOT.ML deleted file mode 100644 index 69ff932c..00000000 --- a/core/ROOT.ML +++ /dev/null @@ -1,60 +0,0 @@ -(* build the core *) - -use "isabelle_env.ML"; -use_thy "lib.thy"; -use_thy "core.thy"; -use_thy "theories.thy"; -use_thy "controller.thy"; -use_thy "synth.thy"; - - -(* core tests *) - -Testing.make_test "expressions/test/linrat_angle_expr-test.ML"; -Testing.make_test "expressions/test/linrat_expr-test.ML"; -Testing.make_test "expressions/test/linrat_matcher-test.ML"; -Testing.make_test "io/test/linrat-json-test.ML"; -Testing.make_test "graph/test/test-graph-setup.ML"; -Testing.make_test "graph/test/test-nhd.ML"; -Testing.make_test "graph/test/test-ograph.ML"; 
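(* Illustrative only: each Testing.make_test line registers one test
   script with this build, so an additional suite would be hooked in with
   one more call in the same style, e.g.

     Testing.make_test "graph/test/test-my-new-feature.ML";

   where that path is a made-up placeholder. *)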
-Testing.make_test "graph/test/test-bang-graph.ML"; -Testing.make_test "io/test/graph-json-test.ML"; -Testing.make_test "matching/test/test-matching-setup.ML"; -Testing.make_test "matching/test/test-sg-to-sg-matching.ML"; -Testing.make_test "matching/test/test-bg-to-sg-matching.ML"; -Testing.make_test "matching/test/test-bg-to-bg-matching.ML"; -Testing.make_test "matching/test/test-concrete-matcher.ML"; -Testing.make_test "matching/test/test-greedy-matcher.ML"; -Testing.make_test "matching/test/test-bang-graph-homeomorphism-search.ML"; -Testing.make_test "rewriting/test/rule-test.ML"; -Testing.make_test "theories/test/ruleset-test.ML"; -Testing.make_test "rewriting/test/rewriter-test.ML"; -Testing.make_test "rewriting/test/ruleset_rewriter-tests.ML"; - -(* theory tests *) - -Testing.make_test "theories/red_green/test/test.ML"; -Testing.make_test "theories/red_green/rg_mathematica.ML"; -Testing.make_test "theories/ghz_w/test/test.ML"; -Testing.make_test "theories/substrings/test/test.ML"; -Testing.make_test "theories/substr_linrat/test/test.ML"; -Testing.make_test "theories/red_green_blue/test/test.ML"; -Testing.make_test "theories/petri/test/test.ML"; -Testing.make_test "theories/isaplanner_rtechn/test/test.ML"; -Testing.make_test "theories/rational_pair/test/test.ML"; - -(*PolyML.Project.make "dnets/DNetsLib.ML"; -PolyML.Project.make "dnets/Literal.ML"; -PolyML.Project.make "dnets/Contour.ML"; -PolyML.Project.make "dnets/ContourList.ML"; -PolyML.Project.make "dnets/TopDNet.ML"; -Testing.make_test "dnets/test.ML";*) - -(* top-level and regression tests *) - -Testing.make_test "test/old-rg-ruleset-rewriter-tests.ML"; -Testing.make_test "test/regression-tests.ML"; -(* Metrics tests depend on Substrings_Theory *) -(*Testing.make_test "metrics/test/test.ML";*) -(*Testing.make_test "example_code/ROOT.ML";*) - diff --git a/core/build_heap.ML b/core/build_heap.ML deleted file mode 100644 index f4b4477c..00000000 --- a/core/build_heap.ML +++ /dev/null @@ -1,4 +0,0 @@ -val _ = PolyML.exception_trace (fn () => PolyML.use "ROOT.ML") - handle _ => OS.Process.exit OS.Process.failure; -PolyML.SaveState.saveState "heaps/quanto.heap"; -val _ = OS.Process.exit OS.Process.success; diff --git a/core/controller.thy b/core/controller.thy deleted file mode 100644 index 10bbb419..00000000 --- a/core/controller.thy +++ /dev/null @@ -1,17 +0,0 @@ -theory controller -imports theories -begin -ML_file "json_interface/controller_util.ML" -ML_file "json_interface/controller_module.ML" -ML_file "json_interface/modules/test.ML" -ML_file "json_interface/modules/rewrite.ML" -ML_file "json_interface/modules/simplify.ML" -ML_file "json_interface/controller.ML" -ML_file "json_interface/controller_registry.ML" -ML_file "json_interface/protocol.ML" -ML_file "json_interface/run.ML" - -ML_file "rewriting/simp_util.ML" -ML_file "theories/red_green/rg_simp_util.ML" - -end diff --git a/core/core.thy b/core/core.thy deleted file mode 100644 index 1f8b3599..00000000 --- a/core/core.thy +++ /dev/null @@ -1,147 +0,0 @@ -theory core -imports lib -begin - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Compile quantomatic core *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -(* - * Utility Code - *) - -(* IO Utils *) -ML_file "io/input.ML" -ML_file "io/output.ML" -ML_file "io/json_io.ML" -ML_file "io/file_io.ML" - - -(* - * Names - *) -ML_file "names.ML" (* defines basic names used in Quantomatic *) - - -(* - * Expressions for use in graph component data - *) -ML_file 
"expressions/lex.ML" -ML_file "expressions/coeff.ML" -ML_file "expressions/matrix.ML" -ML_file "expressions/expr.ML" -ML_file "expressions/linrat_expr.ML" -ML_file "expressions/linrat_angle_expr.ML" -ML_file "expressions/semiring.ML" -ML_file "expressions/tensor.ML" -ML_file "expressions/linrat_angle_matcher.ML" -ML_file "expressions/linrat_matcher.ML" - -ML_file "expressions/alg.ML" (* algebraic expression utils *) - -(* I/O *) -ML_file "io/linrat_json.ML" - - -(* - * Graphs - *) - -(* arity of vertices (in,out,undir) *) -ML_file "graph/arity.ML" -(* neighbourhood data for non-commutative vertices *) -ML_file "graph/nhd.ML" - - -ML_file "graph/graph_data.ML" -ML_file "graph/ograph.sig.ML" -ML_file "graph/bang_graph.sig.ML" -ML_file "graph/bang_graph.ML" - -ML_file "graph/graph_annotations.ML" (* graph annotations *) - -(* I/O *) -ML_file "io/graph_json.ML" -ML_file "io/graph_annotations_json.ML" -ML_file "io/graph_dot_output.ML" - -(* new, combined IO struct *) -ML_file "io/graph_json_io.ML" - - - -(* - * Matching - *) -ML_file "matching/match.ML" -ML_file "matching/bg_match.ML" -ML_file "matching/match_state.ML" - -(* piece-by-piece matching utility *) -ML_file "matching/progressive_match_search.ML" - -(* signature for outer (e.g. !-box) matching *) -ML_file "matching/match_search.ML" -(* wrappers for inner_match_search *) -(* pattern-free wrapper (concrete graphs onto concrete graphs) *) -ML_file "matching/concrete_match_search.ML" -(* naive pattern-graph wrapper *) -ML_file "matching/greedy_match_search.ML" -ML_file "matching/bang_graph_homeomorphism_search.ML" - - -(* - * Rules - *) -ML_file "rewriting/rule.ML" - -(* I/O *) -ML_file "io/rule_json.ML" - -(* new, combined rule IO struct *) -ML_file "io/rule_json_io.ML" - - -(* - * Rulesets - *) -ML_file "theories/ruleset.ML" -ML_file "theories/ruleset_annotations.ML" - -(* Ruleset I/O *) -ML_file "io/ruleset_json.ML" -ML_file "io/ruleset_annotations_json.ML" - -ML_file "io/ruleset_json_io.ML" - - - -(* - * Rewriting - *) -(* substitution of a matched subgraph for another graph *) -ML_file "rewriting/rewriter.ML" -(* substitution used to provide rewriting with rulesets *) -ML_file "rewriting/ruleset_rewriter.ML" -(* Heuristic derived data structures *) -ML_file "rewriting/heuristic/distancematrix.ML" (* distance matrix *) -(* I/O *) -ML_file "io/rewrite_json.ML" - -(* - * Theories - *) -(* construction of everything in a graphical theory from just param *) -ML_file "io/graph_component_io.ML" -ML_file "theories/graphical_theory.ML" -ML_file "io/graphical_theory_io.ML" - -ML_file "dnets/DNetsLib.ML" -ML_file "dnets/Literal.ML" -ML_file "dnets/Contour.ML" -ML_file "dnets/ContourList.ML" -ML_file "dnets/TopDNet.ML" - - - -end diff --git a/core/dnets/.gitignore b/core/dnets/.gitignore deleted file mode 100644 index 4b7c4ed0..00000000 --- a/core/dnets/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/*.txt diff --git a/core/dnets/Contour.ML b/core/dnets/Contour.ML deleted file mode 100644 index 4961b36f..00000000 --- a/core/dnets/Contour.ML +++ /dev/null @@ -1,391 +0,0 @@ -signature CONTOUR = -sig - type T - structure LIB : TDNET_LIBRARY - structure G : BANG_GRAPH - structure L : LITERAL - - (* CONSTRUCTORS *) - val empty : T - val mk : G.T -> T -> T - val mk_first_contour : G.T -> V.name -> T - val target_function : G.T -> T - val add_literal : T -> L.T -> T - - (* GETTERS *) - val get_first_literal : T -> L.T - - val get_contour_mult_none : T -> T - val get_contour_mult_star : T -> T - val get_contour_mult_qm : T -> T - val 
get_contour_mult_star_or_qm : T -> T - - val contains_boundary : T -> bool - - (* COMPARISON FUNCTIONS *) - val eq : T * T -> bool - val equiv : T * T -> bool - val check_strong_compatibility : T * T -> bool - val check_weak_compatibility : T * T -> bool - - (* COMPARISON-EQUALITY FUNCTIONS *) - val subtract_eq_contour: T * T -> T - val intersect_eq_contours: T * T -> T - val complement_eq_contour: T * T -> T - val remove_eq_duplicate : T -> T - val is_eq_literal_contained : L.T * T -> bool - - (* COMPARISON-MATCHING FUNCTIONS *) - val is_matching_contour_contained : T * T -> bool - - (* PRINT FUNCTION *) - val printout : T -> string - -end - - - -functor Contour ( G : BANG_GRAPH ) : CONTOUR = -struct - - structure G = G - structure LIB = TDNet_Library - structure L = Literal(G) - - type T = L.T list; - - val empty = []; - - - - (* This function gets the first node in a contour *) - fun get_first_literal [] = raise ERROR "Contour is empty" - | get_first_literal (l::[]) = l - | get_first_literal (l::literals) = l - - (* This function returns a contout containing only the literal with multiplicity equal to None *) - fun get_contour_mult_none([]) = empty - | get_contour_mult_none(l::[]) = if L.is_kind_mult_none(l) then l::[] else [] - | get_contour_mult_none(l::ls) = if L.is_kind_mult_none(l) then l::get_contour_mult_none(ls) else get_contour_mult_none(ls) - - (* This function returns a contout containing only the literal with multiplicity equal to Star *) - fun get_contour_mult_star([]) = empty - | get_contour_mult_star(l::[]) = if L.is_kind_mult_star(l) then l::[] else [] - | get_contour_mult_star(l::ls) = if L.is_kind_mult_star(l) then l::get_contour_mult_star(ls) else get_contour_mult_star(ls) - - (* This function returns a contout containing only the literal with multiplicity equal to QM *) - fun get_contour_mult_qm([]) = empty - | get_contour_mult_qm(l::[]) = if L.is_kind_mult_qm(l) then l::[] else [] - | get_contour_mult_qm(l::ls) = if L.is_kind_mult_qm(l) then l::get_contour_mult_qm(ls) else get_contour_mult_qm(ls) - - (* This function returns a contout containing only the literal with multiplicity equal to Star or QM *) - fun get_contour_mult_star_or_qm(c) = flat (get_contour_mult_star(c) :: get_contour_mult_qm(c) :: []) - - (* This function checks if a boundary contains at least one boundary node-vertex *) - fun contains_boundary ([]) = false - | contains_boundary (l::[]) = L.is_boundary l - | contains_boundary (l::ls) = if L.is_boundary l then true else contains_boundary ls - - (* This function checks if a boundary contains at least two boundary node-vertex *) - fun contains_two_boundaries([]) = false - | contains_two_boundaries(l::[]) = false - | contains_two_boundaries(l::ls) = if L.is_boundary l then contains_boundary ls else contains_two_boundaries ls - - - (* This function finds if a literal l is contained in a contour c *) - fun is_eq_literal_contained(l,c) = LIB.is_contained L.eq l c - - (* This function removes a literal l from a contour c, given that l is contained in c *) - (* PRECONDITION: is_eq_literal_contained(l,c) = true *) - fun remove_eq_literal_in_contour(l,c) = LIB.rm_element L.eq l c - - (* This function subtracts c2 from c1, c1-c2, given that c2 is contained in c1 *) - (* PRECONDITION: c2 is contained in c1 *) - fun subtract_eq_contour(c1,c2) = LIB.sub_x_y L.eq c1 c2 - - (* This function finds the contour given by the intersection of two contours c1 and c2 *) - (* PRECONDITION: remove_eq_duplicate() has been executed *) - fun intersect_eq_contours(c1,c2) = 
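(* Illustrative side note: the get_contour_mult_* functions above are
   hand-rolled filters over the literal list; using only the LITERAL
   interface they could equally be written with the standard basis, e.g.

     fun get_contour_mult_none' c = List.filter L.is_kind_mult_none c

   The explicit recursions are left unchanged here. *)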
LIB.intersect_x_y L.eq c2 c1 - - (* This function computes the complement of c1 wrt c2, c2 - (c1 intersection c2) *) - fun complement_eq_contour(c1,c2) = LIB.compl_x_y L.eq c2 c1 - - (* This function removes duplicates in a contour c *) - fun remove_eq_duplicate(c) = LIB.rm_duplicates L.eq c - - (* This function checks if two contours c1 and c2 are equal*) - fun is_eq_contour([],[]) = true - | is_eq_contour(c1::[],[]) = false - | is_eq_contour([],c2::[]) = false - | is_eq_contour(c1::c1s,[]) = false - | is_eq_contour([],c2::c2s) = false - | is_eq_contour(c1::[],c2::[]) = if L.eq(c1,c2) then true else false - | is_eq_contour(c1::c1s,c2::[]) = false - | is_eq_contour(c1::[],c2::c2s) = false - | is_eq_contour(c1::c1s,c2::c2s) = - if L.eq(c1,c2) then is_eq_contour (c1s,c2s) - else if is_eq_literal_contained(c1,c2s) then is_eq_contour(c1s,c2::remove_eq_literal_in_contour(c1,c2s)) else false - - (* This function checks for equality between c1 and c2, that is if c1 and c2 are equal contours *) - (* TRUE iff c1 and c2 contains equal literals in any order; they must be generated by the same node-vertices *) - (* EQUALITY is used when building contours in order to build a single literal for every node-vertex *) - fun eq (c1,c2) = - if (length c1 = length c2) then is_eq_contour(c1,c2) - else false - - - - (* This function finds if a literal l is contained in a contour c *) - fun is_equiv_literal_contained(l,c) = LIB.is_contained L.equiv l c - - (* This function removes a literal l from a contour c, given that l is contained in c *) - (* PRECONDITION: is_equiv_literal_contained(l,c) = true *) - fun remove_equiv_literal_in_contour(l,c) = LIB.rm_element L.equiv l c - - (* This function checks if two contours c1 and c2 are equivalent *) - fun is_equiv_contour([],[]) = true - | is_equiv_contour(c1::[],[]) = false - | is_equiv_contour([],c2::[]) = false - | is_equiv_contour(c1::c1s,[]) = false - | is_equiv_contour([],c2::c2s) = false - | is_equiv_contour(c1::[],c2::[]) = if L.equiv(c1,c2) then true else false - | is_equiv_contour(c1::c1s,c2::[]) = false - | is_equiv_contour(c1::[],c2::c2s) = false - | is_equiv_contour(c1::c1s,c2::c2s) = - if L.equiv(c1,c2) then is_equiv_contour (c1s,c2s) - else if is_equiv_literal_contained(c1,c2s) then is_equiv_contour(c1s,c2::remove_equiv_literal_in_contour(c1,c2s)) else false - - (* This function checks for equivalence between c1 and c2, that is if c1 and c2 are equivalent contours *) - (* TRUE iff c1 and c2 contains equivalent literals in any order; they can be generated by different node-vertices *) - (* EQUIVALENCE is used when building the tree in order identify contours which are equivalent and share a node in the tree *) - fun equiv (c1,c2) = - if (length c1 = length c2) then is_equiv_contour(c1,c2) - else false - - - - - - (* HELPER FUNCTIONS to build a literal *) - (* This functions checks if all the literals l in a contour are in a bang-box *) - fun all_bang(g, []) = false - | all_bang(g, l::[]) = G.is_bboxed g (L.get_name(l)) - | all_bang(g, l::ls) = - if G.is_bboxed g (L.get_name(l)) then all_bang(g, ls) - else false - - (* This functions checks if there is a literals l in a contour which is in a bang-box *) - fun exists_bang(g, []) = false - | exists_bang (g, l::[]) = G.is_bboxed g (L.get_name(l)) - | exists_bang (g, l::ls) = - if G.is_bboxed g (L.get_name(l)) then true - else exists_bang(g,ls) - - (* This function recomputes the multiplicity of a literal l in a graph g given the previous contour pc *) - fun set_kind_mult(g,pc,l) = - let - val adj = 
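(* Illustrative summary of set_kind_mult, whose body continues below: the
   multiplicity of a literal is recomputed relative to the previous
   contour pc --
     Star if its own node-vertex lies in a !-box,
     QM   if it has neighbours in pc and all of them are !-boxed,
     None otherwise. *)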
L.get_adj g l - val adj_in_contour = intersect_eq_contours(pc,adj) - in - if - G.is_bboxed g (L.get_name(l)) then L.mult_star - else if all_bang(g, adj_in_contour) then L.mult_qm - else L.mult_none - end - - (* This function recomputes the input arity evaluating the number of bang-boxes in the graph g having out-edges going to the literal l*) - fun set_input_arity(g,l) = - let - val pred = L.get_pred g l - - val bboxed = G.get_bboxed(g) - val bboxed = V.NSet.list_of(bboxed) - val g_list = replicate (length bboxed) g - val bboxed = map2 L.mk g_list bboxed - - val pred = complement_eq_contour(bboxed,pred) - in - length pred - end - - (* This function recomputes the input multiplicity of a literal l in a graph g given the previous contour pc *) - fun set_input_mult(g,pc,l) = - let - val prec = L.get_pred g l - in - if exists_bang(g,prec) then L.mult_star - else L.mult_none - end - - (* This function recomputes the output arity evaluating the number of bang-boxes in the graph g having in-edges coming from the literal l*) - fun set_output_arity(g,l) = - let - val succ = L.get_succ g l - - val bboxed = G.get_bboxed(g) - val bboxed = V.NSet.list_of(bboxed) - val g_list = replicate (length bboxed) g - val bboxed = map2 L.mk g_list bboxed - - val succ = complement_eq_contour(bboxed,succ) - in - length succ - end - - (* This function recomputes the input multiplicity of a literal l in a graph g given the previous contour pc *) - fun set_output_mult(g,pc,l) = - let - val succ = L.get_succ g l - in - if exists_bang(g,succ) then L.mult_star - else L.mult_none - end - - (* This function rebuilds the literals ls in a graph g recomputing arities and multiplicities according to the previous contour pc *) - fun rebuild_literals(_,_,[]) = [] - | rebuild_literals(g,pc,l::[]) = - L.build ( L.get_name(l), L.get_kind(l), set_kind_mult(g,pc,l), set_input_arity(g,l), set_input_mult(g,pc,l), set_output_arity(g,l), set_output_mult(g,pc,l) ) :: [] - | rebuild_literals(g,pc,l::ls) = - L.build ( L.get_name(l), L.get_kind(l), set_kind_mult(g,pc,l), set_input_arity(g,l), set_input_mult(g,pc,l), set_output_arity(g,l), set_output_mult(g,pc,l) ) :: rebuild_literals(g,pc,ls) - - (* This function adds a qm multiplicity to the boundary node-vertices in l::ls *) - fun add_qm_to_boundaries([]) = [] - | add_qm_to_boundaries(l::[]) = - if L.is_boundary(l) then L.build ( L.get_name(l), L.get_kind(l), L.mult_qm, L.get_input_arity(l), L.get_input_mult(l), L.get_output_arity(l), L.get_output_mult(l) ) :: [] - else l ::[] - | add_qm_to_boundaries(l::ls) = - if L.is_boundary(l) then L.build ( L.get_name(l), L.get_kind(l), L.mult_qm, L.get_input_arity(l), L.get_input_mult(l), L.get_output_arity(l), L.get_output_mult(l) ) :: add_qm_to_boundaries(ls) - else l :: add_qm_to_boundaries(ls) - - (* This function rebuilds the literals adding a qm multiplicity to a boundary node-vertex if the contour c contains more than one boundary node-vertex *) - fun rebuild_boundaries(c) = - if contains_two_boundaries(c) then add_qm_to_boundaries(c) else c - - (* This function builds the first contour given a graph g *) - fun target_function(g) = - let - val vnames = G.get_unbboxed(g) - val v = case V.NSet.get_local_bot vnames - of SOME v => v - | NONE => raise ERROR "No unboxed verts" - val lit = L.mk g v - val contour = rebuild_literals(g,[],lit::[]) - in - rebuild_boundaries(contour) - end - - (* This function build the first contour using the node-vertex v *) - fun mk_first_contour g v = - let - val lit = L.mk g v - val contour = 
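(* Illustrative sketch only (g and v are placeholders): the contour
   sequence of a bang-graph is seeded and grown with the constructors of
   this functor, roughly

     val c0 = mk_first_contour g v   (* one rebuilt literal, for vertex v *)
     val c1 = mk g c0                (* the rebuilt neighbourhood of c0   *)

   target_function does the same as mk_first_contour but chooses the
   starting vertex itself (an unbboxed vertex of g). *)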
rebuild_literals(g,[],lit::[]) - in - rebuild_boundaries(contour) - end - - (* This function builds a contour starting from another contour c *) - fun mk g c = - let - val g_list = replicate (length c) g - val gl_list = g_list ~~ c - - val contour = LIB.maps3 L.get_adj g c - val contour = remove_eq_duplicate(contour) - val contour = complement_eq_contour(c,contour) - val contour = rebuild_literals(g,c,contour) - in - rebuild_boundaries(contour) - end - - (* This function adds the literal l to the contour c *) - fun add_literal c l = l::c - - - - (* This function finds if a literal l is contained in a contour c *) - fun is_matching_literal_contained(l,c) = LIB.is_contained L.match l c - - (* This function removes a literal l from a contour c, given that l is contained in c *) - (* PRECONDITION: is_matching_literal_contained(l,c) = true *) - fun remove_matching_literal_in_contour(l,c) = LIB.rm_element L.match l c - - (* This function removes a matching literal from a contour considering its multiplicity *) - (* PRECONDITION: is_matching_literal_contained(l,c) = true *) - fun remove_matching_literal_in_contour_according_to_mult(l,c::[]) = - if(L.is_kind_mult_star(c)) then c::[] else [] - | remove_matching_literal_in_contour_according_to_mult(l,c::cs) = - if L.match(l,c) then - if(L.is_kind_mult_star(c)) then c::cs else cs - else c::remove_matching_literal_in_contour_according_to_mult(l,cs) - | remove_matching_literal_in_contour_according_to_mult(l,[]) = - raise ERROR "Empty contour" - - (* This function subtracts c2 from c1, c1-c2, given that c2 is contained in c1 *) - (* PRECONDITION: c2 is contained in c1 *) - fun subtract_matching_contour(c1,c2) = LIB.sub_x_y L.match c1 c2 - - (* This function checks if every literal in c1 is matched in c2 *) - fun is_matching_contour_contained([], []) = true - | is_matching_contour_contained(c1::[], []) = false - | is_matching_contour_contained([], c2::[]) = true - | is_matching_contour_contained([], c2::c2s) = true - | is_matching_contour_contained(c1::[],c2::[]) = if L.match(c1,c2) then true else false - | is_matching_contour_contained(c1::[],c2::c2s) = if is_matching_literal_contained(c1,c2::c2s) then true else false - | is_matching_contour_contained(c1::c1s,[]) = false - | is_matching_contour_contained(c1::c1s,c2::[]) = false - | is_matching_contour_contained(c1::c1s,c2::c2s) = - if (length c1s > length c2s) then false - else if is_matching_literal_contained(c1,c2::c2s) then is_matching_contour_contained(c1s,remove_matching_literal_in_contour(c1,c2::c2s)) - else false - - (* This function checks if if c1 is generated from c2 *) - fun is_contour_generated([],[]) = true - | is_contour_generated(c1::[],[]) = false - | is_contour_generated(c1::c1s,[]) = false - | is_contour_generated([],c2::[]) = true - | is_contour_generated([],c2::c2s) = true - | is_contour_generated(c1::[],c2::[]) = if L.match(c1,c2) then true else false - | is_contour_generated(c1::[],c2::c2s) = if is_matching_literal_contained(c1,c2::c2s) then true else false - | is_contour_generated(c1::c1s,c2::[]) = - if L.is_kind_mult_star(c2) - then if L.match(c1,c2) then is_contour_generated(c1s,c2::[]) else false - else false - | is_contour_generated(c1::c1s,c2::c2s) = - if is_matching_literal_contained(c1,c2::c2s) - then is_contour_generated(c1s,remove_matching_literal_in_contour_according_to_mult(c1,c2::c2s)) - else false - - (* This function checks if c1 is strongly compatible with c2, which may be different from checking if c2 is strongly compatible with c1 *) - (* STRONG COMPATIBILITY is a 
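(* Illustrative note on the two compatibility checks below: in
   check_strong_compatibility (c1,c2), c1 is a contour of the target and
   c2 a contour stored in the tree; every multiplicity-None literal of c2
   must be matched inside c1, and the remainder of c1 must be generated by
   the Star/QM literals of c2.  check_weak_compatibility performs only the
   first of these two checks. *)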
form of MATCHING, used when pruning the tree in order to identify contours that are strongly or weakly compatible *) - fun check_strong_compatibility (c1,c2) = - let - val c2_concrete = get_contour_mult_none(c2) - val c2_abstract = get_contour_mult_star_or_qm(c2) - in - if (is_matching_contour_contained(c2_concrete,c1)) - then - let - val c1_remaining = subtract_matching_contour(c1,c2_concrete) - in - if (is_contour_generated(c1_remaining,c2_abstract)) then true else false - end - else false - end - - (* This function checks if c1 is weakly compatible with c2, which may be different from checking if c2 is weakly compatible with c1 *) - (* WEAK COMPATIBILITY is a form of MATCHING, used when pruning the tree in order to identify contours that are strongly or weakly compatible *) - fun check_weak_compatibility (c1,c2) = - let - val c2_concrete = get_contour_mult_none(c2) - in - if (is_matching_contour_contained(c2_concrete,c1)) then true else false - end - - - - (* This function prints the contour c*) - fun printout([]) = "" - | printout(l::[]) = L.printout(l) - | printout(l::ls) = L.printout(l) ^ " " ^ printout(ls) ^ " " - -end diff --git a/core/dnets/ContourList.ML b/core/dnets/ContourList.ML deleted file mode 100644 index 84978405..00000000 --- a/core/dnets/ContourList.ML +++ /dev/null @@ -1,108 +0,0 @@ -signature CONTOUR_LIST = -sig - type T - structure G : BANG_GRAPH - structure C : CONTOUR - - (* CONSTRUCTORS *) - val empty : T - val mk : G.T -> T - val mk_from : G.T -> V.name -> T - - (* COMPARISON FUNCTION *) - val equiv : T * T -> bool - - (* PRINT FUNCTION *) - val printout : T -> string -end - - -functor Contour_List (G : BANG_GRAPH) : CONTOUR_LIST = -struct - - structure G = G - structure LIB = TDNet_Library - structure C = Contour(G) - - type T = C.T list - - val empty = [] - - - - (* This function builds contour recursively given a graph g, a previous contour c and a list of remaining literals r. 
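(* Illustrative sketch only (g and v are placeholders): a contour list
   covers all node-vertices of a bang-graph with a sequence of contours,
   built either from the default starting vertex or from a chosen one,

     val cl  = mk g          (* starts from C.target_function g *)
     val cl' = mk_from g v   (* starts from the vertex named v  *)

   both of which drive build_contours below. *)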
*) - (* The new contour is added to the contour list cl *) - fun build_contours(_,_,cl,[]) = rev cl - | build_contours(_,[],cl,_) = rev cl - | build_contours(g,c,cl,r) = - let - val new_contour = C.mk g c - val new_contour = C.intersect_eq_contours(new_contour,r) - val r = C.subtract_eq_contour(r,new_contour) - in - if null(new_contour) then build_contours(g,new_contour,cl,r) - else build_contours(g,new_contour,(new_contour::cl),r) - end - - (* This function builds a contour list given a graph g *) - fun mk g = - let - val first_contour = C.target_function(g) - val contour_list = first_contour::[] - - val remaining_list = G.get_vertices g - val remaining_list = V.NSet.list_of remaining_list - val g_list = replicate (length remaining_list) g - val remaining_list = map2 C.L.mk g_list remaining_list - val remaining_list = C.subtract_eq_contour(remaining_list,first_contour) - in - build_contours (g,first_contour,contour_list,remaining_list) - end - - (* This function builds a contour list given a graph g a starting node-vertex v*) - fun mk_from g v = - let - val first_contour = C.mk_first_contour g v - val contour_list = first_contour::[] - - val remaining_list = G.get_vertices g - val remaining_list = V.NSet.list_of remaining_list - val g_list = replicate (length remaining_list) g - val remaining_list = map2 C.L.mk g_list remaining_list - val remaining_list = C.subtract_eq_contour(remaining_list,first_contour) - in - build_contours (g,first_contour,contour_list,remaining_list) - end - - - (* This function removes a contour c from a contour list cl, given that c is contained in cl *) - (* PRECONDITION: c is contained in cl *) - fun remove_equiv_contour(c,cl) = LIB.rm_element C.equiv c cl - - (* This function checks if a contour c is contained in the contour list cl*) - fun is_equiv_contour_contained(c,cl) = LIB.is_contained C.equiv c cl - - (* This function checks if two contour lists cl1 and cl2 are equivalent *) - fun is_equiv_contour_list(c1::[],c2::[]) = if C.equiv(c1,c2) then true else false - | is_equiv_contour_list(c1::cl1,c2::[]) = if C.equiv(c1,c2) then true else false - | is_equiv_contour_list(c1::cl1,c2::cl2) = - if C.equiv(c1,c2) then is_equiv_contour_list (cl1,cl2) - else if is_equiv_contour_contained(c1,cl2) then is_equiv_contour_list(cl1,c2::remove_equiv_contour(c1,cl2)) else false - | is_equiv_contour_list _ = false (* different lengths *) - - (* This function checks for equivalence between cl1 and cl2, that is if cl1 and cl2 are equivalent contour lists *) - (* TRUE iff cl1 and cl2 contains equivalent contours in any order; they can be generated by different node-vertices *) - fun equiv (cl1,cl2) = - if (length cl1 = length cl2) then is_equiv_contour_list(cl1,cl2) - else false - - - - - (* This function prints the contour list cl*) - fun printout ([]) = "" - | printout (c::[]) = C.printout(c) - | printout (c::cs) = C.printout(c) ^ " " ^ printout(cs) ^ " " - -end - diff --git a/core/dnets/DNetsLib.ML b/core/dnets/DNetsLib.ML deleted file mode 100644 index 7ae475ff..00000000 --- a/core/dnets/DNetsLib.ML +++ /dev/null @@ -1,53 +0,0 @@ -signature TDNET_LIBRARY = -sig - - (* GENERIC FUNCTIONS *) - val maps2 : ('a -> 'b -> 'b list) -> 'a list -> 'b list -> 'b list - val maps3 : ('a -> 'b -> 'c list) -> 'a -> 'b list -> 'c list - - (* LIST FUNCTIONS *) - val is_contained : ('a * 'a -> bool) -> 'a -> 'a list -> bool - val rm_duplicates : ('a * 'a -> bool) -> 'a list -> 'a list - val rm_element : ('a * 'a -> bool) -> 'a -> 'a list -> 'a list - val sub_x_y : ('a * 'a -> bool) -> 'a list 
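(* Illustrative side note (the integer examples are placeholders): these
   are generic list helpers parameterised by an equality predicate, e.g.

     is_contained (op =) 2 [1,2,3]    (* true                          *)
     sub_x_y (op =) [1,2,2,3] [2]     (* removes one occurrence of 2   *)
     rm_duplicates (op =) [1,1,2]     (* keeps the last of equal items *)

   Contour and ContourList instantiate them with L.eq, L.equiv, L.match
   and C.equiv. *)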
-> 'a list -> 'a list - val intersect_x_y : ('a * 'a -> bool) -> 'a list -> 'a list -> 'a list - val compl_x_y : ('a * 'a -> bool) -> 'a list -> 'a list -> 'a list - -end - - -structure TDNet_Library : TDNET_LIBRARY = -struct - - fun maps2 _ [] [] = [] - | maps2 f (x::xs) (y::ys) = f x y @ maps2 f xs ys - | maps2 _ _ _ = raise ListPair.UnequalLengths; - - fun maps3 _ _ [] = [] - | maps3 f x (y::ys) = f x y @ maps3 f x ys - - fun is_contained _ x [] = false - | is_contained f x (y::[]) = if f(x,y) then true else false - | is_contained f x (y::ys) = if f(x,y) then true else is_contained f x ys - - fun rm_duplicates _ [] = [] - | rm_duplicates _ (x::[]) = x::[] - | rm_duplicates f (x::xs) = if is_contained f x xs then rm_duplicates f xs else x :: (rm_duplicates f xs) - - fun rm_element _ x [] = [] - | rm_element f x (y::[]) = if f(x,y) then [] else y::[] - | rm_element f x (y::ys) = if f(x,y) then ys else y::(rm_element f x ys) - - fun sub_x_y _ x [] = x - | sub_x_y f x (y::[]) = if is_contained f y x then rm_element f y x else x - | sub_x_y f x (y::ys) = if is_contained f y x then sub_x_y f (rm_element f y x) ys else sub_x_y f x ys - - fun intersect_x_y _ x [] = [] - | intersect_x_y _ [] y = [] - | intersect_x_y f x (y::[]) = if is_contained f y x then y::[] else [] - | intersect_x_y f x (y::ys) = if is_contained f y x then y::(intersect_x_y f (rm_element f y x) ys) else intersect_x_y f x ys - - fun compl_x_y f x y = sub_x_y f x (intersect_x_y f x y) - - -end \ No newline at end of file diff --git a/core/dnets/Literal.ML b/core/dnets/Literal.ML deleted file mode 100644 index 66cba0a2..00000000 --- a/core/dnets/Literal.ML +++ /dev/null @@ -1,230 +0,0 @@ -signature LITERAL = -sig - type T - type multiplicity - structure G : BANG_GRAPH - - val boundary : G.vdata - - (* CONSTRUCTOR *) - val mk : G.T -> V.name -> T - val build : V.name * G.vdata * multiplicity * int * multiplicity * int * multiplicity -> T - - (* COMPARISON FUNCTIONS *) - val eq : T * T -> bool - val equiv : T * T -> bool - val match : T * T -> bool - - (* GETTERS *) - val get_adj : G.T -> T -> T list - val get_pred : G.T -> T -> T list - val get_succ : G.T -> T -> T list - - val get_name : T -> V.name - val get_kind : T -> G.vdata - val get_kind_mult : T -> multiplicity - val get_input_arity : T -> int - val get_input_mult : T -> multiplicity - val get_output_arity : T -> int - val get_output_mult : T -> multiplicity - - val is_boundary : T -> bool - - (* MULTIPLICITY FUNCTIONS*) - val mult_none : multiplicity - val mult_star : multiplicity - val mult_qm : multiplicity - val is_kind_mult_none : T -> bool - val is_kind_mult_star : T -> bool - val is_kind_mult_qm : T -> bool - val mult_eq : multiplicity * multiplicity -> bool - - (* PRINT FUNCTION *) - val printout : T -> string - -end - - - -functor Literal ( G : BANG_GRAPH ) : LITERAL = -struct - - structure G = G; - - datatype multiplicity = None | Star | QM - - type T = { name : V.name, - kind : G.vdata, - kind_mult : multiplicity, - input_arity : int, - input_mult : multiplicity, - output_arity : int, - output_mult : multiplicity } - - val boundary = G.WVert - - val mult_none = None - val mult_qm = QM - val mult_star = Star - - (* This function builds a literal by packaging its data in a literal type *) - fun build (n, k, k_m, i_a, i_m, o_a, o_m) = - {name = n, kind = k, kind_mult = k_m, input_arity = i_a, input_mult = i_m, output_arity = o_a, output_mult = o_m} : T - - (* This function builds a literal given a graph g and a node-vertex v *) - fun mk g v = - let - val k = 
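(* Illustrative note (the vertex name "v0" is a placeholder): a literal
   packages, for one node-vertex, its kind (vertex data), its in/out
   arities and three multiplicities; mk, whose body continues here, reads
   those off the graph, so a typical use is

     val l = mk g (V.mk "v0")
     val n = get_input_arity l

   with the getters declared in the LITERAL signature above. *)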
G.get_vertex_data g v - val k_m = None - val i_a = Arity.get_in (G.get_arity g v) - val i_m = None - val o_a = Arity.get_out (G.get_arity g v) - val o_m = None - in - build (v, k, k_m, i_a, i_m, o_a, o_m) - end - - - (* HELPER FUNCTIONS to compare literal matching *) - (* This function checks if one literal between l1 and l2 has a boundary kind *) - fun one_is_a_bondary(l1,l2) = - if (G.vdata_eq(#kind l1, boundary) orelse G.vdata_eq(#kind l2, boundary)) then true else false - - (* This functions checks if the kinds of two literals l1 and l2 match *) - fun match_kind(l1,l2) = - G.vdata_eq(#kind l1, #kind l2) orelse one_is_a_bondary(l1,l2) - - (* This functions checks if the input arity of two literals l1 and l2 match *) - fun match_input(l1,l2) = - if (#input_arity l1 = #input_arity l2) then true - else if (#input_arity l1 > #input_arity l2) andalso (#input_mult l2 = Star) then true - else if (#input_arity l1 < #input_arity l2) andalso (#input_mult l1 = Star) then true - else if one_is_a_bondary(l1,l2) - then true - else false - - (* This functions checks if the output arity of two literals l1 and l2 match *) - fun match_output(l1,l2) = - if (#output_arity l1 = #output_arity l2) then true - else if (#output_arity l1 > #output_arity l2) andalso (#output_mult l2 = Star) then true - else if (#output_arity l1 < #output_arity l2) andalso (#output_mult l1 = Star) then true - else if one_is_a_bondary(l1,l2) - then true - else false - - (* This function checks for matching between c1 and c2, that is if c1 and c2 can match each other *) - (* TRUE iff l1 and l2 are literals that can be reduced to one another; they can be generated by different node-vertices; they can differ on the multiplicity *) - (* MATCHING is used when pruning the tree in order to identify contours that are strongly or weakly compatible *) - fun match(l1,l2) = match_kind(l1,l2) andalso match_input(l1,l2) andalso match_output(l1,l2) - - - - (* This functions checks if the kinds of two literals l1 and l2 are equivalent *) - fun equiv_kind(l1,l2) = - if (G.vdata_eq(#kind l1, #kind l2)) andalso - ( (#input_mult l1 = None andalso #input_mult l2 = None) orelse - (#input_mult l1 = Star andalso #input_mult l2 = Star) orelse - (#input_mult l1 = QM andalso #input_mult l2 = QM) ) - then true - else false - - (* This functions checks if the input arity of two literals l1 and l2 are equivalent *) - fun equiv_input(l1,l2) = - if (#input_arity l1 = #input_arity l2) andalso - ( (#input_mult l1 = None andalso #input_mult l2 = None) orelse - (#input_mult l1 = Star andalso #input_mult l2 = Star) ) - then true - else false - - (* This functions checks if the output arity of two literals l1 and l2 are equivalent *) - fun equiv_output(l1,l2) = - if (#output_arity l1 = #output_arity l2) andalso - ( (#output_mult l1 = None andalso #output_mult l2 = None) orelse - (#output_mult l1 = Star andalso #output_mult l2 = Star) ) - then true - else false - - (* This function checks for equivalence between c1 and c2, that is if c1 and c2 are equivalent contours *) - (* TRUE iff l1 and l2 are literals that can be reduced to one another; they can be generated by different node-vertices *) - (* EQUIVALENCE is used when building the tree in order identify contours which are equivalent and share a node in the tree *) - fun equiv(l1,l2) = equiv_kind(l1,l2) andalso equiv_input(l1,l2) andalso equiv_output(l1,l2) - - - - (* This function checks for equality between l1 and l2, that is if l1 and l2 are the same literal *) - (* TRUE iff l1 and l2 are literals generated by the 
same node-vertex *) - (* EQUALITY is used when building contours in order to build a single literal for every node-vertex *) - fun eq(l1,l2) = V.name_eq(#name l1, #name l2) - - - - (* Getters for a literal *) - fun get_name(l) = #name l - - fun get_kind(l) = #kind l - - fun get_kind_mult(l) = #kind_mult l - - fun get_input_arity(l) = #input_arity l - - fun get_input_mult(l) = #input_mult l - - fun get_output_arity(l) = #output_arity l - - fun get_output_mult(l) = #output_mult l - - - (* This function returns the literals of all the adjacent nodes to the literal l in the graph g *) - fun get_adj g l = - let - val adj_vert = G.get_adj_vertices g (#name l) - val v_list = V.NSet.list_of adj_vert - - val g_list = replicate (length v_list) g - in - map2 mk g_list v_list - end - - (* This function returns the literals of all the predecessor nodes of the literal l in the graph g *) - fun get_pred g l = - let - val adj_vert = G.get_predecessor_vertices g (#name l) - val v_list = V.NSet.list_of adj_vert - - val g_list = replicate (length v_list) g - in - map2 mk g_list v_list - end - - (* This function returns the literals of all the successor nodes of the literal l in the graph g *) - fun get_succ g l = - let - val adj_vert = G.get_successor_vertices g (#name l) - val v_list = V.NSet.list_of adj_vert - - val g_list = replicate (length v_list) g - in - map2 mk g_list v_list - end - - (* This function checks if a literal is a boundary node-vertex *) - fun is_boundary(l) = G.vdata_eq(#kind l, boundary) - - - (* Functions to work with multiplicity *) - fun mult_eq (m1,m2) = - if ((m1=Star andalso m2=Star) orelse (m1=QM andalso m2=QM) orelse (m1=None andalso m2=None)) then true - else false - - fun is_kind_mult_none(l) = if mult_eq((#kind_mult l),None) then true else false - - fun is_kind_mult_star(l) = if mult_eq((#kind_mult l),Star) then true else false - - fun is_kind_mult_qm(l) = if mult_eq((#kind_mult l),QM) then true else false - - - - (* This function prints the literal l*) - fun printout(l) = V.string_of_name(#name l) - -end diff --git a/core/dnets/TopDNet.ML b/core/dnets/TopDNet.ML deleted file mode 100644 index 7a6f577d..00000000 --- a/core/dnets/TopDNet.ML +++ /dev/null @@ -1,416 +0,0 @@ -signature TOP_DNET = -sig - type T - type tree - structure G : BANG_GRAPH - structure CL : CONTOUR_LIST - sharing G.Sharing = CL.G.Sharing - - (* CONSTRUCTORS *) - val empty : T - val mk : G.T GraphName.NTab.T -> T - val add_cl_to_dnet : T -> CL.T -> GraphName.name -> T - val add_cl_list_to_dnet : T -> CL.T list -> GraphName.name list -> T - val add_graph : (GraphName.name * G.T) -> T -> T - - (* PRUNING FUNCTIONS *) - val extended_pruning : T -> G.T -> (V.name * GraphName.name list) list - val standard_pruning : T -> G.T -> (V.name * GraphName.name list) list - val extended_prune : V.name -> G.T -> T -> T - val standard_prune : V.name -> G.T -> T -> T - val graphs : T -> GraphName.name list - val get_match_candidates : T -> G.T -> GraphName.NSet.T - - (* FOLD FUNCTION *) - val fold : (('a * 'b) -> G.T GraphName.NTab.T -> G.T GraphName.NTab.T) -> 'a list -> 'b list -> G.T GraphName.NTab.T -> G.T GraphName.NTab.T - - (* GETTERS *) - val is_node : tree -> bool - val get_contour : tree -> CL.C.T - val get_children : tree -> tree list - val get_graph : tree -> GraphName.name - - (* COMPARISON FUNCTION *) - val is_eq_graphs : GraphName.name list * GraphName.name list -> bool - - (* PRINT FUNCTION*) - val printout : tree list -> string - -end - - -functor Top_DNet ( G: BANG_GRAPH ) : TOP_DNET = - -struct - - 
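(* Illustrative sketch only (pattern_tab and target are placeholders):
   following the TOP_DNET signature above, a discrimination net is built
   once from a table of named pattern graphs and then pruned against each
   target graph, roughly

     val dnet       = mk pattern_tab                     (* G.T GraphName.NTab.T *)
     val candidates = get_match_candidates dnet target   (* GraphName.NSet.T     *)

   so the full matcher only has to be run on the surviving candidates. *)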
structure G = G - structure CL = Contour_List(G) - structure C = CL.C - - datatype tree = Node of {contour : C.T, children : tree list} - | Leaf of {graph : GraphName.name} - type T = tree - - val empty = Node{contour=C.empty, children=[]} - - (* This function checks if a tree node is a Node or a Leaf *) - fun is_node (Node _) = true - | is_node (Leaf _) = false - - (* Getters for a tree *) - fun get_contour(Node{contour=contour, ...}) = contour - | get_contour _ = raise ERROR "Not a contour" - - fun get_children(Node{children=children, ...}) = children - | get_children _ = raise ERROR "Not a contour" - - fun get_graph(Leaf{graph=graph}) = graph - | get_graph _ = raise ERROR "Not a graph" - - - - - (* This function checks is a contour c is contained in a list of Node or Leaf *) - fun is_equiv_contained(_,[]) = false - | is_equiv_contained(c,Node{contour=contour, children=_}::[]) = if C.equiv(contour,c) then true else false - | is_equiv_contained(_,Leaf{graph=_}::[]) = false - | is_equiv_contained(c,Node{contour=contour, children=_}::cl) = if C.equiv(contour,c) then true else is_equiv_contained(c,cl) - | is_equiv_contained(c,Leaf{graph=_}::cl) = is_equiv_contained(c,cl) - - (* This function checks if a contour c is equivalent with the contour stored in a Node *) - fun equiv(Node{contour=contour, children=_}, c) = - if C.equiv(contour,c) then true else false - | equiv _ = false - - (* This function retrieves a node x from a contour list cl, given that x is contained in cl*) - (* PRECONDITION: x is contained in cl *) - fun get_equiv_node(_,c::[]) = c - | get_equiv_node(x,c::cl) = - if is_node(c) - then if equiv(c,x) then c else get_equiv_node(x,cl) - else get_equiv_node(x,cl) - | get_equiv_node(_,[]) = raise ERROR "empty contour list" - - (* This function retrieves a list of literals not equiv to x from a contour list cl, given that x is contained in cl*) - (* PRECONDITION: x is contained in cl *) - fun get_equiv_remaining(_,_::[]) = [] - | get_equiv_remaining(x,c::cl) = - if is_node(c) - then if equiv(c,x) then cl else c::get_equiv_remaining(x,cl) - else c::get_equiv_remaining(x,cl) - | get_equiv_remaining(_,[]) = raise ERROR "empty contour list" - - - - (* This function add a contour list cl associated with the graph named gn to the tree *) - fun add_cl_to_dnet (Node{contour=contour, children=children}) (c::[]) gn = - if is_equiv_contained(c,children) - then - let - val n = get_equiv_node(c,children) - val r = get_equiv_remaining(c,children) - val new_leaf = Leaf{graph=gn} - val new_node = Node{contour=get_contour(n),children=new_leaf::get_children(n)} - in - Node{contour=contour, children=new_node::r} - end - else - let - val new_leaf = Leaf{graph=gn} - val new_node = Node{contour=c,children=new_leaf::[]} - in - Node{contour=contour, children=new_node::children} - end - | add_cl_to_dnet (Node{contour=contour, children=children}) (c::cs) gn = - if is_equiv_contained(c,children) - then - let - val n = get_equiv_node(c,children) - val r = get_equiv_remaining(c,children) - in - Node{contour=contour, children=(add_cl_to_dnet n cs gn)::r} - end - else - let - val n = Node{contour=c, children =[]} - in - Node{contour=contour, children = (add_cl_to_dnet n cs gn)::children} - end - | add_cl_to_dnet _ [] _ = raise ERROR "Empty contour list" - | add_cl_to_dnet _ _ _ = raise ERROR "Not a contour" - - (* This function add a list of contour lists cls together with the list of graph named gns to the tree *) - fun add_cl_list_to_dnet d (cl::[]) (gn::[]) = add_cl_to_dnet d cl gn - | add_cl_list_to_dnet d 
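(* Illustrative note: add_cl_to_dnet above walks a contour list level by
   level; whenever the current contour is C.equiv to the contour of an
   existing child, the two patterns share that tree node, otherwise a new
   branch is opened, and the pattern's graph name ends up in a Leaf at the
   end of its list.  add_cl_list_to_dnet, continued here, repeats this for
   a list of contour lists paired with graph names. *)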
(cl::cll) (gn::gns) = add_cl_list_to_dnet (add_cl_to_dnet d cl gn) cll gns - | add_cl_list_to_dnet _ _ _ = raise ERROR "Contour and graph list lengths differ" - - fun add_graph (n,g) dnet = add_cl_to_dnet dnet (CL.mk g) n - - (* This function builds a tree given a set of pattern graphs tab *) - fun mk(tab) = - let - val g_list = GraphName.NTab.values tab - val g_names = GraphName.NTab.keys tab - val cl_list = map CL.mk g_list - val e = empty - in - add_cl_list_to_dnet e cl_list g_names - end - - - - (* This function recursively prunes the tree t using weak compatibility*) - fun weak_prune_trees([],[]) = [] - | weak_prune_trees(t::[],[]) = t::[] - | weak_prune_trees([],_::[]) = [] - | weak_prune_trees(t::[],c::[]) = - if (is_node(t)) then - if (C.check_weak_compatibility(c,get_contour(t))) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),[])} :: [] - else [] - else t :: [] - | weak_prune_trees([],_::_) = [] - | weak_prune_trees(t::ts,[]) = - if (is_node(t)) then weak_prune_trees(ts,[]) - else t :: weak_prune_trees(ts,[]) - | weak_prune_trees(t::[],c::cs) = - if (is_node(t)) then - if (C.check_weak_compatibility(c,get_contour(t))) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),cs)} :: [] - else [] - else t :: [] - | weak_prune_trees(t::ts,c::[]) = - if (is_node(t)) then - if (C.check_weak_compatibility(c,get_contour(t))) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),[])} :: weak_prune_trees(ts,c::[]) - else weak_prune_trees(ts,c::[]) - else t :: weak_prune_trees(ts,c::[]) - | weak_prune_trees(t::ts,c::cs) = - if (is_node(t)) then - if (C.check_weak_compatibility(c,get_contour(t))) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),cs)} :: weak_prune_trees(ts,c::cs) - else weak_prune_trees(ts,c::cs) - else t :: weak_prune_trees(ts,c::cs) - - - (* This function recursively prunes the tree t using strong compatibility*) - fun extended_strong_prune_trees([],[]) = [] - | extended_strong_prune_trees(t::[],[]) = t::[] - | extended_strong_prune_trees([],_::[]) = [] - | extended_strong_prune_trees(t::[],c::[]) = - if (is_node(t)) - then - let - val is_weak = C.contains_boundary(get_contour(t)) - val is_saved = if (is_weak) then C.check_weak_compatibility(c,get_contour(t)) else C.check_strong_compatibility(c,get_contour(t)) - in - if (is_saved) then - if (is_weak) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),[])} :: [] - else Node{contour=get_contour(t),children=extended_strong_prune_trees(get_children(t),[])} :: [] - else [] - end - else - t :: [] - | extended_strong_prune_trees([],_::_) = [] - | extended_strong_prune_trees(t::ts,[]) = - if (is_node(t)) then extended_strong_prune_trees(ts,[]) - else t :: extended_strong_prune_trees(ts,[]) - | extended_strong_prune_trees(t::[],c::cs) = - if (is_node(t)) - then - let - val is_weak = C.contains_boundary(get_contour(t)) - val is_saved = if (is_weak) then C.check_weak_compatibility(c,get_contour(t)) else C.check_strong_compatibility(c,get_contour(t)) - in - if (is_saved) then - if (is_weak) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),cs)} :: [] - else Node{contour=get_contour(t),children=extended_strong_prune_trees(get_children(t),cs)} :: [] - else [] - end - else - t :: [] - | extended_strong_prune_trees(t::ts,c::[]) = - if (is_node(t)) - then - let - val is_weak = C.contains_boundary(get_contour(t)) - val is_saved = if (is_weak) then C.check_weak_compatibility(c,get_contour(t)) else 
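(* Illustrative note: in the extended pruning the test is chosen per tree
   node -- a stored contour containing a boundary vertex only has to be
   weakly compatible with the target contour, all others must be strongly
   compatible, i.e. conceptually

     if C.contains_boundary tree_c
     then C.check_weak_compatibility (target_c, tree_c)
     else C.check_strong_compatibility (target_c, tree_c)

   (tree_c and target_c are placeholders), which is the is_weak / is_saved
   pattern repeated in every clause of extended_strong_prune_trees. *)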
C.check_strong_compatibility(c,get_contour(t)) - in - if (is_saved) then - if (is_weak) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),[])} :: extended_strong_prune_trees(ts,c::[]) - else Node{contour=get_contour(t),children=extended_strong_prune_trees(get_children(t),[])} :: extended_strong_prune_trees(ts,c::[]) - else extended_strong_prune_trees(ts,c::[]) - end - else - t :: extended_strong_prune_trees(ts,c::[]) - | extended_strong_prune_trees(t::ts,c::cs) = - if (is_node(t)) - then - let - val is_weak = C.contains_boundary(get_contour(t)) - val is_saved = if (is_weak) then C.check_weak_compatibility(c,get_contour(t)) else C.check_strong_compatibility(c,get_contour(t)) - in - if (is_saved) then - if (is_weak) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),cs)} :: extended_strong_prune_trees(ts,c::cs) - else Node{contour=get_contour(t),children=extended_strong_prune_trees(get_children(t),cs)} :: extended_strong_prune_trees(ts,c::cs) - else extended_strong_prune_trees(ts,c::cs) - end - else - t :: extended_strong_prune_trees(ts,c::cs) - - - (* This function perform the first step of the extended pruning of the tree t at the root *) - fun extended_prune_tree(t,[]) = t - | extended_prune_tree(t,c::[]) = - if (is_node(t)) - then - let - val children = get_children(t) - in - Node{contour=get_contour(t),children = extended_strong_prune_trees(children,c::[])} - end - else t - | extended_prune_tree(t,c::cs) = - if (is_node(t)) - then - let - val children = get_children(t) - in - Node{contour = get_contour(t), children = extended_strong_prune_trees(children,c::cs)} - end - else t - - (* This function perform the first step of the standard pruning of the tree t at the root *) - fun standard_prune_tree(t,[]) = t - | standard_prune_tree(t,c::[]) = - if (is_node(t)) - then - let - val children = get_children(t) - in - Node{contour=get_contour(t),children = weak_prune_trees(children,c::[])} - end - else t - | standard_prune_tree(t,c::cs) = - if (is_node(t)) - then - let - val children = get_children(t) - in - Node{contour = get_contour(t), children = weak_prune_trees(children,c::cs)} - end - else t - - (* This function use the extended algorithm to prune a tree given a tree t, a graph g and a starting node-vertex v to build the contour list of g*) - fun extended_prune v g t = - let - val target_contour_list = CL.mk_from g v - in - extended_prune_tree(t,target_contour_list) - end - - (* This function use the standard algorithm to prune a tree given a tree t, a graph g and a starting node-vertex v to build the contour list of g*) - fun standard_prune v g t = - let - val target_contour_list = CL.mk_from g v - in - standard_prune_tree(t,target_contour_list) - end - - (* This function traverses the tree t to get to the leaves *) - fun explore_branches([]) = [] - | explore_branches(t::[]) = - if (is_node(t)) then explore_branches(get_children(t)) - else get_graph(t) :: [] - | explore_branches(t::ts) = - if (is_node(t)) then explore_branches(get_children(t)) @ explore_branches(ts) - else get_graph(t) :: explore_branches(ts) - - (* This function returns the list of the names of all the graphs encoded in the tree t *) - fun graphs t = - if (is_node(t)) then explore_branches(get_children(t)) - else get_graph(t)::[] - - (* This function returns the list of the names of all the graphs encoded in a list of discrimination trees t::ts *) - fun collect_graphs [] = [] - | collect_graphs (t::[]) = (graphs t) :: [] - | collect_graphs (t::ts) = (graphs t) :: 
(collect_graphs ts) - - (* This function receives a discrimination tree and executes the extended pruning algorithm using all possible contour list built from a target and a starting node-vertex n *) - fun do_extended_pruning _ _ [] = [] - | do_extended_pruning tree target (n::[]) = (extended_prune n target tree) :: [] - | do_extended_pruning tree target (n::ns) = (extended_prune n target tree) :: (do_extended_pruning tree target ns) - - (* This function receives a discrimination tree and executes the standard pruning algorithm using all possible contour list built from a target and a starting node-vertex n *) - fun do_standard_pruning _ _ [] = [] - | do_standard_pruning tree target (n::[]) = (standard_prune n target tree) :: [] - | do_standard_pruning tree target (n::ns) = (standard_prune n target tree) :: (do_standard_pruning tree target ns) - - (* This function recevives a discrimination tree and a target graph and starts the extended pruning algorithm *) - fun extended_pruning tree target = - let - val nodelist = G.get_unbboxed target - val nodelist = V.NSet.list_of nodelist - val treelist = do_extended_pruning tree target (rev nodelist) - val graphslist = collect_graphs (rev treelist) - in - nodelist ~~ graphslist - end - - (* This function recevives a discrimination tree and a target graph and starts the standard pruning algorithm *) - fun standard_pruning tree target = - let - val nodelist = G.get_unbboxed target - val nodelist = V.NSet.list_of nodelist - val treelist = do_standard_pruning tree target (rev nodelist) - val graphslist = collect_graphs (rev treelist) - in - nodelist ~~ graphslist - end - - fun get_match_candidates dnet target = - fold GraphName.NSet.add (flat (map snd (extended_pruning dnet target))) GraphName.NSet.empty - - (* This function checks if two graphs g1 and g2 are equal *) - (* TRUE iff the graphs have the same name *) - fun graphname_eq(g1,g2) = if (GraphName.string_of_name(g1) = GraphName.string_of_name(g2)) then true else false - - (* This function removes the graph g from the list of graphs gl, given that g is contained in gl*) - (* PRECONDITION: g is contained in gl*) - fun remove_eq_graph(_,_::[]) = [] - | remove_eq_graph(g,gl::gls) = if graphname_eq(g,gl) then gls else gl::remove_eq_graph(g,gls) - | remove_eq_graph(_,[]) = raise ERROR "Empty graph list" - - (* This function checks if a graph g is contained in the graph list gl*) - fun find_eq_graph(_,[]) = false - | find_eq_graph(g,gl::[]) = if graphname_eq(g,gl) then true else false - | find_eq_graph(g,gl::gls) = if graphname_eq(g,gl) then true else find_eq_graph(g,gls) - - (* This function checks if two graph lists g1s and g2s are equal *) - fun is_eq_graphs([],[]) = true - | is_eq_graphs([],_::[]) = false - | is_eq_graphs(_::[],[]) = false - | is_eq_graphs([],_::_) = false - | is_eq_graphs(_::_,[]) = false - | is_eq_graphs(g1::[],g2::[]) = if graphname_eq(g1,g2) then true else false - | is_eq_graphs(_::_,_::[]) = false - | is_eq_graphs(_::[],_::_) = false - | is_eq_graphs(g1::g1s,g2::g2s) = - if graphname_eq(g1,g2) then is_eq_graphs (g1s,g2s) - else if find_eq_graph(g1,g2s) then is_eq_graphs(g1s,g2::remove_eq_graph(g1,g2s)) else false - - - - (* Fold function *) - fun fold _ [] [] tab = tab - | fold f (gn::gns) (g::gs) tab = f (gn,g) (fold f gns gs tab) - | fold _ _ _ _ = raise ListPair.UnequalLengths; - - - - (* This function prints the tree t*) - fun printout ([]) = "" - | printout (t::[]) = - if (is_node(t)) then C.printout(get_contour(t)) ^ " " ^ printout(get_children(t)) - else 
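(* Illustrative note (dnet and target are placeholders): extended_pruning
   and standard_pruning above return, for every unbboxed vertex of the
   target, the names of the pattern graphs whose leaves survive pruning
   when the target's contour list is rooted at that vertex,

     val per_vertex : (V.name * GraphName.name list) list =
           extended_pruning dnet target

   and get_match_candidates folds all those names into one
   GraphName.NSet.T. *)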
GraphName.string_of_name(get_graph(t)) - | printout (t::ts) = - if (is_node(t)) then C.printout(get_contour(t)) ^ " " ^ printout(ts) ^ " " ^ printout(get_children(t)) - else GraphName.string_of_name(get_graph(t)) ^ " " ^ printout(ts) - -end diff --git a/core/dnets/perf.ML b/core/dnets/perf.ML deleted file mode 100644 index 00d7926f..00000000 --- a/core/dnets/perf.ML +++ /dev/null @@ -1,420 +0,0 @@ -local - structure Tools = Test_Bang_Graph_Tools(Test_Bang_Graph); - open Tools; - - - structure LIB = TDNet_Library - structure TD = Top_DNet(G) - structure CL = TD.CL - structure C = CL.C - structure L = C.L - - fun one_of xs = - let val max = length xs in - nth xs (random_range 0 (max-1)) - end; - - (*val timestamp = Time.toString (Time.now()) - val out = TextIO.openOut ("output"^timestamp^".txt")*) - - val data_no_alg = TextIO.openAppend ("data_no_alg.txt") - val data_std_alg = TextIO.openAppend ("data_std_alg.txt") - val data_ext_alg = TextIO.openAppend ("data_ext_alg.txt") - - val k1 = (vexpr1 "0") - val k2 = (vexpr1 "1") - val k3 = (vexpr1 "a") - - val iterations = 100 - val pattern_params = {kinds = k1 :: k2 :: k3 :: [], - - num_graphs = 100, - num_nodes = 4, - num_edges = 4, - num_bbox = 3, - max_num_nodes_per_bbox = 1, - num_bound = 1 } - - val target_params = {kinds = k1 :: k2 :: k3 :: [], - - num_graphs = 1, - num_nodes = 30, - num_edges = 30, - num_bbox = 0, - max_num_nodes_per_bbox = 0, - num_bound = 0 } - - - - - fun printout out msg = - TextIO.outputSubstr(out, Substring.substring (msg,0,size msg)) - - fun savedata i no std ext = - let - val no_data = "("^i^","^no^")\n" - val std_data = "("^i^","^std^")\n" - val ext_data = "("^i^","^ext^")\n" - val _ = TextIO.outputSubstr(data_no_alg, Substring.substring(no_data,0,size no_data)) - val _ = TextIO.outputSubstr(data_std_alg, Substring.substring(std_data,0,size std_data)) - val _ = TextIO.outputSubstr(data_ext_alg, Substring.substring(ext_data,0,size ext_data)) - in - 1 - end - - - fun generate_random_nodes n = - if (n = 1) then V.mk ("v" ^ string_of_int(n)) :: [] - else V.mk ("v" ^ string_of_int(n)) :: generate_random_nodes (n-1) - - fun add_nodes g [] k = g - | add_nodes g (n::[]) k = g |> G.add_named_vertex n (one_of k) - | add_nodes g (n::ns) k = add_nodes (g |> G.add_named_vertex n (one_of k)) ns k - - fun generate_random_edges e = - if (e = 1) then E.mk ("e" ^ string_of_int(e)) :: [] - else E.mk ("e" ^ string_of_int(e)) :: generate_random_edges (e-1) - - fun add_edges g n [] = g - | add_edges g n (e::[]) = - let - val node1 = one_of n - val n_minus = LIB.rm_element (V.name_eq) node1 n - val node2 = one_of n_minus - in - g |> G.add_named_edge e (Directed,eunit1) node1 node2 - end - | add_edges g n (e::es) = - let - val node1 = one_of n - val n_minus = LIB.rm_element (V.name_eq) node1 n - val node2 = one_of n_minus - in - add_edges (g |> G.add_named_edge e (Directed,eunit1) node1 node2) n es - end - - - fun get_adj g n = V.NSet.list_of (G.get_adj_vertices g n) - - fun get_connected_nodes g [] num_nodes bb_nodes = bb_nodes - | get_connected_nodes g (n::[]) num_nodes bb_nodes = - if (num_nodes=1) then n::bb_nodes - else - let - val nodelist = LIB.maps3 get_adj g (n::bb_nodes) - val nodelist = LIB.rm_duplicates (V.name_eq) nodelist - val nodelist = LIB.sub_x_y (V.name_eq) nodelist bb_nodes - in - get_connected_nodes g nodelist (num_nodes-1) (n::bb_nodes) - end - | get_connected_nodes g (n::ns) num_nodes bb_nodes = - if (num_nodes=1) then (one_of (n::ns)) :: bb_nodes - else - let - val node = one_of(n::ns) - val nodelist = LIB.maps3 
get_adj g (n::bb_nodes) - val nodelist = LIB.rm_duplicates V.name_eq nodelist - val nodelist = LIB.sub_x_y (V.name_eq) nodelist bb_nodes - in - get_connected_nodes g nodelist (num_nodes-1) (node::bb_nodes) - end - - fun add_bbox g bb [] = g - | add_bbox g bb (n::[]) = g |> G.add_to_bbox_anon bb (V.NSet.single n) - | add_bbox g bb (n::ns) = add_bbox (g |> G.add_to_bbox_anon bb (V.NSet.single n)) bb ns - - fun add_bboxes g n b npbb = - if (b=1) then - let - val num_nodes = random_range 1 npbb - val bb_nodes : V.name list = [] - val nodes = get_connected_nodes g n num_nodes bb_nodes - val (bb,g) = g |> G.add_bbox - in - add_bbox g bb nodes - end - else - let - val num_nodes = random_range 1 npbb - val bb_nodes : V.name list = [] - val nodes = get_connected_nodes g n num_nodes bb_nodes - val n = LIB.sub_x_y (V.name_eq) n nodes - val (bb,g) = g |> G.add_bbox - in - add_bboxes (add_bbox g bb nodes) n (b-1) npbb - end - - fun add_boundaries i g nodes = - let - val b = L.boundary - val bound = V.mk ("b" ^ string_of_int(i)) - val node = one_of nodes - val g = g |> G.add_named_vertex bound b - val edge = E.mk("b_e" ^ string_of_int(i)) - val dice = random_range 1 2 - val g = if (dice=1) then g |> G.add_named_edge edge (Directed,eunit1) node bound - else g |> G.add_named_edge edge (Directed,eunit1) bound node - in - if (i=1) then g else add_boundaries (i-1) g nodes - end - - - - fun instantiate_graph params = - let - val graph = G.empty - - val num_nodes = #num_nodes params - (*val _ = printout out ("Instantiating " ^ string_of_int(num_nodes) ^ " nodes..\n")*) - val nodes = generate_random_nodes num_nodes - - val kinds = #kinds params - (*val _ = printout out ("Adding the nodes to the graph selecting 1 out of "^ string_of_int(length kinds) ^" possible kinds..\n")*) - val graph = add_nodes graph nodes kinds - - val num_edges = #num_edges params - (*val _ = printout out ("Instantiating " ^ string_of_int(num_edges) ^ " edges..\n")*) - val edges = if (num_edges > 0) then generate_random_edges num_edges else [] - - (*val _ = printout out ("Wiring nodes through edges..\n")*) - val graph = if (num_edges > 0) then add_edges graph nodes edges else graph - - val num_bbox = #num_bbox params - val max_num_nodes_per_bbox = #max_num_nodes_per_bbox params - (*val _ = printout out ("Adding " ^ string_of_int(num_bbox) ^ " bbox each containing at most " ^ string_of_int(max_num_nodes_per_bbox) ^ "..\n")*) - val graph = if (num_bbox > 0) then add_bboxes graph nodes num_bbox max_num_nodes_per_bbox else graph - - val num_bound = #num_bound params - (*val _ = printout out ("Adding "^ string_of_int(num_bound) ^" boundaries..\n\n")*) - val graph = if (num_bound > 0) then add_boundaries num_bound graph nodes else graph - in - graph - end - - - fun iterate_instantiate_graph i params = - if (i=1) then (instantiate_graph params) :: [] - else (instantiate_graph params) :: (iterate_instantiate_graph (i-1) params) - - fun iterate_instantiate_graph_name i = - if (i=1) then (GraphName.mk ("g"^string_of_int(i))) :: [] - else (GraphName.mk ("g"^string_of_int(i))) :: (iterate_instantiate_graph_name (i-1)) - - - fun enumerate_graphs [] = [] - | enumerate_graphs (g::[]) = length (snd g) :: [] - | enumerate_graphs (g::gs) = length (snd g) :: (enumerate_graphs gs) - - fun sum [] = 0 - | sum (n::[]) = n - | sum (n::ns) = n + sum ns - - fun get_pretty [] = "" - | get_pretty (g::[]) = (Pretty.str_of(G.pretty g)) - | get_pretty (g::gs) = (Pretty.str_of(G.pretty g)) ^ " \n " ^ (get_pretty gs) - - - - - fun stats pattern_params target_params = - 
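(* Runs one performance trial: builds the random pattern graphs, indexes them in a
   discrimination tree via TD.mk, generates a single random target graph, then prunes
   the tree with both TD.extended_pruning and TD.standard_pruning.  Returns the pair
   (extended_count, standard_count) -- the number of matcher invocations still needed
   after each kind of pruning -- to be compared with the brute-force bound
   (#num_graphs pattern_params) * (#num_nodes target_params). *)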
let - - (*val _ = printout out ("Generating " ^ string_of_int(#num_graphs pattern_params) ^ " graphs..\n\n")*) - val pattern_graphs = iterate_instantiate_graph (#num_graphs pattern_params) pattern_params - - (*val _ = printout out ("Generating " ^ string_of_int(#num_graphs pattern_params) ^ " graph names..\n\n")*) - val pattern_gnames = iterate_instantiate_graph_name (#num_graphs pattern_params) - - (*val pretty_graphs = get_pretty graphs*) - (*val _ = printout out pretty_graphs*) - - val tab = GraphName.NTab.empty - val tab = TD.fold GraphName.NTab.doadd pattern_gnames pattern_graphs tab - (*val _ = printout out ("\n\nBuilding the discrimination tree..\n\n")*) - val tree = TD.mk tab - (*val _ = printout out (TD.printout (tree::[]))*) - - (*val _ = printout out ("Generating a target graph..\n\n")*) - val target_graphs = iterate_instantiate_graph (#num_graphs target_params) target_params - val target_graph = nth target_graphs 0 - - (*val _ = printout out ("Pruning the tree using the target graph..\n\n")*) - val ext_graph_by_vertex = TD.extended_pruning tree target_graph - val std_graph_by_vertex = TD.standard_pruning tree target_graph - - (*val _ = printout out ("Evaluating performances..\n")*) - val ext_matchings = enumerate_graphs ext_graph_by_vertex - val std_matchings = enumerate_graphs std_graph_by_vertex - val ext_num_matchings = sum ext_matchings - val std_num_matchings = sum std_matchings - (*val _ = printout out ("Number of times to exec matching alg WITHOUT dnets: " ^ string_of_int((#num_graphs pattern_params)*(#num_nodes target_params)) ^ "\n") - val _ = printout out ("Number of times to exec matching alg WITH dnets: " ^ string_of_int(num_matchings) ^ "\n\n")*) - in - (ext_num_matchings,std_num_matchings) - end - - fun iterate_stats i pattern_params target_params = - if (i = 1) then (stats pattern_params target_params) :: [] - else (stats pattern_params target_params) :: (iterate_stats (i-1) pattern_params target_params) - - - - fun compute_num_svr [] m = 0.0 - | compute_num_svr (x::[]) m = (x-m) * (x-m) - | compute_num_svr (x::xs) m = ((x-m) * (x-m)) + (compute_num_svr xs m) - - fun toReal [] = [] - | toReal (x::[]) = Real.fromInt(x)::[] - | toReal (x::xs) = Real.fromInt(x)::(toReal xs) - - (*fun stat_test_ext_alg i pattern_params target_params = - let - val matchings = iterate_stats_ext_alg i pattern_params target_params - - val matchings_without_alg = (#num_graphs pattern_params)*(#num_nodes target_params) - - val num_matchings = sum matchings - val avg_matchings = Real.fromInt(num_matchings) / Real.fromInt(i) - - val num_svr = compute_num_svr (toReal matchings) avg_matchings - val svr_matchings = num_svr / Real.fromInt(i) - - val _ = printout out ("\n\n\n\n") - - val _ = printout out ("***************************************************************************\n") - - val _ = printout out ("I executed " ^ string_of_int(i) ^ " iterations with these parameters: \n\n") - - val _ = printout out ("PATTERNS: \n") - val _ = printout out ("Graphs: " ^ string_of_int(#num_graphs pattern_params)^ "\n") - val _ = printout out ("Nodes: " ^ string_of_int(#num_nodes pattern_params)^ "\n") - val _ = printout out ("Edges: " ^ string_of_int(#num_edges pattern_params)^ "\n") - val _ = printout out ("Kinds: " ^ string_of_int(length (#kinds pattern_params)) ^ "\n") - val _ = printout out ("BBoxes: " ^ string_of_int(#num_bbox pattern_params)^ "\n") - val _ = printout out ("Max nodes per bbox: " ^ string_of_int(#max_num_nodes_per_bbox pattern_params)^ "\n") - val _ = printout out ("Boundaries: " ^ 
string_of_int(#num_bound pattern_params)^ "\n") - - val _ = printout out ("\n") - - val _ = printout out ("TARGETS: \n") - val _ = printout out ("Graphs: " ^ string_of_int(#num_graphs target_params)^ "\n") - val _ = printout out ("Nodes: " ^ string_of_int(#num_nodes target_params)^ "\n") - val _ = printout out ("Edges: " ^ string_of_int(#num_edges target_params)^ "\n") - val _ = printout out ("Kinds: " ^ string_of_int(length (#kinds target_params)) ^ "\n") - val _ = printout out ("BBoxes: " ^ string_of_int(#num_bbox target_params)^ "\n") - val _ = printout out ("Max nodes per bbox: " ^ string_of_int(#max_num_nodes_per_bbox target_params)^ "\n") - val _ = printout out ("Boundaries: " ^ string_of_int(#num_bound target_params)^ "\n") - - val _ = printout out ("\n\n\n") - - val _ = printout out ("STAT RESULTS: \n") - val _ = printout out ("Normally I would need to run: " ^ string_of_int(matchings_without_alg)^ " iterations of the matching alg.\n") - val _ = printout out ("Now I need to run only: " ^ Real.toString(avg_matchings)^ "(avg) " ^ Real.toString(svr_matchings) ^ "(var) iterations of the matching alg.\n") - - val _ = printout out ("***************************************************************************\n") - in - 1 - end*) - - - fun stat_test i pattern_params target_params simul_number = - let - val timestamp = Time.toString (Time.now()) - val out = TextIO.openOut ("output"^timestamp^".txt") - - val matchings = iterate_stats i pattern_params target_params - - val ext_matchings = fst (split_list matchings) - val std_matchings = snd (split_list matchings) - - val matchings_without_alg = (#num_graphs pattern_params)*(#num_nodes target_params) - - val ext_num_matchings = sum ext_matchings - val ext_avg_matchings = Real.fromInt(ext_num_matchings) / Real.fromInt(i) - val ext_num_sample_var = compute_num_svr (toReal ext_matchings) ext_avg_matchings - val ext_sample_var_matchings = ext_num_sample_var / Real.fromInt(i) - - val std_num_matchings = sum std_matchings - val std_avg_matchings = Real.fromInt(std_num_matchings) / Real.fromInt(i) - val std_num_sample_var = compute_num_svr (toReal ext_matchings) std_avg_matchings - val std_sample_var_matchings = std_num_sample_var / Real.fromInt(i) - - val _ = printout out ("\n\n\n\n") - - val _ = printout out ("************************************************************************************\n") - - val _ = printout out ("EXTENDED vs STANDARD vs NO-ALGORITHM!!\n\n") - - val _ = printout out ("I executed " ^ string_of_int(i) ^ " iterations with these parameters: \n\n") - - val _ = printout out ("PATTERNS: \n") - val _ = printout out ("Graphs: " ^ string_of_int(#num_graphs pattern_params)^ "\n") - val _ = printout out ("Nodes: " ^ string_of_int(#num_nodes pattern_params)^ "\n") - val _ = printout out ("Edges: " ^ string_of_int(#num_edges pattern_params)^ "\n") - val _ = printout out ("Kinds: " ^ string_of_int(length (#kinds pattern_params)) ^ "\n") - val _ = printout out ("BBoxes: " ^ string_of_int(#num_bbox pattern_params)^ "\n") - val _ = printout out ("Max nodes per bbox: " ^ string_of_int(#max_num_nodes_per_bbox pattern_params)^ "\n") - val _ = printout out ("Boundaries: " ^ string_of_int(#num_bound pattern_params)^ "\n") - - val _ = printout out ("\n") - - val _ = printout out ("TARGETS: \n") - val _ = printout out ("Graphs: " ^ string_of_int(#num_graphs target_params)^ "\n") - val _ = printout out ("Nodes: " ^ string_of_int(#num_nodes target_params)^ "\n") - val _ = printout out ("Edges: " ^ string_of_int(#num_edges target_params)^ "\n") - 
val _ = printout out ("Kinds: " ^ string_of_int(length (#kinds target_params)) ^ "\n") - val _ = printout out ("BBoxes: " ^ string_of_int(#num_bbox target_params)^ "\n") - val _ = printout out ("Max nodes per bbox: " ^ string_of_int(#max_num_nodes_per_bbox target_params)^ "\n") - val _ = printout out ("Boundaries: " ^ string_of_int(#num_bound target_params)^ "\n") - - val _ = printout out ("\n\n\n") - - val _ = printout out ("STAT RESULTS: \n") - val _ = printout out ("Normally I would need to run: " ^ string_of_int(matchings_without_alg)^ " iterations of the matching alg.\n") - val _ = printout out ("With the standard algorithm I would need to run: " ^ Real.toString(std_avg_matchings)^ "(avg) " ^ Real.toString(std_sample_var_matchings) ^ "(var) iterations of the matching alg.\n") - val _ = printout out ("Now with the extended algorithm I need to run only: " ^ Real.toString(ext_avg_matchings)^ "(avg) " ^ Real.toString(ext_sample_var_matchings) ^ "(var) iterations of the matching alg.\n") - - val _ = printout out ("************************************************************************************\n") - - val _ = TextIO.closeOut out - - val _ = savedata (string_of_int(simul_number)) (string_of_int(matchings_without_alg)) (Real.toString(std_avg_matchings)) (Real.toString(ext_avg_matchings)) - in - 1 - end - - - - - - fun simul i v = - let - val pattern_params = {kinds = k1 :: k2 :: k3 :: [], - num_graphs = 100, - num_nodes = 7, - num_edges = 7, - num_bbox = v, - max_num_nodes_per_bbox = 1, - num_bound = 0 } - in - stat_test iterations pattern_params target_params i - end - - - fun iterate_simul [] [] = 0 - | iterate_simul (i::[]) (v::[]) = (simul i v) + (iterate_simul [] []) - | iterate_simul (i::is) (v::vs) = (simul i v) + (iterate_simul is vs) - | iterate_simul _ _ = raise ERROR "unequal length lists" - - - - val indexes = 0 :: 1 :: 2 :: 3 :: 4 :: 5 :: 6 :: [] - val variables = 0 :: 1 :: 2 :: 3 :: 4 :: 5 :: 6 ::[] - val signal = iterate_simul indexes variables - - - val _ = TextIO.closeOut data_no_alg - val _ = TextIO.closeOut data_std_alg - val _ = TextIO.closeOut data_ext_alg -in val _ = (); end; - diff --git a/core/dnets/test.ML b/core/dnets/test.ML deleted file mode 100644 index 4089fbd0..00000000 --- a/core/dnets/test.ML +++ /dev/null @@ -1,1225 +0,0 @@ -local - structure Tools = Test_Bang_Graph_Tools(Test_Bang_Graph); - open Tools; - - - structure TD = Top_DNet(G) - structure CL = TD.CL - structure C = CL.C - structure L = C.L - - - val b = L.boundary; - val k1 = (vexpr1 "0") - val k2 = (vexpr1 "1") - val k3 = (vexpr1 "a") - - - -(******************) -(* BUILDING TESTS *) -(******************) - - (* Building the graphs *) - val pattern1 = G.empty - - val p1_v1 = V.mk "p1_v1" - val p1_v2 = V.mk "p1_v2" - val p1_v3 = V.mk "p1_v3" - val p1_v4 = V.mk "p1_v4" - val p1_v5 = V.mk "p1_v5" - - val p1_e1 = E.mk "p1_e1" - val p1_e2 = E.mk "p1_e2" - val p1_e3 = E.mk "p1_e3" - val p1_e4 = E.mk "p1_e4" - - val pattern1 = pattern1 |> G.add_named_vertex p1_v1 k1 - |> G.add_named_vertex p1_v2 k2 - |> G.add_named_vertex p1_v3 k2 - |> G.add_named_vertex p1_v4 k1 - |> G.add_named_vertex p1_v5 k2 - |> G.add_named_edge p1_e1 (Directed,eunit1) p1_v1 p1_v2 - |> G.add_named_edge p1_e2 (Directed,eunit1) p1_v1 p1_v3 - |> G.add_named_edge p1_e3 (Directed,eunit1) p1_v2 p1_v3 - |> G.add_named_edge p1_e4 (Directed,eunit1) p1_v5 p1_v4 - - val (bb1,pattern1) = pattern1 |> G.add_bbox - val pattern1 = pattern1 |> G.add_to_bbox_anon bb1 (V.NSet.single p1_v5) - - - - val pattern2 = G.empty - - val p2_v1 = V.mk "p2_v1" 
- val p2_v2 = V.mk "p2_v2" - val p2_v3 = V.mk "p2_v3" - val p2_v4 = V.mk "p2_v4" - - val p2_e1 = E.mk "p2_e1" - val p2_e2 = E.mk "p2_e2" - val p2_e3 = E.mk "p2_e3" - val p2_e4 = E.mk "p2_e4" - - val pattern2 = pattern2 |> G.add_named_vertex p2_v1 k2 - |> G.add_named_vertex p2_v2 k2 - |> G.add_named_vertex p2_v3 k2 - |> G.add_named_vertex p2_v4 k1 - |> G.add_named_edge p2_e1 (Directed,eunit1) p2_v1 p2_v2 - |> G.add_named_edge p2_e2 (Directed,eunit1) p2_v1 p2_v3 - |> G.add_named_edge p2_e3 (Directed,eunit1) p2_v1 p2_v4 - |> G.add_named_edge p2_e4 (Directed,eunit1) p2_v4 p2_v3 - - val (bb2,pattern2) = pattern2 |> G.add_bbox - val (bb3,pattern2) = pattern2 |> G.add_bbox - val pattern2 = pattern2 |> G.add_to_bbox_anon bb2 (V.NSet.single p2_v1) - |> G.add_to_bbox_anon bb3 (V.NSet.single p2_v4) - - - val pattern3 = G.empty - - val p3_v1 = V.mk "p3_v1" - val p3_v2 = V.mk "p3_v2" - val p3_v3 = V.mk "p3_v3" - val p3_v4 = V.mk "p3_v4" - - val p3_e1 = E.mk "p3_e1" - val p3_e2 = E.mk "p3_e2" - val p3_e3 = E.mk "p3_e3" - val p3_e4 = E.mk "p3_e4" - - val pattern3 = pattern3 |> G.add_named_vertex p3_v1 k1 - |> G.add_named_vertex p3_v2 k1 - |> G.add_named_vertex p3_v3 k2 - |> G.add_named_vertex p3_v4 k2 - |> G.add_named_edge p3_e1 (Directed,eunit1) p3_v1 p3_v2 - |> G.add_named_edge p3_e2 (Directed,eunit1) p3_v1 p3_v3 - |> G.add_named_edge p3_e3 (Directed,eunit1) p3_v2 p3_v3 - |> G.add_named_edge p3_e4 (Directed,eunit1) p3_v3 p3_v4 - - - val pattern4 = G.empty - - val p4_v1 = V.mk "p4_v1" - val p4_v2 = V.mk "p4_v2" - val p4_v3 = V.mk "p4_v3" - val p4_v4 = V.mk "p4_v4" - - val p4_e1 = E.mk "l1" - val p4_e2 = E.mk "l2" - val p4_e3 = E.mk "l3" - val p4_e4 = E.mk "l4" - - val pattern4 = pattern4 |> G.add_named_vertex p4_v1 k1 - |> G.add_named_vertex p4_v2 k1 - |> G.add_named_vertex p4_v3 k2 - |> G.add_named_vertex p4_v4 b - |> G.add_named_edge p4_e1 (Directed,eunit1) p4_v1 p4_v2 - |> G.add_named_edge p4_e2 (Directed,eunit1) p4_v1 p4_v3 - |> G.add_named_edge p4_e3 (Directed,eunit1) p4_v2 p4_v3 - |> G.add_named_edge p4_e4 (Directed,eunit1) p4_v3 p4_v4 - - - - val pattern5 = G.empty - - val p5_v1 = V.mk "p5_v1" - val p5_v2 = V.mk "p5_v2" - val p5_v3 = V.mk "p5_v3" - val p5_v4 = V.mk "p5_v4" - - val p5_e1 = E.mk "p5_e1" - val p5_e2 = E.mk "p5_e2" - val p5_e3 = E.mk "p5_e3" - val p5_e4 = E.mk "p5_e4" - - val pattern5 = pattern5 |> G.add_named_vertex p5_v1 k2 - |> G.add_named_vertex p5_v2 b - |> G.add_named_vertex p5_v3 b - |> G.add_named_vertex p5_v4 k1 - |> G.add_named_edge p5_e1 (Directed,eunit1) p5_v1 p5_v2 - |> G.add_named_edge p5_e2 (Directed,eunit1) p5_v3 p5_v1 - |> G.add_named_edge p5_e3 (Directed,eunit1) p5_v4 p5_v1 - - - val pattern6 = G.empty - - val p6_v1 = V.mk "p6_v1" - val p6_v2 = V.mk "p6_v2" - val p6_v3 = V.mk "p6_v3" - val p6_v4 = V.mk "p6_v4" - - val p6_e1 = E.mk "p6_e1" - val p6_e2 = E.mk "p6_e2" - val p6_e3 = E.mk "p6_e3" - val p6_e4 = E.mk "p6_e4" - - val pattern6 = pattern6 |> G.add_named_vertex p6_v1 k2 - |> G.add_named_vertex p6_v2 b - |> G.add_named_vertex p6_v3 k1 - |> G.add_named_vertex p6_v4 k1 - |> G.add_named_edge p6_e1 (Directed,eunit1) p6_v1 p6_v2 - |> G.add_named_edge p6_e2 (Directed,eunit1) p6_v1 p6_v3 - |> G.add_named_edge p6_e3 (Directed,eunit1) p6_v3 p6_v4 - - - - - val pattern7 = G.empty - - val p7_v1 = V.mk "p7_v1" - val p7_v2 = V.mk "p7_v2" - val p7_v3 = V.mk "p7_v3" - val p7_v4 = V.mk "p7_v4" - - val p7_e1 = E.mk "p7_e1" - val p7_e2 = E.mk "p7_e2" - val p7_e3 = E.mk "p7_e3" - val p7_e4 = E.mk "p7_e4" - val p7_e5 = E.mk "p7_e5" - - val pattern7 = pattern7 |> G.add_named_vertex 
p7_v1 k1 - |> G.add_named_vertex p7_v2 k1 - |> G.add_named_vertex p7_v3 k1 - |> G.add_named_vertex p7_v4 k1 - |> G.add_named_edge p7_e1 (Directed,eunit1) p7_v1 p7_v2 - |> G.add_named_edge p7_e2 (Directed,eunit1) p7_v2 p7_v1 - |> G.add_named_edge p7_e3 (Directed,eunit1) p7_v1 p7_v3 - |> G.add_named_edge p7_e4 (Directed,eunit1) p7_v2 p7_v4 - |> G.add_named_edge p7_e5 (Directed,eunit1) p7_v3 p7_v4 - - - - (* Building the contour lists *) - val cl1 = CL.mk pattern1 - val c1_1 = nth cl1 0 - val c1_2 = nth cl1 1 - val lit1_1 = nth c1_1 0 - val lit1_2 = nth c1_2 0 - val lit1_3 = nth c1_2 1 - val lit1_4 = L.mk pattern1 p1_v4 - val lit1_5 = L.mk pattern1 p1_v5 - - - val cl2 = CL.mk pattern2 - val c2_1 = nth cl2 0 - val c2_2 = nth cl2 1 - val c2_3 = nth cl2 2 - val lit2_1 = nth c2_2 0 - val lit2_2 = nth c2_1 0 - val lit2_3 = nth c2_3 0 - val lit2_4 = nth c2_3 1 - - - val cl3 = CL.mk pattern3 - val c3_1 = nth cl3 0 - val c3_2 = nth cl3 1 - val c3_3 = nth cl3 2 - val lit3_1 = nth c3_1 0 - val lit3_2 = nth c3_2 0 - val lit3_3 = nth c3_2 1 - val lit3_4 = nth c3_3 0 - - - val cl4 = CL.mk pattern4 - val c4_1 = nth cl4 0 - val c4_2 = nth cl4 1 - val c4_3 = nth cl4 2 - val lit4_1 = nth c4_1 0 - val lit4_2 = nth c4_2 0 - val lit4_3 = nth c4_2 1 - val lit4_4 = nth c4_3 0 - - - val cl5 = CL.mk pattern5 - val c5_1 = nth cl5 0 - val c5_2 = nth cl5 1 - val lit5_1 = nth c5_1 0 - val lit5_2 = nth c5_2 0 - val lit5_3 = nth c5_2 1 - val lit5_4 = nth c5_2 2 - - - val cl6 = CL.mk pattern6 - val c6_1 = nth cl6 0 - val c6_2 = nth cl6 1 - val c6_3 = nth cl6 2 - val lit6_1 = nth c6_1 0 - val lit6_2 = nth c6_2 0 - val lit6_3 = nth c6_2 1 - val lit6_4 = nth c6_3 0 - - - val cl7 = CL.mk pattern7 - val c7_1 = nth cl7 0 - val c7_2 = nth cl7 1 - val c7_3 = nth cl7 2 - val lit7_1 = nth c7_1 0 - val lit7_2 = nth c7_2 0 - val lit7_3 = nth c7_2 1 - val lit7_4 = nth c7_3 0 - - - (* TESTING BASIC OPERATIONS *) - - (* Assertion functions *) - fun assert_subtraction(c1,c2,c3) = - if C.eq(C.subtract_eq_contour(c1,c2),c3) then true - else raise ERROR ("Subtraction is wrong: expected " ^ C.printout(c3) ^ " -- computed: " ^ C.printout(C.subtract_eq_contour(c1,c2))) - - fun assert_intersection(c1,c2,c3) = - if C.eq(C.intersect_eq_contours(c1,c2),c3) then true - else raise ERROR ("Intersection is wrong: expected " ^ C.printout(c3) ^ " -- computed: " ^ C.printout(C.intersect_eq_contours(c1,c2))) - - fun assert_complement(c1,c2,c3) = - if C.eq(C.complement_eq_contour(c1,c2),c3) then true - else raise ERROR ("Complement is wrong: expected " ^ C.printout(c3) ^ " -- computed: " ^ C.printout(C.complement_eq_contour(c1,c2))) - - fun assert_duplicate_removal(c1,c2) = - if C.eq(C.remove_eq_duplicate(c1),c2) then true - else raise ERROR ("Duplicate removal is wrong: expected " ^ C.printout(c2) ^ " -- computed: " ^ C.printout(C.remove_eq_duplicate(c1))) - - (* Tests *) - val lit_a = L.build((V.mk "a"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_none) - val lit_b = L.build((V.mk "b"), k2, L.mult_none, 1, L.mult_none, 0, L.mult_none) - val lit_c = L.build((V.mk "c"), k1, L.mult_star, 0, L.mult_none, 1, L.mult_none) - val lit_d = L.build((V.mk "d"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_star) - val lit_e = L.build((V.mk "e"), G.WVert, L.mult_none, 1, L.mult_none, 0, L.mult_none) - val lit_f = L.build((V.mk "f"), k2, L.mult_star, 1, L.mult_none, 0, L.mult_none) - - val cont_a = lit_b :: lit_a :: [] - val cont_b = lit_b :: lit_c :: [] - val cont_c = lit_c :: [] - val cont_d = lit_a :: lit_e :: lit_b :: lit_f :: [] - - val sub_d_a = lit_e :: lit_f :: [] 
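(* Expected results for C.subtract_eq_contour, checked by the subtraction tests that
   follow: removing cont_a = [lit_b, lit_a] from cont_d = [lit_a, lit_e, lit_b, lit_f]
   leaves [lit_e, lit_f]; removing cont_c = [lit_c] from cont_b = [lit_b, lit_c] leaves
   [lit_b]; subtracting cont_a from itself leaves the empty contour. *)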
- val sub_b_c = lit_b :: [] - val sub_a_a = [] - val _ = Testing.test "Checking subtraction.." assert_subtraction (cont_d,cont_a,sub_d_a) - val _ = Testing.test "Checking subtraction.." assert_subtraction (cont_b,cont_c,sub_b_c) - val _ = Testing.test "Checking subtraction.." assert_subtraction (cont_a,cont_a,sub_a_a) - - val inters_a_b = lit_b :: [] - val inters_a_c = [] - val inters_b_c = lit_c :: [] - val inters_a_d = lit_b :: lit_a :: [] - val _ = Testing.test "Checking intersection.." assert_intersection (cont_a,cont_b,inters_a_b) - val _ = Testing.test "Checking intersection.." assert_intersection (cont_a,cont_c,inters_a_c) - val _ = Testing.test "Checking intersection.." assert_intersection (cont_b,cont_c,inters_b_c) - val _ = Testing.test "Checking intersection.." assert_intersection (cont_a,cont_d,inters_a_d) - - val compl_a_b = lit_c :: [] - val compl_a_c = lit_c :: [] - val compl_b_c = [] - val compl_a_d = lit_e :: lit_f :: [] - val compl_a_d2 = lit_f :: lit_e :: [] - val _ = Testing.test "Checking complement.." assert_complement (cont_a,cont_b,compl_a_b) - val _ = Testing.test "Checking complement.." assert_complement (cont_a,cont_c,compl_a_c) - val _ = Testing.test "Checking complement.." assert_complement (cont_b,cont_c,compl_b_c) - val _ = Testing.test "Checking complement.." assert_complement (cont_a,cont_d,compl_a_d) - val _ = Testing.test "Checking complement.." assert_complement (cont_a,cont_d,compl_a_d2) - - val cont_aa = lit_b :: lit_a :: lit_a :: lit_a :: lit_b :: [] - val cont_bb = lit_c :: lit_b :: [] - val cont_cc = lit_c :: lit_c :: [] - val _ = Testing.test "Checking duplicate removal.." assert_duplicate_removal (cont_aa,cont_a) - val _ = Testing.test "Checking duplicate removal.." assert_duplicate_removal (cont_bb,cont_b) - val _ = Testing.test "Checking duplicate removal.." assert_duplicate_removal (cont_cc,cont_c) - - - - - (* TESTING LITERALS *) - - (* Assertion functions *) - fun assert_name_eq(a:V.name,b:V.name) = - if V.name_eq(a,b) then true - else raise ERROR ("Literal names do not match: " ^ V.string_of_name(a) ^ " -- " ^ V.string_of_name(b)) - - fun assert_kind_eq(a:G.vdata,b:G.vdata) = - if G.vdata_eq(a,b) then true - else raise ERROR ("Literal kinds do not match!") - - fun assert_mult_eq(a:L.multiplicity,b:L.multiplicity,s) = - if L.mult_eq(a,b) then true - else raise ERROR ("Literal " ^ s ^ "do not match!") - - fun assert_int_eq(a,b,s) = - if (a=b) then true - else raise ERROR ("Literal " ^ s ^ "do not match!") - - fun assert_lit_eq(l1,l2) = - if L.eq(l1,l2) then true - else raise ERROR ("Literals are not equal") - - fun assert_lit_equiv(l1,l2) = - if L.equiv(l1,l2) then true - else raise ERROR ("Literals are not equivalent") - - fun assert_lit_match(l1,l2) = - if L.match(l1,l2) then true - else raise ERROR ("Literals do not match") - - (* Tests *) - val name1 = L.get_name lit1_1 - val name2 = L.get_name lit1_2 - val _ = Testing.test "Checking literal name.." assert_name_eq (name1,p1_v1) - val _ = Testing.test "Checking literal name.." assert_name_eq (name2,p1_v2) - - val kind1 = L.get_kind lit1_1 - val kind2 = L.get_kind lit1_2 - val _ = Testing.test "Checking literal kind.." assert_kind_eq (kind1,k1) - val _ = Testing.test "Checking literal kind.." assert_kind_eq (kind2,k2) - - val k_m1 = L.get_kind_mult lit1_1 - val k_m2 = L.get_kind_mult lit1_2 - val _ = Testing.test "Checking literal kind-mult.." assert_mult_eq (k_m1,L.mult_none,"kind-mult") - val _ = Testing.test "Checking literal kind-mult.." 
assert_mult_eq (k_m2,L.mult_none,"kind-mult") - - val i_a1 = L.get_input_arity lit1_1 - val i_a2 = L.get_input_arity lit1_2 - val _ = Testing.test "Checking literal input-arity.." assert_int_eq (i_a1,0,"input-arity") - val _ = Testing.test "Checking literal input-arity.." assert_int_eq (i_a2,1,"input-arity") - - val i_m1 = L.get_input_mult lit1_1 - val i_m2 = L.get_input_mult lit1_2 - val _ = Testing.test "Checking literal input-mult.." assert_mult_eq (i_m1,L.mult_none,"input-mult") - val _ = Testing.test "Checking literal input-mult.." assert_mult_eq (i_m2,L.mult_none,"input-mult") - - val o_a1 = L.get_output_arity lit1_1 - val o_a2 = L.get_output_arity lit1_2 - val _ = Testing.test "Checking literal output-arity.." assert_int_eq (o_a1,2,"output-arity") - val _ = Testing.test "Checking literal output-arity.." assert_int_eq (o_a2,1,"output-arity") - - val o_m1 = L.get_output_mult lit1_1 - val o_m2 = L.get_output_mult lit1_2 - val _ = Testing.test "Checking literal output-mult.." assert_mult_eq (o_m1,L.mult_none,"output-mult") - val _ = Testing.test "Checking literal output-mult.." assert_mult_eq (o_m2,L.mult_none,"output-mult") - - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit2_1, L.build ( (V.mk "p2_v1"), k2, L.mult_star, 0, L.mult_none, 3, L.mult_star) ) - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit2_2, L.build ( (V.mk "p2_v2"), k2, L.mult_none, 1, L.mult_star, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit2_3, L.build ( (V.mk "p2_v3"), k2, L.mult_qm, 2, L.mult_star, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit2_4, L.build ( (V.mk "p2_v4"), k1, L.mult_star, 1, L.mult_star, 1, L.mult_none) ) - - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit2_1, L.build ( (V.mk "p2_v1"), k2, L.mult_star, 0, L.mult_none, 2, L.mult_star) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit2_2, L.build ( (V.mk "p2_v2"), k2, L.mult_none, 0, L.mult_star, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit2_3, L.build ( (V.mk "p2_v3"), k2, L.mult_qm, 0, L.mult_star, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit2_4, L.build ( (V.mk "p2_v4"), k1, L.mult_star, 0, L.mult_star, 1, L.mult_none) ) - - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit4_1, L.build ( (V.mk "p4_v1"), k2, L.mult_none, 0, L.mult_none, 3, L.mult_none) ) - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit4_2, L.build ( (V.mk "p4_v2"), k2, L.mult_none, 1, L.mult_none, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit4_3, L.build ( (V.mk "p4_v3"), k1, L.mult_none, 2, L.mult_none, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit4_4, L.build ( (V.mk "p4_v4"), b, L.mult_none, 1, L.mult_none, 1, L.mult_none) ) - - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit4_1, L.build ( (V.mk "p4_v1"), k1, L.mult_none, 0, L.mult_none, 2, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit4_2, L.build ( (V.mk "p4_v2"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit4_3, L.build ( (V.mk "p4_v3"), k2, L.mult_none, 2, L.mult_none, 1, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." 
assert_lit_equiv (lit4_4, L.build ( (V.mk "p4_v4"), b, L.mult_none, 1, L.mult_none, 0, L.mult_none) ) - - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit5_1, L.build ( (V.mk "p5_v1"), k2, L.mult_none, 2, L.mult_none, 1, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit5_2, L.build ( (V.mk "p5_v2"), b, L.mult_star, 1, L.mult_none, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit5_3, L.build ( (V.mk "p5_v3"), b, L.mult_star, 0, L.mult_none, 1, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit5_4, L.build ( (V.mk "p5_v4"), k1, L.mult_none, 0, L.mult_none, 1, L.mult_none) ) - - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit6_1, L.build ( (V.mk "p6_v1"), k2, L.mult_none, 0, L.mult_none, 2, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit6_2, L.build ( (V.mk "p6_v2"), b, L.mult_none, 1, L.mult_none, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit6_3, L.build ( (V.mk "p6_v3"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit6_4, L.build ( (V.mk "p6_v4"), k1, L.mult_none, 1, L.mult_none, 0, L.mult_none) ) - - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit7_1, L.build ( (V.mk "p7_v1"), k1, L.mult_none, 1, L.mult_none, 2, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit7_2, L.build ( (V.mk "p7_v2"), k1, L.mult_none, 1, L.mult_none, 2, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit7_3, L.build ( (V.mk "p7_v3"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." 
assert_lit_equiv (lit7_4, L.build ( (V.mk "p7_v4"), k1, L.mult_none, 2, L.mult_none, 0, L.mult_none) ) - - - (* TESTING CONTOURS *) - - (* Assertion functions *) - fun assert_contour_length(i,c) = - if (length c = i) then true - else raise ERROR ("Error in contour length " ^ CL.printout(c)) - - fun is_one(x,[]) = false - | is_one(x,c::[]) = if C.equiv(x,c) then true else false - | is_one(x,c::cs) = if C.equiv(x,c) then true else is_one(x,cs) - - fun assert_tgt_fn(c,cc) = - if (is_one(c,cc)) then true - else raise ERROR ("Error in target function") - - fun assert_contour_length(i,c) = - if (length c = i) then true - else raise ERROR ("Error in contour length") - - fun assert_is_eq_contained(l,c) = - if (C.is_eq_literal_contained(l,c)) then true - else raise ERROR ("Error in the elements of the contour") - - fun assert_is_eq_not_contained(l,c) = - if (C.is_eq_literal_contained(l,c)) then raise ERROR ("Error in the elements of the contour") - else true - - fun assert_contour_eq(c1,c2) = - if C.eq(c1,c2) then true - else raise ERROR ("Contours are not equal") - - fun assert_contour_equiv(c1,c2) = - if C.equiv(c1,c2) then true - else raise ERROR ("Contours are not equivalent") - - fun assert_matching_contour_containing(c1,c2) = - if C.is_matching_contour_contained(c1,c2) then true - else raise ERROR ("Error in contour list") - - fun assert_strong_compatibility(c1,c2) = - if C.check_strong_compatibility(c1,c2) then true - else raise ERROR ("Contours are not strongly compatible") - - fun assert_no_strong_compatibility(c1,c2) = - if C.check_strong_compatibility(c1,c2) then raise ERROR ("Contours are strongly compatible") - else true - - fun assert_weak_compatibility(c1,c2) = - if C.check_weak_compatibility(c1,c2) then true - else raise ERROR ("Contours are not weakly compatible") - - fun assert_no_weak_compatibility(c1,c2) = - if C.check_weak_compatibility(c1,c2) then raise ERROR ("Contours are weakly compatible") - else true - - - (* Tests *) - val e = C.empty - val trg1 = C.target_function pattern1 - val ok1 = C.add_literal e lit1_1 - val ok2 = C.add_literal e lit1_2 - val ok3 = C.add_literal e lit1_3 - val ok4 = C.add_literal e lit1_4 - val ok1list = ok1::ok2::ok3::ok4::[] - - val trg2 = C.target_function pattern2 - val ok2 = C.add_literal e lit2_2 - val ok3 = C.add_literal e lit2_3 - val ok2list = ok2::ok3::[] - - val _ = Testing.test "Checking target function.." assert_tgt_fn (trg1,ok1list) - val _ = Testing.test "Checking target function.." assert_tgt_fn (trg2,ok2list) - - - val c1 = C.mk pattern1 trg1 - val _ = Testing.test "Checking number of literals.." assert_contour_length (2,c1) - val _ = Testing.test "Checking element in contour.." assert_is_eq_contained (lit1_2,c1) - val _ = Testing.test "Checking element in contour.." assert_is_eq_contained (lit1_3,c1) - val _ = Testing.test "Checking element in contour.." assert_is_eq_not_contained (lit1_4,c1) - val _ = Testing.test "Checking element in contour.." assert_is_eq_not_contained (lit1_5,c1) - - val c2 = C.mk pattern1 (lit1_2::lit1_3::[]) - val _ = Testing.test "Checking number of literals.." 
assert_contour_length (1,c2) - - - val lit_1 = L.build((V.mk "a"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_none) - val lit_2 = L.build((V.mk "b"), k2, L.mult_none, 1, L.mult_none, 0, L.mult_none) - val target = lit_1 :: lit_2 :: lit_2 :: [] - - val lit_3 = L.build((V.mk "c"), k1, L.mult_star, 0, L.mult_none, 1, L.mult_none) - val pattern_1 = lit_1 :: lit_2 :: lit_3 :: lit_2 :: [] - val lit_4 = L.build((V.mk "d"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_star) - val pattern_2 = lit_2 :: lit_4 :: lit_2 :: [] - val lit_5 = L.build((V.mk "e"), G.WVert, L.mult_none, 1, L.mult_none, 0, L.mult_none) - val lit_6 = L.build((V.mk "f"), k2, L.mult_star, 1, L.mult_none, 0, L.mult_none) - val pattern_3 = lit_5 :: lit_6 :: [] - - val pattern_2_concrete = C.get_contour_mult_none(pattern_2) - val pattern_2_abstract = C.get_contour_mult_star_or_qm(pattern_2) - val pattern_3_concrete = C.get_contour_mult_none(pattern_3) - val pattern_3_abstract = C.get_contour_mult_star_or_qm(pattern_3) - val _ = Testing.test "Checking abstract and concrete contour.." assert_contour_equiv(pattern_2_concrete, pattern_2) - val _ = Testing.test "Checking abstract and concrete contour.." assert_contour_equiv(pattern_2_abstract, C.empty) - val _ = Testing.test "Checking abstract and concrete contour.." assert_contour_equiv(pattern_3_concrete, (lit_5::[])) - val _ = Testing.test "Checking abstract and concrete contour.." assert_contour_equiv(pattern_3_abstract, (lit_6::[])) - - val _ = Testing.test "Checking containment.." assert_matching_contour_containing(pattern_2_concrete,target) - val _ = Testing.test "Checking containment.." assert_matching_contour_containing((lit_1 :: lit_2 :: lit_2 :: []),target) - - val _ = Testing.test "Checking strong compatibility.." assert_strong_compatibility(target,pattern_1) - val _ = Testing.test "Checking strong compatibility.." assert_strong_compatibility(target,pattern_2) - val _ = Testing.test "Checking strong compatibility.." assert_strong_compatibility(target,pattern_3) - - val lit_7 = L.build((V.mk "g"), k2, L.mult_qm, 1, L.mult_none, 0, L.mult_none) - val pattern_4 = lit_5 :: lit_7 :: [] - val lit_8 = L.build((V.mk "h"), k2, L.mult_none, 1, L.mult_star, 0, L.mult_none) - val pattern_5 = lit_8 :: [] - val _ = Testing.test "Checking strong compatibility.." assert_no_strong_compatibility(target,pattern_4) - val _ = Testing.test "Checking strong compatibility.." assert_no_strong_compatibility(target,pattern_5) - val _ = Testing.test "Checking weak compatibility.." assert_weak_compatibility(target,pattern_4) - val _ = Testing.test "Checking weak compatibility.." assert_weak_compatibility(target,pattern_5) - - val lit_9 = L.build((V.mk "j"), k2, L.mult_qm, 1, L.mult_none, 1, L.mult_none) - val pattern_6 = lit_1 :: lit_1 :: lit_9 :: lit_6 :: [] - val pattern_7 = lit_1 :: lit_2 :: lit_8 :: lit_5 :: [] - val lit_10 = L.build((V.mk "k"), k1, L.mult_star, 1, L.mult_none, 1, L.mult_none) - val lit_11 = L.build((V.mk "l"), k2, L.mult_none, 1, L.mult_none, 1, L.mult_none) - val lit_12 = L.build((V.mk "m"), k2, L.mult_none, 2, L.mult_none, 1, L.mult_none) - val pattern_8 = lit_10 :: lit_11 :: lit_12 :: [] - val lit_13 = L.build((V.mk "n"), k1, L.mult_qm, 1, L.mult_none, 0, L.mult_none) - val pattern_9 = lit_13 :: lit_11 :: [] - val _ = Testing.test "Checking strong compatibility.." assert_no_strong_compatibility(target,pattern_6) - val _ = Testing.test "Checking strong compatibility.." assert_no_strong_compatibility(target,pattern_7) - val _ = Testing.test "Checking strong compatibility.." 
assert_no_strong_compatibility(target,pattern_8) - val _ = Testing.test "Checking strong compatibility.." assert_no_strong_compatibility(target,pattern_9) - val _ = Testing.test "Checking weak compatibility.." assert_no_weak_compatibility(target,pattern_6) - val _ = Testing.test "Checking weak compatibility.." assert_no_weak_compatibility(target,pattern_7) - val _ = Testing.test "Checking weak compatibility.." assert_no_weak_compatibility(target,pattern_8) - val _ = Testing.test "Checking weak compatibility.." assert_no_weak_compatibility(target,pattern_9) - - - - (* TESTING CONTOUR LISTS *) - - (* Assertion functions *) - fun assert_contour_list_length(i,cl) = - if (length cl = i) then true - else raise ERROR ("Error in contour list length") - - fun assert_contour_list_equiv(cl1,cl2) = - if CL.equiv(cl1,cl2) then true - else raise ERROR ("Error in contour list") - - - (* Tests *) - - val hand_cl1 = (lit1_1::[]) :: (lit1_2::lit1_3::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (2,cl1) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl1,cl1) - - val hand_cl2_1 = (lit2_2::[]) :: (lit2_1::[]) :: (lit2_4::lit2_3::[]) :: [] - val hand_cl2_2 = (lit2_2::[]) :: (lit2_1::[]) :: (lit2_3::lit2_4::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (3,cl2) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl2_1,cl2) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl2_2,cl2) - - val hand_cl3 = (lit3_1::[]) :: (lit3_2::lit3_3::[]) :: (lit3_4::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (3,cl3) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl3,cl3) - - val hand_cl4 = (lit4_1::[]) :: (lit4_2::lit4_3::[]) :: (lit4_4::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (3,cl4) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl4,cl4) - - val hand_cl5 = (lit5_1::[]) :: (lit5_2::lit5_3::lit5_4::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (2,cl5) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl5,cl5) - - val hand_cl6 = (lit6_1::[]) :: (lit6_2::lit6_3::[]) :: (lit6_4::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (3,cl6) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl6,cl6) - - val hand_cl7 = (lit7_1::[]) :: (lit7_2::lit7_3::[]) :: (lit7_4::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (3,cl7) - val _ = Testing.test "Checking the contours.." 
assert_contour_list_equiv (hand_cl7,cl7) - - - - (* TESTING DNET *) - - (* Assertion functions *) - fun assert_graph_name_eq(n1,n2) = - if GraphName.name_eq(n1,n2) then true - else raise ERROR ("Graph names do not match") - - fun assert_children_length(i,j) = - if i=j then true - else raise ERROR ("Children number do not match") - - - (* Tests *) - val gn1 = GraphName.mk "pattern1" - val gn2 = GraphName.mk "pattern2" - val gn3 = GraphName.mk "pattern3" - val gn4 = GraphName.mk "pattern4" - val gn5 = GraphName.mk "pattern5" - val gn6 = GraphName.mk "pattern6" - val gn7 = GraphName.mk "pattern7" - val gnl = gn1 :: gn2 :: gn3 :: gn4 :: gn5 :: gn6 :: gn7 :: [] - val gl = pattern1 :: pattern2 :: pattern3 :: pattern4 :: pattern5 :: pattern6 :: pattern7 :: [] - - val tab = GraphName.NTab.empty - val tab = TD.fold GraphName.NTab.doadd gnl gl tab - - val tree = TD.mk tab - - val level_1 = tree - val node_1 = tree - val contour_1_1 = TD.get_contour(level_1) - val children_1_1 = TD.get_children(level_1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_1_1,C.empty) - val _ = Testing.test "Checking the children.." assert_children_length((length children_1_1),5) - - val level_2 = children_1_1 - val node_2_1 = nth level_2 0 - val node_2_2 = nth level_2 1 - val node_2_3 = nth level_2 2 - val node_2_4 = nth level_2 3 - val node_2_5 = nth level_2 4 - val contour_2_1 = TD.get_contour(node_2_1) - val contour_2_2 = TD.get_contour(node_2_2) - val contour_2_3 = TD.get_contour(node_2_3) - val contour_2_4 = TD.get_contour(node_2_4) - val contour_2_5 = TD.get_contour(node_2_5) - val children_2_1 = TD.get_children(node_2_1) - val children_2_2 = TD.get_children(node_2_2) - val children_2_3 = TD.get_children(node_2_3) - val children_2_4 = TD.get_children(node_2_4) - val children_2_5 = TD.get_children(node_2_5) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_1,(lit1_1::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_1,(lit3_1::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_1,(lit4_1::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_2,(lit2_2::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_3,(lit5_1::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_4,(lit6_1::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_5,(lit7_1::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_2_1),2) - val _ = Testing.test "Checking the children.." assert_children_length((length children_2_2),1) - val _ = Testing.test "Checking the children.." assert_children_length((length children_2_3),1) - val _ = Testing.test "Checking the children.." assert_children_length((length children_2_4),1) - val _ = Testing.test "Checking the children.." 
assert_children_length((length children_2_5),1) - - val level_3 = children_2_1 :: children_2_2 :: children_2_3 :: children_2_4 :: children_2_5 :: [] - val node_3_1 = nth children_2_1 0 - val node_3_2 = nth children_2_1 1 - val node_3_3 = nth children_2_2 0 - val node_3_4 = nth children_2_3 0 - val node_3_5 = nth children_2_4 0 - val node_3_6 = nth children_2_5 0 - val contour_3_1 = TD.get_contour(node_3_1) - val contour_3_2 = TD.get_contour(node_3_2) - val contour_3_3 = TD.get_contour(node_3_3) - val contour_3_4 = TD.get_contour(node_3_4) - val contour_3_5 = TD.get_contour(node_3_5) - val contour_3_6 = TD.get_contour(node_3_6) - val children_3_1 = TD.get_children(node_3_1) - val children_3_2 = TD.get_children(node_3_2) - val children_3_3 = TD.get_children(node_3_3) - val children_3_4 = TD.get_children(node_3_4) - val children_3_5 = TD.get_children(node_3_5) - val children_3_6 = TD.get_children(node_3_6) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_1,(lit1_2::lit1_3::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_2,(lit3_2::lit3_3::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_3,(lit2_1::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_4,(lit5_2::lit5_3::lit5_4::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_5,(lit6_2::lit6_3::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_6,(lit7_2::lit7_3::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_1),1) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_2),2) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_3),1) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_4),1) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_5),1) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_6),1) - - val level_4 = children_3_1 :: children_3_2 :: children_3_3 :: children_3_4 :: children_3_5 :: children_3_6 :: [] - val node_4_1 = nth children_3_1 0 - val node_4_2 = nth children_3_2 0 - val node_4_3 = nth children_3_2 1 - val node_4_4 = nth children_3_3 0 - val node_4_5 = nth children_3_4 0 - val node_4_6 = nth children_3_5 0 - val node_4_7 = nth children_3_6 0 - val graph_4_1 = TD.get_graph(node_4_1) - val contour_4_2 = TD.get_contour(node_4_2) - val children_4_2 = TD.get_children(node_4_2) - val contour_4_3 = TD.get_contour(node_4_3) - val children_4_3 = TD.get_children(node_4_3) - val contour_4_4 = TD.get_contour(node_4_4) - val children_4_4 = TD.get_children(node_4_4) - val graph_4_5 = TD.get_graph(node_4_5) - val contour_4_6 = TD.get_contour(node_4_6) - val children_4_6 = TD.get_children(node_4_6) - val contour_4_7 = TD.get_contour(node_4_7) - val children_4_7 = TD.get_children(node_4_7) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_4_1, gn1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_4_2,(lit3_4::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_4_2),1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_4_3,(lit4_4::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_4_3),1) - val _ = Testing.test "Checking the contour.." 
assert_contour_equiv(contour_4_4,(lit2_4::lit2_3::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_4_4),1) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_4_5, gn5) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_4_6,(lit6_4::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_4_6),1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_4_7,(lit7_4::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_4_7),1) - - val level_5 = children_4_2 :: children_4_3 :: children_4_4 :: children_4_6 :: children_4_7 :: [] - val node_5_1 = nth children_4_2 0 - val node_5_2 = nth children_4_3 0 - val node_5_3 = nth children_4_4 0 - val node_5_4 = nth children_4_6 0 - val node_5_5 = nth children_4_7 0 - val graph_5_1 = TD.get_graph(node_5_1) - val graph_5_2 = TD.get_graph(node_5_2) - val graph_5_3 = TD.get_graph(node_5_3) - val graph_5_4 = TD.get_graph(node_5_4) - val graph_5_5 = TD.get_graph(node_5_5) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_5_2, gn4) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_5_1, gn3) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_5_3, gn2) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_5_4, gn6) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_5_5, gn7) - - - - - - -(******************) -(* PRUNING TESTS *) -(******************) - - (* Building the graphs *) - val target1 = G.empty - val t1_v1 = V.mk "t1v1" - val target1 = target1 |> G.add_named_vertex t1_v1 k1 - val t1_lit1 = L.mk target1 t1_v1 - - - - val target2 = G.empty - - val t2_v1 = V.mk "t2v1" - val t2_v2 = V.mk "t2v2" - val t2_v3 = V.mk "t2v3" - - val t2_e1 = E.mk "t2e1" - val t2_e2 = E.mk "t2e2" - val t2_e3 = E.mk "t2e3" - - val target2 = target2 |> G.add_named_vertex t2_v1 k1 - |> G.add_named_vertex t2_v2 k2 - |> G.add_named_vertex t2_v3 k2 - |> G.add_named_edge t2_e1 (Directed,eunit1) t2_v1 t2_v2 - |> G.add_named_edge t2_e2 (Directed,eunit1) t2_v1 t2_v3 - |> G.add_named_edge t2_e3 (Directed,eunit1) t2_v2 t2_v3 - - val t2_cl = CL.mk target2 - val t2_c1_1 = nth t2_cl 0 - val t2_c1_2 = nth t2_cl 1 - val t2_lit1 = nth t2_c1_1 0 - val t2_lit2 = nth t2_c1_2 0 - val t2_lit3 = nth t2_c1_2 1 - - - - val target3 = G.empty - - val t3_v1 = V.mk "t3v1" - - val target3 = target3 |> G.add_named_vertex t3_v1 k2 - - val t3_cl = CL.mk target3 - val t3_c1_1 = nth t3_cl 0 - - - - val target4 = G.empty - - val t4_v1a = V.mk "t4v1a" - val t4_v1b = V.mk "t4v1b" - val t4_v2 = V.mk "t4v2" - val t4_v3 = V.mk "t4v3" - val t4_v4 = V.mk "t4v4" - - val t4_e1a = E.mk "t4e1a" - val t4_e2a = E.mk "t4e2a" - val t4_e3a = E.mk "t4e3a" - val t4_e1b = E.mk "t4e1b" - val t4_e2b = E.mk "t4e2b" - val t4_e3b = E.mk "t4e3b" - val t4_e4 = E.mk "t4e4" - - val target4 = target4 |> G.add_named_vertex t4_v1a k2 - |> G.add_named_vertex t4_v1b k2 - |> G.add_named_vertex t4_v2 k2 - |> G.add_named_vertex t4_v3 k2 - |> G.add_named_vertex t4_v4 k1 - |> G.add_named_edge t4_e1a (Directed,eunit1) t4_v1a t4_v2 - |> G.add_named_edge t4_e2a (Directed,eunit1) t4_v1a t4_v3 - |> G.add_named_edge t4_e3a (Directed,eunit1) t4_v1a t4_v4 - |> G.add_named_edge t4_e1b (Directed,eunit1) t4_v1b t4_v2 - |> G.add_named_edge t4_e2b (Directed,eunit1) t4_v1b t4_v3 - |> G.add_named_edge t4_e3b (Directed,eunit1) t4_v1b t4_v4 - |> G.add_named_edge t4_e4 
(Directed,eunit1) t4_v4 t4_v3 - - val t4_cl = CL.mk target4 - val t4_c1_1 = nth t4_cl 0 - val t4_c1_2 = nth t4_cl 1 - val t4_c1_3 = nth t4_cl 1 - val t4_lit1_1 = nth t4_c1_1 0 - val t4_lit2_1 = nth t4_c1_2 0 - val t4_lit2_2 = nth t4_c1_2 1 - val t4_lit3_1 = nth t4_c1_3 0 - val t4_lit3_2 = nth t4_c1_3 1 - - - - - val target5 = G.empty - - val t5_v1 = V.mk "t5v1" - val t5_v2 = V.mk "t5v2" - val t5_v3 = V.mk "t5v3" - val t5_v4 = V.mk "t5v4" - val t5_v5 = V.mk "t5v5" - - val t5_e1 = E.mk "t5e1" - val t5_e2 = E.mk "t5e2" - val t5_e3 = E.mk "t5e3" - val t5_e4 = E.mk "t5e4" - val t5_e5 = E.mk "t5e5" - - val target5 = target5 |> G.add_named_vertex t5_v1 k1 - |> G.add_named_vertex t5_v2 k1 - |> G.add_named_vertex t5_v3 k2 - |> G.add_named_vertex t5_v4 k3 - |> G.add_named_vertex t5_v5 k2 - |> G.add_named_edge t5_e1 (Directed,eunit1) t5_v1 t5_v2 - |> G.add_named_edge t5_e2 (Directed,eunit1) t5_v1 t5_v3 - |> G.add_named_edge t5_e3 (Directed,eunit1) t5_v2 t5_v3 - |> G.add_named_edge t5_e4 (Directed,eunit1) t5_v3 t5_v4 - |> G.add_named_edge t5_e5 (Directed,eunit1) t5_v4 t5_v5 - - val t5_cl = CL.mk target5 - val t5_c1_1 = nth t5_cl 0 - val t5_c1_2 = nth t5_cl 1 - val t5_c1_3 = nth t5_cl 1 - val t5_c1_4 = nth t5_cl 2 - val t5_lit1 = nth t5_c1_1 0 - val t5_lit2 = nth t5_c1_2 0 - val t5_lit3 = nth t5_c1_2 1 - val t5_lit4 = nth t5_c1_3 0 - val t5_lit5 = nth t5_c1_4 0 - - - - - val target6 = G.empty - - val t6_v1 = V.mk "t6v1" - val t6_v2 = V.mk "t6v2" - val t6_v3 = V.mk "t6v3" - val t6_v4 = V.mk "t6v4" - - val t6_e1 = E.mk "t6e1" - val t6_e2 = E.mk "t6e2" - val t6_e3 = E.mk "t6e3" - val t6_e4 = E.mk "t6e4" - val t6_e5 = E.mk "t6e5" - - val target6 = target6 |> G.add_named_vertex t6_v1 k1 - |> G.add_named_vertex t6_v2 k1 - |> G.add_named_vertex t6_v3 k2 - |> G.add_named_vertex t6_v4 k2 - |> G.add_named_edge t6_e1 (Directed,eunit1) t6_v1 t6_v2 - |> G.add_named_edge t6_e2 (Directed,eunit1) t6_v1 t6_v3 - |> G.add_named_edge t6_e3 (Directed,eunit1) t6_v2 t6_v3 - |> G.add_named_edge t6_e4 (Directed,eunit1) t6_v3 t6_v4 - - val t6_cl = CL.mk target6 - val t6_c1_1 = nth t6_cl 0 - val t6_c1_2 = nth t6_cl 1 - val t6_c1_3 = nth t6_cl 1 - val t6_lit1 = nth t6_c1_1 0 - val t6_lit2 = nth t6_c1_2 0 - val t6_lit3 = nth t6_c1_2 1 - val t6_lit4 = nth t6_c1_3 0 - - - - val target7 = G.empty - - val t7_v1 = V.mk "t7v1" - val t7_v2 = V.mk "t7v2" - val t7_v3 = V.mk "t7v3" - - val t7_e1 = E.mk "t7e1" - val t7_e2 = E.mk "t7e2" - val t7_e3 = E.mk "t7e3" - - val target7 = target7 |> G.add_named_vertex t7_v1 k2 - |> G.add_named_vertex t7_v2 k1 - |> G.add_named_vertex t7_v3 k1 - |> G.add_named_edge t7_e1 (Directed,eunit1) t7_v1 t7_v2 - |> G.add_named_edge t7_e2 (Directed,eunit1) t7_v3 t7_v1 - |> G.add_named_edge t7_e3 (Directed,eunit1) t7_v2 t7_v1 - - val t7_cl = CL.mk target7 - val t7_c1_1 = nth t7_cl 0 - val t7_c1_2 = nth t7_cl 1 - val t7_lit1 = nth t7_c1_1 0 - val t7_lit2 = nth t7_c1_2 0 - val t7_lit3 = nth t7_c1_2 1 - - - - val target8 = G.empty - - val t8_v1 = V.mk "t8v1" - val t8_v2 = V.mk "t8v2" - val t8_v3 = V.mk "t8v3" - val t8_v4 = V.mk "t8v4" - val t8_v5 = V.mk "t8v5" - - val t8_e1 = E.mk "t8e1" - val t8_e2 = E.mk "t8e2" - val t8_e3 = E.mk "t8e3" - val t8_e4 = E.mk "t8e4" - - val target8 = target8 |> G.add_named_vertex t8_v1 k2 - |> G.add_named_vertex t8_v2 k1 - |> G.add_named_vertex t8_v3 k1 - |> G.add_named_vertex t8_v4 k2 - |> G.add_named_vertex t8_v5 k1 - |> G.add_named_edge t8_e1 (Directed,eunit1) t8_v1 t8_v2 - |> G.add_named_edge t8_e2 (Directed,eunit1) t8_v1 t8_v3 - |> G.add_named_edge t8_e3 (Directed,eunit1) t8_v2 
t8_v4 - |> G.add_named_edge t8_e4 (Directed,eunit1) t8_v3 t8_v5 - - val t8_cl = CL.mk target8 - val t8_c1_1 = nth t8_cl 0 - val t8_c1_2 = nth t8_cl 1 - val t8_c1_3 = nth t8_cl 2 - val t8_lit1 = nth t8_c1_1 0 - val t8_lit2 = nth t8_c1_2 0 - val t8_lit3 = nth t8_c1_2 1 - val t8_lit4 = nth t8_c1_3 0 - val t8_lit5 = nth t8_c1_3 1 - - - - - val target9 = G.empty - - val t9_v1 = V.mk "t9v1" - val t9_v2 = V.mk "t9v2" - val t9_v3 = V.mk "t9v3" - val t9_v4 = V.mk "t9v4" - val t9_v5 = V.mk "t9v5" - - val t9_e1 = E.mk "t9e1" - val t9_e2 = E.mk "t9e2" - val t9_e3 = E.mk "t9e3" - val t9_e4 = E.mk "t9e4" - val t9_e5 = E.mk "t9e5" - val t9_e6 = E.mk "t9e6" - - val target9 = target9 |> G.add_named_vertex t9_v1 k1 - |> G.add_named_vertex t9_v2 k1 - |> G.add_named_vertex t9_v3 k1 - |> G.add_named_vertex t9_v4 k1 - |> G.add_named_vertex t9_v5 k1 - |> G.add_named_edge t9_e1 (Directed,eunit1) t9_v1 t9_v2 - |> G.add_named_edge t9_e2 (Directed,eunit1) t9_v1 t9_v3 - |> G.add_named_edge t9_e3 (Directed,eunit1) t9_v3 t9_v1 - |> G.add_named_edge t9_e4 (Directed,eunit1) t9_v2 t9_v4 - |> G.add_named_edge t9_e5 (Directed,eunit1) t9_v2 t9_v5 - |> G.add_named_edge t9_e6 (Directed,eunit1) t9_v5 t9_v4 - - val t9_cl = CL.mk target9 - val t9_c1_1 = nth t9_cl 0 - val t9_c1_2 = nth t9_cl 1 - val t9_c1_3 = nth t9_cl 2 - val t9_lit1 = nth t9_c1_1 0 - val t9_lit2 = nth t9_c1_2 0 - val t9_lit3 = nth t9_c1_2 1 - val t9_lit4 = nth t9_c1_3 0 - val t9_lit5 = nth t9_c1_3 1 - - - - - (* TESTING PRUNING *) - - fun printout([]) = " " - | printout(g::[]) = GraphName.string_of_name(g) - | printout(g::gs) = GraphName.string_of_name(g) ^ printout(gs) - - (* Assertion functions *) - fun assert_graphs_eq(g1,g2) = - if TD.is_eq_graphs(g1,g2) then true - else raise ERROR ("Graphs do not match " ^ printout(g2) ^ " " ^ printout(g1)) - - fun assert_string_eq(a,b) = - if a = b then true - else raise ERROR ("Graphs do n") - - - (* Tests *) - val gn1 = GraphName.mk "pattern1" - val gn2 = GraphName.mk "pattern2" - val gn3 = GraphName.mk "pattern3" - val gn4 = GraphName.mk "pattern4" - val gn5 = GraphName.mk "pattern5" - val gn6 = GraphName.mk "pattern6" - val gn7 = GraphName.mk "pattern7" - val gnl = gn1 :: gn2 :: gn3 :: gn4 :: gn5 :: gn6 :: gn7 :: [] - val gl = pattern1 :: pattern2 :: pattern3 :: pattern4 :: pattern5 :: pattern6 :: pattern7 :: [] - - val tab = GraphName.NTab.empty - val tab = TD.fold GraphName.NTab.doadd gnl gl tab - - val tree = TD.mk tab - - - val tree_graphs = TD.graphs tree - val _ = Testing.test "Checking the graphs in the tree.." assert_graphs_eq(gnl,tree_graphs) - - - val pruned_tree1 = TD.extended_prune t1_v1 target1 tree - val pruned_tree1_grpahs = TD.graphs pruned_tree1 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq([],pruned_tree1_grpahs) - - (*val root = pruned_tree1 - val contour_1 = TD.get_contour(root) - val children_1 = TD.get_children(root) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_1,C.empty) - val _ = Testing.test "Checking the children.." assert_children_length((length children_1),0)*) - - val pruned_tree2 = TD.extended_prune t2_v1 target2 tree - val pruned_tree2_graphs = TD.graphs pruned_tree2 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn1::[],pruned_tree2_graphs) - - (*val target_contour_list = CL.mk_from target2 t2_v1 - val _ = Testing.test "Checking the contours.." 
assert_contour_list_equiv (target_contour_list, (t2_lit1::[]) :: (t2_lit2::t2_lit3::[]) :: [] ) - - val level_1 = pruned_tree2 - val contour_1_1 = TD.get_contour(level_1) - val children_1_1 = TD.get_children(level_1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_1_1,C.empty) - val _ = Testing.test "Checking the children.." assert_children_length((length children_1_1),1) - - val level_2 = children_1_1 - val node_2_1 = nth level_2 0 - val contour_2_1 = TD.get_contour(node_2_1) - val children_2_1 = TD.get_children(node_2_1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_1,(lit1_1::[])) - val _ = Testing.test "Checking t he contour.." assert_contour_equiv(contour_2_1,(lit3_1::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_2_1),1) - - val level_3 = children_2_1 - val node_3_1 = nth children_2_1 0 - val contour_3_1 = TD.get_contour(node_3_1) - val children_3_1 = TD.get_children(node_3_1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_1,(lit1_2::lit1_3::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_1),1) - - val level_4 = children_3_1 - val node_4_1 = nth children_3_1 0 - val graph_4_1 = TD.get_graph(node_4_1) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_4_1, gn1)*) - - val pruned_tree3 = TD.extended_prune t3_v1 target3 tree - val pruned_tree3_graphs = TD.graphs pruned_tree3 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn2::[],pruned_tree3_graphs) - - val pruned_tree4 = TD.extended_prune t4_v1a target4 tree - val pruned_tree4_graphs = TD.graphs pruned_tree4 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq([],pruned_tree4_graphs) - - val pruned_tree4 = TD.extended_prune t4_v2 target4 tree - val pruned_tree4_graphs = TD.graphs pruned_tree4 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn2::[],pruned_tree4_graphs) - - (*val t4_cl = CL.mk_from target4 t4_v2 - val t4_c1_1 = nth t4_cl 0 - val t4_c1_2 = nth t4_cl 1 - val t4_c1_3 = nth t4_cl 2 - val t4_lit1_1 = nth t4_c1_1 0 - val t4_lit2_1 = nth t4_c1_2 0 - val t4_lit2_2 = nth t4_c1_2 1 - val t4_lit3_1 = nth t4_c1_3 0 - val t4_lit3_2 = nth t4_c1_3 1 - - val _ = Testing.test "Checking the literals.." assert_lit_equiv(t4_lit1_1,L.build( V.mk "t4v2", k2, L.mult_none, 2, L.mult_none, 0, L.mult_none )) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(t4_lit2_1,L.build( V.mk "t4v1a", k2, L.mult_none, 0, L.mult_none, 3, L.mult_none )) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(t4_lit2_2,L.build( V.mk "t4v1b", k2, L.mult_none, 0, L.mult_none, 3, L.mult_none )) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(t4_lit3_1,L.build( V.mk "t4v3", k2, L.mult_none, 3, L.mult_none, 0, L.mult_none )) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(t4_lit3_2,L.build( V.mk "t4v4", k1, L.mult_none, 2, L.mult_none, 1, L.mult_none )) *) - - val pruned_tree5 = TD.extended_prune t5_v1 target5 tree - val pruned_tree5_graphs = TD.graphs pruned_tree5 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn4::[],pruned_tree5_graphs) - - val pruned_tree6 = TD.extended_prune t6_v1 target6 tree - val pruned_tree6_graphs = TD.graphs pruned_tree6 - val _ = Testing.test "Checking the graphs.." 
assert_graphs_eq(gn3 :: gn4:: [],pruned_tree6_graphs) - - val pruned_tree7 = TD.extended_prune t7_v1 target7 tree - val pruned_tree7_graphs = TD.graphs pruned_tree7 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn5::[],pruned_tree7_graphs) - - (*val target_contour_list = CL.mk_from target7 t7_v1 - val t7_c1 = nth target_contour_list 0 - val t7_c2 = nth target_contour_list 1 - val lit7_1 = nth t7_c1 0 - val lit7_2 = nth t7_c2 0 - val lit7_3 = nth t7_c2 1 - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit7_1,lit5_1) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit7_2,L.build( V.mk "t4v2", k1, L.mult_none, 1, L.mult_none, 1, L.mult_none )) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit7_3,L.build( V.mk "t4v2", k1, L.mult_none, 0, L.mult_none, 1, L.mult_none )) - val _ = Testing.test "Checking strong compatibility.." assert_strong_compatibility(t7_c1,c5_1) - val _ = Testing.test "Checking strong compatibility.." assert_weak_compatibility(t7_c2,c5_2)*) - - val pruned_tree8 = TD.extended_prune t8_v1 target8 tree - val pruned_tree8_graphs = TD.graphs pruned_tree8 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn6::[],pruned_tree8_graphs) - - (*val target_contour_list = CL.mk_from target8 t8_v1 - val t8_c1 = nth target_contour_list 0 - val t8_c2 = nth target_contour_list 1 - val t8_c3 = nth target_contour_list 2 - val lit8_1 = nth t8_c1 0 - val lit8_2 = nth t8_c2 0 - val lit8_3 = nth t8_c2 1 - val lit8_4 = nth t8_c3 0 - val lit8_5 = nth t8_c3 1 - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit8_1,lit6_1) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit8_3,lit6_3) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit8_5,lit6_4) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit8_2,L.build( V.mk "t4v2", k1, L.mult_none, 1, L.mult_none, 1, L.mult_none )) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit8_4,L.build( V.mk "t4v2", k2, L.mult_none, 1, L.mult_none, 0, L.mult_none )) - val _ = Testing.test "Checking strong compatibility.." assert_strong_compatibility(t8_c1,c6_1) - val _ = Testing.test "Checking strong compatibility.." assert_weak_compatibility(t8_c2,c6_2) - val _ = Testing.test "Checking strong compatibility.." assert_weak_compatibility(t8_c3,c6_3)*) - - val pruned_tree9 = TD.extended_prune t9_v1 target9 tree - val pruned_tree9_graphs = TD.graphs pruned_tree9 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq([],pruned_tree9_graphs) - - val pruned_tree9 = TD.standard_prune t9_v1 target9 tree - val pruned_tree9_graphs = TD.graphs pruned_tree9 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn7::[],pruned_tree9_graphs) - -in - val _ = Testing.assert_no_failed_tests(); - val _ = "UNIT TESTS FOR DNETS PASSED!" -end - - -(* - val target_contour_list = CL.mk_from target3 t3_v1 - val t3_cl = nth target_contour_list 0 - val lit3_1 = nth t3_cl 0 - val _ = Testing.test "Checking the literals.." 
assert_lit_equiv(lit3_1,L.build( V.mk "t4v2", k2, L.mult_none, 0, L.mult_none, 0, L.mult_none )) - - val children = TD.get_children(tree) - val child = nth children 1 - - val cont = TD.get_contour(child) - val _ = Testing.test "X" assert_contour_equiv(cont, (L.build( V.mk "t4v2", k2, L.mult_none, 0, L.mult_star, 0, L.mult_none) :: [])) - - val is_weak = C.contains_boundary(cont) - val t = if is_weak then 1 else 0 - val _ = Testing.test "X" assert_int_eq(t,0,"s") - - val is_saved = C.check_strong_compatibility(t3_cl,cont) - val t = if is_saved then 1 else 0 - val _ = Testing.test "X" assert_int_eq(t,1,"s")*) diff --git a/core/expressions/alg.ML b/core/expressions/alg.ML deleted file mode 100644 index 4124a79e..00000000 --- a/core/expressions/alg.ML +++ /dev/null @@ -1,271 +0,0 @@ -structure Permutation = -struct -type T = int list -(* recover the permutation from two lists of ordered elements *) -fun get_perm ord lst1 lst2 = - map (fn a => find_index - (fn b => (ord (a, b)) = EQUAL) lst2) - lst1 - -(* invert the given permutation *) -fun invert_perm perm = get_perm Int.compare (0 upto ((length perm)-1)) perm - -(* apply a permutation to a list defined by - * [0,1,2,3,...] |-> perm *) -fun apply_perm perm lst = map (nth lst) (invert_perm perm) - -fun id size = (0 upto (size-1)) -fun is_id perm = perm = (id (length perm)) - -(* The above functions have the property: - * apply_perm (get_perm ord lst1 lst2) lst1 = lst2 *) - - -end - - -signature RING = -sig - type T - val zero : T - val one : T - val is_zero : T -> bool - val is_one : T -> bool - val + : (T * T) -> T - val * : (T * T) -> T - val ~ : T -> T - val ord : T * T -> General.order - val toString : T -> string - val pretty : T -> Pretty.T - val print : T -> unit -end - -signature MATRIX = -sig - structure URing : RING - type 'a vector = 'a list - type 'a matrix = 'a list list - type T = URing.T matrix - - (* functions that work for any matrix *) - val map : ('a -> 'b) -> 'a matrix -> 'b matrix - val fold : - ('a -> 'b -> 'b) -> 'b -> - ('b -> 'c -> 'c) -> 'c -> - 'a matrix -> 'c - val transpose : 'a matrix -> 'a matrix - val flatten : 'a matrix matrix -> 'a matrix - val singleton : 'a -> 'a matrix - - (* functions for matrices of ring elements *) - val dot_product : URing.T vector -> URing.T vector -> URing.T - val kronecker_product : T -> T -> T - val mult : T -> T -> T - val pretty : T -> Pretty.T - val print : T -> unit - val scalar_mult : URing.T -> T -> T - val sum_of_squares : T -> URing.T - val tensor : T list -> T - val permute_tensor : int list -> int -> T - - val ident : int -> T -end - -structure IntRing : RING = -struct -open Int - -type T = int -val zero = 0 -val one = 1 -fun is_zero 0 = true | is_zero _ = false -fun is_one 1 = true | is_one _ = false -fun pretty i = Pretty.str (toString i) -val print = Pretty.writeln o pretty -val ord = compare -end - -functor PolyRingFun ( - structure ExpRing : RING - structure CoeffRing : RING) = -struct - - -type T = (CoeffRing.T * ExpRing.T StrName.NTab.T) list - -val keys = StrName.NTab.get_nameset - -fun const k = [(k, StrName.NTab.empty)] -fun monomial (v,p) = - [(p, StrName.NTab.empty |> StrName.NTab.update (v, ExpRing.one))] - -fun var v = monomial (v, CoeffRing.one) - -(* compare two tables of exponents *) -fun var_ord ((_,e1),(_,e2)) = list_ord (prod_ord StrName.name_ord ExpRing.ord) - (StrName.NTab.list_of e1, - StrName.NTab.list_of e2) - -fun collapse_t (c,e) = let - fun fld (k,v) = if ExpRing.is_zero v - then StrName.NTab.delete k - else I -in (c, StrName.NTab.fold fld e e) 
-end - -(* merge variable tables using f *) -fun merge_vars t1 t2 = let - fun merge_add NONE (SOME v) = v - | merge_add (SOME v) NONE = v - | merge_add (SOME v1) (SOME v2) = ExpRing.+(v1,v2) - | merge_add _ _ = raise Match - val allkeys = StrName.NSet.union_merge (keys t1) (keys t2) - fun fld k = StrName.NTab.update - (k, merge_add (StrName.NTab.lookup t1 k) - (StrName.NTab.lookup t2 k)) -in StrName.NSet.fold fld allkeys StrName.NTab.empty -end - - -val simplify = let - fun merge (t1::t2::ts) = if var_ord (t1,t2) = EQUAL - then (CoeffRing.+(fst t1, fst t2), snd t1)::merge ts - else t1::t2::merge ts - | merge rem = rem -in filter_out (CoeffRing.is_zero o fst) o merge o sort var_ord -end - -fun op + (p1, p2) = simplify (p1 @ p2) - -fun op * (p1, p2) = - let fun mult_terms (c1,e1) (c2,e2) = (CoeffRing.*(c1,c2), merge_vars e1 e2) - in map_product mult_terms p1 p2 - end - -fun pretty_term (coeff,exp) = - Pretty.block (CoeffRing.pretty coeff :: - (StrName.NTab.fold - (fn (k,v) => - append [Pretty.str "*", - Pretty.str k, - Pretty.str "^", - ExpRing.pretty v]) - exp [])) -fun pretty poly = Pretty.block (Pretty.separate " +" (map pretty_term poly)) -val print = Pretty.writeln o pretty - -end - -structure IntPoly = -struct -structure PR = PolyRingFun( - structure CoeffRing = IntRing - structure ExpRing = IntRing) - -open PR -open SimpleLexer - - -fun factor x = - x |> ( - (ident --| sym "^" -- num >> (fn (ID id, INT p) => monomial(id,p) - | _ => raise ERROR "alg.ML:factor:1")) - || (ident >> (fn (ID id) => var id - | _ => raise ERROR "alg.ML:factor:2")) - || (num >> (fn (INT n) => const n - | _ => raise ERROR "alg.ML:factor:3"))) -(* -fun term x = x|>((factor --| sym "*" -- term >> op* ) || factor) -fun expr x = x|>( - (term --| sym "+" -- expr >> op+) - || term) - -*) - -end - - - - -functor MatrixFun (structure URing : RING) : MATRIX = -struct - -structure URing = URing -type 'a vector = 'a list -type 'a matrix = 'a list list -type T = URing.T matrix - -fun dot_product v1 v2 = fold2 (fn x => fn y => fn sum => (URing.+(URing.*(x,y),sum))) v1 v2 URing.zero - -val singleton = fn x => [[x]] - -local - fun split_col [] h t = (rev h,rev t) - | split_col ([]::rows) _ _ = ([],[]) - | split_col ((e::cols)::rows) h t = split_col rows (e::h) (cols::t) - - fun mult' [] _ = [] - | mult' (r::rs) cs = let - val mrow = fold (fn c => fn row => (dot_product r c)::row) cs [] - in (mrow)::(mult' rs cs) - end -in -fun transpose m = case (split_col m [] []) - of ([],[]) => [] - | (col,rest) => col::(transpose rest) - -fun mult m1 m2 = mult' m1 (transpose m2) -end (* local *) -fun map f m = List.map (fn row => List.map f row) m -fun scalar_mult k m = map (fn e => URing.*(k,e)) m -fun fold cmb_x base_x cmb_y base_y m = - Basics.fold cmb_y (List.map (fn row => Basics.fold cmb_x row base_x) m) base_y - -fun flatten m = let - fun merge_rows m1 [] = m1 - | merge_rows m1 m2 = map2 (fn r1 => fn r2 => r2 @ r1 (* for a left fold *)) - m1 m2 - fun merge_cols m1 m2 = transpose (merge_rows (transpose m1) (transpose m2)) -in fold merge_rows [] merge_cols [] m -end - -fun sum_of_squares m = fold (fn x => fn s => URing.+(URing.*(x,x),s)) URing.zero (curry URing.+) URing.zero m -fun kronecker_product m1 m2 = flatten (map (fn a => scalar_mult a m1) m2) -fun tensor lst = Basics.fold kronecker_product lst [[URing.one]] -fun ident dim = - let fun row n = List.map (fn a => if a=n then URing.one else URing.zero) - (0 upto (dim-1)) - in List.map row (0 upto (dim-1)) - end - -(* cartesion product of lists, pairing elems with f *) -fun cart f 
lst1 lst2 = - Library.flat (List.map (fn a => List.map (fn b => f a b) lst2) lst1) - -fun tensor_basis 0 dim = [[]] - | tensor_basis size dim = let - val smaller = tensor_basis (size-1) dim - in cart (curry op::) (0 upto (dim-1)) smaller - end - -(* apply the given permutation of tensors in a computation basis of - * dimension dim. *) -fun permute_tensor perm dim = - let - val basis = ident dim - val tbasis = tensor_basis (length perm) dim - val permuted_tbasis = - map ((fn x=>[x]) o nth basis) - (List.map (Permutation.apply_perm perm) tbasis) - in - transpose (List.map ((fn [x] => x - | _ => raise ERROR "alg.ML:permute_tensor:1") - o tensor) permuted_tbasis) - end; - -fun pretty m = Pretty.chunks (List.map (Pretty.list "[" "]") (map (URing.pretty) m)); -val print = Pretty.writeln o pretty - -end (* structure MatrixFun *) - -structure IntMatrix = MatrixFun(structure URing = IntRing) diff --git a/core/expressions/coeff.ML b/core/expressions/coeff.ML deleted file mode 100644 index 45a61416..00000000 --- a/core/expressions/coeff.ML +++ /dev/null @@ -1,146 +0,0 @@ -signature FIELD = -sig - type T; - val add : T -> T -> T; - val negate : T -> T; - val subtr : T -> T -> T; - val mult : T -> T -> T; - (* changed from div to divide to not clobber built-in -ak *) - val divide : T -> T -> T; - val eq : T -> T -> bool; - val zero : T; - val one : T; - val is_zero : T -> bool; - val is_one : T -> bool; - val ord : (T * T) -> General.order; - val pretty : T -> Pretty.T; - val print : T -> unit; -end - -signature RATIONAL = -sig - include FIELD; - val mk : int * int -> T - - val get_num : T -> int - val get_denom : T -> int - val reduce_mod : int -> T -> T -end - - -structure DB_Rational -(* : RATIONAL *) -= struct -type T = - int (* numerator *) - * int; (* denominator *) - - -fun ord ((d1,n1), (d2,n2)) = - let val (lhs,rhs) = (d1*n2,d2*n1) - in - if lhs < rhs then General.LESS - else if lhs = rhs then General.EQUAL - else General.GREATER - end - -fun eq x y = (ord (x, y) = EQUAL); - -(* use: Integer.gcd -fun gcd (m,n) = - if m=n then m - else if m mod 2 = 0 then - if n mod 2 = 0 then 2 * gcd(m div 2, n div 2) - else gcd(m div 2, n) - else (*m odd*) - if n mod 2 = 0 then gcd(m, n div 2) - else (*both odd*) - if m - fn limitDepth => - fn printElement => - fn obj => - Pretty.writeln (X.pretty_name obj)); -*) - -(* TODO: eq and match should be in uncurried form by default *) - -structure AlgFormat = -struct -exception not_supported_exp -datatype format = MATHEMATICA | MATLAB | LATEX | PLAIN -fun of_string "mathematica" = MATHEMATICA - | of_string "matlab" = MATLAB - | of_string "latex" = LATEX - | of_string "plain" = PLAIN - | of_string _ = raise not_supported_exp -end - -(* Basic stuff for an expression which might have variables *) -signature EXPR = -sig - (* NOTE: - X.T = type of variables allowed in expressions - T X.NTab.T = mapping of vars to expressions *) - exception BadExpression of string; - type T; (* expressions *) - val ord : T * T -> General.order; - val eq : T -> T -> bool; - val free_vars : T -> X.NSet.T; - (*val subs : T X.NTab.T -> T -> T;*) - - (* matching expressions *) - (*type match (* substitutions for variables in a pattern expression *) - val empty_match : match - val match : T -> T -> match -> match option; - val subst : match -> T -> T*) - - (* like f o g; this should just composes substitutions *) - (*val compose_matches : match -> match -> match; - - val pretty_match : match -> Pretty.T; - val print_match : match -> unit;*) - - (* internal stuff *) - (*val subst_of_match : 
match -> T X.NTab.T; - val subst_in_match_pair : match -> T * T -> T * T;*) - - - (* gaussian elimination *) - (*val eliminate : T -> T -> T X.NTab.T -> T X.NTab.T option;*) - - val pretty : T -> Pretty.T; - val pretty_math : AlgFormat.format -> T -> Pretty.T; - val print : T -> unit; - -end diff --git a/core/expressions/hilb.ML b/core/expressions/hilb.ML deleted file mode 100644 index 102586fd..00000000 --- a/core/expressions/hilb.ML +++ /dev/null @@ -1,195 +0,0 @@ -signature GRAPH_ADAPTER = -sig - structure Graph : OGRAPH - val pretty_vertex : AlgFormat.format -> Graph.T -> - V.name -> Pretty.T -end - -(* construct tensor term from a DAG *) -functor TensorTermFun ( - structure Graph : OGRAPH - val pretty_vertex : AlgFormat.format -> Graph.T -> - V.name -> Pretty.T) = -struct - -datatype term = - Tens of term list | - Comp of term list | - V of V.name | - Sigma of Permutation.T | - Id of int - -(* a component holds a term and a list of inputs in order *) -type component = E.name list * term * E.name list - - -(* tensor a list of components, gathering inputs *) -fun tensor [] = ([],Id 0,[]) - | tensor [x] = x - | tensor clist = - let val (input, terms, output) - = fold_rev ( - fn (i1,t,o1) => - fn (i1s, ts, o1s) => - (i1@i1s, (t::ts), o1@o1s)) - clist - ([], [], []) - in (input, Tens terms, output) - end -val flat_compose = let - fun fc [] = [] - | fc ((Comp l1)::l2) = l1 @ fc l2 - | fc (t::ts) = t :: fc ts -in Comp o fc -end - -(* perform the composition t1 ; t2 *) -fun compose (i1, t1, o1) (_, Id _, _) = (i1, t1, o1) - | compose (i1, t1, o1) (i2, t2, o2) = let - val oset = E.NSet.of_list o1 - val new_ins = filter_out (E.NSet.contains oset) i2 - val full_t1 = case length new_ins - of 0 => t1 - | n => (case t1 of Tens ts => Tens (ts@[Id n]) - | _ => Tens [t1, Id n]) - val perm = Permutation.get_perm E.name_ord (o1 @ new_ins) i2 - val sigma = if Permutation.is_id perm then [] else [Sigma perm] -in - (i1 @ new_ins, flat_compose (full_t1 :: sigma @ [t2]), o2) -end - - -local - (* find the "longest" directed acyclic graph that spans the given graph, applying - * the three given functions to add edges of each kind. 
- * back_edge : an edge that creates a directed cycle - * fw_edge : an edge that creates a cycle but not a directed cycle - * new_edge : an edge that creates no cycles *) - fun dag_traverse back_edge fw_edge new_edge graph = let - val out_elist = E.NSet.list_of o (Graph.get_out_edges graph) - - (* generate a dag from the given edge list *) - fun dag depth (e::es) dg vtab = - let - val (edata, (s, t)) = Graph.get_edge_info graph e - val (dg', vtab') = - (case V.NTab.lookup vtab t - (* if target hasn't been seen, add edge and recurse *) - of NONE => dag (depth + 1) (out_elist t) - (dg |> new_edge e edata s t) - (vtab |> V.NTab.update (t, depth + 1)) - (* if target has been seen, only add edge if its rank is lower *) - | SOME d => if (depth < d orelse d = ~1) - then (dg |> fw_edge e edata s t, vtab) - else (dg |> back_edge e edata s t, vtab)) - in dag depth es dg' vtab' - end - | dag _ [] dg vtab = (dg, vtab) - - (* pull a fresh vertex *) - fun fresh_vert vtab = let - val vnames = Graph.get_vertices graph - fun fv NONE = NONE - | fv (SOME v) = (case V.NTab.lookup vtab v - of NONE => SOME v - | _ => fv (V.NSet.next_bigger vnames v)) - in fv (V.NSet.get_min vnames) - end - - (* iteratively run dag until we run out of vertices *) - fun dag_until_done dg vtab = - case fresh_vert vtab - of NONE => dg - | (SOME v) => let - val vtab = (V.NTab.map_all (K (K ~1)) vtab) - |> V.NTab.update (v,0) - val (dg',vtab') = dag 0 (out_elist v) dg vtab - in dag_until_done dg' vtab' - end - val init_dg = E.NSet.fold Graph.delete_edge (Graph.get_edges graph) graph - in dag_until_done init_dg V.NTab.empty - end - fun ignore_edge _ _ _ _ = I - fun keep_edge _ edata s t g = g|>Graph.add_edge_anon edata s t - (* flip_edge also removes self-loops *) - fun flip_edge _ edata s t g = if V.name_ord (s,t) = EQUAL then g - else g|>Graph.add_edge_anon edata t s -in - (* remove self-loops and reverse back-edges *) - val convert_to_dag = dag_traverse flip_edge keep_edge keep_edge - (* remove self-loops and back-edges *) - val spanning_dag = dag_traverse ignore_edge keep_edge keep_edge - val spanning_tree = dag_traverse ignore_edge ignore_edge keep_edge -end - - (* will probably not terminate if dag isn't directed acyclic *) - fun get_dag_ranks dag = let - val outputs = V.NSet.filter - (fn v => E.NSet.is_empty (Graph.get_out_edges dag v)) - (Graph.get_vertices dag) - val vnames = V.NSet.list_of - (V.NSet.subtract (Graph.get_vertices dag) outputs) - fun test seen v = not (V.NSet.contains seen v) - andalso V.NSet.forall - (V.NSet.contains seen) - (Graph.get_predecessor_vertices dag v) - fun mk seen = - case filter (test seen) vnames - of []=>[] | lst => lst :: mk (V.NSet.add_list lst seen) - in mk V.NSet.empty @ [V.NSet.list_of outputs] - end - - - -fun of_graph graph = let - fun wrap_vertex v = - (E.NSet.list_of (Graph.get_in_edges graph v), - V v, - E.NSet.list_of (Graph.get_out_edges graph v)) - val dag:Graph.T = convert_to_dag graph - val ranks:V.name list list = get_dag_ranks dag - val tens_rank = tensor o (map wrap_vertex) -in (graph, #2 (fold_rev compose (map tens_rank ranks) ([], Id 0, []))) -end - -open AlgFormat -fun pretty format (graph,term) = let - val ((topen,tclose,tsep),csep) = - case format - of MATHEMATICA => (("T[", "]", ","), " .") - | PLAIN => (("(", ")", " x"), " o") - | _ => raise not_supported_exp - fun pr (V name) = pretty_vertex format graph name - | pr (Id num) = Pretty.str ("id2[" ^ (Int.toString num) ^ "]") - | pr (Sigma perm) = Pretty.block[Pretty.str "sig", - Pretty.str_list - "[" "]" - (map 
Int.toString perm)] - | pr (Comp lst) = Pretty.block - ([Pretty.str "("]@ - (Pretty.separate - csep (map pr (rev lst)))@ - [Pretty.str ")"]) - | pr (Tens lst) = Pretty.block - ([Pretty.str topen]@ - (Pretty.separate - tsep (map pr lst))@ - [Pretty.str tclose]) -in pr term -end - - - -val print = Pretty.writeln o (pretty PLAIN) -fun printc (_,t,_) = print t -end - -(* -structure RGGraphAdapter : GRAPH_ADAPTER = -struct -structure Graph = RGGraph -fun pretty_vertex _ _ = V.pretty_name - -end -*) - diff --git a/core/expressions/lex.ML b/core/expressions/lex.ML deleted file mode 100644 index 7bff0dc0..00000000 --- a/core/expressions/lex.ML +++ /dev/null @@ -1,86 +0,0 @@ -(* A very basic lexer *) - -signature SIMPLE_LEXER = -sig - type elem; - datatype token = INT of int - | ID of string - | LIT of string - | SYM of string - | NOOP; - exception MoreTokens of elem list; - - (* Parses out an identifier (ID): a series of letters, digits or - * quasi-letters, starting with a letter or backslash - *) - val ident : elem list -> token * elem list; - (* Produces a NOOP token *) - val noop : elem list -> token * elem list; - (* Parses out a positive integer number (INT) *) - val num : elem list -> token * elem list; - (* Discards any number of whitespace characters (token will be NOOP) *) - val space : elem list -> token * elem list; - (* Parses a literal: a string surrounded in double-quotes, which may contain - C-style escaped characters *) - val string_literal : elem list -> token * elem list; - - (* Parses out the specified text *) - val sym : elem -> elem list -> token * elem list; - (* Parse a string using the specified parser; if this fails to parse the whole - string, MoreTokens will be thrown *) - val parse_with : (elem list -> 'a * elem list) -> string -> 'a; -end; - -structure SimpleLexer : SIMPLE_LEXER = -struct - type elem = Symbol.symbol; - datatype token = INT of int | ID of string | LIT of string | SYM of string | NOOP; - exception MoreTokens of elem list; - val wordchar = fn c => (Symbol.is_quasi_letter c) orelse (Symbol.is_digit c); - - (* terminal definitions *) - val space = Scan.many Symbol.is_blank >> (K NOOP); - fun wrap_term t = space |-- t --| space - val num = wrap_term (Scan.many1 - Symbol.is_digit - >> implode - >> (the o Int.fromString) - >> INT); - - val ident = wrap_term (( - (Scan.one Symbol.is_letter || Scan.this_string "\\") - ::: Scan.many wordchar) - >> implode - >> ID); - val sym = fn str => (wrap_term (Scan.this_string str >> (K (SYM str)))); - val noop = fn toks => (NOOP, toks); - - (* string literal code stolen from isabelle/ml_parse.ML *) - val scan_escape = - Scan.one (member (op =) (raw_explode "\"\\abtnvfr")) || - $$ "^" ^^ Scan.one (fn s => ord "@" <= ord s andalso ord s <= ord "_") || - Scan.one Symbol.is_ascii_digit ^^ - Scan.one Symbol.is_ascii_digit ^^ - Scan.one Symbol.is_ascii_digit; - - val scan_str = Scan.one - (fn s => Symbol.is_printable s andalso - s <> "\"" andalso s <> "\\") || - ($$ "\\") |-- scan_escape; - - (*val scan_gap = ($$ "\\") ^^ scan_blanks1 ^^ ($$ "\\"); - val scan_gaps = Scan.repeat scan_gap >> implode;*) - - val string_literal = wrap_term (( - ($$ "\"") - |-- (Scan.repeat scan_str >> implode) - --| ($$ "\"")) >> LIT); - - fun parse_with p str = - let val (thing, toks) = p ((Symbol.explode str)@[Symbol.eof]) - in if toks = [Symbol.eof] then thing else raise MoreTokens toks - end; -end; (* struct: SimpleLexer *) - -(* vim:et:sts=2:ts=2:sw=2 -*) diff --git a/core/expressions/linrat_angle_expr.ML b/core/expressions/linrat_angle_expr.ML 
deleted file mode 100644 index 73b05d99..00000000 --- a/core/expressions/linrat_angle_expr.ML +++ /dev/null @@ -1,279 +0,0 @@ -(* linear combinations with rational coeffs (and pi) *) -signature LINRAT_ANGLE_EXPR -= sig - type T (* expressions *) - val ord : T * T -> General.order - val eq : T -> T -> bool - structure Coeff : RATIONAL - exception parse_exp - (* Parse a string into a linear rational expression *) - (* any alphabetic text (and \) is considered a variable name *) - (* except \pi *) - (* constants and coeffs of \pi are taken as coeffs of \pi, *) - (* and calculated modulo 2 *) - val parse : string -> T - val zero : T - val is_zero : T -> bool - val pi : T - val mk_var : X.name -> T - val mk : Coeff.T -> (X.name * Coeff.T) list -> T - val mk_const : Coeff.T -> T - val subtr_expr : T -> T -> T - val add_expr : T -> T -> T - val scale_expr : Coeff.T -> T -> T - - (* For SOME x, return the coefficient for x. For NONE, return the constant - * term. *) - val coeff_for_var : T -> X.name option -> Coeff.T - - val free_vars : T -> X.NSet.T - - val pretty : T -> Pretty.T - val pretty_math : AlgFormat.format -> T -> Pretty.T - val print : T -> unit -end - -structure LinratAngleExpr : LINRAT_ANGLE_EXPR = -struct - - (* Variable names in expressions *) - exception parse_exp - structure PiCoeff = RationalMod2 - structure Coeff = Rational - - type T = PiCoeff.T * (Coeff.T XTab.T) (* the PiCoeff is for Pi *) - - (* =====================================================================*) - - - fun kv_eq ((k1,v1),(k2,v2)) = - X.name_eq (k1, k2) andalso Coeff.eq v1 v2 - - (* don't use basis library version because it doesn't raise exception - on unequal length lists *) - fun forall_pair _ ([],[]) = true - | forall_pair eq (h1::t1,h2::t2) = eq (h1,h2) andalso forall_pair eq (t1,t2) - | forall_pair _ _ = raise ListPair.UnequalLengths - - fun eq (x,xT) (y,yT) = - if PiCoeff.eq x y then - forall_pair kv_eq (XTab.list_of xT, XTab.list_of yT) - handle ListPair.UnequalLengths => false - else false - - (* =====================================================================*) - - fun kv_order ((k1,v1),(k2,v2)) = - if X.name_ord(k1,k2) = General.EQUAL - then - Coeff.ord(v1,v2) - else X.name_ord(k1,k2) - - fun kv_list_ord ([],[]) = General.EQUAL - | kv_list_ord ([],_) = General.LESS - | kv_list_ord (_,[]) = General.GREATER - | kv_list_ord (x::xs,y::ys) = - if kv_order(x,y) <> General.EQUAL then - kv_order(x,y) - else - kv_list_ord(xs,ys) - - fun ord ((x,xT), (y,yT)) = - if Coeff.ord(x,y) <> General.EQUAL - then Coeff.ord(x,y) - else let - val xs = XTab.list_of xT - val ys = XTab.list_of yT - in - kv_list_ord(xs,ys) - end - - (* various mk functions ============================================ *) - val empty = XTab.empty : Coeff.T XTab.T - - - val zero = (PiCoeff.zero, empty) - fun is_zero (cpi,vars) = - (PiCoeff.is_zero cpi andalso XTab.is_empty vars) - - val pi = (PiCoeff.one, empty) - - fun mk const kv_list = (const, fold XTab.add kv_list empty) - - fun mk_var nm = mk PiCoeff.zero [(nm, Coeff.mk (1, 1))] - - fun coeff_for_var (_,tab) (SOME x) = - (case XTab.get_opt tab x of SOME r => r | NONE => Rational.zero) - | coeff_for_var (k,_) NONE = k - - fun free_vars (_,tab) = XTab.get_dom_set tab - - (* reduce exprs to a normal by removing variables whose Coeff = 0 ======*) - fun reduce_expr (p, t) = - let fun red (k,v) tab = - if Coeff.is_zero v - then XTab.delete k tab - else tab - in - (p, XTab.fold red t t) - end - - (* multiplication of an expression by a scalar ===================*) - fun scale_expr s 
(e_pi, e_tab) = - (PiCoeff.mult s e_pi, XTab.map_all (Coeff.mult s) e_tab) - - fun mk_const k = scale_expr k pi - - (* arithmetic addition for expressions ========================*) - fun add_expr (pi1,tab1) (pi2,tab2) = - let - fun f (k,v) t = - if XTab.dom_contains t k - then XTab.map_entry (Coeff.add v) k t - else XTab.set (k, v) t - val newtab = XTab.fold f tab2 tab1 - in reduce_expr (PiCoeff.add pi1 pi2, newtab) end - - (* arithmetic subtraction for expressions ========================*) - fun subtr_expr e1 e2 = - add_expr e1 (scale_expr (Coeff.mk (~1, 1)) e2) - - - (* =====================================================================*) - - (* - fun pretty (pi, tab) = - let fun pretty1 (k,v) = - if (Coeff.is_one v) then Pretty.str k - else Pretty.block [Coeff.pretty v, Pretty.str (" "^k)] - val ppi = if (Coeff.is_zero pi) then [] else [pretty1 ("\\pi", pi)] - val plist = XTab.fold (cons o pretty1) tab ppi - in - case plist of [] => Pretty.str "0" - | _ => (Pretty.block o Pretty.separate " +" o rev) plist - end - *) - - fun pretty1 (k,v) = - if Coeff.is_one (Coeff.subtr Coeff.zero v) then - Pretty.block [Pretty.str "-", X.pretty_name k] - else if Coeff.is_one v then X.pretty_name k - else Pretty.block [Coeff.pretty v, Pretty.str " ", X.pretty_name k] - - fun pretty_list [] = [Pretty.str "0"] - | pretty_list [x] = [pretty1 x] - | pretty_list (x::(xs as ((k,v)::ys))) = - (pretty1 x):: - (if Coeff.ord (v, Coeff.zero) = LESS - then Pretty.str " - " :: pretty_list ((k,Coeff.subtr Coeff.zero v)::ys) - else Pretty.str " + " :: pretty_list xs) - - fun pretty' pistr (pi, tab) = - let val tlist = rev (XTab.list_of tab) - in (Pretty.block o pretty_list) - (if Coeff.is_zero pi then tlist - else (pistr, pi)::tlist) - end - - - fun pretty_math fmt = case fmt - of AlgFormat.MATHEMATICA => pretty' (X.mk "Pi") - | AlgFormat.LATEX => pretty' (X.mk "\\pi") - | AlgFormat.PLAIN => pretty' (X.mk "pi") - | _ => raise AlgFormat.not_supported_exp - - val pretty = pretty_math AlgFormat.LATEX - - val print = Pretty.writeln o pretty - - (* functionality to parse expressions *) - structure LinratParser = - struct - structure L = SimpleLexer - datatype expr = EXP of T - | RAT of Coeff.T - | TOK of L.token - - (* wrap all the terminals in a TOK *) - val num = L.num >> TOK - val ident = L.ident >> TOK - val sym = fn s => (L.sym s) >> TOK - val noop = L.noop >> TOK - - exception semantic_exp - - (* semantic ops *) - fun rat (TOK(L.INT n), TOK(L.INT d)) = RAT (Coeff.mk (n, d)) - | rat _ = raise semantic_exp - fun pirat (TOK(L.INT n), TOK(L.INT d)) = RAT (PiCoeff.mk (n, d)) - | pirat _ = raise semantic_exp - fun whole_num mker n = mker (n,TOK(L.INT 1)) - fun rat_term (RAT r, TOK(L.ID id)) = EXP (mk (PiCoeff.mk (0, 1)) [(X.mk id,r)]) - | rat_term _ = raise semantic_exp - fun const_term (RAT r) = EXP (mk r []) - | const_term _ = raise semantic_exp - val one_pi = const_term (RAT (Coeff.mk (1, 1))) - fun var_term (TOK(L.ID id)) = EXP (mk_var (X.mk id)) - | var_term _ = raise semantic_exp - fun plus (EXP a, EXP b) = EXP (add_expr a b) - | plus _ = raise semantic_exp - fun neg (EXP a) = EXP (subtr_expr zero a) - | neg _ = raise semantic_exp - - (* the grammar, decorated with semantic operators *) - fun pi x = x|>((sym "\\" || noop) - --| (sym "p" || sym "P") --| (sym "i" || sym "I")) - - fun coeff mker x = x|>( - ((num --| sym "/" -- num) >> mker) - || (sym "(" |-- (coeff mker) --| sym ")") - || num >> (whole_num mker)) - - fun frac x = let - val maybe_num = (num --| (sym "*" || noop)) - || (fn x => (TOK(L.INT 1),x)) - fun 
mkrat_term ((n,i),d) = rat_term (rat (n,d),i) - in - x |> ( ((maybe_num --| pi --| sym "/" -- num) >> pirat >> const_term) - ||((maybe_num -- ident --| sym "/" -- num) >> mkrat_term) - || (sym "(" |-- frac --| sym ")")) - end - - - (* first term *) - fun term x = x|>( - ((sym "-" |-- term) >> neg) - || frac - || (((coeff pirat) --| (sym "*" || noop) --| pi) >> const_term) - || (pi >> (K one_pi)) - || (((coeff rat) --| (sym "*" || noop) -- ident) >> rat_term) - || ((coeff pirat) >> const_term) - || (ident >> var_term) - || (sym "(" |-- expr --| sym ")")) - - (* successive terms *) - and term' x = x|>( - (sym "+" |-- term) - || (sym "-" |-- term >> neg)) - and terms x = x|>( - ((term' -- terms) >> plus) - || term') - and expr x = x|>( - (term -- terms >> plus) - || term) - - fun parse str = - (case Scan.catch (L.parse_with expr) str - of EXP e => e - | _ => raise semantic_exp) - handle L.MoreTokens _ => raise parse_exp - | Fail _ => raise parse_exp - | semantic_exp => raise parse_exp - - end - - fun parse "" = zero - | parse s = LinratParser.parse s - -end - diff --git a/core/expressions/linrat_angle_matcher.ML b/core/expressions/linrat_angle_matcher.ML deleted file mode 100644 index dfd944b5..00000000 --- a/core/expressions/linrat_angle_matcher.ML +++ /dev/null @@ -1,145 +0,0 @@ -signature LINRAT_ANGLE_MATCHER = -sig - (* a matrix in Gauss normal form, along with pattern names and target names. The columns - * of the matrix are labelled by (list_of p_names) @ (list_of t_names) @ [CONST]. Note - * this code doesn't assume NSet.list_of returns names in a particular order, only that it - * will always return names in the same order. *) - type psubst = RationalMatrix.T * X.NSet.T * X.NSet.T - - (* a substitution table, along with a set of names to avoid when introducing new free variables - * (e.g. for under-specified systems of equations) *) - type subst = LinratAngleExpr.T XTab.T * X.NSet.T - - (* initialise psubst given sets of names free in pattern and target *) - val init_psubst_from_names : X.NSet.T * X.NSet.T -> psubst - - (* compute the matrix row corresponding to these expressions and add it with gauss_update. - * If gauss_update retuns NONE, this signals a match failure, so return NONE. *) - val match : LinratAngleExpr.T * LinratAngleExpr.T -> psubst -> psubst option - - (* take a rational matrix in GNF with column labels to the associated - * substitution map. For under-specified systems, free variables - * will be sent to new variables fresh w.r.t. the target. *) - val solve_psubst : psubst -> subst - - (* apply the substitution. If an uncognised variable is encountered, replace it with a - * new fresh variable. 
(hence the updated subst needs to be returned) *) - val subst_in_expr : subst -> LinratAngleExpr.T -> subst * LinratAngleExpr.T - - (* for outputting subst data *) - val pretty_subst : subst -> Pretty.T -end - -structure LinratAngleMatcher : LINRAT_ANGLE_MATCHER = -struct - type psubst = RationalMatrix.T * X.NSet.T * X.NSet.T - type subst = LinratAngleExpr.T XTab.T * X.NSet.T - - fun init_psubst_from_names (p_names, t_names) = (RationalMatrix.empty, p_names, t_names) - - fun match (p_expr, t_expr) (m,p_names,t_names) = let - val const = Rational.subtr (LinratAngleExpr.coeff_for_var p_expr NONE) - (LinratAngleExpr.coeff_for_var t_expr NONE) - val row = - map (fn x => ( - LinratAngleExpr.coeff_for_var p_expr (SOME x) - )) (X.NSet.list_of p_names) @ - map (fn x => ( - Rational.negate (LinratAngleExpr.coeff_for_var t_expr (SOME x)) - )) (X.NSet.list_of t_names) @ - [const] - in case RationalMatrix.gauss_update_reduced - (Rational.reduce_mod 2) - (X.NSet.cardinality p_names) row m - of SOME m' => SOME (m',p_names,t_names) - | NONE => NONE - end - - fun solve_psubst (m,p_names,t_names) = let - val cols = (X.NSet.list_of p_names) @ (X.NSet.list_of t_names) - val free = X.NSet.cardinality p_names - fun add_entry i (j,y) (x_opt,expr,tab,avoids) = - case x_opt - of NONE => if Rational.is_one (RationalMatrix.get m (i,j)) - then (SOME y, LinratAngleExpr.zero, tab, avoids) - else (NONE, LinratAngleExpr.zero, tab, avoids) - | SOME x => - let - val (tab',avoids',e) = - if j < free then - case XTab.get_opt tab y - of SOME e => (tab,avoids,e) - | NONE => - let - val (fresh,avoids') = avoids |> X.NSet.add_new y - val fresh_e = LinratAngleExpr.mk_var fresh - in (tab |> XTab.add (y, fresh_e), avoids', fresh_e) - end - else (tab, avoids, LinratAngleExpr.mk_var y) - in - ( - SOME x, - LinratAngleExpr.subtr_expr expr - (LinratAngleExpr.scale_expr (RationalMatrix.get m (i,j)) e), - tab', - avoids' - ) - end - fun add_row i (tab,avoids) = - let - (* extract the leading column name, along with the rest of the expression *) - val (x_opt,expr,tab',avoids') = - fold_index (add_entry i) cols (NONE, LinratAngleExpr.zero, tab, avoids) - (* treat the rightmost column as the constant value *) - val expr = LinratAngleExpr.subtr_expr expr - (LinratAngleExpr.mk_const (RationalMatrix.get m (i, RationalMatrix.num_cols m - 1))) - in - case x_opt - of SOME x => - if XTab.dom_contains tab x - then raise RationalMatrix.not_in_rref_exp m - else (tab' |> XTab.add (x,expr), avoids') - | NONE => raise RationalMatrix.not_in_rref_exp m - end - - in - fold add_row - ((RationalMatrix.num_rows m - 1) downto 0) - (XTab.empty, t_names) - end - - fun subst_in_expr (tab, avoids) expr = let - fun subst_for_var x ((tab,avoids),accum) = - let - val (e, tab', avoids') = - case XTab.get_opt tab x - of SOME e => (e, tab, avoids) - | NONE => - let - val (fresh, avoids') = avoids |> X.NSet.add_new x - val fresh_e = LinratAngleExpr.mk_var fresh - in - (fresh_e, tab |> XTab.add (x, fresh_e), avoids') - end - in ( - (tab', avoids'), - LinratAngleExpr.add_expr accum - (LinratAngleExpr.scale_expr - (LinratAngleExpr.coeff_for_var expr (SOME x)) e) - ) - end - in X.NSet.fold - subst_for_var - (LinratAngleExpr.free_vars expr) - ((tab,avoids), LinratAngleExpr.mk_const (LinratAngleExpr.coeff_for_var expr NONE)) - end - - fun pretty_subst (tab, avoids) = - Pretty.chunks [Pretty.block [Pretty.str "SUBST {"], - Pretty.block [Pretty.str " ", Pretty.chunks [ - Pretty.block [Pretty.str "Table: ", XTab.pretty LinratAngleExpr.pretty tab], - Pretty.block [Pretty.str 
"Avoids: ", X.NSet.pretty avoids] - ]], - Pretty.str "}"] - -end diff --git a/core/expressions/linrat_expr.ML b/core/expressions/linrat_expr.ML deleted file mode 100644 index 0ee262dd..00000000 --- a/core/expressions/linrat_expr.ML +++ /dev/null @@ -1,273 +0,0 @@ -(* linear combinations with rational coeffs (and pi) *) -signature LINRAT_EXPR -= sig - type T (* expressions *) - val ord : T * T -> General.order - val eq : T -> T -> bool - structure Coeff : RATIONAL - exception parse_exp - (* Parse a string into a linear rational expression *) - (* any alphabetic text (and \) is considered a variable name *) - val parse : string -> T - val zero : T - val is_zero : T -> bool - val one : T - val mk_var : X.name -> T - val mk : Coeff.T -> (X.name * Coeff.T) list -> T - val mk_const : Coeff.T -> T - val subtr_expr : T -> T -> T - val add_expr : T -> T -> T - val scale_expr : Coeff.T -> T -> T - - (* For SOME x, return the coefficient for x. For NONE, return the constant - * term. *) - val coeff_for_var : T -> X.name option -> Coeff.T - - val free_vars : T -> X.NSet.T - - val pretty : T -> Pretty.T - (*val pretty_math : AlgFormat.format -> T -> Pretty.T*) - val print : T -> unit -end - -structure LinratExpr : LINRAT_EXPR = -struct - - (* Variable names in expressions *) - exception parse_exp - structure Coeff = Rational - - type T = Coeff.T * (Coeff.T XTab.T) - (* a constant coefficient, plus a table of coeffs for variables *) - - (* =====================================================================*) - - - fun kv_eq ((k1,v1),(k2,v2)) = - X.name_eq (k1, k2) andalso Coeff.eq v1 v2 - - (* don't use basis library version because it doesn't raise exception - on unequal length lists *) - fun forall_pair _ ([],[]) = true - | forall_pair eq (h1::t1,h2::t2) = eq (h1,h2) andalso forall_pair eq (t1,t2) - | forall_pair _ _ = raise ListPair.UnequalLengths - - fun eq (x,xT) (y,yT) = - if Coeff.eq x y then - forall_pair kv_eq (XTab.list_of xT, XTab.list_of yT) - handle ListPair.UnequalLengths => false - else false - - (* =====================================================================*) - - fun kv_order ((k1,v1),(k2,v2)) = - if X.name_ord(k1,k2) = General.EQUAL - then - Coeff.ord(v1,v2) - else X.name_ord(k1,k2) - - fun kv_list_ord ([],[]) = General.EQUAL - | kv_list_ord ([],_) = General.LESS - | kv_list_ord (_,[]) = General.GREATER - | kv_list_ord (x::xs,y::ys) = - if kv_order(x,y) <> General.EQUAL then - kv_order(x,y) - else - kv_list_ord(xs,ys) - - fun ord ((x,xT), (y,yT)) = - if Coeff.ord(x,y) <> General.EQUAL - then Coeff.ord(x,y) - else let - val xs = XTab.list_of xT - val ys = XTab.list_of yT - in - kv_list_ord(xs,ys) - end - - (* various mk functions ============================================ *) - val empty = XTab.empty : Coeff.T XTab.T - - - val zero = (Coeff.zero, empty) - fun is_zero (cpi,vars) = - (Coeff.is_zero cpi andalso XTab.is_empty vars) - - val one = (Coeff.one, empty) - - fun mk const kv_list = (const, fold XTab.add kv_list empty) - - fun mk_var nm = mk Coeff.zero [(nm, Coeff.mk (1, 1))] - - fun coeff_for_var (_,tab) (SOME x) = - (case XTab.get_opt tab x of SOME r => r | NONE => Rational.zero) - | coeff_for_var (k,_) NONE = k - - fun free_vars (_,tab) = XTab.get_dom_set tab - - (* reduce exprs to a normal by removing variables whose Coeff = 0 ======*) - fun reduce_expr (p, t) = - let fun red (k,v) tab = - if Coeff.is_zero v - then XTab.delete k tab - else tab - in - (p, XTab.fold red t t) - end - - (* multiplication of an expression by a scalar ===================*) - fun scale_expr 
s (e_pi, e_tab) = - (Coeff.mult s e_pi, XTab.map_all (Coeff.mult s) e_tab) - - fun mk_const k = scale_expr k one - - (* arithmetic addition for expressions ========================*) - fun add_expr (const1,tab1) (const2,tab2) = - let - fun f (k,v) t = - if XTab.dom_contains t k - then XTab.map_entry (Coeff.add v) k t - else XTab.set (k, v) t - val newtab = XTab.fold f tab2 tab1 - in reduce_expr (Coeff.add const1 const2, newtab) end - - (* arithmetic subtraction for expressions ========================*) - fun subtr_expr e1 e2 = - add_expr e1 (scale_expr (Coeff.mk (~1, 1)) e2) - - - (* =====================================================================*) - - (* - fun pretty (pi, tab) = - let fun pretty1 (k,v) = - if (Coeff.is_one v) then Pretty.str k - else Pretty.block [Coeff.pretty v, Pretty.str (" "^k)] - val ppi = if (Coeff.is_zero pi) then [] else [pretty1 ("\\pi", pi)] - val plist = XTab.fold (cons o pretty1) tab ppi - in - case plist of [] => Pretty.str "0" - | _ => (Pretty.block o Pretty.separate " +" o rev) plist - end - *) - - fun pretty1 (k,v) = - if Coeff.is_one (Coeff.subtr Coeff.zero v) then - Pretty.block [Pretty.str "-", X.pretty_name k] - else if Coeff.is_one v then X.pretty_name k - else Pretty.block [Coeff.pretty v, Pretty.str " ", X.pretty_name k] - - fun pretty_list [] = [] - | pretty_list ((k,v)::xs) = - (if Coeff.ord (v, Coeff.zero) = LESS - then Pretty.str " - " :: pretty1 (k,Coeff.subtr Coeff.zero v) :: pretty_list xs - else Pretty.str " + " :: pretty1 (k,v) :: pretty_list xs) - - fun pretty (const, tab) = - Pretty.block - (case rev (XTab.list_of tab) - of [] => [Coeff.pretty const] - | (kv::kvs) => - (if Coeff.is_zero const then pretty1 kv :: pretty_list kvs - else Coeff.pretty const :: pretty_list (kv::kvs))) - - - (*fun pretty_math fmt = case fmt - of AlgFormat.MATHEMATICA => pretty' (X.mk "Pi") - | AlgFormat.LATEX => pretty' (X.mk "\\pi") - | AlgFormat.PLAIN => pretty' (X.mk "pi") - | _ => raise AlgFormat.not_supported_exp*) - - (*val pretty = pretty_math AlgFormat.LATEX*) - - val print = Pretty.writeln o pretty - - (* functionality to parse expressions *) - structure LinratParser = - struct - structure L = SimpleLexer - datatype expr = EXP of T - | RAT of Coeff.T - | TOK of L.token - - (* wrap all the terminals in a TOK *) - val num = L.num >> TOK - val ident = L.ident >> TOK - val sym = fn s => (L.sym s) >> TOK - val noop = L.noop >> TOK - - exception semantic_exp - - (* semantic ops *) - fun rat (TOK(L.INT n), TOK(L.INT d)) = RAT (Coeff.mk (n, d)) - | rat _ = raise semantic_exp - (*fun pirat (TOK(L.INT n), TOK(L.INT d)) = RAT (PiCoeff.mk (n, d)) - | pirat _ = raise semantic_exp*) - fun whole_num mker n = mker (n,TOK(L.INT 1)) - fun rat_term (RAT r, TOK(L.ID id)) = EXP (mk (Coeff.mk (0, 1)) [(X.mk id,r)]) - | rat_term _ = raise semantic_exp - fun const_term (RAT r) = EXP (mk r []) - | const_term _ = raise semantic_exp - (*val one_pi = const_term (RAT (Coeff.mk (1, 1)))*) - fun var_term (TOK(L.ID id)) = EXP (mk_var (X.mk id)) - | var_term _ = raise semantic_exp - fun plus (EXP a, EXP b) = EXP (add_expr a b) - | plus _ = raise semantic_exp - fun neg (EXP a) = EXP (subtr_expr zero a) - | neg _ = raise semantic_exp - - (* the grammar, decorated with semantic operators *) - (*fun pi x = x|>((sym "\\" || noop) - --| (sym "p" || sym "P") --| (sym "i" || sym "I"))*) - - fun coeff mker x = x|>( - ((num --| sym "/" -- num) >> mker) - || (sym "(" |-- (coeff mker) --| sym ")") - || num >> (whole_num mker)) - - fun frac x = let - val maybe_num = (num --| (sym "*" || 
noop)) - || (fn x => (TOK(L.INT 1),x)) - fun mkrat_term ((n,i),d) = rat_term (rat (n,d),i) - in - x |> ( ((maybe_num -- ident --| sym "/" -- num) >> mkrat_term) - || (sym "(" |-- frac --| sym ")")) - end - - - (* first term *) - fun term x = x|>( - ((sym "-" |-- term) >> neg) - || frac - || (((coeff rat) --| (sym "*" || noop) -- ident) >> rat_term) - || ((coeff rat) >> const_term) - || (ident >> var_term) - || (sym "(" |-- expr --| sym ")")) - - (* successive terms *) - and term' x = x|>( - (sym "+" |-- term) - || (sym "-" |-- term >> neg)) - and terms x = x|>( - ((term' -- terms) >> plus) - || term') - and expr x = x|>( - (term -- terms >> plus) - || term) - - fun parse str = - (case Scan.catch (L.parse_with expr) str - of EXP e => e - | _ => raise semantic_exp) - handle L.MoreTokens _ => raise parse_exp - | Fail _ => raise parse_exp - | semantic_exp => raise parse_exp - - end - - fun parse "" = zero - | parse s = LinratParser.parse s - -end - - diff --git a/core/expressions/linrat_matcher.ML b/core/expressions/linrat_matcher.ML deleted file mode 100644 index baf30bfb..00000000 --- a/core/expressions/linrat_matcher.ML +++ /dev/null @@ -1,144 +0,0 @@ -signature LINRAT_MATCHER = -sig - (* a matrix in Gauss normal form, along with pattern names and target names. The columns - * of the matrix are labelled by (list_of p_names) @ (list_of t_names) @ [CONST]. Note - * this code doesn't assume NSet.list_of returns names in a particular order, only that it - * will always return names in the same order. *) - type psubst = RationalMatrix.T * X.NSet.T * X.NSet.T - - (* a substitution table, along with a set of names to avoid when introducing new free variables - * (e.g. for under-specified systems of equations) *) - type subst = LinratExpr.T XTab.T * X.NSet.T - - (* initialise psubst given sets of names free in pattern and target *) - val init_psubst_from_names : X.NSet.T * X.NSet.T -> psubst - - (* compute the matrix row corresponding to these expressions and add it with gauss_update. - * If gauss_update retuns NONE, this signals a match failure, so return NONE. *) - val match : LinratExpr.T * LinratExpr.T -> psubst -> psubst option - - (* take a rational matrix in GNF with column labels to the associated - * substitution map. For under-specified systems, free variables - * will be sent to new variables fresh w.r.t. the target. *) - val solve_psubst : psubst -> subst - - (* apply the substitution. If an uncognised variable is encountered, replace it with a - * new fresh variable. 
(hence the updated subst needs to be returned) *) - val subst_in_expr : subst -> LinratExpr.T -> subst * LinratExpr.T - - (* for outputting subst data *) - val pretty_subst : subst -> Pretty.T -end - -structure LinratMatcher : LINRAT_MATCHER = -struct - type psubst = RationalMatrix.T * X.NSet.T * X.NSet.T - type subst = LinratExpr.T XTab.T * X.NSet.T - - fun init_psubst_from_names (p_names, t_names) = (RationalMatrix.empty, p_names, t_names) - - fun match (p_expr, t_expr) (m,p_names,t_names) = let - val const = Rational.subtr (LinratExpr.coeff_for_var p_expr NONE) - (LinratExpr.coeff_for_var t_expr NONE) - val row = - map (fn x => ( - LinratExpr.coeff_for_var p_expr (SOME x) - )) (X.NSet.list_of p_names) @ - map (fn x => ( - Rational.negate (LinratExpr.coeff_for_var t_expr (SOME x)) - )) (X.NSet.list_of t_names) @ - [const] - in case RationalMatrix.gauss_update - (X.NSet.cardinality p_names) row m - of SOME m' => SOME (m',p_names,t_names) - | NONE => NONE - end - - fun solve_psubst (m,p_names,t_names) = let - val cols = (X.NSet.list_of p_names) @ (X.NSet.list_of t_names) - val free = X.NSet.cardinality p_names - fun add_entry i (j,y) (x_opt,expr,tab,avoids) = - case x_opt - of NONE => if Rational.is_one (RationalMatrix.get m (i,j)) - then (SOME y, LinratExpr.zero, tab, avoids) - else (NONE, LinratExpr.zero, tab, avoids) - | SOME x => - let - val (tab',avoids',e) = - if j < free then - case XTab.get_opt tab y - of SOME e => (tab,avoids,e) - | NONE => - let - val (fresh,avoids') = avoids |> X.NSet.add_new y - val fresh_e = LinratExpr.mk_var fresh - in (tab |> XTab.add (y, fresh_e), avoids', fresh_e) - end - else (tab, avoids, LinratExpr.mk_var y) - in - ( - SOME x, - LinratExpr.subtr_expr expr - (LinratExpr.scale_expr (RationalMatrix.get m (i,j)) e), - tab', - avoids' - ) - end - fun add_row i (tab,avoids) = - let - (* extract the leading column name, along with the rest of the expression *) - val (x_opt,expr,tab',avoids') = - fold_index (add_entry i) cols (NONE, LinratExpr.zero, tab, avoids) - (* treat the rightmost column as the constant value *) - val expr = LinratExpr.subtr_expr expr - (LinratExpr.mk_const (RationalMatrix.get m (i, RationalMatrix.num_cols m - 1))) - in - case x_opt - of SOME x => - if XTab.dom_contains tab x - then raise RationalMatrix.not_in_rref_exp m - else (tab' |> XTab.add (x,expr), avoids') - | NONE => raise RationalMatrix.not_in_rref_exp m - end - - in - fold add_row - ((RationalMatrix.num_rows m - 1) downto 0) - (XTab.empty, t_names) - end - - fun subst_in_expr (tab, avoids) expr = let - fun subst_for_var x ((tab,avoids),accum) = - let - val (e, tab', avoids') = - case XTab.get_opt tab x - of SOME e => (e, tab, avoids) - | NONE => - let - val (fresh, avoids') = avoids |> X.NSet.add_new x - val fresh_e = LinratExpr.mk_var fresh - in - (fresh_e, tab |> XTab.add (x, fresh_e), avoids') - end - in ( - (tab', avoids'), - LinratExpr.add_expr accum - (LinratExpr.scale_expr - (LinratExpr.coeff_for_var expr (SOME x)) e) - ) - end - in X.NSet.fold - subst_for_var - (LinratExpr.free_vars expr) - ((tab,avoids), LinratExpr.mk_const (LinratExpr.coeff_for_var expr NONE)) - end - - fun pretty_subst (tab, avoids) = - Pretty.chunks [Pretty.block [Pretty.str "SUBST {"], - Pretty.block [Pretty.str " ", Pretty.chunks [ - Pretty.block [Pretty.str "Table: ", XTab.pretty LinratExpr.pretty tab], - Pretty.block [Pretty.str "Avoids: ", X.NSet.pretty avoids] - ]], - Pretty.str "}"] - -end diff --git a/core/expressions/matrix.ML b/core/expressions/matrix.ML deleted file mode 100644 index 
d5657461..00000000 --- a/core/expressions/matrix.ML +++ /dev/null @@ -1,166 +0,0 @@ -(** - * A simple matrix implementation. Primarily for doing Guass elimination. - *) - -signature MATRIX = -sig - type T - (* The type of matrix elements; see coeff.ML *) - structure Field : FIELD - - val num_rows : T -> int - val num_cols : T -> int - - val empty : T - - (* adds a zero column at the given index *) - val insert_zero_column : int -> T -> T - - (* Takes a size (rows,cols) and a function from positions to elements *) - val mk : (int * int) -> (int * int -> Field.T) -> T - (* Takes a list of rows, each row being a list of elements *) - val from_lists : Field.T list list -> T - - (* Perform Guassian elimination for the given number of unknowns. - * - * Will return NONE if the system has no solutions; otherwise the - * result will be in reduced row echelon form. *) - val gauss : int -> T -> T option - - (* Raised by calling gauss_update on a matrix not in - * reduced row echelon form (RREF) *) - exception not_in_rref_exp of T - - (* Try to add a single additional row to a matrix already in reduced - * row echelon form (RREF). - * - * Like with gauss, will return NONE if the system becomes inconsistent, - * otherwise the result will be in RREF. - * - * Raises not_in_rref_exp if given a matrix not in RREF. *) - val gauss_update : int -> Field.T list -> T -> T option - - (* As before, but apply the given function to the last column to reduce the - * field element, e.g. modulo 2. *) - val gauss_update_reduced : (Field.T -> Field.T) -> int -> Field.T list -> T -> T option - - - (* Get a single element; takes a position in (row,col) form *) - val get : T -> int * int -> Field.T - - val pretty : T -> Pretty.T - val print : T -> unit -end - -functor Matrix(Field: FIELD) : MATRIX = -struct - structure F = Field - type T = (Field.T Vector.vector) Vector.vector - structure Field = Field - - fun mk (rows,cols) f = - Vector.tabulate (rows, fn i => Vector.tabulate (cols, fn j => f(i,j))) - - val empty = mk (0,0) (K Field.zero) - - fun from_lists lists = Vector.fromList (map Vector.fromList lists) - - fun get m (i,j) = Vector.sub(Vector.sub(m,i),j) - - fun num_rows m = Vector.length m - fun num_cols m = case Vector.find (K true) m of SOME r => Vector.length r | NONE => 0 - - exception not_in_rref_exp of T - - fun find_pivot row = Vector.findi (fn (_,e) => not (Field.is_zero e)) row - - fun insert_zero i row = Vector.tabulate (Vector.length row + 1, - fn j => ( - if j < i then Vector.sub (row, j) - else if j = i then Field.zero - else Vector.sub (row, j - 1) - )) - - fun insert_zero_column i = Vector.map (insert_zero i) - - fun mult_row d row = Vector.map (fn e => Field.mult d e) row - fun subtr_row row1 row2 = Vector.mapi (fn (i,e) => Field.subtr e (Vector.sub (row2,i))) row1 - - exception inconsistent_exp of unit - - fun map_last f vec = - case Vector.length vec - of 0 => vec - | len => Vector.update (vec, len - 1, f (Vector.sub (vec, len-1))) - - fun gauss_update' reducef free new_row m = let - val rf = map_last reducef - val (row_opt, mlist) = - Vector.foldl (fn (current_row, (row_opt,accum)) => ( - case row_opt - of SOME row => - let - val current_pi = case find_pivot current_row - of SOME (x,y) => x | NONE => raise not_in_rref_exp m - in - case find_pivot row - of SOME (pi, pval) => - if pi >= free - then raise inconsistent_exp () - else - if pi < current_pi then - (NONE, current_row :: rf (mult_row (Field.divide Field.one pval) row) :: accum) - else if pi = current_pi then - (SOME (rf (subtr_row row 
(mult_row pval current_row))), current_row :: accum) - else (* pi > current_pi *) - (SOME row, current_row :: accum) - | NONE => (NONE, current_row :: accum) - end - | NONE => (NONE, current_row :: accum) - )) (SOME (rf new_row), []) m - in - SOME (Vector.fromList (rev ( - case row_opt - of SOME row => ( - case find_pivot row - of SOME (pi,pval) => - if pi >= free - then raise inconsistent_exp () - else mult_row (Field.divide Field.one pval) row :: mlist - | NONE => mlist (* don't add a row if it's all zeros *) - ) - | NONE => mlist - ))) - end - handle inconsistent_exp () => NONE - - (*fun gauss_updatef free rowf m = gauss_update' I free (Vector.tabulate (num_cols m,rowf)) m*) - fun gauss_update free row_list m = gauss_update' I free (Vector.fromList row_list) m - - fun gauss_update_reduced reducef free row_list m = - gauss_update' reducef free (Vector.fromList row_list) m - - fun gauss free = Vector.foldl (fn (row, m_opt) => ( - case m_opt - of SOME m => gauss_update' I free row m - | NONE => NONE - )) (SOME empty) - - fun pretty m = - Pretty.chunks (Vector.foldr (fn (row,rs) => - Pretty.list "[" "]" (Vector.foldr (fn (e,cs) => - Field.pretty e :: cs - ) [] row) :: rs - ) [] m) - - fun print m = Pretty.writeln (pretty m) -end - -structure RationalMatrix = Matrix(Rational) - -val matr = RationalMatrix.from_lists (map (map Rational.mk) [ - [(1,1), (1,1), (2,1)], - [(1,1), (1,1), (2,1)] -]) - -(*val _ = RationalMatrix.print (the (RationalMatrix.gauss 2 matr));*) diff --git a/core/expressions/semiring.ML b/core/expressions/semiring.ML deleted file mode 100644 index dd54ed53..00000000 --- a/core/expressions/semiring.ML +++ /dev/null @@ -1,74 +0,0 @@ -signature GCD_SEMIRING = -sig - type T - (*exception DivisionByComplex*) - val plus : T * T -> T - val times : T * T -> T - val zero : T - val one : T - val divide : T * T -> T - val is_zero : T -> bool - val is_one : T -> bool - val to_string : T -> string - - val eq : T * T -> bool - val gcd : T * T -> T (* greatest common (real) divisor *) - val total_ord : T * T -> order (* any total order. 
plus and times need not be monotone *) - val conjugate : T -> T (* any involution *) - val is_real : T -> bool (* is equal to conjugate *) -end - -structure IntSemiring : GCD_SEMIRING = -struct - type T = int - val plus = op+ - val times = op* - val divide = op div - val zero = 0 - val one = 1 - fun is_zero v = (v=zero) - fun is_one v = (v=one) - val to_string = Int.toString - - val eq = (op=) - - fun gcd (a,0) = a - | gcd (0,a) = a - | gcd (a,b) = gcd (b,a mod b) - - val total_ord = int_ord - val conjugate = I - val is_real = K true -end - -(* semiring of complexified integers *) -structure CIntSemiring : GCD_SEMIRING = -struct - type T = int * int - - fun plus ((r1,i1),(r2,i2)) = (r1+r2,i1+i2) - fun times ((r1,i1),(r2,i2)) = (r1*r2 - i1*i2, r1*i2 + r2*i1) - fun conjugate (r,i) = (r,~i) - fun divide ((r1,i1),(r2,0)) = (r1 div r2, i1 div r2) - | divide (num,denom) = divide (times (num, conjugate denom), times (denom, conjugate denom)) - val zero = (0,0) - val one = (1,0) - fun is_zero v = (v=zero) - fun is_one v = (v=one) - - fun to_string (r,i) = let - val rstr = if (r < 0) then ("-" ^ Int.toString (~r)) else (Int.toString r) - val (isgn,iop,iabs) = if (i < 0) then ("-","-", ~i) else ("", "+", i) - val istr = if iabs = 1 then "i" else Int.toString iabs ^ "i" - in case (r,i) of (_,0) => rstr - | (0,_) => isgn ^ istr - | (_,_) => rstr ^ iop ^ istr - end - - val eq = (op=) - - fun gcd ((r1,i1),(r2,i2)) = (IntSemiring.gcd (IntSemiring.gcd (r1,i1), IntSemiring.gcd (r2,i2)), 0) - - val total_ord = prod_ord int_ord int_ord - fun is_real (_,i) = (i=0) -end diff --git a/core/expressions/tensor.ML b/core/expressions/tensor.ML deleted file mode 100644 index 5094089a..00000000 --- a/core/expressions/tensor.ML +++ /dev/null @@ -1,273 +0,0 @@ -signature TENSOR = -sig - type T - type perm = int list - structure Semiring : GCD_SEMIRING - structure Tab : TABLE - sharing type T = Tab.key - val pow : int -> int -> int - val get : T -> (int * int) -> Semiring.T - val dim : T -> int - val ord : T * T -> order - val eq : T * T -> bool - val lower_index_count : T -> int - val upper_index_count : T -> int - val tensor : (int * int * int) -> Semiring.T list -> T - val tensorf : (int * int * int) -> ((int * int) -> Semiring.T) -> T - val id : int -> int -> T - val to_string : T -> string - val print : T -> unit - val product : T -> T -> T - val add : T -> T -> T - val conjugate_transpose : T -> T - val reduce : T -> (Semiring.T * Semiring.T) * T - (*val all_perms : T -> (perm * perm) Tab.table*) - val normalise : T -> ((Semiring.T * Semiring.T) * perm * perm) * T - val contract : (int * int) -> T -> T - val contract2 : (int * int) -> T -> T -> T - val compose : T -> T -> T - val decompose_index : int -> int -> int -> int list - val recompose_index : int -> int list -> int - val scalar_compare : T -> T -> (Semiring.T * Semiring.T) option - val scalar_eq : T -> T -> bool - val perm_scalar_eq : T -> T -> bool -end - -functor TensorKey (Semiring : GCD_SEMIRING) : KEY = -struct - type key = ((int * int * int) * Semiring.T vector) - fun ord ( ((d,l,u),v), ((d1,l1,u1),v1) ) = - case (prod_ord int_ord (prod_ord int_ord int_ord)) ( (d,(l,u)) , (d1,(l1,u1)) ) - of EQUAL => Vector.collate Semiring.total_ord (v,v1) - | comp => comp -end - -functor Tensor (Semiring : GCD_SEMIRING) : TENSOR = -struct - structure Semiring = Semiring - structure TKey = TensorKey(Semiring) - type T = TKey.key - type perm = int list - structure Tab = Table(TKey) - - exception negative_exponent_exn of unit - exception wrong_size_exn of unit - - val 
ord = TKey.ord - fun eq (t1,t2) = ord (t1,t2) = EQUAL - - (* fast exponentiation *) - fun pow x 0 = 1 - | pow x 1 = x - | pow x n = if n < 0 then raise negative_exponent_exn () else - (if (n mod 2 = 0) then pow (x*x) (n div 2) - else x * (pow (x*x) (n div 2))) - - - - fun dim ((d,_,_),_) = d; - fun lower_index_count ((_,l,_),_) = l - fun upper_index_count ((_,_,u),_) = u - - fun get ((d,l,u),vect) (row,col) = Vector.sub (vect, (pow d l) * row + col); - fun tensor (d,l,u) lst = ((d,l,u), Vector.fromList lst); - fun tensorf (d,l,u) f = let - val offset = pow d l - in ((d,l,u), Vector.tabulate (pow d (u+l), (fn i => f(i div offset, i mod offset)))) - end - - fun id d n = tensorf (d,n,n) (fn (row,col) => if row=col then Semiring.one else Semiring.zero) - - - (* reduce to scalar-canonical form. reduce T = ((a,b), T') ==> a/b T = T' *) - fun reduce (size, vect) = let - val c = case Vector.find (not o Semiring.is_zero) vect - of SOME v => if Semiring.is_real v then Semiring.one - else Semiring.conjugate v - | NONE => Semiring.one - val vect' = if Semiring.is_one c then vect else Vector.map ((curry Semiring.times) c) vect - val n = Vector.foldr (Semiring.gcd) Semiring.zero vect' - in if Semiring.is_zero n then ((Semiring.one, Semiring.one), (size,vect)) - else ((c,n), (size, Vector.map (fn m =>Semiring.divide (m,n)) vect')) - end - - fun get_from_product t1 (t2 as ((d2, l2, u2), v2)) (row,col) = let - val uoff = pow d2 u2 - val loff = pow d2 l2 - in Semiring.times (get t1 (row div uoff, col div loff), - get t2 (row mod uoff, col mod loff)) - end - - fun product (t1 as ((d1, l1, u1), v1)) (t2 as ((d2, l2, u2), v2)) = - tensorf (d1, l1+l2, u1+u2) (get_from_product t1 t2) - - fun add (t1 as ((d1,l1,u1),v1)) (t2 as ((d2,l2,u2),v2)) = - if d1 = d2 andalso l1 = l2 andalso u1 = u2 - then tensorf (d1,l1,u1) (fn idx => Semiring.plus (get t1 idx, get t2 idx)) - else raise wrong_size_exn () - - fun conjugate_transpose (t as ((d,l,u),_)) = tensorf (d,u,l) (fn (r,c) => Semiring.conjugate (get t (c,r))) - - fun decompose_index d len index = let - fun dec 0 idx = [] - | dec k idx = (idx mod d)::(dec (k-1) (idx div d)) - in rev (dec len index) - end - - fun recompose_index d lst = List.foldl (fn (part, rest) => part + d * rest) 0 lst - - (* insert element "k" in the list before "bef" *) - fun list_ins 0 k [] = [k] - | list_ins _ _ [] = [] - | list_ins bef k (l::ls) = if bef = 0 then k::l::(list_ins (bef-1) k ls) - else l::(list_ins (bef-1) k ls) - - - (* trace the j-th input (lower index) to the k-th output (upper index) *) - fun contractf f (j,k) (d,l,u) = let - fun tr (row, col) = let - val rowdec = decompose_index d (u-1) row - val coldec = decompose_index d (l-1) col - fun sum 0 = Semiring.zero - | sum i = Semiring.plus (f (recompose_index d (list_ins k (i-1) rowdec), - recompose_index d (list_ins j (i-1) coldec)), - sum (i-1)) - in sum d - end - in - tensorf (d, l-1, u-1) tr - end - - fun contract io (t as (size,_)) = contractf (get t) io size - - (* connect the k-th output of t1 to the j-th input of t2 *) - fun contract2 (j,k) (t2 as ((_,l2,u2),_)) (t1 as ((d,l1,u1),_)) = - contractf (get_from_product t1 t2) (l1+j,k) (d,l1+l2,u1+u2) - - fun compose (t2 as ((d,l2,u2),_)) (t1 as ((_,l1,_),_)) = let - fun comp_get 0 _ = Semiring.zero - | comp_get n (row,col) = Semiring.plus (Semiring.times (get t2 (row,n-1), get t1 (n-1,col)), - comp_get (n-1) (row,col)) - in tensorf (d,l1,u2) (comp_get (pow d l2)) - end - - (* scalar_compare T1 T2 = SOME (a,b) ==> a T1 = b T2 *) - fun scalar_compare t1 t2 = let - val ((a ,b 
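(* Illustrative note -- not part of the original sources: how the index encoding
   above works. A tensor ((d,l,u),v) stores a (d^u) x (d^l) matrix row-major, so
   get t (row,col) reads Vector.sub (v, d^l * row + col). decompose_index and
   recompose_index convert between a flat index and its base-d digits, most
   significant digit first, e.g.
     decompose_index 2 3 6     = [1,1,0]
     recompose_index 2 [1,1,0] = 6
   and reduce factors out a common scalar, as stated above:
   reduce T = ((a,b), T') with a/b * T = T'. *)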
),t1') = reduce t1 - val ((a',b'),t2') = reduce t2 - in if eq (t1',t2') then SOME (Semiring.times (a,b'), Semiring.times (a',b)) else NONE - end - - fun apply_perm perm lst = map (fn i => List.nth (lst,i)) perm - - fun permrow_get (t as ((d,_,_),_)) perm (row,col) = let - val prow = recompose_index d (apply_perm perm (decompose_index d (length perm) row)) - in get t (prow, col) - end - - fun permcol_get (t as ((d,_,_),_)) perm (row,col) = let - val pcol = recompose_index d (apply_perm perm (decompose_index d (length perm) col)) - in get t (row,pcol) - end - - fun permrowcol_get (t as ((d,_,_),_)) permrow permcol (row,col) = let - val prow = recompose_index d (apply_perm permrow (decompose_index d (length permrow) row)) - val pcol = recompose_index d (apply_perm permcol (decompose_index d (length permcol) col)) - in get t (prow, pcol) - end - - fun perm_matrix d perm = let - val n = length perm - fun ptens (row,col) = - if (recompose_index d - (apply_perm perm - (decompose_index d n row) - )) = col then 1 else 0 - in tensorf (d,n,n) ptens - end - - fun perm_matrix d perm = let val n = length perm - in tensorf (d,n,n) (permcol_get (id d n) perm) - end - - fun perms size = let - fun lst 0 = [] | lst n = (n-1)::(lst (n-1)) - fun prm [] [] = [[]] - | prm _ [] = [] - | prm h (x::xs) = (map (fn p=>x::p) (prm [] ((rev h)@xs))) - @ (prm (x::h) xs) - in prm [] (lst size) - end - - fun scalar_eq t1 t2 = case scalar_compare t1 t2 - of SOME _ => true | NONE => false - - - fun compare_all comp [] _ = [] - | compare_all comp _ [] = [] - | compare_all comp (x::xs) (y::ys) = if comp x y then - (x,y)::(compare_all comp (x::xs) ys @ - compare_all comp xs ys) - else (compare_all comp (x::xs) ys @ - compare_all comp xs ys) - - (*fun perm_scalar_eq (t1 as ((d,l,u),_)) t2 = let - val lst1 = map (fn p => tensorf (d,l,u) (permrow_get t1 p)) (perms u) - val lst2 = map (fn p => tensorf (d,l,u) (permcol_get t2 p)) (perms l) - in (compare_all scalar_eq lst1 lst2) <> [] - end*) - - fun all_perms (t as ((d,l,u),_)) = let - fun f prow pcol tab = let - val (sc, tens) = reduce (tensorf (d,l,u) (permrowcol_get t prow pcol)) - in tab |> Tab.update (tens, (sc,prow,pcol)) - end - in fold_product f (perms u) (perms l) (Tab.empty) - end - - fun normalise t = let - val perm_table = all_perms t - val (t',_) = the (Tab.min perm_table) - in (the (Tab.lookup perm_table t'), t') - end - - fun perm_scalar_eq t1 t2 = let - val (_,t1') = normalise t1 - val (_,t2') = normalise t2 - in eq (t1',t2') - end - - fun pad width s = funpow (width - size s) (prefix " ") s - - fun to_string ((d,l,u),v) = let - val width = (Vector.foldr (fn (s,max) => Int.max (size (Semiring.to_string s), max)) 0 v) + 1 - val rowsize = pow d l - fun str (i, num, rest) = - (if (i <> 0) andalso ((i mod rowsize) = 0) then "]\n[ " else "") ^ - (pad width (Semiring.to_string num)) ^ rest - in - "[" ^ (Vector.foldri str "" v) ^ " ]\n\n" - end - - fun print t = TextIO.print (to_string t) -end - -structure IntTensor = Tensor(IntSemiring) -structure CIntTensor = Tensor(CIntSemiring) - -(*val t1 = Tensor.tensor (2,2,1) -[ 0, 4, 0, 8, - 16,64,32,128] -val t2 = Tensor.tensor (2,2,1) [0,0,8,16,32,64,128,256] -val t3 = Tensor.tensor (2,2,1) [2,4,8,16,32,64,128,255] -val X = Tensor.tensor (2,1,1) [0,1,1,0] -val vect = Tensor.tensor (2,0,1) [1,2] -val vect2 = Tensor.contract2 (0,0) X vect - -val _ = Tensor.print t1*) - - - - - - diff --git a/core/expressions/test/linrat_angle_expr-test.ML b/core/expressions/test/linrat_angle_expr-test.ML deleted file mode 100644 index 
635295a9..00000000 --- a/core/expressions/test/linrat_angle_expr-test.ML +++ /dev/null @@ -1,183 +0,0 @@ -(* Testing Linear Rational Angle Expressions *) - -(* TODO: more extensive testing, including substitutions *) -local - structure Expr = LinratAngleExpr (* expressions *) - structure Var = X (* variable names *) - structure Coeff = Expr.Coeff (* rational numbers *) - - val zero = Expr.zero - val pi = Expr.pi - - val a_v = Var.mk "a" - val b_v = Var.mk "b" - - val a = Expr.mk_var a_v - val b = Expr.mk_var b_v - - val one = Coeff.mk (1, 1) - val neg_one = Coeff.mk (~1, 1) - val two = Coeff.mk (2, 1) - - val one_e = Expr.mk one [] - fun const coeff = Expr.mk coeff [] - - fun op +(x,y) = Expr.add_expr x y - fun op -(x,y) = Expr.subtr_expr x y - fun op *(k,x) = Expr.scale_expr k x - - val expr_str = Pretty.string_of o Expr.pretty - - fun reparse exp = - let - val str = expr_str exp - in - let - val result = Expr.parse str - in - if not (Expr.eq result exp) - then raise ERROR ("Reparsing \""^str^"\" gave \""^(expr_str result)) - else () - end - handle Expr.parse_exp => - raise ERROR ("Got parse_exp when reparsing "^str) - end - - fun parse (str,exp) = - let val result = Expr.parse str - in - if not (Expr.eq result exp) - then raise ERROR ("Parsing \""^str^"\" gave \""^(expr_str result)^ - "\" instead of \""^(expr_str exp)^"\"") - else () - end - handle Expr.parse_exp => - raise ERROR ("Got parse_exp when parsing "^str) - - fun parse_fail str = - (Expr.parse str; raise ERROR ("Parser accepted bad expression \""^str^"\"")) - handle Expr.parse_exp => () - | Div => () - - fun test_reparse expr = - Testing.test ("Reparse "^(expr_str expr)) reparse expr - fun test_parse str expr = - Testing.test ("Parse "^str) parse (str,expr) - fun test_parse_fail str = - Testing.test ("Parse (fail) "^str) parse_fail str -in - val _ = 1 - val _ = test_reparse zero - val _ = test_parse "0" zero - val _ = test_parse "" zero - - val _ = test_reparse (const one) - val _ = test_parse "\\pi" (const one) - val _ = test_parse "1\\pi" (const one) - val _ = test_parse "1*\\pi" (const one) - val _ = test_parse "1/1\\pi" (const one) - val _ = test_parse "1/1*\\pi" (const one) - val _ = test_parse "1\\pi/1" (const one) - val _ = test_parse "1*\\pi/1" (const one) - val _ = test_parse "\\pi/1" (const one) - - val _ = test_parse "pi" (const one) - val _ = test_parse "1pi" (const one) - val _ = test_parse "1*pi" (const one) - val _ = test_parse "1/1pi" (const one) - val _ = test_parse "1/1*pi" (const one) - val _ = test_parse "1pi/1" (const one) - val _ = test_parse "1*pi/1" (const one) - val _ = test_parse "pi/1" (const one) - - val _ = test_parse "PI" (const one) - val _ = test_parse "1PI" (const one) - val _ = test_parse "1*PI" (const one) - val _ = test_parse "1/1PI" (const one) - val _ = test_parse "1/1*PI" (const one) - val _ = test_parse "1PI/1" (const one) - val _ = test_parse "1*PI/1" (const one) - val _ = test_parse "PI/1" (const one) - - val _ = test_reparse a - val _ = test_parse "a" a - val _ = test_parse "1a" a - val _ = test_parse "1*a" a - val _ = test_parse "1/1a" a - val _ = test_parse "1/1*a" a - val _ = test_parse "1a/1" a - val _ = test_parse "1*a/1" a - val _ = test_parse "a/1" a - - val _ = test_reparse (const neg_one) - val _ = test_parse "-pi" (const neg_one) - val _ = test_parse "-1pi" (const neg_one) - val _ = test_parse "-1*pi" (const neg_one) - val _ = test_parse "-1/1pi" (const neg_one) - val _ = test_parse "-1/1*pi" (const neg_one) - val _ = test_parse "-1pi/1" (const neg_one) - val _ = test_parse 
"-1*pi/1" (const neg_one) - val _ = test_parse "-pi/1" (const neg_one) - - val _ = test_reparse (neg_one*a) - val _ = test_parse "-a" (neg_one*a) - val _ = test_parse "-1a" (neg_one*a) - val _ = test_parse "-1*a" (neg_one*a) - val _ = test_parse "-1/1a" (neg_one*a) - val _ = test_parse "-1/1*a" (neg_one*a) - val _ = test_parse "-1a/1" (neg_one*a) - val _ = test_parse "-1*a/1" (neg_one*a) - val _ = test_parse "-a/1" (neg_one*a) - - val _ = test_reparse (const (Coeff.mk (3, 4))) - val _ = test_parse "3/4" (const (Coeff.mk (3, 4))) - val _ = test_parse "3/4pi" (const (Coeff.mk (3, 4))) - val _ = test_parse "3/4*pi" (const (Coeff.mk (3, 4))) - val _ = test_parse "3pi/4" (const (Coeff.mk (3, 4))) - val _ = test_parse "3*pi/4" (const (Coeff.mk (3, 4))) - - val _ = test_reparse (const (Coeff.mk (~3, 4))) - val _ = test_parse "-3/4" (const (Coeff.mk (~3, 4))) - val _ = test_parse "-3/4pi" (const (Coeff.mk (~3, 4))) - val _ = test_parse "-3/4*pi" (const (Coeff.mk (~3, 4))) - val _ = test_parse "-3pi/4" (const (Coeff.mk (~3, 4))) - val _ = test_parse "-3*pi/4" (const (Coeff.mk (~3, 4))) - - val _ = test_reparse ((Coeff.mk (3, 4))*a) - val _ = test_parse "3/4a" ((Coeff.mk (3, 4))*a) - val _ = test_parse "3/4*a" ((Coeff.mk (3, 4))*a) - val _ = test_parse "3a/4" ((Coeff.mk (3, 4))*a) - val _ = test_parse "3*a/4" ((Coeff.mk (3, 4))*a) - - val _ = test_reparse ((Coeff.mk (~3, 4))*a) - val _ = test_parse "-3/4a" ((Coeff.mk (~3, 4))*a) - val _ = test_parse "-3/4*a" ((Coeff.mk (~3, 4))*a) - val _ = test_parse "-3a/4" ((Coeff.mk (~3, 4))*a) - val _ = test_parse "-3*a/4" ((Coeff.mk (~3, 4))*a) - - val _ = test_parse "pi/4" (const (Coeff.mk (1, 4))) - val _ = test_parse "-pi/4" (const (Coeff.mk (~1, 4))) - val _ = test_parse "a/4" ((Coeff.mk (1, 4))*a) - val _ = test_parse "-a/4" ((Coeff.mk (~1, 4))*a) - - val _ = test_reparse (a + b) - val _ = test_reparse (a - b) - val _ = test_reparse ((neg_one*a) - b) - - val _ = test_parse "a + b" (a + b) - val _ = test_parse "a - b" (a - b) - val _ = test_parse "-a + b" (b - a) - val _ = test_parse "- a - b" ((neg_one*a) - b) - val _ = test_parse "-(a + b)" ((neg_one*a) - b) - val _ = test_parse "-(a - b)" (b - a) - - val _ = test_parse "2\\pi + 1" (two*pi + (const one)) - val _ = test_reparse (two*pi + (const one)) - - val _ = test_parse_fail "&" - val _ = test_parse_fail "b/a" - val _ = test_parse_fail "a*b" - val _ = test_parse_fail "1/0" - - val _ = Testing.assert_no_failed_tests() -end diff --git a/core/expressions/test/linrat_expr-test.ML b/core/expressions/test/linrat_expr-test.ML deleted file mode 100644 index 49221022..00000000 --- a/core/expressions/test/linrat_expr-test.ML +++ /dev/null @@ -1,143 +0,0 @@ -(* Testing Linear Rational Angle Expressions *) - -(* TODO: more extensive testing, including substitutions *) -local - structure Expr = LinratExpr (* expressions *) - structure Var = X (* variable names *) - structure Coeff = Expr.Coeff (* rational numbers *) - - val zero = Expr.zero - val one_e = Expr.one - - val a_v = Var.mk "a" - val b_v = Var.mk "b" - - val a = Expr.mk_var a_v - val b = Expr.mk_var b_v - - val one = Coeff.mk (1, 1) - val neg_one = Coeff.mk (~1, 1) - val two = Coeff.mk (2, 1) - val one_third = Coeff.mk (1, 3) - - val one_e1 = Expr.mk one [] - fun const coeff = Expr.mk coeff [] - - fun op +(x,y) = Expr.add_expr x y - fun op -(x,y) = Expr.subtr_expr x y - fun op *(k,x) = Expr.scale_expr k x - - val expr_str = Pretty.string_of o Expr.pretty - - fun reparse exp = - let - val str = expr_str exp - in - let - val result = Expr.parse str - in - 
if not (Expr.eq result exp) - then raise ERROR ("Reparsing \""^str^"\" gave \""^(expr_str result)) - else () - end - handle Expr.parse_exp => - raise ERROR ("Got parse_exp when reparsing "^str) - end - - fun parse (str,exp) = - let val result = Expr.parse str - in - if not (Expr.eq result exp) - then raise ERROR ("Parsing \""^str^"\" gave \""^(expr_str result)^ - "\" instead of \""^(expr_str exp)^"\"") - else () - end - handle Expr.parse_exp => - raise ERROR ("Got parse_exp when parsing "^str) - - fun parse_fail str = - (Expr.parse str; raise ERROR ("Parser accepted bad expression \""^str^"\"")) - handle Expr.parse_exp => () - | Div => () - - fun test_reparse expr = - Testing.test ("Reparse "^(expr_str expr)) reparse expr - fun test_parse str expr = - Testing.test ("Parse "^str) parse (str,expr) - fun test_parse_fail str = - Testing.test ("Parse (fail) "^str) parse_fail str -in - val _ = test_reparse zero - val _ = test_parse "0" zero - val _ = test_parse "" zero - - val _ = test_reparse (const one) - val _ = test_parse "1" (const one) - val _ = test_parse "1/1" (const one) - - val _ = test_reparse a - val _ = test_parse "a" a - val _ = test_parse "1a" a - val _ = test_parse "1*a" a - val _ = test_parse "1/1a" a - val _ = test_parse "1/1*a" a - val _ = test_parse "1a/1" a - val _ = test_parse "1*a/1" a - val _ = test_parse "a/1" a - - val _ = test_reparse (const neg_one) - val _ = test_parse "-1" (const neg_one) - val _ = test_parse "-1/1" (const neg_one) - - val _ = test_reparse (neg_one*a) - val _ = test_parse "-a" (neg_one*a) - val _ = test_parse "-1a" (neg_one*a) - val _ = test_parse "-1*a" (neg_one*a) - val _ = test_parse "-1/1a" (neg_one*a) - val _ = test_parse "-1/1*a" (neg_one*a) - val _ = test_parse "-1a/1" (neg_one*a) - val _ = test_parse "-1*a/1" (neg_one*a) - val _ = test_parse "-a/1" (neg_one*a) - - val _ = test_reparse (const (Coeff.mk (3, 4))) - val _ = test_parse "3/4" (const (Coeff.mk (3, 4))) - - val _ = test_reparse (const (Coeff.mk (~3, 4))) - val _ = test_parse "-3/4" (const (Coeff.mk (~3, 4))) - - val _ = test_reparse ((Coeff.mk (3, 4))*a) - val _ = test_parse "3/4a" ((Coeff.mk (3, 4))*a) - val _ = test_parse "3/4*a" ((Coeff.mk (3, 4))*a) - val _ = test_parse "3a/4" ((Coeff.mk (3, 4))*a) - val _ = test_parse "3*a/4" ((Coeff.mk (3, 4))*a) - - val _ = test_reparse ((Coeff.mk (~3, 4))*a) - val _ = test_parse "-3/4a" ((Coeff.mk (~3, 4))*a) - val _ = test_parse "-3/4*a" ((Coeff.mk (~3, 4))*a) - val _ = test_parse "-3a/4" ((Coeff.mk (~3, 4))*a) - val _ = test_parse "-3*a/4" ((Coeff.mk (~3, 4))*a) - - val _ = test_parse "a/4" ((Coeff.mk (1, 4))*a) - val _ = test_parse "-a/4" ((Coeff.mk (~1, 4))*a) - - val _ = test_reparse (a + b) - val _ = test_reparse (a - b) - val _ = test_reparse ((neg_one*a) - b) - - val _ = test_parse "a + b" (a + b) - val _ = test_parse "a - b" (a - b) - val _ = test_parse "-a + b" (b - a) - val _ = test_parse "- a - b" ((neg_one*a) - b) - val _ = test_parse "-(a + b)" ((neg_one*a) - b) - val _ = test_parse "-(a - b)" (b - a) - - val _ = test_parse "2a + 1" (two*a + (const one)) - val _ = test_reparse (two*a + (const one)) - - val _ = test_parse_fail "&" - val _ = test_parse_fail "b/a" - val _ = test_parse_fail "a*b" - val _ = test_parse_fail "1/0" - - val _ = Testing.assert_no_failed_tests() -end diff --git a/core/expressions/test/linrat_matcher-test.ML b/core/expressions/test/linrat_matcher-test.ML deleted file mode 100644 index 8e4b254f..00000000 --- a/core/expressions/test/linrat_matcher-test.ML +++ /dev/null @@ -1,116 +0,0 @@ -local - val 
pretty_subst = XTab.pretty LinratAngleExpr.pretty - - fun assert_subst_eq (s1,s2) = - if XTab.tab_eq (fn (x,y) => LinratAngleExpr.eq x y) (s1,s2) - then () - else raise ERROR ("Substitutions not equal.\n" ^ - Pretty.string_of (pretty_subst s1) ^ "\n" ^ - Pretty.string_of (pretty_subst s2) ^ "\n") - - val matr = RationalMatrix.from_lists (map (map (fn i => Rational.mk (i,1))) [ - [1, 0, 0, ~5, 1, 0], - [0, 1, 2, ~6, 0, ~1], - [0, 0, 1, ~7, 2, 0] - ]) - - val matr_under_spec = RationalMatrix.from_lists (map (map (fn i => Rational.mk (i,1))) [ - [1, 0, 1, ~5, 1, 0], - [0, 1, 2, ~6, 0, ~1] - ]) - - val (tab,_) = LinratAngleMatcher.solve_psubst - (matr, X.NSet.of_list (map X.mk ["a","b","c"]), X.NSet.of_list (map X.mk ["d","e"])) - - val (tab_notfresh,_) = LinratAngleMatcher.solve_psubst - (matr, X.NSet.of_list (map X.mk ["a","b","c"]), X.NSet.of_list (map X.mk ["a","b"])) - - val (tab_underspec,_) = LinratAngleMatcher.solve_psubst - (matr_under_spec, X.NSet.of_list (map X.mk ["a","b","c"]), X.NSet.of_list (map X.mk ["d","e"])) - - val (tab_underspec_notfresh,_) = LinratAngleMatcher.solve_psubst - (matr_under_spec, X.NSet.of_list (map X.mk ["a","b","c"]), X.NSet.of_list (map X.mk ["c","d"])) - - (* - (* TODO: port old match tests *) - val match_str = Pretty.string_of o Expr.pretty_match; - - fun has_match (e1,e2) = - case Expr.match e1 e2 Expr.empty_match - of SOME m => m - | NONE => raise ERROR ("Expected "^(expr_str e1)^" to match " - ^(expr_str e2)); - fun no_match (e1,e2) = - case Expr.match e1 e2 Expr.empty_match - of NONE => () - | SOME m => raise ERROR ("Unexpected match "^(match_str m)^" of " - ^(expr_str e1)^" onto " ^(expr_str e2)); - *) -in - - val _ = Testing.test "Gauss matrix to subst" assert_subst_eq ( - tab, - XTab.of_list [ - (X.mk "a", LinratAngleExpr.parse "5 d - e"), - (X.mk "b", LinratAngleExpr.parse "\\pi - 8 d + 4 e"), - (X.mk "c", LinratAngleExpr.parse "7 d - 2 e") - ] - ) - - (* NOTE: it shouldn't make any difference whether the target vars are fresh w.r.t. the pattern *) - val _ = Testing.test "Gauss matrix to subst (target not fresh w.r.t. pattern)" assert_subst_eq ( - tab_notfresh, - XTab.of_list [ - (X.mk "a", LinratAngleExpr.parse "5 a - b"), - (X.mk "b", LinratAngleExpr.parse "\\pi - 8 a + 4 b"), - (X.mk "c", LinratAngleExpr.parse "7 a - 2 b") - ] - ) - - val _ = Testing.test "Gauss matrix to subst (under-specified system)" assert_subst_eq ( - tab_underspec, - XTab.of_list [ - (X.mk "a", LinratAngleExpr.parse "5 d - e - c"), - (X.mk "b", LinratAngleExpr.parse "\\pi + 6 d - 2 c"), - (X.mk "c", LinratAngleExpr.parse "c") - ] - ) - - (* note this assumes NSet.add_new grabs lowest available fresh name above the requested one. In - * this case, add_new("c") = "e". *) - val _ = Testing.test "Gauss matrix to subst (under-specified + not fresh)" assert_subst_eq ( - tab_underspec_notfresh, - XTab.of_list [ - (X.mk "a", LinratAngleExpr.parse "5 c - d - e"), - (X.mk "b", LinratAngleExpr.parse "\\pi + 6 c - 2 e"), - (X.mk "c", LinratAngleExpr.parse "e") - ] - ) - - (* - (* TODO: check the right results are produced, not just *some* matching! *) - val _ = Testing.test "match a 0 = SOME ..." has_match (a, zero); - (* a => (0) - (0) *) - - val _ = Testing.test "match (a + b) 0 = SOME ..." has_match (a + b, zero); - (* a => (0) - (b) *) - - val _ = Testing.test "match (a + b) a = SOME ..." has_match (a + b, zero); - (* a => (a) - (b) *) - - val _ = Testing.test "match a (a + b) = SOME ..." 
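(* Illustrative note -- not part of the original tests: how the expected
   substitutions above follow from the matrix. Reading each row of matr as a
   linear equation whose columns are the coefficients of (a, b, c, d, e, \pi)
   -- an inference from the expected results, not stated explicitly here --
   the rows say
     a          - 5d + e         = 0
         b + 2c - 6d       - \pi = 0
             c  - 7d + 2e        = 0
   Back-substituting: c = 7d - 2e, hence b = \pi + 6d - 2c = \pi - 8d + 4e,
   and a = 5d - e, matching the XTab.of_list values in the tests above. *)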
has_match (a, a + b); - (* a => (a + b) - (0) *) - - val _ = Testing.test "match pi pi = SOME ..." has_match (pi,pi); - val _ = Testing.test "match (a - pi) (2 * pi) = SOME ..." has_match - (a - pi, two * pi); - - val _ = Testing.test "match 0 a = false" no_match (zero,a); - val _ = Testing.test "match pi a = false" no_match (pi,a); - val _ = Testing.test "match pi 0 = false" no_match (pi,zero); - val _ = Testing.test "match 0 pi = false" no_match (zero,pi); - val _ = Testing.test "match pi 2pi = false" no_match (pi,two*pi); - *) - - val _ = Testing.assert_no_failed_tests () -end \ No newline at end of file diff --git a/core/graph/arity.ML b/core/graph/arity.ML deleted file mode 100644 index 5ecb9bfa..00000000 --- a/core/graph/arity.ML +++ /dev/null @@ -1,163 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-==-=-=-=-=-=-=-=-=-=- *) -(* Representation arity of edges into or out of a vertex. *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -(* The arity of a vertex consists of 3 incidences: - edges coming in, - edges going out, - and undirected edges that are connected to this vertex. - - Note: self loops are assumed to create 2 incidences to a vertex. Only - isolated vertices are incidence 'zero'. -*) - -signature ARITY = -sig - type T - val zero : T - val add : T -> T -> T - - val add_1_in : T -> T - val add_1_out : T -> T - val add_1_undir : T -> T - - val subtract : T -> T -> T - val divide : T -> T -> T - val mult : T -> T -> T - val div_by_int : int -> T -> T - val mult_by_int : int -> T -> T - - val max_incidence : T -> int - val min_incidence : T -> int - val min_nonzero_incidence : T -> int option - - val has_negative_incidence : T -> bool - - val get_in : T -> int - val get_out : T -> int - val get_undir : T -> int - val get_total : T -> int - - val update_in : (int -> int) -> T -> T - val update_out : (int -> int) -> T -> T - val update_undir : (int -> int) -> T -> T - - val set_in : int -> T -> T - val set_out : int -> T -> T - val set_undir : int -> T -> T - - val rep_of : T -> {no_in : int, (* number of incoming edges *) - no_out : int, (* number of outgoig edges *) - no_undir : int (* number of undirected, connected edges *) } - - val pretty : T -> Pretty.T - val print : T -> unit - -end; - -(* *) -structure Arity : ARITY = -struct - - (* not every *) - datatype T = Arity of - {no_in : int, (* number of incoming edges *) - no_out : int, (* number of outgoig edges *) - no_undir : int (* number of undirected, connected edges *) } - - (* zero is the arity of an isolated (unconnected) vertex *) - val zero = Arity {no_in = 0, no_out = 0, no_undir = 0 }; - - fun is_zero (Arity rep) = - (#no_in rep = 0 andalso #no_out rep = 0 andalso #no_undir rep = 0); - - (* *) - fun get_in (Arity rep) = #no_in rep; - fun get_out (Arity rep) = #no_out rep; - fun get_undir (Arity rep) = #no_undir rep; - fun get_total (Arity rep) = (#no_in rep) + (#no_out rep) + (#no_undir rep); - - fun update_in f (Arity rep) = - Arity { no_in = f(#no_in rep), no_out = #no_out rep, - no_undir = #no_undir rep }; - fun update_out f (Arity rep) = - Arity { no_in = #no_in rep, no_out = f(#no_out rep), - no_undir = #no_undir rep }; - fun update_undir f (Arity rep) = - Arity { no_in = #no_in rep, no_out = #no_out rep, - no_undir = f(#no_undir rep) }; - - val set_in = update_in o K - val set_out = update_out o K - val set_undir = update_undir o K - - (* *) - fun add (Arity rep1) (Arity rep2) = - Arity { no_in = (#no_in rep1) + (#no_in rep2), - no_out = (#no_out rep1) + 
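(* Illustrative note -- not part of the original file: a small worked arity,
   following the incidence conventions in the header comment above. For a vertex
   with one incoming directed edge, two outgoing directed edges and one
   undirected self-loop (which counts as two undirected incidences), the arity is
     zero |> set_in 1 |> set_out 2 |> set_undir 2
   and pretty renders it as "(in:1, out:2, undir:2)". *)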
(#no_out rep2), - no_undir = (#no_undir rep1) + (#no_undir rep2) }; - - (* *) - val add_1_in = update_in (fn i => i + 1); - val add_1_out = update_out (fn i => i + 1); - val add_1_undir = update_undir (fn i => i + 1); - - (* *) - fun subtract (Arity rep1) (Arity rep2) = - Arity { no_in = (#no_in rep1) - (#no_in rep2), - no_out = (#no_out rep1) - (#no_out rep2), - no_undir = (#no_undir rep1) - (#no_undir rep2) }; - - fun has_negative_incidence (Arity rep) = - (#no_in rep) < 0 orelse (#no_out rep) < 0 orelse (#no_undir rep) < 0; - - fun div_by_int i (Arity rep) = - Arity { no_in = (#no_in rep) div i, - no_out = (#no_out rep) div i, - no_undir = (#no_undir rep) div i }; - - fun mult_by_int i (Arity rep) = - Arity { no_in = (#no_in rep) * i, - no_out = (#no_out rep) * i, - no_undir = (#no_undir rep) * i }; - - fun non_zero_div x y = if y <> 0 then x div y else 0; - - fun divide (Arity rep1) (Arity rep2) = (* rep1 / rep2 *) - Arity { no_in = non_zero_div (#no_in rep1) (#no_in rep2), - no_out = non_zero_div (#no_out rep1) (#no_out rep2), - no_undir = non_zero_div (#no_undir rep1) (#no_undir rep2) }; - - fun mult (Arity rep1) (Arity rep2) = (* rep1 * rep2 *) - Arity { no_in = (#no_in rep1) * (#no_in rep2), - no_out = (#no_out rep1) * (#no_out rep2), - no_undir = (#no_undir rep1) * (#no_undir rep2) }; - - fun max_incidence (Arity rep) = - Int.max(Int.max (#no_in rep, #no_out rep), #no_undir rep); - - fun min_incidence (Arity rep) = - Int.min(#no_undir rep,Int.min(#no_in rep,#no_out rep)); - - fun min_nonzero_incidence (Arity rep) = - fold (fn x => (fn NONE => SOME x - | SOME y => (if y <> 0 then SOME (Int.min(x,y)) - else SOME y))) - [#no_in rep, #no_out rep, #no_undir rep] - NONE; - - fun rep_of (Arity rep) = rep - - fun pretty (Arity rep) = - Pretty.block - ( [Pretty.str "(in:", - Pretty.str (Int.toString (#no_in rep))] - @ [Pretty.str ", out:", - Pretty.str (Int.toString (#no_out rep))] - @ [Pretty.str ", undir:", - Pretty.str (Int.toString (#no_undir rep))] - @ [Pretty.str ")"] ); - - val print = Pretty.writeln o pretty; - -end; (* *) diff --git a/core/graph/bang_graph.ML b/core/graph/bang_graph.ML deleted file mode 100644 index a8f973dc..00000000 --- a/core/graph/bang_graph.ML +++ /dev/null @@ -1,1541 +0,0 @@ -functor BangGraph(structure Data: GRAPH_DATA) : BANG_GRAPH = -struct - -structure Data = Data -open Data -datatype vdata = NVert of Data.nvdata | WVert - -(* main bang-graph datatype *) -datatype T = BG of { - vdata : vdata VTab.T, - edata : (dir_or_undir * Data.edata) ETab.T, - source : EVFn.T, - target : EVFn.T, - nhd : Nhd.T VTab.T, - - bboxes : B.NSet.T, - bbox_rel : BVRel.T, - bbox_child_rel : BBRel.T, - - (* annotations *) - g_anno : Json.jobj, - v_anno : Json.jobj VTab.T, - e_anno : Json.jobj ETab.T, - bb_anno : Json.jobj BTab.T -} - -structure Sharing = -struct - type T = T - type nvdata = nvdata - type edata = edata - type psubst = psubst - type subst = subst -end - -type wire = V.name * V.name * V.NSet.T * E.NSet.T (* a chain of edges, representing a wire *) - -(* empty bang graph *) -val empty : T = BG { - vdata = VTab.empty, - edata = ETab.empty, - source = EVFn.empty, - target = EVFn.empty, - nhd = VTab.empty, - - bboxes = B.NSet.empty, - bbox_rel = BVRel.empty, - bbox_child_rel = BBRel.empty, - - g_anno = Json.empty_obj, - v_anno = VTab.empty, - e_anno = ETab.empty, - bb_anno = BTab.empty -} - -(*********************************) -(********** EXCEPTIONS ***********) -(*********************************) - -exception no_such_vertex_exp of string * V.name * T -(*exception 
no_such_vnames_exp of string * V.NSet.T * T*) -exception duplicate_vertex_exp of V.name * T - -exception bad_graph_merge_exp of string * T * T - -exception no_such_edge_exp of string * E.name * T -exception duplicate_edge_exp of E.name * T - -exception no_such_bbox_exp of string * B.name * T -exception duplicate_bbox_exp of B.name * T - -exception not_an_endpoint_exp of string * E.name * V.name * T - -exception wire_vertex_exp of string * T * V.name -exception wire_exp of string * T * wire - -exception plugging_exp of T * V.name * V.name - -(* !-graph exceptions *) -exception bbox_not_open_exp of (V.name * V.name) * B.name * T -exception bbox_bad_parent_exp of B.name * B.name * T -exception merge_bbox_exp of string - -(*********************************) -(******** BASIC ACCESSORS ********) -(*********************************) - -(* Note that it is possible to break a graph by calling these updaters, so they should - * all be private. Also note that these functions are automatically generated, so avoid - * making changes *) -fun update_vdata f (BG r) = BG {vdata=f(#vdata r),edata= #edata r,source= #source r,target= #target r,nhd= #nhd r,bboxes= #bboxes r,bbox_rel= #bbox_rel r,bbox_child_rel= #bbox_child_rel r,g_anno= #g_anno r,v_anno= #v_anno r,e_anno= #e_anno r,bb_anno= #bb_anno r} -fun update_edata f (BG r) = BG {vdata= #vdata r,edata=f(#edata r),source= #source r,target= #target r,nhd= #nhd r,bboxes= #bboxes r,bbox_rel= #bbox_rel r,bbox_child_rel= #bbox_child_rel r,g_anno= #g_anno r,v_anno= #v_anno r,e_anno= #e_anno r,bb_anno= #bb_anno r} -fun update_source f (BG r) = BG {vdata= #vdata r,edata= #edata r,source=f(#source r),target= #target r,nhd= #nhd r,bboxes= #bboxes r,bbox_rel= #bbox_rel r,bbox_child_rel= #bbox_child_rel r,g_anno= #g_anno r,v_anno= #v_anno r,e_anno= #e_anno r,bb_anno= #bb_anno r} -fun update_target f (BG r) = BG {vdata= #vdata r,edata= #edata r,source= #source r,target=f(#target r),nhd= #nhd r,bboxes= #bboxes r,bbox_rel= #bbox_rel r,bbox_child_rel= #bbox_child_rel r,g_anno= #g_anno r,v_anno= #v_anno r,e_anno= #e_anno r,bb_anno= #bb_anno r} -fun update_nhd f (BG r) = BG {vdata= #vdata r,edata= #edata r,source= #source r,target= #target r,nhd=f(#nhd r),bboxes= #bboxes r,bbox_rel= #bbox_rel r,bbox_child_rel= #bbox_child_rel r,g_anno= #g_anno r,v_anno= #v_anno r,e_anno= #e_anno r,bb_anno= #bb_anno r} -fun update_bboxes f (BG r) = BG {vdata= #vdata r,edata= #edata r,source= #source r,target= #target r,nhd= #nhd r,bboxes=f(#bboxes r),bbox_rel= #bbox_rel r,bbox_child_rel= #bbox_child_rel r,g_anno= #g_anno r,v_anno= #v_anno r,e_anno= #e_anno r,bb_anno= #bb_anno r} -fun update_bbox_rel f (BG r) = BG {vdata= #vdata r,edata= #edata r,source= #source r,target= #target r,nhd= #nhd r,bboxes= #bboxes r,bbox_rel=f(#bbox_rel r),bbox_child_rel= #bbox_child_rel r,g_anno= #g_anno r,v_anno= #v_anno r,e_anno= #e_anno r,bb_anno= #bb_anno r} -fun update_bbox_child_rel f (BG r) = BG {vdata= #vdata r,edata= #edata r,source= #source r,target= #target r,nhd= #nhd r,bboxes= #bboxes r,bbox_rel= #bbox_rel r,bbox_child_rel=f(#bbox_child_rel r),g_anno= #g_anno r,v_anno= #v_anno r,e_anno= #e_anno r,bb_anno= #bb_anno r} -fun update_g_anno f (BG r) = BG {vdata= #vdata r,edata= #edata r,source= #source r,target= #target r,nhd= #nhd r,bboxes= #bboxes r,bbox_rel= #bbox_rel r,bbox_child_rel= #bbox_child_rel r,g_anno=f(#g_anno r),v_anno= #v_anno r,e_anno= #e_anno r,bb_anno= #bb_anno r} -fun update_v_anno f (BG r) = BG {vdata= #vdata r,edata= #edata r,source= #source r,target= #target r,nhd= #nhd r,bboxes= 
#bboxes r,bbox_rel= #bbox_rel r,bbox_child_rel= #bbox_child_rel r,g_anno= #g_anno r,v_anno=f(#v_anno r),e_anno= #e_anno r,bb_anno= #bb_anno r} -fun update_e_anno f (BG r) = BG {vdata= #vdata r,edata= #edata r,source= #source r,target= #target r,nhd= #nhd r,bboxes= #bboxes r,bbox_rel= #bbox_rel r,bbox_child_rel= #bbox_child_rel r,g_anno= #g_anno r,v_anno= #v_anno r,e_anno=f(#e_anno r),bb_anno= #bb_anno r} -fun update_bb_anno f (BG r) = BG {vdata= #vdata r,edata= #edata r,source= #source r,target= #target r,nhd= #nhd r,bboxes= #bboxes r,bbox_rel= #bbox_rel r,bbox_child_rel= #bbox_child_rel r,g_anno= #g_anno r,v_anno= #v_anno r,e_anno= #e_anno r,bb_anno=f(#bb_anno r)} - -fun get_vdata (BG r) = #vdata r -fun get_edata (BG r) = #edata r -fun get_source (BG r) = #source r -fun get_target (BG r) = #target r -fun get_nhd (BG r) = #nhd r -fun get_bboxes (BG r) = #bboxes r -fun get_bbox_rel (BG r) = #bbox_rel r -fun get_bbox_child_rel (BG r) = #bbox_child_rel r -fun get_g_anno (BG r) = #g_anno r -fun get_v_anno (BG r) = #v_anno r -fun get_e_anno (BG r) = #e_anno r -fun get_bb_anno (BG r) = #bb_anno r - -fun set_vdata x = update_vdata (fn _ => x) -fun set_edata x = update_edata (fn _ => x) -fun set_source x = update_source (fn _ => x) -fun set_target x = update_target (fn _ => x) -fun set_nhd x = update_nhd (fn _ => x) -fun set_bboxes x = update_bboxes (fn _ => x) -fun set_bbox_rel x = update_bbox_rel (fn _ => x) -fun set_bbox_child_rel x = update_bbox_child_rel (fn _ => x) -fun set_g_anno x = update_g_anno (fn _ => x) -fun set_v_anno x = update_v_anno (fn _ => x) -fun set_e_anno x = update_e_anno (fn _ => x) -fun set_bb_anno x = update_bb_anno (fn _ => x) - - - - -(*********************************) -(******* DERIVED ACCESSORS *******) -(*********************************) - -(* any graph updaters below should produce valid !-graphs (or fail with - * exception/NONE) *) - -(* note a graph is empty iff it has no vertices or bboxes *) -fun is_empty g = VTab.is_empty (get_vdata g) andalso B.NSet.is_empty (get_bboxes g) - -fun get_vertices g = VTab.get_dom_set (get_vdata g) -fun get_edges g = ETab.get_dom_set (get_edata g) -(* get_bboxes is an accessor fn *) - -fun has_vertex g = V.NSet.contains (get_vertices g) -fun has_edge g = E.NSet.contains (get_edges g) -fun has_bbox g = B.NSet.contains (get_bboxes g) - -fun is_wire_vertex g v = - case VTab.get_opt (get_vdata g) v - of SOME WVert => true - | SOME _ => false - | NONE => raise no_such_vertex_exp ("is_wire_vertex",v,g) -fun is_node_vertex g v = not (is_wire_vertex g v) -fun get_wire_vertices g = V.NSet.filter (is_wire_vertex g) (get_vertices g) -fun get_node_vertices g = V.NSet.filter (is_node_vertex g) (get_vertices g) - -val num_vertices = V.NSet.cardinality o get_vertices -val num_edges = E.NSet.cardinality o get_edges -val num_bboxes = B.NSet.cardinality o get_bboxes - - -fun get_edge_source g e = EVFn.get (get_source g) e - handle Map.no_such_elem_exp () => - raise no_such_edge_exp ("get_edge_source", e, g) - -fun get_edge_target g e = EVFn.get (get_target g) e - handle Map.no_such_elem_exp () => - raise no_such_edge_exp ("get_edge_target", e, g) - -fun get_vertex_data g v = VTab.get (get_vdata g) v - handle Map.no_such_elem_exp () => - raise no_such_vertex_exp ("get_vertex_data", v, g) - -fun get_edge_dir_and_data g e = ETab.get (get_edata g) e - handle Map.no_such_elem_exp () => - raise no_such_edge_exp ("get_edge_dir_and_data", e, g) - -fun get_edge_info g e = (get_edge_dir_and_data g e, (get_edge_source g e, get_edge_target g e)) - -fun 
get_edge_dir g e = #1 (get_edge_dir_and_data g e) -fun get_edge_data g e = #2 (get_edge_dir_and_data g e) -fun is_dir_edge g e = (get_edge_dir g e) = Directed - -fun update_vertex_data f v g = - if has_vertex g v then g |> update_vdata (VTab.map_entry f v) - else raise no_such_vertex_exp ("update_vertex_data", v, g) - -fun update_edge_data f e g = - if has_edge g e then - g |> update_edata (ETab.map_entry (fn (dir,d) => (dir, f d)) e) - else - raise no_such_edge_exp ("update_edge_data", e, g) - -fun update_edge_dir f e g = - if has_edge g e then - g |> update_edata (ETab.map_entry (fn (dir,d) => (f dir, d)) e) - else - raise no_such_edge_exp ("update_edge_dir", e, g) - -fun set_vertex_data d = update_vertex_data (K d) -fun set_edge_data d = update_edge_data (K d) -fun set_edge_dir dir = update_edge_dir (K dir) - -fun get_vertices_in_bbox g bb = - if has_bbox g bb then BVRel.img (get_bbox_rel g) bb - else raise no_such_bbox_exp ("get_vertices_in_bbox", bb, g) - -fun get_bboxes_containing_vertex g v = - if has_vertex g v then BVRel.inv_img (get_bbox_rel g) v - else raise no_such_vertex_exp ("get_bboxes_containing_vertex", v, g) - -fun get_bbox_parents g bb = - if has_bbox g bb then BBRel.inv_img (get_bbox_child_rel g) bb - else raise no_such_bbox_exp ("get_bbox_parents", bb, g) - -fun get_bbox_children g bb = - if has_bbox g bb then BBRel.img (get_bbox_child_rel g) bb - else raise no_such_bbox_exp ("get_bbox_children", bb, g) - -fun bbox_has_parents g bb = - if has_bbox g bb then BBRel.cod_contains (get_bbox_child_rel g) bb - else raise no_such_bbox_exp ("bbox_has_parents", bb, g) - -fun bbox_has_children g bb = - if has_bbox g bb then BBRel.dom_contains (get_bbox_child_rel g) bb - else raise no_such_bbox_exp ("bbox_has_children", bb, g) - -fun get_bboxed g = BVRel.get_cod_set (get_bbox_rel g) -fun get_unbboxed g = V.NSet.subtract (get_vertices g) (get_bboxed g) -fun get_empty_bboxes g = B.NSet.subtract (get_bboxes g) (BVRel.get_dom_set (get_bbox_rel g)) - -fun is_bboxed g v = if has_vertex g v - then V.NSet.contains (BVRel.get_cod_set (get_bbox_rel g)) v - else raise no_such_vertex_exp ("is_bboxed", v, g) - -val get_graph_annotation = get_g_anno -val set_graph_annotation = set_g_anno -fun get_vertex_annotation_opt g v = VTab.get_opt (get_v_anno g) v -fun get_vertex_annotation g v = VTab.get (get_v_anno g) v -fun set_vertex_annotation (v,ann) g = g |> update_v_anno (VTab.set (v,ann)) -fun get_edge_annotation_opt g e = ETab.get_opt (get_e_anno g) e -fun get_edge_annotation g e = ETab.get (get_e_anno g) e -fun set_edge_annotation (e,ann) g = g |> update_e_anno (ETab.set (e,ann)) -fun get_bbox_annotation_opt g bb = BTab.get_opt (get_bb_anno g) bb -fun get_bbox_annotation g bb = BTab.get (get_bb_anno g) bb -fun set_bbox_annotation (bb,ann) g = g |> update_bb_anno (BTab.set (bb,ann)) - -(**************************************) -(***** EXTRA DATA/SUBST FUNCTIONS *****) -(**************************************) - -fun get_nvdata_tab g = VTab.fold (fn (k,v) => case v of NVert nv => VTab.add (k, nv) | _ => I) (get_vdata g) VTab.empty -val get_vdata_tab = get_vdata -val get_v_anno_tab = get_v_anno -fun get_edata_tab g = ETab.fold (fn (k,v) => ETab.add (k, snd v)) (get_edata g) ETab.empty -val default_wv_vdata = WVert -val default_nv_vdata = NVert default_nvdata -fun default_nv_vdata_of_typestring s = NVert (default_nvdata_of_typestring s) - -fun subst_in_vdata sub (NVert nv) = - let val (sub', nv') = subst_in_nvdata sub nv - in (sub', NVert nv') end - | subst_in_vdata sub (WVert) = (sub, WVert) - -fun 
apply_data_subst sub graph = - (sub, graph) - |> V.NSet.fold (fn v => fn (sub, g) => ( - let val (sub', vd) = subst_in_vdata sub (get_vertex_data g v) - in (sub', g |> set_vertex_data vd v) end - )) (get_vertices graph) - |> E.NSet.fold (fn e => fn (sub, g) => ( - let val (sub', ed) = subst_in_edata sub (get_edge_data g e) - in (sub', g |> set_edge_data ed e) end - )) (get_edges graph) - -fun vdata_eq (NVert nv1, NVert nv2) = Data.nvdata_eq (nv1,nv2) - | vdata_eq (WVert, WVert) = true - | vdata_eq _ = false - -fun match_vdata (NVert nv1, NVert nv2) psubst = Data.match_nvdata (nv1,nv2) psubst - | match_vdata (WVert, WVert) psubst = SOME psubst - | match_vdata _ _ = NONE - -fun pretty_vdata (NVert n) = Pretty.block[Pretty.str "N{", Data.pretty_nvdata n, Pretty.str "}"] - | pretty_vdata (WVert) = Pretty.str "W{}" - - -fun init_psubst pattern target = - init_psubst_from_data (get_nvdata_tab pattern, get_edata_tab pattern) - (get_nvdata_tab target, get_edata_tab target) - - - -(**************************************) -(******** TRAVERSAL FUNCTIONS *********) -(**************************************) - -(* given an edge and vertex, get the other end *) -fun edge_get_other_vertex g e v1 = let - val (s,t) = (get_edge_source g e, get_edge_target g e) -in if V.name_eq (v1, s) then t - else if V.name_eq(v1, t) then s - else raise not_an_endpoint_exp ("edge_get_other_vertex",e,v1,g) -end - -(* Convenience functions for adjacent edges *) - -(* note these ignore direction *) -fun get_in_edges g v = - if has_vertex g v then EVFn.inv_img (get_target g) v - else raise no_such_vertex_exp ("get_in_edges", v, g) - - -fun get_out_edges g v = - if has_vertex g v then EVFn.inv_img (get_source g) v - else raise no_such_vertex_exp ("get_out_edges", v, g) - -fun get_adj_edges' g v = - E.NSet.union_merge (EVFn.inv_img (get_source g) v) - (EVFn.inv_img (get_target g) v) -fun get_adj_edges g v = - if has_vertex g v - then get_adj_edges' g v - else raise no_such_vertex_exp ("get_adj_edges", v, g) - -fun get_successor_vertices g v = - E.NSet.fold (fn e => ( - if is_dir_edge g e - then let val v1 = get_edge_target g e - in if V.name_eq (v,v1) then I else V.NSet.add v1 - end - else I - )) (get_out_edges g v) V.NSet.empty - -(* vertices connected to a directed in-edge of the given one *) -fun get_predecessor_vertices g v = - E.NSet.fold (fn e => ( - if is_dir_edge g e - then let val v1 = get_edge_source g e - in if V.name_eq (v,v1) then I else V.NSet.add v1 - end - else I - )) (get_in_edges g v) V.NSet.empty - -(* vertices connected to an undirected edge of the given one *) -fun get_sibling_vertices g v = - V.NSet.empty - |> E.NSet.fold (fn e => ( - if not (is_dir_edge g e) - then let val v1 = get_edge_target g e - in if V.name_eq (v,v1) then I else V.NSet.add v1 - end - else I - )) (get_out_edges g v) - |> E.NSet.fold (fn e => ( - if not (is_dir_edge g e) - then let val v1 = get_edge_source g e - in if V.name_eq (v,v1) then I else V.NSet.add v1 - end - else I - )) (get_in_edges g v) - -fun get_adj_vertices g v = - V.NSet.empty |> E.NSet.fold (fn e => - (let val v1 = get_edge_source g e - in if V.name_eq (v,v1) then I else V.NSet.add v1 end)) (get_in_edges g v) - |> E.NSet.fold (fn e => - (let val v1 = get_edge_target g e - in if V.name_eq (v,v1) then I else V.NSet.add v1 end)) (get_out_edges g v) - -fun get_adj_vertices_to_set g vset = - V.NSet.subtract (V.NSet.fold (V.NSet.union_merge o get_adj_vertices g) vset V.NSet.empty) vset - -fun get_in_dir_edges g = E.NSet.filter (fn e => get_edge_dir g e = Directed) o 
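(* Illustrative note -- not part of the original file: the adjacency helpers
   above deliberately exclude self-loops via the V.name_eq check. For a single
   directed edge v1 -> v2 (hypothetical names), get_successor_vertices g v1 = {v2}
   and get_predecessor_vertices g v2 = {v1}, whereas a directed self-loop on v1
   contributes nothing to either set. *)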
get_in_edges g -fun get_out_dir_edges g = E.NSet.filter (fn e => get_edge_dir g e = Directed) o get_out_edges g -fun get_adj_undir_edges g = E.NSet.filter (fn e => get_edge_dir g e = Undirected) o get_adj_edges g - -fun is_self_loop g e = V.name_eq (get_edge_source g e, get_edge_target g e) -fun get_self_loops g v = E.NSet.filter (is_self_loop g) (get_adj_edges g v) - -fun get_dir_self_loops g v = - E.NSet.filter (fn e => is_self_loop g e andalso is_dir_edge g e) - (get_adj_edges' g v) -fun get_undir_self_loops g v = - E.NSet.filter (fn e => is_self_loop g e andalso not (is_dir_edge g e)) - (get_adj_edges' g v) - - -(* set the arity. note that (undir) self-loops are counted twice *) -fun get_arity g v = let - fun card f = E.NSet.cardinality (f g v) -in Arity.zero |> Arity.set_in (card get_in_dir_edges) - |> Arity.set_out (card get_out_dir_edges) - |> Arity.set_undir (card get_adj_undir_edges + card get_undir_self_loops) -end - -fun adj_edge_classes g v = ( - get_in_dir_edges g v, - get_out_dir_edges g v, - get_adj_undir_edges g v -) - -fun get_only_edge g v = E.NSet.tryget_singleton (get_adj_edges' g v) - -(* need these here for add_named_edge *) -fun wv_get_other_edge g wv e = E.NSet.tryget_singleton (E.NSet.delete e (get_adj_edges g wv)) -fun get_wire_containing_edge g e = let - (* proceed in the direction of v away from prev_e until we hit e, a boundary, or a - node-vertex. return endpoint and edges encountered *) - fun traverse v prev_e = - if is_node_vertex g v then (v, V.NSet.empty, E.NSet.empty) - else case wv_get_other_edge g v prev_e - of SOME new_e => - if E.name_eq (e, new_e) (* come back around, is a circle *) - then (get_edge_source g e, V.NSet.empty, E.NSet.empty) (* so we get the same node for both endpoints *) - else (* v is a wire vertex that is not an endpoint *) - let val (v', wvs, es) = traverse (edge_get_other_vertex g new_e v) new_e - in (v', wvs |> V.NSet.add v, es |> E.NSet.add new_e) - end - | NONE => (v, V.NSet.empty, E.NSet.empty) - val (s_end,s_wvs,s_es) = traverse (get_edge_source g e) e - val (t_end,t_wvs,t_es) = traverse (get_edge_target g e) e - val es = s_es |> E.NSet.union_merge t_es |> E.NSet.add e -(* for circles, we take the minimum name to be s=t and the rest to be internal wire-vertices *) -in if V.name_eq (s_end,t_end) andalso is_wire_vertex g s_end then - let - val wvs = s_wvs |> V.NSet.union_merge t_wvs |> V.NSet.add s_end - val min_wv = (the o V.NSet.get_min) wvs - in (min_wv, min_wv, wvs |> V.NSet.delete min_wv, es) - end - else (s_end, t_end, s_wvs |> V.NSet.union_merge t_wvs, es) -end - - -(*****************************************) -(****** ADD/REMOVE/MODIFY FUNCTIONS ******) -(*****************************************) - -fun check_vertex_nhd v nhd g = -let - val ctxs = Nhd.get_edge_contexts nhd - val nhd_edges = fold (fn (e,_,_) => E.NSet.add e) ctxs E.NSet.empty - val v_bboxes = get_bboxes_containing_vertex g v - fun check_bbox_nesting (b1 :: b2 :: bs) = - not (B.NSet.contains v_bboxes b1) andalso - BBRel.is_mapped (get_bbox_child_rel g) (b1, b2) andalso - check_bbox_nesting (b2 :: bs) - | check_bbox_nesting [b1] = not (B.NSet.contains v_bboxes b1) - | check_bbox_nesting [] = true - fun check_edge_dir e Nhd.UNDIR = (get_edge_dir g e = Undirected) - | check_edge_dir e Nhd.IN = (get_edge_dir g e = Directed andalso - V.name_eq (get_edge_target g e, v)) - | check_edge_dir e Nhd.OUT = (get_edge_dir g e = Directed andalso - V.name_eq (get_edge_source g e, v)) - fun check_other_vertex e bs = - B.NSet.eq (B.NSet.union_merge v_bboxes (B.NSet.of_list 
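(* Illustrative note -- not part of the original file: an example run of
   get_wire_containing_edge, with hypothetical names. In a graph with
   node-vertices n1, n2, wire-vertices w1, w2 and directed edges
   e1 : n1 -> w1, e2 : w1 -> w2, e3 : w2 -> n2, the traversal from either end of
   e2 stops at the first node-vertex, so
     get_wire_containing_edge g e2 = (n1, n2, {w1,w2}, {e1,e2,e3}).
   For a closed loop of wire-vertices, the minimum-named wire-vertex is returned
   as both endpoints, as noted in the circle case above. *)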
bs)) - (get_bboxes_containing_vertex g (edge_get_other_vertex g e v)) -in - E.NSet.eq nhd_edges (get_adj_edges g v) andalso - forall (fn (e,d,bs) => ( - check_bbox_nesting bs andalso - check_edge_dir e d andalso - check_other_vertex e bs - )) ctxs -end - -(*fun set_vertex_nhd v nhd g =*) - -fun add_vertex_to_bbox' bb v g = - if (has_bbox g bb) then g |> update_bbox_rel (BVRel.add (bb, v)) - else raise no_such_bbox_exp ("add_vertex_to_bbox'", bb, g) - -fun add_vertices_to_bbox' bb = V.NSet.fold (add_vertex_to_bbox' bb) - -fun add_named_edge' e (dir,d) s t g = let - fun extend_bbs g = let - val (_,_,wvs,_) = get_wire_containing_edge g e - val s_bbs = get_bboxes_containing_vertex g s - val t_bbs = get_bboxes_containing_vertex g t - val wire_bbs = B.NSet.union_merge s_bbs t_bbs - in - B.NSet.fold (fn b => add_vertices_to_bbox' b wvs) wire_bbs g - end -in - g |> update_edata (ETab.add (e,(dir,d))) - |> update_source (EVFn.add (e, s)) - |> update_target (EVFn.add (e, t)) - |> extend_bbs -end - -fun assert_can_add_edge (dir,d) s t g = let - val () = if has_vertex g s then () - else raise no_such_vertex_exp ("add_named_edge", s, g) - val () = if has_vertex g t then () - else raise no_such_vertex_exp ("add_named_edge", t, g) - (* crazy long function to check that we don't add an incompatible - * edge to a wire vertex *) - fun can_link get_bad_edge_end v = - if not (is_wire_vertex g v) then () - else let val v_es = get_adj_edges' g v in - if E.NSet.is_empty v_es then () - else - case E.NSet.tryget_singleton v_es - of NONE => raise wire_vertex_exp ("too many edges",g,v) - | SOME e => - let val (dod,ed) = get_edge_dir_and_data g e in - if dod <> dir - then raise wire_vertex_exp ("directedness mismatch",g,v) - else if not (edata_eq (ed,d)) - then raise wire_vertex_exp ("data mismatch",g,v) - else if dir = Directed andalso - V.name_eq (v,get_bad_edge_end g e) - then raise wire_vertex_exp ("directed edges clash",g,v) - else if dir = Undirected andalso - V.name_eq (get_edge_source g e,get_edge_target g e) - then raise wire_vertex_exp ("too many edges",g,v) - else () - end - end - val _ = can_link get_edge_source s - val _ = can_link get_edge_target t -in () end - -fun add_named_edge e dd s t g = - if (has_edge g e) then raise duplicate_edge_exp (e,g) - else (assert_can_add_edge dd s t g; - add_named_edge' e dd s t g) - -fun add_edge' dd s t g = let - val fresh_name = E.NSet.new (get_edges g) E.default_name -in (fresh_name, add_named_edge' fresh_name dd s t g) -end - -fun add_edge dd s t g = - (assert_can_add_edge dd s t g; - add_edge' dd s t g) - -fun add_edge_anon dd s t g = snd (add_edge dd s t g) - -fun delete_edge e g = - g |> update_edata (ETab.delete e) - |> update_e_anno (ETab.delete e) - |> update_source (EVFn.delete e) - |> update_target (EVFn.delete e) - -fun add_named_vertex' v d = - update_vdata (VTab.add (v,d)) - -fun add_named_vertex v d g = - if (has_vertex g v) then raise duplicate_vertex_exp (v,g) - else g |> add_named_vertex' v d - -fun add_vertex d g = let - val fresh_name = V.NSet.new (get_vertices g) V.default_name -in (fresh_name, add_named_vertex' fresh_name d g) -end - -fun add_vertex_anon d g = snd (add_vertex d g) - -fun delete_vertex v g = - g |> update_vdata (VTab.delete v) - |> update_v_anno (VTab.delete v) - |> E.NSet.fold delete_edge (get_adj_edges' g v) - |> update_bbox_rel (BVRel.inv_delete v) - -fun add_named_bbox' bb = update_bboxes (B.NSet.add bb) -fun add_named_bbox bb g = - if (has_bbox g bb) then raise duplicate_bbox_exp (bb,g) - else g |> add_named_bbox' bb - -fun 
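(* Illustrative note -- not part of the original file: a minimal construction
   sketch using the operations above; names are hypothetical and ed stands for
   some value of type Data.edata.
     val g0 = empty
     val (v1, g1) = add_vertex default_nv_vdata g0
     val (v2, g2) = add_vertex default_nv_vdata g1
     val (w,  g3) = add_vertex default_wv_vdata g2
     val (e1, g4) = add_edge (Directed, ed) v1 w g3
     val (e2, g5) = add_edge (Directed, ed) w v2 g4
   Each add_edge goes through assert_can_add_edge, which rejects edges that would
   give the wire-vertex w a direction or data mismatch, or a second edge on the
   same side. *)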
add_bbox g = let - val fresh_name = B.NSet.new (get_bboxes g) B.default_name -in (fresh_name, add_named_bbox' fresh_name g) -end - -fun add_bbox_anon g = snd (add_bbox g) - -fun delete_bbox bb g = - g |> update_bboxes (B.NSet.delete bb) - |> update_bbox_rel (BVRel.delete bb) - |> update_bbox_child_rel (BBRel.delete bb) - |> update_bbox_child_rel (BBRel.inv_delete bb) - |> update_bb_anno (BTab.delete bb) - -fun remove_vertex_from_bbox' bb v = update_bbox_rel (BVRel.unmap (bb, v)) -fun remove_vertex_from_bbox bb v g = - if (has_bbox g bb) then g |> remove_vertex_from_bbox' bb v - else raise no_such_bbox_exp ("remove_vertex_from_bbox", bb, g) - -fun remove_vertices_from_bbox' bb = - update_bbox_rel o (V.NSet.fold (fn v => BVRel.unmap (bb, v))) -fun remove_vertices_from_bbox bb vs g = - if (has_bbox g bb) then g |> remove_vertices_from_bbox' bb vs - else raise no_such_bbox_exp ("remove_vertex_from_bbox", bb, g) - -local - fun add_child_unchecked bb bb_child rel = let - val grandchildren = B.NSet.add bb_child (BBRel.img rel bb_child) - val grandparents = B.NSet.add bb (BBRel.inv_img rel bb) - in - B.NSet.fold (fn bp => B.NSet.fold (fn bc => BBRel.add (bp,bc)) - grandchildren) - grandparents - rel - end -in - fun add_child_to_bbox_checked' bb bb_child g = - if not (has_bbox g bb_child) - then raise no_such_bbox_exp ("add_child_to_bbox_checked'", bb_child, g) - else if B.name_eq (bb,bb_child) - then g (* no-op *) - else if not (V.NSet.sub_set (get_vertices_in_bbox g bb_child) - (get_vertices_in_bbox g bb)) - then raise bbox_bad_parent_exp (bb,bb_child,g) - else - let - fun add_child rel = - if BBRel.is_mapped rel (bb_child,bb) - then raise bbox_bad_parent_exp (bb,bb_child,g) - else add_child_unchecked bb bb_child rel - in - g |> update_bbox_child_rel add_child - end - - val add_child_to_bbox' = update_bbox_child_rel oo add_child_unchecked -end - -fun add_child_to_bbox bb bb_child g = - if not (has_bbox g bb) then raise no_such_bbox_exp ("add_child_to_bbox", bb, g) - else add_child_to_bbox_checked' bb bb_child g - -fun add_children_to_bbox' bb bb_children g = - B.NSet.fold (add_child_to_bbox' bb) bb_children g -fun add_children_to_bbox bb bb_children g = - if not (has_bbox g bb) then raise no_such_bbox_exp ("add_children_to_bbox", bb, g) - else B.NSet.fold (add_child_to_bbox_checked' bb) bb_children g - -fun add_parents_to_bbox' bb bb_parents g = - B.NSet.fold (fn bp => add_child_to_bbox' bp bb) bb_parents g - -fun add_parents_to_bbox bb bb_parents g = - if not (has_bbox g bb) then raise no_such_bbox_exp ("add_parents_to_bbox", bb, g) - else B.NSet.fold (fn bp => add_child_to_bbox' bp bb) bb_parents g - -(* TODO: these functions need to enforce transitive closure! 
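(* Illustrative note -- not part of the original file: how add_child_unchecked
   above keeps the child relation transitively closed (names hypothetical). If
   bb0 is already a parent of bb, and bb_child already has a child bb3, then
   adding bb_child under bb records all of (bb, bb_child), (bb, bb3),
   (bb0, bb_child) and (bb0, bb3), i.e. every grandparent is related to every
   grandchild. *)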
*) - -fun remove_child_from_bbox bb bb_child g = - if not (has_bbox g bb) then raise no_such_bbox_exp ("remove_child_from_bbox", bb, g) - else if not (has_bbox g bb_child) then raise no_such_bbox_exp ("remove_child_from_bbox", bb_child, g) - else - let - fun remove_child rel = - if B.NSet.nonempty_intersect (BBRel.img rel bb) - (BBRel.inv_img rel bb_child) - then rel - else BBRel.unmap (bb,bb_child) rel - in - g |> update_bbox_child_rel remove_child - end - -fun clear_bbox_children bb g = - if not (has_bbox g bb) then raise no_such_bbox_exp ("clear_bbox_children", bb, g) - else g |> update_bbox_child_rel (BBRel.delete bb) - - -(*************************************) -(*** OPERATIONS ON MULTIPLE GRAPHS ***) -(*************************************) - -fun is_subgraph g1 g2 = -let - val e1s = get_edges g1 - val e2s = get_edges g2 - val v1s = get_vertices g1 - val v2s = get_vertices g2 - val bb1s = get_bboxes g1 - val bb2s = get_bboxes g2 -in - (* check vertex names *) - (V.NSet.sub_set v1s v2s) andalso - (* check edge names *) - (E.NSet.sub_set e1s e2s) andalso - (* check bbox names *) - (B.NSet.sub_set bb1s bb2s) andalso - (* check same vertex data *) - (VTab.forall (fn (n,d1) => vdata_eq (d1, get_vertex_data g2 n)) (get_vdata g1)) andalso - (* check same edge data, and source and target maps agree *) - (ETab.forall (fn (n,(dir1,d1)) => - let - val (dir2, d2) = get_edge_dir_and_data g2 n - in dir1 = dir2 andalso - Data.edata_eq (d1, d2) andalso - V.name_eq (get_edge_source g1 n, get_edge_source g2 n) andalso - V.name_eq (get_edge_target g1 n, get_edge_target g2 n) - end) (get_edata g1)) andalso - (* check bbox containment and children agree *) - (B.NSet.forall (fn bb => - V.NSet.sub_set (get_vertices_in_bbox g1 bb) (get_vertices_in_bbox g2 bb) andalso - B.NSet.eq (get_bbox_children g1 bb) (get_bbox_children g2 bb) - ) (get_bboxes g1)) -end - -(* should be equivalent to (is_subgraph g1 g2 andalso is_subgraph g2 g1) *) -fun exact_eq g1 g2 = - (num_vertices g1 = num_vertices g2) andalso - (num_edges g1 = num_edges g2) andalso - (num_bboxes g1 = num_bboxes g2) andalso - is_subgraph g1 g2 andalso - (B.NSet.forall (fn bb => - V.NSet.eq (get_vertices_in_bbox g1 bb) (get_vertices_in_bbox g2 bb) - ) (get_bboxes g1)) - - -(* note this assumes that sg < g. In particular, the edges in sg are not (explicitly) deleted. 
*) -fun delete_subgraph sg g = - g |> V.NSet.fold delete_vertex (get_vertices sg) - |> B.NSet.fold delete_bbox (get_bboxes sg) - - -(************************************) -(************* RENAMING *************) -(************************************) - -(*exception rename_do_nothing_exp of unit*) - - - -(* rename vertices using the given substitution *) -fun rename_vertices sub g = let - (* this will fail with name_clash_exp if the renaming is bad *) - val rnm = VSub.extend_fixed (get_vertices g) sub - val rnmi = VSub.inverse_of rnm -in - g |> update_vdata (fn m => VTab.compose (m,rnmi)) - |> update_v_anno (fn m => VTab.compose (m,rnmi)) - |> update_source (fn m => EVFn.compose (rnm,m)) - |> update_target (fn m => EVFn.compose (rnm,m)) - |> update_bbox_rel (fn m => BVRel.compose (rnm,m)) -end - -fun rename_vertex n1 n2 g = rename_vertices (VSub.empty |> VSub.add (n1,n2)) g - handle VSub.name_clash_exp _ => raise duplicate_vertex_exp (n2,g) -fun rename_vertex_opt n1 n2 g = SOME (rename_vertex n1 n2 g) - handle duplicate_vertex_exp _ => NONE - -(* rename edges using the given substitution *) -fun rename_edges sub g = let - (* this will fail with name_clash_exp if the renaming is bad *) - val rnm = ESub.extend_fixed (get_edges g) sub - val rnmi = ESub.inverse_of rnm -in - g |> update_edata (fn m => ETab.compose (m,rnmi)) - |> update_e_anno (fn m => ETab.compose (m,rnmi)) - |> update_source (fn m => EVFn.compose (m,rnmi)) - |> update_target (fn m => EVFn.compose (m,rnmi)) -end - -fun rename_edge n1 n2 g = rename_edges (ESub.empty |> ESub.add (n1,n2)) g - handle ESub.name_clash_exp _ => raise duplicate_edge_exp (n2,g) -fun rename_edge_opt n1 n2 g = SOME (rename_edge n1 n2 g) - handle duplicate_edge_exp _ => NONE - -fun rename_bboxes sub g = let - (* this will fail with name_clash_exp if the renaming is bad *) - val rnm = BSub.extend_fixed (get_bboxes g) sub - val rnmi = BSub.inverse_of rnm -in - g |> update_bboxes (BSub.img_of_set rnm) - |> update_bb_anno (fn m => BTab.compose (m,rnmi)) - |> update_bbox_rel (fn m => BVRel.compose (m, rnmi)) - |> update_bbox_child_rel (fn m => BBRel.compose3 (rnm, m, rnmi)) -end - -fun rename_bbox n1 n2 g = rename_bboxes (BSub.empty |> BSub.add (n1,n2)) g - handle BSub.name_clash_exp _ => raise duplicate_bbox_exp (n2,g) -fun rename_bbox_opt n1 n2 g = SOME (rename_bbox n1 n2 g) - handle duplicate_bbox_exp _ => NONE - -fun rename_bang_graph (vsub,esub,bsub) g = let - val vrnm = VSub.extend_fresh (get_vertices g) vsub - val vrnmi = VSub.inverse_of vrnm - val ernm = ESub.extend_fresh (get_edges g) esub - val ernmi = ESub.inverse_of ernm - val brnm = BSub.extend_fresh (get_bboxes g) bsub - val brnmi = BSub.inverse_of brnm -in - ( - (vrnm,ernm,brnm), - g |> update_vdata (fn m => VTab.compose (m,vrnmi)) - |> update_edata (fn m => ETab.compose (m,ernmi)) - |> update_source (fn m => EVFn.compose3 (vrnm,m,ernmi)) - |> update_target (fn m => EVFn.compose3 (vrnm,m,ernmi)) - |> update_bboxes (BSub.img_of_set brnm) - |> update_bbox_rel (fn m => BVRel.compose3 (vrnm, m, brnmi)) - |> update_bbox_child_rel (fn m => BBRel.compose3 (brnm, m, brnmi)) - |> update_v_anno (fn m => VTab.compose (m,vrnmi)) - |> update_e_anno (fn m => ETab.compose (m,ernmi)) - |> update_bb_anno (fn m => BTab.compose (m,brnmi)) - ) -end - -val rename_bang_graph_anon = snd oo rename_bang_graph -fun rename_ograph (vsub,esub) g = let - val ((vsub',esub',_), g') = rename_bang_graph (vsub,esub,BSub.empty) g -in ((vsub',esub'), g') -end -val rename_ograph_anon = snd oo rename_ograph - - -fun edges_between g v1 
v2 = - if V.name_eq (v1,v2) - then get_self_loops g v1 - else E.NSet.intersect (get_adj_edges g v1) (get_adj_edges g v2) -fun has_edges_between g v1 v2 = not (E.NSet.is_empty (edges_between g v1 v2)) - -fun dir_edges_between g v1 v2 = - ( - E.NSet.intersect (get_out_dir_edges g v1) (get_in_edges g v2), - E.NSet.intersect (get_out_dir_edges g v2) (get_in_edges g v1) - ) - - -(* avoids re-checking is_wire_vertex *) -fun is_input' g v = E.NSet.is_empty (get_in_edges g v) andalso - (case E.NSet.get_local_bot (get_out_edges g v) - of SOME e => is_dir_edge g e - | NONE => false) -fun is_output' g v = E.NSet.is_empty (get_out_edges g v) andalso - (case E.NSet.get_local_bot (get_in_edges g v) - of SOME e => is_dir_edge g e - | NONE => false) -fun is_boundary' g v = let - val in_edges = get_in_edges g v - val out_edges = get_out_edges g v -in - (E.NSet.is_empty in_edges andalso E.NSet.is_empty out_edges) - orelse - (E.NSet.is_empty in_edges andalso E.NSet.is_singleton out_edges) - orelse - (E.NSet.is_singleton in_edges andalso E.NSet.is_empty out_edges) -end - -fun is_input g v = is_wire_vertex g v andalso is_input' g v -fun is_output g v = is_wire_vertex g v andalso is_output' g v -fun is_boundary g v = is_wire_vertex g v andalso is_boundary' g v - -fun get_boundary g = V.NSet.filter (is_boundary' g) (get_wire_vertices g) -fun get_inputs g = V.NSet.filter (is_input' g) (get_wire_vertices g) -fun get_outputs g = V.NSet.filter (is_output' g) (get_wire_vertices g) - - -fun merge g1 g2 = let - (* some pre-checking *) - (* we (may) have an implied span, where the centre of the span is - * the subgraph of common names (which may not be a graph at all) *) - - (* First, we check that the inclusions reflect !-box containment. - * This is actually slightly stronger than necessary, but will almost - * always be satisfied in practice. - *) - val shared_bbs = B.NSet.intersect (get_bboxes g1) (get_bboxes g2) - val _ = if B.NSet.forall (fn b => - B.NSet.eq (get_bbox_parents g1 b) - (get_bbox_parents g2 b) - ) shared_bbs - then () - else raise bad_graph_merge_exp ( - "shared !-boxes must have identical parents", - g1,g2) - val shared_vs = V.NSet.intersect (get_vertices g1) (get_vertices g2) - val _ = if V.NSet.forall (fn b => - B.NSet.eq (get_bboxes_containing_vertex g1 b) - (get_bboxes_containing_vertex g2 b) - ) shared_vs - then () - else raise bad_graph_merge_exp ( - "shared vertices must be in identical !-boxes", - g1,g2) - - (* Now we check that the inclusions are boundary-coherent; this means - * that anything that is an input of the intersection is an input of - * either g1 or g2, and similarly for outputs. 
- * This is complicated by: - * (a) the fact we store wire-vertex types on edges - * (b) undirected edges - *) - val _ = if V.NSet.forall (fn v => - not (is_wire_vertex g1 v) orelse - let - val adj1 = get_adj_edges' g1 v - val adj2 = get_adj_edges' g2 v - val adj = E.NSet.intersect adj1 adj2 - fun input_somewhere v = is_input' g1 v orelse is_input' g2 v - fun output_somewhere v = is_output' g1 v orelse is_output' g2 v - fun bnd_everywhere v = E.NSet.is_singleton adj1 - andalso - E.NSet.is_singleton adj2 - fun bnd_somewhere v = E.NSet.is_singleton adj1 - orelse - E.NSet.is_singleton adj2 - in - case (E.NSet.get_local_bot adj1,E.NSet.get_local_bot adj2) - of (SOME e1,SOME e2) => - let - val (dir1,data1) = get_edge_dir_and_data g1 e1 - val (dir2,data2) = get_edge_dir_and_data g2 e2 - in - dir1 = dir2 andalso edata_eq (data1,data2) andalso - if E.NSet.is_empty adj - then - if dir1 = Directed - then input_somewhere v andalso output_somewhere v - else bnd_everywhere v - else - case E.NSet.tryget_singleton adj - of NONE => true (* not a boundary at all *) - | SOME e => - let - val s = get_edge_source g1 e - val t = get_edge_target g1 e - in - if V.name_eq (s,t) - then true (* circle *) - else if dir1 = Undirected - then bnd_somewhere v - else if V.name_eq (s,v) - then output_somewhere v - else input_somewhere v - end - end - | _ => true (* at least one is an isolated wv *) - end - ) shared_vs - then () else raise bad_graph_merge_exp ("not wire-coherent",g1,g2) - - (* Of course, we also need to check that the intersection is truly a - * subgraph of both g1 and g2 (ie: that they agree on types, edge sources - * and edge targets); we check that during the construction below *) - - (* start with g1 and add things from g2 to it *) - val vdata = - VTab.fold (fn (v,d) => fn tab => - case VTab.get_opt tab v - of SOME d' => if vdata_eq (d,d') then tab - else raise bad_graph_merge_exp ( - "shared vertices must have identical vdata",g1,g2) - | NONE => VTab.add (v,d) tab - ) (get_vdata g2) (get_vdata g1) - val edata = - ETab.fold (fn (e,(d as (dir,dd))) => fn tab => - case ETab.get_opt tab e - of SOME (d' as (dir',dd')) => - if dir = dir' andalso edata_eq (dd,dd') then tab - else raise bad_graph_merge_exp ( - "shared edges must have identical edata",g1,g2) - | NONE => ETab.add (e,d) tab - ) (get_edata g2) (get_edata g1) - val source = - EVFn.fold (fn (e,v) => fn f => - case EVFn.get_opt f e - of SOME v' => if V.name_eq (v,v') - then f - else if fst (ETab.get edata e) = Undirected - andalso (* might just be reversed *) - EVFn.is_mapped (get_target g1) (e,v) - andalso - EVFn.is_mapped (get_target g2) (e,v') - then f - else raise bad_graph_merge_exp ( - "source function inconsistent",g1,g2) - | NONE => EVFn.add (e,v) f - ) (get_source g2) (get_source g1) - val target = - EVFn.fold (fn (e,v) => fn f => - case EVFn.get_opt f e - of SOME v' => if V.name_eq (v,v') - then f - else if fst (ETab.get edata e) = Undirected - andalso (* might just be reversed *) - EVFn.is_mapped (get_source g1) (e,v) - andalso - EVFn.is_mapped (get_source g2) (e,v') - then f - else raise bad_graph_merge_exp ( - "target function inconsistent",g1,g2) - | NONE => EVFn.add (e,v) f - ) (get_target g2) (get_target g1) - val bboxes = B.NSet.union_merge (get_bboxes g2) (get_bboxes g1) - val bbox_rel = BVRel.fold BVRel.add (get_bbox_rel g2) (get_bbox_rel g1) - val bbox_child_rel = BBRel.fold BBRel.add (get_bbox_child_rel g2) - (get_bbox_child_rel g1) - - val nhd_vs1 = V.NSet.intersect (VTab.get_dom_set (get_nhd g1)) shared_vs - val nhd_vs2 = 
V.NSet.intersect (VTab.get_dom_set (get_nhd g2)) shared_vs - - val _ = if V.NSet.eq nhd_vs1 nhd_vs2 then () - else raise bad_graph_merge_exp ( - "shared vertices must have nhds BOTH graphs, or NEITHER graph",g1,g2) - - val nhd = VTab.fold (fn (v,n) => fn nhd' => ( - case VTab.get_opt nhd' v - of SOME n' => - if Nhd.eq (n,n') - then nhd' - else raise bad_graph_merge_exp ( - "shared vertices must have indentical nhds",g1,g2) - | NONE => VTab.add (v,n) nhd' - )) (get_nhd g1) (get_nhd g2) - - (* annotations from g1 supercede g2 *) - val g_anno = Json.fold Json.update (get_g_anno g1) (get_g_anno g2) - val v_anno = VTab.fold VTab.set (get_v_anno g1) (get_v_anno g2) - val e_anno = ETab.fold ETab.set (get_e_anno g1) (get_e_anno g2) - val bb_anno = BTab.fold BTab.set (get_bb_anno g1) (get_bb_anno g2) - - (* check wvs *) -in - BG { vdata = vdata, edata = edata, - source = source, target = target, nhd = nhd, - bboxes = bboxes, bbox_rel = bbox_rel, - bbox_child_rel = bbox_child_rel, - g_anno = g_anno, - v_anno = v_anno, - e_anno = e_anno, - bb_anno = bb_anno } -end - -fun merge_opt g1 g2 = SOME (merge g1 g2) - handle bad_graph_merge_exp _ => NONE - - -(********************************) -(******** WIRE FUNCTIONS ********) -(********************************) - -(* the edge-set is unique to a particular wire *) -fun wire_ord ( (_,_,_,es) , (_,_,_,es') ) = E.NSet.ord (es,es') -fun wire_eq w1 w2 = wire_ord (w1, w2) = EQUAL -fun pretty_wire (s,t,w,e) = Pretty.list "wire(" ")" [V.pretty_name s, V.pretty_name t, V.NSet.pretty w, E.NSet.pretty e] -fun delete_wire (_,_,wvs,es) h = V.NSet.fold delete_vertex wvs (E.NSet.fold delete_edge es h) - -fun get_wires_for_edges g es = let - fun gw es' = if E.NSet.is_empty es' then [] - else let val wire = get_wire_containing_edge g ((the o - E.NSet.get_min) es') - in wire :: gw (es' |> E.NSet.remove_set (#4 wire)) - end -in gw es -end - -fun get_wire_list g = get_wires_for_edges g (get_edges g) - -fun assert_wire_coherent g (w as (_,_,_,es)) = let - val sample_edge = the (E.NSet.get_local_bot es) - val (dir_or_undir,data) = get_edge_dir_and_data g sample_edge -in - (* FIXME: check all in same direction if directed *) - E.NSet.fold (fn e => fn w => - let - val (dou',data') = get_edge_dir_and_data g e - in - if (dir_or_undir <> dou') - then raise wire_exp ("Directedness of edges differs",g,w) - else if not (Data.edata_eq (data,data')) - then raise wire_exp ("Data of edges differs",g,w) - else w - end) es w -end - -fun wire_needs_wvs g (s,t,wvs,_) = - case V.NSet.get_local_bot wvs - of NONE => false - | SOME wv => - if is_wire_vertex g s then false - else if is_wire_vertex g t then false - else - let - val bbs = get_bboxes_containing_vertex g wv - fun bbox_only_here b = let - val vs = get_vertices_in_bbox g b - in - V.NSet.eq vs wvs - end - in - B.NSet.exists bbox_only_here bbs - end - -fun is_wire_minimal g (wire as (_,_,wvs,_)) = - V.NSet.is_empty wvs orelse - (V.NSet.is_singleton wvs andalso wire_needs_wvs g wire) - -fun minimise_wire' (wire as (s,t,wvs,es)) g = let - val dir_and_data = get_edge_dir_and_data g ((the o E.NSet.get_local_bot) es) -in - if wire_needs_wvs g wire - then - let - val any_wv = the (V.NSet.get_local_bot wvs) - val bbs = get_bboxes_containing_vertex g any_wv - val g = g |> delete_wire wire - val (wv,g) = g |> add_vertex WVert - val g = g |> B.NSet.fold (fn b => add_vertex_to_bbox' b wv) bbs - val (e1,g) = g |> add_edge' dir_and_data s wv - val (e2,g) = g |> add_edge' dir_and_data wv t - in ((s,t,V.NSet.single wv,E.NSet.of_list [e1,e2]), g) end - 
else - let val (e,g') = g |> delete_wire wire - |> add_edge' dir_and_data s t - in ((s,t,V.NSet.empty,E.NSet.single e), g') end -end -fun minimise_wire (wire as (_,_,wvs,_)) g = let - val _ = assert_wire_coherent g wire -in - if is_wire_minimal g wire then (wire,g) - else minimise_wire' wire g -end -val minimise_wire_anon = snd oo minimise_wire - -fun is_wire_normalised g (s,t,wvs,_) = - if is_node_vertex g s andalso is_node_vertex g t then - V.NSet.cardinality wvs = 2 - else - V.NSet.is_empty wvs - -fun normalise_wire' (wire as (s,t,wvs,es)) g = - if is_wire_vertex g s orelse is_wire_vertex g t then - (* add no extra wire-verts on boundary wires, bare wires, or circles *) - minimise_wire' wire g - else - (* add two extra wire verts on internal wires *) - let - (* bboxes to add the newly-created wv's to *) - val bboxes = B.NSet.empty - |> B.NSet.union_merge (get_bboxes_containing_vertex g s) - |> B.NSet.union_merge (get_bboxes_containing_vertex g t) - |> (case V.NSet.get_local_bot wvs - of SOME wv => B.NSet.union_merge (get_bboxes_containing_vertex g wv) - | NONE => I) - val dir_and_data = get_edge_dir_and_data g ((the o E.NSet.get_local_bot) es) - val g = g |> delete_wire wire - val (wv1,g) = g |> add_vertex WVert - val (wv2,g) = g |> add_vertex WVert - val g = g |> B.NSet.fold (fn b => add_vertex_to_bbox' b wv1 o - add_vertex_to_bbox' b wv2) bboxes - val (e1,g) = g |> add_edge' dir_and_data s wv1 - val (e2,g) = g |> add_edge' dir_and_data wv1 wv2 - val (e3,g) = g |> add_edge' dir_and_data wv2 t - in - ((s,t,V.NSet.of_list [wv1,wv2],E.NSet.of_list [e1,e2,e3]), g) - end -fun normalise_wire wire g = let - val _ = assert_wire_coherent g wire -in - if is_wire_normalised g wire - then (wire,g) - else normalise_wire' wire g -end -val normalise_wire_anon = snd oo normalise_wire - -fun minimise g = g |> fold minimise_wire_anon (get_wire_list g) -fun normalise g = g |> fold normalise_wire_anon (get_wire_list g) - -fun split_edge e g = let - val (dd, (s,t)) = get_edge_info g e - val bbs = if is_wire_vertex g s - then get_bboxes_containing_vertex g s - else if is_wire_vertex g t - then get_bboxes_containing_vertex g t - else B.NSet.union_merge (get_bboxes_containing_vertex g s) - (get_bboxes_containing_vertex g t) - val g' = delete_edge e g - val (vn,g') = add_vertex WVert g' - val vset = V.NSet.single vn - val g' = (B.NSet.fold (fn b => add_vertices_to_bbox' b vset) bbs g') - val (e1,g') = add_edge' dd s vn g' - val (e2,g') = add_edge' dd vn t g' -in - ((e1,vn,e2),g') -end - -(********************************) -(****** PLUGGING FUNCTIONS ******) -(********************************) - -fun get_plugging g v1 v2 = - case (get_only_edge g v1,get_only_edge g v2) - of (SOME e1,SOME e2) => - (case (get_edge_dir_and_data g e1,get_edge_dir_and_data g e2) - of ((Undirected,d1),(Undirected,d2)) => - if Data.edata_eq (d1,d2) - then SOME ((Undirected,d1),(v1,v2)) - else NONE - | ((Directed,d1),(Directed,d2)) => - if not (Data.edata_eq (d1,d2)) - then NONE - else if is_input g v1 andalso is_output g v2 - then SOME ((Directed,d1),(v2,v1)) - else if is_input g v2 andalso is_output g v1 - then SOME ((Directed,d1),(v1,v2)) - else NONE - | _ => NONE) - | _ => NONE - -val are_pluggable = is_some ooo get_plugging -fun plug v1 v2 g = - case get_plugging g v1 v2 of - NONE => raise plugging_exp (g,v1,v2) - | SOME (ed,(v1',v2')) => add_edge' ed v1' v2' g; -val plug_anon = snd ooo plug -fun plug_and_minimise g v1 v2 = let - val (e,g') = plug g v1 v2 -in - minimise_wire (get_wire_containing_edge g' e) g' -end -val 
plug_and_minimise_anon = snd ooo plug_and_minimise -fun plug_and_normalise g v1 v2 = let - val (e,g') = plug g v1 v2 -in - normalise_wire (get_wire_containing_edge g' e) g' -end -val plug_and_normalise_anon = snd ooo plug_and_normalise - - -fun find_open_subgraph g vset = let - fun absorb new accum = - if V.NSet.is_empty new then accum - else - let - val new' = - V.NSet.fold (fn v => ( - if is_wire_vertex g v andalso not (V.NSet.contains accum v) - then V.NSet.add v - else I - )) (get_adj_vertices_to_set g new) V.NSet.empty - in absorb new' (V.NSet.union_merge new' accum) - end -in absorb vset vset -end - -fun get_open_subgraph g vset = let - val retained = find_open_subgraph g vset - val removed = V.NSet.subtract (get_vertices g) retained -in - V.NSet.fold delete_vertex removed g -end - - -(************************************) -(********** BBOX FUNCTIONS **********) -(************************************) - -fun add_to_bbox b vs g = let - val () = if B.NSet.contains (get_bboxes g) b then () - else raise no_such_bbox_exp ("add_to_bbox",b,g) - val () = V.NSet.fold (fn v => K ( - if has_vertex g v then () - else raise no_such_vertex_exp ("add_to_bbox",v,g) - )) vs () - val to_add = find_open_subgraph g vs - fun raise_if_not_has_added_verts bb = let - val contents = get_vertices_in_bbox g bb - in - if V.NSet.sub_set to_add contents - then true - else raise bbox_bad_parent_exp (bb,b,g) - end - val parents = get_bbox_parents g b - val _ = B.NSet.forall raise_if_not_has_added_verts parents -in (to_add, g |> add_vertices_to_bbox' b to_add) -end - -val add_to_bbox_anon = snd ooo add_to_bbox - -fun remove_from_bbox b vs g = - if not (B.NSet.contains (get_bboxes g) b) then - raise no_such_bbox_exp ("remove_from_bbox",b,g) - else - let - val old_bvs = get_vertices_in_bbox g b - val new_bvs = find_open_subgraph g (V.NSet.subtract old_bvs vs) - val remove_bvs = V.NSet.subtract old_bvs new_bvs - fun raise_if_has_removed_verts bb = let - val contents = get_vertices_in_bbox g bb - in - if V.NSet.nonempty_intersect remove_bvs contents - then raise bbox_bad_parent_exp (b,bb,g) - else false - end - val children = get_bbox_children g b - val _ = B.NSet.exists raise_if_has_removed_verts children - in - (remove_bvs, g |> remove_vertices_from_bbox b remove_bvs) - end -val remove_from_bbox_anon = snd ooo remove_from_bbox - -fun remove_from_all_bboxes vset g = let - (* no need to check parents or args *) - fun remove_from_bbox b vs g = - let - val old_bvs = get_vertices_in_bbox g b - val new_bvs = find_open_subgraph g (V.NSet.subtract old_bvs vs) - val remove_bvs = V.NSet.subtract old_bvs new_bvs - in - g |> remove_vertices_from_bbox b remove_bvs - end -in - g |> B.NSet.fold (fn b => remove_from_bbox b vset) (get_bboxes g) -end - - -fun kill_bbox bb g = - if not (has_bbox g bb) then raise no_such_bbox_exp ("kill_bbox",bb,g) - else - let - (* depending on naming, we may end up recursing to !-boxes later in the - * list, and then try to remove them again; hence no checks on recurse *) - fun kill_bbox' bb g = - if not (has_bbox g bb) then g - else - let - val g' = B.NSet.fold kill_bbox' (get_bbox_children g bb) g - val bbox_vs = get_vertices_in_bbox g' bb - in - g' |> V.NSet.fold delete_vertex bbox_vs - |> delete_bbox bb - end - in - kill_bbox' bb g - end - -fun drop_bbox bn g = - if not (has_bbox g bn) then raise no_such_bbox_exp ("drop_bbox",bn,g) - else g |> delete_bbox bn - -fun merge_bboxes bnset g = - (case B.NSet.pull_local_top bnset of - NONE => raise merge_bbox_exp "given empty bbox name set" - | SOME 
(bn,bnset2) => - let - val parents = get_bbox_parents g bn - fun merge_with_bn bn2 g2 = let - val b2verts = get_vertices_in_bbox g2 bn2 - val bverts = get_vertices_in_bbox g2 bn - val b2parents = get_bbox_parents g2 bn2 - in - if not (B.NSet.eq parents b2parents) - then raise merge_bbox_exp ("parents of "^(B.dest bn)^ - " and "^(B.dest bn2)^" are different") - else if V.NSet.nonempty_intersect bverts b2verts - then raise merge_bbox_exp "!-boxes are not disjoint" - else - g2 |> add_to_bbox_anon bn b2verts - |> delete_bbox bn2 - end - in - (bn, B.NSet.fold merge_with_bn bnset2 g) - end) - -fun expand_bbox bb ((vsub,esub,bsub),g) = let - val _ = if has_bbox g bb then () else raise no_such_bbox_exp ("expand_bbox", bb, g) - val old_bb_children = get_bbox_children g bb - val old_vertices = get_vertices_in_bbox g bb - val old_edges = V.NSet.fold (E.NSet.union_merge o get_adj_edges' g) old_vertices E.NSet.empty - (* if this function is called by replay_bbox_op, vsub might have a bigger domain than - * old_vertices *) - - fun new_vert_name v = V.mk (V.dest v ^ B.dest bb) - - val vsub' = VSub.restrict_dom old_vertices (VSub.extend_fresh_suggest new_vert_name old_vertices vsub) - val esub' = ESub.extend_fresh old_edges esub - val bsub' = BSub.extend_fresh old_bb_children bsub - - fun cp_vert v g = let - val v' = VSub.get vsub' v - in - g |> add_named_vertex' v' (get_vertex_data g v) - |> B.NSet.fold - (fn b => ( - if B.name_eq (bb,b) then I - else add_vertex_to_bbox' (if B.NSet.contains old_bb_children b - then BSub.get bsub' b else b) v' - )) - (get_bboxes_containing_vertex g v) - end - - fun cp_edge e g = let - val e' = ESub.get esub' e - val (old_s,old_t) = (get_edge_source g e, get_edge_target g e) - val s = case VSub.get_opt vsub' old_s of SOME s' => s' | NONE => old_s - val _ = if V.NSet.contains (get_vertices g) s then () - else raise no_such_vertex_exp ("cp_edge", s, g) - val t = case VSub.get_opt vsub' old_t of SOME t' => t' | NONE => old_t - val _ = if V.NSet.contains (get_vertices g) t then () - else raise no_such_vertex_exp ("cp_edge", t, g) - in - g |> add_named_edge' e' (get_edge_dir_and_data g e) s t - end - - fun cp_bbox b g = let - val b' = BSub.get bsub' b - val old_parents = get_bbox_parents g b - val new_parents = B.NSet.subtract (B.NSet.delete bb old_parents) - old_bb_children - val old_children = get_bbox_children g b - val new_children = BSub.img_of_set bsub' old_children - in - g |> add_named_bbox' b' - |> add_parents_to_bbox' b' new_parents - |> add_children_to_bbox' b' new_children - end - -in ((vsub',esub',bsub'), - g |> B.NSet.fold cp_bbox old_bb_children - |> V.NSet.fold cp_vert old_vertices - |> E.NSet.fold cp_edge old_edges - ) -end - -fun copy_bbox bb ((vsub,esub,bsub),g) = let - val bsub' = BSub.extend_fresh (B.NSet.single bb) bsub - val ((vsub',esub',bsub'),g') = expand_bbox bb ((vsub,esub,bsub'),g) - val parents = get_bbox_parents g bb - val new_bb = BSub.get bsub' bb - val new_children = BSub.img_of_set bsub' (get_bbox_children g bb) - val new_vertices = VSub.img_of_set vsub' (get_vertices_in_bbox g bb) -in ((vsub',esub',bsub'), - g' |> add_named_bbox' new_bb - |> add_vertices_to_bbox' new_bb new_vertices - |> add_parents_to_bbox' new_bb parents - |> add_children_to_bbox' new_bb new_children) -end - -fun fresh_copy_bbox bb g = let - val vrn = VSub.mk_from_avoids (get_vertices g) - val ern = ESub.mk_from_avoids (get_edges g) - val brn = BSub.mk_from_avoids (get_bboxes g) - val ((_,_,brn'),g') = copy_bbox bb ((vrn,ern,brn),g) -in copy_bbox bb ((vrn,ern,brn),g) -end - 
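(* Editor's note -- illustrative sketch only, not part of the original
 * bang_graph.ML source recorded in this diff. The (VSub.T * ESub.T * BSub.T)
 * triple threads the vertex, edge and !-box renamings used to give copied or
 * expanded !-box contents fresh names. Assuming a graph value g and an
 * existing !-box named "b0" (both hypothetical here), a typical call looks
 * like:
 *
 *   val bb  = B.mk "b0"
 *   val vrn = VSub.mk_from_avoids (get_vertices g)
 *   val ern = ESub.mk_from_avoids (get_edges g)
 *   val brn = BSub.mk_from_avoids (get_bboxes g)
 *   val ((vrn',ern',brn'), g') = expand_bbox bb ((vrn,ern,brn), g)
 *
 * where vrn', ern' and brn' record how the old vertex, edge and !-box names
 * were mapped to the freshly chosen ones in g'. fresh_copy_bbox above and
 * fresh_expand_bbox below package exactly this pattern. *)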
-val fresh_copy_bbox_anon = snd oo fresh_copy_bbox - -fun fresh_expand_bbox bb g = let - val vrn = VSub.mk_from_avoids (get_vertices g) - val ern = ESub.mk_from_avoids (get_edges g) - val brn = BSub.mk_from_avoids (get_bboxes g) -in expand_bbox bb ((vrn,ern,brn),g) -end - -val fresh_expand_bbox_anon = snd oo fresh_expand_bbox - -fun expand_bbox_op bb g = let - val vrn = VSub.mk_from_avoids (get_vertices g) - val ern = ESub.mk_from_avoids (get_edges g) - val brn = BSub.mk_from_avoids (get_bboxes g) - val ((vrn',_,bbox_map),g') = expand_bbox bb ((vrn,ern,brn),g) - val vmap = VSub.filter_dom (V.NSet.contains (get_vertices_in_bbox g bb)) vrn' -in (BBExpand { bbox=bb, vmap=vmap, bbox_map=bbox_map }, g') -end - -fun kill_bbox_op bb g = ( BBKill bb, kill_bbox bb g ) - - -fun copy_bbox_op bb g = let - val vrn = VSub.mk_from_avoids (get_vertices g) - val ern = ESub.mk_from_avoids (get_edges g) - val brn = BSub.mk_from_avoids (get_bboxes g) - val ((vrn',_,bbox_map),g') = copy_bbox bb ((vrn,ern,brn),g) - val boundary_map = - V.NSet.fold - (fn v => if is_boundary g v - then VSub.add (v,VSub.get vrn' v) - else I) - (get_vertices_in_bbox g bb) - VSub.empty -in (BBCopy { bbox=bb, boundary_map=boundary_map, bbox_map=bbox_map }, g') -end - - -fun replay_bbox_op (BBExpand {bbox,vmap,bbox_map}) g = - let - (* freshen the interior of g w.r.t. vmap *) - val g' = g |> rename_vertices (VSub.empty - |> VSub.extend_fixed (get_boundary g) - |> VSub.avoid_set_in_cod (VSub.get_cod_set vmap) - |> VSub.extend_fresh (get_vertices g)) - val vrn = vmap |> VSub.avoid_set_in_cod (get_vertices g') - val ern = ESub.empty |> ESub.avoid_set_in_cod (get_edges g') - val brn = bbox_map - in snd (expand_bbox bbox ((vrn,ern,brn), g')) - end - | replay_bbox_op (BBKill bbox) g = kill_bbox bbox g - | replay_bbox_op (BBCopy {bbox,boundary_map,bbox_map}) g = - let - (* freshen the interior of g w.r.t. 
boundary_map *) - val g' = g |> rename_vertices (VSub.empty - |> VSub.extend_fixed (get_boundary g) - |> VSub.avoid_set_in_cod (VSub.get_cod_set boundary_map) - |> VSub.extend_fresh (get_vertices g)) - val vrn = boundary_map |> VSub.avoid_set_in_cod (get_vertices g') - val ern = ESub.empty |> ESub.avoid_set_in_cod (get_edges g') - val brn = bbox_map - in snd (copy_bbox bbox ((vrn,ern,brn), g')) - end - -fun replay_bbox_ops bb_ops = fold_rev replay_bbox_op bb_ops - -fun pretty_edge_desc g e = - Pretty.block[E.pretty_name e, - Pretty.str "[", - V.pretty_name (get_edge_source g e), - Pretty.str (case get_edge_dir g e of Directed => " -> " | Undirected => "--"), - V.pretty_name (get_edge_target g e), - Pretty.str "]" - ] - - -fun pretty g = - Pretty.chunks - [ Pretty.str "BangGraph{", - Pretty.block [Pretty.str " ", (* indent *) - Pretty.chunks ([ - Pretty.block [Pretty.str "Node-vertices: ", V.NSet.pretty (get_node_vertices g)], - Pretty.block [Pretty.str "Wire-vertices: ", V.NSet.pretty (get_wire_vertices g)], - Pretty.block [Pretty.str "Vertex data: ", VTab.pretty pretty_vdata (get_vdata g)], - Pretty.block [Pretty.str "Edges: ", Map.pretty "" (pretty_edge_desc g) (pretty_edata o snd) (get_edata g)], - Pretty.block [Pretty.str "Bang-boxes: ", B.NSet.pretty (get_bboxes g)], - Pretty.block [Pretty.str "BB contains: ", BVRel.pretty (get_bbox_rel g)], - Pretty.block [Pretty.str "BB children: ", BBRel.pretty (get_bbox_child_rel g)]] - )], - Pretty.str "}" - ] - -val print = Pretty.writeln o pretty - -end - - diff --git a/core/graph/bang_graph.sig.ML b/core/graph/bang_graph.sig.ML deleted file mode 100644 index bd1a8b1f..00000000 --- a/core/graph/bang_graph.sig.ML +++ /dev/null @@ -1,153 +0,0 @@ -(* FIXME: put this in a struct? *) -datatype bbox_op = BBKill of B.name - | BBExpand of { - bbox : B.name, - vmap : VSub.T, - bbox_map : BSub.T } - | BBCopy of { - bbox : B.name, - boundary_map : VSub.T, - bbox_map : BSub.T } -fun bbox_op_added_bboxes (BBKill _) = B.NSet.empty - | bbox_op_added_bboxes (BBExpand {bbox_map,...}) = BSub.get_cod_set bbox_map - | bbox_op_added_bboxes (BBCopy {bbox_map,...}) = BSub.get_cod_set bbox_map -fun bbox_op_get_bbox_copy (BBKill _) _ = raise Match - | bbox_op_get_bbox_copy (BBExpand {bbox_map,...}) b = - BSub.get bbox_map b - | bbox_op_get_bbox_copy (BBCopy {bbox_map,...}) b = - BSub.get bbox_map b - -signature BANG_GRAPH = -sig - include OGRAPH - - exception bbox_not_open_exp of (V.name * V.name) * B.name * T - exception duplicate_bbox_exp of B.name * T - exception no_such_bbox_exp of string * B.name * T - exception bbox_bad_parent_exp of B.name * B.name * T - exception merge_bbox_exp of string - - - val has_bbox : T -> B.name -> bool - val get_bboxes : T -> B.NSet.T - (* Does not contain empty !-boxes (or unboxed vertices) *) - (*val get_bbox_rel : T -> BVRel.T*) - (* Return the mapping from parent !-boxes to child !-boxes *) - (*val get_bbox_child_rel : T -> BBRel.T*) - - (* all vertices that are in at least one !-box *) - val get_bboxed : T -> V.NSet.T - (* all vertices that are in at no !-boxes *) - val get_unbboxed : T -> V.NSet.T - - (* no_such_vertex_exp if vertex does not exist *) - val is_bboxed : T -> V.name -> bool - - val get_empty_bboxes : T -> B.NSet.T - - val add_named_bbox : B.name -> T -> T - val add_bbox : T -> (B.name * T) - val add_bbox_anon : T -> T - - (* does not delete the contained vertices *) - (* identity if name does not exist *) - val delete_bbox : B.name -> T -> T - - (* no_such_bbox_exp if !-box does not exist *) - (* The minimum open 
subgraph containing the - * given vertices is added, to ensure the - * !-box remains an open subgraph. - * - * Returns the actual vertices added (always - * a superset of the vertex set given). - * - * TODO: improve name? add_subgraph_to_bbox or something... - *) - val add_to_bbox : B.name -> V.NSet.T -> T -> (V.NSet.T * T) - val add_to_bbox_anon : B.name -> V.NSet.T -> T -> T - val remove_from_all_bboxes : V.NSet.T -> T -> T - - (* no_such_bbox_exp if !-box does not exist *) - (* bbox_bad_parent_exp if any child !-box contains vertices that will be - * removed *) - (* does not raise if verts are not in !-box *) - (* Does not remove any vertices that are - * necessary for the !-box to remain open - * - * Returns the actual vertices removed (always - * a subset of the vertex set given). - *) - val remove_from_bbox : B.name -> V.NSet.T -> T -> (V.NSet.T * T) - val remove_from_bbox_anon : B.name -> V.NSet.T -> T -> T - - - (* Set parent of the first bbox to be the second bbox, or clears - * when the second arg is NONE. *) - (*val set_bbox_parent_opt : B.name -> B.name option -> T -> T*) - - (* raises no_such_bbox_exp if parent or children not in graph *) - val add_children_to_bbox : B.name -> B.NSet.T -> T -> T - val add_child_to_bbox : B.name -> B.name -> T -> T - (* raises no_such_bbox_exp if parent or children not in graph *) - (* no-op if removing the link would break transitivity *) - val remove_child_from_bbox : B.name -> B.name -> T -> T - val clear_bbox_children : B.name -> T -> T - - val get_bbox_parents : T -> B.name -> B.NSet.T - val get_bbox_children : T -> B.name -> B.NSet.T - - (* returns true if bbox has parent(s) *) - val bbox_has_parents : T -> B.name -> bool - - val bbox_has_children : T -> B.name -> bool - - (* we don't copy updaters/setters for internal structures, - * since using these can lead to an inconsistent state *) - - (* no_such_bbox_exp if bbox does not exist *) - val get_vertices_in_bbox : T -> B.name -> V.NSet.T - - (* no_such_vertex_exp if vertex does not exist *) - val get_bboxes_containing_vertex : T -> V.name -> B.NSet.T - - (* no_such_bbox_exp *) - val kill_bbox : B.name -> T -> T - (* no_such_bbox_exp *) - val drop_bbox : B.name -> T -> T - - val merge_bboxes : B.NSet.T -> T -> B.name * T - - val expand_bbox : B.name -> (VSub.T * ESub.T * BSub.T) * T - -> (VSub.T * ESub.T * BSub.T) * T - - val copy_bbox : B.name -> (VSub.T * ESub.T * BSub.T) * T - -> (VSub.T * ESub.T * BSub.T) * T - - - val fresh_copy_bbox : B.name -> T -> (VSub.T * ESub.T * BSub.T) * T - val fresh_copy_bbox_anon : B.name -> T -> T - val fresh_expand_bbox : B.name -> T -> (VSub.T * ESub.T * BSub.T) * T - val fresh_expand_bbox_anon : B.name -> T -> T - - (* apply the given operation and return the new graph and a replayable bbox op *) - val expand_bbox_op : B.name -> T -> bbox_op * T - val kill_bbox_op : B.name -> T -> bbox_op * T - val copy_bbox_op : B.name -> T -> bbox_op * T - - val replay_bbox_op : bbox_op -> T -> T - - (* replay this list of bbox ops, from back to front (as this is how they are stored) *) - val replay_bbox_ops : bbox_op list -> T -> T - - val rename_bang_graph : (VSub.T * ESub.T * BSub.T) -> T -> - (VSub.T * ESub.T * BSub.T) * T - val rename_bang_graph_anon : (VSub.T * ESub.T * BSub.T) -> T -> T - val rename_bboxes : BSub.T -> T -> T - val rename_bbox : B.name -> B.name -> T -> T - val rename_bbox_opt : B.name -> B.name -> T -> T option - - - val get_bbox_annotation : T -> B.name -> Json.jobj - val get_bbox_annotation_opt : T -> B.name -> Json.jobj option - val 
set_bbox_annotation : B.name * Json.jobj -> T -> T -end diff --git a/core/graph/graph_annotations.ML b/core/graph/graph_annotations.ML deleted file mode 100644 index f0137927..00000000 --- a/core/graph/graph_annotations.ML +++ /dev/null @@ -1,274 +0,0 @@ -(** - * Annotations on a graph - * - * Assumes all components have the same annotation type. - *) -signature GRAPH_ANNOTATIONS = -sig - (* The annotation structure *) - type T; - (* The type of the annotations *) - type data; - - (* With no annotations *) - val init: T; - - val empty_data : data; - - val get_graph_annotation : T -> data; - val get_vertex_annotation : T -> V.name -> data; - val get_edge_annotation : T -> E.name -> data; - val get_bbox_annotation : T -> B.name -> data; - - val get_vertex_annotation_tab : T -> data VTab.T; - val get_edge_annotation_tab : T -> data ETab.T; - val get_bbox_annotation_tab : T -> data BTab.T; - - val set_graph_annotation : data -> T -> T; - val set_vertex_annotation : V.name -> data -> T -> T; - val set_edge_annotation : E.name -> data -> T -> T; - val set_bbox_annotation : B.name -> data -> T -> T; - - val update_graph_annotation : (data -> data) -> T -> T; - val update_vertex_annotation : V.name -> (data -> data) -> T -> T; - val update_edge_annotation : E.name -> (data -> data) -> T -> T; - val update_bbox_annotation : B.name -> (data -> data) -> T -> T; - - val remove_vertex_annotation : V.name -> T -> T; - val remove_edge_annotation : E.name -> T -> T; - val remove_bbox_annotation : B.name -> T -> T; - - val rename_vertex : V.name -> V.name -> T -> T; - val rename_edge : E.name -> E.name -> T -> T; - val rename_bbox : B.name -> B.name -> T -> T; - - (* Arg1 is components to retain *) - val cleanup : (V.NSet.T * E.NSet.T * B.NSet.T) -> T -> T; - val apply_renaming : (VSub.T * ESub.T * BSub.T) -> T -> T; - - val pretty : T -> Pretty.T; - val print : T -> unit; - val pretty_data : data -> Pretty.T; - val print_data : data -> unit; -end; - -functor GraphAnnotations( - type data - val data_init : data - val data_pretty : data -> Pretty.T -) : GRAPH_ANNOTATIONS = -struct - type data = data; - val empty_data = data_init; - - datatype T = GraphAnn of - { - graph_annotation : data, - vertex_annotation_tab : data VTab.T, - edge_annotation_tab : data ETab.T, - bbox_annotation_tab : data BTab.T - }; - - val init = GraphAnn - { - graph_annotation = data_init, - vertex_annotation_tab = VTab.empty, - edge_annotation_tab = ETab.empty, - bbox_annotation_tab = BTab.empty - }; - - fun update_graph_annotation f (GraphAnn r) = GraphAnn { - graph_annotation = f(#graph_annotation r), - vertex_annotation_tab = #vertex_annotation_tab r, - edge_annotation_tab = #edge_annotation_tab r, - bbox_annotation_tab = #bbox_annotation_tab r - } - - fun update_vertex_annotation_tab f (GraphAnn r) = GraphAnn { - graph_annotation = #graph_annotation r, - vertex_annotation_tab = f(#vertex_annotation_tab r), - edge_annotation_tab = #edge_annotation_tab r, - bbox_annotation_tab = #bbox_annotation_tab r - } - - fun update_edge_annotation_tab f (GraphAnn r) = GraphAnn { - graph_annotation = #graph_annotation r, - vertex_annotation_tab = #vertex_annotation_tab r, - edge_annotation_tab = f(#edge_annotation_tab r), - bbox_annotation_tab = #bbox_annotation_tab r - } - - fun update_bbox_annotation_tab f (GraphAnn r) = GraphAnn { - graph_annotation = #graph_annotation r, - vertex_annotation_tab = #vertex_annotation_tab r, - edge_annotation_tab = #edge_annotation_tab r, - bbox_annotation_tab = f(#bbox_annotation_tab r) - } - - fun 
get_graph_annotation (GraphAnn r) = #graph_annotation r - fun get_vertex_annotation_tab (GraphAnn r) = #vertex_annotation_tab r - fun get_edge_annotation_tab (GraphAnn r) = #edge_annotation_tab r - fun get_bbox_annotation_tab (GraphAnn r) = #bbox_annotation_tab r - - fun set_graph_annotation x = update_graph_annotation (fn _ => x) - - fun get_vertex_annotation ann v = - case VTab.get_opt (get_vertex_annotation_tab ann) v - of SOME x => x - | NONE => data_init; - - fun get_edge_annotation ann e = - case ETab.get_opt (get_edge_annotation_tab ann) e - of SOME x => x - | NONE => data_init; - - fun get_bbox_annotation ann e = - case BTab.get_opt (get_bbox_annotation_tab ann) e - of SOME x => x - | NONE => data_init; - - fun set_vertex_annotation v d = - update_vertex_annotation_tab (VTab.set (v,d)); - fun set_edge_annotation e d = - update_edge_annotation_tab (ETab.set (e,d)); - fun set_bbox_annotation b d = - update_bbox_annotation_tab (BTab.set (b,d)); - - fun update_vertex_annotation v f ann = - update_vertex_annotation_tab (VTab.map_default f data_init v) ann - fun update_edge_annotation e f ann = - update_edge_annotation_tab (ETab.map_default f data_init e) ann - fun update_bbox_annotation b f ann = - update_bbox_annotation_tab (BTab.map_default f data_init b) ann - - val remove_vertex_annotation = update_vertex_annotation_tab o VTab.delete; - val remove_edge_annotation = update_edge_annotation_tab o ETab.delete; - val remove_bbox_annotation = update_bbox_annotation_tab o BTab.delete; - - fun cleanup (vs,es,bs) = let - fun upd_vtab vtab = VTab.fold - (fn (k,_) => if V.NSet.contains vs k then I else VTab.delete k) - vtab vtab - fun upd_etab etab = ETab.fold - (fn (k,_) => if E.NSet.contains es k then I else ETab.delete k) - etab etab - fun upd_btab btab = BTab.fold - (fn (k,_) => if B.NSet.contains bs k then I else BTab.delete k) - btab btab - in - (update_vertex_annotation_tab upd_vtab) - o (update_edge_annotation_tab upd_etab) - o (update_bbox_annotation_tab upd_btab) - end; - - fun apply_renaming (vrn,ern,brn) ann = - let - val vrni = vrn |> VSub.extend_fixed (VTab.get_dom_set (get_vertex_annotation_tab ann)) |> VSub.inverse_of - val erni = ern |> ESub.extend_fixed (ETab.get_dom_set (get_edge_annotation_tab ann)) |> ESub.inverse_of - val brni = brn |> BSub.extend_fixed (BTab.get_dom_set (get_bbox_annotation_tab ann)) |> BSub.inverse_of - in - ann |> update_vertex_annotation_tab (fn m => VTab.compose (m,vrni)) - |> update_edge_annotation_tab (fn m => ETab.compose (m,erni)) - |> update_bbox_annotation_tab (fn m => BTab.compose (m,brni)) - end - - fun rename_vertex old new = - apply_renaming (VSub.empty |> VSub.add (old,new), ESub.empty, BSub.empty) - - fun rename_edge old new = - apply_renaming (VSub.empty, ESub.empty |> ESub.add (old,new), BSub.empty) - - fun rename_bbox old new = - apply_renaming (VSub.empty, ESub.empty, BSub.empty |> BSub.add (old,new)) - - fun pretty ann = - Pretty.chunks - [Pretty.str "Graph Annotations {", - Pretty.block - [Pretty.str " ", - Pretty.chunks - [Pretty.block - [Pretty.str "Graph Annotation: ", - data_pretty (get_graph_annotation ann)], - Pretty.block - [Pretty.str "Vertex Annotations: ", - VTab.pretty data_pretty (get_vertex_annotation_tab ann)], - Pretty.block - [Pretty.str "Edge Annotations: ", - ETab.pretty data_pretty (get_edge_annotation_tab ann)], - Pretty.block - [Pretty.str "!-Box Annotations: ", - BTab.pretty data_pretty (get_bbox_annotation_tab ann)]]], - Pretty.str "}"]; - - val print = Pretty.writeln o pretty; - val pretty_data = data_pretty; 
- val print_data = Pretty.writeln o pretty_data; -end; - -(** - * Table of annotations on a graph - * - * Each component has an associated table of strings. - *) -signature GRAPH_STRING_TABLE_ANNOTATIONS = -sig - include GRAPH_ANNOTATIONS where type data = string Symtab.table; - - val get_graph_property_opt : T -> string -> string option; - val get_vertex_property_opt : T -> V.name -> string -> string option; - val get_edge_property_opt : T -> E.name -> string -> string option; - val get_bbox_property_opt : T -> B.name -> string -> string option; - - val set_graph_property : (string * string) -> T -> T; - val set_vertex_property : V.name -> (string * string) -> T -> T; - val set_edge_property : E.name -> (string * string) -> T -> T; - val set_bbox_property : B.name -> (string * string) -> T -> T; - - val remove_graph_property : string -> T -> T; - val remove_vertex_property : V.name -> string -> T -> T; - val remove_edge_property : E.name -> string -> T -> T; - val remove_bbox_property : B.name -> string -> T -> T; -end; - -signature GRAPH_JSON_OBJECT_ANNOTATIONS = - GRAPH_ANNOTATIONS where type data = Json.jobj - -structure GraphStringTableAnnotations : GRAPH_STRING_TABLE_ANNOTATIONS = -struct - fun pretty_symtab tab = let - fun pretty_kv (k,v) = Pretty.str (" " ^ k ^ "=> " ^ v ^ ", ") - in - Pretty.chunks (map pretty_kv (Symtab.dest tab)) - end; - - structure Ann = GraphAnnotations( - type data = string Symtab.table - val data_init = Symtab.empty - val data_pretty = pretty_symtab - ); - open Ann; - - val get_graph_property_opt = Symtab.lookup o Ann.get_graph_annotation; - val get_vertex_property_opt = Symtab.lookup oo Ann.get_vertex_annotation; - val get_edge_property_opt = Symtab.lookup oo Ann.get_edge_annotation; - val get_bbox_property_opt = Symtab.lookup oo Ann.get_bbox_annotation; - - val set_graph_property = Ann.update_graph_annotation o Symtab.update; - fun set_vertex_property v = (Ann.update_vertex_annotation v) o Symtab.update; - fun set_edge_property e = (Ann.update_edge_annotation e) o Symtab.update; - fun set_bbox_property b = (Ann.update_bbox_annotation b) o Symtab.update; - - val remove_graph_property = Ann.update_graph_annotation o Symtab.delete; - fun remove_vertex_property v = (Ann.update_vertex_annotation v) o Symtab.delete; - fun remove_edge_property e = (Ann.update_edge_annotation e) o Symtab.delete; - fun remove_bbox_property b = (Ann.update_bbox_annotation b) o Symtab.delete; -end; - -structure GraphJsonObjectAnnotations : GRAPH_JSON_OBJECT_ANNOTATIONS = GraphAnnotations( - type data = Json.jobj - val data_init = Json.empty_obj - fun data_pretty obj = Json.pretty (Json.Object obj) -) - - diff --git a/core/graph/graph_data.ML b/core/graph/graph_data.ML deleted file mode 100644 index b0190242..00000000 --- a/core/graph/graph_data.ML +++ /dev/null @@ -1,62 +0,0 @@ -exception unknown_typestring_exp of string - -signature GRAPH_DATA = -sig - type psubst (* partial substitutions *) - type subst (* complete (aka solved) substitution *) - type nvdata (* node-vertex data *) - type edata (* edge data *) - - val pretty_theory_name : Pretty.T - - (* A partial substitution contains a collection of constraints on pattern - * variables. It is initialised by passing in a table containing node/edge - * data from the pattern and target graphs. This can be used e.g. to ensure that names - * introduced in the pattern are fresh. 
*) - val init_psubst_from_data : - nvdata VTab.T * edata ETab.T -> - nvdata VTab.T * edata ETab.T -> psubst - - (* The match_xxx functions update a psubst or return NONE if - * no match is possible. *) - val match_nvdata : nvdata * nvdata -> psubst -> psubst option - val match_edata : edata * edata -> psubst -> psubst option - - (* equality for data. used for subgraph and graph equality *) - val nvdata_eq : nvdata * nvdata -> bool - val edata_eq : edata * edata -> bool - - (* default vertex and edge data *) - val default_nvdata : nvdata - val default_edata : edata - - (* COMPAT: needed for old controller *) - val default_nvdata_of_typestring : string -> nvdata - (* COMPAT: needed for old controller *) - val default_edata_of_typestring : string -> edata - - val pretty_nvdata : nvdata -> Pretty.T - val pretty_edata : edata -> Pretty.T - - (* For cases where node/edge data cannot be unified greedily (or uniquely), - * this is called just before the match is finalised. One match will be - * produced for each subst returned. In the case where this hook isn't - * needed, let "type subst = psubst", and "solve_psubst = Seq.single". *) - val solve_psubst : psubst -> subst Seq.seq - - (* Apply substitutions to data. *) - val subst_in_nvdata : subst -> nvdata -> subst * nvdata - val subst_in_edata : subst -> edata -> subst * edata -end - -(* Convenience structure for defining GRAPH_DATA with empty edge data *) -structure EmptyEdgeData = -struct - type edata = unit - val default_edata = () - fun edata_eq ((),()) = true - fun match_edata ((),()) m = SOME m - fun subst_in_edata sub () = (sub, ()) - fun pretty_edata () = Pretty.str "()" - fun default_edata_of_typestring _ = () -end diff --git a/core/graph/nhd.ML b/core/graph/nhd.ML deleted file mode 100644 index 291f47ca..00000000 --- a/core/graph/nhd.ML +++ /dev/null @@ -1,205 +0,0 @@ -signature NHD = -sig - -(* an edge expression *) -datatype expr = - I of E.name | (* input *) - O of E.name | (* output *) - U of E.name | (* undirected *) - L of B.name * (expr list) | (* bbox (expand left) *) - R of B.name * (expr list) (* bbox (expand right) *) - -datatype dir = IN | OUT | UNDIR -type ectx = E.name * dir * B.name list - -type T - -val mk : expr list -> T -val empty : T - -val of_json : Json.json -> T -val json_of : T -> Json.json - -val rename : (ESub.T * BSub.T) -> T -> T -val eq : T * T -> bool - -val reduce : T -> T -val drop : B.name -> T -> T -val kill : B.name -> T -> T -val expand : B.name -> (ESub.T * BSub.T) -> T -> T -val copy : B.name -> (ESub.T * BSub.T) -> T -> T - -(* get a list of all edge contexts *) -val get_edge_contexts : T -> ectx list - -(* get edge contexts referring to a particular name. There should be - * one for normal edges and two for self-loops. 
*) -val get_edge_contexts_for : E.name -> T -> ectx list - - -exception nhd_exn of string - -end - -structure Nhd : NHD = -struct - -exception nhd_exn of string - -datatype expr = - I of E.name | - O of E.name | - U of E.name | - L of B.name * (expr list) | - R of B.name * (expr list) - -datatype dir = IN | OUT | UNDIR -type ectx = E.name * dir * B.name list - -datatype T = NHD of expr list - -val mk = NHD - -val empty = NHD [] - -fun rename_expr (emap, bmap) (I e) = I (ESub.get emap e) - | rename_expr (emap, bmap) (O e) = O (ESub.get emap e) - | rename_expr (emap, bmap) (U e) = U (ESub.get emap e) - | rename_expr (emap, bmap) (L (bb, es)) = - L (BSub.get bmap bb, map (rename_expr (emap, bmap)) es) - | rename_expr (emap, bmap) (R (bb, es)) = - R (BSub.get bmap bb, map (rename_expr (emap, bmap)) es) - -fun rename (emap, bmap) (NHD exprs) = mk (map (rename_expr (emap, bmap)) exprs) - -fun eq (NHD exprs, NHD exprs') = -let - fun ex_eq (L (bb,es), L (bb',es')) = - (B.name_eq (bb, bb') andalso - ListPair.allEq ex_eq (es,es') - handle ListPair.UnequalLengths => false) - | ex_eq (R (bb,es), R (bb',es')) = - (B.name_eq (bb, bb') andalso - ListPair.allEq ex_eq (es,es') - handle ListPair.UnequalLengths => false) - | ex_eq (I e, I e') = E.name_eq (e,e') - | ex_eq (O e, O e') = E.name_eq (e,e') - | ex_eq (U e, U e') = E.name_eq (e,e') - | ex_eq _ = false -in ListPair.allEq ex_eq (exprs,exprs') - handle ListPair.UnequalLengths => false -end - -(* remove empty bboxes *) -fun reduce (NHD exprs) = -let - fun reduce_e (L (bb, es)) = - (case maps reduce_e es of [] => [] | es' => [L (bb, es')]) - | reduce_e (R (bb, es)) = - (case maps reduce_e es of [] => [] | es' => [R (bb, es')]) - | reduce_e e = [e] -in mk (maps reduce_e exprs) -end - -fun kill bb (NHD exprs) = -let - fun kill_e (L (bb', es)) = - if B.name_eq (bb, bb') - then NONE - else SOME (L (bb', map_filter kill_e es)) - | kill_e (R (bb', es)) = - if B.name_eq (bb, bb') - then NONE - else SOME (R (bb', map_filter kill_e es)) - | kill_e e = SOME e -in reduce (mk (map_filter kill_e exprs)) -end - -fun drop bb (NHD exprs) = -let - fun drop_e (L (bb', es)) = - if B.name_eq (bb, bb') - then maps drop_e es - else [L (bb', maps drop_e es)] - | drop_e (R (bb', es)) = - if B.name_eq (bb, bb') - then maps drop_e es - else [R (bb', maps drop_e es)] - | drop_e e = [e] -in mk (maps drop_e exprs) -end - -fun expand bb (emap, bmap) (NHD exprs) = -let - fun expand_e (L (bb', exprs')) = - if B.name_eq (bb, bb') - then (map (rename_expr (emap, bmap)) exprs') @ [L (bb, exprs')] - else [L (bb', maps expand_e exprs')] - | expand_e (R (bb', exprs')) = - if B.name_eq (bb, bb') - then R (bb, exprs') :: (map (rename_expr (emap, bmap)) exprs') - else [R (bb', maps expand_e exprs')] - | expand_e expr = [expr] -in mk (maps expand_e exprs) -end - -fun copy bb (emap, bmap) (NHD exprs) = -let - fun copy_e (expr as L (bb', exprs')) = - if B.name_eq (bb, bb') - then [rename_expr (emap, bmap) expr, expr] - else [L (bb', maps copy_e exprs')] - | copy_e (expr as R (bb', exprs')) = - if B.name_eq (bb, bb') - then [expr, rename_expr (emap, bmap) expr] - else [R (bb', maps copy_e exprs')] - | copy_e expr = [expr] -in mk (maps copy_e exprs) -end - - -fun get_edge_contexts (NHD exprs) = -let - fun ctxs (L (bb', exprs')) = map (fn (e,d,bbs) => (e,d,bb' :: bbs)) (maps ctxs exprs') - | ctxs (R (bb', exprs')) = map (fn (e,d,bbs) => (e,d,bb' :: bbs)) (maps ctxs exprs') - | ctxs (I e) = [(e, IN, [])] - | ctxs (O e) = [(e, OUT, [])] - | ctxs (U e) = [(e, UNDIR, [])] -in maps ctxs exprs -end - -fun 
get_edge_contexts_for e nhd = filter (fn (e',_,_) => E.name_eq (e,e')) (get_edge_contexts nhd) - -fun expr_of_json (Json.Array lst) = - (case lst - of (Json.String bb :: Json.String "<" :: rest) => - L (B.mk bb, map expr_of_json rest) - | (Json.String bb :: Json.String ">" :: rest) => - R (B.mk bb, map expr_of_json rest) - | _ => raise nhd_exn "expected: [BB_NAME, DIR, ....]") - | expr_of_json (Json.String e) = - (case String.explode e - of (#"i" :: #":" :: e') => I (E.mk (String.implode e')) - | (#"o" :: #":" :: e') => O (E.mk (String.implode e')) - | (#"u" :: #":" :: e') => U (E.mk (String.implode e')) - | _ => raise nhd_exn "expected: 'i:...', 'o:...', or 'u:...'") - | expr_of_json _ = raise nhd_exn "expected: array or string" - -fun of_json (Json.Object obj) = - (case Json.lookup obj "expr" - of SOME (Json.Array lst) => mk (map expr_of_json lst) - | SOME _ => raise nhd_exn "expected: array" - | NONE => raise nhd_exn "expected: field 'expr'") - | of_json _ = raise nhd_exn "expected: object" - -fun json_of_expr (L (bb, es)) = - Json.Array ([Json.String (B.dest bb), Json.String "<"] @ map json_of_expr es) - | json_of_expr (R (bb, es)) = - Json.Array ([Json.String (B.dest bb), Json.String ">"] @ map json_of_expr es) - | json_of_expr (I e) = Json.String ("i:" ^ E.dest e) - | json_of_expr (O e) = Json.String ("o:" ^ E.dest e) - | json_of_expr (U e) = Json.String ("u:" ^ E.dest e) - -fun json_of (NHD exps) = Json.mk_object [("expr", Json.Array (map json_of_expr exps))] - -end diff --git a/core/graph/ograph.sig.ML b/core/graph/ograph.sig.ML deleted file mode 100644 index 2872fb60..00000000 --- a/core/graph/ograph.sig.ML +++ /dev/null @@ -1,378 +0,0 @@ -(* each edge is either directed or undirected. Undirected edges are stored the same as - * directed edges, but may be matched in either direction. *) -datatype dir_or_undir = Directed | Undirected -fun dir_eq Directed Directed = true - | dir_eq Undirected Undirected = true - | dir_eq _ _ = false - -signature OGRAPH_SHARING = -sig - type T - type nvdata - type edata - type psubst - type subst -end - -signature OGRAPH = -sig - -type T (* an open graph *) - -type nvdata (* node-vertex data *) -type edata -type psubst (* a partial substitution on data *) -type subst (* a complete (solved) substition on data *) -datatype vdata = NVert of nvdata | WVert (* vertex data *) - - -structure Sharing : OGRAPH_SHARING -sharing type Sharing.T = T -sharing type Sharing.nvdata = nvdata -sharing type Sharing.edata = edata -sharing type Sharing.psubst = psubst -sharing type Sharing.subst = subst - -type wire = V.name * V.name * V.NSet.T * E.NSet.T (* a chain of edges, representing a wire *) - -(* Thin wrappers to Graph.Data substructure *) -val init_psubst : T -> T -> psubst -val match_vdata : vdata * vdata -> psubst -> psubst option -val match_edata : edata * edata -> psubst -> psubst option -val vdata_eq : vdata * vdata -> bool -val edata_eq : edata * edata -> bool -val default_nv_vdata : vdata -val default_wv_vdata : vdata -val default_edata : edata -(* COMPAT: needed for old controller *) -val default_nv_vdata_of_typestring : string -> vdata -(* COMPAT: needed for old controller *) -val default_edata_of_typestring : string -> edata - -val pretty_vdata : vdata -> Pretty.T -val pretty_edata : edata -> Pretty.T -val solve_psubst : psubst -> subst Seq.seq - -(* Apply substitutions to data. 
*) -val subst_in_vdata : subst -> vdata -> subst * vdata -val subst_in_edata : subst -> edata -> subst * edata -val apply_data_subst : subst -> T -> subst * T - - -val pretty : T -> Pretty.T -val print : T -> unit -val empty : T - -(*********************************) -(********** EXCEPTIONS ***********) -(*********************************) - -exception no_such_vertex_exp of string * V.name * T -exception duplicate_vertex_exp of V.name * T - -exception no_such_edge_exp of string * E.name * T -exception duplicate_edge_exp of E.name * T - -exception bad_graph_merge_exp of string * T * T - -exception not_an_endpoint_exp of string * E.name * V.name * T - -exception wire_vertex_exp of string * T * V.name -exception wire_exp of string * T * wire - -exception plugging_exp of T * V.name * V.name - -(*******************************) -(*********** GRAPHS ************) -(*******************************) - -(* graph is empty *) -val is_empty : T -> bool - -(* containment checks *) -val has_vertex : T -> V.name -> bool -val has_edge : T -> E.name -> bool - -(* getting stuff from graphs (exception if not there) *) -val get_edge_source : T -> E.name -> V.name -val get_edge_target : T -> E.name -> V.name - -val get_edge_info : T -> E.name -> (dir_or_undir * edata) * (V.name * V.name) - -(* given an edge and vertex, get the other end *) -val edge_get_other_vertex : T -> E.name -> V.name -> V.name - -(* ignorant to direction *) -val get_in_edges : T -> V.name -> E.NSet.T -val get_out_edges : T -> V.name -> E.NSet.T -val get_adj_edges : T -> V.name -> E.NSet.T - -(* filters by direction *) -val get_in_dir_edges : T -> V.name -> E.NSet.T -val get_out_dir_edges : T -> V.name -> E.NSet.T -val get_adj_undir_edges : T -> V.name -> E.NSet.T - -(* get all adjacent vertices (not including the given vertex or vertices) *) -val get_adj_vertices : T -> V.name -> V.NSet.T -val get_adj_vertices_to_set : T -> V.NSet.T -> V.NSet.T - -(* vertices connected to a directed out-edge of the given one *) -val get_successor_vertices : T -> V.name -> V.NSet.T - -(* vertices connected to a directed in-edge of the given one *) -val get_predecessor_vertices : T -> V.name -> V.NSet.T - -(* vertices connected to an undirected edge of the given one *) -val get_sibling_vertices : T -> V.name -> V.NSet.T - -(* get incoming, outgoing, undirected edges adjacent to vertex *) -val adj_edge_classes: T -> V.name -> (E.NSet.T*E.NSet.T*E.NSet.T) - - -val get_vdata_tab : T -> vdata VTab.T -val get_v_anno_tab : T -> Json.jobj VTab.T -val get_edata_tab : T -> edata ETab.T - -val get_vertex_data : T -> V.name -> vdata -val get_edge_data : T -> E.name -> edata -val get_edge_dir : T -> E.name -> dir_or_undir -val get_edge_dir_and_data : T -> E.name -> dir_or_undir * edata - -(* setting data *) -val set_vertex_data : vdata -> V.name -> T -> T -val set_edge_data : edata -> E.name -> T -> T -val set_edge_dir : dir_or_undir -> E.name -> T -> T - -(* updating data *) -val update_vertex_data : (vdata -> vdata) -> V.name -> T -> T -val update_edge_data : (edata -> edata) -> E.name -> T -> T -val update_edge_dir : (dir_or_undir -> dir_or_undir) -> E.name -> T -> T - -val is_wire_vertex : T -> V.name -> bool -val is_node_vertex : T -> V.name -> bool -val get_vertices : T -> V.NSet.T -val get_wire_vertices : T -> V.NSet.T -val get_node_vertices : T -> V.NSet.T - -val get_edges : T -> E.NSet.T - -(* check if two graphs are exactly the same (names, data, and structure) *) -(* note that the direction of Undirected edges *does* matter *) -val is_subgraph : T -> T -> 
bool -(* (exact_eq g1 g2) <=> (is_subgraph g1 g2) andalso (is_subgraph g2 g1) *) -val exact_eq : T -> T -> bool - -(* adding a vertex gives back unique name for it and the new graph *) -val add_vertex : vdata -> T -> V.name * T -val add_vertex_anon : vdata -> T -> T -(* raises duplicate_vertex_exp on clash *) -val add_named_vertex : V.name -> vdata -> T -> T - -(* V.names must already exist, else raises: no_such_vertex_exp *) -val add_edge : dir_or_undir * edata -> V.name -> V.name -> T -> E.name * T -val add_edge_anon : dir_or_undir * edata -> V.name -> V.name -> T -> T -val add_named_edge - : E.name -> dir_or_undir * edata -> V.name -> V.name -> T -> T (* can raise *) - - - -(* delete is no-op if there is no such edge/vertex *) -val delete_edge : E.name -> T -> T -val delete_vertex : V.name -> T -> T - -(* merge two graphs; things with common names are unified *) -(* raises bad_graph_merge_exp if the graphs could not be merged - * (eg: it would add too many edges to wire-vertices) *) -val merge : T -> T -> T - -val get_arity : T -> V.name -> Arity.T -val get_self_loops : T -> V.name -> E.NSet.T - - -(* remove the first graph from the second, assumes first is a subgraph *) -val delete_subgraph : T -> T -> T - -(* edges between two vertices *) -val has_edges_between : T -> V.name -> V.name -> bool -val edges_between : T -> V.name -> V.name -> E.NSet.T - -val dir_edges_between : T -> V.name -> V.name - -> E.NSet.T (* from fst to snd vertex *) - * E.NSet.T (* from snd to fst vertex *) - - - - -(* Renaming *) -(* Renames of non-existent items are successful, and have no effect *) - -val rename_ograph : (VSub.T * ESub.T) -> T -> (VSub.T * ESub.T) * T -val rename_ograph_anon : (VSub.T * ESub.T) -> T -> T -(* rename a bunch of vertices; raises VSub.name_clash_exp if there is a clash *) -val rename_vertices : VSub.T -> T -> T -(* rename a vertex; raises duplicate_vertex_exp if dest vertex name exists *) -val rename_vertex : V.name -> V.name -> T -> T -(* rename a vertex; NONE if dest vertex name exists *) -val rename_vertex_opt : V.name -> V.name -> T -> T option -(* rename a bunch of edges; raises ESub.name_clash_exp if there is a clash *) -val rename_edges : ESub.T -> T -> T -(* rename an edge; raises duplicate_vertex_exp if dest edge name exists *) -val rename_edge : E.name -> E.name -> T -> T -(* rename an edge; NONE if dest edge name exists *) -val rename_edge_opt : E.name -> E.name -> T -> T option - - - -(*******************************) -(********* OPEN GRAPHS *********) -(*******************************) - -(* a boundary is a wire vertex with at most one adjacent edge *) -val get_boundary : T -> V.NSet.T -(* NB: only isolated wire-vertices are both inputs and outputs *) -(* an input is a wire-vertex with no in edges and - * no undirected out edges *) -val get_inputs : T -> V.NSet.T -(* an output is a wire-vertex with no out edges and - * no undirected in edges *) -val get_outputs : T -> V.NSet.T -val is_boundary : T -> V.name -> bool -val is_input : T -> V.name -> bool -val is_output : T -> V.name -> bool - - -(* Given a wire vertex and an edge, give the other connected edge, if it - exists. In the case of a wire-vertex with a single, given self-loop, - return NONE. 
*) -(* this may raise an exception if the vertex is not a wire-vertex - * or if the edge is not adjacent to it *) -val wv_get_other_edge : T -> V.name -> E.name -> E.name option - -(* Wire equality; assumes the wires are in the same graph *) -val wire_eq : wire -> wire -> bool -(* An arbitrary total order on wires; assumes the wires are in the same graph *) -val wire_ord : wire * wire -> order - -val pretty_wire : wire -> Pretty.T - - -(* Deletes a wire, except for the end-points. - * - * Note that deleting a boundary wire, a bare wire or a circle - * will leave an isolated wire-vertex behind. - *) -val delete_wire : wire -> T -> T - -(* Return the endpoints, intermediate wire-vertices, and edges of the wire containing the given - edge. The first endpoint is the one closest to the source of the edge. *) -val get_wire_containing_edge : T -> E.name -> wire - -(* Return a list of all the wires in the graph that contain any of the given edges *) -val get_wires_for_edges : T -> E.NSet.T -> wire list - -(* Return a list of all the wires in the graph *) -val get_wire_list : T -> wire list - -(* Return a graph that has exactly two edge points - * on every wire between two node-vertices, and no - * unnecessary edge points on any other wire. - * - * Note that this function assumes there are no - * isolated points in the graph - it will not touch - * isolated points, nor will it create any (bare - * wires always have at least one edge) - *) -val normalise : T -> T - -(* Return a graph that has no unnecessary edge points. - * - * Note that this function assumes there are no - * isolated points in the graph - it will not touch - * isolated points, nor will it create any (bare - * wires always have at least one edge) - *) -val minimise : T -> T - -(* Split an edge into two edges, connected by a wire-vertex *) -val split_edge : E.name -> T -> (E.name * V.name * E.name) * T - - -(* Get an edge that can join two boundary vertices, if - * there is such an edge. - * - * This will return NONE exactly when are_pluggable - * returns false - *) -val get_plugging : T -> V.name -> V.name - -> ((dir_or_undir * edata) * (V.name * V.name)) option - -(* True if and only if one of the given vertices is - * an input and one an output with the same edge data *) -val are_pluggable : T -> V.name -> V.name -> bool - -(* Plug two boundary vertices together. - * - * Raises plugging_exp if the vertices are not pluggable. - * - * Returns the name of the created edge. - *) -val plug : V.name -> V.name -> T -> (E.name * T) -(* Exactly like plug, but does not return the edge name. *) -val plug_anon : V.name -> V.name -> T -> T - -(* Plug two boundary vertices together and minimises the - * plugged wire (so all unnecessary wire vertices will be - * removed). - * - * Raises plugging_exp if the vertices are not pluggable. - * - * Returns the new wire - *) -val plug_and_minimise : V.name -> V.name -> T -> (wire * T) -(* Exactly like plug_and_minimise, but does not return the wire. *) -val plug_and_minimise_anon : V.name -> V.name -> T -> T -(* Plug two boundary vertices together and normalises the - * plugged wire. - * - * Raises plugging_exp if the vertices are not pluggable. - * - * Returns the new wire - *) -val plug_and_normalise : V.name -> V.name -> T -> (wire * T) -(* Exactly like plug_and_normalise, but does not return the wire. *) -val plug_and_normalise_anon : V.name -> V.name -> T -> T - -(* Returns the minimal subgraph containing the given node vertices, - * with wires preserved. 
- * - * Wires between vertices in the set will be preserved as-is. Wires - * from a vertex in the set to elsewhere in the graph will be - * retained up to the first wire-vertex. - * - * The result is guaranteed to be a true subgraph (ie: no new or - * renamed vertices) and to be normal, providing the input graph was - * normal. - * - * Implementations may assume the input graph is normalised. - *) -val get_open_subgraph : T -> V.NSet.T -> T - - -(* Getters and setters for annotations *) - -val get_graph_annotation : T -> Json.jobj -val set_graph_annotation : Json.jobj -> T -> T -val get_vertex_annotation : T -> V.name -> Json.jobj -val get_vertex_annotation_opt : T -> V.name -> Json.jobj option -val set_vertex_annotation : V.name * Json.jobj -> T -> T -val get_edge_annotation : T -> E.name -> Json.jobj -val get_edge_annotation_opt : T -> E.name -> Json.jobj option -val set_edge_annotation : E.name * Json.jobj -> T -> T - -(* -(* TODO: ? *) -(*val fold_adj_wire_vertices : (V.name * E.name -> 'a -> 'a) -> T -> V.name -> E.name V.NTab.T*) - -*) - -end diff --git a/core/graph/test/test-bang-graph.ML b/core/graph/test/test-bang-graph.ML deleted file mode 100644 index 26828e36..00000000 --- a/core/graph/test/test-bang-graph.ML +++ /dev/null @@ -1,2294 +0,0 @@ -(* Tests for the OGRAPH signature *) -functor Bang_Graph_Interface_Tests( - G : TEST_BANG_GRAPH -) -= struct - (* start by testing that things in the OGRAPH interface still work *) - structure OGraph_Interface_Tests_Bang_Graph = OGraph_Interface_Tests(Test_Bang_Graph); - structure Tools = Test_Bang_Graph_Tools(Test_Bang_Graph); - open Tools; - - - val _ = Testing.test "G.has_bbox" (fn _ => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_bbox "b0" ["v1","v2"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox "b2" [] - val _ = Testing.assert "not (G.has_bbox G.empty b0)" - (not (G.has_bbox G.empty B.default_name)) - val _ = Testing.assert "G.has_bbox g b0" (G.has_bbox g (B.mk "b0")) - val _ = Testing.assert "G.has_bbox g b1" (G.has_bbox g (B.mk "b1")) - val _ = Testing.assert "G.has_bbox g b2" (G.has_bbox g (B.mk "b2")) - val _ = Testing.assert "G.has_bbox g b9" - (not (G.has_bbox g (B.mk "b9"))) - in () end) () - - - - val _ = Testing.test "G.get_bboxes" (fn _ => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_bbox "b0" ["v1","v2"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox "b2" [] - val exp = B.NSet.of_list (map B.mk ["b0","b1","b2"]) - val _ = Testing.assert "G.get_bboxes gives correct list" - (B.NSet.eq exp (G.get_bboxes g)) - val _ = Testing.assert "G.get_bboxes G.empty is empty" - (B.NSet.is_empty (G.get_bboxes G.empty)) - in () end) () - - - - val _ = Testing.test "G.get_bboxed" (fn _ => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_wv "w1" - |> add_wv "w2" - |> add_bbox "b0" ["v1","v2","w1"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox "b2" ["w1"] - |> add_bbox "b3" [] - val exp = V.NSet.of_list (map V.mk ["v1","v2","w1"]) - val _ = Testing.assert "G.get_bboxed gives correct list" - (V.NSet.eq exp (G.get_bboxed g)) - val _ = Testing.assert "G.get_bboxed G.empty is empty" - (V.NSet.is_empty (G.get_bboxed G.empty)) - in () end) () - - - - val _ = Testing.test "G.get_unbboxed" (fn _ => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_wv "w1" - |> add_wv "w2" - |> add_bbox "b0" ["v1","v2","w1"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox "b2" ["w1"] - 
|> add_bbox "b3" [] - val exp = V.NSet.of_list (map V.mk ["v3","w2"]) - val _ = Testing.assert "G.get_unbboxed gives correct list" - (V.NSet.eq exp (G.get_unbboxed g)) - val _ = Testing.assert "G.get_unbboxed G.empty is empty" - (V.NSet.is_empty (G.get_unbboxed G.empty)) - in () end) () - - - - val _ = Testing.test "G.is_bboxed" (fn _ => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_wv "w1" - |> add_wv "w2" - |> add_bbox "b0" ["v1","v2","w1"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox "b2" ["w1"] - |> add_bbox "b3" [] - val _ = Testing.assert "G.is_bboxed v1" (G.is_bboxed g (V.mk "v1")) - val _ = Testing.assert "G.is_bboxed v2" (G.is_bboxed g (V.mk "v2")) - val _ = Testing.assert "G.is_bboxed w1" (G.is_bboxed g (V.mk "w1")) - val _ = Testing.assert "G.is_bboxed v3" (not (G.is_bboxed g (V.mk "v3"))) - val _ = Testing.assert "G.is_bboxed w2" (not (G.is_bboxed g (V.mk "w2"))) - val _ = (G.is_bboxed g (V.mk "v4"); raise ERROR "expected no_such_vertex_exp") - handle G.no_such_vertex_exp (_,v',g') => - (Testing.assert "exception has correct graph" (G.exact_eq g g'); - Testing.assert "exception has correct vertex" ((V.dest v') = "v4")) - in () end) () - - - - val _ = Testing.test "G.get_empty_bboxes" (fn _ => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_wv "w1" - |> add_wv "w2" - |> add_bbox "b0" ["v1","v2","w1"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox_with_parent "b2" "b0" [] - |> add_bbox "b3" ["w1"] - |> add_bbox "b4" [] - |> add_bbox "b5" [] - |> add_bbox_with_parent "b6" "b5" [] - val exp = B.NSet.of_list (map B.mk ["b2","b4","b5","b6"]) - val _ = Testing.assert "G.get_empty_bboxes gives correct list" - (B.NSet.eq exp (G.get_empty_bboxes g)) - val _ = Testing.assert "G.get_empty_bboxes G.empty is empty" - (B.NSet.is_empty (G.get_empty_bboxes G.empty)) - in () end) () - - - - val _ = Testing.test "G.add_named_bbox" (fn () => let - val b0 = B.mk "b0" - val b1 = B.mk "b1" - val g = G.add_named_bbox b0 G.empty - val _ = case B.NSet.tryget_singleton (G.get_bboxes g) - of NONE => raise ERROR "add_named_bbox did not add a !-box" - | SOME b => - if not (B.name_eq (b,b0)) then - raise ERROR "add_named_bbox added the wrong name" - else () - val g' = G.add_named_bbox b1 g - val exp = B.NSet.of_list [b0,b1] - val _ = Testing.assert "g' has correct !-boxes" - (B.NSet.eq exp (G.get_bboxes g')) - val _ = ((G.add_named_bbox b0 g'; - raise ERROR "add_named_bbox (name clash) did not throw exception") - handle G.duplicate_bbox_exp (b_e,g_e) => - (Testing.assert "exception has correct graph" (G.exact_eq g' g_e); - Testing.assert "exception has correct !-box" (B.name_eq (b0,b_e)))) - in () end) () - - - - val _ = Testing.test "G.add_bbox" (fn () => let - val (b0,g) = G.add_bbox G.empty - val _ = case B.NSet.tryget_singleton (G.get_bboxes g) - of NONE => raise ERROR "add_bbox did not add a !-box" - | SOME b => - if not (B.name_eq (b,b0)) then - raise ERROR "add_bbox added the wrong name" - else () - val (b1,g') = G.add_bbox g - val exp = B.NSet.of_list [b0,b1] - val _ = Testing.assert "g' has correct !-boxes" - (B.NSet.eq exp (G.get_bboxes g')) - in () end) () - - - - (* TODO: add_bbox_anon *) - - - - val _ = Testing.test "G.delete_bbox" (fn () => let - val _ = Testing.assert "G.delete_bbox has no effect on G.empty" - (G.is_empty (G.delete_bbox B.default_name G.empty)) - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_bbox "b0" ["v1","v2"] - |> 
add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox_with_parents "b2" ["b0","b1"] [] - |> add_bbox "b3" ["v1","v2"] - val _ = assert_g_eq "G.delete_bbox (no such bbox)" - g (G.delete_bbox (B.mk "b9") g) - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_bbox "b0" ["v1","v2"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox_with_parents "b2" ["b0","b1"] [] - val _ = assert_g_eq "G.delete_bbox (b3)" - g_exp (G.delete_bbox (B.mk "b3") g) - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_bbox "b1" ["v1"] - |> add_bbox_with_parent "b2" "b1" [] - |> add_bbox "b3" ["v1","v2"] - val _ = assert_g_eq "G.delete_bbox (b0)" - g_exp (G.delete_bbox (B.mk "b0") g) - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_bbox "b0" ["v1","v2"] - |> add_bbox_with_parent "b2" "b0" [] - |> add_bbox "b3" ["v1","v2"] - val _ = assert_g_eq "G.delete_bbox (b1)" - g_exp (G.delete_bbox (B.mk "b1") g) - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_bbox "b0" ["v1","v2"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox "b3" ["v1","v2"] - val _ = assert_g_eq "G.delete_bbox (b2)" - g_exp (G.delete_bbox (B.mk "b2") g) - in () end) () - - - - val _ = Testing.test "G.add_to_bbox" (fn () => let - val _ = (G.add_to_bbox B.default_name (V.NSet.single V.default_name) G.empty; - raise ERROR "add_to_bbox (empty) incorrect") - handle G.no_such_vertex_exp _ => () - | G.no_such_bbox_exp _ => () - val g_clean = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_vunit2 "v4" - |> add_wv "w1" - |> add_wv "w2a" - |> add_wv "w2b" - |> add_wv "w3" - |> add_wv "w4" - |> add_dir_eunit1 "e1" "w1" "v1" - |> add_dir_eunit1 "e2" "v1" "w2a" - |> add_dir_eunit1 "e3" "w2a" "w2b" - |> add_dir_eunit1 "e4" "w2b" "v2" - |> add_dir_eunit1 "e5" "v3" "w3" - |> add_dir_eunit1 "e6" "w3" "v3" - val g = g_clean - |> add_bbox "b0" [] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" [] - val _ = (G.add_to_bbox (B.mk "b0") (V.NSet.single (V.mk "v9")) g; - raise ERROR "add_to_bbox (no such vertex) incorrect") - handle G.no_such_vertex_exp (_,v_e,g_e) => - (Testing.assert "exception has correct graph" (G.exact_eq g g_e); - Testing.assert "exception has correct vertex" ((V.dest v_e) = "v9")) - val _ = (G.add_to_bbox (B.mk "b9") (V.NSet.single (V.mk "v1")) g; - raise ERROR "add_to_bbox (no such !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.exact_eq g g_e); - Testing.assert "exception has correct !-box" ((B.dest b_e) = "b9")) - fun check_add msg (g,bn,vs) (g_exp,vs_exp) = - let - val (vs',g') = G.add_to_bbox (B.mk bn) - (V.NSet.of_list (map V.mk vs)) g - val _ = assert_g_eq msg g_exp g' - val _ = Testing.assert (msg^": vs return value correct") - (V.NSet.eq vs' (V.NSet.of_list (map V.mk vs_exp))) - in () end - val g_exp = g_clean - |> add_bbox "b0" ["v4"] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" [] - val _ = check_add "G.add_to_bbox (b0,v4)" - (g,"b0",["v4"]) - (g_exp,["v4"]) - val g_exp = g_clean - |> add_bbox "b0" ["v1","w1","w2a","w2b"] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" [] - val _ = check_add "G.add_to_bbox (b0,v1)" - (g,"b0",["v1"]) - (g_exp,["v1","w1","w2a","w2b"]) - val g_exp = g_clean - |> add_bbox "b0" ["w2a","w2b"] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" [] - val _ = check_add "G.add_to_bbox (b0,w2a)" - 
(g,"b0",["w2a"]) - (g_exp,["w2a","w2b"]) - val g_exp = g_clean - |> add_bbox "b0" ["w1"] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" [] - val _ = check_add "G.add_to_bbox (b0,w1)" - (g,"b0",["w1"]) - (g_exp,["w1"]) - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" [] - val _ = check_add "G.add_to_bbox (b0,w1,w4,v3)" - (g,"b0",["w1","w4","v3"]) - (g_exp,["w1","w4","v3","w3"]) - val g' = g_exp - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" ["v3","w3"] - val _ = check_add "G.add_to_bbox (b2,v3)" - (g',"b2",["v3"]) - (g_exp,["v3","w3"]) - val _ = (G.add_to_bbox (B.mk "b1") - (V.NSet.of_list (map V.mk ["v4"])) g; - raise ERROR "add_to_bbox (not in parent !-box) incorrect") - handle G.bbox_bad_parent_exp (bp_e,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.exact_eq g_e g); - Testing.assert "exception has correct !-box" (B.dest b_e = "b1"); - Testing.assert "exception has correct parent !-box" (B.dest bp_e = "b0")) - in () end) () - - - - val _ = Testing.test "G.add_to_bbox_anon" (fn () => let - val _ = (G.add_to_bbox_anon B.default_name (V.NSet.single V.default_name) G.empty; - raise ERROR "add_to_bbox (empty) incorrect") - handle G.no_such_vertex_exp _ => () - | G.no_such_bbox_exp _ => () - - val g_clean = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_vunit2 "v4" - |> add_wv "w1" - |> add_wv "w2a" - |> add_wv "w2b" - |> add_wv "w3" - |> add_wv "w4" - |> add_dir_eunit1 "e1" "w1" "v1" - |> add_dir_eunit1 "e2" "v1" "w2a" - |> add_dir_eunit1 "e3" "w2a" "w2b" - |> add_dir_eunit1 "e4" "w2b" "v2" - |> add_dir_eunit1 "e5" "v3" "w3" - |> add_dir_eunit1 "e6" "w3" "v3" - val g = g_clean - |> add_bbox "b0" [] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" [] - val _ = (G.add_to_bbox_anon (B.mk "b0") (V.NSet.single (V.mk "v9")) g; - raise ERROR "add_to_bbox_anon (no such vertex) incorrect") - handle G.no_such_vertex_exp (_,v_e,g_e) => - (Testing.assert "exception has correct graph" (G.exact_eq g g_e); - Testing.assert "exception has correct vertex" ((V.dest v_e) = "v9")) - val _ = (G.add_to_bbox_anon (B.mk "b9") (V.NSet.single (V.mk "v1")) g; - raise ERROR "add_to_bbox_anon (no such !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.exact_eq g g_e); - Testing.assert "exception has correct !-box" ((B.dest b_e) = "b9")) - fun check_add msg (g,bn,vs) g_exp = - let - val g' = G.add_to_bbox_anon (B.mk bn) - (V.NSet.of_list (map V.mk vs)) g - val _ = assert_g_eq msg g_exp g' - in () end - val g_exp = g_clean - |> add_bbox "b0" ["v4"] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" [] - val _ = check_add "G.add_to_bbox_anon (b0,v4)" - (g,"b0",["v4"]) - g_exp - val g_exp = g_clean - |> add_bbox "b0" ["v1","w1","w2a","w2b"] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" [] - val _ = check_add "G.add_to_bbox_anon (b0,v1)" - (g,"b0",["v1"]) - g_exp - val g_exp = g_clean - |> add_bbox "b0" ["w2a","w2b"] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" [] - val _ = check_add "G.add_to_bbox_anon (b0,w2a)" - (g,"b0",["w2a"]) - g_exp - val g_exp = g_clean - |> add_bbox "b0" ["w1"] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" [] - val _ = check_add "G.add_to_bbox_anon (b0,w1)" - (g,"b0",["w1"]) - g_exp - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" [] - 
|> add_bbox "b2" [] - val _ = check_add "G.add_to_bbox_anon (b0,w1,w4,v3)" - (g,"b0",["w1","w4","v3"]) - g_exp - val g' = g_exp - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" [] - |> add_bbox "b2" ["v3","w3"] - val _ = check_add "G.add_to_bbox_anon (b2,v3)" - (g',"b2",["v3"]) - g_exp - in () end) () - - - - val _ = Testing.test "G.remove_from_all_bboxes" (fn () => let - val _ = Testing.assert "G.remove_from_all_bboxes no effect on G.empty" - (G.is_empty (G.remove_from_all_bboxes (V.NSet.single - V.default_name) G.empty)) - - val g_clean = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_vunit2 "v4" - |> add_wv "w1" - |> add_wv "w2a" - |> add_wv "w2b" - |> add_wv "w3" - |> add_wv "w4" - |> add_dir_eunit1 "e1" "w1" "v1" - |> add_dir_eunit1 "e2" "v1" "w2a" - |> add_dir_eunit1 "e3" "w2a" "w2b" - |> add_dir_eunit1 "e4" "w2b" "v2" - |> add_dir_eunit1 "e5" "v3" "w3" - |> add_dir_eunit1 "e6" "w3" "v3" - val g = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b3" ["v3","w3","v4"] - |> add_bbox_with_parent "b2" "b3" ["v3","w3"] - - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b3" ["v3","w3"] - |> add_bbox_with_parent "b2" "b3" ["v3","w3"] - val _ = assert_g_eq "G.remove_from_all_bboxes (v4)" g_exp - (G.remove_from_all_bboxes - (V.NSet.of_list (map V.mk ["v4"])) - g) - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b3" ["w3","v4"] - |> add_bbox_with_parent "b2" "b3" ["w3"] - val _ = assert_g_eq "G.remove_from_all_bboxes (v3)" g_exp - (G.remove_from_all_bboxes - (V.NSet.of_list (map V.mk ["v3"])) - g) - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1"] - |> add_bbox "b3" ["v3","w3","v4"] - |> add_bbox_with_parent "b2" "b3" ["v3","w3"] - val _ = assert_g_eq "G.remove_from_all_bboxes (w3)" g_exp - (G.remove_from_all_bboxes - (V.NSet.of_list (map V.mk ["w3"])) - g) - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4"] - |> add_bbox_with_parent "b1" "b0" ["w1"] - |> add_bbox "b3" ["v4"] - |> add_bbox_with_parent "b2" "b3" [] - val _ = assert_g_eq "G.remove_from_all_bboxes (v3,w3)" g_exp - (G.remove_from_all_bboxes - (V.NSet.of_list (map V.mk ["v3","w3"])) - g) - in () end) () - - - - val _ = Testing.test "G.remove_from_bbox" (fn () => let - val _ = (G.remove_from_bbox B.default_name (V.NSet.single V.default_name) G.empty; - raise ERROR "remove_from_bbox (empty) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.is_empty g_e); - Testing.assert "exception has correct !-box" (B.name_eq (b_e,B.default_name))) - - val g_clean = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_vunit2 "v4" - |> add_wv "w1" - |> add_wv "w2a" - |> add_wv "w2b" - |> add_wv "w3" - |> add_wv "w4" - |> add_dir_eunit1 "e1" "w1" "v1" - |> add_dir_eunit1 "e2" "v1" "w2a" - |> add_dir_eunit1 "e3" "w2a" "w2b" - |> add_dir_eunit1 "e4" "w2b" "v2" - |> add_dir_eunit1 "e5" "v3" "w3" - |> add_dir_eunit1 "e6" "w3" "v3" - val g = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b2" ["v3","w3","v4"] - - val _ = (G.remove_from_bbox (B.mk "b9") (V.NSet.single (V.mk "v1")) g; - raise ERROR "remove_from_bbox (unknown !-box) incorrect") - handle 
G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.exact_eq g_e g); - Testing.assert "exception has correct !-box" (B.dest b_e = "b9")) - - fun check_remove msg (g_exp,vs_exp) (g,bn,vs) = let - val (vset',g') = G.remove_from_bbox (B.mk bn) - (V.NSet.of_list (map V.mk vs)) - g - val vset_exp = (V.NSet.of_list (map V.mk vs_exp)) - val _ = assert_g_eq msg g_exp g' - val _ = if V.NSet.eq vset_exp vset' then () - else (writeln "Expected vs:"; - V.NSet.print vset_exp; - writeln "Actual vs:"; - V.NSet.print vset'; - raise ERROR (msg^": wrong vs")) - in () end - - val _ = check_remove "G.remove_from_bbox (b3,v4)" - (g,[]) - (g,"b0",["v9"]) - - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b2" ["v3","w3"] - val _ = check_remove "G.remove_from_bbox (b3,v4)" - (g_exp,["v4"]) - (g,"b2",["v4"]) - - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b2" ["v3","w3","v4"] - val _ = check_remove "G.remove_from_bbox (b2,w3)" - (g_exp,[]) - (g,"b2",["w3"]) - - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b2" ["v4"] - val _ = check_remove "G.remove_from_bbox (b2,v3,w3)" - (g_exp,["v3","w3"]) - (g,"b2",["v3","w3"]) - - val g_exp = g_clean - |> add_bbox "b0" ["w1","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b2" ["v3","w3","v4"] - val _ = check_remove "G.remove_from_bbox (b0,v3,w4)" - (g_exp,["v3","w4"]) - (g,"b0",["v3","w4"]) - - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1"] - |> add_bbox "b2" ["v3","w3","v4"] - val _ = check_remove "G.remove_from_bbox (b1,v3,w3)" - (g_exp,["w3"]) - (g,"b1",["v3","w3"]) - - val _ = (G.remove_from_bbox (B.mk "b0") - (V.NSet.of_list (map V.mk ["v3","w3"])) g; - raise ERROR "remove_from_bbox (in child !-box) incorrect") - handle G.bbox_bad_parent_exp (bp_e,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.exact_eq g_e g); - Testing.assert "exception has correct !-box" (B.dest b_e = "b1"); - Testing.assert "exception has correct parent !-box" (B.dest bp_e = "b0")) - in () end) () - - - - val _ = Testing.test "G.remove_from_bbox_anon" (fn () => let - val _ = (G.remove_from_bbox_anon B.default_name (V.NSet.single V.default_name) G.empty; - raise ERROR "remove_from_bbox_anon (empty) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.is_empty g_e); - Testing.assert "exception has correct !-box" (B.name_eq (b_e,B.default_name))) - - val g_clean = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_vunit2 "v4" - |> add_wv "w1" - |> add_wv "w2a" - |> add_wv "w2b" - |> add_wv "w3" - |> add_wv "w4" - |> add_dir_eunit1 "e1" "w1" "v1" - |> add_dir_eunit1 "e2" "v1" "w2a" - |> add_dir_eunit1 "e3" "w2a" "w2b" - |> add_dir_eunit1 "e4" "w2b" "v2" - |> add_dir_eunit1 "e5" "v3" "w3" - |> add_dir_eunit1 "e6" "w3" "v3" - val g = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b2" ["v3","w3","v4"] - - val _ = (G.remove_from_bbox_anon (B.mk "b9") (V.NSet.single (V.mk "v1")) g; - raise ERROR "remove_from_bbox_anon (unknown !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.exact_eq g_e g); - Testing.assert "exception has correct !-box" (B.dest 
b_e = "b9")) - - fun check_remove msg g_exp (g,bn,vs) = - assert_g_eq msg g_exp - (G.remove_from_bbox_anon (B.mk bn) - (V.NSet.of_list (map V.mk vs)) - g) - - val _ = check_remove "G.remove_from_bbox_anon (b3,v4)" - g - (g,"b0",["v9"]) - - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b2" ["v3","w3"] - val _ = check_remove "G.remove_from_bbox_anon (b3,v4)" - g_exp - (g,"b2",["v4"]) - - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b2" ["v3","w3","v4"] - val _ = check_remove "G.remove_from_bbox_anon (b2,w3)" - g_exp - (g,"b2",["w3"]) - - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b2" ["v4"] - val _ = check_remove "G.remove_from_bbox_anon (b2,v3,w3)" - g_exp - (g,"b2",["v3","w3"]) - - val g_exp = g_clean - |> add_bbox "b0" ["w1","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1","w3"] - |> add_bbox "b2" ["v3","w3","v4"] - val _ = check_remove "G.remove_from_bbox_anon (b0,v3,w4)" - g_exp - (g,"b0",["v3","w4"]) - - val g_exp = g_clean - |> add_bbox "b0" ["w1","w4","v3","w3"] - |> add_bbox_with_parent "b1" "b0" ["w1"] - |> add_bbox "b2" ["v3","w3","v4"] - val _ = check_remove "G.remove_from_bbox_anon (b1,v3,w3)" - g_exp - (g,"b1",["v3","w3"]) - - val _ = (G.remove_from_bbox_anon (B.mk "b0") - (V.NSet.of_list (map V.mk ["v3","w3"])) g; - raise ERROR "remove_from_bbox_anon (in child !-box) incorrect") - handle G.bbox_bad_parent_exp (bp_e,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.exact_eq g_e g); - Testing.assert "exception has correct !-box" (B.dest b_e = "b1"); - Testing.assert "exception has correct parent !-box" (B.dest bp_e = "b0")) - in () end) () - - - - val _ = Testing.test "G.add_children_to_bbox" (fn () => let - val g_clean = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_wv "w1" - |> add_wv "w2" - val g = g_clean - |> add_bbox "b0" ["v1","v2","w1"] - |> add_bbox "b1" ["w1","w2"] - |> add_bbox_with_parent "b2" "b0" ["w1"] - |> add_bbox "b3" ["v2"] - |> add_bbox "b4" [] - |> add_bbox_with_parent "b5" "b1" ["w1","w2"] - - val _ = (G.add_children_to_bbox (B.mk "b9") - (B.NSet.of_list (map B.mk ["b0","b1"])) g; - raise ERROR "add_children_to_bbox (unknown parent !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph (unknown parent !-box)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (unknown parent !-box)" - (B.dest b_e = "b9")) - val _ = (G.add_children_to_bbox (B.mk "b0") - (B.NSet.of_list (map B.mk ["b3","b9"])) g; - raise ERROR "add_children_to_bbox (unknown child !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct !-box (unknown child !-box)" - (B.dest b_e = "b9")) - - val g_exp = g_clean - |> add_bbox "b0" ["v1","v2","w1"] - |> add_bbox "b1" ["w1","w2"] - |> add_bbox_with_parent "b2" "b0" ["w1"] - |> add_bbox_with_parent "b3" "b0" ["v2"] - |> add_bbox_with_parent "b4" "b0" [] - |> add_bbox_with_parent "b5" "b1" ["w1","w2"] - val _ = assert_g_eq "G.add_children_to_bbox (b0:b3,b4)" g_exp - (G.add_children_to_bbox (B.mk "b0") - (B.NSet.of_list (map B.mk ["b3","b4"])) - g) - - val g_exp = g_clean - |> add_bbox "b0" ["v1","v2","w1"] - |> add_bbox "b1" ["w1","w2"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["w1"] - |> add_bbox "b3" ["v2"] - |> add_bbox "b4" [] - |> add_bbox_with_parent "b5" "b1" 
["w1","w2"] - val _ = assert_g_eq "G.add_children_to_bbox (b1:b2)" g_exp - (G.add_children_to_bbox (B.mk "b1") - (B.NSet.of_list (map B.mk ["b2"])) - g) - - val _ = (G.add_children_to_bbox (B.mk "b0") - (B.NSet.of_list (map B.mk ["b1","b2"])) g; - raise ERROR "add_children_to_bbox (not subgraph) incorrect") - handle G.bbox_bad_parent_exp (bp_e,b_e,g_e) => - (Testing.assert "exception has correct !-box (not subgraph)" - (B.dest b_e = "b1"); - Testing.assert "exception has correct parent !-box (not subgraph)" - (B.dest bp_e = "b0")) - val _ = (G.add_children_to_bbox (B.mk "b5") - (B.NSet.of_list (map B.mk ["b1","b4"])) g; - raise ERROR "add_children_to_bbox (breaks anti-symmetry) incorrect") - handle G.bbox_bad_parent_exp (bp_e,b_e,g_e) => - (Testing.assert "exception has correct !-box (breaks anti-symmetry)" - (B.dest b_e = "b1"); - Testing.assert "exception has correct parent !-box (breaks anti-symmetry)" - (B.dest bp_e = "b5")) - - (* transitivity check *) - val g_clean = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - val g = g_clean - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parents "b3" ["b0","b1","b2"] ["v1","v2"] - |> add_bbox "b4" ["v2","v3"] - |> add_bbox_with_parent "b5" "b4" ["v2"] - |> add_bbox_with_parents "b6" ["b4","b5"] ["v2"] - |> add_bbox_with_parents "b7" ["b4","b5","b6"] [] - val g_exp = g_clean - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parents "b3" ["b0","b1","b2"] ["v1","v2"] - |> add_bbox "b4" ["v2","v3"] - |> add_bbox_with_parents "b5" ["b0","b1","b2","b4"] ["v2"] - |> add_bbox_with_parents "b6" ["b0","b1","b2","b4","b5"] ["v2"] - |> add_bbox_with_parents "b7" ["b0","b1","b2","b4","b5","b6"] [] - val _ = assert_g_eq "G.add_children_to_bbox (b2:b5,b6)" g_exp - (G.add_children_to_bbox (B.mk "b2") - (B.NSet.of_list (map B.mk ["b5","b6"])) - g) - - in () end) () - - - - val _ = Testing.test "G.add_child_to_bbox" (fn () => let - val g_clean = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_wv "w1" - |> add_wv "w2" - val g = g_clean - |> add_bbox "b0" ["v1","v2","w1"] - |> add_bbox "b1" ["w1","w2"] - |> add_bbox_with_parent "b2" "b0" ["w1"] - |> add_bbox "b3" ["v2"] - |> add_bbox "b4" [] - |> add_bbox_with_parent "b5" "b1" ["w1","w2"] - - val _ = (G.add_child_to_bbox (B.mk "b9") (B.mk "b0") g; - raise ERROR "add_child_to_bbox (unknown parent !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph (unknown parent !-box)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (unknown parent !-box)" - (B.dest b_e = "b9")) - val _ = (G.add_child_to_bbox (B.mk "b0") (B.mk "b9") g; - raise ERROR "add_child_to_bbox (unknown child !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph (unknown child !-box)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (unknown child !-box)" - (B.dest b_e = "b9")) - - val g_exp = g_clean - |> add_bbox "b0" ["v1","v2","w1"] - |> add_bbox "b1" ["w1","w2"] - |> add_bbox_with_parent "b2" "b0" ["w1"] - |> add_bbox_with_parent "b3" "b0" ["v2"] - |> add_bbox "b4" [] - |> add_bbox_with_parent "b5" "b1" ["w1","w2"] - val _ = assert_g_eq "G.add_child_to_bbox (b0:b3)" g_exp - (G.add_child_to_bbox (B.mk "b0") (B.mk "b3") g) - - val g_exp = 
g_clean - |> add_bbox "b0" ["v1","v2","w1"] - |> add_bbox "b1" ["w1","w2"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["w1"] - |> add_bbox "b3" ["v2"] - |> add_bbox "b4" [] - |> add_bbox_with_parent "b5" "b1" ["w1","w2"] - val _ = assert_g_eq "G.add_child_to_bbox (b1:b2)" g_exp - (G.add_child_to_bbox (B.mk "b1") (B.mk "b2") g) - - val _ = (G.add_child_to_bbox (B.mk "b0") (B.mk "b1") g; - raise ERROR "add_child_to_bbox (not subgraph) incorrect") - handle G.bbox_bad_parent_exp (bp_e,b_e,g_e) => - (Testing.assert "exception has correct graph (not subgraph)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (not subgraph)" - (B.dest b_e = "b1"); - Testing.assert "exception has correct parent !-box (not subgraph)" - (B.dest bp_e = "b0")) - val _ = (G.add_child_to_bbox (B.mk "b5") (B.mk "b1") g; - raise ERROR "add_child_to_bbox (breaks anti-symmetry) incorrect") - handle G.bbox_bad_parent_exp (bp_e,b_e,g_e) => - (Testing.assert "exception has correct graph (breaks anti-symmetry)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (breaks anti-symmetry)" - (B.dest b_e = "b1"); - Testing.assert "exception has correct parent (breaks anti-symmetry) !-box" - (B.dest bp_e = "b5")) - - (* transitivity check *) - val g_clean = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - val g = g_clean - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parents "b3" ["b0","b1","b2"] ["v1","v2"] - |> add_bbox "b4" ["v2","v3"] - |> add_bbox_with_parent "b5" "b4" ["v2"] - |> add_bbox_with_parents "b6" ["b4","b5"] ["v2"] - |> add_bbox_with_parents "b7" ["b4","b5","b6"] [] - val g_exp = g_clean - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parents "b3" ["b0","b1","b2"] ["v1","v2"] - |> add_bbox "b4" ["v2","v3"] - |> add_bbox_with_parents "b5" ["b0","b1","b2","b4"] ["v2"] - |> add_bbox_with_parents "b6" ["b0","b1","b2","b4","b5"] ["v2"] - |> add_bbox_with_parents "b7" ["b0","b1","b2","b4","b5","b6"] [] - val _ = assert_g_eq "G.add_child_to_bbox (b2:b5)" g_exp - (G.add_child_to_bbox (B.mk "b2") (B.mk "b5") g) - in () end) () - - - - val _ = Testing.test "G.remove_child_from_bbox" (fn () => let - val g_clean = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - val g = g_clean - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parent "b3" "b0" ["v2","v3"] - |> add_bbox_with_parents "b4" ["b0","b1","b2","b3"] ["v2"] - - val _ = (G.remove_child_from_bbox (B.mk "b9") (B.mk "b0") g; - raise ERROR "remove_child_from_bbox (unknown parent !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph (unknown parent !-box)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (unknown parent !-box)" - (B.dest b_e = "b9")) - val _ = (G.remove_child_from_bbox (B.mk "b0") (B.mk "b9") g; - raise ERROR "remove_child_from_bbox (unknown child !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph (unknown child !-box)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (unknown child !-box)" - (B.dest b_e = "b9")) - - (* not connected *) - val _ = assert_g_eq "G.remove_child_from_bbox 
(b1:b3)" g - (G.remove_child_from_bbox (B.mk "b1") (B.mk "b3") g) - - (* would break transitivity *) - val _ = assert_g_eq "G.remove_child_from_bbox (b0:b2)" g - (G.remove_child_from_bbox (B.mk "b0") (B.mk "b2") g) - - val g_exp = g_clean - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox "b1" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parent "b3" "b0" ["v2","v3"] - |> add_bbox_with_parents "b4" ["b0","b1","b2","b3"] ["v2"] - val _ = assert_g_eq "G.remove_child_from_bbox (b0:b4)" g_exp - (G.remove_child_from_bbox (B.mk "b0") (B.mk "b1") g) - in () end) () - - - - val _ = Testing.test "G.clear_bbox_children" (fn () => let - val g_clean = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - val g = g_clean - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parent "b3" "b0" ["v2","v3"] - |> add_bbox_with_parents "b4" ["b0","b1","b2","b3"] ["v2"] - - val _ = (G.clear_bbox_children (B.mk "b9") g; - raise ERROR "clear_bbox_children (unknown !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph (unknown !-box)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (unknown !-box)" - (B.dest b_e = "b9")) - - (* leaf !-box *) - val _ = assert_g_eq "G.clear_bbox_children (b4)" g - (G.clear_bbox_children (B.mk "b4") g) - - val g_exp = g_clean - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parent "b3" "b0" ["v2","v3"] - |> add_bbox_with_parents "b4" ["b0","b1","b2"] ["v2"] - val _ = assert_g_eq "G.clear_bbox_children (b3)" g_exp - (G.clear_bbox_children (B.mk "b3") g) - - val g_exp = g_clean - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox "b1" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b1"] ["v1","v2"] - |> add_bbox "b3" ["v2","v3"] - |> add_bbox_with_parents "b4" ["b1","b2","b3"] ["v2"] - val _ = assert_g_eq "G.clear_bbox_children (b0)" g_exp - (G.clear_bbox_children (B.mk "b0") g) - in () end) () - - - - val _ = Testing.test "G.get_bbox_parents" (fn () => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parent "b3" "b0" ["v2","v3"] - |> add_bbox_with_parents "b4" ["b0","b1","b2","b3"] ["v2"] - - val _ = (G.get_bbox_parents g (B.mk "b9"); - raise ERROR "get_bbox_parents (unknown !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph (unknown !-box)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (unknown !-box)" - (B.dest b_e = "b9")) - - val _ = assert_bboxes "b0" [] - (G.get_bbox_parents g (B.mk "b0")) - val _ = assert_bboxes "b3" ["b0"] - (G.get_bbox_parents g (B.mk "b3")) - val _ = assert_bboxes "b4" ["b0","b1","b2","b3"] - (G.get_bbox_parents g (B.mk "b4")) - in () end) () - - - - val _ = Testing.test "G.get_bbox_children" (fn () => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parent "b3" "b0" ["v2","v3"] - |> add_bbox_with_parents "b4" 
["b0","b1","b2","b3"] ["v2"] - - val _ = (G.get_bbox_children g (B.mk "b9"); - raise ERROR "get_bbox_children (unknown !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph (unknown !-box)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (unknown !-box)" - (B.dest b_e = "b9")) - - val _ = assert_bboxes "b0" ["b1","b2","b3","b4"] - (G.get_bbox_children g (B.mk "b0")) - val _ = assert_bboxes "b3" ["b4"] - (G.get_bbox_children g (B.mk "b3")) - val _ = assert_bboxes "b4" [] - (G.get_bbox_children g (B.mk "b4")) - in () end) () - - - - val _ = Testing.test "G.bbox_has_parents" (fn () => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parent "b3" "b0" ["v2","v3"] - |> add_bbox_with_parents "b4" ["b0","b1","b2","b3"] ["v2"] - - val _ = (G.bbox_has_parents g (B.mk "b9"); - raise ERROR "bbox_has_parents (unknown !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph (unknown !-box)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (unknown !-box)" - (B.dest b_e = "b9")) - - val _ = Testing.assert "b0 has no parents" - (not (G.bbox_has_parents g (B.mk "b0"))) - val _ = Testing.assert "b3 has parents" - (G.bbox_has_parents g (B.mk "b3")) - val _ = Testing.assert "b4 has parents" - (G.bbox_has_parents g (B.mk "b4")) - in () end) () - - - - val _ = Testing.test "G.bbox_has_children" (fn () => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parent "b3" "b0" ["v2","v3"] - |> add_bbox_with_parents "b4" ["b0","b1","b2","b3"] ["v2"] - - val _ = (G.bbox_has_children g (B.mk "b9"); - raise ERROR "bbox_has_children (unknown !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph (unknown !-box)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (unknown !-box)" - (B.dest b_e = "b9")) - - val _ = Testing.assert "b0 has children" - (G.bbox_has_children g (B.mk "b0")) - val _ = Testing.assert "b3 has children" - (G.bbox_has_children g (B.mk "b3")) - val _ = Testing.assert "b4 has no children" - (not (G.bbox_has_children g (B.mk "b4"))) - in () end) () - - - - val _ = Testing.test "G.get_vertices_in_bbox" (fn () => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parent "b3" "b0" ["v2","v3"] - |> add_bbox_with_parents "b4" ["b0","b1","b2","b3"] ["v2"] - |> add_bbox "b5" [] - - val _ = (G.get_vertices_in_bbox g (B.mk "b9"); - raise ERROR "get_vertices_in_bbox (unknown !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph (unknown !-box)" - (G.exact_eq g_e g); - Testing.assert "exception has correct !-box (unknown !-box)" - (B.dest b_e = "b9")) - - val _ = assert_vertices "b0" ["v1","v2","v3"] - (G.get_vertices_in_bbox g (B.mk "b0")) - val _ = assert_vertices "b3" ["v2","v3"] - (G.get_vertices_in_bbox g (B.mk "b3")) - val _ = assert_vertices "b4" ["v2"] - 
(G.get_vertices_in_bbox g (B.mk "b4")) - val _ = assert_vertices "b5" [] - (G.get_vertices_in_bbox g (B.mk "b5")) - in () end) () - - - - val _ = Testing.test "G.get_bboxes_containing_vertex" (fn () => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - |> add_vunit1 "v4" - |> add_bbox "b0" ["v1","v2","v3"] - |> add_bbox_with_parent "b1" "b0" ["v1","v2","v3"] - |> add_bbox_with_parents "b2" ["b0","b1"] ["v1","v2"] - |> add_bbox_with_parent "b3" "b0" ["v2","v3"] - |> add_bbox_with_parents "b4" ["b0","b1","b2","b3"] ["v2"] - |> add_bbox "b5" [] - - val _ = (G.get_bboxes_containing_vertex g (V.mk "v9"); - raise ERROR "get_bboxes_containing_vertex (unknown vertex) incorrect") - handle G.no_such_vertex_exp (_,v_e,g_e) => - (Testing.assert "exception has correct graph (unknown vertex)" - (G.exact_eq g_e g); - Testing.assert "exception has correct vertex (unknown vertex)" - (V.dest v_e = "v9")) - - val _ = assert_bboxes "v1" ["b0","b1","b2"] - (G.get_bboxes_containing_vertex g (V.mk "v1")) - val _ = assert_bboxes "v2" ["b0","b1","b2","b3","b4"] - (G.get_bboxes_containing_vertex g (V.mk "v2")) - val _ = assert_bboxes "v3" ["b0","b1","b3"] - (G.get_bboxes_containing_vertex g (V.mk "v3")) - val _ = assert_bboxes "v4" [] - (G.get_bboxes_containing_vertex g (V.mk "v4")) - in () end) () - - - - val _ = Testing.test "G.kill_bbox" (fn () => let - val _ = (G.kill_bbox B.default_name G.empty; - raise ERROR "kill_bbox (empty) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.is_empty g_e); - Testing.assert "exception has correct !-box" (B.name_eq (b_e,B.default_name))) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_vunit2 "v4" - |> add_bbox "b0" ["v1","v2"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox_with_parents "b2" ["b0","b1"] [] - |> add_bbox "b4" ["v2"] - |> add_bbox_with_parent "b3" "b4" ["v2"] - |> add_bbox "b5" ["v3"] - val _ = (G.kill_bbox (B.mk "b9") g; - raise ERROR "kill_bbox (empty) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.exact_eq g_e g); - Testing.assert "exception has correct !-box" (B.dest b_e = "b9")) - - val g_exp = G.empty - |> add_vunit2 "v3" - |> add_vunit2 "v4" - |> add_bbox "b4" [] - |> add_bbox_with_parent "b3" "b4" [] - |> add_bbox "b5" ["v3"] - val _ = assert_g_eq "G.kill_bbox (b0)" - g_exp (G.kill_bbox (B.mk "b0") g) - val g_exp = G.empty - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_vunit2 "v4" - |> add_bbox "b0" ["v2"] - |> add_bbox "b4" ["v2"] - |> add_bbox_with_parent "b3" "b4" ["v2"] - |> add_bbox "b5" ["v3"] - val _ = assert_g_eq "G.kill_bbox (b1)" - g_exp (G.kill_bbox (B.mk "b1") g) - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_vunit2 "v4" - |> add_bbox "b0" ["v1","v2"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox "b4" ["v2"] - |> add_bbox_with_parent "b3" "b4" ["v2"] - |> add_bbox "b5" ["v3"] - val _ = assert_g_eq "G.kill_bbox (b2)" - g_exp (G.kill_bbox (B.mk "b2") g) - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v3" - |> add_vunit2 "v4" - |> add_bbox "b0" ["v1"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox_with_parents "b2" ["b0","b1"] [] - |> add_bbox "b4" [] - |> add_bbox "b5" ["v3"] - val _ = assert_g_eq "G.kill_bbox (b3)" - g_exp (G.kill_bbox (B.mk "b3") g) - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v3" - |> add_vunit2 "v4" - |> add_bbox "b0" ["v1"] - 
|> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox_with_parents "b2" ["b0","b1"] [] - |> add_bbox "b5" ["v3"] - val _ = assert_g_eq "G.kill_bbox (b4)" - g_exp (G.kill_bbox (B.mk "b4") g) - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v4" - |> add_bbox "b0" ["v1","v2"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox_with_parents "b2" ["b0","b1"] [] - |> add_bbox "b4" ["v2"] - |> add_bbox_with_parent "b3" "b4" ["v2"] - val _ = assert_g_eq "G.kill_bbox (b5)" - g_exp (G.kill_bbox (B.mk "b5") g) - in () end) () - - - - val _ = Testing.test "G.drop_bbox" (fn () => let - val _ = (G.drop_bbox B.default_name G.empty; - raise ERROR "drop_bbox (empty) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.is_empty g_e); - Testing.assert "exception has correct !-box" (B.name_eq (b_e,B.default_name))) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_bbox "b0" ["v1","v2"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox_with_parents "b2" ["b0","b1"] [] - |> add_bbox "b3" ["v1","v2"] - val _ = (G.drop_bbox (B.mk "b9") g; - raise ERROR "drop_bbox (empty) incorrect") - handle G.no_such_bbox_exp (_,b_e,g_e) => - (Testing.assert "exception has correct graph" (G.exact_eq g_e g); - Testing.assert "exception has correct !-box" (B.dest b_e = "b9")) - - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_bbox "b0" ["v1","v2"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox_with_parents "b2" ["b0","b1"] [] - val _ = assert_g_eq "G.drop_bbox (b3)" - g_exp (G.drop_bbox (B.mk "b3") g) - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_bbox "b1" ["v1"] - |> add_bbox_with_parent "b2" "b1" [] - |> add_bbox "b3" ["v1","v2"] - val _ = assert_g_eq "G.drop_bbox (b0)" - g_exp (G.drop_bbox (B.mk "b0") g) - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_bbox "b0" ["v1","v2"] - |> add_bbox_with_parent "b2" "b0" [] - |> add_bbox "b3" ["v1","v2"] - val _ = assert_g_eq "G.drop_bbox (b1)" - g_exp (G.drop_bbox (B.mk "b1") g) - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit2 "v3" - |> add_bbox "b0" ["v1","v2"] - |> add_bbox_with_parent "b1" "b0" ["v1"] - |> add_bbox "b3" ["v1","v2"] - val _ = assert_g_eq "G.drop_bbox (b2)" - g_exp (G.drop_bbox (B.mk "b2") g) - in () end) () - - - - val _ = Testing.test "G.merge_bboxes" (fn () => let - val g_clean = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - - val g = g_clean - |> add_bbox "bl" ["v1"] - |> add_bbox "br" ["v3"] - val _ = (G.merge_bboxes B.NSet.empty g; - raise ERROR "merge_bboxes (none given) incorrect") - handle G.merge_bbox_exp _ => () - val _ = (G.merge_bboxes (B.NSet.of_list (map B.mk ["bl","bm","br"])) g; - raise ERROR "merge_bboxes (unknown !-box) incorrect") - handle G.no_such_bbox_exp (_,b_e,_) => - (Testing.assert "exception has correct !-box" (B.dest b_e = "bm")) - - val g = g_clean - |> add_bbox "bl" ["v1"] - |> add_bbox "br" ["v3"] - val (bm,g_merged) = - G.merge_bboxes (B.NSet.of_list (map B.mk ["bl","br"])) g - val g_exp = g_clean - |> add_bbox (B.dest bm) ["v1","v3"] - val _ = assert_g_eq "G.merge_bboxes (1)" g_exp g_merged - - val g = g_clean - |> add_bbox "bp" ["v1","v2","v3"] - |> add_bbox_with_parent "bl" "bp" ["v1"] - |> add_bbox_with_parent "br" "bp" ["v3"] - val (bm,g_merged) = - G.merge_bboxes (B.NSet.of_list (map B.mk 
["bl","br"])) g - val g_exp = g_clean - |> add_bbox "bp" ["v1","v2","v3"] - |> add_bbox_with_parent (B.dest bm) "bp" ["v1","v3"] - val _ = assert_g_eq "G.merge_bboxes (2)" g_exp g_merged - - val g = g_clean - |> add_bbox "bp" ["v1","v2","v3"] - |> add_bbox_with_parent "bl" "bp" ["v1"] - |> add_bbox_with_parent "bm" "bp" ["v2"] - |> add_bbox_with_parent "br" "bp" ["v3"] - val (bm,g_merged) = - G.merge_bboxes (B.NSet.of_list (map B.mk ["bl","bm","br"])) g - val g_exp = g_clean - |> add_bbox "bp" ["v1","v2","v3"] - |> add_bbox_with_parent (B.dest bm) "bp" ["v1","v2","v3"] - val _ = assert_g_eq "G.merge_bboxes (3)" g_exp g_merged - - val g = g_clean - |> add_bbox "bp" ["v1","v2","v3"] - |> add_bbox_with_parent "bl" "bp" ["v1"] - |> add_bbox_with_parent "bm" "bp" ["v2"] - |> add_bbox "br" ["v3"] - val _ = (G.merge_bboxes (B.NSet.of_list (map B.mk ["bl","bm","br"])) g; - raise ERROR "merge_bboxes (different parents) incorrect") - handle G.merge_bbox_exp _ => () - - val g = g_clean - |> add_bbox "bp" ["v1","v2","v3"] - |> add_bbox "bl" ["v1","v2"] - |> add_bbox "br" ["v2","v3"] - val _ = (G.merge_bboxes (B.NSet.of_list (map B.mk ["bl","br"])) g; - raise ERROR "merge_bboxes (overlapping) incorrect") - handle G.merge_bbox_exp _ => () - in () end) () - - - - val _ = Testing.test "G.expand_bbox" (fn () => let - (* TODO: val expand_bbox : B.name -> (VSub.T * ESub.T * BSub.T) * T - -> (VSub.T * ESub.T * BSub.T) * T *) - in () end) () - - - - val _ = Testing.test "G.copy_bbox" (fn () => let - (* TODO: val copy_bbox : B.name -> (VSub.T * ESub.T * BSub.T) * T - -> (VSub.T * ESub.T * BSub.T) * T *) - in () end) () - - - - val _ = Testing.test "G.fresh_copy_bbox" (fn () => let - val g_orig = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_bbox "b1" ["v1","w1","v2"] - |> add_bbox_with_parent "b2" "b1" ["v2","w1"] - - val ((vsub,esub,bsub),g_actual) = G.fresh_copy_bbox (B.mk "b1") g_orig - val get_vsub = V.dest o (VSub.get vsub) o V.mk - val get_esub = E.dest o (ESub.get esub) o E.mk - val get_bsub = B.dest o (BSub.get bsub) o B.mk - val g_exp = g_orig - |> add_vunit1 (get_vsub "v1") - |> add_vunit2 (get_vsub "v2") - |> add_wv (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e1") (get_vsub "v1") (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e2") (get_vsub "w1") (get_vsub "v2") - |> add_bbox (get_bsub "b1") (map get_vsub ["v1","w1","v2"]) - |> add_bbox_with_parent (get_bsub "b2") (get_bsub "b1") (map get_vsub ["v2","w1"]) - val _ = assert_g_eq "G.fresh_copy_bbox (parent)" g_exp g_actual - - val ((vsub,esub,bsub),g_actual) = G.fresh_copy_bbox (B.mk "b2") g_orig - val get_vsub = V.dest o (VSub.get vsub) o V.mk - val get_esub = E.dest o (ESub.get esub) o E.mk - val get_bsub = B.dest o (BSub.get bsub) o B.mk - val g_exp = g_orig - |> add_vunit2 (get_vsub "v2") - |> add_wv (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e1") "v1" (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e2") (get_vsub "w1") (get_vsub "v2") - |> add_to_bbox "b1" (map get_vsub ["v2","w1"]) - |> add_bbox_with_parent (get_bsub "b2") "b1" (map get_vsub ["v2","w1"]) - val _ = assert_g_eq "G.fresh_copy_bbox (child)" g_exp g_actual - - - val g_orig = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_bbox "b1" ["v1","w1","v2"] - |> add_bbox "b1a" ["v1","w1","v2"] - |> add_bbox_with_parents "b2" ["b1","b1a"] ["v2","w1"] - - val ((vsub,esub,bsub),g_actual) = 
G.fresh_copy_bbox (B.mk "b1") g_orig - val get_vsub = V.dest o (VSub.get vsub) o V.mk - val get_esub = E.dest o (ESub.get esub) o E.mk - val get_bsub = B.dest o (BSub.get bsub) o B.mk - val g_exp = g_orig - |> add_vunit1 (get_vsub "v1") - |> add_vunit2 (get_vsub "v2") - |> add_wv (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e1") (get_vsub "v1") (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e2") (get_vsub "w1") (get_vsub "v2") - |> add_bbox (get_bsub "b1") (map get_vsub ["v1","w1","v2"]) - |> add_to_bbox "b1a" (map get_vsub ["v1","w1","v2"]) - |> add_bbox_with_parents (get_bsub "b2") [(get_bsub "b1"),"b1a"] (map get_vsub ["v2","w1"]) - val _ = assert_g_eq "G.fresh_copy_bbox (child has other parent)" g_exp g_actual - - - val g_orig = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_vunit2 "v3" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_dir_eunit2 "e3" "v2" "w2" - |> add_dir_eunit2 "e4" "w2" "v3" - |> add_bbox "b1" ["v1","w1","v2","w2","v3"] - |> add_bbox_with_parent "b2" "b1" ["w1","v2","w2","v3"] - |> add_bbox_with_parents "b3" ["b1","b2"] ["w2","v3"] - - val ((vsub,esub,bsub),g_actual) = G.fresh_copy_bbox (B.mk "b2") g_orig - val get_vsub = V.dest o (VSub.get vsub) o V.mk - val get_esub = E.dest o (ESub.get esub) o E.mk - val get_bsub = B.dest o (BSub.get bsub) o B.mk - val g_exp = g_orig - |> add_vunit2 (get_vsub "v2") - |> add_vunit2 (get_vsub "v3") - |> add_wv (get_vsub "w1") - |> add_wv (get_vsub "w2") - |> add_dir_eunit1 (get_esub "e1") "v1" (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e2") (get_vsub "w1") (get_vsub "v2") - |> add_dir_eunit2 (get_esub "e3") (get_vsub "v2") (get_vsub "w2") - |> add_dir_eunit2 (get_esub "e4") (get_vsub "w2") (get_vsub "v3") - |> add_to_bbox "b1" (map get_vsub ["w1","v2","w2","v3"]) - |> add_bbox_with_parent (get_bsub "b2") "b1" - (map get_vsub ["w1","v2","w2","v3"]) - |> add_bbox_with_parents (get_bsub "b3") ["b1",get_bsub "b2"] - (map get_vsub ["w2","v3"]) - val _ = assert_g_eq "G.fresh_copy_bbox (parent+child)" g_exp g_actual - - - val g_orig = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_bbox "b1" ["v1","w1"] - |> add_bbox "b2" ["v2","w1"] - - val ((vsub,esub,bsub),g_actual) = G.fresh_copy_bbox (B.mk "b1") g_orig - val get_vsub = V.dest o (VSub.get vsub) o V.mk - val get_esub = E.dest o (ESub.get esub) o E.mk - val get_bsub = B.dest o (BSub.get bsub) o B.mk - - val g_exp = g_orig - |> add_vunit1 (get_vsub "v1") - |> add_wv (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e1") (get_vsub "v1") (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e2") (get_vsub "w1") "v2" - |> add_bbox (get_bsub "b1") (map get_vsub ["v1","w1"]) - |> add_to_bbox "b2" (map get_vsub ["w1"]) - val _ = assert_g_eq "G.fresh_copy_bbox (overlap)" g_exp g_actual - in () end) () - - - - (* TODO: fresh_copy_bbox_anon *) - - - - val _ = Testing.test "G.fresh_expand_bbox" (fn () => let - val g_orig = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_bbox "b1" ["v1","w1","v2"] - |> add_bbox_with_parent "b2" "b1" ["v2","w1"] - - val ((vsub,esub,bsub),g_actual) = G.fresh_expand_bbox (B.mk "b1") g_orig - val get_vsub = V.dest o (VSub.get vsub) o V.mk - val get_esub = E.dest o (ESub.get esub) o E.mk - val get_bsub = B.dest o (BSub.get bsub) o B.mk - val g_exp = g_orig - |> add_vunit1 (get_vsub "v1") - |> add_vunit2 
(get_vsub "v2") - |> add_wv (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e1") (get_vsub "v1") (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e2") (get_vsub "w1") (get_vsub "v2") - |> add_bbox (get_bsub "b2") (map get_vsub ["v2","w1"]) - val _ = assert_g_eq "G.fresh_expand_bbox (parent)" g_exp g_actual - - val ((vsub,esub,bsub),g_actual) = G.fresh_expand_bbox (B.mk "b2") g_orig - val get_vsub = V.dest o (VSub.get vsub) o V.mk - val get_esub = E.dest o (ESub.get esub) o E.mk - val get_bsub = B.dest o (BSub.get bsub) o B.mk - val g_exp = g_orig - |> add_vunit2 (get_vsub "v2") - |> add_wv (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e1") "v1" (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e2") (get_vsub "w1") (get_vsub "v2") - |> add_to_bbox "b1" (map get_vsub ["v2","w1"]) - val _ = assert_g_eq "G.fresh_expand_bbox (child)" g_exp g_actual - - - val g_orig = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_bbox "b1" ["v1","w1","v2"] - |> add_bbox "b1a" ["v1","w1","v2"] - |> add_bbox_with_parents "b2" ["b1","b1a"] ["v2","w1"] - - val ((vsub,esub,bsub),g_actual) = G.fresh_expand_bbox (B.mk "b1") g_orig - val get_vsub = V.dest o (VSub.get vsub) o V.mk - val get_esub = E.dest o (ESub.get esub) o E.mk - val get_bsub = B.dest o (BSub.get bsub) o B.mk - val g_exp = g_orig - |> add_vunit1 (get_vsub "v1") - |> add_vunit2 (get_vsub "v2") - |> add_wv (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e1") (get_vsub "v1") (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e2") (get_vsub "w1") (get_vsub "v2") - |> add_to_bbox "b1a" (map get_vsub ["v1","w1","v2"]) - |> add_bbox_with_parent (get_bsub "b2") "b1a" (map get_vsub ["v2","w1"]) - val _ = assert_g_eq "G.fresh_expand_bbox (child has other parent)" g_exp g_actual - - - val g_orig = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_vunit2 "v3" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_dir_eunit2 "e3" "v2" "w2" - |> add_dir_eunit2 "e4" "w2" "v3" - |> add_bbox "b1" ["v1","w1","v2","w2","v3"] - |> add_bbox_with_parent "b2" "b1" ["w1","v2","w2","v3"] - |> add_bbox_with_parents "b3" ["b1","b2"] ["w2","v3"] - - val ((vsub,esub,bsub),g_actual) = G.fresh_expand_bbox (B.mk "b2") g_orig - val get_vsub = V.dest o (VSub.get vsub) o V.mk - val get_esub = E.dest o (ESub.get esub) o E.mk - val get_bsub = B.dest o (BSub.get bsub) o B.mk - val g_exp = g_orig - |> add_vunit2 (get_vsub "v2") - |> add_vunit2 (get_vsub "v3") - |> add_wv (get_vsub "w1") - |> add_wv (get_vsub "w2") - |> add_dir_eunit1 (get_esub "e1") "v1" (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e2") (get_vsub "w1") (get_vsub "v2") - |> add_dir_eunit2 (get_esub "e3") (get_vsub "v2") (get_vsub "w2") - |> add_dir_eunit2 (get_esub "e4") (get_vsub "w2") (get_vsub "v3") - |> add_to_bbox "b1" (map get_vsub ["w1","v2","w2","v3"]) - |> add_bbox_with_parent (get_bsub "b3") "b1" (map get_vsub ["w2","v3"]) - val _ = assert_g_eq "G.fresh_expand_bbox (parent+child)" g_exp g_actual - - - val g_orig = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_bbox "b1" ["v1","w1"] - |> add_bbox "b2" ["v2","w1"] - - val ((vsub,esub,bsub),g_actual) = G.fresh_expand_bbox (B.mk "b1") g_orig - val get_vsub = V.dest o (VSub.get vsub) o V.mk - val get_esub = E.dest o (ESub.get esub) o E.mk - val get_bsub = B.dest o (BSub.get bsub) o B.mk - - val g_exp = g_orig - |> 
add_vunit1 (get_vsub "v1") - |> add_wv (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e1") (get_vsub "v1") (get_vsub "w1") - |> add_dir_eunit1 (get_esub "e2") (get_vsub "w1") "v2" - |> add_to_bbox "b2" (map get_vsub ["w1"]) - val _ = assert_g_eq "G.fresh_expand_bbox (overlap)" g_exp g_actual - in () end) () - - - - val _ = Testing.test "G.expand_bbox_op" (fn () => let - (* TODO: val expand_bbox_op : B.name -> T -> bbox_op * T *) - in () end) () - - - - val _ = Testing.test "G.kill_bbox_op" (fn () => let - (* TODO: val kill_bbox_op : B.name -> T -> bbox_op * T *) - in () end) () - - - - val _ = Testing.test "G.copy_bbox_op" (fn () => let - (* TODO: val copy_bbox_op : B.name -> T -> bbox_op * T *) - in () end) () - - - - val _ = Testing.test "G.replay_bbox_op" (fn () => let - (* TODO: val replay_bbox_op : bbox_op -> T -> T *) - in () end) () - - - - val _ = Testing.test "G.replay_bbox_ops" (fn () => let - (* TODO: val replay_bbox_ops : bbox_op list -> T -> T *) - in () end) () - - - - local - val g = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - |> add_undir_eunit2 "e3" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - |> add_bbox "b1" ["n1","w1","w2"] - |> add_bbox_with_parent "b2" "b1" ["n1","w1","w2"] - |> add_bbox_with_parent "b3" "b1" ["w1"] - in - val _ = Testing.test "G.rename_bang_graph/G.rename_bang_graph_anon" (fn () => let - val vsub_swap1 = VSub.empty - |> VSub.add (V.mk "n1",V.mk "n2") - |> VSub.add (V.mk "n2",V.mk "n1") - |> VSub.add (V.mk "n7",V.mk "n8") - val esub_swap = ESub.empty - |> ESub.add (E.mk "e1",E.mk "e2") - |> ESub.add (E.mk "e2",E.mk "e1") - |> ESub.add (E.mk "e7",E.mk "e8") - val vsub_swap2 = VSub.empty - |> VSub.add (V.mk "w1",V.mk "w2") - |> VSub.add (V.mk "w2",V.mk "w1") - val bsub_clash = BSub.empty - |> BSub.add (B.mk "b1",B.mk "b2") - val _ = G.rename_bang_graph (vsub_swap1,esub_swap,bsub_clash) g - handle VSub.name_clash_exp _ => raise ERROR "clashing names should no longer raise exception" - | ESub.name_clash_exp _ => raise ERROR "clashing names should no longer raise exception" - | BSub.name_clash_exp _ => raise ERROR "clashing names should no longer raise exception" - val _ = G.rename_bang_graph_anon (vsub_swap1,esub_swap,bsub_clash) g - handle VSub.name_clash_exp _ => raise ERROR "clashing names should no longer raise exception" - | ESub.name_clash_exp _ => raise ERROR "clashing names should no longer raise exception" - | BSub.name_clash_exp _ => raise ERROR "clashing names should no longer raise exception" - - val bsub_swap = BSub.empty - |> BSub.add (B.mk "b1",B.mk "b2") - |> BSub.add (B.mk "b2",B.mk "b1") - val g_exp = G.empty - |> add_vunit1 "n2" - |> add_vunit1 "n1" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e2" "n2" "w1" - |> add_dir_eunit1 "e1" "w1" "n1" - |> add_undir_eunit2 "e3" "n2" "w2" - |> add_undir_eunit2 "e4" "w2" "n1" - |> add_bbox "b2" ["n2","w1","w2"] - |> add_bbox_with_parent "b1" "b2" ["n2","w1","w2"] - |> add_bbox_with_parent "b3" "b2" ["w1"] - val ((vsub',esub',bsub'),g') = - G.rename_bang_graph (vsub_swap1,esub_swap,bsub_swap) g - val _ = assert_g_eq "rename swap" g_exp g' - val _ = Testing.assert "w1 in sub" - (V.dest (VSub.get vsub' (V.mk "w1")) = "w1") - val _ = Testing.assert "e4 in sub" - (E.dest (ESub.get esub' (E.mk "e4")) = "e4") - val _ = Testing.assert "b1 in sub" - (B.dest (BSub.get bsub' (B.mk "b1")) = "b2") - val _ = Testing.assert "b3 in sub" - (B.dest (BSub.get bsub' (B.mk "b3")) = "b3") - val _ 
= assert_g_eq "rename swap (anon)" g_exp - (G.rename_bang_graph_anon (vsub_swap1,esub_swap,bsub_swap) g) - - val bsub_permute = BSub.empty - |> BSub.add (B.mk "b1",B.mk "b2") - |> BSub.add (B.mk "b2",B.mk "b3") - |> BSub.add (B.mk "b3",B.mk "b1") - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e2" "n1" "w2" - |> add_dir_eunit1 "e1" "w2" "n2" - |> add_undir_eunit2 "e3" "n1" "w1" - |> add_undir_eunit2 "e4" "w1" "n2" - |> add_bbox "b2" ["n1","w1","w2"] - |> add_bbox_with_parent "b3" "b2" ["n1","w1","w2"] - |> add_bbox_with_parent "b1" "b2" ["w2"] - val ((vsub',esub',bsub'),g') = - G.rename_bang_graph (vsub_swap2,esub_swap,bsub_permute) g - val _ = assert_g_eq "rename permute" g_exp g' - val _ = Testing.assert "n1 in sub" - (V.dest (VSub.get vsub' (V.mk "n1")) = "n1") - val _ = Testing.assert "e4 in sub" - (E.dest (ESub.get esub' (E.mk "e4")) = "e4") - val _ = Testing.assert "b1 in sub" - (B.dest (BSub.get bsub' (B.mk "b1")) = "b2") - val _ = assert_g_eq "rename permute (anon)" g_exp - (G.rename_bang_graph_anon (vsub_swap2,esub_swap,bsub_permute) g) - in () end) () - - - val _ = Testing.test "G.rename_bboxes" (fn () => let - val bsub_clash = BSub.empty - |> BSub.add (B.mk "b1",B.mk "b2") - val _ = (G.rename_bboxes bsub_clash g; - raise ERROR "clashing names should raise exception") - handle BSub.name_clash_exp _ => () - - val bsub_swap = BSub.empty - |> BSub.add (B.mk "b1",B.mk "b2") - |> BSub.add (B.mk "b2",B.mk "b1") - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - |> add_undir_eunit2 "e3" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - |> add_bbox "b2" ["n1","w1","w2"] - |> add_bbox_with_parent "b1" "b2" ["n1","w1","w2"] - |> add_bbox_with_parent "b3" "b2" ["w1"] - val _ = assert_g_eq "rename swap" g_exp - (G.rename_bboxes bsub_swap g) - - val bsub_permute = BSub.empty - |> BSub.add (B.mk "b1",B.mk "b2") - |> BSub.add (B.mk "b2",B.mk "b3") - |> BSub.add (B.mk "b3",B.mk "b1") - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - |> add_undir_eunit2 "e3" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - |> add_bbox "b2" ["n1","w1","w2"] - |> add_bbox_with_parent "b3" "b2" ["n1","w1","w2"] - |> add_bbox_with_parent "b1" "b2" ["w1"] - val _ = assert_g_eq "rename permute" g_exp - (G.rename_bboxes bsub_permute g) - in () end) () - - - val _ = Testing.test "G.rename_bbox/G.rename_bbox_opt" (fn () => let - val _ = assert_g_eq "rename empty" G.empty - (G.rename_bbox (B.mk "a") (B.mk "b") G.empty) - val _ = assert_g_eq "rename absent bbox" g - (G.rename_bbox (B.mk "a") (B.mk "b") g) - val _ = case G.rename_bbox_opt (B.mk "a") (B.mk "b") G.empty - of SOME g' => assert_g_eq "rename empty" G.empty g' - | NONE => raise ERROR "rename failed in empty graph" - val _ = case G.rename_bbox_opt (B.mk "a") (B.mk "b") g - of SOME g' => assert_g_eq "rename absent" g g' - | NONE => raise ERROR "rename of absent bbox failed" - - val _ = (G.rename_bbox (B.mk "b1") (B.mk "b2") g; - raise ERROR "clashing names should raise exception (b1->b2)") - handle G.duplicate_bbox_exp (b',g') => - (Testing.assert "correct b (b1->b2)" (B.dest b' = "b2"); - Testing.assert "correct g (b1->b2)" (G.exact_eq g' g)) - val _ = Testing.assert "clashing names (b1->b2)" - (is_none (G.rename_bbox_opt (B.mk "b1") (B.mk "b2") g)) - - val 
g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - |> add_undir_eunit2 "e3" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - |> add_bbox "b4" ["n1","w1","w2"] - |> add_bbox_with_parent "b2" "b4" ["n1","w1","w2"] - |> add_bbox_with_parent "b3" "b4" ["w1"] - val _ = assert_g_eq "rename b1->b4" g_exp - (G.rename_bbox (B.mk "b1") (B.mk "b4") g) - val _ = case G.rename_bbox_opt (B.mk "b1") (B.mk "b4") g - of SOME g' => assert_g_eq "rename b1->b4" g_exp g' - | NONE => raise ERROR "rename b1->b4 returned NONE" - - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - |> add_undir_eunit2 "e3" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - |> add_bbox "b1" ["n1","w1","w2"] - |> add_bbox_with_parent "b2" "b1" ["n1","w1","w2"] - |> add_bbox_with_parent "b4" "b1" ["w1"] - val _ = assert_g_eq "rename b3->b4" g_exp - (G.rename_bbox (B.mk "b3") (B.mk "b4") g) - val _ = case G.rename_bbox_opt (B.mk "b3") (B.mk "b4") g - of SOME g' => assert_g_eq "rename b3->b4" g_exp g' - | NONE => raise ERROR "rename b3->b4 returned NONE" - in () end) () - end - - - - (* TODO: add_edge_anon *) - val _ = Testing.test "G.add_edge/G.add_named_edge (with !-boxes)" (fn () => let - val g_clean = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_undir_eunit1 "e1" "n1" "w1" - |> add_undir_eunit1 "e2" "w2" "n2" - val g = g_clean - |> add_bbox "b1" ["n1","w1"] - |> add_bbox_with_parent "b2" "b1" ["w1"] - |> add_bbox "b3" ["n2","w2"] - |> add_bbox "b4" ["w2"] - |> add_bbox "b5" ["w1","w2"] - |> add_bbox "b6" ["n1","w1","w2","n2"] - - val (e,g') = G.add_edge (Undirected,eunit1) (V.mk "w1") (V.mk "w2") g - val g_exp = g_clean - |> add_undir_eunit1 (E.dest e) "w1" "w2" - |> add_bbox "b1" ["n1","w1","w2"] - |> add_bbox_with_parent "b2" "b1" ["w1","w2"] - |> add_bbox "b3" ["n2","w1","w2"] - |> add_bbox "b4" ["w1","w2"] - |> add_bbox "b5" ["w1","w2"] - |> add_bbox "b6" ["n1","w1","w2","n2"] - val _ = assert_g_eq "expanded bboxes (add_edge)" g_exp g' - val _ = assert_g_eq "expanded bboxes (add_named_edge)" g_exp - (G.add_named_edge e (Undirected,eunit1) (V.mk "w1") (V.mk "w2") g) - - val g_clean = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_undir_eunit1 "e1" "n1" "w1" - val g = g_clean - |> add_bbox "b1" ["n1","w1"] - |> add_bbox_with_parent "b2" "b1" ["w1"] - |> add_bbox "b3" ["n2","w1"] - |> add_bbox "b4" ["n2"] - - val (e,g') = G.add_edge (Undirected,eunit1) (V.mk "w1") (V.mk "n2") g - val g_exp = g_clean - |> add_undir_eunit1 (E.dest e) "w1" "n2" - |> add_bbox "b1" ["n1","w1"] - |> add_bbox_with_parent "b2" "b1" ["w1"] - |> add_bbox "b3" ["n2","w1"] - |> add_bbox "b4" ["n2","w1"] - val _ = assert_g_eq "expanded bboxes (add_edge 2)" g_exp g' - val _ = assert_g_eq "expanded bboxes (add_named_edge 2)" g_exp - (G.add_named_edge e (Undirected,eunit1) (V.mk "w1") (V.mk "n2") g) - in () end) () - - - - val _ = Testing.test "G.exact_eq (with !-boxes)" (fn () => let - val g_clean = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_undir_eunit1 "e1" "n1" "w1" - |> add_undir_eunit1 "e2" "w2" "n2" - val g1 = g_clean - |> add_bbox "b1" ["n1","w1"] - |> add_bbox "b2" ["n2","w2"] - val _ = Testing.assert "g1 = g1" (G.exact_eq g1 g1) - val g2 = g_clean - |> add_bbox "b1" ["n1","w1"] - |> add_bbox "b3" 
["n2","w2"] - val _ = Testing.assert "g1 <> g2" (not (G.exact_eq g1 g2)) - val g3 = g_clean - |> add_bbox "b1" ["n1","w1","w2"] - |> add_bbox "b2" ["n2","w2"] - val _ = Testing.assert "g1 <> g3" (not (G.exact_eq g1 g3)) - val g4 = g_clean - |> add_bbox "b1" ["n1","w1"] - |> add_bbox "b2" ["w1"] - val g5 = g_clean - |> add_bbox "b1" ["n1","w1"] - |> add_bbox_with_parent "b2" "b1" ["w1"] - val _ = Testing.assert "g4 = g5" (G.exact_eq g5 g5) - val _ = Testing.assert "g4 <> g5" (not (G.exact_eq g4 g5)) - in () end) () - - - - val _ = Testing.test "G.is_subgraph (with !-boxes)" (fn () => let - val g_clean = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_undir_eunit1 "e1" "n1" "w1" - |> add_undir_eunit1 "e2" "w2" "n2" - val g1 = g_clean - |> add_bbox "b1" ["n1","w1"] - |> add_bbox "b2" ["n2","w2"] - val _ = Testing.assert "g1 (= g1" (G.is_subgraph g1 g1) - val g2 = g_clean - |> add_bbox "b1" ["n1","w1"] - |> add_bbox "b3" ["n2","w2"] - val _ = Testing.assert "g1 (/= g2" (not (G.is_subgraph g1 g2)) - val _ = Testing.assert "g2 (/= g1" (not (G.is_subgraph g2 g1)) - val g3 = g_clean - |> add_bbox "b1" ["n1","w1","w2"] - |> add_bbox "b2" ["n2","w2"] - val _ = Testing.assert "g1 (= g3" (G.is_subgraph g1 g3) - val _ = Testing.assert "g3 (/= g1" (not (G.is_subgraph g3 g1)) - val g4 = g_clean - |> add_bbox "b1" ["n1","w1"] - val _ = Testing.assert "g4 (= g1" (G.is_subgraph g4 g1) - val _ = Testing.assert "g1 (/= g4" (not (G.is_subgraph g1 g4)) - val g4 = g_clean - |> add_bbox "b1" ["n1","w1"] - |> add_bbox "b2" ["w1"] - |> add_bbox "b3" ["n2","w2"] - val g5 = g_clean - |> add_bbox "b1" ["n1","w1"] - |> add_bbox_with_parent "b2" "b1" ["w1"] - |> add_bbox "b3" ["n2","w2"] - val _ = Testing.assert "g5 (= g5" (G.is_subgraph g5 g5) - val _ = Testing.assert "g4 (/= g5" (not (G.is_subgraph g4 g5)) - val _ = Testing.assert "g5 (/= g4" (not (G.is_subgraph g5 g4)) - in () end) () - - - - val _ = Testing.test "G.merge (with !-boxes)" (fn () => let - val g1_clean = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c" "x" - |> add_wv "d" - |> add_wv "e" - |> add_wv "f" - |> add_dir_eunit1 "e" "a" "d" - |> add_dir_eunit1 "f" "d" "b" - |> add_undir_eunit2 "g" "b" "e" - |> add_undir_eunit1 "h" "b" "f" - |> add_undir_eunit1 "i" "c" "f" - val g2_clean = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c_" "x" - |> add_wv "d" - |> add_wv "e_" - |> add_wv "f_" - |> add_dir_eunit1 "e" "a" "d" - |> add_dir_eunit1 "f" "d" "b" - |> add_undir_eunit2 "g_" "b" "e_" - |> add_undir_eunit1 "h_" "b" "f_" - |> add_undir_eunit1 "i_" "c_" "f_" - val g_exp_clean = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c" "x" - |> add_vexpr1 "c_" "x" - |> add_wv "d" - |> add_wv "e" - |> add_wv "e_" - |> add_wv "f" - |> add_wv "f_" - |> add_dir_eunit1 "e" "a" "d" - |> add_dir_eunit1 "f" "d" "b" - |> add_undir_eunit2 "g" "b" "e" - |> add_undir_eunit2 "g_" "b" "e_" - |> add_undir_eunit1 "h" "b" "f" - |> add_undir_eunit1 "h_" "b" "f_" - |> add_undir_eunit1 "i" "c" "f" - |> add_undir_eunit1 "i_" "c_" "f_" - - val g1 = g1_clean - |> add_bbox "b1" ["e","f"] - val g2 = g2_clean - |> add_bbox "b2" ["e_","f_"] - val g_exp = g_exp_clean - |> add_bbox "b1" ["e","f"] - |> add_bbox "b2" ["e_","f_"] - val _ = assert_g_eq "G.merge disjoint" g_exp (G.merge g1 g2) - - val g1 = g1_clean - |> add_bbox "b1" ["a","d"] - val g2 = g2_clean - |> add_bbox "b2" ["a","d"] - val _ = (G.merge g1 g2; - raise ERROR "merge (diff names) should have failed") - handle 
G.bad_graph_merge_exp _ => () - - val g1 = g1_clean - |> add_bbox "b1" ["a","d"] - val g2 = g2_clean - |> add_bbox "b1" ["a","d"] - val g_exp = g_exp_clean - |> add_bbox "b1" ["a","d"] - val _ = assert_g_eq "G.merge same name" g_exp (G.merge g1 g2) - - val g1 = g1_clean - |> add_bbox "b1" ["d"] - val g2 = g2_clean - |> add_bbox "b1" ["d"] - val g_exp = g_exp_clean - |> add_bbox "b1" ["d"] - val _ = assert_g_eq "G.merge same name (2)" g_exp (G.merge g1 g2) - - val g1 = g1_clean - |> add_bbox "b1" ["a","d","b","e","f"] - val g2 = g2_clean - |> add_bbox "b1" ["a","d","b","e_","f_"] - val g_exp = g_exp_clean - |> add_bbox "b1" ["a","d","b","e","f","e_","f_"] - val _ = assert_g_eq "G.merge same name, overlapping" g_exp (G.merge g1 g2) - in () end) () - - - val _ = Testing.test "G.split_edge (with !-boxes)" (fn () => let - val g = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "w2" - |> add_dir_eunit1 "e3" "w2" "n2" - |> add_bbox "b1" ["n1","w1"] - |> add_bbox_with_parent "b2" "b1" ["w1"] - |> add_bbox "b3" ["n2","w2"] - |> add_bbox "b4" ["w2"] - |> add_bbox "b5" ["w1","w2"] - |> add_bbox "b6" ["n1","w1","w2","n2"] - val ((e1,v,e2),g') = G.split_edge (E.mk "e2") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv (V.dest v) - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 (E.dest e1) "w1" (V.dest v) - |> add_dir_eunit1 (E.dest e2) (V.dest v) "w2" - |> add_dir_eunit1 "e3" "w2" "n2" - |> add_bbox "b1" ["n1","w1",V.dest v,"w2"] - |> add_bbox_with_parent "b2" "b1" ["w1",V.dest v,"w2"] - |> add_bbox "b3" ["n2","w1",V.dest v,"w2"] - |> add_bbox "b4" ["w1",V.dest v,"w2"] - |> add_bbox "b5" ["w1",V.dest v,"w2"] - |> add_bbox "b6" ["n1","w1",V.dest v,"w2","n2"] - val _ = assert_g_eq "split" g_exp g' - in () end) () - - - val _ = Testing.test "G.normalise (with !-boxes)" (fn () => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_dir_eunit1 "e1" "v1" "v2" - |> add_bbox "b1" ["v1","v2"] - val g' = G.normalise g - val _ = Testing.assert "normalise 1: correct no. verts" - (V.NSet.cardinality (G.get_vertices g') = 4) - val _ = Testing.assert "normalise 1: b1 correct size" - (V.NSet.cardinality (G.get_vertices_in_bbox g' (B.mk "b1")) = 4) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_dir_eunit1 "e1" "v1" "v2" - |> add_bbox "b1" ["v1"] - val g' = G.normalise g - val _ = Testing.assert "normalise 2: correct no. verts" - (V.NSet.cardinality (G.get_vertices g') = 4) - val _ = Testing.assert "normalise 2: b1 correct size" - (V.NSet.cardinality (G.get_vertices_in_bbox g' (B.mk "b1")) = 3) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_bbox "b1" ["v1","w1","v2"] - val g' = G.normalise g - val _ = Testing.assert "normalise 3: correct no. verts" - (V.NSet.cardinality (G.get_vertices g') = 4) - val _ = Testing.assert "normalise 3: b1 correct size" - (V.NSet.cardinality (G.get_vertices_in_bbox g' (B.mk "b1")) = 4) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_bbox "b1" ["w1","v2"] - val g' = G.normalise g - val _ = Testing.assert "normalise 4: correct no. 
verts" - (V.NSet.cardinality (G.get_vertices g') = 4) - val _ = Testing.assert "normalise 4: b1 correct size" - (V.NSet.cardinality (G.get_vertices_in_bbox g' (B.mk "b1")) = 3) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_bbox "b1" ["w1"] - val g' = G.normalise g - val _ = Testing.assert "normalise 5: correct no. verts" - (V.NSet.cardinality (G.get_vertices g') = 4) - val _ = Testing.assert "normalise 5: b1 correct size" - (V.NSet.cardinality (G.get_vertices_in_bbox g' (B.mk "b1")) = 2) - - val g = G.empty - |> add_vunit1 "v1" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "w2" - |> add_bbox "b1" ["v1","w1","w2"] - val g' = G.normalise g - val _ = Testing.assert "normalise 6: correct no. verts" - (V.NSet.cardinality (G.get_vertices g') = 2) - val _ = Testing.assert "normalise 6: b1 correct size" - (V.NSet.cardinality (G.get_vertices_in_bbox g' (B.mk "b1")) = 2) - - val g = G.empty - |> add_vunit1 "v1" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "w2" - |> add_bbox "b1" ["w1","w2"] - val g' = G.normalise g - val _ = Testing.assert "normalise 7: correct no. verts" - (V.NSet.cardinality (G.get_vertices g') = 2) - val _ = Testing.assert "normalise 7: b1 correct size" - (V.NSet.cardinality (G.get_vertices_in_bbox g' (B.mk "b1")) = 1) - in () end) () - - - val _ = Testing.test "G.minimise (with !-boxes)" (fn () => let - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_dir_eunit1 "e1" "v1" "v2" - |> add_bbox "b1" ["v1","v2"] - val g' = G.minimise g - val _ = Testing.assert "minimise 1: correct no. verts" - (V.NSet.cardinality (G.get_vertices g') = 2) - val _ = Testing.assert "minimise 1: b1 correct size" - (V.NSet.cardinality (G.get_vertices_in_bbox g' (B.mk "b1")) = 2) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_bbox "b1" ["v1","w1","v2"] - val g' = G.minimise g - val _ = Testing.assert "minimise 1: correct no. verts" - (V.NSet.cardinality (G.get_vertices g') = 2) - val _ = Testing.assert "minimise 1: b1 correct size" - (V.NSet.cardinality (G.get_vertices_in_bbox g' (B.mk "b1")) = 2) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_bbox "b1" ["v1","w1"] - val g' = G.minimise g - val _ = Testing.assert "minimise 1: correct no. verts" - (V.NSet.cardinality (G.get_vertices g') = 2) - val _ = Testing.assert "minimise 1: b1 correct size" - (V.NSet.cardinality (G.get_vertices_in_bbox g' (B.mk "b1")) = 1) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_bbox "b1" ["w1"] - val g' = G.minimise g - val _ = Testing.assert "minimise 1: correct no. verts" - (V.NSet.cardinality (G.get_vertices g') = 3) - val _ = Testing.assert "minimise 1: b1 correct size" - (V.NSet.cardinality (G.get_vertices_in_bbox g' (B.mk "b1")) = 1) - - val g = G.empty - |> add_vunit1 "v1" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "w2" - |> add_bbox "b1" ["w1","w2"] - val g' = G.minimise g - val _ = Testing.assert "minimise 1: correct no. 
verts" - (V.NSet.cardinality (G.get_vertices g') = 2) - val _ = Testing.assert "minimise 1: b1 correct size" - (V.NSet.cardinality (G.get_vertices_in_bbox g' (B.mk "b1")) = 1) - in () end) () - (* TODO: tests for !-box openness preservation: - * merge_by_vertices - * plug(_anon) - * plug_and_normalise(_anon) - * plug_and_minimise(_anon) - * - * TODO: renamings don't muck up !-box containment relation: - * rename_ograph(_anon) - * rename_vertices - * rename_vertex - * rename_vertex_opt - * - * TODO: functions account for !-boxes: - * get_plugging - * are_pluggable - * get_open_subgraph (?) - *) - - val _ = Testing.assert_no_failed_tests() -end; - -local - structure BG_Tests = Bang_Graph_Interface_Tests(Test_Bang_Graph); -in val _ = () end; - diff --git a/core/graph/test/test-graph-setup.ML b/core/graph/test/test-graph-setup.ML deleted file mode 100644 index 19e1ae02..00000000 --- a/core/graph/test/test-graph-setup.ML +++ /dev/null @@ -1,559 +0,0 @@ -(* Graph structures for testing *) - -structure Test_Graph_Data = -struct - val pretty_theory_name = Pretty.str "test_graph" - - datatype nvdata = VUnit1 - | VUnit2 - | VExpr1 of LinratAngleExpr.T - | VExpr2 of LinratAngleExpr.T - | VString1 of string - | VString2 of string - datatype edata = EUnit1 - | EUnit2 - | EExpr1 of LinratAngleExpr.T - | EExpr2 of LinratAngleExpr.T - | EString1 of string - | EString2 of string - - (* node-vertex data *) - fun default_nvdata_of_typestring s = - case s of "unit1" => VUnit1 - | "unit2" => VUnit2 - | "expr1" => VExpr1 (LinratAngleExpr.zero) - | "expr2" => VExpr2 (LinratAngleExpr.zero) - | "string1" => VString1 "" - | "string2" => VString2 "" - | _ => raise unknown_typestring_exp s - val default_nvdata = VUnit1 - - fun nvdata_eq (VUnit1, VUnit1) = true - | nvdata_eq (VUnit2, VUnit2) = true - | nvdata_eq (VExpr1 a, VExpr1 b) = LinratAngleExpr.eq a b - | nvdata_eq (VExpr2 a, VExpr2 b) = LinratAngleExpr.eq a b - | nvdata_eq (VString1 a, VString1 b) = (a = b) - | nvdata_eq (VString2 a, VString2 b) = (a = b) - | nvdata_eq _ = false - - fun pretty_nvdata VUnit1 = Pretty.str "VUnit1" - | pretty_nvdata VUnit2 = Pretty.str "VUnit2" - | pretty_nvdata (VExpr1 a) = - Pretty.block [Pretty.str "VExpr1(", LinratAngleExpr.pretty a, Pretty.str ")"] - | pretty_nvdata (VExpr2 a) = - Pretty.block [Pretty.str "VExpr2(", LinratAngleExpr.pretty a, Pretty.str ")"] - | pretty_nvdata (VString1 s) = - Pretty.block [Pretty.str "VString1(", Pretty.str s, Pretty.str ")"] - | pretty_nvdata (VString2 s) = - Pretty.block [Pretty.str "VString2(", Pretty.str s, Pretty.str ")"] - - (* edge data *) - fun default_edata_of_typestring s = - case s of "unit1" => EUnit1 - | "unit2" => EUnit2 - | "expr1" => EExpr1 (LinratAngleExpr.zero) - | "expr2" => EExpr2 (LinratAngleExpr.zero) - | "string1" => EString1 "" - | "string2" => EString2 "" - | _ => raise unknown_typestring_exp s - val default_edata = EUnit1 - - fun edata_eq (EUnit1, EUnit1) = true - | edata_eq (EUnit2, EUnit2) = true - | edata_eq (EExpr1 a, EExpr1 b) = LinratAngleExpr.eq a b - | edata_eq (EExpr2 a, EExpr2 b) = LinratAngleExpr.eq a b - | edata_eq (EString1 a, EString1 b) = (a = b) - | edata_eq (EString2 a, EString2 b) = (a = b) - | edata_eq _ = false - - fun pretty_edata EUnit1 = Pretty.str "EUnit1" - | pretty_edata EUnit2 = Pretty.str "EUnit2" - | pretty_edata (EExpr1 a) = - Pretty.block [Pretty.str "EExpr1(", LinratAngleExpr.pretty a, Pretty.str ")"] - | pretty_edata (EExpr2 a) = - Pretty.block [Pretty.str "EExpr2(", LinratAngleExpr.pretty a, Pretty.str ")"] - | pretty_edata (EString1 
s) = - Pretty.block [Pretty.str "EString1(", Pretty.str s, Pretty.str ")"] - | pretty_edata (EString2 s) = - Pretty.block [Pretty.str "EString2(", Pretty.str s, Pretty.str ")"] - - (* matching and substitution *) - type psubst = LinratAngleMatcher.psubst - type subst = LinratAngleMatcher.subst - - local - fun pull_names (nvtab,etab) = X.NSet.empty - |> VTab.fold ( - fn (_,VExpr1 a) => X.NSet.union_merge (LinratAngleExpr.free_vars a) - | (_,VExpr2 a) => X.NSet.union_merge (LinratAngleExpr.free_vars a) - | _ => I - ) nvtab - |> ETab.fold ( - fn (_,EExpr1 a) => X.NSet.union_merge (LinratAngleExpr.free_vars a) - | (_,EExpr2 a) => X.NSet.union_merge (LinratAngleExpr.free_vars a) - | _ => I - ) etab - in - fun init_psubst_from_data p_tabs t_tabs = - LinratAngleMatcher.init_psubst_from_names (pull_names p_tabs, pull_names t_tabs) - end - - fun solve_psubst ps = Seq.single (LinratAngleMatcher.solve_psubst ps) - - fun match_nvdata ((VExpr1 a1),(VExpr1 a2)) m = - LinratAngleMatcher.match (a1, a2) m - | match_nvdata ((VExpr2 a1),(VExpr2 a2)) m = - LinratAngleMatcher.match (a1, a2) m - | match_nvdata (v1,v2) m = - if nvdata_eq (v1, v2) then SOME m else NONE - - fun subst_in_nvdata sub (VExpr1 a) = - let val (sub',a') = LinratAngleMatcher.subst_in_expr sub a - in (sub', VExpr1 a') end - | subst_in_nvdata sub (VExpr2 a) = - let val (sub',a') = LinratAngleMatcher.subst_in_expr sub a - in (sub', VExpr2 a') end - | subst_in_nvdata sub vd = (sub,vd) - - fun match_edata ((EExpr1 a1),(EExpr1 a2)) m = - LinratAngleMatcher.match (a1, a2) m - | match_edata ((EExpr2 a1),(EExpr2 a2)) m = - LinratAngleMatcher.match (a1, a2) m - | match_edata (e1,e2) m = - if edata_eq (e1,e2) then SOME m else NONE - - fun subst_in_edata sub (EExpr1 a) = - let val (sub',a') = LinratAngleMatcher.subst_in_expr sub a - in (sub', EExpr1 a') end - | subst_in_edata sub (EExpr2 a) = - let val (sub',a') = LinratAngleMatcher.subst_in_expr sub a - in (sub', EExpr2 a') end - | subst_in_edata sub ed = (sub,ed) - - val pretty_subst = LinratAngleMatcher.pretty_subst - val print_subst = Pretty.writeln o pretty_subst -end - -(* - * These signatures define ographs/bang-graphs whose Data sub-structure - * is the one given above. This allows alternative implementations of - * graphs to be tested with the same set of test functors. 
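For example, a hypothetical alternative implementation could be run through the same suite along these lines (an illustrative sketch only, not part of the original sources; Alt_Bang_Graph is an assumed name for any structure ascribing to TEST_BANG_GRAPH, and the BangGraph functor application below merely stands in for a real alternative implementation):

    (* sketch: plug an alternative graph structure into the shared interface tests *)
    structure Alt_Bang_Graph : TEST_BANG_GRAPH =
      BangGraph(structure Data = Test_Graph_Data)
    structure Alt_Bang_Graph_Tests =
      Bang_Graph_Interface_Tests(Alt_Bang_Graph)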
- *) - -signature TEST_OGRAPH = OGRAPH - where type nvdata = Test_Graph_Data.nvdata - and type edata = Test_Graph_Data.edata - and type psubst = Test_Graph_Data.psubst - and type subst = Test_Graph_Data.subst - -signature TEST_BANG_GRAPH = BANG_GRAPH - where type nvdata = Test_Graph_Data.nvdata - and type edata = Test_Graph_Data.edata - and type psubst = Test_Graph_Data.psubst - and type subst = Test_Graph_Data.subst - -structure Test_Bang_Graph = BangGraph(structure Data = Test_Graph_Data) -structure Test_OGraph = Test_Bang_Graph : OGRAPH - - - -structure Test_Bang_Graph_IO = -struct - structure ComponentDataIO (* : GRAPH_COMPONENT_DATA_IO *) - = struct - type vdata = Test_Graph_Data.nvdata - type edata = Test_Graph_Data.edata - - structure IVDataInputJSON : INPUT_JSON = - struct - open JsonInputUtils - open Test_Graph_Data - structure L = InputLinratJSON - type data = vdata - val get_angle = L.input oo (get_easy Json.Null) - val to_lower = String.implode o (map Char.toLower) o String.explode - fun input (Json.String t) = - (case to_lower t - of "vunit1" => VUnit1 - | "vunit2" => VUnit2 - | "vexpr1" => VExpr1 LinratAngleExpr.zero - | "vexpr2" => VExpr2 LinratAngleExpr.zero - | "vstring1" => VString1 "" - | "vstring2" => VString2 "" - | _ => raise bad_input_exp ("Unknown vertex type "^t,"")) - | input (Json.Object obj) = - (case to_lower (get_string obj "type") - of "vunit1" => VUnit1 - | "vunit2" => VUnit2 - | "vexpr1" => VExpr1 (get_angle obj "angle") - | "vexpr2" => VExpr2 (get_angle obj "angle") - | "vstring1" => VString1 (get_string_easy "" obj "data") - | "vstring2" => VString2 (get_string_easy "" obj "data") - | t => raise bad_input_exp ("Unknown vertex type "^t,"type")) - | input _ = raise bad_input_exp ("Expected object","") - end - structure IVDataOutputJSON : OUTPUT_JSON = - struct - open JsonOutputUtils - open Test_Graph_Data - structure L = OutputLinratJSON - type data = vdata - fun output VUnit1 = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "VUnit1") - ) - | output VUnit2 = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "VUnit2") - ) - | output (VExpr1 a) = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "VExpr1") - |> update ("angle",L.output a) - ) - | output (VExpr2 a) = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "VExpr2") - |> update ("angle",L.output a) - ) - | output (VString1 s) = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "VString1") - |> update ("data",Json.String s) - ) - | output (VString2 s) = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "VString2") - |> update ("data",Json.String s) - ) - end - structure EDataInputJSON : INPUT_JSON = - struct - open JsonInputUtils - open Test_Graph_Data - structure L = InputLinratJSON - type data = edata - val get_angle = L.input oo (get_easy Json.Null) - val to_lower = String.implode o (map Char.toLower) o String.explode - fun input (Json.String t) = - (case to_lower t - of "eunit1" => EUnit1 - | "eunit2" => EUnit2 - | "eexpr1" => EExpr1 LinratAngleExpr.zero - | "eexpr2" => EExpr2 LinratAngleExpr.zero - | "estring1" => EString1 "" - | "estring2" => EString2 "" - | _ => raise bad_input_exp ("Unknown vertex type "^t,"")) - | input (Json.Object obj) = - (case to_lower (get_string obj "type") - of "eunit1" => EUnit1 - | "eunit2" => EUnit2 - | "eexpr1" => EExpr1 (get_angle obj "angle") - | "eexpr2" => EExpr2 (get_angle obj "angle") - | "estring1" => EString1 (get_string_easy "" obj "data") - | "estring2" => EString2 
(get_string_easy "" obj "data") - | t => raise bad_input_exp ("Unknown vertex type "^t,"type")) - | input _ = raise bad_input_exp ("Expected object","") - end - structure EDataOutputJSON : OUTPUT_JSON = - struct - open JsonOutputUtils - open Test_Graph_Data - structure L = OutputLinratJSON - type data = edata - fun output EUnit1 = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "EUnit1") - ) - | output EUnit2 = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "EUnit2") - ) - | output (EExpr1 a) = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "EExpr1") - |> update ("angle",L.output a) - ) - | output (EExpr2 a) = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "EExpr2") - |> update ("angle",L.output a) - ) - | output (EString1 s) = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "EString1") - |> update ("data",Json.String s) - ) - | output (EString2 s) = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "EString2") - |> update ("data",Json.String s) - ) - end - - structure DotStyle : DOT_STYLE = - struct - open Test_Graph_Data - type vdata = vdata - fun style_for_ivertex_data VUnit1 = - "[style=filled,fillcolor=white,fontcolor=black,shape=square]" - | style_for_ivertex_data VUnit2 = - "[style=filled,fillcolor=black,fontcolor=black,shape=square]" - | style_for_ivertex_data (VExpr1 _) = - "[style=filled,fillcolor=red,fontcolor=black,shape=circle]" - | style_for_ivertex_data (VExpr2 _) = - "[style=filled,fillcolor=green,fontcolor=black,shape=circle]" - | style_for_ivertex_data (VString1 _) = - "[style=filled,fillcolor=blue,fontcolor=black,shape=triangle]" - | style_for_ivertex_data (VString2 _) = - "[style=filled,fillcolor=pink,fontcolor=black,shape=triangle]" - end - end - structure InputGraphJSON = InputGraphJSON( - structure Graph = Test_Bang_Graph - structure InputVertexData = ComponentDataIO.IVDataInputJSON - structure InputEdgeData = ComponentDataIO.EDataInputJSON - ) - structure OutputGraphJSON = OutputGraphJSON( - structure Graph = Test_Bang_Graph - structure OutputVertexData = ComponentDataIO.IVDataOutputJSON - structure OutputEdgeData = ComponentDataIO.EDataOutputJSON - ) - - structure GraphJSON = GraphJSON( - structure Graph = Test_Bang_Graph - structure InputVertexData = ComponentDataIO.IVDataInputJSON - structure InputEdgeData = ComponentDataIO.EDataInputJSON - structure OutputVertexData = ComponentDataIO.IVDataOutputJSON - structure OutputEdgeData = ComponentDataIO.EDataOutputJSON - ) - - structure OutputGraphDot = OutputGraphDot( - structure Graph = Test_Bang_Graph - structure DotStyle = ComponentDataIO.DotStyle - ) -end - - -functor Test_OGraph_Tools( - G : TEST_OGRAPH -) -= struct - structure G = G - structure Data = Test_Graph_Data - structure Expr = LinratAngleExpr - - val vert = G.NVert - fun unwrap_vert G.WVert = raise ERROR "wire vertex" - | unwrap_vert (G.NVert vd) = vd - - val vunit1 = vert Data.VUnit1 - val vunit2 = vert Data.VUnit2 - fun vexpr1 a = vert (Data.VExpr1 (Expr.parse a)) - val vexpr1_a = vert o Data.VExpr1 - fun vexpr2 a = vert (Data.VExpr2 (Expr.parse a)) - val vexpr2_a = vert o Data.VExpr2 - fun vstring1 s = vert (Data.VString1 s) - fun vstring2 s = vert (Data.VString2 s) - - val eunit1 = Data.EUnit1 - val eunit2 = Data.EUnit2 - fun eexpr1 a = (Data.EExpr1 (Expr.parse a)) - val eexpr1_a = Data.EExpr1 - fun eexpr2 a = (Data.EExpr2 (Expr.parse a)) - val eexpr2_a = Data.EExpr2 - fun estring1 s = (Data.EString1 s) - fun estring2 s = (Data.EString2 s) - - (* creating stuff 
*) - fun add_vunit1 n = G.add_named_vertex (V.mk n) vunit1 - fun add_vunit2 n = G.add_named_vertex (V.mk n) vunit2 - fun add_vexpr1 n a = G.add_named_vertex (V.mk n) (vexpr1 a) - fun add_vexpr1_a n a = G.add_named_vertex (V.mk n) (vexpr1_a a) - fun add_vexpr2 n a = G.add_named_vertex (V.mk n) (vexpr2 a) - fun add_vexpr2_a n a = G.add_named_vertex (V.mk n) (vexpr2_a a) - fun add_vstring1 n s = G.add_named_vertex (V.mk n) (vstring1 s) - fun add_vstring2 n s = G.add_named_vertex (V.mk n) (vstring2 s) - - fun add_dir_eunit1 n s t = - G.add_named_edge (E.mk n) (Directed,eunit1) (V.mk s) (V.mk t) - fun add_dir_eunit2 n s t = - G.add_named_edge (E.mk n) (Directed,eunit2) (V.mk s) (V.mk t) - fun add_dir_eexpr1 n s t a = - G.add_named_edge (E.mk n) (Directed,(eexpr1 a)) (V.mk s) (V.mk t) - fun add_dir_eexpr1_a n s t a = - G.add_named_edge (E.mk n) (Directed,(eexpr1_a a)) (V.mk s) (V.mk t) - fun add_dir_eexpr2 n s t a = - G.add_named_edge (E.mk n) (Directed,(eexpr2 a)) (V.mk s) (V.mk t) - fun add_dir_eexpr2_a n s t a = - G.add_named_edge (E.mk n) (Directed,(eexpr2_a a)) (V.mk s) (V.mk t) - fun add_dir_estring1 n s t str = - G.add_named_edge (E.mk n) (Directed,(estring1 str)) (V.mk s) (V.mk t) - fun add_dir_estring2 n s t str = - G.add_named_edge (E.mk n) (Directed,(estring2 str)) (V.mk s) (V.mk t) - - fun add_undir_eunit1 n s t = - G.add_named_edge (E.mk n) (Undirected,Data.EUnit1) (V.mk s) (V.mk t) - fun add_undir_eunit2 n s t = - G.add_named_edge (E.mk n) (Undirected,Data.EUnit2) (V.mk s) (V.mk t) - fun add_undir_eexpr1 n s t a = - G.add_named_edge (E.mk n) (Undirected,(Data.EExpr1 (Expr.parse a))) (V.mk s) (V.mk t) - fun add_undir_eexpr1_a n s t a = - G.add_named_edge (E.mk n) (Undirected,(Data.EExpr1 a)) (V.mk s) (V.mk t) - fun add_undir_eexpr2 n s t a = - G.add_named_edge (E.mk n) (Undirected,(Data.EExpr2 (Expr.parse a))) (V.mk s) (V.mk t) - fun add_undir_eexpr2_a n s t a = - G.add_named_edge (E.mk n) (Undirected,(Data.EExpr2 a)) (V.mk s) (V.mk t) - fun add_undir_estring1 n s t str = - G.add_named_edge (E.mk n) (Undirected,(Data.EString1 str)) (V.mk s) (V.mk t) - fun add_undir_estring2 n s t str = - G.add_named_edge (E.mk n) (Undirected,(Data.EString2 str)) (V.mk s) (V.mk t) - - (* testing stuff *) - fun assert_g_eq msg exp actual = - if G.exact_eq exp actual then () - else (writeln "Expected:"; - G.print exp; - writeln "Actual:"; - G.print actual; - raise ERROR (msg^": graphs differed")) - fun assert_vertices msg exp_vs actual_vset = - let - val exp_vset = V.NSet.of_list (map V.mk exp_vs) - val pretty_set = Pretty.string_of o V.NSet.pretty - in - if V.NSet.eq exp_vset actual_vset - then () - else raise ERROR (msg^": expected "^(pretty_set exp_vset)^ - " but got "^(pretty_set actual_vset)) - end - fun assert_edges msg exp_es actual_eset = - let - val exp_ens = E.NSet.of_list (map E.mk exp_es) - val pretty_set = Pretty.string_of o E.NSet.pretty - in - if E.NSet.eq exp_ens actual_eset - then () - else raise ERROR (msg^": expected "^(pretty_set exp_ens)^ - " but got "^(pretty_set actual_eset)) - end - fun assert_vertex_info msg g exp_vs = - let - val actual_vset = G.get_vertices g - val _ = assert_vertices msg (map fst exp_vs) actual_vset - fun check_verts ((vn,(vd,(ins,outs)))::vs) = - let - val (vd',(ins',outs')) = (G.get_vertex_data g (V.mk vn), - (G.get_in_edges g (V.mk vn), - G.get_out_edges g (V.mk vn))) - val _ = Testing.assert (msg^": same data for "^vn) - (G.vdata_eq (vd,vd')) - val _ = Testing.assert (msg^": same ins for "^vn) - (E.NSet.eq ins' (E.NSet.of_list (map E.mk ins))) - val _ = 
Testing.assert (msg^": same outs for "^vn) - (E.NSet.eq outs' (E.NSet.of_list (map E.mk outs))) - in check_verts vs end - | check_verts [] = () - in check_verts exp_vs end - fun assert_edge_info msg g exp_es = - let - val actual_eset = G.get_edges g - val _ = assert_edges msg (map fst exp_es) actual_eset - fun check_edges ((en,((dir,ed),(src,tgt)))::es) = - let - val ((dir',ed'),(src',tgt')) = (G.get_edge_dir_and_data g (E.mk en), - (G.get_edge_source g (E.mk en), - G.get_edge_target g (E.mk en))) - val _ = Testing.assert (msg^": same dir for "^en) - (dir_eq dir dir') - val _ = Testing.assert (msg^": same data for "^en) - (G.edata_eq (ed,ed')) - val _ = Testing.assert (msg^": same source for "^en) - (V.name_eq ((V.mk src),src')) - val _ = Testing.assert (msg^": same target for "^en) - (V.name_eq ((V.mk tgt),tgt')) - in check_edges es end - | check_edges [] = () - in check_edges exp_es end - fun assert_none _ NONE = () - | assert_none name (SOME _) = raise ERROR (name^" is not NONE") - - (* FIXME: move into Testing in isaplib *) - fun test_force s f v = - case Testing.test s f v of - NONE => raise Testing.failed_tests_exp (!Testing.tests_failed_ref) - | SOME x => x - - fun unwrap_nv G.WVert = raise ERROR "wire vertex" - | unwrap_nv (G.NVert vd) = vd - val nv = G.NVert - fun add_wv n = G.add_named_vertex (V.mk n) G.WVert -end - -(* Some helpful tools for creating graphs *) -functor Test_Bang_Graph_Tools( - BG : TEST_BANG_GRAPH -) = -struct - structure BG = BG - - structure Test_OG_Tools = Test_OGraph_Tools(BG) - open Test_OG_Tools - - structure G = BG - (*structure GJsonInput = Test_Bang_Graph_IO.InputGraphJSON - structure GJsonOutput = Test_Bang_Graph_IO.OutputGraphJSON*) - structure GJsonInput = Test_Bang_Graph_IO.GraphJSON - structure GJsonOutput = Test_Bang_Graph_IO.GraphJSON - structure GDotOutput = Test_Bang_Graph_IO.OutputGraphDot - - fun add_to_bbox _ [] = I - | add_to_bbox n vs = - let - val bn = B.mk n - val vset = V.NSet.of_list (map V.mk vs) - in - G.add_to_bbox_anon bn vset - end - fun add_bbox n vs = add_to_bbox n vs o G.add_named_bbox (B.mk n) - fun add_bbox_with_parent n parent vs = - G.add_child_to_bbox (B.mk parent) (B.mk n) o - add_to_bbox n vs o - G.add_named_bbox (B.mk n) - (* compat *) val add_child_bbox = add_bbox_with_parent - fun add_bbox_with_parents n parents vs = - let val bn = B.mk n in - (fold (fn parent => G.add_child_to_bbox (B.mk parent) bn) parents) o - add_to_bbox n vs o - G.add_named_bbox bn - end - - fun graph_from_json str = GJsonInput.input str - fun load_graph path = GJsonInput.input (File_Io.read_json path) - fun save_dot_graph g path = File_Io.write_string path (GDotOutput.output g) - - val print_dot_graph = writeln o GDotOutput.output - val print_json_graph = Pretty.writeln o Json.pretty o GJsonOutput.output - - fun num_bboxes g = B.NSet.cardinality (G.get_bboxes g) - - fun assert_bboxes msg exp_bs actual_bset = - let - val exp_bset = B.NSet.of_list (map B.mk exp_bs) - val pretty_set = Pretty.string_of o B.NSet.pretty - in - if B.NSet.eq exp_bset actual_bset - then () - else raise ERROR (msg^": expected "^(pretty_set exp_bset)^ - " but got "^(pretty_set actual_bset)) - end - fun assert_g_bboxes msg exp_bs = - (assert_bboxes msg exp_bs) o G.get_bboxes - - fun assert_n_bboxes g n () = - if (num_bboxes g) = n - then () else raise ERROR( - "expected: " ^ Int.toString n ^ " bboxes, "^ - "got: " ^ Int.toString (num_bboxes g)) -end - diff --git a/core/graph/test/test-nhd.ML b/core/graph/test/test-nhd.ML deleted file mode 100644 index 4cbcd816..00000000 --- 
a/core/graph/test/test-nhd.ML +++ /dev/null @@ -1,137 +0,0 @@ -structure Nhd_Tests = -struct - -val [Ba,Bb,Bc,Bd] = map B.mk ["Ba","Bb","Bc","Bd"] -val [a,b,c,d,e,f,g,h] = map E.mk ["a","b","c","d","e","f", "g", "h"] - - -val _ = Testing.test "Nhd.mk" (fn () => ( - Nhd.mk [Nhd.L (Ba, [Nhd.I a, Nhd.I b]), Nhd.R (Bb, [Nhd.O c]), Nhd.U d, Nhd.U d] - )) () - -val _ = Testing.test "Nhd.eq" (fn () => - let - val n1 = Nhd.mk [Nhd.L (Ba, [Nhd.I a, Nhd.R (Bb, [Nhd.I b, Nhd.O c]), Nhd.O d])] - val n2 = Nhd.mk [Nhd.L (Ba, [Nhd.I a, Nhd.R (Bb, [Nhd.I b, Nhd.O c, Nhd.O e]), Nhd.O d])] - val _ = Testing.assert "succeeds for equal nhds" (Nhd.eq (n1, n1)) - val _ = Testing.assert "fails for non-equal nhds" (not (Nhd.eq (n1, n2))) - in () - end) () - -val _ = Testing.test "Nhd.of_json/json_of" (fn () => - let - val json = Json.of_string "{ \"expr\": [[\"Ba\", \"<\", \"i:a\", [\"Bb\", \">\", \"i:b\", \"o:c\"], \"o:d\"]] }" - val nhd = Nhd.mk [Nhd.L (Ba, [Nhd.I a, Nhd.R (Bb, [Nhd.I b, Nhd.O c]), Nhd.O d])] - val _ = Testing.assert "loads json correctly" (Nhd.eq (Nhd.of_json json, nhd)) - val _ = Testing.assert "saves json correctly" - (Json.string_of (Nhd.json_of nhd) = Json.string_of json) - in () - end) () - - -val _ = Testing.test "Nhd.rename" (fn () => - let - val n1 = Nhd.mk [Nhd.L (Ba, [Nhd.I a, Nhd.R (Bb, [Nhd.I b, Nhd.O c]), Nhd.O d])] - val n2 = Nhd.mk [Nhd.L (Bb, [Nhd.I d, Nhd.R (Ba, [Nhd.I b, Nhd.O c]), Nhd.O e])] - val esub = ESub.empty |> ESub.add (a, d) |> ESub.add (d, e) - |> ESub.extend_fixed (E.NSet.of_list [b, c]) - val bsub = BSub.empty |> BSub.add (Ba, Bb) |> BSub.add (Bb, Ba) - val _ = Testing.assert "renaming works correctly" - (Nhd.eq (Nhd.rename (esub,bsub) n1, n2)) - in () - end) () - -val _ = Testing.test "Nhd.reduce" (fn () => - let - val n1 = Nhd.mk [Nhd.L (Ba, [Nhd.R (Bb, []), Nhd.O d])] - val n2 = Nhd.mk [Nhd.L (Ba, [Nhd.R (Bb, [])])] - val n1red = Nhd.mk [Nhd.L (Ba, [Nhd.O d])] - val n2red = Nhd.empty - val _ = Testing.assert "reduce n1" (Nhd.eq (Nhd.reduce n1, n1red)) - val _ = Testing.assert "reduce n2" (Nhd.eq (Nhd.reduce n2, n2red)) - in () - end) () - -val nhd = Nhd.mk [Nhd.L (Ba, [Nhd.I a, Nhd.O b, Nhd.R (Bb, [Nhd.I c])]), Nhd.O d] - -val fr_e = fold ESub.add [(a,e),(b,f),(c,g),(d,h)] ESub.empty -val fr_bb = fold BSub.add [(Ba,Bc),(Bb,Bd)] BSub.empty - - -val _ = Testing.test "Nhd.get_edge_contexts" (fn () => - let - val ctxs = [(a, Nhd.IN, [Ba]), (b, Nhd.OUT, [Ba]), (c, Nhd.IN, [Ba, Bb]), (d, Nhd.OUT, [])] - val _ = Testing.assert "gets correct contexts" (ListPair.allEq - (fn ((e,d,bs), (e',d',bs')) => ( - E.name_eq (e, e') andalso d = d' andalso ListPair.allEq B.name_eq (bs,bs') - )) - (Nhd.get_edge_contexts nhd, ctxs)) - in () - end) () - -val _ = Testing.test "Nhd.get_edge_contexts_for" (fn () => - let - val ctxs = [(a, Nhd.IN, [Ba])] - val _ = Testing.assert "gets correct contexts" (ListPair.allEq - (fn ((e,d,bs), (e',d',bs')) => ( - E.name_eq (e, e') andalso d = d' andalso ListPair.allEq B.name_eq (bs,bs') - )) - (Nhd.get_edge_contexts_for a nhd, ctxs)) - in () - end) () - -val _ = Testing.test "Nhd.kill" (fn () => - let - val k_Ba = Nhd.mk [Nhd.O d] - val k_Bb = Nhd.mk [Nhd.L (Ba, [Nhd.I a, Nhd.O b]), Nhd.O d] - val nhd1 = Nhd.mk [Nhd.L (Ba, [Nhd.R (Bb, [Nhd.I c])]), Nhd.O d] - val k_Bb1 = Nhd.mk [Nhd.O d] - val _ = Testing.assert "kill outer bbox" (Nhd.eq (Nhd.kill Ba nhd, k_Ba)) - val _ = Testing.assert "kill inner bbox" (Nhd.eq (Nhd.kill Bb nhd, k_Bb)) - val _ = Testing.assert "kill and reduce empty bbox" (Nhd.eq (Nhd.kill Bb nhd1, k_Bb1)) - in () - end) () - 
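(* Editorial note, not in the original source: the Nhd.of_json/json_of round-trip
   test above pairs the string { "expr": [["Ba", "<", "i:a", ["Bb", ">", "i:b", "o:c"], "o:d"]] }
   with the term Nhd.mk [Nhd.L (Ba, [Nhd.I a, Nhd.R (Bb, [Nhd.I b, Nhd.O c]), Nhd.O d])],
   so the encoding appears to be: a list headed by a !-box name and "<" denotes an
   Nhd.L context, one headed by a !-box name and ">" denotes an Nhd.R context, and
   "i:e" / "o:e" denote an incoming (Nhd.I) / outgoing (Nhd.O) edge named e. *)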
-val _ = Testing.test "Nhd.drop" (fn () => - let - val d_Ba = Nhd.mk [Nhd.I a, Nhd.O b, Nhd.R (Bb, [Nhd.I c]), Nhd.O d] - val d_Bb = Nhd.mk [Nhd.L (Ba, [Nhd.I a, Nhd.O b, Nhd.I c]), Nhd.O d] - val _ = Testing.assert "drop outer bbox" (Nhd.eq (Nhd.drop Ba nhd, d_Ba)) - val _ = Testing.assert "drop inner bbox" (Nhd.eq (Nhd.drop Bb nhd, d_Bb)) - in () - end) () - -val _ = Testing.test "Nhd.copy" (fn () => - let - val cp_Ba = Nhd.mk [ - Nhd.L (Bc, [Nhd.I e, Nhd.O f, Nhd.R (Bd, [Nhd.I g])]), - Nhd.L (Ba, [Nhd.I a, Nhd.O b, Nhd.R (Bb, [Nhd.I c])]), - Nhd.O d] - val cp_Bb = Nhd.mk [ - Nhd.L (Ba, [Nhd.I a, Nhd.O b, - Nhd.R (Bb, [Nhd.I c]), - Nhd.R (Bd, [Nhd.I g]) - ]), - Nhd.O d] - val _ = Testing.assert "copy outer bbox" (Nhd.eq (Nhd.copy Ba (fr_e, fr_bb) nhd, cp_Ba)) - val _ = Testing.assert "copy inner bbox" (Nhd.eq (Nhd.copy Bb (fr_e, fr_bb) nhd, cp_Bb)) - in () - end) () - -val _ = Testing.test "Nhd.expand" (fn () => - let - val _ = Testing.assert "expand outer bbox" - (Nhd.eq (nhd |> Nhd.expand Ba (fr_e, fr_bb), - nhd |> Nhd.copy Ba (fr_e, fr_bb) |> Nhd.drop Bc)) - val _ = Testing.assert "expand inner bbox" - (Nhd.eq (nhd |> Nhd.expand Bb (fr_e, fr_bb), - nhd |> Nhd.copy Bb (fr_e, fr_bb) |> Nhd.drop Bd)) - in () - end) () - -val _ = Testing.assert_no_failed_tests() - - -end; - -(*val _ = OS.Process.exit OS.Process.success;*) diff --git a/core/graph/test/test-ograph.ML b/core/graph/test/test-ograph.ML deleted file mode 100644 index 4d2f5449..00000000 --- a/core/graph/test/test-ograph.ML +++ /dev/null @@ -1,3583 +0,0 @@ -(* Tests for the OGRAPH signature *) -functor OGraph_Interface_Tests( - G : TEST_OGRAPH -) -= struct - structure Tools = Test_OGraph_Tools(G) - open Tools - - (* TODO: tests for Graph.Data wrappers? *) - (* TODO: subst_in_vdata *) - (* TODO: subst_in_edata *) - (* TODO: apply_data_subst *) - - - - val _ = Testing.test "G.empty" (fn () => let - val _ = assert_vertices "G.empty" [] (G.get_vertices G.empty) - val _ = assert_edges "G.empty" [] (G.get_edges G.empty) - in () end) () - - - - val _ = Testing.test "G.is_empty" (fn () => let - val _ = Testing.assert "is_empty true when empty" - (G.is_empty G.empty) - val _ = Testing.assert "is_empty false when not empty" - (not (G.is_empty (add_vunit1 "a" G.empty))) - in () end) () - - - - val _ = Testing.test "G.has_vertex" (fn () => let - val _ = Testing.assert "empty" (not (G.has_vertex G.empty (V.mk "a"))); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val _ = Testing.assert "a" (G.has_vertex g (V.mk "a")); - val _ = Testing.assert "b" (G.has_vertex g (V.mk "b")); - val _ = Testing.assert "e" (not (G.has_vertex g (V.mk "e"))); - in () end) (); - - - - val _ = Testing.test "G.has_edge" (fn () => let - val _ = Testing.assert "empty" (not (G.has_edge G.empty (E.mk "a"))); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val _ = Testing.assert "e" (G.has_edge g (E.mk "e")); - val _ = Testing.assert "f" (G.has_edge g (E.mk "f")); - val _ = Testing.assert "a" (not (G.has_edge g (E.mk "a"))); - in () end) (); - - - - val _ = Testing.test "G.get_edge_source" (fn () => let - val _ = (G.get_edge_source G.empty (E.mk "a"); - raise ERROR "G.get_edge_source did not raise exception") - handle G.no_such_edge_exp (_,en,g) => - (Testing.assert "correct ename in exp" ((E.string_of_name en) = "a"); - Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" 
- |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val _ = Testing.assert "source of e is a" - (V.name_eq (G.get_edge_source g (E.mk "e"),V.mk "a")); - val _ = Testing.assert "source of f is b" - (V.name_eq (G.get_edge_source g (E.mk "f"),V.mk "b")); - val _ = (G.get_edge_source g (E.mk "a"); - raise ERROR "G.get_edge_source did not raise exception") - handle G.no_such_edge_exp (_,en,g') => - (Testing.assert "correct ename in exp" ((E.string_of_name en) = "a"); - Testing.assert "correct graph in exp" (G.exact_eq g g')); - in () end) (); - - - - val _ = Testing.test "G.get_edge_target" (fn () => let - val _ = (G.get_edge_target G.empty (E.mk "a"); - raise ERROR "G.get_edge_target did not raise exception") - handle G.no_such_edge_exp (_,en,g) => - (Testing.assert "correct ename in exp" ((E.string_of_name en) = "a"); - Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val _ = Testing.assert "target of e is b" - (V.name_eq (G.get_edge_target g (E.mk "e"),V.mk "b")); - val _ = Testing.assert "target of f is b" - (V.name_eq (G.get_edge_target g (E.mk "f"),V.mk "b")); - val _ = (G.get_edge_target g (E.mk "a"); - raise ERROR "G.get_edge_target did not raise exception") - handle G.no_such_edge_exp (_,en,g') => - (Testing.assert "correct ename in exp" ((E.string_of_name en) = "a"); - Testing.assert "correct graph in exp" (G.exact_eq g g')); - in () end) (); - - - - val _ = Testing.test "G.get_edge_info" (fn () => let - val _ = (G.get_edge_info G.empty (E.mk "a"); - raise ERROR "G.get_edge_info (empty) did not raise exception") - handle G.no_such_edge_exp (_,en,g) => - (Testing.assert "correct ename in exp (empty)" - (E.string_of_name en = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val angle = Expr.parse "x + z" - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eexpr2_a "f" "b" "b" angle; - - val _ = (G.get_edge_info g (E.mk "a"); - raise ERROR "G.get_edge_info (unknown edge) did not raise exception") - handle G.no_such_edge_exp (_,en,g_e) => - (Testing.assert "correct ename in exp (unknown edge)" - (E.string_of_name en = "a"); - Testing.assert "correct graph in exp (unknown edge)" - (G.exact_eq g_e g)) - - val _ = - let - val ((dod,ed),(s,t)) = G.get_edge_info g (E.mk "e") - in - Testing.assert "e: correct dir_or_undir" (dod = Undirected); - Testing.assert "e: correct data" (G.edata_eq (ed,eunit1)); - Testing.assert "e: correct source" (V.name_eq (V.mk "a",s)); - Testing.assert "e: correct target" (V.name_eq (V.mk "b",t)) - end - - val _ = - let - val ((dod,ed),(s,t)) = G.get_edge_info g (E.mk "f") - in - Testing.assert "e: correct dir_or_undir" (dod = Directed); - Testing.assert "e: correct data" (G.edata_eq (ed,eexpr2_a angle)); - Testing.assert "e: correct source" (V.name_eq (V.mk "b",s)); - Testing.assert "e: correct target" (V.name_eq (V.mk "b",t)) - end - in () end) (); - - - - val _ = Testing.test "G.edge_get_other_vertex" (fn () => let - val _ = (G.edge_get_other_vertex G.empty (E.mk "a") (V.mk "a"); - raise ERROR "G.edge_get_other_vertex did not raise exception") - handle G.no_such_edge_exp (_,en,g) => - (Testing.assert "correct ename in exp" ((E.string_of_name en) = "a"); - Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> 
add_dir_eunit2 "f" "b" "b"; - - val _ = Testing.assert "target of e is b" - (V.name_eq (G.edge_get_other_vertex g (E.mk "e") (V.mk "a"), V.mk "b")); - val _ = Testing.assert "source of e is b" - (V.name_eq (G.edge_get_other_vertex g (E.mk "e") (V.mk "b"), V.mk "a")); - val _ = Testing.assert "target of f is b" - (V.name_eq (G.edge_get_other_vertex g (E.mk "f") (V.mk "b"), V.mk "b")); - val _ = (G.edge_get_other_vertex g (E.mk "a") (V.mk "a"); - raise ERROR "G.edge_get_other_vertex did not raise exception") - handle G.no_such_edge_exp (_,en,g') => - (Testing.assert "correct ename in exp" ((E.string_of_name en) = "a"); - Testing.assert "correct graph in exp" (G.exact_eq g g')); - val _ = (G.edge_get_other_vertex g (E.mk "e") (V.mk "c"); - raise ERROR "G.edge_get_other_vertex did not raise exception") - handle G.not_an_endpoint_exp (_,en,vn,g') => - (Testing.assert "correct vname in exp" (V.name_eq (vn,V.mk "c")); - Testing.assert "correct ename in exp" (E.name_eq (en,E.mk "e")); - Testing.assert "correct graph in exp" (G.exact_eq g g')); - val _ = (G.edge_get_other_vertex g (E.mk "f") (V.mk "a"); - raise ERROR "G.edge_get_other_vertex did not raise exception") - handle G.not_an_endpoint_exp (_,en,vn,g') => - (Testing.assert "correct vname in exp" (V.name_eq (vn,V.mk "a")); - Testing.assert "correct ename in exp" (E.name_eq (en,E.mk "f")); - Testing.assert "correct graph in exp" (G.exact_eq g g')); - in () end) (); - - - - val _ = Testing.test "G.get_in_edges" (fn () => let - val _ = (G.get_in_edges G.empty (V.mk "a"); - raise ERROR "G.get_in_edges did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp" ((V.string_of_name vn) = "a"); - Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val _ = Testing.assert "in edges of a = []" - (E.NSet.is_empty (G.get_in_edges g (V.mk "a"))); - val _ = Testing.assert "in edges of b = [e,f]" - (E.NSet.eq (G.get_in_edges g (V.mk "b")) - (E.NSet.of_list (map E.mk ["e","f"]))); - - val _ = (G.get_in_edges g (V.mk "c"); - raise ERROR "G.get_in_edges did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp" (V.name_eq (vn,V.mk "c")); - Testing.assert "correct graph in exp" (G.exact_eq g g')); - in () end) (); - - - - val _ = Testing.test "G.get_out_edges" (fn () => let - val _ = (G.get_out_edges G.empty (V.mk "a"); - raise ERROR "G.get_out_edges did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp" ((V.string_of_name vn) = "a"); - Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val _ = Testing.assert "out edges of a = [e]" - (E.NSet.eq (G.get_out_edges g (V.mk "a")) - (E.NSet.of_list (map E.mk ["e"]))); - val _ = Testing.assert "out edges of b = [f]" - (E.NSet.eq (G.get_out_edges g (V.mk "b")) - (E.NSet.of_list (map E.mk ["f"]))); - - val _ = (G.get_out_edges g (V.mk "c"); - raise ERROR "G.get_out_edges did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp" (V.name_eq (vn,V.mk "c")); - Testing.assert "correct graph in exp" (G.exact_eq g g')); - in () end) (); - - - - val _ = Testing.test "G.get_adj_edges" (fn () => let - val _ = (G.get_adj_edges G.empty (V.mk "a"); - raise ERROR 
"G.get_adj_edges (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val _ = Testing.assert "adj edges of a = [e]" - (E.NSet.eq (G.get_adj_edges g (V.mk "a")) - (E.NSet.of_list (map E.mk ["e"]))); - val _ = Testing.assert "adj edges of b = [e,f]" - (E.NSet.eq (G.get_adj_edges g (V.mk "b")) - (E.NSet.of_list (map E.mk ["e","f"]))); - - val _ = (G.get_adj_edges g (V.mk "c"); - raise ERROR "G.get_adj_edges (unknown vertex) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex)" - (V.name_eq (vn,V.mk "c")); - Testing.assert "correct graph in exp (unknown vertex)" - (G.exact_eq g g')); - in () end) (); - - - - val _ = Testing.test "G.get_in_dir_edges" (fn () => let - val _ = (G.get_in_dir_edges G.empty (V.mk "a"); - raise ERROR "G.get_in_dir_edges did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp" ((V.string_of_name vn) = "a"); - Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vunit2 "c" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b" - |> add_dir_eunit1 "g" "b" "a" - |> add_dir_eunit1 "h" "a" "b" - |> add_undir_eunit1 "i" "a" "c" - |> add_undir_eunit1 "j" "c" "a" - - val _ = Testing.assert "in dir edges of a = [g]" - (E.NSet.eq (G.get_in_dir_edges g (V.mk "a")) - (E.NSet.of_list (map E.mk ["g"]))); - val _ = Testing.assert "in dir edges of b = [f,h]" - (E.NSet.eq (G.get_in_dir_edges g (V.mk "b")) - (E.NSet.of_list (map E.mk ["f","h"]))); - val _ = Testing.assert "in dir edges of c = []" - (E.NSet.is_empty (G.get_in_dir_edges g (V.mk "c"))); - - val _ = (G.get_in_dir_edges g (V.mk "e"); - raise ERROR "G.get_in_dir_edges did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp" (V.name_eq (vn,V.mk "e")); - Testing.assert "correct graph in exp" (G.exact_eq g g')); - in () end) (); - - - - val _ = Testing.test "G.get_out_dir_edges" (fn () => let - val _ = (G.get_out_dir_edges G.empty (V.mk "a"); - raise ERROR "G.get_out_dir_edges did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp" ((V.string_of_name vn) = "a"); - Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vunit2 "c" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b" - |> add_dir_eunit1 "g" "b" "a" - |> add_dir_eunit1 "h" "a" "b" - |> add_undir_eunit1 "i" "a" "c" - |> add_undir_eunit1 "j" "c" "a" - - val _ = Testing.assert "out dir edges of a = [h]" - (E.NSet.eq (G.get_out_dir_edges g (V.mk "a")) - (E.NSet.of_list (map E.mk ["h"]))); - val _ = Testing.assert "out dir edges of b = [f,g]" - (E.NSet.eq (G.get_out_dir_edges g (V.mk "b")) - (E.NSet.of_list (map E.mk ["f","g"]))); - val _ = Testing.assert "out dir edges of c = []" - (E.NSet.is_empty (G.get_out_dir_edges g (V.mk "c"))); - - val _ = (G.get_out_dir_edges g (V.mk "e"); - raise ERROR "G.get_out_dir_edges did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp" (V.name_eq (vn,V.mk "e")); - Testing.assert "correct graph in exp" 
(G.exact_eq g g')); - in () end) (); - - - - val _ = Testing.test "G.get_adj_undir_edges" (fn () => let - val _ = (G.get_adj_undir_edges G.empty (V.mk "a"); - raise ERROR "G.get_adj_undir_edges did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp" ((V.string_of_name vn) = "a"); - Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vunit2 "c" - |> add_vunit2 "d" - |> add_undir_eunit1 "e" "a" "d" - |> add_dir_eunit2 "f" "b" "b" - |> add_dir_eunit1 "g" "b" "a" - |> add_dir_eunit1 "h" "a" "b" - |> add_undir_eunit1 "i" "a" "c" - |> add_undir_eunit1 "j" "c" "a" - - val _ = Testing.assert "adj undir edges of a = [e,i,j]" - (E.NSet.eq (G.get_adj_undir_edges g (V.mk "a")) - (E.NSet.of_list (map E.mk ["e","i","j"]))); - val _ = Testing.assert "adj undir edges of b = []" - (E.NSet.is_empty (G.get_adj_undir_edges g (V.mk "b"))); - val _ = Testing.assert "adj undir edges of c = [i,j]" - (E.NSet.eq (G.get_adj_undir_edges g (V.mk "c")) - (E.NSet.of_list (map E.mk ["i","j"]))); - val _ = Testing.assert "adj undir edges of d = [e]" - (E.NSet.eq (G.get_adj_undir_edges g (V.mk "d")) - (E.NSet.of_list (map E.mk ["e"]))); - - val _ = (G.get_adj_undir_edges g (V.mk "e"); - raise ERROR "G.get_adj_undir_edges did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp" (V.name_eq (vn,V.mk "e")); - Testing.assert "correct graph in exp" (G.exact_eq g g')); - in () end) (); - - - - local - val g = G.empty |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - |> add_vunit1 "v4" - |> add_wv "v5" - |> add_vunit1 "v6" - |> add_vunit1 "v7" - |> add_vunit1 "v8" - |> add_wv "v9" - |> add_undir_eunit1 "e1" "v2" "v2" - |> add_dir_eunit1 "e2" "v3" "v3" - |> add_undir_eunit1 "e3" "v4" "v4" - |> add_dir_eunit1 "e4" "v4" "v5" - |> add_dir_eunit1 "e5" "v4" "v6" - |> add_dir_eunit1 "e6" "v5" "v7" - |> add_dir_eunit1 "e7" "v7" "v6" - |> add_undir_eunit1 "e8" "v7" "v8" - |> add_undir_eunit1 "e9" "v8" "v7" - |> add_dir_eunit1 "e10" "v4" "v9" - in - val _ = Testing.test "G.get_adj_vertices" (fn () => let - val _ = (G.get_adj_vertices G.empty (V.mk "a"); - raise ERROR "G.get_adj_vertices (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)); - - val _ = (G.get_adj_vertices g (V.mk "v0"); - raise ERROR "G.get_adj_vertices (unknown vertex) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex)" - (G.exact_eq g g')); - - fun check_adj_verts exp v = - assert_vertices ("adj vertices of "^v) exp (G.get_adj_vertices g (V.mk v)) - val _ = check_adj_verts [] "v1" - val _ = check_adj_verts [] "v2" - val _ = check_adj_verts [] "v3" - val _ = check_adj_verts ["v5","v6","v9"] "v4" - val _ = check_adj_verts ["v4","v7"] "v6" - val _ = check_adj_verts ["v5","v6","v8"] "v7" - in () end) (); - - - val _ = Testing.test "G.get_adj_vertices_to_set" (fn () => let - val _ = (G.get_adj_vertices_to_set G.empty (V.NSet.single (V.mk "a")); - raise ERROR "G.get_adj_vertices_to_set (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct 
graph in exp (empty)" - (G.is_empty g)); - - val _ = (G.get_adj_vertices_to_set g (V.NSet.of_list (map V.mk ["v0","v1"])); - raise ERROR "G.get_adj_vertices_to_set (unknown vertex) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex)" - (G.exact_eq g g')); - - fun check_adj_verts exp vs = - assert_vertices ("adj vertices of "^(commas vs)) exp - (G.get_adj_vertices_to_set g (V.NSet.of_list (map V.mk vs))) - val _ = check_adj_verts [] ["v1"] - val _ = check_adj_verts ["v5","v6","v8","v9"] ["v4","v7"] - in () end) (); - - - val _ = Testing.test "G.get_successor_vertices" (fn () => let - val _ = (G.get_successor_vertices G.empty (V.mk "a"); - raise ERROR "G.get_successor_vertices (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val _ = (G.get_successor_vertices g (V.mk "v0"); - raise ERROR "G.get_successor_vertices (unknown vertex) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex)" - (G.exact_eq g g')) - - fun check exp vs = - assert_vertices ("successor vertices of "^vs) exp - (G.get_successor_vertices g (V.mk vs)) - val _ = check [] "v1" - val _ = check [] "v2" - val _ = check [] "v3" - val _ = check ["v5","v6","v9"] "v4" - val _ = check ["v7"] "v5" - val _ = check [] "v6" - val _ = check ["v6"] "v7" - val _ = check [] "v8" - in () end) () - - - - val _ = Testing.test "G.get_predecessor_vertices" (fn () => let - val _ = (G.get_predecessor_vertices G.empty (V.mk "a"); - raise ERROR "G.get_predecessor_vertices (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val _ = (G.get_predecessor_vertices g (V.mk "v0"); - raise ERROR "G.get_predecessor_vertices (unknown vertex) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex)" - (G.exact_eq g g')) - - fun check exp vs = - assert_vertices ("predecessor vertices of "^vs) exp - (G.get_predecessor_vertices g (V.mk vs)) - val _ = check [] "v1" - val _ = check [] "v2" - val _ = check [] "v3" - val _ = check [] "v4" - val _ = check ["v4"] "v5" - val _ = check ["v4","v7"] "v6" - val _ = check ["v5"] "v7" - val _ = check [] "v8" - in () end) (); - - - - val _ = Testing.test "G.get_sibling_vertices" (fn () => let - val _ = (G.get_sibling_vertices G.empty (V.mk "a"); - raise ERROR "G.get_sibling_vertices (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val _ = (G.get_sibling_vertices g (V.mk "v0"); - raise ERROR "G.get_sibling_vertices (unknown vertex) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex)" - 
(G.exact_eq g g')) - - val g = G.empty |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - |> add_vunit1 "v4" - |> add_wv "v5" - |> add_vunit1 "v6" - |> add_vunit1 "v7" - |> add_vunit1 "v8" - |> add_wv "v9" - |> add_undir_eunit1 "e1" "v2" "v2" - |> add_dir_eunit1 "e2" "v3" "v3" - |> add_undir_eunit1 "e3" "v4" "v4" - |> add_undir_eunit1 "e4" "v4" "v5" - |> add_dir_eunit1 "e5" "v4" "v6" - |> add_undir_eunit1 "e6" "v5" "v7" - |> add_dir_eunit1 "e7" "v7" "v6" - |> add_undir_eunit1 "e8" "v7" "v8" - |> add_undir_eunit1 "e9" "v8" "v7" - - fun check exp vs = - assert_vertices ("sibling vertices of "^vs) exp - (G.get_sibling_vertices g (V.mk vs)) - val _ = check [] "v1" - val _ = check [] "v2" - val _ = check [] "v3" - val _ = check ["v5"] "v4" - val _ = check ["v4","v7"] "v5" - val _ = check [] "v6" - val _ = check ["v5","v8"] "v7" - val _ = check ["v7"] "v8" - in () end) (); - end - - - - val _ = Testing.test "G.adj_edge_classes" (fn () => let - val _ = (G.adj_edge_classes G.empty (V.mk "a"); - raise ERROR "G.adj_edge_classes (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val g = G.empty |> add_vunit1 "v1" - |> add_wv "v2" - |> add_vunit1 "v3" - |> add_wv "v4" - |> add_vunit1 "v5" - |> add_vunit1 "v6" - |> add_dir_eunit1 "e1" "v1" "v2" - |> add_undir_eunit1 "e2" "v1" "v3" - |> add_dir_eunit1 "e3" "v4" "v1" - |> add_undir_eunit1 "e4" "v5" "v1" - |> add_dir_eunit1 "e5" "v1" "v5" - |> add_dir_eunit1 "e6" "v5" "v1" - |> add_dir_eunit1 "e7" "v5" "v5" - |> add_undir_eunit1 "e8" "v5" "v5" - - val _ = (G.adj_edge_classes g (V.mk "v0"); - raise ERROR "G.adj_edge_classes (unknown vertex) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex)" - (G.exact_eq g g')) - - val _ = Testing.assert "adj edge classes of v6" - (let - val (ie,oe,ae) = G.adj_edge_classes g (V.mk "v6") - in - E.NSet.is_empty ie andalso - E.NSet.is_empty oe andalso - E.NSet.is_empty ae - end) - val _ = Testing.assert "adj edge classes of v1" - (let - val (ie,oe,ae) = G.adj_edge_classes g (V.mk "v1") - in - E.NSet.eq ie (E.NSet.of_list (map E.mk ["e3","e6"])) - andalso - E.NSet.eq oe (E.NSet.of_list (map E.mk ["e1","e5"])) - andalso - E.NSet.eq ae (E.NSet.of_list (map E.mk ["e2","e4"])) - end) - val _ = Testing.assert "adj edge classes of v5" - (let - val (ie,oe,ae) = G.adj_edge_classes g (V.mk "v5") - in - E.NSet.eq ie (E.NSet.of_list (map E.mk ["e5","e7"])) - andalso - E.NSet.eq oe (E.NSet.of_list (map E.mk ["e6","e7"])) - andalso - E.NSet.eq ae (E.NSet.of_list (map E.mk ["e4","e8"])) - end) - in () end) (); - - - - val _ = Testing.test "G.get_vdata_tab" (fn () => let - val _ = Testing.assert "G.get_vdata_tab on G.empty is empty" - (VTab.is_empty (G.get_vdata_tab G.empty)) - - val g = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b" - val exptab = VTab.empty - |> VTab.set (V.mk "a",vunit1) - |> VTab.set (V.mk "b",vunit2) - val _ = Testing.assert "get_vdata_tab correct" - (VTab.tab_eq G.vdata_eq (exptab,G.get_vdata_tab g)) - in () end) (); - - - - val _ = Testing.test "G.get_edata_tab" (fn () => let - val _ = Testing.assert "G.get_edata_tab on G.empty is empty" - (ETab.is_empty (G.get_edata_tab G.empty)) - - val g = G.empty - |> 
add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b" - val exptab = ETab.empty - |> ETab.set (E.mk "e",eunit1) - |> ETab.set (E.mk "f",eunit2) - val _ = Testing.assert "get_edata_tab correct" - (ETab.tab_eq G.edata_eq (exptab,G.get_edata_tab g)) - in () end) (); - - - - val _ = Testing.test "G.get_vertex_data" (fn () => let - val _ = (G.get_vertex_data G.empty (V.mk "a"); - raise ERROR "G.get_vertex_data did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp" ((V.string_of_name vn) = "a"); - Testing.assert "correct graph in exp" (G.exact_eq g G.empty)) - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b" - - val (vd,(ins,outs)) = (G.get_vertex_data g (V.mk "a"), - (G.get_in_edges g (V.mk "a"), - G.get_out_edges g (V.mk "a"))) - val _ = Testing.assert "get_vertex_data correct data" - (G.vdata_eq (vd,vunit1)); - val _ = assert_edges "ins 1" [] ins; - val _ = assert_edges "outs 1" ["e"] outs; - - val (vd,(ins,outs)) = (G.get_vertex_data g (V.mk "b"), - (G.get_in_edges g (V.mk "b"), - G.get_out_edges g (V.mk "b"))) - val _ = Testing.assert "get_vertex_data correct data" (G.vdata_eq (vd,vunit2)); - val _ = assert_edges "ins 2" ["e","f"] ins; - val _ = assert_edges "outs 2" ["f"] outs; - - val _ = (G.get_vertex_data g (V.mk "e"); - raise ERROR "G.get_vertex_data did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp" ((V.string_of_name vn) = "e"); - Testing.assert "correct graph in exp" (G.exact_eq g g')); - in () end) (); - - - - val _ = Testing.test "G.get_edge_data" (fn () => let - val _ = (G.get_edge_data G.empty (E.mk "a"); - raise ERROR "G.get_edge_data did not raise exception") - handle G.no_such_edge_exp (_,en,g) => - (Testing.assert "correct ename in exp" ((E.string_of_name en) = "a"); - Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val _ = Testing.assert "data of 'e' correct" - (G.edata_eq (G.get_edge_data g (E.mk "e"),eunit1)); - val _ = Testing.assert "data of 'f' correct" - (G.edata_eq (G.get_edge_data g (E.mk "f"),eunit2)); - - val _ = (G.get_edge_data g (E.mk "c"); - raise ERROR "G.get_edge_data did not raise exception") - handle G.no_such_edge_exp (_,en,g') => - (Testing.assert "correct ename in exp" (E.name_eq (en,E.mk "c")); - Testing.assert "correct graph in exp" (G.exact_eq g g')); - in () end) (); - - - - val _ = Testing.test "G.get_edge_dir" (fn () => let - val _ = (G.get_edge_dir G.empty (E.mk "a"); - raise ERROR "G.get_edge_dir (empty) did not raise exception") - handle G.no_such_edge_exp (_,en,g) => - (Testing.assert "correct ename in exp (empty)" ((E.string_of_name en) = "a"); - Testing.assert "correct graph in exp (empty)" (G.is_empty g)) - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b" - - val _ = Testing.assert "dir of 'e' correct" - (G.get_edge_dir g (E.mk "e") = Undirected) - val _ = Testing.assert "dir of 'f' correct" - (G.get_edge_dir g (E.mk "f") = Directed) - - val _ = (G.get_edge_data g (E.mk "a"); - raise ERROR "G.get_edge_data (unknown edge) did not raise exception") - handle G.no_such_edge_exp (_,en,g') => - (Testing.assert "correct ename in exp (unknown edge)" (E.name_eq (en,E.mk "a")); - Testing.assert "correct graph in exp 
(unknown edge)" (G.exact_eq g g')) - in () end) () - - - - val _ = Testing.test "G.get_edge_dir_and_data" (fn () => let - val _ = (G.get_edge_dir_and_data G.empty (E.mk "a"); - raise ERROR "G.get_edge_dir_and_data did not raise exception") - handle G.no_such_edge_exp (_,en,g') => - (Testing.assert "correct ename in exp" ((E.string_of_name en) = "a"); - Testing.assert "correct graph in exp" (G.exact_eq G.empty g')); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val ((dir,ed),(src,tgt)) = (G.get_edge_dir_and_data g (E.mk "e"), - (G.get_edge_source g (E.mk "e"), - G.get_edge_target g (E.mk "e"))) - val _ = Testing.assert "get_edge_dir_and_data correct data" (G.edata_eq (ed,eunit1)); - val _ = Testing.assert "get_edge_dir_and_data correct direction" - (case dir of Directed => false | Undirected => true); - val _ = Testing.assert "get_edge_source correct" (V.name_eq (src,V.mk "a")); - val _ = Testing.assert "get_edge_target correct" (V.name_eq (tgt,V.mk "b")); - val ((dir,ed),(src,tgt)) = (G.get_edge_dir_and_data g (E.mk "f"), - (G.get_edge_source g (E.mk "f"), - G.get_edge_target g (E.mk "f"))) - val _ = Testing.assert "get_edge_dir_and_data correct data" (G.edata_eq (ed,eunit2)); - val _ = Testing.assert "get_edge_dir_and_data correct direction" - (case dir of Directed => true | Undirected => false); - val _ = Testing.assert "get_edge_dir_and_data correct src" (V.name_eq (src,V.mk "b")); - val _ = Testing.assert "get_edge_dir_and_data correct tgt" (V.name_eq (tgt,V.mk "b")); - - val _ = (G.get_edge_dir_and_data g (E.mk "a"); - raise ERROR "G.get_edge_dir_and_data did not raise exception") - handle G.no_such_edge_exp (_,en,g') => - (Testing.assert "correct ename in exp" ((E.string_of_name en) = "a"); - Testing.assert "correct graph in exp" (G.exact_eq g g')); - in () end) (); - - - - val _ = Testing.test "G.set_vertex_data" (fn () => let - val _ = (G.set_vertex_data vunit1 (V.mk "a") G.empty; - raise ERROR "G.set_vertex_data did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp" ((V.string_of_name vn) = "a"); - Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a"; - - val g' = G.set_vertex_data vunit2 (V.mk "a") g; - val _ = assert_vertex_info "g'" g' - [("a",(vunit2,(["f"],["e"]))), - ("b",(vunit2,(["e"],["f"])))]; - val _ = assert_edge_info "g'" g' - [("e",((Directed,eunit1),("a","b"))), - ("f",((Undirected,eunit2),("b","a")))]; - - val g'' = G.set_vertex_data vunit2 (V.mk "b") g'; - val _ = assert_vertex_info "g''" g'' - [("a",(vunit2,(["f"],["e"]))), - ("b",(vunit2,(["e"],["f"])))]; - val _ = assert_edge_info "g''" g'' - [("e",((Directed,eunit1),("a","b"))), - ("f",((Undirected,eunit2),("b","a")))]; - in () end) (); - - - - val _ = Testing.test "G.set_edge_data" (fn () => let - val _ = (G.set_edge_data eunit1 (E.mk "a") G.empty; - raise ERROR "G.set_edge_data did not raise exception") - handle G.no_such_edge_exp (_,en,g) => - (Testing.assert "correct ename in exp" ((E.string_of_name en) = "a"); - Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a"; - - val g' = G.set_edge_data eunit2 (E.mk "e") g; - val _ = assert_vertex_info "g'" g' - [("a",(vunit1,(["f"],["e"]))), - ("b",(vunit2,(["e"],["f"])))]; - val _ 
= assert_edge_info "g'" g' - [("e",((Directed,eunit2),("a","b"))), - ("f",((Undirected,eunit2),("b","a")))]; - - val g'' = G.set_edge_data eunit2 (E.mk "f") g'; - val _ = assert_vertex_info "g''" g'' - [("a",(vunit1,(["f"],["e"]))), - ("b",(vunit2,(["e"],["f"])))]; - val _ = assert_edge_info "g''" g'' - [("e",((Directed,eunit2),("a","b"))), - ("f",((Undirected,eunit2),("b","a")))]; - in () end) (); - - - - val _ = Testing.test "G.set_edge_dir" (fn () => let - val _ = (G.set_edge_dir Directed (E.mk "a") G.empty; - raise ERROR "G.set_edge_dir (empty) did not raise exception") - handle G.no_such_edge_exp (_,en,g) => - (Testing.assert "correct ename in exp (empty)" - (E.string_of_name en = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val g = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - |> add_dir_eunit1 "g" "a" "b" - |> add_undir_eunit2 "h" "b" "a" - - val _ = (G.set_edge_dir Directed (E.mk "a") G.empty; - raise ERROR "G.set_edge_dir (unknown) did not raise exception") - handle G.no_such_edge_exp (_,en,g) => - (Testing.assert "correct ename in exp (unknown)" - (E.string_of_name en = "a"); - Testing.assert "correct graph in exp (unknown)" - (G.is_empty g)) - - val _ = assert_g_eq "G.set_edge_dir (1)" g - (G.set_edge_dir Directed (E.mk "e") g) - val _ = assert_g_eq "G.set_edge_dir (2)" g - (G.set_edge_dir Undirected (E.mk "f") g) - val g_exp = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - |> add_dir_eunit1 "g" "a" "b" - |> add_undir_eunit2 "h" "b" "a" - val _ = assert_g_eq "G.set_edge_dir (3)" g_exp - (G.set_edge_dir Undirected (E.mk "e") g) - val g_exp = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "a" - |> add_dir_eunit1 "g" "a" "b" - |> add_undir_eunit2 "h" "b" "a" - val _ = assert_g_eq "G.set_edge_dir (4)" g_exp - (G.set_edge_dir Directed (E.mk "f") g) - in () end) (); - - - - val _ = Testing.test "G.update_vertex_data" (fn () => let - val _ = (G.update_vertex_data I (V.mk "a") G.empty; - raise ERROR "G.get_update_vertex_data did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp" ((V.string_of_name vn) = "a"); - Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a"; - - fun swap_vd vd = - if G.vdata_eq (vd,vert Data.VUnit1) - then vert Data.VUnit2 - else - if G.vdata_eq (vd,vert Data.VUnit2) - then vert Data.VUnit1 - else vd; - - val g' = G.update_vertex_data swap_vd (V.mk "a") g; - val _ = assert_vertex_info "g'" g' - [("a",(vunit2,(["f"],["e"]))), - ("b",(vunit2,(["e"],["f"])))]; - val _ = assert_edge_info "g'" g' - [("e",((Directed,eunit1),("a","b"))), - ("f",((Undirected,eunit2),("b","a")))]; - - val g'' = G.update_vertex_data swap_vd (V.mk "b") g'; - val _ = assert_vertex_info "g''" g'' - [("a",(vunit2,(["f"],["e"]))), - ("b",(vunit1,(["e"],["f"])))]; - val _ = assert_edge_info "g''" g'' - [("e",((Directed,eunit1),("a","b"))), - ("f",((Undirected,eunit2),("b","a")))]; - in () end) (); - - - - - val _ = Testing.test "G.update_edge_data" (fn () => let - val _ = (G.update_edge_data I (E.mk "a") G.empty; - raise ERROR "G.get_update_edge_data did not raise exception") - handle G.no_such_edge_exp (_,en,g) => - (Testing.assert "correct ename in exp" ((E.string_of_name en) = "a"); - 
Testing.assert "correct graph in exp" (G.is_empty g)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a"; - - fun swap_ed Data.EUnit1 = Data.EUnit2 - | swap_ed Data.EUnit2 = Data.EUnit1 - | swap_ed ed = ed; - - val g' = G.update_edge_data swap_ed (E.mk "e") g; - val _ = assert_vertex_info "g'" g' - [("a",(vunit1,(["f"],["e"]))), - ("b",(vunit2,(["e"],["f"])))]; - val _ = assert_edge_info "g'" g' - [("e",((Directed,eunit2),("a","b"))), - ("f",((Undirected,eunit2),("b","a")))]; - - val g'' = G.update_edge_data swap_ed (E.mk "f") g'; - val _ = assert_vertex_info "g''" g'' - [("a",(vunit1,(["f"],["e"]))), - ("b",(vunit2,(["e"],["f"])))]; - val _ = assert_edge_info "g''" g'' - [("e",((Directed,eunit2),("a","b"))), - ("f",((Undirected,eunit1),("b","a")))]; - in () end) (); - - - - val _ = Testing.test "G.update_edge_dir" (fn () => let - val _ = (G.update_edge_dir I (E.mk "a") G.empty; - raise ERROR "G.update_edge_dir (empty) did not raise exception") - handle G.no_such_edge_exp (_,en,g) => - (Testing.assert "correct ename in exp (empty)" - (E.string_of_name en = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val g = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - |> add_dir_eunit1 "g" "a" "b" - |> add_undir_eunit2 "h" "b" "a" - - val _ = (G.update_edge_dir I (E.mk "a") G.empty; - raise ERROR "G.update_edge_dir (unknown) did not raise exception") - handle G.no_such_edge_exp (_,en,g) => - (Testing.assert "correct ename in exp (unknown)" - (E.string_of_name en = "a"); - Testing.assert "correct graph in exp (unknown)" - (G.is_empty g)) - - val _ = assert_g_eq "G.update_edge_dir (1)" g - (G.update_edge_dir - (fn Directed => Directed - | Undirected => raise ERROR "bad input (1)") - (E.mk "e") g) - val _ = assert_g_eq "G.update_edge_dir (2)" g - (G.update_edge_dir - (fn Undirected => Undirected - | Directed => raise ERROR "bad input (2)") - (E.mk "f") g) - val g_exp = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - |> add_dir_eunit1 "g" "a" "b" - |> add_undir_eunit2 "h" "b" "a" - val _ = assert_g_eq "G.update_edge_dir (3)" g_exp - (G.update_edge_dir - (fn Directed => Undirected - | Undirected => raise ERROR "bad input (3)") - (E.mk "e") g) - val g_exp = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "a" - |> add_dir_eunit1 "g" "a" "b" - |> add_undir_eunit2 "h" "b" "a" - val _ = assert_g_eq "G.update_edge_dir (4)" g_exp - (G.update_edge_dir - (fn Undirected => Directed - | Directed => raise ERROR "bad input (4)") - (E.mk "f") g) - in () end) (); - - - - val _ = Testing.test "G.is_wire_vertex" (fn () => let - val _ = (G.is_wire_vertex G.empty (V.mk "a"); - raise ERROR "G.update_edge_dir (empty) did not raise exception") - handle G.no_such_vertex_exp (_,en,g) => - (Testing.assert "correct vertex in exp (empty)" - (V.string_of_name en = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val g = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_wv "c" - |> add_wv "d" - |> add_dir_eunit1 "e" "a" "c" - |> add_dir_eunit1 "f" "c" "b" - |> add_undir_eunit2 "g" "d" "d" - |> add_dir_eunit1 "h" "a" "a" - - val _ = (G.is_wire_vertex g (V.mk "e"); - raise ERROR "G.update_edge_dir (unknown) did not raise exception") - handle G.no_such_vertex_exp (_,en,g_e) => - (Testing.assert "correct 
vertex in exp (unknown)" - (V.string_of_name en = "e"); - Testing.assert "correct graph in exp (unknown)" - (G.exact_eq g_e g)) - - val _ = Testing.assert "not is_wire_vertex a" - (not (G.is_wire_vertex g (V.mk "a"))) - val _ = Testing.assert "not is_wire_vertex b" - (not (G.is_wire_vertex g (V.mk "b"))) - val _ = Testing.assert "is_wire_vertex c" - (G.is_wire_vertex g (V.mk "c")) - val _ = Testing.assert "is_wire_vertex d" - (G.is_wire_vertex g (V.mk "d")) - in () end) (); - - - - val _ = Testing.test "G.is_node_vertex" (fn () => let - val _ = (G.is_node_vertex G.empty (V.mk "a"); - raise ERROR "G.update_edge_dir (empty) did not raise exception") - handle G.no_such_vertex_exp (_,en,g) => - (Testing.assert "correct vertex in exp (empty)" - (V.string_of_name en = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val g = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_wv "c" - |> add_wv "d" - |> add_dir_eunit1 "e" "a" "c" - |> add_dir_eunit1 "f" "c" "b" - |> add_undir_eunit2 "g" "d" "d" - |> add_dir_eunit1 "h" "a" "a" - - val _ = (G.is_node_vertex g (V.mk "e"); - raise ERROR "G.update_edge_dir (unknown) did not raise exception") - handle G.no_such_vertex_exp (_,en,g_e) => - (Testing.assert "correct vertex in exp (unknown)" - (V.string_of_name en = "e"); - Testing.assert "correct graph in exp (unknown)" - (G.exact_eq g_e g)) - - val _ = Testing.assert "is_node_vertex a" - (G.is_node_vertex g (V.mk "a")) - val _ = Testing.assert "is_node_vertex b" - (G.is_node_vertex g (V.mk "b")) - val _ = Testing.assert "not is_node_vertex c" - (not (G.is_node_vertex g (V.mk "c"))) - val _ = Testing.assert "not is_node_vertex d" - (not (G.is_node_vertex g (V.mk "d"))) - in () end) (); - - - val _ = Testing.test "G.get_vertices" (fn () => let - val _ = Testing.assert "empty" (V.NSet.is_empty (G.get_vertices G.empty)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val _ = assert_vertices "G.get_vertices" ["a","b"] (G.get_vertices g); - in () end) (); - - - - val _ = Testing.test "G.get_wire_vertices" (fn () => let - val _ = Testing.assert "empty" (V.NSet.is_empty (G.get_wire_vertices G.empty)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_wv "c" - |> add_wv "d" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b" - |> add_dir_eunit2 "g" "b" "c" - - val _ = assert_vertices "G.get_wire_vertices" - ["c","d"] - (G.get_wire_vertices g); - in () end) (); - - - - val _ = Testing.test "G.get_node_vertices" (fn () => let - val _ = Testing.assert "empty" (V.NSet.is_empty (G.get_node_vertices G.empty)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_wv "c" - |> add_wv "d" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b" - |> add_dir_eunit2 "g" "b" "c" - - val _ = assert_vertices "G.get_node_vertices" - ["a","b"] - (G.get_node_vertices g); - in () end) (); - - - - val _ = Testing.test "G.get_edges" (fn () => let - val _ = Testing.assert "empty" (E.NSet.is_empty (G.get_edges G.empty)); - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val _ = assert_edges "G.get_edges" ["e","f"] (G.get_edges g); - in () end) (); - - - - val _ = Testing.test "G.is_subgraph" (fn () => let - val _ = Testing.assert "empty (= empty" (G.is_subgraph G.empty G.empty) - val g1 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c" "x" - |> add_dir_eunit1 "e" "a" "b" - |> 
add_undir_eunit2 "f" "b" "a" - val _ = Testing.assert "g1 (= g1" (G.is_subgraph g1 g1) - val _ = Testing.assert "empty (= g1" (G.is_subgraph G.empty g1) - val _ = Testing.assert "g1 (/= empty" (not (G.is_subgraph g1 G.empty)) - val g2 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c" "x" - |> add_undir_eunit2 "f" "b" "a" - val _ = Testing.assert "g2 (= g1" (G.is_subgraph g2 g1) - val _ = Testing.assert "g1 (/= g2" (not (G.is_subgraph g1 g2)) - val g3 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - val _ = Testing.assert "g3 (= g1" (G.is_subgraph g3 g1) - val _ = Testing.assert "g1 (/= g3" (not (G.is_subgraph g1 g3)) - val g4 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c" "y" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - val _ = Testing.assert "g4 (/= g1" (not (G.is_subgraph g4 g1)) - val _ = Testing.assert "g1 (/= g4" (not (G.is_subgraph g1 g4)) - val g5 = G.empty |> add_vunit1 "a" - |> add_vunit2 "d" - |> add_vexpr1 "c" "x" - |> add_dir_eunit1 "e" "a" "d" - |> add_undir_eunit2 "f" "d" "a" - val _ = Testing.assert "g5 (/= g1" (not (G.is_subgraph g5 g1)) - val _ = Testing.assert "g1 (/= g5" (not (G.is_subgraph g1 g5)) - val g6 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c" "x" - |> add_dir_eunit1 "g" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - val _ = Testing.assert "g6 (/= g1" (not (G.is_subgraph g6 g1)) - val _ = Testing.assert "g1 (/= g6" (not (G.is_subgraph g1 g6)) - val g7 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c" "x" - |> add_dir_eunit2 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - val _ = Testing.assert "g7 (/= g1" (not (G.is_subgraph g7 g1)) - val _ = Testing.assert "g1 (/= g7" (not (G.is_subgraph g1 g7)) - (* NB: direction of Undirected edges matters! 
*) - val g8 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c" "x" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "a" "b" - val _ = Testing.assert "g1 (/= g8" (not (G.is_subgraph g1 g8)) - val _ = Testing.assert "g8 (/= g1" (not (G.is_subgraph g8 g1)) - in () end) (); - - - - val _ = Testing.test "G.exact_eq" (fn () => let - val _ = Testing.assert "empty = empty" (G.exact_eq G.empty G.empty) - - val g1 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - (* g2 = g1 *) - val g2 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - (* g3 has different vertex data (b) *) - val g3 = G.empty |> add_vunit1 "a" - |> add_vunit1 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - (* g4 has different edge directedness (f) *) - val g4 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "a" - (* g5 has different edge data (e) *) - val g5 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit2 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - (* g6 has different orientation of undirected edge (f) *) - val g6 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "a" "b" - (* g6 has different orientation of directed edge (e) *) - val g7 = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_dir_eunit1 "e" "b" "a" - |> add_undir_eunit2 "f" "b" "a" - - val _ = Testing.assert "g1 = g2" (G.exact_eq g1 g2) - val _ = Testing.assert "g1 != g3" (not (G.exact_eq g1 g3)) - val _ = Testing.assert "g1 != g4" (not (G.exact_eq g1 g4)) - val _ = Testing.assert "g1 != g5" (not (G.exact_eq g1 g5)) - val _ = Testing.assert "g1 != g6" (not (G.exact_eq g1 g6)) - val _ = Testing.assert "g1 != g7" (not (G.exact_eq g1 g7)) - in () end) () - - - - val _ = Testing.test "G.add_vertex" (fn () => let - val (vn,g) = G.add_vertex vunit1 G.empty; - val _ = assert_vertex_info "g" g [(V.string_of_name vn,(vunit1,([],[])))]; - val _ = assert_edge_info "g" g []; - - val (vn',g') = G.add_vertex vunit2 g; - val _ = Testing.assert "new name" (not (V.name_eq (vn,vn'))); - val _ = assert_vertex_info "g'" g' [(V.string_of_name vn,(vunit1,([],[]))), - (V.string_of_name vn',(vunit2,([],[])))]; - val _ = assert_edge_info "g'" g' []; - in () end) (); - - - - (* TODO: add_vertex_anon *) - - - - val _ = Testing.test "G.add_named_vertex" (fn () => let - val v_a = V.mk "a"; - val v_b = V.mk "b"; - - val g = G.add_named_vertex v_a vunit1 G.empty; - val _ = assert_vertex_info "g" g [("a",(vunit1,([],[])))]; - val _ = assert_edges "g" [] (G.get_edges g); - - val g' = G.add_named_vertex v_b vunit2 g; - val _ = assert_vertex_info "g'" g' - [("a",(vunit1,([],[]))),("b",(vunit2,([],[])))]; - val _ = assert_edges "g'" [] (G.get_edges g'); - - val (vn, g'') = G.add_vertex vunit1 g'; - val _ = Testing.assert "correct name (!= a)" (not (V.name_eq (vn,v_a))); - val _ = Testing.assert "correct name (!= b)" (not (V.name_eq (vn,v_b))); - val _ = assert_vertex_info "g''" g'' - [("a",(vunit1,([],[]))), - ("b",(vunit2,([],[]))), - (V.string_of_name vn,(vunit1,([],[])))]; - val _ = assert_edges "g''" [] (G.get_edges g''); - in () end) (); - - - - local - fun wv_compat_checks add_edge_fun = let - val g_base = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - val g = g_base - |> add_dir_eunit1 "e1" "n1" "w1" - (* bad direction *) - val _ = (G.add_edge 
(Undirected,eunit1) (V.mk "w1") (V.mk "n2") g; - raise ERROR "G.add_edge allowed clashing directions (1)") - handle G.wire_vertex_exp _ => () - val _ = (G.add_edge (Undirected,eunit1) (V.mk "n2") (V.mk "w1") g; - raise ERROR "G.add_edge allowed clashing directions (2)") - handle G.wire_vertex_exp _ => () - (* bad type *) - val _ = (G.add_edge (Directed,eunit2) (V.mk "w1") (V.mk "n2") g; - raise ERROR "G.add_edge allowed clashing types (1)") - handle G.wire_vertex_exp _ => () - (* multiple ins *) - val _ = (G.add_edge (Directed,eunit1) (V.mk "n2") (V.mk "w1") g; - raise ERROR "G.add_edge allowed multiple ins on a wv") - handle G.wire_vertex_exp _ => () - (* but this is fine *) - val _ = G.add_edge (Directed,eunit1) (V.mk "w1") (V.mk "n2") g; - - val g = g_base - |> add_dir_eunit1 "e2" "w1" "n2" - (* bad direction *) - val _ = (G.add_edge (Undirected,eunit1) (V.mk "n1") (V.mk "w1") g; - raise ERROR "G.add_edge allowed clashing directions (3)") - handle G.wire_vertex_exp _ => () - val _ = (G.add_edge (Undirected,eunit1) (V.mk "w1") (V.mk "n1") g; - raise ERROR "G.add_edge allowed clashing directions (4)") - handle G.wire_vertex_exp _ => () - (* bad type *) - val _ = (G.add_edge (Directed,eunit2) (V.mk "n1") (V.mk "w1") g; - raise ERROR "G.add_edge allowed clashing types (2)") - handle G.wire_vertex_exp _ => () - (* multiple outs *) - val _ = (G.add_edge (Directed,eunit1) (V.mk "w1") (V.mk "n1") g; - raise ERROR "G.add_edge allowed multiple outs on a wv") - handle G.wire_vertex_exp _ => () - (* but this is fine *) - val _ = G.add_edge (Directed,eunit1) (V.mk "n1") (V.mk "w1") g; - - val g = g_base - |> add_undir_eunit1 "e1" "n1" "w1" - (* bad direction *) - val _ = (G.add_edge (Directed,eunit1) (V.mk "w1") (V.mk "n2") g; - raise ERROR "G.add_edge allowed clashing directions (5)") - handle G.wire_vertex_exp _ => () - val _ = (G.add_edge (Directed,eunit1) (V.mk "n2") (V.mk "w1") g; - raise ERROR "G.add_edge allowed clashing directions (6)") - handle G.wire_vertex_exp _ => () - (* bad type *) - val _ = (G.add_edge (Undirected,eunit2) (V.mk "w1") (V.mk "n2") g; - raise ERROR "G.add_edge allowed clashing types (3)") - handle G.wire_vertex_exp _ => () - (* but these are fine *) - val _ = G.add_edge (Undirected,eunit1) (V.mk "n2") (V.mk "w1") g; - val _ = G.add_edge (Undirected,eunit1) (V.mk "w1") (V.mk "n2") g; - - (* loops *) - val _ = G.add_edge (Undirected,eunit1) (V.mk "w1") (V.mk "w1") g_base; - val _ = G.add_edge (Directed,eunit1) (V.mk "w1") (V.mk "w1") g_base; - - val g = g_base - |> add_undir_eunit1 "e1" "w1" "w1" - val _ = (G.add_edge (Undirected,eunit1) (V.mk "w1") (V.mk "n2") g; - raise ERROR "G.add_edge allowed more edges on a circle (1)") - handle G.wire_vertex_exp _ => () - val _ = (G.add_edge (Undirected,eunit1) (V.mk "n2") (V.mk "w1") g; - raise ERROR "G.add_edge allowed more edges on a circle (2)") - handle G.wire_vertex_exp _ => () - - val g = g_base - |> add_dir_eunit1 "e1" "w1" "w1" - val _ = (G.add_edge (Directed,eunit1) (V.mk "w1") (V.mk "n2") g; - raise ERROR "G.add_edge allowed more edges on a circle (3)") - handle G.wire_vertex_exp _ => () - val _ = (G.add_edge (Directed,eunit1) (V.mk "n2") (V.mk "w1") g; - raise ERROR "G.add_edge allowed more edges on a circle (4)") - handle G.wire_vertex_exp _ => () - in () end - in - val _ = Testing.test "G.add_edge" (fn () => let - val va = V.mk "a"; - val vb = V.mk "b"; - - val _ = (G.add_edge (Directed,eunit1) va vb G.empty; - raise ERROR "G.add_edge did not raise exception (1)") - handle G.no_such_vertex_exp (_,vn,g') => - 
(Testing.assert "correct vname in exp (1)" - ((V.name_eq (vn,va)) orelse (V.name_eq (vn,vb))); - Testing.assert "correct graph in exp (1)" (G.exact_eq G.empty g')); - - val g = G.empty |> add_vunit1 "a"; - - val _ = (G.add_edge (Directed,eunit1) va vb g; - raise ERROR "G.add_edge did not raise exception (2)") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (2)" (V.name_eq (vn,vb)); - Testing.assert "correct graph in exp (2)" (G.exact_eq g g')); - val _ = (G.add_edge (Directed,eunit1) vb va g; - raise ERROR "G.add_edge did not raise exception (3)") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (3)" (V.name_eq (vn,vb)); - Testing.assert "correct graph in exp (3)" (G.exact_eq g g')); - val (en,g') = G.add_edge (Directed,eunit1) va va g; - val enstr = E.string_of_name en; - val _ = assert_edge_info "g'" g' [(enstr,((Directed,eunit1),("a","a")))]; - val _ = assert_vertex_info "g'" g' [("a",(vunit1,([enstr],[enstr])))]; - - val g = g |> add_vunit2 "b"; - - val (en,g') = G.add_edge (Directed,eunit1) va vb g; - val enstr = E.string_of_name en; - val _ = assert_edge_info "g'" g' - [(enstr,((Directed,eunit1),("a","b")))]; - val _ = assert_vertex_info "g'" g' - [("a",(vunit1,([],[enstr]))), - ("b",(vunit2,([enstr],[])))]; - - val (en',g'') = G.add_edge (Undirected,eunit2) vb va g'; - val enstr' = E.string_of_name en'; - val _ = assert_edge_info "g''" g'' - [(enstr,((Directed,eunit1),("a","b"))), - (enstr',((Undirected,eunit2),("b","a")))]; - val _ = assert_vertex_info "g''" g'' - [("a",(vunit1,([enstr'],[enstr]))), - ("b",(vunit2,([enstr],[enstr'])))]; - - val _ = wv_compat_checks G.add_edge - in () end) (); - - - (* TODO: add_edge_anon *) - - - val _ = Testing.test "G.add_named_edge" (fn () => let - val va = V.mk "a"; - val vb = V.mk "b"; - val ee = E.mk "e"; - val ef = E.mk "f"; - - val _ = (G.add_named_edge ee (Directed,eunit1) va vb G.empty; - raise ERROR "G.add_named_edge did not raise exception (1)") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (1)" - ((V.name_eq (vn,va)) orelse (V.name_eq (vn,vb))); - Testing.assert "correct graph in exp (1)" (G.exact_eq G.empty g')); - - val g = G.empty |> add_vunit1 "a"; - - val _ = (G.add_named_edge ee (Directed,eunit1) va vb g; - raise ERROR "G.add_named_edge did not raise exception (2)") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (2)" (V.name_eq (vn,vb)); - Testing.assert "correct graph in exp (2)" (G.exact_eq g g')); - val _ = (G.add_named_edge ee (Directed,eunit1) vb va g; - raise ERROR "G.add_named_edge did not raise exception (3)") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (3)" (V.name_eq (vn,vb)); - Testing.assert "correct graph in exp (3)" (G.exact_eq g g')); - val g' = G.add_named_edge ee (Directed,eunit1) va va g; - val _ = assert_edge_info "g'" g' [("e",((Directed,eunit1),("a","a")))]; - val _ = assert_vertex_info "g'" g' [("a",(vunit1,(["e"],["e"])))]; - - val g = g |> add_vunit2 "b"; - - val g' = G.add_named_edge ee (Directed,eunit1) va vb g; - val _ = assert_edge_info "g'" g' - [("e",((Directed,eunit1),("a","b")))]; - val _ = assert_vertex_info "g'" g' - [("a",(vunit1,([],["e"]))), - ("b",(vunit2,(["e"],[])))]; - - val g'' = G.add_named_edge ef (Undirected,eunit2) vb va g'; - val _ = assert_edge_info "g''" g'' - [("e",((Directed,eunit1),("a","b"))), - ("f",((Undirected,eunit2),("b","a")))]; - val _ = assert_vertex_info "g''" g'' - 
[("a",(vunit1,(["f"],["e"]))), - ("b",(vunit2,(["e"],["f"])))]; - - val (en,g''') = G.add_edge (Directed,eunit2) vb vb g''; - val _ = Testing.assert "correct name (4) (new)" - (not (E.name_eq (en,ee)) andalso not (E.name_eq (en,ef))); - val _ = assert_edge_info "g'''" g''' - [("e",((Directed,eunit1),("a","b"))), - ("f",((Undirected,eunit2),("b","a"))), - (E.string_of_name en,((Directed,eunit2),("b","b")))]; - val enstr = E.string_of_name en; - val _ = assert_vertex_info "g'''" g''' - [("a",(vunit1,(["f"],["e"]))), - ("b",(vunit2,(["e",enstr],["f",enstr])))]; - - val _ = wv_compat_checks (G.add_named_edge (E.mk "x")) - in () end) (); - end - - - - val _ = Testing.test "G.delete_edge" (fn () => let - val _ = G.delete_edge (E.mk "a") G.empty; - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val g' = G.delete_edge (E.mk "e") g; - val _ = assert_vertex_info "g'" g' - [("a",(vunit1,([],[]))), - ("b",(vunit2,(["f"],["f"])))]; - val _ = assert_edge_info "g'" g' - [("f",((Directed,eunit2),("b","b")))]; - in () end) (); - - - - val _ = Testing.test "G.delete_vertex" (fn () => let - val _ = G.delete_vertex (V.mk "a") G.empty; - - val g = G.empty |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit1 "e" "a" "b" - |> add_dir_eunit2 "f" "b" "b"; - - val g' = G.delete_vertex (V.mk "a") g; - val _ = assert_vertex_info "g'" g' [("b",(vunit2,(["f"],["f"])))]; - val _ = assert_edge_info "g'" g' [("f",((Directed,eunit2),("b","b")))]; - - val g' = G.delete_vertex (V.mk "b") g; - val _ = assert_vertex_info "g'" g' [("a",(vunit1,([],[])))]; - val _ = assert_edge_info "g'" g' []; - - val g' = G.delete_vertex (V.mk "a") g'; - val _ = assert_vertex_info "g'" g' []; - val _ = assert_edge_info "g'" g' []; - in () end) (); - - - - val _ = Testing.test "G.merge" (fn () => let - val _ = assert_g_eq "merge of empty graphs" G.empty - (G.merge G.empty G.empty) - - val g1 = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c" "x" - |> add_wv "d" - |> add_wv "e" - |> add_wv "f" - |> add_dir_eunit1 "e" "a" "d" - |> add_dir_eunit1 "f" "d" "b" - |> add_undir_eunit2 "g" "b" "e" - |> add_undir_eunit1 "h" "b" "f" - |> add_undir_eunit1 "i" "c" "f" - val _ = assert_g_eq "merge of g1 and g1" g1 (G.merge g1 g1) - - val g2 = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c_" "x" - |> add_wv "d" - |> add_wv "e_" - |> add_wv "f_" - |> add_dir_eunit1 "e" "a" "d" - |> add_dir_eunit1 "f" "d" "b" - |> add_undir_eunit2 "g_" "b" "e_" - |> add_undir_eunit1 "h_" "b" "f_" - |> add_undir_eunit1 "i_" "c_" "f_" - val g_exp = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c" "x" - |> add_vexpr1 "c_" "x" - |> add_wv "d" - |> add_wv "e_" - |> add_wv "f_" - |> add_wv "e" - |> add_wv "f" - |> add_dir_eunit1 "e" "a" "d" - |> add_dir_eunit1 "f" "d" "b" - |> add_undir_eunit2 "g_" "b" "e_" - |> add_undir_eunit1 "h_" "b" "f_" - |> add_undir_eunit1 "i_" "c_" "f_" - |> add_undir_eunit2 "g" "b" "e" - |> add_undir_eunit1 "h" "b" "f" - |> add_undir_eunit1 "i" "c" "f" - val _ = assert_g_eq "merge of g1 and g2" g_exp (G.merge g1 g2) - - val g3 = G.empty - |> add_vunit1 "a_" - |> add_vunit2 "b" - |> add_vexpr1 "c" "x" - |> add_wv "d_" - |> add_wv "e_" - |> add_wv "f" - |> add_dir_eunit1 "e_" "a_" "d_" - |> add_dir_eunit1 "f_" "d_" "b" - |> add_undir_eunit2 "g_" "b" "e_" - |> add_undir_eunit1 "h" "b" "f" - |> add_undir_eunit1 "i" "c" "f" - val g_exp = G.empty - |> add_vunit1 "a" - |> add_vunit1 "a_" - |> add_vunit2 "b" - |> 
add_vexpr1 "c" "x" - |> add_wv "d" - |> add_wv "d_" - |> add_wv "e" - |> add_wv "e_" - |> add_wv "f" - |> add_dir_eunit1 "e" "a" "d" - |> add_dir_eunit1 "e_" "a_" "d_" - |> add_dir_eunit1 "f" "d" "b" - |> add_dir_eunit1 "f_" "d_" "b" - |> add_undir_eunit2 "g" "b" "e" - |> add_undir_eunit2 "g_" "b" "e_" - |> add_undir_eunit1 "h" "b" "f" - |> add_undir_eunit1 "i" "c" "f" - val _ = assert_g_eq "merge of g1 and g3" g_exp (G.merge g1 g3) - - val g4 = G.empty - |> add_vunit1 "a_" - |> add_vunit2 "b" - |> add_vexpr1 "c" "x" - |> add_wv "d_" - |> add_wv "e_" - |> add_wv "f" - |> add_dir_eunit1 "e_" "a_" "d_" - |> add_dir_eunit1 "f_" "d_" "b" - |> add_undir_eunit2 "g_" "b" "e_" - |> add_undir_eunit1 "h_" "b" "f" - |> add_undir_eunit1 "i_" "c" "f" - val _ = (G.merge g1 g4; - raise ERROR "merge g1,g4 should have failed") - handle G.bad_graph_merge_exp _ => () - - val g6 = G.empty - |> add_vunit1 "v1" - |> add_wv "w1" - |> add_undir_eunit1 "e1" "v1" "w1" - val g7 = G.empty - |> add_vunit1 "v2" - |> add_wv "w1" - |> add_undir_eunit1 "e2" "v2" "w1" - val g_exp = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_wv "w1" - |> add_undir_eunit1 "e1" "v1" "w1" - |> add_undir_eunit1 "e2" "v2" "w1" - val _ = assert_g_eq "merge of g6 and g7" g_exp (G.merge g6 g7) - - val g8 = G.empty - |> add_vunit1 "v2" - |> add_wv "w1" - |> add_undir_eunit2 "e2" "v2" "w1" - val _ = (G.merge g6 g8; - raise ERROR "merge g6,g8 should have failed") - handle G.bad_graph_merge_exp _ => () - - val g9 = G.empty - |> add_wv "a" - val g10 = G.empty - |> add_vunit2 "k" - |> add_wv "a" - |> add_undir_eunit1 "Eq" "a" "k" - val g_exp = g10 - val _ = assert_g_eq "merge of g9 and g10" g_exp (G.merge g9 g10) - - val g11 = G.empty - |> add_vunit1 "j" - |> add_vunit1 "s" - |> add_wv "a" - |> add_wv "b" - |> add_wv "c" - |> add_wv "d" - |> add_wv "Va" - |> add_wv "Vb" - |> add_wv "Ve" - |> add_wv "Vf" - |> add_wv "Vg" - |> add_wv "Vh" - |> add_undir_eunit1 "Ea" "j" "Va" - |> add_undir_eunit1 "Eb" "Va" "Vb" - |> add_undir_eunit1 "Eg" "j" "Ve" - |> add_undir_eunit1 "Eh" "Ve" "Vf" - |> add_undir_eunit1 "Ei" "Vf" "s" - |> add_undir_eunit1 "Ek" "j" "c" - |> add_undir_eunit1 "El" "s" "Vg" - |> add_undir_eunit1 "En" "Vg" "Vh" - |> add_undir_eunit1 "Ev" "b" "s" - val g12 = G.empty - |> add_vunit2 "k" - |> add_wv "a" - |> add_wv "d" - |> add_wv "Vb" - |> add_wv "Vh" - |> add_undir_eunit1 "Eo" "k" "d" - |> add_undir_eunit1 "Ep" "Vh" "k" - |> add_undir_eunit1 "Eq" "a" "k" - |> add_undir_eunit1 "Er" "k" "Vb" - val g_exp = g11 - |> add_vunit2 "k" - |> add_undir_eunit1 "Eo" "k" "d" - |> add_undir_eunit1 "Ep" "Vh" "k" - |> add_undir_eunit1 "Eq" "a" "k" - |> add_undir_eunit1 "Er" "k" "Vb" - val _ = assert_g_eq "merge of g11 and g12" g_exp (G.merge g11 g12) - in () end) (); - - - - val _ = Testing.test "G.get_arity" (fn () => let - val _ = (G.get_arity G.empty (V.mk "a"); - raise ERROR "G.get_arity (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val g = G.empty - |> add_vunit1 "v1" - |> add_wv "v2" - |> add_vunit1 "v3" - |> add_wv "v4" - |> add_vunit1 "v5" - |> add_vunit1 "v6" - |> add_dir_eunit1 "e1" "v1" "v2" - |> add_undir_eunit1 "e2" "v1" "v3" - |> add_dir_eunit1 "e3" "v4" "v1" - |> add_undir_eunit1 "e4" "v5" "v1" - |> add_dir_eunit1 "e5" "v1" "v5" - |> add_dir_eunit1 "e6" "v5" "v1" - |> add_dir_eunit1 "e7" "v5" "v5" - |> add_undir_eunit1 "e8" "v5" "v5" - - val _ = 
(G.get_arity g (V.mk "v0"); - raise ERROR "G.get_arity (unknown vertex) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex)" - (G.exact_eq g g')) - - fun assert_arity_eq v rep = - let val a1 = G.get_arity g (V.mk v) in - if Arity.rep_of a1 = rep - then () - else ( writeln ("Expected ("^v^"):"); - Pretty.writeln (Pretty.block - ( [Pretty.str "(in:", - Pretty.str (Int.toString (#no_in rep))] - @ [Pretty.str ", out:", - Pretty.str (Int.toString (#no_out rep))] - @ [Pretty.str ", undir:", - Pretty.str (Int.toString (#no_undir rep))] - @ [Pretty.str ")"] )); - writeln ("Actual ("^v^"):"); - Arity.print a1; - Testing.assert ("arity of "^v) false) - end - - val _ = assert_arity_eq "v1" {no_in = 2, no_out = 2, no_undir = 2} - val _ = assert_arity_eq "v2" {no_in = 1, no_out = 0, no_undir = 0} - val _ = assert_arity_eq "v3" {no_in = 0, no_out = 0, no_undir = 1} - val _ = assert_arity_eq "v4" {no_in = 0, no_out = 1, no_undir = 0} - val _ = assert_arity_eq "v5" {no_in = 2, no_out = 2, no_undir = 3} - val _ = assert_arity_eq "v6" {no_in = 0, no_out = 0, no_undir = 0} - in () end) (); - - - - val _ = Testing.test "G.get_self_loops" (fn () => let - val _ = (G.get_self_loops G.empty (V.mk "a"); - raise ERROR "G.get_self_loops (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_vunit1 "v3" - |> add_wv "v4" - |> add_wv "v5" - |> add_wv "v6" - |> add_dir_eunit1 "loop1" "v1" "v1" - |> add_undir_eunit1 "loop2" "v1" "v1" - |> add_dir_eunit1 "loop3" "v2" "v2" - |> add_undir_eunit1 "loop4" "v4" "v4" - |> add_dir_eunit1 "e1" "v1" "v2" - |> add_undir_eunit1 "e2" "v1" "v3" - |> add_dir_eunit1 "e3" "v5" "v1" - - val _ = (G.get_self_loops g (V.mk "v0"); - raise ERROR "G.get_self_loops (unknown vertex) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex)" - (G.exact_eq g g')) - - val _ = assert_edges "self-loops of v1 = [loop1,loop2]" - ["loop1","loop2"] - (G.get_self_loops g (V.mk "v1")) - val _ = assert_edges "self-loops of v2 = [loop3]" - ["loop3"] - (G.get_self_loops g (V.mk "v2")) - val _ = assert_edges "self-loops of v3 = []" [] - (G.get_self_loops g (V.mk "v3")) - val _ = assert_edges "self-loops of v4 = [loop4]" - ["loop4"] - (G.get_self_loops g (V.mk "v4")) - val _ = assert_edges "self-loops of v5 = []" [] - (G.get_self_loops g (V.mk "v5")) - val _ = assert_edges "self-loops of v6 = []" [] - (G.get_self_loops g (V.mk "v6")) - in () end) (); - - - - val _ = Testing.test "G.delete_subgraph" (fn () => let - val _ = assert_g_eq "G.delete_subgraph (empty,empty)" G.empty - (G.delete_subgraph G.empty G.empty) - - val g1 = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_vexpr1 "c" "x" - |> add_dir_eunit1 "e" "a" "b" - |> add_undir_eunit2 "f" "b" "a" - val g2 = G.empty - |> add_vunit1 "a" - |> add_vunit2 "b" - |> add_undir_eunit2 "f" "b" "a" - val g_exp = G.empty - |> add_vexpr1 "c" "x" - - val _ = assert_g_eq "G.delete_subgraph (g1\\empty)" g1 - (G.delete_subgraph G.empty g1) - val _ = assert_g_eq "G.delete_subgraph (g1\\g2)" g_exp - (G.delete_subgraph g2 
g1) - in () end) (); - - - - val _ = Testing.test "G.has_edges_between" (fn () => let - val _ = (G.has_edges_between G.empty (V.mk "a") (V.mk "a"); - raise ERROR "G.has_edges_between (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_vexpr1 "v3" "x" - |> add_vunit2 "v4" - |> add_undir_eunit2 "u1" "v1" "v2" - |> add_dir_eunit1 "d1" "v1" "v3" - |> add_undir_eunit2 "u2" "v3" "v4" - |> add_dir_eunit1 "d2" "v4" "v3" - |> add_dir_eunit1 "d3" "v3" "v4" - |> add_undir_eunit1 "l1" "v2" "v2" - |> add_dir_eunit1 "l2" "v3" "v3" - - val _ = (G.has_edges_between g (V.mk "v0") (V.mk "v1"); - raise ERROR "G.has_edges_between (unknown vertex 1) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex 1)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex 1)" - (G.exact_eq g g')) - val _ = (G.has_edges_between g (V.mk "v1") (V.mk "v0"); - raise ERROR "G.has_edges_between (unknown vertex 2) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex 2)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex 2)" - (G.exact_eq g g')) - - val _ = Testing.assert "edges v1-v2" - (G.has_edges_between g (V.mk "v1") (V.mk "v2")) - val _ = Testing.assert "edges v2-v1" - (G.has_edges_between g (V.mk "v2") (V.mk "v1")) - val _ = Testing.assert "edges v1-v3" - (G.has_edges_between g (V.mk "v1") (V.mk "v3")) - val _ = Testing.assert "edges v3-v1" - (G.has_edges_between g (V.mk "v3") (V.mk "v1")) - val _ = Testing.assert "edges v3-v4" - (G.has_edges_between g (V.mk "v3") (V.mk "v4")) - val _ = Testing.assert "no edges v2-v4" - (not (G.has_edges_between g (V.mk "v2") (V.mk "v4"))) - val _ = Testing.assert "no edges v1-v1" - (not (G.has_edges_between g (V.mk "v1") (V.mk "v1"))) - val _ = Testing.assert "edges v2-v2" - (G.has_edges_between g (V.mk "v2") (V.mk "v2")) - val _ = Testing.assert "edges v3-v3" - (G.has_edges_between g (V.mk "v3") (V.mk "v3")) - in () end) (); - - - - val _ = Testing.test "G.edges_between" (fn () => let - val _ = (G.edges_between G.empty (V.mk "a") (V.mk "a"); - raise ERROR "G.edges_between (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_vexpr1 "v3" "x" - |> add_vunit2 "v4" - |> add_undir_eunit2 "u1" "v1" "v2" - |> add_dir_eunit1 "d1" "v1" "v3" - |> add_undir_eunit2 "u2" "v3" "v4" - |> add_dir_eunit1 "d2" "v4" "v3" - |> add_dir_eunit1 "d3" "v3" "v4" - |> add_undir_eunit1 "l1" "v2" "v2" - |> add_dir_eunit1 "l2" "v3" "v3" - - val _ = (G.edges_between g (V.mk "v0") (V.mk "v1"); - raise ERROR "G.edges_between (unknown vertex 1) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex 1)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex 1)" - (G.exact_eq g g')) - val _ = (G.edges_between g (V.mk "v1") (V.mk "v0"); - raise ERROR "G.edges_between (unknown vertex 2) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => 
- (Testing.assert "correct vname in exp (unknown vertex 2)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex 2)" - (G.exact_eq g g')) - - fun assert_edges_between v1 v2 exp = - (assert_edges ("edges "^v1^"-"^v2) exp - (G.edges_between g (V.mk v1) (V.mk v2)); - assert_edges ("edges "^v2^"-"^v1) exp - (G.edges_between g (V.mk v2) (V.mk v1))) - - val _ = assert_edges_between "v1" "v2" ["u1"] - val _ = assert_edges_between "v1" "v3" ["d1"] - val _ = assert_edges_between "v3" "v4" ["u2","d2","d3"] - val _ = assert_edges_between "v2" "v4" [] - val _ = assert_edges_between "v1" "v1" [] - val _ = assert_edges_between "v2" "v2" ["l1"] - val _ = assert_edges_between "v3" "v3" ["l2"] - in () end) (); - - - - val _ = Testing.test "G.dir_edges_between" (fn () => let - val _ = (G.dir_edges_between G.empty (V.mk "a") (V.mk "a"); - raise ERROR "G.dir_edges_between (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val g = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_vexpr1 "v3" "x" - |> add_vunit2 "v4" - |> add_undir_eunit2 "u1" "v1" "v2" - |> add_dir_eunit1 "d1" "v1" "v3" - |> add_undir_eunit2 "u2" "v3" "v4" - |> add_dir_eunit1 "d2" "v4" "v3" - |> add_dir_eunit1 "d3" "v3" "v4" - |> add_undir_eunit1 "l1" "v2" "v2" - |> add_dir_eunit1 "l2" "v3" "v3" - - val _ = (G.dir_edges_between g (V.mk "v0") (V.mk "v1"); - raise ERROR "G.dir_edges_between (unknown vertex 1) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex 1)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex 1)" - (G.exact_eq g g')) - val _ = (G.dir_edges_between g (V.mk "v1") (V.mk "v0"); - raise ERROR "G.dir_edges_between (unknown vertex 2) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex 2)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex 2)" - (G.exact_eq g g')) - - fun assert_edges_between v1 v2 forward backward = let - val (f,b) = G.dir_edges_between g (V.mk v1) (V.mk v2) - in - assert_edges ("edges "^v1^"->"^v2) forward f; - assert_edges ("edges "^v2^"->"^v1) backward b - end - - val _ = assert_edges_between "v1" "v2" [] [] - val _ = assert_edges_between "v1" "v3" ["d1"] [] - val _ = assert_edges_between "v3" "v4" ["d3"] ["d2"] - val _ = assert_edges_between "v2" "v4" [] [] - val _ = assert_edges_between "v1" "v1" [] [] - val _ = assert_edges_between "v2" "v2" [] [] - val _ = assert_edges_between "v3" "v3" ["l2"] ["l2"] - in () end) (); - - - - local - val g = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - |> add_undir_eunit2 "e3" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - in - val _ = Testing.test "G.rename_ograph/G.rename_ograph_anon" (fn () => let - val vsub_clash = VSub.empty - |> VSub.add (V.mk "n1",V.mk "w1") - |> VSub.add (V.mk "n2",V.mk "n3") - |> VSub.add (V.mk "n7",V.mk "n8") - val esub_clash = ESub.empty - |> ESub.add (E.mk "e1",E.mk "e3") - |> ESub.add (E.mk "e2",E.mk "e5") - |> ESub.add (E.mk "e7",E.mk "e8") - val _ = G.rename_ograph (vsub_clash,esub_clash) g - handle VSub.name_clash_exp _ => raise ERROR "clashing names should no longer raise exception" - | ESub.name_clash_exp _ 
=> raise ERROR "clashing names should no longer raise exception" - val _ = G.rename_ograph_anon (vsub_clash,esub_clash) g - handle VSub.name_clash_exp _ => raise ERROR "clashing names should no longer raise exception" - | ESub.name_clash_exp _ => raise ERROR "clashing names should no longer raise exception" - - val vsub_swap = VSub.empty - |> VSub.add (V.mk "n1",V.mk "n2") - |> VSub.add (V.mk "n2",V.mk "n1") - |> VSub.add (V.mk "n7",V.mk "n8") - val esub_swap = ESub.empty - |> ESub.add (E.mk "e1",E.mk "e2") - |> ESub.add (E.mk "e2",E.mk "e1") - |> ESub.add (E.mk "e7",E.mk "e8") - val g_exp = G.empty - |> add_vunit1 "n2" - |> add_vunit1 "n1" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e2" "n2" "w1" - |> add_dir_eunit1 "e1" "w1" "n1" - |> add_undir_eunit2 "e3" "n2" "w2" - |> add_undir_eunit2 "e4" "w2" "n1" - val ((vsub',esub'),g') = G.rename_ograph (vsub_swap,esub_swap) g - val _ = assert_g_eq "rename swap" g_exp g' - val _ = Testing.assert "n1 in sub" - (V.dest (VSub.get vsub' (V.mk "n1")) = "n2") - val _ = Testing.assert "w1 in sub" - (V.dest (VSub.get vsub' (V.mk "w1")) = "w1") - val _ = Testing.assert "e1 in sub" - (E.dest (ESub.get esub' (E.mk "e1")) = "e2") - val _ = Testing.assert "e4 in sub" - (E.dest (ESub.get esub' (E.mk "e4")) = "e4") - val _ = assert_g_eq "rename swap" g_exp - (G.rename_ograph_anon (vsub_swap,esub_swap) g) - - val g_exp = G.empty - |> add_vunit1 "n2" - |> add_vunit1 "w1" - |> add_wv "n1" - |> add_wv "w2" - |> add_dir_eunit1 "e2" "n2" "n1" - |> add_dir_eunit1 "e3" "n1" "w1" - |> add_undir_eunit2 "e1" "n2" "w2" - |> add_undir_eunit2 "e4" "w2" "w1" - val vsub_permute = VSub.empty - |> VSub.add (V.mk "n1",V.mk "n2") - |> VSub.add (V.mk "n2",V.mk "w1") - |> VSub.add (V.mk "w1",V.mk "n1") - val esub_permute = ESub.empty - |> ESub.add (E.mk "e1",E.mk "e2") - |> ESub.add (E.mk "e2",E.mk "e3") - |> ESub.add (E.mk "e3",E.mk "e1") - |> ESub.add (E.mk "e7",E.mk "e8") - val ((vsub',esub'),g') = G.rename_ograph (vsub_permute,esub_permute) g - val _ = assert_g_eq "rename permute" g_exp g' - val _ = Testing.assert "n1 in sub" - (V.dest (VSub.get vsub' (V.mk "n1")) = "n2") - val _ = Testing.assert "w2 in sub" - (V.dest (VSub.get vsub' (V.mk "w2")) = "w2") - val _ = Testing.assert "e1 in sub" - (E.dest (ESub.get esub' (E.mk "e1")) = "e2") - val _ = Testing.assert "e4 in sub" - (E.dest (ESub.get esub' (E.mk "e4")) = "e4") - val _ = assert_g_eq "rename permute" g_exp - (G.rename_ograph_anon (vsub_permute,esub_permute) g) - in () end) () - - - val _ = Testing.test "G.rename_vertices" (fn () => let - val s_clash = VSub.empty - |> VSub.add (V.mk "n1",V.mk "w1") - |> VSub.add (V.mk "n2",V.mk "n3") - |> VSub.add (V.mk "n7",V.mk "n8") - val _ = (G.rename_vertices s_clash g; - raise ERROR "clashing names should raise exception (n1->w1)") - handle VSub.name_clash_exp _ => () - - val s_swap = VSub.empty - |> VSub.add (V.mk "n1",V.mk "n2") - |> VSub.add (V.mk "n2",V.mk "n1") - |> VSub.add (V.mk "n7",V.mk "n8") - val g_exp = G.empty - |> add_vunit1 "n2" - |> add_vunit1 "n1" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n2" "w1" - |> add_dir_eunit1 "e2" "w1" "n1" - |> add_undir_eunit2 "e3" "n2" "w2" - |> add_undir_eunit2 "e4" "w2" "n1" - val _ = assert_g_eq "rename swap" g_exp - (G.rename_vertices s_swap g) - - val s_permute = VSub.empty - |> VSub.add (V.mk "n1",V.mk "n2") - |> VSub.add (V.mk "n2",V.mk "w1") - |> VSub.add (V.mk "w1",V.mk "n1") - val g_exp = G.empty - |> add_vunit1 "n2" - |> add_vunit1 "w1" - |> add_wv "n1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n2" 
"n1" - |> add_dir_eunit1 "e2" "n1" "w1" - |> add_undir_eunit2 "e3" "n2" "w2" - |> add_undir_eunit2 "e4" "w2" "w1" - val _ = assert_g_eq "rename permute" g_exp - (G.rename_vertices s_permute g) - in () end) () - - - val _ = Testing.test "G.rename_vertex" (fn () => let - val _ = assert_g_eq "rename empty" G.empty - (G.rename_vertex (V.mk "a") (V.mk "b") G.empty) - val _ = assert_g_eq "rename absent vertex" g - (G.rename_vertex (V.mk "a") (V.mk "b") g) - val _ = (G.rename_vertex (V.mk "n1") (V.mk "n2") g; - raise ERROR "clashing names should raise exception (n1->n2)") - handle G.duplicate_vertex_exp (v',g') => - (Testing.assert "correct v (n1->n2)" (V.dest v' = "n2"); - Testing.assert "correct g (n1->n2)" (G.exact_eq g' g)) - val _ = (G.rename_vertex (V.mk "w1") (V.mk "w2") g; - raise ERROR "clashing names should raise exception (w1->w2)") - handle G.duplicate_vertex_exp (v',g') => - (Testing.assert "correct v (w1->w2)" (V.dest v' = "w2"); - Testing.assert "correct g (w1->w2)" (G.exact_eq g' g)) - - val g_exp = G.empty - |> add_vunit1 "n3" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n3" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - |> add_undir_eunit2 "e3" "n3" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - val _ = assert_g_eq "rename n1->n3" g_exp - (G.rename_vertex (V.mk "n1") (V.mk "n3") g) - - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w3" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w3" - |> add_dir_eunit1 "e2" "w3" "n2" - |> add_undir_eunit2 "e3" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - val _ = assert_g_eq "rename w1->w3" g_exp - (G.rename_vertex (V.mk "w1") (V.mk "w3") g) - in () end) () - - - - val _ = Testing.test "G.rename_vertex_opt" (fn () => let - val _ = case G.rename_vertex_opt (V.mk "a") (V.mk "b") G.empty - of SOME g' => assert_g_eq "rename empty" G.empty g' - | NONE => raise ERROR "rename failed in empty graph" - val _ = case G.rename_vertex_opt (V.mk "a") (V.mk "b") g - of SOME g' => assert_g_eq "rename absent" g g' - | NONE => raise ERROR "rename of absent vertex failed" - val _ = Testing.assert "clashing names (n1->n2)" - (is_none (G.rename_vertex_opt (V.mk "n1") (V.mk "n2") g)) - val _ = Testing.assert "clashing names (w1->w2)" - (is_none (G.rename_vertex_opt (V.mk "w1") (V.mk "w2") g)) - - val g_exp = G.empty - |> add_vunit1 "n3" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n3" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - |> add_undir_eunit2 "e3" "n3" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - val _ = case G.rename_vertex_opt (V.mk "n1") (V.mk "n3") g - of SOME g' => assert_g_eq "rename n1->n3" g_exp g' - | NONE => raise ERROR "rename n1->n3 returned NONE" - - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w3" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w3" - |> add_dir_eunit1 "e2" "w3" "n2" - |> add_undir_eunit2 "e3" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - val _ = case G.rename_vertex_opt (V.mk "w1") (V.mk "w3") g - of SOME g' => assert_g_eq "rename w1->w3" g_exp g' - | NONE => raise ERROR "rename w1->w3 returned NONE" - in () end) () - - - - val _ = Testing.test "G.rename_edges" (fn () => let - val s_clash = ESub.empty - |> ESub.add (E.mk "e1",E.mk "e3") - |> ESub.add (E.mk "e2",E.mk "e5") - |> ESub.add (E.mk "e7",E.mk "e8") - val _ = (G.rename_edges s_clash g; - raise ERROR "clashing names should raise exception (n1->w1)") - handle ESub.name_clash_exp _ => () - - val s_swap = ESub.empty - |> ESub.add (E.mk "e1",E.mk "e2") 
- |> ESub.add (E.mk "e2",E.mk "e1") - |> ESub.add (E.mk "e7",E.mk "e8") - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e2" "n1" "w1" - |> add_dir_eunit1 "e1" "w1" "n2" - |> add_undir_eunit2 "e3" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - val _ = assert_g_eq "rename swap" g_exp - (G.rename_edges s_swap g) - - val s_permute = ESub.empty - |> ESub.add (E.mk "e1",E.mk "e2") - |> ESub.add (E.mk "e2",E.mk "e3") - |> ESub.add (E.mk "e3",E.mk "e1") - |> ESub.add (E.mk "e7",E.mk "e8") - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e2" "n1" "w1" - |> add_dir_eunit1 "e3" "w1" "n2" - |> add_undir_eunit2 "e1" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - val _ = assert_g_eq "rename permute" g_exp - (G.rename_edges s_permute g) - in () end) () - - - - val _ = Testing.test "G.rename_edge" (fn () => let - val _ = assert_g_eq "rename empty" G.empty - (G.rename_edge (E.mk "a") (E.mk "b") G.empty) - val _ = assert_g_eq "rename absent edge" g - (G.rename_edge (E.mk "a") (E.mk "b") g) - val _ = (G.rename_edge (E.mk "e1") (E.mk "e2") g; - raise ERROR "clashing names should raise exception (e1->e2)") - handle G.duplicate_edge_exp (e',g') => - (Testing.assert "correct e (e1->e2)" (E.dest e' = "e2"); - Testing.assert "correct g (e1->e2)" (G.exact_eq g' g)) - val _ = (G.rename_edge (E.mk "e3") (E.mk "e4") g; - raise ERROR "clashing names should raise exception (e3->e4)") - handle G.duplicate_edge_exp (e',g') => - (Testing.assert "correct e (e3->e4)" (E.dest e' = "e4"); - Testing.assert "correct g (e3->e4)" (G.exact_eq g' g)) - - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e5" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - |> add_undir_eunit2 "e3" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - val _ = assert_g_eq "rename e1->e5" g_exp - (G.rename_edge (E.mk "e1") (E.mk "e5") g) - - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - |> add_undir_eunit2 "e5" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - val _ = assert_g_eq "rename e3->e5" g_exp - (G.rename_edge (E.mk "e3") (E.mk "e5") g) - in () end) () - - - - val _ = Testing.test "G.rename_edge_opt" (fn () => let - val _ = case G.rename_edge_opt (E.mk "a") (E.mk "b") G.empty - of SOME g' => assert_g_eq "rename empty" G.empty g' - | NONE => raise ERROR "rename failed in empty graph" - val _ = case G.rename_edge_opt (E.mk "a") (E.mk "b") g - of SOME g' => assert_g_eq "rename absent" g g' - | NONE => raise ERROR "rename of absent edge failed" - val _ = Testing.assert "clashing names (e1->e2)" - (is_none (G.rename_edge_opt (E.mk "e1") (E.mk "e2") g)) - val _ = Testing.assert "clashing names (e4->e3)" - (is_none (G.rename_edge_opt (E.mk "e4") (E.mk "e3") g)) - - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e5" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - |> add_undir_eunit2 "e3" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - val _ = case G.rename_edge_opt (E.mk "e1") (E.mk "e5") g - of SOME g' => assert_g_eq "rename e1->e5" g_exp g' - | NONE => raise ERROR "rename e1->e5 returned NONE" - - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - 
|> add_undir_eunit2 "e5" "n1" "w2" - |> add_undir_eunit2 "e4" "w2" "n2" - val _ = case G.rename_edge_opt (E.mk "e3") (E.mk "e5") g - of SOME g' => assert_g_eq "rename e3->e5" g_exp g' - | NONE => raise ERROR "rename e3->e5 returned NONE" - in () end) () - end - - - - local - val g = G.empty - |> add_wv "b1" - |> add_wv "b2" - |> add_wv "b3" - |> add_wv "b4" - |> add_wv "i1" - |> add_wv "i2" - |> add_wv "i3" - |> add_wv "o1" - |> add_wv "o3" - |> add_wv "s1" - |> add_wv "c1" - |> add_wv "c2" - |> add_wv "c3" - |> add_wv "w1" - |> add_wv "w2" - |> add_wv "w3" - |> add_wv "w4" - |> add_wv "w5" - |> add_wv "w6" - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_vexpr1 "v3" "x" - |> add_dir_eunit1 "bw1" "i3" "o3" - |> add_undir_eunit1 "bw2" "b3" "b4" - |> add_undir_eunit1 "b1" "b1" "v1" - |> add_undir_eunit2 "b2" "b2" "w4" - |> add_undir_eunit2 "b2'" "w4" "v1" - |> add_dir_eunit1 "i1" "i1" "v1" - |> add_dir_eunit2 "i2" "i2" "w5" - |> add_dir_eunit2 "i2'" "w5" "v3" - |> add_dir_eunit1 "o1" "v2" "o1" - |> add_undir_eunit1 "c1" "c1" "c1" - |> add_dir_eunit2 "c2" "c2" "c3" - |> add_dir_eunit2 "c3" "c3" "c2" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e1'" "w1" "v3" - |> add_undir_eunit1 "e2" "v2" "w2" - |> add_undir_eunit1 "e2'" "w2" "v3" - |> add_undir_eunit1 "e3" "w3" "v3" - |> add_undir_eunit1 "e3'" "w3" "v3" - |> add_undir_eunit1 "e4" "v3" "w6" - |> add_undir_eunit1 "e4'" "v3" "w6" - in - val _ = Testing.test "G.get_boundary" (fn () => let - val _ = assert_vertices "G.get_boundary (empty)" [] - (G.get_boundary G.empty) - val _ = assert_vertices "G.get_boundary (g)" - ["b1","b2","b3","b4","i1","i2","i3","o1","o3","s1"] - (G.get_boundary g) - in () end) () - - - val _ = Testing.test "G.get_inputs" (fn () => let - val _ = assert_vertices "G.get_inputs (empty)" [] - (G.get_inputs G.empty) - val _ = assert_vertices "G.get_inputs (g)" - ["i1","i2","i3"] - (G.get_inputs g) - in () end) () - - - val _ = Testing.test "G.get_outputs" (fn () => let - val _ = assert_vertices "G.get_outputs (empty)" [] - (G.get_outputs G.empty) - val _ = assert_vertices "G.get_outputs (g)" - ["o1","o3"] - (G.get_outputs g) - in () end) () - - - val _ = Testing.test "G.is_boundary" (fn () => let - val _ = (G.is_boundary G.empty (V.mk "a"); - raise ERROR "G.is_boundary (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val _ = (G.is_boundary g (V.mk "v0"); - raise ERROR "G.is_boundary (unknown vertex) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex)" - (G.exact_eq g g')) - - val _ = map (fn v => Testing.assert v (G.is_boundary g (V.mk v))) - ["b1","b2","b3","b4","i1","i2","i3","o1","o3","s1"] - val _ = map (fn v => Testing.assert v (not (G.is_boundary g (V.mk v)))) - ["c1","c2","c3","w1","w2","w3","w4","w5","v1","v2","v3"] - in () end) () - - - val _ = Testing.test "G.is_input" (fn () => let - val _ = (G.is_input G.empty (V.mk "a"); - raise ERROR "G.is_input (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val _ = (G.is_input g (V.mk "v0"); - raise ERROR "G.is_input (unknown vertex) did not raise exception") 
- handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex)" - (G.exact_eq g g')) - - val _ = map (fn v => Testing.assert v (G.is_input g (V.mk v))) - ["i1","i2","i3"] - val _ = map (fn v => Testing.assert v (not (G.is_input g (V.mk v)))) - ["b1","b2","b3","b4","o1","o3","s1","c1","c2","c3", - "w1","w2","w3","w4","w5","v1","v2","v3"] - in () end) () - - - val _ = Testing.test "G.is_output" (fn () => let - val _ = (G.is_output G.empty (V.mk "a"); - raise ERROR "G.is_output (empty) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g) => - (Testing.assert "correct vname in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g)) - - val _ = (G.is_output g (V.mk "v0"); - raise ERROR "G.is_output (unknown vertex) did not raise exception") - handle G.no_such_vertex_exp (_,vn,g') => - (Testing.assert "correct vname in exp (unknown vertex)" - (V.name_eq (vn,V.mk "v0")); - Testing.assert "correct graph in exp (unknown vertex)" - (G.exact_eq g g')) - - val _ = map (fn v => Testing.assert v (G.is_output g (V.mk v))) - ["o1","o3"] - val _ = map (fn v => Testing.assert v (not (G.is_output g (V.mk v)))) - ["b1","b2","b3","b4","i1","i2","i3","s1","c1","c2","c3", - "w1","w2","w3","w4","w5","v1","v2","v3"] - in () end) () - - - val _ = Testing.test "G.wv_get_other_edge" (fn () => let - val _ = (case G.wv_get_other_edge G.empty (V.mk "a") (E.mk "e") - of NONE => () - | SOME _ => raise ERROR "G.wv_get_other_edge (empty) did not return NONE") - handle G.no_such_vertex_exp (_,vn,g_e) => - (Testing.assert "correct vertex in exp (empty)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g_e)) - | G.no_such_edge_exp (_,en,g_e) => - (Testing.assert "correct edge in exp (empty)" - (E.string_of_name en = "e"); - Testing.assert "correct graph in exp (empty)" - (G.is_empty g_e)) - - val _ = (case G.wv_get_other_edge g (V.mk "a") (E.mk "e1") - of NONE => () - | SOME _ => raise ERROR "G.wv_get_other_edge (unknown vertex) did not return NONE") - handle G.no_such_vertex_exp (_,vn,g_e) => - (Testing.assert "correct vertex in exp (unknown vertex)" - (V.string_of_name vn = "a"); - Testing.assert "correct graph in exp (unknown vertex)" - (G.exact_eq g_e g)) - val _ = (case G.wv_get_other_edge g (V.mk "w1") (E.mk "e") - of NONE => () - | SOME _ => raise ERROR "G.wv_get_other_edge (unknown edge) did not return NONE") - handle G.no_such_edge_exp (_,en,g_e) => - (Testing.assert "correct edge in exp (unknown edge)" - (E.string_of_name en = "e"); - Testing.assert "correct graph in exp (unknown edge)" - (G.exact_eq g_e g)) - val _ = (case G.wv_get_other_edge g (V.mk "w1") (E.mk "bw1") - of NONE => () - | SOME _ => raise ERROR "G.wv_get_other_edge (other edge) did not return NONE") - handle G.no_such_edge_exp (_,en,g_e) => - (Testing.assert "correct edge in exp (other edge)" - (E.string_of_name en = "bw1"); - Testing.assert "correct graph in exp (other edge)" - (G.exact_eq g_e g)) - - val _ = case G.wv_get_other_edge g (V.mk "i1") (E.mk "i1") - of NONE => () - | SOME e => raise ERROR ("i1,i1: expected NONE, got "^(E.dest e)) - val _ = case G.wv_get_other_edge g (V.mk "w1") (E.mk "e1") - of NONE => raise ERROR "w1,e1: got NONE" - | SOME e => if E.dest e = "e1'" then () - else raise ERROR ("w1,e1: expected e1', got "^(E.dest e)) - val _ = case G.wv_get_other_edge g (V.mk "w1") (E.mk "e1'") - of NONE => 
raise ERROR "w1,e1': got NONE" - | SOME e => if E.dest e = "e1" then () - else raise ERROR ("w1,e1': expected e1, got "^(E.dest e)) - val _ = case G.wv_get_other_edge g (V.mk "w2") (E.mk "e2") - of NONE => raise ERROR "w2,e2: got NONE" - | SOME e => if E.dest e = "e2'" then () - else raise ERROR ("w2,e2: expected e2', got "^(E.dest e)) - val _ = case G.wv_get_other_edge g (V.mk "w2") (E.mk "e2'") - of NONE => raise ERROR "w2,e2': got NONE" - | SOME e => if E.dest e = "e2" then () - else raise ERROR ("w2,e2': expected e2, got "^(E.dest e)) - val _ = case G.wv_get_other_edge g (V.mk "c2") (E.mk "c2") - of NONE => raise ERROR "c2,c2: got NONE" - | SOME e => if E.dest e = "c3" then () - else raise ERROR ("c2,c2: expected e3, got "^(E.dest e)) - val _ = case G.wv_get_other_edge g (V.mk "c2") (E.mk "c3") - of NONE => raise ERROR "c2,c3: got NONE" - | SOME e => if E.dest e = "c2" then () - else raise ERROR ("c2,c3: expected e2, got "^(E.dest e)) - in () end) (); - end - - - - local (* wires *) - val g_no_w8_wire = G.empty - |> add_wv "w1" - |> add_wv "w2" - |> add_wv "w3" - |> add_wv "w4" - |> add_wv "w5" - |> add_wv "w6" - |> add_wv "w7" - |> add_wv "bw1" - |> add_wv "bw2" - |> add_wv "bw3" - |> add_wv "bw4" - |> add_wv "bw5" - |> add_wv "bw6" - |> add_wv "bw7" - |> add_wv "bw8" - |> add_wv "isol1" - |> add_wv "c1" - |> add_wv "c2" - |> add_wv "c3" - |> add_wv "c4" - |> add_wv "c5" - |> add_vunit1 "n1" - |> add_vunit2 "n2" - |> add_vexpr1 "n3" "x" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "w2" - |> add_dir_eunit1 "e3" "w2" "n2" - |> add_dir_eunit2 "e4" "n2" "w3" - |> add_dir_eunit2 "e5" "w3" "w4" - |> add_dir_eexpr1 "e6" "n2" "n3" "x" - |> add_dir_eexpr2 "e7" "n3" "w5" "x" - |> add_dir_eexpr2 "e8" "w5" "n3" "x" - |> add_dir_estring1 "ce1" "c1" "c2" "x" - |> add_dir_estring1 "ce2" "c2" "c1" "x" - |> add_dir_estring2 "ce3" "c3" "c3" "x" - |> add_dir_estring1 "bwe1" "bw1" "bw2" "1" - |> add_dir_estring1 "bwe2" "bw3" "bw4" "2" - |> add_dir_estring1 "bwe3" "bw4" "bw5" "2" - |> add_undir_estring1 "f1" "n2" "w6" "y" - |> add_undir_estring1 "f2" "w6" "n3" "y" - |> add_undir_estring2 "f3" "n2" "w7" "y" - |> add_undir_estring2 "f4" "n3" "w7" "y" - |> add_undir_estring2 "ce4" "c4" "c5" "z" - |> add_undir_estring2 "ce5" "c4" "c5" "z" - |> add_undir_estring1 "bwe4" "bw6" "bw7" "3" - |> add_undir_estring1 "bwe5" "bw8" "bw7" "3" - val g = g_no_w8_wire - |> add_wv "w8" - |> add_undir_estring1 "f5" "w8" "n2" "z" - |> add_undir_estring1 "f6" "w8" "n3" "z" - val g_raw = g; - - (* a simple iso checker that only works on graphs where - * every wire and every node vertex has a different type *) - fun assert_g_iso_and_bounds_same msg g_exp g_actual = let - fun error m = - (writeln (msg^": "^m); - writeln "expected:"; - G.print g_exp; - writeln "got"; - G.print g_actual; - raise Testing.assertion_failed_exp (msg^": "^m)) - fun assert m b = if b then () else error m - val _ = assert "same boundaries" - (V.NSet.eq (G.get_boundary g_exp) (G.get_boundary g_actual)) - val _ = assert "vsets same size" - (V.NSet.cardinality (G.get_vertices g_exp) = - V.NSet.cardinality (G.get_vertices g_actual)) - val _ = assert "esets same size" - (E.NSet.cardinality (G.get_edges g_exp) = - E.NSet.cardinality (G.get_edges g_actual)) - fun map_nverts vsub vset_exp vset_actual = - case V.NSet.pull_local_bot vset_exp - of NONE => if V.NSet.is_empty vset_actual then vsub - else error "too many node vertices" - | SOME (v,vset_exp') => - let - val vdata = G.get_vertex_data g_exp v - fun same_data vn = G.vdata_eq 
(vdata,G.get_vertex_data g_actual vn) - in - case V.NSet.get_exists same_data vset_actual - of NONE => error ("vertex matching "^(V.dest v)) - | SOME tgt => map_nverts (VSub.add (v,tgt) vsub) - vset_exp' - (V.NSet.delete tgt vset_actual) - end - val vsub = VSub.extend_fixed (G.get_boundary g_exp) VSub.empty - val vsub = map_nverts vsub (G.get_node_vertices g_exp) - (G.get_node_vertices g_actual) - fun map_self_loops vsub esub c_exp c_act = - case E.NSet.pull_local_bot c_exp - of NONE => if E.NSet.is_empty c_act then (vsub,esub) - else error "too many circles" - | SOME (e,c_exp') => - let - val (edir,edata) = G.get_edge_dir_and_data g_exp e - fun same_dir_and_data en = let - val (edir',edata') = G.get_edge_dir_and_data g_actual en - in edir = edir' andalso G.edata_eq (edata,edata') end - in - case E.NSet.get_exists same_dir_and_data c_act - of NONE => error ("circle matching "^(E.dest e)) - | SOME tgt => - let - val cv = G.get_edge_source g_exp e - val cv' = G.get_edge_source g_actual tgt - val vsub' = if VSub.is_mapped vsub (cv,cv') - then vsub - else VSub.add (cv,cv') vsub - in - map_self_loops - vsub' (ESub.add (e,tgt) esub) - c_exp' (E.NSet.delete tgt c_act) - end - end - fun is_self_loop g e = V.name_eq (G.get_edge_source g e, - G.get_edge_target g e) - fun get_self_loops g = E.NSet.filter (is_self_loop g) (G.get_edges g) - val (vsub,esub) = map_self_loops vsub ESub.empty - (get_self_loops g_exp) - (get_self_loops g_actual) - fun get_non_self_loops g = E.NSet.filter (not o (is_self_loop g)) (G.get_edges g) - fun check_wires e_exp e_act unm_wvs_exp unm_wvs_act = - case E.NSet.get_local_bot e_exp - of NONE => if not (E.NSet.is_empty e_act) - then error "too many edges" - else (unm_wvs_exp,unm_wvs_act) - | SOME e => - let - val (edir,edata) = G.get_edge_dir_and_data g_exp e - fun same_dir_and_data en = let - val (edir',edata') = G.get_edge_dir_and_data g_actual en - in edir = edir' andalso G.edata_eq (edata,edata') end - in - case E.NSet.get_exists same_dir_and_data e_act - of NONE => error ("edge matching "^(E.dest e)) - | SOME tgt => - let - val (s,t,wvs,es) = G.get_wire_containing_edge g_exp e - val (s',t',wvs',es') = G.get_wire_containing_edge g_actual tgt - val e_exp' = E.NSet.remove_set es e_exp - val e_act' = E.NSet.remove_set es' e_act - val unm_wvs_exp' = - unm_wvs_exp |> V.NSet.remove_set wvs - |> V.NSet.delete s - |> V.NSet.delete t - val unm_wvs_act' = - unm_wvs_act |> V.NSet.remove_set wvs' - |> V.NSet.delete s' - |> V.NSet.delete t' - val _ = - if V.name_eq (s,t) - then assert "self-loop matches self-loop" (V.name_eq (s',t')) - else (assert "wire source matched" - (VSub.is_mapped vsub (s,s')); - assert "wire target matched" - (VSub.is_mapped vsub (t,t'))) - in - check_wires e_exp' e_act' unm_wvs_exp' unm_wvs_act' - end - end - in () end - - fun wire v1 v2 vs es = - (V.mk v1, V.mk v2, V.NSet.of_list (map V.mk vs), - E.NSet.of_list (map E.mk es)); - val exp_wire_e1 = wire "n1" "n2" ["w1","w2"] ["e1","e2","e3"]; - val exp_wire_e2 = exp_wire_e1; - val exp_wire_e3 = exp_wire_e1; - val exp_wire_e4 = wire "n2" "w4" ["w3"] ["e4","e5"]; - val exp_wire_e5 = exp_wire_e4; - val exp_wire_e6 = wire "n2" "n3" [] ["e6"]; - val exp_wire_e7 = wire "n3" "n3" ["w5"] ["e7","e8"]; - val exp_wire_e8 = exp_wire_e7; - (* this circle wire is special, as either vertex could be - * considered the endpoint *) - val exp_wire_ce1_a = wire "c1" "c1" ["c2"] ["ce1","ce2"]; - val exp_wire_ce1_b = wire "c2" "c2" ["c1"] ["ce1","ce2"]; - val exp_wire_ce2_a = exp_wire_ce1_a; - val exp_wire_ce2_b = exp_wire_ce1_b; 
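(* Note: the `wire` helper above packs a wire as the 4-tuple (source vertex,
 * target vertex, interior wire-vertex set, edge set) -- the same shape that
 * G.get_wire_containing_edge returns. So exp_wire_e1 denotes the wire
 * n1 --e1--> w1 --e2--> w2 --e3--> n2, with w1 and w2 as its interior points. *)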
- val exp_wire_ce3 = wire "c3" "c3" [] ["ce3"]; - val exp_wire_f1 = wire "n2" "n3" ["w6"] ["f1","f2"]; - val exp_wire_f2 = exp_wire_f1; - val exp_wire_f3 = wire "n2" "n3" ["w7"] ["f3","f4"]; - val exp_wire_f4 = exp_wire_f3; - val exp_wire_f5 = wire "n2" "n3" ["w8"] ["f5","f6"]; - val exp_wire_f6 = exp_wire_f5; - in - val _ = Testing.test "G.wire_eq" (fn () => let - val _ = Testing.assert "e1,e1" (G.wire_eq exp_wire_e1 exp_wire_e1) - val _ = Testing.assert "e1,e4" (not (G.wire_eq exp_wire_e1 exp_wire_e4)) - in () end) () - - - val _ = Testing.test "G.wire_ord" (fn () => let - val _ = Testing.assert "e1 ord e1" - (G.wire_ord (exp_wire_e1,exp_wire_e1) = EQUAL) - val _ = case G.wire_ord (exp_wire_e1,exp_wire_e4) - of GREATER => Testing.assert "e4 < e1" - (G.wire_ord (exp_wire_e4,exp_wire_e1) = LESS) - | LESS => Testing.assert "e4 > e1" - (G.wire_ord (exp_wire_e4,exp_wire_e1) = GREATER) - | EQUAL => Testing.assert "e1 <> e4" false - in () end) () - - - val _ = Testing.test "G.delete_wire" (fn () => let - val _ = assert_g_eq "delete w8 wire" g_no_w8_wire - (G.delete_wire exp_wire_f5 g_raw) - in () end) () - - - val _ = Testing.test "G.get_wire_containing_edge" (fn () => let - fun test_wire_for_edge exp_wire en = - if G.wire_eq exp_wire (G.get_wire_containing_edge g_raw (E.mk en)) - then () - else raise ERROR ("Incorrect wire returned for edge "^en) - - val _ = test_wire_for_edge exp_wire_e1 "e1" - val _ = test_wire_for_edge exp_wire_e2 "e2" - val _ = test_wire_for_edge exp_wire_e3 "e3" - val _ = test_wire_for_edge exp_wire_e4 "e4" - val _ = test_wire_for_edge exp_wire_e5 "e5" - val _ = test_wire_for_edge exp_wire_e6 "e6" - val _ = test_wire_for_edge exp_wire_e7 "e7" - val _ = test_wire_for_edge exp_wire_e8 "e8" - val _ = - if G.wire_eq exp_wire_ce1_a (G.get_wire_containing_edge g_raw (E.mk "ce1")) - then () - else - if G.wire_eq exp_wire_ce1_b (G.get_wire_containing_edge g_raw (E.mk "ce1")) - then () - else raise ERROR "Incorrect wire returned for edge ce1" - val _ = - if G.wire_eq exp_wire_ce2_a (G.get_wire_containing_edge g_raw (E.mk "ce2")) - then () - else - if G.wire_eq exp_wire_ce2_b (G.get_wire_containing_edge g_raw (E.mk "ce2")) - then () - else raise ERROR "Incorrect wire returned for edge ce2" - val _ = test_wire_for_edge exp_wire_ce3 "ce3" - val _ = test_wire_for_edge exp_wire_f1 "f1" - val _ = test_wire_for_edge exp_wire_f2 "f2" - val _ = test_wire_for_edge exp_wire_f3 "f3" - val _ = test_wire_for_edge exp_wire_f4 "f4" - val _ = test_wire_for_edge exp_wire_f5 "f5" - val _ = test_wire_for_edge exp_wire_f6 "f6" - in () end) () - - - val _ = Testing.test "G.get_wires_for_edges" (fn () => let - fun test_wires_for_edges exp_wires es = let - val actual_wires = G.get_wires_for_edges g_raw - (E.NSet.of_list (map E.mk es)) - fun unique (w::ws) = (not (exists (G.wire_eq w) ws)) - andalso (unique ws) - | unique [] = true - in - if forall (fn w => exists (G.wire_eq w) exp_wires) actual_wires - then if unique actual_wires - then () - else raise ERROR ("Duplicate wires returned") - else raise ERROR ("Incorrect wires returned for edges") - end - val _ = test_wires_for_edges [exp_wire_f1] ["f1"] - val _ = test_wires_for_edges [exp_wire_f1,exp_wire_ce3] ["f1","ce3"] - val _ = test_wires_for_edges [exp_wire_f1,exp_wire_ce3] - ["f1","f2","ce3"] - in () end) (); - - - val _ = Testing.test "G.get_wire_list" (fn () => let - val exp_wire_list = [exp_wire_e1,exp_wire_e4,exp_wire_e6,exp_wire_e7, - exp_wire_ce3,exp_wire_f1,exp_wire_f3,exp_wire_f5, - (G.get_wire_containing_edge g_raw (E.mk "ce1")), - 
(G.get_wire_containing_edge g_raw (E.mk "ce4")), - (G.get_wire_containing_edge g_raw (E.mk "bwe1")), - (G.get_wire_containing_edge g_raw (E.mk "bwe2")), - (G.get_wire_containing_edge g_raw (E.mk "bwe4"))] - val real_wire_list = G.get_wire_list g_raw - fun c_wire_lists (w::ws) (w'::ws') = - if G.wire_eq w w' then c_wire_lists ws ws' - else raise ERROR ("wire lists differ EXPECTED: "^Pretty.string_of (G.pretty_wire w)^ - " GOT: "^Pretty.string_of (G.pretty_wire w')) - | c_wire_lists [] [] = () - | c_wire_lists _ _ = raise ERROR "failed, wire lists of different lengths" - in - c_wire_lists (sort G.wire_ord exp_wire_list) - (sort G.wire_ord real_wire_list) - end) () - - - val _ = Testing.test "G.normalise" (fn () => let - val g_exp_norm = G.empty - |> add_wv "w1" - |> add_wv "w2" - |> add_wv "w4" - |> add_wv "w5" - |> add_wv "w6" - |> add_wv "w7" - |> add_wv "w8" - |> add_wv "w9" - |> add_wv "w10" - |> add_wv "w11" - |> add_wv "w12" - |> add_wv "w13" - |> add_wv "w14" - |> add_wv "c1" - |> add_wv "c3" - |> add_wv "c4" - |> add_wv "bw1" - |> add_wv "bw2" - |> add_wv "bw3" - |> add_wv "bw5" - |> add_wv "bw6" - |> add_wv "bw8" - |> add_wv "isol1" - |> add_vunit1 "n1" - |> add_vunit2 "n2" - |> add_vexpr1 "n3" "x" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "w2" - |> add_dir_eunit1 "e3" "w2" "n2" - |> add_dir_eunit2 "e4" "n2" "w4" - |> add_dir_eexpr1 "e6a" "n2" "w9" "x" - |> add_dir_eexpr1 "e6b" "w9" "w10" "x" - |> add_dir_eexpr1 "e6c" "w10" "n3" "x" - |> add_dir_eexpr2 "e7" "n3" "w5" "x" - |> add_dir_eexpr2 "e8a" "w5" "w11" "x" - |> add_dir_eexpr2 "e8b" "w11" "n3" "x" - |> add_dir_estring1 "ce1" "c1" "c1" "x" - |> add_dir_estring2 "ce3" "c3" "c3" "x" - |> add_dir_estring1 "bwe1" "bw1" "bw2" "1" - |> add_dir_estring1 "bwe2" "bw3" "bw5" "2" - |> add_undir_estring1 "f1" "n2" "w6" "y" - |> add_undir_estring1 "f2a" "w6" "w12" "y" - |> add_undir_estring1 "f2b" "w12" "n3" "y" - |> add_undir_estring2 "f3" "n2" "w7" "y" - |> add_undir_estring2 "f4a" "w7" "w13" "y" - |> add_undir_estring2 "f4b" "n3" "w13" "y" - |> add_undir_estring1 "f5" "w8" "n2" "z" - |> add_undir_estring1 "f6a" "w8" "w14" "z" - |> add_undir_estring1 "f6b" "w14" "n3" "z" - |> add_undir_estring2 "ce4" "c4" "c4" "z" - |> add_undir_estring1 "bwe4" "bw6" "bw8" "3" - val g_norm = G.normalise g_raw - val _ = assert_g_iso_and_bounds_same "normalise" g_exp_norm g_norm - in () end) () - - - val _ = Testing.test "G.minimise" (fn () => let - val g_exp_min = G.empty - |> add_wv "w4" - |> add_wv "c1" - |> add_wv "c3" - |> add_wv "c4" - |> add_wv "bw1" - |> add_wv "bw2" - |> add_wv "bw3" - |> add_wv "bw5" - |> add_wv "bw6" - |> add_wv "bw8" - |> add_wv "isol1" - |> add_vunit1 "n1" - |> add_vunit2 "n2" - |> add_vexpr1 "n3" "x" - |> add_dir_eunit1 "e1" "n1" "n2" - |> add_dir_eunit2 "e4" "n2" "w4" - |> add_dir_eexpr1 "e6" "n2" "n3" "x" - |> add_dir_eexpr2 "e7" "n3" "n3" "x" - |> add_dir_estring1 "ce1" "c1" "c1" "x" - |> add_dir_estring2 "ce3" "c3" "c3" "x" - |> add_dir_estring1 "bwe1" "bw1" "bw2" "1" - |> add_dir_estring1 "bwe2" "bw3" "bw5" "2" - |> add_undir_estring1 "f1" "n2" "n3" "y" - |> add_undir_estring2 "f3" "n2" "n3" "y" - |> add_undir_estring1 "f5" "n2" "n3" "z" - |> add_undir_estring2 "ce4" "c4" "c4" "z" - |> add_undir_estring1 "bwe4" "bw6" "bw8" "3" - val g_min = G.minimise g_raw - val _ = assert_g_iso_and_bounds_same "minimise" g_exp_min g_min - in () end) () - - - val _ = Testing.test "G.split_edge" (fn () => let - val g = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> 
add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "w2" - |> add_dir_eunit1 "e3" "w2" "n2" - val ((e1,v,e2),g') = G.split_edge (E.mk "e2") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv (V.dest v) - |> add_wv "w2" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 (E.dest e1) "w1" (V.dest v) - |> add_dir_eunit1 (E.dest e2) (V.dest v) "w2" - |> add_dir_eunit1 "e3" "w2" "n2" - val _ = assert_g_eq "split (d1)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv (V.dest v) - |> add_dir_eunit1 (E.dest e1) "n1" (V.dest v) - |> add_dir_eunit1 (E.dest e2) (V.dest v) "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - val _ = assert_g_eq "split (d2)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 "e2" "w1" "n2" - val ((e1,v,e2),g') = G.split_edge (E.mk "e2") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv (V.dest v) - |> add_dir_eunit1 "e1" "n1" "w1" - |> add_dir_eunit1 (E.dest e1) "w1" (V.dest v) - |> add_dir_eunit1 (E.dest e2) (V.dest v) "n2" - val _ = assert_g_eq "split (d3)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_dir_eunit1 "e1" "n1" "n2" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv (V.dest v) - |> add_dir_eunit1 (E.dest e1) "n1" (V.dest v) - |> add_dir_eunit1 (E.dest e2) (V.dest v) "n2" - val _ = assert_g_eq "split (d4)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_dir_eunit1 "e1" "n1" "n1" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_wv (V.dest v) - |> add_dir_eunit1 (E.dest e1) "n1" (V.dest v) - |> add_dir_eunit1 (E.dest e2) (V.dest v) "n1" - val _ = assert_g_eq "split (d5)" g_exp g' - - val g = G.empty - |> add_wv "w1" - |> add_dir_eunit1 "e1" "w1" "w1" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_wv "w1" - |> add_wv (V.dest v) - |> add_dir_eunit1 (E.dest e1) "w1" (V.dest v) - |> add_dir_eunit1 (E.dest e2) (V.dest v) "w1" - val _ = assert_g_eq "split (d6)" g_exp g' - - val g = G.empty - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "w1" "w2" - |> add_dir_eunit1 "e2" "w2" "w1" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_wv "w1" - |> add_wv "w2" - |> add_wv (V.dest v) - |> add_dir_eunit1 (E.dest e1) "w1" (V.dest v) - |> add_dir_eunit1 (E.dest e2) (V.dest v) "w2" - |> add_dir_eunit1 "e2" "w2" "w1" - val _ = assert_g_eq "split (d7)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_wv "o1" - |> add_dir_eunit1 "e1" "n1" "o1" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_wv "o1" - |> add_wv (V.dest v) - |> add_dir_eunit1 (E.dest e1) "n1" (V.dest v) - |> add_dir_eunit1 (E.dest e2) (V.dest v) "o1" - val _ = assert_g_eq "split (d8)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_wv "i1" - |> add_dir_eunit1 "e1" "i1" "n1" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_wv "i1" - |> add_wv (V.dest v) - |> add_dir_eunit1 (E.dest e1) "i1" (V.dest v) - |> add_dir_eunit1 
(E.dest e2) (V.dest v) "n1" - val _ = assert_g_eq "split (d9)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv "w2" - |> add_undir_eunit1 "e1" "n1" "w1" - |> add_undir_eunit1 "e2" "w1" "w2" - |> add_undir_eunit1 "e3" "w2" "n2" - val ((e1,v,e2),g') = G.split_edge (E.mk "e2") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv (V.dest v) - |> add_wv "w2" - |> add_undir_eunit1 "e1" "n1" "w1" - |> add_undir_eunit1 (E.dest e1) "w1" (V.dest v) - |> add_undir_eunit1 (E.dest e2) (V.dest v) "w2" - |> add_undir_eunit1 "e3" "w2" "n2" - val _ = assert_g_eq "split (u1)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_undir_eunit1 "e1" "n1" "w1" - |> add_undir_eunit1 "e2" "w1" "n2" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv (V.dest v) - |> add_undir_eunit1 (E.dest e1) "n1" (V.dest v) - |> add_undir_eunit1 (E.dest e2) (V.dest v) "w1" - |> add_undir_eunit1 "e2" "w1" "n2" - val _ = assert_g_eq "split (u2)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_undir_eunit1 "e1" "n1" "w1" - |> add_undir_eunit1 "e2" "w1" "n2" - val ((e1,v,e2),g') = G.split_edge (E.mk "e2") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv "w1" - |> add_wv (V.dest v) - |> add_undir_eunit1 "e1" "n1" "w1" - |> add_undir_eunit1 (E.dest e1) "w1" (V.dest v) - |> add_undir_eunit1 (E.dest e2) (V.dest v) "n2" - val _ = assert_g_eq "split (u3)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_undir_eunit1 "e1" "n1" "n2" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_wv (V.dest v) - |> add_undir_eunit1 (E.dest e1) "n1" (V.dest v) - |> add_undir_eunit1 (E.dest e2) (V.dest v) "n2" - val _ = assert_g_eq "split (4)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_undir_eunit1 "e1" "n1" "n1" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_wv (V.dest v) - |> add_undir_eunit1 (E.dest e1) "n1" (V.dest v) - |> add_undir_eunit1 (E.dest e2) (V.dest v) "n1" - val _ = assert_g_eq "split (u5)" g_exp g' - - val g = G.empty - |> add_wv "w1" - |> add_undir_eunit1 "e1" "w1" "w1" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_wv "w1" - |> add_wv (V.dest v) - |> add_undir_eunit1 (E.dest e1) "w1" (V.dest v) - |> add_undir_eunit1 (E.dest e2) (V.dest v) "w1" - val _ = assert_g_eq "split (u6)" g_exp g' - - val g = G.empty - |> add_wv "w1" - |> add_wv "w2" - |> add_undir_eunit1 "e1" "w1" "w2" - |> add_undir_eunit1 "e2" "w2" "w1" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_wv "w1" - |> add_wv "w2" - |> add_wv (V.dest v) - |> add_undir_eunit1 (E.dest e1) "w1" (V.dest v) - |> add_undir_eunit1 (E.dest e2) (V.dest v) "w2" - |> add_undir_eunit1 "e2" "w2" "w1" - val _ = assert_g_eq "split (u7)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_wv "b1" - |> add_undir_eunit1 "e1" "n1" "b1" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_wv "b1" - |> add_wv (V.dest v) - |> add_undir_eunit1 (E.dest e1) "n1" (V.dest v) - |> add_undir_eunit1 (E.dest e2) (V.dest v) "b1" - val _ = assert_g_eq "split (u8)" g_exp g' - - val g = G.empty - |> add_vunit1 "n1" - |> add_wv 
"b1" - |> add_undir_eunit1 "e1" "b1" "n1" - val ((e1,v,e2),g') = G.split_edge (E.mk "e1") g - val g_exp = G.empty - |> add_vunit1 "n1" - |> add_wv "b1" - |> add_wv (V.dest v) - |> add_undir_eunit1 (E.dest e1) "b1" (V.dest v) - |> add_undir_eunit1 (E.dest e2) (V.dest v) "n1" - val _ = assert_g_eq "split (u9)" g_exp g' - in () end) (); - end - - - local (* plugging *) - val g = G.empty |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_vunit1 "n3" - |> add_vunit1 "n4" - |> add_wv "in1" - |> add_wv "in2" - |> add_wv "out1" - |> add_wv "out2" - |> add_wv "w1" - |> add_wv "w2" - |> add_wv "w3" - |> add_wv "w4" - |> add_wv "bnd1" - |> add_wv "bnd2" - |> add_wv "bnd3" - |> add_wv "bnd4" - |> add_dir_eunit1 "e1" "in1" "n1" - |> add_dir_eunit2 "e2" "in2" "n2" - |> add_dir_eunit1 "e3" "n1" "out1" - |> add_dir_eunit2 "e4" "n3" "out2" - |> add_dir_eunit1 "e5" "n1" "w1" - |> add_dir_eunit1 "e6" "w1" "n1" - |> add_undir_eunit1 "f1" "bnd1" "n1" - |> add_undir_eunit2 "f2" "bnd2" "n2" - |> add_undir_eunit1 "f3" "n3" "bnd3" - |> add_undir_eunit2 "f4" "n1" "bnd4" - |> add_undir_eunit1 "f5" "n2" "w2" - |> add_undir_eunit1 "f6" "n2" "w2" - |> add_undir_eunit2 "f7" "n3" "w3" - |> add_undir_eunit2 "f8" "w3" "n3"; - in - val _ = Testing.test "G.get_plugging" (fn () => - let - fun test_dir_plugging inv outv g = - case G.get_plugging g (V.mk inv) (V.mk outv) - of SOME ((Directed,_),(v1,v2)) => - Testing.assert - ("G.get_plugging ("^inv^","^outv^"): correct vertex names") - ((V.name_eq (v1,V.mk outv)) andalso - (V.name_eq (v2,V.mk inv))) - | SOME _ => - Testing.assert - ("G.get_plugging ("^inv^","^outv^"): correct directedness") - false - | NONE => - Testing.assert ("G.get_plugging ("^inv^","^outv^") != NONE") - false - fun test_undir_plugging b1 b2 g = - case G.get_plugging g (V.mk b1) (V.mk b2) - of SOME ((Undirected,_),(v1,v2)) => - Testing.assert - ("G.get_plugging ("^b1^","^b2^"): correct vertex names") - (V.NSet.eq (V.NSet.of_list (map V.mk [b1,b2])) - (V.NSet.of_list [v1,v2])) - | SOME _ => - Testing.assert - ("G.get_plugging ("^b1^","^b2^"): correct directedness") - false - | NONE => - Testing.assert ("G.get_plugging ("^b1^","^b2^") != NONE") - false - fun test_no_plugging b1 b2 g = - Testing.assert ("G.get_plugging ("^b1^","^b2^") = NONE") - (case G.get_plugging g (V.mk b1) (V.mk b2) - of SOME _ => false - | NONE => true) - - val _ = test_dir_plugging "in1" "out1" g - val _ = test_dir_plugging "in2" "out2" g - val _ = test_undir_plugging "bnd1" "bnd3" g - val _ = test_undir_plugging "bnd4" "bnd2" g - val _ = test_no_plugging "in1" "in2" g - val _ = test_no_plugging "in1" "out2" g - val _ = test_no_plugging "out1" "w1" g - val _ = test_no_plugging "bnd1" "bnd2" g - val _ = test_no_plugging "bnd1" "w1" g - in () end) () - - - val _ = Testing.test "G.are_pluggable" (fn () => - let - fun test_pluggable n1 n2 g = - Testing.assert ("G.are_pluggable ("^n1^","^n2^")") - (G.are_pluggable g (V.mk n1) (V.mk n2)) - fun test_not_pluggable n1 n2 g = - Testing.assert ("not G.are_pluggable ("^n1^","^n2^")") - (not (G.are_pluggable g (V.mk n1) (V.mk n2))) - - (* FIXME: w4 is an isolated point; should this be pluggable? 
*) - val _ = test_pluggable "in1" "out1" g; - val _ = test_pluggable "in2" "out2" g; - val _ = test_not_pluggable "in1" "out2" g; - val _ = test_not_pluggable "in2" "out1" g; - val _ = test_not_pluggable "in1" "in2" g; - val _ = test_not_pluggable "in1" "w1" g; - val _ = test_not_pluggable "w1" "out2" g; - val _ = test_pluggable "bnd1" "bnd3" g; - val _ = test_pluggable "bnd4" "bnd2" g; - val _ = test_not_pluggable "bnd1" "bnd2" g; - val _ = test_not_pluggable "bnd1" "bnd4" g; - val _ = test_not_pluggable "bnd4" "bnd3" g; - val _ = test_not_pluggable "bnd3" "bnd2" g; - val _ = test_not_pluggable "bnd1" "w2" g; - val _ = test_not_pluggable "bnd1" "w3" g; - val _ = test_not_pluggable "bnd1" "w1" g; - val _ = test_not_pluggable "bnd3" "w2" g; - val _ = test_not_pluggable "in1" "bnd3" g; - val _ = test_not_pluggable "bnd3" "in1" g; - val _ = test_not_pluggable "out1" "bnd3" g; - val _ = test_not_pluggable "bnd3" "out1" g; - in () end) () - - - val _ = Testing.test "G.plug" (fn () => - let - fun test_dir_plug inv outv g = - let val (e,g') = G.plug (V.mk inv) (V.mk outv) g in - case G.get_edge_info g' e - of ((Directed,_),(v1,v2)) => - Testing.assert - ("G.plug ("^inv^","^outv^"): correct vertex names") - ((V.name_eq (v1,V.mk outv)) andalso - (V.name_eq (v2,V.mk inv))) - | _ => - Testing.assert - ("G.plug ("^inv^","^outv^"): correct directedness") - false - end - fun test_undir_plug b1 b2 g = - let val (e,g') = G.plug (V.mk b1) (V.mk b2) g in - case G.get_edge_info g' e - of ((Undirected,_),(v1,v2)) => - Testing.assert - ("G.plug ("^b1^","^b2^"): correct vertex names") - (V.NSet.eq (V.NSet.of_list (map V.mk [b1,b2])) - (V.NSet.of_list [v1,v2])) - | _ => - Testing.assert - ("G.plug ("^b1^","^b2^"): correct directedness") - false - end - val _ = test_dir_plug "in1" "out1" g; - val _ = test_dir_plug "in2" "out2" g; - val _ = test_undir_plug "bnd1" "bnd3" g; - val _ = test_undir_plug "bnd4" "bnd2" g; - in () end) () - - - (* TODO: plug_anon *) - - - val _ = Testing.test "G.plug_and_minimise (in1,out1)" - (fn () => - let - val in1 = V.mk "in1" - val out1 = V.mk "out1" - val ((s,t,wvs,es),g') = G.plug_and_minimise in1 out1 g - in - (Testing.assert "correct vertex names" - (V.NSet.eq (V.NSet.single (V.mk "n1")) - (V.NSet.of_list [s,t])); - Testing.assert "no wire verts" - (V.NSet.is_empty wvs); - case E.NSet.tryget_singleton es - of SOME e => - (case G.get_edge_dir g' e - of Directed => () - | _ => raise ERROR "Wrong directedness") - | NONE => raise ERROR "Wrong edge count" - ) - end - ) (); - val _ = Testing.test "G.plug_and_minimise (out2,in2)" - (fn () => - let - val out2 = V.mk "out2" - val in2 = V.mk "in2" - val ((s,t,wvs,es),g') = G.plug_and_minimise out2 in2 g - in - (Testing.assert "correct vertex names" - (V.NSet.eq (V.NSet.of_list (map V.mk ["n2","n3"])) - (V.NSet.of_list [s,t])); - Testing.assert "no wire verts" - (V.NSet.is_empty wvs); - case E.NSet.tryget_singleton es - of SOME e => - (case G.get_edge_dir g' e - of Directed => () - | _ => raise ERROR "Wrong directedness") - | NONE => raise ERROR "Wrong edge count" - ) - end - ) (); - - - (* TODO: plug_and_minimise_anon *) - - - val _ = Testing.test "G.plug_and_normalise (in1,out1)" - (fn () => - let - val in1 = V.mk "in1" - val out1 = V.mk "out1" - val ((s,t,wvs,es),_) = G.plug_and_normalise in1 out1 g - in - (Testing.assert "correct vertex names" - (V.NSet.eq (V.NSet.single (V.mk "n1")) - (V.NSet.of_list [s,t])); - Testing.assert "2 wire verts" - (V.NSet.cardinality wvs = 2); - Testing.assert "2 edges" - (E.NSet.cardinality es = 3) - ) - 
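(* Plugging in1/out1 fuses the two wires at n1 into a single wire from n1 back to
 * itself; after normalisation that wire carries two interior wire-vertices and
 * hence three edges, which is what the cardinality checks above verify. *)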
end - ) () - - - (* TODO: plug_and_normalise_anon *) - end - - - val _ = Testing.test "get_open_subgraph" (fn () => let - val g = G.empty |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_vunit1 "n3" - |> add_wv "w1" - |> add_wv "w2" - |> add_wv "w3" - |> add_wv "w4" - |> add_wv "w5" - |> add_wv "w6" - |> add_wv "w7" - |> add_wv "w8" - |> add_wv "w9" - |> add_wv "w10" - |> add_wv "w11" - |> add_wv "w12" - |> add_wv "w13" - |> add_wv "w17" - |> add_wv "w18" - |> add_wv "w20" - |> add_dir_eunit1 "c1" "w1" "w1" - |> add_dir_eunit1 "bw1" "w2" "w3" - |> add_dir_eunit1 "e1" "n1" "w4" - |> add_dir_eunit1 "e2" "w4" "w5" - |> add_dir_eunit1 "e3" "w5" "n1" - |> add_dir_eunit1 "e4" "n2" "w9" - |> add_dir_eunit1 "e5" "w9" "w10" - |> add_dir_eunit1 "e6" "w10" "n3" - |> add_undir_eunit1 "f1" "w6" "n2" - |> add_undir_eunit1 "f2" "n2" "w7" - |> add_undir_eunit1 "f3" "w7" "w8" - |> add_undir_eunit1 "f4" "w8" "n3" - (* these will be connected to later *) - |> add_undir_eunit1 "g1" "n3" "w12" - |> add_dir_eunit1 "g2" "n3" "w13" - |> add_dir_eunit1 "g3" "n2" "w11" - |> add_undir_eunit1 "g4" "w12" "w17" - |> add_dir_eunit1 "g6" "w13" "w18" - |> add_dir_eunit1 "g8" "w11" "w20" - val exp_subgraph = g; - - val retain = V.NSet.of_list (map V.mk ["n1","n2","n3","w1","w2","w3"]); - - val g = g |> add_vunit1 "n4" - |> add_vunit1 "n5" - |> add_wv "w14" - |> add_wv "w15" - |> add_wv "w16" - |> add_wv "w19" - |> add_dir_eunit1 "c2" "w14" "w14" - |> add_dir_eunit1 "bw2" "w15" "w16" - |> add_undir_eunit1 "g5" "w17" "n4" - |> add_dir_eunit1 "g7" "w18" "n4" - |> add_undir_eunit1 "f5" "n4" "w19" - |> add_dir_eunit1 "g9" "w20" "n5" - val full_graph = g; - val g = G.get_open_subgraph full_graph retain - val _ = Testing.assert "subgraph was as expected" - (G.exact_eq g exp_subgraph) - in () end) (); - - - - val _ = Testing.assert_no_failed_tests() -end; - -(* We don't test Test_OGraph here, as it will actually get tested in - * test-banggraph.ML *) - diff --git a/core/io/file_io.ML b/core/io/file_io.ML deleted file mode 100644 index 83b8f325..00000000 --- a/core/io/file_io.ML +++ /dev/null @@ -1,44 +0,0 @@ -structure File_Io = -struct - (* this is nicer to deal with that IO.Io *) - exception io_exn of string - - fun write_string file s = let - val outstream = TextIO.openOut file - in - (TextIO.output (outstream, s); - TextIO.closeOut outstream) - end - handle IO.Io {cause as (OS.SysErr (msg:string,_)) : exn, ...} => - raise io_exn ("Could not write to \""^file^"\": "^msg) - | IO.Io {name : string, function : string, ...} => - raise io_exn ("Could not write to \""^file^ - "\": unknown error in "^function^ - " of "^name) - - fun write_json file j = write_string file (Json.encode j) - fun write_pretty file p = write_string file (Pretty.string_of p) - - - fun read_string file = let - val instream = TextIO.openIn file - val text = TextIO.inputAll instream - val _ = TextIO.closeIn instream - in - text - end - handle IO.Io {cause as (OS.SysErr (msg:string,_)) : exn, ...} => - raise io_exn ("Could not read from \""^file^"\": "^msg) - | IO.Io {name : string, function : string, ...} => - raise io_exn ("Could not read from \""^file^ - "\": unknown error in "^function^ - " of "^name) - - fun read_json file = Json.read_file file - handle IO.Io {cause as (OS.SysErr (msg:string,_)) : exn, ...} => - raise io_exn ("Could not read from \""^file^"\": "^msg) - | IO.Io {name : string, function : string, ...} => - raise io_exn ("Could not read from \""^file^ - "\": unknown error in "^function^ - " of "^name) -end diff --git 
a/core/io/graph_annotations_json.ML b/core/io/graph_annotations_json.ML deleted file mode 100644 index 9626906c..00000000 --- a/core/io/graph_annotations_json.ML +++ /dev/null @@ -1,81 +0,0 @@ -functor InputGraphAnnotationsJSON( - structure Annotations : GRAPH_ANNOTATIONS - val json_to_annotation : Json.json -> Annotations.data -) : GRAPH_ANN_INPUT_JSON -= struct - open JsonInputUtils; - structure Ann = Annotations; - - type data = Ann.T; - val empty_annotation = Ann.init; - - fun input_vertex_annotation v json = - Ann.set_vertex_annotation v (json_to_annotation json); - fun input_edge_annotation e json = - Ann.set_edge_annotation e (json_to_annotation json); - fun input_bbox_annotation bb json = - Ann.set_bbox_annotation bb (json_to_annotation json); - val input_graph_annotation = Ann.set_graph_annotation o json_to_annotation; -end; - -structure InputGraphStringTableAnnotationsJSON - : GRAPH_ANN_INPUT_JSON where type data = GraphStringTableAnnotations.T -= struct - open JsonInputUtils; - - structure InputAnn = InputGraphAnnotationsJSON( - structure Annotations = GraphStringTableAnnotations - val json_to_annotation = input_string_table - ); - open InputAnn; -end; - -structure InputGraphJsonObjectAnnotationsJSON - : GRAPH_ANN_INPUT_JSON where type data = GraphJsonObjectAnnotations.T -= InputGraphAnnotationsJSON( - structure Annotations = GraphJsonObjectAnnotations - fun json_to_annotation Json.Null = Json.empty_obj - | json_to_annotation (Json.Object obj) = obj - | json_to_annotation _ = raise JsonInputUtils.bad_input_exp ("Expected object","") -) - -functor OutputGraphAnnotationsJSON( - structure Annotations : GRAPH_ANNOTATIONS - val annotation_to_json : Annotations.data -> Json.json -) : GRAPH_ANN_OUTPUT_JSON -= struct - open JsonOutputUtils; - structure Ann = Annotations; - - type data = Ann.T; - val empty_annotation = Ann.init; - - val output_vertex_annotation = - annotation_to_json oo Ann.get_vertex_annotation; - val output_edge_annotation = - annotation_to_json oo Ann.get_edge_annotation; - val output_bbox_annotation = - annotation_to_json oo Ann.get_bbox_annotation; - val output_graph_annotation = - annotation_to_json o Ann.get_graph_annotation; -end; - -structure OutputGraphStringTableAnnotationsJSON - : GRAPH_ANN_OUTPUT_JSON where type data = GraphStringTableAnnotations.T -= struct - open JsonOutputUtils; - - structure OutputAnn = OutputGraphAnnotationsJSON( - structure Annotations = GraphStringTableAnnotations - val annotation_to_json = output_string_table - ); - open OutputAnn; -end; - -structure OutputGraphJsonObjectAnnotationsJSON - : GRAPH_ANN_OUTPUT_JSON where type data = GraphJsonObjectAnnotations.T -= OutputGraphAnnotationsJSON( - structure Annotations = GraphJsonObjectAnnotations - fun annotation_to_json obj = Json.Object obj -) - diff --git a/core/io/graph_component_io.ML b/core/io/graph_component_io.ML deleted file mode 100644 index d42932dc..00000000 --- a/core/io/graph_component_io.ML +++ /dev/null @@ -1,16 +0,0 @@ -signature GRAPH_COMPONENT_DATA_IO = -sig - type nvdata - type edata - - structure IVDataInputJSON : INPUT_JSON - where type data = nvdata - structure IVDataOutputJSON : OUTPUT_JSON - where type data = nvdata - structure EDataInputJSON : INPUT_JSON - where type data = edata - structure EDataOutputJSON : OUTPUT_JSON - where type data = edata - structure DotStyle : DOT_STYLE - where type nvdata = nvdata -end diff --git a/core/io/graph_dot_output.ML b/core/io/graph_dot_output.ML deleted file mode 100644 index f7cb0541..00000000 --- a/core/io/graph_dot_output.ML 
+++ /dev/null @@ -1,64 +0,0 @@ -signature OUTPUT_DOT = OUTPUT where type T = string; - -functor OutputSStrNameDot(N: SSTR_NAME) : OUTPUT_DOT -= struct - type T = string; - type data = N.name; - fun output s = N.string_of_name s; -end; - -signature DOT_STYLE = -sig - type nvdata; - val style_for_ivertex_data : nvdata -> string -end - -functor OutputGraphDot( - structure Graph : BANG_GRAPH - structure DotStyle : DOT_STYLE - sharing type DotStyle.nvdata = Graph.nvdata -) : OUTPUT_DOT = -struct - structure Graph = Graph; - type T = string; - type data = Graph.T; - fun node_block graph = let - fun sty v = case Graph.get_vertex_data graph v - of Graph.NVert data => " " ^ (DotStyle.style_for_ivertex_data data) - | Graph.WVert => " [style=filled,fillcolor=gray,shape=box,width=0,height=0,fontcolor=white]" - fun f v lst = (V.pretty_name v):: - (Pretty.str (sty v)):: - (Pretty.str ";\n"):: - lst - fun box_f box lst = - (Pretty.str "subgraph cluster_"):: - (B.pretty_name box):: - (Pretty.str " {\n"):: - (V.NSet.fold_rev f (Graph.get_vertices_in_bbox graph box) - (Pretty.str "}\n"::lst)) - in B.NSet.fold box_f (Graph.get_bboxes graph) (V.NSet.fold_rev f (Graph.get_unbboxed graph) []) - end; - - fun edge_block graph = let - fun f e lst = let - val (s,t) = (Graph.get_edge_source graph e, Graph.get_edge_target graph e) - val dir = Graph.get_edge_dir graph e - in (V.pretty_name s):: - (Pretty.str " -> "):: - (V.pretty_name t):: - (Pretty.str " [label="):: - (E.pretty_name e):: - (if dir = Directed then I - else (fn l => (Pretty.str ",arrowhead=none")::l)) - ((Pretty.str "];\n")::lst) - end - in E.NSet.fold_rev f (Graph.get_edges graph) [] - end; - - fun output graph = Pretty.string_of (Pretty.block ( - [Pretty.str "digraph G {\n"] @ - node_block graph @ - edge_block graph @ - [Pretty.str "}\n"] - )); -end diff --git a/core/io/graph_json.ML b/core/io/graph_json.ML deleted file mode 100644 index 7033708d..00000000 --- a/core/io/graph_json.ML +++ /dev/null @@ -1,463 +0,0 @@ -(** - * JSON graph parsing and writing - * - * GRAPH ::= - * { - * "wire_vertices": VERTEX_LIST, - * "node_vertices": VERTEX_LIST, - * "dir_edges": EDGE_LIST, - * "undir_edges": EDGE_LIST, - * "bang_boxes": BBOX_LIST, - * "data": GDATA, - * "annotation": ANNDATA - * } - * VERTEX_LIST ::= PLAIN_VLIST | { NAME : VDESC (, NAME : VDESC)* } | {} - * PLAIN_VLIST ::= [ NAME (, NAME)* ] | [] - * EDGE_LIST ::= { NAME : EDESC (, NAME : EDESC)* } | {} - * BBOX_LIST ::= { NAME : BBDESC (, NAME : BBDESC)* } | {} - * VDESC ::= - * { - * "data": VDATA, - * "annotation": ANNDATA - * } - * EDESC ::= - * { - * "src" : NAME, - * "tgt" : NAME, - * "data": EDATA, - * "annotation": ANNDATA - * } - * BBDESC ::= - * { - * "contents": PLAIN_VLIST, - * "parent": NAME, - * "data": BBDATA, - * "annotation": ANNDATA - * } - * - * - the formats of VDATA, EDATA, BBDATA and GDATA depend on the theory - * - ANNDATA is some sort of annotation, in a format to be decided by the tool; - * the controller uses it to store user data - * - "data" and "annotation" are optional everywhere - * - in fact, everything is optional except for the "src" and "tgt" properties - * of EDATA - * - a null value is the same as omitting the element - * - * output_vertex, output_edge and output_bbox produce the following outputs: - * VERTEX ::= - * { - * "name": NAME, - * "is_wire_vertex": true|false, - * "data": VDATA, - * "annotation": ANNDATA - * } - * EDGE ::= - * { - * "name": NAME, - * "is_directed": true|false, - * "src" : NAME, - * "tgt" : NAME, - * "data": EDATA, - * "annotation": 
ANNDATA - * } - * BANGBOX ::= - * { - * "name": NAME, - * "contents": PLAIN_VLIST, - * "parent": NAME, - * "data": BBDATA, - * "annotation": ANNDATA - * } - * - * Caveats of current implementation: - * - BBDATA and GDATA are not supported by Quantomatic at all - * - * Note about arguments to functors: - * Json.Null is treated as the "default" value. If Output*Data.output returns - * Json.Null, nothing is written, and the default vertex or edge data will be - * used when reading the file again. Input*Data.input will never be passed - * Json.Null, as this is assumed to mean the default vertex or edge data. - * - * Similarly, annotations are not written at all if their values are null. - * However, OutputAnnotation is notified of both null and missing annotations by - * being passed a Json.Null value (this allows the annotation type to maintain a - * record of all graph components. - *) - -signature GRAPH_ANN_INPUT_JSON -= sig - exception bad_input_exp of string * string - type data - val empty_annotation : data - val input_vertex_annotation : V.name -> Json.json -> data -> data - val input_edge_annotation : E.name -> Json.json -> data -> data - val input_bbox_annotation : B.name -> Json.json -> data -> data - val input_graph_annotation : Json.json -> data -> data -end - -structure EmptyGraphAnnotationInputJson - : GRAPH_ANN_INPUT_JSON where type data = unit -= struct - exception bad_input_exp of string * string - type data = unit - val empty_annotation = () - fun input_vertex_annotation _ _ = I - fun input_edge_annotation _ _ = I - fun input_bbox_annotation _ _ = I - fun input_graph_annotation _ = I -end - -signature GRAPH_ANN_OUTPUT_JSON -= sig - type data - val empty_annotation : data - val output_vertex_annotation : data -> V.name -> Json.json - val output_edge_annotation : data -> E.name -> Json.json - val output_bbox_annotation : data -> B.name -> Json.json - val output_graph_annotation : data -> Json.json -end - -structure EmptyGraphAnnotationOutputJson - : GRAPH_ANN_OUTPUT_JSON where type data = unit -= struct - type data = unit - val empty_annotation = () - fun output_vertex_annotation _ _ = Json.Null - fun output_edge_annotation _ _ = Json.Null - fun output_bbox_annotation _ _ = Json.Null - fun output_graph_annotation _ = Json.Null -end - - -functor InputAnnotatedGraphJSON( - structure Graph : BANG_GRAPH - and InputVertexData : INPUT_JSON - and InputEdgeData : INPUT_JSON - sharing type InputVertexData.data = Graph.nvdata - sharing type InputEdgeData.data = Graph.edata - structure InputAnnotation : GRAPH_ANN_INPUT_JSON -) : INPUT_JSON -= struct - structure Graph = Graph - structure IA = InputAnnotation - - open JsonInputUtils - type data = Graph.T * InputAnnotation.data - - fun get_nv_vdata obj propName = - case Json.lookup obj propName - of SOME Json.Null => Graph.default_nv_vdata - | SOME data => (Graph.NVert (InputVertexData.input data) - handle InputVertexData.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop propName l)) - | NONE => Graph.default_nv_vdata - - fun get_edata obj propName = - case Json.lookup obj propName - of SOME Json.Null => Graph.default_edata - | SOME data => (InputEdgeData.input data - handle InputEdgeData.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop propName l)) - | NONE => Graph.default_edata - - fun get_wverts obj prop = let - fun add_vert1 (Json.String vn) (g,ann) = - (Graph.add_named_vertex (V.mk vn) Graph.WVert g, - IA.input_vertex_annotation (V.mk vn) Json.Null ann) - | add_vert1 _ _ = raise bad_input_exp ("Expected 
string","") - fun add_vert2 (vn,Json.Null) (g,ann) = add_vert1 (Json.String vn) (g,ann) - | add_vert2 (vn,Json.Object obj') (g,ann) = - (Graph.add_named_vertex (V.mk vn) Graph.WVert g, - IA.input_vertex_annotation (V.mk vn) (get_easy Json.Null obj' "annotation") ann - handle IA.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "annotation" l)) - | add_vert2 _ _ = raise bad_input_exp ("Expected object","") - in - fold_obj_or_arr_easy (add_vert2,add_vert1) obj prop - handle Graph.duplicate_vertex_exp (vn,_) => - raise bad_input_exp ("Duplicate vertex ("^(V.dest vn)^")", prop) - end - - fun get_nverts obj prop = let - fun add_def_vert (Json.String vn) (g,ann) = - (Graph.add_named_vertex (V.mk vn) Graph.default_nv_vdata g, - IA.input_vertex_annotation (V.mk vn) Json.Null ann) - | add_def_vert _ _ = raise bad_input_exp ("Expected string","") - fun add_vert (vn,Json.Null) (g,ann) = add_def_vert (Json.String vn) (g,ann) - | add_vert (vn,Json.Object obj') (g,ann) = - (Graph.add_named_vertex (V.mk vn) (get_nv_vdata obj' "data") g, - IA.input_vertex_annotation (V.mk vn) (get_easy Json.Null obj' "annotation") ann - handle IA.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "annotation" l)) - | add_vert _ _ = raise bad_input_exp ("Expected object","") - in - fold_obj_or_arr_easy (add_vert,add_def_vert) obj prop - handle Graph.duplicate_vertex_exp (vn,_) => - raise bad_input_exp ("Duplicate vertex ("^(V.dest vn)^")", prop) - end - - fun get_edges dir_or_undir obj prop = let - fun add_edge (en,Json.Object obj') (g,ann) = - (Graph.add_named_edge (E.mk en) - (dir_or_undir,(get_edata obj' "data")) - (V.mk (get_string obj' "src")) - (V.mk (get_string obj' "tgt")) - g - handle Graph.no_such_vertex_exp (_,vn,_) => - raise bad_input_exp ("Vertex "^(V.dest vn)^" does not exist",""), - IA.input_edge_annotation (E.mk en) (get_easy Json.Null obj' "annotation") ann - handle IA.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "annotation" l)) - | add_edge _ _ = raise bad_input_exp ("Expected object","") - in - fold_obj_easy add_edge obj prop - handle Graph.duplicate_edge_exp (en,_) => - raise bad_input_exp ("Duplicate edge ("^(E.dest en)^")", prop) - end - - fun get_bboxes obj prop = let - fun set_bbox_parent (bn,Json.Object obj') (g,ann) = - (let - val bp = get_string_easy "" obj' "parent" - in - if bp = "" - then (g,ann) - else (g |> Graph.add_child_to_bbox (B.mk bp) (B.mk bn),ann) - end - handle Graph.no_such_bbox_exp (_,b,_) => - raise bad_input_exp - ("Bang box "^(B.dest b)^" does not exist", - "parent") - | Graph.bbox_bad_parent_exp (b1,b2,_) => - raise bad_input_exp - ((B.dest b1)^" is not a subgraph of "^(B.dest b2), - "parent")) - | set_bbox_parent _ g = g - fun add_bbox (bn,Json.Null) (g,ann) = - (Graph.add_named_bbox (B.mk bn) g, - IA.input_bbox_annotation (B.mk bn) Json.Null ann) - | add_bbox (bn,Json.Object obj') (g,ann) = - (let - val b = B.mk bn - fun get_vname (Json.String s) = V.mk s - | get_vname _ = raise bad_input_exp ("Expected string","contents") - val vs = map get_vname (get_array_easy obj' "contents") - in - g |> (Graph.add_named_bbox b) - |> (Graph.add_to_bbox_anon b (V.NSet.of_list vs)) - end - handle Graph.no_such_vertex_exp (_,vn,_) => - raise bad_input_exp ("Vertex "^(V.dest vn)^" does not exist", - "contents"), - IA.input_bbox_annotation (B.mk bn) (get_easy Json.Null obj' "annotation") ann - handle IA.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "annotation" l)) - | add_bbox (bn,_) _ = raise bad_input_exp ("Expected object",bn) - in - 
(* we do this in two stages, so that ordering doesn't matter *) - (fold_obj_easy set_bbox_parent obj prop) - o - (fold_obj_easy add_bbox obj prop) - handle Graph.duplicate_bbox_exp (bn,_) => - raise bad_input_exp ("Duplicate bang box ("^(B.dest bn)^")", prop) - end - - fun get_ann obj prop (g,ann) = - (g, IA.input_graph_annotation (get_easy Json.Null obj prop) ann) - handle IA.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop prop l) - - fun input (Json.Object obj) = - (Graph.empty, IA.empty_annotation) - |> get_wverts obj "wire_vertices" - |> get_nverts obj "node_vertices" - |> get_edges Undirected obj "undir_edges" - |> get_edges Directed obj "dir_edges" - |> get_bboxes obj "bang_boxes" - |> get_ann obj "annotation" - | input Json.Null = (Graph.empty,IA.empty_annotation) - | input _ = raise bad_input_exp ("Expected object","") -end - -functor InputGraphJSON( - structure Graph : BANG_GRAPH - and InputVertexData : INPUT_JSON - and InputEdgeData : INPUT_JSON - sharing type InputVertexData.data = Graph.nvdata - sharing type InputEdgeData.data = Graph.edata -) : INPUT_JSON -= struct - structure SubInput = InputAnnotatedGraphJSON( - structure Graph = Graph - structure InputVertexData = InputVertexData - structure InputEdgeData = InputEdgeData - structure InputAnnotation = EmptyGraphAnnotationInputJson - ) - open SubInput - type data = Graph.T - val input = fst o SubInput.input -end - -signature OUTPUT_GRAPH_JSON -= sig - include OUTPUT_JSON - val output_vertex : data -> V.name -> T - val output_edge : data -> E.name -> T - val output_bbox : data -> B.name -> T -end - -functor OutputAnnotatedGraphJSON( - structure Graph : BANG_GRAPH - and OutputVertexData : OUTPUT_JSON - and OutputEdgeData : OUTPUT_JSON - sharing type OutputVertexData.data = Graph.nvdata - sharing type OutputEdgeData.data = Graph.edata - structure OutputAnnotation : GRAPH_ANN_OUTPUT_JSON -) : OUTPUT_GRAPH_JSON -= struct - structure G = Graph - structure OA = OutputAnnotation - - open JsonOutputUtils - type data = Graph.T * OutputAnnotation.data - - fun output_vertex (g,ann) v = let - val (is_wv,vd) = - case Graph.get_vertex_data g v - of Graph.WVert => (true,Json.Null) - | Graph.NVert d => (false,OutputVertexData.output d) - in - Json.Object ( - Json.empty_obj |> update ("name",Json.String (V.dest v)) - |> update ("is_wire_vertex",Json.Bool is_wv) - |> update ("data",vd) - |> update ("annotation",OA.output_vertex_annotation ann v) - ) - end - - fun output_edge (g,ann) e = let - val ((dir,ed),(s,t)) = (Graph.get_edge_dir_and_data g e, (Graph.get_edge_source g e, Graph.get_edge_target g e)) - in - Json.Object ( - Json.empty_obj |> update ("name",Json.String (E.dest e)) - |> update ("is_directed",Json.Bool (dir = Directed)) - |> update ("src",Json.String (V.dest s)) - |> update ("tgt",Json.String (V.dest t)) - |> update ("data",OutputEdgeData.output ed) - |> update ("annotation",OA.output_edge_annotation ann e) - ) - end - - fun output_bbox (g,ann) b = let - val add_parent = - case B.NSet.get_local_bot (Graph.get_bbox_parents g b) (* TODO: handle multiple parents *) - of SOME bp => update ("parent",Json.String (B.dest bp)) - | NONE => I - val jvs = map (fn v => Json.String (V.dest v)) - (V.NSet.list_of (Graph.get_vertices_in_bbox g b)) - in - Json.Object ( - Json.empty_obj |> update ("name",Json.String (B.dest b)) - |> add_parent - |> update ("contents",Json.Array jvs) - |> update ("annotation",OA.output_bbox_annotation ann b) - ) - end - - fun get_verts (g,ann) = let - fun nv_info v vd = - Json.Object ( - 
Json.empty_obj |> update ("data",OutputVertexData.output vd) - |> update ("annotation",OA.output_vertex_annotation ann v) - ) - fun wv_info v = - Json.Object ( - Json.empty_obj |> update ("annotation",OA.output_vertex_annotation ann v) - ) - fun add_vert v (wvobj,nvobj) = - case Graph.get_vertex_data g v - of Graph.NVert vd => (wvobj, update (V.dest v, nv_info v vd) nvobj) - | Graph.WVert => (update (V.dest v,wv_info v) wvobj, nvobj) - val (wvobj,nvobj) = V.NSet.fold add_vert - (Graph.get_vertices g) - (Json.empty_obj,Json.empty_obj) - in - (Json.Object wvobj,Json.Object nvobj) - end - - fun get_edges (g,ann) = let - fun edge_info e ed s t = - Json.Object ( - Json.empty_obj |> update ("src",Json.String (V.dest s)) - |> update ("tgt",Json.String (V.dest t)) - |> update ("data",OutputEdgeData.output ed) - |> update ("annotation",OA.output_edge_annotation ann e) - ) - fun add_edge e (dobj,udobj) = let - val (s,t) = (Graph.get_edge_source g e, Graph.get_edge_target g e) - val (edir, ed) = Graph.get_edge_dir_and_data g e - in case edir of Directed => (update (E.dest e,edge_info e ed s t) dobj, udobj) - | Undirected => (dobj,update (E.dest e,edge_info e ed s t) udobj) - end - val (dobj,udobj) = E.NSet.fold add_edge - (Graph.get_edges g) - (Json.empty_obj,Json.empty_obj) - in - (Json.Object dobj,Json.Object udobj) - end - - fun get_bboxes (g,ann) = let - fun add_bbox b = let - val add_parent = - case B.NSet.get_local_bot (Graph.get_bbox_parents g b) (* TODO: handle multiple parents *) - of SOME bp => update ("parent",Json.String (B.dest bp)) - | NONE => I - val jvs = map (fn v => Json.String (V.dest v)) - (V.NSet.list_of (Graph.get_vertices_in_bbox g b)) - val obj = Json.empty_obj |> add_parent - |> update ("contents",Json.Array jvs) - |> update ("annotation",OA.output_bbox_annotation ann b) - in - update (B.dest b, Json.Object obj) - end - in - Json.Object (B.NSet.fold add_bbox (Graph.get_bboxes g) Json.empty_obj) - end - - fun output (g,ann) = let - val (wverts,nverts) = get_verts (g,ann) - val (dedges,udedges) = get_edges (g,ann) - val bboxes = get_bboxes (g,ann) - in - Json.Object - (Json.empty_obj |> opt_update ("wire_vertices",wverts) - |> opt_update ("node_vertices",nverts) - |> opt_update ("undir_edges",udedges) - |> opt_update ("dir_edges",dedges) - |> opt_update ("bang_boxes",bboxes) - |> update ("annotation",OA.output_graph_annotation ann)) - end -end - -functor OutputGraphJSON( - structure Graph : BANG_GRAPH - and OutputVertexData : OUTPUT_JSON - and OutputEdgeData : OUTPUT_JSON - sharing type OutputVertexData.data = Graph.nvdata - sharing type OutputEdgeData.data = Graph.edata -) : OUTPUT_GRAPH_JSON -= struct - structure SubOutput = OutputAnnotatedGraphJSON( - structure Graph = Graph - structure OutputVertexData = OutputVertexData - structure OutputEdgeData = OutputEdgeData - structure OutputAnnotation = EmptyGraphAnnotationOutputJson - ) - open SubOutput - type data = Graph.T - fun output g = SubOutput.output (g,()) - fun output_vertex g = SubOutput.output_vertex (g,()) - fun output_edge g = SubOutput.output_edge (g,()) - fun output_bbox g = SubOutput.output_bbox (g,()) -end - diff --git a/core/io/graph_json_io.ML b/core/io/graph_json_io.ML deleted file mode 100644 index ca2f6834..00000000 --- a/core/io/graph_json_io.ML +++ /dev/null @@ -1,309 +0,0 @@ -signature GRAPH_JSON = -sig - structure Graph : BANG_GRAPH - val input : Json.json -> Graph.T - val output : Graph.T -> Json.json - val output_vertex_desc : Graph.T -> V.name -> Json.json - val output_edge_desc : Graph.T -> E.name -> 
Json.json - val output_bbox_desc : Graph.T -> B.name -> Json.json -end - -functor GraphJSON( - structure Graph : BANG_GRAPH - and InputVertexData : INPUT_JSON - and InputEdgeData : INPUT_JSON - and OutputVertexData : OUTPUT_JSON - and OutputEdgeData : OUTPUT_JSON - sharing type InputVertexData.data = OutputVertexData.data = Graph.nvdata - sharing type InputEdgeData.data = OutputEdgeData.data = Graph.edata -) : GRAPH_JSON -= struct - structure Graph = Graph - - open JsonInputUtils - open JsonOutputUtils - - (*************************************) - (************** Input ****************) - (*************************************) - - fun input_nv_vdata obj propName = - case Json.lookup obj propName - of SOME Json.Null => Graph.default_nv_vdata - | SOME data => (Graph.NVert (InputVertexData.input data) - handle InputVertexData.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop propName l)) - | NONE => Graph.default_nv_vdata - - fun input_edata obj propName = - case Json.lookup obj propName - of SOME Json.Null => Graph.default_edata - | SOME data => (InputEdgeData.input data - handle InputEdgeData.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop propName l)) - | NONE => Graph.default_edata - - fun input_wverts obj prop = let - fun add_vert1 (Json.String vn) g = - Graph.add_named_vertex (V.mk vn) Graph.WVert g - | add_vert1 _ _ = raise bad_input_exp ("Expected string","") - fun add_vert2 (vn,Json.Null) g = add_vert1 (Json.String vn) g - | add_vert2 (vn,Json.Object obj') g = - g |> Graph.add_named_vertex (V.mk vn) Graph.WVert - |> (case Json.lookup obj' "annotation" - of SOME (Json.Object ann) => Graph.set_vertex_annotation (V.mk vn, ann) - | SOME Json.Null => I - | SOME _ => raise bad_input_exp ("Annotation must be object or null","") - | NONE => I) - | add_vert2 _ _ = raise bad_input_exp ("Expected object","") - in - fold_obj_or_arr_easy (add_vert2,add_vert1) obj prop - handle Graph.duplicate_vertex_exp (vn,_) => - raise bad_input_exp ("Duplicate vertex ("^(V.dest vn)^")", prop) - end - - fun input_nverts obj prop = let - fun add_def_vert (Json.String vn) g = - Graph.add_named_vertex (V.mk vn) Graph.default_nv_vdata g - | add_def_vert _ _ = raise bad_input_exp ("Expected string","") - fun add_vert (vn,Json.Null) g = add_def_vert (Json.String vn) g - | add_vert (vn,Json.Object obj') g = - g |> Graph.add_named_vertex (V.mk vn) (input_nv_vdata obj' "data") - |> (case Json.lookup obj' "annotation" - of SOME (Json.Object ann) => Graph.set_vertex_annotation (V.mk vn, ann) - | SOME Json.Null => I - | SOME _ => raise bad_input_exp ("Annotation must be object or null","") - | NONE => I) - | add_vert _ _ = raise bad_input_exp ("Expected object","") - in - fold_obj_or_arr_easy (add_vert,add_def_vert) obj prop - handle Graph.duplicate_vertex_exp (vn,_) => - raise bad_input_exp ("Duplicate vertex ("^(V.dest vn)^")", prop) - end - - fun input_edges dir_or_undir obj prop = let - fun add_edge (en,Json.Object obj') g = - (g |> Graph.add_named_edge (E.mk en) - (dir_or_undir,(input_edata obj' "data")) - (V.mk (get_string obj' "src")) - (V.mk (get_string obj' "tgt")) - |> (case Json.lookup obj' "annotation" - of SOME (Json.Object ann) => Graph.set_edge_annotation (E.mk en, ann) - | SOME Json.Null => I - | SOME _ => raise bad_input_exp ("Annotation must be object or null","") - | NONE => I) - handle Graph.no_such_vertex_exp (_,vn,_) => - raise bad_input_exp ("Vertex "^(V.dest vn)^" does not exist","")) - | add_edge _ _ = raise bad_input_exp ("Expected object","") - in - fold_obj_easy add_edge 
obj prop - handle Graph.duplicate_edge_exp (en,_) => - raise bad_input_exp ("Duplicate edge ("^(E.dest en)^")", prop) - end - - fun input_bboxes obj prop = let - fun set_bbox_parent (bn,Json.Object obj') g = - (let - val bp = get_string_easy "" obj' "parent" - in - if bp = "" - then g - else g |> Graph.add_child_to_bbox (B.mk bp) (B.mk bn) - end - handle Graph.no_such_bbox_exp (_,b,_) => - raise bad_input_exp - ("Bang box "^(B.dest b)^" does not exist", - "parent") - | Graph.bbox_bad_parent_exp (b1,b2,_) => - raise bad_input_exp - ((B.dest b1)^" is not a subgraph of "^(B.dest b2), - "parent")) - | set_bbox_parent _ g = g - fun add_bbox (bn,Json.Null) g = - Graph.add_named_bbox (B.mk bn) g - | add_bbox (bn,Json.Object obj') g = - (let - val b = B.mk bn - fun get_vname (Json.String s) = V.mk s - | get_vname _ = raise bad_input_exp ("Expected string","contents") - val vs = map get_vname (get_array_easy obj' "contents") - in - g |> (Graph.add_named_bbox b) - |> (Graph.add_to_bbox_anon b (V.NSet.of_list vs)) - |> (case Json.lookup obj' "annotation" - of SOME (Json.Object ann) => Graph.set_bbox_annotation (B.mk bn, ann) - | SOME Json.Null => I - | SOME _ => raise bad_input_exp ("Annotation must be object or null","") - | NONE => I) - end - handle Graph.no_such_vertex_exp (_,vn,_) => - raise bad_input_exp ("Vertex "^(V.dest vn)^" does not exist", - "contents")) - | add_bbox (bn,_) _ = raise bad_input_exp ("Expected object",bn) - in - (* we do this in two stages, so that ordering doesn't matter *) - (fold_obj_easy set_bbox_parent obj prop) - o - (fold_obj_easy add_bbox obj prop) - handle Graph.duplicate_bbox_exp (bn,_) => - raise bad_input_exp ("Duplicate bang box ("^(B.dest bn)^")", prop) - end - - (*fun get_ann obj prop (g,ann) = - (g, IA.input_graph_annotation (get_easy Json.Null obj prop) ann) - handle IA.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop prop l)*) - - fun input (Json.Object obj) = - Graph.empty - |> input_wverts obj "wire_vertices" - |> input_nverts obj "node_vertices" - |> input_edges Undirected obj "undir_edges" - |> input_edges Directed obj "dir_edges" - |> input_bboxes obj "bang_boxes" - |> (case Json.lookup obj "annotation" - of SOME (Json.Object ann) => Graph.set_graph_annotation ann - | SOME Json.Null => I - | SOME _ => raise bad_input_exp ("Annotation must be object or null","") - | NONE => I) - | input Json.Null = Graph.empty - | input _ = raise bad_input_exp ("Expected object","") - - - (*************************************) - (************** Output ***************) - (*************************************) - - fun output_vertex_desc g v = let - val (is_wv,vd) = - case Graph.get_vertex_data g v - of Graph.WVert => (true,Json.Null) - | Graph.NVert d => (false,OutputVertexData.output d) - in - Json.Object ( - Json.empty_obj |> update ("name",Json.String (V.dest v)) - |> update ("is_wire_vertex",Json.Bool is_wv) - |> update ("data",vd) - |> (case Graph.get_vertex_annotation_opt g v - of SOME ann => update ("annotation",Json.Object ann) - | NONE => I) - ) - end - - fun output_edge_desc g e = let - val ((dir,ed),(s,t)) = (Graph.get_edge_dir_and_data g e, (Graph.get_edge_source g e, Graph.get_edge_target g e)) - in - Json.Object ( - Json.empty_obj |> update ("name",Json.String (E.dest e)) - |> update ("is_directed",Json.Bool (dir = Directed)) - |> update ("src",Json.String (V.dest s)) - |> update ("tgt",Json.String (V.dest t)) - |> update ("data",OutputEdgeData.output ed) - |> (case Graph.get_edge_annotation_opt g e - of SOME ann => update 
("annotation",Json.Object ann) - | NONE => I) - ) - end - - fun output_bbox_desc g b = let - val add_parent = - case B.NSet.get_local_bot (Graph.get_bbox_parents g b) (* TODO: handle multiple parents *) - of SOME bp => update ("parent",Json.String (B.dest bp)) - | NONE => I - val jvs = map (fn v => Json.String (V.dest v)) - (V.NSet.list_of (Graph.get_vertices_in_bbox g b)) - in - Json.Object ( - Json.empty_obj |> update ("name",Json.String (B.dest b)) - |> add_parent - |> update ("contents",Json.Array jvs) - |> (case Graph.get_bbox_annotation_opt g b - of SOME ann => update ("annotation",Json.Object ann) - | NONE => I) - ) - end - - fun get_verts g = let - fun nv_info v vd = - Json.Object ( - Json.empty_obj |> update ("data",OutputVertexData.output vd) - |> (case Graph.get_vertex_annotation_opt g v - of SOME ann => update ("annotation",Json.Object ann) - | NONE => I) - ) - fun wv_info v = - Json.Object ( - Json.empty_obj |> (case Graph.get_vertex_annotation_opt g v - of SOME ann => update ("annotation",Json.Object ann) - | NONE => I) - ) - fun add_vert v (wvobj,nvobj) = - case Graph.get_vertex_data g v - of Graph.NVert vd => (wvobj, update (V.dest v, nv_info v vd) nvobj) - | Graph.WVert => (update (V.dest v,wv_info v) wvobj, nvobj) - val (wvobj,nvobj) = V.NSet.fold add_vert - (Graph.get_vertices g) - (Json.empty_obj,Json.empty_obj) - in - (Json.Object wvobj,Json.Object nvobj) - end - - fun get_edges g = let - fun edge_info e ed s t = - Json.Object ( - Json.empty_obj |> update ("src",Json.String (V.dest s)) - |> update ("tgt",Json.String (V.dest t)) - |> update ("data",OutputEdgeData.output ed) - |> (case Graph.get_edge_annotation_opt g e - of SOME ann => update ("annotation",Json.Object ann) - | NONE => I) - ) - fun add_edge e (dobj,udobj) = let - val (s,t) = (Graph.get_edge_source g e, Graph.get_edge_target g e) - val (edir, ed) = Graph.get_edge_dir_and_data g e - in case edir of Directed => (update (E.dest e,edge_info e ed s t) dobj, udobj) - | Undirected => (dobj,update (E.dest e,edge_info e ed s t) udobj) - end - val (dobj,udobj) = E.NSet.fold add_edge - (Graph.get_edges g) - (Json.empty_obj,Json.empty_obj) - in - (Json.Object dobj,Json.Object udobj) - end - - fun get_bboxes g = let - fun add_bbox b = let - val add_parent = - case B.NSet.get_local_bot (Graph.get_bbox_parents g b) (* TODO: handle multiple parents *) - of SOME bp => update ("parent",Json.String (B.dest bp)) - | NONE => I - val jvs = map (fn v => Json.String (V.dest v)) - (V.NSet.list_of (Graph.get_vertices_in_bbox g b)) - val obj = Json.empty_obj |> add_parent - |> update ("contents",Json.Array jvs) - |> (case Graph.get_bbox_annotation_opt g b - of SOME ann => update ("annotation",Json.Object ann) - | NONE => I) - in - update (B.dest b, Json.Object obj) - end - in - Json.Object (B.NSet.fold add_bbox (Graph.get_bboxes g) Json.empty_obj) - end - - fun output g = let - val (wverts,nverts) = get_verts g - val (dedges,udedges) = get_edges g - val bboxes = get_bboxes g - in - Json.Object - (Json.empty_obj |> opt_update ("wire_vertices",wverts) - |> opt_update ("node_vertices",nverts) - |> opt_update ("undir_edges",udedges) - |> opt_update ("dir_edges",dedges) - |> opt_update ("bang_boxes",bboxes) - |> (if Json.is_empty_obj (Graph.get_graph_annotation g) then I - else update ("annotation",Json.Object (Graph.get_graph_annotation g))) - ) - end -end \ No newline at end of file diff --git a/core/io/graphical_theory_io.ML b/core/io/graphical_theory_io.ML deleted file mode 100644 index 680c0c83..00000000 --- 
a/core/io/graphical_theory_io.ML +++ /dev/null @@ -1,180 +0,0 @@ -functor StringTableAnnotatedGraphicalTheoryIO ( - structure Theory : GRAPHICAL_THEORY - structure GraphComponentDataIO : GRAPH_COMPONENT_DATA_IO - sharing type Theory.Graph.nvdata = GraphComponentDataIO.nvdata - sharing type Theory.Graph.edata = GraphComponentDataIO.edata -) = -struct - structure Theory = Theory; - - (* COMPAT: needed by the old controller code *) - structure IVDataInputJSON = GraphComponentDataIO.IVDataInputJSON; - structure IVDataOutputJSON = GraphComponentDataIO.IVDataOutputJSON; - structure EDataInputJSON = GraphComponentDataIO.EDataInputJSON; - structure EDataOutputJSON = GraphComponentDataIO.EDataOutputJSON; - - structure RulesetAnnotations = RulesetStringTableAnnotations; - structure GraphAnnotations = GraphStringTableAnnotations; - - structure InputGraphJSON = InputAnnotatedGraphJSON( - structure Graph = Theory.Graph - structure InputVertexData = GraphComponentDataIO.IVDataInputJSON - structure InputEdgeData = GraphComponentDataIO.EDataInputJSON - structure InputAnnotation = InputGraphStringTableAnnotationsJSON - ); - structure OutputGraphJSON = OutputAnnotatedGraphJSON( - structure Graph = Theory.Graph - structure OutputVertexData = GraphComponentDataIO.IVDataOutputJSON - structure OutputEdgeData = GraphComponentDataIO.EDataOutputJSON - structure OutputAnnotation = OutputGraphStringTableAnnotationsJSON - ); - structure OutputGraphDot = OutputGraphDot( - structure Graph = Theory.Graph - structure DotStyle = GraphComponentDataIO.DotStyle - ); - structure InputRuleJSON = InputAnnotatedRuleJSON( - structure Rule = Theory.Rule - structure InputAnnotation = InputRulesetStringTableAnnotationsJSON.RuleAnnInput - structure InputGraph = InputGraphJSON - ); - structure OutputRuleJSON = OutputAnnotatedRuleJSON( - structure Rule = Theory.Rule - structure OutputAnnotation = OutputRulesetStringTableAnnotationsJSON.RuleAnnOutput - structure OutputGraph = OutputGraphJSON - ); - structure OutputRewriteJSON = OutputAnnotatedRewriteJSON( - structure Rule = Theory.Rule - structure OutputAnnotation = OutputRulesetStringTableAnnotationsJSON.RuleAnnOutput - structure OutputGraph = OutputGraphJSON - ); - structure OutputRewriteListJSON = OutputAnnotatedRewriteListJSON( - structure Rule = Theory.Rule - structure OutputAnnotation = OutputRulesetStringTableAnnotationsJSON - structure OutputRewrite = OutputRewriteJSON - ); - structure InputRulesetJSON = InputAnnotatedRulesetJSON( - structure Ruleset = Theory.Ruleset - structure InputAnnotation = InputRulesetStringTableAnnotationsJSON - structure InputRule = InputRuleJSON - ); - structure OutputRulesetJSON = OutputAnnotatedRulesetJSON( - structure Ruleset = Theory.Ruleset - structure OutputAnnotation = OutputRulesetStringTableAnnotationsJSON - structure OutputRule = OutputRuleJSON - ); -end; - -functor JsonObjectAnnotatedGraphicalTheoryIO ( - structure Theory : GRAPHICAL_THEORY - structure GraphComponentDataIO : GRAPH_COMPONENT_DATA_IO - sharing type Theory.Graph.nvdata = GraphComponentDataIO.nvdata - sharing type Theory.Graph.edata = GraphComponentDataIO.edata -) = -struct - structure Theory = Theory; - - (* COMPAT: needed by the old controller code *) - structure IVDataInputJSON = GraphComponentDataIO.IVDataInputJSON; - structure IVDataOutputJSON = GraphComponentDataIO.IVDataOutputJSON; - structure EDataInputJSON = GraphComponentDataIO.EDataInputJSON; - structure EDataOutputJSON = GraphComponentDataIO.EDataOutputJSON; - - structure RulesetAnnotations = 
RulesetJsonObjectAnnotations; - structure GraphAnnotations = GraphJsonObjectAnnotations; - - structure InputGraphJSON = InputAnnotatedGraphJSON( - structure Graph = Theory.Graph - structure InputVertexData = GraphComponentDataIO.IVDataInputJSON - structure InputEdgeData = GraphComponentDataIO.EDataInputJSON - structure InputAnnotation = InputGraphJsonObjectAnnotationsJSON - ); - structure OutputGraphJSON = OutputAnnotatedGraphJSON( - structure Graph = Theory.Graph - structure OutputVertexData = GraphComponentDataIO.IVDataOutputJSON - structure OutputEdgeData = GraphComponentDataIO.EDataOutputJSON - structure OutputAnnotation = OutputGraphJsonObjectAnnotationsJSON - ); - structure OutputGraphDot = OutputGraphDot( - structure Graph = Theory.Graph - structure DotStyle = GraphComponentDataIO.DotStyle - ); - structure InputRuleJSON = InputAnnotatedRuleJSON( - structure Rule = Theory.Rule - structure InputAnnotation = InputRulesetJsonObjectAnnotationsJSON.RuleAnnInput - structure InputGraph = InputGraphJSON - ); - structure OutputRuleJSON = OutputAnnotatedRuleJSON( - structure Rule = Theory.Rule - structure OutputAnnotation = OutputRulesetJsonObjectAnnotationsJSON.RuleAnnOutput - structure OutputGraph = OutputGraphJSON - ); - structure OutputRewriteJSON = OutputAnnotatedRewriteJSON( - structure Rule = Theory.Rule - structure OutputAnnotation = OutputRulesetJsonObjectAnnotationsJSON.RuleAnnOutput - structure OutputGraph = OutputGraphJSON - ); - structure OutputRewriteListJSON = OutputAnnotatedRewriteListJSON( - structure Rule = Theory.Rule - structure OutputAnnotation = OutputRulesetJsonObjectAnnotationsJSON - structure OutputRewrite = OutputRewriteJSON - ); - structure InputRulesetJSON = InputAnnotatedRulesetJSON( - structure Ruleset = Theory.Ruleset - structure InputAnnotation = InputRulesetJsonObjectAnnotationsJSON - structure InputRule = InputRuleJSON - ); - structure OutputRulesetJSON = OutputAnnotatedRulesetJSON( - structure Ruleset = Theory.Ruleset - structure OutputAnnotation = OutputRulesetJsonObjectAnnotationsJSON - structure OutputRule = OutputRuleJSON - ); -end; - -functor GraphicalTheoryIO ( - structure Theory : GRAPHICAL_THEORY - structure GraphComponentDataIO : GRAPH_COMPONENT_DATA_IO - sharing type Theory.Graph.nvdata = GraphComponentDataIO.nvdata - sharing type Theory.Graph.edata = GraphComponentDataIO.edata -) = -struct - structure Theory = Theory; - - structure InputGraphJSON = InputGraphJSON( - structure Graph = Theory.Graph - structure InputVertexData = GraphComponentDataIO.IVDataInputJSON - structure InputEdgeData = GraphComponentDataIO.EDataInputJSON - ); - structure OutputGraphJSON = OutputGraphJSON( - structure Graph = Theory.Graph - structure OutputVertexData = GraphComponentDataIO.IVDataOutputJSON - structure OutputEdgeData = GraphComponentDataIO.EDataOutputJSON - ); - structure OutputGraphDot = OutputGraphDot( - structure Graph = Theory.Graph - structure DotStyle = GraphComponentDataIO.DotStyle - ); - structure InputRuleJSON = InputRuleJSON( - structure Rule = Theory.Rule - structure InputGraph = InputGraphJSON - ); - structure OutputRuleJSON = OutputRuleJSON( - structure Rule = Theory.Rule - structure OutputGraph = OutputGraphJSON - ); - structure OutputRewriteJSON = OutputRewriteJSON( - structure Rule = Theory.Rule - structure OutputGraph = OutputGraphJSON - ); - structure OutputRewriteListJSON = OutputListJSON( - structure OutputValue = OutputRewriteJSON - ); - structure InputRulesetJSON = InputRulesetJSON( - structure Ruleset = Theory.Ruleset - structure InputRule 
= InputRuleJSON - ); - structure OutputRulesetJSON = OutputRulesetJSON( - structure Ruleset = Theory.Ruleset - structure OutputRule = OutputRuleJSON - ); -end; - diff --git a/core/io/input.ML b/core/io/input.ML deleted file mode 100644 index cafc6213..00000000 --- a/core/io/input.ML +++ /dev/null @@ -1,8 +0,0 @@ -signature INPUT = -sig - type T - type data - val input : T -> data - - exception bad_input_exp of string*string; (* message, location *) -end; diff --git a/core/io/json_io.ML b/core/io/json_io.ML deleted file mode 100644 index f11cd174..00000000 --- a/core/io/json_io.ML +++ /dev/null @@ -1,233 +0,0 @@ -signature INPUT_JSON = INPUT where type T = Json.json; -signature OUTPUT_JSON = OUTPUT where type T = Json.json; - -structure JsonInputUtils = -struct - type T = Json.json; - exception bad_input_exp of string*string; - - fun prepend_prop prop loc = - if String.size loc > 0 - then prop^"."^loc - else prop - - fun fold_obj_easy f obj prop = - (case Json.lookup obj prop - of SOME (Json.Object obj') => - Json.fold (fn (k,v) => f (k,v) - handle bad_input_exp (m,l) => - raise bad_input_exp (m, - prepend_prop (prop^"."^k) l)) - obj' - | SOME Json.Null => I - | NONE => I - | _ => raise bad_input_exp ("Expected an object",prop)) - - fun fold_arr_easy f obj prop = - (case Json.lookup obj prop - of SOME (Json.Array arr) => fold f arr - | SOME Json.Null => I - | NONE => I - | _ => raise bad_input_exp ("Expected an array",prop)) - - fun fold_obj_or_arr_easy (fo,fa) obj prop = - (case Json.lookup obj prop - of SOME (Json.Array arr) => fold fa arr - | SOME (Json.Object obj') => - Json.fold (fn (k,v) => fo (k,v) - handle bad_input_exp (m,l) => - raise bad_input_exp (m, - prepend_prop (prop^"."^k) l)) - obj' - | SOME Json.Null => I - | NONE => I - | _ => raise bad_input_exp ("Expected an object or array",prop)) - - (* wraps Json.get and converts Json.notfound_exn to bad_input_exp *) - fun get obj prop = - Json.get obj prop - handle Json.notfound_exn prop => - raise bad_input_exp ("Missing property \""^prop^"\"",""); - - (* wraps Json.get - returns dflt if not found *) - fun get_easy dflt obj prop = - case Json.lookup obj prop - of SOME (Json.Null) => dflt - | SOME v => v - | NONE => dflt - - (* wraps Json.get and demands an array *) - fun get_array obj prop = - case get obj prop - of (Json.Array arr) => arr - | _ => raise bad_input_exp ("Property \""^prop^"\" not an array","") - - (* wraps Json.get and requests an array, returns empty array if not found *) - fun get_array_easy obj prop = - case Json.lookup obj prop - of SOME (Json.Array arr) => arr - | SOME (Json.Null) => [] - | NONE => [] - | _ => raise bad_input_exp ("Property \""^prop^"\" not an array","") - - (* wraps Json.get and demands an object *) - fun get_obj obj prop = - case get obj prop - of (Json.Object ob) => ob - | _ => raise bad_input_exp ("Property \""^prop^"\" not an object","") - - (* wraps Json.get and requests an object, returns empty object if not found *) - fun get_object_easy obj prop = - case Json.lookup obj prop - of SOME (Json.Object ob) => ob - | SOME (Json.Null) => Json.empty_obj - | NONE => Json.empty_obj - | _ => raise bad_input_exp ("Property \""^prop^"\" not an object","") - - (* wraps Json.get and demands a real *) - fun get_real obj prop = - case get obj prop - of (Json.Real n) => n - | (Json.Int n) => Real.fromInt n - | _ => raise bad_input_exp ("Property \""^prop^"\" not a number","") - - (* wraps Json.get and requests a real, returns dflt if not found *) - fun get_real_easy dflt obj prop = - case Json.lookup 
obj prop - of SOME (Json.Real n) => n - | SOME (Json.Int n) => Real.fromInt n - | SOME (Json.Null) => dflt - | NONE => dflt - | _ => raise bad_input_exp ("Property \""^prop^"\" not a number","") - - fun coerce_int prop r = - if (Real.isFinite r) andalso Real.==(Real.realMod r,0.0) - then Real.trunc r - else raise bad_input_exp ("Property \""^prop^"\": "^ - (Real.toString r)^" is not an integer","") - - (* wraps Json.get and demands an integer *) - fun get_int obj prop = - case get obj prop - of (Json.Int n) => n - | (Json.Real n) => coerce_int prop n - | _ => raise bad_input_exp ("Property \""^prop^"\" not a number","") - - (* wraps Json.get and requests an integer, returns dflt if not found *) - fun get_int_easy dflt obj prop = - case Json.lookup obj prop - of SOME (Json.Int n) => n - | SOME (Json.Real n) => coerce_int prop n - | SOME (Json.Null) => dflt - | NONE => dflt - | _ => raise bad_input_exp ("Property \""^prop^"\" not a number","") - - (* wraps Json.get and demands a string *) - fun get_string obj prop = - case get obj prop - of (Json.String s) => s - | _ => raise bad_input_exp ("Property \""^prop^"\" not a string","") - - (* wraps Json.get and requests a string, returns dflt if not found *) - fun get_string_easy dflt obj prop = - case Json.lookup obj prop - of SOME (Json.String s) => s - | SOME (Json.Null) => dflt - | NONE => dflt - | _ => raise bad_input_exp ("Property \""^prop^"\" not a string","") - - (* wraps Json.get and demands a boolean *) - fun get_bool obj prop = - case get obj prop - of (Json.Bool b) => b - | _ => raise bad_input_exp ("Property \""^prop^"\" not a boolean","") - - (* wraps Json.get and requests a boolean, returns dflt if not found *) - fun get_bool_easy dflt obj prop = - case Json.lookup obj prop - of SOME (Json.Bool b) => b - | SOME (Json.Null) => dflt - | NONE => dflt - | _ => raise bad_input_exp ("Property \""^prop^"\" not a boolean","") - - - fun input_string_table Json.Null = Symtab.empty - | input_string_table (Json.Object obj) = - let - fun add_entry (k,Json.String v) = Symtab.update_new (k,v) - | add_entry (k,Json.Int i) = Symtab.update_new (k,Int.toString i) - | add_entry (k,Json.Real r) = Symtab.update_new (k,Real.toString r) - | add_entry (_,Json.Null) = I - | add_entry _ = raise bad_input_exp ("Expected string","") - in - Json.fold add_entry obj Symtab.empty - end - | input_string_table _ = raise bad_input_exp ("Expected object","") -end; - -structure JsonOutputUtils = -struct - type T = Json.json; - - (* Does not write null values *) - fun update (_,Json.Null) = I - | update d = Json.update d; - - (* Does not write empty values *) - fun opt_update (_,Json.Null) = I - | opt_update (_,Json.Array []) = I - | opt_update (v as (_,Json.Object obj)) = (case Json.properties obj - of [] => I - | _ => Json.update v) - | opt_update d = Json.update d; - - fun output_string_table tab = let - fun update_entry (k,v) = Json.update (k,Json.String v) - in - if Symtab.is_empty tab - then Json.Null - else Json.Object (Symtab.fold update_entry tab Json.empty_obj) - end; -end - -functor InputListJSON ( - structure InputValue : INPUT_JSON -) : INPUT_JSON -= struct - open JsonInputUtils; - type data = InputValue.data list; - - fun map_entry v = InputValue.input v - handle InputValue.bad_input_exp e => - raise bad_input_exp e - - fun input (Json.Array arr) = map map_entry arr - | input Json.Null = [] - | input _ = raise bad_input_exp ("Expected array",""); -end; - -functor OutputListJSON ( - structure OutputValue : OUTPUT_JSON -) : OUTPUT_JSON -= struct - open 
JsonOutputUtils; - type data = OutputValue.data list; - - fun output arr = Json.Array (map OutputValue.output arr); -end; - -structure InputUnitJSON : INPUT_JSON -= struct - open JsonInputUtils; - type data = unit; - fun input Json.Null = () - | input _ = raise bad_input_exp ("Unexpected data",""); -end; - -structure OutputUnitJSON : OUTPUT_JSON -= struct - open JsonOutputUtils; - type data = unit; - fun output () = Json.Null; -end; - diff --git a/core/io/linrat_json.ML b/core/io/linrat_json.ML deleted file mode 100644 index c432ebfe..00000000 --- a/core/io/linrat_json.ML +++ /dev/null @@ -1,99 +0,0 @@ -(** - * JSON graph parsing and writing - * - * LINRAT = Null | String | LINRAT_OBJ - * LINRAT_OBJ ::= - * { - * "pi": RATIONAL, - * "vars": RATIONAL_MAP - * } - * RATIONAL_MAP ::= - * { - * "num": Int, - * [, "denom": Int - * } - * RATIONAL_MAP ::= { VARNAME : RATIONAL (, VARNAME : RATIONAL)* } | {} | Null - * VARNAME ::= String - * RATIONAL ::= Null | - * { - * "num": Int, - * "denom": Int - * } - * - * Json.Null is treated as zero. Json.String is parsed in a "natural" way. - *) - -structure InputLinratJSON : INPUT_JSON where type data = LinratAngleExpr.T -= struct - open JsonInputUtils; - structure C = LinratAngleExpr.Coeff; - type data = LinratAngleExpr.T; - - fun parse_rational obj = let - val num = get_int obj "num" - val denom = get_int obj "denom" - in - C.mk (num, denom) - end - - fun get_rational_easy dflt obj prop = - case Json.lookup obj prop - of SOME (Json.Object obj') => parse_rational obj' - | SOME (Json.Null) => dflt - | NONE => dflt - | _ => raise bad_input_exp ("Property \""^prop^"\" was not an object","") - - fun parse_vars obj = - map (fn n => (X.mk n,get_rational_easy C.zero obj n)) (Json.properties obj) - - fun parse_str s = LinratAngleExpr.parse s - handle LinratAngleExpr.parse_exp => - raise bad_input_exp ("Could not parse \""^s^"\"","") - - fun input (Json.Object obj) = - let - val pi = get_rational_easy C.zero obj "pi" - val vars = get_object_easy obj "vars" - in - LinratAngleExpr.mk pi (parse_vars vars) - end - | input (Json.String s) = parse_str s - | input Json.Null = LinratAngleExpr.zero - | input _ = raise bad_input_exp ("Expected object or string","") - -end; - -structure OutputLinratJSON : OUTPUT_JSON where type data = LinratAngleExpr.T = -struct - open JsonOutputUtils; - type data = LinratAngleExpr.T; - - fun make_rational r = - Json.mk_object [("num", (Json.Int (Rational.get_num r))), - ("denom",(Json.Int (Rational.get_denom r)))] - - fun make_vars expr = let - fun add_entry x = Json.update ( - X.string_of_name x, - make_rational (LinratAngleExpr.coeff_for_var expr (SOME x))) - in - Json.Object (X.NSet.fold add_entry (LinratAngleExpr.free_vars expr) Json.empty_obj) - end - - fun output expr = let - (* we try to minimise the output *) - val pi = LinratAngleExpr.coeff_for_var expr NONE - val add_const = if LinratAngleExpr.Coeff.is_zero pi - then I - else Json.update ("pi",(make_rational pi)) - val add_vars = if X.NSet.is_empty (LinratAngleExpr.free_vars expr) - then I - else Json.update ("vars",(make_vars expr)) - val str = Pretty.string_of (LinratAngleExpr.pretty expr) - val add_str = Json.update ("pretty",(Json.String str)) - val add_all = add_str o add_vars o add_const - in - Json.Object (add_all Json.empty_obj) - end -end; (* structure *) - diff --git a/core/io/output.ML b/core/io/output.ML deleted file mode 100644 index 03d56b66..00000000 --- a/core/io/output.ML +++ /dev/null @@ -1,6 +0,0 @@ -signature OUTPUT = -sig - type data - type T - val output : 
data -> T -end; diff --git a/core/io/rewrite_json.ML b/core/io/rewrite_json.ML deleted file mode 100644 index a42dd945..00000000 --- a/core/io/rewrite_json.ML +++ /dev/null @@ -1,100 +0,0 @@ -(** - * JSON output of rewrites - * - * REWRITE_LIST ::= [ REWRITE (, REWRITE)* ] | [] - * REWRITE ::= - * { - * "rule": RULE_N, - * "rewritten_graph": GRAPH - * } - * RULE_N ::= - * { - * "name": NAME, - * "lhs": GRAPH, - * "rhs": GRAPH, - * "annotation": ANNDATA - * } - * - * See graph_json.ML and rule_json.ML for details. - *) - -functor OutputAnnotatedRewriteJSON( - structure Rule : OGRAPH_RULE - structure OutputAnnotation : RULE_ANN_OUTPUT_JSON - structure OutputGraph : OUTPUT_JSON where type data = (Rule.Graph.T * OutputAnnotation.GraphAnnOutput.data) -) : OUTPUT_JSON -= struct - structure OG = OutputGraph; - structure OA = OutputAnnotation; - - open JsonOutputUtils; - type data = ((R.name * Rule.T) * OA.data) * OG.data; - - fun output_rule ((rn,rule),ann) = let - val la = OA.get_lhs_annotation ann - val ra = OA.get_rhs_annotation ann - val lhs = Rule.get_lhs rule - val rhs = Rule.get_rhs rule - in - Json.Object ( - Json.empty_obj |> update ("name",Json.String (R.dest rn)) - |> update ("lhs",(OG.output (lhs,la))) - |> update ("rhs",(OG.output (rhs,ra))) - |> update ("annotation",(OA.output_rule_annotation ann)) - ) - end; - - fun output (r,g) = - Json.Object ( - Json.empty_obj |> update ("rule",output_rule r) - |> update ("rewritten_graph",(OG.output g)) - ); -end; - -functor OutputRewriteJSON( - structure Rule : OGRAPH_RULE - structure OutputGraph : OUTPUT_JSON - sharing type OutputGraph.data = Rule.Graph.T -) : OUTPUT_JSON -= struct - structure OG = OutputGraph; - - open JsonOutputUtils; - type data = (R.name * Rule.T) * Rule.Graph.T; - - fun output_rule (rn,rule) = - Json.Object ( - Json.empty_obj |> update ("name",Json.String (R.dest rn)) - |> update ("lhs",(OG.output (Rule.get_lhs rule))) - |> update ("rhs",(OG.output (Rule.get_rhs rule))) - ); - - fun output (r,g) = - Json.Object ( - Json.empty_obj |> update ("rule",output_rule r) - |> update ("rewritten_graph",(OG.output g)) - ); -end; - -functor OutputAnnotatedRewriteListJSON( - structure Rule : OGRAPH_RULE - structure OutputAnnotation : RULESET_ANN_OUTPUT_JSON - structure OutputRewrite : OUTPUT_JSON - where type data = ((R.name * Rule.T) * OutputAnnotation.RuleAnnOutput.data) - * (Rule.Graph.T * OutputAnnotation.RuleAnnOutput.GraphAnnOutput.data); -) : OUTPUT_JSON -= struct - structure OA = OutputAnnotation; - structure OGA = OA.RuleAnnOutput.GraphAnnOutput; - - open JsonOutputUtils; - type data = ((R.name * Rule.T) * (Rule.Graph.T * OGA.data)) list * OA.data; - - fun output_rewrite ann (r as (rn,_),g) = - OutputRewrite.output ((r,OA.get_rule_annotation ann rn),g); - - fun output (rws,ann) = - Json.Array (map (output_rewrite ann) rws); -end; - - diff --git a/core/io/rule_json.ML b/core/io/rule_json.ML deleted file mode 100644 index 175b1915..00000000 --- a/core/io/rule_json.ML +++ /dev/null @@ -1,163 +0,0 @@ -(** - * JSON rule parsing and writing - * - * RULE ::= - * { - * "lhs": GRAPH, - * "rhs": GRAPH, - * "annotation": ANNDATA - * } - * - * See graph_json.ML for details. "lhs" and "rhs" are required, but - * "annotation" is optional. 
- *) - -signature RULE_ANN_INPUT_JSON -= sig - structure GraphAnnInput : GRAPH_ANN_INPUT_JSON; - exception bad_input_exp of string*string; - type data; - val empty_annotation : data; - val set_lhs_annotation : GraphAnnInput.data -> data -> data; - val set_rhs_annotation : GraphAnnInput.data -> data -> data; - val input_rule_annotation : Json.json -> data -> data; -end; - -structure EmptyRuleAnnotationInputJson - : RULE_ANN_INPUT_JSON where type data = unit -= struct - structure GraphAnnInput = EmptyGraphAnnotationInputJson; - exception bad_input_exp = GraphAnnInput.bad_input_exp; - type data = unit; - val empty_annotation = (); - fun set_lhs_annotation _ = I; - fun set_rhs_annotation _ = I; - fun input_rule_annotation _ = I; -end; - -signature RULE_ANN_OUTPUT_JSON -= sig - structure GraphAnnOutput : GRAPH_ANN_OUTPUT_JSON; - type data; - val empty_annotation : data; - val get_lhs_annotation : data -> GraphAnnOutput.data; - val get_rhs_annotation : data -> GraphAnnOutput.data; - val output_rule_annotation : data -> Json.json; -end; - -structure EmptyRuleAnnotationOutputJson - : RULE_ANN_OUTPUT_JSON where type data = unit -= struct - structure GraphAnnOutput = EmptyGraphAnnotationOutputJson; - type data = unit; - val empty_annotation = (); - fun get_lhs_annotation _ = GraphAnnOutput.empty_annotation; - fun get_rhs_annotation _ = GraphAnnOutput.empty_annotation; - fun output_rule_annotation _ = Json.Null; -end; - -functor InputAnnotatedRuleJSON( - structure Rule : OGRAPH_RULE - structure InputAnnotation : RULE_ANN_INPUT_JSON - structure InputGraph : INPUT_JSON where type data = (Rule.Graph.T * InputAnnotation.GraphAnnInput.data) -) : INPUT_JSON -= struct - structure IA = InputAnnotation; - - open JsonInputUtils; - type data = Rule.T * IA.data; - - fun get_ann obj prop ann = - IA.input_rule_annotation (get_easy Json.Null obj prop) ann - handle IA.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop prop l) - - fun input (Json.Object obj) = - let - val (lhs,la) = InputGraph.input (get obj "lhs") - handle InputGraph.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "lhs" l); - val (rhs,ra) = InputGraph.input (get obj "rhs") - handle InputGraph.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "rhs" l); - val ann = IA.empty_annotation |> get_ann obj "annotation" - |> IA.set_lhs_annotation la - |> IA.set_rhs_annotation ra - in - (Rule.mk (lhs,rhs),ann) - handle Rule.bad_rule_exp (msg,_,_) => - raise bad_input_exp ("Invalid rule: "^msg,"") - end - | input _ = raise bad_input_exp ("Expected object",""); -end; - -functor InputRuleJSON( - structure Rule : OGRAPH_RULE - structure InputGraph : INPUT_JSON - sharing type InputGraph.data = Rule.Graph.T -) : INPUT_JSON -= struct - open JsonInputUtils; - type data = Rule.T; - - fun input (Json.Object obj) = - let - val lhs = InputGraph.input (get obj "lhs") - handle InputGraph.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "lhs" l); - val rhs = InputGraph.input (get obj "rhs") - handle InputGraph.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "rhs" l); - in - Rule.mk (lhs,rhs) - handle Rule.bad_rule_exp (msg,_,_) => - raise bad_input_exp ("Invalid rule: "^msg,"") - end - | input _ = raise bad_input_exp ("Expected object",""); -end; - -functor OutputAnnotatedRuleJSON( - structure Rule : OGRAPH_RULE - structure OutputAnnotation : RULE_ANN_OUTPUT_JSON - structure OutputGraph : OUTPUT_JSON where type data = (Rule.Graph.T * OutputAnnotation.GraphAnnOutput.data) -) : OUTPUT_JSON -= struct - structure OG = 
OutputGraph; - structure OA = OutputAnnotation; - - open JsonOutputUtils; - type data = Rule.T * OA.data; - - fun output (r,ann) = let - val la = OA.get_lhs_annotation ann - val ra = OA.get_rhs_annotation ann - val lhs = Rule.get_lhs r - val rhs = Rule.get_rhs r - in - Json.Object ( - Json.empty_obj |> update ("lhs",(OG.output (lhs,la))) - |> update ("rhs",(OG.output (rhs,ra))) - |> update ("annotation",(OA.output_rule_annotation ann)) - ) - end; -end; - -functor OutputRuleJSON( - structure Rule : OGRAPH_RULE - structure OutputGraph : OUTPUT_JSON - sharing type OutputGraph.data = Rule.Graph.T -) : OUTPUT_JSON -= struct - structure OG = OutputGraph; - - open JsonOutputUtils; - type data = Rule.T; - - fun output r = - Json.Object ( - Json.empty_obj |> update ("lhs",(OG.output (Rule.get_lhs r))) - |> update ("rhs",(OG.output (Rule.get_rhs r))) - ); -end; - - diff --git a/core/io/rule_json_io.ML b/core/io/rule_json_io.ML deleted file mode 100644 index 02586d33..00000000 --- a/core/io/rule_json_io.ML +++ /dev/null @@ -1,49 +0,0 @@ -signature RULE_JSON = -sig - structure Rule : BANG_GRAPH_RULE - val input : Json.json -> Rule.T - val output : Rule.T -> Json.json -end - -functor RuleJSON( - structure Rule : BANG_GRAPH_RULE - structure GraphJSON : GRAPH_JSON - sharing GraphJSON.Graph.Sharing = Rule.Graph.Sharing -) : RULE_JSON -= struct - -structure Rule = Rule - -open JsonInputUtils -open JsonOutputUtils - -fun input (Json.Object obj) = - let - val lhs = GraphJSON.input (get obj "lhs") - handle bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "lhs" l) - val rhs = GraphJSON.input (get obj "rhs") - handle bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "rhs" l) - in - Rule.mk (lhs,rhs) - |> (case Json.lookup obj "annotation" - of SOME (Json.Object ann) => Rule.set_rule_annotation ann - | SOME Json.Null => I - | SOME _ => raise bad_input_exp ("Annotation must be object or null","") - | NONE => I) - handle Rule.bad_rule_exp (msg,_,_) => - raise bad_input_exp ("Invalid rule: "^msg,"") - end - | input _ = raise bad_input_exp ("Expected object","") - -fun output r = - Json.Object ( - Json.empty_obj |> update ("lhs",(GraphJSON.output (Rule.get_lhs r))) - |> update ("rhs",(GraphJSON.output (Rule.get_rhs r))) - |> (if Json.is_empty_obj (Rule.get_rule_annotation r) - then update ("annotation", Json.mk_object [("layout", Json.Bool true)]) - else update ("annotation", Json.Object (Rule.get_rule_annotation r))) - ) - -end diff --git a/core/io/ruleset_annotations_json.ML b/core/io/ruleset_annotations_json.ML deleted file mode 100644 index f039d40c..00000000 --- a/core/io/ruleset_annotations_json.ML +++ /dev/null @@ -1,108 +0,0 @@ -functor InputRulesetAnnotationsJSON( - structure Annotations : RULESET_ANNOTATIONS - val json_to_annotation : Json.json -> Annotations.data -) : RULESET_ANN_INPUT_JSON -= struct - open JsonInputUtils; - structure RA = Annotations; - structure GA = RA.GraphAnnotations; - - type data = RA.T; - val empty_annotation = RA.init; - - structure RuleAnnInput : RULE_ANN_INPUT_JSON - = struct - open JsonInputUtils; - structure GraphAnnInput = InputGraphAnnotationsJSON( - structure Annotations = GA - val json_to_annotation = json_to_annotation - ); - type data = GA.data * (GA.T * GA.T); - val empty_annotation = (GA.empty_data,(GA.init,GA.init)); - fun set_lhs_annotation ga (ra,(_,rhs)) = (ra,(ga,rhs)); - fun set_rhs_annotation ga (ra,(lhs,_)) = (ra,(lhs,ga)); - fun input_rule_annotation js (_,ga) = (json_to_annotation js,ga); - end; - - fun set_rule_annotation r 
(ra,(lhs,rhs)) = - (RA.set_rule_annotation r ra) - o (RA.set_rule_lhs_annotation r lhs) - o (RA.set_rule_rhs_annotation r rhs); - val input_ruleset_annotation = RA.set_ruleset_annotation o json_to_annotation; -end; - -structure InputRulesetStringTableAnnotationsJSON - : RULESET_ANN_INPUT_JSON where type data = RulesetStringTableAnnotations.T -= struct - open JsonInputUtils; - - structure InputAnn = InputRulesetAnnotationsJSON( - structure Annotations = RulesetStringTableAnnotations - val json_to_annotation = input_string_table - ); - open InputAnn; -end; - -structure InputRulesetJsonObjectAnnotationsJSON - : RULESET_ANN_INPUT_JSON where type data = RulesetJsonObjectAnnotations.T -= InputRulesetAnnotationsJSON( - structure Annotations = RulesetJsonObjectAnnotations - fun json_to_annotation Json.Null = Json.empty_obj - | json_to_annotation (Json.Object obj) = obj - | json_to_annotation _ = raise JsonInputUtils.bad_input_exp ("Expected object","") -) - -functor OutputRulesetAnnotationsJSON( - structure Annotations : RULESET_ANNOTATIONS - val annotation_to_json : Annotations.data -> Json.json -) : RULESET_ANN_OUTPUT_JSON -= struct - open JsonOutputUtils; - structure RA = Annotations; - structure GA = RA.GraphAnnotations; - - type data = RA.T; - val empty_annotation = RA.init; - - structure RuleAnnOutput : RULE_ANN_OUTPUT_JSON - = struct - open JsonOutputUtils; - structure GraphAnnOutput = OutputGraphAnnotationsJSON( - structure Annotations = GA - val annotation_to_json = annotation_to_json - ); - type data = GA.data * (GA.T * GA.T); - val empty_annotation = (GA.empty_data,(GA.init,GA.init)); - fun get_lhs_annotation (_,(ann,_)) = ann; - fun get_rhs_annotation (_,(_,ann)) = ann; - val output_rule_annotation = annotation_to_json o fst; - end; - - fun get_rule_annotation ann r = - (RA.get_rule_annotation ann r, - (RA.get_rule_lhs_annotation ann r,RA.get_rule_rhs_annotation ann r)); - val output_ruleset_annotation = - annotation_to_json o RA.get_ruleset_annotation; -end; - -structure OutputRulesetStringTableAnnotationsJSON - : RULESET_ANN_OUTPUT_JSON where type data = RulesetStringTableAnnotations.T -= struct - open JsonOutputUtils; - - structure OutputAnn = OutputRulesetAnnotationsJSON( - structure Annotations = RulesetStringTableAnnotations - val annotation_to_json = output_string_table - ); - open OutputAnn; -end; - -structure OutputRulesetJsonObjectAnnotationsJSON - : RULESET_ANN_OUTPUT_JSON where type data = RulesetJsonObjectAnnotations.T -= OutputRulesetAnnotationsJSON( - structure Annotations = RulesetJsonObjectAnnotations - fun annotation_to_json obj = Json.Object obj -) - - - diff --git a/core/io/ruleset_json.ML b/core/io/ruleset_json.ML deleted file mode 100644 index 769a63ab..00000000 --- a/core/io/ruleset_json.ML +++ /dev/null @@ -1,224 +0,0 @@ -(** - * JSON ruleset parsing and writing - * - * RULESET ::= - * { - * "rules": RULE_LIST, - * "active_rules": RULE_NAME_LIST, - * "tags": TAG_LIST - * "annotation": ANNDATA - * } - * RULE_LIST ::= { NAME : RULE (, NAME : RULE)* } | {} - * TAG_LIST ::= { NAME : RULE_NAME_LIST (, NAME : RULE_NAME_LIST)* } | {} - * RULE_NAME_LIST ::= [ NAME (, NAME)* ] | [] - * - * See rule_json.ML and graph_json.ML for details. All fields are optional, - * but any rules references from "active_rules" or "tags" must be in "rules". 
- *) - -signature RULESET_ANN_INPUT_JSON -= sig - structure RuleAnnInput : RULE_ANN_INPUT_JSON; - exception bad_input_exp of string*string; - type data; - val empty_annotation : data; - val set_rule_annotation : R.name -> RuleAnnInput.data -> data -> data; - val input_ruleset_annotation : Json.json -> data -> data; -end; - -structure EmptyRulesetAnnotationInputJson - : RULESET_ANN_INPUT_JSON where type data = unit -= struct - structure RuleAnnInput = EmptyRuleAnnotationInputJson; - exception bad_input_exp = RuleAnnInput.bad_input_exp; - type data = unit; - val empty_annotation = (); - fun set_rule_annotation _ _ = I; - fun input_ruleset_annotation _ = I; -end; - -signature RULESET_ANN_OUTPUT_JSON -= sig - structure RuleAnnOutput : RULE_ANN_OUTPUT_JSON; - type data; - val empty_annotation : data; - val get_rule_annotation : data -> R.name -> RuleAnnOutput.data; - val output_ruleset_annotation : data -> Json.json; -end; - -structure EmptyRulesetAnnotationOutputJson - : RULESET_ANN_OUTPUT_JSON where type data = unit -= struct - structure RuleAnnOutput = EmptyRuleAnnotationOutputJson; - type data = unit; - val empty_annotation = (); - fun get_rule_annotation _ _ = RuleAnnOutput.empty_annotation; - fun output_ruleset_annotation _ = Json.Null; -end; - -functor InputAnnotatedRulesetJSON( - structure Ruleset : RULESET - structure InputAnnotation : RULESET_ANN_INPUT_JSON - structure InputRule : INPUT_JSON where type data = (Ruleset.Rule.T * InputAnnotation.RuleAnnInput.data) -) : INPUT_JSON -= struct - structure IA = InputAnnotation; - - open JsonInputUtils; - type data = Ruleset.T * IA.data; - - fun get_ann obj prop (rs,ann) = - (rs, IA.input_ruleset_annotation (get_easy Json.Null obj prop) ann) - handle IA.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop prop l) - - fun map_rule (n,v) (tab,ann) = let - val (rule,r_ann) = InputRule.input v - handle InputRule.bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop n l) - val rn = R.mk n - in - (RTab.add (rn,rule) tab, - IA.set_rule_annotation rn r_ann ann) - end - - fun input_rules obj prop (ruleset,ann) = - case get_easy Json.Null obj prop - of Json.Object obj => - let - val (rules,ann') = (Json.fold map_rule obj (RTab.empty,ann) - handle bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop prop l)) - in - (Ruleset.set_allrules rules ruleset,ann') - end - | Json.Null => (ruleset,ann) - | _ => raise bad_input_exp ("Expected object",""); - - fun map_tag (n,(Json.Array a)) = - let - fun map_rulename (Json.String s) = R.mk s - | map_rulename _ = raise bad_input_exp ("Expected strings in array","") - val tag = TagName.mk n - in - fold (fn r => RTagRel.add (r,tag)) (map map_rulename a) - end - | map_tag _ = raise bad_input_exp ("Expected array","") - - fun input_tags obj prop = - case get_easy Json.Null obj prop - of Json.Object obj => - let - val tags = (Json.fold map_tag obj RTagRel.empty - handle bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop prop l)) - in - Ruleset.set_brel tags - end - | Json.Null => I - | _ => raise bad_input_exp ("Expected object",""); - - fun input_active obj prop = let - fun map_rulename (Json.String s) = R.mk s - | map_rulename _ = raise bad_input_exp ("Expected strings in array","") - val a = get_array_easy obj prop - in - Ruleset.set_active (R.NSet.of_list (map map_rulename a)) - end - - fun input (Json.Object obj) = - (Ruleset.empty,IA.empty_annotation) - |> get_ann obj "annotation" - |> input_rules obj "rules" - |> apfst (input_tags obj "tags") - |> apfst (input_active obj 
"active_rules") - | input Json.Null = (Ruleset.empty,IA.empty_annotation) - | input _ = raise bad_input_exp ("Expected object",""); -end; - -functor InputRulesetJSON( - structure Ruleset : RULESET - structure InputRule : INPUT_JSON - sharing type InputRule.data = Ruleset.Rule.T -) : INPUT_JSON -= struct - structure InputAnnRule : INPUT_JSON = - struct - open JsonInputUtils; - type data = InputRule.data*unit; - fun input json = (InputRule.input json,()) - end; - - structure SubInput = InputAnnotatedRulesetJSON( - structure Ruleset = Ruleset - structure InputAnnotation = EmptyRulesetAnnotationInputJson - structure InputRule = InputAnnRule - ); - open SubInput; - type data = Ruleset.T; - val input = fst o SubInput.input; -end; - -functor OutputAnnotatedRulesetJSON( - structure Ruleset : RULESET - structure OutputAnnotation : RULESET_ANN_OUTPUT_JSON - structure OutputRule : OUTPUT_JSON where type data = (Ruleset.Rule.T * OutputAnnotation.RuleAnnOutput.data) -) : OUTPUT_JSON -= struct - structure OA = OutputAnnotation; - - open JsonOutputUtils; - type data = Ruleset.T * OA.data; - - val rule_list = Json.Array o (map (Json.String o R.dest)) o R.NSet.list_of; - - fun output_rules (rs,ann) = let - val ruletab = Ruleset.get_allrules rs - val get_ann = OA.get_rule_annotation ann - fun upd_rule (r,v) = update (R.dest r,OutputRule.output (v,get_ann r)) - in - Json.Object (RTab.fold upd_rule ruletab Json.empty_obj) - end - - fun output_tags rs = let - val tagrel = Ruleset.get_tagrel rs - fun upd_tag t = update (TagName.dest t, rule_list (RTagRel.inv_img tagrel t)) - in - Json.Object (TagName.NSet.fold upd_tag (RTagRel.get_cod_set tagrel) Json.empty_obj) - end - - val output_active_rules = rule_list o Ruleset.get_active; - - fun output (rs,ann) = - Json.Object ( - Json.empty_obj |> update ("rules",output_rules (rs,ann)) - |> update ("tags",output_tags rs) - |> update ("active_rules",output_active_rules rs) - |> update ("annotation",OA.output_ruleset_annotation ann) - ); -end; - -functor OutputRulesetJSON( - structure Ruleset : RULESET - structure OutputRule : OUTPUT_JSON - sharing type OutputRule.data = Ruleset.Rule.T -) : OUTPUT_JSON -= struct - structure OutputAnnRule : OUTPUT_JSON = - struct - open JsonOutputUtils; - type data = OutputRule.data*unit; - val output = OutputRule.output o fst - end; - - structure SubOutput = OutputAnnotatedRulesetJSON( - structure Ruleset = Ruleset - structure OutputAnnotation = EmptyRulesetAnnotationOutputJson - structure OutputRule = OutputAnnRule - ); - open SubOutput; - type data = Ruleset.T; - fun output r = SubOutput.output (r,()); -end; - - diff --git a/core/io/ruleset_json_io.ML b/core/io/ruleset_json_io.ML deleted file mode 100644 index ecabe940..00000000 --- a/core/io/ruleset_json_io.ML +++ /dev/null @@ -1,105 +0,0 @@ -signature RULESET_JSON = -sig - structure Ruleset : RULESET - val input : Json.json -> Ruleset.T - val output : Ruleset.T -> Json.json -end - -functor RulesetJSON( - structure Ruleset : RULESET - structure RuleJSON : RULE_JSON - sharing RuleJSON.Rule.Sharing = Ruleset.Rule.Sharing -) : RULESET_JSON -= struct - -structure Ruleset = Ruleset - -open JsonInputUtils -open JsonOutputUtils - -fun map_rule (n,v) tab = let - val rule = RuleJSON.input v - handle bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop n l) -in - RTab.add (R.mk n,rule) tab -end - -fun input_rules obj prop ruleset = - case get_easy Json.Null obj prop - of Json.Object obj => - let - val rules = (Json.fold map_rule obj RTab.empty - handle bad_input_exp (m,l) => - raise 
bad_input_exp (m,prepend_prop prop l)) - in - Ruleset.set_allrules rules ruleset - end - | Json.Null => ruleset - | _ => raise bad_input_exp ("Expected object",""); - -fun map_tag (n,(Json.Array a)) = - let - fun map_rulename (Json.String s) = R.mk s - | map_rulename _ = raise bad_input_exp ("Expected strings in array","") - val tag = TagName.mk n - in - fold (fn r => RTagRel.add (r,tag)) (map map_rulename a) - end - | map_tag _ = raise bad_input_exp ("Expected array","") - -fun input_tags obj prop = - case get_easy Json.Null obj prop - of Json.Object obj => - let - val tags = (Json.fold map_tag obj RTagRel.empty - handle bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop prop l)) - in - Ruleset.set_brel tags - end - | Json.Null => I - | _ => raise bad_input_exp ("Expected object",""); - -fun input_active obj prop = let - fun map_rulename (Json.String s) = R.mk s - | map_rulename _ = raise bad_input_exp ("Expected strings in array","") - val a = get_array_easy obj prop -in - Ruleset.set_active (R.NSet.of_list (map map_rulename a)) -end - -fun input (Json.Object obj) = - Ruleset.empty - |> input_rules obj "rules" - |> input_tags obj "tags" - |> input_active obj "active_rules" - | input Json.Null = Ruleset.empty - | input _ = raise bad_input_exp ("Expected object",""); - -val rule_list = Json.Array o (map (Json.String o R.dest)) o R.NSet.list_of; - -fun output_rules rs = let - val ruletab = Ruleset.get_allrules rs - fun upd_rule (r,v) = update (R.dest r, RuleJSON.output v) -in - Json.Object (RTab.fold upd_rule ruletab Json.empty_obj) -end - -fun output_tags rs = let - val tagrel = Ruleset.get_tagrel rs - fun upd_tag t = update (TagName.dest t, rule_list (RTagRel.inv_img tagrel t)) -in - Json.Object (TagName.NSet.fold upd_tag (RTagRel.get_cod_set tagrel) Json.empty_obj) -end - -val output_active_rules = rule_list o Ruleset.get_active; - -fun output rs = - Json.Object ( - Json.empty_obj |> update ("rules",output_rules rs) - |> update ("tags",output_tags rs) - |> update ("active_rules",output_active_rules rs) - ) - -end \ No newline at end of file diff --git a/core/io/test/graph-json-test.ML b/core/io/test/graph-json-test.ML deleted file mode 100644 index 741dbbd1..00000000 --- a/core/io/test/graph-json-test.ML +++ /dev/null @@ -1,156 +0,0 @@ - -local - structure Tools = Test_Bang_Graph_Tools(Test_Bang_Graph) - open Tools - structure L = LinratAngleExpr - structure C = LinratAngleExpr.Coeff - structure GIn = GJsonInput - structure GOut = GJsonOutput - val expr_1_json = "{\"vars\":{\"a\":{\"num\":1,\"denom\":3},\"b\":{\"num\":2,\"denom\":1}}}" - val expr_1 = L.mk C.zero [(X.mk "a",C.mk (1,3)), (X.mk "b",C.mk (2,1))] - val expr_2_json = "{\"pi\":{\"num\":1,\"denom\":1}}" - val expr_2 = L.pi - val expr_3_json = "{\"pi\":{\"num\":1,\"denom\":2},\"vars\":{\"\\\\alpha\":{\"num\":1,\"denom\":1}}}" - val expr_3 = L.parse "1/2\\pi + \\alpha" - val g1 = G.empty - |> add_wv "w1" - |> add_wv "w2" - |> add_vunit1 "n1" - |> add_vexpr1 "n2" "0" - |> add_vexpr2 "n3" "0" - |> add_vexpr1_a "n4" expr_1 - |> add_vexpr2_a "n5" expr_2 - |> add_vexpr2 "n6" "a+\\pi" - |> add_dir_eunit1 "e1" "w1" "n1" - |> add_undir_eunit2 "e2" "n3" "n4" - |> add_dir_eexpr1_a "e3" "n5" "n5" expr_3 - |> add_bbox "b1" ["w1","n1","n2"] - |> add_bbox_with_parent "b2" "b1" ["w1","n1"] - |> add_bbox "b3" [] - val g1_json_1 = - "{"^ - "\"wire_vertices\":[\"w1\",\"w2\"],"^ - "\"node_vertices\":{"^ - "\"n1\":{\"data\":\"VUnit1\"},"^ - "\"n2\":{\"data\":{\"type\":\"VExpr1\",\"angle\":null}},"^ - 
"\"n3\":{\"data\":{\"type\":\"VExpr2\"}},"^ - "\"n4\":{\"data\":{\"type\":\"VExpr1\",\"angle\":"^expr_1_json^"}},"^ - "\"n5\":{\"data\":{\"type\":\"VExpr2\",\"angle\":"^expr_2_json^"}},"^ - "\"n6\":{\"data\":{\"type\":\"VExpr2\",\"angle\":\"a+\\\\pi\"}}"^ - "},"^ - "\"dir_edges\":{"^ - "\"e1\":{\"src\":\"w1\",\"tgt\":\"n1\",\"data\":\"EUnit1\"},"^ - "\"e3\":{\"src\":\"n5\",\"tgt\":\"n5\",\"data\":{\"type\":\"EExpr1\",\"angle\":"^expr_3_json^"}}"^ - "},"^ - "\"undir_edges\":{"^ - "\"e2\":{\"src\":\"n3\",\"tgt\":\"n4\",\"data\":{\"type\":\"EUnit2\"}}"^ - "},"^ - "\"bang_boxes\":{"^ - "\"b1\":{\"contents\":[\"w1\",\"n1\",\"n2\"]},"^ - "\"b2\":{\"contents\":[\"w1\",\"n1\"],\"parent\":\"b1\"},"^ - "\"b3\":{}"^ - "}"^ - "}" - val g1_json_2 = - "{"^ - "\"wire_vertices\":{"^ - "\"w1\":{},"^ - "\"w2\":null"^ - "},"^ - "\"node_vertices\":{"^ - "\"n1\":{\"data\":\"VUnit1\"},"^ - "\"n2\":{\"data\":{\"type\":\"VExpr1\",\"angle\":null}},"^ - "\"n3\":{\"data\":{\"type\":\"VExpr2\"}},"^ - "\"n4\":{\"data\":{\"type\":\"VExpr1\",\"angle\":"^expr_1_json^"}},"^ - "\"n5\":{\"data\":{\"type\":\"VExpr2\",\"angle\":"^expr_2_json^"}},"^ - "\"n6\":{\"data\":{\"type\":\"VExpr2\",\"angle\":\"a+\\\\pi\"}}"^ - "},"^ - "\"dir_edges\":{"^ - "\"e1\":{\"src\":\"w1\",\"tgt\":\"n1\",\"data\":\"EUnit1\"},"^ - "\"e3\":{\"src\":\"n5\",\"tgt\":\"n5\",\"data\":{\"type\":\"EExpr1\",\"angle\":"^expr_3_json^"}}"^ - "},"^ - "\"undir_edges\":{"^ - "\"e2\":{\"src\":\"n3\",\"tgt\":\"n4\",\"data\":{\"type\":\"EUnit2\"}}"^ - "},"^ - "\"bang_boxes\":{"^ - "\"b1\":{\"contents\":[\"w1\",\"n1\",\"n2\"]},"^ - "\"b2\":{\"contents\":[\"w1\",\"n1\"],\"parent\":\"b1\"},"^ - "\"b3\":{}"^ - "}"^ - "}" - val g2 = G.empty - |> add_wv "w1" - |> add_wv "w2" - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_dir_eunit1 "e1" "w1" "v1" - |> add_undir_eunit1 "e2" "v2" "w2" - val g2_json = - "{"^ - "\"wire_vertices\":[\"w1\",\"w2\"],"^ - "\"node_vertices\":[\"v1\",\"v2\"],"^ - "\"dir_edges\":{"^ - "\"e1\":{\"src\":\"w1\",\"tgt\":\"v1\"}"^ - "},"^ - "\"undir_edges\":{"^ - "\"e2\":{\"src\":\"v2\",\"tgt\":\"w2\"}"^ - "}"^ - "}" - fun test_parse (json_str,exp_expr) = let - val j = Json.of_string json_str - handle Json.parse_exn m => - (writeln "Bad JSON:"; writeln json_str; raise Json.parse_exn m) - val e = GIn.input j - in - if G.exact_eq exp_expr e - then () - else - (writeln "Expected graph:"; - G.print exp_expr; - writeln "Graph from JSON:"; - G.print e; - raise ERROR "Got wrong graph") - end - fun test_reparse expr = let - val j = GOut.output expr - in - let - val e = GIn.input j - in - if G.exact_eq expr e - then () - else - (writeln "Expected graph:"; - G.print expr; - writeln "Graph from JSON:"; - G.print e; - writeln "JSON:"; - writeln (Json.string_of j); - raise ERROR "Got wrong graph") - end - handle ex => - (writeln "JSON:"; - writeln (Json.string_of j); - raise ex) - end -in - val _ = Testing.test "Graph I/O: parse empty" - test_parse ("{}",G.empty) - - val _ = Testing.test "Graph I/O: reparse empty" - test_reparse G.empty - - val _ = Testing.test "Graph I/O: parse example graph 1" - test_parse (g1_json_1,g1) - - val _ = Testing.test "Graph I/O: parse example graph 1 (alternative json)" - test_parse (g1_json_2,g1) - - val _ = Testing.test "Graph I/O: reparse example graph 1" - test_reparse g1 - - val _ = Testing.test "Graph I/O: parse example graph 3 (default data types)" - test_parse (g2_json,g2) - - val _ = Testing.assert_no_failed_tests(); -end; diff --git a/core/io/test/linrat-json-test.ML b/core/io/test/linrat-json-test.ML deleted file mode 
100644 index 2bd8c283..00000000 --- a/core/io/test/linrat-json-test.ML +++ /dev/null @@ -1,59 +0,0 @@ -local - structure L = LinratAngleExpr; - structure C = LinratAngleExpr.Coeff; - structure OL = OutputLinratJSON; - structure IL = InputLinratJSON; - fun exprs_eq a b = L.is_zero (L.subtr_expr a b); - val str_expr = Pretty.string_of o LinratAngleExpr.pretty - fun test_parse (json_str,exp_expr) = let - val e = IL.input (Json.of_string json_str) - in - if exprs_eq exp_expr e - then () - else raise ERROR ("Expected \""^(str_expr exp_expr)^ - "\" but got \""^(str_expr e)^"\"") - end - fun test_reparse expr = let - val e = IL.input (OL.output expr) - in - if exprs_eq expr e - then () - else raise ERROR ("Expected \""^(str_expr expr)^ - "\" but got \""^(str_expr e)^"\"") - end -in - val _ = Testing.test "LinratAngleExpr I/O: parse empty" - test_parse ("{}",L.zero); - - val _ = Testing.test "LinratAngleExpr I/O: parse pi" - test_parse ("{\"pi\":{\"num\":1,\"denom\":1}}",L.pi); - - val _ = Testing.test "LinratAngleExpr I/O: parse 1/3 a + 2 b" - test_parse ("{\"vars\":{\"a\":{\"num\":1,\"denom\":3},\"b\":{\"num\":2,\"denom\":1}}}", - L.mk C.zero [(X.mk "a",C.mk (1, 3)), - (X.mk "b",C.mk (2, 1))]); - - val _ = Testing.test "LinratAngleExpr I/O: parse 3/8 pi + 2/3 \\beta" - test_parse ("{\"pi\":{\"num\":3,\"denom\":8},\"vars\":{\"\\\\beta\":{\"num\":2,\"denom\":3}}}", - L.mk (C.mk (3, 8)) [(X.mk "\\beta",C.mk (2, 3))]) - - val _ = Testing.test "LinratAngleExpr I/O: reparse zero" - test_reparse L.zero; - - val _ = Testing.test "LinratAngleExpr I/O: reparse pi" - test_reparse L.pi; - - val _ = Testing.test "LinratAngleExpr I/O: parse a + 2b" - test_reparse (L.mk C.zero [(X.mk "a",C.one), - (X.mk "b",C.mk (2, 1))]); - - val _ = Testing.test "LinratAngleExpr I/O: parse 3/8 pi + 2/3 \\beta" - test_reparse (L.mk (C.mk (3, 8)) [(X.mk "\\beta",C.mk (2, 3))]) - - (* NB: uses a quirk of the JSON parser (it parses JSON fragments) *) - val _ = Testing.test "LinratAngleExpr I/O: parse as string" - test_parse ("\"3/8 \\\\pi + 2/3 \\\\beta\"", - L.mk (C.mk (3, 8)) [(X.mk "\\beta",C.mk (2, 3))]) - - val _ = Testing.assert_no_failed_tests(); -end diff --git a/core/isabelle_env.ML b/core/isabelle_env.ML deleted file mode 100644 index 34df17cc..00000000 --- a/core/isabelle_env.ML +++ /dev/null @@ -1,185 +0,0 @@ -(* - * Emulate the Isabelle toplevel environment - *) -OS.FileSys.chDir "Pure"; - -fun exit st = - OS.Process.exit - (if st = 0 then OS.Process.success else OS.Process.failure); - -use "ML-Systems/polyml.ML"; - -(* from Isabelle/src/Pure/Generic *) -use "General/basics.ML"; -(* from Isabelle/src/Pure *) -use "library.ML"; - -(* Global version variable *) -val version = "Isabelle/IsaPlanner Library"; - -(* from Isabelle/src/Pure/Generic *) -use "General/print_mode.ML"; -use "General/alist.ML"; -use "General/table.ML"; -use "General/properties.ML"; - -(* from Isabelle/src/Pure/Concurrent *) -(*use "Concurrent/simple_thread.ML"; -use "Concurrent/synchronized.ML";*) -use "Concurrent/simple_thread.ML"; - -use "Concurrent/synchronized.ML"; -if Multithreading.available then () -else use "Concurrent/synchronized_sequential.ML"; -use "Concurrent/counter.ML"; - -(* from Isabelle/src/Pure/Generic *) -use "General/output.ML"; -use "PIDE/markup.ML"; - -fun legacy_feature s = warning (Markup.markup Markup.legacy ("Legacy feature! 
" ^ s)); - -use "General/timing.ML"; -use "General/scan.ML"; -use "General/source.ML"; -use "General/symbol.ML"; -use "General/seq.ML"; -use "General/position.ML"; -use "General/symbol_pos.ML"; -use "General/integer.ML"; -use "General/stack.ML"; -use "General/queue.ML"; -use "General/heap.ML"; -use "General/ord_list.ML"; -use "General/balanced_tree.ML"; -use "General/long_name.ML"; -use "General/buffer.ML"; -use "General/pretty.ML"; -use "PIDE/xml.ML"; -use "General/graph.ML"; -use "General/binding.ML"; -use "General/path.ML"; -use "General/url.ML"; -use "General/file.ML"; -use "General/sha1.ML"; -use "PIDE/yxml.ML"; -use "System/options.ML"; - -(* hard-coded options *) -val options = Options.empty |> - Options.declare {pos=Position.none, name="completion_limit", - typ=Options.intT, value="200"}; -Options.set_default options; - -use "General/completion.ML"; -(*use "General/json.ML";*) -(*use "General/text_socket.ML";*) - - -(* Isar lexer, for use_thy *) - -(* all proof-related types are stubs *) -type typ = unit; -type term = unit; -type attribute = unit; -type morphism = unit; -type thm = unit; -type indexname = string * int; - - -(* a very minimal lex structure for inner syntax *) -structure Lexicon = -struct -open Basic_Symbol_Pos; - -val err_prefix = "Inner lexical error: "; - -fun !!! msg = Symbol_Pos.!!! (fn () => err_prefix ^ msg); - -val scan_id = Symbol_Pos.scan_ident; -val scan_longid = scan_id @@@ (Scan.repeat1 ($$$ "." @@@ scan_id) >> flat); -val scan_tid = $$$ "'" @@@ scan_id; - -val scan_nat = Scan.many1 (Symbol.is_digit o Symbol_Pos.symbol); -val scan_int = $$$ "-" @@@ scan_nat || scan_nat; -val scan_natdot = scan_nat @@@ $$$ "." @@@ scan_nat; -val scan_float = $$$ "-" @@@ scan_natdot || scan_natdot; -val scan_hex = $$$ "0" @@@ $$$ "x" @@@ Scan.many1 (Symbol.is_ascii_hex o Symbol_Pos.symbol); -val scan_bin = $$$ "0" @@@ $$$ "b" @@@ Scan.many1 (fn (s, _) => s = "0" orelse s = "1"); - -val scan_id_nat = scan_id @@@ Scan.optional ($$$ "." @@@ scan_nat) []; -val scan_var = $$$ "?" @@@ scan_id_nat; -val scan_tvar = $$$ "?" @@@ $$$ "'" @@@ scan_id_nat; -end; - -use "Isar/token.ML"; - - -(* from Isabelle/src/Pure/Concurrent; this is the main stuff loaded in - Pure/ROOT.ML *) -use "ML/exn_properties_polyml.ML"; -use "ML/ml_statistics_polyml-5.5.0.ML"; -(*use "Concurrent/ROOT.ML";*) -val rootDir = OS.FileSys.getDir(); - -(* Include isaplib *) -(*OS.FileSys.chDir "Concurrent"; -use "ROOT.ML"; -OS.FileSys.chDir "..";*) - -(*MJB: simplification of the future calls. 
-It needs to be made here to use Synchronized and Future at the same time*) -(*use "Concurrent/task_queue.ML"; -use "Concurrent/future.ML";*) - -use "Concurrent/single_assignment.ML"; -if Multithreading.available then () -else use "Concurrent/single_assignment_sequential.ML"; - -if Multithreading.available -then use "Concurrent/bash.ML" -else use "Concurrent/bash_sequential.ML"; - -use "Concurrent/par_exn.ML"; -use "Concurrent/task_queue.ML"; -use "Concurrent/future.ML"; -use "Concurrent/event_timer.ML"; - -if ML_System.is_polyml then use "Concurrent/time_limit.ML" else (); - -use "Concurrent/lazy.ML"; -if Multithreading.available then () -else use "Concurrent/lazy_sequential.ML"; - -use "Concurrent/par_list.ML"; -if Multithreading.available then () -else use "Concurrent/par_list_sequential.ML"; - -use "Concurrent/mailbox.ML"; -use "Concurrent/cache.ML"; - - -(*use "General/stopwatch.ML"; -use "Concurrent/future_wrapper.ML"; -use "Concurrent/par_seq.ML"; -use "Concurrent/compactor.ML"; -use "Concurrent/par_tree.ML";*) - -(* Other IsapLib Special stuff: names and graphs *) -(*PolyML.Project.use_root "names/ROOT.ML"; -PolyML.Project.use_root "unif/ROOT.ML"; -PolyML.Project.use_root "graph/ROOT.ML"; -PolyML.Project.use_root "maps/ROOT.ML"; -*) - -(* Other basic top level things *) -(*use "basics/collection.ML";*) -(*use "basics/polym_table.ML";*) - - -(*use "basics/toplevel.ML";*) - - -OS.FileSys.chDir ".."; - -use "use_thy.ML"; diff --git a/core/json_interface/controller.ML b/core/json_interface/controller.ML deleted file mode 100644 index 034aa58c..00000000 --- a/core/json_interface/controller.ML +++ /dev/null @@ -1,36 +0,0 @@ -signature JSON_CONTROLLER = -sig - structure Theory : GRAPHICAL_THEORY - val dispatch : (string * string) * Json.json -> Json.json - type simproc = Theory.Graph.T -> ((R.name * Theory.Rule.T) * Theory.Graph.T) Seq.seq - val register_simproc : string * simproc -> unit -end - - -functor JsonController( - structure Theory : GRAPHICAL_THEORY - structure GraphComponentDataIO : GRAPH_COMPONENT_DATA_IO - sharing type Theory.Graph.nvdata = GraphComponentDataIO.nvdata - sharing type Theory.Graph.edata = GraphComponentDataIO.edata) = -struct - -open JsonControllerUtil - -structure Theory = Theory -structure CModTest = CModTest( - structure Theory = Theory - structure GraphComponentDataIO = GraphComponentDataIO) - -structure CModRewrite = CModRewrite(structure Theory = Theory) -structure CModSimplify = CModSimplify(structure Theory = Theory) - -type simproc = CModSimplify.simproc -val register_simproc = CModSimplify.register_simproc - -val mtab = Symtab.make [ - ("test", (CModTest.ftab, "Test functions")), - ("rewrite", (CModRewrite.ftab, "Graph rewriting")), - ("simplify", (CModSimplify.ftab, "Graph simprocs")) -] - -end diff --git a/core/json_interface/controller_module.ML b/core/json_interface/controller_module.ML deleted file mode 100644 index 517613e3..00000000 --- a/core/json_interface/controller_module.ML +++ /dev/null @@ -1,6 +0,0 @@ - -signature CONTROLLER_MODULE = -sig - (* a table mapping function name to function * docstring *) - val ftab : JsonControllerUtil.ftab -end diff --git a/core/json_interface/controller_registry.ML b/core/json_interface/controller_registry.ML deleted file mode 100644 index 497d5bc0..00000000 --- a/core/json_interface/controller_registry.ML +++ /dev/null @@ -1,43 +0,0 @@ -signature JSON_CONTROLLER_REGISTRY = -sig - (* - dispatch takes: - ((controller, module, function), JSON input) - and returns: - JSON output - *) - val dispatch : (string * 
string * string) * Json.json -> Json.json -end - -structure JsonControllerRegistry (*: JSON_CONTROLLER_REGISTRY*) = -struct - -open JsonControllerUtil - -structure RG_Controller = JsonController( - structure Theory = RG_Theory - structure GraphComponentDataIO = RG_ComponentDataIO -) - -structure GHZW_Controller = JsonController( - structure Theory = GHZW_Theory - structure GraphComponentDataIO = GHZW_ComponentDataIO -) - -val ctab = Symtab.make [ - (RG_Controller.Theory.theory_name, RG_Controller.mtab), - (GHZW_Controller.Theory.theory_name, GHZW_Controller.mtab) -] - -fun dispatch ((controller, module, function), json_input) = -let - val mtab = case Symtab.lookup ctab controller of SOME x => x - | NONE => raise user_exn ("Controller not found: " ^ controller) - val modl = case Symtab.lookup mtab module of SOME x => x - | NONE => raise user_exn ("Module not found: " ^ module) - val func = case Symtab.lookup (fst modl) function of SOME x => x - | NONE => raise user_exn ("Function not found: " ^ module ^ "." ^ function) -in (fst func) json_input -end - -end \ No newline at end of file diff --git a/core/json_interface/controller_util.ML b/core/json_interface/controller_util.ML deleted file mode 100644 index 81d42c01..00000000 --- a/core/json_interface/controller_util.ML +++ /dev/null @@ -1,96 +0,0 @@ -infix 5 -: - -structure JsonControllerUtil = -struct - -(* Fixed set of types to annotate protocol calls. Used to - generate bindings. *) -datatype ptype = - list_t of ptype | - string_t | int_t | json_t | - graphname_t | vertexname_t | edgename_t | bboxname_t | rulename_t | - graph_t | rule_t - -(* expect single arg or named args *) -datatype pargs = - S of ptype | - N of (string * ptype) list - -(* TODO: Pretty.T versions *) -fun string_of_ptype x = case x - of list_t x => "[" ^ string_of_ptype x ^ "]" - | string_t => "string" - | int_t => "int" - | json_t => "json" - | graphname_t => "graphname" - | vertexname_t => "vertexname" - | edgename_t => "edgename" - | bboxname_t => "bboxname" - | rulename_t => "rulename" - | graph_t => "graph" - | rule_t => "rule" - -fun string_of_pargs (S x) = string_of_ptype x - | string_of_pargs (N xs) = - "{ " ^ - String.concatWith ", " - (map (fn (k,t) => k ^ ": " ^ string_of_ptype t) xs) ^ - " }" - -fun (nm:string) -: (typ:ptype) = (nm, typ) - -type fdesc = { - name : string, - doc: string, - input: pargs, - output: pargs -} -type ftab = ((Json.json -> Json.json) * fdesc) Symtab.table - -fun register (fd : fdesc) f = Symtab.update_new (#name fd, (f,fd)) - -(* got bad input from the protocol, should crash *) -exception protocol_exn of string - -(* got bad input from the user, should report error and carry on *) -exception user_exn of string - -(* {{{ JSON argument helpers *) -fun optarg_json x arg = -let - val obj = case x of Json.Object y => y | _ => raise protocol_exn "Expected: JSON object" -in Json.lookup obj arg -end - -fun optarg_str x arg = - case optarg_json x arg - of SOME (Json.String y) => SOME y - | SOME _ => raise protocol_exn ("Expected: string for arg: " ^ arg) - | NONE => NONE - -fun optarg_str x arg = - case optarg_json x arg - of SOME (Json.String y) => SOME y - | SOME _ => raise protocol_exn ("Expected: string for arg: " ^ arg) - | NONE => NONE - -fun optarg_int x arg = - case optarg_json x arg - of SOME (Json.Int y) => SOME y - | SOME _ => raise protocol_exn ("Expected: string for arg: " ^ arg) - | NONE => NONE - - -fun arg_json x arg = case optarg_json x arg of SOME x => x - | NONE => raise protocol_exn ("Could not find arg: " ^ arg) - -fun 
arg_str x arg = case optarg_str x arg of SOME y => y - | NONE => raise protocol_exn ("Could not find arg: " ^ arg) - -fun arg_int x arg = case optarg_int x arg of SOME y => y - | NONE => raise protocol_exn ("Could not find arg: " ^ arg) - -(* }}} *) - -end - diff --git a/core/json_interface/modules/cosy.ML b/core/json_interface/modules/cosy.ML deleted file mode 100644 index 8bd62088..00000000 --- a/core/json_interface/modules/cosy.ML +++ /dev/null @@ -1,46 +0,0 @@ -signature CMOD_COSY = -sig - include CONTROLLER_MODULE -end - -functor CModCosy(Theory : GRAPHICAL_THEORY) : CMOD_COSY = -struct -open JsonControllerUtil - -(*structure *) - -(* internal state *) -(*val result_table = Unsynchronized.ref *) - - -val ftab = Symtab.empty - -(* test function *) -val ftab = ftab |> register -{ - name = "synth", - doc = "Echoes JSON input", - input = S json_t, - output = S json_t -} (fn x => ( - x -)) - -(* test named args *) -val ftab = ftab |> register -{ - name = "concat", - doc = "Concatenates the given arguments", - input = N ["arg1" -: string_t, "arg2" -: string_t], - output = S string_t -} (fn x => ( - -let - val s1 = arg_str x "arg1" - val s2 = arg_str x "arg2" -in Json.String (s1 ^ s2) -end - -)) - -end \ No newline at end of file diff --git a/core/json_interface/modules/rewrite.ML b/core/json_interface/modules/rewrite.ML deleted file mode 100644 index b7e9c2fa..00000000 --- a/core/json_interface/modules/rewrite.ML +++ /dev/null @@ -1,97 +0,0 @@ -signature CMOD_REWRITE = -sig - include CONTROLLER_MODULE -end - -functor CModRewrite( - structure Theory : GRAPHICAL_THEORY -) : CMOD_REWRITE = -struct - -open JsonControllerUtil - -structure RewriteTable = NameTable(structure Dom = SStrName) -structure Rule = Theory.Rule -structure Graph = Theory.Graph - -val rewrite_table: (((Rule.T * Graph.T) Seq.seq) RewriteTable.T) Synchronized.var = - Synchronized.var "rewrite_table" RewriteTable.empty - -val ftab = Symtab.empty - -val ftab = ftab |> register -{ - name = "find_rewrites", - doc = "Search for rewrites within the given set of vertices and\n"^ - "return a named lazy list.", - input = N ["graph" -: graph_t, "rule" -: rule_t, "vertices" -: list_t vertexname_t], - output = S string_t -} (fn x => ( - -let - val graph = Theory.GraphJSON.input (arg_json x "graph") - val rule = Theory.RuleJSON.input (arg_json x "rule") - val vertices = case arg_json x "vertices" - of Json.Array vs => - map (fn v => case v of Json.String s => V.mk s - | _ => raise user_exn "expected vertex name") vs - | _ => raise user_exn "expected array of vertex names" - val stack = Synchronized.change_result rewrite_table (RewriteTable.store ( - Theory.Rewriter.find_rewrites_in_subgraph rule graph (V.NSet.of_list vertices) - )) -in Json.String (SStrName.dest stack) -end - -)) - -val ftab = ftab |> register -{ - name = "pull_rewrite", - doc = "Pull a rewrite from the given stack. 
Return null if\n"^ - "stack is empty or doesn't exist.", - input = N ["stack" -: string_t], - output = S json_t -} (fn x => ( - -let - val stack = SStrName.mk (arg_str x "stack") - val nextOpt = Synchronized.change_result rewrite_table (fn tab => ( - case RewriteTable.get_opt tab stack - of SOME rws => ( - case Seq.pull rws - of SOME ((rule, graph), rws') => - (SOME (rule, graph), tab |> RewriteTable.set (stack, rws')) - | NONE => - (NONE, tab |> RewriteTable.delete stack) - ) - | NONE => (NONE, tab) - )) -in case nextOpt - of SOME (rule, graph) => - Json.mk_object [ - ("graph", Theory.GraphJSON.output graph), - ("rule", Theory.RuleJSON.output rule) - ] - | NONE => Json.Null -end - -)) - -val ftab = ftab |> register -{ - name = "delete_rewrite_stack", - doc = "Delete a rewrite stack.", - input = N ["stack" -: string_t], - output = S string_t -} (fn x => ( - -let - val stack = SStrName.mk (arg_str x "stack") - val _ = Synchronized.change rewrite_table (RewriteTable.delete stack) -in Json.String "OK" -end - -)) - - -end diff --git a/core/json_interface/modules/simplify.ML b/core/json_interface/modules/simplify.ML deleted file mode 100644 index e03632f9..00000000 --- a/core/json_interface/modules/simplify.ML +++ /dev/null @@ -1,121 +0,0 @@ -signature CMOD_SIMPLIFY = -sig - include CONTROLLER_MODULE - structure Theory : GRAPHICAL_THEORY - type dstep = (R.name * Theory.Rule.T) * Theory.Graph.T - type simproc = Theory.Graph.T -> dstep Seq.seq - val register_simproc : string * simproc -> unit -end - -functor CModSimplify( - structure Theory : GRAPHICAL_THEORY -) : CMOD_SIMPLIFY = -struct - -open JsonControllerUtil - -structure Theory = Theory - -structure SimpTable = NameTable(structure Dom = SStrName) -structure Rule = Theory.Rule -structure Graph = Theory.Graph - -type dstep = (R.name * Theory.Rule.T) * Theory.Graph.T -type simproc = Theory.Graph.T -> dstep Seq.seq - -val simproc_table: (simproc SimpTable.T) Synchronized.var = - Synchronized.var "simproc_table" SimpTable.empty - -val simp_table: ((dstep Seq.seq) SimpTable.T) Synchronized.var = - Synchronized.var "simproc_table" SimpTable.empty - -fun register_simproc (name, simproc) = - Synchronized.change simproc_table (SimpTable.set (SStrName.mk name, simproc)) - -val ftab = Symtab.empty - -val ftab = ftab |> register -{ - name = "simplify", - doc = "Simplifies with the given simproc. Returns a stack name\n"^ - "where steps can be pulled.", - input = N ["graph" -: graph_t, "simproc" -: string_t], - output = S string_t -} (fn x => ( - -let - val graph = Theory.GraphJSON.input (arg_json x "graph") - val simproc = SimpTable.get (Synchronized.value simproc_table) (SStrName.mk (arg_str x "simproc")) - val stack = Synchronized.change_result simp_table (SimpTable.store (simproc graph)) -in Json.String (SStrName.dest stack) -end - -)) - -val ftab = ftab |> register -{ - name = "list", - doc = "Lists all of the registered simprocs.", - input = N [], - output = S (list_t string_t) -} (fn x => ( - -let - val simprocs = SimpTable.get_dom_set (Synchronized.value simproc_table) -in Json.Array (map (Json.String o SStrName.dest) (SStrName.NSet.list_of simprocs)) -end - -)) - -val ftab = ftab |> register -{ - name = "pull_next_step", - doc = "Pull a rewrite from the given stack. 
Return null if\n"^ - "stack is empty or doesn't exist.", - input = N ["stack" -: string_t], - output = S json_t -} (fn x => ( - -let - val stack = SStrName.mk (arg_str x "stack") - val nextOpt = Synchronized.change_result simp_table (fn tab => ( - case SimpTable.get_opt tab stack - of SOME steps => ( - case Seq.pull steps - of SOME (step, steps') => - (SOME step, tab |> SimpTable.set (stack, steps')) - | NONE => - (NONE, tab |> SimpTable.delete stack) - ) - | NONE => (NONE, tab) - )) -in case nextOpt - of SOME ((rname, rule), graph) => - Json.mk_object [ - ("rule_name", Json.String (R.dest rname)), - ("rule", Theory.RuleJSON.output rule), - ("graph", Theory.GraphJSON.output graph) - ] - | NONE => Json.Null -end - -)) - -val ftab = ftab |> register -{ - name = "delete_stack", - doc = "Delete a stack of simplification steps.", - input = N ["stack" -: string_t], - output = S string_t -} (fn x => ( - -let - val stack = SStrName.mk (arg_str x "stack") - val _ = Synchronized.change simp_table (SimpTable.delete stack) -in Json.String "OK" -end - -)) - - -end diff --git a/core/json_interface/modules/test.ML b/core/json_interface/modules/test.ML deleted file mode 100644 index 20910e86..00000000 --- a/core/json_interface/modules/test.ML +++ /dev/null @@ -1,87 +0,0 @@ -signature CMOD_TEST = -sig - include CONTROLLER_MODULE -end - -functor CModTest( - structure Theory : GRAPHICAL_THEORY - structure GraphComponentDataIO : GRAPH_COMPONENT_DATA_IO - sharing type Theory.Graph.nvdata = GraphComponentDataIO.nvdata - sharing type Theory.Graph.edata = GraphComponentDataIO.edata -) : CMOD_TEST = -struct - -open JsonControllerUtil - -structure IO = JsonObjectAnnotatedGraphicalTheoryIO( - structure Theory = Theory - structure GraphComponentDataIO = GraphComponentDataIO) - - -val ftab = Symtab.empty - -(* test function *) -val ftab = ftab |> register -{ - name = "echo", - doc = "Echoes JSON input", - input = S json_t, - output = S json_t -} (fn x => ( - x -)) - -(* test graph parsing *) -val ftab = ftab |> register -{ - name = "echo_graph", - doc = "Echoes graph as JSON", - input = S graph_t, - output = S graph_t -} (fn x => ( - -let - val graph = IO.InputGraphJSON.input x - handle IO.InputGraphJSON.bad_input_exp (error , item) => - raise user_exn ("Error parsing graph/" ^ item ^" : " ^ error) - val json = IO.OutputGraphJSON.output graph -in - json -end - -)) - -(* test named args *) -val ftab = ftab |> register -{ - name = "concat", - doc = "Concatenates the given arguments", - input = N ["arg1" -: string_t, "arg2" -: string_t], - output = S string_t -} (fn x => ( - -let - val s1 = arg_str x "arg1" - val s2 = arg_str x "arg2" -in Json.String (s1 ^ s2) -end - -)) - -(* for testing job cancellation *) -val ftab = ftab |> register -{ - name = "diverge", - doc = "Run until cancelled", - input = N [], - output = S string_t -} (fn x => ( - -let - fun loop () = loop () -in (loop (); Json.Null) -end - -)) - -end diff --git a/core/json_interface/protocol.ML b/core/json_interface/protocol.ML deleted file mode 100644 index cf5f2fe7..00000000 --- a/core/json_interface/protocol.ML +++ /dev/null @@ -1,210 +0,0 @@ -signature JSON_CONTROLLER_PROTOCOL = -sig - val run_in_textstreams : TextIO.instream * TextIO.outstream -> unit - val run_in_stdio : unit -> unit - val parallel_run_in_textstreams : TextIO.instream * TextIO.outstream -> unit - val parallel_run_in_stdio : unit -> unit -end - -structure JsonControllerProtocol = -struct - -open JsonControllerUtil - -structure JobTable = NameTable(structure Dom = IntName) - -structure 
Log : LOG = Log(val init_level = 0) - - -fun json_error rid code msg = - Json.mk_object [ - ("request_id", Json.Int rid), - ("success", Json.Bool false), - ("output", Json.mk_object[("message", Json.String msg), ("code", Json.Int code)]) - ] - -fun help x = -let - val controller = arg_str x "controller" - val module = arg_str x "module" - val opt_function = optarg_str x "function" - val mtab = case Symtab.lookup JsonControllerRegistry.ctab controller of SOME x => x - | NONE => raise user_exn ("Controller not found: " ^ controller) - val modl = case Symtab.lookup mtab module of SOME x => x - | NONE => raise user_exn ("Module not found: " ^ module) -in - case opt_function - of NONE => Json.String (snd modl) - | SOME function => ( - let - val fd = snd (case Symtab.lookup (fst modl) function of SOME x => x - | NONE => raise user_exn ("Function not found: " ^ module ^ "." ^ function)) - in Json.String ( - module ^ "." ^ #name fd ^ ": " ^ - string_of_pargs (#input fd) ^ " -> " ^ - string_of_pargs (#output fd) ^ "\n " ^ - #doc fd - ) - end) -end - -fun version _ = Json.String "2.1" - -fun dispatch systemf rid json = let - val _ = Log.log 2 "(core) dispatch" - val controller = arg_str json "controller" - val module = arg_str json "module" - val function = arg_str json "function" - val _ = Log.log 2 ("(core) got dispatch " ^ controller ^ "::" ^ module ^ "::" ^ function) - val json_input = case optarg_json json "input" of SOME x => x | NONE => Json.mk_object [] - val json_output = - Json.mk_object [ - ("request_id", Json.Int rid), - ("success", Json.Bool true), - ("output", if controller = "!!" then - (case module of "system" => systemf function json_input - | _ => raise user_exn "Module for !! controller must be 'system'.") - else JsonControllerRegistry.dispatch ((controller,module,function),json_input)) - ] - handle user_exn msg => json_error rid 0 msg - | protocol_exn msg => json_error rid ~1 msg - val _ = Log.log 2 "(core) dispatch worker finished" -in - json_output -end - - - -fun run_in_textstreams (instream, outstream) = -let - (* system function for dispatch *) - fun system "help" x = help x - | system "version" x = version x - | system _ _ = raise user_exn ("Unrecognized system function. 
(options: help, version)") - fun protocol_loop src = - let - val (json_list, src') = RawSource.get src - val json = case json_list of [x] => x | _ => - raise protocol_exn "Expected: exactly one JSON object" - val rid = arg_int json "request_id" - val json_output = dispatch system rid json - in (TextIO.output (outstream, Json.encode json_output); - TextIO.flushOut outstream; - protocol_loop src') - end -in protocol_loop (Json.json_source instream) -end -handle protocol_exn msg => - (TextIO.output (outstream, - Json.encode (Json.mk_object [ - ("request_id", Json.Int ~1), - ("success", Json.Bool false), - ("output", Json.mk_object [("message", Json.String msg), ("code", Json.Int ~1)]) - ])); - TextIO.flushOut outstream) - - -fun parallel_run_in_textstreams (instream, outstream) = -let - val output_buffer: (Json.json list) Unsynchronized.ref = Unsynchronized.ref [] - val output_lock = Mutex.mutex () - val has_output = ConditionVar.conditionVar () - val job_table: ((Json.json Future.future) JobTable.T) Synchronized.var = - Synchronized.var "job_table" JobTable.empty - - (* system function for dispatch *) - fun system "help" x = help x - | system "kill" x = - let - val job = arg_int x "job" - in - (case JobTable.get_opt (Synchronized.value job_table) job - of SOME f => Future.cancel f - | NONE => raise user_exn ("No such job: " ^ Int.toString job)); - Json.mk_object [ - ("message", Json.String ("Job " ^ Int.toString job ^ " killed.")), - ("job", Json.Int job)] - end - | system "version" x = version x - | system _ _ = raise user_exn ("Unrecognized system function. (options: help, kill, version)") - - fun push_output json = ( - Log.log 2 "(core) push_output: waiting for lock"; - Mutex.lock output_lock; - Log.log 2 "(core) push_output: obtained lock"; - output_buffer := (json :: !output_buffer); - ConditionVar.broadcast has_output; - Mutex.unlock output_lock; - Log.log 2 "(core) push_output: released lock" - ) - fun flush_all buf = fold_rev (fn json => fn () => TextIO.output (outstream, Json.encode json)) buf () - fun output_loop () = ( - Log.log 2 "(core) waiting for lock on output loop"; - Mutex.lock output_lock; - Log.log 2 "(core) obtained lock on output loop"; - if List.null (!output_buffer) then ConditionVar.wait (has_output, output_lock) else (); - if List.null (!output_buffer) then () else Log.log 2 "writing output"; - flush_all (!output_buffer); - output_buffer := []; - Mutex.unlock output_lock; - Log.log 2 "(core) released lock on output loop"; - TextIO.flushOut outstream; - output_loop () - ) - - fun input_loop src = - let - val _ = Log.log 2 "(core) waiting for request on input loop" - val (json_list, src') = RawSource.get src - val _ = Log.log 2 "(core) got request" - val json = case json_list of [x] => x | _ => - raise protocol_exn "Expected: exactly one JSON object" - val rid = arg_int json "request_id" - val _ = Log.log 2 "(core) preparing to update job table" - val _ = Synchronized.change job_table (fn tab => ( - Log.log 2 "(core) updating job table"; - if IntName.NSet.contains (JobTable.get_dom_set tab) rid - then (push_output (json_error rid 0 "Duplicate request ID"); tab) - else - let - val _ = Log.log 2 "(core) forking new worker" - val worker = Future.fork (fn () => (Log.log 2 "(core) worker forked"; dispatch system rid json)) - in Thread.fork (fn () => ( - let - val json = PolyML.exception_trace (fn () => ( - Future.join worker - handle Thread.Interrupt => json_error rid 1 "Job interrupted" - )) handle _ => json_error rid 2 "Unexpected exception" - in - push_output json; - 
Synchronized.change job_table (JobTable.delete rid) - end - ),[]); - JobTable.add (rid,worker) tab - end)) - in input_loop src' - end -in ( - Log.log 2 "(core) forking output loop"; Thread.fork (output_loop,[]); - input_loop (Json.json_source instream) - ) -end -handle protocol_exn msg => - (TextIO.output (outstream, - Json.encode (Json.mk_object [ - ("request_id", Json.Int ~1), - ("success", Json.Bool false), - ("output", Json.mk_object [("message", Json.String msg), ("code", Json.Int ~1)]) - ])); - TextIO.flushOut outstream) - - -fun flushIn () = case TextIO.canInput (TextIO.stdIn, 1) - of SOME _ => (TextIO.input1 TextIO.stdIn;()) - | NONE => () - -fun run_in_stdio () = (run_in_textstreams (TextIO.stdIn,TextIO.stdOut)) -fun parallel_run_in_stdio () = (parallel_run_in_textstreams (TextIO.stdIn,TextIO.stdOut)) - - -end diff --git a/core/json_interface/run.ML b/core/json_interface/run.ML deleted file mode 100644 index e7b5e575..00000000 --- a/core/json_interface/run.ML +++ /dev/null @@ -1,49 +0,0 @@ -(* Main loop, with support for old protocol handlers *) - -fun print_help () = ( - TextIO.print "Usage: core [OPTIONS ...]\n"; - TextIO.print "\n"; - TextIO.print " -v, --version Print the version of the quantomatic core and exit\n"; - TextIO.print " -h, --help Print this help and exit\n"; - (*TextIO.print " --protocol Run the (old) tool communication protocol\n";*) - TextIO.print " --json-protocol Run the (sequential) JSON protocol\n"; - TextIO.print " --par-json-protocol Run the parallel JSON protocol\n"; - TextIO.print " --socket Use localhost:4321 instead of stdio\n" - ) - -fun run () = -let - val argList = CommandLine.arguments(); -in - if List.exists (fn "--version" => true | "-v" => true | _ => false) argList then - TextIO.print "Quantomatic 2.0\n" - else if List.exists (fn "--help" => true | "-h" => true | _ => false) argList then - print_help () - else if List.exists (fn s => s = "--par-json-protocol") argList then - if List.exists (fn s => s = "--socket") argList - then let - val _ = TextIO.print "waiting for connection...\n" - val s = TextSocket.local_connect 4321 - val (ins, outs) = TextSocket.get_io_stream s - val _ = JsonControllerProtocol.parallel_run_in_textstreams (ins, outs) - val _ = TextSocket.close s - in () - end - else (TextIO.print "no socket\n"; JsonControllerProtocol.parallel_run_in_stdio ()) - else if List.exists (fn s => s = "--json-protocol") argList then - if List.exists (fn s => s = "--socket") argList - then let - val s = TextSocket.local_connect 4321 - val (ins, outs) = TextSocket.get_io_stream s - val _ = JsonControllerProtocol.run_in_textstreams (ins, outs) - val _ = TextSocket.close s - in () - end - else JsonControllerProtocol.run_in_stdio () - else print_help (); - (*if List.exists (fn s => s = "--protocol") argList then - ProtocolInterface.run_in_stdio () - else - ConsoleInterface.run_in_stdio ();*) - OS.Process.exit OS.Process.success -end diff --git a/core/lib.thy b/core/lib.thy deleted file mode 100644 index 19fefc1e..00000000 --- a/core/lib.thy +++ /dev/null @@ -1,53 +0,0 @@ -theory lib -imports Pure -begin - -ML_file "lib/log.ML" -ML_file "lib/testing.ML" -ML_file "lib/raw_source.ML" -ML_file "lib/json.ML" -ML_file "lib/text_socket.ML" - -(* Generic Tools for namers, fresh names tables, and collections *) -(* for creating fresh names, has name suc and pred operation, - also nameset with ability to make fresh names. 
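A minimal sketch of what these name structures are used for, restricted to calls that already appear elsewhere in this diff (R.mk/R.dest in the ruleset and simplify modules, V.mk and V.NSet.of_list in modules/rewrite.ML); the string literals below are placeholders:

  val r  = R.mk "some_rule"                        (* build a rule name from a string *)
  val s  = R.dest r                                (* recover the string "some_rule" *)
  val vs = V.NSet.of_list (map V.mk ["v0", "v1"])  (* a finite set of vertex names *)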
*) - -ML_file "lib/names/namer.ML" -ML_file "lib/names/namers.ML" (* instances of namer, StrName, etc *) - -ML_file "lib/names/basic_nameset.ML" (* basic sets of names *) -ML_file "lib/names/basic_nametab.ML" (* name tables which provide fresh names *) -ML_file "lib/names/basic_renaming.ML" (* renaming, based on tables and sets *) - -(* generic Name structure provies nametables, namesets and collections *) -ML_file "lib/names/basic_name.ML" -ML_file "lib/names/compound_renaming.ML" (* renaming within datatypes *) -ML_file "lib/names/renaming.ML" (* renamings which can be renamed *) - -(* as above, but with renaming *) -ML_file "lib/names/nameset.ML" -ML_file "lib/names/nametab.ML" - -(* names + renaming for them, their tables, sets, and renamings *) -ML_file "lib/names/names.ML" - -(* Binary Relations of finite name sets: good for dependencies *) -ML_file "lib/names/name_map.ML" (* functions/mappings on names *) -ML_file "lib/names/name_inj.ML" (* name iso-morphisms *) -ML_file "lib/names/name_injendo.ML" (* name auto-morphisms (name iso where dom = cod) *) -ML_file "lib/names/name_binrel.ML" (* bin relations on names *) - -(* Defines SStrName, StrName, StrIntName and common maps. *) -ML_file "lib/names/names_common.ML" - -(* testing *) -(*PolyML.Project.use_root "test/ROOT.ML";*) - -ML_file "lib/maps/abstract_map.ML" -ML_file "lib/maps/name_table.ML" -ML_file "lib/maps/name_relation.ML" -ML_file "lib/maps/name_function.ML" -ML_file "lib/maps/name_injection.ML" -ML_file "lib/maps/name_substitution.ML" - -end diff --git a/core/lib/json.ML b/core/lib/json.ML deleted file mode 100644 index bda77afc..00000000 --- a/core/lib/json.ML +++ /dev/null @@ -1,482 +0,0 @@ -(** - * Json: a structure for parsing and writing JSON (JavaScript Object Notation) - * - * The functions in this structure do basic JSON parsing, as well as providing - * accessors and updaters for JSON objects. 
- * - * Shortcomings: - * - Only JSON encoded in UTF-8 is supported (not UTF-16 or UTF-32) - * - Non-ASCII characters are currently not supported - * - If a string is not valid JSON, little information is given about where - * the problem was - * - * See http://www.json.org/ - *) -signature JSON = -sig - (** - * Indicates that invalid JSON was encountered - * - * The string is a short message describing the error - *) - exception parse_exn of string - (** - * The object did not have the requested property - * - * The string is the property name - *) - exception notfound_exn of string - - (** Stores a JSON object *) - type jobj - (** Represents a JSON fragment *) - datatype json = String of string (*< Must ONLY contain ASCII chars (0 to 127) *) - | Int of int - | Real of real - | Bool of bool - | Null (*< The constant "null" *) - | Array of json list - | Object of jobj - - (** An empty JSON Object *) - val empty_obj : jobj - - (** Test whether object is empty **) - val is_empty_obj : jobj -> bool - - (** Make a JSON object, given key/json value pairs *) - val mk_object : (string * json) list -> json - - (** Make a JSON object, given key/string value pairs *) - val mk_record : (string * string) list -> json - - (** - * Get the properties of a JSON object - * - * Arg 1: the JSON object - * Result: the names of the properties on the object - *) - val properties : jobj -> string list - (** - * Fold over an object's properties - * - * Arg 1: the function to fold with - * Arg 2: the JSON object - * Arg 3: the initial accumulator state - * Result: the result of the folding - *) - val fold : (string * json -> 'a -> 'a) -> jobj -> 'a -> 'a - (** - * Set a property on a JSON object - * - * Arg 1: pair of property name and the new value - * Arg 2: the JSON object to update - * Result: the updated JSON object - *) - val update : string * json -> jobj -> jobj - (** - * Remove a property from a JSON object - * - * Arg 1: the property name - * Arg 2: the JSON object to update - * Result: the updated JSON object - *) - val delete : string -> jobj -> jobj - (** - * Lookup a property on a JSON object - * - * Arg 1: the JSON object to look up the property on - * Arg 2: the property name - * Result: SOME of the property value if the object has - * that property, NONE otherwise - *) - val lookup : jobj -> string -> json option - (** - * Get a property on a JSON object - * - * Arg 1: the JSON object to get the property on - * Arg 2: the property name - * Result: the property value - * - * Raises notfound_exn if arg 2 is not a property on arg 1 - *) - val get : jobj -> string -> json - - (** - * Produce a Pretty version of the JSON - * - * WARNING: this will produce invalid JSON if there are any - * strings containing non-ASCII characters. - * - * WARNING: if the argument is not an Array or Object, the - * output will not be valid JSON (it will be a fragment of JSON) - *) - val pretty : json -> Pretty.T - (** - * Produce JSON formatted to be human-readable - * - * WARNING: this will produce invalid JSON if there are any - * strings containing non-ASCII characters. - * - * WARNING: if the argument is not an Array or Object, the - * output will not be valid JSON (it will be a fragment of JSON) - *) - val string_of : json -> string - - (** - * Parse a JSON string - * - * NB: this will accept a fragment of JSON. If the input is - * valid JSON, the result will be an Array or an Object, but - * of_string will happily parse just a string, number or - * constant. 
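 * For example (an illustrative sketch; it mirrors the fragment-parsing quirk
 * that core/io/test/linrat-json-test.ML in this same diff relies on):
 *
 *   Json.of_string "{\"pi\":{\"num\":1,\"denom\":2}}"    (* an Object *)
 *   Json.of_string "\"3/8 \\\\pi + 2/3 \\\\beta\""       (* a bare String fragment *)
 *   Json.of_string "42"                                  (* an Int fragment *)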
- *) - val of_string : string -> json - - (** - * Produce JSON with no extraneous whitespace - * - * WARNING: this will produce invalid JSON if there are any - * strings containing non-ASCII characters. - * - * WARNING: if the argument is not an Array or Object, the - * output will not be valid JSON (it will be a fragment of JSON) - *) - val encode : json -> string - - - (** - * Accept an instream and return a Source.source, which will pull the next JSON - * object/array from the stream - *) - val json_source : TextIO.instream -> (json, (string, unit) RawSource.source) RawSource.source - - (** - * Read in a file containing JSON - *) - val read_file : string -> json - - (** - * Write JSON out to a file - *) - val write_file : string -> json -> unit -end - -structure Json : JSON = -struct - -(* JObj stores the order in which keys are added, so that it can be - preserved when outputting a string. This is a presentation detail, - and should not be used to encode object information. *) - -structure JObj = Table(type key = string * int; - val ord = prod_ord fast_string_ord (K EQUAL)) - -datatype json = String of string | Int of int | Real of real | Bool of bool | Null - | Array of json list - | Object of json JObj.table * int -type jobj = json JObj.table * int - -exception parse_exn of string -exception notfound_exn of string - -(* table accessor functions *) - -fun properties (tab,_) = map fst (JObj.keys tab) -(* fold is at bottom of struct *) -fun update (k, v) (tab,sz) = - ((JObj.update_new ((k,sz), v) tab, sz+1) - handle JObj.DUP (_,i) => (JObj.update ((k,i), v) tab, sz)) -fun delete k (tab,sz) = (JObj.delete (k,~1) tab, sz) -fun lookup (tab,_) k = (case JObj.lookup tab (k,~1) - of SOME v => SOME v | NONE => NONE) -fun get obj k = case lookup obj k of SOME v => v | NONE => raise notfound_exn k - -val empty_obj = (JObj.empty,0) -fun is_empty_obj (tab,_) = JObj.is_empty tab - -fun mk_record xs = Object (fold_rev (fn (k,v) => update (k, String v)) xs empty_obj) -fun mk_object xs = Object (fold_rev (fn (k,v) => update (k, v)) xs empty_obj) - -structure Parser = -struct - datatype indicator = IND of char - val NOOP = () - - - (*val scan_one = Scan.one*) - - (* standard scan repeaters *) - val rep = Scan.repeat - val rep1 = Scan.repeat1 - val opt = Scan.optional - fun opt_noop s = Scan.optional s NOOP - val unless = Scan.unless; - fun eat scan = scan >> K NOOP - - (* other helpers *) - (*val st_implode = implode (* flatten a list of strings to a string *) - val ch_implode = String.implode (* convert a list of chars to a string *)*) - fun pow _ 0 = 1 - | pow x 1 = x - | pow x n = if n < 0 then raise parse_exn "negative exponent encountered" else - (if (n mod 2 = 0) then pow (x*x) (n div 2) - else x * (pow (x*x) (n div 2))) - - - fun st_to_ch st = case String.explode st of [c] => SOME c - | _ => NONE - - (* single-character production helpers *) - fun ch x = Scan.one (fn c => c = x) - - fun ch_range (x,y) = Scan.one - (fn c => (case st_to_ch c - of SOME c' => the (st_to_ch x) <= c' andalso c' <= the (st_to_ch y) - | NONE => Scan.fail ())) - - fun cdec_to_int c = - case st_to_ch c - of SOME c' => (Char.ord c' - 48) - | NONE => Scan.fail () - - fun chex_to_int c = - case st_to_ch c - of SOME c' => if #"a" < c' then Char.ord c' - 87 - else if #"A" < c' then Char.ord c' - 55 - else Char.ord c' - 48 - | NONE => Scan.fail () - - fun dec_int digits = fold (fn c => fn i => cdec_to_int c + (10 * i)) digits 0 - fun hex_int digits = fold (fn c => fn i => chex_to_int c + (16 * i)) digits 0 - - fun mk_float3 
((sgn,num),frac) = (Real.fromInt sgn) * (Real.fromInt num + frac) - fun mk_float4 (((sgn,num),frac),exp) = - if exp < 0 - then mk_float3 ((sgn,num),frac) / Real.fromInt (pow 10 (~exp)) - else mk_float3 ((sgn,num),frac) * Real.fromInt (pow 10 exp) - - fun hex_to_asc (((a,b),c),d) = let - val ord = hex_int [a,b,c,d] - in - if ord < 128 - then String.str (Char.chr ord) - else raise parse_exn "non-ascii characters not supported in strings (yet!)" - end - - - val p_printable = ch "\t" || ch "\n" || ch "\r" || ch_range (" ", "~") - - (* safe characters for double-quoted string *) - val p_safe = ch "\t" || ch "\n" || ch "\r" || - ch_range (" ", "!") || ch_range ("#", "[") || - ch_range ("]", "~") - - (* break characters *) - val p_line_feed = ch "\n" - val p_carriage_return = ch "\r" >> K "\n" - val p_break = (p_carriage_return |-- p_line_feed) || - p_carriage_return || - p_line_feed - - (* whitespace characters *) - val p_space = ch " " - val p_tab = ch "\t" - val p_white = p_break || p_space || p_tab - - (* misc characters *) - val p_dec_digit = ch_range ("0", "9") - val p_dec_nonzero = ch_range ("1", "9") - val p_hex_digit = p_dec_digit || ch_range ("A", "F") || ch_range ("a", "f") - - fun s_wrap scanner = (rep p_white) |-- scanner --| (rep p_white) - fun s_before scanner = (rep p_white) |-- scanner - - (* indicator character, wrapped in whitespace *) - val ind = s_wrap o ch - val term_ind = s_before o ch - - (* escape characters *) - val p_esc_double_quote = ch "\"" - val p_esc_backslash = ch "\\" - val p_esc_slash = ch "/" - val p_esc_backspace = ch "b" >> K "\b" - val p_esc_form_feed = ch "f" >> K "\f" - val p_esc_line_feed = ch "n" >> K "\n" - val p_esc_carriage_return = ch "r" >> K "\r" - val p_esc_tab = ch "t" >> K "\t" - val p_esc_unicode = ch "u" |-- ((p_hex_digit -- p_hex_digit -- p_hex_digit -- p_hex_digit) - >> hex_to_asc) - - val p_esc_char = p_esc_double_quote || p_esc_backslash || p_esc_slash || - p_esc_backspace || p_esc_form_feed || p_esc_line_feed || - p_esc_carriage_return || p_esc_tab || p_esc_unicode - - - (* double-quoted strings *) - val p_double_quote_char = p_safe || (ch "\\" |-- p_esc_char) - val p_string = (ch "\"" |-- rep p_double_quote_char --| ch "\"") >> implode - - (* numbers *) - val p_exp_symb = (ch "e" || ch "E") |-- - (opt (ch "+" >> K 1 || ch "-" >> K ~1) 1) - val p_sign = opt (ch "-" >> K ~1) 1 - val p_int = ( - (p_dec_nonzero -- rep1 p_dec_digit) >> op:: || - p_dec_digit >> single - ) >> dec_int - val p_exp = p_exp_symb -- rep1 p_dec_digit - >> (fn (sgn,digits) => sgn * dec_int digits) - val p_frac = ch "." 
|-- rep1 p_dec_digit - >> (fn digits => - Real.fromInt (dec_int digits) / - Real.fromInt (pow 10 (length digits))) - - (* a number is interpreted as an integer, unless an exponent or decimal is present *) - val p_number = (p_sign -- p_int -- opt p_frac 0.0 -- p_exp >> (Real o mk_float4)) || - (p_sign -- p_int -- p_frac >> (Real o mk_float3)) || - (p_sign -- p_int >> (Int o (fn (sign,i) => sign * i))) - - val p_true = (ch "t" |-- ch "r" |-- ch "u" |-- ch "e") >> K (Bool true) - val p_false = (ch "f" |-- ch "a" |-- ch "l" |-- ch "s" |-- ch "e") >> K (Bool false) - val p_null = (ch "n" |-- ch "u" |-- ch "l" |-- ch "l") >> K Null - - fun p_value x = let - val p_pair = p_string --| ind ":" -- p_value - val p_members = p_pair ::: rep (ind "," |-- p_pair) - val p_object = (ind "{" |-- opt p_members [] --| ind "}") - >> (fn pairs => Object (fold update pairs empty_obj)) - val p_elements = p_value ::: rep (ind "," |-- p_value) - val p_array = (ind "[" |-- opt p_elements [] --| ind "]") - in x |> ( - p_true || p_false || p_null || p_number || (p_string >> String) || - p_object || (p_array >> Array) - ) - end - - (* only accept object or array. do not accept trailing whitespace *) - fun p_top_level x = let - val p_pair = p_string --| ind ":" -- p_value - val p_members = p_pair ::: rep (ind "," |-- p_pair) - val p_object = (ind "{" |-- opt p_members [] --| term_ind "}") - >> (fn pairs => Object (fold update pairs empty_obj)) - val p_elements = p_value ::: rep (ind "," |-- p_value) - val p_array = (ind "[" |-- opt p_elements [] --| term_ind "]") - in x |> (p_object || (p_array >> Array)) - end - - fun parse' s = case p_value (raw_explode s @ [Symbol.eof]) - of (json, [x]) => if x = Symbol.eof then json else - raise parse_exn ("Expected: eof, got: " ^ x) - | (_, xs) => raise parse_exn ("Expected: eof, got: " ^ implode xs) - fun parse s = (Scan.catch parse' s) handle Fail m => raise parse_exn m -end - - -(** escapes ", \ and \n **) -fun escape s = let - fun trans #"\"" = "\\\"" - | trans #"\\" = "\\\\" - | trans #"\n" = "\\n" - | trans c = String.implode [c] -in String.translate trans s -end - -(*replaces ~ to - *) -fun convert_num s = let - fun trans #"~" = "-" - | trans #"E" = "e" - | trans c = String.implode [c] -in String.translate trans s -end - -(*and pretty_kv d (k,v) [] = [Pretty.block [pretty_q_str k, Pretty.str " : ", - pretty_d (d+1) v]] - | pretty_kv d (k,v) pr = (Pretty.block [pretty_q_str k, Pretty.str " : ", - pretty_d (d+1) v, Pretty.str ","]) :: pr*) - -fun ordered_kv (tab,_) = - order_list (map (fn ((k,i), v) => (i, (k,v))) (JObj.dest tab)) - -fun block_comma_list x xs = Pretty.block - (x :: Pretty.fbrk :: - flat (Library.separate [Pretty.str ",", Pretty.fbrk] (map single xs))); - - -fun pretty_q_str x = Pretty.str ("\"" ^ escape x ^ "\"") -and pretty_block_obj d k obj = - Pretty.chunks [ - block_comma_list - (Pretty.block (if k = "" - then [Pretty.str "{"] - else [pretty_q_str k, Pretty.str " : {"])) - (map (pretty_kv (d+1)) (ordered_kv obj)), - Pretty.str "}"] -and pretty_kv d (k,Object obj) = - if d < 2 - then pretty_block_obj d k obj - else Pretty.block [pretty_q_str k, Pretty.str " : ", pretty_d d (Object obj)] - | pretty_kv d (k,v) = - Pretty.block [pretty_q_str k, Pretty.str " : ", pretty_d d v] -and pretty_d _ (String x) = pretty_q_str x - | pretty_d _ (Int x) = Pretty.str (convert_num (Int.toString x)) - | pretty_d _ (Real x) = Pretty.str (convert_num (Real.toString x)) - | pretty_d _ (Bool x) = Pretty.str (Bool.toString x) - | pretty_d _ (Null) = Pretty.str "null" - | pretty_d 
d (Array xs) = Pretty.list "[" "]" (map (pretty_d (d+1)) xs) - | pretty_d d (Object obj) = - if d = 0 - then pretty_block_obj 0 "" obj - else Pretty.list "{" "}" (map (pretty_kv (d+1)) (ordered_kv obj)) - -val pretty = pretty_d 0 - -val string_of = Pretty.string_of o pretty -val of_string = Parser.parse - -(*fun json_source instream = - Source.source Symbol.stopper - (Parser.p_top_level >> single) NONE - (Source.tty' (fn c => c = "]" orelse c = "}") instream) - |> Source.set_prompt ""*) - -fun json_source instream = - RawSource.source Symbol.stopper - (Parser.p_top_level >> single) NONE - (RawSource.raw_stream (fn c => c = "}" orelse c = "]") instream) - |> RawSource.set_prompt "" - -fun read_file file = let - val get_one_json = RawSource.get_single o json_source - fun catch f s = (Scan.catch f s) - handle Fail m => raise parse_exn (m^" in file "^file) - val ins = TextIO.openIn file - val (json_out, _) = the (catch get_one_json ins) - val _ = TextIO.closeIn ins -in json_out -end - -fun write_file file json = let - val outs = TextIO.openOut file - val _ = TextIO.outputSubstr (outs, Substring.full (string_of json)) - val _ = TextIO.closeOut outs -in () -end - -fun enc_kv (k, v) = (encode (String k)) ^ ":" ^ (encode v) -and encode (String x) = "\"" ^ escape x ^ "\"" - | encode (Int x) = convert_num (Int.toString x) - | encode (Bool x) = Bool.toString x - | encode (Real x) = convert_num (Real.toString x) - | encode (Array xs) = "[" ^ implode (Library.separate "," (map encode xs)) ^ "]" - | encode (Null) = "null" - | encode (Object obj) = - "{" ^ implode (Library.separate "," (map enc_kv (ordered_kv obj))) ^ "}" - - -(* we put this here to avoid overwriting the list fold for the rest of the - * struct - *) -fun fold f = (JObj.fold (fn ((k,_),v) => f (k,v))) o fst - -end diff --git a/core/lib/log.ML b/core/lib/log.ML deleted file mode 100644 index fa062d72..00000000 --- a/core/lib/log.ML +++ /dev/null @@ -1,57 +0,0 @@ -signature LOG = -sig - (* current level of logging *) - val level_ref : int Unsynchronized.ref; - val stream_ref : TextIO.outstream Unsynchronized.ref; - - val set_log_to_stdout : unit -> unit; - val set_log_to_stderr : unit -> unit; - - val writeln_to_stream : TextIO.outstream -> string -> unit; - val writeln_to_stdout : string -> unit; - val writeln_to_stderr : string -> unit; - - (* deferred logging; the function will never be called - * if the log level is not high enough *) - val logf : int -> ('a -> string) -> 'a -> unit; - - (* log if log-level is above the int. *) - val log : int -> string -> unit; - (* forced to stdout *) - val log_stdout : int -> string -> unit; - (* forced to stderr *) - val log_stderr : int -> string -> unit; - - val print : 'a -> unit; -end; - -(* handy little library for writing logs specific to a bit of code. 
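The functor defined just below is instantiated with an initial log level and each message is tagged with its own level; a minimal sketch of that usage (the structure name MyLog and the message strings are placeholders; json_interface/protocol.ML in this same diff instantiates it the same way with init_level = 0):

  structure MyLog = Log(val init_level = 2)
  val () = MyLog.log 2 "loaded ruleset"       (* emitted: 2 <= init_level *)
  val () = MyLog.log 3 "verbose detail"       (* suppressed at this level *)
  val () = MyLog.set_log_to_stdout ()         (* default stream is TextIO.stdErr *)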
*) -functor Log(val init_level : int) -: LOG -= struct - val level_ref = Unsynchronized.ref init_level; - val stream_ref = Unsynchronized.ref TextIO.stdErr; - - fun writeln_to_stream stream s = - (TextIO.output (stream, s ^ "\n"); TextIO.flushOut stream); - fun writeln_to_stdout s = writeln_to_stream TextIO.stdOut s; - fun writeln_to_stderr s = writeln_to_stream TextIO.stdErr s; - - fun set_log_to_stdout () = (stream_ref := TextIO.stdOut); - fun set_log_to_stderr () = (stream_ref := TextIO.stdErr); - - fun logf level f x = - if level <= (!level_ref) then writeln_to_stream (!stream_ref) (f x) - else (); - - fun log level s = - if level <= (!level_ref) then writeln_to_stream (!stream_ref) s - else (); - fun log_stdout level s = - if level <= (!level_ref) then writeln_to_stdout s else (); - fun log_stderr level s = - if level <= (!level_ref) then writeln_to_stderr s else (); - - fun print x = (PolyML.print x; ()); -end; - diff --git a/core/lib/maps/abstract_map.ML b/core/lib/maps/abstract_map.ML deleted file mode 100644 index fdf6d8ea..00000000 --- a/core/lib/maps/abstract_map.ML +++ /dev/null @@ -1,111 +0,0 @@ -(* - * An abstract structure supporting direct and inverse images, along with fast inversion - * and composition. Other functors should work with amap's with more efficient/useful - * representations. - *) - -signature ABS_MAP = -sig - - (* the graph of a mapping *) - type ('a,'b) amapgraph - val mk_graph : (unit -> 'a list) * ('a -> 'b list) -> ('a,'b) amapgraph - - (* store a representation and a means of converting to/from graphs *) - datatype ('a,'b,'c) amap = MAP of 'c * ('c -> ('a,'b) amapgraph) - val mk : 'c * ('c -> ('a,'b) amapgraph) -> ('a,'b,'c) amap - - val update_rep : ('c -> 'c) -> ('a,'b,'c) amap -> ('a,'b,'c) amap - val get_rep : ('a,'b,'c) amap -> 'c - val set_rep : 'c -> ('a,'b,'c) amap -> ('a,'b,'c) amap - - exception no_such_elem_exp of unit - exception multiple_values_exp of unit - exception duplicate_exp of unit - exception bad_coercion_exp of unit - - val graph_of : ('a,'b,'c) amap -> ('a,'b) amapgraph - val fold_graph : ('a * 'b -> 'c -> 'c) -> ('a,'b) amapgraph -> 'c -> 'c - - val compose : ('b,'c,'y) amap * ('a,'b,'x) amap -> ('a,'c,('a,'c) amapgraph) amap - - (* convenience function for composing 3 maps together *) - val compose3 : ('c,'d,'z) amap * ('b,'c,'y) amap * ('a,'b,'x) amap -> ('a,'d,('a,'d) amapgraph) amap - - val fold : ('a * 'b -> 'd -> 'd) -> ('a,'b,'c) amap -> 'd -> 'd - val exists : ('a * 'b -> bool) -> ('a,'b,'c) amap -> bool - val forall : ('a * 'b -> bool) -> ('a,'b,'c) amap -> bool - val find : ('a * 'b -> bool) -> ('a,'b,'c) amap -> ('a * 'b) option - - val pretty : string -> ('a -> Pretty.T) -> ('b -> Pretty.T) -> ('a,'b,'c) amap -> Pretty.T - - -end - - -structure Map :> ABS_MAP = -struct - - exception no_such_elem_exp of unit - exception multiple_values_exp of unit - exception duplicate_exp of unit - exception bad_coercion_exp of unit - - datatype ('a,'b) amapgraph = MAPG of { - dom: unit -> 'a list, di: 'a -> 'b list } - fun mk_graph (dom,di) = MAPG { dom=dom, di=di } - - datatype ('a,'b,'c) amap = MAP of 'c * ('c -> ('a,'b) amapgraph) - fun mk x = MAP x - - fun get_rep (MAP (rep,_)) = rep - fun update_rep f (MAP (rep,gf)) = MAP (f(rep),gf) - fun set_rep rep1 (MAP (_,gf)) = MAP (rep1,gf) - (*val set_rep = update_rep o K*) - - fun graph_of (MAP (rep,gf)) = gf rep - - (* private: used for functions which return abstract maps (with trivial representation) *) - fun of_graph gr = MAP (gr, I) - - fun fold_graph f g base = let - val MAPG { 
dom, di, ... } = g - in fold (fn x => fold (fn y => f (x,y)) (di x)) (dom ()) base - end - - fun fold f = fold_graph f o graph_of - - fun find (f : 'a * 'b -> bool) m = let - exception found_exp of 'a * 'b - in ( - fold (fn (x,y) => fn () => if (f (x,y)) then raise found_exp (x,y) else ()) m (); - NONE - ) handle found_exp (x,y) => SOME (x,y) - end - - fun forall f m = case (find (not o f) m) of SOME _ => false | NONE => true - fun exists f m = case (find f m) of SOME _ => true | NONE => false - - fun compose_graphs (g2, g1) = - let - val MAPG { dom=dom1, di=di1 } = g1 - val MAPG { dom=_, di=di2 } = g2 - in MAPG { dom = dom1, di = maps di2 o di1 } - end - - fun compose (m2,m1) = of_graph (compose_graphs (graph_of m2, graph_of m1)) - fun compose3 (m3,m2,m1) = compose (compose (m3,m2), m1) - - fun pretty desc pretty_key pretty_val mp = - let - val MAPG { dom, di, ... } = graph_of mp - fun entry x ys = Pretty.block [Pretty.str "[", pretty_key x, Pretty.str "]", Pretty.str " -> ", - Pretty.list "[" "]" (map pretty_val ys)] - val entries = map (fn x => entry x (di x)) (dom ()) - in Pretty.chunks [Pretty.block [Pretty.str desc, Pretty.str " {"], - Pretty.block [Pretty.str " ",Pretty.chunks entries], - Pretty.str "}"] - end - -end - diff --git a/core/lib/maps/name_function.ML b/core/lib/maps/name_function.ML deleted file mode 100644 index 1e034aa3..00000000 --- a/core/lib/maps/name_function.ML +++ /dev/null @@ -1,165 +0,0 @@ -signature NAME_FUNCTION = -sig - include NAME_RELATION -end - -functor NameFunction( - structure Dom : NAME - structure Cod : NAME -) : NAME_FUNCTION = -struct - - type dom = Dom.name - type cod = Cod.name - structure DomSet = Dom.NSet - structure CodSet = Cod.NSet - - structure Tab = Table(type key = dom val ord = Dom.name_ord) - structure ITab = Table(type key = cod val ord = Cod.name_ord) - - (* do gets directly on a table, throw exception if not found *) - fun get_from_tab tab k = - case Tab.lookup tab k of SOME v => v - | NONE => raise Map.no_such_elem_exp () - fun get_from_itab tab k = - case ITab.lookup tab k of SOME v => v - | NONE => raise Map.no_such_elem_exp () - - (* the internal representation *) - type rep = { - tab : cod Tab.table, - itab : DomSet.T ITab.table, - dom_set : DomSet.T, - cod_set : CodSet.T - } - - type T = (dom, cod, rep) Map.amap - - (* record accessors *) - local - fun update_rep_tab f (r:rep) = {tab=f(#tab r),itab= #itab r,dom_set= #dom_set r,cod_set= #cod_set r} - fun update_rep_itab f (r:rep) = {tab= #tab r,itab=f(#itab r),dom_set= #dom_set r,cod_set= #cod_set r} - fun update_rep_dom_set f (r:rep) = {tab= #tab r,itab= #itab r,dom_set=f(#dom_set r),cod_set= #cod_set r} - fun update_rep_cod_set f (r:rep) = {tab= #tab r,itab= #itab r,dom_set= #dom_set r,cod_set=f(#cod_set r)} - in - fun update_tab f (mp:T) = Map.update_rep (update_rep_tab f) mp - fun update_itab f (mp:T) = Map.update_rep (update_rep_itab f) mp - fun update_dom_set f (mp:T) = Map.update_rep (update_rep_dom_set f) mp - fun update_cod_set f (mp:T) = Map.update_rep (update_rep_cod_set f) mp - - fun get_tab (mp:T) = #tab (Map.get_rep mp) - fun get_itab (mp:T) = #itab (Map.get_rep mp) - fun get_dom_set (mp:T) = #dom_set (Map.get_rep mp) - fun get_cod_set (mp:T) = #cod_set (Map.get_rep mp) - - fun set_tab x = update_tab (K x) - fun set_itab x = update_itab (K x) - fun set_dom_set x = update_dom_set (K x) - fun set_cod_set x = update_cod_set (K x) - end - - fun graph_of_rep (r:rep) = let - fun di x = case Tab.lookup (#tab r) x of SOME y => [y] | NONE => [] - fun dom () = DomSet.list_of 
(#dom_set r) - in Map.mk_graph (dom, di) - end - - val is_empty = DomSet.is_empty o get_dom_set - val dom_size = DomSet.cardinality o get_dom_set - val cod_size = CodSet.cardinality o get_cod_set - val dom_contains = DomSet.contains o get_dom_set - val cod_contains = CodSet.contains o get_cod_set - - val get = get_from_tab o get_tab - val get_opt = Tab.lookup o get_tab - fun img mp x = case get_opt mp x of SOME y => CodSet.single y | NONE => CodSet.empty - fun img_of_set mp xs = DomSet.fold (CodSet.union_merge o (img mp)) xs CodSet.empty - - fun inv_get_opt mp x = - case ITab.lookup (get_itab mp) x - of SOME ys => (case DomSet.tryget_singleton ys of SOME y => SOME y - | NONE => raise Map.multiple_values_exp ()) - | NONE => NONE - - fun inv_get mp x = case inv_get_opt mp x of SOME y => y - | NONE => raise Map.no_such_elem_exp () - - fun inv_img mp y = case ITab.lookup (get_itab mp) y of SOME xs => xs | NONE => DomSet.empty - fun inv_img_of_set mp ys = CodSet.fold (DomSet.union_merge o (inv_img mp)) ys DomSet.empty - - fun is_mapped mp (x,y) = - case get_opt mp x of SOME y' => Cod.name_eq(y,y') | NONE => false - - - fun set (x,y) mp = let - (* if x had previously occurred in an inverse image, clear it *) - val mp' = - case get_opt mp x - of SOME oldy => let - val inv = DomSet.delete x (inv_img mp oldy) - in if DomSet.is_empty inv - then mp |> update_cod_set (CodSet.delete oldy) - |> update_itab (ITab.delete oldy) - else mp |> update_itab (ITab.update (oldy,inv)) - end - | NONE => mp - in - mp' |> update_dom_set (DomSet.add x) - |> update_cod_set (CodSet.add y) - |> update_tab (Tab.update (x,y)) - |> update_itab (ITab.update (y,DomSet.add x (inv_img mp' y))) - end - - fun add (x,y) mp = - if (DomSet.contains (get_dom_set mp) x) then raise Map.duplicate_exp () - else set (x,y) mp - - local - fun delete_pair (x,y) mp = let - val inv = DomSet.delete x (inv_img mp y) - val mp' = if DomSet.is_empty inv - then mp |> update_cod_set (CodSet.delete y) - |> update_itab (ITab.delete y) - else mp |> update_itab (ITab.update (y,inv)) - in - mp' |> update_dom_set (DomSet.delete x) - |> update_tab (Tab.delete x) - end - in - fun delete x mp = case get_opt mp x of NONE => mp | SOME y => delete_pair (x,y) mp - fun inv_delete y mp = case inv_get_opt mp y of NONE => mp | SOME x => delete_pair (x,y) mp - end - - fun unmap (x,y) mp = case get_opt mp x - of SOME y1 => if Cod.name_eq (y,y1) then delete x mp else mp - | NONE => mp - - val empty_rep : rep = { tab = Tab.empty, itab = ITab.empty, - dom_set = DomSet.empty, cod_set = CodSet.empty } - val empty = Map.MAP (empty_rep, graph_of_rep) - - fun of_graph gr = Map.fold_graph add gr empty - - val fold = Map.fold - val forall = Map.forall - val exists = Map.exists - val find = Map.find - - fun coerce abs_map = fold add abs_map empty - handle _ => raise Map.bad_coercion_exp () - - fun compose ms = coerce (Map.compose ms) - fun compose3 ms = coerce (Map.compose3 ms) - - val pretty = Map.pretty "NameFunction" Dom.pretty_name Cod.pretty_name - - structure Sharing = - struct - type dom = dom - type cod = cod - type rep = rep - structure DomSet = DomSet.Sharing - structure CodSet = CodSet.Sharing - end -end - diff --git a/core/lib/maps/name_injection.ML b/core/lib/maps/name_injection.ML deleted file mode 100644 index d8a48de3..00000000 --- a/core/lib/maps/name_injection.ML +++ /dev/null @@ -1,207 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: isaplib/maps/name_relation.ML - Author: Aleks Kissinger - aleks0@gmail.com - 
Initially written 31 Mar 2013 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - An injective function with fast inverse-lookup and map inversion. - Aka. a bidirectional map, or BidiMap. - - NOTE: 'set (x,y)' will overwrite a value if x is already in the - domain, and raise duplicate_exp if mapping x to y would break - injectivity. 'add (x,y)' is the preferred way to add elements, - as it will throw duplicate_exp if x is in the domain *or* if y - is in the codomain. -*) - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -signature NAME_INJECTION = -sig - include NAME_RELATION - - val inverse_of : (cod,dom,'a) Map.amap -> T - - val restrict_dom : DomSet.T -> T -> T - val filter_dom : (dom -> bool) -> T -> T - val is_monotone : T -> bool - - (* extend the injection to the given set, using the given function to - provide codomain values *) - (*val extend_to : (dom -> cod) -> DomSet.T -> T -> T*) -end - -functor NameInjection( - structure Dom : NAME - structure Cod : NAME -) : NAME_INJECTION = -struct - - type dom = Dom.name - type cod = Cod.name - structure DomSet = Dom.NSet - structure CodSet = Cod.NSet - - structure Tab = Table(type key = dom val ord = Dom.name_ord) - structure ITab = Table(type key = cod val ord = Cod.name_ord) - - (* do gets directly on a table, throw exception if not found *) - fun get_from_tab tab k = - case Tab.lookup tab k of SOME v => v - | NONE => raise Map.no_such_elem_exp () - fun get_from_itab tab k = - case ITab.lookup tab k of SOME v => v - | NONE => raise Map.no_such_elem_exp () - - (* the internal representation *) - type rep = { - tab : cod Tab.table, - itab : dom ITab.table, - dom_set : DomSet.T, - cod_set : CodSet.T - } - - type T = (dom, cod, rep) Map.amap - - (* record accessors *) - local - fun update_rep_tab f (r:rep) = {tab=f(#tab r),itab= #itab r,dom_set= #dom_set r,cod_set= #cod_set r} - fun update_rep_itab f (r:rep) = {tab= #tab r,itab=f(#itab r),dom_set= #dom_set r,cod_set= #cod_set r} - fun update_rep_dom_set f (r:rep) = {tab= #tab r,itab= #itab r,dom_set=f(#dom_set r),cod_set= #cod_set r} - fun update_rep_cod_set f (r:rep) = {tab= #tab r,itab= #itab r,dom_set= #dom_set r,cod_set=f(#cod_set r)} - in - fun update_tab f (mp:T) = Map.update_rep (update_rep_tab f) mp - fun update_itab f (mp:T) = Map.update_rep (update_rep_itab f) mp - fun update_dom_set f (mp:T) = Map.update_rep (update_rep_dom_set f) mp - fun update_cod_set f (mp:T) = Map.update_rep (update_rep_cod_set f) mp - - fun get_tab (mp:T) = #tab (Map.get_rep mp) - fun get_itab (mp:T) = #itab (Map.get_rep mp) - fun get_dom_set (mp:T) = #dom_set (Map.get_rep mp) - fun get_cod_set (mp:T) = #cod_set (Map.get_rep mp) - - fun set_tab x = update_tab (K x) - fun set_itab x = update_itab (K x) - fun set_dom_set x = update_dom_set (K x) - fun set_cod_set x = update_cod_set (K x) - end - - fun graph_of_rep (r:rep) = let - fun di x = case Tab.lookup (#tab r) x of SOME y => [y] | NONE => [] - fun dom () = DomSet.list_of (#dom_set r) - in Map.mk_graph (dom, di) - end - - val is_empty = DomSet.is_empty o get_dom_set - val dom_size = DomSet.cardinality o get_dom_set - val cod_size = CodSet.cardinality o get_cod_set (* should always = dom_size *) - val dom_contains = DomSet.contains o get_dom_set - val cod_contains = CodSet.contains o get_cod_set - - val get = get_from_tab o get_tab - val get_opt = Tab.lookup o get_tab - fun img mp x = case get_opt mp x of SOME y => CodSet.single y | NONE => CodSet.empty - fun img_of_set mp xs = 
DomSet.fold (CodSet.union_merge o (img mp)) xs CodSet.empty - val inv_get = get_from_itab o get_itab - val inv_get_opt = ITab.lookup o get_itab - fun inv_img mp x = case inv_get_opt mp x of SOME y => DomSet.single y | NONE => DomSet.empty - fun inv_img_of_set mp xs = CodSet.fold (DomSet.union_merge o (inv_img mp)) xs DomSet.empty - - fun is_mapped mp (x,y) = - case get_opt mp x of SOME y' => Cod.name_eq(y,y') | NONE => false - - (* throws an exception if setting value would make map no longer injective *) - fun set (x,y) mp = let - val oldy_opt = get_opt mp x - in - if (CodSet.contains (get_cod_set mp) y) then raise Map.duplicate_exp () - else mp |> update_dom_set (DomSet.add x) - |> update_cod_set (case oldy_opt of SOME oldy => CodSet.delete oldy | NONE => I) - |> update_cod_set (CodSet.add y) - |> update_tab (Tab.update (x,y)) - |> update_itab (case oldy_opt of SOME oldy => ITab.delete oldy | NONE => I) - |> update_itab (ITab.update (y,x)) - end - - fun add (x,y) mp = - if (DomSet.contains (get_dom_set mp) x) then raise Map.duplicate_exp () - else set (x,y) mp - - local - fun delete_pair (x,y) mp = - mp |> update_dom_set (DomSet.delete x) - |> update_tab (Tab.delete x) - |> update_cod_set (CodSet.delete y) - |> update_itab (ITab.delete y) - in - fun delete x mp = case get_opt mp x of NONE => mp | SOME y => delete_pair (x,y) mp - fun inv_delete y mp = case inv_get_opt mp y of NONE => mp | SOME x => delete_pair (x,y) mp - end - - fun unmap (x,y) mp = case get_opt mp x - of SOME y1 => if Cod.name_eq (y,y1) then delete x mp else mp - | NONE => mp - - val empty_rep : rep = { tab = Tab.empty, itab = ITab.empty, - dom_set = DomSet.empty, cod_set = CodSet.empty } - val empty = Map.MAP (empty_rep, graph_of_rep) - - fun of_graph gr = Map.fold_graph add gr empty - - val fold = Map.fold - val forall = Map.forall - val exists = Map.exists - val find = Map.find - - fun coerce abs_map = fold add abs_map empty - handle _ => raise Map.bad_coercion_exp () - - fun compose ms = coerce (Map.compose ms) - fun compose3 ms = coerce (Map.compose3 ms) - - val pretty = Map.pretty "NameInjection" Dom.pretty_name Cod.pretty_name - - fun is_monotone mp = - let - fun mono dset cset = - case (DomSet.get_min dset, CodSet.get_min cset) - of (SOME d, SOME c) => Cod.name_eq (get mp d,c) andalso - mono (DomSet.delete d dset) (CodSet.delete c cset) - | (NONE,NONE) => true - | _ => false - in mono (get_dom_set mp) (get_cod_set mp) - end - - fun filter_dom f mp = - DomSet.fold (fn x => if f x then add (x, get mp x) else I) (get_dom_set mp) empty - fun restrict_dom set = filter_dom (DomSet.contains set) - - (*type irep = { - tab : dom ITab.table, - itab : cod Tab.table, - dom_set : CodSet.T, - cod_set : DomSet.T - } - - type iT = (cod,dom,irep) Map.amap*) - - (* - Linear-time inversion. This could be done in constant time on another Injection - by interchanging dom/cod and tab/itab. Unfortunately, poly can't quite figure - out that ABInj.Tab.table should be the same as BAInj.ITab.table. Ideas? 
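   One possible usage sketch (illustrative only, borrowing the ABInj/BAInj
   structures and the a1/a2/b1/b2 names from the test files further below):

     val inj = ABInj.empty |> ABInj.add (a1, b1) |> ABInj.add (a2, b2)
     val inv = BAInj.inverse_of inj
     (* inv maps b1 |-> a1 and b2 |-> a2; it is rebuilt pair-by-pair by the
        fold below, which is where the linear cost comes from. *)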
- *) - fun inverse_of mp = Map.fold (fn (y,x) => add (x,y)) mp empty - (*fun extend_to f new_dom mp = DomSet.fold - (fn x => if (DomSet.contains (get_dom_set mp) x) then I else add (x, f x)) new_dom mp*) - - structure Sharing = - struct - type dom = dom - type cod = cod - type rep = rep - structure DomSet = DomSet.Sharing - structure CodSet = CodSet.Sharing - end -end - diff --git a/core/lib/maps/name_relation.ML b/core/lib/maps/name_relation.ML deleted file mode 100644 index 37c6d6f8..00000000 --- a/core/lib/maps/name_relation.ML +++ /dev/null @@ -1,256 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: isaplib/maps/name_relation.ML - Author: Aleks Kissinger - aleks0@gmail.com - Initially written 31 Mar 2013 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - A generic many-to-many relation for names. All other name-to-name - mappings inherit this signature. - - While NAME_TABLE does not directly inherit this signature, it uses - the same naming conventions where possible. -*) - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -signature NAME_RELATION_SHARING = -sig - type dom - type cod - type rep - structure DomSet : NAME_SET_SHARING sharing type DomSet.name = dom - structure CodSet : NAME_SET_SHARING sharing type CodSet.name = cod -end - -signature NAME_RELATION = -sig - type dom - type cod - type rep (* the internal representation *) - structure DomSet : NAME_SET - structure CodSet : NAME_SET - type T = (dom, cod, rep) Map.amap (* the main type *) - - structure Sharing : NAME_RELATION_SHARING - sharing type Sharing.dom = dom - sharing type Sharing.cod = cod - sharing type Sharing.rep = rep - sharing Sharing.DomSet = DomSet.Sharing - sharing Sharing.CodSet = CodSet.Sharing - - - val empty: T - val is_empty : T -> bool - - val get_dom_set : T -> DomSet.T (* set of elements in domain *) - val get_cod_set : T -> CodSet.T (* set of elements in codomain *) - - val dom_size : T -> int (* cardinality(dom_set) *) - val cod_size : T -> int (* cardinality(cod_set) *) - - val dom_contains : T -> dom -> bool - val cod_contains : T -> cod -> bool - - (*** FORWARD LOOKUP FUNCTIONS ***) - - (* these will both throw multiple_values_exp if multiple values returned *) - val get : T -> dom -> cod (* throws no_such_elem_exp if not found *) - val get_opt : T -> dom -> cod option (* returns NONE if not found *) - - (* these will always succeed, but may return an empty set *) - val img : T -> dom -> CodSet.T - val img_of_set : T -> DomSet.T -> CodSet.T - - (*** INVERSE LOOKUP FUNCTIONS ***) - - (* these will both throw multiple_values_exp if multiple values returned *) - val inv_get : T -> cod -> dom (* throws no_such_elem_exp if not found *) - val inv_get_opt : T -> cod -> dom option (* returns NONE if not found *) - - (* these will always succeed, but may return an empty set *) - val inv_img : T -> cod -> DomSet.T - val inv_img_of_set : T -> CodSet.T -> DomSet.T - - (*** RELATION LOOKUP ***) - - val is_mapped : T -> (dom * cod) -> bool - - (*** UPDATERS ***) - - (* NOTE: for relations, set = add, but add is preferred *) - - (* for single-valued functions, overwrite if dom exists *) - val set : (dom * cod) -> T -> T - - (* for single-valued functions, raise duplicate_exp dom exists *) - val add : (dom * cod) -> T -> T - - (* delete an element of the domain. no-op if not found *) - val delete : dom -> T -> T - - (* delete an element of the codomain. 
no-op if not found *) - val inv_delete : cod -> T -> T - - (* remove a single pair from the relation. for single-valued functions, use - of delete or inv_delete is preferred *) - val unmap : dom * cod -> T -> T - - (*** ABSTRACT MAP FUNCTIONS ***) - - (* inherited from Map *) - val fold : (dom * cod -> 'a -> 'a) -> T -> 'a -> 'a - val exists : (dom * cod -> bool) -> T -> bool - val forall : (dom * cod -> bool) -> T -> bool - val find : (dom * cod -> bool) -> T -> (dom * cod) option - - (* composition with other abstract maps *) - val compose : ('a,cod,'y) Map.amap * (dom,'a,'x) Map.amap -> T - - (* shorthand for compose o (I X compose) *) - val compose3 : ('b,cod,'z) Map.amap * ('a,'b,'y) Map.amap * (dom,'a,'x) Map.amap -> T - - (* try to convert an amap to the given type of relation, exception if unsuccessful. *) - val coerce : (dom,cod,'x) Map.amap -> T - - val pretty : T -> Pretty.T - -end - - -functor NameRelation( - structure Dom : NAME - structure Cod : NAME -) : NAME_RELATION = -struct - type dom = Dom.name - type cod = Cod.name - structure DomSet = Dom.NSet - structure CodSet = Cod.NSet - structure DomTab = NameTable(structure Dom = Dom) - structure CodTab = NameTable(structure Dom = Cod) - - (*structure Tab = Table(type key = dom val ord = Dom.name_ord) - structure ITab = Table(type key = cod val ord = Cod.name_ord)*) - - (* do gets directly on a table, throw exception if not found *) - (*fun get_from_tab tab k = - case Tab.lookup tab k of SOME v => v - | NONE => raise Map.no_such_elem_exp () - fun get_from_itab tab k = - case ITab.lookup tab k of SOME v => v - | NONE => raise Map.no_such_elem_exp ()*) - - (* the internal representation *) - type rep = { - tab : CodSet.T DomTab.T, - itab : DomSet.T CodTab.T - } - - type T = (dom, cod, rep) Map.amap - - (* record accessors *) - local - fun update_rep_tab f (r:rep) = {tab=f(#tab r),itab= #itab r} - fun update_rep_itab f (r:rep) = {tab= #tab r,itab=f(#itab r)} - in - fun update_tab f (mp:T) = Map.update_rep (update_rep_tab f) mp - fun update_itab f (mp:T) = Map.update_rep (update_rep_itab f) mp - fun get_tab (mp:T) = #tab (Map.get_rep mp) - fun get_itab (mp:T) = #itab (Map.get_rep mp) - fun set_tab x = update_tab (K x) - fun set_itab x = update_itab (K x) - end - - fun get_dom_set mp = DomTab.get_dom_set (get_tab mp) - fun get_cod_set mp = CodTab.get_dom_set (get_itab mp) - - fun graph_of_rep (r:rep) = let - fun di x = case DomTab.get_opt (#tab r) x of SOME ys => CodSet.list_of ys | NONE => [] - fun dom () = DomSet.list_of (DomTab.get_dom_set (#tab r)) - in Map.mk_graph (dom, di) - end - - val is_empty = DomTab.is_empty o get_tab - val dom_size = DomTab.dom_size o get_tab - val cod_size = CodTab.dom_size o get_itab - val dom_contains = DomSet.contains o get_dom_set - val cod_contains = CodSet.contains o get_cod_set - - fun get_opt mp x = - case DomTab.get_opt (get_tab mp) x - of SOME ys => (case CodSet.tryget_singleton ys of SOME y => SOME y - | NONE => raise Map.multiple_values_exp ()) - | NONE => NONE - - fun get mp x = case get_opt mp x of SOME y => y - | NONE => raise Map.no_such_elem_exp () - - fun img mp x = case DomTab.get_opt (get_tab mp) x of SOME ys => ys | NONE => CodSet.empty - fun img_of_set mp xs = DomSet.fold (CodSet.union_merge o (img mp)) xs CodSet.empty - - fun inv_get_opt mp x = - case CodTab.get_opt (get_itab mp) x - of SOME ys => (case DomSet.tryget_singleton ys of SOME y => SOME y - | NONE => raise Map.multiple_values_exp ()) - | NONE => NONE - - fun inv_get mp y = case inv_get_opt mp y of SOME x => x - | NONE 
=> raise Map.no_such_elem_exp () - - fun inv_img mp y = case CodTab.get_opt (get_itab mp) y of SOME xs => xs | NONE => DomSet.empty - fun inv_img_of_set mp ys = CodSet.fold (DomSet.union_merge o (inv_img mp)) ys DomSet.empty - - fun is_mapped mp (x,y) = CodSet.contains (img mp x) y - - fun add (x,y) mp = - mp |> update_tab (DomTab.set (x,CodSet.add y (img mp x))) - |> update_itab (CodTab.set (y,DomSet.add x (inv_img mp y))) - - val set = add - - fun unmap (x,y) mp = let - val ix = img mp x |> CodSet.delete y - val iy = inv_img mp y |> DomSet.delete x - in mp |> (if CodSet.is_empty ix - then update_tab (DomTab.delete x) - else update_tab (DomTab.set (x,ix))) - |> (if DomSet.is_empty iy - then update_itab (CodTab.delete y) - else update_itab (CodTab.set (y,iy))) - end - - fun delete x mp = CodSet.fold (fn y => unmap (x,y)) (img mp x) mp - fun inv_delete y mp = DomSet.fold (fn x => unmap (x,y)) (inv_img mp y) mp - - val empty_rep : rep = { tab = DomTab.empty, itab = CodTab.empty } - val empty = Map.MAP (empty_rep, graph_of_rep) - - fun of_graph gr = Map.fold_graph add gr empty - - val fold = Map.fold - val forall = Map.forall - val exists = Map.exists - val find = Map.find - - fun coerce abs_map = fold add abs_map empty - handle Map.duplicate_exp () => raise Map.bad_coercion_exp () - - fun compose ms = coerce (Map.compose ms) - fun compose3 ms = coerce (Map.compose3 ms) - - val pretty = Map.pretty "NameFunction" DomTab.pretty_key CodTab.pretty_key - - structure Sharing = - struct - type dom = dom - type cod = cod - type rep = rep - structure DomSet = DomSet.Sharing - structure CodSet = CodSet.Sharing - end -end - - - - diff --git a/core/lib/maps/name_substitution.ML b/core/lib/maps/name_substitution.ML deleted file mode 100644 index 31e4b99c..00000000 --- a/core/lib/maps/name_substitution.ML +++ /dev/null @@ -1,302 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: isaplib/maps/name_relation.ML - Author: Aleks Kissinger - aleks0@gmail.com - Initially written 31 Mar 2013 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - An injective endofunction with fast inverse-lookup and constant-time - map inversion. These are used for renaming. - - Note there is a great deal of code replication with name injections. - This seems to be difficult to get around without two-level functors - since Tab != Tab. As substition is BY FAR the most used case, - it seems wasteful to obfuscate the code to share more with injections. - - NOTE: 'set (x,y)' will overwrite a value if x is already in the - domain, and raise name_clash_exp if mapping x to y would break - injectivity. -*) - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -signature NAME_SUBSTITUTION = -sig - include NAME_RELATION - sharing type dom = cod - sharing DomSet = CodSet - - (* - * All updaters maintain two invariants: - * 1. the underlying map is injective - * 2. cod_set (intersect) cod_avoids = empty - * - * If an updater would break this invariants, name_clash_exp is thrown. This happens in - * three cases: - * 1. (SOME a, SOME b, c) : client wants to add a mapping [b |-> c], but there already exists - * a mapping [a |-> c]. - * 2. (SOME a, NONE, c) : client wants to add c to "avoids" set, but there already exists a - * mapping [a |-> c]. - * 3. 
(NONE, SOME b) : client wants to add a mapping [b |-> c], but c is already in the - * "avoids" set - *) - exception name_clash_exp of dom option * dom option * cod - - (* CAREFUL: inverse_of o inverse_of != I, because avoids info gets lost. This is not a problem - * in practice, because typically inverses are created local to a renaming function then thrown - * away. *) - val inverse_of : T -> T - - (* TODO: maybe these should be in RELATION signature, and implemented everywhere *) - val restrict_dom : DomSet.T -> T -> T - val filter_dom : (dom -> bool) -> T -> T - (*val is_monotone : T -> bool*) - - (* - * Typically substitutions are created by setting an "avoids" set, then extending the domain of - * the substitution with "extend_fixed" and "extend_fresh" - *) - - (* try to extend the domain to include every element of this set, where unmapped elements - * go to themselves. This can fail with name_clash_exp. *) - val extend_fixed : DomSet.T -> T -> T - - (* extend the domain to the given set, where unmapped elements are mapped to new fresh names. - * This will never fail. *) - val extend_fresh : DomSet.T -> T -> T - - (* the same, but also take a function which suggests a new name *) - val extend_fresh_suggest : (dom -> dom) -> DomSet.T -> T -> T - - val get_cod_avoids : T -> CodSet.T - val avoid_in_cod : cod -> T -> T - val avoid_set_in_cod : CodSet.T -> T -> T - val clear_avoids : T -> T - - val mk_from_avoids : CodSet.T -> T - - val add_fresh : dom -> T -> (cod * T) - val add_fresh_anon : dom -> T -> T -end - -functor NameSubstitution( - structure Name : NAME -) : NAME_SUBSTITUTION = -struct - - type dom = Name.name - type cod = Name.name - structure DomSet = Name.NSet - structure CodSet = Name.NSet - - structure Tab = Table(type key = dom val ord = Name.name_ord) - - exception name_clash_exp of dom option * dom option * cod - - (* do gets directly on a table, throw exception if not found *) - fun get_from_tab tab k = - case Tab.lookup tab k of SOME v => v - | NONE => raise Map.no_such_elem_exp () - - (* the internal representation *) - type rep = { - tab : cod Tab.table, - itab : dom Tab.table, - dom_set : DomSet.T, - cod_set : CodSet.T, - cod_avoids : CodSet.T - } - - type T = (dom, cod, rep) Map.amap - - (* record accessors *) - local - fun update_rep_tab f (r : rep) = {tab=f(#tab r),itab= #itab r,dom_set= #dom_set r,cod_set= #cod_set r,cod_avoids= #cod_avoids r} - fun update_rep_itab f (r : rep) = {tab= #tab r,itab=f(#itab r),dom_set= #dom_set r,cod_set= #cod_set r,cod_avoids= #cod_avoids r} - fun update_rep_dom_set f (r : rep) = {tab= #tab r,itab= #itab r,dom_set=f(#dom_set r),cod_set= #cod_set r,cod_avoids= #cod_avoids r} - fun update_rep_cod_set f (r : rep) = {tab= #tab r,itab= #itab r,dom_set= #dom_set r,cod_set=f(#cod_set r),cod_avoids= #cod_avoids r} - fun update_rep_cod_avoids f (r : rep) = {tab= #tab r,itab= #itab r,dom_set= #dom_set r,cod_set= #cod_set r,cod_avoids=f(#cod_avoids r)} - in - fun update_tab f (mp:T) = Map.update_rep (update_rep_tab f) mp - fun update_itab f (mp:T) = Map.update_rep (update_rep_itab f) mp - fun update_dom_set f (mp:T) = Map.update_rep (update_rep_dom_set f) mp - fun update_cod_set f (mp:T) = Map.update_rep (update_rep_cod_set f) mp - fun update_cod_avoids f (mp:T) = Map.update_rep (update_rep_cod_avoids f) mp - - fun get_tab (mp:T) = #tab (Map.get_rep mp) - fun get_itab (mp:T) = #itab (Map.get_rep mp) - fun get_dom_set (mp:T) = #dom_set (Map.get_rep mp) - fun get_cod_set (mp:T) = #cod_set (Map.get_rep mp) - fun get_cod_avoids (mp:T) = 
#cod_avoids (Map.get_rep mp) - - fun set_tab x = update_tab (K x) - fun set_itab x = update_itab (K x) - fun set_dom_set x = update_dom_set (K x) - fun set_cod_set x = update_cod_set (K x) - fun set_cod_avoids x = update_cod_avoids (K x) - end - - fun graph_of_rep (r:rep) = let - fun di x = case Tab.lookup (#tab r) x of SOME y => [y] | NONE => [] - fun dom () = DomSet.list_of (#dom_set r) - in Map.mk_graph (dom, di) - end - - val is_empty = DomSet.is_empty o get_dom_set - val dom_size = DomSet.cardinality o get_dom_set - val cod_size = CodSet.cardinality o get_cod_set (* should always = dom_size *) - val dom_contains = DomSet.contains o get_dom_set - val cod_contains = CodSet.contains o get_cod_set - - val get = get_from_tab o get_tab - val get_opt = Tab.lookup o get_tab - fun img mp x = case get_opt mp x of SOME y => CodSet.single y | NONE => CodSet.empty - fun img_of_set mp xs = DomSet.fold (CodSet.union_merge o (img mp)) xs CodSet.empty - val inv_get = get_from_tab o get_itab - val inv_get_opt = Tab.lookup o get_itab - fun inv_img mp x = case inv_get_opt mp x of SOME y => DomSet.single y | NONE => DomSet.empty - fun inv_img_of_set mp xs = CodSet.fold (DomSet.union_merge o (inv_img mp)) xs DomSet.empty - - fun is_mapped mp (x,y) = - case get_opt mp x of SOME y' => Name.name_eq(y,y') | NONE => false - - (* raises an exception if setting value would make map no longer injective or break - * avoids constraint *) - fun set (x,y) mp = let - val oldy_opt = get_opt mp x - in - if (CodSet.contains (get_cod_set mp) y) - then - let - val x' = inv_get mp y - in if Name.name_eq (x,x') then mp - else raise name_clash_exp (SOME x', SOME x, y) - end - else - if (CodSet.contains (get_cod_avoids mp) y) - then raise name_clash_exp (NONE, SOME x, y) - else mp |> update_dom_set (DomSet.add x) - |> update_cod_set (case oldy_opt of SOME oldy => CodSet.delete oldy | NONE => I) - |> update_cod_set (CodSet.add y) - |> update_tab (Tab.update (x,y)) - |> update_itab (case oldy_opt of SOME oldy => Tab.delete oldy | NONE => I) - |> update_itab (Tab.update (y,x)) - end - - fun add (x,y) mp = - if (DomSet.contains (get_dom_set mp) x) then raise Map.duplicate_exp () - else set (x,y) mp - - local - fun delete_pair (x,y) mp = - mp |> update_dom_set (DomSet.delete x) - |> update_tab (Tab.delete x) - |> update_cod_set (CodSet.delete y) - |> update_itab (Tab.delete y) - in - fun delete x mp = case get_opt mp x of NONE => mp | SOME y => delete_pair (x,y) mp - fun inv_delete y mp = case inv_get_opt mp y of NONE => mp | SOME x => delete_pair (x,y) mp - end - - fun unmap (x,y) mp = case get_opt mp x - of SOME y1 => if Name.name_eq (y,y1) then delete x mp else mp - | NONE => mp - - (*fun is_monotone mp = - let - fun mono dset cset = - case (DomSet.get_min dset, CodSet.get_min cset) - of (SOME d, SOME c) => Name.name_eq (get mp d,c) andalso - mono (DomSet.delete d dset) (CodSet.delete c cset) - | (NONE,NONE) => true - | _ => false - in mono (get_dom_set mp) (get_cod_set mp) - end*) - - - - (***** avoids and freshness *****) - - fun avoid_in_cod n mp = - if CodSet.contains (get_cod_set mp) n - then raise name_clash_exp (SOME (inv_get mp n), NONE, n) - else update_cod_avoids (CodSet.add n) mp - - - fun avoid_set_in_cod nset = CodSet.fold avoid_in_cod nset - fun clear_avoids mp = mp |> set_cod_avoids CodSet.empty - - fun add_fresh_suggest suggest old_n mp = let - val avoid = CodSet.union_merge (get_cod_avoids mp) (get_cod_set mp) - val new_n = CodSet.new avoid (suggest old_n) - in (new_n, mp |> add (old_n, new_n)) - end - - fun 
add_fresh old_n mp = add_fresh_suggest I old_n mp - - val add_fresh_anon = snd oo add_fresh - - fun extend_fresh_suggest suggest nset mp = - DomSet.fold (fn n => if dom_contains mp n then I else snd o (add_fresh_suggest suggest n)) nset mp - - fun extend_fresh nset mp = extend_fresh_suggest I nset mp - - val empty_rep : rep = { tab = Tab.empty, itab = Tab.empty, - dom_set = DomSet.empty, cod_set = CodSet.empty, - cod_avoids = CodSet.empty } - val empty = Map.MAP (empty_rep, graph_of_rep) - - fun mk_from_avoids nset = empty |> set_cod_avoids nset - - fun of_graph gr = Map.fold_graph add gr empty - - val fold = Map.fold - val forall = Map.forall - val exists = Map.exists - val find = Map.find - - fun coerce abs_map = fold add abs_map empty - handle _ => raise Map.bad_coercion_exp () - - fun compose ms = coerce (Map.compose ms) - fun compose3 ms = coerce (Map.compose3 ms) - - val pretty = Map.pretty "NameSubstitution" Name.pretty_name Name.pretty_name - - fun restrict_dom set mp = - DomSet.fold (fn x => add (x, get mp x)) set empty - fun filter_dom f mp = - DomSet.fold (fn x => if f x then add (x, get mp x) else I) (get_dom_set mp) empty - - - (*type irep = { - tab : dom Tab.table, - itab : cod Tab.table, - dom_set : CodSet.T, - cod_set : DomSet.T - } - - type iT = (cod,dom,irep) Map.amap*) - - (* - Constant-time inversion. - *) - fun inverse_of mp = - mp |> set_cod_set (get_dom_set mp) - |> set_dom_set (get_cod_set mp) - |> set_tab (get_itab mp) - |> set_itab (get_tab mp) - - (* extend domain to include new_dom, mapping items to themselves if not mapped *) - fun extend_fixed new_dom mp = DomSet.fold - (fn x => if (DomSet.contains (get_dom_set mp) x) then I else add (x, x)) new_dom mp - - structure Sharing = - struct - type dom = dom - type cod = cod - type rep = rep - structure DomSet = DomSet.Sharing - structure CodSet = CodSet.Sharing - end -end diff --git a/core/lib/maps/name_table.ML b/core/lib/maps/name_table.ML deleted file mode 100644 index 1a79966b..00000000 --- a/core/lib/maps/name_table.ML +++ /dev/null @@ -1,186 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: isaplib/maps/name_table.ML - Author: Aleks Kissinger, Lucas Dixon - aleks0@gmail.com, lucas.dixon@ed.ac.uk - Initially written Oct 2005, Updated 27 Mar 2013 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - Table with names as keys and polymorphic value types. Note we don't - even require values to have a notion of equality, so things like - inverse image don't make sense. - - The underlying type is an ABS_MAP, so it supports coercion and - composition with other map types. 
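   A minimal usage sketch (not part of the original file; V stands for an
   arbitrary NAME structure, and v1 and the string labels are made-up
   illustrative values):

     structure VTab = NameTable(structure Dom = V)
     val tab           = VTab.empty |> VTab.add (v1, "first label")
     val (fresh, tab') = VTab.store "second label" tab
     (* add raises Map.duplicate_exp if v1 is already a key, whereas store
        invents a fresh key and returns it alongside the extended table. *)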
-*) - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -signature NAME_TABLE = -sig - type name - structure NSet : NAME_SET sharing type NSet.name = name - - type 'a rep - type 'a T = (name, 'a, 'a rep) Map.amap - val empty: 'a T - - val get_dom_set : 'a T -> NSet.T - - val dom_size : 'a T -> int (* cardinality(dom_set) *) - val size : 'a T -> int (* alias for dom_size *) - - val dom_contains : 'a T -> name -> bool - - (* given the comparison function for values, return true if tables are identical *) - val tab_eq : ('a * 'a -> bool) -> 'a T * 'a T -> bool - - val is_empty : 'a T -> bool - - (* forward lookup *) - val get : 'a T -> name -> 'a - val get_opt : 'a T -> name -> 'a option - - (* updaters *) - val set : (name * 'a) -> 'a T -> 'a T - val add : (name * 'a) -> 'a T -> 'a T - val delete : name -> 'a T -> 'a T - - (* table-specific updaters *) - val store : 'a -> 'a T -> (name * 'a T) - val store_anon : 'a -> 'a T -> 'a T - val map_entry : ('a -> 'a) -> name -> 'a T -> 'a T - val map_default : ('a -> 'a) -> 'a -> name -> 'a T -> 'a T - val map_all : ('a -> 'b) -> 'a T -> 'b T - - val of_list : (name * 'a) list -> 'a T - val list_of : 'a T -> (name * 'a) list - - (* inherited from Map.Util *) - val fold : (name * 'a -> 'b -> 'b) -> 'a T -> 'b -> 'b - val exists : (name * 'a -> bool) -> 'a T -> bool - val forall : (name * 'a -> bool) -> 'a T -> bool - val find : (name * 'a -> bool) -> 'a T -> (name * 'a) option - - (* composition with other abstract maps *) - val compose : ('a,'b,'y) Map.amap * (name,'a,'x) Map.amap -> 'b T - val compose3 : ('b,'c,'z) Map.amap * ('a,'b,'y) Map.amap * (name,'a,'x) Map.amap -> 'c T - - (* try to convert an amap to a name table. Throws an exception if unsuccessful. *) - val coerce : (name,'a,'b) Map.amap -> 'a T - - val pretty_key : name -> Pretty.T - val pretty : ('a -> Pretty.T) -> 'a T -> Pretty.T -end - -functor NameTable( - structure Dom : NAME -) : NAME_TABLE = -struct - - structure NSet = Dom.NSet - type name = Dom.name - fun ord (a, b) = Dom.name_ord (a,b) - fun name_eq (a, b) = (ord (a,b) = EQUAL) - - structure Tab = Table(type key = name val ord = ord); - - (* do gets directly on a table, throw exception if not found *) - fun get_from_tab tab k = - case Tab.lookup tab k of SOME v => v - | NONE => raise Map.no_such_elem_exp () - - datatype 'a rep = Rep of {names : NSet.T, tab : 'a Tab.table} - - local - fun update_rep_names f (Rep r) = Rep {names=f(#names r),tab= #tab r} - fun update_rep_tab f (Rep r) = Rep {names= #names r,tab=f(#tab r)} - fun get_rep_names (Rep r) = #names r - fun get_rep_tab (Rep r) = #tab r - in - - fun update_names f = Map.update_rep (update_rep_names f) - fun update_tab f = Map.update_rep (update_rep_tab f) - fun set_names x = update_names (K x) - fun set_tab x = update_tab (K x) - fun get_names m = get_rep_names (Map.get_rep m) - fun get_tab m = get_rep_tab (Map.get_rep m) - - fun graph_of_rep rep = let - fun di x = case Tab.lookup (get_rep_tab rep) x of SOME y => [y] | NONE => [] - fun dom () = NSet.list_of (get_rep_names rep) - in Map.mk_graph (dom, di) - end - - end - - - type 'a T = (name, 'a, 'a rep) Map.amap - - val get_dom_set = get_names - - fun dom_size mp = NSet.cardinality (get_names mp) - val size = dom_size - fun dom_contains mp = NSet.contains (get_dom_set mp) - - fun is_empty mp = NSet.is_empty (get_names mp) - - fun get_opt mp = Tab.lookup (get_tab mp) - fun get mp = get_from_tab (get_tab mp) - - fun set (k,v) mp = - mp |> update_names (NSet.add k) - |> update_tab (Tab.update 
(k,v)) - - fun add (k,v) m = - if (NSet.contains (get_names m) k) then raise Map.duplicate_exp () - else set (k,v) m - - fun map_entry f k m = update_tab (Tab.map_entry k f) m - handle Tab.SAME => m - fun map_default f x k = update_tab (Tab.map_default (k,x) f) - fun map_all f (Map.MAP (Rep {names=names,tab=tab}, _)) = - Map.MAP (Rep { names = names, tab = Tab.map (K f) tab }, graph_of_rep) - - fun store v mp = let - val (fresh,names) = NSet.add_new Dom.default_name (get_names mp) - in (fresh, mp |> set_names names |> update_tab (Tab.update (fresh,v))) - end - - fun store_anon v mp = snd (store v mp) - - fun delete n mp = - mp |> update_names (NSet.delete n) - |> update_tab (Tab.delete_safe n) - - val empty_rep = Rep { names=NSet.empty, tab=Tab.empty } - val empty = Map.MAP (empty_rep, graph_of_rep) - - val fold = Map.fold - val forall = Map.forall - val exists = Map.exists - val find = Map.find - - fun of_graph gr = Map.fold_graph add gr empty - fun of_list lst = Basics.fold add lst empty - fun list_of m = fold cons m [] - - - fun tab_eq val_eq (tab1,tab2) = - if dom_size tab1 = dom_size tab2 - then fold (fn (k,v) => fn sofar => (sofar andalso - case get_opt tab2 k - of SOME v' => val_eq (v,v') - | NONE => false)) tab1 true - else false - - fun coerce abs_map = fold add abs_map empty - handle _ => raise Map.bad_coercion_exp () - - fun compose ms = coerce (Map.compose ms) - fun compose3 ms = coerce (Map.compose3 ms) - - val pretty_key = Dom.pretty_name - fun pretty pretty_val = Map.pretty "NameTable" Dom.pretty_name pretty_val - -end - diff --git a/core/lib/maps/test/name_function-test.ML b/core/lib/maps/test/name_function-test.ML deleted file mode 100644 index 401d7b63..00000000 --- a/core/lib/maps/test/name_function-test.ML +++ /dev/null @@ -1,168 +0,0 @@ -local - open TestMapsUtil - - (* naming convention: for arbitrary name-functions, use letters typical for - functions, but doubled. e.g. 
ff, gg, hh *) - val ff = ABFn.empty - - (*** typical mapping tests ***) - - val _ = test "An empty function" (fn () => ( - assert "should have size 0" (ABFn.dom_size ff = 0); - assert "should report true for is_empy" (ABFn.is_empty ff); - assert "should return NONE for get_opt" (is_none (ABFn.get_opt ff a1)); - ())) () - - val ff = ff |> ABFn.set (a1, b1) - val ff = ff |> ABFn.set (a2, b2) - val ff = ff |> ABFn.set (a3, b2) - - val _ = test "A function mapping three elements" (fn () => ( - assert "should have size 3" (ABFn.dom_size ff = 3); - assert "should have codomain size 2" (ABFn.cod_size ff = 2); - assert "should return correct value for a1" (B.name_eq (ABFn.get ff a1, b1)); - assert "should return correct value for a2" (B.name_eq (ABFn.get ff a2, b2)); - assert "should return correct value for a3" (B.name_eq (ABFn.get ff a3, b2)); - assert "should return correct inverse image for b1" - (A.NSet.eq (ABFn.inv_img ff b1) (A.NSet.of_list [a1])); - assert "should return correct inverse image for b2" - (A.NSet.eq (ABFn.inv_img ff b2) (A.NSet.of_list [a2,a3])); - assert "should return SOME (elem) for get_opt" - (B.name_eq (the (ABFn.get_opt ff a1), b1)); - assert "should fail with multiple_values_exp for inv_get(b2)" - (catch_multi_fail (fn () => ABFn.inv_get ff b2)); - assert "should fail with multiple_values_exp for inv_get_opt(b2)" - (catch_multi_fail (fn () => ABFn.inv_get_opt ff b2)); - assert "should fail on duplicate_exp for duplicate add" - (catch_dup_fail (fn () => ff |> ABFn.add (a1,b3))); - ())) () - - val ff = ff |> ABFn.set (a3, b3) - - val _ = test "A function after override 1" (fn () => ( - assert "should still have size 3" (ABFn.dom_size ff = 3); - assert "should have codomain size 3" (ABFn.cod_size ff = 3); - assert "should return new element" (B.name_eq (ABFn.get ff a3, b3)); - assert "should return correct inverse image for b2" - (A.NSet.eq (ABFn.inv_img ff b2) (A.NSet.of_list [a2])); - assert "should return correct inverse image for b3" - (A.NSet.eq (ABFn.inv_img ff b3) (A.NSet.of_list [a3])); - ())) () - - val ff = ff |> ABFn.set (a2, b3) - - val _ = test "A function after override 2" (fn () => ( - assert "should still have size 3" (ABFn.dom_size ff = 3); - assert "should have codomain size 2" (ABFn.cod_size ff = 2); - assert "should return new element" (B.name_eq (ABFn.get ff a2, b3)); - assert "should return empty inverse image for b2" - (A.NSet.eq (ABFn.inv_img ff b2) (A.NSet.of_list [])); - assert "should return correct inverse image for b3" - (A.NSet.eq (ABFn.inv_img ff b3) (A.NSet.of_list [a2, a3])); - ())) () - - val ff = ff |> ABFn.delete a2 - - val _ = test "A function after deletion 1" (fn () => ( - assert "should have size 2" (ABFn.dom_size ff = 2); - assert "should still have codomain size 2" - (ABFn.cod_size ff = 2); - assert "should return correct inverse image for b3" - (A.NSet.eq (ABFn.inv_img ff b3) (A.NSet.of_list [a3])); - assert "should return NONE for deleted element" (is_none (ABFn.get_opt ff a2)); - ())) () - - val ff = ff |> ABFn.delete a3 - - val _ = test "A function after deletion 2" (fn () => ( - assert "should have size 1" (ABFn.dom_size ff = 1); - assert "should have codomain size 1" - (ABFn.cod_size ff = 1); - assert "should return empty inverse image for b3" - (A.NSet.eq (ABFn.inv_img ff b3) (A.NSet.of_list [])); - assert "should return NONE for deleted element" (is_none (ABFn.get_opt ff a3)); - ())) () - - (*** coercion tests ***) - - fun di x = case A.dest x of "a1" => [b1] | "a2" => [b2] | "a3" => [b2] | _ => [] - fun multi_di x = case 
A.dest x of "a1" => [b1] | "a2" => [b2,b3] | _ => [] - fun dom () = [a1,a2,a3] - val abs_ff = Map.mk (Map.mk_graph (dom,di), I) - val multi_abs = Map.mk (Map.mk_graph (dom,multi_di), I) - val coerced_ff = ABFn.coerce abs_ff - - val _ = test "A coerced map" (fn () => ( - assert "should have size 3" (ABFn.dom_size coerced_ff = 3); - assert "should contain the correct values" ( - (B.name_eq (ABFn.get coerced_ff a1, b1)) andalso - (B.name_eq (ABFn.get coerced_ff a2, b2)) andalso - (B.name_eq (ABFn.get coerced_ff a3, b2))); - ())) () - - val _ = test "A bad coercion" (fn () => ( - assert "should raise bad_coercion_exp for multi-valued graph" - (catch_coerce_fail (fn () => ABFn.coerce multi_abs)); - ())) () - - (*** composition ***) - val ff = ABFn.empty |> ABFn.add (a1,b1) |> ABFn.add (a2,b2) - |> ABFn.add (a3,b3) |> ABFn.add (a4,b2) - val ff2 = BCFn.empty |> BCFn.add (b1,c2) |> BCFn.add (b2,c1) - |> BCFn.add (b3,c2) - - (* ff2 o ff :: a1 |-> c2, a2 |-> c1, a3 |-> c2, a4 |-> c1 *) - - val ff_cmp = ACFn.compose (ff2,ff) - - val _ = test "A composed function" (fn () => ( - assert "should have size 4" (ACFn.dom_size ff_cmp = 4); - assert "should have codomain size 2" (ACFn.cod_size ff_cmp = 2); - assert "should return correct value for a1" (C.name_eq (ACFn.get ff_cmp a1, c2)); - assert "should return correct value for a2" (C.name_eq (ACFn.get ff_cmp a2, c1)); - assert "should return correct value for a3" (C.name_eq (ACFn.get ff_cmp a3, c2)); - assert "should return correct value for a4" (C.name_eq (ACFn.get ff_cmp a4, c1)); - assert "should return correct inverse image for c1" - (A.NSet.eq (ACFn.inv_img ff_cmp c1) (A.NSet.of_list [a2,a4])); - assert "should return correct inverse image for c2" - (A.NSet.eq (ACFn.inv_img ff_cmp c2) (A.NSet.of_list [a1,a3])); - ())) () - - (* A renaming-like scenario. Suppose we take ff as above and rename its elements with: *) - val rn_dom = AAInj.empty |> AAInj.add (a1,a5) - |> AAInj.add (a2,a2) - |> AAInj.add (a3,a1) - |> AAInj.add (a4,a3) - val rn_cod = BBInj.empty |> BBInj.add (b1,b2) - |> BBInj.add (b2,b3) - |> BBInj.add (b3,b1) - (* Note that renamings are performed all at once, so the codomain no longer needs to - be fresh in the domain. *) - - - (* The renamed function of: - ff :: a1 |-> b1, a2 |-> b2, a3 |-> b3, a4 |-> b2 - - is computed as (rn_cod o ff o rn_dom^-1). 
It should be: - ff' :: a5 |-> b2, a2 |-> b3, a1 |-> b1, a3 |-> b3 *) - - val ff' = ABFn.compose3 (rn_cod, ff, AAInj.inverse_of rn_dom) - - val _ = test "A renamed function" (fn () => ( - assert "should have size 4" (ABFn.dom_size ff' = 4); - assert "should have codomain size 2" (ABFn.cod_size ff' = 3); - assert "should return correct value for a5" (B.name_eq (ABFn.get ff' a5, b2)); - assert "should return correct value for a2" (B.name_eq (ABFn.get ff' a2, b3)); - assert "should return correct value for a1" (B.name_eq (ABFn.get ff' a1, b1)); - assert "should return correct value for a3" (B.name_eq (ABFn.get ff' a3, b3)); - assert "should return correct inverse image for b1" - (A.NSet.eq (ABFn.inv_img ff' b1) (A.NSet.of_list [a1])); - assert "should return correct inverse image for b2" - (A.NSet.eq (ABFn.inv_img ff' b2) (A.NSet.of_list [a5])); - assert "should return correct inverse image for b3" - (A.NSet.eq (ABFn.inv_img ff' b3) (A.NSet.of_list [a2,a3])); - ())) () - -in - val _ = assert_no_failed_tests () -end \ No newline at end of file diff --git a/core/lib/maps/test/name_injection-test.ML b/core/lib/maps/test/name_injection-test.ML deleted file mode 100644 index cffc3f6d..00000000 --- a/core/lib/maps/test/name_injection-test.ML +++ /dev/null @@ -1,94 +0,0 @@ -local - open TestMapsUtil - - val inj = ABInj.empty - - (*** typical mapping tests ***) - - val _ = test "An empty injection" (fn () => ( - assert "should have size 0" (ABInj.dom_size inj = 0); - assert "should report true for is_empy" (ABInj.is_empty inj); - assert "should return NONE for get_opt" (is_none (ABInj.get_opt inj a1)); - ())) () - - val inj = inj |> ABInj.set (a1, b1) - val inj = inj |> ABInj.set (a2, b2) - - val _ = test "An injection with two elements" (fn () => ( - assert "should have size 2" (ABInj.dom_size inj = 2); - assert "should return correct value for a1" (B.name_eq (ABInj.get inj a1, b1)); - assert "should return correct value for a2" (B.name_eq (ABInj.get inj a2, b2)); - assert "should return correct inverse-value for b1" (A.name_eq (ABInj.inv_get inj b1, a1)); - assert "should return correct inverse-value for b2" (A.name_eq (ABInj.inv_get inj b2, a2)); - assert "should return SOME (elem) for get_opt" (B.name_eq (the (ABInj.get_opt inj a1), b1)); - assert "should fail on duplicate_exp for duplicate add" - (catch_dup_fail (fn () => inj |> ABInj.add (a1,b2))); - ())) () - - val inj = inj |> ABInj.set (a2, b3) - - val _ = test "An injection after override" (fn () => ( - assert "should still have size 2" (ABInj.dom_size inj = 2); - assert "should still have codomain size 2" (B.NSet.cardinality (ABInj.get_cod_set inj) = 2); - assert "should return new element" (B.name_eq (ABInj.get inj a2, b3)); - assert "should return NONE for old codomain" (is_none (ABInj.inv_get_opt inj b2)); - ())) () - - val inj = inj |> ABInj.delete a1 - - val _ = test "An injection after deletion" (fn () => ( - assert "should have size 1" (ABInj.dom_size inj = 1); - assert "should have codomain size 1" (B.NSet.cardinality (ABInj.get_cod_set inj) = 1); - assert "should return NONE for deleted element" (is_none (ABInj.get_opt inj a1)); - ())) () - - (*** coercion tests ***) - - fun di x = case A.dest x of "a1" => [b1] | "a2" => [b2] | _ => [] - fun multi_di x = case A.dest x of "a1" => [b1] | "a2" => [b2,b3] | _ => [] - fun non_inj_di x = case A.dest x of "a1" => [b1] | "a2" => [b1] | _ => [] - fun dom () = [a1,a2] - val abs_inj = Map.mk (Map.mk_graph (dom,di), I) - val multi_abs = Map.mk (Map.mk_graph (dom,multi_di), I) - val non_inj_abs 
= Map.mk (Map.mk_graph (dom,non_inj_di), I) - val coerced_inj = ABInj.coerce abs_inj - - val _ = test "A coerced map" (fn () => ( - assert "should have size 2" (ABInj.dom_size coerced_inj = 2); - assert "should contain the correct values" ( - (B.name_eq (ABInj.get coerced_inj a1, b1)) andalso - (B.name_eq (ABInj.get coerced_inj a2, b2))); - ())) () - - val _ = test "A bad coercion" (fn () => ( - assert "should raise bad_coercion_exp for multi-valued graph" - (catch_coerce_fail (fn () => ABInj.coerce multi_abs)); - assert "should raise bad_coercion_exp for non-injective graph" - (catch_coerce_fail (fn () => ABInj.coerce non_inj_abs)); - ())) () - - (*** inverses and composition ***) - val inj = ABInj.empty |> ABInj.add (a1,b1) |> ABInj.add (a2,b2) - val inv_inj = BAInj.inverse_of inj - - val _ = test "The inverse of an injection with two elements" (fn () => ( - assert "should have size 2" (ABInj.dom_size inj = 2); - assert "should return correct value for b1" (A.name_eq (BAInj.get inv_inj b1, a1)); - assert "should return correct value for b2" (A.name_eq (BAInj.get inv_inj b2, a2)); - assert "should return correct inverse-value for a1" (B.name_eq (BAInj.inv_get inv_inj a1, b1)); - assert "should return correct inverse-value for a2" (B.name_eq (BAInj.inv_get inv_inj a2, b2)); - ())) () - - val comp_inj = AAInj.compose (inv_inj, inj) - - val _ = test "The composite of an injection with its inverse" (fn () => ( - assert "should have size 2" (AAInj.dom_size comp_inj = 2); - assert "should be identity on a1" (A.name_eq (AAInj.get comp_inj a1, a1)); - assert "should be identity on a2" (A.name_eq (AAInj.get comp_inj a2, a2)); - assert "should be identity on inverse of a1" (A.name_eq (AAInj.inv_get comp_inj a1, a1)); - assert "should be identity on inverse of a2" (A.name_eq (AAInj.inv_get comp_inj a2, a2)); - ())) () - -in - val _ = assert_no_failed_tests () -end \ No newline at end of file diff --git a/core/lib/maps/test/name_relation-test.ML b/core/lib/maps/test/name_relation-test.ML deleted file mode 100644 index 46e114d2..00000000 --- a/core/lib/maps/test/name_relation-test.ML +++ /dev/null @@ -1,223 +0,0 @@ -functor TestNameRelation( - structure ABRel : NAME_RELATION - where type dom = TestMapsUtil.A.name - and type DomSet.T = TestMapsUtil.A.NSet.T - and type cod = TestMapsUtil.B.name - and type CodSet.T = TestMapsUtil.B.NSet.T - structure BCRel : NAME_RELATION - where type dom = TestMapsUtil.B.name - and type DomSet.T = TestMapsUtil.B.NSet.T - and type cod = TestMapsUtil.C.name - and type CodSet.T = TestMapsUtil.C.NSet.T - structure ACRel : NAME_RELATION - where type dom = TestMapsUtil.A.name - and type DomSet.T = TestMapsUtil.A.NSet.T - and type cod = TestMapsUtil.C.name - and type CodSet.T = TestMapsUtil.C.NSet.T -) = -struct - open TestMapsUtil - - (* naming convention: for arbitrary name-relations, use letters typical for - relations, but doubled. e.g. 
rel, gg, hh *) - val rel0 = ABRel.empty - - (*** typical mapping tests ***) - - val _ = test "An empty relation" (fn () => ( - assert "should have domain size 0" (ABRel.dom_size rel0 = 0); - assert "should have codomain size 0" (ABRel.cod_size rel0 = 0); - assert "should report true for is_empy" (ABRel.is_empty rel0); - assert "should return NONE for get_opt" (is_none (ABRel.get_opt rel0 a1)); - ())) () - - val rel0 = rel0 |> ABRel.add (a1, b1) |> ABRel.add (a2, b2) - |> ABRel.add (a3, b2) |> ABRel.add (a3, b3) - val rel = rel0 - - val _ = test "A relation" (fn () => ( - assert "should have domain size 3" (ABRel.dom_size rel = 3); - assert "should have codomain size 3" (ABRel.cod_size rel = 3); - assert "should return true for is_mapped(mapped pair)" - (ABRel.is_mapped rel (a1,b1)); - assert "should return false for is_mapped(not mapped pair)" - (not (ABRel.is_mapped rel (a1,b2))); - assert "should return correct image for a1" - (B.NSet.eq (ABRel.img rel a1) (B.NSet.of_list [b1])); - assert "should return correct image for a2" - (B.NSet.eq (ABRel.img rel a2) (B.NSet.of_list [b2])); - assert "should return correct image for a3" - (B.NSet.eq (ABRel.img rel a3) (B.NSet.of_list [b2,b3])); - assert "should return correct inverse image for b1" - (A.NSet.eq (ABRel.inv_img rel b1) (A.NSet.of_list [a1])); - assert "should return correct inverse image for b2" - (A.NSet.eq (ABRel.inv_img rel b2) (A.NSet.of_list [a2,a3])); - assert "should return correct inverse image for b3" - (A.NSet.eq (ABRel.inv_img rel b3) (A.NSet.of_list [a3])); - assert "can get singleton for get(a2)" - (B.name_eq (ABRel.get rel a2, b2)); - assert "can get SOME (singleton) for get_opt(a2)" - (B.name_eq (the (ABRel.get_opt rel a1), b1)); - assert "should fail with multiple_values_exp for get(a3)" - (catch_multi_fail (fn () => ABRel.get rel a3)); - assert "should fail with multiple_values_exp for get_opt(a3)" - (catch_multi_fail (fn () => ABRel.get_opt rel a3)); - assert "should fail with multiple_values_exp for inv_get(b2)" - (catch_multi_fail (fn () => ABRel.inv_get rel b2)); - assert "should fail with multiple_values_exp for inv_get_opt(b2)" - (catch_multi_fail (fn () => ABRel.inv_get_opt rel b2)); - ())) () - - val rel = rel0 |> ABRel.unmap (a3,b2) - val _ = test "A relation after unmap" (fn () => ( - assert "should still have domain size 3" (ABRel.dom_size rel = 3); - assert "should still have codomain size 3" (ABRel.cod_size rel = 3); - assert "should return correct image for a1" - (B.NSet.eq (ABRel.img rel a1) (B.NSet.of_list [b1])); - assert "should return correct image for a2" - (B.NSet.eq (ABRel.img rel a2) (B.NSet.of_list [b2])); - assert "should return correct image for a3" - (B.NSet.eq (ABRel.img rel a3) (B.NSet.of_list [b3])); - assert "should return correct inverse image for b1" - (A.NSet.eq (ABRel.inv_img rel b1) (A.NSet.of_list [a1])); - assert "should return correct inverse image for b2" - (A.NSet.eq (ABRel.inv_img rel b2) (A.NSet.of_list [a2])); - assert "should return correct inverse image for b3" - (A.NSet.eq (ABRel.inv_img rel b3) (A.NSet.of_list [a3])); - ())) () - - val rel = rel0 |> ABRel.delete a3 - val _ = test "A relation after domain deletion" (fn () => ( - assert "should have domain size 2" (ABRel.dom_size rel = 2); - assert "should have codomain size 2" (ABRel.cod_size rel = 2); - assert "should return correct image for a1" - (B.NSet.eq (ABRel.img rel a1) (B.NSet.of_list [b1])); - assert "should return correct image for a2" - (B.NSet.eq (ABRel.img rel a2) (B.NSet.of_list [b2])); - assert "should 
return correct image for a3" - (B.NSet.eq (ABRel.img rel a3) (B.NSet.of_list [])); - assert "should return correct inverse image for b1" - (A.NSet.eq (ABRel.inv_img rel b1) (A.NSet.of_list [a1])); - assert "should return correct inverse image for b2" - (A.NSet.eq (ABRel.inv_img rel b2) (A.NSet.of_list [a2])); - assert "should return correct inverse image for b3" - (A.NSet.eq (ABRel.inv_img rel b3) (A.NSet.of_list [])); - ())) () - - val rel = rel0 |> ABRel.inv_delete b3 - val _ = test "A relation after codomain deletion" (fn () => ( - assert "should still have domain size 3" (ABRel.dom_size rel = 3); - assert "should have codomain size 2" (ABRel.cod_size rel = 2); - assert "should return correct image for a1" - (B.NSet.eq (ABRel.img rel a1) (B.NSet.of_list [b1])); - assert "should return correct image for a2" - (B.NSet.eq (ABRel.img rel a2) (B.NSet.of_list [b2])); - assert "should return correct image for a3" - (B.NSet.eq (ABRel.img rel a3) (B.NSet.of_list [b2])); - assert "should return correct inverse image for b1" - (A.NSet.eq (ABRel.inv_img rel b1) (A.NSet.of_list [a1])); - assert "should return correct inverse image for b2" - (A.NSet.eq (ABRel.inv_img rel b2) (A.NSet.of_list [a2,a3])); - assert "should return correct inverse image for b3" - (A.NSet.eq (ABRel.inv_img rel b3) (A.NSet.of_list [])); - ())) () - - - - (*** coercion tests ***) - - fun di x = case A.dest x of "a1" => [b1] | "a2" => [b2] | "a3" => [b2] | _ => [] - fun multi_di x = case A.dest x of "a1" => [b1] | "a2" => [b2,b3] | _ => [] - fun dom () = [a1,a2,a3] - val abs_rel = Map.mk (Map.mk_graph (dom,di), I) - val multi_abs = Map.mk (Map.mk_graph (dom,multi_di), I) - val coerced_rel = ABRel.coerce abs_rel - val multi_coerced_rel = ABRel.coerce multi_abs - - val _ = test "A coerced map" (fn () => ( - assert "should have domain size 3" (ABRel.dom_size coerced_rel = 3); - assert "should contain the correct values" ( - (B.name_eq (ABRel.get coerced_rel a1, b1)) andalso - (B.name_eq (ABRel.get coerced_rel a2, b2)) andalso - (B.name_eq (ABRel.get coerced_rel a3, b2))); - ())) () - - val _ = test "A multi-valued coercion" (fn () => ( - assert "should return correct image for a1" - (B.NSet.eq (ABRel.img multi_coerced_rel a1) (B.NSet.of_list [b1])); - assert "should return correct image for a2" - (B.NSet.eq (ABRel.img multi_coerced_rel a2) (B.NSet.of_list [b2,b3])); - assert "should return correct image for a3" - (B.NSet.eq (ABRel.img multi_coerced_rel a3) (B.NSet.of_list [])); - ())) () - - - (*** composition ***) - val rel = ABRel.empty |> ABRel.add (a1,b1) |> ABRel.add (a1,b2) - |> ABRel.add (a2,b2) |> ABRel.add (a3,b1) - |> ABRel.add (a4,b3) - val rel2 = BCRel.empty |> BCRel.add (b1,c1) |> BCRel.add (b1,c2) - |> BCRel.add (b2,c2) |> BCRel.add (b4,c4) - - (* rel2 o rel :: a1 |-> c1, a1 |-> c2, a2 |-> c2, a3 |-> c1, a3 |-> c2 *) - - val rel_cmp = ACRel.compose (rel2,rel) - - val _ = test "A composed relation" (fn () => ( - assert "should have domain size 3" (ACRel.dom_size rel_cmp = 3); - assert "should have codomain size 2" (ACRel.cod_size rel_cmp = 2); - assert "should return correct image for a1" - (C.NSet.eq (ACRel.img rel_cmp a1) (C.NSet.of_list [c1,c2])); - assert "should return correct image for a2" - (C.NSet.eq (ACRel.img rel_cmp a2) (C.NSet.of_list [c2])); - assert "should return correct image for a3" - (C.NSet.eq (ACRel.img rel_cmp a3) (C.NSet.of_list [c1,c2])); - assert "should return empty image for a4" - (C.NSet.eq (ACRel.img rel_cmp a4) (C.NSet.of_list [])); - ())) () - - - (* A renaming-like scenario. 
Suppose we take rel as above and rename its elements with: *) - val rn_dom = AAInj.empty |> AAInj.add (a1,a5) - |> AAInj.add (a2,a2) - |> AAInj.add (a3,a1) - |> AAInj.add (a4,a3) - val rn_cod = BBInj.empty |> BBInj.add (b1,b2) - |> BBInj.add (b2,b3) - |> BBInj.add (b3,b1) - (* Note that renamings are performed all at once, so the codomain no longer needs to - be fresh in the domain. *) - - - (* The renamed relation of: - rel :: a1 |-> b1, a1 |-> b2, a2 |-> b2, a3 |-> b1, a4 |-> b3 - - is computed as (rn_cod o rel o rn_dom^-1). It should be: - rel' :: a5 |-> b2, a5 |-> b3, a2 |-> b3, a1 |-> b2, a3 |-> b1 *) - - val rel' = ABRel.compose3 (rn_cod, rel, AAInj.inverse_of rn_dom) - - val _ = test "A renamed relation" (fn () => ( - assert "should have domain size 4" (ABRel.dom_size rel' = 4); - assert "should have codomain size 3" (ABRel.cod_size rel' = 3); - assert "should return correct image for a5" - (B.NSet.eq (ABRel.img rel' a5) (B.NSet.of_list [b2,b3])); - assert "should return correct image for a2" - (B.NSet.eq (ABRel.img rel' a2) (B.NSet.of_list [b3])); - assert "should return correct image for a1" - (B.NSet.eq (ABRel.img rel' a1) (B.NSet.of_list [b2])); - assert "should return correct image for a3" - (B.NSet.eq (ABRel.img rel' a3) (B.NSet.of_list [b1])); - ())) () - -end - -local - open TestMapsUtil - structure Test_ABRel = - TestNameRelation( - structure ABRel = ABRel - structure BCRel = BCRel - structure ACRel = ACRel) -in val _ = assert_no_failed_tests () -end diff --git a/core/lib/maps/test/name_substitution-test.ML b/core/lib/maps/test/name_substitution-test.ML deleted file mode 100644 index 7703bbac..00000000 --- a/core/lib/maps/test/name_substitution-test.ML +++ /dev/null @@ -1,150 +0,0 @@ -local - open TestMapsUtil - - val sub = ASub.empty - - (*** typical mapping tests ***) - - val _ = test "An empty substitution" (fn () => ( - assert "should have size 0" (ASub.dom_size sub = 0); - assert "should report true for is_empy" (ASub.is_empty sub); - assert "should return NONE for get_opt" (is_none (ASub.get_opt sub a1)); - ())) () - - val sub = sub |> ASub.set (a1, a1) - val sub = sub |> ASub.set (a2, a2) - - val _ = test "An substitution with two elements" (fn () => ( - assert "should have size 2" (ASub.dom_size sub = 2); - assert "should return correct value for a1" (A.name_eq (ASub.get sub a1, a1)); - assert "should return correct value for a2" (A.name_eq (ASub.get sub a2, a2)); - assert "should return correct inverse-value for a1" (A.name_eq (ASub.inv_get sub a1, a1)); - assert "should return correct inverse-value for a2" (A.name_eq (ASub.inv_get sub a2, a2)); - assert "should return SOME (elem) for get_opt" (A.name_eq (the (ASub.get_opt sub a1), a1)); - assert "should fail on duplicate_exp for duplicate add" - (catch_dup_fail (fn () => sub |> ASub.add (a1,a2))); - ())) () - - val sub = sub |> ASub.set (a2, a3) - - val _ = test "An substitution after override" (fn () => ( - assert "should still have size 2" (ASub.dom_size sub = 2); - assert "should still have codomain size 2" (A.NSet.cardinality (ASub.get_cod_set sub) = 2); - assert "should return new element" (A.name_eq (ASub.get sub a2, a3)); - assert "should return NONE for old codomain" (is_none (ASub.inv_get_opt sub a2)); - ())) () - - val sub = sub |> ASub.delete a1 - - val _ = test "An substitution after deletion" (fn () => ( - assert "should have size 1" (ASub.dom_size sub = 1); - assert "should have codomain size 1" (A.NSet.cardinality (ASub.get_cod_set sub) = 1); - assert "should return NONE for deleted element" 
(is_none (ASub.get_opt sub a1)); - ())) () - - (*** coercion tests ***) - - fun di x = case A.dest x of "a1" => [a1] | "a2" => [a2] | _ => [] - fun multi_di x = case A.dest x of "a1" => [a1] | "a2" => [a2,a3] | _ => [] - fun non_sub_di x = case A.dest x of "a1" => [a1] | "a2" => [a1] | _ => [] - fun dom () = [a1,a2] - val abs_sub = Map.mk (Map.mk_graph (dom,di), I) - val multi_abs = Map.mk (Map.mk_graph (dom,multi_di), I) - val non_sub_abs = Map.mk (Map.mk_graph (dom,non_sub_di), I) - val coerced_sub = ASub.coerce abs_sub - - val _ = test "A coerced map" (fn () => ( - assert "should have size 2" (ASub.dom_size coerced_sub = 2); - assert "should contain the correct values" ( - (A.name_eq (ASub.get coerced_sub a1, a1)) andalso - (A.name_eq (ASub.get coerced_sub a2, a2))); - ())) () - - val _ = test "A bad coercion" (fn () => ( - assert "should raise bad_coercion_exp for multi-valued graph" - (catch_coerce_fail (fn () => ASub.coerce multi_abs)); - assert "should raise bad_coercion_exp for non-subective graph" - (catch_coerce_fail (fn () => ASub.coerce non_sub_abs)); - ())) () - - (*** inverses and composition, imported from NAME_INJECTION ***) - - val sub = ASub.empty |> ASub.add (a1,a1) |> ASub.add (a2,a2) - val inv_sub = ASub.inverse_of sub - - val _ = test "The inverse of an substitution with two elements" (fn () => ( - assert "should have size 2" (ASub.dom_size sub = 2); - assert "should return correct value for a1" (A.name_eq (ASub.get inv_sub a1, a1)); - assert "should return correct value for a2" (A.name_eq (ASub.get inv_sub a2, a2)); - assert "should return correct inverse-value for a1" (A.name_eq (ASub.inv_get inv_sub a1, a1)); - assert "should return correct inverse-value for a2" (A.name_eq (ASub.inv_get inv_sub a2, a2)); - ())) () - - val comp_sub = ASub.compose (inv_sub, sub) - - val _ = test "The composite of an substitution with its inverse" (fn () => ( - assert "should have size 2" (ASub.dom_size comp_sub = 2); - assert "should be identity on a1" (A.name_eq (ASub.get comp_sub a1, a1)); - assert "should be identity on a2" (A.name_eq (ASub.get comp_sub a2, a2)); - assert "should be identity on inverse of a1" (A.name_eq (ASub.inv_get comp_sub a1, a1)); - assert "should be identity on inverse of a2" (A.name_eq (ASub.inv_get comp_sub a2, a2)); - ())) () - - val aset = A.NSet.of_list [a1,a2,a3] - val avoids_sub = ASub.mk_from_avoids aset |> ASub.extend_fresh aset - val disj = ASub.img_of_set avoids_sub aset - - val _ = test "A disjoint-renamed set" (fn () => ( - assert "should have size 3" (A.NSet.cardinality disj = 3); - assert "should be disjoint" (A.NSet.is_empty (A.NSet.intersect aset disj)); - ())) () - - val p_avoids_sub = ASub.empty - |> ASub.extend_fixed (A.NSet.of_list [a1]) - |> ASub.avoid_set_in_cod (A.NSet.of_list [a2,a3]) - |> ASub.extend_fresh aset - - val p_disj = ASub.img_of_set p_avoids_sub aset - - val _ = test "A partially disjoint-renamed set" (fn () => ( - assert "should have size 3" (A.NSet.cardinality disj = 3); - assert "should have intersection of 1" (A.NSet.cardinality (A.NSet.intersect aset p_disj) = 1); - ())) () - - local - fun opt_eq (SOME n1, SOME n2) = A.name_eq (n1,n2) - | opt_eq (NONE,NONE) = true - | opt_eq _ = false - in - fun catch_clash (a,b,c) f = (f(); false) - handle ASub.name_clash_exp (a',b',c') => (opt_eq (a,a') andalso opt_eq (b,b') andalso A.name_eq (c,c')) - end - - val sub = ASub.empty - |> ASub.add (a1,a2) - |> ASub.add (a2,a3) - |> ASub.avoid_set_in_cod (A.NSet.of_list [a4,a5]) - - val _ = test "A name clashing update" (fn () => ( - 
assert "should raise correctly for mapping-mapping clash" - (catch_clash (SOME a1, SOME a4, a2) (fn () => - sub |> ASub.set (a4,a2) - )); - assert "should raise correctly for mapping-avoids clash" - (catch_clash (SOME a1, NONE, a2) (fn () => - sub |> ASub.avoid_in_cod a2 - )); - assert "should raise correctly for avoids-mapping clash" - (catch_clash (NONE, SOME a3, a4) (fn () => - sub |> ASub.set (a3,a4) - )); - assert "should not raise if existing mapping is repeated" - ( - sub |> ASub.set (a1,a2); - true - ); - ())) () - -in - val _ = assert_no_failed_tests () -end \ No newline at end of file diff --git a/core/lib/maps/test/name_table-test.ML b/core/lib/maps/test/name_table-test.ML deleted file mode 100644 index 2c5b6e5d..00000000 --- a/core/lib/maps/test/name_table-test.ML +++ /dev/null @@ -1,86 +0,0 @@ -local - open TestMapsUtil - - - val tab : int ATab.T = ATab.empty - val _ = test "An empty table" (fn () => ( - assert "should have size 0" (ATab.size tab = 0); - assert "should report true for is_empy" (ATab.is_empty tab); - assert "should return NONE for get_opt" (ATab.get_opt tab a1 = NONE); - ())) () - - val tab = tab |> ATab.set (a1, 42) - - val _ = test "A table with one element" (fn () => ( - assert "should have size 1" (ATab.size tab = 1); - assert "should return correct element" (ATab.get tab a1 = 42); - assert "should return SOME (elem) for get_opt" (ATab.get_opt tab a1 = SOME 42); - assert "should fail on duplicate_exp for duplicate add" - (catch_dup_fail (fn () => tab |> ATab.add (a1,100))); - ())) () - - val tab = tab |> ATab.set (a1, 43) - - val _ = test "A table after override" (fn () => ( - assert "should still have size 1" (ATab.size tab = 1); - assert "should return new element" (ATab.get tab a1 = 43); - ())) () - - val (a',tab) = tab |> ATab.store 44 - val tab = tab |> ATab.store_anon 45 - - val _ = test "A table after storage" (fn () => ( - assert "should have size 3" (ATab.size tab = 3); - assert "should return correct element" (ATab.get tab a' = 44); - ())) () - - val tab = tab |> ATab.delete a1 - - val _ = test "A table after deletion" (fn () => ( - assert "should have size 2" (ATab.size tab = 2); - assert "should return NONE for deleted element" (ATab.get_opt tab a1 = NONE); - ())) () - - fun di x = case A.dest x of "a1" => [1] | "a2" => [2] | _ => [] - fun bad_di x = case A.dest x of "a1" => [1] | "a2" => [2,3] | _ => [] - fun dom () = [a1,a2] - val abs_tab = Map.mk (Map.mk_graph (dom,di), I) - val bad_abs_tab = Map.mk (Map.mk_graph (dom,bad_di), I) - val coerced_tab = ATab.coerce abs_tab - - val _ = test "A coerced table" (fn () => ( - assert "should have size 2" (ATab.size coerced_tab = 2); - assert "should contain the correct values" ( - (ATab.get coerced_tab a1 = 1) andalso - (ATab.get coerced_tab a2 = 2)); - ())) () - - val _ = test "A bad coercion" (fn () => ( - assert "should raise bad_coercion_exp for multi-valued graph" - (catch_coerce_fail (fn () => ATab.coerce bad_abs_tab)); - ())) () - - (* - Keys in a table can be renamed by pre-composition with an injective map. - - Consider a table, and an injective function: - tab :: a1 |-> 1, a2 |-> 2, a3 |-> 3 - rn :: a1 |-> a2, a2 |-> a1, a3 |-> a4 - - Then, we can rename the keys using 'rn' with (tab o rn^-1). 
This yields: - tab' :: a2 |-> 1, a1 |-> 2, a4 |-> 3 - *) - - val tab = ATab.empty |> ATab.add (a1,1) |> ATab.add (a2,2) |> ATab.add (a3,3) - val rn = AAInj.empty |> AAInj.add (a1,a2) |> AAInj.add (a2,a1) |> AAInj.add (a3,a4) - val tab' = ATab.compose (tab, AAInj.inverse_of rn) - - val _ = test "A renamed table" (fn () => ( - assert "should have size 3" (ATab.size tab' = 3); - assert "should return correct elements" - (ATab.get tab' a2 = 1 andalso ATab.get tab' a1 = 2 andalso ATab.get tab' a4 = 3); - ())) () - -in - val _ = assert_no_failed_tests () -end diff --git a/core/lib/maps/test/test_maps_util.ML b/core/lib/maps/test/test_maps_util.ML deleted file mode 100644 index 3d12b882..00000000 --- a/core/lib/maps/test/test_maps_util.ML +++ /dev/null @@ -1,90 +0,0 @@ -structure TestMapsUtil = -struct - open Testing - (* create three distinct kinds of names *) - structure ABCNames :> sig - structure A : SSTR_NAME - structure B : SSTR_NAME - structure C : SSTR_NAME - end = - struct - structure A = SStrName - structure B = SStrName - structure C = SStrName - end - - open ABCNames - - structure ATab = NameTable(structure Dom = A) - - structure ABInj = NameInjection( - structure Dom = A - structure Cod = B - ) - - structure BAInj = NameInjection( - structure Dom = B - structure Cod = A - ) - - structure AAInj = NameInjection( - structure Dom = A - structure Cod = A - ) - - structure BBInj = NameInjection( - structure Dom = B - structure Cod = B - ) - - structure ABFn = NameFunction( - structure Dom = A - structure Cod = B - ) - - structure BCFn = NameFunction( - structure Dom = B - structure Cod = C - ) - - structure ACFn = NameFunction( - structure Dom = A - structure Cod = C - ) - - structure ABRel = NameRelation( - structure Dom = A - structure Cod = B - ) - - structure BCRel = NameRelation( - structure Dom = B - structure Cod = C - ) - - structure ACRel = NameRelation( - structure Dom = A - structure Cod = C - ) - - structure ASub = NameSubstitution(structure Name = A) - structure BSub = NameSubstitution(structure Name = B) - structure CSub = NameSubstitution(structure Name = C) - - - - fun catch_coerce_fail f = (f (); false) - handle Map.bad_coercion_exp () => true - - fun catch_multi_fail f = (f (); false) - handle Map.multiple_values_exp () => true - - fun catch_dup_fail f = (f (); false) - handle Map.duplicate_exp () => true - - (* some names *) - val (a1,a2,a3,a4,a5) = (A.mk "a1", A.mk "a2", A.mk "a3", A.mk "a4", A.mk "a5") - val (b1,b2,b3,b4,b5) = (B.mk "b1", B.mk "b2", B.mk "b3", B.mk "b4", B.mk "b5") - val (c1,c2,c3,c4,c5) = (C.mk "c1", C.mk "c2", C.mk "c3", C.mk "c4", C.mk "c5") - -end \ No newline at end of file diff --git a/core/lib/names/basic_name.ML b/core/lib/names/basic_name.ML deleted file mode 100644 index 23b736fb..00000000 --- a/core/lib/names/basic_name.ML +++ /dev/null @@ -1,82 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - Basic things you want from names... 
name tables, name sets, - and simple tables, default names, ordering and pretty printing -*) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -(* names with an ordering, sets of names, and tables from names to objects *) - -(* NOTE: this signature is unused, its just for building NAME which - is used, see names.ML *) - -signature BASIC_NAME -= sig - type name - - structure Namer : NAMER - sharing type Namer.name = name - - structure NTab : BASIC_NAME_TAB - sharing type NTab.name = name; - - structure NSet : BASIC_NAME_SET - sharing NTab.NSet = NSet; - - structure Tab : TABLE - sharing type NTab.Tab.table = Tab.table; - sharing type NTab.Tab.key = Tab.key; - -(* type nmap = name NTab.T *) - structure Rnm : BASIC_RENAMING - where type 'a nmap = 'a NTab.T - and type name = name - and type nset = NSet.T - - val default_name : name - val pretty_name : name -> Pretty.T - val string_of_name : name -> string - val print_name : name -> unit - val name_eq : name * name -> bool - val name_ord : name * name -> General.order -end; - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -functor BasicNameFun(structure Namer : NAMER - val default_name : Namer.name) -: BASIC_NAME -= struct -type name = Namer.name; -val pretty_name = Namer.pretty; -val string_of_name = Pretty.string_of o Namer.pretty; -val print_name = Pretty.writeln o pretty_name; -val name_ord = Namer.ord; -fun name_eq ab = case (name_ord ab) of EQUAL => true | _ => false; -val default_name = default_name; -structure Namer = Namer; -structure NSet = BasicNameSetFun(Namer); -structure NTab = BasicNameTabFun(structure NSet = NSet and Namer = Namer); -structure Tab = NTab.Tab; -structure Rnm = BasicRenamingFun(structure NTab = NTab and Namer = Namer); -end; - - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* basic names: strings, integers, and pairs of strings and ints *) -(* -structure BasicStrName : BASIC_NAME where type name = string -= BasicNameFun(structure Namer = StrNamer val default_name = "a"); - -structure BasicIntName : BASIC_NAME where type name = int -= BasicNameFun(structure Namer = IntNamer val default_name = 0); - -structure BasicStrIntName : BASIC_NAME where type name = string * int -= BasicNameFun(structure Namer = StrIntNamer val default_name = ("a",0)); -*) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* PolyML pretty printing *) -(* -install_pp (make_pp ["BasicStrName", "NSet", "T"] (Pretty.pprint o BasicIntName.NSet.pretty)); -install_pp (make_pp ["BasicStrName", "NSet", "T"] (Pretty.pprint o BasicStrName.NSet.pretty)); -install_pp (make_pp ["BasicStrIntName", "NSet", "T"] (Pretty.pprint o BasicStrIntName.NSet.pretty)); -*) diff --git a/core/lib/names/basic_nameset.ML b/core/lib/names/basic_nameset.ML deleted file mode 100644 index cf0a453c..00000000 --- a/core/lib/names/basic_nameset.ML +++ /dev/null @@ -1,1038 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: basic_nameset.ML - Author: Lucas Dixon - lucas.dixon@ed.ac.uk - Oct 2005 - 2010 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - - Used to get new nice fresh names (quickly). With a slight - adaptation, it can also be thought of as a memory management where - names are memory locations. 
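As a rough sketch of the intended use (taking names to be integers, so that the successor of a name is just the next integer, and using operations from the BASIC_NAME_SET signature below; the bindings used, n and n2 are only for illustration):

    val used = of_list [1, 2, 3, 7]      (* stored as the ranges [1-3] and [7] *)
    val (n, used') = add_new 2 used      (* 2 is taken, so n = 4; [1-4] and [7] are now used *)
    val n2 = new used' 7                 (* suggests 8 without changing the set *)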
- - A descrete interval table (for fresh names): a specialised, AVL - tree inspired, representation for holding intervals and getting - quick lookup/add/delete, worst case O(Log(n)^2); but when adjacent - names are used it is much faster (often constant time). - -*) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -signature BASIC_NAME_SET = -sig - type name - - datatype T - = Empty - | Br of int (* depth of stuff this branch *) - * (name * name) (* inclusive min/max range *) - * T * T; (* subtrees with smaller than min in left and bigger than max in right *) - - exception empty_exp of string; - exception duplicate_exp of name * T; - - val empty : T - val single : name -> T - val is_empty : T -> bool - val is_singleton : T -> bool - val tryget_singleton : T -> name option - val get_min : T -> name option (* lowest value, log time *) - val get_max : T -> name option (* highest value, log time *) - val get_local_bot : T -> name option (* random middle name - constant time *) - val get_local_top : T -> name option (* random middle name - constant time *) - val get_first : T -> name option (* smallest name - log time *) - val get_last : T -> name option (* largest name - log time *) - - (* get element that maximizes the given function *) - val maximize : (name -> int) -> T -> name option - - (* get element that minimizes the given function *) - val minimize : (name -> int) -> T -> name option - - (* number of elements *) - val cardinality : T -> int - - (* fold over ranges, in order smallest to biggest *) - val fold_ranges : ((name * name) -> 'a -> 'a) -> T -> 'a -> 'a - val fold_ranges_rev : ((name * name) -> 'a -> 'a) -> T -> 'a -> 'a - (* fold all elements, in order, smallest to biggest *) - val fold : (name -> 'a -> 'a) -> T -> 'a -> 'a - val fold_rev : (name -> 'a -> 'a) -> T -> 'a -> 'a - (* fold try, stop early if needed *) - (* val fold_try : (name -> 'a -> 'a option) -> T -> 'a -> 'a option *) - - (* to and from lists *) - val add_to_rangelist : T -> (name * name) list -> (name * name) list - val list_of : T -> name list - val of_list : name list -> T - - (* to seq *) - val seq_of : T -> name Seq.seq - - (* make a fresh name, based on a given name *) - val new : T -> name -> name - - (* adding a name *) - val try_ins_fresh : name -> T -> T option (* NONE is it already exists *) - val add_new : name -> T -> (name * T) - val ins_fresh : name -> T -> T (* raises duplicate_exp *) - val add_new_opt : name -> T -> (name option * T) (* if name clash then SOME new-name *) - val add : name -> T -> T (* = union1 *) - - (* adding many names *) - val add_list : name list -> T -> T - val add_range : name * name -> T -> T - - (* renaming *) - val rename1 : name -> name -> T -> (name * T) (* fst disappears, adds snd *) - val rename1_opt : name -> name -> T -> (name option * T) (* SOME if renamed to avoid a clash *) - val try_rename1 : name -> name -> T -> T option (* NONE on name clash *) - - (* lookup *) - val lookup : T -> name -> (name * name) option - val contains : T -> name -> bool - - (* delete *) - val delete' : name -> T -> T (* raises empty_exp if name not in set *) - val delete : name -> T -> T (* is identify if name not in set *) - val try_delete : name -> T -> T option (* is NONE if name not in set *) - - (* filter; function true to keep element *) - val filter : (name -> bool) -> T -> T - - (* val delete1 : name -> T -> T for debugging only *) - - val contains_range : T -> (name * name) -> bool - val contains_range_exactly : T -> (name * name) -> bool - - (* 
contains exactly the same ranges *) - val eq : T -> T -> bool - (* EQUAL = eq, LESS = snd has smallet disjoint member, - GREATER = fst has smallest disjoint member *) - val ord : T * T -> order - - (* more set-like ops *) - val union_merge : T -> T -> T (* union *) - (* ADD: add a merge_distinct which raises exception on not_distinct? *) - val subtract : T -> T -> T (* subtraction = (fst - snd) *) - val remove_set : T -> T -> T (* subtraction = (snd - fst) *) - val intersect : T -> T -> T (* intersection *) - val contrast : T -> T -> (T * T * T) (* in only left, in both, in only right*) - - (* set predicates *) - val nonempty_intersect : T -> T -> bool (* intersection is not empty *) - - - (* checking properties of all names *) - val forall : (name -> bool) -> T -> bool - val exists : (name -> bool) -> T -> bool - val get_exists : (name -> bool) -> T -> name option - val search : (name -> 'a option) -> T -> 'a option - - (* moving around the name set (log time) *) - val next_smaller : T -> name -> name option - val next_bigger : T -> name -> name option - - (* pull out the biggest and smallest elements (log time) *) -(* val pull_biggest : T -> (name * T) option; - val pull_smallest : T -> (name * T) option; *) - - (* pull out the biggest and smallest range elements (log time) *) - val pull_biggest_range : T -> ((name * name) * T) option; - val pull_smallest_range: T -> ((name * name) * T) option; - (* "empty_exp str" exception throwing versions.. *) - val pull_biggest_range' : T -> (name * name) * T; - val pull_smallest_range': T -> (name * name) * T; - - (* pull from the middle range, as value (either the top or bot end) - out from the name table (constant time), give back element and - new table; NONE if empty; *) - val pull_local_top : T -> (name * T) option; - val pull_local_bot : T -> (name * T) option; - - val del_local_top : T -> T; - val del_local_bot : T -> T; - - (* make the list of every element pulled out and paired with the rest: - [1,2,3] => [(1,[2,3]), (2,[1,3]), (3,[1,2])] *) - val pull_each : T -> (name * T) list - - (* check subset relation: true iff first set is a subset of the second set. *) - val sub_set : T -> T -> bool - - (* the list of all subsets *) - val powerset : T -> T list - - (* assumes a in tr; returns { ((leq a tr) UN x) | a <= x & x in tr }, - where (leq a tr) is elements less than or equal to a in tr. *) - val powerset_geq : name -> T -> T list - - (* check if tree is balanced everywhere *) - val is_fully_balanced : T -> bool - - (* internal: rebalances w.r.t. only top level inbalance. - log time worst case. *) - val fully_rebalance : T -> T - - (* Given a tree which is unbalanced at the top level, but with balanced - subtrees, rebalance it. 
*) - val rebalance : T -> T - - (* internal *) - val join_from_delete : T -> T -> T - val find_and_delete_bot_join : name * name -> T -> name * T - val find_and_delete_top_join : name * name -> T -> name * T - - val pretty_full : T -> Pretty.T - val print_full : T -> unit - val pretty_struct : T -> Pretty.T - val print_struct : T -> unit - val pretty : T -> Pretty.T - val print : T -> unit - - val assert_correct : T -> T (* raises if not correct *) - - val subdivide : int -> T -> T list (* subdivide into N roughly even sets *) - - - (* should never happen *) - exception delete_exp of string * name * T; - exception add_range_exp of string * (name * name) * T; - exception union_merge_exp of string * T * T; - - (* FIXME: unify the various bug excepotions *) - exception bug0_exp of string; - exception bug_exp of string * T; - exception bug2_exp of string * T * T; - exception bad_fnametab_exp of string * T; - -end; - - -(* Fresh Name Table Functor *) -functor BasicNameSetFun(N : NAMER) -(* : BASIC_NAME_SET *) -= struct - -structure N = N; - -type name = N.name; - -(* used to allow the bottom element to be its own pred; thus we can -have a fixed bottom point *) -fun pred_or_same s = case N.pred s of NONE => s | SOME ps => ps; - -(* invarient: as for balanced search trees (AVL trees): left is less, -right is bigger, max difference in height is 1 *) -datatype T - = Empty - | Br of int (* depth of stuff this branch *) - * (name * name) (* inclusive min/max range *) - * T * T; (* subtrees with smaller than min in left and bigger than max in right *) - -(* hopefully impossible exceptions for debugging *) -exception delete_exp of string * name * T; -exception add_range_exp of string * (name * name) * T; -exception add_exp of string * name * T; -exception union_merge_exp of string * T * T; - - -(* hopefully impossible exceptions *) -exception bug0_exp of string; -exception bug_exp of string * T; -exception bug2_exp of string * T * T; -exception duplicate_exp of name * T; - -(* table was empty *) -exception empty_exp of string; -(* table was badly formed... *) -exception bad_fnametab_exp of string * T; - -(* helper things *) -fun is_less a b = case N.ord (a,b) of LESS => true | _ => false; -fun is_leq a b = case N.ord (a,b) of GREATER => false | _ => true; -fun is_eq a b = case N.ord (a,b) of EQUAL => true | _ => false; - -(* compute the depth of a tree *) -fun real_depth_of Empty = 0 - | real_depth_of (Br (_,_,l,r)) = - 1 + (Int.max (real_depth_of l, real_depth_of r)); -(* lookup the stored depth *) -fun depth_of Empty = 0 - | depth_of (Br (i,_,_,_)) = i; - -(* bottom and top of a branch range *) -fun get_min (Br (_, (b,t),Empty,r)) = SOME b - | get_min (Br (_, (b,t),l,r)) = get_min l - | get_min Empty = NONE; -fun get_max (Br (_, (b,t),l,Empty)) = SOME t - | get_max (Br (_, (b,t),l,r)) = get_max r - | get_max Empty = NONE; - - -(* (local) check if a tree is balanced - according to it's stored depths at -the top level. *) -fun is_locally_balanced Empty = true - | is_locally_balanced (Br(_,v, l, r)) = - (Int.abs ((depth_of l) - (depth_of l))) < 2; - -(* (global) check the the tree is correctly balanced at every node in -the tree. 
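For a concrete picture (taking names to be integers for illustration), the set {1,2,3,6,7} could be stored as

    Br (2, (1,3), Empty, Br (1, (6,7), Empty, Empty))

that is, the run 1-3 at the root and the run 6-7 in the right subtree, with cached depths 2 and 1. The two subtrees of every node differ in depth by at most one, so a tree of this shape passes the balance check below.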
*) -fun is_fully_balanced Empty = true - | is_fully_balanced (tr as Br(_,v, l, r)) = - is_locally_balanced tr - andalso is_fully_balanced l - andalso is_fully_balanced r; - -(* correctness checking functions (these are supposed to be invarient - over top-level functions): *) -fun assert_balanced tr = - is_fully_balanced tr orelse - raise bad_fnametab_exp ("assert_balanced",tr); - -fun assert_correct_depths Empty = true - | assert_correct_depths (tr as Br (d,(b,t),l,r)) = - if depth_of tr <> real_depth_of tr then - raise bad_fnametab_exp ("assert_correct_depths",tr) - else - assert_correct_depths l andalso assert_correct_depths r; - -(* IMPROVE: very inefficient: lots of re-computing of max r and max l *) -fun assert_correctly_ordered Empty = true - | assert_correctly_ordered (Br (d,(b,t),l,r)) = - if (case (get_min r) of NONE => false | SOME rv => is_leq rv t) orelse - (case (get_max l) of NONE => false | SOME lv => is_leq b lv) orelse - is_less t b - then raise bad_fnametab_exp ("assert_correctly_ordered",Br (d,(b,t),l,r)) - else assert_correctly_ordered l andalso assert_correctly_ordered r; - -fun assert_correct t = - (assert_balanced t; - assert_correct_depths t; - assert_correctly_ordered t; t); - - -(* basic constructors *) -val empty = Empty; -fun single x = Br(1,(x,x),Empty,Empty); - -(* depth of a br tree with these subtrees *) -fun br_depth Empty Empty = 1 - | br_depth (Br(d,_,_,_)) Empty = d + 1 - | br_depth Empty (Br(d,_,_,_)) = d + 1 - | br_depth (Br(dl,_,_,_)) (Br(dr,_,_,_)) = - 1 + Int.max (dl,dr); - -(* depth of a br tree given one child and where the other child has depth d *) -fun br_depth1 Empty d = d - | br_depth1 (Br(d,_,_,_)) d2 = 1 + Int.max (d,d2); - - -(* IMPROVE: use efficient fold over to replace fold and dest... *) -(* add tree ranges to a list of ranges *) -fun add_to_rangelist Empty L = L - | add_to_rangelist (Br (_, g, l, r)) L = - add_to_rangelist l (g :: add_to_rangelist r L); - -fun dest_range (b,t) = - (case N.ord (b, t) - of EQUAL => [b] - | GREATER => [] (* should not happen - new exception? *) - | LESS => b :: (dest_range (N.suc b, t))); - -(* give back list of all used names, expands the intervals *) -fun list_of tr = maps dest_range (add_to_rangelist tr []); - -(* empty/singleton checks *) -fun is_empty Empty = true - | is_empty _ = false; -fun is_singleton (Br (_, g, Empty, Empty)) = - (case N.ord g of EQUAL => true | _ => false) - | is_singleton _ = false; -fun tryget_singleton (Br (_, g as (b,t), Empty, Empty)) = - (case N.ord g of EQUAL => SOME b | _ => NONE) - | tryget_singleton _ = NONE; - - -(* element is below tree ranges *) -fun less_than s (Br (_, (b,t),l,r)) = (N.ord(s,b) <> GREATER) - | less_than s Empty = true; -(* element is above tree ranges *) -fun more_than s (Br (_, (b,t),l,r)) = (N.ord(s,t) <> LESS) - | more_than s Empty = true; -(* element is within tree ranges *) -fun within s (b,t) = (N.ord(s,b) <> LESS) andalso (N.ord(s,t) <> GREATER); - -(* bottom and top of a branch range *) -fun get_local_bot (Br (_, (b,t),l,r)) = SOME b - | get_local_bot _ = NONE; -fun get_local_top (Br (_, (b,t),l,r)) = SOME t - | get_local_top _ = NONE; - - -(* first and last name *) -fun get_first (Br (_, (b,t),Empty,r)) = SOME b - | get_first (Br (_, (b,t),l,r)) = get_first l - | get_first Empty = NONE; -fun get_last (Br (_, (b,t),l,Empty)) = SOME t - | get_last (Br (_, (b,t),l,r)) = get_last r - | get_last Empty = NONE; - - -(* Given a tree which is unbalanced at the top level, but whos -subtrees are balanced, rebalance it. 
-- I think this is optimal - -think of actual runtime -- log (empty, N), requires log N shuffles. *) -fun rebalance (Br(_,v, Empty, Empty)) = (Br(1,v,Empty,Empty)) - | rebalance (Br(_,v, Empty, r as Br(d, v2, l2, r2))) = - if d > 1 then - let val newl = rebalance(Br(depth_of l2,v,Empty,l2)) - in Br(br_depth newl r2,v2,newl,r2) end - else Br(2,v,Empty,r) (* no rebalancing needed *) - | rebalance (Br(_,v, l as Br(d,v2,l2,r2), Empty)) = - if d > 1 then - let val newr = rebalance(Br(depth_of r2,v,r2,Empty)) - in Br(br_depth l2 newr,v2,l2,newr) end - else Br(2,v,l,Empty) (* no rebalancing needed *) - | rebalance (tr as Br(_,v, l as Br(ld,lv,ll,lr), r as Br(rd,rv,rl,rr))) = - let val dd = ld - rd in - if dd > 1 then - let val newr = rebalance(Br(br_depth lr r,v,lr,r)) - in Br(br_depth newr ll,lv,ll,newr) end - else if dd < ~1 then - let val newl = rebalance(Br(br_depth l rl,v,l,rl)) - in Br (br_depth newl rr,rv,newl,rr) end - else tr - end - | rebalance Empty = Empty; - - -(* completely re-balance a tree, in case it's generated randomly: -equiv to sorting, n log n time at worst. Assumes correct cached depth -values *) -fun fully_rebalance Empty = Empty - | fully_rebalance (Br(d,v,l,r)) = - rebalance (Br(d,v,fully_rebalance l,fully_rebalance r)); - -(* pulls out the biggest element, also giving back new balanced tree, - assumes initial true was well formed (especially balanced). *) -fun pull_biggest_range' Empty = raise empty_exp "pull_biggest" - | pull_biggest_range' (Br(d,v,l,Empty)) = (v, l) - | pull_biggest_range' (Br(d,v,l,r)) = - let val (biggest,newr) = pull_biggest_range' r - in (biggest, rebalance (Br(br_depth l newr,v,l,newr))) end -fun pull_biggest_range tr = - SOME (pull_biggest_range' tr) handle empty_exp _ => NONE; - -fun pull_smallest_range' Empty = raise empty_exp "pull_smallest" - | pull_smallest_range' (Br(d,v,Empty,r)) = (v, r) - | pull_smallest_range' (Br(d,v,l,r)) = - let val (smallest,newl) = pull_smallest_range' l - in (smallest, rebalance (Br(br_depth newl r,v,newl,r))) end -fun pull_smallest_range tr = - SOME (pull_smallest_range' tr) handle empty_exp _ => NONE; - -(* for use when these are the children of a deleted node, in order to - make the replacement for the deleted node. *) -(* THINK: is this the most efficient? *) -fun join_from_delete Empty Empty = Empty - | join_from_delete (tr as Br _) Empty = tr - | join_from_delete Empty (tr as Br _) = tr - | join_from_delete (l as Br(ld,lv,ll,lr)) (r as Br(rd,rv,rl,rr)) = - if ld >= rd then - let val (newv, newl) = pull_biggest_range' l - in Br(br_depth newl r, newv, newl, r) end - else - let val (newv, newr) = pull_smallest_range' r - in Br(br_depth l newr, newv, l, newr) end; - -(* Note: Depth is recalculated in rebalance. 
*) -(* find_and_delete_bot_join (sx,sop) tr *) -(* where - 1) sp = Pred sx - 2) (tr, vrange, trr) is a valid tree (the parent tree) - 3) Suc sx = bottom(vrange) - Inferable: sx > tr - returns: (new bottom value, new left-tree) -*) -fun find_and_delete_bot_join (s as (sx,sp)) Empty = (sx,Empty) - | find_and_delete_bot_join (s as (sx,sp)) (Br(d,v as (b,t),l,r)) = - (case N.ord(sp,t) of - EQUAL => (b, l) (* r must be empty by (1) and (3) *) - | GREATER => let val (s',r') = find_and_delete_bot_join s r - in (s',rebalance (Br(br_depth l r',v,l,r'))) end - | LESS => (* implies sx <= t, and thus by (3) that this tree - should already be joined to the parent *) - raise bug_exp ("find_and_delete_bot_join: badly formed tree", - (Br(d,v,l,r)))); -(* -let val (s',l') = find_and_delete_bot_join s l - in (s',rebalance (Br(d,v,l',r))) end; -*) - -fun find_and_delete_top_join (s as (sx,ss)) Empty = (sx,Empty) - | find_and_delete_top_join (s as (sx,ss)) (Br(d,v as (b,t),l,r)) = - (case N.ord(ss,b) of - EQUAL => (t, r) (* by (1) and (3), l must be empty *) - | GREATER => (* implies sx >= b, and thus by (3) that this tree - should already be joined to the parent *) - raise bug_exp ("find_and_delete_top_join: badly formed tree", - (Br(d,v,l,r))) - | LESS => let val (s',l') = find_and_delete_top_join s l - in (s',rebalance (Br(br_depth l' r,v,l',r))) end); - -(* let val (s',r') = find_and_delete_top_join s r - in (s',rebalance (Br(d,v,l,r'))) end -*) - -(* add a new value s into the name table *) -fun add' (s as (sp,sx,ss)) Empty = Br (1,(sx,sx),Empty,Empty) - | add' (s as (sp,sx,ss)) (tr as Br (d,v as (b,t),l,r)) = - if within sx v then tr - else ( - case N.ord(sx,b) of - LESS => - (case N.ord(ss,b) (* check for bot join *) - of EQUAL => - let val (newb,newl) = find_and_delete_bot_join (sx,sp) l - in rebalance (Br (br_depth newl r,(newb,t),newl,r)) end - | _ => let val newl = add' s l - in rebalance (Br (br_depth newl r,(b,t),newl,r)) end) - | _ => - (case N.ord(sp,t) (* check for top join *) - of EQUAL => - let val (newt,newr) = find_and_delete_top_join (sx,ss) r - in rebalance (Br (br_depth l newr,(b,newt),l,newr)) end - (* if completely separate, and not less, put on right *) - | _ => let val newr = add' s r - in rebalance (Br (br_depth l newr,(b,t),l,newr)) end) - ); - - -fun add s = add' (pred_or_same s, s, N.suc s); -(* fun add s = assert_correct o add' (pred_or_same s, s, N.suc s); *) - -fun add_list l = fold add l; -(* fun of_list l = assert_correct (add_list l empty); *) -fun of_list l = add_list l empty; - -(* IMPROVE: implement efficient version, cached and/or computer from intervals *) -val cardinality = length o list_of; - - -(* lookup the range that a value is in. 
returns NONE if not in tree *) -fun lookup tr s = - let - fun lookup' Empty = NONE - | lookup' (tr as Br (d,v as (b,t),l,r)) = - (case N.ord(s,b) - of GREATER => - (case N.ord(s,t) - of GREATER => lookup' r - | _ => SOME v) - | EQUAL => SOME v - | LESS => lookup' l) - in lookup' tr end - -fun contains tr s = case lookup tr s of NONE => false | SOME _ => true; - -(* find the next entry in the name collection smaller than s *) -fun next_smaller tr s = - let - fun next' Empty = NONE - | next' (tr as Br (d,v as (b,t),l,r)) = - (case N.ord(s,b) - of GREATER => (case N.ord(s,t) - of GREATER => (case next' r of NONE => SOME t | x => x) - | _ => SOME (pred_or_same s)) - | EQUAL => get_last l - | LESS => next' l) - in next' tr end; - -(* find the next entry in the name collection bigger than s *) -fun next_bigger tr s = - let - fun next' Empty = NONE - | next' (tr as Br (d,v as (b,t),l,r)) = - (case N.ord(s,t) - of GREATER => next' r - | EQUAL => get_first r - | LESS => (case N.ord(s,b) - of LESS => (case next' l of NONE => SOME b | x => x) - | _ => SOME (N.suc s))) - in next' tr end; - -(* give back a lazy list of all used names *) -fun seq_of tr = let fun proceed NONE = Seq.empty - | proceed (SOME n) = Seq.make ( - fn()=>SOME (n,(proceed (next_bigger tr n))) - ) - in proceed (get_min tr) end; - -(* adding new elements *) -fun add_new a tr = - case lookup tr a of - NONE => (a, add a tr) - | SOME (b,t) => - let val a2 = N.suc t in (a2, add a2 tr) end; - -fun add_new_opt a tr = - case lookup tr a of - NONE => (NONE, add a tr) - | SOME (b,t) => - let val a2 = N.suc t in (SOME a2, add a2 tr) end; - -fun new tr a = - case lookup tr a of - NONE => a - | SOME (b,t) => - let val a2 = N.suc t in a2 end; - -fun ins_fresh a tr = - case lookup tr a of - NONE => add a tr - | _ => raise duplicate_exp (a,tr); - -fun try_ins_fresh a tr = - case lookup tr a of - NONE => SOME (add a tr) (* fresh, so add it *) - | SOME _ => NONE; (* not fresh! *) - - -(* grow from s getting smaller. Gives back new max and rest of tree. - will break balancing of tree. Given a new lowest value, extend the - tree, giving back the new lowest value and the new subtree (balanced) *) -fun grow_left s Empty = (s,Empty) - | grow_left s (Br(d,v as (b,t),l,r)) = - (case N.ord (s,b) - of LESS => grow_left s l - | _ => (case N.ord (s, N.suc t) - of GREATER => - let val (s', r') = grow_left s r - in (s', rebalance (Br(br_depth r' l,v,l,r'))) end - | _ => (b, l))); - -(* grow from s getting bigger. Gives back new max and rest of tree. 
- Will break balancing of tree *) -fun grow_right s Empty = (s,Empty) - | grow_right s (Br(d,v as (b,t),l,r)) = - (case N.ord (s,t) - of GREATER => grow_right s r - | _ => (case N.ord (N.suc s,b) - of LESS => - let val (s', l') = grow_right s l - in (s', rebalance (Br(br_depth l' r,v,l',r))) end - | _ => (t, r))); - -fun min_name n n2 = case N.ord (n,n2) of LESS => n | _ => n2; -fun max_name n n2 = case N.ord (n,n2) of LESS => n2 | _ => n; - -(* with rebalancing *) -fun add_range (v as (b,t)) Empty = Br (1,v,Empty,Empty) - | add_range (v as (b,t)) (tr as Br (d,v2 as (b2,t2),l,r)) = - rebalance - (case N.ord (b,b2) - of LESS => (* left is below this node's left *) - (case N.ord (N.suc t,b2) - of LESS => (* strictly less than this node *) - let val l' = add_range v l - in Br (1 + Int.max(depth_of l', depth_of r),v2,l',r) - end - | _ => (* top of added range touches left of this node *) - let val (b',l') = grow_left b l in - (case N.ord (t,t2) - of GREATER => (* completely overlaps this node *) - let val (t',r') = grow_right t r - in (Br (1 + Int.max(depth_of l', depth_of r'), - (b',t'),l',r')) end - | _ => Br (1 + Int.max(depth_of l',depth_of r),(b',t2),l',r)) - end) - | _ => - (case N.ord (pred_or_same b,t2) - of GREATER => (* strictly bigger than this node *) - let val r' = add_range v r - in Br (1 + Int.max(depth_of l, depth_of r'),v2,l,r') end - | _ => (* bottom of added range overlaps on this node *) - (case N.ord (t,t2) - of GREATER => (* top overhangs to the right *) - let val (t',r') = grow_right t r - in Br (1 + Int.max(depth_of l, depth_of r'), - (min_name b b2,t'),l,r') end - | _ => (* within this node *) - tr))); - -(* ? THINK: is this the most efficient way ? *) -fun union_merge Empty tr = tr - | union_merge (Br(d,v,l,r)) tr = - union_merge r (union_merge l (add_range v tr)); -(* val union_merge = assert_correct oo union_merge; *) - - -fun drop_right (Br(_,(b,t),l,_)) = Br(1+(depth_of l),(b,t),l,Empty) - | drop_right _ = raise bug0_exp "Tried to drop right from Empty tree"; -fun drop_left (Br(_,(b,t),_,r)) = Br(1+(depth_of r),(b,t),Empty,r) - | drop_left _ = raise bug0_exp "Tried to drop left from Empty tree"; - -(* fast(est?) 
sub_set check arg1 is a subset of arg2 *) -fun sub_set Empty tr = true - | sub_set (Br _) Empty = false - | sub_set (n1 as Br(d1,(b1,t1),l1,r1)) (n2 as Br(d2,(b2,t2),l2,r2)) = - (case N.ord (t1,b2) - of LESS => sub_set (drop_right n1) l2 andalso sub_set r1 n2 - | EQUAL => (case N.ord (b1, b2) - of EQUAL => sub_set l1 l2 andalso sub_set r1 (drop_left n2) - | _ => false) - | GREATER => - (case N.ord (b1,t2) - of LESS => (case N.ord (b1,b2) - of LESS => false - | EQUAL => (case N.ord (t1,t2) - of GREATER => false - | EQUAL => sub_set l1 l2 andalso sub_set r1 r2 - | LESS => sub_set l1 l2 andalso sub_set r1 (drop_left n2)) - | GREATER => (case N.ord (t1,t2) - of GREATER => false - | EQUAL => sub_set l1 (drop_right n2) andalso sub_set r1 r2 - | LESS => sub_set l1 (drop_right n2) andalso sub_set r1 (drop_left n2))) - | EQUAL => - (case N.ord (t1, t2) - of EQUAL => sub_set l1 (drop_right n2) andalso sub_set r1 r2 - | _ => false) - | GREATER => sub_set (drop_left n1) r2 andalso sub_set l1 n2)); - -(* assumes b < t *) -fun contains_range tr (r as (b,t)) = - let fun recf Empty = false - | recf (tr as Br(d1,(b1,t1),l1,r1)) = - (case N.ord (t,b1) of - LESS => recf l1 - | EQUAL => (case N.ord (b, b1) of EQUAL => true - (* r has value in gap between t1 and l1 *) - | _ => false) - | GREATER => (case N.ord (b,t1) of GREATER => recf r1 - | EQUAL => (case N.ord (t,t1) of EQUAL => true - (* r has value in gap between t1 and r1 *) - | _ => false) - | LESS => ((* r has value in gap between t1 and r1 *) - case N.ord (t,t1) of GREATER => false - | _ => true))) - in recf tr end; - -(* assumes b < t *) -fun contains_range_exactly tr (r as (b,t)) = - let fun recf Empty = false - | recf (tr as Br(d1,(b1,t1),l1,r1)) = - (case N.ord (t,t1) of - LESS => - (case N.ord (t, b1) of LESS => recf l1 - | _ => false) - | EQUAL => (case N.ord (b, b1) of EQUAL => true - (* r has value in gap between t1 and l1 *) - | _ => false) - | GREATER => (case N.ord (b, t1) of GREATER => recf r1 - | _ => false)) - in recf tr end; - - - - -(* delete - consider cases: 1. s is in the middle of a range, then we -need to split the range and insert the appropriate half into the -shorter branch of the tree. 2. s is the end of some range, then we -just move the range along one bit. But when the range we are adjusting -is just one element, then we simply remove this node, as per avl -trees. 3. recurse on left or right *) -fun delete' s Empty = raise empty_exp "delete': no such element to delete" - | delete' s (tr as Br (d,v as (b,t),l,r)) = - (case (N.ord(s,b), N.ord(s,t)) - of (EQUAL,EQUAL) => - join_from_delete l r - | (GREATER,LESS) => - (case Int.compare (depth_of l, depth_of r) - of LESS => Br (d,(N.suc s,t),add_range (b,pred_or_same s) l,r) - | EQUAL => - let val newl = add_range (b,pred_or_same s) l - in Br (br_depth newl r,(N.suc s,t),newl,r) end - | MORE => Br (d,(b,pred_or_same s),l,add_range (N.suc s,t) r)) - | (EQUAL, LESS) => Br (d,(N.suc b,t),l,r) - | (GREATER, EQUAL) => Br (d,(b,pred_or_same t),l,r) - | (LESS, LESS) => - let val newl = delete' s l - in rebalance (Br (br_depth newl r,v,newl,r)) end - | (GREATER, GREATER) => - let val newr = delete' s r - in rebalance (Br (br_depth l newr,v,l,newr)) end - | (EQUAL, GREATER) => raise bug0_exp "impossible! delete' 1" - | (LESS, EQUAL) => raise bug0_exp "impossible! delete' 2" - | (LESS, GREATER) => raise bug0_exp "impossible! 
delete' 3"); - -fun delete el set = delete' el set handle empty_exp _ => set; -fun try_delete el set = SOME (delete' el set) handle empty_exp _ => NONE; - -(* val delete1 = delete; -fun delete2 s t = - assert_correct (delete1 s t) - handle bad_fnametab_exp (msg,_) => raise delete_exp ("delete2 bug: " ^ msg, s, t) - | bug0_exp msg => raise delete_exp ("delete2 bug: " ^ msg, s, t); -val delete = delete2; -*) - -(* pulls out the biggest element, also giving back new balanced tree, - assumes initial true was well formed (especially balanced). *) -fun pull_local_top Empty = NONE - | pull_local_top (tr as (Br(_,(b,t),_,_))) = SOME (t, delete t tr); - -fun pull_local_bot Empty = NONE - | pull_local_bot (tr as Br(_,(b,t),_,_)) = SOME (b, delete b tr); - -fun del_local_top Empty = Empty - | del_local_top (tr as (Br(_,(b,t),_,_))) = delete t tr; - -fun del_local_bot Empty = Empty - | del_local_bot (tr as Br(_,(b,t),_,_)) = delete b tr; - - -(* -fun delete_range delme Empty = Empty - | delete_range (delb,delt) (tr as Br (d,v as (b,t),l,r)) = - (case (N.ord(s,b), N.ord(s,t)) - of (EQUAL,EQUAL) => - join_from_delete l r - | (GREATER,LESS) => - (case Int.compare (depth_of l, depth_of r) - of LESS => Br (d,(N.suc s,t),add_range (b,pred_or_same s) l,r) - | EQUAL => Br (d + 1,(N.suc s,t),add_range (b,pred_or_same s) l,r) - | MORE => Br (d,(b,pred_or_same s),l,add_range (N.suc s,t) r)) - | (EQUAL, LESS) => Br (d,(N.suc b,t),l,r) - | (GREATER, EQUAL) => Br (d,(b,pred_or_same t),l,r) - | (LESS, LESS) => rebalance (Br (d,v, delete s l, r)) - | (GREATER, GREATER) => rebalance (Br (d,v, l, delete s r)) - | (EQUAL, GREATER) => raise delete_exp "impossible! delete 1" - | (LESS, EQUAL) => raise delete_exp "impossible! delete 2" - | (LESS, GREATER) => raise delete_exp "impossible! delete 3") -*) - -(* efficient computation of powerset *) - -(* assumes a in tr; returns { ((leq a tr) UN x) | a <= x & x in tr }, - where (leq a tr) is elements less than or equal to a in tr. *) -fun powerset_geq a tr = - (case next_bigger tr a - of NONE => [add a empty,empty] - | SOME b => - let val bsets = powerset_geq b (delete a tr) - in (map (add a) bsets) @ bsets end); - -(* returns powerset of tr as a list *) -fun powerset tr = - (case get_first tr of - NONE => [] - | SOME a => powerset_geq a tr) - - -(* fold over the ranges in the name tree, in order smallest to biggest *) -fun fold_ranges f Empty a = a - | fold_ranges f (tr as Br (d,v as (b,t),l,r)) a = - a |> fold_ranges f l - |> f (b,t) - |> fold_ranges f r; - -fun fold_ranges_rev f Empty a = a - | fold_ranges_rev f (tr as Br (d,v as (b,t),l,r)) a = - a |> fold_ranges_rev f r - |> f (b,t) - |> fold_ranges_rev f l; - -(* IMPROVE?: write nice optimal version based on top-level - rotatability, using the depth info. should be: log(tr1), currently: min(lg(tr1)^2) *) -fun ord (tr1, tr2) = - (case (pull_smallest_range tr1, pull_smallest_range tr2) - of (NONE, NONE) => EQUAL (* both empty *) - | (NONE, SOME _) => LESS - | (SOME _, NONE) => GREATER - | (SOME ((b1,t1),tr1'), SOME ((b2,t2),tr2')) => - (case (N.ord (b1,b2)) - of LESS => GREATER - | GREATER => LESS - | EQUAL => (case (N.ord (t1,t2)) - of LESS => LESS - | GREATER => GREATER - | EQUAL => ord (tr1', tr2')))); - -fun eq tr1 tr2 = case ord (tr1,tr2) of EQUAL => true | _ => false; - - -(* IMPROVE: THIS IS NOT EFFICIENT! 
*) -(* subtract 1st from 2nd = fromthis - subtractthis *) -fun subtract fromthis subtractthis = - fold delete (list_of subtractthis) fromthis; -(* val subtract = assert_correct oo subtract; *) -(* same as above but swaped arguments for curried folding *) -fun remove_set subtractthis fromthis = - fold delete (list_of subtractthis) fromthis; -(* val remove_set = assert_correct oo remove_set; *) - -(* IMPROVE: this is the inefficient version; write the efficient one based on - the intervals ! *) -fun intersect A B = - fold (fn a => if contains B a then I else delete a) (list_of A) A; - - -(* IMPROVE: this is the inefficient version; write the efficient one based on - the intervals ! *) -fun nonempty_intersect A B = - let exception stop of unit - in - (fold (fn a => fn () => - if contains B a then raise stop () else ()) (list_of A) (); false) - handle stop () => true - end; - -(* contrast: compute leftonly tree, intersection and - right-only tree. IMPROVE: make efficient version. *) -fun contrast lt rt = - (subtract lt rt, intersect lt rt, subtract rt lt); - - -fun pretty_range (b,t) = - (case N.ord (b,t) of - EQUAL => N.pretty b - | _ => - Pretty.block [Pretty.str "[", N.pretty b, Pretty.str " - ", - N.pretty t, Pretty.str "]" ]); - -(* pretty printing in full detail: shows internal tree structure *) -fun pretty_full Empty = Pretty.block [] - | pretty_full (Br (d,bt,l,r)) = - Pretty.chunks - ([Pretty.block - [ Pretty.str (Int.toString d), Pretty.str " : ", pretty_range bt]] - @ [Pretty.indent 2 (case l of Empty => Pretty.str "Empty" - | _ => pretty_full l)] - @ [Pretty.indent 2 (case r of Empty => Pretty.str "Empty" - | _ => pretty_full r)]); - -(* pretty printing in brief set style, list of results *) -fun prettyL Empty = [] - | prettyL (Br (d,bt,l,r)) = - ((prettyL l) @ [pretty_range bt] @ (prettyL r)); - -fun pretty_struct' Empty = [] - | pretty_struct' (Br (d,bt,l,r)) = - [Pretty.enclose "(" ")," (pretty_struct' l)] - @ [pretty_range bt] - @ [Pretty.enclose ", (" ")" (pretty_struct' r)]; - -fun pretty ns = Pretty.list "{" "}" (prettyL ns); -fun pretty_struct ns = Pretty.enclose "{" "}" (pretty_struct' ns); - - -val print = Pretty.writeln o pretty; -val print_struct = Pretty.writeln o pretty_struct; -val print_full = Pretty.writeln o pretty_full; - -(* renaming *) -fun rename1 n1 n2 tr = - (case try_delete n1 tr - of NONE => (n2,tr) - | SOME tr2 => add_new n2 tr2) - -fun rename1_opt n1 n2 tr = (* optional new name if n2 clashes *) - (case try_delete n1 tr - of NONE => (NONE,tr) - | SOME tr2 => add_new_opt n2 tr2) - -fun try_rename1 n1 n2 tr = (* optional result: SOME if no name clash *) - (case try_delete n1 tr - of NONE => SOME tr - | SOME tr2 => - try_ins_fresh n2 tr2); - - -(* fold over all elements *) -fun fold_try f [] a = a - | fold_try f (h::t) a = - (case f h a of NONE => NONE - | SOME a2 => fold_try f t a2); - -fun fold f = fold_ranges (Basics.fold f o dest_range); -fun fold_rev f = fold_ranges_rev (Basics.fold_rev f o dest_range); - -fun filter f s = fold (fn e => if (f e) then I else delete e) s s; - -exception found_exp of name; - -fun exists f ns = - (fold (fn n => fn () => if (f n) then raise found_exp n else ()) ns (); - false) - handle found_exp _ => true; -fun get_exists f ns = - (fold (fn n => fn () => if (f n) then raise found_exp n else ()) ns (); - NONE) - handle found_exp n => SOME n; - -fun search f ns = - let exception found_data of 'a in - (fold (fn n => fn () => - case (f n) - of NONE => () - | SOME x => raise found_data x) - ns (); NONE) - handle found_data r => 
SOME r - end; - -fun forall f ns = - (fold (fn n => fn () => if (f n) then () else raise found_exp n) ns (); - true) - handle found_exp _ => false; - - -(* {a,b,c,d} => [(a, {b,c,d}), (b, {a,c,d}), (c,{a,b,d}), ... ] *) -fun pull_each nset = fold (fn n => fn l => (n,delete n nset)::l) nset []; - - -fun subdivide n nset = let - val size = ((cardinality nset) div n) - fun subdiv [] _ = [] - | subdiv rest 0 = [of_list rest] - | subdiv rest i = let val (l1,l2) = chop size rest - in (of_list l1)::(subdiv l2 (i-1)) - end -in subdiv (list_of nset) (n-1) -end - -(* -exception stop_exp of unit; -( * NOTE: not used; - IMPROVE: provide more efficient set based 2obj fold * ) -fun fold_2obj_names f (obj1,obj2) a = - (SOME o snd ooo fold) - (fn n1 => - (fn ([], a') => raise stop_exp () - | (n2::Ns, a') => (Ns,f (n1,n2) a'))) - obj1 (list_of obj2, a) - handle stop_exp _ => NONE -*) - -fun maximize f xs = let - fun max x' current = let - val i' = f x' - in case current - of SOME (i,x) => if (i' > i) then SOME (i',x') else SOME (i,x) - | NONE => SOME (i',x') - end -in case fold max xs NONE of SOME (_,x) => SOME x | NONE => NONE -end - -fun minimize f xs = let - fun min x' current = let - val i' = f x' - in case current - of SOME (i,x) => if (i' < i) then SOME (i',x') else SOME (i,x) - | NONE => SOME (i',x') - end -in case fold min xs NONE of SOME (_,x) => SOME x | NONE => NONE -end - -end; (* structure *) - - diff --git a/core/lib/names/basic_nametab.ML b/core/lib/names/basic_nametab.ML deleted file mode 100644 index 6f58e9e1..00000000 --- a/core/lib/names/basic_nametab.ML +++ /dev/null @@ -1,418 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: isaplib/isap_src/nametab.ML (sym link in src/generic/) - Author: Lucas Dixon, University of Edinburgh - lucas.dixon@ed.ac.uk - Initially written Oct 2005, Updated 27 Dec 2008 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - Table with names as keys. - for holding things associated with nice fresh namers. - - Each entry has a name - but we allow some names not to have - entries: you can use up names without providing entries. 
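A rough sketch of that distinction, written against the signature below for some instance NTab of BASIC_NAME_TAB (the bindings, and the names someName and data, are only for illustration):

    val (n, tab) = NTab.new_name someName NTab.empty   (* reserve a name; no entry yet *)
    val _ = NTab.contains_name tab n                    (* true: the name is used up *)
    val _ = NTab.contains tab n                         (* false: it has no entry *)
    val tab' = NTab.update (n, data) tab                (* attach a value to it later *)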
- - Note: Currently -*) - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -signature BASIC_NAME_TAB = -sig - - type 'a T - type name - - structure NSet : BASIC_NAME_SET sharing type NSet.name = name; - structure Tab : TABLE sharing type Tab.key = name; - - val empty : 'a T - val is_name_empty : 'a T -> bool - val is_element_empty : 'a T -> bool (* this implies is_name_empty *) - - val is_name_singleton : 'a T -> bool - val is_element_singleton : 'a T -> bool (* this implies is_name_singleton *) - val tryget_singleton : 'a T -> (name * 'a) option - - val try_ins : (name * 'a) -> 'a T -> 'a T option - val ins : (name * 'a) -> 'a T -> 'a T (* raises NSet.duplicate_exp *) - val add : (name * 'a) -> 'a T -> (name * 'a T) - val add' : (name * 'a) -> 'a T -> (name option * - 'a T) (* tell me only if changed *) - val doadd : (name * 'a) -> 'a T -> 'a T (* add with new name, don't tell me *) - - (* create new name and use it up: update must be used to set entry's data *) - val new_name : name -> 'a T -> (name * 'a T) - (* just use-up a list of names *) - (* IMPROVE: avoid conversion to lists as uses up lots of memory: - have a function in FNAMETAB to useup names from a name collection *) - val useup_names : NSet.T -> 'a T -> 'a T - - (* renaming *) - val rename1 : name -> name -> 'a T -> (name * 'a T) - val rename1_opt : name -> name -> 'a T -> (name option * 'a T) (* new name given back only if different from suggested new name *) - val try_rename1 : name -> name -> 'a T -> 'a T option (* NONE on name clash *) - - (* lookup *) - val get : 'a T -> name -> 'a (* raises Tab.UNDEF *) - val lookup : 'a T -> name -> 'a option - val contains : 'a T -> name -> bool (* only names with table entries *) - val contains_name : 'a T -> name -> bool (* + names without table entries *) - val next_smaller : 'a T -> name -> (name * 'a) option - val next_bigger : 'a T -> name -> (name * 'a) option - val pull_elem : 'a T -> ((name * 'a) * 'a T) option - (* get the the biggest/smallest element (log time) *) - val lookup_biggest : 'a T -> (name * 'a) option; - val lookup_smallest : 'a T -> (name * 'a) option; - (* pull out the biggest/smallest element (log time) *) - val pull_biggest : 'a T -> ((name * 'a) * 'a T) option; - val pull_smallest : 'a T -> ((name * 'a) * 'a T) option; - - (* quantifier checks *) - val forall : ((name * 'a) -> bool) -> 'a T -> bool - val exists : ((name * 'a) -> bool) -> 'a T -> bool - val find : ((name * 'a) -> bool) -> 'a T -> (name * 'a) option - - (* modifications *) - val safe_map_entry : ('a -> 'a) -> name -> 'a T -> 'a T (* skips if no name *) - val map_entry : ('a -> 'a) -> name -> 'a T -> 'a T (* raises Tab.UNDEF *) - val map_all : (name -> 'a -> 'b) -> 'a T -> 'b T - val fold : ((name * 'a) -> 'b -> 'b) -> 'a T -> 'b -> 'b - val update : (name * 'a) -> 'a T -> 'a T (* no exp, replaces old name *) - val update_new : (name * 'a) -> 'a T -> 'a T (* exp if not fresh for table *) - val delete : name -> 'a T -> 'a T - val merge_disjoint : 'a T -> 'a T -> 'a T (* note: must be disjoint: raises Tab.DUP *) - val merge_joint : (* exception on clash noneq elements *) - ('a * 'a -> bool) (* return true if they are same, keeping second elem else raises Tab.DUP *) - -> 'a T -> 'a T -> 'a T - (* values that are in the first but not the second *) - val difference : 'a T -> 'a T -> 'a T - - (* get all *) - val list_of : 'a T -> (name * 'a) list - val of_list : (name * 'a) list -> 'a T (* raises exception *) - val keys : 'a T -> name list - val values : 'a T -> 'a list - val 
get_nameset : 'a T -> NSet.T; - val cardinality : 'a T -> int; (* of nameset *) - - (* abbreivations from internal structures *) - val ord : name * name -> order - val name_eq : name * name -> bool - - (* pretty *) - val string_of_name : name -> string - val pretty_name : name -> Pretty.T - val pretty_ntree: 'a T -> Pretty.T - val pretty_as_list : ('a -> Pretty.T) -> 'a T -> Pretty.T - val pretty : ('a -> Pretty.T) -> 'a T -> Pretty.T - val print : ('a -> Pretty.T) -> 'a T -> unit -(* val direct_pretty : 'a T -> Pretty.T - val direct_print : 'a T -> unit *) -end; - - -functor BasicNameTabFun( - structure NSet : BASIC_NAME_SET - structure Namer : NAMER - sharing type Namer.name = NSet.name -) -: BASIC_NAME_TAB -= struct - - structure Namer = Namer; - structure NSet = NSet; - - type name = Namer.name; - - val pretty_name = Namer.pretty - -(* fun pretty_name (n,i) = - Pretty.block [N.pretty_name n, Pretty.str (Int.toString i)]; *) - val string_of_name = Pretty.string_of o pretty_name; - - fun ord (a, b) = Namer.ord (a,b); - - fun name_eq (a, b) = (ord (a,b) = EQUAL); - -(* (Library.prod_ord NSet.ord Library.int_ord) (a,b); *) - - structure Tab = Table(type key = name val ord = ord ); - - datatype 'a T = - Tab of {names : NSet.T, - tab : 'a Tab.table}; - - fun get_tab (Tab rep) = #tab rep; - fun get_nameset (Tab rep) = #names rep; - fun cardinality ntab = NSet.cardinality (get_nameset ntab); - - val empty = Tab {names = NSet.empty, - tab = Tab.empty}; - - fun is_name_empty (Tab {names, tab}) = NSet.is_empty names; - fun is_element_empty (Tab {names, tab}) = Tab.is_empty tab; - - fun is_name_singleton (Tab {names, tab}) = NSet.is_singleton names; - (* IMPROVE: could add this to table library *) - fun tryget_singleton (Tab {names, tab}) = - (case NSet.tryget_singleton names - of NONE => NONE - | SOME n => - (case Tab.lookup tab n of SOME x => SOME (n,x) - | NONE => NONE)); - fun is_element_singleton ntab = - (case tryget_singleton ntab of NONE => false | SOME _ => true); - - fun try_ins (x as (n,a)) (Tab rep) = - let val names = (#names rep) in - case NSet.lookup names n - of NONE => SOME (Tab{ names = NSet.add n names, - tab = Tab.update_new x (#tab rep) }) - | SOME _ => NONE - end; - - (* NSet.ins_fresh and Tab.update_new raise exceptions on duplicates *) - fun ins (x as (n,a)) (Tab rep) = - (Tab{ names = NSet.ins_fresh n (#names rep), - tab = Tab.update_new x (#tab rep) }); - - fun map_all f (Tab rep) = - Tab{ names = #names rep, - tab = Tab.map f (#tab rep) }; - - fun update (x as (n,a)) (Tab rep) = - Tab{ names = NSet.add n (#names rep), - tab = Tab.update x (#tab rep) }; - - fun update_new (x as (n,a)) (Tab rep) = - Tab{ names = NSet.add n (#names rep), - tab = Tab.update_new x (#tab rep) }; - - fun of_list l = fold update_new l empty; - - fun delete n (Tab rep) = - Tab{ names = NSet.delete n (#names rep), - tab = Tab.delete_safe n (#tab rep) }; - - fun add' (x as (n,a)) (Tab rep) = - let val names = #names rep in - case NSet.lookup names n - of NONE => (NONE, Tab{ names = NSet.add n names, - tab = Tab.update_new x (#tab rep) }) - | SOME (_,t) => - let val n' = Namer.suc t in - (SOME n', Tab{ names = NSet.add n' names, - tab = Tab.update_new (n',a) (#tab rep) }) - end - end; - - fun add (x as (n,a)) (Tab rep) = - let val names = #names rep in - case NSet.lookup names n - of NONE => (n, Tab{ names = NSet.add n names, - tab = Tab.update_new x (#tab rep) }) - | SOME (_,t) => - let val n' = Namer.suc t in - (n', Tab{ names = NSet.add n' names, - tab = Tab.update_new (n',a) (#tab rep) }) - end 
- end; - - fun doadd x = snd o add x; - - fun new_name n (Tab rep) = - let val (n2,names2) = NSet.add_new n (#names rep) in - (n2, Tab{ names = names2, tab = #tab rep }) - end; - - fun useup_names names (Tab rep) = - Tab{ names = NSet.union_merge names (#names rep), - tab = #tab rep}; - - fun lookup (Tab rep) n = - Tab.lookup (#tab rep) n; - - (* implicit name argument *) - fun contains_name (Tab rep) = NSet.contains (#names rep); - - fun contains (Tab rep) n = - (case Tab.lookup (#tab rep) n of NONE => false - | _ => true); - - - (* rename a name *) - fun rename1 n1 n2 (Tab rep) = - let val (n2', nset2) = NSet.rename1 n1 n2 (#names rep) in - case Tab.lookup (#tab rep) n1 - of NONE => (n2', Tab{names = nset2, tab = (#tab rep)}) - | SOME x => - (n2', Tab{ names = nset2, - tab = (#tab rep) |> Tab.delete n1 - |> Tab.update (n2',x) }) - end; - - fun rename1_opt n1 n2 (Tab rep) = - let val (n2opt, nset2) = NSet.rename1_opt n1 n2 (#names rep) - in case Tab.lookup (#tab rep) n1 - of NONE => (NONE, Tab{names = nset2, tab = (#tab rep)}) - | SOME x => - let val n2' = case n2opt of NONE => n2 | SOME n2' => n2' in - (n2opt, Tab{ names = nset2, - tab = (#tab rep) |> Tab.delete n1 - |> Tab.update (n2',x) }) - end - end; - - (* NONE on name clash *) - fun try_rename1 n1 n2 (Tab rep) = - (case NSet.try_rename1 n1 n2 (#names rep) - of NONE => NONE - | SOME nset2 => - (case Tab.lookup (#tab rep) n1 - of NONE => SOME (Tab{names = nset2, tab = (#tab rep)}) - | SOME x => - SOME (Tab{ names = nset2, - tab = (#tab rep) |> Tab.delete n1 - |> Tab.update (n2,x) }))); - - fun get (Tab rep) n = - case Tab.lookup (#tab rep) n - of NONE => - raise Tab.UNDEF n - | SOME x => x; - - fun safe_map_entry f n (Tab rep) = - Tab{ names = #names rep, - tab = Tab.map_entry n f (#tab rep) }; - - (* assumes NSet has same elements as tab *) - fun map_entry f n (Tab rep) = - let val names = #names rep in - if NSet.contains names n then - Tab{ names = names, - tab = Tab.map_entry n f (#tab rep) } - else raise Tab.UNDEF n end; - - (* FIXME & IMPROVE: may raise an exception if tab contains used - names without entries: fix by adding a get_smaller to table.ML *) - fun next_smaller tab n = - (case NSet.next_smaller (get_nameset tab) n - of NONE => NONE - | SOME n2 => SOME (n2,get tab n2)); - fun next_bigger tab n = - (case NSet.next_bigger (get_nameset tab) n - of NONE => NONE - | SOME n2 => SOME (n2,get tab n2)); - -(* assumes names = content *) - fun pull_elem (tab as Tab rep) = - (case NSet.pull_local_bot (#names rep) of - NONE => NONE - | SOME (n,names2) => - SOME ((n,get tab n), - Tab {names = names2, tab = Tab.delete n (#tab rep)})); - - (* pull out the biggest/smallest element (log time) *) -structure T = Tab - fun lookup_biggest (tb as Tab rep) = - (case Tab.max (#tab rep) - of NONE => NONE - | SOME (n,_) => SOME (n,get tb n)); - - fun lookup_smallest (tb as Tab rep) = - (case Tab.min (#tab rep) - of NONE => NONE - | SOME (n,_) => SOME (n,get tb n)); - - (* pop off the biggest/smallest element (log time) *) - fun pull_biggest tb = - (case lookup_biggest tb - of NONE => NONE - | SOME (v as (n,_)) => SOME (v, delete n tb)); - fun pull_smallest (tb as Tab rep) = - (case lookup_smallest tb - of NONE => NONE - | SOME (v as (n,_)) => SOME (v, delete n tb)); - - - fun fold f (Tab rep) = Tab.fold f (#tab rep); - - fun exists f (Tab rep) = Tab.exists f (#tab rep); - fun forall f (Tab rep) = Tab.forall f (#tab rep); - - fun find (f : name * 'a -> bool) t = - let exception found_exp of name * 'a; - in - (exists (fn nx => if f nx then raise 
found_exp nx else false) - t; NONE) - handle found_exp nx => SOME nx - end; - - - - (* getting all entries... *) - fun list_of (Tab rep) = Tab.fold cons (#tab rep) []; - fun keys (Tab rep) = Tab.fold (cons o fst) (#tab rep) []; - fun values (Tab rep) = Tab.fold (cons o snd) (#tab rep) []; - - (* ? more efficient way? *) - fun difference tab1 tab2 = - Basics.fold (fn k => if contains tab2 k then delete k else I) - (keys tab1) tab1; - - (* merging *) - fun merge_disjoint t1 t2 = - Tab.fold - (fn (v as (n,a)) => fn t => - case try_ins v t - of NONE => raise Tab.DUP n - | SOME t2 => t2) - (get_tab t1) t2; - - fun merge_joint chkf t1 t2 = - Tab.fold - (fn (v as (n,a)) => fn t => - case lookup t n - of NONE => update_new (n,a) t - | SOME a2 => - if chkf (a,a2) then t else - raise Tab.DUP n) - (get_tab t1) t2; - - (* pretty printing *) - fun pretty f t = - if(is_element_empty t) then Pretty.str "Empty" - else - Pretty.chunks - (map - (fn (k,v) => - Pretty.block - [ pretty_name k, - Pretty.str " => ", - f v ]) - (Tab.dest (get_tab t))); - - (* pretty printing *) - fun pretty_as_list f t = - Pretty.list "{" "}" - (map - (fn (k,v) => - Pretty.block - [ pretty_name k, - Pretty.str " => ", - f v ]) - (Tab.dest (get_tab t))); - - - fun pretty_ntree t = - Pretty.chunks - [Pretty.str "Names: ", - NSet.pretty (get_nameset t)]; - - fun print pf = Pretty.writeln o (pretty pf) - -(* - fun direct_pretty x = pretty (Pretty.str o PolyML.makestring) x; - fun direct_print x = Pretty.writeln (direct_pretty x) -*) - -end; diff --git a/core/lib/names/basic_renaming.ML b/core/lib/names/basic_renaming.ML deleted file mode 100644 index 0746beee..00000000 --- a/core/lib/names/basic_renaming.ML +++ /dev/null @@ -1,326 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: basic_renaming.ML - Author: Lucas Dixon - lucas.dixon@ed.ac.uk - 10 Jan 2010 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - * - * A renaming provides a controlled method of generating fresh names for - * named items. It can be told what cannot be used as a fresh name - * (avoids), what things should not be renamed (ignore) and be given - * explicit values for some renames (nmap). - * - * Note that this structure only generates the new names. To actually do - * the renaming, you want a COMPOUND_RENAMING. - * - * The main function is rename1, which tells you whether something should be - * renamed and, if so, what it should be renamed to. If the name is in ignores, - * or it is not in the domain of nmap and not in avoids, it will indicate that - * it should be left alone (by returning NONE). Otherwise, if it has an entry in - * nmap, the mapped-to name will be returned. Finally, if it is not ignored - * and not already mapped, but is in the set of names to avoid, it will - * generate a fresh name (ie: one not already in - * avoids) and return that, in addition to storing it in nmap. - * - * To guarantee that the final nmap is injective, the range of the initial nmap - * must be a subset of avoids. The ignore set should also be a subset of avoids - * for a similar reason, as it is meant to represent the identity part of the - * renaming map. If you want to be able to do a sequential (as opposed to - * simultaneous) renaming on the actual set of names, you will also want the - * domain of nmap to be a subset of avoids as well. Note that rename1 will - * preserve all these properties. 
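A minimal usage sketch of the rename1 contract just described (assuming some instantiation Rnm : BASIC_RENAMING of the signature below, and placeholder values used : nset, fresh_n : name and clashing_n : name, none of which come from these sources):

  (* start from a renaming that must avoid every name in `used` *)
  val rn0 = Rnm.mk_from_avoids used;

  (* fresh_n does not occur in `used`: rename1 returns NONE, and from now
     on fresh_n is both ignored and avoided *)
  val (NONE, rn1) = Rnm.rename1 fresh_n rn0;

  (* clashing_n occurs in `used`: rename1 invents a fresh name new_n and
     records clashing_n |-> new_n in nmap *)
  val (SOME new_n, rn2) = Rnm.rename1 clashing_n rn1;

  (* asking again simply returns the stored answer *)
  val (SOME new_n', _) = Rnm.rename1 clashing_n rn2;

As noted above, this only generates the new names; applying them to a structure is the job of a COMPOUND_RENAMING.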
- * - *) -signature BASIC_RENAMING -= sig - -type name (* name *) -type nset (* set of names *) -(* type nmap *) (* mapping from names to names *) -type 'a nmap (* mapping from names *) -type T (* renaming *) - -exception invalid_renaming_exp of string - -val empty : T - -val union_ignores : nset -> T -> T -val union_avoids : nset -> T -> T - -(* NOTE: you pretty much always want: "ignore <= avoids"; else you might - rename something into the ignore map... - THINK: should the interface disallow this by always making ignore<=avoids? -*) -val mk : nset (* ignore: do not create new fresh names for these *) - -> nset (* avoids: avoid these names for new names *) - -> name nmap (* nmap: use this renaming map *) - -> T - -(* avoid these *) -val mk_from_avoids : nset -> T - -(* rename these *) -val mk_from_nmap : name nmap -> T - -(* lookup if we have been renamed *) -val lookup : T -> name -> name option; -val get : T -> name -> name; (* raises NTab.Tab.UNDEF *) - -(* for doing renaming / creating an isomorphism *) -val rename1 : name (* old name *) - -> T (* renaming so far *) - (* some new name (if clashed) and extended renaming *) - -> (name option) * T - -(* for alpha-equivlance checking / isomorphism checking *) -(* rename renaming ignores names that don't occur *) -val try_rename1_renaming : name (* old name *) - -> name (* new name *) - -> T (* renaming so far *) - -> T option (* (if possible) renamed renaming *) - -(* composition extends renaming if renameing wasn't already present and - if possible *) -val try_compose_rename1 : name (* old name *) - -> name (* new name *) - -> T (* renaming so far *) - -> T option (* (if possible) extended renaming *) - -val do_compose_rename1 : name -> name -> T -> T (* raises invalid_renaming_exp *) - - -(* given a renaming, make a new renaming that is just the avoid set *) -val restrict_to_avoiding : T -> T (* = mk_from_avoids o get_avoids *) - -(* cached info about new names created during renaming *) -val get_newnames : T -> nset -val set_newnames : nset -> T -> T - -(* internals of renaming *) -val get_ignored : T -> nset -val set_ignored : nset -> T -> T - -val get_avoids : T -> nset (* all used names which can't be clashed with *) -val get_nmap : T -> name nmap -(* edit internal data: be careful! you typically need to update avoids as well - as ignores or others. Else you can have clash in ignored named. 
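A small sketch of the caveat just stated, assuming an instantiation Rnm : BASIC_RENAMING whose name-set structure is NSet, and placeholder values ns : nset and rn : T:

  (* fragile: only the ignore set grows, so a later rename1 may pick a
     fresh name that collides with one of the newly ignored names *)
  val fragile = Rnm.update_ignored (NSet.union_merge ns) rn;

  (* preferred: union_ignores preserves the invariant ignore <= avoids
     by adding ns to the avoid set as well *)
  val safe = Rnm.union_ignores ns rn;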
*) -val update_nmap : (name nmap -> name nmap) -> T -> T -val update_avoids : (nset -> nset) -> T -> T -val update_newnames : (nset -> nset) -> T -> T -val update_ignored : (nset -> nset) -> T -> T - - -(* internal representation *) -val constr : {ignore : nset, (* considered to already be renamed, so not renamed again *) - new : nset, (* these are the newly generated names, range of nmap *) - avoids : nset, (* Nothing is allowed to be renamed to these *) - nmap : name nmap} (* mapping from old to new names *) - -> T -val destr : T -> {ignore : nset, new : nset, avoids : nset, nmap : name nmap}; - - -(* pretty *) -val pretty : T -> Pretty.T; -val print : T -> unit; - -end; - - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -functor BasicRenamingFun(structure NTab : BASIC_NAME_TAB - and Namer : NAMER - sharing type Namer.name = NTab.name) -: BASIC_RENAMING -= struct - -exception invalid_renaming_exp of string (* used in table and set renaming *) - -structure NSet = NTab.NSet; - -type name = NTab.name; -type nset = NTab.NSet.T; -(* type nmap = name NTab.T; *) -type 'a nmap = 'a NTab.T; - -(* Invarients: - ignore <= avoids - (dom(rn) + range(rn)); - new <= avoids - dom(rn); - new <= range(rn); - dom(rn) <= avoids; - range(rn) <= avoids -*) -datatype T = Renaming of - {ignore : NSet.T, (* part of context: ignored and not renamed *) - new : NSet.T, (* these are the newly generated names, - a subset of the range of nmap *) - avoids : NSet.T, (* nothing is new allowed to be renamed to these *) - nmap : name NTab.T}; (* mapping from old to new names *) - -val constr = Renaming; -fun destr (Renaming rep) = rep; - -(* empty renaming *) -val empty = - Renaming {ignore = NSet.empty, - new = NSet.empty, - avoids = NSet.empty, - nmap = NTab.empty }; - -fun mk_from_nmap nmap = - Renaming {ignore = NSet.empty, - new = NSet.empty, - avoids = NTab.fold (fn (_,n) => NSet.add n) nmap NSet.empty, - nmap = nmap}; - -fun update_newnames f (Renaming rep) = - Renaming {ignore = #ignore rep, - new = f (#new rep), - avoids = #avoids rep, - nmap = #nmap rep }; - -fun update_nmap f (Renaming rep) = - Renaming {ignore = #ignore rep, - new = #new rep, - avoids = #avoids rep, - nmap = f (#nmap rep) }; - -fun update_ignored f (Renaming rep) = - Renaming {ignore = f (#ignore rep), - new = #new rep, - avoids = #avoids rep, - nmap = #nmap rep }; - -val set_newnames = update_newnames o K; -val set_ignored = update_ignored o K; - -(* *) -fun union_ignores ignores' (Renaming rep) = - Renaming {ignore = NSet.union_merge ignores' (#ignore rep), - new = #new rep, - avoids = NSet.union_merge ignores' (#avoids rep), - nmap = #nmap rep}; - -fun union_avoids avoids' (Renaming rep) = - Renaming {ignore = #ignore rep, - new = #new rep, - avoids = NSet.union_merge avoids' (#avoids rep), - nmap = #nmap rep}; - -(* IMPROVE: make new an argument... have other constructors *) -(* NOTE: you pretty much always want: "ignore <= avoids"; else you might - rename something into the ignore map... - THINK: should the interface disallow this by always making ignore<=avoids? 
-*) -fun mk ignore avoids nmap = - Renaming {ignore = ignore, - new = NTab.fold (fn (_,n) => NSet.add n) nmap NSet.empty, - avoids = avoids, - nmap = nmap}; - -(* make a renaming *) -fun mk_from_avoids avoids = union_avoids avoids empty; - -(* get info/status of renaming *) -fun get_avoids (Renaming rep) = (#avoids rep); -fun get_newnames (Renaming rep) = (#new rep); -fun get_nmap (Renaming rep) = (#nmap rep); -fun get_ignored (Renaming rep) = (#ignore rep); - -fun update_avoids f (Renaming rep) = - Renaming { avoids = f(#avoids rep), ignore = #ignore rep, - new = #new rep, nmap = #nmap rep }; -fun update_newnames f (Renaming rep) = - Renaming { avoids = #avoids rep, ignore = #ignore rep, - new = f(#new rep), nmap = #nmap rep }; -fun update_nmap f (Renaming rep) = - Renaming { avoids = #avoids rep, ignore = #ignore rep, - new = #new rep, nmap = f(#nmap rep) }; -fun update_ignored f (Renaming rep) = - Renaming { avoids = f(#avoids rep), ignore = f(#ignore rep), - new = #new rep, nmap = #nmap rep }; - -val restrict_to_avoiding = mk_from_avoids o get_avoids; - -val lookup = NTab.lookup o get_nmap; -val get = NTab.get o get_nmap; - -(* rename an old name *) -fun rename1 oldn (rn as Renaming rep) = - let val ignore = (#ignore rep) in - (* ignore it, it is already renamed (or renaming is the id function) *) - if NSet.contains ignore oldn then (NONE, rn) - else - let val nmap = (#nmap rep) in - (case NTab.lookup nmap oldn of - NONE => - let val avoids = (#avoids rep) in - if NSet.contains avoids oldn then (* rename it! *) - let val (n2,avoids2) = (NSet.add_new oldn avoids) - in (SOME n2, (* oldname clashed, so it was renamed *) - Renaming {ignore = ignore, - avoids = avoids2, - new = NSet.add n2 (#new rep), - nmap = NTab.ins (oldn,n2) nmap}) - end - else (NONE, (* old name was not renamed to something different, - so we can ignore all future occurences, and avoid - later changing any other name to clash with this one. *) - Renaming {ignore = NSet.ins_fresh oldn ignore, - new = #new rep, - avoids = NSet.ins_fresh oldn avoids, - nmap = nmap}) - end - | SOME n2 => (SOME n2, rn)) - end - end; - - -(* rename an old name *) -(* THINK: maybe check if newn = oldn, then add name to ignores and it is not new or in nmap *) - -(* rename a renaming *) -fun try_rename1_renaming n1 n2 (Renaming rep) = - if NSet.contains (#avoids rep) n2 - then NONE - else SOME (Renaming {ignore = the (NSet.try_rename1 n1 n2 (#ignore rep)), - avoids = the (NSet.try_rename1 n1 n2 (#avoids rep)), - new = the (NSet.try_rename1 n1 n2 (#new rep)), - nmap = the (NTab.try_rename1 n1 n2 (#nmap rep))}); - -(* add oldn => newn to the renaming, as long as it doesn't clash, - and isn't part of the avoid set *) -fun try_compose_rename1 oldn newn (rn as Renaming rep) = - let val nmap = (#nmap rep) in - case NTab.lookup nmap oldn of - NONE => - let val avoids = (#avoids rep) in - if NSet.contains avoids newn then NONE - else - (SOME - (Renaming {ignore = #ignore rep, - new = NSet.add newn (#new rep), - avoids = avoids |> NSet.ins_fresh newn - |> NSet.add oldn, - nmap = NTab.ins (oldn,newn) nmap })) - end - | SOME n2 => if Namer.eq (n2, newn) then SOME rn - else NONE (* maybe should raise a name clash here? 
*) - end; - -fun do_compose_rename1 oldn newn rn = - (case try_compose_rename1 oldn newn rn of NONE => - raise invalid_renaming_exp "do_compose_rename1: already renamed differently" - | SOME rn2 => rn2); - -(* pretty *) -fun pretty (Renaming rep) = - Pretty.chunks - [Pretty.str "Renaming:", - Pretty.block [Pretty.str "ignore: ", NSet.pretty (#ignore rep)], - Pretty.block [Pretty.str "avoids: ", NSet.pretty (#avoids rep)], - Pretty.block [Pretty.str "NMap: ", NTab.pretty Namer.pretty (#nmap rep)]]; - -val print = Pretty.writeln o pretty; - -end; diff --git a/core/lib/names/compound_renaming.ML b/core/lib/names/compound_renaming.ML deleted file mode 100644 index cf3ad4f9..00000000 --- a/core/lib/names/compound_renaming.ML +++ /dev/null @@ -1,237 +0,0 @@ -(* COMPOUND_RENAMING: - * - * A compound renaming applies a renaming to a structure that contains a - * collection of named items. The structure needs to have a way of - * getting the set of existing names, and a function that attempts to - * rename one of the elements (which is expected to succeed unless there - * is a clash). - * - * Typical usage is to create a renaming that contains some initial constraints - * (such as a set of names you want to avoid), and pass it to rename to do the - * actual renaming. - * - * mk_renaming will extend an initial renaming to a renaming on the whole - * collection, which can then be passed do_renaming to actually perform the - * renaming. Just using rename is more efficient, however. - * - * Due to the way compound renaming works, you need to make sure that the domain - * and range of nmap are disjoint, and that both are subsets of avoids. - *) - -(* basic things needed for renaming names within another datatype; - allows separation of name spaces *) -signature COMPOUND_RENAMABLE = -sig -structure Nm : BASIC_NAME -type obj - -(* get set of all names *) -val get_nameset : obj -> Nm.NSet.T - -(* rename 1 name *) -val try_rename1 : Nm.name -> Nm.name -> obj -> obj option -end; - -(* IMPROVE: - ADD: can also make compound renaming from pairwise compound renamings. - ADD: could also write a version based on a name-mapfold operation on obj -*) - - -(* generic signature for renaming within a datatype *) -(* NOTE! currently, a renaming may be OK, but because of the order of - renamings used, a clash in the obj may be created. e.g. a graph rhs may - rename a vertex "a" to "b", but "b" may already exists, and have been - renamed to "c". While such a renaming is in theory ok, the implementation - doesn't allow it. What you should be doing is making "b" part of the ignore - and avoids sets. Thus "a" will be directly renamed to "c", and "b" will be - ignored. 
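The recommended workaround can be sketched as follows, with hypothetical names only: V is some BASIC_NAME instance for vertices, GraphCRnm a COMPOUND_RENAMING over graphs, host_names the vertex names already in use (including "b"), and g the graph being renamed:

  (* avoid every name already taken, but mark "b" as ignored too: "b" in
     g is then left alone, while any other clashing vertex of g (such as
     "a") is sent to a genuinely fresh name (the "c" of the note above) *)
  val rn0 = V.Rnm.mk_from_avoids host_names;
  val rn1 = V.Rnm.union_ignores (V.NSet.single b) rn0;
  val (rn2, g') = GraphCRnm.rename rn1 g;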
-*) -signature COMPOUND_RENAMING = -sig -type renaming (* see basic renaming for more code to make renamings *) -type obj -(* assumes that existing renaming does not cause clashes, - preserves non-clashing of renamings *) -val empty_renaming : renaming -val rename : renaming -> obj -> renaming * obj -val mk_renaming : obj -> renaming -> renaming -val do_renaming : renaming -> obj -> obj -(* Note: -rename rn x = ((mk_renaming x rn), do_renaming (mk_renaming x rn) x) - (but is more efficient) -*) -(* removes all renaming, but avoids all old names *) -val restrict_to_avoiding : renaming -> renaming - -end; - - -(* construct a compound renaming *) -functor CompoundRenamingFun(CR : COMPOUND_RENAMABLE) -: COMPOUND_RENAMING -= struct - structure Rnm = CR.Nm.Rnm; - structure Nm = CR.Nm; - - type renaming = Rnm.T; - type obj = CR.obj; - - val empty_renaming = Rnm.empty; - val print_renaming = Rnm.print; - - val restrict_to_avoiding = Rnm.restrict_to_avoiding; - - fun do_renaming rn x = - Nm.NSet.fold - (fn n => fn x2 => - case Rnm.lookup rn n of - NONE => x2 - | SOME n2 => (case CR.try_rename1 n n2 x2 - of NONE => raise Rnm.invalid_renaming_exp "do_renaming" - | SOME x3 => x3 )) - (CR.get_nameset x) x; - - fun mk_renaming x rn = - let val nset = (CR.get_nameset x); - val avoids = Rnm.get_avoids rn; - val id_names = Nm.NSet.subtract nset avoids; - (* first make sure that we set ignore (naming id) on all names not in - the avoid set: avoids us creating a name-crossing renaming (where we'd need to do simultainous substitution) *) - val rn = Rnm.union_ignores id_names rn; - in - Nm.NSet.fold (snd oo Rnm.rename1) (CR.get_nameset x) rn - end; - -(* note: takes time of nset * ln(nset), - could be inverted to take time of avoids(rn) * ln(nset) *) - fun rename rn x = - let val nset = (CR.get_nameset x); - val avoids = Rnm.get_avoids rn; - val id_names = Nm.NSet.subtract nset avoids; - (* first make sure that we set ignore (naming id) on all names not in - the avoid set: avoids us creating a name-crossing renaming (where we'd need to do simultainous substitution) *) - val rn = Rnm.union_ignores id_names rn; - in - Nm.NSet.fold - (fn n => fn (rn2,x2) => - case Rnm.rename1 n rn2 of - (NONE, rn3) => (rn3,x2) - | (SOME n2, rn3) => - (case CR.try_rename1 n n2 x2 - of NONE => - raise Rnm.invalid_renaming_exp ("compound rename: " ^ - (Nm.string_of_name n) ^ " -> " ^ (Nm.string_of_name n2)) - | SOME x3 => (rn3, x3))) - nset (rn,x) - end; -end; - - -(* IDEA: generalised apply_and_lift: - (a -> a * b) = rename - ((a -> a) -> x -> x) = update - -> x -> x * b - This would improve on the get and set code below - - fun lifted_apply producef updatef x = - let bref = ref NONE - in (updatef (fn a => let val (a',b) = producef a; - val _ = (bref := SOME b) - in a') x, the bref) - end; - - NOTE: it would take a compiler optimisation to relaise that - bref always ends up as the b, so you don't need to store the option - at all. -*) - -(* IDEA: general notion of embedding one structure within a collection of - functions to access to the old data. a Higher-order, polymorphic functor? 
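The lifted_apply idea sketched above can be written so that it actually type-checks; the following is only a sketch of that idea (using the option projection `the`, as elsewhere in these sources; plain SML would use valOf):

  fun lifted_apply producef updatef x =
    let
      val bref = ref NONE
      val x' = updatef (fn a =>
                 let val (a', b) = producef a
                 in bref := SOME b; a' end) x
    in
      (* raises an exception if updatef never applied its argument *)
      (x', the (! bref))
    end;

As the note above says, the option is only there because the compiler cannot see that it is always filled in before it is read.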
*) -functor EmbedCompoundRenaming( - structure Crnm : COMPOUND_RENAMING - type T (* Crnm.obj lives inside T *) - (* try to get internal data, if there is any *) - val try_to_get : T -> Crnm.obj option - (* does nothing if it isn't there *) - val update : (Crnm.obj -> Crnm.obj) -> T -> T -) : COMPOUND_RENAMING (* where COMPOUND_RENAMING.obj = T *) -= -struct - type renaming = Crnm.renaming - type obj = T - val empty_renaming = Crnm.empty_renaming; - fun rename rnm obj = - case try_to_get obj of NONE => (rnm,obj) - | SOME sub_obj => - let val (rnm,sub_obj') = Crnm.rename rnm sub_obj - in (rnm, update (K sub_obj') obj) end; - fun mk_renaming obj rnm = - case try_to_get obj of NONE => rnm - | SOME sub_obj => Crnm.mk_renaming sub_obj rnm; - val do_renaming = update o Crnm.do_renaming; - val restrict_to_avoiding = Crnm.restrict_to_avoiding; -end; - - -(* for when obj has no names in it. *) -functor UnitCompoundRenamingFun(type obj) -: COMPOUND_RENAMING -= struct -type renaming = unit; -type obj = obj; -val empty_renaming = (); -fun rename rnm obj = (rnm,obj); -fun mk_renaming obj rnm = rnm; -fun do_renaming rnm obj = obj; -fun restrict_to_avoiding rnm = rnm; -end; - - -(* for when obj has no names in it. *) -functor EmptyCompoundRenamingFun(structure Nm : BASIC_NAME type obj) -: COMPOUND_RENAMING -= struct -type renaming = Nm.Rnm.T; -type obj = obj; -val empty_renaming = Nm.Rnm.empty; -fun rename rnm obj = (rnm,obj); -fun mk_renaming obj rnm = rnm; -fun do_renaming rnm obj = obj; -fun restrict_to_avoiding rnm = rnm; -end; - - -(* -signature ALPHA_CONVABLE -= sig -include COMPOUND_RENAMABLE; - -( * fold over two objects as far as they have the same structure - assumes f is applied to each pair of names; returns NONE is structure doesn't - match. -* ) -val fold_2obj_names : (Nm.name * Nm.name -> 'a -> 'a) - -> obj * obj -> 'a -> 'a option - -end; - - -functor AlphaFun(S : ALPHA) -= struct - -exception stop_exp of unit; - -fun alpha (objs as (obj1,obj2)) rn = - (S.fold_2obj_names - (fn (n1,n2) => fn rnm => - case S.try_rename1 n1 n2 rnm - of NONE => raise stop_exp() - | SOME rnm2 => rnm2) - objs - rn) - handle stop_exp _ => NONE; - -end; -*) - diff --git a/core/lib/names/name_binrel.ML b/core/lib/names/name_binrel.ML deleted file mode 100644 index 45983844..00000000 --- a/core/lib/names/name_binrel.ML +++ /dev/null @@ -1,405 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: name_binrel.ML - Author: Lucas Dixon, University of Edinburgh - lucas.dixon@ed.ac.uk - Updated: 10 Dec 2008 - Date: 10 Dec 2008 -*) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* - Binary Relations over finite name sets - (objects with order and fresh construction operations) - (e.g. two way linked dependencies on names) - - Curretly removes element from dom if there is nothing in cod to - related to it; and visa-versa for cod/dom. - - POSSIBLE IMPROVEMENT: lazy updates? allow unrelated elements? -*) -signature NAME_BINREL_SHARING -= sig -structure Dom : NAME_SHARING -structure Cod : NAME_SHARING -type T -end; - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -signature NAME_BINREL -= sig - -structure Dom : NAME (* domain elements *) -structure Cod : NAME (* codomain elements *) -type T - -val empty : T -val is_empty : T -> bool - -(* add?: union, intersection, ... *) - -(* is the pair in the relation? 
*) -val contains : T -> (Dom.name * Cod.name) -> bool -(* lookup: gives NONE if the domain is not even in the relation *) -val lookup : T -> (Dom.name * Cod.name) -> bool option - -(* getting related sets of names (domain and codomain functions) *) -val domf : T -> Dom.name -> Cod.NSet.T (* is empty if none *) -val codf : T -> Cod.name -> Dom.NSet.T (* is empty if none *) - -(* internal; partial function, NONE when there is no such set *) -val lookup_domf : T -> Dom.name -> Cod.NSet.T option -val lookup_codf : T -> Cod.name -> Dom.NSet.T option - -(* add and delete many binary relations; - all deletes also remove disconnected elements *) -val add_many : Dom.NSet.T -> Cod.NSet.T -> T -> T -val del_many : Dom.NSet.T -> Cod.NSet.T -> T -> T - -(* 1 to many relations *) -val add_to_dom : Dom.name -> Cod.NSet.T -> T -> T -val add_to_cod : Cod.name -> Dom.NSet.T -> T -> T - -val del_from_dom : Dom.name -> Cod.NSet.T -> T -> T -val del_from_cod : Cod.name -> Dom.NSet.T -> T -> T - -val del_dom : Dom.name -> T -> T -val del_cod : Cod.name -> T -> T - -(* 1-1 relations, union/subtraction (do not raise) *) -val add1 : Dom.name -> Cod.name -> T -> T -val del1 : Dom.name -> Cod.name -> T -> T - -(* union of two relations *) -val union_merge : T -> T -> T - -(* rename *) -val rename1_dom : Dom.name -> Dom.name -> T -> (Dom.name * T) -val rename1_cod : Cod.name -> Cod.name -> T -> (Cod.name * T) - -val rename1_opt_dom : Dom.name -> Dom.name -> T -> (Dom.name option * T) -val rename1_opt_cod : Cod.name -> Cod.name -> T -> (Cod.name option * T) - -val try_rename1_dom : Dom.name -> Dom.name -> T -> T option -val try_rename1_cod : Cod.name -> Cod.name -> T -> T option - -(* converting to/from lists *) -val of_rawlist : (Dom.name * Cod.name) list -> T -val rawlist_of : T -> (Dom.name * Cod.name) list - -val of_list : (Dom.name * Cod.NSet.T) list -> T -val list_of : T -> (Dom.name * Cod.NSet.T) list - -(* get internal tables *) -val get_domset : T -> Dom.NSet.T -val get_codset : T -> Cod.NSet.T -val get_domtab : T -> Cod.NSet.T Dom.NTab.T -val get_codtab : T -> Dom.NSet.T Cod.NTab.T - - -(* pretty printing *) -val pretty : T -> Pretty.T -val print : T -> unit - -(* sharing substruct *) -structure Sharing : NAME_BINREL_SHARING -sharing Sharing.Dom = Dom.Sharing -sharing Sharing.Cod = Cod.Sharing -sharing type Sharing.T = T; - -end; - - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -functor NameBRelFun(structure Dom : NAME and Cod : NAME) -: NAME_BINREL -= struct - -structure Dom = Dom; (* argument 1 name type *) -structure Cod = Cod; (* argument 2 name type *) - -(* datatype for meta variable environments *) -(* invarient: *) -datatype T = BinRel of { - domf : Cod.NSet.T Dom.NTab.T, - codf : Dom.NSet.T Cod.NTab.T -}; - -fun get_domtab (BinRel rep) = #domf rep; -fun get_codtab (BinRel rep) = #codf rep; - -val empty = BinRel {domf = Dom.NTab.empty, - codf = Cod.NTab.empty}; - -val is_empty = Dom.NTab.is_element_empty o get_domtab; - -val get_domset = Dom.NTab.get_nameset o get_domtab; -val get_codset = Cod.NTab.get_nameset o get_codtab; - -fun update_domf f (BinRel rep) = - BinRel {domf = f (#domf rep), codf = (#codf rep)} - -fun update_codf f (BinRel rep) = - BinRel {domf = (#domf rep), codf = f (#codf rep)} - -val set_domtab = update_domf o K; -val set_codtab = update_codf o K; - -(* implicit: rel n *) -val lookup_domf = Dom.NTab.lookup o get_domtab; -val lookup_codf = Cod.NTab.lookup o get_codtab; - -fun domf rel n = - case lookup_domf rel n of NONE => Cod.NSet.empty | SOME x => 
x; - -fun codf rel n = - case lookup_codf rel n of NONE => Dom.NSet.empty | SOME x => x; - -(* given two elements check if bin-rel holds for them *) -fun contains rel (a,b) = Cod.NSet.contains (domf rel a) b; -fun lookup rel (a,b) = - (case lookup_domf rel a - of NONE => NONE - | SOME bset => SOME (Cod.NSet.contains bset b)); - - -(* add and remove things from the binary relation. *) -fun add_many aset bset rel = - if (Dom.NSet.is_empty aset) orelse (Cod.NSet.is_empty bset) then rel - else - rel |> update_domf - (fn abtab => - Dom.NSet.fold - (fn a => fn abtab2 => - case Dom.NTab.lookup abtab2 a of - NONE => Dom.NTab.ins (a,bset) abtab2 - | SOME oldbset => Dom.NTab.update (a,Cod.NSet.union_merge bset oldbset) abtab2 - ) aset abtab) - |> update_codf - (fn batab => - Cod.NSet.fold - (fn b => fn batab2 => - case Cod.NTab.lookup batab2 b of - NONE => Cod.NTab.ins (b,aset) batab2 - | SOME oldaset => Cod.NTab.update (b,Dom.NSet.union_merge aset oldaset) batab2 - ) bset batab); - -fun del_many aset bset rel = - rel |> update_domf - (fn abtab => - Dom.NSet.fold - (fn a => fn abtab2 => - case Dom.NTab.lookup abtab2 a of - NONE => abtab2 - | SOME oldbset => - let val newbset = Cod.NSet.subtract oldbset bset - in if Cod.NSet.is_empty newbset then Dom.NTab.delete a abtab2 - else Dom.NTab.update (a,newbset) abtab2 end) - aset abtab) - |> update_codf - (fn batab => - Cod.NSet.fold - (fn b => fn batab2 => - case Cod.NTab.lookup batab2 b of - NONE => batab2 - | SOME oldaset => - let val newaset = Dom.NSet.subtract oldaset aset - in if Dom.NSet.is_empty newaset then Cod.NTab.delete b batab2 - else Cod.NTab.update (b,newaset) batab2 end) - bset batab); - -(* 1-many relations *) -(* implicit: rel *) -fun add_to_dom a bset = add_many (Dom.NSet.single a) bset; -fun add_to_cod b aset = add_many aset (Cod.NSet.single b); -fun del_from_dom a bset = del_many (Dom.NSet.single a) bset; -fun del_from_cod b aset = del_many aset (Cod.NSet.single b); - -(* 1-1 relations *) -(* implicit: rel *) -fun add1 a b = add_many (Dom.NSet.single a) (Cod.NSet.single b); -fun del1 a b = del_many (Dom.NSet.single a) (Cod.NSet.single b); - -(* union of two binary relations *) -fun union_merge r1 r2 = - r2 - |> Dom.NTab.fold (fn (a,bset) => add_to_dom a bset) (get_domtab r1) - |> Cod.NTab.fold (fn (b,aset) => add_to_cod b aset) (get_codtab r1) - -(* convert to/from lists *) -val list_of = Dom.NTab.list_of o get_domtab; -fun of_list l = fold (fn (a,bset) => add_to_dom a bset) l empty; - -val rawlist_of = maps (fn (a,bset) => map (pair a) (Cod.NSet.list_of bset)) - o Dom.NTab.list_of o get_domtab; -fun of_rawlist l = fold (fn (a,b) => add1 a b) l empty; - -(* deleting all entries *) -fun del_dom a1 rel = - (case Dom.NTab.lookup (get_domtab rel) a1 of - NONE => rel - | SOME changedbs => rel |> del_from_dom a1 changedbs); - -fun del_cod b1 rel = - (case Cod.NTab.lookup (get_codtab rel) b1 of - NONE => rel - | SOME changedas => rel |> del_from_cod b1 changedas); - -(* renaming *) - -(* rename domain element *) -fun rename1_opt_dom a1 a2 rel = - let val domtab = get_domtab rel - in - case Dom.NTab.lookup domtab a1 of - NONE => (NONE, rel) - | SOME codset => - let val (a3opt,domtab2) = Dom.NTab.rename1_opt a1 a2 domtab - val a3 = case a3opt of NONE => a2 | SOME a3' => a3' - in - (a3opt, - rel |> set_domtab domtab2 - |> update_codf - (fn codtab => - Cod.NSet.fold - (Cod.NTab.map_entry (snd o Dom.NSet.rename1 a1 a3)) - codset codtab)) - end - end; - -fun rename1_dom a1 a2 rel = - let val domtab = get_domtab rel - in - case Dom.NTab.lookup 
domtab a1 of - NONE => (a2, rel) - | SOME codset => - let val (a3,domtab2) = Dom.NTab.rename1 a1 a2 domtab - in - (a3, - rel |> set_domtab domtab2 - |> update_codf - (fn codtab => - Cod.NSet.fold - (Cod.NTab.map_entry (snd o Dom.NSet.rename1 a1 a3)) - codset codtab)) - end - end; - -fun try_rename1_dom a1 a2 rel = - let val domtab = get_domtab rel - in case Dom.NTab.try_rename1 a1 a2 domtab of - NONE => NONE - | SOME domtab2 => - (case Dom.NTab.lookup domtab a1 of - NONE => SOME rel - | SOME codset => - SOME (rel |> set_domtab domtab2 - |> update_codf - (fn codtab => - Cod.NSet.fold - (Cod.NTab.map_entry (snd o Dom.NSet.rename1 a1 a2)) - codset codtab))) - end; - -(* rename codomain element *) -fun rename1_opt_cod a1 a2 rel = - let val codtab = get_codtab rel - in - case Cod.NTab.lookup codtab a1 of - NONE => (NONE, rel) - | SOME domset => - let val (a3opt,codtab2) = Cod.NTab.rename1_opt a1 a2 codtab - val a3 = case a3opt of NONE => a2 | SOME a3' => a3' - in - (a3opt, - rel |> set_codtab codtab2 - |> update_domf - (fn domtab => - Dom.NSet.fold - (Dom.NTab.map_entry (snd o Cod.NSet.rename1 a1 a3)) - domset domtab)) - end - end; - -fun rename1_cod a1 a2 rel = - let val codtab = get_codtab rel - in - case Cod.NTab.lookup codtab a1 of - NONE => (a2, rel) - | SOME domset => - let val (a3,codtab2) = Cod.NTab.rename1 a1 a2 codtab in - (a3, - rel |> set_codtab codtab2 - |> update_domf - (fn domtab => - Dom.NSet.fold - (Dom.NTab.map_entry (snd o Cod.NSet.rename1 a1 a3)) - domset domtab)) - end - end; - -fun try_rename1_cod a1 a2 rel = - let val codtab = get_codtab rel - in case Cod.NTab.try_rename1 a1 a2 codtab of - NONE => NONE - | SOME codtab2 => - (case Cod.NTab.lookup codtab a1 of - NONE => SOME rel - | SOME domset => - SOME (rel |> set_codtab codtab2 - |> update_domf - (fn domtab => - Dom.NSet.fold - (Dom.NTab.map_entry (snd o Cod.NSet.rename1 a1 a2)) - domset domtab))) - end; - - -(* pretty printing *) -fun pretty rel = - Pretty.chunks - [Pretty.str "Rel{", - Pretty.block [Pretty.str "Dom->Cod:", - Dom.NTab.pretty Cod.NSet.pretty (get_domtab rel)], - Pretty.block [Pretty.str "Cod->Dom:", - Cod.NTab.pretty Dom.NSet.pretty (get_codtab rel)], - Pretty.str "}"]; -val print = Pretty.writeln o pretty; - - -structure Sharing = struct - structure Dom = Dom.Sharing; - structure Cod = Cod.Sharing; - type T = T; -end; (* sharing struct *) - -end; - - -(*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-*) -(* two (more) instances of type-distinct binary relations - (sharing Dom and Cod) *) -signature TWO_CONV_NAME_BINRELS -= sig -structure R : NAME_BINREL -structure A : NAME_BINREL -structure B : NAME_BINREL -val coerceRA : R.T -> A.T -val coerceRB : R.T -> B.T -val coerceAR : A.T -> R.T -val coerceBR : B.T -> R.T -sharing A.Dom.Sharing = R.Dom.Sharing -sharing A.Cod.Sharing = R.Cod.Sharing -sharing B.Dom.Sharing = R.Dom.Sharing -sharing B.Cod.Sharing = R.Cod.Sharing -end; - - -functor TwoRelsFun(R : NAME_BINREL) : TWO_CONV_NAME_BINRELS -= struct - structure R = R; - structure A = R; - structure B = R; - fun coerceRB (x : R.T) = x; - fun coerceRA (x : R.T) = x; - fun coerceAR x = (x : R.T); - fun coerceBR x = (x : R.T); -end; diff --git a/core/lib/names/name_inj.ML b/core/lib/names/name_inj.ML deleted file mode 100644 index 4bf3598f..00000000 --- a/core/lib/names/name_inj.ML +++ /dev/null @@ -1,401 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - -Note: This is essentially injective mapping between finite sets of - (possibly 
different) names (Dom and Cod). - -The structure changes names in an old type of name space to names in a -new type of name space. This is a roughly a generalisation of -renaming, but is a little less efficient, amd with a different -treatment of new names, so renaming is written as it's own structure. - -Renaming =aprox= - CHANGE_NAME where Dom = Cod, - and translate_name = identify function. - -Inj are less efficient than renaming in that we also store in -the inverse name mapping. This allows renaming of the domain and -codomain elements, but slightly slows down the adding of names to the -name change type. - -The change-name operation uses a set of ignored names (not renamed), a -set of names to be avoided (don't make any new name one of these), and -a table of renamings (the name translation used). - -*) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -signature NAME_INJ_SHARING = (* all types for sharing *) -sig - structure Dom : NAME_SHARING - structure Cod : NAME_SHARING - type T -end; - - -signature NAME_INJ -= sig - -structure Dom : NAME -structure Cod : NAME - -(* when Dom and Cod are not the same, a translation function is needed *) -val translate_name : Dom.name -> Cod.name - -(* type of name-change / injective map (and inverse) *) -type T - -(* constructors *) -val empty : T; -val is_empty : T -> bool; - -(* contains/lookup (raised NTab exception if dom does not exist) *) -val assert_contains : T -> (Dom.name * Cod.name) -> bool -val contains : T -> (Dom.name * Cod.name) -> bool -(* lookup: NONE if domain element does not exist *) -val lookup : T -> (Dom.name * Cod.name) -> bool option - -(* other operations on the change name space *) -val union_ignores : Dom.NSet.T -> T -> T -val union_avoids : Cod.NSet.T -> T -> T -val avoid_ignores_image : T -> T; - -(* assumes two mappings are disjoint/compatible *) -val union_merge : T -> T -> T - -(* for doing renaming/creating an injective map *) -val change1 : Dom.name (* old name *) - -> T (* renaming so far *) - -> (Cod.name option) * T (* new name and extended renaming *) - -(* for alpha-equivlance/injection-checking *) -val try_change1 : Dom.name (* old name *) - -> Cod.name (* new name *) - -> T (* renaming so far *) - -> T option (* new name and extended renaming *) -(* as above, but raises exception *) -exception add_exp of Dom.name * Cod.name * T -val add : Dom.name -> Cod.name -> T -> T - -(* renaming names *) -val try_rename1_dom : Dom.name -> Dom.name -> T -> T option -val try_rename1_cod : Cod.name -> Cod.name -> T -> T option - - - -(* -val rename_dom : Dom.renaming -> T -> Dom.renaming * T -val rename_cod : Cod.renaming -> T -> Cod.renaming * T -*) - -(* lookuping up renamings *) -val domf : T -> Dom.name -> Cod.name -val codf : T -> Cod.name -> Dom.name -val lookup_domf : T -> Dom.name -> Cod.name option -val lookup_codf : T -> Cod.name -> Dom.name option - -val dom_contains : T -> Dom.name -> bool -val cod_contains : T -> Cod.name -> bool - - -(* removing entries *) -(* delete an element from the domain (and the corresponding one from cod) *) -val del_dom : Dom.name -> T -> T -val del_cod : Cod.name -> T -> T - -(* info about renaming *) -val get_codset : T -> Cod.NSet.T -val get_domset : T -> Dom.NSet.T - -val get_ignored : T -> Dom.NSet.T -val get_avoids : T -> Cod.NSet.T - -val get_domtab : T -> Cod.name Dom.NTab.T -val get_codtab : T -> Dom.name Cod.NTab.T - -(* restrict to a given domain/codomain *) -val restrict_dom_to : Dom.NSet.T -> T -> T -val restrict_cod_to : Cod.NSet.T -> T -> T - 
-(* pretty printing *) -val pretty : T -> Pretty.T -val print : T -> unit; - -(* internal representation *) -val constr : {ignore : Dom.NSet.T, (* These names are not renamed/changed *) - avoids : Cod.NSet.T, (* Nothing is allowed to be renamed to these *) - domf : Cod.name Dom.NTab.T, (* mapping from old to new names *) - codf : Dom.name Cod.NTab.T} -> T -val destr : T -> {ignore : Dom.NSet.T, - avoids : Cod.NSet.T, - domf : Cod.name Dom.NTab.T, - codf : Dom.name Cod.NTab.T} - -(* sharing *) -structure Sharing : NAME_INJ_SHARING -sharing type Sharing.T = T -sharing Sharing.Dom = Dom.Sharing -sharing Sharing.Cod = Cod.Sharing - -end; - - -(* *) -functor NameInjFun(structure Dom : NAME - structure Cod : NAME - val translate_name : Dom.name -> Cod.name) -: NAME_INJ -= struct - -structure Dom = Dom; -structure Cod = Cod; -val translate_name = translate_name; - -(* Invariants: - ignore <= avoids - (dom(cn) + range(cn)); - cod <= avoids - dom(cn); - cod = range(cn); - dom(cn) <= avoids; - range(cn) <= avoids; - cod = dom-1 -*) -(* THINK: maybe have a set of old/newnames for which the translation -was direct and didn't result in a name clash: like ignores for -renaming? *) -datatype T = Iso of - {ignore : Dom.NSet.T, (* These names are not renamed/changed *) - avoids : Cod.NSet.T, (* Nothing is allowed to be renamed to these *) - domf : Cod.name Dom.NTab.T, (* mapping from old to new names *) - codf : Dom.name Cod.NTab.T (* mapping from new names to their old name *) - }; - - -val constr = Iso; -fun destr (Iso rep) = rep; - -(* the empty changing of names that does nothing *) -val empty = - Iso {ignore = Dom.NSet.empty, - avoids = Cod.NSet.empty, - domf = Dom.NTab.empty, - codf = Cod.NTab.empty}; - -fun union_ignores ignores' (Iso rep) = - Iso - {ignore = Dom.NSet.union_merge ignores' (#ignore rep), - avoids = #avoids rep, - domf = #domf rep, - codf = #codf rep}; - -fun union_avoids avoids' (Iso rep) = - Iso - {ignore = #ignore rep, - avoids = Cod.NSet.union_merge avoids' (#avoids rep), - domf = #domf rep, - codf = #codf rep}; - -(* get info/status of renaming *) -fun get_avoids (Iso rep) = (#avoids rep); -fun get_domtab (Iso rep) = (#domf rep); - -val is_empty = Dom.NTab.is_element_empty o get_domtab; - -fun get_codtab (Iso rep) = (#codf rep); -fun get_ignored (Iso rep) = (#ignore rep); -val get_codset = Cod.NTab.get_nameset o get_codtab; -val get_domset = Dom.NTab.get_nameset o get_domtab; - -val dom_contains = Dom.NTab.contains o get_domtab; -val cod_contains = Cod.NTab.contains o get_codtab; - -val domf = Dom.NTab.get o get_domtab; -val codf = Cod.NTab.get o get_codtab; - -val lookup_domf = Dom.NTab.lookup o get_domtab; -val lookup_codf = Cod.NTab.lookup o get_codtab; - - -(* given two elements check if function maps them *) -fun assert_contains m (a,b) = Cod.name_eq (domf m a, b); -fun lookup m (a,b) = - (case lookup_domf m a - of NONE => NONE - | SOME b2 => SOME (Cod.name_eq (b2,b))); -fun contains m a_b = - (case lookup m a_b of SOME true => true | _ => false); - - -fun update_avoids f (Iso rep) = - Iso { ignore = #ignore rep, avoids = f (#avoids rep), - domf = #domf rep, codf = #codf rep }; - -(* implicit: T *) -fun avoid_ignores_image cn = - update_avoids - (Dom.NSet.fold (fn oldn => Cod.NSet.add (translate_name oldn)) - (get_ignored cn)) - cn; - -(* assumes compatable renamings; raises exception if not *) -fun union_merge cn1 cn2 = - Iso - {ignore = Dom.NSet.union_merge (get_ignored cn1) (get_ignored cn2), - avoids = Cod.NSet.union_merge (get_avoids cn1) (get_avoids cn2), - domf = 
Dom.NTab.merge_disjoint (get_domtab cn1) (get_domtab cn2), - codf = Cod.NTab.merge_disjoint (get_codtab cn1) (get_codtab cn2) }; - -(* add an old name to be changed to a new name *) -fun change1 oldn (cn as Iso rep) = - let val ignore = (#ignore rep) in - if Dom.NSet.contains ignore oldn then (NONE, cn) (* ignore it *) - else - let val domf = (#domf rep) in - (case Dom.NTab.lookup domf oldn of - NONE => - let - val avoids = (#avoids rep) - val maybe_newn = (translate_name oldn) - in - if Cod.NSet.contains avoids maybe_newn then (* rename it! *) - let val (n2,avoids2) = (Cod.NSet.add_new maybe_newn avoids) - in (SOME n2, - Iso {ignore = ignore, - avoids = avoids2, - domf = Dom.NTab.ins (oldn,n2) domf, - codf = Cod.NTab.ins (n2,oldn) (#codf rep)}) - end - else (NONE, - Iso {ignore = ignore, - avoids = Cod.NSet.ins_fresh maybe_newn avoids, - domf = Dom.NTab.ins (oldn,maybe_newn) domf, - codf = Cod.NTab.ins (maybe_newn,oldn) (#codf rep)}) - end - | SOME n2 => (SOME n2, cn)) - end - end; - - -(* rename an old name *) -fun try_change1 oldn newn (cn as Iso rep) = - let val domf = (#domf rep) in - case Dom.NTab.lookup domf oldn of - NONE => - let val avoids = (#avoids rep) in - if Cod.NSet.contains avoids newn then NONE - else - (SOME - (Iso {ignore = (#ignore rep), - avoids = Cod.NSet.ins_fresh newn avoids, - domf = Dom.NTab.ins (oldn,newn) domf, - codf = Cod.NTab.ins (newn,oldn) (#codf rep) })) - end - | SOME n2 => if Cod.name_eq (n2, newn) then SOME cn - else NONE - end; - -(* as above but raise exception - for when we know that oldn/newn - are not already mapped to/from *) -exception add_exp of Dom.name * Cod.name * T -fun add oldn newn cn = - (case try_change1 oldn newn cn of - NONE => raise add_exp (oldn, newn, cn) - | SOME cn2 => cn2); - -(* rename names *) -fun try_rename1_dom n1 n2 (cn as Iso rep) = - (case Dom.NTab.try_rename1 n1 n2 (#domf rep) - of SOME dom2 => - (case Dom.NSet.try_rename1 n1 n2 (#ignore rep) - of SOME ignore2 => - let val cod2 = - case Dom.NTab.lookup dom2 n2 of - NONE => (#codf rep) - | SOME codn => (Cod.NTab.update (codn, n2) (#codf rep)) - in - SOME (Iso {ignore = ignore2, - avoids = #avoids rep, - domf = dom2, - codf = cod2 }) - end - | NONE => NONE) - | NONE => NONE); - -fun try_rename1_cod n1 n2 (cn as Iso rep) = - (case Cod.NTab.try_rename1 n1 n2 (#codf rep) - of SOME cod2 => - (case Cod.NSet.try_rename1 n1 n2 (#avoids rep) - of SOME avoids2 => - let val dom2 = - case Cod.NTab.lookup cod2 n2 of - NONE => (#domf rep) - | SOME domn => Dom.NTab.update (domn,n2) (#domf rep) - in - SOME (Iso {ignore = #ignore rep, - avoids = avoids2, - domf = dom2, - codf = cod2 }) - end - | NONE => NONE) - | NONE => NONE); - - -(* delete an element from the domain (and the corresponding one from cod) *) -fun del_dom n (cn as Iso rep) = - let val dom1 = (#domf rep) val cod1 = (#codf rep) - in - case Dom.NTab.lookup dom1 n of - NONE => cn - | SOME codn => - Iso {ignore = Dom.NSet.delete n (#ignore rep), - avoids = Cod.NSet.delete codn (#avoids rep), - domf = Dom.NTab.delete n dom1, - codf = Cod.NTab.delete codn cod1} - end; - -fun del_cod n (cn as Iso rep) = - let val dom1 = (#domf rep) val cod1 = (#codf rep) - in - case Cod.NTab.lookup cod1 n of - NONE => cn - | SOME domn => - Iso {ignore = Dom.NSet.delete domn (#ignore rep), - avoids = Cod.NSet.delete n (#avoids rep), - domf = Dom.NTab.delete domn dom1, - codf = Cod.NTab.delete n cod1 } - end; - -(* takes time prop to dom *) -fun restrict_dom_to vs m = - Dom.NSet.fold - (fn d => fn m2 => if Dom.NSet.contains vs d then m2 - else 
del_dom d m2) - (get_domset m) - m; - -fun restrict_cod_to vs m = - Cod.NSet.fold - (fn n => fn m2 => if Cod.NSet.contains vs n then m2 - else del_cod n m2) - (get_codset m) - m; - - -(* pretty *) -fun pretty (Iso rep) = - Pretty.chunks - [Pretty.str "NameIso {", - Pretty.block [Pretty.str " ignore: ", Dom.NSet.pretty (#ignore rep)], - Pretty.block [Pretty.str " avoids: ", Cod.NSet.pretty (#avoids rep)], - Pretty.block [Pretty.str " domf: ", Dom.NTab.pretty Cod.pretty_name (#domf rep)], - Pretty.str "}"]; - -val print = Pretty.writeln o pretty; - - -(* *) -structure Sharing = struct - structure Dom = Dom.Sharing - structure Cod = Cod.Sharing - type T = T; -end; - -end; diff --git a/core/lib/names/name_injendo.ML b/core/lib/names/name_injendo.ML deleted file mode 100644 index f23682ea..00000000 --- a/core/lib/names/name_injendo.ML +++ /dev/null @@ -1,82 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - -Note: Injective Endomorphisms on Names (1-1) functions with same type of - dom and cod. - -(usually used for representing isomorphism from one name kind to itself) - - name_injendo = CHANGE_NAME where - translate_name = id; Unlike renaming, we don't keep cache of newnames. - -*) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* sharing substuff issue: would like to make a subtype of INJ for - injective endomorphism that adds the Dom=Cod constraint. -*) - -(* FIXME: I'd like to be able to further restrict existing signature of sharing - structure in NAME_INJ to being NAME_INJENDO_SHARING, e.g. where structure - Sharing : NAME_INJENDO_SHARING *) -signature NAME_INJENDO_SHARING = sig - include NAME_INJ_SHARING - sharing Dom = Cod -end; - -signature NAME_INJENDO = -sig - include NAME_INJ - sharing Sharing.Dom = Sharing.Cod - - (* renamings are trivially name injective endo *) - val of_rnm : Dom.Rnm.T -> T - val rnm_of : T -> Dom.Rnm.T - - (* amorph composition *) - val compose : T -> T -> T -end; - -(* Name endomorphisms *) -functor NameInjEndoFun(Nm : NAME) -: NAME_INJENDO -= struct - structure Nm = Nm; - structure NameInj = NameInjFun( - structure Dom = Nm - structure Cod = Nm - val translate_name = I - ) : NAME_INJ; - open NameInj; - - fun of_rnm rn = - let val rep = (Dom.Rnm.destr rn) - in - constr {ignore = #ignore rep, - avoids = #avoids rep, - domf = #nmap rep, - codf = - Dom.NTab.fold (fn (n1,n2) => Cod.NTab.doadd (n2,n1)) - (#nmap rep) Cod.NTab.empty } - end; - - fun rnm_of a = - let val rep = (destr a) - in - Dom.Rnm.constr {ignore = #ignore rep, - avoids = #avoids rep, - new = Dom.NSet.empty, - nmap = #domf rep} - end; - - (* HERE BE DRAGONS: assumptions about ignore and avoid: assumes dom of one matches cod of the other *) - fun compose a b = - let val repa = (destr a) - val repb = (destr b) - in - constr {ignore = #ignore repa, - avoids = #avoids repa, - domf = Dom.NTab.map_all (K (domf b)) (#domf repa), - codf = Cod.NTab.map_all (K (codf a)) (#codf repb) } - end; - -end; \ No newline at end of file diff --git a/core/lib/names/name_map.ML b/core/lib/names/name_map.ML deleted file mode 100644 index 5b136e09..00000000 --- a/core/lib/names/name_map.ML +++ /dev/null @@ -1,426 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: names_map.ML - Author: Lucas Dixon - lucas.dixon@ed.ac.uk *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - - Name mappings: (not-necessarily-isomorphic) functions over - finite sets of names - - 
Note: not necessarily surjective (inverse set is not always - defined), and may not be injective either. - -*) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -signature NAME_MAP_SHARING = (* all types for sharing *) -sig - structure Dom : NAME_SHARING - structure Cod : NAME_SHARING - type T -end; -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -signature NAME_MAP = -sig -structure Dom : NAME (* domain elements *) -structure Cod : NAME (* codomain elements *) - -(* name map type *) -type T - -val empty : T -val is_empty : T -> bool - -(* add?: union, intersection, ... *) - -(* contains/lookup (raised Tab.UNDEF exception if dom does not exist) *) -val assert_contains : T -> (Dom.name * Cod.name) -> bool -(* lookup: NONE if domain element does not exist *) -val lookup : T -> (Dom.name * Cod.name) -> bool option - -(* domain and codomain functions *) -exception not_in_dom_exp of Dom.name * T; -exception not_in_cod_exp of Cod.name * T; -val domf : T -> Dom.name -> Cod.name (* raise *) -val codf : T -> Cod.name -> Dom.NSet.T (* raise *) - -(* internal; partial function, NONE when there is no such set *) -val lookup_domf : T -> Dom.name -> Cod.name option -val lookup_codf : T -> Cod.name -> Dom.NSet.T option -val dom_contains : T -> Dom.name -> bool -val cod_contains : T -> Cod.name -> bool - -(* add and delete many things *) -(* val add_many_to_1cod : Dom.NSet.T -> Cod.name -> T -> T *) -(* val del_many_from_1cod : Cod.name -> Dom.NSet.T -> T -> T *) - -(* remove things preserving surjectivity: delete unmapped cod elements *) -val surj_del_dom' : Dom.name -> T - -> (Cod.name option (* removed this from cod *) - * T) -val surj_del_dom : Dom.name -> T -> T -(* basic delete: leaves unampped elements in the map *) -val del_dom : Dom.name -> T -> T -val del_cod : Cod.name -> T -> T - -(* adding mappings *) -val add_to_dom : Dom.name -> Cod.name -> T -> T -val add_to_cod : Cod.name -> Dom.NSet.T -> T -> T - -(* extend/update existing map with *) -val extend_cod_by1 : Dom.name -> Cod.name -> T -> T -val extend_cod_by : Dom.NSet.T -> Cod.name -> T -> T - -(* take union of two maps; assumes disjoint domains *) -val union_merge : T -> T -> T - -(* renaming *) -val rename1_dom : Dom.name -> Dom.name -> T -> (Dom.name * T) -val rename1_cod : Cod.name -> Cod.name -> T -> (Cod.name * T) - -val rename1_opt_dom : Dom.name -> Dom.name -> T -> (Dom.name option * T) -val rename1_opt_cod : Cod.name -> Cod.name -> T -> (Cod.name option * T) - -val try_rename1_dom : Dom.name -> Dom.name -> T -> T option -val try_rename1_cod : Cod.name -> Cod.name -> T -> T option - -(* converting to/from lists *) -val of_list : (Dom.name * Cod.name) list -> T -val list_of : T -> (Dom.name * Cod.name) list - -val of_codf_list : (Cod.name * Dom.NSet.T) list -> T -val codf_list_of : T -> (Cod.name * Dom.NSet.T) list - -(* get internal tables *) -val get_domset : T -> Dom.NSet.T -val get_codset : T -> Cod.NSet.T -val get_domlist : T -> Dom.name list -val get_codlist : T -> Cod.name list -val get_domtab : T -> Cod.name Dom.NTab.T -val get_codtab : T -> Dom.NSet.T Cod.NTab.T - -(* restrict to a given domain/codomain *) -val restrict_dom_to : Dom.NSet.T -> T -> T -val restrict_cod_to : Cod.NSet.T -> T -> T - -val premap : {ignore : Dom.NSet.T, - avoids : Dom.NSet.T, - domf : Dom.name Dom.NTab.T, - codf : Dom.name Dom.NTab.T } -> T -> T - -(* pretty printing *) -val pretty : T -> Pretty.T -val print : T -> unit - -structure Sharing : NAME_MAP_SHARING -sharing Sharing.Dom = Dom.Sharing 
-sharing Sharing.Cod = Cod.Sharing -sharing type Sharing.T = T; - -end; - -(* *) -functor NameMapFun(structure Dom : NAME and Cod : NAME) -: NAME_MAP -= struct - -structure Dom = Dom; (* argument 1 name type *) -structure Cod = Cod; (* argument 2 name type *) - -(* datatype for meta variable environments *) -(* invarient: *) -datatype T = NameMap of { - domf : Cod.name Dom.NTab.T, - codf : Dom.NSet.T Cod.NTab.T -}; - -val empty = NameMap {domf = Dom.NTab.empty, - codf = Cod.NTab.empty}; - -fun get_domtab (NameMap rep) = #domf rep; -fun get_codtab (NameMap rep) = #codf rep; - -val get_domset = Dom.NTab.get_nameset o get_domtab; -val get_codset = Cod.NTab.get_nameset o get_codtab; - -val is_empty = Dom.NTab.is_element_empty o get_domtab; - -val get_domlist = Dom.NSet.list_of o get_domset; -val get_codlist = Cod.NSet.list_of o get_codset; - -fun update_domf f (NameMap rep) = - NameMap {domf = f (#domf rep), codf = (#codf rep)} - -fun update_codf f (NameMap rep) = - NameMap {domf = (#domf rep), codf = f (#codf rep)} - -val set_domtab = update_domf o K; -val set_codtab = update_codf o K; - -fun union_merge f1 f2 = - let - fun merge_codfs c1 c2 = - Cod.NTab.fold - (fn (v as (n,a)) => fn c3 => - case Cod.NTab.lookup c3 n - of NONE => Cod.NTab.update_new (n,a) c3 - | SOME a2 => Cod.NTab.update (n,Dom.NSet.union_merge a a2) c3) - c1 c2; - in - NameMap { - domf = Dom.NTab.merge_joint - Cod.name_eq (get_domtab f1) (get_domtab f2), - codf = merge_codfs (get_codtab f1) (get_codtab f2) - } - end; - - -(* implicit: m n *) -val lookup_domf = Dom.NTab.lookup o get_domtab; -val lookup_codf = Cod.NTab.lookup o get_codtab; - -val dom_contains = Dom.NTab.contains_name o get_domtab; -val cod_contains = Cod.NTab.contains_name o get_codtab; - - -exception not_in_dom_exp of Dom.name * T; -exception not_in_cod_exp of Cod.name * T; - -fun domf m n = - case lookup_domf m n of - NONE => raise not_in_dom_exp (n,m) - | SOME x => x; - -fun codf m n = - case lookup_codf m n of - NONE => raise not_in_cod_exp (n,m) - | SOME x => x; - -(* given two elements check if function maps them *) -fun assert_contains m (a,b) = Cod.name_eq (domf m a, b); -fun lookup m (a,b) = - (case lookup_domf m a - of NONE => NONE - | SOME b2 => SOME (Cod.name_eq (b2,b))); -fun contains m a_b = - (case lookup m a_b of SOME true => true | _ => false); - -(* aset cannot be mapped already *) -fun add_many_to_1cod aset b m = - m |> update_domf (Dom.NSet.fold - (fn a => Dom.NTab.ins (a,b)) - aset) - |> update_codf (fn batab2 => - case Cod.NTab.lookup batab2 b of - NONE => Cod.NTab.ins (b,aset) batab2 - | SOME oldaset => - Cod.NTab.update (b,Dom.NSet.union_merge aset oldaset) batab2 - ); - -(* note: remaps aset if previously mapped *) -fun extend_cod_by aset b m = - let val aset0 = case lookup_codf m b of NONE => Dom.NSet.empty - | SOME aset0 => aset0 - in - m |> update_domf (Dom.NSet.fold - (fn a => Dom.NTab.update (a,b)) aset) - |> update_codf (Cod.NTab.update (b,Dom.NSet.union_merge aset aset0)) - end; - -val extend_cod_by1 = extend_cod_by o Dom.NSet.single; - - - -(* delete many elements *) -fun surj_del_dom' a m = - (case lookup_domf m a of - NONE => (NONE,m) - | SOME b => - let val aset = Dom.NSet.delete a (codf m b) - val codtab = get_codtab m - val (delopt, codtab2) = - (* surjective delete:, if unmapped cod, remove from cod *) - if Dom.NSet.is_empty aset then - (SOME b, Cod.NTab.delete b codtab) - else - (NONE, Cod.NTab.update (b,aset) codtab) - in - (delopt, - m |> set_codtab codtab2 - |> update_domf (Dom.NTab.delete a)) - end); -val 
surj_del_dom = snd oo surj_del_dom'; - -(* non-surjective delete: leaves unampped elements in the codomain *) -fun del_dom' a m = - (case lookup_domf m a of - NONE => m - | SOME b => - m |> update_codf (Cod.NTab.map_entry (Dom.NSet.delete a) b) - |> update_domf (Dom.NTab.delete a)); -(* also removes domain elements that map to it *) -fun del_cod' b m = - (case lookup_codf m b of - NONE => (Dom.NSet.empty, m) - | SOME aset => - (aset, m |> update_codf (Cod.NTab.delete b) - |> update_domf (Dom.NSet.fold Dom.NTab.delete aset))); - -(* deleting all entries *) -val del_dom = del_dom'; -val del_cod = snd oo del_cod'; - -(* implicit: a b m *) -val add_to_dom = add_many_to_1cod o Dom.NSet.single; -(* implicit: m *) -fun add_to_cod b aset = add_many_to_1cod aset b; - -(* implicit: a b m *) -val add1 = add_to_dom; -(* implicit: a m *) -(* val del1_from_cod = del_dom; *) - -(* convert to/from lists *) -val list_of = Dom.NTab.list_of o get_domtab; -fun of_list l = fold (fn (a,b) => add1 a b) l empty; - -val codf_list_of = Cod.NTab.list_of o get_codtab; -fun of_codf_list l = fold (fn (b,aset) => add_to_cod b aset) l empty; - -(* renaming *) - -(* rename domain element *) -fun rename1_opt_dom a1 a2 m = - let val domtab = get_domtab m - in - case Dom.NTab.lookup domtab a1 of - NONE => (NONE, m) - | SOME b => - let val (a3opt,domtab2) = Dom.NTab.rename1_opt a1 a2 domtab - val a3 = case a3opt of NONE => a2 | SOME a3' => a3' - in - (a3opt, - m |> set_domtab domtab2 - |> update_codf - (Cod.NTab.map_entry (snd o Dom.NSet.rename1 a1 a3) b)) - end - end; - -fun rename1_dom a1 a2 m = - let val domtab = get_domtab m - in - case Dom.NTab.lookup domtab a1 of - NONE => (a2, m) - | SOME b => - let val (a3,domtab2) = Dom.NTab.rename1 a1 a2 domtab - in - (a3, - m |> set_domtab domtab2 - |> update_codf - (Cod.NTab.map_entry (snd o Dom.NSet.rename1 a1 a3) b)) - end - end; - -(* NONE on name clash *) -fun try_rename1_dom a1 a2 m = - let val domtab = get_domtab m - in case Dom.NTab.try_rename1 a1 a2 domtab of (* try to update domain *) - NONE => NONE (* name clash *) - | SOME domtab2 => - (case Dom.NTab.lookup domtab a1 of - NONE => SOME m (* nothing in codomain *) - | SOME b => - SOME (m |> set_domtab domtab2 - |> update_codf (* update codomain also *) - (Cod.NTab.map_entry (snd o Dom.NSet.rename1 a1 a2) b))) - end; - -(* rename codomain element *) -fun rename1_opt_cod a1 a2 m = - let val codtab = get_codtab m - in - case Cod.NTab.lookup codtab a1 of - NONE => (NONE, m) - | SOME domset => - let val (a3opt,codtab2) = Cod.NTab.rename1_opt a1 a2 codtab - val a3 = case a3opt of NONE => a2 | SOME a3' => a3' - in - (a3opt, - m |> set_codtab codtab2 - |> update_domf - (fn domtab => - Dom.NSet.fold (Dom.NTab.map_entry (K a3)) - domset domtab)) - end - end; - -fun rename1_cod a1 a2 m = - let val codtab = get_codtab m - in - case Cod.NTab.lookup codtab a1 of - NONE => (a2, m) - | SOME domset => - let val (a3,codtab2) = Cod.NTab.rename1 a1 a2 codtab in - (a3, - m |> set_codtab codtab2 - |> update_domf - (fn domtab => - Dom.NSet.fold (Dom.NTab.map_entry (K a3)) - domset domtab)) - end - end; - -fun try_rename1_cod a1 a2 m = - let val codtab = get_codtab m - in case Cod.NTab.try_rename1 a1 a2 codtab of - NONE => NONE - | SOME codtab2 => - (case Cod.NTab.lookup codtab a1 of - NONE => SOME m - | SOME domset => - SOME (m |> set_codtab codtab2 - |> update_domf - (fn domtab => - Dom.NSet.fold (Dom.NTab.map_entry (K a2)) - domset domtab))) - end; - -(* takes time prop to dom *) -fun restrict_dom_to vs m = - Dom.NSet.fold - (fn d => fn m2 => 
if Dom.NSet.contains vs d then m2 - else del_dom d m2) - (get_domset m) - m; - -fun restrict_cod_to vs m = - Cod.NSet.fold - (fn n => fn m2 => if Cod.NSet.contains vs n then m2 - else del_cod n m2) - (get_codset m) - m; - -(* EXTREME HACKING. We need some nice, unified way of combining maps. *) -fun premap a (NameMap b) = - NameMap ({domf = Dom.NTab.map_all (K (domf (NameMap b))) (#domf a), - codf = #codf b }); - -(* pretty printing *) -fun pretty m = - Pretty.chunks - [Pretty.str "NameMap{", - Pretty.block [Pretty.str "Dom->Cod:", - Dom.NTab.pretty Cod.pretty_name (get_domtab m)], - Pretty.block [Pretty.str "Cod->Dom:", - Cod.NTab.pretty Dom.NSet.pretty (get_codtab m)], - Pretty.str "}"]; -val print = Pretty.writeln o pretty; - -(* *) -structure Sharing = -struct - structure Dom = Dom.Sharing - structure Cod = Cod.Sharing - type T = T -end; - -end; diff --git a/core/lib/names/namer.ML b/core/lib/names/namer.ML deleted file mode 100644 index ab443799..00000000 --- a/core/lib/names/namer.ML +++ /dev/null @@ -1,120 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: namer.ML - Author: Lucas Dixon - lucas.dixon@ed.ac.uk - Initially written Oct 2005, Last Updated 3 Jan 2009 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - - Namer: basic concept of a name: ordered object with the ability to - construct new names that are the Suc of old ones. - - properties: - suc and pred inverse - ord (pred x, x) == less (optional for bottom element) - ord (suc x, x) == more - - pred can have a lower bound, such that (pred x = x) but suc - cannot have an upper bound. - - - note: so far I've only used NAMER, not (SNAMER and IDEAL_NAMER) -*) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -signature NAMER -= sig - type name - val suc : name -> name - val pred : name -> name option - val ord : name * name -> order - val eq : name * name -> bool - val pretty : name -> Pretty.T -end; - -signature PRETTY_ORD -= sig - type name - val ord : name * name -> order - val pretty : name -> Pretty.T -end; - -(* a functor to make a an opaque namer from a src namer type, this is - really just for name hidding. *) -functor AddDataNamerFun( -structure N : NAMER -type data -val ord_data : data * data -> order -val pretty : data * N.name -> Pretty.T -) : NAMER -= struct -type name = data * N.name; -fun suc (d,n) = (d, N.suc n); -fun pred (d,n) = - (case N.pred n of NONE => NONE | SOME n' => SOME (d, n')); -(* order must be this way around! Else break Suc operation on names, - which breaks name tables. *) -val ord = Library.prod_ord ord_data N.ord; -fun eq n1n2 = (case ord n1n2 of EQUAL => true | _ => false); -val pretty = pretty; -end; - - -(* a (safe) namer that has names made from a src type assumes: (mk (dest n) = n) *) -signature SNAMER -= sig -type src -include NAMER -val mk : src -> name -val dest : name -> src -end; - -(* a functor to make a an opaque namer from a src namer type, this is - really just for name hidding. 
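(* A small sketch of the NAMER contract stated above, using the IntNamer
   instance defined in namers.ML further down this patch:

     val n = 3 : IntNamer.name;
     val SOME m = IntNamer.pred (IntNamer.suc n);    (* pred undoes suc: m = 3 *)
     val LESS   = IntNamer.ord (n, IntNamer.suc n);  (* suc is strictly increasing *)
     val NONE   = IntNamer.pred 0;                   (* 0 has no predecessor *)
*)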
*) -functor SNamerFun(N : NAMER) : SNAMER -= struct -type src = N.name; -datatype name = Name of src; -fun dest (Name n) = n; -val mk = Name; -fun suc (Name n) = Name (N.suc n); -fun pred (Name n) = (case N.pred n of NONE => NONE | SOME n2 => SOME (Name n2)); -fun ord (Name n1, Name n2) = N.ord (n1,n2); -fun eq (Name n1, Name n2) = N.eq (n1,n2); -fun pretty (Name n) = N.pretty n; -end; - -(* names that have ideals - what they would ideally like to be, for nicer renaming *) -signature IDEAL_NAMER -= sig -structure Src : NAMER -include NAMER -val mk : Src.name (* ideal *) - * Src.name (* actual *) - -> name -val get_ideal : name -> Src.name (* each name has an ideal name it wants to be *) -val get_actual : name -> Src.name (* each name has an ideal name it wants to be *) -end; - -(* a functor to make an ideal namer from a src namer type, this is - really just for name hidding. It allows us to try to make every - name closer to the idea, for example, when used name is removed, we - might be able to rename later names. *) -functor IdealNamerFun(N : NAMER) : IDEAL_NAMER -= struct -structure Src = N; -datatype name = Name of N.name (* ideal *) - * N.name; (* actual *) - - -val mk = Name; -fun suc (Name (i,n)) = Name (i,N.suc n); -fun pred (Name (i,n)) = (case N.pred n of NONE => NONE - | SOME n2 => SOME (Name (i,n2))); -(* FIXME: is prod order right??? *) -fun ord (Name ns1, Name ns2) = Library.prod_ord N.ord N.ord (ns1, ns2); -fun eq n1n2 = (case ord n1n2 of EQUAL => true | _ => false); -fun pretty (Name (_,n)) = N.pretty n; -fun get_ideal (Name (i,_)) = i; -fun get_actual (Name (_,n)) = n; -end; - diff --git a/core/lib/names/namers.ML b/core/lib/names/namers.ML deleted file mode 100644 index 80df7d7a..00000000 --- a/core/lib/names/namers.ML +++ /dev/null @@ -1,294 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: gproof/namers.ML - Author: Lucas Dixon, University of Edinburgh - lucas.dixon@ed.ac.uk - Initially written Oct 2005, Updated 10 Mar 2006 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - Basic kinds of names and their lifting into FreshNamers. - - Requirements: - order must be transitive, and monotonic w.r.t. succ - pred (succ x) = x - - A useful problem to think of: - if "za" = succ("z") and "c7" < "z" then it must be that "c7" < "za" - standard string-orders (length then alpha, and basic alpha) fail - for this notion of succ. - - AK: if length then alpha, shouldn't c7 be bigger than z? - -*) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -(* string names that go up by changing ...zzz => ...aaaa, (the last n -"z"s to n +1 "a"s) and otherwise incrementing the last letter if its -alpha. If not alpha, we add an "a" to the end of it. *) - -(* names are string * int - int is incremeneted and represents - the last lower-case alpha part of the string *) -structure StrSNamer - : SNAMER where type src = string -= struct - -type src = string - -exception exp of string; - -datatype name = - StrName of string * int (* int is printed as alpha *) - | StrIntName of string * int; (* int is printed as int *) - - -val bot_char = #"a"; (* top char *) -val top_char = #"z"; (* top char *) -val char_root = (Char.ord bot_char) - 1; -val int_base = (Char.ord top_char) - char_root; (* cannot be 0 *) - -(* integers as chrlist, working in the above base with the given root - e.g. 
"a" = 0, "b" = 1, "ab" = int_base + 1, "aab" = int_base ^ 2 + 1, etc -*) -fun chrlist_of_int i = - let - fun chrlist_of' 0 l = l - | chrlist_of' i l = - let val (q,r) = IntInf.quotRem (i, int_base) (* Maybe use Integer.div_mod ? what's the difference? *) - in if r = 0 then - chrlist_of' (q - 1) (top_char::l) - else - chrlist_of' q ((Char.chr (char_root + r)) :: l) - end - in chrlist_of' i [] end; - -(* assumes characters between a and z. *) -(* IMPROVE? should check that c is in the int_base range? *) -fun int_of_chrlist [] = 0 - | int_of_chrlist (h::l) = - let - val i0 = (Char.ord h) - char_root - fun int_of' [] a = a - | int_of' (c::l) a = - let val i = (Char.ord c) - char_root - in int_of' l ((a * int_base) + i) end; - in (int_of' l i0) end; - -(* accumulate the alpha chars at the start of a given string: we use -this on a string that has been reversed so that we get all the alpha -chars at the end of the string. *) -fun is_digit c = (c >= #"0" andalso c <= #"9"); -fun is_alpha c = (c >= #"a" andalso c <= #"z"); - -fun split_prefix chkf a [] = ([], a) - | split_prefix chkf a (l as (h :: t)) = - if chkf h then split_prefix chkf (h::a) t - else (l, a); - -fun mk s = - let val cl = String.explode s - in - case (rev cl) - of [] => StrName (s,0) (* default is stringname... *) - | (c::cl2) => - if is_alpha c then - let val (sl,el) = split_prefix is_alpha [c] cl2 - in StrName(String.implode (rev sl), int_of_chrlist el) end - else if is_digit c then - let val (sl,el) = split_prefix is_digit [c] cl2 - in StrIntName(String.implode (rev sl), - case el of [] => 0 - | _ => the (Int.fromString (String.implode el))) end - else StrName (s,0) (* default for no postfix is string-postfix *) - end; - -fun dest (StrName (s,i)) = (s ^ (String.implode (chrlist_of_int i))) - | dest (StrIntName (s,i)) = (s ^ (Int.toString i)); - -val pretty = Pretty.str o dest; - -fun suc' (a,b) = (a, b + 1); -fun pred' (x as (a,0)) = NONE - | pred' (a,b) = SOME (a, b - 1); - -fun suc (StrName a) = StrName (suc' a) - | suc (StrIntName a) = StrIntName (suc' a); - -fun optappl f NONE = NONE - | optappl f (SOME x) = SOME (f x); - -fun pred (StrName a) = optappl StrName (pred' a) - | pred (StrIntName a) = optappl StrIntName (pred' a); - -(* check string order first, then integer suffix *) -val ord' = Library.prod_ord String.compare Int.compare; - -fun ord (StrName a, StrName b) = ord' (a,b) - | ord (StrIntName a, StrIntName b) = ord' (a,b) - | ord (StrName _, StrIntName _) = LESS - | ord (StrIntName _, StrName _) = GREATER; - -fun eq n1n2 = (case ord n1n2 of EQUAL => true | _ => false); - -end; (* structure *) - - - - -(* NOTE: probably not good to use - very slow - better to use StrSNamer, defined above *) -structure StrNamer -: NAMER -= struct - -exception exp of string; - -type name = string; -val pretty = Pretty.str; - -fun is_alpha c = (c >= #"a" andalso c <= #"z"); - -(* We count the final letters as these are the digits that change -when we reach the final letter(s). eg. zzz => aaaa *) -fun count_z nzs (#"z"::t) = count_z (nzs + 1) t - | count_z nzs l = (nzs,l); -fun count_a nas (#"a"::t) = count_a (nas + 1) t - | count_a nas l = (nas,l); -(* Note: only applied to alpha chars *) - -(* counting for digits, currently unused: TODO: add numerical endings? 
*) -fun count_9 nzs (#"9"::t) = count_9 (nzs + 1) t - | count_9 nzs l = (nzs,l); -fun count_0 nas (#"0"::t) = count_0 (nas + 1) t - | count_0 nas l = (nas,l); - -(* adds an "a" to the end of a string if no alpha ending part, if -we get a bunch of "z"'s, then we replace them by the number + 1 of -"a"'s. Else we just inc the last char's value *) -fun alpha_succ l = - let val (n, rest) = count_z 0 (rev l) - in - rev (case rest - of [] => List.tabulate (n + 1, K (#"a")) - | (h::t) => (List.tabulate (n, K (#"a"))) - @ ((Char.succ h) :: t)) - end; - -(* if l ends with "a"'s we replace it by one less "z"'s, else dec last -char value. If no chars, do nothing - we are allowed pred to not give -a new value *) -fun alpha_pred l = - let val (n, rest) = count_a 0 (rev l) - in - rev (case rest of [] => (if n > 1 then List.tabulate (n - 1, K (#"z")) - else []) - | (h :: t) => (List.tabulate (n, K (#"z"))) - @ ((Char.pred h) :: t)) - end; - -(* accumulate the alpha chars at the start of a given string: we use -this on a string that has been reversed so that we get all the alpha -chars at the end of the string. *) -fun alpha_revstart a [] = (a,[]) (* all alpha num *) - | alpha_revstart a (l as (h :: t)) = - if is_alpha h then alpha_revstart (h::a) t - else (a, l); -(* apply a function to the alpha-end part of a string *) -fun appto_alpha_ending f cl = - let val (ending,revrest) = alpha_revstart [] (rev cl) - in (rev revrest) @ (f ending) end; - -val suc = String.implode - o (appto_alpha_ending alpha_succ) - o String.explode; - -fun pred "" = NONE - | pred s = SOME ((String.implode - o (appto_alpha_ending alpha_pred) - o String.explode) s); - -(* break a string into it's alpha ending and whatever before it *) -fun nonalpha_alpha_pair s = - let val (alphaending, revstart) = - alpha_revstart [] (rev (String.explode s)) - in (String.implode (rev revstart), String.implode alphaending) end; - - -val string_ord = String.compare; -fun len_based_string_ord (s1, s2) = - (case Library.int_ord (size s1, size s2) - of EQUAL => string_ord (s1, s2) | ord => ord); - -(* compares a string based on the alpha ending, but first use standard -fast_string check on non-alpha start. *) -fun ord (a,b) = - let val (a1,a2) = nonalpha_alpha_pair a - val (b1,b2) = nonalpha_alpha_pair b - in - (* Use length based ord for postfix part which must respect Suc - operations, and ordinary left-to-right comparison for the - string prefix so that prefixes look alphabetical. 
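(* Small usage sketches of the two string namers above.

   StrSNamer (via mk/dest) bumps the numeric or alphabetic suffix of a name:

     StrSNamer.dest (StrSNamer.suc (StrSNamer.mk "x1"));  (* = "x2" *)
     StrSNamer.dest (StrSNamer.suc (StrSNamer.mk "v"));   (* = "w"  *)

   StrNamer works directly on strings, wrapping trailing "z"s as described:

     StrNamer.suc "az";   (* = "ba"  *)
     StrNamer.suc "zz";   (* = "aaa" *)
*)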
- *) - case string_ord (a1, b1) - of EQUAL => len_based_string_ord (a2,b2) - | x => x - end; - -fun eq n1n2 = (case String.compare n1n2 of EQUAL => true | _ => false) - -end; (* structure *) - - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -structure StrIntNamer : NAMER - = struct - type name = string * int - fun suc (x,i) = (x, i + 1); - fun pred (x,0) = NONE - | pred (x,i) = SOME (x,i - 1); - val ord = Library.prod_ord Library.fast_string_ord Library.int_ord; - fun pretty (n,i) = - Pretty.block - ([Pretty.str n] - @ (if i <> 0 then [Pretty.str (Int.toString i)] else [])); - fun eq n1n2 = (case ord n1n2 of EQUAL => true | _ => false); -end; - - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -structure IntNamer : NAMER - = struct - type name = int - fun suc x = x + 1; - fun pred 0 = NONE - | pred x = SOME (x - 1); - val ord = Library.int_ord; - fun pretty x = Pretty.str (Int.toString x); - fun eq n1n2 = (case ord n1n2 of EQUAL => true | _ => false); -end; - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -functor SumNamer( - structure Namer1 : NAMER - structure Namer2 : NAMER -) = -struct - datatype name = IN1 of Namer1.name | IN2 of Namer2.name - type src = name - fun suc (IN1 n) = IN1 (Namer1.suc n) - | suc (IN2 n) = IN2 (Namer2.suc n) - - fun pred (IN1 n) = Option.map IN1 (Namer1.pred n) - | pred (IN2 n) = Option.map IN2 (Namer2.pred n) - - fun ord (IN1 n1, IN1 n2) = Namer1.ord (n1,n2) - | ord (IN2 n1, IN2 n2) = Namer2.ord (n1,n2) - | ord (IN1 _, IN2 _) = GREATER - | ord (IN2 _, IN1 _) = LESS - - fun eq ns = (ord ns = EQUAL) - - fun pretty (IN1 n) = Pretty.block [Pretty.str "IN1(", Namer1.pretty n, Pretty.str ")"] - | pretty (IN2 n) = Pretty.block [Pretty.str "IN2(", Namer2.pretty n, Pretty.str ")"] - - val mk = I - val dest = I -end - diff --git a/core/lib/names/names.ML b/core/lib/names/names.ML deleted file mode 100644 index 804321e9..00000000 --- a/core/lib/names/names.ML +++ /dev/null @@ -1,221 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: names.ML - Author: Lucas Dixon - lucas.dixon@ed.ac.uk - 30 May 2008 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - -Names + renaming for them - -Avoids very common error: see "n" is not in avoid set, so you don't -change it. But! you also don't want another renaming, call it "m" -*not* to result in renaming "m" to "n". So you need both an avoid set -as well as a ignore set. "n" gets added to the ignore set as well as -the avoid set, thus "m" doesn't clobber "n" and all future occurances -of "n" are ignored. 
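(* A minimal sketch of the avoid/ignore behaviour described above, using the
   StrName instance from names_common.ML further down this patch:

     val rnm = StrName.renaming_of_avoidset (StrName.NSet.of_list ["n"]);
     (* renaming "m" may not clobber "n", since "n" is in the avoid set *)
     val (new_m_opt, rnm') = StrName.rename1 "m" rnm;
*)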
- -*) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -(* Types within a NAME structure, to ease writing of sharing constraints *) -signature NAME_SHARING = -sig - type name - type 'a table - type nset - type 'a ntab - type renaming -end; - -(* names + renaming *) -signature NAME -= sig - type name - - structure Namer : NAMER - sharing type Namer.name = name; - - structure NTab : NAME_TAB; - sharing type NTab.name = name; - - structure NSet : NAME_SET; - sharing NTab.NSet = NSet; - - structure Tab : TABLE; - sharing type NTab.Tab.table = Tab.table; - sharing type NTab.Tab.key = Tab.key; - - structure Rnm : RENAMING - where type 'a nmap = 'a NTab.T - and type name = name - and type nset = NSet.T - - sharing type NTab.renaming = Rnm.T - sharing type NSet.renaming = Rnm.T; - - val default_name : name - val pretty_name : name -> Pretty.T - val string_of_name : name -> string - val print_name : name -> unit - val name_eq : name * name -> bool - val name_ord : name * name -> General.order - - type renaming = Rnm.T - (* sharing type Rnm.T = renaming *) - - val empty_renaming : renaming (* avoid these names *) - val mk_renaming : NSet.T (* ignore these *) - -> NSet.T (* avoid these names *) - -> name NTab.T (* Use this renaming map *) - -> renaming - val renaming_of_avoidset - : NSet.T (* avoid these names *) - -> renaming - - (* for renaming *) - val rename1 : name (* old name *) - -> renaming (* renaming so far *) - -> (name option) (* new name *) - * renaming (* extended renaming *) - - (* for alpha conversion *) - val try_compose_rename1 : name (* old name *) - -> name (* new name *) - -> renaming (* renaming so far *) - -> renaming option (* (optional) extended renaming *) - - (* add more stuff to renaming *) - val union_avoids_to_renaming : NSet.T -> renaming -> renaming - (* ignores are auto also added to avoids *) - val union_ignores_to_renaming : NSet.T -> renaming -> renaming - - (* gets from renamings *) - val newnames_of_renaming : renaming -> NSet.T - val ignored_of_renaming : renaming -> NSet.T - val avoids_of_renaming : renaming -> NSet.T - val nmap_of_renaming : renaming -> name NTab.T - - val pretty_renaming : renaming -> Pretty.T - val print_renaming : renaming -> unit - - - (** types which want to be shared **) - structure Sharing : NAME_SHARING - where type name = name - and type 'a table = 'a Tab.table - and type nset = NSet.T - and type 'a ntab = 'a NTab.T - and type renaming = Rnm.T - - (* where type nmap = nmap *) (* note compound type; cannot use sharing *) - -(* sharing type Rnm.name = name - sharing type Rnm.nset = NSet.T -*) - -end; - -(* Safe String Names; isomorphic strings, named hidden under a -constructor so the underlying string type is not normally revealed, -thus harder to confuse name types. 
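(* A small sketch of the point above, using the opaque SStrName instance from
   names_common.ML further down this patch: a raw string is not itself a name,
   so distinct name types cannot be mixed up by accident.

     val v : SStrName.name = SStrName.mk "v0";
     val s : string        = SStrName.dest v;
*)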
*) - -signature SNAME -= sig -include NAME -type src -val mk : src -> name -val dest : name -> src -end; - -functor NameFun( - structure Namer : NAMER - val default_name : Namer.name -) : NAME -= struct - (* create basic names *) - structure Nm : BASIC_NAME = - BasicNameFun( - structure Namer = Namer - val default_name = default_name); - - open Nm; - - structure Rnm = RenamingFun(Nm); - structure NSet = NameSetFun(Nm); - structure NTab = NameTabFun(Nm); - - type renaming = Rnm.T - val empty_renaming = Rnm.empty; - val mk_renaming = Rnm.mk; - val renaming_of_avoidset = Rnm.mk_from_avoids; - - val rename1 = Rnm.rename1; - val try_compose_rename1 = Rnm.try_compose_rename1; - - val union_ignores_to_renaming = Rnm.union_ignores; - val union_avoids_to_renaming = Rnm.union_avoids; - - val newnames_of_renaming = Rnm.get_newnames; - val ignored_of_renaming = Rnm.get_ignored; - val avoids_of_renaming = Rnm.get_avoids; - val nmap_of_renaming = Rnm.get_nmap; - - val pretty_renaming = Rnm.pretty; - val print_renaming = Rnm.print; - - (** types which want to be shared **) - structure Sharing = struct - type name = name - type 'a table = 'a Tab.table - type nset = NSet.T - type 'a ntab = 'a NTab.T - type renaming = Rnm.T - end - - -(* FIXME: respect depth properly; don't print out all things in set, just depth number of them. *) - (* PolyML pretty print names *) - val _ = PolyML.addPrettyPrinter (fn depth => fn printArgTypes => fn value => - ml_pretty (Pretty.to_ML (Nm.pretty_name value))); - - (* pretty print name sets *) - val _ = PolyML.addPrettyPrinter (fn depth => fn printArgTypes => fn value => - ml_pretty (Pretty.to_ML (NSet.pretty value))); - - (* pretty name tables *) - val _ = PolyML.addPrettyPrinter (fn depth => fn printArgTypes => fn value => - let fun pretty_f a = (Pretty.from_ML o pretty_ml o printArgTypes) - (a,depth - 1) - in - value |> NTab.list_of - |> map (fn (n,a) => - Pretty.block [Nm.pretty_name n, - Pretty.str "=>", pretty_f a]) - |> Pretty.list "{" "}" - |> Pretty.to_ML |> ml_pretty - end); - -end; - -(* add data to a names type - makes new names type with extra data *) -functor AddDataNameFun( - structure Nm : NAME - type data - val ord_data : data * data -> order - val pretty : data * Nm.name -> Pretty.T - val default_name : data * Nm.name -) -: NAME -= struct - structure Namer2 = - AddDataNamerFun( - structure N = Nm.Namer; - type data = data - val ord_data = ord_data - val pretty = pretty); - structure Names = NameFun( - structure Namer = Namer2 - val default_name = default_name); - open Names; -end; diff --git a/core/lib/names/names_common.ML b/core/lib/names/names_common.ML deleted file mode 100644 index ab0ef460..00000000 --- a/core/lib/names/names_common.ML +++ /dev/null @@ -1,71 +0,0 @@ -(* IMRPOVE: stop using StrNames and use SStrNames - can be more - efficient and is safer *) -signature STR_NAME = NAME where type name = string; -structure StrName : STR_NAME = - NameFun(structure Namer = StrNamer val default_name = "a"); - -structure StrMap = NameMapFun(structure Dom = StrName and Cod = StrName); -structure StrInjEndo = NameInjEndoFun(StrName); - -(* Safe string names do not let you know the internal type of names; you - have to go through strings. 
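(* A small usage sketch of the StrMap instance defined above (a name map from
   string names to string names); several domain names may share a codomain
   name, so codf returns a set:

     val m  = StrMap.of_list [("x","a"), ("y","a"), ("z","b")];
     val c  = StrMap.domf m "x";           (* = "a" *)
     val ds = StrMap.codf m "a";           (* = the set {"x","y"} *)
     val m' = StrMap.surj_del_dom "z" m;   (* "b" becomes unmapped and is dropped from the codomain *)
*)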
*) -signature SSTR_NAME = SNAME where type src = string; - -(* safe strings as names *) -structure SStrName :> SSTR_NAME -= struct - structure S = NameFun(structure Namer = StrSNamer - val default_name = StrSNamer.mk "s0"); - open S; - type src = string; - val mk = StrSNamer.mk; - val dest = StrSNamer.dest; -end; - -structure SStrMap = NameMapFun(structure Dom = SStrName and Cod = SStrName); -structure SStrInjEndo = NameInjEndoFun(SStrName); -structure SStrBRel = NameBRelFun(structure Dom = SStrName and Cod = SStrName); - -signature STR_INT_NAME = NAME where type name = string * int; -structure StrIntName : STR_INT_NAME = - NameFun(structure Namer = StrIntNamer val default_name = ("a",0)); - -(* safe (string * int) names *) -signature SSTR_INT_NAME = SNAME where type src = string * int; -structure SStrIntName :> SSTR_INT_NAME -= struct - open StrIntName; - type src = string * int; - val mk = I; - val dest = I; -end; - -signature INT_NAME = NAME where type name = int; -structure IntName : INT_NAME = - NameFun(structure Namer = IntNamer - val default_name = 0); - -(* safe int names *) -signature SINT_NAME = SNAME where type src = int; -structure SIntName :> SINT_NAME -= struct - open IntName; - type src = int; - val mk = I; - val dest = I; -end; - - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Lifting into FreshNamers *) -structure IntNSet = IntName.NSet; -structure StrNSet = StrName.NSet; -structure StrIntNSet = StrIntName.NSet; - -structure IntNTab = IntName.NTab; -structure StrNTab = StrName.NTab; -structure StrIntNTab = StrIntName.NTab; - -structure IntTab = IntNTab.Tab; -structure StrTab = StrNTab.Tab; -structure StrIntTab = StrIntNTab.Tab; diff --git a/core/lib/names/nameset.ML b/core/lib/names/nameset.ML deleted file mode 100644 index 09ced15d..00000000 --- a/core/lib/names/nameset.ML +++ /dev/null @@ -1,58 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: nameset.ML - Author: Lucas Dixon - lucas.dixon@ed.ac.uk - Initially written Oct 2005, Last Updated 29 Jan 2008 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - - Used to get new nice fresh names (quickly). With a slight - adaptation, it can also be thought of as a memory management where - names are memory spaces. - - A descrete interval table (for fresh names): a specialised, AVL - tree inspired, representation for holding intervals and getting - quick lookup/add/delete, worst case O(Log(n)^2); but when adjacent - names are used it is much faster (often constant time). 
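(* A small usage sketch of the interval-based name sets described above, using
   the IntName.NSet instance from names_common.ML above; adjacent names are
   stored as a single interval:

     val s = IntName.NSet.of_list [1,2,3];
     val s = IntName.NSet.add_range (10,20) s;
     val b = IntName.NSet.contains s 15;   (* = true *)
*)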
- -*) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -signature NAME_SET_SHARING = -sig - type name - type T - type renaming -end; - -signature NAME_SET = -sig - include BASIC_NAME_SET - include COMPOUND_RENAMING where type obj = T - structure Sharing : NAME_SET_SHARING - sharing type Sharing.name = name - sharing type Sharing.T = T - sharing type Sharing.renaming = renaming -end; - -functor NameSetFun(Nm : BASIC_NAME) = -struct - open Nm.NSet; - - structure CR = CompoundRenamingFun( - struct - structure Nm = Nm; - type obj = Nm.NSet.T - val get_nameset = I - val try_rename1 = Nm.NSet.try_rename1 - end) - - open CR; - - structure Sharing = struct - type name = name - type T = T - type renaming = renaming - end; -end; - - diff --git a/core/lib/names/nametab.ML b/core/lib/names/nametab.ML deleted file mode 100644 index 9f6a7e24..00000000 --- a/core/lib/names/nametab.ML +++ /dev/null @@ -1,114 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: isaplib/isap_src/nametab.ML (sym link in src/generic/) - Author: Lucas Dixon, University of Edinburgh - lucas.dixon@ed.ac.uk - Initially written Oct 2005, Updated 27 Dec 2008 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - Table with names as keys. - for holding things associated with nice fresh namers. - - Each entry has a name - but we allow some names not to have - entries: you can use up names without providing entries. - - Note: Currently -*) - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -signature NAME_TAB_SHARING = -sig - type name - type nset - type renaming - type 'a T (* name table *) - type 'a table (* a basic lookup table, not a name table *) -end; - -signature NAME_TAB = -sig - include BASIC_NAME_TAB - type renaming - val rename : renaming -> 'a T -> renaming * 'a T - val mk_renaming : 'a T -> renaming -> renaming - val do_renaming : renaming -> 'a T -> 'a T - structure Sharing : NAME_TAB_SHARING - sharing type Sharing.name = name - sharing type Sharing.table = Tab.table - sharing type Sharing.nset = NSet.T - sharing type Sharing.renaming = renaming -end; - -(* see comments below: we end up with a second copy of the same code - as in compound_renaming: very crappy, better if we could share - ground function with free type *) -functor NameTabFun(Nm : BASIC_NAME) -: NAME_TAB -= struct - structure Rnm = Nm.Rnm; - type renaming = Rnm.T; - open Nm.NTab; - - fun do_renaming rn x = - Nm.NSet.fold - (fn n => fn x2 => - case Rnm.lookup rn n of - NONE => x2 - | SOME n2 => (case try_rename1 n n2 x2 - of NONE => raise Rnm.invalid_renaming_exp "do_renaming" - | SOME x3 => x3 )) - (get_nameset x) x; - - fun mk_renaming x rn = - Nm.NSet.fold (snd oo Rnm.rename1) (get_nameset x) rn; - -(* note: takes time of nset * ln(nset), - could be inverted to take time of avoids(rn) * ln(nset) *) - - fun rename rn x = - Nm.NSet.fold - (fn n => fn (rn2,x2) => - case Rnm.rename1 n rn2 of - (NONE, rn3) => (rn3,x2) - | (SOME n2, rn3) => - (case try_rename1 n n2 x2 - of NONE => raise Rnm.invalid_renaming_exp "rename" - | SOME x3 => (rn3, x3))) - (get_nameset x) (rn,x); - - structure Sharing = struct - type name = name; - type 'a table = 'a Tab.table; - type 'a T = 'a T; - type renaming = renaming; - type nset = Nm.NSet.T; - end; -end; - - - - -(* Cannot do the nice and obvious because of ML's broken functor and -signature mechanism *) -(* -signature NAME_TAB = -sig - include BASIC_NAME_TAB - include COMPOUND_RENAMING where type 
'a obj = 'a T -end; - -functor NameTabFun(Rnm : RENAMING) = -struct - open Nm.NTab; - - structure CR = CompoundRenamingFun( - struct - structure Nm = Nm; - type obj = 'a Nm.NTab.T - val get_nameset = Nm.NTab.get_nameset - val try_rename1 = Nm.NTab.try_rename1 - end) - - open CR; -end; -*) - diff --git a/core/lib/names/renaming.ML b/core/lib/names/renaming.ML deleted file mode 100644 index 291ee753..00000000 --- a/core/lib/names/renaming.ML +++ /dev/null @@ -1,43 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Title: renaming.ML - Author: Lucas Dixon - lucas.dixon@ed.ac.uk - 30 May 2008 *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* DESCRIPTION: - * - * See BASIC_RENAMING documentation. - * - * This adds a compound renaming that updates a renaming to account for the - * precomposition of another renaming. - *) - -(* Renamings which can themselves be renamed *) -signature RENAMING -= sig -include BASIC_RENAMING; - -structure Rnm : COMPOUND_RENAMING -sharing type Rnm.renaming = T -sharing type Rnm.obj = T - -(* first renaming is added after second renaming; extra renamings are added *) -(* val compose_renaming : T -> T -> T *) -end; - - -(* *) -functor RenamingFun(Nm : BASIC_NAME) -: RENAMING -= struct - open Nm.Rnm; - - structure Rnm = CompoundRenamingFun( - struct - structure Nm = Nm - type obj = T - val get_nameset = get_avoids - val try_rename1 = try_rename1_renaming - end) - -end; diff --git a/core/lib/names/test/ROOT.ML b/core/lib/names/test/ROOT.ML deleted file mode 100644 index 655b05d6..00000000 --- a/core/lib/names/test/ROOT.ML +++ /dev/null @@ -1,2 +0,0 @@ -PolyML.Project.make "nameset.ML"; -PolyML.Project.make "name_brel.ML"; diff --git a/core/lib/names/test/name_brel.ML b/core/lib/names/test/name_brel.ML deleted file mode 100644 index 1694e00d..00000000 --- a/core/lib/names/test/name_brel.ML +++ /dev/null @@ -1,31 +0,0 @@ -local - -structure IntStrBinRel = NameBRelFun(structure Dom = IntName and Cod = StrName) -structure BinRel = IntStrBinRel; - -in val _ = Testing.test "BinRel tests" (fn () => let - -val rel0 = BinRel.empty; -val rel = rel0 |> (BinRel.add_many (IntName.NSet.of_list [1,2,3]) - (StrName.NSet.of_list ["a","b","c"])); -(*val _ = BinRel.print rel;*) - -val rel = rel |> (BinRel.add_to_dom 5 (StrName.NSet.of_list ["a","b"])); -(*val _ = BinRel.print rel;*) - -(* val rel = rel |> (BinRel.try_rename1_dom 5 4); -val _ = BinRel.print rel; *) - -val rel = rel |> (BinRel.del_many (IntName.NSet.of_list [1,2,4]) - (StrName.NSet.of_list ["b","c"])); -(*val _ = BinRel.print rel;*) - -val rel = rel |> (BinRel.del_many (IntName.NSet.of_list [1]) - (StrName.NSet.of_list ["a"])); -(*val _ = BinRel.print rel;*) - - -in () end (* let *) -) () -val _ = Testing.assert_no_failed_tests(); -end diff --git a/core/lib/names/test/nameset.ML b/core/lib/names/test/nameset.ML deleted file mode 100644 index 94660aec..00000000 --- a/core/lib/names/test/nameset.ML +++ /dev/null @@ -1,220 +0,0 @@ -local -structure N = StrNSet; -structure N2 = StrIntNSet; -structure N3 = IntNSet; -in -val _ = Testing.test "NSet tests" (fn () => let -(* - install_pp (make_pp ["N", "T"] (Pretty.pprint o N.pretty_struct)); -*) - -fun assert_correct m = N.assert_correct m; - -(* --- basic tests --- *) -val m = N.empty; -val m = m |> N.add "z" |> assert_correct; -val m = m |> N.add "aa" |> assert_correct; -val m = m |> N.add "ab" |> assert_correct; -val m = m |> N.add "za" |> assert_correct; -val m = m |> N.add "zb" |> assert_correct; -val m = m |> 
N.add "zc" |> assert_correct; -val m2 = m |> N.delete "zb" |> assert_correct; -val m2 = m |> N.delete "aa" |> assert_correct; -val m2 = m2 |> N.delete "ab" |> assert_correct; -val m2 = m2 |> N.delete "zb" |> assert_correct; -val m2 = m2 |> N.delete "zc" |> assert_correct; -val m2 = m2 |> N.delete "z" |> assert_correct; -val m2 = m2 |> N.delete "za" |> assert_correct; -val m2 = m2 |> N.delete "za" |> assert_correct; -val m = m |> N.add_range ("ac","f") |> assert_correct; -val m = m |> N.add_range ("a","c") |> assert_correct; -val m = m |> N.add_range ("bb","cc") |> assert_correct; -val m = m |> N.add_range ("dd","ee") |> assert_correct; -val m = m |> N.add_range ("ef","zb") |> assert_correct; - - -(* --- Test from parser code --- *) -(* stuff for initial table *) -fun chars_between startchar endchar = - (case Char.compare (startchar, endchar) of - LESS => startchar :: chars_between (Char.succ startchar) endchar - | _ => [endchar]); - -(* little helper function to print id's nicely *) -fun id_of_str "" = "unit" (* empty string mapped to char 0 *) - | id_of_str " " = "\" \"" - | id_of_str s = - let val s2 = String.toString s - in if s2 = s then s else ("\"" ^ s2 ^ "\"") end; - -val empty_str_id = id_of_str (Char.toString #"\^@"); (* empty string mapped to char 0 *) - -(* all initial characters *) -val base_chars = [#"\t", #"\n"] @ (chars_between (#" ") (#"~")); -val base_char_strs = map Char.toString base_chars; -val base_char_ids = map id_of_str base_char_strs; - -(* quoted characters *) -val quote_char = (#"\\"); -val quoted_chars = [(#"\\"), (#"'")]; -(* note subtract x y = x subtracted_from y = y - x *) -val unquoted_chars = subtract (op =) base_chars quoted_chars; - -val basetype_names = (base_char_ids ~~ base_chars); - -val big_table = - fold - (N.assert_correct oo N.add) - base_char_ids - N.empty; - - -(* --- Bugs found and cleared up... lets not make them again... *) - - -val tr2 = - N.empty - |> N.add_range ("conjEm","conjEn") - |> N.add_range ("aw","bk") - |> N.add "exEh" - |> N.add_range ("g","j") - |> N.add "exEf" - |> N.add_range ("conjEj","conjEk") - |> N.add_range ("conjEg","conjEh") - |> assert_correct; -(*val _ = N.print_full tr2;*) - -(* causes major rebalance... 
*) -val tr3 = tr2 |> N.add "conjEl"; - - - -(* --- intersection test 1 --- *) -val m = N.empty; -val m = m |> N.add "a" |> assert_correct; -val m = m |> N.add "b" |> assert_correct; - -val m2 = N.empty; -val m2 = m2 |> N.add "a" |> assert_correct; -val m2 = m2 |> N.add "c" |> assert_correct; - -val _ = N.intersect m m2; -val _ = N.intersect m2 m; - - -(* --- intersection test 2 --- *) -val m = N2.empty; -val m = m |> N2.add ("C",15) |> N2.assert_correct; -val m = m |> N2.add ("C",13) |> N2.assert_correct; - -val m2 = N2.empty; -val m2 = m2 |> N2.add ("C",14) |> N2.assert_correct; -val m2 = m2 |> N2.add ("C",15) |> N2.assert_correct; - -val _ = N2.intersect m m2; -val _ = N2.intersect m2 m; - - -(* --- union_merge test 1 --- *) -val s = N3.empty - |> N3.add_range (96,101) - |> N3.add_range (116,118) - |> N3.add 112 - |> N3.add 110; - -val s2 = s |> N3.add_range (111,111) |> N3.assert_correct; - -val s2 = N3.empty |> N3.add_range (111,111); - -val (v as (b,t)) = (111,111); -open N3; -val _ = case s - of (tr as Br (d,v2 as (b2,t2),l,r)) => () - | _ => raise ERROR "s wrong" - -val _ = N3.union_merge s2 s |> N3.assert_correct;; - -(* --- renaming --- *) - -val s = N3.of_list [1,2,3]; -(*val _ = N3.print s;*) -val s = s |> N3.add 5; -(*val _ = N3.print s;*) -val s = the (s |> N3.try_rename1 5 4) -val _ = case s |> N3.try_rename1 4 2 - of SOME _ => raise ERROR "N3.try_rename1 4 2: failed :( " - | _ => (); -(*val _ = N3.print s;*) - - -in () end (* let *) -) () -val _ = Testing.assert_no_failed_tests(); -end (* local *) - - - - - -(* debugging .... - - -val tr = - N.empty - |> N.add_range ("conjEm","conjEn") - |> N.add_range ("aw","bk") - |> N.add "exEh" - |> N.add_range ("g","j") - |> N.add "exEf" - |> N.add_range ("conjEj","conjEk") - |> N.add_range ("conjEg","conjEh") - |> assert_correct; -val _ = N.print_full tr; - -open N; - -val s = "conjEl"; -val (s as (sp,sx,ss)) = (pred_or_same s, s, N.suc s); - -val tr3 = tr |> N.add' s; - -val tr2 = tr |> N.add "conjEl"; - - - -val s= n; -val s = (N.pred_or_same s, s, N.N.suc s); -val (s as (sp,sx,ss)) = s; -val (tr as N.Br (d,v as (b,t),l,r)) = tr; -N.print_full tr; - -val (tr as N.Br (d,v as (b,t),l,r)) = r; - - -N.print_full (N.add' s tr); - - -N.N.ord (sx,b); -N.N.ord(ss,b); -N.N.ord (sp,t); - -N.print_full l; -N.print_full (N.add' s l); -val newl = N.add' s l; -N.print_full newl; - -val (tr as N.Br(_,v, l as N.Br(ld,lv,ll,lr), r as N.Br(rd,rv,rl,rr))) = -rebalance(N.Br (N.br_depth newl r,(b,t),newl,r)); - -N.print_full (N.Br(rd + 1,v,lr,r)); - -N.add' s l; - -N.print_full (N.rebalance (N.add' s r)); -N.print_full ((N.add' s r)); - - -N.N.ord (sx," "); -N.N.ord ("$","\" \""); - -*) diff --git a/core/lib/raw_source.ML b/core/lib/raw_source.ML deleted file mode 100644 index 0310a302..00000000 --- a/core/lib/raw_source.ML +++ /dev/null @@ -1,192 +0,0 @@ -(* Title: Pure/General/source.ML - Author: Markus Wenzel, TU Muenchen - -Coalgebraic data sources -- efficient purely functional input streams. 
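(* A small usage sketch (a sketch only) of the source interface below: a source
   built from a string can be read incrementally with get, or exhausted to the
   full list of single-character strings:

     val src           = RawSource.of_string "abc";
     val (chunk, src') = RawSource.get src;
     val cs            = RawSource.exhaust src;   (* = ["a", "b", "c"] *)
*)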
-*) - -signature RAW_SOURCE = -sig - type ('a, 'b) source - val default_prompt: string - val set_prompt: string -> ('a, 'b) source -> ('a, 'b) source - val get: ('a, 'b) source -> 'a list * ('a, 'b) source - val unget: 'a list * ('a, 'b) source -> ('a, 'b) source - val get_single: ('a, 'b) source -> ('a * ('a, 'b) source) option - val exhaust: ('a, 'b) source -> 'a list - val map_filter: ('a -> 'b option) -> ('a, 'c) source -> ('b, ('a, 'c) source) source - val filter: ('a -> bool) -> ('a, 'b) source -> ('a, ('a, 'b) source) source - val of_list: 'a list -> ('a, 'a list) source - val exhausted: ('a, 'b) source -> ('a, 'a list) source - val of_string: string -> (string, string list) source - val of_string_limited: int -> string -> (string, substring) source - val tty: TextIO.instream -> (string, unit) source - val source': 'a -> 'b Scan.stopper -> ('a * 'b list -> 'c list * ('a * 'b list)) -> - (bool * (string -> 'a * 'b list -> 'c list * ('a * 'b list))) option -> - ('b, 'e) source -> ('c, 'a * ('b, 'e) source) source - val source: 'a Scan.stopper -> ('a list -> 'b list * 'a list) -> - (bool * (string -> 'a list -> 'b list * 'a list)) option -> - ('a, 'd) source -> ('b, ('a, 'd) source) source - (* <+++ *) - val raw_stream: (string -> bool) -> TextIO.instream -> (string, unit) source - (* +++> *) -end; - -structure RawSource: RAW_SOURCE = -struct - - -(** datatype source **) - -datatype ('a, 'b) source = - Source of - {buffer: 'a list, - info: 'b, - prompt: string, - drain: string -> 'b -> 'a list * 'b}; - -fun make_source buffer info prompt drain = - Source {buffer = buffer, info = info, prompt = prompt, drain = drain}; - - -(* prompt *) - -val default_prompt = "> "; - -fun set_prompt prompt (Source {buffer, info, prompt = _, drain}) = - make_source buffer info prompt drain; - - -(* get / unget *) - -fun get (Source {buffer = [], info, prompt, drain}) = - let val (xs, info') = drain prompt info - in (xs, make_source [] info' prompt drain) end - | get (Source {buffer, info, prompt, drain}) = - (buffer, make_source [] info prompt drain); - -fun unget (xs, Source {buffer, info, prompt, drain}) = - make_source (xs @ buffer) info prompt drain; - - -(* variations on get *) - -fun get_prompt prompt src = get (set_prompt prompt src); - -fun get_single src = - (case get src of - ([], _) => NONE - | (x :: xs, src') => SOME (x, unget (xs, src'))); - -fun exhaust src = - (case get src of - ([], _) => [] - | (xs, src') => xs @ exhaust src'); - - -(* (map)filter *) - -fun drain_map_filter f prompt src = - let - val (xs, src') = get_prompt prompt src; - val xs' = map_filter f xs; - in - if null xs orelse not (null xs') then (xs', src') - else drain_map_filter f prompt src' - end; - -fun map_filter f src = make_source [] src default_prompt (drain_map_filter f); -fun filter pred = map_filter (fn x => if pred x then SOME x else NONE); - - - -(** build sources **) - -(* list source *) - -fun of_list xs = make_source [] xs default_prompt (fn _ => fn xs => (xs, [])); - -fun exhausted src = of_list (exhaust src); - - -(* string source *) - -val of_string = of_list o raw_explode; - -fun of_string_limited limit str = - make_source [] (Substring.full str) default_prompt - (fn _ => fn s => - let - val (s1, s2) = Substring.splitAt (s, Int.min (Substring.size s, limit)); - val cs = map String.str (Substring.explode s1); - in (cs, s2) end); - - -(* stream source *) - -fun slurp_input instream = - let - fun slurp () = - (case TextIO.canInput (instream, 1) handle IO.Io _ => NONE of - NONE => [] - | SOME 0 => [] - | 
SOME _ => TextIO.input instream :: slurp ()); - in maps raw_explode (slurp ()) end; - -fun tty in_stream = make_source [] () default_prompt (fn prompt => fn () => - let val input = slurp_input in_stream in - if exists (fn c => c = "\n") input then (input, ()) - else - (case (Output.prompt prompt; TextIO.inputLine in_stream) of - SOME line => (input @ raw_explode line, ()) - | NONE => (input, ())) - end); - - - -(** cascade sources **) - -(* state-based *) - -fun drain_source' stopper scan opt_recover prompt (state, src) = - let - val drain = Scan.drain prompt get_prompt stopper; - val (xs, s) = get_prompt prompt src; - val inp = ((state, xs), s); - val ((ys, (state', xs')), src') = - if null xs then (([], (state, [])), s) - else - (case opt_recover of - NONE => drain (Scan.error scan) inp - | SOME (interactive, recover) => - (drain (Scan.catch scan) inp handle Fail msg => - (if interactive then (*Output.error_message msg*) () else (); - drain (Scan.unless (Scan.lift (Scan.one (Scan.is_stopper stopper))) (recover msg)) - inp))); - in (ys, (state', unget (xs', src'))) end; - -fun source' init_state stopper scan recover src = - make_source [] (init_state, src) default_prompt (drain_source' stopper scan recover); - - -(* non state-based *) - -fun drain_source stopper scan opt_recover prompt = - Scan.unlift (drain_source' stopper (Scan.lift scan) - (Option.map (fn (int, r) => (int, Scan.lift o r)) opt_recover) prompt); - -fun source stopper scan recover src = - make_source [] src default_prompt (drain_source stopper scan recover); - -(* <+++ *) -fun raw_stream flush_on in_stream = make_source [] () "" - (fn _ => fn () => - let val input = slurp_input in_stream in - if exists flush_on input then (input, ()) - else - (case TextIO.input1 in_stream of - SOME ch => (input @ [String.implode [ch]], ()) - | NONE => (input, ())) - end); -(* +++> *) - -end; diff --git a/core/lib/testing.ML b/core/lib/testing.ML deleted file mode 100644 index 2d95fe6a..00000000 --- a/core/lib/testing.ML +++ /dev/null @@ -1,137 +0,0 @@ -(* TODO: re-introduce file name info in test output *) - -(* Generic testing *) -signature TESTING = -sig - structure Log : LOG; - - type failure = { file: string option, - test: string, - id: int, - err_exn : exn }; - - - (* errors so far *) - val tests_failed_ref : failure list Unsynchronized.ref; - - (* The test function *) - val test : string (* Name of test *) - -> ('a -> 'b) (* funtion to test *) - -> 'a (* test on this value *) - -> 'b option (* NONE indicates for failure *) - - (* Should we run tests? *) - val run_tests_ref : bool Unsynchronized.ref; - (* if (!run_tests_ref) then loads the file. 
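(* A small sketch of the raw_stream source added above (the <+++ ... +++>
   block): the predicate decides when pending input should be flushed to the
   consumer, e.g. on newlines when reading from stdin:

     val src = RawSource.raw_stream (fn c => c = "\n") TextIO.stdIn;
     val (chunk, src') = RawSource.get src;   (* read whatever input is pending *)
*)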
*) - val make_test : string -> unit; - - exception assertion_failed_exp of string; - - (* raises assertion_failed_exp with the string arg if the bool arg is false *) - val assert : string -> bool -> unit; - (* raises assertion_failed_exp with the string arg if the function returns false *) - val fassert : string -> ('a -> bool) -> 'a -> unit; - - - - (* *) - exception failed_tests_exp of failure list; - - (* raises if tests_failed_ref is not empty *) - val assert_no_failed_tests : unit -> unit; -end; - -structure Testing -: TESTING -= struct - -type failure = { file: string option, - test: string, - id: int, - err_exn : exn }; - - -structure Log = Log(val init_level = 1); - -val tests_failed_ref = Unsynchronized.ref ([] : failure list); -val test_num_ref = Unsynchronized.ref 0; -fun test testname f x = - let val _ = test_num_ref := (!test_num_ref) + 1; - val test_num_str = (Int.toString (!test_num_ref)); - val file_text = case NONE (*PolyML.Project.get_current_filename()*) - of NONE => "" - | SOME s => " from file '"^s^"'" - fun run_test () = let val result = f x in - Log.log 1 ("[PASS] "^test_num_str^": "^testname); result - end - in - (* do something with the current compiler-file line number? - (for quick knowledge of which test raised an exception...) *) - Log.log 2 ("Running test "^test_num_str^": " ^ testname ^ file_text); - ((SOME (PolyML.exception_trace run_test)) handle e => - (Log.log 0 ("[FAIL] "^test_num_str^": "^testname ^": "^ (PolyML.makestring e)); - tests_failed_ref := ( - { file = NONE (*PolyML.Project.get_current_filename()*), - test = testname, - id = (!test_num_ref), - err_exn = e } :: (!tests_failed_ref)); - NONE)) - end; - -exception failed_tests_exp of failure list; -fun assert_no_failed_tests () = - if null (!tests_failed_ref) - then Log.log 1 ((Int.toString (!test_num_ref))^" tests passed so far") - else - (Log.log 0 "The following tests failed:"; - map (fn t => Log.log 0 (" * '"^(#test t)^ - (case #file t - of SOME f => ("' in "^f) - | NONE => "") - )) - (!tests_failed_ref); - raise failed_tests_exp (!tests_failed_ref)) - -(* *) -val run_tests_ref = Unsynchronized.ref true; - -fun use_with_path path_to_file = - let - val {dir, file} = OS.Path.splitDirFile path_to_file - val old_dir = OS.FileSys.getDir() - in (OS.FileSys.chDir dir; use file; OS.FileSys.chDir old_dir) - end - -fun make_test path_to_file = - if !run_tests_ref then use_with_path path_to_file else (); - -exception assertion_failed_exp of string; -fun assert msg true = Log.log 3 (" - " ^ msg ^ " [OK]") - | assert msg false = (Log.log 2 (" - " ^ msg ^ " [FAILED]"); - raise assertion_failed_exp ("Assertion '" ^ msg ^ "' failed")) -fun fassert msg f x = assert msg (f x) - -end; - -(* small example usage: *) -(* -local - (* define some basic values*) - val a = "a"; -in - (* *) - val _ = Testing.test "the name of the first test" (fn () => let - (* the test code goes in here, and is assumed not to raise an exception. *) - val _ = Testing.assert "uninteresting test" (a = "a") - in () end) (); - - (* more tests... 
*) - val _ = Testing.test "the name of another test" (fn () => let - val _ = Testing.assert "another uninteresting test" (not (a = "b")) - in () end) (); - - (* This raises an exception if some test fails and prints details for all - failed tests *) - val _ = Testing.assert_no_failed_tests(); -end; (* ends local namespace used for testing *) -*) diff --git a/core/lib/text_socket.ML b/core/lib/text_socket.ML deleted file mode 100644 index 2fdac607..00000000 --- a/core/lib/text_socket.ML +++ /dev/null @@ -1,231 +0,0 @@ -signature TEXT_SOCKET = -sig - type T - val local_connect : int -> T (* connects to port *) - val write : T -> string -> unit (* write to stream *) - val read : T -> string (* read from stream *) - val close : T -> unit (* close stream *) - (* donesn't work currently*) - val safe_local_connect : int -> T (* connects to port *) - val safe_close : T -> unit (* close stream *) - - val get_instream : T -> TextIO.instream - val get_outstream : T -> TextIO.outstream - val get_io_stream : T -> TextIO.instream * TextIO.outstream - val get_socket : T -> Socket.active INetSock.stream_sock -end - -structure TextSocket : TEXT_SOCKET = -struct - - (* debugging, can be turned off or on *) - (*fun server_print str = writeln str; - fun server_print _ = ();*) - - datatype T = SockData of { - sock : Socket.active INetSock.stream_sock, - passive : Socket.passive INetSock.stream_sock, - sin : TextIO.instream, - sout : TextIO.outstream - } - - fun get_instream (SockData rep) = #sin rep; - fun get_outstream (SockData rep) = #sout rep; - fun get_io_stream (SockData rep) = (#sin rep, #sout rep); - fun get_socket (SockData rep) = #sock rep; - - (* export elements of array to a list *) - fun arrayToList0 l idx char_arr = - if idx < CharArray.length char_arr - then arrayToList0 ((CharArray.sub(char_arr, idx)) :: l) (idx + 1) char_arr - else (rev l); - val arrayToList = arrayToList0 [] 0; - - (* in WR: send vector *) - fun sendVec0 sock char_slice = - let - (* char slice to byte slice *) - val (str, i, sz) = CharVectorSlice.base char_slice; - val byte_slice = Word8VectorSlice.slice (Byte.stringToBytes str, i ,SOME sz) - in - Socket.sendVec(sock, byte_slice) - end - - (* in WR: send array *) - fun sendArr0 sock char_slice = - let - (* char slice to byte slice *) - val (char_array, i, sz) = CharArraySlice.base char_slice; - val word_array = - arrayToList char_array - |> map Char.ord - |> map Word8.fromInt - |> Word8Array.fromList; - val byte_slice = Word8ArraySlice.slice ( word_array, i ,SOME sz) - in - Socket.sendArr(sock, byte_slice) - end - - (* with given sock, init TEXT_IO stream for input and output *) - fun make_streams sock = - let - val bufsize = (*Unsynchronized.ref*) 4096; - val buffering = IO.BLOCK_BUF; - val socket_name = "psgraph_gui_socket"; - val reader = TextPrimIO.RD { - name = "psgraph_gui_socket", - chunkSize = bufsize, - readVec = SOME (fn vec => Byte.bytesToString(Socket.recvVec(sock, vec))), - readArr = NONE, (* would this be a problem ??*) - readVecNB = NONE, - readArrNB = NONE, - block = NONE, - canInput = NONE, - avail = fn() => NONE, - getPos = NONE, - setPos = NONE, - endPos = NONE, - verifyPos = NONE, - close = (fn () => Socket.close sock), - ioDesc = NONE }; - - val writer = TextPrimIO.WR { - name = socket_name, - chunkSize = bufsize, - writeVec = SOME(sendVec0 sock) (* don't think it's used anywhere*), - writeArr = SOME (sendArr0 sock), - writeVecNB = NONE, - writeArrNB = NONE, - block = NONE, - canOutput = NONE, - getPos = NONE, - setPos = NONE, - endPos = NONE, - verifyPos 
= NONE, - close = fn () => Socket.close sock, - ioDesc = NONE }; - val input = TextIO.mkInstream(TextIO.StreamIO.mkInstream(reader, "")); - val output = TextIO.mkOutstream(TextIO.StreamIO.mkOutstream(writer, buffering)); - in - (input, output) - end - - (*fun init_server_socket opt_host port = - let - val sock = INetSock.TCP.socket (); - val addr = - (case opt_host of - NONE => INetSock.any port - | SOME host => - NetHostDB.getByName host - |> the - |> NetHostDB.addr - |> rpair port - |> INetSock.toAddr - handle Option => raise Fail ("Cannot resolve hostname: " ^ host)); - val _ = Socket.bind (sock, addr); - val _ = Socket.listen (sock, 5); - in sock end;*) - -(* setup a passive socket server *) -fun init_server_socket port = - let - val sock = INetSock.TCP.socket (); - val _ = Socket.Ctl.setREUSEADDR (sock, true); - val addr = INetSock.any port; - val _ = Socket.bind (sock, addr); - val _ = Socket.listen (sock, 5); - in sock end; - - fun local_connect port = - let - val passive_sock = init_server_socket port; - val (sock, _)= Socket.accept passive_sock; - val (sin, sout) = make_streams sock; - in - SockData { sock = sock, passive = passive_sock, sin = sin, sout = sout } - end; - - fun write (SockData data) msg = TextIO.output (#sout data,msg); - fun read (SockData data) = TextIO.input (#sin data); - - fun close (SockData data) = - (TextIO.closeOut (#sout data) handle _ => warning ("can't close output stream"); - TextIO.closeIn (#sin data) handle _ => warning ("can't close input stream"); - Socket.close (#sock data) handle _ => warning ("can't close active socket"); - Socket.close (#passive data) handle _ => warning ("can't close passive socket")); - - val last_sock : - { sock : Socket.active INetSock.stream_sock option, - passive : Socket.passive INetSock.stream_sock option, - sin : TextIO.instream option, - sout : TextIO.outstream option - } Synchronized.var - = Synchronized.var "last sock" {sock = NONE, passive = NONE, sin= NONE, sout = NONE}; - - val socket_tracing = K; - - fun clear_last_sock _ = - let - val _ = socket_tracing "clear last sock" - val {sock, passive, sin, sout} = Synchronized.value last_sock - in - ((case sout of (SOME s) => TextIO.closeOut s | NONE => ()); - (case sin of (SOME s) => TextIO.closeIn s | NONE => ()); - (case sock of (SOME s) => Socket.close s | NONE => ()); - (case passive of (SOME s) => Socket.close s | NONE => ()); - Synchronized.change last_sock - (fn _ => {sock = NONE, passive = NONE, sin = NONE, sout = NONE})) - end - handle _ => - (Synchronized.change last_sock - (fn _ => {sock = NONE, passive = NONE, sin = NONE, sout = NONE})) - - exception exp_text_socket of string - fun safe_local_connect port = - let - val _ = socket_tracing "start safe connect" - val passive_sock = init_server_socket port; - val _ = - Synchronized.change - last_sock - (fn {sock, passive=_, sin, sout} => {sock = sock, passive = SOME passive_sock, sin = sin, sout = sout}); - (* a time out is working here *) - val sock0 = - let - val timer = Timer.startRealTimer () - fun accept_until_timeout t = - if (Timer.checkRealTimer timer|> Time.toSeconds) > t (* time out *) - then raise exp_text_socket "time out for socket: no incoming connection" - else - (case Socket.acceptNB passive_sock - of SOME ret => fst ret - | _ => accept_until_timeout t) - in - accept_until_timeout 5 (* timeout for 5 s *) - end - - val _ = - Synchronized.change - last_sock - (fn {sock=_, passive, sin, sout} => {sock = SOME sock0, passive = passive, sin = sin, sout = sout}) - val (sin0, sout0) = make_streams 
sock0; - val _ = - Synchronized.change - last_sock - (fn {sock, passive, sin=_, sout=_} => {sock = sock, passive = passive, sin = SOME sin0, sout = SOME sout0}) - in - SockData { sock = sock0, passive = passive_sock, sin = sin0, sout = sout0} - end - handle exp_text_socket msg => (clear_last_sock () ; raise exp_text_socket msg) (* timeout , stop trying *) - | _ => (clear_last_sock () ; safe_local_connect port) (* address is in used, try again *) - - fun safe_close data = - let val _ = socket_tracing"cleaning by it self" in - (close data; - Synchronized.change - last_sock - (fn _ => {sock = NONE, passive = NONE, sin = NONE, sout = NONE})) end - - -end (* struct *) \ No newline at end of file diff --git a/core/matching/bang_graph_homeomorphism_search.ML b/core/matching/bang_graph_homeomorphism_search.ML deleted file mode 100644 index 86b8bd57..00000000 --- a/core/matching/bang_graph_homeomorphism_search.ML +++ /dev/null @@ -1,448 +0,0 @@ -(** - * Finds wire-homeomorphisms between !-graphs - * - * This is essentially a particularly strict match search where - * the matches must be valid in both directions and no !-box - * operations are permitted. It is essentially an isomorphism - * up to the number of wire-vertices on a wire. - * - * This essentially checks that the normalised forms of the two - * graphs are the same up to vertex, edge and !-box names. Be - * warned that it makes use of Graph.vdata_eq and Graph.edata_eq, - * so alpha-conversion in element data is not supported: the names - * must be identical. - *) - -signature BANG_GRAPH_HOMEOMORPHISM_SHARING -= sig - type T - structure Graph : OGRAPH_SHARING -end - -(* A homeomorphism from a !-graph to a !-graph *) -signature BANG_GRAPH_HOMEOMORPHISM -= sig - type T - structure Graph : BANG_GRAPH - - (* sharing via sharing structure *) - structure Sharing : BANG_GRAPH_HOMEOMORPHISM_SHARING - sharing type Sharing.T = T - sharing Sharing.Graph = Graph.Sharing - - (* the graph to match from *) - val get_pat : T -> Graph.T - (* the graph to match onto *) - val get_tgt : T -> Graph.T - (* the vertex map (from pat verts to tgt verts *) - val get_vmap : T -> VVInj.T - (* the edge map (from pat edges to tgt edges *) - val get_emap : T -> EEInj.T - (* the bbox map (from pat !-boxes to tgt !-boxes *) - val get_bmap : T -> BBInj.T - - exception bad_homeomorphism_exp of string - * Graph.T - * Graph.T - * VVInj.T - * EEInj.T - * BBInj.T - - (* create a match; checks for correctness *) - (* raises bad_match_exp if not a valid match *) - val mk : Graph.T - -> Graph.T - -> VVInj.T - -> EEInj.T - -> BBInj.T - -> T - - (* create a match; checks for correctness *) - (* returns NONE if not a valid match *) - val mk_opt : Graph.T - -> Graph.T - -> VVInj.T - -> EEInj.T - -> BBInj.T - -> T option - - val print : T -> unit - val pretty : T -> Pretty.T -end - -signature BANG_GRAPH_HOMEOMORPHISM_SEARCH_SHARING -= sig - structure Graph : OGRAPH_SHARING - structure Homeomorphism : BANG_GRAPH_HOMEOMORPHISM_SHARING - sharing Graph = Homeomorphism.Graph -end - - -signature BANG_GRAPH_HOMEOMORPHISM_SEARCH -= sig - structure Graph : BANG_GRAPH - structure Homeomorphism : BANG_GRAPH_HOMEOMORPHISM - structure Sharing : BANG_GRAPH_HOMEOMORPHISM_SEARCH_SHARING - sharing Sharing.Graph = Graph.Sharing - sharing Sharing.Homeomorphism = Homeomorphism.Sharing - - val find_homeomorphisms : Graph.T -> Graph.T -> Homeomorphism.T Seq.seq - val is_homeomorphic : Graph.T -> Graph.T -> bool -end - - - -functor BangGraphHomeomorphism(Graph : BANG_GRAPH) : BANG_GRAPH_HOMEOMORPHISM -= 
struct - structure Graph = Graph - - datatype T = Match of { - (* pattern and target graphs *) - pat : Graph.T, - tgt : Graph.T, - (* vertex mapping from pat to tgt *) - vmap : VVInj.T, - (* edge mapping from pat to tgt *) - emap : EEInj.T, - (* !-box mapping from pat to tgt *) - bmap : BBInj.T - } - - fun get_pat (Match r) = #pat r - fun get_tgt (Match r) = #tgt r - fun get_vmap (Match r) = #vmap r - fun get_emap (Match r) = #emap r - fun get_bmap (Match r) = #bmap r - - exception bad_homeomorphism_exp of string - * Graph.T - * Graph.T - * VVInj.T - * EEInj.T - * BBInj.T - - fun find_match_error pat tgt vmap emap bmap = let - val vmapf = VVInj.get vmap - val emapf = EEInj.get emap - val bmapf = BBInj.get bmap - - fun edge_coherent e = let - val ((dir,_),(s,t)) = Graph.get_edge_info pat e - val e' = emapf e - val exp_s' = vmapf s - val exp_t' = vmapf t - val ((dir',_),(real_s',real_t')) = Graph.get_edge_info tgt e' - in - case (dir,dir') - of (Directed,Directed) => - (V.name_eq (exp_s',real_s')) andalso - (V.name_eq (exp_t',real_t')) - | (Undirected,Undirected) => - (V.NSet.eq (V.NSet.of_list [exp_s',exp_t']) - (V.NSet.of_list [real_s',real_t'])) - | (Directed,Undirected) => false - | (Undirected,Directed) => false - end - - fun bbox_coherent b = let - val b' = bmapf b - val children = Graph.get_bbox_children pat b - val children' = Graph.get_bbox_children tgt b' - val contents = Graph.get_vertices_in_bbox pat b - val contents' = Graph.get_vertices_in_bbox tgt b' - in - B.NSet.eq children' (BBInj.img_of_set bmap children) - andalso - V.NSet.eq contents' (VVInj.img_of_set vmap contents) - end - - (* FIXME: alpha conversion? *) - fun vdata_eq v = let - val v' = vmapf v - val pat_vdata = Graph.get_vertex_data pat v - val tgt_vdata = Graph.get_vertex_data tgt v' - in - Graph.vdata_eq (pat_vdata, tgt_vdata) - end - - fun edata_eq e = let - val e' = emapf e - val pat_edata = Graph.get_edge_data pat e - val tgt_edata = Graph.get_edge_data tgt e' - in - Graph.edata_eq (pat_edata, tgt_edata) - end - in - if not (V.NSet.eq (VVInj.get_dom_set vmap) (Graph.get_vertices pat)) - then SOME "vmap is not total" - else if not (E.NSet.eq (EEInj.get_dom_set emap) (Graph.get_edges pat)) - then SOME "emap is not total" - else if not (B.NSet.eq (BBInj.get_dom_set bmap) (Graph.get_bboxes pat)) - then SOME "bmap is not total" - else if not (V.NSet.eq (VVInj.get_cod_set vmap) (Graph.get_vertices tgt)) - then SOME "vmap is not onto" - else if not (E.NSet.eq (EEInj.get_cod_set emap) (Graph.get_edges tgt)) - then SOME "emap is not onto" - else if not (B.NSet.eq (BBInj.get_cod_set bmap) (Graph.get_bboxes tgt)) - then SOME "bmap is not onto" - else if not (E.NSet.forall edge_coherent (EEInj.get_dom_set emap)) - then SOME "vmap and emap are not coherent" - else if not (V.NSet.forall vdata_eq (VVInj.get_dom_set vmap)) - then SOME "vmap does not respect vertex data" - else if not (E.NSet.forall edata_eq (EEInj.get_dom_set emap)) - then SOME "emap does not respect vertex data" - else if not (B.NSet.forall bbox_coherent (BBInj.get_dom_set bmap)) - then SOME "bmap does not respect contents and children" - else NONE - end - - fun mk_opt pat tgt vmap emap bmap = - case find_match_error pat tgt vmap emap bmap - of SOME _ => NONE - | NONE => SOME (Match { - pat = pat, - tgt = tgt, - vmap = vmap, - emap = emap, - bmap = bmap - }) - - fun mk pat tgt vmap emap bmap = - case find_match_error pat tgt vmap emap bmap - of SOME e => raise bad_homeomorphism_exp (e,pat,tgt,vmap,emap,bmap) - | NONE => Match { - pat = pat, - tgt = tgt, - 
vmap = vmap, - emap = emap, - bmap = bmap - } - - fun pretty ms = - Pretty.chunks [ - Pretty.str "Match {", - Pretty.block [ - Pretty.str " ", (* indent *) - Pretty.chunks [ - Pretty.block [ - Pretty.str "Pattern: ", - Graph.pretty (get_pat ms) - ], - Pretty.block [ - Pretty.str "Target: ", - Graph.pretty (get_tgt ms) - ], - Pretty.block [ - Pretty.str "VMap: ", - VVInj.pretty (get_vmap ms) - ], - Pretty.block [ - Pretty.str "EMap: ", - EEInj.pretty (get_emap ms) - ], - Pretty.block [ - Pretty.str "BMap: ", - BBInj.pretty (get_bmap ms) - ] - ] - ], - Pretty.str "}" - ] - val print = Pretty.writeln o pretty - - structure Sharing : BANG_GRAPH_HOMEOMORPHISM_SHARING - = struct - structure Graph = Graph.Sharing - type T = T - end -end - - -functor BangGraphHomeomorphismSearcher(Graph : BANG_GRAPH) - : BANG_GRAPH_HOMEOMORPHISM_SEARCH -= struct - structure Graph = Graph - structure Homeomorphism = BangGraphHomeomorphism(Graph) - - structure MatchState = OGraphMatchState(Graph) - - structure IMSHooks : PROGRESSIVE_MATCH_SEARCH_HOOKS = - struct - type Context = unit - structure MatchState = MatchState - - fun p_vertex_may_be_completed _ ms v = let - val pat = MatchState.get_pat ms - val tgt = MatchState.get_tgt ms - val v' = VVInj.get (MatchState.get_vmap ms) v - val vdata = Graph.get_vertex_data pat v - val vdata' = Graph.get_vertex_data tgt v' - in - B.NSet.cardinality (Graph.get_bboxes_containing_vertex pat v) - = - B.NSet.cardinality (Graph.get_bboxes_containing_vertex tgt v') - end - fun vertex_match_is_allowed _ ms v1 v2 = let - val pat = MatchState.get_pat ms - val tgt = MatchState.get_tgt ms - val vdata1 = Graph.get_vertex_data pat v1 - val vdata2 = Graph.get_vertex_data tgt v2 - in - (* FIXME: alpha conversion? *) - Graph.vdata_eq (vdata1,vdata2) - end - fun bare_wire_match_is_allowed _ _ _ _ = true - end - - structure InnerMatchSearch = ProgressiveMatchSearch(IMSHooks) - - fun homeo_from_ms (bmap,ms) = - Homeomorphism.mk_opt (MatchState.get_pat ms) - (MatchState.get_tgt ms) - (MatchState.get_vmap ms) - (MatchState.get_emap ms) - bmap - - (* match the given set of bare wires against other bare wires *) - fun match_bare_wires ms = - if E.NSet.is_empty (MatchState.get_u_bare_wires ms) then Seq.single ms - else let - val (pat, tgt) = (MatchState.get_pat ms, MatchState.get_tgt ms) - val (vmap, emap) = (MatchState.get_vmap ms, MatchState.get_emap ms) - val tgt_verts = MatchState.get_tgt_verts ms - (* for a normalised graph, this will always be the source end *) - val bw = (the o E.NSet.get_min) (MatchState.get_u_bare_wires ms) - val ((bw_dir, bw_data), (bw_s, bw_t)) = Graph.get_edge_info pat bw - val (bw_sdata, bw_tdata) = (Graph.get_vertex_data pat bw_s, Graph.get_vertex_data pat bw_t) - - fun match_bw e = let - val ((dir,data),(s,t)) = Graph.get_edge_info tgt e - in - if not (EEInj.cod_contains emap e) - andalso dir = bw_dir - andalso Graph.edata_eq (data,bw_data) - andalso V.NSet.contains tgt_verts s - andalso V.NSet.contains tgt_verts t - andalso Graph.is_boundary tgt s - andalso Graph.is_boundary tgt t - then - let - val tgt_sdata = Graph.get_vertex_data tgt s - val tgt_tdata = Graph.get_vertex_data tgt t - - (* match data for edge, source, and target *) - val fmatch_d = Option.composePartial - (Graph.match_edata (bw_data, data), - Option.composePartial - (Graph.match_vdata (bw_sdata, tgt_sdata), - Graph.match_vdata (bw_tdata, tgt_tdata))) - fun add_bw new_subst = let - val ms = ms |> MatchState.set_match_psubst new_subst - |> MatchState.update_tgt_verts (V.NSet.delete s) - |> 
MatchState.update_tgt_verts (V.NSet.delete t) - |> MatchState.update_u_bare_wires (E.NSet.delete bw) - in (fn (es,et) => - ms |> MatchState.update_vmap (VVInj.add (bw_s, s)) - |> MatchState.update_vmap (VVInj.add (bw_t, t)) - |> MatchState.update_emap (EEInj.add (bw, e)) - ) - end - in - case fmatch_d (MatchState.get_match_psubst ms) - of SOME new_subst => - let val add_bw' = add_bw new_subst in - case bw_dir - of Directed => match_bare_wires (add_bw' (s,t)) - | Undirected => - Seq.append (match_bare_wires (add_bw' (s,t))) - (match_bare_wires (add_bw' (t,s))) - end - | NONE => Seq.empty - end - else Seq.empty - end - in - Seq.maps match_bw (Seq.of_list (E.NSet.list_of (Graph.get_edges tgt))) - end - - fun match_bboxes_recursive (bmap,ms) pat_bboxes tgt_bboxes = - case B.NSet.get_local_bot pat_bboxes - of NONE => if B.NSet.is_empty tgt_bboxes - then Seq.single (bmap,ms) - else Seq.empty - | SOME b => - let - val pat = MatchState.get_pat ms - val tgt = MatchState.get_tgt ms - val vmap = MatchState.get_vmap ms - val b_children = Graph.get_bbox_children pat b - val no_b_children = B.NSet.cardinality b_children - val b_contents_img = VVInj.img_of_set vmap - (Graph.get_vertices_in_bbox pat b) - val get_tgt_children = Graph.get_bbox_children tgt - val get_tgt_contents = Graph.get_vertices_in_bbox tgt - val pat_bboxes' = B.NSet.delete b pat_bboxes - fun try_match_bbox b' = let - val b'_children = get_tgt_children b' - val no_b'_children = B.NSet.cardinality b'_children - val tgt_bboxes' = B.NSet.delete b' tgt_bboxes - val bmap' = BBInj.add (b,b') bmap - in - if no_b'_children <> no_b_children - then Seq.empty - else if not (V.NSet.eq b_contents_img (get_tgt_contents b')) - then Seq.empty - else - (* recurse down into children of b,b', then move onto next - * sibling pattern !-box *) - Seq.maps (fn (bmap',ms') => - match_bboxes_recursive (bmap',ms') pat_bboxes' tgt_bboxes') - (match_bboxes_recursive (BBInj.add (b,b') bmap,ms) - b_children - b'_children) - end - in - Seq.maps try_match_bbox (Seq.of_list (B.NSet.list_of tgt_bboxes)) - end - - fun match_bboxes ms = let - val pat = MatchState.get_pat ms - val tgt = MatchState.get_tgt ms - fun get_toplevel_bboxes gr = - B.NSet.filter (not o (Graph.bbox_has_parents gr)) - (Graph.get_bboxes gr) - in - match_bboxes_recursive (BBInj.empty,ms) - (get_toplevel_bboxes pat) - (get_toplevel_bboxes tgt) - end - - fun find_homeomorphisms g1 g2 = let - val pat = Graph.normalise g1 - val tgt = Graph.normalise g2 - in - if V.NSet.cardinality (Graph.get_vertices pat) = - V.NSet.cardinality (Graph.get_vertices tgt) andalso - E.NSet.cardinality (Graph.get_edges pat) = - E.NSet.cardinality (Graph.get_edges tgt) andalso - B.NSet.cardinality (Graph.get_bboxes pat) = - B.NSet.cardinality (Graph.get_bboxes tgt) - then - MatchState.init_and_schedule_all pat tgt - |> InnerMatchSearch.match_pending () - |> Seq.maps match_bare_wires - |> Seq.filter MatchState.is_total - |> Seq.maps match_bboxes - |> Seq.map_filter homeo_from_ms - else - Seq.empty - end - - val is_homeomorphic = is_some o Seq.pull oo find_homeomorphisms - - structure Sharing = - struct - structure Graph = Graph.Sharing - structure Homeomorphism = Homeomorphism.Sharing - end -end diff --git a/core/matching/bg_match.ML b/core/matching/bg_match.ML deleted file mode 100644 index deea85d9..00000000 --- a/core/matching/bg_match.ML +++ /dev/null @@ -1,261 +0,0 @@ -(* The type-sharing struct for BANG_GRAPH_MATCH *) -signature BANG_GRAPH_MATCH_SHARING -= sig - type T - structure Graph : OGRAPH_SHARING -end - -(* A match 
from a !-graph to a !-graph *) -signature BANG_GRAPH_MATCH -= sig - type T; - structure Graph : BANG_GRAPH; - - (* sharing via sharing structure *) - structure Sharing : BANG_GRAPH_MATCH_SHARING - sharing type Sharing.T = T - sharing Sharing.Graph = Graph.Sharing; - - (* the un-expanded !-graph pattern *) - val get_init_pat : T -> Graph.T; - (* the graph to match from *) - val get_pat : T -> Graph.T; - (* the list of bbox operations to produce pat from init_pat *) - val get_bbox_ops : T -> bbox_op list; - (* the graph to match onto *) - val get_tgt : T -> Graph.T; - (* the vertex map (from pat verts to tgt verts *) - val get_vmap : T -> VVInj.T; - (* the edge map (from pat edges to tgt edges *) - val get_emap : T -> EEInj.T; - (* the bbox map (from pat !-boxes to tgt !-boxes *) - val get_bmap : T -> BBInj.T; - (* the vertex/edge data substitution for the matching *) - val get_subst : T -> Graph.subst; - (* any prematurely-killed !-boxes (these may not exist in the initial graph *) - val get_wild_bboxes : T -> B.NSet.T; - - exception bad_match_exp of string - * Graph.T - * Graph.T - * bbox_op list - * Graph.T - * VVInj.T - * EEInj.T - * BBInj.T - * Graph.subst - * B.NSet.T; - - (* create a match; checks for correctness *) - (* raises bad_match_exp if not a valid match *) - val mk : Graph.T - -> Graph.T - -> bbox_op list - -> Graph.T - -> VVInj.T - -> EEInj.T - -> BBInj.T - -> B.NSet.T - -> Graph.subst - -> T; - - (* create a match; checks for correctness *) - (* returns NONE if not a valid match *) - val mk_opt : - Graph.T - -> Graph.T - -> bbox_op list - -> Graph.T - -> VVInj.T - -> EEInj.T - -> BBInj.T - -> B.NSet.T - -> Graph.subst - -> T option; - - val print : T -> unit; - val pretty : T -> Pretty.T; -end; - -functor BangGraphMatch(Graph : BANG_GRAPH) : BANG_GRAPH_MATCH -= struct - structure Graph = Graph - - datatype T = Match of { - (* initial pattern graph *) - init_pat : Graph.T, - (* pattern with bboxes expanded *) - pat : Graph.T, - (* bbox operations to get from init_pat to pat *) - bbox_ops : bbox_op list, - (* target graph *) - tgt : Graph.T, - (* vertex mapping from pat to tgt *) - vmap : VVInj.T, - (* edge mapping from pat to tgt *) - emap : EEInj.T, - bmap : BBInj.T, - subst : Graph.subst, - wild_bbs : B.NSet.T - }; - - fun get_init_pat (Match r) = #init_pat r; - fun get_pat (Match r) = #pat r; - fun get_bbox_ops (Match r) = #bbox_ops r; - fun get_tgt (Match r) = #tgt r; - fun get_vmap (Match r) = #vmap r; - fun get_emap (Match r) = #emap r; - fun get_bmap (Match r) = #bmap r; - fun get_subst (Match r) = #subst r; - fun get_wild_bboxes (Match r) = #wild_bbs r; - - exception bad_match_exp of string - * Graph.T - * Graph.T - * bbox_op list - * Graph.T - * VVInj.T - * EEInj.T - * BBInj.T - * Graph.subst - * B.NSet.T; - - fun find_match_error pat tgt vmap emap bmap subst = let - val vmapf = VVInj.get vmap - val emapf = EEInj.get emap - - fun edge_coherent e = let - val ((dir,_),(s,t)) = Graph.get_edge_info pat e - val e' = emapf e - val exp_s' = vmapf s - val exp_t' = vmapf t - val ((dir',_),(real_s',real_t')) = Graph.get_edge_info tgt e' - in - case (dir,dir') - of (Directed,Directed) => - (V.name_eq (exp_s',real_s')) andalso - (V.name_eq (exp_t',real_t')) - | (Undirected,Undirected) => - (V.NSet.eq (V.NSet.of_list [exp_s',exp_t']) - (V.NSet.of_list [real_s',real_t'])) - | (Directed,Undirected) => false - | (Undirected,Directed) => false - end; - - fun vdata_matches v = let - val v' = vmapf v - val pat_vdata = Graph.get_vertex_data pat v - val tgt_vdata = Graph.get_vertex_data 
tgt v' - in - Graph.vdata_eq (tgt_vdata, snd(Graph.subst_in_vdata subst pat_vdata)) - end; - - fun edata_matches e = let - val e' = emapf e - val pat_edata = Graph.get_edge_data pat e - val tgt_edata = Graph.get_edge_data tgt e' - in - Graph.edata_eq (tgt_edata, snd (Graph.subst_in_edata subst pat_edata)) - end; - in - if not (V.NSet.eq (Graph.get_vertices pat) (VVInj.get_dom_set vmap)) - then SOME "vmap is not total" - else if not (E.NSet.eq (Graph.get_edges pat) (EEInj.get_dom_set emap)) - then SOME "emap is not total" - else if not (V.NSet.sub_set (VVInj.get_cod_set vmap) (Graph.get_vertices tgt)) - then SOME "vmap image is not in the target graph" - else if not (E.NSet.sub_set (EEInj.get_cod_set emap) (Graph.get_edges tgt)) - then SOME "emap image is not in the target graph" - else if not (E.NSet.forall edge_coherent (EEInj.get_dom_set emap)) - then SOME "vmap and emap are not coherent" - else if not (V.NSet.forall vdata_matches (VVInj.get_dom_set vmap)) - then SOME "vmap and subst do not give correct data" - else if not (E.NSet.forall edata_matches (EEInj.get_dom_set emap)) - then SOME "emap and subst do not give correct data" - else NONE - end; - - fun mk_opt init_pat pat bbox_ops tgt vmap emap bmap wild subst = - case find_match_error pat tgt vmap emap bmap subst - of SOME _ => NONE - | NONE => SOME (Match { - init_pat = init_pat, - pat = pat, - bbox_ops = bbox_ops, - tgt = tgt, - vmap = vmap, - emap = emap, - bmap = bmap, - subst = subst, - wild_bbs = wild - }); - - fun mk init_pat pat bbox_ops tgt vmap emap bmap wild subst = - case find_match_error pat tgt vmap emap bmap subst - of SOME e => raise bad_match_exp (e,init_pat,pat,bbox_ops,tgt,vmap,emap,bmap,subst,wild) - | NONE => Match { - init_pat = init_pat, - pat = pat, - bbox_ops = bbox_ops, - tgt = tgt, - vmap = vmap, - emap = emap, - bmap = bmap, - subst = subst, - wild_bbs = wild - }; - - fun pretty ms = let - fun pretty_bset prefix bs = - if B.NSet.is_empty bs then [] - else [Pretty.block [ - Pretty.str (prefix^": "), - B.NSet.pretty bs - ]] - in - Pretty.chunks [ - Pretty.str "Match {", - Pretty.block [ - Pretty.str " ", (* indent *) - Pretty.chunks [ - Pretty.block [ - Pretty.str "Pattern: ", - Graph.pretty (get_pat ms) - ], - Pretty.block [ - Pretty.str "Target: ", - Graph.pretty (get_tgt ms) - ], - Pretty.block [ - Pretty.str "VMap: ", - VVInj.pretty (get_vmap ms) - ], - Pretty.block [ - Pretty.str "EMap: ", - EEInj.pretty (get_emap ms) - ], - Pretty.block [ - Pretty.str "BMap: ", - BBInj.pretty (get_bmap ms) - ], - Pretty.block [ - Pretty.str "Wild !-boxes: ", - B.NSet.pretty (get_wild_bboxes ms) - ] - ] - ], - Pretty.str "}" - ] - end; - val print = Pretty.writeln o pretty; - - structure Sharing : BANG_GRAPH_MATCH_SHARING - = struct - structure Graph = Graph.Sharing - type T = T; - end; -end; - - - - diff --git a/core/matching/concrete_match_search.ML b/core/matching/concrete_match_search.ML deleted file mode 100644 index cdcc999c..00000000 --- a/core/matching/concrete_match_search.ML +++ /dev/null @@ -1,69 +0,0 @@ -(* A wrapper for matching concrete graphs against concrete graphs *) -(* see docs/matching_algo for details *) -(* This wrapper is very simple, as the InnerMatchSearch does all the work *) -functor ConcreteMatchSearch(Graph : OGRAPH) : MATCH_SEARCH = -struct - - structure Log : LOG = Log(val init_level = 0) - - - structure Graph = Graph - structure MatchState = OGraphMatchState(Graph) - structure InnerMatchSearch = ProgressiveMatchSearch(PermissiveHooks(MatchState)) - structure Match = OGraphMatch(Graph) - 
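(* Usage sketch for this functor; the structure names G and CMS are only illustrative:
 *
 *   structure CMS = ConcreteMatchSearch(G)
 *   val matches = CMS.match pattern_graph target_graph   (* lazy Seq.seq of Match.T *)
 *   val first   = Seq.pull matches
 *
 * match normalises both graphs before searching; use match_normalised when the inputs are
 * known to be normalised already, and match_subgraph to restrict the search to a given set
 * of target vertices. *)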
- fun matches_from_ms ms = let - val (pat,tgt,vmap,emap) = (MatchState.get_pat ms, - MatchState.get_tgt ms, - MatchState.get_vmap ms, - MatchState.get_emap ms) - in Seq.map (Match.mk pat tgt vmap emap) - (Graph.solve_psubst (MatchState.get_match_psubst ms)) - end - - (* We just put everything in the match state, and for each match - * provided by the inner loop, we match the bare wires, and discard any - * incomplete matches (as we have nothing left to add to the pattern) *) - fun do_match ms = - Seq.maps matches_from_ms - (Seq.filter MatchState.is_total - (Seq.maps (InnerMatchSearch.match_bare_wires ()) - (InnerMatchSearch.match_pending () ms))) - - fun log_p pretty_f lvl name = - Log.logf lvl (fn g => Pretty.string_of - (Pretty.chunks [Pretty.str (name^":"),(pretty_f g)])) - val log_graph = log_p Graph.pretty - val log_v_nset = log_p V.NSet.pretty - - fun match_normalised pat tgt = let - val _ = log_graph 3 "ConcreteMatchSearch: normalised pattern" pat - val _ = log_graph 3 "ConcreteMatchSearch: normalised target" tgt - val ms = MatchState.init_and_schedule_all pat tgt - in - do_match ms - end - fun match pat tgt = let - val pat' = Graph.normalise pat - val tgt' = Graph.normalise tgt - in match_normalised pat' tgt' end - - fun match_subgraph_normalised pat tgt tgt_verts = let - val _ = log_graph 3 "ConcreteMatchSearch: normalised pattern" pat - val _ = log_graph 3 "ConcreteMatchSearch: normalised target" tgt - val _ = log_v_nset 4 "ConcreteMatchSearch: target vertices" tgt_verts - val ms = MatchState.init_and_schedule_for_subgraph pat tgt tgt_verts - in - do_match ms - end - (* normalise first, then delegate to the already-normalised version above *) - fun match_subgraph pat tgt tgt_verts = - match_subgraph_normalised (Graph.normalise pat) (Graph.normalise tgt) tgt_verts - - structure Sharing = - struct - structure Graph = Graph.Sharing - structure Match = Match.Sharing - end -end diff --git a/core/matching/greedy_match_search.ML b/core/matching/greedy_match_search.ML deleted file mode 100644 index 85efad93..00000000 --- a/core/matching/greedy_match_search.ML +++ /dev/null @@ -1,654 +0,0 @@ -(* A wrapper for matching bang graphs against bang graphs - * see docs/matching_algo for details - * - * This implements the "Lazy !-Box Expansion" algorithm - * - * This wrapper greedily matches the whole concrete part of the - * graph, and then picks an arbitrary top-level !-box and tries - * both expanding it and killing it. - *) -functor GreedyMatchSearch( - Graph : BANG_GRAPH -) : BG_MATCH_SEARCH = -struct - - structure Log : LOG = Log(val init_level = 0) - - structure Graph = Graph - structure InnerMatchState = OGraphMatchState(Graph) - structure Match = BangGraphMatch(Graph) - - datatype State = MatchState of { - init_pat : Graph.T, - bbox_ops : bbox_op list, - wild_bboxes : B.NSet.T, - bmap : BBInj.T, - pending_pat_bbs : B.NSet.T, (* unmatched, fixed !-boxes of pattern *) - (* We split the operable (unfixed, top-level) !-boxes of the pattern into - * two parts, in order to remove some duplication. 
We wish to force all - * copy operations to happen before any expands (for a given !-box), so - * operable !-boxes start off in copyable_bbs, then move to expandable_bbs - * and finally are removed entirely when they are killed *) - copyable_bbs : B.NSet.T, - expandable_bbs : B.NSet.T, - pat_fix_tags : BFFn.T, (* fixity tags of pattern *) - tgt_fix_tags : BFFn.T, (* fixity tags of target *) - inner_state : InnerMatchState.T - } - - fun update_init_pat f (MatchState r) = MatchState { - init_pat = f(#init_pat r), - bbox_ops = #bbox_ops r, - wild_bboxes = #wild_bboxes r, - bmap = #bmap r, - pending_pat_bbs = #pending_pat_bbs r, - copyable_bbs = #copyable_bbs r, - expandable_bbs = #expandable_bbs r, - pat_fix_tags = #pat_fix_tags r, - tgt_fix_tags = #tgt_fix_tags r, - inner_state = #inner_state r - } - - fun update_bbox_ops f (MatchState r) = MatchState { - init_pat = #init_pat r, - bbox_ops = f(#bbox_ops r), - wild_bboxes = #wild_bboxes r, - bmap = #bmap r, - pending_pat_bbs = #pending_pat_bbs r, - copyable_bbs = #copyable_bbs r, - expandable_bbs = #expandable_bbs r, - pat_fix_tags = #pat_fix_tags r, - tgt_fix_tags = #tgt_fix_tags r, - inner_state = #inner_state r - } - - fun update_wild_bboxes f (MatchState r) = MatchState { - init_pat = #init_pat r, - bbox_ops = #bbox_ops r, - wild_bboxes = f(#wild_bboxes r), - bmap = #bmap r, - pending_pat_bbs = #pending_pat_bbs r, - copyable_bbs = #copyable_bbs r, - expandable_bbs = #expandable_bbs r, - pat_fix_tags = #pat_fix_tags r, - tgt_fix_tags = #tgt_fix_tags r, - inner_state = #inner_state r - } - - fun update_bmap f (MatchState r) = MatchState { - init_pat = #init_pat r, - bbox_ops = #bbox_ops r, - wild_bboxes = #wild_bboxes r, - bmap = f(#bmap r), - pending_pat_bbs = #pending_pat_bbs r, - copyable_bbs = #copyable_bbs r, - expandable_bbs = #expandable_bbs r, - pat_fix_tags = #pat_fix_tags r, - tgt_fix_tags = #tgt_fix_tags r, - inner_state = #inner_state r - } - - fun update_pending_pat_bbs f (MatchState r) = MatchState { - init_pat = #init_pat r, - bbox_ops = #bbox_ops r, - wild_bboxes = #wild_bboxes r, - bmap = #bmap r, - pending_pat_bbs = f(#pending_pat_bbs r), - copyable_bbs = #copyable_bbs r, - expandable_bbs = #expandable_bbs r, - pat_fix_tags = #pat_fix_tags r, - tgt_fix_tags = #tgt_fix_tags r, - inner_state = #inner_state r - } - - fun update_copyable_bbs f (MatchState r) = MatchState { - init_pat = #init_pat r, - bbox_ops = #bbox_ops r, - wild_bboxes = #wild_bboxes r, - bmap = #bmap r, - pending_pat_bbs = #pending_pat_bbs r, - copyable_bbs = f(#copyable_bbs r), - expandable_bbs = #expandable_bbs r, - pat_fix_tags = #pat_fix_tags r, - tgt_fix_tags = #tgt_fix_tags r, - inner_state = #inner_state r - } - - fun update_expandable_bbs f (MatchState r) = MatchState { - init_pat = #init_pat r, - bbox_ops = #bbox_ops r, - wild_bboxes = #wild_bboxes r, - bmap = #bmap r, - pending_pat_bbs = #pending_pat_bbs r, - copyable_bbs = #copyable_bbs r, - expandable_bbs = f(#expandable_bbs r), - pat_fix_tags = #pat_fix_tags r, - tgt_fix_tags = #tgt_fix_tags r, - inner_state = #inner_state r - } - - fun update_pat_fix_tags f (MatchState r) = MatchState { - init_pat = #init_pat r, - bbox_ops = #bbox_ops r, - wild_bboxes = #wild_bboxes r, - bmap = #bmap r, - pending_pat_bbs = #pending_pat_bbs r, - copyable_bbs = #copyable_bbs r, - expandable_bbs = #expandable_bbs r, - pat_fix_tags = f(#pat_fix_tags r), - tgt_fix_tags = #tgt_fix_tags r, - inner_state = #inner_state r - } - - fun update_tgt_fix_tags f (MatchState r) = MatchState { - init_pat = #init_pat r, - 
bbox_ops = #bbox_ops r, - wild_bboxes = #wild_bboxes r, - bmap = #bmap r, - pending_pat_bbs = #pending_pat_bbs r, - copyable_bbs = #copyable_bbs r, - expandable_bbs = #expandable_bbs r, - pat_fix_tags = #pat_fix_tags r, - tgt_fix_tags = f(#tgt_fix_tags r), - inner_state = #inner_state r - } - - fun update_inner_state f (MatchState r) = MatchState { - init_pat = #init_pat r, - bbox_ops = #bbox_ops r, - wild_bboxes = #wild_bboxes r, - bmap = #bmap r, - pending_pat_bbs = #pending_pat_bbs r, - copyable_bbs = #copyable_bbs r, - expandable_bbs = #expandable_bbs r, - pat_fix_tags = #pat_fix_tags r, - tgt_fix_tags = #tgt_fix_tags r, - inner_state = f(#inner_state r) - } - - fun get_init_pat (MatchState r) = #init_pat r - fun get_bbox_ops (MatchState r) = #bbox_ops r - fun get_wild_bboxes (MatchState r) = #wild_bboxes r - fun get_bmap (MatchState r) = #bmap r - fun get_pending_pat_bbs (MatchState r) = #pending_pat_bbs r - fun get_copyable_bbs (MatchState r) = #copyable_bbs r - fun get_expandable_bbs (MatchState r) = #expandable_bbs r - fun get_pat_fix_tags (MatchState r) = #pat_fix_tags r - fun get_tgt_fix_tags (MatchState r) = #tgt_fix_tags r - fun get_inner_state (MatchState r) = #inner_state r - - fun set_init_pat x = update_init_pat (fn _ => x) - fun set_bbox_ops x = update_bbox_ops (fn _ => x) - fun set_wild_bboxes x = update_wild_bboxes (fn _ => x) - fun set_bmap x = update_bmap (fn _ => x) - fun set_pending_pat_bbs x = update_pending_pat_bbs (fn _ => x) - fun set_copyable_bbs x = update_copyable_bbs (fn _ => x) - fun set_expandable_bbs x = update_expandable_bbs (fn _ => x) - fun set_pat_fix_tags x = update_pat_fix_tags (fn _ => x) - fun set_tgt_fix_tags x = update_tgt_fix_tags (fn _ => x) - fun set_inner_state x = update_inner_state (fn _ => x) - - structure IMSHooks : PROGRESSIVE_MATCH_SEARCH_HOOKS = - struct - type Context = State - structure MatchState = InnerMatchState - - (* TODO: it is probably worth optimising this some more *) - (* TODO: account for fixed/matched !-boxes? 
*) - fun p_vertex_may_be_completed _ ms pv = - let - val pat = InnerMatchState.get_pat ms - val tgt = InnerMatchState.get_tgt ms - val tv = VVInj.get (InnerMatchState.get_vmap ms) pv - val p_arity = Graph.get_arity pat pv - val t_arity = Graph.get_arity tgt tv - in - ( - not (Arity.get_in p_arity < Arity.get_in t_arity) orelse - (V.NSet.exists (Graph.is_bboxed pat) (Graph.get_predecessor_vertices pat pv)) - ) - andalso - ( - not (Arity.get_out p_arity < Arity.get_out t_arity) orelse - (V.NSet.exists (Graph.is_bboxed pat) (Graph.get_successor_vertices pat pv)) - ) - andalso - ( - not (Arity.get_undir p_arity < Arity.get_undir t_arity) orelse - (V.NSet.exists (Graph.is_bboxed pat) (Graph.get_sibling_vertices pat pv)) - ) - end - - fun vertex_match_is_allowed ms ims pv tv = - let - val pat = InnerMatchState.get_pat ims - val tgt = InnerMatchState.get_tgt ims - val p_bs = Graph.get_bboxes_containing_vertex pat pv - val t_bs = Graph.get_bboxes_containing_vertex tgt tv - val p_bs' = BBInj.img_of_set (get_bmap ms) p_bs - in - B.NSet.eq t_bs p_bs' - end - - fun bare_wire_match_is_allowed ms ims (p_s,_,p_t) (t_s,_,t_t) = - let - val pat = InnerMatchState.get_pat ims - val tgt = InnerMatchState.get_tgt ims - val p_wv = if Graph.is_wire_vertex pat p_s then p_s else p_t - val t_wv = if Graph.is_wire_vertex tgt t_s then t_s else t_t - val p_bs = Graph.get_bboxes_containing_vertex pat p_wv - val t_bs = Graph.get_bboxes_containing_vertex tgt t_wv - val p_bs' = BBInj.img_of_set (get_bmap ms) p_bs - in - B.NSet.eq t_bs p_bs' - end - end - structure InnerMatchSearch = ProgressiveMatchSearch(IMSHooks) - structure InnerLog = InnerMatchSearch.Log - - (* the inner match state actually holds the canonical reference to the expanded pattern graph *) - val get_pat = InnerMatchState.get_pat o get_inner_state - val update_pat = update_inner_state o InnerMatchState.update_pat - fun set_pat x = update_pat (fn _ => x) - - val get_tgt = InnerMatchState.get_tgt o get_inner_state - - fun add_wild_bbox bb = update_wild_bboxes (B.NSet.add bb) - - fun i_cap n = if n > 15 then 15 else n - fun indent 0 s = s - | indent n s = " "^(indent ((i_cap n)-1) s) - fun log_p pretty_f lvl ind name = - Log.logf lvl (fn g => Pretty.string_of (Pretty.indent (2*(i_cap ind)) - (Pretty.chunks [Pretty.str (name^":"),(pretty_f g)]))) - val log_graph = log_p Graph.pretty - val log_v_nset = log_p V.NSet.pretty - val log_e_nset = log_p E.NSet.pretty - val log_b_nset = log_p B.NSet.pretty - - fun init' pat tgt pat_fix_tags tgt_fix_tags = let - val toplevel_bbs = B.NSet.filter (not o (Graph.bbox_has_parents pat)) - (Graph.get_bboxes pat) - val pat_fixed = BFFn.get_dom_set pat_fix_tags - in - MatchState { - init_pat = pat, - bbox_ops = [], - wild_bboxes = B.NSet.empty, - bmap = BBInj.empty, - pending_pat_bbs = pat_fixed, - copyable_bbs = B.NSet.subtract toplevel_bbs pat_fixed, - expandable_bbs = B.NSet.empty, - pat_fix_tags = pat_fix_tags, - tgt_fix_tags = tgt_fix_tags, - inner_state = InnerMatchState.init pat tgt - } - end - - (*fun init_with_fixed pat tgt pat_fix_tags tgt_fix_tags = - init' pat tgt pat_fix_tags tgt_fix_tags []*) - - fun init pat tgt = init' pat tgt BFFn.empty BFFn.empty - - fun schedule_new lvl bms = let - val ms = get_inner_state bms - val pat = InnerMatchState.get_pat ms - val new_vs = V.NSet.subtract (Graph.get_vertices pat) - (VVInj.get_dom_set (InnerMatchState.get_vmap ms)) - val matched_bbs = BBInj.get_dom_set (get_bmap bms) - val _ = log_b_nset 4 lvl "Matched !-boxes in pattern so far" matched_bbs - val get_bboxes_for_v = 
Graph.get_bboxes_containing_vertex pat - fun in_match_surface v = B.NSet.sub_set (get_bboxes_for_v v) matched_bbs - val matchable = V.NSet.filter in_match_surface new_vs - val touched = Graph.get_adj_vertices_to_set pat matchable - (* only need to schedule p verts adjacent to new unbboxed verts; - * p verts adjacent to killed verts are scheduled in kill_pat_bbox *) - fun schedule_p_nvs ims = - InnerMatchState.extend_ps_nodeverts - (V.NSet.intersect (InnerMatchState.get_p_nodeverts ims) touched) - ims - val _ = log_v_nset 3 lvl "Scheduling new U" matchable - in - update_inner_state - (InnerMatchState.extend_u matchable - o schedule_p_nvs) - bms - end - - fun schedule_all_concrete tgt_pool bms = let - val pat = get_pat bms - val tgt = get_tgt bms - val pat_unbboxed = V.NSet.filter (not o (Graph.is_bboxed pat)) - (Graph.get_vertices pat) - val tgt_unbboxed = V.NSet.filter (not o (Graph.is_bboxed tgt)) - tgt_pool - fun schedule ims = - ims |> InnerMatchState.extend_u pat_unbboxed - |> InnerMatchState.set_tgt_verts tgt_unbboxed - in - update_inner_state schedule bms - end - - (* TODO: it is probably worth optimising this some more *) - fun is_total (lvl,bms) = - if (InnerMatchState.is_total (get_inner_state bms)) - then (Log.logf 2 (fn() => indent lvl "==== total match found ====") (); true) - else (Log.logf 2 (fn() => indent lvl "==== match failed to be total ====") (); false) - - fun match_bare_wires (lvl,ms) = let - val ims = get_inner_state ms - in - Seq.map (fn ims => (lvl+1,set_inner_state ims ms)) - (InnerMatchSearch.match_bare_wires ms ims) - end - - (* get the orbit of a single vertex under a series of expansions of a single bbox *) - fun get_orbit (bbox_ops as (BBExpand {bbox=bbox,vmap=vmap,bbox_map=_} :: ops)) = - (case V.NSet.get_local_bot (VSub.get_dom_set vmap) - of SOME v => - let - fun orb (BBExpand {bbox=bbox',vmap=vmap',bbox_map=_} :: ops) = - if B.name_eq (bbox,bbox') - then V.NSet.add (VSub.get vmap' v) (orb ops) - else V.NSet.empty - | orb _ = V.NSet.empty - in orb bbox_ops - end - | NONE => V.NSet.empty) - | get_orbit _ = V.NSet.empty - - fun match_pending (lvl,ms) = let - val ims = get_inner_state ms - in - Seq.map_filter (fn ims => - let - val orbit = get_orbit (get_bbox_ops ms) - val _ = Log.logf 2 (fn () => - "orbit: " ^ Pretty.string_of (V.NSet.pretty orbit)) () - val orbit_map = (InnerMatchState.get_vmap ims) - |> VVInj.restrict_dom orbit - in - if VVInj.is_monotone orbit_map - then SOME (lvl+1, ms |> set_inner_state ims) - else NONE - end) (InnerMatchSearch.match_pending ms ims) - end - - (*fun match_pending (lvl,ms) = let - fun update_ms ims = - (lvl+1,set_inner_state ims ms) - val ims = get_inner_state ms - in - Seq.map update_ms (InnerMatchSearch.match_pending ms ims) - end*) - - - fun finish_match ms = - Seq.filter is_total - (match_bare_wires ms) - - fun expand_pat_bbox lvl b ms = let - val _ = Log.logf 2 (fn () => indent lvl ("Expanding !-box "^(B.string_of_name b))) () - val pat = get_pat ms - val (bb_op, pat') = Graph.expand_bbox_op b pat - val new_toplevel = B.NSet.filter (not o (Graph.bbox_has_parents pat')) - (bbox_op_added_bboxes bb_op) - val _ = log_b_nset 4 lvl ">> new top-level !-boxes" new_toplevel - in - ms |> set_pat pat' - |> update_copyable_bbs (B.NSet.union_merge new_toplevel) - |> update_bbox_ops (cons bb_op) - end - - fun kill_pat_bbox lvl b ms = let - val _ = Log.logf 2 (fn () => indent lvl ("Killing !-box "^(B.string_of_name b))) () - val pat = get_pat ms - val bb_vs = Graph.get_vertices_in_bbox pat b - val adj_vs = 
Graph.get_adj_vertices_to_set pat bb_vs - val (bb_op, pat') = Graph.kill_bbox_op b pat - fun sched_adj ims = - InnerMatchState.extend_ps_nodeverts - (V.NSet.intersect (InnerMatchState.get_p_nodeverts ims) adj_vs) - ims - in - ms |> set_pat pat' - |> update_copyable_bbs (B.NSet.delete b) - |> update_expandable_bbs (B.NSet.delete b) - |> update_bbox_ops (cons bb_op) - |> update_inner_state sched_adj - end - - fun kill_bboxes_adj_to_complete_nvs (lvl,bms) = let - val ms = get_inner_state bms - val pat = InnerMatchState.get_pat ms - val complete = InnerMatchState.get_complete_nodeverts ms - val touched = Graph.get_adj_vertices_to_set pat complete - fun kill_all_bboxes v bms = let - val pat = InnerMatchState.get_pat (get_inner_state bms) - (* if two vertices border the same bbox, this vertex might have - * already been removed *) - val bbs = if Graph.has_vertex pat v - then Graph.get_bboxes_containing_vertex pat v - else B.NSet.empty - val operable_bbs = B.NSet.union_merge (get_copyable_bbs bms) - (get_expandable_bbs bms) - val adj_operable_bbs = B.NSet.intersect bbs operable_bbs - in - B.NSet.fold (kill_pat_bbox lvl) adj_operable_bbs bms - handle Graph.no_such_vertex_exp _ => bms - end - in - (lvl,V.NSet.fold kill_all_bboxes touched bms) - end - - fun kill_wild_bboxes (lvl,ms) = let - fun is_end_of_bare_wire g v = - if Graph.is_input g v - then - case E.NSet.tryget_singleton (Graph.get_out_edges g v) - of NONE => false (* this shouldn't happen, but isn't strictly impossible *) - | SOME e => Graph.is_output g (Graph.get_edge_target g e) - else - if Graph.is_output g v - then - case E.NSet.tryget_singleton (Graph.get_in_edges g v) - of NONE => false (* this shouldn't happen, but isn't strictly impossible *) - | SOME e => Graph.is_input g (Graph.get_edge_source g e) - else - false - fun is_wild g bbox = - V.NSet.forall (is_end_of_bare_wire g) (Graph.get_vertices_in_bbox g bbox) - fun kill_pat_bbox' bbox ms = - ms |> kill_pat_bbox lvl bbox - |> add_wild_bbox bbox - fun kill_if_wild bbox ms = - (if is_wild (InnerMatchState.get_pat (get_inner_state ms)) bbox - then kill_pat_bbox' bbox ms - else ms) - handle Graph.no_such_bbox_exp _ => ms - val pat = InnerMatchState.get_pat (get_inner_state ms) - in - (lvl,B.NSet.fold kill_if_wild (Graph.get_bboxes pat) ms) - end - - val kill_impossible_bboxes = kill_wild_bboxes - o kill_bboxes_adj_to_complete_nvs - - fun cfix_pat_bbox lvl b ms = let - val _ = Log.logf 2 (fn () => indent lvl ("Copy-fixing !-box "^(B.string_of_name b))) () - val pat = get_pat ms - val (bb_op, pat') = Graph.copy_bbox_op b pat - val b' = bbox_op_get_bbox_copy bb_op b - val b'_singleton = B.NSet.single b' - val new_toplevel = B.NSet.filter - (fn c => B.NSet.eq (Graph.get_bbox_parents pat' c) b'_singleton) - (bbox_op_added_bboxes bb_op) - val _ = log_b_nset 4 lvl ">> new top-level !-boxes" new_toplevel - in - ms |> set_pat pat' - |> update_copyable_bbs (B.NSet.union_merge new_toplevel) - |> update_pending_pat_bbs (B.NSet.ins_fresh b') - |> update_bbox_ops (cons bb_op) - end - - fun match_pending_bbs (lvl,bms) = - case B.NSet.pull_local_bot (get_pending_pat_bbs bms) - of NONE => Seq.single (lvl,bms) - | SOME (b,bbs') => - let - val pat = get_pat bms - val tgt = get_tgt bms - val bmap = get_bmap bms - val pat_parents = Graph.get_bbox_parents pat b - val tgt_parents = BBInj.img_of_set bmap pat_parents - val b_fix_tag = BFFn.get_opt (get_pat_fix_tags bms) b - fun is_tgt_cand b' = - (not (BBInj.cod_contains bmap b')) - andalso - (case (b_fix_tag,BFFn.get_opt (get_tgt_fix_tags bms) b') - of 
(NONE,_) => true - | (SOME x,SOME y) => F.name_eq (x,y) - | (SOME _,_) => false) - andalso - B.NSet.eq (Graph.get_bbox_parents tgt b') tgt_parents - val candidates = B.NSet.filter is_tgt_cand (Graph.get_bboxes tgt) - val bms' = set_pending_pat_bbs bbs' bms - (* schedule matchable parts of B(b) *) - (* set inner.tgt_verts to matchable part of B(b') *) - fun do_bb_match b' = let - val bmap' = BBInj.add (b,b') bmap - fun extend_tgt_verts ims = - let - val vs = Graph.get_vertices_in_bbox tgt b' - val matched_bbs = BBInj.get_cod_set bmap' - fun add_if_in_match_surface v set = - let - val bbs = Graph.get_bboxes_containing_vertex tgt v - in - if B.NSet.sub_set bbs matched_bbs - then V.NSet.add v set - else set - end - val tgt_verts = InnerMatchState.get_tgt_verts ims - in - InnerMatchState.set_tgt_verts - (V.NSet.fold add_if_in_match_surface vs tgt_verts) - ims - end - val ims = extend_tgt_verts (get_inner_state bms') - val bms' = set_bmap bmap' bms' - in - Seq.map (fn ims => (lvl+1,set_inner_state ims bms')) - (InnerMatchSearch.match_pending bms' ims) - end - in - Seq.maps do_bb_match (Seq.of_list (B.NSet.list_of candidates)) - end - - (* number of concrete vertices adjacent to bbox. We always choose the bbox with - * the most concrete nodes adjacent to expand next. *) - fun concrete_nhd_size g bb = - V.NSet.cardinality (V.NSet.filter - (not o Graph.is_bboxed g) - (Graph.get_adj_vertices_to_set g - (Graph.get_vertices_in_bbox g bb))) - - fun bbox_expand_branch b (lvl,ms) = - let - fun do_kill ms = - choose_next_bbox (lvl+1,kill_pat_bbox lvl b ms) - fun do_expand ms = - (* need to match non-concrete verts as well *) - match_loop' (lvl+1,ms |> expand_pat_bbox lvl b - |> schedule_new lvl) - in - Seq.append (Seq.maps do_expand (Seq.single ms)) - (Seq.maps do_kill (Seq.single ms)) - end - and bbox_copy_branch b (lvl,ms) = - let - fun start_expanding ms = - (Log.logf 3 (fn () => indent lvl ("!-box "^(B.string_of_name b)^" -> expandable")) (); - bbox_expand_branch b (lvl+1, - ms |> update_copyable_bbs (B.NSet.delete b) - |> update_expandable_bbs (B.NSet.ins_fresh b))) - fun do_cfix ms = - Seq.maps (match_loop' o (fn (l,s) => (l,schedule_new l s))) - (match_pending_bbs (lvl+1,cfix_pat_bbox lvl b ms)) - in - Seq.append (Seq.maps start_expanding (Seq.single ms)) - (Seq.maps do_cfix (Seq.single ms)) - end - and choose_next_bbox (lvl,ms) = - case B.NSet.maximize (concrete_nhd_size (get_pat ms)) (get_expandable_bbs ms) - of SOME b => bbox_expand_branch b (lvl,ms) - | NONE => (case B.NSet.maximize (concrete_nhd_size (get_pat ms)) (get_copyable_bbs ms) - of SOME b => bbox_copy_branch b (lvl,ms) - | NONE => finish_match (lvl,ms)) - and match_loop' x = - Seq.maps (choose_next_bbox o kill_impossible_bboxes) - (Seq.maps match_pending_bbs (match_pending x)) - - fun match_loop ms = match_loop' (0,ms) - - - fun matches_from_ms (_,ms) = let - val ims = get_inner_state ms - val (init_pat,pat,bbox_ops,tgt,vmap,emap,bmap,wild) = ( - get_init_pat ms, - get_pat ms, - get_bbox_ops ms, - InnerMatchState.get_tgt ims, - InnerMatchState.get_vmap ims, - InnerMatchState.get_emap ims, - get_bmap ms, - get_wild_bboxes ms - ) - in - Seq.map (Match.mk init_pat pat bbox_ops tgt vmap emap bmap wild) - (Graph.solve_psubst (InnerMatchState.get_match_psubst ims)) - end - - fun find_matches ms = Seq.maps matches_from_ms (match_loop ms) - - fun match_normalised pat tgt = let - val _ = log_graph 3 0 "GreedyMatchSearch: normalised pattern" pat - val _ = log_graph 3 0 "GreedyMatchSearch: normalised target" tgt - val ms = init pat tgt - in - 
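(* The result is a lazy Seq.seq of Match.T values; each match records the expanded pattern,
 * the bbox_ops used to reach it, and the vmap/emap/bmap injections (see bg_match.ML).
 * Usage sketch; the structure names G and BGM are only illustrative:
 *
 *   structure BGM = GreedyMatchSearch(G)
 *   val first = Seq.pull (BGM.match pattern_graph target_graph)
 *)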
find_matches (schedule_all_concrete (Graph.get_vertices tgt) ms) - end - - fun match pat tgt = - match_normalised (Graph.normalise pat) (Graph.normalise tgt) - - fun match_with_prematch pat tgt prematch = let - val (pat',tgt') = (Graph.normalise pat, Graph.normalise tgt) - val ms = schedule_all_concrete (Graph.get_vertices tgt') (init pat' tgt') - val ms' = VVInj.fold (fn pair => fn ms => - update_inner_state (fn ims => - case InnerMatchSearch.match_new_nv ms pair ims - of SOME ims' => ims' - | NONE => ims) ms) prematch ms - in - find_matches ms' - end - - fun match_subgraph_normalised pat tgt tgt_verts = let - val _ = log_graph 3 0 "GreedyMatchSearch: normalised pattern" pat - val _ = log_graph 3 0 "GreedyMatchSearch: normalised target" tgt - val _ = log_v_nset 4 0 "GreedyMatchSearch: target vertices" tgt_verts - val tgt_subgraph = Graph.get_open_subgraph tgt tgt_verts - val ms = init pat tgt - in - find_matches (schedule_all_concrete (Graph.get_vertices tgt_subgraph) ms) - end - fun match_subgraph pat tgt = - match_subgraph_normalised (Graph.normalise pat) (Graph.normalise tgt) - - structure Sharing = - struct - structure Graph = Graph.Sharing - structure Match = Match.Sharing - end -end diff --git a/core/matching/match.ML b/core/matching/match.ML deleted file mode 100644 index 734b238b..00000000 --- a/core/matching/match.ML +++ /dev/null @@ -1,190 +0,0 @@ -(* The type-sharing struct for OGRAPH_MATCH *) -signature OGRAPH_MATCH_SHARING -= sig - type T - structure Graph : OGRAPH_SHARING -end; - -(* A match from a string graph to a string graph *) -signature OGRAPH_MATCH -= sig - type T; - structure Graph : OGRAPH; - - (* sharing via sharing structure *) - structure Sharing : OGRAPH_MATCH_SHARING - sharing type Sharing.T = T - sharing Sharing.Graph = Graph.Sharing - - (* the pattern graph (domain) *) - val get_pat : T -> Graph.T; - (* the target graph (codomain) *) - val get_tgt : T -> Graph.T; - (* the vertex map (from pat verts to tgt verts *) - val get_vmap : T -> VVInj.T; - (* the edge map (from pat edges to tgt edges *) - val get_emap : T -> EEInj.T; - (* the vertex/edge data substitution for the matching *) - val get_subst : T -> Graph.subst; - - exception bad_match_exp of string - * Graph.T - * Graph.T - * VVInj.T - * EEInj.T - * Graph.subst; - - (* create a match; checks for correctness *) - (* raises bad_match_exp if not a valid match *) - val mk : Graph.T -> Graph.T -> VVInj.T -> EEInj.T -> Graph.subst -> T; - - (* create a match; checks for correctness *) - (* returns NONE if not a valid match *) - val mk_opt : Graph.T -> Graph.T -> VVInj.T -> EEInj.T -> Graph.subst - -> T option; - - val print : T -> unit; - val pretty : T -> Pretty.T; -end; - -functor OGraphMatch(Graph : OGRAPH) : OGRAPH_MATCH -= struct - structure Graph = Graph - - datatype T = Match of { - (* pattern and target graphs *) - pat : Graph.T, - tgt : Graph.T, - (* vertex mapping from pat to tgt *) - vmap : VVInj.T, - (* edge mapping from pat to tgt *) - emap : EEInj.T, - subst : Graph.subst - }; - - fun get_pat (Match r) = #pat r; - fun get_tgt (Match r) = #tgt r; - fun get_vmap (Match r) = #vmap r; - fun get_emap (Match r) = #emap r; - fun get_subst (Match r) = #subst r; - - exception bad_match_exp of string - * Graph.T - * Graph.T - * VVInj.T - * EEInj.T - * Graph.subst; - - fun find_match_error pat tgt vmap emap subst = let - val vmapf = VVInj.get vmap - val emapf = EEInj.get emap - - fun edge_coherent e = let - val dir = Graph.get_edge_dir pat e - val (s,t) = (Graph.get_edge_source pat e, Graph.get_edge_target 
pat e) - val e' = emapf e - val exp_s' = vmapf s - val exp_t' = vmapf t - val dir' = Graph.get_edge_dir tgt e' - val (real_s',real_t') = (Graph.get_edge_source tgt e', Graph.get_edge_target tgt e') - in - case (dir,dir') - of (Directed,Directed) => - (V.name_eq (exp_s',real_s')) andalso - (V.name_eq (exp_t',real_t')) - | (Undirected,Undirected) => - (V.NSet.eq (V.NSet.of_list [exp_s',exp_t']) - (V.NSet.of_list [real_s',real_t'])) - | (Directed,Undirected) => false - | (Undirected,Directed) => false - end; - - fun vdata_matches v = let - val v' = vmapf v - val pat_vdata = Graph.get_vertex_data pat v - val tgt_vdata = Graph.get_vertex_data tgt v' - in - Graph.vdata_eq (tgt_vdata, snd (Graph.subst_in_vdata subst pat_vdata)) - end; - - fun edata_matches e = let - val e' = emapf e - val pat_edata = Graph.get_edge_data pat e - val tgt_edata = Graph.get_edge_data tgt e' - in - Graph.edata_eq (tgt_edata, snd (Graph.subst_in_edata subst pat_edata)) - end; - in - if not (V.NSet.eq (Graph.get_vertices pat) (VVInj.get_dom_set vmap)) - then SOME "vmap is not total" - else if not (E.NSet.eq (Graph.get_edges pat) (EEInj.get_dom_set emap)) - then SOME "emap is not total" - else if not (V.NSet.sub_set (VVInj.get_cod_set vmap) (Graph.get_vertices tgt)) - then SOME "vmap image is not in the target graph" - else if not (E.NSet.sub_set (EEInj.get_cod_set emap) (Graph.get_edges tgt)) - then SOME "emap image is not in the target graph" - else if not (E.NSet.forall edge_coherent (EEInj.get_dom_set emap)) - then SOME "vmap and emap are not coherent" - else if not (V.NSet.forall vdata_matches (VVInj.get_dom_set vmap)) - then SOME "vmap and subst do not give correct data" - else if not (E.NSet.forall edata_matches (EEInj.get_dom_set emap)) - then SOME "emap and subst do not give correct data" - else NONE - end; - - fun mk_opt pat tgt vmap emap subst = - case find_match_error pat tgt vmap emap subst - of SOME _ => NONE - | NONE => SOME (Match { - pat = pat, - tgt = tgt, - vmap = vmap, - emap = emap, - subst = subst - }); - - fun mk pat tgt vmap emap subst = - case find_match_error pat tgt vmap emap subst - of SOME e => raise bad_match_exp (e,pat,tgt,vmap,emap,subst) - | NONE => Match { - pat = pat, - tgt = tgt, - vmap = vmap, - emap = emap, - subst = subst - }; - - fun pretty ms = - Pretty.chunks [ - Pretty.str "Match {", - Pretty.block [ - Pretty.str " ", (* indent *) - Pretty.chunks [ - Pretty.block [ - Pretty.str "Pattern: ", - Graph.pretty (get_pat ms) - ], - Pretty.block [ - Pretty.str "Target: ", - Graph.pretty (get_tgt ms) - ], - Pretty.block [ - Pretty.str "VMap: ", - VVInj.pretty (get_vmap ms) - ], - Pretty.block [ - Pretty.str "EMap: ", - EEInj.pretty (get_emap ms) - ] - ] - ], - Pretty.str "}" - ]; - val print = Pretty.writeln o pretty; - - structure Sharing : OGRAPH_MATCH_SHARING - = struct - structure Graph = Graph.Sharing - type T = T; - end; -end diff --git a/core/matching/match_search.ML b/core/matching/match_search.ML deleted file mode 100644 index c9b37937..00000000 --- a/core/matching/match_search.ML +++ /dev/null @@ -1,97 +0,0 @@ -(* The matching interface *) -(* see docs/matching_algo for details *) -signature MATCH_SEARCH_SHARING = -sig - structure Graph : OGRAPH_SHARING - structure Match : OGRAPH_MATCH_SHARING - sharing Graph = Match.Graph -end - -signature MATCH_SEARCH = -sig - structure Log : LOG; - - - structure Match : OGRAPH_MATCH - structure Graph : OGRAPH - structure Sharing : MATCH_SEARCH_SHARING - sharing Sharing.Graph = Graph.Sharing - sharing Sharing.Match = Match.Sharing - - - val 
match : Graph.T (* pattern graph *) - -> Graph.T (* target graph *) - -> Match.T Seq.seq (* (lazy) list of matches *) - - val match_subgraph : Graph.T (* pattern graph *) - -> Graph.T (* target graph *) - -> V.NSet.T (* target subgraph verts *) - -> Match.T Seq.seq (* (lazy) list of matches *) - - (* these versions are for efficiency if you can guarantee - * that the graphs are already normalised - *) - - val match_normalised : Graph.T (* pattern graph (normalised) *) - -> Graph.T (* target graph (normalised) *) - -> Match.T Seq.seq (* (lazy) list of matches *) - - val match_subgraph_normalised : Graph.T (* pattern graph (normalised) *) - -> Graph.T (* target graph (normalised) *) - -> V.NSet.T (* target subgraph verts *) - -> Match.T Seq.seq (* (lazy) list of matches *) -end - -signature BG_MATCH_SEARCH_SHARING = -sig - structure Graph : OGRAPH_SHARING - structure Match : BANG_GRAPH_MATCH_SHARING - sharing Graph = Match.Graph -end - -signature BG_MATCH_SEARCH = -sig - - structure Log : LOG; - structure InnerLog : LOG; - - structure Graph : BANG_GRAPH; - structure Match: BANG_GRAPH_MATCH; - structure Sharing : MATCH_SEARCH_SHARING - sharing Sharing.Graph = Graph.Sharing - sharing Sharing.Match = Match.Sharing - - (* Searches for matches *) - val match - : Graph.T (* pattern graph *) - -> Graph.T (* target graph *) - -> Match.T Seq.seq (* (lazy) list of matches *) - - (* Searches for matches that complete the given node map. If the - given node map is already invalid, returns no matches. *) - val match_with_prematch - : Graph.T (* pattern graph *) - -> Graph.T (* target graph *) - -> VVInj.T (* an initial node map *) - -> Match.T Seq.seq (* (lazy) list of matches *) - - (* Searches for matches *) - val match_subgraph - : Graph.T (* pattern graph *) - -> Graph.T (* target graph *) - -> V.NSet.T (* target subgraph verts *) - -> Match.T Seq.seq (* (lazy) list of matches *) - - (* Searches for matches *) - val match_normalised - : Graph.T (* pattern graph *) - -> Graph.T (* target graph (normalised) *) - -> Match.T Seq.seq (* (lazy) list of matches *) - - (* Searches for matches *) - val match_subgraph_normalised - : Graph.T (* pattern graph *) - -> Graph.T (* target graph (normalised) *) - -> V.NSet.T (* target subgraph verts *) - -> Match.T Seq.seq (* (lazy) list of matches *) -end diff --git a/core/matching/match_state.ML b/core/matching/match_state.ML deleted file mode 100644 index 4135aeaa..00000000 --- a/core/matching/match_state.ML +++ /dev/null @@ -1,391 +0,0 @@ -(* The match state for graph matching *) -(* see docs/matching_algo for details *) - -(* The type-sharing struct for OGRAPH_MATCH_STATE *) -signature OGRAPH_MATCH_STATE_SHARING -= sig - type T - structure Graph : OGRAPH_SHARING -end - -(* The match state for concrete graph matching *) -signature OGRAPH_MATCH_STATE -= sig - type T - structure Graph : OGRAPH - - (* sharing via sharing structure *) - structure Sharing : OGRAPH_MATCH_STATE_SHARING - sharing type Sharing.T = T - sharing Sharing.Graph = Graph.Sharing - - (* the graph to match from *) - val get_pat : T -> Graph.T - (* the graph to match onto *) - val get_tgt : T -> Graph.T - (* the vertex map so far (from pat verts to tgt verts *) - val get_vmap : T -> VVInj.T - (* the edge map so far (from pat edges to tgt edges *) - val get_emap : T -> EEInj.T - (* circles waiting to be matched *) - (* contains a representative vertex from each circle *) - val get_u_circles : T -> V.NSet.T - (* node vertices waiting to be matched *) - val get_u_nodeverts : T -> V.NSet.T - (* wire 
vertices waiting to be matched *) - val get_u_wireverts : T -> V.NSet.T - (* bare wires waiting to be matched *) - val get_u_bare_wires : T -> E.NSet.T - (* partially-matched vertices - these are in vmap, but not all - * the adjacent edges of their images are in the image of emap *) - val get_p_nodeverts : T -> V.NSet.T - (* those vertices in p_nodeverts that may have new matchings - * of adjacent wires *) - val get_ps_nodeverts : T -> V.NSet.T - (* the vertices of tgt to be matched onto *) - val get_tgt_verts : T -> V.NSet.T - (* the vertex/edge data substitution for the matching *) - val get_match_psubst : T -> Graph.psubst - - val update_pat : (Graph.T -> Graph.T) -> T -> T - val update_tgt : (Graph.T -> Graph.T) -> T -> T - val update_vmap : (VVInj.T -> VVInj.T) -> T -> T - val update_emap : (EEInj.T -> EEInj.T) -> T -> T - - val update_u_circles : (V.NSet.T -> V.NSet.T) -> T -> T - val update_u_nodeverts : (V.NSet.T -> V.NSet.T) -> T -> T - val update_u_wireverts : (V.NSet.T -> V.NSet.T) -> T -> T - val update_u_bare_wires : (E.NSet.T -> E.NSet.T) -> T -> T - val update_p_nodeverts : (V.NSet.T -> V.NSet.T) -> T -> T - val update_ps_nodeverts : (V.NSet.T -> V.NSet.T) -> T -> T - val update_tgt_verts : (V.NSet.T -> V.NSet.T) -> T -> T - val update_match_psubst : (Graph.psubst -> Graph.psubst) -> T -> T - - val set_pat : Graph.T -> T -> T - val set_tgt : Graph.T -> T -> T - val set_vmap : VVInj.T -> T -> T - val set_emap : EEInj.T -> T -> T - - val set_u_circles : V.NSet.T -> T -> T - val set_u_nodeverts : V.NSet.T -> T -> T - val set_u_wireverts : V.NSet.T -> T -> T - val set_u_bare_wires : E.NSet.T -> T -> T - val set_p_nodeverts : V.NSet.T -> T -> T - val set_ps_nodeverts : V.NSet.T -> T -> T - val set_tgt_verts : V.NSet.T -> T -> T - val set_match_psubst : Graph.psubst -> T -> T - - (* Add the given unmatched vertices, sorting as appropriate. - * Two major assumptions: circles have only one wire-vertex and bare wires - * have exactly two. For bare wires, you should pass both the input and - * the output; the edge connecting them will be added to u_bare_wires. It is - * important that you always pass either all or none of the wire-vertices on - * any given wire. 
- *) - val extend_u : V.NSet.T -> T -> T - val extend_p_nodeverts : V.NSet.T -> T -> T - val extend_ps_nodeverts : V.NSet.T -> T -> T - val extend_tgt_verts : V.NSet.T -> T -> T - - (* initialise MatchState with nothing added *) - val init : Graph.T -> (* pattern *) - Graph.T -> (* target *) - T - - val get_complete_nodeverts : T -> V.NSet.T - - (* returns true if vmap and evap represent a total graph hm *) - (* will only return true if p_nodeverts is empty (local isomorphism) *) - val is_total : T -> bool - - - (* initialise MatchState and schedule all vertices in pattern and target to - * be matched; arguments are *normalised* graphs *) - val init_and_schedule_all : Graph.T -> (* pattern *) - Graph.T -> (* target *) - T - - (* initialise MatchState and schedule all vertices in pattern to be matched, - * and the vertices from a subgraph of the target *) - val init_and_schedule_for_subgraph : Graph.T -> (* pattern *) - Graph.T -> (* target *) - V.NSet.T -> (* node verts of target subgraph *) - T - - (* schedule all vertices in pattern to be matched *) - val schedule_all_pat : T -> T - (* schedule all vertices in target to be matched *) - val schedule_all_tgt : T -> T - (* schedule the subgraph defined by the given node-vertices to be matched *) - val schedule_tgt_subgraph : V.NSet.T -> T -> T - - (* Convenience functions for adding things to match. Convention is these do NOT remove - vertices or edges from the u_*, p_*, or ps_* sets. The caller should do that. *) - - (* adds given vertex to match *) - val add_vertex_to_match : (V.name * V.name) -> T -> T - - (* adds edge and given endpoints to match *) - val add_edge_to_match : (E.name * V.name) -> (E.name * V.name) -> T -> T - - (* adds wire vertex and associated circle to match *) - val add_circle_to_match : (V.name * V.name) -> T -> T - - val pretty : T -> Pretty.T - val print : T -> unit -end - -functor OGraphMatchState(Graph : OGRAPH) : OGRAPH_MATCH_STATE -= struct - structure Graph = Graph - - datatype T = MatchState of { - (* pattern and target graphs *) - pat : Graph.T, - tgt : Graph.T, - (* vertex mapping from pat to tgt *) - vmap : VVInj.T, - (* edge mapping from pat to tgt *) - emap : EEInj.T, - (* circles, node-vertices, and wire-vertices to be matched *) - u_circles : V.NSet.T, - u_nodeverts : V.NSet.T, - u_wireverts : V.NSet.T, - u_bare_wires : E.NSet.T, - (* partially matched node-vertices *) - p_nodeverts : V.NSet.T, - (* partially matched node-vertices, scheduled for re-matching *) - ps_nodeverts : V.NSet.T, - tgt_verts : V.NSet.T, - match_psubst : Graph.psubst - } - - - fun init pat tgt = MatchState { - pat = pat, - tgt = tgt, - vmap = VVInj.empty, - emap = EEInj.empty, - u_circles = V.NSet.empty, - u_nodeverts = V.NSet.empty, - u_wireverts = V.NSet.empty, - u_bare_wires = E.NSet.empty, - p_nodeverts = V.NSet.empty, - ps_nodeverts = V.NSet.empty, - tgt_verts = V.NSet.empty, - match_psubst = Graph.init_psubst pat tgt - } - - (* getters and setters *) - fun update_pat f (MatchState r) = MatchState {pat=f(#pat r),tgt= #tgt r,vmap= #vmap r,emap= #emap r,u_circles= #u_circles r,u_nodeverts= #u_nodeverts r,u_wireverts= #u_wireverts r,u_bare_wires= #u_bare_wires r,p_nodeverts= #p_nodeverts r,ps_nodeverts= #ps_nodeverts r,tgt_verts= #tgt_verts r,match_psubst= #match_psubst r} - fun update_tgt f (MatchState r) = MatchState {pat= #pat r,tgt=f(#tgt r),vmap= #vmap r,emap= #emap r,u_circles= #u_circles r,u_nodeverts= #u_nodeverts r,u_wireverts= #u_wireverts r,u_bare_wires= #u_bare_wires r,p_nodeverts= #p_nodeverts r,ps_nodeverts= 
#ps_nodeverts r,tgt_verts= #tgt_verts r,match_psubst= #match_psubst r} - fun update_vmap f (MatchState r) = MatchState {pat= #pat r,tgt= #tgt r,vmap=f(#vmap r),emap= #emap r,u_circles= #u_circles r,u_nodeverts= #u_nodeverts r,u_wireverts= #u_wireverts r,u_bare_wires= #u_bare_wires r,p_nodeverts= #p_nodeverts r,ps_nodeverts= #ps_nodeverts r,tgt_verts= #tgt_verts r,match_psubst= #match_psubst r} - fun update_emap f (MatchState r) = MatchState {pat= #pat r,tgt= #tgt r,vmap= #vmap r,emap=f(#emap r),u_circles= #u_circles r,u_nodeverts= #u_nodeverts r,u_wireverts= #u_wireverts r,u_bare_wires= #u_bare_wires r,p_nodeverts= #p_nodeverts r,ps_nodeverts= #ps_nodeverts r,tgt_verts= #tgt_verts r,match_psubst= #match_psubst r} - fun update_u_circles f (MatchState r) = MatchState {pat= #pat r,tgt= #tgt r,vmap= #vmap r,emap= #emap r,u_circles=f(#u_circles r),u_nodeverts= #u_nodeverts r,u_wireverts= #u_wireverts r,u_bare_wires= #u_bare_wires r,p_nodeverts= #p_nodeverts r,ps_nodeverts= #ps_nodeverts r,tgt_verts= #tgt_verts r,match_psubst= #match_psubst r} - fun update_u_nodeverts f (MatchState r) = MatchState {pat= #pat r,tgt= #tgt r,vmap= #vmap r,emap= #emap r,u_circles= #u_circles r,u_nodeverts=f(#u_nodeverts r),u_wireverts= #u_wireverts r,u_bare_wires= #u_bare_wires r,p_nodeverts= #p_nodeverts r,ps_nodeverts= #ps_nodeverts r,tgt_verts= #tgt_verts r,match_psubst= #match_psubst r} - fun update_u_wireverts f (MatchState r) = MatchState {pat= #pat r,tgt= #tgt r,vmap= #vmap r,emap= #emap r,u_circles= #u_circles r,u_nodeverts= #u_nodeverts r,u_wireverts=f(#u_wireverts r),u_bare_wires= #u_bare_wires r,p_nodeverts= #p_nodeverts r,ps_nodeverts= #ps_nodeverts r,tgt_verts= #tgt_verts r,match_psubst= #match_psubst r} - fun update_u_bare_wires f (MatchState r) = MatchState {pat= #pat r,tgt= #tgt r,vmap= #vmap r,emap= #emap r,u_circles= #u_circles r,u_nodeverts= #u_nodeverts r,u_wireverts= #u_wireverts r,u_bare_wires=f(#u_bare_wires r),p_nodeverts= #p_nodeverts r,ps_nodeverts= #ps_nodeverts r,tgt_verts= #tgt_verts r,match_psubst= #match_psubst r} - fun update_p_nodeverts f (MatchState r) = MatchState {pat= #pat r,tgt= #tgt r,vmap= #vmap r,emap= #emap r,u_circles= #u_circles r,u_nodeverts= #u_nodeverts r,u_wireverts= #u_wireverts r,u_bare_wires= #u_bare_wires r,p_nodeverts=f(#p_nodeverts r),ps_nodeverts= #ps_nodeverts r,tgt_verts= #tgt_verts r,match_psubst= #match_psubst r} - fun update_ps_nodeverts f (MatchState r) = MatchState {pat= #pat r,tgt= #tgt r,vmap= #vmap r,emap= #emap r,u_circles= #u_circles r,u_nodeverts= #u_nodeverts r,u_wireverts= #u_wireverts r,u_bare_wires= #u_bare_wires r,p_nodeverts= #p_nodeverts r,ps_nodeverts=f(#ps_nodeverts r),tgt_verts= #tgt_verts r,match_psubst= #match_psubst r} - fun update_tgt_verts f (MatchState r) = MatchState {pat= #pat r,tgt= #tgt r,vmap= #vmap r,emap= #emap r,u_circles= #u_circles r,u_nodeverts= #u_nodeverts r,u_wireverts= #u_wireverts r,u_bare_wires= #u_bare_wires r,p_nodeverts= #p_nodeverts r,ps_nodeverts= #ps_nodeverts r,tgt_verts=f(#tgt_verts r),match_psubst= #match_psubst r} - fun update_match_psubst f (MatchState r) = MatchState {pat= #pat r,tgt= #tgt r,vmap= #vmap r,emap= #emap r,u_circles= #u_circles r,u_nodeverts= #u_nodeverts r,u_wireverts= #u_wireverts r,u_bare_wires= #u_bare_wires r,p_nodeverts= #p_nodeverts r,ps_nodeverts= #ps_nodeverts r,tgt_verts= #tgt_verts r,match_psubst=f(#match_psubst r)} - - fun get_pat (MatchState r) = #pat r - fun get_tgt (MatchState r) = #tgt r - fun get_vmap (MatchState r) = #vmap r - fun get_emap (MatchState r) = #emap r - fun 
get_u_circles (MatchState r) = #u_circles r - fun get_u_nodeverts (MatchState r) = #u_nodeverts r - fun get_u_wireverts (MatchState r) = #u_wireverts r - fun get_u_bare_wires (MatchState r) = #u_bare_wires r - fun get_p_nodeverts (MatchState r) = #p_nodeverts r - fun get_ps_nodeverts (MatchState r) = #ps_nodeverts r - fun get_tgt_verts (MatchState r) = #tgt_verts r - fun get_match_psubst (MatchState r) = #match_psubst r - - fun set_pat x = update_pat (fn _ => x) - fun set_tgt x = update_tgt (fn _ => x) - fun set_vmap x = update_vmap (fn _ => x) - fun set_emap x = update_emap (fn _ => x) - fun set_u_circles x = update_u_circles (fn _ => x) - fun set_u_nodeverts x = update_u_nodeverts (fn _ => x) - fun set_u_wireverts x = update_u_wireverts (fn _ => x) - fun set_u_bare_wires x = update_u_bare_wires (fn _ => x) - fun set_p_nodeverts x = update_p_nodeverts (fn _ => x) - fun set_ps_nodeverts x = update_ps_nodeverts (fn _ => x) - fun set_tgt_verts x = update_tgt_verts (fn _ => x) - fun set_match_psubst x = update_match_psubst (fn _ => x) - - - - val extend_u_circles = update_u_circles o V.NSet.union_merge - val extend_u_nodeverts = update_u_nodeverts o V.NSet.union_merge - val extend_u_wireverts = update_u_wireverts o V.NSet.union_merge - val extend_u_bare_wires = update_u_bare_wires o E.NSet.union_merge - val extend_p_nodeverts = update_p_nodeverts o V.NSet.union_merge - val extend_ps_nodeverts = update_ps_nodeverts o V.NSet.union_merge - val extend_tgt_verts = update_tgt_verts o V.NSet.union_merge - - fun extend_u vs ms = let - val pat = get_pat ms - - fun sort v (circle_wvs,nvs,wvs,bare_wires) = - if Graph.is_node_vertex pat v - then (circle_wvs,(V.NSet.ins_fresh v nvs),wvs,bare_wires) - else - let - val in_es = Graph.get_in_edges pat v - val out_es = Graph.get_out_edges pat v - fun add_to_wvs () = - (circle_wvs,nvs,(V.NSet.ins_fresh v wvs),bare_wires) - fun add_to_wvs_or_bws e = let - val v' = Graph.edge_get_other_vertex pat e v - in - if Graph.is_node_vertex pat v' - then add_to_wvs () - else - case Graph.wv_get_other_edge pat v' e - of (SOME _) => add_to_wvs () - (* will always happen twice for each bare wire: *) - | NONE => (circle_wvs,nvs,wvs,(E.NSet.add e bare_wires)) - end - in - case (E.NSet.tryget_singleton (in_es), - E.NSet.tryget_singleton (out_es)) - of (SOME e1,SOME e2) => - if E.name_eq (e1,e2) - then ((V.NSet.ins_fresh v circle_wvs),nvs,wvs,bare_wires) - else add_to_wvs () - | _ => - (case E.NSet.list_of (E.NSet.union_merge in_es out_es) - of [_,_] => add_to_wvs () - | [e] => add_to_wvs_or_bws e - | _ => (* bug *) raise Match) - end - - val (circle_wvs,nvs,wvs,bare_wires) = - V.NSet.fold sort vs (V.NSet.empty,V.NSet.empty,V.NSet.empty,E.NSet.empty) - in - ms |> extend_u_circles circle_wvs - |> extend_u_nodeverts nvs - |> extend_u_wireverts wvs - |> extend_u_bare_wires bare_wires - end - - fun add_edge_to_match (ep, vp) (et, vt) ms = let - val ms' = update_emap (EEInj.add (ep, et)) ms - in - if VVInj.is_mapped (get_vmap ms) (vp,vt) - then ms' - else update_vmap (VVInj.add (vp, vt)) ms' - end - - fun add_circle_to_match (cp, ct) ms = let - val ep = (the o E.NSet.tryget_singleton) (Graph.get_in_edges (get_pat ms) cp) - val et = (the o E.NSet.tryget_singleton) (Graph.get_in_edges (get_tgt ms) ct) - in ms |> update_vmap (VVInj.add (cp, ct)) - |> update_emap (EEInj.add (ep, et)) - end - - fun add_vertex_to_match (vp, vt) ms = ms |> update_vmap (VVInj.add (vp, vt)) - - fun get_complete_nodeverts ms = let - val node_vs = V.NSet.filter (Graph.is_node_vertex (get_pat ms)) - 
(VVInj.get_dom_set (get_vmap ms)) - in - V.NSet.subtract node_vs (get_p_nodeverts ms) - end - - fun is_total ms = - V.NSet.eq (VVInj.get_dom_set (get_vmap ms)) (Graph.get_vertices (get_pat ms)) andalso - E.NSet.eq (EEInj.get_dom_set (get_emap ms)) (Graph.get_edges (get_pat ms)) andalso - V.NSet.is_empty (get_p_nodeverts ms) - - fun schedule_all_pat ms = let - val pat = get_pat ms - val wires = Graph.get_wire_list pat - fun delete_wvs_in_wire (s,t,wv,_) set = - set |> V.NSet.remove_set wv - |> (if Graph.is_wire_vertex pat s then V.NSet.delete s else I) - |> (if Graph.is_wire_vertex pat t then V.NSet.delete t else I) - - val circles = filter (fn (s,t,_,_) => V.name_eq (s,t) andalso Graph.is_wire_vertex pat s) wires - val bare_wires = filter (fn (s,t,_,_) => not (V.name_eq (s,t)) andalso Graph.is_wire_vertex pat s andalso Graph.is_wire_vertex pat t) wires - - val wvs = fold delete_wvs_in_wire (circles @ bare_wires) (Graph.get_wire_vertices pat) - val circle_wvs = fold (fn (s,_,_,_) => fn set => set |> V.NSet.add s) circles V.NSet.empty - val bare_wire_es = fold (fn (_,_,_,es) => E.NSet.add ((the o E.NSet.get_min) es)) bare_wires E.NSet.empty - in - ms |> set_u_circles circle_wvs - |> set_u_nodeverts (Graph.get_node_vertices pat) - |> set_u_wireverts wvs - |> set_u_bare_wires bare_wire_es - end - - fun schedule_all_tgt ms = set_tgt_verts (Graph.get_vertices (get_tgt ms)) ms; - fun schedule_tgt_subgraph vs ms = let - val tgt = get_tgt ms - val tgt_subg = Graph.get_open_subgraph tgt vs - in - set_tgt_verts (Graph.get_vertices tgt_subg) ms - end; - - val init_and_schedule_all = schedule_all_tgt o schedule_all_pat oo init; - fun init_and_schedule_for_subgraph pat tgt vs = - schedule_tgt_subgraph vs (schedule_all_pat (init pat tgt)); - - fun pretty ms = let - fun pretty_vset prefix vs = - if V.NSet.is_empty vs then [] - else [Pretty.block [ - Pretty.str (prefix^": "), - V.NSet.pretty vs - ]] - fun pretty_eset prefix es = - if E.NSet.is_empty es then [] - else [Pretty.block [ - Pretty.str (prefix^": "), - E.NSet.pretty es - ]] - in - Pretty.chunks [ - Pretty.str "Match {", - Pretty.block [ - Pretty.str " ", (* indent *) - Pretty.chunks ([ - Pretty.block [ - Pretty.str "Pattern: ", - Graph.pretty (get_pat ms) - ], - Pretty.block [ - Pretty.str "Target: ", - Graph.pretty (get_tgt ms) - ], - Pretty.block [ - Pretty.str "VMap: ", - VVInj.pretty (get_vmap ms) - ], - Pretty.block [ - Pretty.str "EMap: ", - EEInj.pretty (get_emap ms) - ] - ] - @ (pretty_vset "U_Cicles" (get_u_circles ms)) - @ (pretty_vset "U_NodeVerts" (get_u_nodeverts ms)) - @ (pretty_vset "U_WireVerts" (get_u_wireverts ms)) - @ (pretty_eset "U_BareWires" (get_u_bare_wires ms)) - @ (pretty_vset "P_NodeVerts" (get_p_nodeverts ms)) - @ (pretty_vset "Ps_NodeVerts" (get_ps_nodeverts ms)) - @ (pretty_vset "Tgt Verts" (get_tgt_verts ms))) - ], - Pretty.str "}" - ] - end; - val print = Pretty.writeln o pretty; - - structure Sharing : OGRAPH_MATCH_STATE_SHARING - = struct - structure Graph = Graph.Sharing - type T = T - end; -end diff --git a/core/matching/progressive_match_search.ML b/core/matching/progressive_match_search.ML deleted file mode 100644 index 1b85978b..00000000 --- a/core/matching/progressive_match_search.ML +++ /dev/null @@ -1,615 +0,0 @@ -(* A piece-by-piece matcher for use by other matching code *) -(* see docs/matching_algo for details *) -signature PROGRESSIVE_MATCH_SEARCH_SHARING = -sig - structure MatchState : OGRAPH_MATCH_STATE_SHARING -end - -signature PROGRESSIVE_MATCH_SEARCH_HOOKS = -sig - type Context - structure 
MatchState : OGRAPH_MATCH_STATE - - (* Returns false if the given partially-matched vertex (in the - * pattern graph) is guaranteed to never be completed; and true - * otherwise. - * - * If in doubt, return true. - *) - val p_vertex_may_be_completed : Context -> MatchState.T -> V.name -> bool - - (* Allows additional constraints to be placed on the match. A potential - * match will be tested using this function, and discarded if it returns - * false. - * - * This is *not* used by the bare-wire matching (see - * bare_wire_match_is_allowed). - *) - val vertex_match_is_allowed : Context -> MatchState.T -> V.name -> V.name -> bool - - (* Allows additional constraints to be placed on bare-wire matching. A - * potential match will be tested using this function, and discarded if it - * returns false. - * - * The wire segment arguments are (s,e,t) and (s',e',t'), where e is the edge - * in a bare wire of the pattern graph, e' is an edge in the target graph and - * s, t, s' and t' are the corresponding sources and targets. - * - * s, e and t are all unmatched, and s and t are wire-vertices (specifically, - * an input and an output of the pattern). - * - * e' is unmatched, but one or both of s' and t' may be matched. Further, - * while at least one of s' and t' is a wire-vertex, the other may be a - * node-vertex. The wire segment will be expanded as necessary if the match - * proceeds. - *) - val bare_wire_match_is_allowed : Context - -> MatchState.T - -> (V.name*E.name*V.name) (* bare wire in the pattern *) - -> (V.name*E.name*V.name) (* wire segment in the target graph *) - -> bool -end - -functor PermissiveHooks(MatchState : OGRAPH_MATCH_STATE) - : PROGRESSIVE_MATCH_SEARCH_HOOKS = -struct - type Context = unit - structure MatchState = MatchState - fun p_vertex_may_be_completed _ _ _ = true - fun vertex_match_is_allowed _ _ _ _ = true - fun bare_wire_match_is_allowed _ _ _ _ = true -end - -signature PROGRESSIVE_MATCH_SEARCH = -sig - structure Log : LOG - - structure MatchState : OGRAPH_MATCH_STATE - structure Hooks : PROGRESSIVE_MATCH_SEARCH_HOOKS - - val match_new_nv : Hooks.Context - -> (V.name * V.name) - -> MatchState.T - -> MatchState.T option - - (* Match all queued unmatched circles and node vertices, and extend the - * matchings of any scheduled partially-matched node vertices as much as - * possible using queued unmatched wire vertices. - * - * Returns a lazy list of updated match states. - * - * Note that this will _not_ match bare wires. - *) - val match_pending : Hooks.Context - -> MatchState.T - -> MatchState.T Seq.seq - - (* Match all the bare wires in u_bare_wires, expanding edge points where - * necessary. 
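 *
 * A rough usage sketch (hypothetical driver code, not part of this
 * signature): schedule everything, run the main search, then finish the
 * bare wires and keep only the total matches.
 *
 *   val ms0     = MatchState.init_and_schedule_all pat tgt
 *   val partial = match_pending ctx ms0
 *   val full    = Seq.maps (match_bare_wires ctx) partial
 *   val total   = Seq.filter MatchState.is_total full
 *
 * (Seq.filter and the variable names are assumptions; only Seq.maps,
 * Seq.cons, Seq.append, Seq.single and Seq.empty appear in the code below.)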
- *) - val match_bare_wires : Hooks.Context - -> MatchState.T - -> MatchState.T Seq.seq - - structure Sharing : PROGRESSIVE_MATCH_SEARCH_SHARING - sharing Sharing.MatchState = MatchState.Sharing - sharing Hooks.MatchState.Sharing = MatchState.Sharing -end - -functor ProgressiveMatchSearch( - Hooks : PROGRESSIVE_MATCH_SEARCH_HOOKS -) : PROGRESSIVE_MATCH_SEARCH -= struct - structure Log : LOG = Log(val init_level = 0) - - structure Hooks = Hooks - structure MatchState = Hooks.MatchState - structure Graph = MatchState.Graph - - fun log_p pretty_f lvl name = - Log.logf lvl (fn g => Pretty.string_of - (Pretty.chunks [Pretty.str (name^":"),(pretty_f g)])) - val log_graph = log_p Graph.pretty - val log_vset = log_p V.NSet.pretty - val log_vmap = log_p VVInj.pretty - val log_emap = log_p EEInj.pretty - - fun ind 0 str = str - | ind n str = ind (n-1) (" "^str) - - fun add_edge_to_match lvl (ep,vp) (et,vt) ms = let - val _ = Log.logf 2 (fn () => ind lvl - ">> Edge "^(E.string_of_name ep)^" -> "^(E.string_of_name et)) () - val _ = Log.logf 2 (fn () => ind lvl - ">> Node "^(V.string_of_name vp)^" -> "^(V.string_of_name vt)) () - in - MatchState.add_edge_to_match (ep,vp) (et,vt) ms - end - fun add_circle_to_match lvl (cp,ct) ms = let - val _ = Log.logf 2 (fn () => ind lvl - ">> Circle "^(V.string_of_name cp)^" -> "^(V.string_of_name ct)) () - in - MatchState.add_circle_to_match (cp,ct) ms - end - fun add_vertex_to_match lvl (vp,vt) ms = let - val _ = Log.logf 2 (fn () => ind lvl - ">> Node "^(V.string_of_name vp)^" -> "^(V.string_of_name vt)) () - in - MatchState.add_vertex_to_match (vp,vt) ms - end - fun log_match_fail_v1 lvl msg v = - Log.log 2 (ind lvl "!! "^msg^" ("^(V.string_of_name v)^")") - fun log_match_fail_v2 lvl msg v1 v2 = - Log.log 2 (ind lvl "!! "^msg^" ("^(V.string_of_name v1)^" -> "^(V.string_of_name v2)^")") - fun log_match_fail_e2 lvl msg e1 e2 = - Log.log 2 (ind lvl "!! "^msg^" ("^(E.string_of_name e1)^" -> "^(E.string_of_name e2)^")") - (* this version is for things that failed straight away *) - (* (ie: when branches aren't killed, but are never created) *) - fun log_match_fail_e2' lvl msg e1 e2 = - Log.log 4 (ind lvl "!! 
"^msg^" ("^(E.string_of_name e1)^" -> "^(E.string_of_name e2)^")") - - (*****************************************) - (************ CIRCLE MATCHING ************) - (*****************************************) - - - fun u_circles_in_tgt ms = let - val tgt = MatchState.get_tgt ms - fun is_circle v = - Graph.is_wire_vertex tgt v andalso - (let val (ins,outs) = (Graph.get_in_edges tgt v, Graph.get_out_edges tgt v) - in - E.NSet.is_singleton ins andalso - E.NSet.is_singleton outs andalso - E.NSet.eq ins outs end); - fun is_matched v = VVInj.cod_contains (MatchState.get_vmap ms) v; - fun add_if_circle v = - if is_circle v andalso not (is_matched v) - then V.NSet.add v - else I; - in - V.NSet.fold add_if_circle (MatchState.get_tgt_verts ms) V.NSet.empty - end; - - fun match_circles' lvl context ms = let - val _ = Log.log 2 (ind lvl "== Matching next circle") - in - case V.NSet.get_min (MatchState.get_u_circles ms) - of SOME cp => let - val (pat,tgt) = (MatchState.get_pat ms, MatchState.get_tgt ms) - val cpdata = Graph.get_vertex_data pat cp - val ep = (the o E.NSet.tryget_singleton) (Graph.get_in_edges pat cp) - val (epdir,epdata) = Graph.get_edge_dir_and_data pat ep - val subst = MatchState.get_match_psubst ms - val match_is_allowed = Hooks.vertex_match_is_allowed context ms cp - fun m_circ ct = let - val ctdata = Graph.get_vertex_data tgt ct - val et = (the o E.NSet.tryget_singleton) (Graph.get_in_edges tgt ct) - val _ = Log.logf 4 (fn () => ind lvl "Attempting to match circle ("^ - (V.string_of_name cp)^ ","^(E.string_of_name ep)^") against ("^ - (V.string_of_name ct)^","^(E.string_of_name et)^")") () - val (etdir,etdata) = Graph.get_edge_dir_and_data tgt et - in - case (epdir = etdir, - Option.composePartial - (Graph.match_vdata (cpdata, ctdata), - Graph.match_edata (epdata, etdata)) - subst) - of (true, SOME subst') => - if not (match_is_allowed ct) - then - (log_match_fail_e2' lvl "Match disallowed" ep et; - Seq.empty) - else - match_circles' (lvl+1) context - (ms |> MatchState.update_u_circles (V.NSet.delete cp) - |> MatchState.update_tgt_verts (V.NSet.delete ct) - |> add_circle_to_match lvl (cp,ct) - |> MatchState.set_match_psubst subst') - | (false, _) => - (log_match_fail_e2' lvl "Circle directedness mismatch" ep et; - Seq.empty) - | (_, NONE) => - (log_match_fail_e2' lvl "Circle edge data mismatch" ep et; - Seq.empty) - end - in Seq.maps m_circ (V.NSet.fold Seq.cons (u_circles_in_tgt ms) Seq.empty) - end - | NONE => (* no circles left to match, return match state *) - (Log.log 2 "== All circles matched"; Seq.single ms) - end - val match_circles = match_circles' 0 - - - (*****************************************) - (************* MAIN MATCHING *************) - (*****************************************) - - - (* Try to recursively add wire to matching, starting with the given head - * vertex and edge. Return NONE on failure. 
- * - * vp: already-matched vertex - * ep: unmatched edge incident to vp (other end must be in P, Uw or Un) - * vt: target of vp - * et: unmatched edge incident to vt - *) - fun tryadd_wire lvl context ms (vp,ep) (vt,et) = let - val _ = Log.logf 4 (fn () => ind lvl "Attempting to match ("^ - (V.string_of_name vp)^ ","^(E.string_of_name ep)^ - ") against ("^ - (V.string_of_name vt)^","^(E.string_of_name et)^")") () - val (pat, tgt) = (MatchState.get_pat ms, MatchState.get_tgt ms) - val e_subst_maybe = - let (* check edge compatibilty and update subst *) - val (dir_p,data_p) = Graph.get_edge_dir_and_data pat ep - val (dir_t,data_t) = Graph.get_edge_dir_and_data tgt et - val pat_inedge = V.name_eq (Graph.get_edge_target pat ep, vp) - val tgt_inedge = V.name_eq (Graph.get_edge_target tgt et, vt) - in - if dir_p = dir_t andalso ( (* directedness must match and... *) - dir_p = Undirected orelse (* undirected or... *) - (pat_inedge = tgt_inedge)) (* directions match *) - then Graph.match_edata (data_p, data_t) (MatchState.get_match_psubst ms) - else NONE - end - in - case e_subst_maybe - of NONE => (log_match_fail_e2 lvl "Edge data does not match" ep et; NONE) - | SOME e_subst => - let - val new_vp = Graph.edge_get_other_vertex pat ep vp - val new_vt = Graph.edge_get_other_vertex tgt et vt - val vmap = MatchState.get_vmap ms - in - if V.NSet.contains (MatchState.get_p_nodeverts ms) new_vp - then - if VVInj.is_mapped vmap (new_vp, new_vt) - then - SOME (ms |> add_edge_to_match lvl (ep,new_vp) (et,new_vt) - |> MatchState.update_ps_nodeverts (V.NSet.add new_vp) - |> MatchState.set_match_psubst e_subst) - else - (log_match_fail_v2 lvl "Match conflict" new_vp new_vt; NONE) - else - if not (V.NSet.contains (MatchState.get_tgt_verts ms) new_vt) - then - (log_match_fail_v2 lvl "Vertex not in T" new_vp new_vt; NONE) - else - case Graph.match_vdata (Graph.get_vertex_data pat new_vp, - Graph.get_vertex_data tgt new_vt) - e_subst - of NONE => - (log_match_fail_v2 lvl "Vertex data does not match" - new_vp new_vt; - NONE) - | SOME v_subst => - (* since data matched, new_vp is a node- or wire-vertex iff new_vt is *) - if Graph.is_wire_vertex pat new_vp - then - case (Graph.wv_get_other_edge pat new_vp ep, Graph.wv_get_other_edge tgt new_vt et) - of (SOME new_ep, SOME new_et) => - if not (Hooks.vertex_match_is_allowed context ms - new_vp new_vt) - then - (log_match_fail_v2 lvl "Match disallowed" new_vp new_vt; NONE) - else - (* pat and tgt are internal. add edge to match and recurse *) - tryadd_wire lvl context - (ms |> add_edge_to_match lvl (ep,new_vp) (et,new_vt) - |> MatchState.update_tgt_verts (V.NSet.delete new_vt) - |> MatchState.update_u_wireverts (V.NSet.delete new_vp) - |> MatchState.set_match_psubst v_subst) - (new_vp,new_ep) (new_vt,new_et) - | (SOME _, NONE) => - (* pat is internal, tgt is bdry. fail *) - (log_match_fail_v2 lvl "Tgt wire too short" new_vp new_vt; NONE) - | (NONE, _) => - if not (Hooks.vertex_match_is_allowed context ms - new_vp new_vt) - then - (log_match_fail_v2 lvl "Match disallowed" new_vp new_vt; NONE) - else - (* pat is boundary. 
add edge to match and finish *) - SOME (ms |> add_edge_to_match lvl (ep,new_vp) (et,new_vt) - |> MatchState.update_tgt_verts (V.NSet.delete new_vt) - |> MatchState.update_u_wireverts (V.NSet.delete new_vp) - |> MatchState.set_match_psubst v_subst) - else - if V.NSet.contains (MatchState.get_u_nodeverts ms) new_vp - then - if not (Hooks.vertex_match_is_allowed context ms - new_vp new_vt) - then - (log_match_fail_v2 lvl "Match disallowed" new_vp new_vt; NONE) - else - SOME (ms |> add_edge_to_match lvl (ep,new_vp) (et,new_vt) - |> MatchState.update_tgt_verts (V.NSet.delete new_vt) - |> MatchState.update_u_nodeverts (V.NSet.delete new_vp) - |> MatchState.update_p_nodeverts (V.NSet.add new_vp) - |> MatchState.update_ps_nodeverts (V.NSet.add new_vp) - |> MatchState.set_match_psubst v_subst) - else - (* if this node-vert isn't scheduled for matching, leave its wire alone *) - (* NB: this shouldn't happen if Uw, Un set up correctly? *) - (log_match_fail_v1 lvl "Vertex not in Un" new_vp; NONE) - end - end - - fun match_new_nv' lvl context (nv, tnv) match_state = - (* note this hook is now being called *before* scheduling the new node, - * but I don't think this matters. *) - if Hooks.vertex_match_is_allowed context match_state nv tnv - then - case Graph.match_vdata (Graph.get_vertex_data (MatchState.get_pat match_state) nv, - Graph.get_vertex_data (MatchState.get_tgt match_state) tnv) - (MatchState.get_match_psubst match_state) - of SOME subst => SOME ( - match_state - |> MatchState.update_u_nodeverts (V.NSet.delete nv) - |> MatchState.update_p_nodeverts (V.NSet.add nv) - |> MatchState.update_ps_nodeverts (V.NSet.add nv) - |> MatchState.set_match_psubst subst - |> add_vertex_to_match lvl (nv, tnv) - |> MatchState.update_tgt_verts (V.NSet.delete tnv) - ) - | NONE => NONE - else NONE - - val match_new_nv = match_new_nv' 0 - - fun match_pending context match_state = let - val _ = Log.log 2 "== Starting inner match" - val _ = log_graph 4 "Pattern" (MatchState.get_pat match_state) - val _ = log_graph 4 "Target" (MatchState.get_tgt match_state) - val p_vertex_may_be_completed = Hooks.p_vertex_may_be_completed context - (*val vertex_match_is_allowed = Hooks.vertex_match_is_allowed context*) - - (* main matching loop *) - fun match_main lvl ms = let - val _ = Log.logf 3 (fn () => ind lvl "== Entering main loop") () - val _ = log_vset 4 "Un" (MatchState.get_u_nodeverts ms) - val _ = log_vset 4 "Uw" (MatchState.get_u_wireverts ms) - val _ = log_vset 4 "P" (MatchState.get_p_nodeverts ms) - val _ = log_vset 4 "Ps" (MatchState.get_ps_nodeverts ms) - val _ = log_vset 4 "T" (MatchState.get_tgt_verts ms) - val _ = log_vmap 4 "Mv" (MatchState.get_vmap ms) - val _ = log_emap 4 "Me" (MatchState.get_emap ms) - - (* match all scheduled wires in the neighbourhood of the given node-vertex *) - fun match_nhd lvl ms nvp = let - val _ = Log.logf 3 (fn () => ind lvl - "== Matching nhd of "^(V.string_of_name nvp)) () - - val nvt = VVInj.get (MatchState.get_vmap ms) nvp - val (pat, tgt) = (MatchState.get_pat ms, MatchState.get_tgt ms) - - (* We pick one wire-vertex in Uw adjacent to nvp, and branch for each - * wire-vertex in T adjacent to nvt. On each branch, we try to match - * the entire wire, and recurse back to match_main if successful. - * - * Since we care about edge-data, we actually grab the incident edges, - * rather than just the vertex at the other end. 
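 *
 * (Hypothetical illustration: if nvp has two parallel edges to the same
 * neighbouring vertex carrying different edge data, branching on the
 * incident edges keeps the two candidate wires distinct, which branching
 * on the neighbouring vertex alone would not.)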
- *) - - fun edge_end_in_Uw e = - V.NSet.contains (MatchState.get_u_wireverts ms) - (Graph.edge_get_other_vertex pat e nvp) - fun edge_end_in_T e = - V.NSet.contains (MatchState.get_tgt_verts ms) - (Graph.edge_get_other_vertex tgt e nvt) - - fun add_branch_if_wire_matches ep et = - case tryadd_wire lvl context ms (nvp,ep) (nvt,et) - of SOME ms' => Seq.append (match_nhd (lvl+1) ms' nvp) - | NONE => I - - fun remove_nvp_from_P_if_complete ms = - let - val edge_matched = EEInj.cod_contains (MatchState.get_emap ms) - in - if E.NSet.forall edge_matched (Graph.get_adj_edges tgt nvt) - then MatchState.update_p_nodeverts (V.NSet.delete nvp) ms - else ms - end - in - case E.NSet.get_exists edge_end_in_Uw (Graph.get_adj_edges pat nvp) - of SOME ep => - let - val candidate_tgt_edges = - E.NSet.filter edge_end_in_T - (Graph.get_adj_edges tgt nvt) - in - E.NSet.fold (add_branch_if_wire_matches ep) - candidate_tgt_edges - Seq.empty - end - | NONE => match_main (lvl+1) (remove_nvp_from_P_if_complete ms) - end (* match_nhd *) - - - (* continue matching from the given partially-matched node-vertex *) - fun continue_matching_from nvp = - if not (p_vertex_may_be_completed ms nvp) - then (log_match_fail_v1 lvl "Doomed" nvp; Seq.empty) - else match_nhd lvl - (ms |> MatchState.update_ps_nodeverts (V.NSet.delete nvp)) - nvp - - (* Start matching a graph component - * - * nv is removed from Un and, for every matching vertex tnv in T, - * a branch is created matching nv against tnv - *) - fun match_and_schedule_new nv = let - val _ = Log.logf 3 (fn () => ind lvl - "== Next unmatched: "^(V.string_of_name nv)) () - fun match_nv_and_continue tnv = - case ms |> match_new_nv' lvl context (nv, tnv) - of SOME ms' => match_main (lvl+1) ms' - | NONE => Seq.empty - in - Seq.maps match_nv_and_continue - (V.NSet.fold Seq.cons (MatchState.get_tgt_verts ms) Seq.empty) - end (* match_and_schedule_new *) - in - (* process next node-vertex in Ps *) - case V.NSet.get_min (MatchState.get_ps_nodeverts ms) - of SOME next_v => continue_matching_from next_v - | NONE => - ( - (* Ps empty; look in Un *) - case V.NSet.get_min (MatchState.get_u_nodeverts ms) - of SOME next_v => match_and_schedule_new next_v - | NONE => - (Log.log 2 (ind lvl "==? All scheduled vertices matched"); - Seq.single ms) - ) - end (* match_main *) - in - (* match all circles, then enter main matching routine *) - Seq.maps (match_main 0) (match_circles context match_state) - end - - - - (*****************************************) - (********** BARE WIRE MATCHING ***********) - (*****************************************) - - (* Splits an edge into two or three edges - * - * Input: an edge not in cod emap, whose source and target are in either T or - * cod vmap, and at least one of them is a wire-vertex - * - * Result: ((s,e,t),g), where g is the updated graph, e is one of the edges - * resulting from splitting the input edge, s and t are the source and target - * of e, respectively, and are in T (and not in cod vmap), and the "other - * edges" of s and t satisfy the input requirements of this function. 
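 *
 * Rough picture, using the names from the code below: splitting a
 * non-circle edge
 *
 *   s ---e---> t
 *
 * twice gives
 *
 *   s --e1--> v1' --e2--> v2' --e3--> t
 *
 * and (v1', e2, v2') is returned as the segment to match against; when e
 * is a circle (s = t), a single split suffices and (v1', e2, t) is
 * returned.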
- *) - fun grow_tgt_edge lvl ms e = let - val tgt = MatchState.get_tgt ms - val (s,t) = (Graph.get_edge_source tgt e, Graph.get_edge_target tgt e) - (* hackish way to avoid renaming issues: make sure new vnames - * are not in pattern *) - val patvs = Graph.get_vertices (MatchState.get_pat ms) - val avoidvs = V.NSet.union_merge patvs (Graph.get_vertices tgt); - val ((e1,v1,e2),tgt') = Graph.split_edge e tgt - val (v1',avoidvs') = V.NSet.add_new v1 avoidvs - val tgt' = if V.name_eq (v1,v1') then tgt' - else Graph.rename_vertex v1 v1' tgt' - in - if V.name_eq (s,t) then - (* circles get special treatment; s/t cannot be in cod vmap if e is not in - * cod emap, and so it must be in T, and we only need to split once to get - * the result we want - *) - (Log.logf 2 (fn () => ind lvl - ">> Split edge "^(E.string_of_name e)^" into ("^ - (E.string_of_name e1)^","^(V.string_of_name v1')^","^ - (E.string_of_name e2)^")") (); - log_graph 4 "New tgt" tgt'; - ((v1',e2,t), - ms |> MatchState.set_tgt tgt' - |> MatchState.update_tgt_verts (V.NSet.add v1'))) - else - let - val ((e2,v2,e3),tgt') = Graph.split_edge e2 tgt' - val (v2',_) = V.NSet.add_new v2 avoidvs' - val tgt' = if V.name_eq (v2,v2') then tgt' - else Graph.rename_vertex v2 v2' tgt' - val _ = Log.logf 2 (fn () => ind lvl - ">> Split edge "^(E.string_of_name e)^" into ("^ - (E.string_of_name e1)^","^(V.string_of_name v1')^","^ - (E.string_of_name e2)^","^(V.string_of_name v2')^","^ - (E.string_of_name e3)^")") () - val _ = log_graph 4 "New tgt" tgt'; - in - ((v1',e2,v2'), - ms |> MatchState.set_tgt tgt' - |> MatchState.update_tgt_verts (V.NSet.add v1') - |> MatchState.update_tgt_verts (V.NSet.add v2')) - end - end - - - (* match the given set of bare wires, expanding wire-vertices where necessary *) - fun match_bare_wires' lvl context ms = let - val _ = Log.log 2 (ind lvl "== Starting bare wire match") - in - if E.NSet.is_empty (MatchState.get_u_bare_wires ms) then Seq.single ms - else let - val bare_wire_match_is_allowed = Hooks.bare_wire_match_is_allowed context - val (pat, tgt) = (MatchState.get_pat ms, MatchState.get_tgt ms) - val tgt_verts = MatchState.get_tgt_verts ms - val (vmap, emap) = (MatchState.get_vmap ms, MatchState.get_emap ms) - (* for a normalised graph, this will always be the source end *) - val bw = (the o E.NSet.get_min) (MatchState.get_u_bare_wires ms) - val ((bw_dir, bw_data), (bw_s, bw_t)) = Graph.get_edge_info pat bw - val (bw_sdata, bw_tdata) = (Graph.get_vertex_data pat bw_s, Graph.get_vertex_data pat bw_t) - - (* candidate edges for matching are: - - the unique edge in a circle, bare wire, or boundary wire - - the middle edge in an interior wire *) - fun is_candidate_for_bw e = let - val ((d,_),(s,t)) = Graph.get_edge_info tgt e - fun vertex_matchable v = - (V.NSet.contains tgt_verts v) orelse (VVInj.cod_contains vmap v) - in - (bare_wire_match_is_allowed ms (bw_s,bw,bw_t) (s,e,t)) andalso - not (EEInj.cod_contains (MatchState.get_emap ms) e) andalso - d = bw_dir andalso vertex_matchable s andalso vertex_matchable t andalso - (Graph.is_boundary tgt s orelse Graph.is_boundary tgt t orelse (* bare and boundary wires *) - (Graph.is_wire_vertex tgt s andalso Graph.is_wire_vertex tgt t)) (* circles + middle edges *) - end - - (* sequence of all the edges of the right kind with matching dir_or_undir *) - val edges = E.NSet.fold_rev - (fn e => if is_candidate_for_bw e then Seq.cons e else I) - (Graph.get_edges tgt) Seq.empty - fun match_bw e = let - (* grow edge and grab relevant data *) - val ((s,mid_e,t), ms') = 
grow_tgt_edge lvl ms e - val tgt' = MatchState.get_tgt ms' - val et_data = Graph.get_edge_data tgt' mid_e - val (tgt_sdata,tgt_tdata) = (Graph.get_vertex_data tgt' s, Graph.get_vertex_data tgt' t) - - (* match data for edge, source, and target *) - val fmatch_d = Option.composePartial - (Graph.match_edata (bw_data, et_data), - Option.composePartial - (Graph.match_vdata (bw_sdata, tgt_sdata), - Graph.match_vdata (bw_tdata, tgt_tdata))); - fun add_bw new_subst = let - val ms' = ms' |> MatchState.set_match_psubst new_subst - |> MatchState.update_tgt_verts (V.NSet.delete s) - |> MatchState.update_tgt_verts (V.NSet.delete t) - |> MatchState.update_u_bare_wires (E.NSet.delete bw) - in (fn (es,et) => - ms' |> add_vertex_to_match lvl (bw_s,es) - |> add_edge_to_match lvl (bw,bw_t) (mid_e,et) - ) - end - in - case fmatch_d (MatchState.get_match_psubst ms') - of SOME new_subst => - let val add_bw' = add_bw new_subst in - case bw_dir - of Directed => match_bare_wires' (lvl+1) context (add_bw' (s,t)) - | Undirected => - Seq.append (match_bare_wires' (lvl+1) context (add_bw' (s,t))) - (match_bare_wires' (lvl+1) context (add_bw' (t,s))) - end - | NONE => Seq.empty - end - in Seq.maps match_bw edges - end - end - val match_bare_wires = match_bare_wires' 0 - - structure Sharing = - struct - structure Graph = Graph.Sharing - structure MatchState = MatchState.Sharing - end -end diff --git a/core/matching/test/test-bang-graph-homeomorphism-search.ML b/core/matching/test/test-bang-graph-homeomorphism-search.ML deleted file mode 100644 index 886901b5..00000000 --- a/core/matching/test/test-bang-graph-homeomorphism-search.ML +++ /dev/null @@ -1,59 +0,0 @@ -local - structure Tools = Test_Bang_Graph_Tools(Test_Bang_Graph) - open Tools - structure GHomeo = BangGraphHomeomorphismSearcher(Test_Bang_Graph) - - val g1 = G.empty |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_undir_eunit1 "e1" "v1" "v2" - |> add_undir_eunit1 "e2" "v1" "v2" - - val g2 = G.empty |> add_vunit2 "v1" - |> add_vunit1 "v2" - |> add_undir_eunit1 "e1" "v2" "v1" - |> add_undir_eunit1 "e2" "v2" "v1" - - val g3 = g1 |> add_bbox "b1" [] - |> add_bbox "b2" ["v1"] - - val g4 = g2 |> add_bbox "b0" [] - |> add_bbox "b1" ["v2"] - - fun seq_count sq = - (case Seq.pull sq of - NONE => 0 - | SOME (_,sq') => 1 + (seq_count sq')) - fun assert_iso_count 0 iso = - (case Seq.pull iso of - NONE => () - | SOME (_,iso') => raise ERROR ("Too many isomorphisms found (" ^ - (Int.toString (seq_count iso')) ^ " too many")) - | assert_iso_count n iso = - (case Seq.pull iso of - NONE => raise ERROR ("Not enough isomorphisms found (needed " ^ - (Int.toString n) ^ " more)") - | SOME (_,iso') => assert_iso_count (n-1) iso') - - val _ = Testing.test "GHomeo.is_homeomorphic [self; no !-boxes]" - (fn () => if GHomeo.is_homeomorphic g1 g1 then () else raise ERROR ("Should be iso")) () - val _ = Testing.test "GHomeo.find_homeomorphisms [self; no !-boxes]" - (fn () => assert_iso_count 2 (GHomeo.find_homeomorphisms g1 g1)) () - - val _ = Testing.test "GHomeo.is_homeomorphic [g1,g2: no !-boxes]" - (fn () => if GHomeo.is_homeomorphic g1 g2 then () else raise ERROR ("Should be iso")) () - val _ = Testing.test "GHomeo.find_homeomorphisms [g1,g2: no !-boxes]" - (fn () => assert_iso_count 2 (GHomeo.find_homeomorphisms g1 g2)) () - - val _ = Testing.test "GHomeo.is_homeomorphic [self; disjoint !-boxes]" - (fn () => if GHomeo.is_homeomorphic g3 g3 then () else raise ERROR ("Should be iso")) () - val _ = Testing.test "GHomeo.find_homeomorphisms [self; disjoint no !-boxes]" - (fn () => 
assert_iso_count 2 (GHomeo.find_homeomorphisms g3 g3)) () - - val _ = Testing.test "GHomeo.is_homeomorphic [g3,g4: disjoint !-boxes]" - (fn () => if GHomeo.is_homeomorphic g3 g4 then () else raise ERROR ("Should be iso")) () - val _ = Testing.test "GHomeo.find_homeomorphisms [g3,g4: disjoint !-boxes]" - (fn () => assert_iso_count 2 (GHomeo.find_homeomorphisms g3 g4)) () -in - val _ = Testing.assert_no_failed_tests() -end - diff --git a/core/matching/test/test-bg-to-bg-matching.ML b/core/matching/test/test-bg-to-bg-matching.ML deleted file mode 100644 index 22da1708..00000000 --- a/core/matching/test/test-bg-to-bg-matching.ML +++ /dev/null @@ -1,106 +0,0 @@ -(* - * Tests matching of !-graphs against !-graphs - *) -functor BG_To_BG_Matching_Tests( - Tester : BG_MATCH_TESTER -) = -struct - structure GTools = Test_Bang_Graph_Tools(Tester.G) - open Tester - open GTools - - (* - * Disjoint !-boxes - *) - val circles_pat = G.empty - (* circle in a !-box *) - |> add_wv "cv" - |> add_dir_eunit1 "ce" "cv" "cv" - |> add_bbox "B1" ["cv"] - - val circles_pat_exp = circles_pat - |> add_wv "c1" - |> add_dir_eunit1 "ce1" "c1" "c1" - - val circles_pat_copy = circles_pat_exp - |> add_bbox "B2" ["c1"] - - val cpy_rhs = G.empty - |> add_vunit1 "v" - |> add_wv "w" - |> add_dir_eunit1 "e" "v" "w" - |> add_bbox "b" ["v","w"] - - val cpy_rhs_exp = cpy_rhs - |> add_vunit1 "v1" - |> add_wv "w1" - |> add_dir_eunit1 "e1" "v1" "w1" - - val cpy_rhs_copy = cpy_rhs_exp - |> add_bbox "b1" ["v1","w1"] - - val cpy_lhs = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_wv "w1" - |> add_wv "w2" - |> add_dir_eunit1 "e1" "v1" "w1" - |> add_dir_eunit1 "e2" "w1" "v2" - |> add_dir_eunit1 "e3" "v2" "w2" - |> add_bbox "b1" ["w2"] - - val cpy_lhs_exp = cpy_lhs - |> add_wv "w3" - |> add_dir_eunit1 "e4" "v2" "w3" - - val cpy_lhs_copy = cpy_lhs_exp - |> add_bbox "b2" ["w3"] - - val _ = Testing.test "M.match - circles pattern --> circles pattern" - (find_n_correct_matches 2) (circles_pat,circles_pat) - val _ = Testing.test "M.match - circles pattern --> EXP(circles pattern)" - (find_n_correct_matches 4) (circles_pat,circles_pat_exp) - val _ = Testing.test "M.match - circles pattern --> COPY(circles pattern)" - (find_n_correct_matches 5) (circles_pat,circles_pat_copy) - val _ = Testing.test "M.match - copy law RHS --> copy law RHS" - (find_n_correct_matches 2) (cpy_rhs,cpy_rhs) - val _ = Testing.test "M.match - copy law RHS --> EXP(copy law RHS)" - (find_n_correct_matches 4) (cpy_rhs,cpy_rhs_exp) - val _ = Testing.test "M.match - copy law RHS --> COPY(copy law RHS)" - (find_n_correct_matches 5) (cpy_rhs,cpy_rhs_copy) - val _ = Testing.test "M.match - copy law LHS --> copy law LHS" - (find_n_correct_matches 1) (cpy_lhs,cpy_lhs) - val _ = Testing.test "M.match - copy law LHS --> EXP(copy law LHS)" - (find_n_correct_matches 1) (cpy_lhs,cpy_lhs_exp) - val _ = Testing.test "M.match - copy law LHS --> COPY(copy law LHS)" - (find_n_correct_matches 2) (cpy_lhs,cpy_lhs_copy) - - (* - * Nested/overlapping !-boxes - *) - - val balanced_tree_pat = G.empty - |> add_vunit1 "l0" - |> add_vunit2 "l1-a" - |> add_vunit2 "l2-a" - |> add_dir_eunit1 "e1" "l0" "l1-a" - |> add_dir_eunit1 "e2" "l1-a" "l2-a" - |> add_bbox "B0" ["l1-a","l2-a"] - |> add_bbox "B1" ["l2-a"] - - val arbitrary_tree_pat = balanced_tree_pat - |> G.add_child_to_bbox (B.mk "B0") (B.mk "B1") - - val _ = Testing.test "M.match - balanced tree --> balanced tree" - (find_n_correct_matches 1) (balanced_tree_pat,balanced_tree_pat) - val _ = Testing.test "M.match - arbitrary tree --> 
arbitrary tree" - (find_n_correct_matches 1) (arbitrary_tree_pat,arbitrary_tree_pat) - val _ = Testing.test "M.match - arbitrary tree --> balanced tree" - (find_n_correct_matches 0) (arbitrary_tree_pat,balanced_tree_pat) - val _ = Testing.test "M.match - balanced tree --> arbitrary tree" - (find_n_correct_matches 0) (balanced_tree_pat,arbitrary_tree_pat) - - val _ = Testing.assert_no_failed_tests() -end - - diff --git a/core/matching/test/test-bg-to-sg-matching.ML b/core/matching/test/test-bg-to-sg-matching.ML deleted file mode 100644 index b0e453be..00000000 --- a/core/matching/test/test-bg-to-sg-matching.ML +++ /dev/null @@ -1,203 +0,0 @@ -(* - * Tests matching of !-graphs against string graphs - *) -functor BG_To_SG_Matching_Tests( - Tester : BG_MATCH_TESTER -) = -struct - structure GTools = Test_Bang_Graph_Tools(Tester.G) - open Tester - open GTools - - (* - * Disjoint !-boxes - *) - val circles_pat = G.empty - (* circle in a !-box *) - |> add_wv "cv" - |> add_dir_eunit1 "ce" "cv" "cv" - |> add_bbox "B1" ["cv"]; - - val one_circle = G.empty - |> add_wv "c1" - |> add_dir_eunit1 "ce1" "c1" "c1"; - val two_circles = one_circle - |> add_wv "c2" - |> add_dir_eunit1 "ce2" "c2" "c2"; - val three_circles = two_circles - |> add_wv "c3" - |> add_dir_eunit1 "ce3" "c3" "c3"; - - val circle_wire_pat = G.empty - (* circle *) - |> add_wv "cv" - |> add_dir_eunit1 "ce" "cv" "cv" - (* bare wire *) - |> add_wv "in1" - |> add_wv "out1" - |> add_dir_eunit1 "bw" "in1" "out1" - (* in a !-box *) - |> add_bbox "B1" ["cv","in1","out1"]; - - val v_any_ins_outs_pat = G.empty - (* RHS of spider pattern *) - |> add_wv "in1" - |> add_wv "out1" - |> add_vunit1 "v1" - |> add_dir_eunit1 "e1" "in1" "v1" - |> add_dir_eunit1 "e2" "v1" "out1" - |> add_bbox "B1" ["in1"] - |> add_bbox "B2" ["out1"]; - - val single_v = G.empty - |> add_vunit1 "v1"; - - val v_out = single_v - |> add_wv "out1" - |> add_dir_eunit1 "o1" "v1" "out1"; - - val in_in_v_out = v_out - |> add_wv "in1" - |> add_wv "in2" - |> add_dir_eunit1 "i1" "in1" "v1" - |> add_dir_eunit1 "i2" "in2" "v1"; - - val v_to_v = single_v - |> add_vunit1 "v2" - |> add_dir_eunit1 "e1" "v1" "v2"; - - val loop = single_v - |> add_dir_eunit1 "e1" "v1" "v1"; - - val star_pat = G.empty - |> add_vunit1 "centre" - |> add_vunit2 "spoke" - |> add_undir_eunit1 "e" "centre" "spoke" - |> add_bbox "box" ["spoke"]; - - val lonely_spoke = G.empty - |> add_vunit2 "s"; - - val star_zero = G.empty - |> add_vunit1 "c"; - - val star_one = star_zero - |> add_vunit2 "s1" - |> add_undir_eunit1 "e1" "c" "s1"; - - val star_two = star_one - |> add_vunit2 "s2" - |> add_undir_eunit1 "e2" "c" "s2"; - - val star_three = star_two - |> add_vunit2 "s3" - |> add_undir_eunit1 "e3" "c" "s3"; - - (*val _ = Testing.test "M.match - circles pattern --> empty" - (find_n_correct_matches 1) (circles_pat,G.empty); - val _ = Testing.test "M.match - circles pattern --> one circle" - (find_n_correct_matches 2) (circles_pat,one_circle); - val _ = Testing.test "M.match - circles pattern --> two circles" - (find_n_correct_matches 5) (circles_pat,two_circles); - val _ = Testing.test "M.match - circles pattern --> three circles" - (find_n_correct_matches 16) (circles_pat,three_circles); - val _ = Testing.test "M.match - circles+wire pattern --> one circle" - (find_n_correct_matches 1) (circle_wire_pat,one_circle); - val _ = Testing.test "M.match - circles+wire pattern --> two circles" - (find_n_correct_matches 3) (circle_wire_pat,two_circles); - val _ = Testing.test "M.match - v any ins or outs pattern --> one circle" - 
(find_n_correct_matches 0) (v_any_ins_outs_pat,one_circle); - val _ = Testing.test "M.match - v any ins or outs pattern --> single vertex" - (find_n_correct_matches 1) (v_any_ins_outs_pat,single_v); - val _ = Testing.test "M.match - v any ins or outs pattern --> v->1" - (find_n_correct_matches 1) (v_any_ins_outs_pat,v_out); - val _ = Testing.test "M.match - v any ins or outs pattern --> 2->v->1" - (find_n_correct_matches 2) (v_any_ins_outs_pat,in_in_v_out); - val _ = Testing.test "M.match - v any ins or outs pattern --> v->v" - (find_n_correct_matches 2) (v_any_ins_outs_pat,v_to_v); - val _ = Testing.test "M.match - v any ins or outs pattern --> v with loop" - (find_n_correct_matches 1) (v_any_ins_outs_pat,loop); - val _ = Testing.test "M.match - star pattern --> lonely spoke" - (find_n_correct_matches 0) (star_pat,lonely_spoke); - val _ = Testing.test "M.match - star pattern --> zero-spoke star" - (find_n_correct_matches 1) (star_pat,star_zero); - val _ = Testing.test "M.match - star pattern --> one-spoke star" - (find_n_correct_matches 1) (star_pat,star_one); - val _ = Testing.test "M.match - star pattern --> two-spoke star" - (find_n_correct_matches 2) (star_pat,star_two); - val _ = Testing.test "M.match - star pattern --> three-spoke star" - (find_n_correct_matches 6) (star_pat,star_three);*) - - - (* - * Expansion on self-loops - *) - (* TODO *) - - - (* - * Nested/overlapping !-boxes - *) - - val balanced_tree_pat = G.empty - |> add_vunit1 "l0" - |> add_vunit2 "l1-a" - |> add_vunit2 "l2-a" - |> add_dir_eunit1 "e1" "l0" "l1-a" - |> add_dir_eunit1 "e2" "l1-a" "l2-a" - |> add_bbox "B0" ["l1-a","l2-a"] - |> add_bbox "B1" ["l2-a"]; - - val arbitrary_tree_pat = balanced_tree_pat - |> G.add_child_to_bbox (B.mk "B0") (B.mk "B1"); - - val min_tree = G.empty - |> add_vunit1 "l0"; - - val tree_1 = min_tree - |> add_vunit2 "l1-a" - |> add_dir_eunit1 "e1" "l0" "l1-a"; - - val tree_2 = tree_1 - |> add_vunit2 "l1-b" - |> add_dir_eunit1 "e2" "l0" "l1-b"; - - val tree_3 = tree_2 - |> add_vunit2 "l1-c" - |> add_dir_eunit1 "e3" "l0" "l1-c"; - - val tree_1_1 = tree_1 - |> add_vunit2 "l2-a" - |> add_dir_eunit1 "e11" "l1-a" "l2-a"; - - val tree_1_2 = tree_1_1 - |> add_vunit2 "l2-b" - |> add_dir_eunit1 "e12" "l1-a" "l2-b"; - - val tree_2_1 = tree_2 - |> add_vunit2 "l2-a" - |> add_dir_eunit1 "e21" "l1-b" "l2-a"; - - val tree_2_1_1 = tree_2_1 - |> add_vunit2 "l2-b" - |> add_dir_eunit1 "e22" "l1-b" "l2-b"; - - (*val _ = Testing.test "M.match - balanced tree --> tree {}" - ((assert_n_wild_bboxes 1) o List.hd o (find_n_correct_matches 1)) - (balanced_tree_pat,min_tree); - val _ = Testing.test "M.match - arbitrary tree --> tree {}" - (find_n_correct_matches 1) (arbitrary_tree_pat,min_tree); - val _ = Testing.test "M.match - balanced tree --> tree {1}" - (find_n_correct_matches 1) (balanced_tree_pat,tree_1); - val _ = Testing.test "M.match - arbitrary tree --> tree {1}" - (find_n_correct_matches 1) (arbitrary_tree_pat,tree_1); - val _ = Testing.test "M.match - balanced tree --> tree {2}" - (find_n_correct_matches 2) (balanced_tree_pat,tree_2); - val _ = Testing.test "M.match - arbitrary tree --> tree {2}" - (find_n_correct_matches 2) (arbitrary_tree_pat,tree_2);*) - - - val _ = Testing.assert_no_failed_tests(); -end; - - diff --git a/core/matching/test/test-concrete-matcher.ML b/core/matching/test/test-concrete-matcher.ML deleted file mode 100644 index 322b7ed8..00000000 --- a/core/matching/test/test-concrete-matcher.ML +++ /dev/null @@ -1,11 +0,0 @@ -structure Test_ConcreteMatcher = - 
ConcreteMatchSearch(Test_OGraph); - -(* set to 4 for really detailed messages *) -(* -Test_ConcreteMatcher.Log.level_ref := 2; -*) - -structure Concrete_Matcher_Tests = - SG_To_SG_Matching_Tests(Test_Match_Tools(Test_ConcreteMatcher)) - diff --git a/core/matching/test/test-greedy-matcher.ML b/core/matching/test/test-greedy-matcher.ML deleted file mode 100644 index 097bd025..00000000 --- a/core/matching/test/test-greedy-matcher.ML +++ /dev/null @@ -1,16 +0,0 @@ -structure Test_GreedyMatcher = - GreedyMatchSearch(Test_Bang_Graph); - -(* set to 4 for really detailed messages *) -(* -Test_GreedyMatcher.Log.level_ref := 2; -*) - -local - structure Greedy_Tester = Test_Bang_Match_Tools(Test_GreedyMatcher) -in - structure Greedy_Matcher_SG_Tests = SG_To_SG_Matching_Tests(Greedy_Tester) - structure Greedy_Matcher_BG_SG_Tests = BG_To_SG_Matching_Tests(Greedy_Tester) - structure Greedy_Matcher_BG_BG_Tests = BG_To_BG_Matching_Tests(Greedy_Tester) -end - diff --git a/core/matching/test/test-matching-setup.ML b/core/matching/test/test-matching-setup.ML deleted file mode 100644 index 81eaaa67..00000000 --- a/core/matching/test/test-matching-setup.ML +++ /dev/null @@ -1,140 +0,0 @@ -signature TEST_MATCH_SEARCH = MATCH_SEARCH - where type Graph.nvdata = Test_Graph_Data.nvdata - and type Graph.edata = Test_Graph_Data.edata - and type Graph.psubst = Test_Graph_Data.psubst - and type Graph.subst = Test_Graph_Data.subst - -signature TEST_BG_MATCH_SEARCH = BG_MATCH_SEARCH - where type Graph.nvdata = Test_Graph_Data.nvdata - and type Graph.edata = Test_Graph_Data.edata - and type Graph.psubst = Test_Graph_Data.psubst - and type Graph.subst = Test_Graph_Data.subst - and type Match.Graph.T = Test_Bang_Graph.T - - -signature MATCH_TESTER = -sig - structure G : TEST_OGRAPH - type match - val find_n_matches : int -> (G.T * G.T) - -> match list - val find_n_correct_matches : int -> (G.T * G.T) - -> match list -end - -signature BG_MATCH_TESTER = -sig - structure G : TEST_BANG_GRAPH - type match - val find_n_matches : int -> (G.T * G.T) - -> match list - val find_n_correct_matches : int -> (G.T * G.T) - -> match list - val assert_n_wild_bboxes : int -> match -> unit -end - -functor Test_Match_Tools( - Matcher : TEST_MATCH_SEARCH -) = -struct - - structure M = Matcher - structure G = M.Graph - structure GTools = Test_OGraph_Tools(G) - open GTools - structure Match = M.Match - type match = Match.T - - fun assert_match_correct orig_pat orig_tgt tgt_verts m = let - val pat = Match.get_pat m - val tgt = Match.get_tgt m - val vmap = Match.get_vmap m - - (* FIXME: no homeo-checker for OGRAPH - val _ = Testing.assert "Pattern correct" - (HomeoFinder.is_homeomorphic orig_pat pat) - val _ = Testing.assert "Target correct" - (HomeoFinder.is_homeomorphic orig_tgt tgt) - *) - (* FIXME: normalisation - val _ = Testing.assert "T subset of tgt verts" - (V.NSet.sub_set tgt_verts (G.get_vertices tgt)) - val _ = Testing.assert "T does not intersect cod vmap" - (V.NSet.is_empty (V.NSet.intersect tgt_verts - (VVInj.get_cod_set vmap))) - *) - - in m end; - - fun assert_n_match n mseq () = let - val mlist = Seq.list_of mseq - val nm = length mlist - in - if nm = n then mlist - else (List.map Match.print mlist; - raise ERROR ("expected " ^ Int.toString n ^ - " matching(s), got " ^ Int.toString nm)) - end; - - fun find_n_matches n (pat,tgt) = - assert_n_match n (M.match pat tgt) (); - - fun find_n_correct_matches n (pat,tgt) = - map (assert_match_correct pat tgt (G.get_vertices tgt)) - (assert_n_match n (M.match pat tgt) ()); -end; - -functor 
Test_Bang_Match_Tools( - BGMatcher : TEST_BG_MATCH_SEARCH -) = -struct - structure BGTools = Test_Bang_Graph_Tools(BGMatcher.Graph); - open BGTools; - structure M = BGMatcher; - structure Match = M.Match; - type match = Match.T - structure G = Match.Graph - structure HomeoFinder = BangGraphHomeomorphismSearcher(G) - - fun assert_match_correct orig_pat orig_tgt tgt_verts m = let - val init_pat = Match.get_init_pat m - val pat = Match.get_pat m - val replayed_pat = Test_Bang_Graph.replay_bbox_ops (Match.get_bbox_ops m) init_pat - val tgt = Match.get_tgt m - val vmap = Match.get_vmap m - - val _ = Testing.assert "Pattern correct" - (HomeoFinder.is_homeomorphic replayed_pat pat) - val _ = Testing.assert "Target correct" - (HomeoFinder.is_homeomorphic orig_tgt tgt) - (* FIXME: normalisation - val _ = Testing.assert "T subset of tgt verts" - (V.NSet.sub_set tgt_verts (G.get_vertices tgt)) - val _ = Testing.assert "T does not intersect cod vmap" - (V.NSet.is_empty (V.NSet.intersect tgt_verts - (VVInj.get_cod_set vmap))) - *) - in m end; - - fun assert_n_match n mseq () = let - val mlist = Seq.list_of mseq - val nm = length mlist - in - if nm = n then mlist - else (List.map Match.print mlist; - raise ERROR ("expected " ^ Int.toString n ^ - " matching(s), got " ^ Int.toString nm)) - end; - - fun assert_n_wild_bboxes n ms = let - val ws = Match.get_wild_bboxes ms - in - Testing.assert ((Int.toString n)^" wild !-boxes") (B.NSet.cardinality ws = n) - end - - fun find_n_matches n (pat,tgt) = assert_n_match n (M.match pat tgt) () - - fun find_n_correct_matches n (pat,tgt) = - map (assert_match_correct pat tgt (G.get_vertices tgt)) - (assert_n_match n (M.match pat tgt) ()) -end; diff --git a/core/matching/test/test-sg-to-sg-matching.ML b/core/matching/test/test-sg-to-sg-matching.ML deleted file mode 100644 index bd4d089f..00000000 --- a/core/matching/test/test-sg-to-sg-matching.ML +++ /dev/null @@ -1,431 +0,0 @@ -(* - * Tests matching of string graphs against string graphs - *) -functor SG_To_SG_Matching_Tests(Tester : MATCH_TESTER) = -struct - structure GTools = Test_OGraph_Tools(Tester.G) - open GTools - open Tester - - (* - * Circle matching tests - *) - val circle1 = G.empty - |> add_wv "c1" - |> add_dir_eunit1 "e1" "c1" "c1"; - - val circle2 = G.empty - |> add_wv "c1" - |> add_dir_eunit2 "e1" "c1" "c1"; - - val undir_circle1 = G.empty - |> add_wv "c1" - |> add_undir_eunit1 "e1" "c1" "c1"; - - val two_1circles = circle1 - |> add_wv "c2" - |> add_dir_eunit1 "e2" "c2" "c2"; - - val two_different_circles = circle1 - |> add_wv "c2" - |> add_dir_eunit2 "e2" "c2" "c2"; - - val circle_expr1a = G.empty - |> add_wv "c1" - |> add_dir_eexpr1 "e1" "c1" "c1" "a"; - - val two_circles_expr1_a_2a = circle_expr1a - |> add_wv "c2" - |> add_dir_eexpr1 "e2" "c2" "c2" "2a"; - - val circle_expr1pi = G.empty - |> add_wv "c2" - |> add_dir_eexpr1 "e2" "c2" "c2" "\\pi"; - - val two_circles_expr1_pi_2pi = circle_expr1pi - |> add_wv "c3" - |> add_dir_eexpr1 "e3" "c3" "c3" "2 \\pi"; - - val circle_expr2pi = G.empty - |> add_wv "c3" - |> add_dir_eexpr2 "e3" "c3" "c3" "\\pi"; - - val bare_wire1 = G.empty - |> add_wv "in1" - |> add_wv "out1" - |> add_dir_eunit1 "e1" "in1" "out1"; - - val loop = G.empty - |> add_vunit1 "v1" - |> add_dir_eunit1 "e1" "v1" "v1"; - - val _ = Testing.test "match: circle (eunit1) --> empty" - (find_n_correct_matches 0) (circle1,G.empty); - val _ = Testing.test "match: empty --> circle (eunit1)" - (find_n_correct_matches 1) (G.empty,circle1); - - val _ = Testing.test "match: circle (eunit1) --> bare wire 
(eunit1)" - (find_n_correct_matches 0) (circle1,bare_wire1); - val _ = Testing.test "match: circle (eunit1) --> loop (vunit1,eunit1)" - (find_n_correct_matches 0) (circle1,loop); - val _ = Testing.test "match: circle (eunit1) --> circle (eunit1)" - (find_n_correct_matches 1) (circle1,circle1); - val _ = Testing.test "match: circle (eunit1) --> circle (eunit2)" - (find_n_correct_matches 0) (circle1,circle2); - - val _ = Testing.test "match: circle (eunit1) --> two circles (eunit1)" - (find_n_correct_matches 2) (circle1,two_1circles); - val _ = Testing.test "match: circle (eunit1) --> two circles (eunit1/2)" - (find_n_correct_matches 1) (circle1,two_different_circles); - val _ = Testing.test "match: two circles (eunit1) --> two circles (eunit1)" - (find_n_correct_matches 2) (two_1circles,two_1circles); - val _ = Testing.test "match: two circles (eunit1) --> two circles (eunit1/2)" - (find_n_correct_matches 0) (two_1circles,two_different_circles); - val _ = Testing.test "match: two circles (eunit1/2) --> two circles (eunit1)" - (find_n_correct_matches 1) - (two_different_circles,two_different_circles); - - val _ = Testing.test "match: circle (eexpr1:a) --> circle (eexpr1:pi)" - (find_n_correct_matches 1) (circle_expr1a,circle_expr1pi); - val _ = Testing.test "match: circle (eexpr1:a) --> circle (eexpr2:pi)" - (find_n_correct_matches 0) (circle_expr1a,circle_expr2pi); - val _ = Testing.test "match: circle (eexpr1:pi) --> circle (eexpr1:a)" - (find_n_correct_matches 0) (circle_expr1pi,circle_expr1a); - val _ = Testing.test "match: two circles (a,2a) --> two circles (pi,2pi)" - (find_n_correct_matches 1) - (two_circles_expr1_a_2a,two_circles_expr1_pi_2pi); - - val _ = Testing.test "match: undir circle --> undir circle" - (find_n_correct_matches 1) - (undir_circle1,undir_circle1); - val _ = Testing.test "match: dir circle --> undir circle" - (find_n_correct_matches 0) - (circle1,undir_circle1); - val _ = Testing.test "match: undir circle --> dir circle" - (find_n_correct_matches 0) - (undir_circle1,circle1); - - (* - * Bare wire matching tests - *) - val undir_bare_wire1 = G.empty - |> add_wv "in1" - |> add_wv "out1" - |> add_undir_eunit1 "e1" "in1" "out1"; - - val bare_wire2 = G.empty - |> add_wv "in1" - |> add_wv "out1" - |> add_dir_eunit2 "e1" "in1" "out1"; - - val two_bare_wires = bare_wire1 - |> add_wv "in2" - |> add_wv "out2" - |> add_dir_eunit1 "e2" "in2" "out2"; - - val three_bare_wires = two_bare_wires - |> add_wv "in3" - |> add_wv "out3" - |> add_dir_eunit1 "e3" "in3" "out3"; - - val two_diff_bare_wires = G.empty - |> add_wv "in1" - |> add_wv "out1" - |> add_dir_eunit1 "e1" "in1" "out1" - |> add_wv "in2" - |> add_wv "out2" - |> add_dir_eunit2 "e2" "in2" "out2"; - - val barbell = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_dir_eunit1 "e1" "v1" "v2"; - - val undir_barbell = G.empty - |> add_vunit1 "v1" - |> add_vunit1 "v2" - |> add_undir_eunit1 "e1" "v1" "v2"; - - val bare_wire_expr1a = G.empty - |> add_wv "in1" - |> add_wv "out1" - |> add_dir_eexpr1 "e1" "in1" "out1" "a"; - - val two_bare_wires_expr1_a_2a = bare_wire_expr1a - |> add_wv "in2" - |> add_wv "out2" - |> add_dir_eexpr1 "e2" "in2" "out2" "2a"; - - val bare_wire_expr1pi = G.empty - |> add_wv "in1" - |> add_wv "out1" - |> add_dir_eexpr1 "e1" "in1" "out1" "\\pi"; - - val bare_wire_expr2pi = G.empty - |> add_wv "in1" - |> add_wv "out1" - |> add_dir_eexpr2 "e1" "in1" "out1" "\\pi"; - - val two_circles_expr1_hpi_pi = G.empty - |> add_wv "c1" - |> add_dir_eexpr1 "e1" "c1" "c1" "1/2\\pi" - |> add_wv "c2" - |> 
add_dir_eexpr1 "e2" "c2" "c2" "\\pi"; - - val expr_loop_hpi_pi = G.empty - |> add_vunit1 "x1" - |> add_vunit1 "x2" - |> add_dir_eexpr1 "e1" "x1" "x2" "1/2\\pi" - |> add_dir_eexpr1 "e2" "x2" "x1" "\\pi"; - - val _ = Testing.test "match: bare wire (eunit1) --> empty" - (find_n_correct_matches 0) (bare_wire1,G.empty); - val _ = Testing.test "match: empty --> bare wire (eunit1)" - (find_n_correct_matches 1) (G.empty,bare_wire1); - - val _ = Testing.test "match: bare wire (eunit1) --> bare wire (eunit1)" - (find_n_correct_matches 1) (bare_wire1,bare_wire1); - val _ = Testing.test "match: bare wire (eunit1) --> bare wire (eunit2)" - (find_n_correct_matches 0) (bare_wire1,bare_wire2); - - val _ = Testing.test "match: bare wire (eunit1) --> 2 bare wires (eunit1)" - (find_n_correct_matches 2) (bare_wire1,two_bare_wires); - val _ = Testing.test "match: bare wire (eunit1) --> 2 bare wires (eunit1/2)" - (find_n_correct_matches 1) (bare_wire1,two_diff_bare_wires); - - val _ = Testing.test "match: 2 bare wires (eunit1) --> bare wire (eunit1)" - (find_n_correct_matches 2) (two_bare_wires,bare_wire1); - val _ = Testing.test "match: 2 bare wires (eunit1/2) --> bare wire (eunit1)" - (find_n_correct_matches 0) (two_diff_bare_wires,bare_wire1); - val _ = Testing.test "match: 2 bare wires (eunit1) --> 2 bare wires (eunit1)" - (find_n_correct_matches 6) (two_bare_wires,two_bare_wires); - val _ = Testing.test "match: 2 bare wires (eunit1/2) --> 2 bare wires (eunit1/2)" - (find_n_correct_matches 1) (two_diff_bare_wires,two_diff_bare_wires); - - (* for circles, there should be as many matches as there are cyclic permutations *) - val _ = Testing.test "match: bare wire (eunit1) --> circle (eunit1)" - (find_n_correct_matches 1) (bare_wire1,circle1); - val _ = Testing.test "match: bare wire (eunit2) --> circle (eunit1)" - (find_n_correct_matches 0) (bare_wire2,circle1); - val _ = Testing.test "match: 2 bare wires (eunit1) --> circle (eunit1)" - (find_n_correct_matches 1) (two_bare_wires,circle1); - val _ = Testing.test "match: 3 bare wires (eunit1) --> circle (eunit1)" - (find_n_correct_matches 2) (three_bare_wires,circle1); - - val _ = Testing.test "match: bare wire (eunit1) --> barbell (eunit1)" - (find_n_correct_matches 1) (bare_wire1,barbell); - val _ = Testing.test "match: bare wire (eunit2) --> barbell (eunit1)" - (find_n_correct_matches 0) (bare_wire2,barbell); - val _ = Testing.test "match: 2 bare wires (eunit1) --> barbell (eunit1)" - (find_n_correct_matches 2) (two_bare_wires,barbell); - - val _ = Testing.test "match: bare wire (expr1:a) --> bare wire (expr1:pi)" - (find_n_correct_matches 1) (bare_wire_expr1a,bare_wire_expr1pi); - val _ = Testing.test "match: bare wire (expr1:pi) --> bare wire (expr1:a)" - (find_n_correct_matches 0) (bare_wire_expr1pi,bare_wire_expr1a); - val _ = Testing.test "match: bare wire (expr1:a) --> bare wire (expr2:pi)" - (find_n_correct_matches 0) (bare_wire_expr1a,bare_wire_expr2pi); - val _ = Testing.test "match: bare wire (a) --> 2 circles (1/2pi,pi)" - (find_n_correct_matches 2) - (bare_wire_expr1a,two_circles_expr1_hpi_pi); - val _ = Testing.test "match: 2 bare wires (a,2a) --> loop (1/2pi,pi)" - (find_n_correct_matches 1) - (two_bare_wires_expr1_a_2a,expr_loop_hpi_pi); - val _ = Testing.test "match: 2 bare wires (a,2a) --> 2 circles (1/2pi,pi)" - (find_n_correct_matches 1) - (two_bare_wires_expr1_a_2a,two_circles_expr1_hpi_pi); - - val _ = Testing.test "match: undir bare wire (eunit1) --> undir bare wire (eunit1)" - (find_n_correct_matches 2) - 
(undir_bare_wire1,undir_bare_wire1); - val _ = Testing.test "match: dir bare wire (eunit1) --> undir bare wire (eunit1)" - (find_n_correct_matches 0) - (bare_wire1,undir_bare_wire1); - val _ = Testing.test "match: undir bare wire (eunit1) --> dir bare wire (eunit1)" - (find_n_correct_matches 0) - (undir_bare_wire1,bare_wire1); - val _ = Testing.test "match: undir bare wire (eunit1) --> undir barbell (eunit1)" - (find_n_correct_matches 2) (undir_bare_wire1,undir_barbell); - - (* - * Matching node-vertices against other node-vertices - *) - - val single_v = G.empty - |> add_vunit1 "v1"; - val double_v = G.empty - |> add_vunit1 "vA" - |> add_vunit1 "vB"; - val v1_v2 = G.empty - |> add_vunit1 "vA" - |> add_vunit2 "v1"; - - val v_expr1_a = G.empty - |> add_vexpr1 "v1" "a"; - val v_expr1_a_2a = G.empty - |> add_vexpr1 "v1" "a" - |> add_vexpr1 "v2" "2a"; - val v_expr1_b = G.empty - |> add_vexpr1 "v1" "b"; - val v_expr1_pi = G.empty - |> add_vexpr1 "v1" "\\pi"; - val v_expr1_hpi_pi = G.empty - |> add_vexpr1 "v1" "1/2\\pi" - |> add_vexpr1 "v2" "\\pi"; - val v_expr2_a = G.empty - |> add_vexpr2 "v1" "a"; - - val _ = Testing.test "match: vunit1 --> empty" - (find_n_correct_matches 0) (single_v,G.empty); - val _ = Testing.test "match: empty --> vunit1" - (find_n_correct_matches 1) (G.empty,single_v); - val _ = Testing.test "match: vunit1 --> vunit1" - (find_n_correct_matches 1) (single_v,single_v); - val _ = Testing.test "match: vunit1 --> 2 * vunit1" - (find_n_correct_matches 2) (single_v,double_v); - val _ = Testing.test "match: vunit1 --> vunit1, vunit2" - (find_n_correct_matches 1) (single_v,v1_v2); - val _ = Testing.test "match: 2 * vunit1 --> vunit1" - (find_n_correct_matches 0) (double_v,single_v); - val _ = Testing.test "match: 2 * vunit1 --> 2 * vunit1" - (find_n_correct_matches 2) (double_v,double_v); - - val _ = Testing.test "match: vexpr1:a --> vexpr1:a" - (find_n_correct_matches 1) (v_expr1_a,v_expr1_a); - val _ = Testing.test "match: vexpr1:a --> vexpr1:b" - (find_n_correct_matches 1) (v_expr1_a,v_expr1_b); - val _ = Testing.test "match: vexpr1:a --> vexpr1:pi" - (find_n_correct_matches 1) (v_expr1_a,v_expr1_pi); - val _ = Testing.test "match: vexpr1:pi --> vexpr1:a" - (find_n_correct_matches 0) (v_expr1_pi,v_expr1_a); - val _ = Testing.test "match: vexpr1:a --> vexpr2:a" - (find_n_correct_matches 0) (v_expr1_a,v_expr2_a); - val _ = Testing.test "match: vexpr1:a --> vexpr1:1/2pi, vexpr1:pi" - (find_n_correct_matches 2) (v_expr1_a,v_expr1_hpi_pi); - val _ = Testing.test "match: vexpr1:a, vexpr1:2a --> vexpr1:1/2pi, vexpr1:pi" - (find_n_correct_matches 1) (v_expr1_a_2a,v_expr1_hpi_pi); - - - (* - * Edge-direction matching - *) - val dir_barbell_1_2 = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_dir_eunit1 "e1" "v1" "v2"; - val dir_barbell_2_1 = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_dir_eunit1 "e1" "v2" "v1"; - val undir_barbell_1_2 = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_undir_eunit1 "e1" "v1" "v2"; - val undir_barbell_2_1 = G.empty - |> add_vunit1 "v1" - |> add_vunit2 "v2" - |> add_undir_eunit1 "e1" "v2" "v1"; - - val _ = Testing.test "match: 1->2 --> 1->2" - (find_n_correct_matches 1) - (dir_barbell_1_2,dir_barbell_1_2); - val _ = Testing.test "match: 1->2 --> 2->1" - (find_n_correct_matches 0) - (dir_barbell_1_2,dir_barbell_2_1); - val _ = Testing.test "match: 1->2 --> 1-2" - (find_n_correct_matches 0) - (dir_barbell_1_2,undir_barbell_1_2); - val _ = Testing.test "match: 1-2 --> 1-2" - (find_n_correct_matches 1) - 
(undir_barbell_1_2,undir_barbell_1_2); - val _ = Testing.test "match: 1-2 --> 2-1" - (find_n_correct_matches 1) - (undir_barbell_1_2,undir_barbell_2_1); - val _ = Testing.test "match: 1-2 --> 1->2" - (find_n_correct_matches 0) - (undir_barbell_1_2,dir_barbell_1_2); - - - (* - * Edge data matching - *) - val barbell_data = G.empty - |> add_vexpr1 "v1" "a" - |> add_vexpr2 "v2" "b" - |> add_dir_eexpr1 "e1" "v1" "v2" "a"; - val barbell_good_data_1 = G.empty - |> add_vexpr1 "v1" "c" - |> add_vexpr2 "v2" "b" - |> add_dir_eexpr1 "e1" "v1" "v2" "c"; - val barbell_good_data_2 = G.empty - |> add_vexpr1 "v1" "1/4pi" - |> add_vexpr2 "v2" "1/2pi" - |> add_dir_eexpr1 "e1" "v1" "v2" "1/4pi"; - val barbell_bad_data_1 = G.empty - |> add_vexpr1 "v1" "a" - |> add_vexpr2 "v2" "b" - |> add_dir_eexpr1 "e1" "v1" "v2" "b"; - val barbell_bad_data_2 = G.empty - |> add_vexpr1 "v1" "1/4pi" - |> add_vexpr2 "v2" "1/3pi" - |> add_dir_eexpr1 "e1" "v1" "v2" "1/2pi"; - - val _ = Testing.test "match: barbell data --> barbell data" - (find_n_correct_matches 1) - (barbell_data,barbell_data); - val _ = Testing.test "match: barbell data --> barbell good data 1" - (find_n_correct_matches 1) - (barbell_data,barbell_good_data_1); - val _ = Testing.test "match: barbell data --> barbell good data 2" - (find_n_correct_matches 1) - (barbell_data,barbell_good_data_2); - val _ = Testing.test "match: barbell data --> barbell bad data 1" - (find_n_correct_matches 0) - (barbell_data,barbell_bad_data_1); - val _ = Testing.test "match: barbell data --> barbell bad data 2" - (find_n_correct_matches 0) - (barbell_data,barbell_bad_data_2); - - (* - * Partial wire matches - *) - - val out_v = G.empty - |> add_vunit1 "v1" - |> add_wv "out1" - |> add_dir_eunit1 "e1" "v1" "out1"; - val in_v = G.empty - |> add_vunit1 "v1" - |> add_wv "in1" - |> add_dir_eunit1 "e1" "in1" "v1"; - val in_v_and_wire = in_v - |> add_wv "in2" - |> add_wv "out2" - |> add_dir_eunit1 "bw" "in2" "out2"; - - val barbell_loop = barbell - |> add_dir_eunit1 "e2" "v2" "v1"; - val barbell_line = barbell - |> add_wv "in1" - |> add_wv "out1" - |> add_dir_eunit1 "e2" "in1" "v1" - |> add_dir_eunit1 "e3" "v2" "out1"; - - val _ = Testing.test "match: one vert --> barbell" - (find_n_correct_matches 0) (single_v,barbell); - val _ = Testing.test "match: barbell --> barbell" - (find_n_correct_matches 1) (barbell,barbell); - val _ = Testing.test "match: barbell --> barbell" - (find_n_correct_matches 1) (barbell,barbell); - val _ = Testing.test "match: v with output --> barbell" - (find_n_correct_matches 1) (out_v,barbell); - val _ = Testing.test "match: v with input --> barbell" - (find_n_correct_matches 1) (in_v,barbell); - val _ = Testing.test "match: v with input and bare wire --> barbell" - (find_n_correct_matches 1) (in_v_and_wire,barbell); - val _ = Testing.test "match: barbell line --> barbell loop" - (find_n_correct_matches 2) (barbell_line,barbell_loop); - - val _ = Testing.assert_no_failed_tests(); -end; - diff --git a/core/names.ML b/core/names.ML deleted file mode 100644 index 32313766..00000000 --- a/core/names.ML +++ /dev/null @@ -1,193 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* - This file defines the various kinds of global names structures and - frequently maps, bij, etc between them - *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - - - -(* this signature is used just to enforce type constraint that distinguish - different kinds of names *) -signature QUANTO_NAME -= sig - - structure V : 
SSTR_NAME (* vertices *) - structure E : SSTR_NAME (* Edges *) - - structure B : SSTR_NAME (* Bang-boxes *) - structure X : SSTR_NAME (* eXpression variables in data *) - structure R : SSTR_NAME (* Rules *) - structure T : SSTR_NAME (* Theories *) - structure F : SSTR_NAME (* Fixing tags *) - structure TagName : SSTR_NAME - structure GraphName : SSTR_NAME - - structure VTab : NAME_TABLE - where type name = V.name - and type NSet.T = V.NSet.T - - structure ETab : NAME_TABLE - where type name = E.name - and type NSet.T = E.NSet.T - - structure BTab : NAME_TABLE - where type name = B.name - and type NSet.T = B.NSet.T - - structure XTab : NAME_TABLE - where type name = X.name - and type NSet.T = X.NSet.T - - structure RTab : NAME_TABLE - where type name = R.name - and type NSet.T = R.NSet.T - - structure TTab : NAME_TABLE - where type name = T.name - and type NSet.T = T.NSet.T - - structure EVFn : NAME_FUNCTION - where type dom = E.name - and type DomSet.T = E.NSet.T - and type cod = V.name - and type CodSet.T = V.NSet.T - - structure BFFn : NAME_FUNCTION - where type dom = B.name - and type DomSet.T = B.NSet.T - and type cod = F.name - and type CodSet.T = F.NSet.T - - structure BVRel : NAME_RELATION - where type dom = B.name - and type DomSet.T = B.NSet.T - and type cod = V.name - and type CodSet.T = V.NSet.T - - structure BBRel : NAME_RELATION - where type dom = B.name - and type DomSet.T = B.NSet.T - and type cod = B.name - and type CodSet.T = B.NSet.T - - structure RTagRel : NAME_RELATION - where type dom = R.name - and type DomSet.T = R.NSet.T - and type cod = TagName.name - and type CodSet.T = TagName.NSet.T - - structure VSub : NAME_SUBSTITUTION - where type dom = V.name - and type DomSet.T = V.NSet.T - and type cod = V.name - and type CodSet.T = V.NSet.T - - structure ESub : NAME_SUBSTITUTION - where type dom = E.name - and type DomSet.T = E.NSet.T - and type cod = E.name - and type CodSet.T = E.NSet.T - - structure BSub : NAME_SUBSTITUTION - where type dom = B.name - and type DomSet.T = B.NSet.T - and type cod = B.name - and type CodSet.T = B.NSet.T - - structure XSub : NAME_SUBSTITUTION - where type dom = X.name - and type DomSet.T = X.NSet.T - and type cod = X.name - and type CodSet.T = X.NSet.T - - structure RSub : NAME_SUBSTITUTION - where type dom = R.name - and type DomSet.T = R.NSet.T - and type cod = R.name - and type CodSet.T = R.NSet.T - - structure VVInj : NAME_INJECTION - where type dom = V.name - and type DomSet.T = V.NSet.T - and type cod = V.name - and type CodSet.T = V.NSet.T - - structure EEInj : NAME_INJECTION - where type dom = E.name - and type DomSet.T = E.NSet.T - and type cod = E.name - and type CodSet.T = E.NSet.T - - structure BBInj : NAME_INJECTION - where type dom = B.name - and type DomSet.T = B.NSet.T - and type cod = B.name - and type CodSet.T = B.NSet.T -end; - -local -structure QuantoNames :> QUANTO_NAME -= struct - structure V = struct open SStrName; val default_name = mk "Va"; end (* vertices *) - structure E = struct open SStrName; val default_name = mk "Ea"; end (* edges *) - structure B = struct open SStrName; val default_name = mk "Ba"; end (* bang boxes *) - structure X = struct open SStrName; val default_name = mk "x"; end (* expressions *) - structure R = struct open SStrName; val default_name = mk "new-rule-1"; end (* Rules *) - structure T = struct open SStrName; val default_name = mk "new-theory-1"; end (* Theories *) - structure F = struct open SStrName; val default_name = mk "a"; end (* Fixity tags *) - - structure TagName : SSTR_NAME = - 
struct open SStrName; val default_name = mk "new-ruleset-1"; end - structure GraphName : SSTR_NAME = - struct open SStrName; val default_name = SStrName.mk "new-graph-1" end - - structure SStrTab = NameTable(structure Dom = SStrName) - structure SStrFn = NameFunction(structure Dom = SStrName; structure Cod = SStrName) - structure SStrRel = NameRelation(structure Dom = SStrName; structure Cod = SStrName) - structure SStrSub = NameSubstitution(structure Name = SStrName) - structure SStrInj = NameInjection(structure Dom = SStrName; structure Cod = SStrName) - - structure VTab = SStrTab - structure ETab = SStrTab - structure BTab = SStrTab - structure XTab = SStrTab - structure RTab = SStrTab - structure TTab = SStrTab - - structure EVFn = SStrFn - structure BFFn = SStrFn - - structure BVRel = SStrRel - structure BBRel = SStrRel - structure RTagRel = SStrRel - - structure VSub = SStrSub - structure ESub = SStrSub - structure BSub = SStrSub - structure XSub = SStrSub - structure RSub = SStrSub - - structure VVInj = SStrInj - structure EEInj = SStrInj - structure BBInj = SStrInj -end -in -(* Put opaque Quanto names and name maps into the top-level *) -open QuantoNames - -(* install pretty printers *) -local - fun prettyVertex _ _ x = - PolyML.PrettyString("\"" ^ V.string_of_name x ^ "\""); - fun prettyEdge _ _ x = - PolyML.PrettyString("\"" ^ E.string_of_name x ^ "\""); - fun prettyBBox _ _ x = - PolyML.PrettyString("\"" ^ B.string_of_name x ^ "\""); -in - val () = PolyML.addPrettyPrinter prettyVertex; - val () = PolyML.addPrettyPrinter prettyEdge; - val () = PolyML.addPrettyPrinter prettyBBox; -end - -end diff --git a/core/quanto.thy b/core/quanto.thy deleted file mode 100644 index 5ffa6b49..00000000 --- a/core/quanto.thy +++ /dev/null @@ -1,4 +0,0 @@ -theory quanto -imports lib core theories controller synth -begin -end diff --git a/core/rewriting/heuristic/distancematrix.ML b/core/rewriting/heuristic/distancematrix.ML deleted file mode 100644 index 416ac1ea..00000000 --- a/core/rewriting/heuristic/distancematrix.ML +++ /dev/null @@ -1,129 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-==-=-=-=-=-=-=-=-=-=- *) -(* Representation for graph distance matrices. *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -signature DISTANCE_MATRIX = -sig - type T; - type graph; - type name; - type dist; - - (* calculate the distance matrix of a graph *) - val calculate : graph -> T - - (* set distance between two vertices *) - val set_distance : name -> name -> dist -> T -> T - val set_distance_if_shorter : name -> name -> dist -> T -> T - - (* get distance between two vertices *) - val get_distance : T -> name -> name -> dist - - (* distance order *) - val dist_ord : dist*dist -> order - - (* pretty printing *) - val pretty_dist : dist -> Pretty.T - val pretty : T -> Pretty.T - val print : T -> unit - -end; - -functor DistanceMatrixFun( - structure Graph : OGRAPH - ) : DISTANCE_MATRIX -= struct - - datatype dist = Dist of int | Infinity; - - type graph = Graph.T; - type name = V.name; - type T = dist V.NTab.T V.NTab.T; - - structure NTab = V.NTab; - - (* Distance ordering. *) - fun dist_ord (Dist(i),Dist(j)) = Int.compare (i,j) - | dist_ord (Dist(_),Infinity) = LESS - | dist_ord (Infinity,Dist(_)) = GREATER - | dist_ord (Infinity,Infinity) = EQUAL; - - (* Get the distance between two vertices. *) - fun get_distance t v1 v2 = NTab.get (NTab.get t v1) v2 - handle NTab.Tab.UNDEF(_) => Infinity; - - (* Set the distance between two vertices. 
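(* A rough usage sketch, not a definitive recipe: the functor accepts any
   OGRAPH instance, so a caller shaped like the following should work
   ("MyGraph", "g", "v1" and "v2" are placeholders, not names defined in
   this repository):

     structure DM = DistanceMatrixFun(structure Graph = MyGraph);
     val dm = DM.calculate g;              (* build the matrix for graph g *)
     val d  = DM.get_distance dm v1 v2;    (* Dist n, or Infinity *)
     val _  = DM.print dm;                 (* dump it via the pretty printer *)

   Entries are filled in by repeated relaxation with set_distance_if_shorter,
   one edge at a time; undirected edges are relaxed in both directions. *)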
*) - fun set_distance v1 v2 d t = NTab.update (v1, - (NTab.update (v2,d) - (NTab.get t v1 - handle NTab.Tab.UNDEF(_) => NTab.empty) - )) t; - - (* Set the distance between two vertices if the new distance is less *) - fun set_distance_if_shorter v1 v2 d t = case dist_ord(d,get_distance t v1 v2) of - LESS => set_distance v1 v2 d t - | _ => t; - - fun distplusone d = case d of (Dist(i)) => Dist(i+1) | Infinity => Infinity; - - (* Calculate a distance matrix given a graph. *) - fun calculate g = let - (* Keep track of list of names = indexing set of matrix *) - val l = Graph.get_vertices g - fun calc g v t = let - val efrom = Graph.get_in_edges g v - val eto = Graph.get_out_edges g v - val gnext = g |> Graph.delete_vertex v - (* update distances to/from vwith around directed edge vfrom->vto *) - fun update_dist vfrom vto vwith t = - (* distance to target <- distance to source + 1 *) - t |> set_distance_if_shorter vwith vto - (distplusone (get_distance t vwith vfrom)) - (* distance from source <- distance from target + 1 *) - |> set_distance_if_shorter vfrom vwith - (distplusone (get_distance t vto vwith)) - (* update distances around edge named en *) - fun calc_edge en t = let - val ((dir,_),(vs,vt)) = Graph.get_edge_info g en - in t |> V.NSet.fold (update_dist vs vt) l - |> (if dir=Directed then I else - V.NSet.fold (update_dist vt vs) l) - end; - (* update distances around edge set es *) - fun calc_edges es t = t |> E.NSet.fold calc_edge es - in t |> set_distance v v (Dist(0)) - |> calc_edges efrom - |> calc_edges eto - |> (calc gnext (the (V.NSet.get_local_bot - (Graph.get_vertices gnext))) - handle Option => I) - end; - in calc g (the (V.NSet.get_local_bot l)) NTab.empty end; - - (* Pretty printing of distance *) - fun pretty_dist (Dist(i)) = Pretty.str (Int.toString i) - | pretty_dist (Infinity) = Pretty.str "X"; - - (* Pretty printing of matrix *) - fun pretty t = Pretty.chunks - [Pretty.str "DistanceMatrix{", - Pretty.block - [Pretty.str " ", (* indent *) - Pretty.chunks - (map - (fn v1 => - Pretty.block - [V.pretty_name v1, - Pretty.str " | ", - Pretty.list "" "" (map - (fn v2 => pretty_dist (get_distance t v1 v2)) - (NTab.NSet.list_of (NTab.get_nameset t)) - ) - ]) - (NTab.NSet.list_of (NTab.get_nameset t))) - ], - Pretty.str "}" - ]; - - val print = Pretty.writeln o pretty; -end; diff --git a/core/rewriting/indexing/Histogram.ML b/core/rewriting/indexing/Histogram.ML deleted file mode 100644 index c7a581e6..00000000 --- a/core/rewriting/indexing/Histogram.ML +++ /dev/null @@ -1,58 +0,0 @@ - - -signature HISTOGRAM -= sig - structure G : GRAPH - structure V : RG_VERTEX - - type Entry - type T - - val insert_into_hist : VKind.T -> T -> T - val create_hist : G.T -> T - val entry_ord : Entry -> Entry -> order - val max_entry : Entry -> Entry -> Entry -end; - -structure Histogram : HISTOGRAM -= struct - structure G = RGGraph; - structure V = G.RGVertex; - - - (*A histogram of a graph is a list of pairs of vertex kinds and the number of - times they appear. It's ordered by vertex kinds, and all functions below - assume this. 
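   As an illustration only (the kind names here are made up): a graph with
   three vertices of one interior kind K1 and a single vertex of another
   kind K2 would be summarised roughly as

     create_hist g  =  [(K1,3), (K2,1)]

   with the entries kept in decreasing VKind.kind_ord order by
   insert_into_hist.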
The boundary vertex kind is also assumed absent.*) - type Entry = (VKind.T * int) - type T = Entry list; - - fun insert_into_hist kind [] = [(kind,1)] - | insert_into_hist kind1 ((kind2,n)::ks) = - case VKind.kind_ord (kind1,kind2) of - EQUAL => (kind2,n+1)::ks - | LESS => (kind2,n):: insert_into_hist kind1 ks - | GREATER => (kind1,1)::(kind2,n)::ks; - - fun create_hist graph = - ( - G.get_vertex_list graph - |> map (fn vdata => VKind.get_kind vdata) - (*filter out boundaries*) - |> filter (fn vkind => not (VKind.is_boundary vkind)) - |> fold insert_into_hist - ) []; - - (*order goes : Vertex kind then number of them*) - fun entry_ord (kind1,n1) (kind2,n2) = - case VKind.kind_ord (kind1,kind2) of - EQUAL => Int.compare (n1,n2) - | LESS => LESS - | GREATER => GREATER; - - fun max_entry e1 e2 = - case entry_ord e1 e2 of - EQUAL => e1 - | LESS => e2 - |GREATER=> e1; - -end; diff --git a/core/rewriting/indexing/adjacency.ML b/core/rewriting/indexing/adjacency.ML deleted file mode 100644 index 5b6a6389..00000000 --- a/core/rewriting/indexing/adjacency.ML +++ /dev/null @@ -1,187 +0,0 @@ - - -structure AdjacencyList -= struct - - structure G = RGGraph; - structure V = G.RGVertex; - structure Vk = VKind; - structure NSet = V.NSet; - structure NTab = V.NTab; - - - (*Starting at a vertex we generate an ordered list of Vkinds connected to it. - we then consider the initial vertex and its adjacents as a subgraph and find - the vertices connected to this subgraph but not inside it (vertices are actually deleted - from the graph in practice). This proceeds iteratively until we have what we need or the - graph is covered by the alist.*) - type Entry = Vk.T list - type T = Entry list; - - fun zipwith _ [] _ = [] - | zipwith _ _ [] = [] - | zipwith f (x::xs) (y::ys) = (f x y) :: zipwith f xs ys; - - fun ordered_insert v [] = [v] - | ordered_insert v (e::es) = - case Vk.kind_ord (v,e) of - LESS => e::(ordered_insert v es) - | _ => v::e::es; - - - fun get_kind_table graph = - G.get_vertices graph - |> NTab.map_all' (fn vname => fn vdata => - VKind.get_kind (vname,vdata)); - - (*Given a set of vertex names and a table mapping names to kinds, returns a list ordered - by vertex kinds*) - fun make_entry vnames ktable = - (NSet.list_of vnames - |> map (fn vname => - NTab.get ktable vname) - |> fold ordered_insert - ) []; - - - (*Defines an ordering of alist entries, simple since the entries themselves are ordered lists*) - fun entry_ord ([] : Entry, [] : Entry) = EQUAL - | entry_ord ([],_) = LESS - | entry_ord (_,[]) = GREATER - | entry_ord ((vk1::vk1s), (vk2::vk2s)) = - case VKind.kind_ord (vk1,vk2) of - EQUAL => entry_ord (vk1s, vk2s) - | lt_or_gt => lt_or_gt; - - - fun entry_eq (e1,e2) = (entry_ord (e1,e2) = EQUAL); - - (*defines an ordering on alists, used primarily for selecting the "biggest" vertex in a pattern. 
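   In rough terms the comparison is lexicographic over the layer entries,
   each entry being compared kind by kind via entry_ord.  A hedged sketch of
   how this structure is driven elsewhere in the indexing code ("pat" stands
   for some pattern graph):

     val start  = select_biggest pat;         (* pattern graphs only *)
     val layers = build_adj_list pat start;   (* one Entry per contour *)
     (* or simply: val layers = get_adjacency pat *)

   Each Entry lists the vertex kinds adjacent to the part of the graph
   covered so far, sorted in descending VKind.kind_ord order.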
When there are - a number of candidate choices for biggest vertex (all with the same kind), the "bigger" the alist, the faster - it will cover the graph*) - fun alist_ord ([] : T, [] : T) = EQUAL - | alist_ord ([],_) = LESS - | alist_ord (_,[]) = GREATER - | alist_ord (e1::e1s, e2::e2s)= - case entry_ord (e1,e2) of - EQUAL => alist_ord (e1s, e2s) - | lt_or_gt => lt_or_gt; - - fun alist_eq (al1,al2) = (alist_ord (al1,al2) = EQUAL); - - (*Given a set of vertex names nameset, returns the vertices connected to this set (but not in it) - and the graph with the vertices in nameset deleted*) - fun pull_next_layer (graph, vnames) = - let - val vnames2 = G.incident_vertices graph vnames - val graph2 = NSet.fold G.delete_vertex vnames graph - in (graph2,vnames2) - end; - - (*Given a graph and a nameset, gets the next Alist entry, along with the modified - graph and nameset for next layer*) - fun get_next_layer (graph,vnames) = - if not (NSet.is_empty vnames) then - let - val (graph2,vnames2) = pull_next_layer (graph,vnames) - in - SOME (graph2,vnames2) - end - else NONE; - - - (*Given a graph and a vertex name, constructs the adjlist centered at that vertex - consumes the entire graph, intended to process patterns, not target*) - fun build_adj_list graph vname = - let - val init_vset = NSet.single vname - val ktable = get_kind_table graph - fun build (graph,vnames) = - case get_next_layer (graph,vnames) of - NONE => [] - | SOME (graph2,vnames2) => - (make_entry vnames ktable:: build (graph2,vnames2)) - in - build (graph,init_vset) - end; - - (*Returns the name of the biggest vertex kind whose adjacency list gives the "shortest/fastest", - coverage the graph (latter criteria used only in tie-break situations). Used for processing pattern graphs - only. See comment for Alist_ord function. - assumes graph is non-empty*) - fun select_biggest graph = - let - val ((name,vkind),tab) = - get_kind_table graph - |> NTab.pull_elem - |> the - in - NTab.fold (fn (vname1,vk1) => fn (vname2,vk2) => - case VKind.kind_ord (vk1,vk2) of - LESS => (vname2,vk2) - | GREATER => (vname1,vk1) - | EQUAL => - (let - val adj1 = build_adj_list graph vname1 - val adj2 = build_adj_list graph vname2 - in - case alist_ord (adj1,adj2) of - LESS => (vname2,vk2) - | _ => (vname1,vk1) - end)) tab (name,vkind) - |> Library.fst - end; - - fun get_adjacency graph = build_adj_list graph (select_biggest graph); - - (*Checks compatibility of 2 Alist entries. - interior kinds in tgt_entry must be matched with either an identical type in pat_entry, - or the boundary kind. - Compatibility is used for looking up atrees. It means that the - Vkinds in this "layer" (i.e. entry of the Alist) of the tree can be matched to the - Vkinds in the corresponding "layer" of the pattern graph - IMPORTANT! this function assumes that graphs are in minimal form, i.e ALL boundary vertices - have arity 1*) - fun compatible' [] [] = true - | compatible' entry [] = true - | compatible' [] entry = false - | compatible' (tgt_entry as t::ts) (pat_entry as p::ps) = - case VKind.kind_ord (t,p) of - EQUAL => compatible' ts ps - (*LESS => both t and p are Iverts. Since the adjlists are sorted in - descending order, p cannot be matched by any remaining kind in ts - therefore not compatible*) - | LESS => false - (*GREATER => t is an interior kind, and all remaining kinds in pat_entry - are either boundary or smaller than t. 
match t to the first boundary in pat_entry, - if no such boundary exists match fails (not right away, but soon)*) - |GREATER=> - let - fun remove_next_boundary [] = [] - | remove_next_boundary (y::ys)= - if VKind.is_boundary y then ys - else y::remove_next_boundary ys - in - compatible' ts (remove_next_boundary pat_entry) - end; - - fun compatible tgt_contour pat_contour = - let - val (tgt_ivert, tgt_bvert) = Library.take_prefix - (fn vk => (not o VKind.is_boundary) vk) tgt_contour - val (pat_ivert, pat_bvert) = Library.take_prefix - (fn vk => (not o VKind.is_boundary) vk) pat_contour - val spare_bverts_in_pat = - (List.length tgt_bvert) < (List.length pat_bvert) - fun compat _ [] = true - | compat [] entry = false - | compat (t::ts) (p::ps) = - case VKind.kind_ord (t,p) of - EQUAL => compat ts ps - |LESS => false - |GREATER => compat ts (p::ps) - in - compat tgt_ivert pat_ivert - end; - -end; diff --git a/core/rewriting/indexing/atree.ML b/core/rewriting/indexing/atree.ML deleted file mode 100644 index 0518ae24..00000000 --- a/core/rewriting/indexing/atree.ML +++ /dev/null @@ -1,183 +0,0 @@ -structure AdjacencyTree -= struct - - structure AList = AdjacencyList; - structure G = RGGraph; - structure V = G.RGVertex; - structure Vk = VKind; - structure NSet = V.NSet; - structure NTab = V.NTab; - structure R = Rule; - type N = V.name; - - (*graph with rule data not necessary for processing atree*) - type GData = N; - type GraphWithData = G.T * GData; - type Bag = GraphWithData list; - - datatype T = Br of Bag * ((AList.Entry * T) list) - | Lf of Bag - | Empty; - - fun prepare_rule name (graph,(startv,rule)) = - (NTab.doadd (startv,name) NTab.empty, rule); - - - fun insert_into_atree (graphwd,[]) Empty = Lf [graphwd] - | insert_into_atree (graphwd,e::es) Empty = - Br([], - [(e,insert_into_atree (graphwd,es) Empty)] - ) - | insert_into_atree (graphwd,[]) (Lf bag) = Lf (graphwd::bag) - | insert_into_atree (graphwd,e::es) (Lf bag) = - Br (bag, - [(e,insert_into_atree (graphwd,es) Empty)] - ) - | insert_into_atree (graphwd,[]) (Br (bag,brs)) = - Br ((graphwd::bag),brs) - | insert_into_atree (graphwd,e::es) (Br (bag,brs)) = - let - fun insert_into_list [] = - (e, insert_into_atree (graphwd,es) Empty)::[] - | insert_into_list ((t_entry,br)::xs) = - case AList.entry_ord (e, t_entry) of - EQUAL => (t_entry, insert_into_atree (graphwd,es) br) :: xs - |LESS => (t_entry, br) :: insert_into_list xs - |GREATER => (e, insert_into_atree (graphwd,es) Empty) :: (t_entry,br) :: xs - in - Br (bag,insert_into_list brs) - end; - - fun create_atree_from_ruleset graphs_with_data = - (graphs_with_data - |> map (fn (graph,startv) => - ((graph,startv), - AList.build_adj_list graph startv)) - |> fold insert_into_atree) Empty; - (* - fun create_atree_from_ruleset ruleset = - let - val rule_list = NTab.list_of ruleset - val graphs_with_data = map (fn (rname,(rule,flag)) => - let - val lhs = R.get_lhs rule - in - (lhs, (AList.select_biggest lhs, (rname,rule))) - end) rule_list - in - create_atree_from_ruleset' graphs_with_data - end; - *) - fun create_atree_from_graphlist graphs = - (graphs - |> map (fn graph => - ((graph,AList.select_biggest graph), - AList.get_adjacency graph)) - |>fold insert_into_atree) Empty; - - - (*Improve: target adjacencylists/contours are generated on the fly. Implement - them as a lazy list? 
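   (A hedged aside on how the tree is consumed downstream, mirroring the
   adj_matching driver in rtest.ML; "pats", "tgt" and "v" are placeholders:

      val atree  = AdjacencyTree.create_atree_from_graphlist pats;
      val ktable = AdjacencyList.get_kind_table tgt;
      val cands  = AdjacencyTree.lookup_atree tgt v atree ktable;

    each element of cands pairs a candidate pattern graph with an initial
    vertex-name agenda that can be handed straight to the matcher.)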
or calculate them beforehand based on the max depth of - the tree?*) - fun prune_atree tgt_graph tgt_ktable vnames Empty = Empty - | prune_atree tgt_graph tgt_ktable vnames (Lf bag) = Lf bag - | prune_atree tgt_graph tgt_ktable vnames (Br (bag,brs)) = - if NSet.is_empty vnames - then Br (bag,brs) - else - let - val tgt_entry = AList.make_entry vnames tgt_ktable - val (graph2,vnames2) = AList.pull_next_layer (tgt_graph,vnames) - in - Br (bag, - map (fn (pat_entry,br) => - if AList.compatible tgt_entry pat_entry - then (pat_entry, prune_atree graph2 tgt_ktable vnames2 br) - else (tgt_entry,Lf [])) brs) - end; - - - fun get_graphs Empty = [] - | get_graphs (Lf bag) = bag - | get_graphs (Br (bag,brs)) = - bag @ maps (fn (pat_entry,br) => get_graphs br) brs; - - fun lookup_atree graph name atree ktable = - let - val init_nset = NSet.single name - in - prune_atree graph ktable init_nset atree - |> get_graphs - |> map (fn (graph,startv) => (graph,NTab.doadd (startv,name) NTab.empty)) - end; - - -(*information about tree*) - fun leaves Empty = 0 - | leaves (Lf _) = 1 - | leaves (Br (_,brs)) = - brs - |> map (fn (_,br) => leaves br) - |> List.foldr op+ 0; - - fun bags Empty = 0 - | bags (Lf bag) = - (case bag of - [] => 0 - | _ => 1) - | bags (Br (bag,brs)) = - (case bag of - [] => 0 - | _ => 1 ) + (brs - |> map (fn (_,br) => bags br) - |> List.foldr op+ 0); - - fun graphs Empty = 0 - | graphs (Lf bag) = List.length bag - | graphs (Br (_,brs)) = - brs - |> map (fn (_,br) => graphs br) - |> List.foldr op+ 0; - - fun graphs' Empty = 0 - | graphs' (Lf bag) = List.length bag - | graphs' (Br (bag,brs)) = - (brs - |> map (fn (bag,br) => graphs' br) - |> List.foldr op+ 0) + List.length bag; - - fun depth Empty = 0 - | depth (Lf _ ) = 0 - | depth (Br (bag,brs)) = - brs - |> map (fn (_,br) => 1 + depth br) - |> List.foldr (fn (n, m) => - if n > m then n else m) 0; - - fun first_width Empty = 0 - | first_width (Lf bag) = 0 - | first_width (Br (bag,brs)) = - List.length brs - - fun avdepth Empty = 0.0 - | avdepth (Lf bag) = 0.0 - | avdepth (Br (bag,brs)) = - let - val length = List.length brs - val sum = List.foldr op+ 0.0 - fun av xs = (sum xs) / (Real.fromInt length) - in - av (map (fn (entry,br) => 1.0 + avdepth br) brs) - end; - - fun balance atree = (Real.fromInt (depth atree)) - (avdepth atree); - - - fun test_tree atree = - (leaves atree, - depth atree, - first_width atree, - balance atree - ) - -end; diff --git a/core/rewriting/indexing/htree.ML b/core/rewriting/indexing/htree.ML deleted file mode 100644 index fa2fa65f..00000000 --- a/core/rewriting/indexing/htree.ML +++ /dev/null @@ -1,222 +0,0 @@ - - -signature HISTTREE -= sig - - structure H : HISTOGRAM - structure G : GRAPH - - type T - type Bag - - val make_htree : Bag -> T - val lookup_htree : G.T -> T -> Bag - val test_tree : T -> int * int * int * int -end; - -structure HistTree : HISTTREE -= struct - - structure H = Histogram; - structure G = RGGraph; - structure ITab = NameTabFun(IntName); - - type GraphWithData = G.T - type Bag = G.T list; - - datatype HTree = Br of (VKind.T * HTree ITab.T) - | Lf of Bag - | Empty; - - type T = HTree; - - - (*build/alter htree in incremental fashion. The Empty type should only be encountered - on the first pass of a build. ITab.ins shouldn't raise exceptions as it's only used - in situations where the vertex kind isn't in the tree. The one exception is the - EQUALS case below, but it's been guarded against there. 
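   As far as one can tell from the code, the net effect is that a lookup
   returns every stored graph whose histogram is dominated entry-by-entry by
   the target's histogram.  A hedged usage sketch, with "pats" and "tgt" as
   placeholder values:

     val htree = HistTree.make_htree pats;          (* pats : G.T list *)
     val cands = HistTree.lookup_htree tgt htree;   (* candidate patterns *)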
Zero branches are created - whenever a new vertex kind is added to the tree*) - - fun insert_into_htree (graph,[]) Empty = Lf [graph] - (*if htree and histogram are empty, create a leaf*) - - | insert_into_htree (graph,(vkh,n)::hs) Empty = - Br (vkh, - ITab.ins (0,Lf []) (*assume zero cannot appear in hist*) - (ITab.ins (n, - insert_into_htree (graph,hs) Empty ) - ITab.empty)) - (*if htree is empty, we know vkh is not in the tree*) - - | insert_into_htree (graph,[]) (Lf bag) = Lf (graph::bag) - (*if hist is empty and htree is a leaf, add graph to bag*) - - | insert_into_htree (graph,(vkh,n)::hs) (Lf bag) - = - Br (vkh, - ITab.ins (0, Lf bag) - (ITab.ins (n, - insert_into_htree (graph,hs) (Lf [])) - ITab.empty)) - (*if htree is a leaf but hist is not empty, we need to expand the htree. - all remaining entries in the hist are not in the tree*) - - | insert_into_htree (graph,[]) (Br (vkt,tab)) = - Br (vkt, - ITab.map_entry (insert_into_htree (graph, [])) 0 tab - ) - (*if hist is empty, the remainder of the htree cannot rule out this graph - in a query, so propagate it to the zero branch at all remaining levels*) - - | insert_into_htree (graph, hist) - (branch as (Br (vkt, tab))) - = - let - val (vkh,n) = hd hist - in - case VKind.kind_ord (vkh,vkt) of - EQUAL => Br (vkt, - update_table (graph, tl hist) n tab) - (*see below for update_table function. - if necessary it creates a new table entry for this amount of - the current vkind, and propagates the graph to all branches - corresponding to this number or greater*) - - | LESS => Br (vkt, - ITab.map_entry (insert_into_htree (graph, hist)) 0 tab - ) - (*this means that vkt is not in the current hist, and therefore - provides no information about this graph for query purposes. - We propagate graph to the zero branch of the next layer*) - - |GREATER=> - let - val branch2 = insert_into_htree (graph, tl hist) (Lf []) - in - Br (vkh, ITab.ins (n,branch2) - (ITab.ins (0, branch) ITab.empty)) - end - (*vkh is not in tree, insert a new "layer" into the tree - note we've inserted a zero entry*) - end - - and update_table gh num tab = - case ITab.lookup tab num of - NONE => ITab.ins (num, insert_into_htree gh (Lf []) ) tab - (*table has no entry with this number of the current vkind*) - | SOME branch => ITab.update (num,insert_into_htree gh branch) tab; - - fun make_htree bag = - (bag - |> map (fn graph => (graph,H.create_hist graph)) - |> (fold insert_into_htree)) - Empty; - - - (*Zero branch always exists, so this shouldn't throw exceptions*) - fun prune_htree hist Empty = Empty - | prune_htree hist (Lf bag) = Lf bag - | prune_htree [] (Br (vkt,tab)) = - Br (vkt, - ITab.ins (0, prune_htree [] (ITab.get tab 0)) - ITab.empty) - - | prune_htree hist (Br (vkt, tab)) = - let - val (vkh,n) = hd hist - in - case VKind.kind_ord (vkh,vkt) of - EQUAL => - Br (vkt, - ITab.map_all' (fn m => fn br => - if n >= m - then prune_htree (tl hist) br - else Lf [] - ) tab - ) - (*LESS => vkt not in hist, all graphs that require more than 0 of vkt - will not match and so only zero branch should be kept*) - |LESS => - Br (vkt, - ITab.ins (0, prune_htree hist (ITab.get tab 0)) - ITab.empty) - (*GREATER => vkh not in htree, it therefore provides no information about - rules, so ignore it*) - |GREATER=> - prune_htree (tl hist) (Br (vkt,tab)) - - end; - - - fun get_graphs Empty = [] - | get_graphs (Lf bag) = bag - | get_graphs (Br (_,tab)) = - (tab - |> ITab.fold (fn (_,br) => fn l => - (get_graphs br) @ l) - ) []; - - - fun lookup_htree graph htree = - (get_graphs ( prune_htree 
(H.create_hist graph) htree)) - - -(*information about htree*) - - fun leaves Empty = 0 - | leaves (Lf _) = 1 - | leaves (Br (_,tab)) = - (tab - |> ITab.map_all' (fn _ => fn br => - leaves br) - |> ITab.fold (fn (name,num1) => fn num2 => - num1 + num2)) 0; - - fun depth Empty = 0 - | depth (Lf _) = 0 - | depth (Br (_,tab)) = - (tab - |> ITab.map_all' (fn _ => fn br => - 1 + depth br) - |> ITab.fold (fn (name,num1) => fn num2 => - if num1 > num2 then num1 else num2)) 0; - - fun av_depth htree = - let - fun all_depths Empty = [0] - | all_depths (Lf _) = [0] - | all_depths (Br (_,tab)) = - (tab - |> ITab.map_all' (fn _ => fn br => - map (fn m => m + 1) (all_depths br)) - |> ITab.fold (fn (_,l1) => fn l2 => l1 @ l2)) [] - val num_of_leaves = leaves htree - in - (List.foldr op+ 0 (all_depths htree)) div num_of_leaves - end; - - fun graphs Empty = 0 - | graphs (Lf bag) = List.length bag - | graphs (Br (_ , tab)) = - (tab - |> ITab.map_all' (fn _ => fn br => graphs br) - |> ITab.fold (fn (_,num1) => fn num2 => - num1 + num2)) 0; - - fun test_tree htree = - (leaves htree, - depth htree, - av_depth htree, - graphs htree); - - - - - - - - - - - -end; diff --git a/core/rewriting/indexing/htreeREF.ML b/core/rewriting/indexing/htreeREF.ML deleted file mode 100644 index 187fd1ab..00000000 --- a/core/rewriting/indexing/htreeREF.ML +++ /dev/null @@ -1,170 +0,0 @@ - - -signature HISTTREE -= sig - - structure H : HISTOGRAM - structure G : GRAPH - - type T - type Bag - - val make_htree : G.T list -> T - val lookup_htree : H.T -> T -> G.T list -end; - -structure HistTree : HISTTREE -= struct - - structure H = Histogram; - structure G = RGGraph; - structure ITab = NameTabFun(IntName); - - type GraphWithData = G.T - type Bag = (G.T ref) list; - - datatype HTree = Br of (VKind.T * HTree ITab.T) - | Lf of Bag - | Empty; - - type T = HTree; - - - (*build/alter htree in incremental fashion. The Empty type should only be encountered - on the first pass of a build. ITab.ins shouldn't raise exceptions as it's only used - in situations where the vertex kind isn't in the tree. The one exception is the - EQUALS case below, but it's been guarded against there. Zero branches are created - whenever a new vertex kind is added to the tree*) - - fun insert_into_htree (graph,[]) Empty = Lf [graph] - (*if htree and histogram are empty, create a leaf*) - - | insert_into_htree (graph,(vkh,n)::hs) Empty = - Br (vkh, - ITab.ins (0,Lf []) (*assume zero cannot appear in hist*) - (ITab.ins (n, - insert_into_htree (graph,hs) Empty ) - ITab.empty)) - (*if htree is empty, we know vkh is not in the tree*) - - | insert_into_htree (graph,[]) (Lf bag) = Lf (graph::bag) - (*if hist is empty and htree is a leaf, add graph to bag*) - - | insert_into_htree (graph,(vkh,n)::hs) (Lf bag) - = - Br (vkh, - ITab.ins (0, Lf bag) - (ITab.ins (n, - insert_into_htree (graph,hs) (Lf bag)) - ITab.empty)) - (*if htree is a leaf but hist is not empty, we need to expand the htree. - all remaining entries in the hist are not in the tree*) - - | insert_into_htree (graph,[]) (Br (vkt,tab)) = - Br (vkt, - ITab.map_all - (fn br => insert_into_htree (graph, []) br) - tab - ) - (*if hist is empty, the remainder of the htree cannot rule out this graph - in a query, so propagate it to all leaves reachable from current branch*) - - | insert_into_htree (graph, hist) - (branch as (Br (vkt, tab))) - = - let - val (vkh,n) = hd hist - in - case VKind.kind_ord (vkh,vkt) of - EQUAL => Br (vkt, - update_table (graph, tl hist) n tab) - (*see below for update_table function. 
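   (Design note, stated tentatively: in this REF variant a graph may be
   propagated into many branches, so bags hold shared G.T ref cells rather
   than copies, and lookup_htree dereferences them only at the very end.
   The lookup interface also differs from htree.ML in taking a precomputed
   histogram:

      htree.ML    :  lookup_htree : G.T -> T -> Bag
      htreeREF.ML :  lookup_htree : H.T -> T -> G.T list
   )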
- if necessary it creates a new table entry for this amount of - the current vkind, and propagates the graph to all branches - corresponding to this number or greater*) - - | LESS => Br (vkt, - ITab.map_all - (fn br => insert_into_htree (graph, hist) br) - tab) - (*this means that vkt is not in the current hist, and therefore - provides no information about this graph for query purposes. - We propagate graph to all branches in next layer*) - - |GREATER=> - let - val branch2 = insert_into_htree (graph, tl hist) branch - in - Br (vkh, ITab.ins (n,branch2) - (ITab.ins (0, branch) ITab.empty)) - end - (*vkh is not in tree, insert a new "layer" into the tree - note we've inserted a zero entry*) - end - - and update_table gh num tab = - let - val (smaller,branch) = the (ITab.next_smaller tab num) - (*should never raise an exception, all things going to plan: there is - always a zero branch in each table, and zeroes cannot appear in - histograms (or at least shouldn't)*) - in - (case ITab.lookup tab num of - NONE => ITab.ins (num, branch) tab - (*table has no entry with this number of the current vkind, - copy the branch from the next smallest entry*) - | SOME _ => tab) |> - ITab.map_all' (fn n => fn br => - if num <= n - then insert_into_htree gh br - else br) - (*continue to insert graph into all subtrees corresponding to - the same or greater number of the current vkind*) - end - - - - fun make_htree bag = - (bag - |> map (fn graph => (ref graph,H.create_hist graph)) - |> (fold insert_into_htree)) - Empty; - - (*get functions should never throw exceptions, there is always a zero branch*) - fun lookup_htree' hist Empty = [] - (*should never happen*) - | lookup_htree' hist (Lf bag) = bag - - | lookup_htree' [] (Br (_,tab)) = lookup_htree' [] (ITab.get tab 0) - (*hist is empty, move to zero branch at all remaining nodes*) - | lookup_htree' ((vkh,num)::hs) - (Br (vkt, tab)) = - - case VKind.kind_ord (vkh,vkt) of - EQUAL => ( - case ITab.lookup tab num of - NONE => let - val (_,branch) = the (ITab.next_smaller tab num) - in - lookup_htree' hs branch - end - | SOME branch => lookup_htree' hs branch - ) - (*follow path corresponding to num or the next smallest key*) - - | LESS => lookup_htree' ((vkh,num)::hs) (ITab.get tab 0) - (*vkt does not appear in hist, move to zero branch*) - - |GREATER=> lookup_htree' hs (Br (vkt,tab)); - (*vkh does not appear in tree, move to next hist entry*) - - - fun lookup_htree hist htree = - lookup_htree' hist htree - |> map (fn graphref => ! 
graphref) - - - - - -end; diff --git a/core/rewriting/indexing/rbuild.ML b/core/rewriting/indexing/rbuild.ML deleted file mode 100644 index 915adc40..00000000 --- a/core/rewriting/indexing/rbuild.ML +++ /dev/null @@ -1,135 +0,0 @@ - -structure RandomGraph -= struct - - structure G = RGGraph; - structure V = G.RGVertex - structure Lib = Library; - structure NSet = V.NSet; - open MkG; - - - - fun all_ivert_list 0 = [] - | all_ivert_list n = - let - val angle = Lib.one_of [zero_angle, pi_angle] - val node = Lib.one_of [mkZ angle, mkX angle, mkH] - in - node :: all_ivert_list (n-1) - end; - - fun no_angle_ivert_list 0 = [] - | no_angle_ivert_list n = - let - val node = Lib.one_of [mkZ zero_angle, mkX zero_angle, mkH] - in - node :: no_angle_ivert_list (n-1) - end; - - (**) - fun create_connected_component vtypes = - let - val graph = fold G.doadd_vertex vtypes G.empty - val vnames = NSet.list_of (G.get_vertices graph) - fun add_e_to_graph [] g acc = g - | add_e_to_graph (n::names) g [] = add_e_to_graph names g [n] - | add_e_to_graph (n::names) g acc = - let - val iname = Lib.one_of acc - val g2 = G.doadd_edge edge n iname g - in - add_e_to_graph names g2 (n::acc) - end - in - add_e_to_graph vnames graph [] - end; - - fun one_pair_from [name] = (name,name) - | one_pair_from names = - let - val first = Lib.one_of names - val second= Lib.one_of (filter (fn x => not (V.name_eq (first,x))) names) - in - (first,second) - end; - - - fun add_extra_edges n vnames graph = - let - fun add_edges 0 vs g = g - | add_edges n (vs : V.name list) g = - let - val (v1,v2) = one_pair_from vs - in - add_edges (n-1) vnames - (G.doadd_edge edge - v1 v2 - g) - end - in - add_edges n vnames graph - end; - - fun add_boundary n ivnames graph = - let - val (bvnames,graph2) = - fold (fn _ => fn (xs, g) => - let - val (name,g2) = G.add_vertex bvert g - in - (name::xs,g2) - end - ) (1 upto n) ([],graph) - in - fold (fn name => fn g => - G.doadd_edge edge - name - (Lib.one_of ivnames) - g) bvnames graph2 - end; - - (*generates a random connected graph in minimal form (all boundary verts have - only one incident edge) there will be at least sufficient edges to connect - Iverts, but more may be added with nume.Allows you to control which random - vertex type generator is used*) - fun pre_random_graph vgen numv numb nume = - let - val prim_graph = vgen numv - |> create_connected_component - val vnames = (NSet.list_of o G.get_vertices) prim_graph - in - prim_graph - |> add_extra_edges nume vnames - |> add_boundary numb vnames - end; - - (*all vertex types appear, including pi and zero X and Z vertices*) - val std_rgraph = pre_random_graph all_ivert_list - - (*X and Z vertices all have zero angle*) - val simple_rgraph = pre_random_graph no_angle_ivert_list - - fun pattern_set' graph_generator nrules maxv maxb = - let - val maxv' = if maxv < 3 then 3 else maxv - val maxb' = if maxb < 2 then 2 else maxb - in - map (fn _ => - let - val vs = Lib.random_range 2 maxv' - val bs = Lib.random_range 1 maxb' - in - graph_generator vs bs 0 - end) (1 upto nrules) - end; - - - - fun pattern_set gg nr = pattern_set' gg nr 4 3; - fun std_pattern_set nrules = pattern_set' (std_rgraph) nrules 4 3; - fun simple_pattern_set nrules = pattern_set' (simple_rgraph) nrules 4 3; - fun varied_pattern_set nrules = pattern_set' (std_rgraph) nrules 20 5; - - -end; diff --git a/core/rewriting/indexing/rtest.ML b/core/rewriting/indexing/rtest.ML deleted file mode 100644 index afa654ec..00000000 --- a/core/rewriting/indexing/rtest.ML +++ /dev/null @@ -1,351 +0,0 
@@ -structure Tester -= struct - - structure G = RGGraph; - structure LS = RGGraphLoaderSaver; - structure H = Histogram; - structure HTree = HistTree; - structure AList = AdjacencyList; - structure ATree = AdjacencyTree; - structure NSet = V.NSet; - structure NTab = V.NTab; - structure Lib = Library; - structure RG = RandomGraph; - structure MSt = MatchState; - structure M = Match; - - -(*general utilities*) - fun time_wrapper m n (thing : unit -> 'a) = - let - val cput = Timer.startCPUTimer () - val rt = Timer.startRealTimer () - in - cput; - rt; - (thing (), m (*number of rules*),n (*tgt size*), - Timer.checkCPUTimes cput ,Timer.checkRealTimer rt) - end; - - val empty_names = (NSet.empty,E.NSet.empty,NSet.empty) - - fun pull_all xs seq = - case Seq.pull seq of - NONE => xs - | SOME (x,seqn) => pull_all (x::xs) seqn; - - fun matched xs = List.length (filter (fn x => case x of [] => false | _ => true) xs); - - fun matches xs = List.foldr op+ 0 (map List.length xs); - - - fun make_time (cput as { - nongc : { - usr : Time.time, - sys : Time.time - }, - gc : { - usr : Time.time, - sys : Time.time - } - }) rt = - let - val tot_cput = #usr (#nongc cput) + #sys (#nongc cput) + - #usr (#gc cput) + #sys (#gc cput) - in - - case Time.compare (tot_cput,rt) of - GREATER => tot_cput - | _ => rt - end; - - fun process_results xs = - map - (map (fn (matchlist,nrules,tgtsize,cputime,rtime) => - (matched matchlist,matches matchlist,nrules,tgtsize, - make_time cputime rtime - ))) xs; - - - (*get internal vertex names from a graph*) - fun get_inames graph = - let - val vnames = G.get_vertices graph - val bnames = G.get_boundary graph - in - NSet.remove_set bnames vnames - end; - - (*output to file*) - local - open TextIO - in - fun output results filename = - let - val dir = "indexing/test/results/" ^ filename - val ostream = openAppend dir - val show = Int.toString - in - ostream; - map (fn (num_matched, num_matches, nrules, tgtsize, time) => - outputSubstr (ostream, Substring.full ( - (show num_matched) ^ "\t" ^ - (show num_matches) ^ "\t" ^ - (show nrules) ^ "\t" ^ - (show tgtsize) ^ "\t" ^ - (show (Time.toMilliseconds time)) ^ "\n") - ) - ) results; - closeOut ostream - end - - fun test_out thing name reps = - Seq.map (fn n => output (Lib.flat (thing ())) name) (Seq.of_list (1 upto reps)) - |> pull_all []; - end; - - - -(*current Matching algorithm*) - - (*generates a ruleset and matches it against the given tgt graph*) - fun matching pats tgt = - map (pull_all []) - (map (fn pat => MSt.match empty_names pat tgt NTab.empty) pats) - - - fun match_test (gg as graph_generator) - (*ranges of values to try*)ruleset_range tgt_range - (*scaling factors for each range*)ruleset_scale tgt_scale= - (map (fn n (*vary pattern set size*) => - let - val pats = RG.pattern_set gg (n*ruleset_scale) - in - map (fn m (*vary tgt size*) => - let - val tgt = gg (m*tgt_scale) (m*(tgt_scale div 10)) 0 - in - time_wrapper (n*ruleset_scale) (m*tgt_scale) - (fn () => - matching pats tgt - ) - end) tgt_range - end) ruleset_range) - |> process_results; - - - - val test_simple = match_test RG.simple_rgraph; - val test_std = match_test RG.std_rgraph; - - (*histogram*) - fun hist_matching tgt htree = - let - val pats = HTree.lookup_htree tgt htree - in - map (pull_all []) - (map (fn pat => MSt.match empty_names pat tgt NTab.empty) pats) - end; - - - fun hist_match_test (gg as graph_generator) - (*ranges of values to try*)ruleset_range tgt_range - (*scaling factors for each range*)ruleset_scale tgt_scale= - (map (fn n (*vary pattern 
set size*) => - let - val pats = RG.pattern_set gg (n*ruleset_scale) - val htree = HTree.make_htree pats - in - map (fn m (*vary tgt size*) => - let - val tgt = gg (m*tgt_scale) (m*(tgt_scale div 10)) 0 - in - time_wrapper (n*ruleset_scale) (m*tgt_scale) - (fn () => - hist_matching tgt htree - ) - end) tgt_range - end) ruleset_range) - |> process_results; - - - (*adjacency*) - - fun adj_matching tgt atree = - let - val names = (NSet.list_of o get_inames) tgt - val ktable = AList.get_kind_table tgt - in - map (pull_all []) - (Lib.flat (map (fn name => - let - val prematches = ATree.lookup_atree tgt name atree ktable - in - map (fn (pat,pre_agenda) => - MSt.match empty_names pat tgt pre_agenda) prematches - end) names)) - end; - - fun adj_match_test (gg as graph_generator) - (*ranges of values to try*)ruleset_range tgt_range - (*scaling factors for each range*)ruleset_scale tgt_scale= - (map (fn n (*vary pattern set size*) => - let - val pats = RG.pattern_set gg (n*ruleset_scale) - val atree = ATree.create_atree_from_graphlist pats - in - map (fn m (*vary tgt size*) => - let - val tgt = gg (m*tgt_scale) (m*(tgt_scale div 10)) 0 - in - time_wrapper (n*ruleset_scale) (m*tgt_scale) - (fn () => - adj_matching tgt atree - ) - end) tgt_range - end) ruleset_range) - |> process_results; - - (*comparing two approaches tests. random graphs are generated only once and shared between algorithms*) - - fun adj_vs_match (gg as graph_generator) - (*ranges of values to try*) ruleset_range tgt_range - (*scaling factors for each range*) ruleset_scale tgt_scale= - (map (fn n (*vary pattern set size*) => - let - val pats = RG.pattern_set gg (n*ruleset_scale) - val atree = ATree.create_atree_from_graphlist pats - in - map (fn m (*vary tgt size*) => - let - val tgt = gg (m*tgt_scale) (m*(tgt_scale div 10)) 0 - in - [time_wrapper (n*ruleset_scale) (m*tgt_scale) - (fn () => - adj_matching tgt atree - ), - time_wrapper (n*ruleset_scale) (m*tgt_scale) - (fn () => - matching pats tgt - )] - end) tgt_range - end) ruleset_range) - |> map process_results; - - fun varied_adj_vs_match - (*ranges of values to try*) ruleset_range tgt_range - (*scaling factors for each range*) ruleset_scale tgt_scale= - (map (fn n (*vary pattern set size*) => - let - val pats = RG.varied_pattern_set (n*ruleset_scale) - val atree = ATree.create_atree_from_graphlist pats - in - map (fn m (*vary tgt size*) => - let - val tgt = RG.std_rgraph (m*tgt_scale) (m*(tgt_scale div 10)) 0 - in - [time_wrapper (n*ruleset_scale) (m*tgt_scale) - (fn () => - adj_matching tgt atree - ), - time_wrapper (n*ruleset_scale) (m*tgt_scale) - (fn () => - matching pats tgt - )] - end) tgt_range - end) ruleset_range) - |> map process_results; - - fun hist_vs_match (gg as graph_generator) - (*ranges of values to try*) ruleset_range tgt_range - (*scaling factors for each range*) ruleset_scale tgt_scale= - (map (fn n (*vary pattern set size*) => - let - val pats = RG.pattern_set gg (n*ruleset_scale) - val htree = HTree.make_htree pats - in - map (fn m (*vary tgt size*) => - let - val tgt = gg (m*tgt_scale) (m*(tgt_scale div 10)) 0 - in - [time_wrapper (n*ruleset_scale) (m*tgt_scale) - (fn () => - hist_matching tgt htree - ), - time_wrapper (n*ruleset_scale) (m*tgt_scale) - (fn () => - matching pats tgt - )] - end) tgt_range - end) ruleset_range) - |> map process_results; - - - (*standard tests*) - - fun std_match min max = match_test RG.std_rgraph (min upto max) (min upto max) 10 50; - fun simple_match min max = match_test RG.simple_rgraph (min upto max) (min upto max) 10 
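(* Hedged note on recording results: the std_*/simple_* drivers defined here
   can be run through test_out, which appends one row per (ruleset size,
   target size) pair to a file under indexing/test/results/.  Each row
   written by the local output helper is tab-separated: number of rules that
   matched at least once, total number of matches, ruleset size, target size,
   and elapsed time in milliseconds (make_time reports the larger of total
   CPU time, including GC, and real time).  For example ("my-run.txt" is an
   arbitrary file name):

     val _ = test_out (fn () => std_match 1 3) "my-run.txt" 5;
 *)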
50; - - fun std_hist min max = hist_match_test RG.std_rgraph (min upto max) (min upto max) 10 50; - fun simple_hist min max = hist_match_test RG.simple_rgraph (min upto max) (min upto max) 10 50; - - fun std_adj min max = adj_match_test RG.std_rgraph (min upto max) (min upto max) 10 50; - fun simple_adj min max = adj_match_test RG.simple_rgraph (min upto max) (min upto max) 10 50; - - (*ruleset vs constant target size*) - - fun std_rule_match min max = match_test RG.std_rgraph (min upto max) (Lib.replicate 20 1) 5 500; - fun std_rule_adj min max = adj_match_test RG.std_rgraph (min upto max) (Lib.replicate 20 1) 5 500; - fun std_rule_hist min max = hist_match_test RG.std_rgraph (min upto max) (Lib.replicate 20 1) 5 500; - - (*constant ruleset vs target size*) - - fun std_tgt_match min max = match_test RG.std_rgraph (Lib.replicate 20 1) (min upto max) 100 50; - fun std_tgt_adj min max = adj_match_test RG.std_rgraph (Lib.replicate 20 1) (min upto max) 100 50; - fun std_tgt_hist min max = hist_match_test RG.std_rgraph (Lib.replicate 20 1) (min upto max) 100 50; - - (*vs tests*) - - fun std_adj_vs_match min max = adj_vs_match RG.std_rgraph (min upto max) (min upto max) 40 400; - - fun std_hist_vs_match min max = hist_vs_match RG.std_rgraph (min upto max) (min upto max) 10 100; - - local - open TextIO - fun output results filename = - let - val dir = "indexing/test/results/" ^ filename - val ostream = openAppend dir - val show = Int.toString - val showreal = Real.toString - in - ostream; - map (fn [(num_matched1, num_matches1, nrules1, tgtsize1, time1), - (num_matched2, num_matches2, nrules2, tgtsize2, time2)] - => - outputSubstr (ostream, Substring.full ( - (show num_matched1) ^ "\t" ^ - (show num_matches1) ^ "\t" ^ - (show nrules1) ^ "\t" ^ - (show tgtsize1) ^ "\t" ^ - (show (Time.toMilliseconds time1)) ^ "\t" ^ - (show num_matched2) ^ "\t" ^ - (show num_matches2) ^ "\t" ^ - (show nrules2) ^ "\t" ^ - (show tgtsize2) ^ "\t" ^ - (show (Time.toMilliseconds time2)) ^ "\n") - ) - ) results; - closeOut ostream - end - in - - fun test_out_vs thing name reps = - Seq.map (fn n => output (Lib.flat (thing ())) name) (Seq.of_list (1 upto reps)) - |> pull_all []; - end; - - - - -end; diff --git a/core/rewriting/indexing/test.ML b/core/rewriting/indexing/test.ML deleted file mode 100644 index 999aa203..00000000 --- a/core/rewriting/indexing/test.ML +++ /dev/null @@ -1,70 +0,0 @@ - - - -structure MyTester -= struct - - structure G = RGGraph; - structure LS = RGGraphLoaderSaver; - structure H = Histogram; - structure HTree = HistTree; - structure AList = AdjacencyList; - structure ATree = AdjacencyTree; - structure NSet = V.NSet; - structure NTab = V.NTab; - structure Lib = Library; - structure RG = RandomGraph; - structure MSt = MatchState; - - - fun load (file : string) = LS.load_graph - ((OS.FileSys.getDir ()) ^ "/indexing/test/" ^ file); - - fun save (name : string) graph = LS.save_graph - ((OS.FileSys.getDir ()) ^ "/indexing/test/saved/" ^ name) graph - - val ruleset = - ["histtest/rule1.xml", - "histtest/rule2.xml", - "histtest/rule3.xml", - "histtest/rule4.xml", - "histtest/rule5.xml", - "histtest/rule6.xml"]; - - fun load_rule_number n = load ("rules/rule" ^ (Int.toString n) ^ ".xml"); - - fun load_graph_with_name n = - (load ("rules/rule" ^ (Int.toString n) ^ ".xml"), - "rule" ^ (Int.toString n)); - - fun zipwith _ [] _ = [] - | zipwith _ _ [] = [] - | zipwith f (x::xs) (y::ys) = (f x y) :: zipwith f xs ys; - - - val empty_names = (NSet.empty,E.NSet.empty,NSet.empty) - - fun pull_all xs seq = - case 
Seq.pull seq of - NONE => xs - | SOME (x,seqn) => pull_all (x::xs) seqn; - - val test_compat = (fn () => - let - val pats = map load_graph_with_name (1 upto 9) - val adjs = map (fn (g,name) => (AList.get_adjacency g,name)) pats - in - map (fn (adj,name) => - (adjs - |> map (fn (adj2,name2) => - (zipwith AList.compatible adj adj2,name2))) - |> filter (fn (bools,name2) => - List.foldr (fn (b1,b2) => b1 andalso b2) true bools) - |> map (fn (_,name2) => (name,name2)) - ) adjs - end - ); - -end; - - diff --git a/core/rewriting/indexing/test/histtest/answer1.xml b/core/rewriting/indexing/test/histtest/answer1.xml deleted file mode 100644 index db8b6c19..00000000 --- a/core/rewriting/indexing/test/histtest/answer1.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - d - true - - - c - true - - - b - false - green - - - a - false - red - - - - Edad - - - Ecba - - - Ebba - - - Eacb - - - - \ No newline at end of file diff --git a/core/rewriting/indexing/test/histtest/answer2.xml b/core/rewriting/indexing/test/histtest/answer2.xml deleted file mode 100644 index fa9f65fd..00000000 --- a/core/rewriting/indexing/test/histtest/answer2.xml +++ /dev/null @@ -1,40 +0,0 @@ - - - - e - true - - - d - true - - - c - true - - - b - false - green - - - a - false - red - - - - Edbe - - - Ecbd - - - Ebab - - - Eaca - - - - \ No newline at end of file diff --git a/core/rewriting/indexing/test/histtest/rule1.xml b/core/rewriting/indexing/test/histtest/rule1.xml deleted file mode 100644 index 33a19acc..00000000 --- a/core/rewriting/indexing/test/histtest/rule1.xml +++ /dev/null @@ -1,33 +0,0 @@ - - - - d - true - - - c - true - - - b - false - green - - - a - false - red - - - - Ecba - - - Ebbd - - - Eacb - - - - \ No newline at end of file diff --git a/core/rewriting/indexing/test/histtest/rule2.xml b/core/rewriting/indexing/test/histtest/rule2.xml deleted file mode 100644 index 593ee610..00000000 --- a/core/rewriting/indexing/test/histtest/rule2.xml +++ /dev/null @@ -1,66 +0,0 @@ - - - - h - true - - - g - true - - - f - true - - - e - true - - - d - false - green - - - c - false - green - - - b - false - red - - - a - false - red - - - - Ehch - - - Egag - - - Effb - - - Eeca - - - Edbc - - - Ecdb - - - Ebda - - - Eaed - - - - \ No newline at end of file diff --git a/core/rewriting/indexing/test/histtest/rule3.xml b/core/rewriting/indexing/test/histtest/rule3.xml deleted file mode 100644 index fa9f65fd..00000000 --- a/core/rewriting/indexing/test/histtest/rule3.xml +++ /dev/null @@ -1,40 +0,0 @@ - - - - e - true - - - d - true - - - c - true - - - b - false - green - - - a - false - red - - - - Edbe - - - Ecbd - - - Ebab - - - Eaca - - - - \ No newline at end of file diff --git a/core/rewriting/indexing/test/histtest/rule4.xml b/core/rewriting/indexing/test/histtest/rule4.xml deleted file mode 100644 index db8b6c19..00000000 --- a/core/rewriting/indexing/test/histtest/rule4.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - d - true - - - c - true - - - b - false - green - - - a - false - red - - - - Edad - - - Ecba - - - Ebba - - - Eacb - - - - \ No newline at end of file diff --git a/core/rewriting/indexing/test/histtest/rule5.xml b/core/rewriting/indexing/test/histtest/rule5.xml deleted file mode 100644 index 3c4109f1..00000000 --- a/core/rewriting/indexing/test/histtest/rule5.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - - g - true - - - f - true - - - e - true - - - d - false - H - - - c - false - H - - - b - false - H - - - a - false - red - - - - Efbf - - - Eecg - - - Edac - - - Ecab - - - Ebda - - - Eaed - - - - \ No newline at 
end of file diff --git a/core/rewriting/indexing/test/histtest/rule6.xml b/core/rewriting/indexing/test/histtest/rule6.xml deleted file mode 100644 index 5f9113f6..00000000 --- a/core/rewriting/indexing/test/histtest/rule6.xml +++ /dev/null @@ -1,55 +0,0 @@ - - - - g - true - - - f - true - - - e - true - - - d - true - - - c - false - green - - - b - false - green - - - a - false - red - - - - Efca - - - Eeba - - - Edag - - - Ecfb - - - Ebdc - - - Eaea - - - - \ No newline at end of file diff --git a/core/rewriting/indexing/test/histtest/tgt1.xml b/core/rewriting/indexing/test/histtest/tgt1.xml deleted file mode 100644 index f33b7422..00000000 --- a/core/rewriting/indexing/test/histtest/tgt1.xml +++ /dev/null @@ -1,76 +0,0 @@ - - - - i - false - red - - - h - false - red - - - g - true - - - f - true - - - e - false - green - - - d - false - green - - - c - false - green - - - b - false - green - - - a - false - green - - - - Eidg - - - Ehhd - - - Egha - - - Efeh - - - Eebe - - - Edbi - - - Ecci - - - Ebcb - - - Eafc - - - - \ No newline at end of file diff --git a/core/rewriting/indexing/test/results/simple-adj-10-20.txt b/core/rewriting/indexing/test/results/simple-adj-10-20.txt deleted file mode 100644 index 8ad5c011..00000000 --- a/core/rewriting/indexing/test/results/simple-adj-10-20.txt +++ /dev/null @@ -1,1210 +0,0 @@ -169 100 500 76 -190 100 550 52 -200 100 600 49 -212 100 650 49 -231 100 700 54 -244 100 750 58 -258 100 800 71 -259 100 850 68 -278 100 900 68 -292 100 950 73 -329 100 1000 237 -177 110 500 34 -200 110 550 45 -251 110 600 49 -243 110 650 56 -248 110 700 51 -297 110 750 58 -327 110 800 68 -287 110 850 69 -332 110 900 72 -327 110 950 247 -338 110 1000 76 -134 120 500 33 -166 120 550 48 -222 120 600 49 -183 120 650 53 -192 120 700 49 -238 120 750 57 -240 120 800 65 -248 120 850 70 -277 120 900 211 -269 120 950 67 -330 120 1000 78 -207 130 500 40 -234 130 550 53 -249 130 600 51 -263 130 650 64 -293 130 700 69 -316 130 750 68 -323 130 800 72 -367 130 850 83 -436 130 900 88 -402 130 950 92 -397 130 1000 82 -254 140 500 43 -313 140 550 51 -279 140 600 53 -351 140 650 59 -380 140 700 215 -425 140 750 76 -376 140 800 79 -420 140 850 79 -435 140 900 87 -467 140 950 96 -491 140 1000 124 -273 150 500 208 -265 150 550 66 -294 150 600 73 -320 150 650 73 -350 150 700 88 -363 150 750 95 -404 150 800 107 -420 150 850 275 -446 150 900 113 -472 150 950 118 -531 150 1000 119 -269 160 500 68 -295 160 550 67 -285 160 600 225 -353 160 650 79 -374 160 700 97 -405 160 750 96 -446 160 800 113 -441 160 850 112 -447 160 900 270 -513 160 950 122 -509 160 1000 125 -250 170 500 62 -242 170 550 63 -219 170 600 56 -336 170 650 88 -308 170 700 230 -334 170 750 81 -359 170 800 100 -400 170 850 105 -437 170 900 112 -413 170 950 104 -508 170 1000 289 -308 180 500 61 -295 180 550 55 -318 180 600 71 -396 180 650 76 -376 180 700 69 -426 180 750 89 -479 180 800 88 -451 180 850 244 -507 180 900 87 -513 180 950 94 -532 180 1000 103 -354 190 500 62 -377 190 550 56 -456 190 600 82 -445 190 650 228 -470 190 700 82 -507 190 750 89 -504 190 800 95 -519 190 850 98 -611 190 900 114 -591 190 950 274 -659 190 1000 126 -402 200 500 69 -359 200 550 58 -427 200 600 72 -464 200 650 95 -549 200 700 101 -544 200 750 257 -577 200 800 97 -565 200 850 112 -608 200 900 106 -703 200 950 119 -674 200 1000 119 -178 100 500 81 -195 100 550 43 -213 100 600 52 -232 100 650 54 -263 100 700 63 -250 100 750 57 -278 100 800 64 -296 100 850 69 -353 100 900 73 -302 100 950 70 -358 100 1000 240 -179 110 500 54 -198 110 550 45 -223 110 600 
48 -256 110 650 71 -254 110 700 69 -289 110 750 64 -275 110 800 68 -321 110 850 68 -321 110 900 222 -341 110 950 81 -338 110 1000 99 -148 120 500 28 -166 120 550 36 -201 120 600 42 -204 120 650 42 -234 120 700 47 -245 120 750 56 -281 120 800 59 -305 120 850 210 -292 120 900 59 -363 120 950 71 -364 120 1000 79 -287 130 500 47 -319 130 550 57 -298 130 600 59 -351 130 650 61 -382 130 700 72 -386 130 750 233 -373 130 800 81 -438 130 850 83 -462 130 900 86 -519 130 950 95 -483 130 1000 93 -188 140 500 55 -200 140 550 55 -229 140 600 213 -250 140 650 63 -261 140 700 70 -282 140 750 83 -270 140 800 78 -298 140 850 93 -334 140 900 101 -347 140 950 259 -355 140 1000 109 -200 150 500 39 -250 150 550 35 -264 150 600 43 -296 150 650 51 -300 150 700 53 -369 150 750 64 -334 150 800 62 -375 150 850 78 -397 150 900 228 -430 150 950 80 -424 150 1000 86 -295 160 500 76 -359 160 550 77 -335 160 600 65 -441 160 650 106 -502 160 700 271 -541 160 750 126 -553 160 800 121 -591 160 850 112 -590 160 900 116 -632 160 950 334 -683 160 1000 145 -253 170 500 72 -280 170 550 74 -310 170 600 80 -307 170 650 79 -380 170 700 256 -377 170 750 121 -419 170 800 123 -432 170 850 111 -461 170 900 146 -474 170 950 299 -531 170 1000 152 -324 180 500 55 -385 180 550 72 -420 180 600 83 -404 180 650 74 -486 180 700 252 -542 180 750 105 -488 180 800 98 -556 180 850 110 -599 180 900 125 -681 180 950 137 -673 180 1000 139 -234 190 500 59 -276 190 550 94 -296 190 600 82 -341 190 650 71 -321 190 700 84 -374 190 750 259 -395 190 800 115 -469 190 850 112 -475 190 900 143 -492 190 950 117 -504 190 1000 293 -366 200 500 73 -417 200 550 67 -385 200 600 82 -520 200 650 89 -427 200 700 98 -514 200 750 113 -600 200 800 276 -658 200 850 116 -632 200 900 128 -705 200 950 139 -776 200 1000 322 -161 100 500 28 -206 100 550 44 -217 100 600 44 -263 100 650 51 -275 100 700 59 -301 100 750 58 -301 100 800 54 -298 100 850 65 -350 100 900 68 -343 100 950 67 -346 100 1000 232 -172 110 500 51 -164 110 550 50 -149 110 600 54 -204 110 650 54 -195 110 700 69 -209 110 750 64 -276 110 800 79 -275 110 850 84 -291 110 900 238 -271 110 950 75 -319 110 1000 100 -182 120 500 39 -211 120 550 44 -207 120 600 48 -228 120 650 53 -262 120 700 59 -255 120 750 56 -283 120 800 69 -300 120 850 227 -331 120 900 73 -318 120 950 80 -365 120 1000 88 -231 130 500 43 -261 130 550 44 -280 130 600 53 -284 130 650 52 -308 130 700 64 -314 130 750 211 -367 130 800 73 -353 130 850 78 -383 130 900 78 -396 130 950 82 -386 130 1000 77 -235 140 500 51 -208 140 550 45 -262 140 600 207 -310 140 650 67 -314 140 700 67 -358 140 750 77 -362 140 800 83 -371 140 850 83 -393 140 900 86 -434 140 950 90 -439 140 1000 255 -217 150 500 46 -297 150 550 63 -289 150 600 57 -332 150 650 71 -311 150 700 73 -379 150 750 85 -383 150 800 95 -389 150 850 234 -399 150 900 96 -403 150 950 97 -465 150 1000 115 -226 160 500 42 -253 160 550 75 -315 160 600 53 -314 160 650 68 -346 160 700 223 -375 160 750 80 -352 160 800 98 -408 160 850 98 -418 160 900 96 -473 160 950 95 -454 160 1000 248 -309 170 500 55 -358 170 550 65 -363 170 600 57 -397 170 650 65 -426 170 700 79 -413 170 750 82 -454 170 800 90 -509 170 850 269 -518 170 900 106 -525 170 950 111 -638 170 1000 116 -317 180 500 59 -333 180 550 77 -354 180 600 233 -417 180 650 92 -443 180 700 101 -503 180 750 108 -473 180 800 112 -590 180 850 132 -567 180 900 270 -615 180 950 132 -619 180 1000 140 -374 190 500 82 -367 190 550 88 -424 190 600 243 -443 190 650 95 -441 190 700 109 -506 190 750 107 -516 190 800 117 -595 190 850 138 -564 190 900 286 -645 190 950 149 -719 
190 1000 159 -317 200 500 72 -320 200 550 86 -354 200 600 238 -418 200 650 85 -386 200 700 100 -447 200 750 123 -483 200 800 111 -466 200 850 104 -565 200 900 278 -583 200 950 131 -580 200 1000 140 -181 100 500 38 -184 100 550 38 -224 100 600 52 -272 100 650 54 -270 100 700 55 -268 100 750 143 -319 100 800 59 -352 100 850 69 -323 100 900 74 -361 100 950 83 -343 100 1000 81 -139 110 500 39 -162 110 550 38 -193 110 600 42 -194 110 650 45 -228 110 700 190 -234 110 750 57 -207 110 800 56 -250 110 850 59 -268 110 900 68 -292 110 950 71 -316 110 1000 76 -201 120 500 37 -200 120 550 41 -252 120 600 43 -264 120 650 180 -272 120 700 60 -295 120 750 56 -340 120 800 68 -282 120 850 64 -319 120 900 62 -390 120 950 69 -377 120 1000 74 -254 130 500 51 -272 130 550 60 -306 130 600 199 -283 130 650 66 -311 130 700 79 -386 130 750 88 -320 130 800 74 -400 130 850 83 -389 130 900 92 -433 130 950 270 -453 130 1000 102 -233 140 500 50 -227 140 550 57 -232 140 600 49 -261 140 650 57 -320 140 700 69 -335 140 750 83 -393 140 800 242 -323 140 850 87 -394 140 900 88 -418 140 950 96 -428 140 1000 98 -261 150 500 57 -295 150 550 55 -287 150 600 57 -363 150 650 220 -372 150 700 74 -369 150 750 69 -437 150 800 83 -492 150 850 90 -453 150 900 94 -525 150 950 100 -499 150 1000 263 -196 160 500 42 -244 160 550 54 -262 160 600 57 -275 160 650 60 -310 160 700 84 -294 160 750 88 -371 160 800 87 -361 160 850 249 -380 160 900 100 -423 160 950 98 -418 160 1000 109 -291 170 500 48 -273 170 550 52 -314 170 600 66 -350 170 650 67 -401 170 700 218 -474 170 750 92 -491 170 800 86 -502 170 850 86 -508 170 900 95 -552 170 950 109 -538 170 1000 294 -297 180 500 45 -310 180 550 54 -402 180 600 64 -376 180 650 60 -374 180 700 69 -452 180 750 73 -494 180 800 85 -507 180 850 258 -517 180 900 95 -491 180 950 97 -597 180 1000 98 -280 190 500 57 -365 190 550 83 -389 190 600 86 -424 190 650 263 -437 190 700 99 -464 190 750 121 -496 190 800 115 -516 190 850 107 -528 190 900 277 -577 190 950 140 -535 190 1000 135 -214 200 500 36 -257 200 550 40 -342 200 600 58 -318 200 650 47 -328 200 700 58 -403 200 750 61 -389 200 800 71 -407 200 850 67 -479 200 900 85 -490 200 950 78 -538 200 1000 83 -140 100 500 33 -191 100 550 37 -221 100 600 49 -219 100 650 45 -245 100 700 118 -248 100 750 51 -264 100 800 55 -298 100 850 64 -304 100 900 71 -279 100 950 63 -308 100 1000 68 -181 110 500 38 -190 110 550 44 -227 110 600 47 -163 110 650 40 -262 110 700 188 -240 110 750 60 -238 110 800 54 -273 110 850 58 -335 110 900 82 -322 110 950 70 -284 110 1000 67 -233 120 500 50 -267 120 550 57 -295 120 600 192 -298 120 650 64 -340 120 700 74 -344 120 750 85 -379 120 800 74 -403 120 850 91 -429 120 900 94 -427 120 950 267 -448 120 1000 102 -186 130 500 40 -197 130 550 46 -204 130 600 45 -207 130 650 53 -240 130 700 65 -276 130 750 67 -273 130 800 75 -302 130 850 73 -298 130 900 231 -316 130 950 75 -356 130 1000 88 -245 140 500 41 -281 140 550 49 -271 140 600 54 -289 140 650 55 -347 140 700 68 -355 140 750 73 -348 140 800 227 -384 140 850 82 -392 140 900 92 -434 140 950 92 -413 140 1000 90 -257 150 500 44 -263 150 550 52 -304 150 600 61 -314 150 650 179 -368 150 700 66 -354 150 750 65 -365 150 800 70 -422 150 850 79 -437 150 900 74 -444 150 950 89 -478 150 1000 90 -247 160 500 186 -306 160 550 82 -351 160 600 78 -336 160 650 86 -355 160 700 93 -391 160 750 89 -452 160 800 109 -452 160 850 247 -431 160 900 111 -486 160 950 125 -561 160 1000 130 -202 170 500 37 -293 170 550 47 -279 170 600 49 -307 170 650 188 -350 170 700 65 -370 170 750 78 -372 170 800 68 -400 170 850 78 
-403 170 900 81 -461 170 950 100 -476 170 1000 238 -278 180 500 64 -314 180 550 79 -323 180 600 69 -360 180 650 82 -416 180 700 93 -421 180 750 97 -471 180 800 277 -533 180 850 116 -542 180 900 132 -509 180 950 126 -545 180 1000 145 -297 190 500 48 -320 190 550 213 -363 190 600 59 -383 190 650 68 -366 190 700 71 -424 190 750 77 -469 190 800 82 -510 190 850 95 -516 190 900 248 -532 190 950 94 -633 190 1000 115 -357 200 500 70 -352 200 550 85 -375 200 600 79 -430 200 650 97 -437 200 700 249 -497 200 750 114 -554 200 800 116 -505 200 850 114 -600 200 900 139 -622 200 950 302 -709 200 1000 159 -193 100 500 35 -204 100 550 44 -234 100 600 48 -254 100 650 61 -289 100 700 57 -267 100 750 52 -313 100 800 182 -356 100 850 62 -341 100 900 76 -377 100 950 76 -390 100 1000 83 -141 110 500 38 -127 110 550 41 -165 110 600 60 -189 110 650 52 -197 110 700 56 -225 110 750 214 -222 110 800 68 -265 110 850 78 -272 110 900 90 -293 110 950 92 -280 110 1000 89 -156 120 500 34 -174 120 550 41 -181 120 600 45 -214 120 650 174 -218 120 700 45 -258 120 750 62 -249 120 800 55 -278 120 850 59 -304 120 900 66 -328 120 950 69 -339 120 1000 69 -191 130 500 40 -247 130 550 52 -217 130 600 195 -239 130 650 53 -283 130 700 59 -325 130 750 73 -308 130 800 76 -346 130 850 93 -352 130 900 82 -393 130 950 88 -389 130 1000 228 -248 140 500 56 -288 140 550 49 -270 140 600 58 -336 140 650 69 -327 140 700 61 -340 140 750 70 -337 140 800 73 -368 140 850 77 -412 140 900 233 -436 140 950 91 -481 140 1000 96 -240 150 500 49 -292 150 550 57 -266 150 600 55 -320 150 650 72 -323 150 700 66 -347 150 750 212 -389 150 800 81 -451 150 850 86 -452 150 900 90 -463 150 950 96 -490 150 1000 111 -251 160 500 36 -331 160 550 50 -375 160 600 204 -350 160 650 64 -385 160 700 61 -428 160 750 67 -442 160 800 77 -511 160 850 83 -489 160 900 81 -543 160 950 103 -570 160 1000 254 -262 170 500 64 -315 170 550 94 -303 170 600 67 -322 170 650 79 -344 170 700 76 -392 170 750 93 -402 170 800 239 -465 170 850 105 -447 170 900 118 -510 170 950 116 -475 170 1000 119 -324 180 500 58 -343 180 550 198 -339 180 600 66 -381 180 650 72 -405 180 700 75 -424 180 750 86 -513 180 800 88 -515 180 850 92 -538 180 900 262 -587 180 950 112 -668 180 1000 121 -283 190 500 62 -265 190 550 70 -324 190 600 69 -315 190 650 75 -326 190 700 240 -353 190 750 90 -398 190 800 96 -437 190 850 102 -462 190 900 112 -501 190 950 114 -487 190 1000 290 -379 200 500 78 -403 200 550 74 -456 200 600 83 -507 200 650 98 -504 200 700 109 -506 200 750 93 -555 200 800 268 -613 200 850 128 -673 200 900 130 -652 200 950 134 -693 200 1000 290 -164 100 500 41 -177 100 550 40 -183 100 600 46 -179 100 650 42 -224 100 700 53 -251 100 750 61 -231 100 800 63 -224 100 850 59 -284 100 900 72 -255 100 950 73 -303 100 1000 224 -175 110 500 32 -175 110 550 32 -179 110 600 34 -222 110 650 44 -220 110 700 46 -214 110 750 43 -287 110 800 56 -257 110 850 62 -290 110 900 58 -294 110 950 60 -312 110 1000 63 -105 120 500 151 -166 120 550 40 -138 120 600 41 -186 120 650 51 -192 120 700 44 -194 120 750 56 -216 120 800 65 -226 120 850 70 -229 120 900 69 -270 120 950 68 -234 120 1000 210 -235 130 500 65 -308 130 550 64 -301 130 600 58 -390 130 650 84 -404 130 700 91 -421 130 750 91 -427 130 800 99 -449 130 850 266 -471 130 900 105 -533 130 950 126 -564 130 1000 118 -232 140 500 55 -307 140 550 64 -343 140 600 69 -363 140 650 230 -329 140 700 73 -391 140 750 88 -407 140 800 86 -433 140 850 96 -456 140 900 91 -503 140 950 260 -510 140 1000 116 -181 150 500 36 -200 150 550 37 -213 150 600 54 -304 150 650 54 -262 150 700 51 -306 
150 750 57 -305 150 800 61 -295 150 850 67 -301 150 900 219 -360 150 950 78 -407 150 1000 84 -280 160 500 56 -298 160 550 64 -426 160 600 71 -408 160 650 74 -392 160 700 73 -497 160 750 233 -512 160 800 82 -497 160 850 96 -501 160 900 97 -589 160 950 106 -608 160 1000 117 -293 170 500 203 -323 170 550 70 -337 170 600 66 -353 170 650 80 -342 170 700 85 -427 170 750 98 -449 170 800 97 -483 170 850 254 -540 170 900 122 -519 170 950 135 -537 170 1000 116 -251 180 500 50 -259 180 550 53 -316 180 600 65 -341 180 650 209 -332 180 700 66 -383 180 750 81 -412 180 800 96 -437 180 850 104 -482 180 900 118 -480 180 950 259 -514 180 1000 116 -313 190 500 59 -434 190 550 73 -472 190 600 91 -460 190 650 83 -512 190 700 94 -563 190 750 258 -613 190 800 123 -659 190 850 116 -680 190 900 118 -662 190 950 139 -792 190 1000 312 -326 200 500 62 -339 200 550 67 -441 200 600 92 -415 200 650 82 -465 200 700 101 -493 200 750 81 -539 200 800 272 -571 200 850 112 -574 200 900 105 -610 200 950 122 -618 200 1000 146 -206 100 500 76 -189 100 550 46 -195 100 600 45 -290 100 650 74 -236 100 700 63 -264 100 750 60 -349 100 800 76 -295 100 850 73 -303 100 900 76 -346 100 950 243 -371 100 1000 87 -131 110 500 28 -171 110 550 43 -216 110 600 43 -170 110 650 37 -202 110 700 47 -208 110 750 46 -232 110 800 59 -257 110 850 59 -280 110 900 60 -289 110 950 201 -296 110 1000 83 -222 120 500 46 -234 120 550 56 -238 120 600 45 -298 120 650 56 -295 120 700 58 -339 120 750 75 -331 120 800 84 -363 120 850 226 -412 120 900 76 -382 120 950 89 -402 120 1000 84 -160 130 500 31 -178 130 550 41 -194 130 600 39 -237 130 650 48 -247 130 700 48 -249 130 750 44 -318 130 800 201 -296 130 850 67 -307 130 900 65 -338 130 950 67 -344 130 1000 80 -251 140 500 54 -260 140 550 52 -343 140 600 58 -314 140 650 59 -378 140 700 221 -384 140 750 79 -422 140 800 80 -473 140 850 99 -444 140 900 86 -508 140 950 102 -513 140 1000 103 -291 150 500 189 -283 150 550 71 -335 150 600 65 -338 150 650 72 -381 150 700 85 -368 150 750 86 -364 150 800 88 -475 150 850 105 -443 150 900 261 -530 150 950 114 -507 150 1000 111 -259 160 500 47 -272 160 550 53 -320 160 600 63 -323 160 650 58 -386 160 700 62 -429 160 750 74 -467 160 800 73 -466 160 850 80 -491 160 900 83 -502 160 950 88 -552 160 1000 94 -239 170 500 48 -309 170 550 203 -278 170 600 58 -370 170 650 76 -312 170 700 73 -376 170 750 80 -376 170 800 83 -385 170 850 91 -467 170 900 107 -448 170 950 260 -487 170 1000 110 -306 180 500 51 -337 180 550 79 -392 180 600 62 -453 180 650 81 -489 180 700 78 -543 180 750 209 -517 180 800 97 -529 180 850 98 -598 180 900 112 -665 180 950 111 -679 180 1000 120 -253 190 500 198 -276 190 550 59 -307 190 600 74 -334 190 650 84 -323 190 700 80 -359 190 750 83 -414 190 800 85 -372 190 850 240 -465 190 900 109 -522 190 950 131 -482 190 1000 111 -274 200 500 61 -305 200 550 56 -345 200 600 66 -359 200 650 206 -388 200 700 84 -400 200 750 85 -466 200 800 110 -453 200 850 98 -518 200 900 119 -536 200 950 275 -532 200 1000 110 -145 100 500 28 -108 100 550 30 -168 100 600 40 -173 100 650 38 -184 100 700 43 -173 100 750 49 -207 100 800 58 -210 100 850 52 -238 100 900 57 -221 100 950 163 -233 100 1000 62 -154 110 500 42 -161 110 550 36 -157 110 600 43 -220 110 650 57 -228 110 700 53 -247 110 750 66 -246 110 800 61 -225 110 850 58 -292 110 900 202 -254 110 950 69 -318 110 1000 87 -172 120 500 40 -190 120 550 42 -271 120 600 57 -234 120 650 50 -275 120 700 66 -311 120 750 73 -301 120 800 65 -316 120 850 225 -398 120 900 87 -374 120 950 84 -405 120 1000 81 -189 130 500 36 -248 130 550 37 -218 130 
600 42 -250 130 650 50 -251 130 700 46 -270 130 750 57 -283 130 800 206 -287 130 850 61 -360 130 900 83 -340 130 950 67 -378 130 1000 71 -220 140 500 46 -232 140 550 56 -262 140 600 64 -312 140 650 83 -322 140 700 213 -301 140 750 86 -357 140 800 88 -379 140 850 100 -395 140 900 91 -443 140 950 122 -440 140 1000 264 -228 150 500 39 -245 150 550 41 -256 150 600 45 -314 150 650 60 -333 150 700 75 -319 150 750 81 -419 150 800 73 -399 150 850 80 -398 150 900 238 -436 150 950 85 -497 150 1000 97 -304 160 500 65 -357 160 550 88 -424 160 600 92 -373 160 650 231 -419 160 700 102 -531 160 750 132 -497 160 800 112 -546 160 850 113 -577 160 900 131 -604 160 950 297 -677 160 1000 134 -203 170 500 42 -261 170 550 53 -274 170 600 55 -342 170 650 67 -309 170 700 65 -327 170 750 210 -342 170 800 68 -394 170 850 82 -387 170 900 87 -448 170 950 90 -483 170 1000 91 -324 180 500 59 -324 180 550 54 -354 180 600 197 -405 180 650 73 -485 180 700 84 -469 180 750 95 -530 180 800 99 -499 180 850 99 -554 180 900 96 -606 180 950 250 -675 180 1000 112 -340 190 500 79 -350 190 550 78 -422 190 600 95 -448 190 650 98 -506 190 700 270 -560 190 750 104 -567 190 800 130 -609 190 850 121 -668 190 900 150 -679 190 950 318 -705 190 1000 139 -294 200 500 56 -382 200 550 58 -455 200 600 76 -452 200 650 82 -455 200 700 251 -520 200 750 88 -576 200 800 105 -537 200 850 107 -628 200 900 117 -648 200 950 124 -678 200 1000 277 -143 100 500 51 -180 100 550 38 -182 100 600 40 -218 100 650 48 -198 100 700 45 -229 100 750 63 -231 100 800 63 -300 100 850 59 -266 100 900 68 -308 100 950 202 -307 100 1000 70 -185 110 500 33 -220 110 550 41 -214 110 600 36 -254 110 650 47 -243 110 700 47 -264 110 750 53 -287 110 800 59 -293 110 850 66 -339 110 900 70 -319 110 950 223 -375 110 1000 76 -180 120 500 33 -186 120 550 33 -248 120 600 44 -280 120 650 53 -291 120 700 52 -304 120 750 61 -254 120 800 52 -312 120 850 60 -362 120 900 217 -332 120 950 67 -381 120 1000 89 -167 130 500 34 -207 130 550 42 -257 130 600 53 -258 130 650 55 -274 130 700 48 -289 130 750 67 -324 130 800 59 -309 130 850 203 -334 130 900 70 -323 130 950 64 -398 130 1000 84 -245 140 500 44 -319 140 550 52 -325 140 600 54 -387 140 650 65 -386 140 700 76 -473 140 750 229 -482 140 800 79 -506 140 850 74 -509 140 900 81 -549 140 950 83 -529 140 1000 89 -228 150 500 55 -289 150 550 66 -312 150 600 208 -333 150 650 84 -355 150 700 75 -342 150 750 85 -415 150 800 96 -427 150 850 95 -426 150 900 259 -469 150 950 123 -485 150 1000 118 -293 160 500 60 -336 160 550 61 -384 160 600 74 -397 160 650 78 -450 160 700 236 -405 160 750 102 -446 160 800 89 -495 160 850 111 -542 160 900 131 -574 160 950 125 -558 160 1000 278 -291 170 500 65 -351 170 550 70 -326 170 600 77 -327 170 650 81 -413 170 700 89 -411 170 750 90 -450 170 800 240 -520 170 850 112 -489 170 900 114 -555 170 950 103 -564 170 1000 111 -302 180 500 55 -334 180 550 211 -396 180 600 84 -397 180 650 67 -424 180 700 89 -480 180 750 94 -564 180 800 95 -558 180 850 106 -543 180 900 254 -605 180 950 120 -630 180 1000 118 -268 190 500 53 -316 190 550 67 -397 190 600 83 -359 190 650 236 -362 190 700 93 -425 190 750 103 -437 190 800 104 -478 190 850 125 -492 190 900 123 -529 190 950 294 -541 190 1000 137 -300 200 500 70 -350 200 550 80 -400 200 600 84 -438 200 650 82 -448 200 700 249 -494 200 750 117 -531 200 800 105 -494 200 850 101 -564 200 900 127 -573 200 950 263 -569 200 1000 128 diff --git a/core/rewriting/indexing/test/results/simple-match-10-20.txt b/core/rewriting/indexing/test/results/simple-match-10-20.txt deleted file mode 100644 
index f7080843..00000000 --- a/core/rewriting/indexing/test/results/simple-match-10-20.txt +++ /dev/null @@ -1,1210 +0,0 @@ -59 100 500 415 -52 100 550 466 -60 100 600 536 -53 100 650 583 -69 100 700 642 -62 100 750 659 -62 100 800 725 -63 100 850 779 -59 100 900 757 -59 100 950 843 -69 100 1000 901 -56 110 500 474 -68 110 550 506 -49 110 600 519 -62 110 650 620 -68 110 700 662 -62 110 750 727 -58 110 800 737 -65 110 850 838 -70 110 900 861 -60 110 950 894 -66 110 1000 938 -63 120 500 489 -64 120 550 554 -67 120 600 627 -67 120 650 629 -72 120 700 722 -72 120 750 772 -70 120 800 841 -75 120 850 871 -72 120 900 895 -75 120 950 947 -76 120 1000 1008 -61 130 500 665 -70 130 550 766 -66 130 600 835 -61 130 650 923 -72 130 700 969 -69 130 750 1074 -72 130 800 1030 -75 130 850 1131 -70 130 900 1252 -80 130 950 1307 -72 130 1000 1417 -84 140 500 545 -71 140 550 638 -77 140 600 692 -81 140 650 764 -90 140 700 804 -90 140 750 864 -78 140 800 892 -89 140 850 977 -88 140 900 1099 -86 140 950 1072 -94 140 1000 1143 -74 150 500 654 -77 150 550 723 -82 150 600 776 -91 150 650 882 -79 150 700 924 -86 150 750 1053 -88 150 800 1123 -90 150 850 1166 -87 150 900 1198 -99 150 950 1325 -94 150 1000 1401 -91 160 500 635 -88 160 550 676 -104 160 600 738 -100 160 650 816 -98 160 700 857 -108 160 750 941 -105 160 800 1009 -110 160 850 1058 -101 160 900 1189 -108 160 950 1185 -108 160 1000 1236 -80 170 500 731 -82 170 550 750 -90 170 600 826 -92 170 650 948 -89 170 700 1057 -97 170 750 1025 -96 170 800 1160 -94 170 850 1233 -96 170 900 1261 -105 170 950 1400 -101 170 1000 1484 -107 180 500 702 -100 180 550 758 -102 180 600 855 -109 180 650 911 -106 180 700 985 -102 180 750 1042 -104 180 800 1139 -112 180 850 1239 -110 180 900 1318 -109 180 950 1351 -116 180 1000 1435 -104 190 500 750 -100 190 550 891 -113 190 600 856 -105 190 650 895 -107 190 700 1036 -111 190 750 1132 -112 190 800 1148 -115 190 850 1293 -121 190 900 1410 -116 190 950 1369 -109 190 1000 1469 -105 200 500 937 -106 200 550 980 -108 200 600 1071 -116 200 650 1164 -105 200 700 1259 -117 200 750 1413 -124 200 800 1498 -119 200 850 1482 -116 200 900 1722 -126 200 950 1683 -123 200 1000 1798 -51 100 500 431 -48 100 550 442 -56 100 600 507 -60 100 650 564 -58 100 700 628 -62 100 750 655 -57 100 800 689 -54 100 850 723 -63 100 900 816 -61 100 950 806 -61 100 1000 898 -61 110 500 438 -64 110 550 454 -63 110 600 492 -68 110 650 552 -67 110 700 584 -69 110 750 635 -70 110 800 675 -70 110 850 722 -74 110 900 761 -69 110 950 816 -67 110 1000 839 -57 120 500 483 -51 120 550 529 -62 120 600 568 -63 120 650 641 -59 120 700 670 -72 120 750 729 -61 120 800 750 -60 120 850 792 -65 120 900 870 -78 120 950 926 -64 120 1000 934 -70 130 500 488 -73 130 550 525 -82 130 600 594 -75 130 650 644 -83 130 700 677 -80 130 750 731 -78 130 800 781 -83 130 850 822 -91 130 900 941 -88 130 950 952 -91 130 1000 1023 -76 140 500 585 -73 140 550 594 -76 140 600 653 -77 140 650 696 -80 140 700 765 -78 140 750 817 -81 140 800 870 -84 140 850 973 -85 140 900 995 -85 140 950 1092 -87 140 1000 1127 -86 150 500 706 -83 150 550 706 -89 150 600 845 -87 150 650 863 -89 150 700 1022 -93 150 750 1068 -98 150 800 1119 -99 150 850 1180 -103 150 900 1288 -96 150 950 1322 -101 150 1000 1363 -97 160 500 565 -80 160 550 631 -89 160 600 674 -87 160 650 768 -92 160 700 842 -92 160 750 866 -92 160 800 904 -95 160 850 1016 -86 160 900 1075 -95 160 950 1188 -106 160 1000 1299 -87 170 500 2067 -98 170 550 786 -97 170 600 855 -92 170 650 918 -92 170 700 1052 -102 170 750 986 -105 170 800 1147 -111 170 850 1218 
-111 170 900 1311 -106 170 950 1343 -118 170 1000 1451 -93 180 500 727 -85 180 550 826 -93 180 600 889 -97 180 650 918 -91 180 700 1031 -106 180 750 1137 -102 180 800 1179 -95 180 850 1224 -99 180 900 1379 -111 180 950 1408 -104 180 1000 1478 -106 190 500 729 -106 190 550 828 -107 190 600 913 -110 190 650 1028 -114 190 700 1072 -113 190 750 1193 -121 190 800 1176 -111 190 850 1309 -122 190 900 1347 -126 190 950 1467 -124 190 1000 1598 -112 200 500 824 -106 200 550 963 -107 200 600 993 -109 200 650 1156 -118 200 700 1194 -121 200 750 1252 -121 200 800 1340 -137 200 850 1502 -117 200 900 1530 -129 200 950 1634 -122 200 1000 1683 -45 100 500 449 -52 100 550 638 -53 100 600 596 -48 100 650 605 -57 100 700 656 -57 100 750 649 -56 100 800 707 -54 100 850 690 -59 100 900 793 -60 100 950 878 -64 100 1000 992 -54 110 500 495 -54 110 550 535 -58 110 600 583 -54 110 650 641 -61 110 700 709 -64 110 750 779 -68 110 800 806 -68 110 850 843 -75 110 900 947 -66 110 950 961 -72 110 1000 999 -56 120 500 515 -57 120 550 530 -61 120 600 543 -59 120 650 625 -71 120 700 717 -66 120 750 731 -69 120 800 831 -69 120 850 841 -69 120 900 931 -71 120 950 924 -71 120 1000 987 -61 130 500 463 -66 130 550 538 -65 130 600 577 -70 130 650 662 -71 130 700 703 -79 130 750 803 -77 130 800 808 -71 130 850 857 -76 130 900 877 -69 130 950 968 -82 130 1000 1030 -78 140 500 633 -77 140 550 724 -78 140 600 715 -90 140 650 795 -86 140 700 839 -89 140 750 912 -84 140 800 952 -87 140 850 1037 -88 140 900 1116 -92 140 950 1197 -91 140 1000 1251 -79 150 500 686 -73 150 550 768 -80 150 600 880 -80 150 650 938 -86 150 700 1027 -79 150 750 1164 -83 150 800 1077 -88 150 850 1222 -85 150 900 1299 -79 150 950 1351 -80 150 1000 1331 -84 160 500 702 -87 160 550 768 -84 160 600 802 -93 160 650 865 -95 160 700 960 -97 160 750 960 -98 160 800 1114 -96 160 850 1218 -91 160 900 1272 -101 160 950 1344 -99 160 1000 1408 -104 170 500 717 -100 170 550 790 -108 170 600 838 -101 170 650 918 -99 170 700 938 -109 170 750 1042 -107 170 800 1155 -108 170 850 1229 -107 170 900 1298 -120 170 950 1368 -115 170 1000 1440 -99 180 500 743 -98 180 550 822 -102 180 600 956 -107 180 650 959 -110 180 700 1081 -107 180 750 1157 -100 180 800 1188 -115 180 850 1297 -111 180 900 1361 -112 180 950 1437 -114 180 1000 1470 -100 190 500 831 -101 190 550 869 -102 190 600 927 -101 190 650 1022 -102 190 700 1080 -114 190 750 1112 -113 190 800 1294 -115 190 850 1375 -110 190 900 1375 -116 190 950 1489 -116 190 1000 1573 -103 200 500 889 -117 200 550 932 -108 200 600 1035 -118 200 650 1120 -118 200 700 1195 -115 200 750 1280 -121 200 800 1427 -131 200 850 1462 -115 200 900 1474 -130 200 950 1759 -127 200 1000 1722 -45 100 500 484 -53 100 550 489 -53 100 600 521 -47 100 650 576 -57 100 700 643 -58 100 750 658 -55 100 800 768 -49 100 850 814 -55 100 900 865 -56 100 950 878 -59 100 1000 905 -52 110 500 397 -56 110 550 481 -52 110 600 498 -57 110 650 568 -58 110 700 634 -59 110 750 640 -58 110 800 746 -54 110 850 764 -58 110 900 803 -66 110 950 848 -62 110 1000 874 -46 120 500 462 -54 120 550 555 -54 120 600 598 -57 120 650 650 -53 120 700 716 -65 120 750 788 -57 120 800 791 -60 120 850 875 -61 120 900 918 -66 120 950 914 -64 120 1000 994 -66 130 500 582 -59 130 550 622 -66 130 600 664 -71 130 650 766 -67 130 700 810 -78 130 750 924 -75 130 800 949 -70 130 850 999 -80 130 900 1139 -71 130 950 1140 -79 130 1000 1168 -70 140 500 520 -72 140 550 568 -79 140 600 661 -81 140 650 684 -79 140 700 770 -79 140 750 816 -74 140 800 887 -87 140 850 912 -81 140 900 1026 -89 140 950 1033 -81 140 
1000 1086 -71 150 500 638 -81 150 550 710 -80 150 600 771 -84 150 650 832 -85 150 700 876 -83 150 750 982 -91 150 800 1025 -86 150 850 1144 -87 150 900 1176 -80 150 950 1211 -93 150 1000 1297 -97 160 500 650 -93 160 550 697 -89 160 600 782 -94 160 650 838 -92 160 700 927 -104 160 750 971 -105 160 800 1091 -100 160 850 1171 -98 160 900 1215 -103 160 950 1221 -100 160 1000 1288 -85 170 500 735 -89 170 550 782 -95 170 600 840 -102 170 650 938 -98 170 700 1030 -95 170 750 1094 -101 170 800 1215 -99 170 850 1197 -102 170 900 1364 -102 170 950 1351 -108 170 1000 1503 -95 180 500 722 -98 180 550 876 -99 180 600 870 -103 180 650 968 -102 180 700 1025 -104 180 750 1153 -104 180 800 1187 -106 180 850 1322 -109 180 900 1344 -111 180 950 1502 -114 180 1000 1496 -97 190 500 822 -99 190 550 904 -98 190 600 961 -113 190 650 1026 -109 190 700 1112 -108 190 750 1197 -106 190 800 1311 -108 190 850 1345 -113 190 900 1478 -111 190 950 1581 -111 190 1000 1635 -102 200 500 794 -105 200 550 895 -95 200 600 924 -106 200 650 1093 -109 200 700 1114 -115 200 750 1251 -113 200 800 1323 -113 200 850 1430 -112 200 900 1473 -112 200 950 1611 -110 200 1000 1657 -58 100 500 428 -55 100 550 504 -54 100 600 552 -55 100 650 560 -59 100 700 639 -58 100 750 743 -64 100 800 762 -56 100 850 749 -66 100 900 864 -69 100 950 839 -64 100 1000 947 -52 110 500 439 -50 110 550 437 -57 110 600 568 -61 110 650 602 -61 110 700 631 -60 110 750 677 -63 110 800 708 -66 110 850 764 -68 110 900 833 -67 110 950 870 -63 110 1000 917 -62 120 500 535 -64 120 550 659 -70 120 600 634 -69 120 650 732 -74 120 700 809 -74 120 750 900 -72 120 800 907 -74 120 850 1024 -74 120 900 1096 -71 120 950 1049 -73 120 1000 1142 -71 130 500 512 -82 130 550 595 -78 130 600 632 -76 130 650 720 -84 130 700 743 -82 130 750 812 -82 130 800 851 -82 130 850 868 -79 130 900 957 -82 130 950 1076 -92 130 1000 1140 -73 140 500 526 -82 140 550 614 -80 140 600 663 -77 140 650 750 -86 140 700 760 -88 140 750 846 -85 140 800 943 -89 140 850 921 -90 140 900 1015 -90 140 950 1067 -90 140 1000 1121 -74 150 500 635 -78 150 550 710 -79 150 600 757 -83 150 650 836 -81 150 700 872 -74 150 750 987 -87 150 800 1041 -91 150 850 1058 -91 150 900 1133 -90 150 950 1174 -87 150 1000 1252 -87 160 500 725 -81 160 550 739 -80 160 600 779 -93 160 650 900 -86 160 700 913 -96 160 750 1045 -96 160 800 1106 -90 160 850 1204 -102 160 900 1244 -95 160 950 1298 -96 160 1000 1448 -87 170 500 670 -87 170 550 709 -93 170 600 820 -100 170 650 839 -105 170 700 972 -98 170 750 1014 -103 170 800 1102 -101 170 850 1185 -102 170 900 1245 -110 170 950 1256 -110 170 1000 1406 -84 180 500 708 -85 180 550 785 -90 180 600 916 -92 180 650 970 -111 180 700 1055 -94 180 750 1059 -104 180 800 1190 -103 180 850 1259 -104 180 900 1289 -96 180 950 1386 -113 180 1000 1554 -101 190 500 751 -100 190 550 830 -101 190 600 952 -101 190 650 940 -106 190 700 1079 -101 190 750 1143 -107 190 800 1235 -117 190 850 1322 -113 190 900 1397 -115 190 950 1453 -113 190 1000 1547 -102 200 500 799 -98 200 550 889 -111 200 600 1048 -101 200 650 1155 -117 200 700 1141 -110 200 750 1341 -111 200 800 1397 -118 200 850 1412 -121 200 900 1603 -116 200 950 1647 -128 200 1000 1753 -54 100 500 342 -57 100 550 416 -61 100 600 490 -54 100 650 505 -63 100 700 519 -56 100 750 583 -64 100 800 612 -63 100 850 644 -60 100 900 685 -62 100 950 723 -60 100 1000 785 -68 110 500 444 -61 110 550 569 -66 110 600 538 -64 110 650 626 -68 110 700 709 -70 110 750 721 -73 110 800 780 -71 110 850 868 -74 110 900 889 -73 110 950 880 -71 110 1000 938 -50 120 500 560 -55 
120 550 641 -50 120 600 665 -57 120 650 770 -59 120 700 784 -67 120 750 877 -72 120 800 942 -68 120 850 1083 -78 120 900 1010 -69 120 950 1085 -77 120 1000 1141 -70 130 500 490 -81 130 550 517 -76 130 600 605 -78 130 650 658 -83 130 700 752 -91 130 750 771 -84 130 800 811 -89 130 850 883 -88 130 900 954 -87 130 950 957 -93 130 1000 1081 -69 140 500 575 -68 140 550 632 -70 140 600 757 -72 140 650 757 -76 140 700 886 -77 140 750 929 -81 140 800 992 -78 140 850 1056 -79 140 900 1116 -81 140 950 1221 -80 140 1000 1200 -76 150 500 613 -75 150 550 637 -80 150 600 747 -72 150 650 774 -79 150 700 863 -81 150 750 889 -79 150 800 1027 -86 150 850 1024 -89 150 900 1064 -89 150 950 1190 -86 150 1000 1279 -93 160 500 650 -101 160 550 667 -95 160 600 769 -88 160 650 768 -92 160 700 924 -96 160 750 911 -102 160 800 1010 -103 160 850 1039 -108 160 900 1148 -100 160 950 1225 -109 160 1000 1267 -89 170 500 667 -93 170 550 747 -94 170 600 771 -89 170 650 876 -92 170 700 915 -105 170 750 1026 -106 170 800 1079 -101 170 850 1113 -109 170 900 1197 -102 170 950 1277 -107 170 1000 1384 -96 180 500 812 -100 180 550 847 -93 180 600 953 -110 180 650 1064 -98 180 700 1099 -109 180 750 1232 -101 180 800 1246 -102 180 850 1330 -114 180 900 1420 -103 180 950 1479 -114 180 1000 1607 -90 190 500 852 -90 190 550 881 -95 190 600 919 -100 190 650 1104 -91 190 700 1104 -106 190 750 1212 -119 190 800 1323 -103 190 850 1469 -111 190 900 1428 -109 190 950 1583 -115 190 1000 1681 -112 200 500 809 -114 200 550 944 -117 200 600 967 -111 200 650 1092 -117 200 700 1183 -114 200 750 1280 -123 200 800 1394 -120 200 850 1418 -125 200 900 1535 -124 200 950 1599 -120 200 1000 1687 -52 100 500 389 -54 100 550 478 -51 100 600 542 -47 100 650 562 -62 100 700 613 -59 100 750 648 -55 100 800 653 -59 100 850 715 -68 100 900 748 -54 100 950 804 -62 100 1000 847 -59 110 500 395 -55 110 550 472 -54 110 600 513 -55 110 650 575 -57 110 700 610 -58 110 750 655 -61 110 800 712 -56 110 850 715 -60 110 900 785 -61 110 950 798 -67 110 1000 888 -60 120 500 495 -64 120 550 606 -61 120 600 610 -70 120 650 701 -70 120 700 789 -69 120 750 824 -71 120 800 827 -78 120 850 945 -70 120 900 991 -74 120 950 1046 -69 120 1000 1052 -69 130 500 551 -61 130 550 593 -70 130 600 630 -68 130 650 787 -69 130 700 815 -67 130 750 857 -72 130 800 924 -72 130 850 983 -74 130 900 1023 -77 130 950 1097 -84 130 1000 1114 -63 140 500 548 -74 140 550 631 -79 140 600 710 -77 140 650 748 -75 140 700 802 -82 140 750 891 -82 140 800 985 -84 140 850 1006 -81 140 900 1070 -83 140 950 1158 -87 140 1000 1176 -78 150 500 594 -79 150 550 660 -81 150 600 773 -87 150 650 798 -77 150 700 834 -86 150 750 967 -82 150 800 989 -86 150 850 1061 -76 150 900 1122 -82 150 950 1145 -86 150 1000 1228 -87 160 500 681 -99 160 550 731 -93 160 600 777 -86 160 650 818 -98 160 700 898 -97 160 750 905 -92 160 800 1012 -98 160 850 1069 -95 160 900 1175 -106 160 950 1184 -111 160 1000 1339 -84 170 500 660 -86 170 550 738 -79 170 600 850 -90 170 650 893 -95 170 700 994 -95 170 750 1027 -97 170 800 1115 -98 170 850 1186 -100 170 900 1290 -107 170 950 1358 -97 170 1000 1454 -88 180 500 716 -90 180 550 796 -90 180 600 843 -88 180 650 976 -100 180 700 1018 -92 180 750 1116 -95 180 800 1147 -102 180 850 1301 -105 180 900 1352 -107 180 950 1429 -106 180 1000 1521 -104 190 500 812 -93 190 550 899 -105 190 600 1091 -103 190 650 1086 -107 190 700 1175 -104 190 750 1225 -107 190 800 1389 -109 190 850 1404 -112 190 900 1479 -111 190 950 1490 -124 190 1000 1713 -97 200 500 731 -102 200 550 791 -112 200 600 884 -115 200 650 
1047 -112 200 700 1097 -117 200 750 1225 -127 200 800 1254 -123 200 850 1357 -121 200 900 1396 -121 200 950 1491 -120 200 1000 1546 -53 100 500 376 -49 100 550 422 -53 100 600 461 -48 100 650 504 -49 100 700 531 -59 100 750 583 -59 100 800 624 -63 100 850 679 -59 100 900 699 -57 100 950 755 -58 100 1000 813 -45 110 500 389 -48 110 550 495 -59 110 600 538 -62 110 650 616 -51 110 700 619 -66 110 750 696 -60 110 800 710 -65 110 850 789 -70 110 900 791 -61 110 950 861 -70 110 1000 933 -61 120 500 493 -55 120 550 497 -58 120 600 555 -63 120 650 603 -66 120 700 662 -72 120 750 739 -64 120 800 739 -69 120 850 789 -67 120 900 834 -65 120 950 899 -69 120 1000 965 -67 130 500 509 -71 130 550 535 -71 130 600 625 -79 130 650 652 -77 130 700 699 -78 130 750 780 -85 130 800 856 -79 130 850 884 -84 130 900 980 -78 130 950 970 -82 130 1000 1069 -80 140 500 565 -73 140 550 611 -80 140 600 686 -82 140 650 719 -83 140 700 758 -87 140 750 819 -90 140 800 950 -96 140 850 993 -85 140 900 1016 -87 140 950 1093 -97 140 1000 1198 -88 150 500 558 -91 150 550 688 -93 150 600 743 -91 150 650 822 -96 150 700 905 -88 150 750 941 -99 150 800 1024 -107 150 850 1097 -99 150 900 1135 -99 150 950 1181 -98 150 1000 1278 -76 160 500 587 -78 160 550 636 -84 160 600 719 -85 160 650 789 -79 160 700 831 -87 160 750 918 -93 160 800 930 -99 160 850 1065 -92 160 900 1117 -101 160 950 1148 -99 160 1000 1287 -83 170 500 740 -89 170 550 773 -85 170 600 808 -84 170 650 863 -93 170 700 1007 -97 170 750 1044 -99 170 800 1088 -94 170 850 1170 -111 170 900 1323 -108 170 950 1285 -108 170 1000 1486 -92 180 500 703 -93 180 550 845 -105 180 600 831 -102 180 650 847 -107 180 700 1040 -117 180 750 1112 -113 180 800 1132 -105 180 850 1216 -114 180 900 1296 -113 180 950 1324 -123 180 1000 1505 -87 190 500 826 -95 190 550 934 -87 190 600 1120 -101 190 650 1176 -101 190 700 1181 -103 190 750 1305 -102 190 800 1375 -108 190 850 1531 -110 190 900 1569 -119 190 950 1624 -102 190 1000 1719 -104 200 500 849 -101 200 550 908 -90 200 600 1000 -100 200 650 1133 -112 200 700 1197 -110 200 750 1323 -118 200 800 1392 -116 200 850 1469 -113 200 900 1524 -115 200 950 1612 -126 200 1000 1762 -51 100 500 433 -53 100 550 522 -52 100 600 542 -57 100 650 591 -57 100 700 651 -59 100 750 638 -57 100 800 724 -58 100 850 753 -55 100 900 823 -61 100 950 867 -62 100 1000 890 -52 110 500 468 -53 110 550 507 -60 110 600 525 -62 110 650 611 -65 110 700 673 -58 110 750 702 -60 110 800 791 -65 110 850 801 -65 110 900 815 -68 110 950 876 -72 110 1000 908 -71 120 500 547 -72 120 550 572 -68 120 600 594 -82 120 650 665 -72 120 700 745 -79 120 750 797 -79 120 800 820 -81 120 850 909 -80 120 900 996 -79 120 950 998 -86 120 1000 1026 -72 130 500 509 -84 130 550 591 -82 130 600 630 -79 130 650 673 -83 130 700 701 -84 130 750 764 -87 130 800 861 -89 130 850 914 -85 130 900 961 -91 130 950 1013 -96 130 1000 1083 -68 140 500 535 -69 140 550 574 -77 140 600 619 -77 140 650 679 -74 140 700 764 -78 140 750 804 -87 140 800 860 -86 140 850 952 -89 140 900 969 -81 140 950 1047 -83 140 1000 1086 -76 150 500 570 -81 150 550 642 -78 150 600 682 -81 150 650 760 -83 150 700 804 -87 150 750 822 -81 150 800 945 -89 150 850 951 -88 150 900 1039 -87 150 950 1103 -91 150 1000 1111 -94 160 500 755 -75 160 550 809 -90 160 600 829 -94 160 650 991 -91 160 700 994 -85 160 750 1056 -106 160 800 1196 -95 160 850 1269 -97 160 900 1279 -102 160 950 1433 -104 160 1000 1546 -84 170 500 619 -90 170 550 678 -87 170 600 730 -85 170 650 839 -94 170 700 897 -93 170 750 964 -93 170 800 1072 -102 170 850 1105 -101 170 
900 1238 -102 170 950 1229 -98 170 1000 1374 -94 180 500 811 -93 180 550 907 -106 180 600 906 -99 180 650 977 -101 180 700 1057 -107 180 750 1096 -105 180 800 1233 -101 180 850 1327 -110 180 900 1447 -105 180 950 1489 -113 180 1000 1595 -91 190 500 716 -101 190 550 763 -103 190 600 877 -103 190 650 975 -107 190 700 956 -107 190 750 1069 -110 190 800 1200 -105 190 850 1256 -104 190 900 1357 -111 190 950 1410 -110 190 1000 1494 -114 200 500 899 -123 200 550 911 -114 200 600 969 -118 200 650 1049 -126 200 700 1172 -129 200 750 1337 -131 200 800 1377 -127 200 850 1390 -127 200 900 1598 -132 200 950 1599 -128 200 1000 1748 -50 100 500 400 -54 100 550 425 -57 100 600 481 -54 100 650 515 -62 100 700 616 -59 100 750 600 -59 100 800 669 -61 100 850 673 -56 100 900 746 -60 100 950 764 -57 100 1000 770 -53 110 500 459 -53 110 550 532 -51 110 600 572 -56 110 650 595 -61 110 700 640 -56 110 750 691 -64 110 800 689 -60 110 850 775 -59 110 900 803 -64 110 950 945 -59 110 1000 1016 -70 120 500 695 -70 120 550 627 -79 120 600 624 -72 120 650 753 -69 120 700 842 -79 120 750 782 -73 120 800 927 -76 120 850 1029 -81 120 900 968 -80 120 950 1222 -82 120 1000 1099 -65 130 500 543 -60 130 550 602 -73 130 600 643 -73 130 650 686 -78 130 700 734 -74 130 750 772 -68 130 800 864 -82 130 850 911 -80 130 900 1041 -87 130 950 1026 -80 130 1000 1074 -70 140 500 580 -69 140 550 615 -72 140 600 668 -80 140 650 817 -82 140 700 798 -87 140 750 864 -79 140 800 933 -74 140 850 1020 -80 140 900 1072 -88 140 950 1103 -80 140 1000 1197 -71 150 500 554 -75 150 550 668 -75 150 600 732 -70 150 650 780 -77 150 700 873 -87 150 750 954 -71 150 800 1006 -74 150 850 1054 -80 150 900 1105 -78 150 950 1134 -83 150 1000 1210 -79 160 500 573 -94 160 550 657 -99 160 600 674 -90 160 650 770 -98 160 700 845 -93 160 750 894 -95 160 800 935 -93 160 850 1023 -101 160 900 1090 -99 160 950 1114 -99 160 1000 1184 -72 170 500 681 -74 170 550 751 -71 170 600 837 -82 170 650 941 -96 170 700 1045 -84 170 750 1144 -84 170 800 1123 -101 170 850 1243 -97 170 900 1377 -93 170 950 1359 -96 170 1000 1450 -95 180 500 693 -93 180 550 792 -91 180 600 852 -91 180 650 940 -97 180 700 1005 -98 180 750 1094 -104 180 800 1223 -102 180 850 1255 -105 180 900 1368 -97 180 950 1388 -112 180 1000 1419 -92 190 500 768 -102 190 550 882 -103 190 600 892 -107 190 650 1012 -103 190 700 1098 -110 190 750 1110 -110 190 800 1259 -111 190 850 1268 -115 190 900 1431 -117 190 950 1476 -119 190 1000 1536 -100 200 500 753 -105 200 550 937 -113 200 600 3153 -103 200 650 1021 -106 200 700 1126 -117 200 750 1218 -116 200 800 1300 -123 200 850 1437 -125 200 900 1487 -121 200 950 1519 -132 200 1000 1626 diff --git a/core/rewriting/indexing/test/results/simple-match-5-10.txt b/core/rewriting/indexing/test/results/simple-match-5-10.txt deleted file mode 100644 index 66063c83..00000000 --- a/core/rewriting/indexing/test/results/simple-match-5-10.txt +++ /dev/null @@ -1,360 +0,0 @@ -19 50 250 101 -22 50 300 133 -16 50 350 138 -20 50 400 180 -23 50 450 236 -23 50 500 216 -20 60 250 107 -21 60 300 153 -27 60 350 174 -25 60 400 181 -27 60 450 240 -29 60 500 243 -28 70 250 148 -27 70 300 177 -28 70 350 246 -28 70 400 235 -33 70 450 322 -29 70 500 287 -37 80 250 162 -35 80 300 215 -38 80 350 217 -48 80 400 290 -44 80 450 282 -48 80 500 354 -40 90 250 193 -42 90 300 201 -46 90 350 302 -43 90 400 305 -45 90 450 391 -48 90 500 422 -38 100 250 182 -39 100 300 213 -39 100 350 292 -38 100 400 294 -40 100 450 371 -42 100 500 394 -27 50 250 113 -27 50 300 112 -23 50 350 122 -32 50 400 158 -31 50 450 215 
-31 50 500 198 -21 60 250 104 -22 60 300 129 -23 60 350 185 -29 60 400 166 -26 60 450 182 -29 60 500 234 -34 70 250 124 -35 70 300 156 -34 70 350 185 -35 70 400 229 -39 70 450 233 -34 70 500 267 -38 80 250 188 -36 80 300 208 -37 80 350 273 -39 80 400 262 -41 80 450 313 -46 80 500 337 -38 90 250 169 -38 90 300 205 -38 90 350 252 -40 90 400 294 -45 90 450 306 -48 90 500 348 -41 100 250 188 -52 100 300 214 -55 100 350 304 -53 100 400 310 -49 100 450 354 -60 100 500 384 -21 50 250 140 -21 50 300 158 -25 50 350 158 -25 50 400 230 -27 50 450 220 -25 50 500 244 -16 60 250 141 -22 60 300 132 -22 60 350 160 -24 60 400 185 -30 60 450 243 -25 60 500 237 -27 70 250 126 -33 70 300 176 -33 70 350 191 -37 70 400 210 -38 70 450 256 -36 70 500 258 -37 80 250 125 -40 80 300 190 -47 80 350 178 -42 80 400 219 -46 80 450 282 -44 80 500 250 -42 90 250 143 -40 90 300 229 -44 90 350 196 -50 90 400 243 -49 90 450 312 -53 90 500 319 -47 100 250 271 -48 100 300 241 -52 100 350 341 -57 100 400 368 -50 100 450 430 -48 100 500 486 -26 50 250 99 -25 50 300 119 -27 50 350 147 -30 50 400 205 -28 50 450 191 -28 50 500 179 -21 60 250 136 -17 60 300 177 -24 60 350 180 -19 60 400 213 -21 60 450 258 -20 60 500 242 -31 70 250 125 -36 70 300 163 -35 70 350 194 -37 70 400 212 -40 70 450 254 -44 70 500 267 -33 80 250 151 -44 80 300 236 -44 80 350 222 -42 80 400 254 -48 80 450 310 -45 80 500 319 -45 90 250 211 -44 90 300 226 -41 90 350 274 -43 90 400 301 -52 90 450 373 -46 90 500 360 -50 100 250 170 -52 100 300 272 -54 100 350 265 -51 100 400 318 -55 100 450 357 -54 100 500 436 -13 50 250 84 -19 50 300 105 -22 50 350 146 -26 50 400 139 -25 50 450 175 -23 50 500 186 -31 60 250 134 -31 60 300 141 -29 60 350 152 -29 60 400 169 -32 60 450 234 -34 60 500 212 -30 70 250 140 -33 70 300 182 -34 70 350 184 -32 70 400 201 -35 70 450 296 -35 70 500 252 -34 80 250 135 -37 80 300 215 -38 80 350 223 -37 80 400 274 -43 80 450 301 -40 80 500 358 -36 90 250 159 -39 90 300 186 -42 90 350 274 -43 90 400 281 -49 90 450 339 -45 90 500 391 -48 100 250 219 -59 100 300 247 -56 100 350 292 -53 100 400 338 -58 100 450 384 -66 100 500 412 -16 50 250 91 -16 50 300 117 -17 50 350 133 -22 50 400 144 -24 50 450 169 -22 50 500 211 -23 60 250 123 -25 60 300 175 -26 60 350 187 -26 60 400 213 -29 60 450 222 -34 60 500 277 -37 70 250 124 -34 70 300 147 -38 70 350 187 -41 70 400 227 -38 70 450 207 -38 70 500 232 -36 80 250 159 -38 80 300 165 -35 80 350 215 -39 80 400 269 -45 80 450 263 -47 80 500 303 -43 90 250 148 -45 90 300 171 -49 90 350 238 -56 90 400 249 -49 90 450 270 -57 90 500 338 -50 100 250 177 -45 100 300 250 -60 100 350 267 -53 100 400 355 -55 100 450 333 -61 100 500 399 -25 50 250 85 -23 50 300 99 -31 50 350 124 -33 50 400 167 -27 50 450 158 -31 50 500 181 -22 60 250 120 -27 60 300 187 -23 60 350 180 -22 60 400 206 -29 60 450 249 -26 60 500 255 -31 70 250 132 -37 70 300 192 -36 70 350 197 -40 70 400 239 -39 70 450 280 -38 70 500 283 -29 80 250 188 -34 80 300 199 -30 80 350 220 -39 80 400 288 -33 80 450 313 -37 80 500 351 -36 90 250 166 -37 90 300 216 -37 90 350 279 -43 90 400 274 -41 90 450 331 -49 90 500 336 -39 100 250 215 -47 100 300 237 -45 100 350 269 -48 100 400 287 -49 100 450 368 -53 100 500 350 -13 50 250 99 -14 50 300 133 -18 50 350 130 -19 50 400 151 -18 50 450 172 -20 50 500 209 -18 60 250 108 -29 60 300 134 -21 60 350 167 -32 60 400 229 -29 60 450 233 -26 60 500 289 -30 70 250 107 -33 70 300 141 -32 70 350 153 -32 70 400 219 -32 70 450 197 -32 70 500 229 -38 80 250 187 -35 80 300 178 -39 80 350 218 -44 80 400 303 -44 80 450 290 -47 80 500 339 
-37 90 250 160 -34 90 300 220 -41 90 350 284 -40 90 400 295 -42 90 450 363 -48 90 500 346 -42 100 250 229 -38 100 300 211 -43 100 350 299 -43 100 400 289 -46 100 450 362 -48 100 500 385 -22 50 250 99 -18 50 300 121 -22 50 350 134 -23 50 400 142 -23 50 450 171 -25 50 500 206 -26 60 250 106 -26 60 300 131 -27 60 350 145 -33 60 400 229 -32 60 450 189 -35 60 500 206 -22 70 250 227 -28 70 300 155 -24 70 350 195 -24 70 400 264 -32 70 450 243 -30 70 500 327 -36 80 250 155 -37 80 300 184 -48 80 350 273 -42 80 400 247 -45 80 450 278 -39 80 500 338 -35 90 250 152 -39 90 300 245 -44 90 350 214 -44 90 400 275 -46 90 450 324 -44 90 500 341 -53 100 250 232 -51 100 300 232 -52 100 350 308 -57 100 400 314 -58 100 450 425 -55 100 500 403 -24 50 250 87 -24 50 300 132 -23 50 350 123 -24 50 400 149 -28 50 450 158 -29 50 500 229 -33 60 250 94 -29 60 300 125 -33 60 350 136 -32 60 400 205 -31 60 450 183 -32 60 500 205 -24 70 250 126 -34 70 300 205 -27 70 350 229 -37 70 400 228 -31 70 450 290 -37 70 500 276 -38 80 250 210 -38 80 300 164 -47 80 350 205 -40 80 400 279 -39 80 450 269 -45 80 500 324 -45 90 250 157 -42 90 300 194 -43 90 350 274 -42 90 400 250 -58 90 450 343 -57 90 500 330 -48 100 250 186 -52 100 300 281 -50 100 350 283 -55 100 400 359 -58 100 450 348 -60 100 500 452 diff --git a/core/rewriting/indexing/test/results/std-adj-10-20.txt b/core/rewriting/indexing/test/results/std-adj-10-20.txt deleted file mode 100644 index 4761abbb..00000000 --- a/core/rewriting/indexing/test/results/std-adj-10-20.txt +++ /dev/null @@ -1,1210 +0,0 @@ -75 100 500 28 -73 100 550 26 -70 100 600 24 -79 100 650 31 -105 100 700 39 -117 100 750 40 -131 100 800 43 -123 100 850 43 -154 100 900 60 -130 100 950 46 -164 100 1000 170 -107 110 500 23 -107 110 550 26 -115 110 600 27 -113 110 650 28 -137 110 700 32 -144 110 750 41 -165 110 800 40 -155 110 850 35 -182 110 900 42 -190 110 950 43 -212 110 1000 48 -80 120 500 33 -98 120 550 27 -93 120 600 27 -99 120 650 27 -130 120 700 164 -134 120 750 34 -129 120 800 35 -139 120 850 43 -165 120 900 44 -155 120 950 56 -196 120 1000 69 -90 130 500 20 -117 130 550 29 -130 130 600 31 -145 130 650 31 -140 130 700 35 -141 130 750 37 -144 130 800 36 -155 130 850 174 -180 130 900 47 -183 130 950 50 -180 130 1000 52 -116 140 500 26 -122 140 550 34 -155 140 600 30 -178 140 650 38 -194 140 700 47 -185 140 750 46 -214 140 800 55 -227 140 850 53 -240 140 900 92 -219 140 950 202 -239 140 1000 61 -131 150 500 27 -149 150 550 34 -156 150 600 33 -164 150 650 38 -189 150 700 45 -217 150 750 46 -207 150 800 50 -206 150 850 49 -215 150 900 50 -219 150 950 55 -259 150 1000 224 -93 160 500 21 -102 160 550 30 -116 160 600 32 -161 160 650 47 -151 160 700 41 -169 160 750 48 -163 160 800 50 -165 160 850 49 -177 160 900 55 -190 160 950 50 -217 160 1000 54 -72 170 500 27 -101 170 550 170 -107 170 600 37 -119 170 650 34 -139 170 700 45 -111 170 750 39 -159 170 800 49 -194 170 850 56 -192 170 900 54 -175 170 950 59 -169 170 1000 53 -128 180 500 34 -139 180 550 29 -115 180 600 38 -189 180 650 171 -166 180 700 39 -192 180 750 43 -213 180 800 44 -209 180 850 49 -201 180 900 54 -237 180 950 51 -268 180 1000 69 -168 190 500 37 -121 190 550 34 -169 190 600 51 -226 190 650 49 -235 190 700 208 -198 190 750 49 -231 190 800 59 -244 190 850 68 -247 190 900 60 -303 190 950 81 -308 190 1000 77 -115 200 500 31 -101 200 550 27 -95 200 600 34 -146 200 650 42 -158 200 700 200 -137 200 750 36 -150 200 800 42 -169 200 850 52 -164 200 900 58 -217 200 950 61 -207 200 1000 64 -62 100 500 22 -70 100 550 25 -64 100 600 24 -60 100 650 35 -82 
100 700 33 -104 100 750 43 -102 100 800 41 -95 100 850 127 -95 100 900 43 -108 100 950 44 -119 100 1000 51 -90 110 500 20 -96 110 550 24 -92 110 600 26 -117 110 650 27 -125 110 700 31 -130 110 750 32 -147 110 800 33 -152 110 850 39 -151 110 900 38 -187 110 950 44 -193 110 1000 48 -59 120 500 142 -59 120 550 23 -65 120 600 29 -62 120 650 26 -82 120 700 31 -82 120 750 28 -97 120 800 37 -87 120 850 39 -94 120 900 37 -100 120 950 44 -129 120 1000 47 -96 130 500 22 -122 130 550 30 -108 130 600 29 -123 130 650 33 -141 130 700 40 -142 130 750 34 -158 130 800 284 -166 130 850 46 -180 130 900 44 -176 130 950 47 -178 130 1000 54 -82 140 500 22 -94 140 550 23 -91 140 600 22 -99 140 650 27 -114 140 700 29 -149 140 750 33 -135 140 800 34 -136 140 850 36 -144 140 900 41 -151 140 950 43 -155 140 1000 190 -77 150 500 21 -95 150 550 30 -98 150 600 26 -111 150 650 33 -122 150 700 40 -103 150 750 37 -126 150 800 38 -136 150 850 49 -149 150 900 48 -159 150 950 49 -200 150 1000 57 -105 160 500 27 -145 160 550 35 -141 160 600 38 -165 160 650 39 -181 160 700 41 -169 160 750 43 -188 160 800 40 -191 160 850 47 -194 160 900 51 -203 160 950 53 -223 160 1000 64 -137 170 500 26 -146 170 550 27 -162 170 600 32 -191 170 650 36 -212 170 700 38 -183 170 750 158 -187 170 800 45 -173 170 850 40 -254 170 900 51 -236 170 950 52 -271 170 1000 56 -98 180 500 26 -138 180 550 37 -150 180 600 40 -141 180 650 42 -137 180 700 40 -150 180 750 45 -158 180 800 56 -165 180 850 190 -210 180 900 57 -221 180 950 61 -218 180 1000 62 -172 190 500 28 -159 190 550 32 -196 190 600 36 -214 190 650 40 -259 190 700 46 -232 190 750 46 -277 190 800 52 -288 190 850 53 -286 190 900 194 -320 190 950 60 -340 190 1000 62 -177 200 500 32 -180 200 550 33 -183 200 600 37 -189 200 650 42 -216 200 700 45 -216 200 750 45 -220 200 800 56 -262 200 850 64 -279 200 900 211 -293 200 950 61 -280 200 1000 62 -82 100 500 21 -88 100 550 21 -78 100 600 27 -87 100 650 34 -109 100 700 33 -136 100 750 35 -122 100 800 39 -142 100 850 48 -129 100 900 43 -141 100 950 46 -165 100 1000 49 -89 110 500 106 -125 110 550 35 -110 110 600 34 -116 110 650 34 -137 110 700 42 -153 110 750 44 -145 110 800 46 -163 110 850 48 -163 110 900 54 -179 110 950 66 -189 110 1000 60 -78 120 500 26 -96 120 550 33 -102 120 600 30 -125 120 650 31 -118 120 700 35 -146 120 750 39 -144 120 800 42 -147 120 850 52 -157 120 900 57 -189 120 950 54 -180 120 1000 50 -82 130 500 20 -91 130 550 22 -95 130 600 24 -91 130 650 26 -138 130 700 29 -135 130 750 34 -128 130 800 153 -140 130 850 37 -157 130 900 38 -150 130 950 39 -184 130 1000 46 -80 140 500 24 -96 140 550 23 -99 140 600 24 -110 140 650 33 -109 140 700 29 -120 140 750 42 -127 140 800 33 -128 140 850 38 -161 140 900 37 -175 140 950 38 -161 140 1000 46 -125 150 500 120 -113 150 550 31 -145 150 600 37 -134 150 650 40 -193 150 700 50 -174 150 750 43 -161 150 800 42 -178 150 850 45 -199 150 900 57 -228 150 950 57 -238 150 1000 66 -126 160 500 33 -119 160 550 31 -139 160 600 150 -138 160 650 36 -150 160 700 40 -167 160 750 43 -204 160 800 53 -206 160 850 52 -220 160 900 54 -213 160 950 54 -216 160 1000 57 -101 170 500 23 -146 170 550 28 -121 170 600 29 -122 170 650 29 -141 170 700 161 -147 170 750 31 -177 170 800 34 -187 170 850 46 -180 170 900 44 -202 170 950 47 -220 170 1000 49 -71 180 500 26 -82 180 550 26 -104 180 600 27 -101 180 650 33 -104 180 700 46 -134 180 750 39 -106 180 800 44 -110 180 850 43 -126 180 900 166 -155 180 950 57 -142 180 1000 53 -123 190 500 30 -144 190 550 30 -189 190 600 44 -160 190 650 35 -236 190 700 53 -218 190 750 50 -204 190 800 
47 -258 190 850 55 -282 190 900 65 -290 190 950 199 -285 190 1000 59 -178 200 500 34 -183 200 550 37 -190 200 600 40 -195 200 650 45 -198 200 700 44 -247 200 750 50 -248 200 800 58 -249 200 850 55 -323 200 900 70 -314 200 950 221 -334 200 1000 69 -93 100 500 22 -78 100 550 21 -108 100 600 25 -89 100 650 24 -108 100 700 30 -131 100 750 33 -121 100 800 31 -134 100 850 34 -120 100 900 33 -162 100 950 42 -169 100 1000 43 -80 110 500 16 -77 110 550 20 -88 110 600 31 -98 110 650 109 -114 110 700 33 -133 110 750 29 -133 110 800 34 -124 110 850 38 -136 110 900 50 -148 110 950 36 -173 110 1000 39 -65 120 500 24 -79 120 550 24 -77 120 600 26 -93 120 650 28 -105 120 700 35 -118 120 750 35 -118 120 800 35 -116 120 850 36 -118 120 900 36 -156 120 950 40 -143 120 1000 43 -104 130 500 23 -112 130 550 29 -142 130 600 31 -172 130 650 38 -168 130 700 32 -159 130 750 44 -211 130 800 47 -179 130 850 44 -203 130 900 50 -244 130 950 56 -250 130 1000 53 -93 140 500 129 -133 140 550 28 -121 140 600 25 -124 140 650 32 -144 140 700 28 -144 140 750 36 -158 140 800 38 -196 140 850 44 -237 140 900 39 -245 140 950 46 -215 140 1000 44 -111 150 500 27 -74 150 550 23 -95 150 600 22 -118 150 650 30 -135 150 700 39 -129 150 750 158 -153 150 800 35 -164 150 850 36 -172 150 900 49 -192 150 950 49 -166 150 1000 51 -97 160 500 29 -158 160 550 31 -141 160 600 35 -151 160 650 36 -176 160 700 43 -179 160 750 40 -199 160 800 53 -198 160 850 46 -243 160 900 235 -241 160 950 61 -236 160 1000 56 -128 170 500 36 -96 170 550 27 -119 170 600 31 -134 170 650 40 -143 170 700 42 -159 170 750 46 -177 170 800 57 -182 170 850 49 -190 170 900 189 -205 170 950 57 -211 170 1000 62 -109 180 500 29 -133 180 550 33 -103 180 600 33 -132 180 650 33 -157 180 700 38 -167 180 750 47 -185 180 800 57 -169 180 850 49 -214 180 900 58 -176 180 950 207 -204 180 1000 59 -145 190 500 25 -183 190 550 32 -197 190 600 41 -193 190 650 39 -217 190 700 39 -245 190 750 51 -236 190 800 52 -264 190 850 54 -285 190 900 58 -298 190 950 61 -280 190 1000 219 -144 200 500 30 -131 200 550 34 -174 200 600 37 -148 200 650 32 -189 200 700 47 -202 200 750 48 -242 200 800 53 -235 200 850 51 -256 200 900 53 -241 200 950 56 -260 200 1000 61 -73 100 500 15 -99 100 550 19 -111 100 600 54 -87 100 650 21 -110 100 700 24 -130 100 750 27 -146 100 800 33 -148 100 850 34 -150 100 900 34 -123 100 950 35 -160 100 1000 37 -68 110 500 21 -76 110 550 24 -91 110 600 27 -97 110 650 32 -106 110 700 34 -107 110 750 35 -126 110 800 37 -107 110 850 39 -139 110 900 152 -148 110 950 46 -138 110 1000 45 -121 120 500 24 -127 120 550 32 -113 120 600 30 -133 120 650 33 -123 120 700 35 -172 120 750 43 -171 120 800 45 -193 120 850 52 -169 120 900 48 -195 120 950 50 -195 120 1000 52 -80 130 500 139 -90 130 550 26 -105 130 600 33 -119 130 650 31 -122 130 700 34 -119 130 750 36 -150 130 800 41 -154 130 850 43 -154 130 900 51 -169 130 950 48 -154 130 1000 60 -127 140 500 21 -131 140 550 24 -170 140 600 24 -149 140 650 29 -138 140 700 143 -185 140 750 32 -235 140 800 39 -266 140 850 39 -224 140 900 40 -225 140 950 43 -256 140 1000 44 -106 150 500 30 -130 150 550 32 -127 150 600 35 -149 150 650 34 -133 150 700 37 -147 150 750 34 -157 150 800 44 -187 150 850 51 -195 150 900 185 -198 150 950 50 -255 150 1000 91 -109 160 500 27 -141 160 550 29 -155 160 600 36 -166 160 650 32 -173 160 700 41 -206 160 750 42 -197 160 800 43 -229 160 850 49 -240 160 900 52 -246 160 950 186 -264 160 1000 57 -117 170 500 21 -137 170 550 32 -158 170 600 32 -148 170 650 36 -155 170 700 37 -177 170 750 35 -188 170 800 39 -218 170 850 48 -211 
[... remaining four-column rows of the preceding deleted benchmark-results file omitted ...]
diff --git a/core/rewriting/indexing/test/results/std-adj-25-30.txt b/core/rewriting/indexing/test/results/std-adj-25-30.txt
deleted file mode 100644
index 5c6cfe86..00000000
--- a/core/rewriting/indexing/test/results/std-adj-25-30.txt
+++ /dev/null
@@ -1,360 +0,0 @@
[... 360 deleted rows of four-column numeric benchmark data omitted ...]
diff --git a/core/rewriting/indexing/test/results/std-hist-1-5-2.txt b/core/rewriting/indexing/test/results/std-hist-1-5-2.txt
deleted file mode 100644
index 51f69467..00000000
--- a/core/rewriting/indexing/test/results/std-hist-1-5-2.txt
+++ /dev/null
@@ -1,50 +0,0 @@
[... 50 deleted rows of four-column numeric benchmark data omitted ...]
diff --git a/core/rewriting/indexing/test/results/std-hist-1-5.txt b/core/rewriting/indexing/test/results/std-hist-1-5.txt
deleted file mode 100644
index 63772c0c..00000000
--- a/core/rewriting/indexing/test/results/std-hist-1-5.txt
+++ /dev/null
@@ -1,94 +0,0 @@
[... 94 deleted rows of four-column numeric benchmark data omitted ...]
diff --git a/core/rewriting/indexing/test/results/std-match-1-5.txt b/core/rewriting/indexing/test/results/std-match-1-5.txt
deleted file mode 100644
index bdb16f19..00000000
--- a/core/rewriting/indexing/test/results/std-match-1-5.txt
+++ /dev/null
@@ -1,25 +0,0 @@
[... 25 deleted rows of four-column numeric benchmark data omitted ...]
diff --git a/core/rewriting/indexing/test/results/std-match-10-20.txt b/core/rewriting/indexing/test/results/std-match-10-20.txt
deleted file mode 100644
index fd7c1202..00000000
--- a/core/rewriting/indexing/test/results/std-match-10-20.txt
+++ /dev/null
@@ -1,1210 +0,0 @@
[... 1210 deleted rows of four-column numeric benchmark data omitted ...]
diff --git a/core/rewriting/indexing/test/results/std-match-25-30.txt b/core/rewriting/indexing/test/results/std-match-25-30.txt
deleted file mode 100644
index 4c65b861..00000000
--- a/core/rewriting/indexing/test/results/std-match-25-30.txt
+++ /dev/null
@@ -1,360 +0,0 @@
[... 360 deleted rows of four-column numeric benchmark data omitted ...]
diff --git a/core/rewriting/indexing/test/results/std-match-5-10.txt b/core/rewriting/indexing/test/results/std-match-5-10.txt
deleted file mode 100644
index 6d83529b..00000000
--- a/core/rewriting/indexing/test/results/std-match-5-10.txt
+++ /dev/null
@@ -1,360 +0,0 @@
[... 360 deleted rows of four-column numeric benchmark data omitted ...]
diff --git a/core/rewriting/indexing/test/results/std-rule-varied-adj-1-40.txt b/core/rewriting/indexing/test/results/std-rule-varied-adj-1-40.txt
deleted file mode 100644
index 82e037ae..00000000
--- a/core/rewriting/indexing/test/results/std-rule-varied-adj-1-40.txt
+++ /dev/null
@@ -1,8000 +0,0 @@
[... deleted rows of four-column numeric benchmark data omitted; the file's remaining rows continue below ...]
-31 60 500 13 -26 60 500 13 -35 60 500 14 -29 60 500 14 -31 60 500 13 -33 60 500 15 -24 60 500 12 -33 60 500 14 -33 60 500 14 -39 60 500 16 -29 60 500 13 -34 60 500 14 -40 60 500 16 -29 60 500 13 -23 60 500 13 -32 60 500 130 -31 60 500 20 -37 60 500 14 -44 65 500 18 -38 65 500 16 -46 65 500 21 -32 65 500 17 -43 65 500 17 -37 65 500 17 -32 65 500 16 -33 65 500 16 -34 65 500 18 -26 65 500 14 -35 65 500 17 -42 65 500 16 -41 65 500 19 -46 65 500 21 -40 65 500 17 -38 65 500 18 -40 65 500 15 -41 65 500 19 -34 65 500 16 -37 65 500 19 -65 70 500 16 -44 70 500 12 -54 70 500 16 -60 70 500 16 -64 70 500 15 -56 70 500 14 -50 70 500 13 -73 70 500 14 -52 70 500 125 -52 70 500 15 -53 70 500 14 -54 70 500 14 -59 70 500 14 -51 70 500 15 -66 70 500 16 -70 70 500 15 -46 70 500 14 -48 70 500 12 -77 70 500 16 -61 70 500 14 -65 75 500 15 -54 75 500 14 -61 75 500 14 -58 75 500 16 -67 75 500 16 -67 75 500 17 -63 75 500 15 -57 75 500 15 -64 75 500 17 -50 75 500 14 -66 75 500 14 -66 75 500 15 -64 75 500 16 -48 75 500 15 -53 75 500 15 -63 75 500 15 -54 75 500 15 -60 75 500 14 -54 75 500 14 -51 75 500 14 -34 80 500 16 -54 80 500 120 -41 80 500 15 -29 80 500 14 -39 80 500 13 -36 80 500 13 -49 80 500 16 -53 80 500 17 -32 80 500 13 -42 80 500 15 -45 80 500 18 -29 80 500 11 -34 80 500 13 -50 80 500 17 -29 80 500 13 -38 80 500 15 -40 80 500 13 -32 80 500 13 -31 80 500 14 -39 80 500 15 -63 85 500 17 -72 85 500 22 -84 85 500 21 -70 85 500 19 -73 85 500 19 -55 85 500 19 -82 85 500 23 -67 85 500 20 -82 85 500 20 -77 85 500 21 -71 85 500 19 -53 85 500 17 -74 85 500 142 -97 85 500 26 -58 85 500 19 -85 85 500 21 -84 85 500 20 -61 85 500 17 -62 85 500 20 -78 85 500 21 -93 90 500 19 -82 90 500 24 -74 90 500 19 -72 90 500 22 -67 90 500 20 -85 90 500 23 -87 90 500 20 -76 90 500 19 -89 90 500 21 -81 90 500 24 -60 90 500 17 -90 90 500 23 -88 90 500 26 -78 90 500 21 -80 90 500 20 -87 90 500 23 -85 90 500 19 -88 90 500 172 -77 90 500 19 -88 90 500 19 -88 95 500 22 -79 95 500 26 -97 95 500 28 -81 95 500 25 -85 95 500 26 -82 95 500 24 -78 95 500 20 -74 95 500 23 -76 95 500 24 -99 95 500 26 -68 95 500 22 -84 95 500 27 -78 95 500 24 -85 95 500 29 -67 95 500 28 -84 95 500 26 -77 95 500 23 -72 95 500 22 -73 95 500 27 -71 95 500 176 -42 100 500 16 -35 100 500 17 -46 100 500 15 -47 100 500 18 -38 100 500 13 -53 100 500 14 -57 100 500 16 -44 100 500 13 -50 100 500 16 -49 100 500 13 -58 100 500 15 -46 100 500 13 -46 100 500 16 -45 100 500 18 -42 100 500 14 -59 100 500 19 -51 100 500 14 -48 100 500 14 -49 100 500 15 -50 100 500 19 -81 105 500 22 -58 105 500 21 -84 105 500 27 -95 105 500 26 -101 105 500 34 -79 105 500 23 -89 105 500 27 -83 105 500 25 -101 105 500 36 -91 105 500 28 -70 105 500 23 -81 105 500 26 -79 105 500 24 -85 105 500 30 -74 105 500 22 -84 105 500 24 -66 105 500 23 -62 105 500 21 -90 105 500 26 -87 105 500 22 -43 110 500 16 -66 110 500 19 -65 110 500 20 -73 110 500 21 -49 110 500 16 -55 110 500 19 -46 110 500 17 -69 110 500 21 -61 110 500 19 -65 110 500 19 -52 110 500 17 -64 110 500 20 -56 110 500 155 -55 110 500 21 -64 110 500 21 -58 110 500 16 -60 110 500 17 -58 110 500 20 -54 110 500 19 -54 110 500 18 -100 115 500 23 -105 115 500 23 -104 115 500 22 -96 115 500 25 -88 115 500 22 -87 115 500 26 -96 115 500 27 -95 115 500 22 -84 115 500 25 -89 115 500 49 -80 115 500 20 -100 115 500 21 -90 115 500 22 -87 115 500 22 -84 115 500 24 -100 115 500 160 -85 115 500 20 -93 115 500 22 -93 115 500 24 -94 115 500 22 -112 120 500 25 -109 120 500 27 -101 120 500 24 -104 120 500 24 -85 120 500 21 -97 120 500 28 -110 120 500 28 -99 120 500 27 
-110 120 500 28 -86 120 500 24 -116 120 500 29 -90 120 500 27 -102 120 500 29 -98 120 500 25 -96 120 500 27 -111 120 500 29 -94 120 500 23 -95 120 500 184 -111 120 500 27 -103 120 500 30 -121 125 500 28 -109 125 500 27 -111 125 500 24 -124 125 500 32 -102 125 500 52 -115 125 500 29 -114 125 500 24 -140 125 500 28 -102 125 500 26 -128 125 500 37 -134 125 500 31 -104 125 500 28 -132 125 500 28 -134 125 500 31 -109 125 500 28 -124 125 500 177 -139 125 500 32 -128 125 500 28 -125 125 500 28 -119 125 500 27 -95 130 500 24 -81 130 500 22 -65 130 500 22 -97 130 500 21 -98 130 500 26 -78 130 500 25 -93 130 500 24 -73 130 500 23 -89 130 500 24 -55 130 500 20 -95 130 500 25 -74 130 500 24 -103 130 500 28 -89 130 500 23 -104 130 500 21 -71 130 500 22 -92 130 500 25 -79 130 500 22 -98 130 500 25 -82 130 500 22 -102 135 500 24 -99 135 500 23 -88 135 500 25 -105 135 500 24 -100 135 500 28 -102 135 500 23 -108 135 500 30 -130 135 500 33 -108 135 500 23 -102 135 500 20 -118 135 500 28 -91 135 500 31 -110 135 500 24 -84 135 500 22 -111 135 500 22 -96 135 500 21 -92 135 500 25 -104 135 500 25 -96 135 500 23 -102 135 500 182 -128 140 500 29 -132 140 500 28 -152 140 500 35 -128 140 500 28 -139 140 500 35 -154 140 500 34 -116 140 500 29 -119 140 500 33 -122 140 500 29 -134 140 500 29 -115 140 500 33 -132 140 500 33 -133 140 500 36 -129 140 500 34 -117 140 500 31 -108 140 500 30 -128 140 500 35 -100 140 500 189 -131 140 500 33 -125 140 500 32 -163 145 500 31 -141 145 500 29 -145 145 500 25 -148 145 500 24 -153 145 500 31 -167 145 500 32 -132 145 500 31 -156 145 500 32 -116 145 500 26 -124 145 500 24 -155 145 500 31 -135 145 500 26 -136 145 500 34 -149 145 500 28 -149 145 500 31 -148 145 500 29 -169 145 500 193 -145 145 500 31 -156 145 500 30 -140 145 500 26 -127 150 500 32 -125 150 500 29 -104 150 500 24 -132 150 500 30 -110 150 500 32 -110 150 500 27 -111 150 500 29 -128 150 500 29 -107 150 500 29 -111 150 500 30 -117 150 500 30 -150 150 500 42 -128 150 500 31 -130 150 500 32 -116 150 500 28 -119 150 500 36 -88 150 500 26 -148 150 500 29 -123 150 500 31 -134 150 500 31 -123 155 500 26 -113 155 500 23 -96 155 500 23 -122 155 500 27 -105 155 500 23 -109 155 500 23 -120 155 500 22 -127 155 500 28 -102 155 500 22 -131 155 500 27 -115 155 500 28 -129 155 500 25 -119 155 500 28 -120 155 500 23 -113 155 500 26 -105 155 500 23 -120 155 500 35 -128 155 500 27 -113 155 500 21 -112 155 500 25 -114 160 500 34 -130 160 500 37 -120 160 500 32 -135 160 500 32 -150 160 500 30 -121 160 500 36 -120 160 500 40 -115 160 500 37 -137 160 500 34 -117 160 500 25 -94 160 500 26 -118 160 500 30 -129 160 500 31 -113 160 500 31 -127 160 500 48 -123 160 500 30 -95 160 500 24 -137 160 500 29 -132 160 500 35 -123 160 500 34 -111 165 500 28 -121 165 500 29 -107 165 500 27 -109 165 500 27 -104 165 500 25 -119 165 500 30 -124 165 500 29 -92 165 500 27 -129 165 500 26 -117 165 500 25 -141 165 500 29 -122 165 500 24 -107 165 500 23 -129 165 500 177 -112 165 500 27 -145 165 500 27 -124 165 500 26 -126 165 500 27 -108 165 500 23 -119 165 500 27 -137 170 500 30 -133 170 500 28 -169 170 500 32 -129 170 500 28 -136 170 500 34 -133 170 500 36 -129 170 500 31 -122 170 500 32 -136 170 500 29 -148 170 500 29 -157 170 500 30 -155 170 500 186 -144 170 500 35 -173 170 500 31 -129 170 500 27 -134 170 500 34 -127 170 500 36 -137 170 500 32 -128 170 500 31 -116 170 500 29 -125 175 500 26 -135 175 500 29 -156 175 500 30 -145 175 500 31 -147 175 500 31 -151 175 500 31 -149 175 500 36 -130 175 500 27 -149 175 500 39 -169 175 500 172 -153 175 500 30 -125 175 500 
30 -136 175 500 34 -139 175 500 37 -153 175 500 37 -132 175 500 31 -140 175 500 29 -144 175 500 36 -150 175 500 30 -143 175 500 34 -98 180 500 29 -112 180 500 32 -118 180 500 36 -123 180 500 34 -110 180 500 44 -119 180 500 31 -86 180 500 34 -123 180 500 32 -103 180 500 30 -98 180 500 31 -110 180 500 35 -112 180 500 29 -102 180 500 32 -99 180 500 28 -109 180 500 29 -105 180 500 29 -141 180 500 35 -110 180 500 30 -102 180 500 27 -114 180 500 41 -141 185 500 35 -109 185 500 27 -141 185 500 34 -124 185 500 31 -127 185 500 164 -106 185 500 35 -127 185 500 42 -113 185 500 27 -131 185 500 36 -133 185 500 31 -98 185 500 29 -144 185 500 31 -124 185 500 30 -121 185 500 28 -116 185 500 31 -122 185 500 34 -115 185 500 35 -112 185 500 29 -138 185 500 32 -104 185 500 33 -119 190 500 29 -181 190 500 34 -140 190 500 158 -119 190 500 29 -157 190 500 34 -114 190 500 27 -129 190 500 30 -154 190 500 34 -156 190 500 33 -128 190 500 31 -108 190 500 25 -133 190 500 27 -122 190 500 30 -131 190 500 35 -107 190 500 30 -173 190 500 35 -142 190 500 31 -117 190 500 30 -165 190 500 32 -141 190 500 29 -155 195 500 160 -142 195 500 30 -117 195 500 26 -147 195 500 28 -145 195 500 27 -127 195 500 25 -152 195 500 27 -113 195 500 23 -128 195 500 24 -123 195 500 22 -153 195 500 27 -157 195 500 26 -158 195 500 31 -169 195 500 31 -124 195 500 25 -129 195 500 24 -135 195 500 24 -135 195 500 26 -171 195 500 29 -131 195 500 24 -139 200 500 28 -162 200 500 152 -184 200 500 34 -169 200 500 33 -170 200 500 34 -149 200 500 30 -198 200 500 28 -176 200 500 32 -201 200 500 37 -169 200 500 31 -125 200 500 30 -157 200 500 29 -163 200 500 29 -185 200 500 34 -169 200 500 30 -169 200 500 31 -138 200 500 27 -202 200 500 31 -180 200 500 185 -167 200 500 34 -5 5 500 6 -3 5 500 5 -6 5 500 5 -1 5 500 5 -5 5 500 6 -4 5 500 6 -4 5 500 6 -7 5 500 6 -6 5 500 6 -7 5 500 6 -7 5 500 6 -7 5 500 6 -3 5 500 5 -8 5 500 6 -9 5 500 6 -4 5 500 6 -3 5 500 6 -6 5 500 6 -6 5 500 6 -6 5 500 6 -2 10 500 7 -4 10 500 7 -3 10 500 6 -7 10 500 7 -6 10 500 7 -1 10 500 6 -5 10 500 7 -2 10 500 7 -5 10 500 7 -3 10 500 7 -5 10 500 7 -3 10 500 7 -7 10 500 7 -3 10 500 7 -4 10 500 7 -2 10 500 7 -4 10 500 7 -6 10 500 7 -3 10 500 7 -6 10 500 7 -9 15 500 7 -9 15 500 7 -13 15 500 7 -4 15 500 6 -10 15 500 7 -7 15 500 6 -6 15 500 6 -8 15 500 7 -11 15 500 8 -16 15 500 8 -8 15 500 7 -5 15 500 7 -7 15 500 7 -10 15 500 7 -10 15 500 7 -9 15 500 7 -12 15 500 75 -10 15 500 7 -7 15 500 7 -15 15 500 7 -35 20 500 15 -35 20 500 14 -22 20 500 11 -27 20 500 12 -33 20 500 12 -34 20 500 12 -27 20 500 13 -30 20 500 13 -29 20 500 12 -25 20 500 12 -34 20 500 14 -33 20 500 12 -29 20 500 14 -33 20 500 15 -32 20 500 13 -33 20 500 16 -32 20 500 12 -23 20 500 11 -35 20 500 12 -29 20 500 13 -29 25 500 12 -26 25 500 12 -17 25 500 9 -30 25 500 11 -28 25 500 11 -25 25 500 12 -29 25 500 11 -20 25 500 12 -29 25 500 11 -24 25 500 10 -34 25 500 12 -26 25 500 11 -21 25 500 10 -15 25 500 10 -30 25 500 11 -26 25 500 12 -24 25 500 10 -24 25 500 117 -18 25 500 13 -28 25 500 13 -12 30 500 9 -19 30 500 11 -18 30 500 8 -22 30 500 10 -19 30 500 9 -17 30 500 8 -22 30 500 10 -23 30 500 9 -26 30 500 10 -19 30 500 9 -11 30 500 8 -22 30 500 10 -12 30 500 9 -8 30 500 8 -14 30 500 8 -16 30 500 8 -24 30 500 10 -10 30 500 8 -17 30 500 9 -12 30 500 8 -28 35 500 12 -16 35 500 9 -27 35 500 10 -18 35 500 11 -28 35 500 11 -29 35 500 12 -28 35 500 12 -32 35 500 11 -24 35 500 10 -15 35 500 11 -24 35 500 12 -17 35 500 10 -27 35 500 12 -27 35 500 12 -25 35 500 12 -24 35 500 11 -29 35 500 12 -23 35 500 11 -20 35 500 10 -32 35 500 14 -27 40 
500 13 -18 40 500 76 -32 40 500 14 -19 40 500 12 -30 40 500 13 -18 40 500 11 -27 40 500 12 -33 40 500 13 -33 40 500 15 -18 40 500 11 -22 40 500 12 -27 40 500 12 -22 40 500 13 -35 40 500 13 -23 40 500 12 -22 40 500 12 -18 40 500 12 -35 40 500 14 -16 40 500 15 -19 40 500 12 -49 45 500 16 -38 45 500 13 -31 45 500 12 -55 45 500 16 -44 45 500 14 -49 45 500 15 -35 45 500 14 -55 45 500 20 -59 45 500 15 -54 45 500 18 -38 45 500 14 -47 45 500 15 -36 45 500 14 -36 45 500 18 -49 45 500 15 -43 45 500 15 -38 45 500 14 -61 45 500 125 -38 45 500 12 -49 45 500 14 -44 50 500 18 -48 50 500 19 -51 50 500 16 -45 50 500 20 -47 50 500 18 -44 50 500 17 -44 50 500 23 -32 50 500 12 -47 50 500 15 -54 50 500 22 -42 50 500 18 -58 50 500 18 -53 50 500 19 -56 50 500 29 -47 50 500 16 -58 50 500 20 -54 50 500 18 -42 50 500 15 -43 50 500 14 -44 50 500 19 -25 55 500 13 -13 55 500 10 -23 55 500 12 -17 55 500 12 -38 55 500 14 -21 55 500 13 -18 55 500 12 -18 55 500 12 -22 55 500 116 -16 55 500 16 -29 55 500 13 -20 55 500 11 -18 55 500 12 -25 55 500 12 -22 55 500 12 -22 55 500 12 -18 55 500 12 -24 55 500 12 -16 55 500 11 -23 55 500 13 -73 60 500 25 -73 60 500 20 -54 60 500 18 -83 60 500 22 -78 60 500 18 -72 60 500 18 -62 60 500 19 -71 60 500 21 -65 60 500 19 -70 60 500 18 -84 60 500 21 -67 60 500 19 -76 60 500 22 -60 60 500 16 -68 60 500 19 -45 60 500 17 -72 60 500 21 -61 60 500 19 -67 60 500 154 -65 60 500 22 -26 65 500 12 -30 65 500 13 -31 65 500 15 -30 65 500 12 -27 65 500 13 -33 65 500 13 -30 65 500 14 -29 65 500 14 -29 65 500 14 -34 65 500 13 -33 65 500 13 -29 65 500 14 -31 65 500 14 -37 65 500 13 -31 65 500 13 -33 65 500 14 -24 65 500 13 -29 65 500 13 -33 65 500 15 -37 65 500 15 -36 70 500 17 -32 70 500 16 -45 70 500 18 -32 70 500 16 -25 70 500 13 -46 70 500 17 -45 70 500 18 -31 70 500 13 -36 70 500 14 -43 70 500 16 -48 70 500 17 -29 70 500 15 -33 70 500 121 -49 70 500 15 -31 70 500 17 -31 70 500 15 -48 70 500 18 -43 70 500 20 -40 70 500 17 -37 70 500 18 -52 75 500 18 -41 75 500 16 -45 75 500 16 -52 75 500 18 -38 75 500 14 -30 75 500 15 -45 75 500 19 -37 75 500 14 -40 75 500 14 -40 75 500 16 -40 75 500 16 -40 75 500 16 -28 75 500 14 -35 75 500 15 -32 75 500 15 -25 75 500 14 -40 75 500 15 -41 75 500 17 -39 75 500 15 -51 75 500 20 -62 80 500 22 -58 80 500 16 -63 80 500 18 -51 80 500 124 -63 80 500 24 -52 80 500 16 -62 80 500 20 -68 80 500 21 -52 80 500 17 -63 80 500 22 -81 80 500 22 -57 80 500 16 -61 80 500 17 -59 80 500 17 -65 80 500 19 -56 80 500 20 -57 80 500 16 -62 80 500 19 -65 80 500 22 -65 80 500 21 -81 85 500 20 -76 85 500 20 -85 85 500 20 -89 85 500 21 -76 85 500 18 -86 85 500 20 -99 85 500 22 -96 85 500 25 -72 85 500 20 -82 85 500 141 -81 85 500 20 -76 85 500 17 -84 85 500 21 -94 85 500 23 -81 85 500 20 -81 85 500 21 -87 85 500 21 -82 85 500 19 -94 85 500 23 -93 85 500 20 -51 90 500 23 -59 90 500 28 -61 90 500 25 -65 90 500 31 -58 90 500 30 -51 90 500 18 -56 90 500 29 -74 90 500 27 -59 90 500 23 -61 90 500 29 -62 90 500 24 -49 90 500 25 -58 90 500 164 -59 90 500 27 -68 90 500 25 -62 90 500 27 -61 90 500 23 -81 90 500 40 -50 90 500 25 -66 90 500 28 -52 95 500 20 -58 95 500 16 -49 95 500 16 -50 95 500 17 -46 95 500 15 -54 95 500 14 -58 95 500 21 -57 95 500 25 -48 95 500 18 -47 95 500 16 -47 95 500 19 -46 95 500 15 -42 95 500 18 -53 95 500 18 -49 95 500 16 -43 95 500 18 -39 95 500 16 -45 95 500 15 -45 95 500 148 -49 95 500 23 -78 100 500 23 -74 100 500 30 -78 100 500 31 -96 100 500 35 -72 100 500 28 -69 100 500 25 -61 100 500 26 -63 100 500 26 -69 100 500 21 -103 100 500 29 -86 100 500 34 -99 100 500 23 -82 100 500 
29 -69 100 500 24 -60 100 500 21 -54 100 500 17 -78 100 500 33 -56 100 500 23 -59 100 500 24 -69 100 500 28 -48 105 500 133 -71 105 500 40 -64 105 500 24 -67 105 500 24 -67 105 500 28 -53 105 500 18 -57 105 500 28 -67 105 500 26 -62 105 500 26 -52 105 500 20 -78 105 500 27 -48 105 500 26 -55 105 500 20 -80 105 500 32 -41 105 500 21 -74 105 500 31 -66 105 500 27 -74 105 500 29 -80 105 500 34 -49 105 500 23 -89 110 500 25 -77 110 500 132 -86 110 500 24 -89 110 500 27 -94 110 500 25 -89 110 500 30 -68 110 500 25 -78 110 500 28 -85 110 500 26 -84 110 500 31 -71 110 500 24 -83 110 500 23 -95 110 500 26 -82 110 500 22 -95 110 500 25 -96 110 500 27 -92 110 500 24 -91 110 500 30 -79 110 500 29 -86 110 500 22 -95 115 500 20 -84 115 500 22 -89 115 500 19 -87 115 500 139 -92 115 500 26 -85 115 500 21 -75 115 500 22 -91 115 500 20 -85 115 500 22 -82 115 500 21 -80 115 500 22 -91 115 500 21 -79 115 500 22 -80 115 500 21 -71 115 500 16 -90 115 500 20 -85 115 500 19 -92 115 500 23 -77 115 500 18 -90 115 500 20 -98 120 500 19 -101 120 500 19 -106 120 500 19 -95 120 500 18 -90 120 500 18 -90 120 500 18 -102 120 500 19 -90 120 500 18 -101 120 500 20 -83 120 500 136 -88 120 500 18 -87 120 500 19 -89 120 500 20 -95 120 500 20 -86 120 500 18 -101 120 500 19 -90 120 500 19 -95 120 500 20 -71 120 500 16 -80 120 500 17 -95 125 500 18 -79 125 500 17 -98 125 500 24 -85 125 500 18 -78 125 500 16 -84 125 500 17 -91 125 500 18 -99 125 500 20 -97 125 500 19 -81 125 500 18 -110 125 500 19 -88 125 500 18 -87 125 500 20 -87 125 500 17 -92 125 500 18 -82 125 500 18 -98 125 500 18 -95 125 500 147 -97 125 500 21 -88 125 500 19 -85 130 500 21 -85 130 500 26 -102 130 500 33 -101 130 500 26 -91 130 500 24 -87 130 500 31 -84 130 500 28 -92 130 500 25 -101 130 500 28 -98 130 500 33 -90 130 500 26 -91 130 500 23 -95 130 500 27 -82 130 500 33 -82 130 500 24 -90 130 500 24 -89 130 500 24 -83 130 500 24 -96 130 500 177 -100 130 500 27 -73 135 500 30 -77 135 500 25 -77 135 500 28 -72 135 500 25 -134 135 500 41 -68 135 500 28 -91 135 500 26 -88 135 500 30 -93 135 500 33 -80 135 500 27 -88 135 500 24 -79 135 500 29 -87 135 500 25 -77 135 500 24 -103 135 500 31 -71 135 500 22 -75 135 500 22 -84 135 500 26 -81 135 500 179 -90 135 500 29 -93 140 500 22 -99 140 500 23 -88 140 500 23 -96 140 500 25 -87 140 500 24 -84 140 500 21 -89 140 500 26 -81 140 500 20 -84 140 500 20 -98 140 500 25 -81 140 500 22 -104 140 500 23 -83 140 500 22 -107 140 500 26 -116 140 500 30 -90 140 500 23 -102 140 500 23 -114 140 500 24 -85 140 500 18 -88 140 500 22 -115 145 500 25 -114 145 500 140 -111 145 500 23 -123 145 500 23 -129 145 500 21 -139 145 500 24 -115 145 500 28 -103 145 500 22 -109 145 500 21 -119 145 500 25 -109 145 500 20 -127 145 500 24 -119 145 500 23 -112 145 500 22 -117 145 500 24 -95 145 500 21 -113 145 500 24 -122 145 500 21 -114 145 500 22 -128 145 500 27 -124 150 500 28 -116 150 500 33 -134 150 500 28 -146 150 500 151 -155 150 500 28 -139 150 500 28 -134 150 500 28 -110 150 500 25 -129 150 500 28 -130 150 500 22 -150 150 500 33 -140 150 500 27 -166 150 500 33 -151 150 500 33 -130 150 500 32 -135 150 500 30 -146 150 500 28 -150 150 500 27 -155 150 500 32 -122 150 500 24 -67 155 500 21 -114 155 500 25 -98 155 500 148 -80 155 500 22 -98 155 500 23 -87 155 500 24 -94 155 500 25 -92 155 500 25 -99 155 500 30 -118 155 500 28 -95 155 500 25 -95 155 500 31 -95 155 500 23 -91 155 500 33 -85 155 500 26 -82 155 500 24 -93 155 500 26 -92 155 500 22 -85 155 500 25 -82 155 500 21 -108 160 500 25 -108 160 500 26 -113 160 500 27 -113 160 500 25 -117 160 500 
34 -86 160 500 23 -115 160 500 24 -97 160 500 24 -102 160 500 23 -109 160 500 30 -118 160 500 27 -101 160 500 27 -98 160 500 23 -107 160 500 24 -122 160 500 27 -104 160 500 26 -124 160 500 27 -132 160 500 29 -91 160 500 22 -109 160 500 29 -124 165 500 35 -120 165 500 33 -109 165 500 37 -129 165 500 153 -135 165 500 49 -114 165 500 41 -138 165 500 34 -132 165 500 36 -133 165 500 37 -111 165 500 25 -113 165 500 32 -130 165 500 27 -119 165 500 32 -107 165 500 37 -119 165 500 30 -114 165 500 31 -105 165 500 24 -127 165 500 32 -120 165 500 29 -128 165 500 40 -104 170 500 146 -94 170 500 24 -92 170 500 30 -92 170 500 30 -102 170 500 33 -98 170 500 28 -83 170 500 25 -96 170 500 30 -91 170 500 30 -94 170 500 33 -106 170 500 32 -96 170 500 31 -95 170 500 36 -99 170 500 32 -82 170 500 33 -105 170 500 30 -92 170 500 30 -100 170 500 28 -93 170 500 28 -95 170 500 176 -145 175 500 27 -117 175 500 24 -146 175 500 34 -118 175 500 33 -137 175 500 27 -125 175 500 29 -123 175 500 32 -86 175 500 23 -137 175 500 36 -127 175 500 25 -129 175 500 27 -111 175 500 26 -110 175 500 23 -127 175 500 28 -123 175 500 25 -130 175 500 26 -131 175 500 32 -121 175 500 27 -115 175 500 181 -120 175 500 30 -127 180 500 37 -148 180 500 30 -132 180 500 32 -142 180 500 39 -142 180 500 36 -127 180 500 30 -130 180 500 25 -157 180 500 37 -161 180 500 34 -136 180 500 29 -123 180 500 31 -127 180 500 36 -160 180 500 33 -151 180 500 34 -149 180 500 39 -147 180 500 182 -140 180 500 44 -159 180 500 34 -160 180 500 35 -130 180 500 26 -174 185 500 36 -149 185 500 24 -172 185 500 30 -146 185 500 43 -168 185 500 31 -194 185 500 31 -164 185 500 26 -175 185 500 28 -200 185 500 32 -179 185 500 28 -165 185 500 29 -138 185 500 29 -163 185 500 26 -172 185 500 173 -141 185 500 30 -151 185 500 24 -176 185 500 26 -176 185 500 31 -151 185 500 25 -162 185 500 26 -131 190 500 36 -143 190 500 42 -113 190 500 32 -120 190 500 30 -146 190 500 43 -151 190 500 37 -133 190 500 33 -133 190 500 34 -114 190 500 30 -139 190 500 35 -127 190 500 31 -132 190 500 34 -117 190 500 31 -130 190 500 38 -118 190 500 35 -96 190 500 24 -120 190 500 35 -110 190 500 38 -133 190 500 27 -146 190 500 32 -153 195 500 57 -201 195 500 47 -154 195 500 41 -156 195 500 41 -150 195 500 39 -174 195 500 41 -195 195 500 174 -177 195 500 40 -163 195 500 45 -189 195 500 46 -170 195 500 45 -185 195 500 51 -176 195 500 40 -177 195 500 38 -192 195 500 43 -187 195 500 38 -166 195 500 39 -195 195 500 73 -164 195 500 199 -196 195 500 42 -138 200 500 33 -155 200 500 35 -119 200 500 27 -139 200 500 34 -149 200 500 34 -147 200 500 43 -132 200 500 40 -144 200 500 36 -130 200 500 33 -167 200 500 35 -151 200 500 38 -128 200 500 29 -148 200 500 39 -156 200 500 35 -124 200 500 188 -139 200 500 30 -138 200 500 28 -127 200 500 35 -136 200 500 30 -136 200 500 29 -0 5 500 4 -0 5 500 4 -0 5 500 4 -0 5 500 4 -0 5 500 5 -0 5 500 4 -0 5 500 4 -0 5 500 4 -0 5 500 4 -0 5 500 4 -1 5 500 5 -1 5 500 4 -0 5 500 5 -0 5 500 5 -1 5 500 5 -1 5 500 5 -1 5 500 5 -1 5 500 5 -0 5 500 5 -1 5 500 4 -6 10 500 10 -6 10 500 9 -9 10 500 11 -11 10 500 11 -4 10 500 8 -9 10 500 10 -7 10 500 9 -10 10 500 12 -4 10 500 8 -5 10 500 9 -5 10 500 10 -0 10 500 6 -6 10 500 9 -8 10 500 10 -6 10 500 9 -7 10 500 11 -5 10 500 9 -4 10 500 7 -7 10 500 9 -8 10 500 12 -0 15 500 49 -2 15 500 8 -5 15 500 8 -3 15 500 7 -3 15 500 8 -5 15 500 8 -4 15 500 9 -6 15 500 10 -1 15 500 7 -6 15 500 8 -4 15 500 8 -2 15 500 7 -3 15 500 8 -3 15 500 8 -7 15 500 8 -1 15 500 7 -4 15 500 7 -7 15 500 9 -4 15 500 8 -2 15 500 7 -13 20 500 9 -20 20 500 9 -12 20 500 9 -18 20 500 
9 -10 20 500 8 -17 20 500 9 -13 20 500 9 -10 20 500 8 -16 20 500 8 -14 20 500 9 -15 20 500 9 -11 20 500 9 -10 20 500 9 -16 20 500 9 -13 20 500 9 -17 20 500 9 -10 20 500 9 -12 20 500 8 -10 20 500 9 -15 20 500 9 -10 25 500 10 -13 25 500 10 -17 25 500 11 -7 25 500 9 -13 25 500 10 -15 25 500 10 -15 25 500 9 -19 25 500 10 -11 25 500 10 -9 25 500 9 -9 25 500 71 -18 25 500 10 -16 25 500 10 -19 25 500 11 -11 25 500 8 -13 25 500 9 -12 25 500 10 -11 25 500 9 -15 25 500 10 -11 25 500 8 -13 30 500 13 -27 30 500 17 -13 30 500 11 -23 30 500 14 -13 30 500 15 -12 30 500 12 -24 30 500 15 -18 30 500 12 -21 30 500 15 -19 30 500 13 -16 30 500 15 -23 30 500 12 -14 30 500 12 -24 30 500 15 -18 30 500 16 -12 30 500 12 -27 30 500 13 -24 30 500 13 -13 30 500 13 -21 30 500 11 -16 35 500 11 -19 35 500 11 -14 35 500 11 -15 35 500 11 -22 35 500 14 -22 35 500 12 -29 35 500 13 -21 35 500 12 -19 35 500 11 -17 35 500 11 -26 35 500 108 -16 35 500 10 -11 35 500 10 -24 35 500 12 -26 35 500 12 -11 35 500 10 -23 35 500 12 -16 35 500 10 -15 35 500 11 -15 35 500 9 -15 40 500 12 -29 40 500 13 -21 40 500 11 -13 40 500 10 -19 40 500 11 -19 40 500 12 -26 40 500 13 -33 40 500 14 -25 40 500 14 -28 40 500 14 -21 40 500 11 -21 40 500 14 -21 40 500 12 -31 40 500 12 -16 40 500 11 -24 40 500 12 -24 40 500 11 -14 40 500 10 -25 40 500 13 -26 40 500 13 -14 45 500 10 -21 45 500 10 -31 45 500 17 -27 45 500 11 -29 45 500 13 -21 45 500 12 -22 45 500 11 -17 45 500 12 -18 45 500 12 -13 45 500 11 -15 45 500 93 -25 45 500 12 -24 45 500 11 -29 45 500 12 -21 45 500 11 -28 45 500 11 -24 45 500 12 -22 45 500 15 -17 45 500 10 -14 45 500 12 -40 50 500 14 -36 50 500 13 -23 50 500 11 -38 50 500 15 -30 50 500 12 -18 50 500 12 -31 50 500 14 -32 50 500 14 -37 50 500 15 -18 50 500 11 -34 50 500 12 -35 50 500 12 -40 50 500 15 -28 50 500 13 -25 50 500 12 -31 50 500 13 -23 50 500 12 -31 50 500 13 -30 50 500 13 -29 50 500 14 -51 55 500 16 -42 55 500 15 -32 55 500 15 -35 55 500 14 -36 55 500 15 -38 55 500 15 -34 55 500 14 -42 55 500 14 -50 55 500 95 -56 55 500 16 -42 55 500 16 -47 55 500 15 -39 55 500 14 -46 55 500 17 -31 55 500 14 -37 55 500 16 -33 55 500 37 -40 55 500 14 -51 55 500 42 -30 55 500 15 -35 60 500 14 -27 60 500 10 -48 60 500 18 -35 60 500 15 -35 60 500 13 -28 60 500 14 -38 60 500 17 -34 60 500 13 -38 60 500 15 -26 60 500 12 -41 60 500 15 -29 60 500 11 -16 60 500 12 -35 60 500 13 -43 60 500 16 -40 60 500 14 -33 60 500 14 -34 60 500 14 -40 60 500 15 -28 60 500 13 -60 65 500 96 -76 65 500 24 -56 65 500 21 -71 65 500 19 -70 65 500 25 -66 65 500 20 -66 65 500 17 -62 65 500 22 -67 65 500 20 -64 65 500 20 -66 65 500 18 -65 65 500 20 -77 65 500 23 -62 65 500 19 -52 65 500 16 -62 65 500 20 -52 65 500 21 -55 65 500 21 -59 65 500 20 -56 65 500 23 -56 70 500 18 -56 70 500 17 -52 70 500 17 -56 70 500 14 -67 70 500 18 -52 70 500 13 -53 70 500 15 -54 70 500 126 -59 70 500 19 -41 70 500 14 -45 70 500 15 -59 70 500 17 -49 70 500 15 -52 70 500 17 -58 70 500 14 -54 70 500 16 -77 70 500 16 -73 70 500 16 -65 70 500 14 -59 70 500 19 -51 75 500 19 -47 75 500 18 -35 75 500 14 -51 75 500 20 -39 75 500 16 -56 75 500 21 -48 75 500 19 -48 75 500 16 -54 75 500 16 -41 75 500 17 -42 75 500 17 -46 75 500 17 -44 75 500 18 -46 75 500 19 -46 75 500 17 -42 75 500 22 -51 75 500 15 -43 75 500 18 -58 75 500 152 -46 75 500 19 -72 80 500 20 -78 80 500 18 -87 80 500 20 -75 80 500 19 -84 80 500 19 -79 80 500 19 -85 80 500 21 -90 80 500 21 -100 80 500 25 -81 80 500 20 -92 80 500 22 -95 80 500 19 -90 80 500 23 -79 80 500 19 -81 80 500 20 -83 80 500 23 -107 80 500 22 -96 80 500 24 -79 80 500 20 -85 
80 500 23 -39 85 500 14 -58 85 500 16 -58 85 500 16 -52 85 500 15 -50 85 500 140 -57 85 500 17 -58 85 500 16 -46 85 500 15 -44 85 500 15 -41 85 500 15 -35 85 500 15 -36 85 500 13 -37 85 500 14 -58 85 500 15 -46 85 500 16 -35 85 500 13 -54 85 500 16 -50 85 500 15 -48 85 500 16 -50 85 500 16 -72 90 500 18 -62 90 500 14 -68 90 500 15 -73 90 500 15 -48 90 500 13 -62 90 500 15 -54 90 500 16 -50 90 500 14 -63 90 500 15 -68 90 500 18 -65 90 500 16 -58 90 500 15 -55 90 500 14 -57 90 500 15 -67 90 500 16 -70 90 500 16 -69 90 500 129 -72 90 500 22 -59 90 500 15 -57 90 500 15 -73 95 500 16 -70 95 500 17 -66 95 500 17 -89 95 500 19 -61 95 500 14 -85 95 500 17 -69 95 500 18 -61 95 500 15 -76 95 500 18 -59 95 500 17 -70 95 500 17 -58 95 500 16 -59 95 500 16 -61 95 500 15 -56 95 500 16 -66 95 500 18 -59 95 500 15 -65 95 500 17 -62 95 500 18 -62 95 500 17 -57 100 500 24 -72 100 500 26 -73 100 500 27 -67 100 500 20 -66 100 500 26 -69 100 500 136 -76 100 500 27 -67 100 500 26 -90 100 500 31 -86 100 500 28 -74 100 500 25 -80 100 500 29 -69 100 500 23 -79 100 500 30 -77 100 500 26 -67 100 500 29 -64 100 500 25 -92 100 500 30 -74 100 500 24 -79 100 500 29 -57 105 500 23 -52 105 500 18 -61 105 500 21 -59 105 500 21 -51 105 500 20 -51 105 500 20 -54 105 500 18 -50 105 500 137 -48 105 500 23 -51 105 500 18 -49 105 500 17 -44 105 500 18 -48 105 500 19 -52 105 500 20 -47 105 500 19 -48 105 500 17 -53 105 500 22 -60 105 500 19 -59 105 500 19 -50 105 500 20 -86 110 500 22 -85 110 500 21 -73 110 500 19 -83 110 500 20 -86 110 500 22 -68 110 500 21 -83 110 500 19 -82 110 500 21 -96 110 500 22 -93 110 500 22 -79 110 500 20 -105 110 500 25 -73 110 500 18 -77 110 500 142 -91 110 500 28 -85 110 500 20 -102 110 500 23 -89 110 500 18 -95 110 500 22 -90 110 500 20 -80 115 500 21 -70 115 500 19 -77 115 500 23 -54 115 500 20 -74 115 500 22 -62 115 500 20 -84 115 500 22 -78 115 500 21 -67 115 500 21 -64 115 500 25 -62 115 500 19 -84 115 500 26 -63 115 500 20 -74 115 500 23 -73 115 500 22 -71 115 500 21 -66 115 500 18 -59 115 500 17 -75 115 500 23 -74 115 500 161 -78 120 500 19 -79 120 500 19 -85 120 500 21 -85 120 500 20 -74 120 500 22 -78 120 500 21 -83 120 500 18 -79 120 500 19 -78 120 500 18 -84 120 500 21 -63 120 500 20 -84 120 500 25 -77 120 500 25 -93 120 500 21 -85 120 500 21 -82 120 500 24 -78 120 500 18 -71 120 500 17 -80 120 500 20 -73 120 500 19 -82 125 500 20 -112 125 500 20 -95 125 500 23 -88 125 500 23 -75 125 500 123 -98 125 500 29 -105 125 500 24 -96 125 500 20 -108 125 500 25 -102 125 500 22 -96 125 500 22 -89 125 500 21 -104 125 500 23 -104 125 500 23 -112 125 500 23 -104 125 500 25 -87 125 500 23 -87 125 500 21 -102 125 500 20 -95 125 500 21 -84 130 500 28 -100 130 500 28 -97 130 500 31 -96 130 500 28 -98 130 500 31 -104 130 500 28 -106 130 500 157 -108 130 500 28 -113 130 500 30 -110 130 500 31 -98 130 500 28 -108 130 500 27 -93 130 500 34 -86 130 500 27 -116 130 500 36 -101 130 500 28 -100 130 500 32 -74 130 500 24 -130 130 500 41 -99 130 500 40 -52 135 500 19 -73 135 500 20 -58 135 500 15 -79 135 500 23 -90 135 500 27 -71 135 500 21 -63 135 500 154 -61 135 500 29 -87 135 500 20 -86 135 500 20 -66 135 500 21 -75 135 500 24 -65 135 500 21 -68 135 500 26 -96 135 500 24 -77 135 500 27 -78 135 500 24 -81 135 500 20 -72 135 500 24 -63 135 500 17 -110 140 500 28 -88 140 500 26 -88 140 500 25 -94 140 500 22 -126 140 500 24 -103 140 500 29 -105 140 500 23 -78 140 500 20 -99 140 500 23 -94 140 500 148 -100 140 500 30 -110 140 500 23 -112 140 500 27 -86 140 500 27 -102 140 500 25 -87 140 500 22 -104 140 500 27 -101 140 
500 26 -92 140 500 22 -111 140 500 25 -101 145 500 32 -128 145 500 28 -103 145 500 24 -95 145 500 28 -120 145 500 27 -88 145 500 28 -99 145 500 27 -123 145 500 26 -101 145 500 31 -91 145 500 34 -117 145 500 31 -115 145 500 33 -99 145 500 23 -116 145 500 25 -88 145 500 27 -135 145 500 34 -108 145 500 27 -112 145 500 34 -115 145 500 33 -118 145 500 33 -102 150 500 28 -113 150 500 29 -124 150 500 32 -108 150 500 25 -146 150 500 36 -124 150 500 28 -117 150 500 29 -120 150 500 27 -119 150 500 175 -124 150 500 30 -116 150 500 30 -123 150 500 27 -126 150 500 32 -102 150 500 28 -124 150 500 30 -114 150 500 29 -140 150 500 35 -114 150 500 26 -115 150 500 28 -112 150 500 26 -143 155 500 28 -135 155 500 49 -131 155 500 26 -126 155 500 23 -143 155 500 26 -115 155 500 26 -133 155 500 29 -108 155 500 150 -126 155 500 21 -142 155 500 27 -128 155 500 29 -157 155 500 29 -140 155 500 36 -135 155 500 38 -138 155 500 27 -137 155 500 26 -123 155 500 30 -145 155 500 29 -158 155 500 34 -176 155 500 35 -87 160 500 26 -102 160 500 34 -78 160 500 30 -93 160 500 28 -84 160 500 34 -99 160 500 34 -104 160 500 32 -94 160 500 163 -92 160 500 28 -84 160 500 26 -99 160 500 25 -100 160 500 36 -100 160 500 31 -100 160 500 29 -93 160 500 29 -80 160 500 28 -85 160 500 28 -86 160 500 30 -111 160 500 35 -88 160 500 27 -101 165 500 26 -129 165 500 26 -156 165 500 30 -129 165 500 31 -125 165 500 27 -133 165 500 29 -128 165 500 166 -141 165 500 28 -149 165 500 31 -118 165 500 23 -148 165 500 33 -143 165 500 31 -121 165 500 31 -126 165 500 28 -145 165 500 30 -120 165 500 26 -127 165 500 28 -130 165 500 31 -138 165 500 29 -111 165 500 25 -148 170 500 36 -132 170 500 31 -146 170 500 31 -141 170 500 29 -136 170 500 160 -130 170 500 35 -139 170 500 35 -136 170 500 28 -131 170 500 31 -138 170 500 44 -130 170 500 31 -142 170 500 31 -151 170 500 30 -144 170 500 35 -147 170 500 32 -150 170 500 35 -154 170 500 31 -150 170 500 35 -127 170 500 26 -132 170 500 29 -123 175 500 26 -123 175 500 27 -126 175 500 152 -104 175 500 25 -137 175 500 28 -135 175 500 27 -98 175 500 24 -118 175 500 28 -147 175 500 32 -116 175 500 25 -124 175 500 25 -141 175 500 27 -142 175 500 27 -112 175 500 29 -106 175 500 22 -133 175 500 27 -131 175 500 29 -139 175 500 26 -113 175 500 26 -112 175 500 26 -125 180 500 29 -112 180 500 24 -136 180 500 28 -111 180 500 26 -127 180 500 24 -103 180 500 29 -142 180 500 23 -121 180 500 22 -136 180 500 24 -115 180 500 27 -133 180 500 30 -109 180 500 24 -127 180 500 34 -112 180 500 27 -136 180 500 30 -136 180 500 53 -99 180 500 26 -123 180 500 29 -122 180 500 27 -142 180 500 31 -126 185 500 39 -130 185 500 153 -126 185 500 42 -106 185 500 33 -125 185 500 43 -127 185 500 36 -107 185 500 33 -104 185 500 33 -107 185 500 34 -119 185 500 44 -129 185 500 40 -102 185 500 27 -110 185 500 35 -112 185 500 37 -97 185 500 36 -102 185 500 35 -121 185 500 35 -121 185 500 189 -97 185 500 32 -107 185 500 32 -145 190 500 31 -155 190 500 34 -150 190 500 26 -171 190 500 36 -165 190 500 35 -171 190 500 35 -162 190 500 35 -182 190 500 37 -144 190 500 31 -154 190 500 31 -140 190 500 32 -172 190 500 31 -221 190 500 41 -164 190 500 36 -191 190 500 196 -162 190 500 35 -159 190 500 33 -160 190 500 35 -164 190 500 34 -136 190 500 28 -127 195 500 28 -151 195 500 32 -118 195 500 25 -156 195 500 35 -150 195 500 33 -116 195 500 24 -133 195 500 43 -147 195 500 32 -142 195 500 32 -125 195 500 26 -120 195 500 28 -137 195 500 31 -136 195 500 172 -130 195 500 33 -154 195 500 26 -126 195 500 28 -138 195 500 26 -124 195 500 32 -153 195 500 31 -129 195 500 27 -146 200 
500 27 -118 200 500 27 -129 200 500 33 -145 200 500 29 -165 200 500 34 -131 200 500 28 -117 200 500 25 -134 200 500 26 -129 200 500 29 -124 200 500 34 -145 200 500 30 -129 200 500 166 -119 200 500 31 -120 200 500 24 -126 200 500 24 -141 200 500 38 -151 200 500 33 -147 200 500 29 -132 200 500 31 -125 200 500 29 -0 5 500 4 -0 5 500 5 -1 5 500 5 -1 5 500 8 -1 5 500 6 -1 5 500 5 -0 5 500 4 -0 5 500 5 -1 5 500 9 -2 5 500 5 -1 5 500 5 -2 5 500 9 -0 5 500 4 -1 5 500 5 -0 5 500 4 -0 5 500 5 -1 5 500 8 -2 5 500 5 -0 5 500 4 -0 5 500 4 -14 10 500 10 -9 10 500 8 -8 10 500 7 -7 10 500 7 -10 10 500 9 -7 10 500 8 -14 10 500 10 -9 10 500 8 -7 10 500 8 -13 10 500 10 -6 10 500 8 -15 10 500 62 -7 10 500 9 -15 10 500 14 -19 10 500 10 -9 10 500 9 -9 10 500 7 -10 10 500 9 -18 10 500 10 -9 10 500 9 -1 15 500 7 -4 15 500 9 -6 15 500 11 -5 15 500 10 -6 15 500 11 -7 15 500 12 -5 15 500 11 -7 15 500 11 -7 15 500 11 -5 15 500 9 -5 15 500 10 -5 15 500 10 -5 15 500 10 -7 15 500 11 -1 15 500 8 -3 15 500 8 -4 15 500 9 -2 15 500 8 -7 15 500 12 -1 15 500 7 -14 20 500 7 -16 20 500 9 -11 20 500 7 -11 20 500 8 -12 20 500 7 -7 20 500 7 -13 20 500 8 -11 20 500 7 -13 20 500 8 -17 20 500 8 -10 20 500 8 -10 20 500 8 -9 20 500 7 -12 20 500 8 -19 20 500 9 -16 20 500 8 -17 20 500 8 -10 20 500 8 -17 20 500 8 -13 20 500 8 -22 25 500 9 -21 25 500 63 -27 25 500 15 -17 25 500 14 -13 25 500 10 -20 25 500 12 -24 25 500 14 -11 25 500 8 -21 25 500 11 -14 25 500 8 -21 25 500 10 -23 25 500 10 -20 25 500 10 -25 25 500 10 -20 25 500 10 -7 25 500 8 -15 25 500 9 -20 25 500 14 -18 25 500 10 -12 25 500 9 -37 30 500 11 -27 30 500 9 -39 30 500 14 -33 30 500 11 -29 30 500 11 -26 30 500 10 -42 30 500 12 -28 30 500 10 -32 30 500 10 -32 30 500 10 -28 30 500 11 -32 30 500 10 -36 30 500 11 -37 30 500 11 -34 30 500 11 -37 30 500 12 -40 30 500 13 -36 30 500 11 -29 30 500 11 -27 30 500 11 -27 35 500 11 -51 35 500 15 -32 35 500 13 -32 35 500 11 -37 35 500 81 -39 35 500 13 -39 35 500 10 -36 35 500 11 -28 35 500 13 -28 35 500 11 -24 35 500 11 -25 35 500 10 -38 35 500 10 -35 35 500 12 -22 35 500 11 -32 35 500 11 -29 35 500 11 -24 35 500 10 -32 35 500 11 -33 35 500 11 -52 40 500 14 -52 40 500 13 -48 40 500 13 -35 40 500 12 -50 40 500 16 -54 40 500 14 -53 40 500 15 -29 40 500 11 -54 40 500 15 -51 40 500 13 -59 40 500 15 -55 40 500 14 -42 40 500 13 -41 40 500 14 -48 40 500 13 -53 40 500 13 -54 40 500 43 -58 40 500 33 -34 40 500 12 -49 40 500 16 -48 45 500 20 -46 45 500 107 -55 45 500 15 -50 45 500 19 -43 45 500 16 -45 45 500 16 -57 45 500 19 -49 45 500 15 -43 45 500 14 -55 45 500 17 -39 45 500 14 -49 45 500 17 -57 45 500 16 -57 45 500 19 -49 45 500 16 -41 45 500 15 -41 45 500 16 -58 45 500 17 -57 45 500 19 -52 45 500 16 -43 50 500 11 -38 50 500 11 -44 50 500 12 -48 50 500 12 -47 50 500 13 -45 50 500 12 -48 50 500 12 -35 50 500 13 -49 50 500 12 -45 50 500 11 -44 50 500 13 -33 50 500 11 -53 50 500 14 -34 50 500 11 -29 50 500 11 -34 50 500 12 -52 50 500 122 -48 50 500 12 -36 50 500 11 -49 50 500 13 -31 55 500 18 -39 55 500 16 -47 55 500 19 -31 55 500 16 -33 55 500 16 -41 55 500 16 -45 55 500 17 -43 55 500 20 -35 55 500 17 -37 55 500 16 -27 55 500 13 -27 55 500 13 -40 55 500 17 -30 55 500 15 -47 55 500 19 -40 55 500 16 -37 55 500 18 -45 55 500 19 -28 55 500 16 -29 55 500 15 -39 60 500 19 -42 60 500 18 -43 60 500 18 -35 60 500 17 -50 60 500 19 -41 60 500 17 -49 60 500 19 -28 60 500 14 -49 60 500 117 -30 60 500 17 -34 60 500 13 -42 60 500 18 -38 60 500 17 -28 60 500 16 -47 60 500 18 -46 60 500 20 -36 60 500 17 -41 60 500 19 -45 60 500 17 -35 60 500 17 -38 65 500 18 -31 65 
500 13 -33 65 500 17 -33 65 500 18 -29 65 500 14 -34 65 500 18 -31 65 500 17 -38 65 500 18 -35 65 500 19 -32 65 500 16 -38 65 500 21 -28 65 500 17 -33 65 500 17 -40 65 500 16 -36 65 500 14 -30 65 500 19 -39 65 500 18 -39 65 500 17 -38 65 500 145 -32 65 500 17 -36 70 500 15 -33 70 500 14 -30 70 500 14 -29 70 500 18 -32 70 500 17 -28 70 500 14 -33 70 500 14 -42 70 500 15 -36 70 500 17 -28 70 500 15 -44 70 500 16 -34 70 500 18 -34 70 500 13 -43 70 500 15 -30 70 500 14 -30 70 500 14 -39 70 500 16 -40 70 500 19 -38 70 500 15 -44 70 500 18 -58 75 500 17 -62 75 500 16 -53 75 500 14 -44 75 500 14 -57 75 500 21 -42 75 500 11 -58 75 500 14 -64 75 500 18 -64 75 500 18 -59 75 500 16 -69 75 500 123 -69 75 500 20 -75 75 500 21 -59 75 500 15 -61 75 500 15 -52 75 500 15 -71 75 500 15 -58 75 500 16 -43 75 500 16 -63 75 500 15 -81 80 500 25 -78 80 500 27 -73 80 500 26 -96 80 500 29 -81 80 500 24 -75 80 500 28 -75 80 500 30 -75 80 500 26 -68 80 500 23 -65 80 500 22 -91 80 500 28 -68 80 500 21 -66 80 500 23 -82 80 500 30 -71 80 500 158 -75 80 500 29 -61 80 500 18 -72 80 500 27 -72 80 500 26 -51 80 500 18 -46 85 500 17 -71 85 500 19 -82 85 500 22 -85 85 500 18 -70 85 500 17 -71 85 500 19 -65 85 500 16 -59 85 500 15 -73 85 500 20 -58 85 500 15 -57 85 500 15 -56 85 500 19 -74 85 500 18 -75 85 500 19 -95 85 500 18 -90 85 500 20 -67 85 500 20 -40 85 500 15 -70 85 500 19 -77 85 500 18 -70 90 500 18 -77 90 500 117 -63 90 500 18 -67 90 500 21 -60 90 500 17 -73 90 500 17 -74 90 500 22 -77 90 500 18 -70 90 500 21 -72 90 500 19 -73 90 500 16 -79 90 500 18 -78 90 500 19 -71 90 500 17 -62 90 500 17 -73 90 500 17 -72 90 500 17 -69 90 500 16 -66 90 500 19 -72 90 500 17 -92 95 500 22 -96 95 500 23 -84 95 500 21 -85 95 500 23 -85 95 500 19 -86 95 500 21 -100 95 500 23 -85 95 500 20 -83 95 500 133 -90 95 500 21 -71 95 500 20 -87 95 500 23 -84 95 500 21 -117 95 500 27 -77 95 500 21 -78 95 500 21 -76 95 500 21 -91 95 500 19 -103 95 500 25 -90 95 500 19 -85 100 500 20 -104 100 500 21 -59 100 500 16 -68 100 500 17 -86 100 500 23 -72 100 500 18 -86 100 500 20 -80 100 500 19 -77 100 500 16 -90 100 500 20 -76 100 500 25 -83 100 500 22 -66 100 500 18 -71 100 500 17 -68 100 500 152 -66 100 500 23 -67 100 500 23 -82 100 500 20 -67 100 500 18 -78 100 500 17 -46 105 500 15 -36 105 500 15 -41 105 500 18 -40 105 500 19 -48 105 500 15 -57 105 500 17 -31 105 500 14 -39 105 500 18 -42 105 500 16 -46 105 500 17 -48 105 500 21 -47 105 500 19 -55 105 500 18 -41 105 500 17 -54 105 500 19 -40 105 500 16 -54 105 500 17 -39 105 500 15 -36 105 500 18 -44 105 500 16 -59 110 500 18 -78 110 500 22 -48 110 500 15 -59 110 500 104 -57 110 500 15 -56 110 500 15 -61 110 500 16 -66 110 500 17 -65 110 500 16 -70 110 500 17 -61 110 500 16 -57 110 500 17 -66 110 500 15 -59 110 500 16 -62 110 500 15 -68 110 500 19 -70 110 500 16 -73 110 500 18 -63 110 500 15 -63 110 500 15 -78 115 500 21 -75 115 500 21 -74 115 500 17 -87 115 500 22 -97 115 500 25 -96 115 500 25 -66 115 500 19 -77 115 500 22 -80 115 500 24 -104 115 500 23 -115 115 500 29 -96 115 500 136 -73 115 500 22 -91 115 500 23 -79 115 500 22 -97 115 500 24 -52 115 500 18 -77 115 500 22 -85 115 500 22 -81 115 500 18 -88 120 500 28 -113 120 500 24 -101 120 500 22 -88 120 500 21 -99 120 500 27 -87 120 500 22 -103 120 500 26 -91 120 500 25 -108 120 500 27 -113 120 500 24 -107 120 500 26 -82 120 500 23 -107 120 500 23 -103 120 500 26 -85 120 500 160 -99 120 500 29 -107 120 500 22 -108 120 500 26 -90 120 500 23 -99 120 500 27 -72 125 500 21 -59 125 500 22 -81 125 500 22 -64 125 500 22 -90 125 500 23 -65 125 500 20 
-81 125 500 20 -84 125 500 23 -75 125 500 21 -49 125 500 21 -75 125 500 19 -97 125 500 21 -60 125 500 19 -98 125 500 25 -64 125 500 20 -70 125 500 18 -80 125 500 23 -71 125 500 22 -64 125 500 162 -73 125 500 20 -99 130 500 29 -116 130 500 31 -92 130 500 23 -83 130 500 22 -70 130 500 23 -86 130 500 26 -88 130 500 23 -61 130 500 25 -109 130 500 33 -78 130 500 23 -94 130 500 27 -97 130 500 28 -85 130 500 25 -100 130 500 28 -114 130 500 41 -85 130 500 26 -84 130 500 20 -70 130 500 21 -82 130 500 27 -110 130 500 165 -111 135 500 24 -127 135 500 25 -131 135 500 28 -158 135 500 30 -119 135 500 28 -116 135 500 28 -131 135 500 27 -113 135 500 24 -141 135 500 27 -131 135 500 32 -133 135 500 29 -128 135 500 30 -153 135 500 29 -118 135 500 24 -150 135 500 23 -140 135 500 32 -106 135 500 30 -132 135 500 28 -129 135 500 30 -97 135 500 179 -92 140 500 24 -109 140 500 24 -100 140 500 22 -104 140 500 28 -97 140 500 35 -108 140 500 28 -87 140 500 26 -102 140 500 28 -91 140 500 33 -102 140 500 29 -111 140 500 33 -97 140 500 27 -97 140 500 25 -130 140 500 35 -139 140 500 37 -113 140 500 31 -119 140 500 37 -134 140 500 35 -102 140 500 180 -124 140 500 29 -76 145 500 26 -80 145 500 30 -74 145 500 25 -66 145 500 24 -52 145 500 24 -77 145 500 28 -76 145 500 25 -65 145 500 22 -88 145 500 33 -57 145 500 22 -71 145 500 27 -52 145 500 18 -66 145 500 22 -63 145 500 25 -62 145 500 19 -69 145 500 24 -72 145 500 23 -72 145 500 30 -59 145 500 24 -62 145 500 23 -154 150 500 27 -141 150 500 28 -146 150 500 26 -121 150 500 25 -135 150 500 25 -168 150 500 26 -161 150 500 27 -119 150 500 24 -124 150 500 22 -121 150 500 25 -147 150 500 27 -148 150 500 24 -121 150 500 25 -113 150 500 20 -123 150 500 24 -141 150 500 26 -137 150 500 24 -122 150 500 26 -145 150 500 28 -130 150 500 21 -100 155 500 25 -76 155 500 138 -103 155 500 28 -113 155 500 27 -81 155 500 23 -86 155 500 25 -79 155 500 24 -78 155 500 22 -79 155 500 17 -72 155 500 23 -76 155 500 20 -74 155 500 23 -103 155 500 25 -89 155 500 20 -89 155 500 21 -81 155 500 23 -76 155 500 22 -88 155 500 21 -80 155 500 19 -99 155 500 21 -111 160 500 24 -89 160 500 23 -94 160 500 23 -107 160 500 23 -123 160 500 131 -96 160 500 23 -91 160 500 23 -98 160 500 23 -73 160 500 19 -67 160 500 26 -97 160 500 33 -94 160 500 24 -102 160 500 25 -112 160 500 26 -106 160 500 22 -93 160 500 25 -76 160 500 24 -84 160 500 25 -91 160 500 24 -101 160 500 27 -103 165 500 40 -108 165 500 29 -128 165 500 33 -126 165 500 29 -118 165 500 41 -114 165 500 162 -147 165 500 37 -129 165 500 37 -121 165 500 40 -110 165 500 35 -114 165 500 40 -107 165 500 27 -105 165 500 28 -127 165 500 41 -102 165 500 32 -168 165 500 39 -136 165 500 45 -113 165 500 33 -128 165 500 36 -140 165 500 42 -153 170 500 148 -120 170 500 28 -128 170 500 30 -148 170 500 29 -117 170 500 25 -111 170 500 30 -124 170 500 23 -127 170 500 28 -123 170 500 26 -150 170 500 31 -129 170 500 27 -132 170 500 27 -138 170 500 27 -138 170 500 31 -131 170 500 28 -128 170 500 26 -120 170 500 29 -127 170 500 29 -154 170 500 25 -150 170 500 35 -96 175 500 40 -127 175 500 28 -107 175 500 35 -142 175 500 40 -118 175 500 32 -103 175 500 36 -135 175 500 48 -133 175 500 29 -103 175 500 25 -114 175 500 30 -96 175 500 20 -112 175 500 33 -94 175 500 30 -132 175 500 29 -118 175 500 43 -132 175 500 28 -123 175 500 29 -119 175 500 167 -105 175 500 46 -92 175 500 27 -116 180 500 31 -117 180 500 37 -120 180 500 43 -141 180 500 40 -129 180 500 43 -148 180 500 38 -120 180 500 37 -123 180 500 38 -147 180 500 34 -119 180 500 43 -124 180 500 40 -123 180 500 41 -92 180 500 177 
-124 180 500 39 -137 180 500 35 -132 180 500 42 -100 180 500 39 -138 180 500 40 -130 180 500 30 -116 180 500 42 -116 185 500 31 -140 185 500 35 -150 185 500 34 -140 185 500 37 -127 185 500 30 -138 185 500 36 -128 185 500 33 -149 185 500 33 -137 185 500 172 -139 185 500 44 -149 185 500 38 -127 185 500 33 -126 185 500 33 -147 185 500 38 -153 185 500 36 -134 185 500 37 -140 185 500 30 -126 185 500 35 -143 185 500 39 -148 185 500 34 -140 190 500 34 -132 190 500 29 -148 190 500 34 -156 190 500 39 -114 190 500 160 -164 190 500 40 -138 190 500 33 -143 190 500 35 -135 190 500 29 -145 190 500 28 -129 190 500 28 -148 190 500 32 -146 190 500 34 -162 190 500 35 -167 190 500 32 -147 190 500 26 -147 190 500 34 -141 190 500 28 -150 190 500 32 -157 190 500 32 -118 195 500 30 -130 195 500 33 -131 195 500 161 -137 195 500 36 -129 195 500 45 -133 195 500 31 -118 195 500 31 -125 195 500 36 -107 195 500 32 -116 195 500 28 -115 195 500 26 -149 195 500 38 -115 195 500 28 -132 195 500 33 -115 195 500 30 -109 195 500 22 -139 195 500 32 -117 195 500 33 -105 195 500 33 -107 195 500 34 -101 200 500 150 -136 200 500 34 -157 200 500 41 -141 200 500 36 -106 200 500 29 -116 200 500 37 -108 200 500 27 -117 200 500 36 -134 200 500 31 -99 200 500 31 -118 200 500 32 -114 200 500 27 -126 200 500 34 -155 200 500 41 -120 200 500 33 -124 200 500 31 -118 200 500 36 -142 200 500 191 -109 200 500 31 -117 200 500 40 -5 5 500 5 -6 5 500 6 -5 5 500 5 -2 5 500 5 -6 5 500 5 -5 5 500 5 -6 5 500 6 -2 5 500 5 -4 5 500 5 -4 5 500 5 -5 5 500 5 -2 5 500 5 -3 5 500 5 -6 5 500 6 -3 5 500 5 -3 5 500 5 -2 5 500 5 -4 5 500 5 -5 5 500 6 -8 5 500 6 -3 10 500 6 -2 10 500 6 -11 10 500 6 -4 10 500 6 -6 10 500 6 -13 10 500 7 -4 10 500 6 -8 10 500 6 -12 10 500 7 -7 10 500 6 -11 10 500 7 -6 10 500 6 -10 10 500 7 -10 10 500 7 -11 10 500 7 -8 10 500 6 -2 10 500 6 -10 10 500 6 -3 10 500 6 -5 10 500 6 -17 15 500 12 -11 15 500 9 -25 15 500 14 -13 15 500 10 -20 15 500 13 -16 15 500 11 -18 15 500 12 -26 15 500 12 -19 15 500 11 -20 15 500 67 -16 15 500 13 -23 15 500 14 -20 15 500 10 -15 15 500 11 -17 15 500 9 -19 15 500 11 -19 15 500 12 -9 15 500 8 -21 15 500 12 -18 15 500 10 -8 20 500 8 -10 20 500 9 -16 20 500 9 -14 20 500 9 -12 20 500 8 -5 20 500 8 -10 20 500 9 -9 20 500 8 -14 20 500 9 -12 20 500 8 -11 20 500 10 -11 20 500 8 -10 20 500 7 -10 20 500 13 -13 20 500 8 -10 20 500 8 -8 20 500 8 -13 20 500 12 -10 20 500 7 -14 20 500 8 -19 25 500 10 -25 25 500 12 -21 25 500 12 -19 25 500 10 -20 25 500 10 -15 25 500 10 -14 25 500 9 -25 25 500 12 -24 25 500 12 -23 25 500 10 -18 25 500 9 -23 25 500 11 -26 25 500 11 -22 25 500 10 -27 25 500 11 -26 25 500 98 -26 25 500 11 -33 25 500 12 -27 25 500 11 -27 25 500 12 -26 30 500 13 -24 30 500 14 -19 30 500 12 -20 30 500 12 -28 30 500 13 -21 30 500 11 -28 30 500 13 -17 30 500 12 -25 30 500 12 -24 30 500 13 -25 30 500 12 -25 30 500 12 -16 30 500 11 -23 30 500 13 -21 30 500 11 -26 30 500 12 -22 30 500 13 -16 30 500 11 -23 30 500 14 -12 30 500 10 -33 35 500 13 -42 35 500 11 -33 35 500 12 -38 35 500 14 -34 35 500 12 -48 35 500 15 -40 35 500 13 -54 35 500 14 -46 35 500 12 -45 35 500 14 -52 35 500 14 -35 35 500 12 -41 35 500 13 -46 35 500 13 -44 35 500 112 -55 35 500 24 -48 35 500 12 -38 35 500 11 -34 35 500 14 -32 35 500 13 -37 40 500 14 -46 40 500 14 -53 40 500 17 -58 40 500 18 -45 40 500 14 -55 40 500 17 -53 40 500 17 -41 40 500 15 -61 40 500 17 -51 40 500 13 -58 40 500 16 -43 40 500 16 -37 40 500 14 -43 40 500 14 -49 40 500 15 -43 40 500 13 -56 40 500 17 -43 40 500 13 -39 40 500 14 -48 40 500 14 -42 45 500 13 -47 45 500 14 -34 45 
500 12 -36 45 500 12 -41 45 500 12 -45 45 500 13 -33 45 500 11 -50 45 500 13 -39 45 500 13 -46 45 500 113 -32 45 500 16 -30 45 500 13 -40 45 500 13 -44 45 500 12 -53 45 500 13 -54 45 500 13 -37 45 500 12 -40 45 500 12 -38 45 500 13 -45 45 500 14 -24 50 500 14 -32 50 500 15 -29 50 500 16 -36 50 500 17 -28 50 500 16 -27 50 500 13 -29 50 500 14 -35 50 500 13 -28 50 500 15 -22 50 500 13 -28 50 500 19 -28 50 500 13 -30 50 500 13 -40 50 500 19 -21 50 500 12 -22 50 500 13 -29 50 500 17 -29 50 500 12 -25 50 500 16 -31 50 500 15 -26 55 500 12 -38 55 500 14 -46 55 500 14 -30 55 500 12 -33 55 500 94 -32 55 500 19 -32 55 500 13 -37 55 500 13 -44 55 500 14 -46 55 500 14 -31 55 500 14 -30 55 500 12 -31 55 500 13 -37 55 500 15 -30 55 500 14 -36 55 500 13 -39 55 500 12 -32 55 500 12 -38 55 500 12 -34 55 500 14 -37 60 500 11 -48 60 500 13 -34 60 500 12 -39 60 500 12 -36 60 500 11 -33 60 500 11 -35 60 500 13 -47 60 500 13 -40 60 500 13 -52 60 500 12 -40 60 500 12 -41 60 500 13 -56 60 500 14 -43 60 500 13 -46 60 500 12 -43 60 500 13 -38 60 500 12 -35 60 500 12 -41 60 500 12 -47 60 500 12 -47 65 500 19 -62 65 500 92 -43 65 500 15 -40 65 500 15 -55 65 500 14 -66 65 500 17 -63 65 500 19 -63 65 500 15 -50 65 500 17 -66 65 500 17 -72 65 500 19 -60 65 500 14 -50 65 500 15 -67 65 500 16 -37 65 500 16 -71 65 500 18 -58 65 500 17 -50 65 500 16 -46 65 500 15 -45 65 500 16 -56 70 500 16 -63 70 500 17 -58 70 500 16 -61 70 500 16 -50 70 500 15 -59 70 500 21 -62 70 500 20 -71 70 500 19 -65 70 500 17 -59 70 500 18 -58 70 500 22 -64 70 500 126 -59 70 500 15 -60 70 500 23 -62 70 500 16 -67 70 500 18 -60 70 500 15 -61 70 500 20 -53 70 500 15 -58 70 500 16 -83 75 500 23 -73 75 500 27 -73 75 500 20 -51 75 500 18 -56 75 500 18 -70 75 500 23 -61 75 500 18 -56 75 500 17 -50 75 500 17 -75 75 500 22 -66 75 500 20 -56 75 500 18 -64 75 500 21 -65 75 500 20 -50 75 500 17 -55 75 500 16 -79 75 500 22 -59 75 500 19 -59 75 500 18 -78 75 500 146 -65 80 500 24 -52 80 500 17 -61 80 500 16 -56 80 500 16 -45 80 500 42 -55 80 500 17 -45 80 500 16 -66 80 500 17 -63 80 500 15 -48 80 500 16 -63 80 500 44 -56 80 500 16 -54 80 500 15 -65 80 500 16 -55 80 500 18 -50 80 500 15 -51 80 500 18 -54 80 500 16 -57 80 500 18 -43 80 500 16 -54 85 500 19 -52 85 500 19 -61 85 500 22 -50 85 500 18 -58 85 500 23 -43 85 500 16 -55 85 500 110 -46 85 500 16 -65 85 500 22 -68 85 500 25 -45 85 500 19 -53 85 500 25 -46 85 500 21 -49 85 500 18 -67 85 500 27 -56 85 500 19 -70 85 500 32 -50 85 500 23 -60 85 500 23 -50 85 500 16 -70 90 500 15 -49 90 500 16 -61 90 500 17 -74 90 500 16 -63 90 500 16 -54 90 500 14 -67 90 500 16 -55 90 500 21 -51 90 500 16 -73 90 500 16 -62 90 500 20 -62 90 500 18 -78 90 500 19 -63 90 500 16 -63 90 500 129 -64 90 500 15 -76 90 500 16 -68 90 500 16 -65 90 500 15 -63 90 500 14 -47 95 500 14 -66 95 500 20 -62 95 500 18 -62 95 500 24 -57 95 500 24 -48 95 500 15 -42 95 500 14 -67 95 500 18 -66 95 500 19 -63 95 500 17 -52 95 500 15 -68 95 500 19 -60 95 500 18 -48 95 500 19 -54 95 500 20 -57 95 500 21 -65 95 500 22 -56 95 500 22 -78 95 500 20 -73 95 500 21 -62 100 500 25 -81 100 500 22 -65 100 500 112 -77 100 500 21 -68 100 500 31 -65 100 500 23 -79 100 500 23 -56 100 500 23 -65 100 500 23 -77 100 500 20 -62 100 500 23 -62 100 500 23 -74 100 500 27 -77 100 500 24 -63 100 500 21 -62 100 500 21 -66 100 500 24 -61 100 500 20 -55 100 500 18 -64 100 500 25 -68 105 500 21 -71 105 500 22 -65 105 500 18 -61 105 500 17 -77 105 500 20 -66 105 500 18 -55 105 500 131 -42 105 500 15 -71 105 500 18 -67 105 500 18 -59 105 500 16 -55 105 500 16 -64 105 500 23 -70 105 
500 19 -53 105 500 14 -73 105 500 22 -58 105 500 18 -75 105 500 21 -66 105 500 18 -62 105 500 18 -69 110 500 20 -80 110 500 24 -80 110 500 21 -81 110 500 21 -97 110 500 31 -78 110 500 27 -66 110 500 17 -72 110 500 20 -75 110 500 27 -65 110 500 20 -65 110 500 19 -66 110 500 23 -72 110 500 132 -78 110 500 31 -72 110 500 25 -90 110 500 23 -74 110 500 22 -76 110 500 29 -80 110 500 22 -76 110 500 27 -98 115 500 26 -84 115 500 24 -87 115 500 25 -83 115 500 26 -79 115 500 19 -78 115 500 26 -85 115 500 19 -80 115 500 22 -76 115 500 23 -94 115 500 31 -81 115 500 25 -87 115 500 22 -67 115 500 17 -76 115 500 30 -78 115 500 19 -78 115 500 148 -72 115 500 17 -65 115 500 20 -95 115 500 24 -82 115 500 28 -62 120 500 16 -65 120 500 23 -78 120 500 21 -70 120 500 19 -80 120 500 23 -69 120 500 22 -71 120 500 18 -72 120 500 21 -66 120 500 21 -77 120 500 19 -77 120 500 22 -68 120 500 21 -80 120 500 23 -75 120 500 23 -73 120 500 22 -65 120 500 19 -66 120 500 22 -68 120 500 21 -79 120 500 22 -83 120 500 20 -94 125 500 122 -120 125 500 29 -114 125 500 25 -121 125 500 31 -112 125 500 29 -114 125 500 41 -102 125 500 30 -103 125 500 28 -114 125 500 35 -87 125 500 27 -112 125 500 29 -104 125 500 27 -117 125 500 29 -96 125 500 23 -91 125 500 32 -107 125 500 23 -110 125 500 24 -100 125 500 23 -106 125 500 32 -101 125 500 172 -84 130 500 37 -108 130 500 35 -100 130 500 30 -81 130 500 27 -114 130 500 32 -75 130 500 27 -98 130 500 29 -113 130 500 45 -95 130 500 29 -88 130 500 29 -103 130 500 29 -91 130 500 24 -114 130 500 38 -122 130 500 37 -94 130 500 25 -83 130 500 29 -86 130 500 28 -102 130 500 188 -87 130 500 35 -85 130 500 27 -112 135 500 20 -128 135 500 22 -131 135 500 26 -114 135 500 26 -119 135 500 24 -119 135 500 24 -98 135 500 22 -104 135 500 22 -113 135 500 26 -102 135 500 24 -96 135 500 20 -117 135 500 25 -113 135 500 23 -122 135 500 27 -120 135 500 32 -120 135 500 22 -113 135 500 24 -122 135 500 25 -100 135 500 23 -121 135 500 170 -98 140 500 27 -93 140 500 23 -93 140 500 27 -85 140 500 23 -114 140 500 28 -102 140 500 26 -109 140 500 27 -119 140 500 31 -98 140 500 23 -109 140 500 25 -91 140 500 26 -109 140 500 27 -97 140 500 24 -87 140 500 27 -80 140 500 23 -100 140 500 23 -119 140 500 30 -86 140 500 26 -91 140 500 27 -98 140 500 25 -82 145 500 132 -96 145 500 26 -74 145 500 25 -99 145 500 31 -82 145 500 29 -105 145 500 29 -88 145 500 20 -72 145 500 20 -97 145 500 28 -83 145 500 24 -88 145 500 27 -109 145 500 35 -92 145 500 29 -76 145 500 24 -108 145 500 31 -79 145 500 24 -91 145 500 29 -86 145 500 30 -74 145 500 23 -87 145 500 25 -145 150 500 28 -157 150 500 136 -122 150 500 31 -139 150 500 31 -133 150 500 26 -120 150 500 27 -134 150 500 24 -147 150 500 28 -144 150 500 29 -124 150 500 26 -121 150 500 25 -135 150 500 25 -142 150 500 24 -115 150 500 21 -113 150 500 27 -142 150 500 25 -90 150 500 23 -106 150 500 23 -131 150 500 25 -146 150 500 27 -93 155 500 26 -91 155 500 29 -86 155 500 144 -74 155 500 29 -98 155 500 31 -108 155 500 33 -93 155 500 31 -97 155 500 34 -117 155 500 32 -85 155 500 24 -100 155 500 29 -99 155 500 32 -126 155 500 32 -110 155 500 29 -75 155 500 26 -92 155 500 26 -80 155 500 33 -95 155 500 31 -115 155 500 27 -117 155 500 40 -105 160 500 142 -121 160 500 36 -102 160 500 25 -102 160 500 26 -93 160 500 28 -111 160 500 40 -112 160 500 34 -90 160 500 24 -108 160 500 26 -95 160 500 30 -106 160 500 25 -118 160 500 27 -109 160 500 25 -110 160 500 29 -106 160 500 28 -121 160 500 25 -121 160 500 29 -103 160 500 25 -95 160 500 28 -109 160 500 25 -116 165 500 139 -128 165 500 36 -89 165 500 33 -111 
[... remaining deleted rows of the results file: four space-separated integers per row ...]
diff --git a/core/rewriting/indexing/test/results/std-rule-varied-hist-10.txt b/core/rewriting/indexing/test/results/std-rule-varied-hist-10.txt
deleted file mode 100644
index 8a9d214e..00000000
--- a/core/rewriting/indexing/test/results/std-rule-varied-hist-10.txt
+++ /dev/null
@@ -1,200 +0,0 @@
500 139 -19 50 500 168 -22 50 500 134 -19 50 500 146 -25 50 500 132 -19 50 500 194 -21 50 500 139 -23 50 500 139 -21 50 500 152 -19 50 500 186 -22 50 500 145 -20 50 500 138 -22 50 500 149 -24 50 500 186 -23 50 500 137 -20 50 500 1127 -21 50 500 123 -18 50 500 114 -20 50 500 112 -20 50 500 117 -20 50 500 164 -18 50 500 126 -16 50 500 119 -19 50 500 126 -20 50 500 130 -18 50 500 155 -20 50 500 115 -13 50 500 114 -20 50 500 114 -17 50 500 116 -19 50 500 165 -17 50 500 127 -17 50 500 114 -16 50 500 110 -20 50 500 118 -12 50 500 117 -13 50 500 133 -9 50 500 1158 -15 50 500 132 -11 50 500 125 -13 50 500 130 -11 50 500 124 -13 50 500 162 -14 50 500 129 -11 50 500 135 -11 50 500 122 -12 50 500 128 -15 50 500 168 -11 50 500 118 -14 50 500 136 -11 50 500 129 -14 50 500 161 -14 50 500 132 -12 50 500 125 -10 50 500 113 -14 50 500 11292 -13 50 500 119 -15 50 500 163 -15 50 500 130 -15 50 500 142 -10 50 500 165 -17 50 500 126 -18 50 500 139 -19 50 500 149 -18 50 500 190 -15 50 500 136 -15 50 500 161 -17 50 500 116 -17 50 500 161 -16 50 500 131 -21 50 500 134 -13 50 500 134 -13 50 500 147 -16 50 500 152 -16 50 500 132 -17 50 500 140 -16 50 500 143 -19 50 500 141 -17 50 500 141 -16 50 500 200 -19 50 500 138 -18 50 500 137 -15 50 500 135 -18 50 500 179 -18 50 500 151 -18 50 500 132 -17 50 500 125 -14 50 500 162 -16 50 500 145 -17 50 500 130 -20 50 500 128 -16 50 500 146 -16 50 500 172 -17 50 500 128 -23 50 500 133 -16 50 500 148 -14 50 500 1542 -13 50 500 159 -15 50 500 137 -14 50 500 135 -14 50 500 133 -13 50 500 171 -13 50 500 129 -13 50 500 132 -15 50 500 130 -11 50 500 169 -15 50 500 133 -13 50 500 141 -15 50 500 151 -13 50 500 189 -14 50 500 148 -12 50 500 146 -12 50 500 139 -13 50 500 183 -12 50 500 143 -18 50 500 103 -22 50 500 117 -21 50 500 103 -16 50 500 111 -15 50 500 98 -20 50 500 218 -17 50 500 114 -16 50 500 106 -19 50 500 111 -19 50 500 105 -17 50 500 141 -17 50 500 101 -15 50 500 106 -14 50 500 98 -19 50 500 108 -13 50 500 106 -20 50 500 159 -17 50 500 107 -16 50 500 108 -16 50 500 110 -20 50 500 180 -20 50 500 135 -20 50 500 150 -22 50 500 194 -25 50 500 163 -24 50 500 136 -21 50 500 154 -19 50 500 182 -24 50 500 156 -24 50 500 135 -21 50 500 114 -23 50 500 182 -21 50 500 216 -22 50 500 105 -20 50 500 164 -22 50 500 167 -22 50 500 208 -24 50 500 160 -22 50 500 160 -22 50 500 169 -25 50 500 3704 -22 50 500 130 -24 50 500 128 -24 50 500 124 -26 50 500 133 -22 50 500 172 -21 50 500 140 -23 50 500 148 -19 50 500 127 -22 50 500 173 -16 50 500 127 -22 50 500 114 -20 50 500 102 -21 50 500 124 -22 50 500 167 -22 50 500 114 -22 50 500 123 -25 50 500 123 -19 50 500 117 -22 50 500 162 -22 50 500 24360 -18 50 500 115 -21 50 500 122 -14 50 500 115 -16 50 500 119 -18 50 500 168 -20 50 500 114 -14 50 500 123 -19 50 500 121 -19 50 500 129 -17 50 500 156 -19 50 500 119 -19 50 500 118 -18 50 500 122 -16 50 500 120 -17 50 500 176 -17 50 500 121 -18 50 500 137 -10 50 500 133 -19 50 500 121 diff --git a/core/rewriting/indexing/test/results/std-rule-varied-match-1-40.txt b/core/rewriting/indexing/test/results/std-rule-varied-match-1-40.txt deleted file mode 100644 index 334821a1..00000000 --- a/core/rewriting/indexing/test/results/std-rule-varied-match-1-40.txt +++ /dev/null @@ -1,8000 +0,0 @@ -2 5 500 21 -2 5 500 20 -3 5 500 22 -3 5 500 21 -3 5 500 20 -3 5 500 179 -3 5 500 388 -4 5 500 1222 -2 5 500 13 -2 5 500 10 -4 5 500 14 -3 5 500 13 -3 5 500 15 -1 5 500 15 -2 5 500 13 -3 5 500 16 -2 5 500 12 -2 5 500 14 -2 5 500 14 -3 5 500 12 -2 10 500 20 -3 10 500 22 -3 10 500 19 -3 10 500 22 -2 10 500 17 -2 10 500 21 
-4 10 500 20 -1 10 500 18 -2 10 500 20 -3 10 500 17 -2 10 500 19 -3 10 500 18 -2 10 500 18 -2 10 500 19 -2 10 500 22 -2 10 500 18 -1 10 500 19 -2 10 500 93 -2 10 500 18 -2 10 500 21 -5 15 500 32 -5 15 500 31 -6 15 500 34 -5 15 500 30 -4 15 500 28 -5 15 500 38 -6 15 500 36 -3 15 500 34 -7 15 500 35 -6 15 500 34 -6 15 500 35 -6 15 500 32 -6 15 500 35 -8 15 500 33 -3 15 500 35 -7 15 500 106 -5 15 500 38 -4 15 500 40 -6 15 500 42 -7 15 500 29 -7 20 500 48 -6 20 500 54 -6 20 500 55 -6 20 500 56 -6 20 500 49 -6 20 500 50 -5 20 500 50 -6 20 500 53 -7 20 500 110 -5 20 500 54 -6 20 500 48 -6 20 500 45 -6 20 500 54 -7 20 500 50 -5 20 500 55 -6 20 500 49 -7 20 500 43 -7 20 500 51 -4 20 500 53 -7 20 500 53 -12 25 500 57 -12 25 500 62 -12 25 500 57 -10 25 500 58 -12 25 500 58 -12 25 500 57 -12 25 500 68 -12 25 500 65 -13 25 500 61 -11 25 500 120 -13 25 500 65 -10 25 500 51 -9 25 500 53 -13 25 500 54 -13 25 500 55 -13 25 500 61 -11 25 500 57 -9 25 500 56 -12 25 500 66 -12 25 500 122 -12 30 500 72 -13 30 500 76 -12 30 500 69 -11 30 500 72 -15 30 500 73 -13 30 500 71 -13 30 500 72 -14 30 500 141 -11 30 500 74 -15 30 500 77 -14 30 500 73 -11 30 500 69 -14 30 500 71 -12 30 500 71 -17 30 500 70 -12 30 500 72 -10 30 500 128 -14 30 500 72 -15 30 500 75 -12 30 500 73 -8 35 500 73 -11 35 500 76 -7 35 500 83 -8 35 500 70 -9 35 500 127 -7 35 500 70 -7 35 500 80 -8 35 500 74 -7 35 500 68 -9 35 500 73 -9 35 500 75 -10 35 500 75 -7 35 500 128 -8 35 500 76 -9 35 500 78 -7 35 500 72 -7 35 500 67 -9 35 500 78 -10 35 500 76 -7 35 500 130 -14 40 500 103 -17 40 500 111 -14 40 500 111 -14 40 500 112 -14 40 500 114 -18 40 500 176 -16 40 500 103 -15 40 500 109 -13 40 500 113 -14 40 500 109 -14 40 500 164 -18 40 500 113 -13 40 500 115 -16 40 500 112 -15 40 500 110 -20 40 500 110 -11 40 500 166 -16 40 500 109 -15 40 500 115 -18 40 500 117 -19 45 500 139 -18 45 500 170 -21 45 500 158 -16 45 500 133 -20 45 500 135 -21 45 500 167 -18 45 500 143 -18 45 500 114 -19 45 500 141 -17 45 500 128 -18 45 500 185 -22 45 500 139 -19 45 500 137 -18 45 500 124 -21 45 500 186 -20 45 500 127 -21 45 500 141 -22 45 500 123 -18 45 500 126 -18 45 500 184 -16 50 500 133 -18 50 500 121 -16 50 500 127 -18 50 500 131 -18 50 500 208 -14 50 500 134 -16 50 500 142 -16 50 500 131 -17 50 500 178 -19 50 500 135 -18 50 500 138 -16 50 500 143 -18 50 500 168 -16 50 500 145 -16 50 500 135 -18 50 500 130 -17 50 500 128 -17 50 500 178 -16 50 500 137 -19 50 500 137 -21 55 500 158 -18 55 500 203 -19 55 500 156 -17 55 500 161 -17 55 500 157 -14 55 500 196 -20 55 500 163 -19 55 500 168 -20 55 500 161 -18 55 500 199 -22 55 500 173 -15 55 500 172 -19 55 500 199 -18 55 500 167 -20 55 500 158 -19 55 500 160 -20 55 500 222 -21 55 500 161 -23 55 500 167 -19 55 500 165 -19 60 500 239 -26 60 500 178 -26 60 500 170 -27 60 500 215 -26 60 500 168 -26 60 500 179 -24 60 500 170 -25 60 500 211 -27 60 500 177 -25 60 500 169 -25 60 500 201 -21 60 500 191 -26 60 500 197 -22 60 500 171 -25 60 500 224 -28 60 500 181 -24 60 500 165 -28 60 500 232 -25 60 500 191 -23 60 500 171 -23 65 500 169 -20 65 500 175 -24 65 500 173 -24 65 500 165 -25 65 500 213 -23 65 500 163 -23 65 500 161 -24 65 500 170 -21 65 500 225 -21 65 500 163 -22 65 500 162 -22 65 500 218 -25 65 500 160 -22 65 500 169 -25 65 500 170 -23 65 500 206 -24 65 500 180 -25 65 500 165 -20 65 500 165 -28 65 500 209 -32 70 500 178 -30 70 500 187 -35 70 500 221 -29 70 500 187 -23 70 500 181 -28 70 500 217 -31 70 500 179 -29 70 500 185 -28 70 500 191 -24 70 500 231 -27 70 500 190 -26 70 500 168 -27 70 500 219 -33 70 500 175 -28 70 500 
178 -28 70 500 161 -29 70 500 230 -25 70 500 163 -29 70 500 171 -26 70 500 207 -30 75 500 215 -36 75 500 219 -36 75 500 276 -35 75 500 250 -38 75 500 220 -38 75 500 269 -32 75 500 210 -37 75 500 283 -30 75 500 236 -33 75 500 231 -34 75 500 270 -31 75 500 235 -34 75 500 236 -33 75 500 269 -37 75 500 240 -38 75 500 272 -35 75 500 245 -34 75 500 232 -34 75 500 275 -37 75 500 239 -27 80 500 179 -29 80 500 226 -25 80 500 176 -30 80 500 193 -31 80 500 227 -25 80 500 189 -28 80 500 183 -32 80 500 226 -31 80 500 183 -31 80 500 183 -25 80 500 175 -25 80 500 214 -33 80 500 186 -28 80 500 184 -27 80 500 225 -27 80 500 193 -31 80 500 193 -29 80 500 226 -28 80 500 183 -31 80 500 183 -36 85 500 203 -31 85 500 240 -42 85 500 214 -39 85 500 249 -41 85 500 215 -37 85 500 211 -34 85 500 244 -39 85 500 194 -33 85 500 209 -37 85 500 247 -41 85 500 197 -37 85 500 188 -31 85 500 187 -34 85 500 201 -29 85 500 211 -38 85 500 194 -38 85 500 193 -39 85 500 200 -37 85 500 190 -38 85 500 248 -34 90 500 247 -35 90 500 288 -35 90 500 235 -35 90 500 242 -30 90 500 262 -35 90 500 228 -37 90 500 266 -39 90 500 253 -31 90 500 251 -29 90 500 275 -31 90 500 228 -30 90 500 228 -29 90 500 270 -33 90 500 246 -35 90 500 270 -36 90 500 237 -39 90 500 244 -37 90 500 282 -34 90 500 263 -40 90 500 292 -35 95 500 219 -30 95 500 222 -33 95 500 259 -29 95 500 217 -34 95 500 207 -38 95 500 265 -30 95 500 220 -32 95 500 266 -28 95 500 208 -33 95 500 213 -29 95 500 249 -30 95 500 212 -31 95 500 224 -30 95 500 254 -32 95 500 221 -33 95 500 211 -33 95 500 252 -32 95 500 212 -33 95 500 215 -27 95 500 265 -36 100 500 259 -38 100 500 319 -39 100 500 240 -39 100 500 303 -36 100 500 234 -41 100 500 258 -37 100 500 300 -36 100 500 290 -39 100 500 282 -37 100 500 260 -38 100 500 280 -40 100 500 298 -40 100 500 268 -40 100 500 306 -35 100 500 271 -33 100 500 303 -34 100 500 240 -37 100 500 237 -40 100 500 336 -39 100 500 271 -34 105 500 295 -36 105 500 249 -39 105 500 307 -30 105 500 248 -33 105 500 259 -36 105 500 308 -34 105 500 251 -38 105 500 292 -32 105 500 251 -32 105 500 301 -31 105 500 272 -30 105 500 249 -27 105 500 296 -35 105 500 256 -35 105 500 271 -40 105 500 252 -30 105 500 247 -32 105 500 273 -32 105 500 266 -34 105 500 303 -48 110 500 274 -43 110 500 301 -44 110 500 279 -44 110 500 318 -44 110 500 289 -39 110 500 276 -36 110 500 334 -39 110 500 288 -42 110 500 307 -43 110 500 247 -41 110 500 326 -45 110 500 277 -40 110 500 308 -46 110 500 271 -34 110 500 269 -43 110 500 315 -42 110 500 277 -41 110 500 316 -43 110 500 283 -49 110 500 310 -38 115 500 355 -44 115 500 396 -44 115 500 352 -51 115 500 386 -47 115 500 383 -42 115 500 322 -47 115 500 409 -46 115 500 337 -40 115 500 379 -47 115 500 327 -42 115 500 385 -49 115 500 389 -46 115 500 355 -43 115 500 376 -38 115 500 314 -44 115 500 400 -41 115 500 302 -45 115 500 374 -43 115 500 346 -44 115 500 410 -37 120 500 397 -41 120 500 351 -42 120 500 392 -40 120 500 350 -36 120 500 399 -40 120 500 334 -43 120 500 358 -43 120 500 326 -36 120 500 351 -40 120 500 306 -45 120 500 384 -42 120 500 324 -29 120 500 367 -38 120 500 354 -41 120 500 313 -35 120 500 365 -35 120 500 335 -35 120 500 352 -43 120 500 354 -34 120 500 388 -48 125 500 325 -46 125 500 354 -42 125 500 319 -47 125 500 347 -45 125 500 306 -43 125 500 362 -43 125 500 289 -42 125 500 340 -49 125 500 319 -48 125 500 338 -48 125 500 315 -47 125 500 338 -41 125 500 304 -53 125 500 354 -35 125 500 298 -39 125 500 360 -48 125 500 317 -47 125 500 332 -39 125 500 323 -47 125 500 336 -48 130 500 343 -40 130 500 355 -45 130 500 385 -51 130 
500 342 -51 130 500 370 -43 130 500 353 -49 130 500 393 -43 130 500 340 -44 130 500 378 -47 130 500 315 -47 130 500 374 -46 130 500 346 -46 130 500 384 -47 130 500 390 -39 130 500 336 -45 130 500 378 -43 130 500 373 -46 130 500 342 -48 130 500 324 -52 130 500 356 -48 135 500 393 -51 135 500 396 -47 135 500 412 -47 135 500 362 -51 135 500 423 -54 135 500 378 -51 135 500 408 -53 135 500 387 -50 135 500 379 -53 135 500 456 -45 135 500 382 -40 135 500 354 -50 135 500 407 -55 135 500 408 -53 135 500 415 -49 135 500 341 -54 135 500 418 -49 135 500 396 -47 135 500 367 -50 135 500 419 -41 140 500 358 -44 140 500 392 -51 140 500 407 -47 140 500 345 -48 140 500 368 -44 140 500 346 -39 140 500 397 -49 140 500 338 -41 140 500 355 -51 140 500 348 -44 140 500 351 -47 140 500 385 -45 140 500 367 -48 140 500 389 -41 140 500 330 -45 140 500 354 -47 140 500 310 -53 140 500 388 -39 140 500 402 -43 140 500 342 -39 145 500 410 -46 145 500 426 -51 145 500 452 -49 145 500 454 -46 145 500 417 -51 145 500 403 -42 145 500 381 -45 145 500 380 -54 145 500 408 -44 145 500 371 -49 145 500 417 -46 145 500 430 -46 145 500 351 -47 145 500 416 -46 145 500 344 -44 145 500 429 -45 145 500 435 -54 145 500 387 -48 145 500 423 -43 145 500 381 -42 150 500 390 -48 150 500 448 -41 150 500 390 -55 150 500 424 -47 150 500 443 -49 150 500 381 -45 150 500 457 -46 150 500 450 -50 150 500 404 -49 150 500 441 -46 150 500 446 -50 150 500 422 -49 150 500 429 -46 150 500 455 -43 150 500 403 -50 150 500 415 -43 150 500 430 -48 150 500 403 -48 150 500 474 -43 150 500 423 -53 155 500 413 -53 155 500 473 -49 155 500 442 -49 155 500 427 -53 155 500 446 -44 155 500 418 -48 155 500 423 -50 155 500 462 -53 155 500 438 -51 155 500 422 -49 155 500 433 -49 155 500 453 -56 155 500 400 -56 155 500 455 -52 155 500 458 -52 155 500 421 -50 155 500 419 -54 155 500 441 -57 155 500 402 -52 155 500 424 -51 160 500 485 -48 160 500 415 -53 160 500 506 -56 160 500 484 -47 160 500 469 -51 160 500 419 -46 160 500 465 -42 160 500 477 -47 160 500 406 -46 160 500 451 -55 160 500 465 -49 160 500 396 -46 160 500 473 -49 160 500 481 -49 160 500 407 -45 160 500 487 -44 160 500 480 -51 160 500 461 -53 160 500 423 -50 160 500 468 -66 165 500 509 -55 165 500 491 -61 165 500 438 -63 165 500 454 -58 165 500 492 -60 165 500 439 -58 165 500 466 -58 165 500 500 -61 165 500 507 -61 165 500 471 -56 165 500 487 -64 165 500 491 -64 165 500 471 -66 165 500 455 -63 165 500 441 -65 165 500 499 -57 165 500 479 -60 165 500 448 -63 165 500 480 -58 165 500 476 -58 170 500 514 -52 170 500 498 -56 170 500 503 -60 170 500 497 -57 170 500 447 -59 170 500 504 -54 170 500 491 -52 170 500 501 -56 170 500 477 -51 170 500 490 -55 170 500 502 -59 170 500 490 -58 170 500 420 -63 170 500 504 -55 170 500 488 -64 170 500 533 -54 170 500 515 -59 170 500 453 -56 170 500 500 -49 170 500 476 -62 175 500 471 -53 175 500 501 -53 175 500 505 -51 175 500 483 -64 175 500 521 -51 175 500 495 -56 175 500 525 -62 175 500 544 -51 175 500 507 -57 175 500 558 -61 175 500 564 -50 175 500 521 -52 175 500 536 -61 175 500 510 -54 175 500 532 -57 175 500 527 -52 175 500 492 -53 175 500 535 -58 175 500 556 -51 175 500 573 -62 180 500 535 -65 180 500 538 -62 180 500 462 -61 180 500 517 -59 180 500 494 -60 180 500 536 -66 180 500 456 -59 180 500 477 -58 180 500 543 -66 180 500 494 -64 180 500 485 -64 180 500 505 -58 180 500 509 -58 180 500 520 -62 180 500 508 -68 180 500 489 -57 180 500 502 -61 180 500 519 -66 180 500 508 -67 180 500 480 -71 185 500 476 -70 185 500 486 -68 185 500 507 -63 185 500 457 -67 185 500 515 -68 185 
500 472 -69 185 500 478 -67 185 500 459 -63 185 500 490 -75 185 500 497 -68 185 500 467 -66 185 500 449 -67 185 500 480 -69 185 500 505 -72 185 500 443 -70 185 500 465 -75 185 500 492 -64 185 500 484 -65 185 500 466 -68 185 500 480 -67 190 500 503 -74 190 500 513 -65 190 500 521 -70 190 500 466 -68 190 500 520 -65 190 500 490 -63 190 500 530 -62 190 500 466 -72 190 500 484 -69 190 500 511 -63 190 500 494 -69 190 500 483 -63 190 500 514 -75 190 500 517 -64 190 500 503 -71 190 500 460 -70 190 500 530 -64 190 500 493 -67 190 500 506 -73 190 500 485 -66 195 500 494 -67 195 500 482 -58 195 500 537 -69 195 500 527 -65 195 500 558 -63 195 500 501 -60 195 500 543 -67 195 500 519 -60 195 500 538 -73 195 500 545 -73 195 500 504 -56 195 500 526 -63 195 500 526 -72 195 500 540 -68 195 500 565 -73 195 500 485 -66 195 500 536 -62 195 500 534 -71 195 500 554 -72 195 500 559 -77 200 500 574 -69 200 500 566 -71 200 500 605 -76 200 500 633 -69 200 500 589 -67 200 500 553 -69 200 500 580 -71 200 500 613 -63 200 500 583 -68 200 500 595 -67 200 500 577 -76 200 500 606 -66 200 500 565 -67 200 500 580 -60 200 500 560 -67 200 500 569 -68 200 500 614 -70 200 500 576 -73 200 500 587 -76 200 500 640 -1 5 500 15 -0 5 500 11 -1 5 500 11 -1 5 500 14 -2 5 500 13 -1 5 500 12 -1 5 500 16 -1 5 500 13 -0 5 500 10 -2 5 500 10 -0 5 500 10 -0 5 500 10 -1 5 500 10 -1 5 500 9 -2 5 500 11 -1 5 500 11 -1 5 500 13 -2 5 500 10 -2 5 500 11 -1 5 500 14 -5 10 500 26 -3 10 500 29 -4 10 500 27 -5 10 500 22 -4 10 500 25 -5 10 500 25 -5 10 500 27 -3 10 500 26 -3 10 500 26 -5 10 500 24 -4 10 500 25 -5 10 500 28 -4 10 500 22 -4 10 500 28 -5 10 500 26 -5 10 500 25 -5 10 500 27 -4 10 500 28 -4 10 500 26 -4 10 500 104 -4 15 500 63 -4 15 500 58 -5 15 500 79 -6 15 500 68 -5 15 500 56 -6 15 500 74 -5 15 500 71 -3 15 500 61 -8 15 500 54 -6 15 500 128 -6 15 500 55 -4 15 500 80 -5 15 500 70 -5 15 500 54 -7 15 500 65 -7 15 500 61 -6 15 500 71 -6 15 500 66 -5 15 500 138 -3 15 500 70 -10 20 500 44 -8 20 500 43 -9 20 500 45 -9 20 500 52 -13 20 500 54 -9 20 500 40 -8 20 500 45 -11 20 500 46 -10 20 500 44 -11 20 500 46 -9 20 500 42 -11 20 500 48 -11 20 500 47 -10 20 500 48 -11 20 500 40 -11 20 500 40 -11 20 500 43 -10 20 500 42 -12 20 500 47 -11 20 500 46 -11 25 500 67 -11 25 500 111 -12 25 500 67 -12 25 500 58 -12 25 500 60 -11 25 500 66 -13 25 500 73 -11 25 500 62 -11 25 500 70 -12 25 500 56 -10 25 500 119 -10 25 500 63 -11 25 500 76 -12 25 500 70 -12 25 500 68 -12 25 500 70 -11 25 500 62 -14 25 500 66 -9 25 500 67 -10 25 500 124 -14 30 500 65 -13 30 500 71 -13 30 500 66 -11 30 500 71 -12 30 500 66 -13 30 500 70 -12 30 500 66 -13 30 500 64 -12 30 500 130 -13 30 500 61 -14 30 500 66 -11 30 500 62 -14 30 500 59 -13 30 500 65 -9 30 500 58 -13 30 500 67 -12 30 500 120 -14 30 500 68 -14 30 500 65 -12 30 500 71 -12 35 500 86 -12 35 500 81 -13 35 500 88 -14 35 500 78 -9 35 500 138 -15 35 500 86 -13 35 500 84 -16 35 500 79 -16 35 500 87 -17 35 500 84 -13 35 500 77 -18 35 500 136 -11 35 500 83 -12 35 500 82 -17 35 500 90 -10 35 500 79 -13 35 500 85 -13 35 500 88 -17 35 500 148 -14 35 500 82 -17 40 500 91 -15 40 500 101 -10 40 500 97 -11 40 500 99 -17 40 500 93 -19 40 500 156 -11 40 500 98 -11 40 500 94 -10 40 500 88 -13 40 500 94 -16 40 500 97 -11 40 500 139 -14 40 500 102 -12 40 500 84 -11 40 500 104 -13 40 500 95 -15 40 500 96 -14 40 500 135 -12 40 500 85 -11 40 500 98 -19 45 500 111 -22 45 500 105 -20 45 500 109 -17 45 500 151 -19 45 500 119 -16 45 500 107 -22 45 500 106 -21 45 500 109 -21 45 500 152 -18 45 500 99 -25 45 500 113 -20 45 500 103 -18 45 500 96 
-19 45 500 104 -18 45 500 158 -19 45 500 100 -21 45 500 107 -16 45 500 101 -18 45 500 108 -19 45 500 106 -17 50 500 215 -21 50 500 166 -16 50 500 143 -17 50 500 160 -22 50 500 137 -22 50 500 139 -17 50 500 148 -18 50 500 152 -20 50 500 211 -18 50 500 148 -18 50 500 141 -19 50 500 151 -15 50 500 191 -19 50 500 173 -24 50 500 150 -17 50 500 151 -21 50 500 206 -17 50 500 142 -19 50 500 139 -19 50 500 133 -26 55 500 184 -27 55 500 138 -22 55 500 146 -27 55 500 155 -30 55 500 182 -31 55 500 142 -27 55 500 155 -22 55 500 140 -29 55 500 202 -22 55 500 151 -23 55 500 152 -28 55 500 161 -25 55 500 195 -26 55 500 143 -28 55 500 151 -26 55 500 144 -26 55 500 209 -27 55 500 144 -24 55 500 142 -30 55 500 149 -23 60 500 205 -25 60 500 145 -23 60 500 154 -23 60 500 155 -29 60 500 188 -27 60 500 164 -26 60 500 156 -23 60 500 151 -24 60 500 180 -26 60 500 162 -22 60 500 143 -24 60 500 153 -22 60 500 196 -23 60 500 151 -26 60 500 153 -23 60 500 153 -23 60 500 193 -23 60 500 148 -26 60 500 159 -26 60 500 143 -16 65 500 220 -16 65 500 173 -17 65 500 166 -19 65 500 183 -21 65 500 215 -15 65 500 190 -17 65 500 165 -19 65 500 218 -18 65 500 187 -16 65 500 163 -14 65 500 173 -15 65 500 211 -20 65 500 167 -16 65 500 182 -19 65 500 230 -15 65 500 164 -20 65 500 179 -17 65 500 181 -16 65 500 236 -10 65 500 185 -31 70 500 152 -25 70 500 193 -31 70 500 171 -33 70 500 164 -27 70 500 159 -27 70 500 203 -26 70 500 154 -30 70 500 166 -24 70 500 164 -28 70 500 202 -25 70 500 154 -28 70 500 146 -27 70 500 158 -19 70 500 189 -27 70 500 160 -28 70 500 160 -30 70 500 148 -27 70 500 202 -28 70 500 162 -28 70 500 156 -31 75 500 216 -28 75 500 189 -29 75 500 179 -27 75 500 180 -24 75 500 214 -34 75 500 192 -29 75 500 174 -26 75 500 220 -25 75 500 172 -23 75 500 187 -28 75 500 218 -24 75 500 179 -25 75 500 179 -27 75 500 172 -24 75 500 210 -28 75 500 194 -29 75 500 208 -29 75 500 229 -26 75 500 163 -27 75 500 192 -38 80 500 204 -36 80 500 267 -35 80 500 229 -37 80 500 262 -43 80 500 210 -39 80 500 205 -36 80 500 242 -33 80 500 225 -34 80 500 218 -39 80 500 238 -31 80 500 229 -32 80 500 221 -32 80 500 265 -37 80 500 223 -42 80 500 263 -27 80 500 230 -32 80 500 209 -36 80 500 269 -35 80 500 225 -40 80 500 218 -26 85 500 240 -29 85 500 218 -32 85 500 210 -32 85 500 266 -34 85 500 209 -33 85 500 204 -34 85 500 241 -28 85 500 225 -32 85 500 213 -30 85 500 241 -32 85 500 228 -33 85 500 258 -28 85 500 216 -39 85 500 208 -33 85 500 245 -34 85 500 223 -30 85 500 217 -31 85 500 234 -33 85 500 227 -27 85 500 217 -33 90 500 288 -37 90 500 241 -33 90 500 268 -42 90 500 243 -38 90 500 244 -36 90 500 286 -40 90 500 231 -31 90 500 285 -38 90 500 238 -35 90 500 226 -35 90 500 299 -37 90 500 246 -40 90 500 287 -40 90 500 238 -36 90 500 222 -37 90 500 280 -38 90 500 245 -35 90 500 272 -38 90 500 242 -38 90 500 258 -29 95 500 282 -34 95 500 246 -31 95 500 272 -30 95 500 232 -27 95 500 242 -29 95 500 272 -30 95 500 250 -28 95 500 288 -32 95 500 254 -28 95 500 265 -28 95 500 283 -30 95 500 241 -31 95 500 294 -30 95 500 259 -32 95 500 249 -29 95 500 288 -31 95 500 261 -29 95 500 278 -32 95 500 247 -30 95 500 247 -36 100 500 269 -41 100 500 305 -36 100 500 330 -37 100 500 278 -33 100 500 286 -41 100 500 270 -34 100 500 310 -34 100 500 271 -36 100 500 286 -41 100 500 295 -45 100 500 291 -38 100 500 315 -38 100 500 281 -38 100 500 328 -37 100 500 281 -40 100 500 321 -39 100 500 287 -37 100 500 327 -32 100 500 256 -34 100 500 322 -45 105 500 292 -38 105 500 322 -38 105 500 302 -49 105 500 283 -47 105 500 340 -47 105 500 297 -50 105 500 351 -47 105 500 283 
-43 105 500 318 -42 105 500 287 -42 105 500 324 -46 105 500 279 -44 105 500 322 -52 105 500 287 -38 105 500 312 -47 105 500 289 -38 105 500 302 -46 105 500 291 -45 105 500 325 -49 105 500 276 -40 110 500 399 -34 110 500 344 -45 110 500 381 -42 110 500 322 -36 110 500 370 -39 110 500 343 -39 110 500 395 -40 110 500 417 -42 110 500 349 -37 110 500 392 -33 110 500 322 -42 110 500 405 -47 110 500 363 -43 110 500 385 -43 110 500 346 -41 110 500 329 -36 110 500 423 -42 110 500 334 -42 110 500 385 -40 110 500 354 -41 115 500 335 -32 115 500 303 -38 115 500 348 -37 115 500 308 -33 115 500 348 -37 115 500 311 -36 115 500 334 -37 115 500 309 -38 115 500 351 -32 115 500 302 -38 115 500 338 -34 115 500 279 -34 115 500 326 -36 115 500 316 -38 115 500 349 -38 115 500 298 -32 115 500 357 -37 115 500 313 -39 115 500 327 -29 115 500 319 -43 120 500 325 -42 120 500 290 -43 120 500 330 -47 120 500 281 -48 120 500 331 -49 120 500 291 -44 120 500 337 -42 120 500 287 -44 120 500 334 -44 120 500 299 -41 120 500 332 -40 120 500 306 -52 120 500 333 -43 120 500 280 -47 120 500 347 -49 120 500 295 -45 120 500 348 -48 120 500 298 -44 120 500 351 -45 120 500 313 -44 125 500 336 -46 125 500 301 -40 125 500 338 -44 125 500 297 -38 125 500 325 -40 125 500 297 -39 125 500 275 -41 125 500 324 -45 125 500 278 -45 125 500 340 -42 125 500 302 -40 125 500 335 -46 125 500 302 -40 125 500 326 -44 125 500 291 -37 125 500 326 -42 125 500 270 -42 125 500 327 -45 125 500 299 -43 125 500 334 -53 130 500 319 -51 130 500 386 -56 130 500 336 -56 130 500 360 -58 130 500 328 -54 130 500 370 -56 130 500 357 -57 130 500 326 -51 130 500 374 -54 130 500 342 -50 130 500 372 -44 130 500 324 -49 130 500 369 -55 130 500 328 -55 130 500 363 -53 130 500 326 -52 130 500 369 -49 130 500 324 -44 130 500 364 -40 130 500 347 -42 135 500 323 -42 135 500 363 -44 135 500 311 -48 135 500 332 -52 135 500 327 -46 135 500 348 -45 135 500 326 -44 135 500 362 -43 135 500 338 -46 135 500 340 -51 135 500 343 -46 135 500 365 -44 135 500 330 -45 135 500 369 -50 135 500 359 -43 135 500 327 -43 135 500 370 -49 135 500 304 -48 135 500 350 -46 135 500 318 -40 140 500 410 -39 140 500 377 -49 140 500 433 -47 140 500 431 -43 140 500 371 -35 140 500 418 -42 140 500 413 -42 140 500 401 -45 140 500 455 -43 140 500 439 -44 140 500 368 -46 140 500 411 -45 140 500 391 -45 140 500 428 -46 140 500 418 -42 140 500 439 -38 140 500 448 -44 140 500 439 -38 140 500 365 -46 140 500 417 -53 145 500 415 -41 145 500 393 -44 145 500 396 -43 145 500 388 -44 145 500 395 -50 145 500 399 -49 145 500 375 -47 145 500 402 -52 145 500 425 -48 145 500 344 -45 145 500 387 -40 145 500 382 -43 145 500 387 -44 145 500 378 -48 145 500 377 -46 145 500 383 -42 145 500 354 -51 145 500 420 -46 145 500 339 -49 145 500 394 -48 150 500 442 -48 150 500 433 -50 150 500 458 -53 150 500 466 -53 150 500 487 -50 150 500 439 -53 150 500 419 -50 150 500 465 -40 150 500 405 -52 150 500 432 -48 150 500 458 -51 150 500 452 -50 150 500 459 -45 150 500 433 -55 150 500 422 -42 150 500 478 -49 150 500 438 -55 150 500 429 -46 150 500 427 -40 150 500 453 -53 155 500 368 -52 155 500 403 -55 155 500 422 -54 155 500 382 -55 155 500 408 -53 155 500 402 -53 155 500 407 -52 155 500 411 -62 155 500 372 -59 155 500 403 -56 155 500 374 -57 155 500 409 -57 155 500 429 -61 155 500 391 -46 155 500 428 -53 155 500 405 -56 155 500 376 -50 155 500 416 -53 155 500 387 -56 155 500 416 -53 160 500 441 -59 160 500 406 -55 160 500 450 -56 160 500 424 -56 160 500 386 -59 160 500 427 -61 160 500 438 -50 160 500 378 -58 160 500 439 -57 160 500 444 
-58 160 500 392 -53 160 500 428 -56 160 500 443 -53 160 500 403 -54 160 500 433 -53 160 500 421 -62 160 500 389 -59 160 500 437 -55 160 500 412 -58 160 500 405 -57 165 500 486 -59 165 500 495 -60 165 500 419 -58 165 500 457 -53 165 500 494 -63 165 500 515 -55 165 500 441 -63 165 500 514 -57 165 500 478 -57 165 500 483 -60 165 500 444 -54 165 500 462 -66 165 500 479 -52 165 500 423 -52 165 500 495 -57 165 500 460 -55 165 500 474 -60 165 500 446 -62 165 500 484 -62 165 500 454 -61 170 500 539 -59 170 500 506 -56 170 500 544 -61 170 500 503 -65 170 500 587 -54 170 500 546 -56 170 500 474 -59 170 500 519 -64 170 500 538 -68 170 500 543 -62 170 500 504 -64 170 500 495 -54 170 500 523 -61 170 500 511 -65 170 500 570 -53 170 500 557 -59 170 500 487 -60 170 500 518 -59 170 500 519 -62 170 500 505 -61 175 500 407 -64 175 500 436 -67 175 500 441 -53 175 500 390 -68 175 500 417 -62 175 500 424 -57 175 500 381 -65 175 500 433 -58 175 500 436 -54 175 500 388 -68 175 500 433 -61 175 500 384 -69 175 500 425 -65 175 500 437 -52 175 500 406 -63 175 500 431 -61 175 500 424 -63 175 500 397 -56 175 500 415 -69 175 500 445 -62 180 500 549 -70 180 500 521 -59 180 500 515 -59 180 500 603 -67 180 500 556 -62 180 500 521 -64 180 500 563 -69 180 500 534 -69 180 500 521 -67 180 500 563 -63 180 500 562 -62 180 500 529 -68 180 500 546 -71 180 500 489 -74 180 500 541 -60 180 500 555 -67 180 500 573 -62 180 500 541 -62 180 500 535 -67 180 500 537 -64 185 500 595 -68 185 500 636 -69 185 500 594 -54 185 500 576 -58 185 500 621 -74 185 500 572 -71 185 500 667 -64 185 500 668 -67 185 500 616 -71 185 500 555 -56 185 500 666 -64 185 500 661 -61 185 500 688 -66 185 500 630 -65 185 500 600 -66 185 500 635 -56 185 500 649 -62 185 500 661 -60 185 500 617 -59 185 500 673 -80 190 500 529 -64 190 500 537 -75 190 500 571 -71 190 500 594 -66 190 500 551 -70 190 500 593 -73 190 500 581 -65 190 500 546 -73 190 500 522 -70 190 500 577 -71 190 500 575 -73 190 500 548 -69 190 500 551 -63 190 500 522 -70 190 500 592 -76 190 500 601 -69 190 500 585 -72 190 500 562 -74 190 500 557 -72 190 500 538 -68 195 500 511 -72 195 500 568 -69 195 500 495 -77 195 500 519 -80 195 500 511 -71 195 500 574 -77 195 500 551 -65 195 500 571 -77 195 500 518 -73 195 500 511 -75 195 500 523 -72 195 500 536 -65 195 500 524 -72 195 500 522 -74 195 500 533 -84 195 500 539 -79 195 500 575 -75 195 500 552 -75 195 500 533 -70 195 500 479 -59 200 500 544 -65 200 500 574 -58 200 500 608 -66 200 500 552 -58 200 500 585 -67 200 500 578 -66 200 500 576 -58 200 500 491 -59 200 500 554 -56 200 500 554 -59 200 500 559 -60 200 500 575 -57 200 500 541 -63 200 500 552 -64 200 500 526 -66 200 500 573 -55 200 500 546 -65 200 500 543 -55 200 500 537 -57 200 500 580 -1 5 500 6 -2 5 500 7 -1 5 500 6 -1 5 500 7 -2 5 500 6 -2 5 500 6 -1 5 500 6 -2 5 500 8 -3 5 500 7 -1 5 500 6 -2 5 500 6 -1 5 500 6 -1 5 500 7 -2 5 500 7 -1 5 500 6 -1 5 500 6 -1 5 500 6 -1 5 500 6 -1 5 500 6 -2 5 500 7 -3 10 500 20 -3 10 500 19 -4 10 500 19 -4 10 500 19 -4 10 500 21 -3 10 500 20 -4 10 500 21 -4 10 500 18 -3 10 500 17 -3 10 500 19 -3 10 500 19 -2 10 500 21 -2 10 500 27 -6 10 500 24 -4 10 500 19 -5 10 500 21 -3 10 500 21 -3 10 500 20 -3 10 500 18 -4 10 500 21 -3 15 500 38 -3 15 500 33 -4 15 500 44 -5 15 500 40 -4 15 500 42 -3 15 500 40 -3 15 500 37 -2 15 500 53 -4 15 500 35 -4 15 500 91 -4 15 500 42 -3 15 500 48 -3 15 500 42 -3 15 500 41 -1 15 500 57 -4 15 500 46 -3 15 500 45 -4 15 500 41 -3 15 500 38 -4 15 500 50 -9 20 500 46 -10 20 500 44 -7 20 500 78 -13 20 500 46 -7 20 500 41 -12 20 500 45 -10 20 500 43 
-9 20 500 51 -11 20 500 45 -10 20 500 44 -12 20 500 45 -9 20 500 45 -11 20 500 43 -10 20 500 47 -12 20 500 47 -10 20 500 106 -9 20 500 42 -9 20 500 50 -10 20 500 47 -8 20 500 44 -7 25 500 71 -8 25 500 72 -10 25 500 79 -9 25 500 82 -9 25 500 67 -6 25 500 113 -8 25 500 79 -8 25 500 79 -8 25 500 80 -9 25 500 87 -6 25 500 75 -8 25 500 67 -7 25 500 70 -7 25 500 114 -8 25 500 64 -8 25 500 68 -8 25 500 71 -8 25 500 69 -11 25 500 63 -9 25 500 78 -7 30 500 61 -7 30 500 106 -7 30 500 75 -6 30 500 71 -9 30 500 63 -8 30 500 73 -6 30 500 73 -8 30 500 79 -7 30 500 78 -7 30 500 122 -10 30 500 67 -7 30 500 72 -8 30 500 70 -9 30 500 76 -9 30 500 73 -12 30 500 79 -10 30 500 72 -5 30 500 123 -9 30 500 74 -9 30 500 76 -11 35 500 123 -12 35 500 114 -12 35 500 97 -14 35 500 145 -12 35 500 121 -10 35 500 107 -11 35 500 131 -14 35 500 115 -12 35 500 114 -14 35 500 142 -12 35 500 106 -12 35 500 113 -9 35 500 113 -14 35 500 112 -11 35 500 176 -13 35 500 99 -15 35 500 114 -11 35 500 96 -10 35 500 109 -17 35 500 118 -15 40 500 144 -14 40 500 100 -16 40 500 102 -15 40 500 119 -15 40 500 104 -13 40 500 133 -16 40 500 106 -17 40 500 102 -15 40 500 106 -16 40 500 103 -10 40 500 104 -11 40 500 146 -13 40 500 103 -12 40 500 101 -19 40 500 103 -10 40 500 100 -16 40 500 103 -14 40 500 145 -16 40 500 99 -12 40 500 106 -18 45 500 123 -13 45 500 113 -13 45 500 118 -16 45 500 154 -12 45 500 112 -15 45 500 121 -16 45 500 111 -14 45 500 113 -14 45 500 142 -15 45 500 179 -16 45 500 119 -16 45 500 117 -15 45 500 108 -19 45 500 166 -18 45 500 134 -17 45 500 116 -19 45 500 119 -17 45 500 114 -18 45 500 147 -14 45 500 131 -11 50 500 147 -13 50 500 141 -11 50 500 153 -8 50 500 174 -11 50 500 147 -13 50 500 134 -13 50 500 162 -13 50 500 172 -12 50 500 135 -13 50 500 121 -10 50 500 142 -13 50 500 138 -10 50 500 164 -14 50 500 145 -11 50 500 129 -14 50 500 152 -15 50 500 175 -13 50 500 143 -8 50 500 115 -12 50 500 139 -17 55 500 132 -15 55 500 169 -16 55 500 139 -14 55 500 143 -14 55 500 132 -13 55 500 175 -18 55 500 139 -14 55 500 136 -15 55 500 144 -15 55 500 168 -18 55 500 136 -17 55 500 125 -16 55 500 133 -17 55 500 125 -19 55 500 176 -15 55 500 125 -16 55 500 131 -17 55 500 130 -12 55 500 131 -17 55 500 182 -25 60 500 142 -23 60 500 126 -24 60 500 132 -22 60 500 185 -21 60 500 140 -24 60 500 148 -21 60 500 144 -27 60 500 179 -23 60 500 140 -21 60 500 155 -21 60 500 150 -23 60 500 178 -26 60 500 149 -28 60 500 139 -21 60 500 133 -24 60 500 130 -25 60 500 188 -23 60 500 149 -28 60 500 148 -31 60 500 152 -22 65 500 207 -18 65 500 169 -18 65 500 175 -18 65 500 216 -19 65 500 175 -24 65 500 165 -25 65 500 165 -18 65 500 206 -20 65 500 159 -24 65 500 169 -18 65 500 221 -25 65 500 174 -23 65 500 166 -22 65 500 176 -27 65 500 194 -16 65 500 156 -22 65 500 174 -19 65 500 170 -23 65 500 222 -20 65 500 171 -23 70 500 178 -25 70 500 209 -23 70 500 178 -23 70 500 169 -24 70 500 211 -25 70 500 184 -26 70 500 177 -24 70 500 187 -23 70 500 215 -28 70 500 170 -20 70 500 167 -20 70 500 212 -25 70 500 180 -24 70 500 188 -26 70 500 175 -25 70 500 230 -25 70 500 167 -25 70 500 174 -25 70 500 212 -28 70 500 182 -29 75 500 228 -27 75 500 264 -28 75 500 247 -30 75 500 211 -30 75 500 264 -27 75 500 211 -26 75 500 256 -30 75 500 235 -29 75 500 194 -31 75 500 276 -29 75 500 209 -29 75 500 219 -28 75 500 288 -28 75 500 195 -29 75 500 232 -31 75 500 266 -24 75 500 228 -29 75 500 266 -29 75 500 233 -27 75 500 211 -27 80 500 262 -23 80 500 239 -30 80 500 217 -25 80 500 248 -32 80 500 219 -22 80 500 255 -28 80 500 222 -25 80 500 221 -30 80 500 265 -30 80 500 236 -27 
80 500 205 -26 80 500 256 -24 80 500 229 -25 80 500 280 -23 80 500 204 -22 80 500 211 -23 80 500 257 -26 80 500 228 -24 80 500 237 -25 80 500 267 -29 85 500 211 -31 85 500 201 -32 85 500 275 -33 85 500 206 -32 85 500 267 -30 85 500 216 -34 85 500 217 -28 85 500 259 -33 85 500 232 -28 85 500 202 -32 85 500 256 -31 85 500 202 -36 85 500 226 -34 85 500 247 -35 85 500 206 -36 85 500 224 -33 85 500 256 -27 85 500 236 -31 85 500 203 -31 85 500 253 -30 90 500 244 -30 90 500 282 -41 90 500 262 -33 90 500 238 -33 90 500 257 -28 90 500 230 -34 90 500 288 -34 90 500 244 -31 90 500 242 -32 90 500 255 -33 90 500 232 -33 90 500 270 -39 90 500 249 -35 90 500 268 -33 90 500 286 -39 90 500 253 -31 90 500 295 -35 90 500 231 -35 90 500 234 -33 90 500 256 -35 95 500 246 -39 95 500 280 -35 95 500 262 -33 95 500 265 -34 95 500 278 -41 95 500 247 -40 95 500 323 -36 95 500 254 -36 95 500 270 -38 95 500 252 -36 95 500 228 -37 95 500 283 -35 95 500 243 -36 95 500 293 -40 95 500 240 -33 95 500 269 -40 95 500 300 -39 95 500 261 -44 95 500 275 -42 95 500 259 -32 100 500 233 -35 100 500 274 -26 100 500 251 -32 100 500 275 -31 100 500 246 -30 100 500 281 -34 100 500 265 -26 100 500 270 -31 100 500 304 -31 100 500 242 -38 100 500 284 -30 100 500 257 -31 100 500 245 -30 100 500 284 -32 100 500 234 -36 100 500 292 -24 100 500 258 -29 100 500 244 -33 100 500 281 -29 100 500 244 -36 105 500 315 -40 105 500 300 -36 105 500 333 -39 105 500 315 -35 105 500 335 -37 105 500 275 -36 105 500 324 -46 105 500 298 -35 105 500 333 -36 105 500 284 -32 105 500 323 -37 105 500 268 -37 105 500 315 -39 105 500 292 -36 105 500 306 -45 105 500 313 -39 105 500 311 -36 105 500 302 -38 105 500 295 -35 105 500 317 -39 110 500 269 -41 110 500 309 -37 110 500 277 -40 110 500 310 -42 110 500 276 -43 110 500 300 -39 110 500 261 -39 110 500 303 -37 110 500 312 -44 110 500 263 -43 110 500 310 -40 110 500 277 -39 110 500 281 -38 110 500 273 -40 110 500 322 -37 110 500 287 -46 110 500 314 -41 110 500 274 -36 110 500 255 -41 110 500 312 -36 115 500 279 -40 115 500 316 -39 115 500 271 -43 115 500 312 -37 115 500 290 -38 115 500 312 -38 115 500 265 -45 115 500 267 -36 115 500 288 -44 115 500 283 -36 115 500 308 -40 115 500 284 -41 115 500 306 -39 115 500 262 -37 115 500 318 -41 115 500 285 -34 115 500 310 -39 115 500 283 -38 115 500 298 -42 115 500 312 -35 120 500 287 -40 120 500 322 -38 120 500 316 -38 120 500 355 -38 120 500 316 -35 120 500 354 -35 120 500 363 -38 120 500 294 -44 120 500 307 -45 120 500 341 -42 120 500 341 -44 120 500 313 -32 120 500 341 -40 120 500 308 -39 120 500 344 -36 120 500 301 -38 120 500 338 -42 120 500 344 -37 120 500 322 -42 120 500 316 -44 125 500 339 -41 125 500 314 -36 125 500 370 -41 125 500 319 -45 125 500 349 -37 125 500 310 -40 125 500 359 -43 125 500 331 -46 125 500 339 -46 125 500 306 -42 125 500 337 -42 125 500 329 -42 125 500 341 -50 125 500 317 -40 125 500 374 -47 125 500 352 -44 125 500 318 -39 125 500 341 -48 125 500 324 -45 125 500 340 -50 130 500 348 -37 130 500 373 -39 130 500 360 -49 130 500 366 -50 130 500 351 -47 130 500 398 -44 130 500 382 -41 130 500 359 -42 130 500 379 -38 130 500 339 -46 130 500 375 -50 130 500 370 -52 130 500 380 -38 130 500 324 -46 130 500 356 -42 130 500 394 -38 130 500 336 -41 130 500 370 -44 130 500 353 -32 130 500 393 -57 135 500 345 -66 135 500 383 -64 135 500 381 -59 135 500 322 -61 135 500 345 -59 135 500 362 -62 135 500 387 -57 135 500 347 -65 135 500 366 -60 135 500 344 -60 135 500 408 -62 135 500 369 -61 135 500 348 -58 135 500 386 -62 135 500 338 -61 135 500 373 -56 135 500 
345 -57 135 500 393 -55 135 500 360 -61 135 500 340 -43 140 500 425 -58 140 500 387 -45 140 500 418 -45 140 500 424 -55 140 500 401 -42 140 500 400 -42 140 500 421 -49 140 500 365 -47 140 500 405 -48 140 500 397 -50 140 500 412 -52 140 500 397 -51 140 500 378 -50 140 500 380 -48 140 500 368 -48 140 500 379 -54 140 500 400 -53 140 500 400 -47 140 500 424 -49 140 500 431 -55 145 500 393 -40 145 500 401 -49 145 500 351 -54 145 500 380 -48 145 500 390 -46 145 500 360 -51 145 500 388 -50 145 500 366 -43 145 500 383 -40 145 500 357 -46 145 500 367 -50 145 500 374 -48 145 500 367 -53 145 500 371 -50 145 500 358 -48 145 500 395 -47 145 500 422 -51 145 500 380 -56 145 500 421 -45 145 500 369 -49 150 500 370 -45 150 500 365 -50 150 500 355 -38 150 500 362 -48 150 500 381 -47 150 500 375 -50 150 500 358 -47 150 500 389 -47 150 500 397 -42 150 500 355 -45 150 500 379 -45 150 500 341 -50 150 500 379 -49 150 500 392 -41 150 500 361 -38 150 500 374 -50 150 500 353 -44 150 500 375 -56 150 500 370 -42 150 500 370 -59 155 500 491 -63 155 500 449 -55 155 500 452 -57 155 500 478 -54 155 500 465 -50 155 500 426 -56 155 500 437 -60 155 500 463 -60 155 500 437 -55 155 500 469 -54 155 500 456 -53 155 500 493 -48 155 500 455 -55 155 500 434 -52 155 500 461 -54 155 500 403 -58 155 500 467 -61 155 500 478 -58 155 500 486 -52 155 500 447 -58 160 500 435 -60 160 500 445 -48 160 500 410 -64 160 500 455 -64 160 500 437 -61 160 500 436 -68 160 500 444 -62 160 500 451 -57 160 500 411 -55 160 500 439 -65 160 500 479 -62 160 500 419 -49 160 500 415 -59 160 500 452 -55 160 500 426 -60 160 500 475 -48 160 500 438 -56 160 500 428 -58 160 500 449 -61 160 500 444 -55 165 500 510 -53 165 500 482 -56 165 500 499 -55 165 500 492 -59 165 500 515 -48 165 500 443 -55 165 500 496 -56 165 500 504 -53 165 500 512 -63 165 500 533 -57 165 500 518 -49 165 500 496 -59 165 500 490 -62 165 500 471 -41 165 500 477 -53 165 500 500 -48 165 500 508 -53 165 500 489 -52 165 500 443 -58 165 500 485 -65 170 500 434 -69 170 500 409 -52 170 500 447 -75 170 500 462 -64 170 500 424 -64 170 500 418 -65 170 500 460 -62 170 500 408 -70 170 500 425 -71 170 500 447 -61 170 500 441 -59 170 500 436 -62 170 500 457 -66 170 500 429 -64 170 500 416 -62 170 500 410 -70 170 500 413 -71 170 500 430 -61 170 500 441 -63 170 500 406 -63 175 500 545 -59 175 500 501 -53 175 500 507 -58 175 500 454 -60 175 500 488 -59 175 500 486 -52 175 500 485 -51 175 500 484 -53 175 500 494 -52 175 500 476 -53 175 500 483 -55 175 500 452 -56 175 500 492 -53 175 500 487 -53 175 500 498 -60 175 500 455 -52 175 500 484 -62 175 500 473 -53 175 500 499 -58 175 500 480 -60 180 500 465 -66 180 500 468 -53 180 500 437 -63 180 500 469 -69 180 500 484 -67 180 500 469 -56 180 500 461 -65 180 500 485 -63 180 500 475 -65 180 500 481 -68 180 500 460 -64 180 500 490 -69 180 500 466 -68 180 500 460 -68 180 500 451 -68 180 500 470 -69 180 500 485 -71 180 500 461 -68 180 500 464 -65 180 500 495 -59 185 500 534 -64 185 500 540 -65 185 500 453 -61 185 500 547 -54 185 500 496 -60 185 500 516 -68 185 500 549 -64 185 500 489 -55 185 500 524 -67 185 500 512 -52 185 500 509 -62 185 500 526 -55 185 500 504 -59 185 500 514 -65 185 500 540 -56 185 500 537 -64 185 500 522 -69 185 500 497 -65 185 500 532 -57 185 500 514 -71 190 500 549 -65 190 500 498 -65 190 500 500 -64 190 500 532 -62 190 500 581 -62 190 500 537 -71 190 500 523 -62 190 500 511 -63 190 500 506 -64 190 500 504 -66 190 500 544 -68 190 500 541 -63 190 500 532 -71 190 500 575 -59 190 500 471 -71 190 500 532 -69 190 500 549 -67 190 500 502 -74 190 500 
531 -66 190 500 533 -68 195 500 514 -71 195 500 547 -63 195 500 512 -64 195 500 567 -58 195 500 520 -71 195 500 519 -64 195 500 555 -51 195 500 528 -59 195 500 511 -66 195 500 546 -65 195 500 525 -59 195 500 544 -68 195 500 520 -67 195 500 492 -58 195 500 510 -69 195 500 538 -67 195 500 526 -69 195 500 551 -59 195 500 548 -64 195 500 487 -68 200 500 504 -62 200 500 498 -67 200 500 495 -66 200 500 522 -60 200 500 496 -69 200 500 519 -68 200 500 485 -67 200 500 533 -65 200 500 525 -74 200 500 508 -72 200 500 521 -72 200 500 496 -64 200 500 536 -65 200 500 547 -63 200 500 508 -62 200 500 534 -69 200 500 519 -70 200 500 505 -66 200 500 493 -67 200 500 497 -0 5 500 33 -0 5 500 17 -0 5 500 23 -2 5 500 23 -1 5 500 22 -0 5 500 18 -0 5 500 20 -0 5 500 23 -1 5 500 25 -0 5 500 20 -0 5 500 20 -1 5 500 23 -1 5 500 21 -0 5 500 20 -0 5 500 72 -0 5 500 24 -0 5 500 20 -0 5 500 16 -0 5 500 21 -0 5 500 22 -4 10 500 61 -3 10 500 56 -4 10 500 50 -4 10 500 38 -4 10 500 39 -4 10 500 46 -4 10 500 37 -3 10 500 44 -4 10 500 52 -3 10 500 81 -4 10 500 55 -3 10 500 55 -4 10 500 40 -3 10 500 42 -4 10 500 35 -3 10 500 43 -3 10 500 47 -4 10 500 52 -3 10 500 38 -3 10 500 46 -8 15 500 41 -9 15 500 34 -8 15 500 36 -7 15 500 72 -8 15 500 37 -8 15 500 30 -8 15 500 37 -8 15 500 44 -8 15 500 42 -9 15 500 35 -7 15 500 38 -7 15 500 36 -9 15 500 33 -7 15 500 33 -7 15 500 40 -8 15 500 38 -8 15 500 36 -8 15 500 37 -8 15 500 102 -9 15 500 37 -6 20 500 47 -8 20 500 51 -6 20 500 43 -6 20 500 42 -5 20 500 41 -7 20 500 44 -7 20 500 42 -7 20 500 43 -10 20 500 48 -6 20 500 46 -7 20 500 44 -6 20 500 101 -5 20 500 45 -7 20 500 40 -5 20 500 43 -7 20 500 42 -8 20 500 42 -6 20 500 43 -5 20 500 44 -6 20 500 42 -10 25 500 62 -12 25 500 62 -10 25 500 65 -12 25 500 98 -10 25 500 65 -11 25 500 68 -9 25 500 62 -10 25 500 64 -10 25 500 67 -12 25 500 66 -13 25 500 64 -12 25 500 60 -11 25 500 113 -11 25 500 67 -12 25 500 64 -10 25 500 69 -10 25 500 63 -10 25 500 62 -9 25 500 68 -12 25 500 67 -9 30 500 65 -10 30 500 101 -11 30 500 62 -10 30 500 62 -12 30 500 65 -11 30 500 66 -11 30 500 62 -12 30 500 66 -13 30 500 63 -9 30 500 63 -10 30 500 106 -10 30 500 67 -11 30 500 62 -12 30 500 65 -10 30 500 64 -12 30 500 60 -9 30 500 59 -11 30 500 57 -15 30 500 63 -13 30 500 111 -14 35 500 82 -16 35 500 78 -14 35 500 74 -15 35 500 76 -18 35 500 77 -16 35 500 84 -13 35 500 76 -18 35 500 129 -15 35 500 80 -15 35 500 79 -12 35 500 81 -18 35 500 83 -16 35 500 83 -18 35 500 83 -17 35 500 78 -15 35 500 133 -13 35 500 79 -16 35 500 72 -14 35 500 76 -14 35 500 74 -15 40 500 92 -18 40 500 97 -17 40 500 138 -15 40 500 98 -12 40 500 105 -14 40 500 96 -15 40 500 100 -14 40 500 90 -14 40 500 139 -15 40 500 98 -16 40 500 104 -13 40 500 99 -15 40 500 100 -11 40 500 99 -12 40 500 133 -16 40 500 103 -14 40 500 89 -16 40 500 92 -15 40 500 104 -13 40 500 104 -21 45 500 147 -21 45 500 111 -23 45 500 109 -17 45 500 115 -16 45 500 102 -20 45 500 153 -18 45 500 107 -17 45 500 105 -17 45 500 110 -19 45 500 112 -16 45 500 121 -21 45 500 154 -17 45 500 110 -18 45 500 113 -18 45 500 113 -16 45 500 109 -21 45 500 158 -17 45 500 113 -17 45 500 110 -22 45 500 116 -19 50 500 160 -18 50 500 196 -18 50 500 157 -21 50 500 152 -19 50 500 162 -19 50 500 210 -21 50 500 157 -19 50 500 171 -20 50 500 199 -20 50 500 164 -23 50 500 143 -20 50 500 179 -22 50 500 200 -20 50 500 172 -19 50 500 167 -21 50 500 173 -20 50 500 200 -21 50 500 163 -20 50 500 157 -15 50 500 166 -21 55 500 156 -20 55 500 130 -16 55 500 133 -18 55 500 124 -18 55 500 159 -18 55 500 127 -22 55 500 131 -19 55 500 138 -20 55 500 131 -19 
55 500 166 -20 55 500 142 -21 55 500 134 -20 55 500 132 -17 55 500 170 -22 55 500 134 -21 55 500 131 -21 55 500 135 -18 55 500 128 -20 55 500 167 -17 55 500 122 -26 60 500 189 -24 60 500 166 -22 60 500 201 -24 60 500 162 -22 60 500 159 -17 60 500 149 -22 60 500 229 -23 60 500 169 -23 60 500 188 -20 60 500 206 -24 60 500 176 -23 60 500 184 -19 60 500 211 -18 60 500 183 -24 60 500 160 -21 60 500 174 -21 60 500 213 -20 60 500 164 -20 60 500 162 -21 60 500 197 -18 65 500 144 -31 65 500 150 -30 65 500 147 -22 65 500 136 -24 65 500 188 -25 65 500 146 -24 65 500 136 -22 65 500 141 -22 65 500 174 -30 65 500 148 -21 65 500 144 -26 65 500 154 -29 65 500 186 -24 65 500 138 -25 65 500 150 -27 65 500 143 -26 65 500 175 -29 65 500 157 -27 65 500 147 -27 65 500 145 -27 70 500 228 -26 70 500 194 -25 70 500 196 -22 70 500 214 -24 70 500 171 -26 70 500 179 -21 70 500 160 -25 70 500 214 -27 70 500 166 -25 70 500 178 -26 70 500 211 -20 70 500 162 -31 70 500 194 -23 70 500 178 -25 70 500 217 -25 70 500 191 -27 70 500 183 -26 70 500 214 -22 70 500 177 -25 70 500 212 -25 75 500 258 -24 75 500 248 -26 75 500 262 -25 75 500 276 -21 75 500 228 -27 75 500 226 -22 75 500 250 -23 75 500 197 -25 75 500 251 -29 75 500 259 -21 75 500 243 -21 75 500 254 -24 75 500 235 -22 75 500 209 -24 75 500 269 -23 75 500 220 -29 75 500 274 -21 75 500 224 -26 75 500 245 -27 75 500 277 -39 80 500 196 -38 80 500 207 -37 80 500 224 -35 80 500 180 -40 80 500 198 -31 80 500 226 -36 80 500 202 -38 80 500 203 -34 80 500 233 -36 80 500 184 -40 80 500 187 -34 80 500 227 -35 80 500 192 -36 80 500 196 -36 80 500 242 -36 80 500 187 -37 80 500 192 -34 80 500 215 -30 80 500 194 -26 80 500 190 -28 85 500 265 -31 85 500 253 -29 85 500 226 -29 85 500 262 -30 85 500 231 -24 85 500 236 -31 85 500 253 -31 85 500 234 -28 85 500 281 -36 85 500 233 -33 85 500 241 -30 85 500 258 -30 85 500 242 -28 85 500 357 -29 85 500 220 -26 85 500 268 -26 85 500 256 -33 85 500 248 -31 85 500 217 -35 85 500 274 -24 90 500 249 -29 90 500 275 -28 90 500 227 -21 90 500 250 -29 90 500 260 -23 90 500 250 -28 90 500 257 -25 90 500 259 -23 90 500 237 -25 90 500 280 -24 90 500 259 -22 90 500 255 -23 90 500 234 -19 90 500 227 -26 90 500 270 -19 90 500 227 -19 90 500 236 -23 90 500 242 -24 90 500 240 -21 90 500 258 -34 95 500 236 -39 95 500 247 -37 95 500 253 -34 95 500 233 -25 95 500 257 -29 95 500 235 -30 95 500 227 -35 95 500 278 -29 95 500 235 -33 95 500 268 -33 95 500 241 -32 95 500 232 -31 95 500 258 -35 95 500 232 -33 95 500 230 -34 95 500 266 -33 95 500 229 -32 95 500 251 -29 95 500 241 -31 95 500 231 -37 100 500 292 -39 100 500 263 -38 100 500 303 -38 100 500 260 -35 100 500 253 -34 100 500 287 -39 100 500 253 -36 100 500 296 -36 100 500 251 -36 100 500 296 -35 100 500 271 -34 100 500 266 -39 100 500 276 -36 100 500 255 -38 100 500 288 -34 100 500 258 -32 100 500 250 -36 100 500 301 -35 100 500 258 -36 100 500 301 -43 105 500 251 -38 105 500 264 -37 105 500 239 -39 105 500 238 -37 105 500 299 -37 105 500 235 -43 105 500 279 -40 105 500 239 -39 105 500 242 -44 105 500 275 -46 105 500 256 -42 105 500 270 -43 105 500 252 -44 105 500 243 -44 105 500 262 -38 105 500 236 -41 105 500 280 -38 105 500 213 -38 105 500 250 -40 105 500 267 -39 110 500 275 -40 110 500 295 -44 110 500 262 -35 110 500 245 -41 110 500 349 -39 110 500 262 -38 110 500 300 -38 110 500 293 -42 110 500 324 -36 110 500 270 -36 110 500 266 -42 110 500 299 -37 110 500 278 -37 110 500 311 -33 110 500 286 -38 110 500 304 -33 110 500 268 -36 110 500 295 -37 110 500 278 -39 110 500 251 -39 115 500 361 -31 115 500 373 
-43 115 500 339 -41 115 500 329 -44 115 500 371 -38 115 500 348 -44 115 500 340 -43 115 500 355 -42 115 500 317 -42 115 500 343 -37 115 500 337 -37 115 500 352 -41 115 500 312 -34 115 500 350 -43 115 500 355 -49 115 500 361 -39 115 500 339 -42 115 500 353 -42 115 500 321 -39 115 500 356 -48 120 500 288 -39 120 500 370 -38 120 500 306 -46 120 500 314 -43 120 500 362 -35 120 500 317 -39 120 500 365 -42 120 500 321 -40 120 500 344 -42 120 500 324 -40 120 500 327 -47 120 500 314 -37 120 500 352 -43 120 500 329 -41 120 500 343 -43 120 500 331 -41 120 500 341 -36 120 500 319 -41 120 500 380 -43 120 500 330 -39 125 500 397 -40 125 500 348 -44 125 500 375 -43 125 500 397 -51 125 500 347 -34 125 500 366 -46 125 500 359 -47 125 500 417 -40 125 500 373 -53 125 500 371 -45 125 500 365 -39 125 500 344 -39 125 500 404 -47 125 500 346 -42 125 500 395 -46 125 500 413 -37 125 500 379 -45 125 500 403 -48 125 500 365 -49 125 500 411 -41 130 500 355 -38 130 500 323 -34 130 500 387 -42 130 500 326 -38 130 500 369 -41 130 500 383 -40 130 500 330 -38 130 500 347 -32 130 500 329 -42 130 500 316 -45 130 500 347 -39 130 500 313 -30 130 500 337 -44 130 500 303 -36 130 500 333 -36 130 500 353 -40 130 500 296 -36 130 500 353 -36 130 500 293 -34 130 500 346 -51 135 500 423 -48 135 500 395 -44 135 500 385 -50 135 500 391 -51 135 500 410 -48 135 500 401 -48 135 500 381 -50 135 500 427 -45 135 500 391 -54 135 500 419 -54 135 500 420 -51 135 500 424 -46 135 500 469 -50 135 500 407 -49 135 500 398 -41 135 500 404 -45 135 500 357 -49 135 500 408 -49 135 500 476 -46 135 500 416 -39 140 500 387 -43 140 500 384 -51 140 500 421 -50 140 500 362 -45 140 500 377 -49 140 500 458 -44 140 500 329 -40 140 500 401 -48 140 500 387 -46 140 500 381 -44 140 500 381 -51 140 500 370 -48 140 500 359 -38 140 500 373 -47 140 500 401 -50 140 500 366 -43 140 500 403 -44 140 500 368 -46 140 500 399 -44 140 500 378 -48 145 500 362 -49 145 500 385 -51 145 500 382 -47 145 500 345 -48 145 500 386 -54 145 500 355 -50 145 500 387 -51 145 500 344 -51 145 500 361 -57 145 500 401 -53 145 500 365 -52 145 500 381 -46 145 500 372 -55 145 500 371 -55 145 500 373 -49 145 500 354 -58 145 500 384 -51 145 500 364 -49 145 500 370 -56 145 500 364 -52 150 500 466 -55 150 500 435 -49 150 500 451 -60 150 500 427 -50 150 500 435 -48 150 500 424 -56 150 500 439 -56 150 500 449 -55 150 500 439 -57 150 500 397 -52 150 500 468 -55 150 500 436 -55 150 500 441 -47 150 500 420 -50 150 500 456 -52 150 500 439 -56 150 500 436 -57 150 500 465 -50 150 500 474 -57 150 500 424 -50 155 500 451 -61 155 500 452 -51 155 500 387 -47 155 500 475 -47 155 500 447 -52 155 500 429 -50 155 500 441 -53 155 500 456 -53 155 500 448 -60 155 500 428 -52 155 500 446 -51 155 500 456 -60 155 500 404 -54 155 500 471 -49 155 500 433 -56 155 500 440 -59 155 500 470 -55 155 500 452 -47 155 500 432 -50 155 500 415 -64 160 500 476 -61 160 500 484 -56 160 500 391 -57 160 500 442 -52 160 500 483 -61 160 500 442 -61 160 500 441 -52 160 500 480 -53 160 500 434 -58 160 500 437 -59 160 500 483 -58 160 500 467 -62 160 500 441 -53 160 500 429 -61 160 500 443 -54 160 500 416 -57 160 500 471 -58 160 500 450 -67 160 500 444 -58 160 500 442 -50 165 500 432 -49 165 500 421 -55 165 500 418 -54 165 500 468 -55 165 500 429 -54 165 500 400 -50 165 500 436 -56 165 500 426 -56 165 500 421 -53 165 500 457 -53 165 500 446 -50 165 500 437 -50 165 500 414 -49 165 500 456 -53 165 500 443 -54 165 500 445 -53 165 500 453 -46 165 500 431 -47 165 500 423 -55 165 500 475 -47 170 500 487 -54 170 500 440 -60 170 500 482 -52 170 500 484 
-67 170 500 456 -52 170 500 447 -56 170 500 491 -59 170 500 525 -62 170 500 455 -48 170 500 454 -55 170 500 463 -58 170 500 456 -57 170 500 452 -64 170 500 489 -52 170 500 486 -45 170 500 461 -62 170 500 458 -56 170 500 478 -49 170 500 484 -60 170 500 441 -58 175 500 419 -61 175 500 484 -58 175 500 473 -67 175 500 495 -59 175 500 454 -56 175 500 462 -58 175 500 475 -62 175 500 451 -63 175 500 487 -69 175 500 463 -65 175 500 446 -61 175 500 473 -57 175 500 471 -57 175 500 472 -63 175 500 476 -56 175 500 434 -61 175 500 477 -58 175 500 478 -59 175 500 458 -51 175 500 464 -66 180 500 461 -62 180 500 484 -55 180 500 439 -66 180 500 497 -76 180 500 480 -72 180 500 481 -65 180 500 464 -68 180 500 485 -63 180 500 463 -63 180 500 454 -64 180 500 451 -65 180 500 475 -61 180 500 467 -59 180 500 419 -65 180 500 509 -67 180 500 454 -69 180 500 419 -58 180 500 493 -66 180 500 486 -61 180 500 503 -71 185 500 579 -62 185 500 520 -68 185 500 533 -62 185 500 537 -64 185 500 548 -62 185 500 540 -71 185 500 555 -69 185 500 532 -74 185 500 510 -63 185 500 548 -62 185 500 553 -66 185 500 552 -69 185 500 511 -70 185 500 581 -66 185 500 479 -70 185 500 541 -61 185 500 533 -68 185 500 536 -64 185 500 542 -64 185 500 525 -73 190 500 493 -60 190 500 541 -66 190 500 534 -65 190 500 528 -64 190 500 510 -66 190 500 515 -68 190 500 497 -67 190 500 530 -59 190 500 513 -65 190 500 482 -68 190 500 457 -77 190 500 534 -68 190 500 500 -68 190 500 514 -64 190 500 521 -69 190 500 471 -64 190 500 528 -68 190 500 504 -78 190 500 504 -65 190 500 500 -82 195 500 513 -73 195 500 507 -80 195 500 526 -75 195 500 523 -74 195 500 539 -76 195 500 581 -75 195 500 505 -75 195 500 520 -74 195 500 551 -66 195 500 532 -80 195 500 538 -77 195 500 568 -82 195 500 515 -64 195 500 516 -76 195 500 524 -78 195 500 519 -77 195 500 499 -85 195 500 557 -80 195 500 515 -76 195 500 540 -77 200 500 533 -78 200 500 557 -67 200 500 528 -66 200 500 538 -72 200 500 507 -78 200 500 553 -75 200 500 549 -71 200 500 524 -72 200 500 568 -75 200 500 565 -68 200 500 552 -74 200 500 519 -70 200 500 528 -81 200 500 543 -68 200 500 552 -75 200 500 531 -82 200 500 581 -70 200 500 549 -84 200 500 545 -81 200 500 583 -2 5 500 10 -3 5 500 11 -4 5 500 13 -2 5 500 9 -2 5 500 38 -3 5 500 11 -3 5 500 12 -3 5 500 10 -3 5 500 10 -2 5 500 10 -3 5 500 10 -3 5 500 15 -1 5 500 9 -3 5 500 9 -3 5 500 7 -3 5 500 14 -2 5 500 12 -3 5 500 10 -3 5 500 8 -2 5 500 11 -5 10 500 23 -4 10 500 21 -5 10 500 23 -5 10 500 23 -4 10 500 20 -5 10 500 22 -4 10 500 21 -5 10 500 24 -7 10 500 25 -5 10 500 26 -5 10 500 24 -4 10 500 21 -6 10 500 24 -2 10 500 26 -7 10 500 25 -5 10 500 20 -4 10 500 20 -6 10 500 21 -5 10 500 22 -5 10 500 21 -3 15 500 32 -2 15 500 31 -4 15 500 30 -1 15 500 27 -2 15 500 33 -3 15 500 31 -1 15 500 33 -2 15 500 32 -4 15 500 28 -4 15 500 31 -4 15 500 35 -4 15 500 27 -3 15 500 31 -2 15 500 28 -5 15 500 85 -3 15 500 33 -2 15 500 32 -1 15 500 33 -3 15 500 28 -2 15 500 36 -7 20 500 50 -9 20 500 52 -8 20 500 59 -7 20 500 53 -6 20 500 55 -8 20 500 50 -9 20 500 59 -7 20 500 97 -8 20 500 53 -7 20 500 57 -9 20 500 56 -7 20 500 54 -5 20 500 55 -6 20 500 53 -6 20 500 53 -8 20 500 53 -7 20 500 53 -9 20 500 64 -8 20 500 113 -6 20 500 53 -5 25 500 63 -7 25 500 65 -4 25 500 57 -6 25 500 68 -6 25 500 54 -8 25 500 68 -7 25 500 70 -4 25 500 56 -8 25 500 51 -5 25 500 56 -4 25 500 57 -4 25 500 74 -5 25 500 55 -5 25 500 56 -6 25 500 56 -8 25 500 63 -5 25 500 52 -6 25 500 69 -6 25 500 100 -6 25 500 60 -9 30 500 101 -11 30 500 105 -7 30 500 107 -6 30 500 96 -7 30 500 132 -9 30 500 118 -9 30 500 97 -7 
30 500 98 -6 30 500 103 -8 30 500 96 -9 30 500 138 -12 30 500 105 -11 30 500 104 -8 30 500 104 -11 30 500 96 -8 30 500 107 -8 30 500 142 -10 30 500 115 -4 30 500 108 -9 30 500 100 -13 35 500 112 -14 35 500 102 -15 35 500 130 -15 35 500 105 -14 35 500 113 -15 35 500 114 -11 35 500 104 -12 35 500 96 -13 35 500 137 -12 35 500 104 -12 35 500 111 -12 35 500 95 -16 35 500 101 -17 35 500 117 -14 35 500 142 -13 35 500 108 -14 35 500 104 -12 35 500 112 -11 35 500 90 -13 35 500 104 -16 40 500 112 -17 40 500 94 -17 40 500 100 -13 40 500 92 -16 40 500 98 -19 40 500 92 -14 40 500 148 -18 40 500 100 -17 40 500 97 -15 40 500 100 -14 40 500 102 -16 40 500 105 -17 40 500 148 -18 40 500 98 -17 40 500 93 -17 40 500 101 -16 40 500 93 -16 40 500 106 -17 40 500 152 -18 40 500 107 -13 45 500 153 -16 45 500 135 -11 45 500 124 -13 45 500 179 -13 45 500 132 -12 45 500 132 -13 45 500 121 -13 45 500 160 -12 45 500 134 -17 45 500 123 -10 45 500 164 -12 45 500 129 -12 45 500 194 -14 45 500 150 -14 45 500 149 -14 45 500 130 -14 45 500 168 -16 45 500 135 -12 45 500 161 -11 45 500 148 -20 50 500 150 -21 50 500 120 -17 50 500 119 -19 50 500 118 -15 50 500 111 -17 50 500 152 -20 50 500 115 -15 50 500 123 -17 50 500 150 -19 50 500 121 -18 50 500 168 -14 50 500 108 -20 50 500 115 -19 50 500 117 -15 50 500 109 -20 50 500 154 -20 50 500 120 -19 50 500 122 -20 50 500 123 -18 50 500 114 -16 55 500 154 -18 55 500 122 -19 55 500 126 -19 55 500 129 -18 55 500 121 -16 55 500 157 -20 55 500 132 -22 55 500 123 -16 55 500 112 -17 55 500 118 -23 55 500 160 -18 55 500 124 -19 55 500 116 -17 55 500 120 -20 55 500 120 -18 55 500 158 -16 55 500 121 -15 55 500 121 -18 55 500 134 -19 55 500 147 -23 60 500 119 -24 60 500 136 -24 60 500 121 -22 60 500 124 -23 60 500 176 -24 60 500 151 -22 60 500 125 -19 60 500 132 -21 60 500 142 -24 60 500 182 -22 60 500 132 -23 60 500 122 -24 60 500 134 -23 60 500 170 -19 60 500 144 -24 60 500 141 -19 60 500 130 -18 60 500 138 -22 60 500 173 -23 60 500 132 -24 65 500 147 -24 65 500 155 -26 65 500 190 -25 65 500 139 -19 65 500 162 -25 65 500 155 -22 65 500 184 -23 65 500 157 -23 65 500 158 -22 65 500 147 -23 65 500 179 -24 65 500 135 -25 65 500 140 -24 65 500 152 -22 65 500 181 -26 65 500 154 -26 65 500 147 -23 65 500 136 -25 65 500 174 -26 65 500 145 -27 70 500 192 -21 70 500 180 -27 70 500 228 -27 70 500 203 -28 70 500 179 -23 70 500 231 -25 70 500 213 -22 70 500 189 -27 70 500 232 -29 70 500 219 -24 70 500 175 -24 70 500 225 -25 70 500 187 -26 70 500 196 -29 70 500 221 -28 70 500 175 -24 70 500 181 -24 70 500 197 -24 70 500 208 -24 70 500 191 -31 75 500 195 -30 75 500 241 -29 75 500 187 -36 75 500 199 -28 75 500 208 -30 75 500 166 -31 75 500 184 -31 75 500 241 -30 75 500 182 -29 75 500 187 -29 75 500 212 -27 75 500 197 -27 75 500 173 -29 75 500 224 -29 75 500 197 -31 75 500 208 -30 75 500 193 -30 75 500 197 -30 75 500 184 -28 75 500 207 -25 80 500 290 -22 80 500 234 -18 80 500 260 -25 80 500 280 -20 80 500 232 -23 80 500 294 -20 80 500 247 -20 80 500 285 -26 80 500 251 -22 80 500 306 -25 80 500 223 -21 80 500 227 -24 80 500 259 -20 80 500 274 -23 80 500 273 -17 80 500 246 -22 80 500 249 -20 80 500 266 -22 80 500 269 -22 80 500 285 -29 85 500 187 -30 85 500 194 -31 85 500 228 -30 85 500 213 -29 85 500 216 -29 85 500 237 -30 85 500 207 -34 85 500 203 -27 85 500 243 -32 85 500 206 -27 85 500 199 -33 85 500 249 -31 85 500 219 -33 85 500 215 -31 85 500 224 -32 85 500 220 -31 85 500 199 -32 85 500 242 -32 85 500 203 -32 85 500 209 -31 90 500 251 -32 90 500 241 -31 90 500 274 -35 90 500 238 -34 90 500 248 -32 90 500 
276 -33 90 500 219 -35 90 500 246 -32 90 500 206 -35 90 500 235 -31 90 500 255 -33 90 500 245 -33 90 500 221 -35 90 500 263 -35 90 500 235 -38 90 500 269 -32 90 500 230 -38 90 500 215 -34 90 500 257 -36 90 500 214 -33 95 500 216 -37 95 500 265 -34 95 500 220 -40 95 500 251 -38 95 500 254 -37 95 500 205 -38 95 500 240 -40 95 500 216 -39 95 500 212 -33 95 500 259 -40 95 500 232 -39 95 500 223 -36 95 500 277 -37 95 500 221 -39 95 500 269 -39 95 500 246 -39 95 500 257 -36 95 500 273 -34 95 500 215 -37 95 500 219 -36 100 500 293 -44 100 500 283 -39 100 500 295 -38 100 500 229 -34 100 500 270 -38 100 500 269 -41 100 500 269 -37 100 500 323 -40 100 500 258 -41 100 500 305 -44 100 500 273 -39 100 500 313 -40 100 500 300 -35 100 500 290 -38 100 500 253 -35 100 500 313 -40 100 500 278 -36 100 500 247 -39 100 500 299 -36 100 500 279 -41 105 500 354 -38 105 500 318 -46 105 500 463 -47 105 500 361 -36 105 500 336 -35 105 500 319 -40 105 500 313 -38 105 500 335 -34 105 500 341 -40 105 500 346 -39 105 500 356 -37 105 500 297 -33 105 500 346 -36 105 500 344 -39 105 500 322 -41 105 500 342 -46 105 500 327 -36 105 500 354 -37 105 500 315 -39 105 500 362 -32 110 500 249 -33 110 500 259 -28 110 500 270 -36 110 500 259 -36 110 500 289 -33 110 500 255 -31 110 500 281 -31 110 500 257 -35 110 500 276 -35 110 500 271 -39 110 500 262 -35 110 500 287 -29 110 500 266 -34 110 500 296 -35 110 500 280 -37 110 500 286 -35 110 500 241 -31 110 500 249 -35 110 500 292 -26 110 500 247 -36 115 500 316 -40 115 500 293 -42 115 500 315 -35 115 500 299 -33 115 500 316 -38 115 500 282 -39 115 500 319 -40 115 500 292 -36 115 500 333 -40 115 500 298 -44 115 500 325 -39 115 500 294 -41 115 500 327 -41 115 500 315 -36 115 500 284 -38 115 500 317 -39 115 500 291 -39 115 500 312 -40 115 500 292 -36 115 500 309 -44 120 500 354 -52 120 500 403 -46 120 500 398 -38 120 500 315 -51 120 500 352 -44 120 500 353 -41 120 500 403 -39 120 500 411 -47 120 500 347 -40 120 500 354 -48 120 500 364 -42 120 500 403 -45 120 500 360 -49 120 500 351 -44 120 500 414 -42 120 500 361 -45 120 500 444 -43 120 500 358 -47 120 500 391 -43 120 500 382 -53 125 500 335 -52 125 500 361 -48 125 500 317 -53 125 500 368 -45 125 500 340 -49 125 500 360 -48 125 500 305 -43 125 500 373 -47 125 500 320 -47 125 500 357 -45 125 500 333 -46 125 500 339 -46 125 500 369 -46 125 500 342 -50 125 500 385 -52 125 500 331 -48 125 500 340 -41 125 500 304 -46 125 500 369 -40 125 500 352 -54 130 500 397 -57 130 500 388 -55 130 500 359 -47 130 500 397 -53 130 500 326 -51 130 500 357 -56 130 500 337 -48 130 500 365 -51 130 500 391 -56 130 500 374 -51 130 500 422 -57 130 500 333 -56 130 500 352 -47 130 500 407 -54 130 500 334 -45 130 500 341 -48 130 500 377 -51 130 500 395 -51 130 500 345 -54 130 500 403 -49 135 500 352 -41 135 500 325 -42 135 500 340 -30 135 500 306 -42 135 500 356 -43 135 500 320 -42 135 500 338 -49 135 500 317 -42 135 500 333 -40 135 500 328 -41 135 500 322 -44 135 500 328 -41 135 500 342 -41 135 500 316 -46 135 500 339 -38 135 500 317 -43 135 500 339 -47 135 500 309 -40 135 500 342 -37 135 500 314 -45 140 500 347 -54 140 500 330 -51 140 500 366 -53 140 500 357 -45 140 500 331 -54 140 500 360 -62 140 500 355 -44 140 500 362 -53 140 500 341 -48 140 500 347 -52 140 500 329 -52 140 500 341 -47 140 500 358 -49 140 500 334 -49 140 500 349 -50 140 500 317 -52 140 500 353 -44 140 500 330 -51 140 500 356 -43 140 500 356 -55 145 500 399 -50 145 500 402 -53 145 500 359 -54 145 500 411 -48 145 500 353 -45 145 500 365 -47 145 500 377 -46 145 500 380 -47 145 500 408 -47 145 500 358 
-48 145 500 377 -55 145 500 397 -44 145 500 375 -55 145 500 402 -55 145 500 389 -51 145 500 407 -62 145 500 406 -46 145 500 358 -55 145 500 424 -53 145 500 398 -57 150 500 374 -64 150 500 416 -60 150 500 378 -59 150 500 401 -56 150 500 381 -59 150 500 371 -51 150 500 428 -54 150 500 386 -57 150 500 348 -57 150 500 379 -58 150 500 395 -55 150 500 415 -58 150 500 384 -58 150 500 402 -62 150 500 416 -60 150 500 377 -60 150 500 387 -58 150 500 415 -52 150 500 385 -53 150 500 407 -60 155 500 444 -65 155 500 424 -67 155 500 435 -61 155 500 431 -61 155 500 433 -60 155 500 429 -57 155 500 424 -64 155 500 394 -67 155 500 443 -62 155 500 427 -66 155 500 422 -66 155 500 425 -65 155 500 435 -67 155 500 427 -63 155 500 430 -66 155 500 440 -66 155 500 419 -64 155 500 433 -57 155 500 449 -60 155 500 393 -46 160 500 472 -57 160 500 475 -49 160 500 473 -53 160 500 465 -67 160 500 486 -55 160 500 496 -51 160 500 465 -53 160 500 483 -62 160 500 490 -55 160 500 518 -52 160 500 484 -53 160 500 527 -52 160 500 487 -45 160 500 486 -57 160 500 502 -51 160 500 473 -55 160 500 500 -48 160 500 497 -51 160 500 509 -54 160 500 455 -55 165 500 478 -54 165 500 443 -51 165 500 439 -57 165 500 452 -54 165 500 459 -59 165 500 480 -49 165 500 442 -51 165 500 497 -56 165 500 520 -60 165 500 465 -59 165 500 443 -61 165 500 485 -55 165 500 506 -60 165 500 469 -64 165 500 436 -51 165 500 454 -58 165 500 480 -59 165 500 460 -54 165 500 437 -54 165 500 457 -70 170 500 469 -70 170 500 445 -75 170 500 494 -65 170 500 485 -67 170 500 476 -70 170 500 448 -69 170 500 465 -77 170 500 479 -64 170 500 480 -65 170 500 463 -66 170 500 477 -69 170 500 448 -70 170 500 458 -74 170 500 493 -65 170 500 504 -65 170 500 488 -69 170 500 456 -72 170 500 524 -68 170 500 489 -69 170 500 501 -62 175 500 431 -62 175 500 442 -56 175 500 451 -68 175 500 468 -63 175 500 444 -65 175 500 459 -64 175 500 440 -60 175 500 386 -67 175 500 461 -66 175 500 437 -60 175 500 438 -61 175 500 471 -64 175 500 433 -59 175 500 404 -64 175 500 428 -57 175 500 441 -63 175 500 466 -62 175 500 417 -60 175 500 453 -60 175 500 423 -63 180 500 426 -55 180 500 478 -63 180 500 467 -56 180 500 435 -56 180 500 423 -58 180 500 446 -62 180 500 462 -59 180 500 418 -60 180 500 456 -56 180 500 478 -53 180 500 438 -52 180 500 453 -59 180 500 440 -64 180 500 481 -57 180 500 433 -61 180 500 463 -63 180 500 443 -51 180 500 443 -55 180 500 465 -61 180 500 467 -69 185 500 541 -72 185 500 508 -67 185 500 527 -62 185 500 457 -66 185 500 520 -61 185 500 481 -60 185 500 479 -64 185 500 536 -60 185 500 468 -59 185 500 521 -64 185 500 524 -66 185 500 448 -64 185 500 498 -62 185 500 510 -57 185 500 501 -62 185 500 485 -67 185 500 511 -62 185 500 527 -59 185 500 494 -60 185 500 532 -63 190 500 496 -64 190 500 507 -71 190 500 537 -62 190 500 513 -60 190 500 522 -58 190 500 545 -56 190 500 490 -62 190 500 536 -62 190 500 571 -55 190 500 532 -56 190 500 542 -57 190 500 559 -61 190 500 519 -69 190 500 488 -63 190 500 539 -58 190 500 540 -53 190 500 511 -58 190 500 558 -63 190 500 554 -66 190 500 519 -64 195 500 542 -67 195 500 571 -62 195 500 521 -71 195 500 530 -71 195 500 579 -72 195 500 546 -75 195 500 554 -67 195 500 536 -64 195 500 555 -66 195 500 511 -69 195 500 562 -69 195 500 541 -72 195 500 545 -65 195 500 525 -69 195 500 549 -69 195 500 581 -66 195 500 592 -76 195 500 555 -69 195 500 534 -66 195 500 565 -66 200 500 554 -59 200 500 497 -71 200 500 532 -67 200 500 546 -69 200 500 557 -64 200 500 532 -63 200 500 541 -68 200 500 514 -63 200 500 541 -58 200 500 531 -59 200 500 519 -56 200 500 549 
-60 200 500 525 -57 200 500 490 -64 200 500 526 -66 200 500 550 -66 200 500 547 -66 200 500 580 -75 200 500 547 -67 200 500 552 -2 5 500 19 -2 5 500 14 -1 5 500 14 -2 5 500 12 -2 5 500 13 -2 5 500 14 -2 5 500 13 -2 5 500 12 -2 5 500 14 -2 5 500 13 -1 5 500 11 -1 5 500 12 -2 5 500 13 -3 5 500 12 -2 5 500 12 -2 5 500 13 -3 5 500 11 -3 5 500 13 -1 5 500 13 -1 5 500 11 -3 10 500 17 -3 10 500 19 -4 10 500 18 -4 10 500 18 -1 10 500 20 -4 10 500 17 -3 10 500 16 -4 10 500 18 -4 10 500 19 -3 10 500 17 -3 10 500 17 -4 10 500 21 -4 10 500 65 -3 10 500 26 -3 10 500 22 -4 10 500 19 -3 10 500 16 -3 10 500 18 -2 10 500 19 -3 10 500 18 -3 15 500 36 -5 15 500 42 -5 15 500 43 -5 15 500 39 -5 15 500 47 -4 15 500 41 -1 15 500 37 -4 15 500 46 -8 15 500 43 -4 15 500 38 -5 15 500 91 -6 15 500 48 -5 15 500 42 -5 15 500 47 -5 15 500 48 -6 15 500 39 -5 15 500 40 -5 15 500 48 -5 15 500 49 -5 15 500 53 -6 20 500 48 -7 20 500 53 -7 20 500 96 -7 20 500 57 -6 20 500 48 -6 20 500 56 -6 20 500 54 -6 20 500 63 -5 20 500 47 -5 20 500 51 -6 20 500 54 -8 20 500 59 -6 20 500 56 -6 20 500 107 -6 20 500 54 -7 20 500 51 -5 20 500 58 -5 20 500 49 -7 20 500 58 -6 20 500 51 -10 25 500 85 -11 25 500 78 -12 25 500 106 -10 25 500 74 -9 25 500 68 -11 25 500 75 -10 25 500 70 -11 25 500 82 -10 25 500 78 -12 25 500 82 -11 25 500 132 -10 25 500 71 -11 25 500 95 -10 25 500 83 -12 25 500 77 -8 25 500 73 -11 25 500 94 -10 25 500 132 -10 25 500 79 -11 25 500 71 -10 30 500 98 -14 30 500 99 -14 30 500 102 -11 30 500 99 -10 30 500 127 -12 30 500 96 -12 30 500 104 -12 30 500 94 -13 30 500 97 -9 30 500 89 -17 30 500 125 -13 30 500 94 -12 30 500 101 -11 30 500 97 -11 30 500 94 -11 30 500 89 -13 30 500 90 -13 30 500 128 -12 30 500 93 -12 30 500 105 -9 35 500 115 -12 35 500 115 -11 35 500 148 -10 35 500 106 -9 35 500 140 -14 35 500 131 -14 35 500 111 -8 35 500 156 -9 35 500 108 -13 35 500 117 -13 35 500 120 -8 35 500 111 -8 35 500 156 -9 35 500 122 -9 35 500 128 -10 35 500 127 -13 35 500 115 -10 35 500 118 -10 35 500 119 -11 35 500 120 -15 40 500 103 -14 40 500 106 -9 40 500 102 -15 40 500 123 -15 40 500 102 -15 40 500 96 -15 40 500 101 -14 40 500 95 -18 40 500 100 -15 40 500 134 -16 40 500 109 -16 40 500 106 -17 40 500 94 -16 40 500 116 -15 40 500 95 -18 40 500 135 -15 40 500 94 -17 40 500 99 -13 40 500 103 -15 40 500 109 -20 45 500 109 -15 45 500 142 -18 45 500 119 -20 45 500 121 -17 45 500 104 -20 45 500 121 -17 45 500 150 -21 45 500 126 -18 45 500 116 -20 45 500 110 -19 45 500 118 -20 45 500 147 -17 45 500 109 -15 45 500 119 -20 45 500 103 -20 45 500 107 -20 45 500 112 -20 45 500 146 -20 45 500 109 -19 45 500 105 -22 50 500 133 -21 50 500 135 -20 50 500 159 -18 50 500 126 -21 50 500 136 -21 50 500 129 -21 50 500 159 -17 50 500 131 -20 50 500 124 -21 50 500 124 -18 50 500 120 -22 50 500 155 -19 50 500 135 -20 50 500 126 -18 50 500 127 -20 50 500 118 -18 50 500 167 -21 50 500 129 -23 50 500 139 -15 50 500 131 -23 55 500 201 -22 55 500 158 -23 55 500 173 -26 55 500 170 -25 55 500 204 -24 55 500 168 -25 55 500 195 -23 55 500 187 -25 55 500 173 -27 55 500 185 -26 55 500 170 -25 55 500 184 -23 55 500 176 -22 55 500 175 -25 55 500 172 -24 55 500 208 -24 55 500 168 -26 55 500 182 -23 55 500 212 -22 55 500 168 -16 60 500 181 -23 60 500 166 -12 60 500 203 -19 60 500 169 -18 60 500 153 -15 60 500 197 -19 60 500 186 -18 60 500 159 -20 60 500 177 -16 60 500 194 -22 60 500 204 -19 60 500 181 -18 60 500 192 -17 60 500 172 -14 60 500 173 -20 60 500 148 -21 60 500 216 -15 60 500 185 -18 60 500 183 -18 60 500 206 -19 65 500 234 -16 65 500 229 -20 65 500 223 -22 65 
[… remaining deleted rows of the preceding benchmark-results file omitted; each removed line is a record of four integers …]
-79 195 500 495 -82 195 500 548 -81 195 500 524 -83 195 500 514 -81 195 500 542 -76 195 500 560 -75 195 500 506 -80 195 500 536 -78 195 500 550 -77 195 500 534 -84 195 500 509 -83 195 500 519 -85 195 500 503 -84 195 500 533 -82 195 500 563 -77 195 500 551 -72 200 500 579 -76 200 500 556 -70 200 500 558 -69 200 500 544 -79 200 500 547 -68 200 500 557 -79 200 500 551 -79 200 500 550 -80 200 500 554 -77 200 500 562 -76 200 500 538 -73 200 500 552 -82 200 500 583 -70 200 500 542 -73 200 500 568 -75 200 500 544 -83 200 500 562 -70 200 500 580 -77 200 500 571 -73 200 500 576 diff --git a/core/rewriting/indexing/test/results/std-tgt-varied-adj-1-40.txt b/core/rewriting/indexing/test/results/std-tgt-varied-adj-1-40.txt deleted file mode 100644 index e0c2a845..00000000 --- a/core/rewriting/indexing/test/results/std-tgt-varied-adj-1-40.txt +++ /dev/null @@ -1,16200 +0,0 @@ -15 100 50 3 -26 100 100 5 -20 100 150 6 -34 100 200 9 -48 100 250 9 -58 100 300 13 -44 100 350 14 -69 100 400 22 -79 100 450 17 -63 100 500 18 -112 100 550 27 -96 100 600 22 -97 100 650 25 -134 100 700 30 -116 100 750 28 -130 100 800 118 -161 100 850 36 -132 100 900 35 -177 100 950 42 -171 100 1000 45 -168 100 1050 44 -187 100 1100 49 -198 100 1150 49 -221 100 1200 54 -230 100 1250 57 -201 100 1300 63 -228 100 1350 58 -225 100 1400 201 -223 100 1450 63 -230 100 1500 72 -301 100 1550 77 -286 100 1600 79 -298 100 1650 82 -264 100 1700 86 -286 100 1750 219 -282 100 1800 81 -356 100 1850 87 -326 100 1900 91 -315 100 1950 86 -336 100 2000 86 -5 100 50 1 -21 100 100 3 -26 100 150 5 -34 100 200 7 -42 100 250 9 -46 100 300 11 -60 100 350 13 -77 100 400 18 -86 100 450 19 -80 100 500 19 -88 100 550 129 -117 100 600 27 -103 100 650 26 -105 100 700 30 -147 100 750 37 -143 100 800 37 -130 100 850 37 -165 100 900 41 -166 100 950 41 -162 100 1000 42 -182 100 1050 47 -190 100 1100 47 -185 100 1150 46 -193 100 1200 53 -207 100 1250 204 -210 100 1300 55 -224 100 1350 56 -254 100 1400 62 -270 100 1450 71 -243 100 1500 71 -274 100 1550 71 -258 100 1600 70 -272 100 1650 220 -289 100 1700 82 -272 100 1750 75 -318 100 1800 89 -347 100 1850 92 -302 100 1900 89 -316 100 1950 92 -351 100 2000 235 -9 100 50 2 -16 100 100 4 -30 100 150 8 -35 100 200 8 -43 100 250 10 -53 100 300 14 -60 100 350 17 -61 100 400 23 -100 100 450 28 -116 100 500 32 -98 100 550 26 -106 100 600 30 -104 100 650 38 -133 100 700 39 -125 100 750 45 -121 100 800 39 -139 100 850 50 -146 100 900 48 -130 100 950 200 -162 100 1000 53 -170 100 1050 68 -164 100 1100 61 -200 100 1150 67 -213 100 1200 74 -206 100 1250 67 -221 100 1300 74 -221 100 1350 81 -260 100 1400 227 -304 100 1450 100 -256 100 1500 80 -277 100 1550 89 -284 100 1600 105 -292 100 1650 104 -300 100 1700 247 -318 100 1750 107 -300 100 1800 106 -324 100 1850 119 -301 100 1900 114 -344 100 1950 117 -350 100 2000 118 -7 100 50 1 -15 100 100 4 -20 100 150 5 -33 100 200 9 -41 100 250 13 -44 100 300 12 -51 100 350 15 -70 100 400 17 -60 100 450 20 -102 100 500 30 -100 100 550 23 -98 100 600 30 -103 100 650 36 -129 100 700 36 -124 100 750 39 -149 100 800 43 -143 100 850 44 -142 100 900 49 -158 100 950 193 -131 100 1000 48 -185 100 1050 54 -170 100 1100 55 -185 100 1150 53 -196 100 1200 67 -208 100 1250 74 -222 100 1300 79 -194 100 1350 73 -240 100 1400 218 -252 100 1450 89 -248 100 1500 97 -255 100 1550 86 -264 100 1600 88 -288 100 1650 88 -291 100 1700 98 -281 100 1750 261 -282 100 1800 94 -299 100 1850 103 -341 100 1900 111 -310 100 1950 108 -349 100 2000 252 -6 100 50 3 -22 100 100 3 -22 100 150 4 -28 100 200 7 -34 100 250 10 -57 
100 300 11 -44 100 350 11 -56 100 400 16 -54 100 450 14 -67 100 500 20 -68 100 550 21 -59 100 600 24 -91 100 650 23 -95 100 700 27 -93 100 750 28 -96 100 800 31 -118 100 850 37 -125 100 900 36 -124 100 950 34 -150 100 1000 36 -153 100 1050 43 -153 100 1100 45 -149 100 1150 184 -163 100 1200 49 -195 100 1250 59 -162 100 1300 48 -188 100 1350 54 -191 100 1400 56 -198 100 1450 66 -174 100 1500 61 -181 100 1550 69 -208 100 1600 72 -199 100 1650 199 -200 100 1700 69 -228 100 1750 76 -241 100 1800 76 -248 100 1850 77 -238 100 1900 80 -250 100 1950 81 -249 100 2000 211 -6 100 50 1 -7 100 100 2 -11 100 150 4 -22 100 200 7 -27 100 250 9 -33 100 300 10 -40 100 350 13 -32 100 400 14 -45 100 450 18 -47 100 500 18 -61 100 550 23 -59 100 600 20 -56 100 650 22 -94 100 700 29 -72 100 750 30 -69 100 800 27 -84 100 850 34 -100 100 900 37 -103 100 950 40 -94 100 1000 41 -89 100 1050 37 -108 100 1100 43 -130 100 1150 193 -131 100 1200 53 -119 100 1250 51 -154 100 1300 55 -137 100 1350 60 -160 100 1400 61 -148 100 1450 72 -148 100 1500 67 -162 100 1550 65 -149 100 1600 67 -178 100 1650 204 -161 100 1700 74 -187 100 1750 74 -186 100 1800 84 -207 100 1850 81 -192 100 1900 84 -178 100 1950 83 -218 100 2000 223 -4 100 50 1 -13 100 100 3 -31 100 150 5 -34 100 200 6 -36 100 250 9 -42 100 300 9 -47 100 350 13 -72 100 400 17 -72 100 450 19 -74 100 500 19 -74 100 550 19 -99 100 600 24 -86 100 650 27 -109 100 700 28 -110 100 750 30 -112 100 800 34 -119 100 850 33 -125 100 900 33 -134 100 950 36 -97 100 1000 35 -160 100 1050 48 -150 100 1100 187 -147 100 1150 44 -175 100 1200 48 -182 100 1250 51 -164 100 1300 49 -193 100 1350 58 -175 100 1400 55 -202 100 1450 58 -209 100 1500 57 -173 100 1550 58 -246 100 1600 201 -203 100 1650 65 -216 100 1700 72 -227 100 1750 73 -253 100 1800 76 -238 100 1850 80 -236 100 1900 80 -279 100 1950 81 -287 100 2000 220 -5 100 50 1 -14 100 100 3 -11 100 150 3 -24 100 200 6 -28 100 250 8 -33 100 300 12 -45 100 350 16 -52 100 400 17 -48 100 450 18 -59 100 500 23 -64 100 550 23 -65 100 600 27 -79 100 650 27 -68 100 700 26 -92 100 750 33 -93 100 800 36 -107 100 850 36 -105 100 900 40 -130 100 950 43 -117 100 1000 51 -127 100 1050 192 -141 100 1100 52 -128 100 1150 49 -148 100 1200 61 -163 100 1250 61 -154 100 1300 56 -143 100 1350 64 -183 100 1400 72 -170 100 1450 63 -171 100 1500 70 -182 100 1550 75 -211 100 1600 83 -195 100 1650 76 -201 100 1700 87 -205 100 1750 85 -163 100 1800 76 -222 100 1850 86 -226 100 1900 231 -216 100 1950 90 -247 100 2000 113 -6 100 50 2 -18 100 100 5 -17 100 150 7 -33 100 200 9 -40 100 250 10 -35 100 300 14 -52 100 350 16 -54 100 400 17 -70 100 450 23 -91 100 500 26 -78 100 550 26 -79 100 600 28 -86 100 650 36 -103 100 700 41 -111 100 750 84 -126 100 800 39 -111 100 850 42 -150 100 900 51 -136 100 950 42 -154 100 1000 47 -166 100 1050 57 -175 100 1100 55 -167 100 1150 57 -206 100 1200 71 -206 100 1250 73 -189 100 1300 221 -197 100 1350 76 -216 100 1400 76 -208 100 1450 80 -221 100 1500 81 -220 100 1550 84 -237 100 1600 89 -264 100 1650 105 -241 100 1700 252 -265 100 1750 107 -276 100 1800 105 -277 100 1850 104 -297 100 1900 106 -284 100 1950 104 -275 100 2000 100 -3 100 50 1 -10 100 100 3 -19 100 150 6 -14 100 200 5 -18 100 250 7 -29 100 300 15 -38 100 350 15 -41 100 400 16 -44 100 450 19 -44 100 500 21 -52 100 550 19 -55 100 600 25 -58 100 650 30 -56 100 700 25 -65 100 750 30 -84 100 800 33 -72 100 850 36 -64 100 900 34 -82 100 950 36 -89 100 1000 39 -102 100 1050 39 -99 100 1100 169 -110 100 1150 46 -121 100 1200 57 -131 100 1250 54 -110 100 1300 48 -133 100 1350 56 
-123 100 1400 56 -129 100 1450 66 -137 100 1500 68 -128 100 1550 64 -163 100 1600 207 -146 100 1650 63 -163 100 1700 78 -153 100 1750 81 -189 100 1800 86 -164 100 1850 72 -153 100 1900 77 -186 100 1950 90 -173 100 2000 87 -10 100 50 1 -19 100 100 5 -13 100 150 5 -33 100 200 6 -37 100 250 8 -43 100 300 12 -42 100 350 12 -42 100 400 14 -61 100 450 17 -64 100 500 18 -67 100 550 19 -71 100 600 24 -75 100 650 25 -74 100 700 26 -97 100 750 32 -115 100 800 34 -117 100 850 36 -97 100 900 34 -114 100 950 42 -141 100 1000 47 -122 100 1050 45 -142 100 1100 187 -138 100 1150 49 -160 100 1200 48 -159 100 1250 50 -151 100 1300 48 -166 100 1350 57 -153 100 1400 56 -159 100 1450 55 -191 100 1500 63 -189 100 1550 60 -203 100 1600 203 -213 100 1650 69 -212 100 1700 69 -206 100 1750 70 -223 100 1800 73 -237 100 1850 78 -230 100 1900 77 -262 100 1950 86 -223 100 2000 227 -3 100 50 2 -5 100 100 2 -10 100 150 4 -16 100 200 7 -19 100 250 6 -19 100 300 9 -33 100 350 11 -46 100 400 14 -43 100 450 13 -56 100 500 18 -50 100 550 19 -61 100 600 20 -68 100 650 22 -60 100 700 22 -79 100 750 27 -81 100 800 29 -86 100 850 28 -97 100 900 31 -100 100 950 37 -104 100 1000 35 -96 100 1050 35 -107 100 1100 38 -132 100 1150 177 -109 100 1200 47 -106 100 1250 48 -101 100 1300 49 -155 100 1350 61 -169 100 1400 63 -144 100 1450 59 -147 100 1500 66 -149 100 1550 65 -161 100 1600 73 -198 100 1650 206 -167 100 1700 79 -180 100 1750 75 -180 100 1800 74 -219 100 1850 84 -196 100 1900 75 -189 100 1950 75 -216 100 2000 84 -2 100 50 1 -9 100 100 3 -18 100 150 5 -21 100 200 6 -21 100 250 86 -17 100 300 7 -33 100 350 12 -31 100 400 12 -37 100 450 13 -45 100 500 14 -32 100 550 17 -44 100 600 18 -58 100 650 20 -72 100 700 27 -47 100 750 24 -66 100 800 29 -79 100 850 31 -89 100 900 34 -81 100 950 30 -78 100 1000 37 -89 100 1050 38 -78 100 1100 36 -76 100 1150 40 -93 100 1200 50 -109 100 1250 43 -113 100 1300 49 -101 100 1350 47 -121 100 1400 48 -118 100 1450 57 -133 100 1500 59 -126 100 1550 63 -143 100 1600 68 -103 100 1650 66 -144 100 1700 66 -136 100 1750 188 -160 100 1800 68 -145 100 1850 74 -168 100 1900 77 -140 100 1950 72 -167 100 2000 86 -9 100 50 2 -9 100 100 4 -25 100 150 5 -39 100 200 7 -30 100 250 9 -48 100 300 12 -42 100 350 11 -44 100 400 14 -66 100 450 19 -72 100 500 23 -59 100 550 22 -68 100 600 25 -95 100 650 54 -76 100 700 31 -109 100 750 37 -127 100 800 38 -116 100 850 44 -113 100 900 40 -134 100 950 45 -142 100 1000 50 -146 100 1050 54 -155 100 1100 52 -169 100 1150 61 -166 100 1200 58 -177 100 1250 200 -176 100 1300 60 -204 100 1350 69 -192 100 1400 75 -229 100 1450 105 -203 100 1500 82 -192 100 1550 93 -219 100 1600 234 -209 100 1650 79 -232 100 1700 84 -254 100 1750 90 -241 100 1800 78 -274 100 1850 104 -263 100 1900 93 -284 100 1950 235 -286 100 2000 108 -14 100 50 3 -14 100 100 3 -14 100 150 4 -35 100 200 8 -56 100 250 13 -58 100 300 13 -57 100 350 17 -76 100 400 21 -73 100 450 20 -78 100 500 23 -116 100 550 26 -115 100 600 27 -130 100 650 32 -124 100 700 34 -139 100 750 42 -157 100 800 41 -157 100 850 46 -177 100 900 182 -187 100 950 43 -221 100 1000 52 -174 100 1050 46 -214 100 1100 60 -240 100 1150 60 -267 100 1200 59 -233 100 1250 64 -256 100 1300 69 -272 100 1350 68 -261 100 1400 73 -302 100 1450 83 -319 100 1500 77 -298 100 1550 83 -317 100 1600 87 -336 100 1650 90 -339 100 1700 91 -348 100 1750 86 -360 100 1800 95 -351 100 1850 98 -377 100 1900 104 -377 100 1950 105 -416 100 2000 247 -5 100 50 2 -24 100 100 4 -26 100 150 4 -45 100 200 10 -36 100 250 9 -71 100 300 15 -59 100 350 15 -83 100 400 18 -113 100 450 26 
-94 100 500 22 -120 100 550 29 -122 100 600 28 -142 100 650 38 -121 100 700 36 -150 100 750 43 -167 100 800 40 -168 100 850 43 -159 100 900 42 -236 100 950 53 -225 100 1000 209 -200 100 1050 59 -235 100 1100 55 -206 100 1150 57 -241 100 1200 62 -265 100 1250 70 -274 100 1300 77 -261 100 1350 72 -254 100 1400 72 -266 100 1450 219 -323 100 1500 78 -314 100 1550 86 -356 100 1600 86 -371 100 1650 102 -316 100 1700 100 -334 100 1750 94 -333 100 1800 92 -337 100 1850 103 -377 100 1900 100 -461 100 1950 129 -420 100 2000 122 -9 100 50 2 -18 100 100 4 -19 100 150 5 -30 100 200 12 -31 100 250 10 -40 100 300 123 -49 100 350 14 -54 100 400 16 -59 100 450 21 -77 100 500 24 -83 100 550 28 -79 100 600 28 -105 100 650 30 -100 100 700 34 -117 100 750 32 -110 100 800 32 -146 100 850 37 -160 100 900 52 -153 100 950 45 -152 100 1000 50 -147 100 1050 50 -207 100 1100 191 -180 100 1150 64 -183 100 1200 65 -168 100 1250 65 -194 100 1300 60 -205 100 1350 76 -232 100 1400 71 -220 100 1450 73 -248 100 1500 86 -257 100 1550 217 -269 100 1600 92 -261 100 1650 84 -256 100 1700 89 -288 100 1750 97 -280 100 1800 90 -295 100 1850 226 -316 100 1900 102 -316 100 1950 104 -343 100 2000 122 -8 100 50 1 -9 100 100 4 -13 100 150 5 -28 100 200 5 -28 100 250 6 -44 100 300 9 -52 100 350 11 -54 100 400 14 -53 100 450 14 -60 100 500 15 -69 100 550 20 -74 100 600 21 -75 100 650 21 -78 100 700 22 -75 100 750 132 -100 100 800 33 -110 100 850 36 -108 100 900 28 -127 100 950 33 -112 100 1000 32 -149 100 1050 37 -139 100 1100 37 -134 100 1150 40 -161 100 1200 42 -158 100 1250 45 -158 100 1300 44 -198 100 1350 55 -153 100 1400 49 -188 100 1450 196 -192 100 1500 56 -209 100 1550 58 -236 100 1600 63 -216 100 1650 69 -223 100 1700 61 -217 100 1750 67 -192 100 1800 64 -277 100 1850 70 -247 100 1900 73 -260 100 1950 78 -244 100 2000 75 -21 100 50 2 -24 100 100 3 -23 100 150 6 -41 100 200 9 -59 100 250 9 -53 100 300 11 -84 100 350 17 -80 100 400 15 -87 100 450 19 -103 100 500 18 -134 100 550 25 -119 100 600 25 -121 100 650 30 -127 100 700 29 -139 100 750 30 -150 100 800 33 -182 100 850 160 -169 100 900 48 -199 100 950 40 -229 100 1000 50 -216 100 1050 46 -216 100 1100 49 -229 100 1150 58 -254 100 1200 57 -219 100 1250 56 -279 100 1300 62 -273 100 1350 66 -310 100 1400 205 -303 100 1450 66 -302 100 1500 72 -307 100 1550 71 -302 100 1600 76 -347 100 1650 77 -377 100 1700 83 -391 100 1750 223 -366 100 1800 83 -411 100 1850 95 -353 100 1900 85 -366 100 1950 90 -401 100 2000 94 -2 100 50 0 -8 100 100 2 -15 100 150 4 -20 100 200 7 -29 100 250 7 -32 100 300 10 -39 100 350 10 -34 100 400 11 -55 100 450 14 -43 100 500 15 -57 100 550 16 -51 100 600 121 -64 100 650 22 -69 100 700 24 -83 100 750 27 -84 100 800 30 -74 100 850 29 -108 100 900 34 -97 100 950 32 -128 100 1000 39 -102 100 1050 36 -93 100 1100 36 -106 100 1150 41 -112 100 1200 42 -127 100 1250 46 -122 100 1300 46 -132 100 1350 178 -151 100 1400 54 -156 100 1450 51 -165 100 1500 55 -167 100 1550 60 -163 100 1600 63 -168 100 1650 70 -177 100 1700 60 -167 100 1750 66 -177 100 1800 194 -190 100 1850 73 -183 100 1900 75 -196 100 1950 74 -212 100 2000 77 -7 100 50 2 -18 100 100 3 -17 100 150 4 -18 100 200 6 -23 100 250 8 -34 100 300 10 -33 100 350 11 -46 100 400 15 -47 100 450 16 -42 100 500 15 -46 100 550 19 -64 100 600 22 -78 100 650 26 -69 100 700 24 -88 100 750 31 -73 100 800 99 -77 100 850 32 -96 100 900 38 -111 100 950 40 -86 100 1000 39 -106 100 1050 43 -117 100 1100 42 -135 100 1150 50 -130 100 1200 53 -127 100 1250 52 -159 100 1300 56 -136 100 1350 60 -143 100 1400 185 -177 100 1450 56 -177 
100 1500 65 -162 100 1550 60 -174 100 1600 67 -174 100 1650 66 -193 100 1700 74 -197 100 1750 76 -180 100 1800 75 -192 100 1850 78 -225 100 1900 79 -197 100 1950 79 -212 100 2000 85 -4 100 50 1 -14 100 100 4 -23 100 150 4 -33 100 200 8 -21 100 250 6 -44 100 300 13 -43 100 350 12 -60 100 400 17 -69 100 450 17 -60 100 500 17 -67 100 550 18 -84 100 600 24 -95 100 650 25 -95 100 700 32 -116 100 750 32 -137 100 800 43 -132 100 850 34 -139 100 900 39 -120 100 950 34 -135 100 1000 42 -152 100 1050 44 -162 100 1100 44 -172 100 1150 52 -166 100 1200 49 -186 100 1250 54 -194 100 1300 55 -197 100 1350 59 -214 100 1400 63 -206 100 1450 65 -227 100 1500 64 -218 100 1550 70 -260 100 1600 81 -253 100 1650 72 -241 100 1700 76 -285 100 1750 219 -267 100 1800 90 -275 100 1850 86 -271 100 1900 90 -308 100 1950 96 -323 100 2000 97 -5 100 50 1 -5 100 100 2 -23 100 150 4 -30 100 200 7 -39 100 250 8 -32 100 300 12 -47 100 350 10 -56 100 400 15 -51 100 450 15 -70 100 500 129 -80 100 550 21 -68 100 600 20 -80 100 650 22 -79 100 700 23 -95 100 750 29 -106 100 800 32 -117 100 850 31 -140 100 900 38 -100 100 950 37 -134 100 1000 38 -142 100 1050 41 -150 100 1100 42 -143 100 1150 46 -182 100 1200 49 -169 100 1250 54 -192 100 1300 60 -168 100 1350 53 -191 100 1400 60 -220 100 1450 59 -197 100 1500 61 -238 100 1550 66 -222 100 1600 70 -206 100 1650 68 -238 100 1700 204 -270 100 1750 76 -246 100 1800 81 -261 100 1850 88 -306 100 1900 84 -279 100 1950 87 -258 100 2000 85 -9 100 50 2 -16 100 100 2 -24 100 150 6 -31 100 200 7 -33 100 250 9 -49 100 300 14 -53 100 350 104 -56 100 400 18 -61 100 450 20 -65 100 500 21 -95 100 550 25 -106 100 600 29 -97 100 650 27 -113 100 700 38 -105 100 750 30 -137 100 800 39 -113 100 850 37 -134 100 900 41 -144 100 950 45 -157 100 1000 49 -163 100 1050 49 -152 100 1100 44 -171 100 1150 196 -208 100 1200 67 -197 100 1250 62 -186 100 1300 65 -201 100 1350 72 -211 100 1400 73 -231 100 1450 72 -220 100 1500 74 -226 100 1550 74 -238 100 1600 81 -243 100 1650 80 -252 100 1700 86 -250 100 1750 87 -277 100 1800 95 -244 100 1850 85 -298 100 1900 225 -298 100 1950 101 -306 100 2000 99 -3 100 50 1 -13 100 100 3 -23 100 150 5 -23 100 200 5 -44 100 250 12 -36 100 300 12 -50 100 350 12 -67 100 400 19 -64 100 450 21 -76 100 500 19 -72 100 550 22 -105 100 600 30 -89 100 650 26 -103 100 700 28 -138 100 750 39 -131 100 800 38 -140 100 850 158 -167 100 900 39 -183 100 950 52 -172 100 1000 41 -181 100 1050 54 -206 100 1100 55 -194 100 1150 52 -218 100 1200 54 -220 100 1250 62 -201 100 1300 54 -228 100 1350 66 -235 100 1400 197 -250 100 1450 67 -228 100 1500 67 -229 100 1550 75 -247 100 1600 70 -308 100 1650 86 -306 100 1700 85 -302 100 1750 82 -285 100 1800 78 -292 100 1850 84 -345 100 1900 97 -390 100 1950 95 -326 100 2000 92 -9 100 50 2 -12 100 100 5 -21 100 150 7 -24 100 200 7 -31 100 250 10 -30 100 300 9 -46 100 350 14 -56 100 400 21 -60 100 450 25 -68 100 500 26 -78 100 550 23 -79 100 600 27 -72 100 650 29 -101 100 700 37 -107 100 750 34 -91 100 800 167 -101 100 850 39 -99 100 900 38 -109 100 950 40 -115 100 1000 39 -96 100 1050 47 -149 100 1100 54 -153 100 1150 55 -141 100 1200 53 -149 100 1250 58 -146 100 1300 61 -160 100 1350 219 -175 100 1400 76 -205 100 1450 74 -188 100 1500 77 -195 100 1550 82 -206 100 1600 84 -212 100 1650 88 -227 100 1700 240 -210 100 1750 96 -191 100 1800 79 -236 100 1850 96 -219 100 1900 92 -247 100 1950 113 -265 100 2000 110 -7 100 50 2 -28 100 100 5 -32 100 150 5 -25 100 200 7 -34 100 250 7 -59 100 300 11 -74 100 350 14 -67 100 400 14 -88 100 450 21 -76 100 500 20 -100 100 550 
26 -98 100 600 26 -113 100 650 31 -132 100 700 31 -145 100 750 34 -138 100 800 34 -187 100 850 45 -161 100 900 44 -168 100 950 44 -179 100 1000 48 -199 100 1050 53 -189 100 1100 203 -205 100 1150 53 -232 100 1200 61 -234 100 1250 60 -259 100 1300 65 -230 100 1350 68 -277 100 1400 66 -285 100 1450 69 -281 100 1500 77 -295 100 1550 231 -296 100 1600 87 -330 100 1650 93 -271 100 1700 81 -314 100 1750 87 -329 100 1800 92 -303 100 1850 92 -344 100 1900 90 -365 100 1950 98 -339 100 2000 97 -7 100 50 1 -17 100 100 7 -19 100 150 4 -39 100 200 10 -43 100 250 9 -41 100 300 12 -51 100 350 11 -64 100 400 15 -65 100 450 22 -63 100 500 18 -69 100 550 20 -97 100 600 22 -111 100 650 28 -107 100 700 27 -117 100 750 31 -129 100 800 34 -120 100 850 40 -126 100 900 39 -140 100 950 45 -132 100 1000 37 -156 100 1050 54 -162 100 1100 48 -173 100 1150 51 -166 100 1200 55 -197 100 1250 58 -156 100 1300 50 -206 100 1350 59 -225 100 1400 201 -210 100 1450 63 -237 100 1500 68 -188 100 1550 75 -228 100 1600 74 -276 100 1650 78 -257 100 1700 73 -248 100 1750 79 -276 100 1800 219 -306 100 1850 81 -309 100 1900 87 -316 100 1950 93 -302 100 2000 90 -11 100 50 3 -27 100 100 5 -40 100 150 11 -44 100 200 16 -75 100 250 38 -59 100 300 16 -62 100 350 21 -80 100 400 19 -109 100 450 24 -132 100 500 144 -142 100 550 38 -128 100 600 37 -160 100 650 36 -130 100 700 37 -167 100 750 45 -175 100 800 49 -209 100 850 55 -221 100 900 54 -172 100 950 54 -241 100 1000 69 -225 100 1050 77 -244 100 1100 223 -277 100 1150 69 -260 100 1200 100 -281 100 1250 78 -315 100 1300 77 -322 100 1350 84 -308 100 1400 92 -326 100 1450 236 -341 100 1500 95 -379 100 1550 98 -370 100 1600 106 -339 100 1650 103 -386 100 1700 137 -389 100 1750 105 -408 100 1800 116 -372 100 1850 116 -418 100 1900 117 -452 100 1950 122 -450 100 2000 270 -8 100 50 1 -17 100 100 3 -29 100 150 6 -34 100 200 10 -47 100 250 12 -54 100 300 16 -74 100 350 16 -69 100 400 19 -68 100 450 18 -68 100 500 21 -89 100 550 25 -94 100 600 26 -108 100 650 28 -112 100 700 34 -108 100 750 37 -106 100 800 34 -119 100 850 36 -125 100 900 41 -156 100 950 47 -161 100 1000 200 -187 100 1050 44 -187 100 1100 59 -189 100 1150 55 -186 100 1200 56 -195 100 1250 61 -203 100 1300 63 -200 100 1350 60 -222 100 1400 71 -229 100 1450 67 -232 100 1500 218 -233 100 1550 77 -256 100 1600 75 -258 100 1650 74 -278 100 1700 83 -273 100 1750 85 -272 100 1800 90 -258 100 1850 231 -308 100 1900 93 -307 100 1950 98 -301 100 2000 103 -10 100 50 3 -22 100 100 4 -21 100 150 7 -43 100 200 13 -36 100 250 10 -60 100 300 14 -70 100 350 20 -65 100 400 17 -74 100 450 18 -82 100 500 22 -107 100 550 25 -117 100 600 34 -106 100 650 32 -111 100 700 180 -140 100 750 45 -125 100 800 40 -160 100 850 45 -158 100 900 47 -168 100 950 46 -184 100 1000 53 -175 100 1050 53 -164 100 1100 54 -189 100 1150 54 -210 100 1200 60 -215 100 1250 215 -222 100 1300 67 -281 100 1350 80 -251 100 1400 81 -245 100 1450 78 -283 100 1500 92 -228 100 1550 75 -271 100 1600 243 -268 100 1650 91 -311 100 1700 92 -327 100 1750 109 -348 100 1800 113 -362 100 1850 101 -345 100 1900 262 -316 100 1950 101 -321 100 2000 101 -9 100 50 2 -13 100 100 3 -19 100 150 6 -29 100 200 7 -44 100 250 12 -38 100 300 12 -41 100 350 15 -65 100 400 17 -77 100 450 19 -69 100 500 19 -96 100 550 29 -67 100 600 24 -94 100 650 31 -108 100 700 32 -115 100 750 39 -136 100 800 39 -143 100 850 183 -122 100 900 42 -140 100 950 46 -137 100 1000 45 -142 100 1050 50 -178 100 1100 56 -149 100 1150 52 -185 100 1200 57 -208 100 1250 67 -203 100 1300 70 -214 100 1350 216 -184 100 1400 61 -222 100 1450 
73 -221 100 1500 74 -250 100 1550 80 -267 100 1600 85 -235 100 1650 82 -255 100 1700 88 -244 100 1750 238 -284 100 1800 88 -283 100 1850 98 -300 100 1900 106 -271 100 1950 97 -309 100 2000 105 -7 100 50 1 -22 100 100 6 -33 100 150 5 -35 100 200 122 -57 100 250 8 -76 100 300 14 -55 100 350 15 -92 100 400 20 -67 100 450 22 -111 100 500 24 -113 100 550 26 -90 100 600 27 -145 100 650 31 -149 100 700 33 -170 100 750 39 -195 100 800 50 -200 100 850 41 -191 100 900 42 -208 100 950 44 -232 100 1000 48 -211 100 1050 49 -236 100 1100 212 -238 100 1150 64 -255 100 1200 61 -260 100 1250 69 -279 100 1300 70 -322 100 1350 76 -324 100 1400 74 -289 100 1450 77 -316 100 1500 232 -340 100 1550 92 -357 100 1600 86 -326 100 1650 94 -340 100 1700 97 -393 100 1750 100 -339 100 1800 237 -446 100 1850 100 -379 100 1900 102 -411 100 1950 106 -380 100 2000 99 -0 100 50 1 -17 100 100 2 -13 100 150 5 -27 100 200 10 -43 100 250 13 -47 100 300 14 -60 100 350 21 -61 100 400 20 -71 100 450 26 -82 100 500 23 -71 100 550 142 -77 100 600 30 -90 100 650 34 -89 100 700 34 -118 100 750 39 -104 100 800 40 -112 100 850 45 -104 100 900 40 -168 100 950 54 -126 100 1000 48 -140 100 1050 63 -179 100 1100 54 -179 100 1150 70 -174 100 1200 216 -166 100 1250 62 -199 100 1300 72 -169 100 1350 66 -177 100 1400 68 -180 100 1450 74 -200 100 1500 72 -210 100 1550 75 -231 100 1600 237 -204 100 1650 84 -249 100 1700 98 -247 100 1750 81 -250 100 1800 106 -217 100 1850 99 -251 100 1900 232 -277 100 1950 105 -259 100 2000 105 -6 100 50 1 -15 100 100 4 -24 100 150 5 -28 100 200 8 -59 100 250 13 -61 100 300 15 -54 100 350 14 -71 100 400 21 -96 100 450 25 -89 100 500 25 -99 100 550 27 -93 100 600 30 -120 100 650 37 -124 100 700 35 -132 100 750 40 -142 100 800 180 -118 100 850 42 -126 100 900 44 -151 100 950 47 -156 100 1000 55 -156 100 1050 55 -222 100 1100 61 -183 100 1150 62 -226 100 1200 65 -206 100 1250 75 -185 100 1300 210 -246 100 1350 81 -231 100 1400 75 -265 100 1450 87 -281 100 1500 90 -271 100 1550 87 -266 100 1600 93 -285 100 1650 248 -300 100 1700 101 -289 100 1750 92 -332 100 1800 109 -327 100 1850 103 -311 100 1900 109 -401 100 1950 274 -325 100 2000 112 -5 100 50 1 -14 100 100 3 -17 100 150 5 -30 100 200 8 -29 100 250 8 -34 100 300 12 -40 100 350 17 -60 100 400 19 -65 100 450 21 -77 100 500 22 -86 100 550 26 -90 100 600 26 -101 100 650 28 -103 100 700 34 -100 100 750 35 -98 100 800 36 -128 100 850 45 -113 100 900 182 -132 100 950 45 -118 100 1000 45 -139 100 1050 48 -144 100 1100 45 -151 100 1150 51 -177 100 1200 60 -203 100 1250 67 -165 100 1300 55 -199 100 1350 65 -187 100 1400 63 -229 100 1450 214 -184 100 1500 66 -208 100 1550 76 -224 100 1600 78 -212 100 1650 84 -245 100 1700 85 -248 100 1750 84 -265 100 1800 232 -257 100 1850 86 -259 100 1900 85 -263 100 1950 98 -278 100 2000 97 -3 100 50 1 -11 100 100 2 -19 100 150 5 -21 100 200 5 -22 100 250 7 -28 100 300 14 -46 100 350 11 -36 100 400 12 -43 100 450 14 -48 100 500 16 -50 100 550 20 -57 100 600 28 -59 100 650 147 -75 100 700 27 -64 100 750 34 -80 100 800 33 -97 100 850 41 -86 100 900 34 -92 100 950 35 -104 100 1000 41 -123 100 1050 42 -128 100 1100 55 -108 100 1150 50 -134 100 1200 54 -120 100 1250 52 -125 100 1300 181 -139 100 1350 62 -138 100 1400 65 -143 100 1450 61 -146 100 1500 61 -157 100 1550 69 -149 100 1600 65 -182 100 1650 72 -154 100 1700 66 -149 100 1750 205 -198 100 1800 118 -196 100 1850 85 -218 100 1900 83 -177 100 1950 87 -217 100 2000 91 -10 100 50 2 -16 100 100 3 -21 100 150 7 -22 100 200 7 -33 100 250 10 -57 100 300 14 -60 100 350 16 -62 100 400 117 -68 100 
450 20 -92 100 500 24 -86 100 550 31 -76 100 600 24 -101 100 650 32 -119 100 700 36 -103 100 750 35 -118 100 800 41 -140 100 850 46 -138 100 900 45 -149 100 950 45 -166 100 1000 54 -171 100 1050 55 -165 100 1100 54 -176 100 1150 221 -199 100 1200 61 -183 100 1250 76 -218 100 1300 70 -208 100 1350 71 -204 100 1400 74 -238 100 1450 77 -240 100 1500 90 -224 100 1550 80 -270 100 1600 89 -277 100 1650 89 -254 100 1700 90 -238 100 1750 93 -254 100 1800 92 -248 100 1850 230 -291 100 1900 110 -301 100 1950 107 -339 100 2000 114 -8 100 50 2 -20 100 100 5 -21 100 150 6 -35 100 200 12 -36 100 250 12 -39 100 300 12 -43 100 350 11 -44 100 400 16 -59 100 450 19 -58 100 500 27 -88 100 550 23 -97 100 600 34 -94 100 650 156 -107 100 700 33 -104 100 750 39 -110 100 800 42 -115 100 850 35 -136 100 900 49 -124 100 950 41 -132 100 1000 50 -169 100 1050 47 -156 100 1100 56 -167 100 1150 55 -138 100 1200 62 -168 100 1250 198 -210 100 1300 69 -171 100 1350 65 -212 100 1400 65 -187 100 1450 66 -203 100 1500 76 -210 100 1550 77 -219 100 1600 80 -238 100 1650 212 -247 100 1700 84 -240 100 1750 95 -236 100 1800 85 -278 100 1850 111 -277 100 1900 103 -276 100 1950 246 -265 100 2000 107 -6 100 50 1 -9 100 100 2 -25 100 150 5 -26 100 200 6 -35 100 250 8 -33 100 300 10 -45 100 350 12 -41 100 400 14 -40 100 450 15 -50 100 500 18 -52 100 550 21 -65 100 600 22 -85 100 650 26 -76 100 700 25 -72 100 750 26 -91 100 800 31 -95 100 850 30 -101 100 900 35 -109 100 950 36 -94 100 1000 36 -131 100 1050 174 -146 100 1100 47 -123 100 1150 46 -129 100 1200 50 -141 100 1250 51 -166 100 1300 55 -141 100 1350 52 -164 100 1400 59 -151 100 1450 51 -183 100 1500 64 -164 100 1550 60 -181 100 1600 68 -193 100 1650 69 -190 100 1700 71 -186 100 1750 72 -198 100 1800 75 -186 100 1850 74 -192 100 1900 79 -202 100 1950 206 -229 100 2000 84 -6 100 50 1 -12 100 100 3 -22 100 150 4 -50 100 200 7 -56 100 250 9 -33 100 300 9 -61 100 350 11 -73 100 400 13 -76 100 450 15 -86 100 500 17 -114 100 550 21 -111 100 600 21 -108 100 650 26 -100 100 700 25 -134 100 750 29 -130 100 800 28 -147 100 850 31 -138 100 900 35 -158 100 950 36 -167 100 1000 42 -163 100 1050 38 -195 100 1100 43 -210 100 1150 49 -208 100 1200 46 -210 100 1250 47 -232 100 1300 50 -232 100 1350 54 -231 100 1400 55 -223 100 1450 56 -253 100 1500 61 -275 100 1550 68 -267 100 1600 211 -276 100 1650 65 -279 100 1700 70 -310 100 1750 74 -316 100 1800 81 -346 100 1850 77 -319 100 1900 76 -339 100 1950 225 -343 100 2000 92 -8 100 50 2 -16 100 100 3 -31 100 150 6 -35 100 200 9 -36 100 250 10 -54 100 300 15 -45 100 350 19 -58 100 400 18 -83 100 450 21 -86 100 500 28 -96 100 550 26 -105 100 600 30 -100 100 650 32 -86 100 700 33 -123 100 750 35 -148 100 800 41 -128 100 850 43 -155 100 900 59 -157 100 950 184 -175 100 1000 46 -154 100 1050 52 -183 100 1100 56 -193 100 1150 57 -202 100 1200 60 -205 100 1250 78 -220 100 1300 70 -238 100 1350 73 -218 100 1400 215 -225 100 1450 74 -264 100 1500 85 -258 100 1550 83 -269 100 1600 85 -258 100 1650 86 -285 100 1700 93 -233 100 1750 219 -285 100 1800 99 -301 100 1850 99 -283 100 1900 99 -329 100 1950 114 -309 100 2000 109 -10 100 50 1 -16 100 100 3 -15 100 150 3 -40 100 200 6 -33 100 250 8 -60 100 300 124 -46 100 350 11 -56 100 400 14 -57 100 450 15 -70 100 500 20 -73 100 550 20 -84 100 600 20 -104 100 650 25 -90 100 700 25 -102 100 750 27 -88 100 800 28 -129 100 850 32 -139 100 900 35 -147 100 950 38 -131 100 1000 41 -121 100 1050 42 -205 100 1100 46 -166 100 1150 46 -167 100 1200 195 -174 100 1250 49 -178 100 1300 55 -200 100 1350 54 -221 100 1400 59 -248 100 
1450 62 -214 100 1500 59 -211 100 1550 57 -218 100 1600 62 -268 100 1650 73 -281 100 1700 212 -228 100 1750 68 -281 100 1800 73 -276 100 1850 78 -236 100 1900 80 -290 100 1950 85 -289 100 2000 86 -6 100 50 1 -18 100 100 2 -31 100 150 5 -25 100 200 6 -40 100 250 7 -40 100 300 9 -49 100 350 105 -72 100 400 18 -69 100 450 15 -82 100 500 16 -105 100 550 20 -105 100 600 20 -110 100 650 24 -87 100 700 22 -113 100 750 26 -133 100 800 30 -139 100 850 29 -151 100 900 34 -132 100 950 35 -171 100 1000 40 -167 100 1050 41 -196 100 1100 42 -176 100 1150 43 -223 100 1200 49 -208 100 1250 192 -201 100 1300 49 -224 100 1350 56 -246 100 1400 53 -235 100 1450 56 -266 100 1500 60 -217 100 1550 57 -240 100 1600 64 -249 100 1650 62 -254 100 1700 62 -266 100 1750 64 -327 100 1800 76 -323 100 1850 78 -324 100 1900 76 -338 100 1950 80 -330 100 2000 80 -6 100 50 2 -10 100 100 2 -22 100 150 6 -39 100 200 9 -43 100 250 12 -43 100 300 13 -58 100 350 15 -61 100 400 17 -60 100 450 18 -71 100 500 125 -69 100 550 22 -82 100 600 26 -83 100 650 32 -91 100 700 31 -106 100 750 38 -115 100 800 34 -105 100 850 37 -144 100 900 44 -144 100 950 44 -161 100 1000 48 -136 100 1050 47 -171 100 1100 55 -155 100 1150 48 -157 100 1200 196 -190 100 1250 65 -191 100 1300 59 -195 100 1350 73 -213 100 1400 70 -236 100 1450 73 -246 100 1500 78 -236 100 1550 75 -232 100 1600 221 -263 100 1650 83 -254 100 1700 81 -254 100 1750 84 -267 100 1800 88 -267 100 1850 99 -291 100 1900 99 -274 100 1950 244 -306 100 2000 108 -12 100 50 1 -19 100 100 2 -17 100 150 4 -21 100 200 5 -28 100 250 7 -48 100 300 13 -45 100 350 11 -62 100 400 14 -64 100 450 14 -50 100 500 14 -63 100 550 16 -58 100 600 18 -88 100 650 48 -69 100 700 23 -113 100 750 29 -94 100 800 29 -96 100 850 30 -124 100 900 37 -145 100 950 37 -128 100 1000 40 -109 100 1050 36 -178 100 1100 69 -185 100 1150 44 -126 100 1200 77 -160 100 1250 74 -169 100 1300 46 -179 100 1350 52 -175 100 1400 52 -206 100 1450 58 -196 100 1500 186 -175 100 1550 63 -183 100 1600 65 -221 100 1650 92 -213 100 1700 65 -241 100 1750 69 -252 100 1800 75 -213 100 1850 75 -249 100 1900 207 -246 100 1950 83 -224 100 2000 81 -5 100 50 2 -14 100 100 6 -19 100 150 4 -24 100 200 7 -22 100 250 7 -27 100 300 10 -41 100 350 12 -42 100 400 18 -52 100 450 16 -62 100 500 19 -69 100 550 22 -68 100 600 24 -75 100 650 28 -78 100 700 31 -103 100 750 36 -91 100 800 31 -88 100 850 38 -86 100 900 35 -91 100 950 35 -110 100 1000 53 -128 100 1050 42 -126 100 1100 52 -131 100 1150 54 -122 100 1200 47 -125 100 1250 54 -144 100 1300 57 -154 100 1350 61 -151 100 1400 198 -172 100 1450 62 -167 100 1500 60 -170 100 1550 74 -167 100 1600 68 -179 100 1650 71 -180 100 1700 78 -200 100 1750 74 -174 100 1800 210 -236 100 1850 98 -210 100 1900 80 -237 100 1950 93 -222 100 2000 87 -5 100 50 1 -12 100 100 3 -22 100 150 5 -20 100 200 6 -30 100 250 8 -44 100 300 11 -52 100 350 16 -37 100 400 13 -51 100 450 13 -68 100 500 20 -68 100 550 24 -67 100 600 24 -77 100 650 24 -82 100 700 133 -86 100 750 38 -85 100 800 32 -91 100 850 34 -106 100 900 40 -134 100 950 41 -122 100 1000 39 -137 100 1050 39 -138 100 1100 48 -144 100 1150 49 -144 100 1200 62 -153 100 1250 55 -170 100 1300 55 -175 100 1350 187 -167 100 1400 57 -179 100 1450 66 -190 100 1500 69 -175 100 1550 65 -191 100 1600 68 -207 100 1650 77 -228 100 1700 81 -232 100 1750 226 -246 100 1800 86 -235 100 1850 89 -217 100 1900 80 -234 100 1950 89 -247 100 2000 95 -12 100 50 2 -23 100 100 4 -30 100 150 6 -31 100 200 14 -40 100 250 11 -44 100 300 12 -68 100 350 15 -60 100 400 110 -73 100 450 40 -78 100 500 21 
-103 100 550 31 -106 100 600 34 -98 100 650 32 -114 100 700 35 -116 100 750 42 -156 100 800 46 -143 100 850 45 -152 100 900 42 -156 100 950 53 -168 100 1000 53 -195 100 1050 62 -197 100 1100 193 -194 100 1150 63 -215 100 1200 71 -211 100 1250 63 -220 100 1300 74 -240 100 1350 78 -241 100 1400 70 -233 100 1450 78 -265 100 1500 229 -264 100 1550 96 -280 100 1600 88 -256 100 1650 81 -305 100 1700 88 -287 100 1750 99 -315 100 1800 245 -304 100 1850 93 -310 100 1900 106 -338 100 1950 139 -375 100 2000 126 -8 100 50 3 -14 100 100 3 -13 100 150 4 -20 100 200 7 -35 100 250 9 -41 100 300 11 -47 100 350 12 -41 100 400 12 -60 100 450 15 -62 100 500 19 -69 100 550 18 -91 100 600 24 -89 100 650 25 -89 100 700 29 -96 100 750 28 -107 100 800 29 -110 100 850 31 -135 100 900 34 -130 100 950 33 -124 100 1000 35 -131 100 1050 37 -176 100 1100 49 -185 100 1150 49 -129 100 1200 48 -155 100 1250 52 -196 100 1300 193 -177 100 1350 55 -171 100 1400 55 -190 100 1450 60 -197 100 1500 57 -174 100 1550 58 -224 100 1600 70 -244 100 1650 66 -263 100 1700 75 -227 100 1750 205 -214 100 1800 79 -226 100 1850 75 -226 100 1900 79 -280 100 1950 88 -249 100 2000 83 -7 100 50 2 -8 100 100 2 -32 100 150 6 -58 100 200 8 -46 100 250 10 -67 100 300 15 -76 100 350 14 -84 100 400 20 -105 100 450 23 -96 100 500 121 -107 100 550 23 -122 100 600 23 -146 100 650 28 -130 100 700 34 -164 100 750 34 -187 100 800 38 -148 100 850 39 -205 100 900 43 -213 100 950 53 -212 100 1000 57 -230 100 1050 58 -202 100 1100 49 -254 100 1150 61 -237 100 1200 59 -247 100 1250 66 -300 100 1300 61 -271 100 1350 70 -333 100 1400 89 -302 100 1450 70 -320 100 1500 78 -354 100 1550 223 -370 100 1600 85 -308 100 1650 95 -354 100 1700 90 -406 100 1750 96 -403 100 1800 109 -371 100 1850 94 -420 100 1900 246 -411 100 1950 106 -408 100 2000 119 -5 100 50 2 -10 100 100 2 -25 100 150 6 -29 100 200 7 -38 100 250 11 -66 100 300 14 -47 100 350 12 -63 100 400 20 -85 100 450 24 -65 100 500 20 -99 100 550 26 -103 100 600 33 -110 100 650 35 -126 100 700 156 -104 100 750 33 -117 100 800 40 -130 100 850 44 -133 100 900 41 -175 100 950 51 -163 100 1000 49 -156 100 1050 52 -163 100 1100 55 -157 100 1150 53 -209 100 1200 66 -181 100 1250 62 -195 100 1300 213 -208 100 1350 70 -247 100 1400 76 -206 100 1450 79 -250 100 1500 73 -268 100 1550 92 -237 100 1600 84 -246 100 1650 231 -291 100 1700 95 -308 100 1750 98 -263 100 1800 94 -276 100 1850 97 -329 100 1900 106 -297 100 1950 239 -324 100 2000 114 -7 100 50 1 -16 100 100 3 -32 100 150 7 -36 100 200 7 -48 100 250 11 -54 100 300 10 -47 100 350 13 -70 100 400 16 -78 100 450 21 -69 100 500 16 -100 100 550 21 -106 100 600 24 -111 100 650 26 -104 100 700 28 -101 100 750 29 -135 100 800 34 -153 100 850 36 -147 100 900 40 -174 100 950 45 -147 100 1000 42 -188 100 1050 44 -165 100 1100 47 -186 100 1150 50 -186 100 1200 52 -210 100 1250 63 -232 100 1300 59 -257 100 1350 63 -211 100 1400 62 -255 100 1450 70 -227 100 1500 62 -272 100 1550 76 -278 100 1600 72 -236 100 1650 75 -277 100 1700 79 -275 100 1750 77 -274 100 1800 74 -288 100 1850 224 -289 100 1900 86 -306 100 1950 89 -329 100 2000 98 -13 100 50 2 -21 100 100 3 -12 100 150 3 -23 100 200 7 -38 100 250 10 -50 100 300 12 -35 100 350 11 -55 100 400 15 -72 100 450 21 -69 100 500 22 -80 100 550 20 -83 100 600 30 -104 100 650 27 -98 100 700 29 -113 100 750 32 -114 100 800 149 -125 100 850 39 -141 100 900 39 -102 100 950 35 -149 100 1000 45 -130 100 1050 42 -141 100 1100 45 -136 100 1150 56 -210 100 1200 56 -168 100 1250 54 -173 100 1300 52 -181 100 1350 57 -208 100 1400 212 -197 100 1450 72 
-209 100 1500 69 -212 100 1550 73 -208 100 1600 70 -235 100 1650 73 -223 100 1700 78 -249 100 1750 77 -241 100 1800 87 -223 100 1850 82 -275 100 1900 92 -278 100 1950 95 -278 100 2000 91 -9 100 50 2 -15 100 100 4 -29 100 150 5 -35 100 200 10 -52 100 250 12 -51 100 300 13 -69 100 350 17 -62 100 400 17 -81 100 450 26 -95 100 500 130 -87 100 550 30 -107 100 600 29 -106 100 650 36 -115 100 700 37 -95 100 750 30 -142 100 800 42 -148 100 850 47 -151 100 900 49 -164 100 950 50 -170 100 1000 49 -156 100 1050 58 -176 100 1100 53 -157 100 1150 206 -209 100 1200 66 -185 100 1250 60 -208 100 1300 64 -203 100 1350 70 -234 100 1400 69 -213 100 1450 73 -249 100 1500 77 -255 100 1550 234 -260 100 1600 84 -265 100 1650 86 -270 100 1700 94 -277 100 1750 89 -276 100 1800 93 -272 100 1850 96 -314 100 1900 252 -321 100 1950 110 -317 100 2000 108 -13 100 50 3 -13 100 100 3 -21 100 150 7 -30 100 200 13 -41 100 250 10 -41 100 300 12 -40 100 350 18 -81 100 400 23 -78 100 450 22 -76 100 500 24 -91 100 550 32 -107 100 600 35 -89 100 650 32 -126 100 700 168 -117 100 750 39 -172 100 800 44 -126 100 850 49 -132 100 900 45 -163 100 950 57 -154 100 1000 52 -157 100 1050 63 -169 100 1100 60 -166 100 1150 74 -189 100 1200 75 -203 100 1250 70 -188 100 1300 61 -204 100 1350 81 -215 100 1400 75 -255 100 1450 91 -231 100 1500 88 -260 100 1550 98 -252 100 1600 241 -264 100 1650 116 -259 100 1700 99 -297 100 1750 105 -297 100 1800 111 -276 100 1850 250 -345 100 1900 127 -342 100 1950 116 -347 100 2000 131 -4 100 50 1 -15 100 100 3 -26 100 150 6 -24 100 200 6 -27 100 250 8 -27 100 300 9 -33 100 350 11 -60 100 400 17 -51 100 450 16 -55 100 500 20 -75 100 550 21 -66 100 600 23 -59 100 650 23 -93 100 700 151 -71 100 750 26 -102 100 800 34 -119 100 850 39 -94 100 900 34 -103 100 950 35 -149 100 1000 40 -110 100 1050 41 -154 100 1100 48 -153 100 1150 53 -130 100 1200 47 -163 100 1250 56 -141 100 1300 49 -155 100 1350 196 -159 100 1400 60 -154 100 1450 57 -161 100 1500 59 -184 100 1550 65 -177 100 1600 67 -180 100 1650 62 -177 100 1700 67 -204 100 1750 73 -214 100 1800 211 -203 100 1850 71 -206 100 1900 75 -186 100 1950 80 -213 100 2000 81 -8 100 50 2 -16 100 100 3 -19 100 150 6 -29 100 200 7 -47 100 250 10 -36 100 300 11 -63 100 350 14 -61 100 400 18 -57 100 450 19 -63 100 500 21 -83 100 550 22 -112 100 600 33 -96 100 650 137 -105 100 700 38 -101 100 750 34 -144 100 800 39 -139 100 850 46 -160 100 900 44 -151 100 950 52 -159 100 1000 52 -161 100 1050 52 -178 100 1100 52 -172 100 1150 56 -196 100 1200 69 -219 100 1250 205 -214 100 1300 67 -223 100 1350 65 -228 100 1400 78 -237 100 1450 79 -241 100 1500 72 -263 100 1550 78 -283 100 1600 91 -259 100 1650 226 -282 100 1700 101 -276 100 1750 85 -338 100 1800 107 -264 100 1850 94 -316 100 1900 102 -355 100 1950 239 -316 100 2000 109 -9 100 50 1 -12 100 100 3 -17 100 150 5 -25 100 200 6 -32 100 250 9 -30 100 300 15 -47 100 350 15 -65 100 400 19 -41 100 450 19 -55 100 500 17 -77 100 550 29 -75 100 600 27 -83 100 650 30 -110 100 700 42 -112 100 750 39 -115 100 800 35 -127 100 850 41 -109 100 900 173 -120 100 950 50 -114 100 1000 47 -148 100 1050 48 -143 100 1100 56 -137 100 1150 50 -165 100 1200 56 -161 100 1250 52 -163 100 1300 57 -172 100 1350 61 -209 100 1400 67 -198 100 1450 69 -182 100 1500 66 -197 100 1550 73 -211 100 1600 79 -226 100 1650 85 -232 100 1700 93 -219 100 1750 87 -238 100 1800 233 -200 100 1850 84 -243 100 1900 89 -241 100 1950 94 -247 100 2000 100 -8 100 50 1 -12 100 100 3 -17 100 150 4 -17 100 200 6 -34 100 250 10 -37 100 300 10 -36 100 350 17 -48 100 400 20 -51 100 450 23 
-61 100 500 21 -65 100 550 19 -69 100 600 129 -93 100 650 25 -109 100 700 29 -118 100 750 36 -86 100 800 30 -129 100 850 44 -105 100 900 32 -111 100 950 37 -123 100 1000 42 -126 100 1050 52 -134 100 1100 50 -126 100 1150 48 -145 100 1200 78 -147 100 1250 185 -159 100 1300 55 -167 100 1350 61 -205 100 1400 78 -173 100 1450 57 -180 100 1500 74 -180 100 1550 71 -189 100 1600 77 -204 100 1650 206 -199 100 1700 74 -215 100 1750 100 -209 100 1800 87 -230 100 1850 79 -220 100 1900 91 -248 100 1950 87 -256 100 2000 211 -0 100 50 0 -12 100 100 2 -20 100 150 5 -28 100 200 6 -34 100 250 10 -40 100 300 15 -45 100 350 16 -51 100 400 15 -61 100 450 20 -75 100 500 26 -72 100 550 25 -86 100 600 31 -104 100 650 31 -106 100 700 35 -114 100 750 45 -125 100 800 42 -128 100 850 40 -120 100 900 46 -157 100 950 52 -157 100 1000 189 -169 100 1050 56 -153 100 1100 53 -160 100 1150 57 -176 100 1200 66 -184 100 1250 68 -188 100 1300 75 -212 100 1350 74 -208 100 1400 63 -221 100 1450 224 -218 100 1500 78 -206 100 1550 80 -236 100 1600 83 -233 100 1650 89 -243 100 1700 89 -233 100 1750 89 -270 100 1800 246 -272 100 1850 106 -238 100 1900 97 -278 100 1950 105 -281 100 2000 112 -7 100 50 2 -15 100 100 3 -12 100 150 6 -12 100 200 5 -17 100 250 6 -29 100 300 8 -36 100 350 12 -41 100 400 13 -56 100 450 15 -50 100 500 16 -42 100 550 133 -64 100 600 22 -49 100 650 25 -61 100 700 25 -74 100 750 30 -96 100 800 30 -83 100 850 29 -95 100 900 35 -92 100 950 38 -120 100 1000 38 -114 100 1050 37 -112 100 1100 41 -122 100 1150 42 -136 100 1200 45 -128 100 1250 48 -137 100 1300 177 -124 100 1350 49 -135 100 1400 50 -155 100 1450 60 -157 100 1500 57 -132 100 1550 60 -172 100 1600 65 -181 100 1650 71 -145 100 1700 63 -167 100 1750 195 -176 100 1800 71 -199 100 1850 78 -173 100 1900 72 -219 100 1950 79 -196 100 2000 78 -8 100 50 2 -16 100 100 4 -19 100 150 4 -35 100 200 7 -26 100 250 11 -43 100 300 13 -46 100 350 15 -53 100 400 19 -73 100 450 18 -63 100 500 21 -63 100 550 26 -65 100 600 22 -80 100 650 123 -93 100 700 35 -89 100 750 34 -111 100 800 35 -100 100 850 31 -121 100 900 34 -133 100 950 42 -134 100 1000 43 -125 100 1050 47 -117 100 1100 50 -182 100 1150 61 -156 100 1200 58 -158 100 1250 59 -177 100 1300 69 -185 100 1350 69 -182 100 1400 60 -189 100 1450 72 -206 100 1500 76 -190 100 1550 66 -206 100 1600 73 -217 100 1650 78 -211 100 1700 223 -239 100 1750 86 -225 100 1800 96 -233 100 1850 98 -269 100 1900 102 -280 100 1950 94 -278 100 2000 232 -10 100 50 2 -18 100 100 3 -25 100 150 5 -34 100 200 9 -41 100 250 9 -50 100 300 12 -44 100 350 11 -54 100 400 14 -72 100 450 16 -100 100 500 19 -100 100 550 23 -95 100 600 22 -110 100 650 25 -101 100 700 26 -123 100 750 28 -140 100 800 36 -164 100 850 36 -146 100 900 34 -161 100 950 38 -170 100 1000 42 -159 100 1050 44 -188 100 1100 183 -200 100 1150 48 -187 100 1200 47 -221 100 1250 54 -205 100 1300 57 -209 100 1350 49 -217 100 1400 55 -234 100 1450 63 -239 100 1500 67 -243 100 1550 65 -285 100 1600 216 -292 100 1650 70 -291 100 1700 73 -295 100 1750 78 -304 100 1800 77 -352 100 1850 83 -305 100 1900 80 -277 100 1950 211 -339 100 2000 92 -3 100 50 2 -14 100 100 3 -12 100 150 3 -20 100 200 8 -24 100 250 9 -33 100 300 12 -43 100 350 16 -35 100 400 16 -50 100 450 18 -51 100 500 18 -64 100 550 28 -65 100 600 25 -61 100 650 25 -77 100 700 27 -87 100 750 31 -85 100 800 33 -69 100 850 31 -75 100 900 36 -107 100 950 46 -124 100 1000 184 -103 100 1050 45 -142 100 1100 50 -122 100 1150 48 -108 100 1200 53 -117 100 1250 62 -152 100 1300 66 -165 100 1350 68 -149 100 1400 64 -170 100 1450 76 -152 100 
1500 203 -144 100 1550 71 -157 100 1600 74 -177 100 1650 78 -183 100 1700 80 -174 100 1750 86 -205 100 1800 88 -183 100 1850 219 -218 100 1900 90 -212 100 1950 101 -181 100 2000 93 -13 100 50 3 -29 100 100 4 -31 100 150 6 -34 100 200 6 -38 100 250 13 -52 100 300 12 -55 100 350 13 -65 100 400 15 -83 100 450 17 -74 100 500 17 -96 100 550 21 -83 100 600 27 -105 100 650 23 -123 100 700 29 -141 100 750 148 -144 100 800 32 -158 100 850 36 -154 100 900 37 -178 100 950 45 -176 100 1000 41 -199 100 1050 44 -180 100 1100 46 -202 100 1150 48 -216 100 1200 52 -212 100 1250 54 -225 100 1300 66 -253 100 1350 193 -260 100 1400 73 -250 100 1450 61 -280 100 1500 69 -254 100 1550 70 -266 100 1600 69 -270 100 1650 71 -301 100 1700 78 -310 100 1750 220 -310 100 1800 78 -336 100 1850 82 -311 100 1900 84 -322 100 1950 84 -367 100 2000 93 -8 100 50 1 -19 100 100 5 -15 100 150 5 -25 100 200 7 -34 100 250 12 -37 100 300 12 -42 100 350 14 -41 100 400 14 -73 100 450 22 -57 100 500 18 -77 100 550 25 -69 100 600 25 -82 100 650 31 -96 100 700 30 -78 100 750 32 -94 100 800 33 -124 100 850 43 -129 100 900 47 -109 100 950 46 -134 100 1000 45 -130 100 1050 50 -122 100 1100 44 -162 100 1150 64 -144 100 1200 189 -146 100 1250 55 -183 100 1300 70 -167 100 1350 71 -151 100 1400 61 -183 100 1450 75 -179 100 1500 70 -193 100 1550 77 -203 100 1600 80 -214 100 1650 80 -233 100 1700 86 -206 100 1750 87 -237 100 1800 93 -203 100 1850 87 -226 100 1900 95 -249 100 1950 236 -237 100 2000 96 -1 100 50 1 -13 100 100 5 -14 100 150 5 -22 100 200 6 -21 100 250 8 -29 100 300 10 -20 100 350 11 -33 100 400 13 -36 100 450 16 -39 100 500 19 -43 100 550 19 -49 100 600 23 -59 100 650 24 -64 100 700 24 -56 100 750 29 -58 100 800 27 -69 100 850 28 -76 100 900 33 -72 100 950 35 -71 100 1000 38 -89 100 1050 175 -103 100 1100 44 -93 100 1150 47 -109 100 1200 52 -95 100 1250 50 -109 100 1300 56 -111 100 1350 51 -113 100 1400 58 -142 100 1450 61 -135 100 1500 63 -100 100 1550 187 -123 100 1600 59 -139 100 1650 65 -151 100 1700 80 -155 100 1750 73 -151 100 1800 76 -136 100 1850 69 -138 100 1900 79 -176 100 1950 205 -165 100 2000 82 -7 100 50 1 -16 100 100 2 -21 100 150 5 -32 100 200 6 -26 100 250 9 -41 100 300 12 -48 100 350 11 -52 100 400 16 -79 100 450 17 -70 100 500 22 -85 100 550 21 -96 100 600 25 -106 100 650 36 -102 100 700 28 -106 100 750 29 -116 100 800 36 -117 100 850 32 -133 100 900 45 -133 100 950 43 -134 100 1000 180 -133 100 1050 40 -140 100 1100 44 -165 100 1150 56 -167 100 1200 54 -157 100 1250 49 -181 100 1300 57 -196 100 1350 58 -197 100 1400 62 -221 100 1450 70 -236 100 1500 67 -212 100 1550 68 -220 100 1600 73 -221 100 1650 74 -221 100 1700 71 -230 100 1750 81 -259 100 1800 85 -255 100 1850 87 -251 100 1900 221 -259 100 1950 83 -307 100 2000 82 -2 100 50 1 -14 100 100 3 -32 100 150 7 -29 100 200 7 -32 100 250 7 -36 100 300 10 -44 100 350 13 -51 100 400 13 -68 100 450 17 -65 100 500 22 -97 100 550 24 -77 100 600 24 -101 100 650 24 -98 100 700 30 -109 100 750 26 -123 100 800 33 -150 100 850 34 -118 100 900 152 -145 100 950 41 -136 100 1000 38 -144 100 1050 41 -164 100 1100 45 -153 100 1150 46 -165 100 1200 49 -187 100 1250 54 -210 100 1300 64 -180 100 1350 56 -191 100 1400 62 -200 100 1450 192 -212 100 1500 68 -225 100 1550 65 -234 100 1600 68 -230 100 1650 63 -252 100 1700 74 -261 100 1750 77 -272 100 1800 88 -259 100 1850 213 -284 100 1900 81 -254 100 1950 83 -340 100 2000 88 -14 100 50 3 -18 100 100 6 -28 100 150 7 -28 100 200 6 -36 100 250 12 -40 100 300 9 -58 100 350 15 -50 100 400 16 -91 100 450 27 -93 100 500 27 -72 100 550 22 -94 
100 600 29 -101 100 650 29 -89 100 700 32 -136 100 750 161 -146 100 800 41 -120 100 850 41 -133 100 900 49 -167 100 950 51 -151 100 1000 52 -142 100 1050 43 -169 100 1100 56 -177 100 1150 51 -179 100 1200 57 -175 100 1250 52 -220 100 1300 219 -228 100 1350 79 -200 100 1400 64 -194 100 1450 68 -230 100 1500 74 -228 100 1550 77 -231 100 1600 82 -220 100 1650 78 -271 100 1700 236 -269 100 1750 95 -273 100 1800 94 -300 100 1850 95 -279 100 1900 94 -280 100 1950 97 -311 100 2000 251 -3 100 50 1 -15 100 100 3 -16 100 150 4 -35 100 200 8 -21 100 250 6 -47 100 300 13 -43 100 350 13 -52 100 400 14 -60 100 450 16 -64 100 500 20 -65 100 550 24 -83 100 600 24 -83 100 650 29 -110 100 700 34 -87 100 750 29 -103 100 800 31 -117 100 850 36 -137 100 900 41 -153 100 950 51 -158 100 1000 46 -140 100 1050 47 -171 100 1100 190 -115 100 1150 44 -172 100 1200 53 -174 100 1250 57 -172 100 1300 54 -179 100 1350 64 -177 100 1400 55 -209 100 1450 70 -178 100 1500 70 -212 100 1550 203 -195 100 1600 72 -254 100 1650 76 -213 100 1700 80 -263 100 1750 84 -273 100 1800 96 -265 100 1850 96 -284 100 1900 219 -265 100 1950 88 -268 100 2000 89 -5 100 50 2 -16 100 100 3 -19 100 150 4 -31 100 200 6 -52 100 250 9 -54 100 300 10 -56 100 350 12 -64 100 400 15 -64 100 450 16 -70 100 500 19 -96 100 550 24 -89 100 600 24 -91 100 650 23 -110 100 700 26 -132 100 750 30 -119 100 800 30 -136 100 850 34 -153 100 900 38 -159 100 950 170 -145 100 1000 40 -167 100 1050 42 -206 100 1100 52 -198 100 1150 47 -203 100 1200 50 -217 100 1250 53 -212 100 1300 56 -200 100 1350 55 -229 100 1400 60 -215 100 1450 61 -220 100 1500 65 -254 100 1550 64 -270 100 1600 71 -263 100 1650 71 -278 100 1700 71 -298 100 1750 73 -265 100 1800 77 -300 100 1850 218 -324 100 1900 81 -305 100 1950 83 -328 100 2000 89 -11 100 50 1 -13 100 100 2 -25 100 150 4 -31 100 200 7 -37 100 250 7 -46 100 300 10 -63 100 350 12 -69 100 400 15 -61 100 450 14 -84 100 500 18 -74 100 550 19 -77 100 600 19 -108 100 650 25 -99 100 700 25 -112 100 750 28 -94 100 800 32 -131 100 850 32 -143 100 900 149 -139 100 950 34 -123 100 1000 34 -125 100 1050 36 -152 100 1100 46 -165 100 1150 42 -165 100 1200 49 -179 100 1250 45 -171 100 1300 52 -193 100 1350 54 -192 100 1400 57 -224 100 1450 61 -223 100 1500 183 -212 100 1550 57 -229 100 1600 64 -238 100 1650 66 -233 100 1700 68 -249 100 1750 72 -287 100 1800 80 -271 100 1850 82 -258 100 1900 196 -283 100 1950 91 -294 100 2000 85 -9 100 50 3 -12 100 100 4 -15 100 150 4 -33 100 200 10 -28 100 250 10 -34 100 300 12 -46 100 350 16 -54 100 400 19 -38 100 450 19 -63 100 500 23 -66 100 550 24 -71 100 600 31 -78 100 650 26 -91 100 700 28 -92 100 750 33 -94 100 800 35 -91 100 850 153 -121 100 900 38 -87 100 950 37 -143 100 1000 55 -115 100 1050 47 -137 100 1100 50 -151 100 1150 53 -180 100 1200 63 -138 100 1250 58 -153 100 1300 63 -155 100 1350 58 -177 100 1400 194 -167 100 1450 69 -161 100 1500 64 -164 100 1550 70 -197 100 1600 79 -196 100 1650 78 -202 100 1700 85 -214 100 1750 86 -256 100 1800 226 -206 100 1850 92 -239 100 1900 96 -269 100 1950 103 -241 100 2000 92 -7 100 50 1 -21 100 100 5 -19 100 150 4 -32 100 200 7 -45 100 250 9 -43 100 300 10 -48 100 350 13 -68 100 400 14 -60 100 450 15 -70 100 500 17 -93 100 550 123 -107 100 600 22 -106 100 650 23 -98 100 700 26 -110 100 750 28 -117 100 800 32 -128 100 850 31 -121 100 900 33 -130 100 950 35 -132 100 1000 34 -152 100 1050 40 -146 100 1100 46 -136 100 1150 38 -178 100 1200 48 -183 100 1250 52 -213 100 1300 185 -196 100 1350 57 -210 100 1400 56 -230 100 1450 58 -212 100 1500 64 -236 100 1550 68 -248 100 
1600 65 -245 100 1650 68 -225 100 1700 64 -258 100 1750 200 -256 100 1800 72 -291 100 1850 77 -256 100 1900 79 -308 100 1950 86 -316 100 2000 89 -4 100 50 3 -22 100 100 3 -20 100 150 4 -22 100 200 6 -16 100 250 7 -34 100 300 9 -41 100 350 11 -42 100 400 11 -49 100 450 16 -61 100 500 16 -53 100 550 17 -72 100 600 19 -83 100 650 29 -76 100 700 25 -97 100 750 27 -125 100 800 31 -99 100 850 31 -127 100 900 33 -112 100 950 35 -105 100 1000 35 -144 100 1050 39 -143 100 1100 44 -136 100 1150 43 -155 100 1200 46 -157 100 1250 46 -177 100 1300 55 -158 100 1350 174 -144 100 1400 53 -195 100 1450 58 -166 100 1500 60 -191 100 1550 61 -198 100 1600 71 -196 100 1650 66 -209 100 1700 73 -198 100 1750 74 -199 100 1800 205 -263 100 1850 77 -219 100 1900 81 -259 100 1950 83 -249 100 2000 84 -8 100 50 3 -5 100 100 2 -14 100 150 5 -27 100 200 11 -25 100 250 12 -35 100 300 15 -39 100 350 15 -47 100 400 22 -49 100 450 20 -54 100 500 23 -73 100 550 28 -64 100 600 121 -66 100 650 32 -92 100 700 38 -91 100 750 42 -109 100 800 54 -144 100 850 63 -101 100 900 45 -122 100 950 56 -121 100 1000 54 -135 100 1050 64 -152 100 1100 68 -103 100 1150 51 -141 100 1200 69 -139 100 1250 66 -155 100 1300 75 -140 100 1350 72 -172 100 1400 79 -157 100 1450 83 -160 100 1500 84 -174 100 1550 223 -221 100 1600 97 -204 100 1650 112 -219 100 1700 105 -215 100 1750 107 -226 100 1800 108 -214 100 1850 236 -234 100 1900 104 -228 100 1950 116 -241 100 2000 112 -4 100 50 1 -17 100 100 3 -21 100 150 4 -35 100 200 5 -49 100 250 7 -51 100 300 10 -69 100 350 12 -69 100 400 12 -90 100 450 15 -79 100 500 15 -105 100 550 20 -94 100 600 20 -101 100 650 21 -109 100 700 23 -111 100 750 139 -120 100 800 30 -129 100 850 27 -156 100 900 33 -161 100 950 33 -160 100 1000 34 -178 100 1050 37 -184 100 1100 39 -186 100 1150 37 -209 100 1200 42 -197 100 1250 45 -171 100 1300 44 -219 100 1350 50 -232 100 1400 52 -223 100 1450 187 -238 100 1500 58 -225 100 1550 52 -263 100 1600 60 -239 100 1650 64 -294 100 1700 70 -265 100 1750 62 -267 100 1800 66 -318 100 1850 72 -307 100 1900 194 -319 100 1950 78 -311 100 2000 78 -8 100 50 2 -18 100 100 7 -15 100 150 4 -33 100 200 14 -43 100 250 12 -37 100 300 9 -47 100 350 17 -76 100 400 21 -61 100 450 20 -70 100 500 17 -82 100 550 21 -103 100 600 25 -111 100 650 26 -103 100 700 30 -121 100 750 41 -130 100 800 36 -143 100 850 48 -150 100 900 162 -138 100 950 40 -154 100 1000 57 -149 100 1050 46 -177 100 1100 52 -157 100 1150 59 -214 100 1200 66 -204 100 1250 58 -187 100 1300 62 -222 100 1350 63 -216 100 1400 200 -219 100 1450 71 -242 100 1500 78 -218 100 1550 65 -268 100 1600 82 -288 100 1650 91 -266 100 1700 99 -262 100 1750 205 -268 100 1800 79 -273 100 1850 85 -266 100 1900 102 -319 100 1950 120 -307 100 2000 96 -7 100 50 3 -8 100 100 6 -22 100 150 9 -24 100 200 9 -25 100 250 10 -48 100 300 47 -48 100 350 27 -37 100 400 17 -46 100 450 18 -87 100 500 36 -87 100 550 40 -99 100 600 34 -85 100 650 30 -91 100 700 40 -127 100 750 48 -92 100 800 39 -120 100 850 61 -120 100 900 44 -135 100 950 47 -136 100 1000 53 -127 100 1050 190 -148 100 1100 56 -151 100 1150 64 -144 100 1200 69 -152 100 1250 61 -156 100 1300 64 -167 100 1350 71 -196 100 1400 79 -207 100 1450 86 -185 100 1500 77 -215 100 1550 93 -218 100 1600 98 -224 100 1650 96 -234 100 1700 99 -247 100 1750 104 -262 100 1800 247 -252 100 1850 114 -249 100 1900 103 -258 100 1950 108 -265 100 2000 111 -13 100 50 2 -12 100 100 3 -18 100 150 5 -48 100 200 10 -49 100 250 10 -46 100 300 15 -52 100 350 14 -74 100 400 116 -67 100 450 18 -84 100 500 26 -91 100 550 23 -88 100 600 22 
-116 100 650 29 -139 100 700 30 -126 100 750 35 -117 100 800 31 -145 100 850 36 -143 100 900 39 -166 100 950 41 -168 100 1000 42 -165 100 1050 46 -163 100 1100 46 -220 100 1150 57 -224 100 1200 195 -229 100 1250 60 -193 100 1300 59 -214 100 1350 60 -236 100 1400 63 -277 100 1450 71 -261 100 1500 71 -245 100 1550 74 -264 100 1600 207 -272 100 1650 79 -269 100 1700 79 -311 100 1750 88 -309 100 1800 85 -299 100 1850 83 -336 100 1900 89 -325 100 1950 225 -342 100 2000 98 -8 100 50 1 -16 100 100 4 -28 100 150 6 -35 100 200 8 -37 100 250 9 -49 100 300 11 -56 100 350 13 -58 100 400 14 -74 100 450 18 -90 100 500 22 -69 100 550 22 -76 100 600 26 -98 100 650 28 -102 100 700 29 -116 100 750 34 -103 100 800 33 -110 100 850 31 -149 100 900 45 -135 100 950 39 -153 100 1000 172 -170 100 1050 52 -154 100 1100 54 -147 100 1150 48 -191 100 1200 56 -165 100 1250 58 -177 100 1300 57 -193 100 1350 60 -221 100 1400 66 -195 100 1450 68 -231 100 1500 203 -204 100 1550 68 -220 100 1600 73 -248 100 1650 73 -264 100 1700 81 -247 100 1750 82 -270 100 1800 90 -277 100 1850 215 -252 100 1900 89 -283 100 1950 92 -297 100 2000 102 -8 100 50 2 -6 100 100 2 -24 100 150 6 -29 100 200 9 -24 100 250 9 -23 100 300 12 -37 100 350 14 -44 100 400 14 -50 100 450 16 -81 100 500 24 -63 100 550 24 -79 100 600 28 -106 100 650 34 -104 100 700 32 -72 100 750 149 -86 100 800 34 -99 100 850 36 -101 100 900 48 -111 100 950 43 -115 100 1000 50 -141 100 1050 52 -116 100 1100 51 -118 100 1150 51 -155 100 1200 61 -166 100 1250 66 -186 100 1300 213 -176 100 1350 68 -181 100 1400 72 -185 100 1450 67 -177 100 1500 71 -180 100 1550 71 -208 100 1600 79 -232 100 1650 90 -219 100 1700 208 -196 100 1750 75 -246 100 1800 89 -243 100 1850 100 -240 100 1900 96 -258 100 1950 106 -267 100 2000 235 -7 100 50 3 -17 100 100 7 -19 100 150 4 -47 100 200 11 -49 100 250 15 -45 100 300 17 -50 100 350 17 -55 100 400 23 -86 100 450 26 -83 100 500 29 -100 100 550 31 -95 100 600 29 -116 100 650 33 -114 100 700 43 -112 100 750 36 -112 100 800 40 -126 100 850 40 -141 100 900 44 -154 100 950 47 -166 100 1000 50 -181 100 1050 61 -170 100 1100 54 -195 100 1150 62 -173 100 1200 69 -214 100 1250 71 -183 100 1300 70 -200 100 1350 75 -214 100 1400 79 -232 100 1450 239 -254 100 1500 99 -289 100 1550 97 -269 100 1600 103 -258 100 1650 95 -289 100 1700 102 -315 100 1750 255 -295 100 1800 103 -289 100 1850 114 -320 100 1900 122 -315 100 1950 132 -350 100 2000 286 -6 100 50 1 -12 100 100 4 -39 100 150 7 -33 100 200 8 -42 100 250 12 -58 100 300 11 -52 100 350 15 -62 100 400 15 -85 100 450 18 -77 100 500 20 -94 100 550 28 -109 100 600 25 -94 100 650 27 -99 100 700 30 -129 100 750 35 -130 100 800 34 -112 100 850 32 -141 100 900 38 -149 100 950 42 -161 100 1000 43 -180 100 1050 198 -148 100 1100 45 -187 100 1150 49 -169 100 1200 51 -205 100 1250 57 -188 100 1300 56 -187 100 1350 60 -208 100 1400 62 -249 100 1450 72 -236 100 1500 65 -216 100 1550 202 -202 100 1600 68 -231 100 1650 72 -261 100 1700 79 -238 100 1750 75 -271 100 1800 87 -317 100 1850 84 -247 100 1900 205 -335 100 1950 98 -273 100 2000 93 -6 100 50 1 -13 100 100 6 -14 100 150 5 -22 100 200 7 -23 100 250 12 -19 100 300 8 -43 100 350 13 -31 100 400 15 -46 100 450 21 -44 100 500 20 -57 100 550 21 -69 100 600 25 -52 100 650 27 -52 100 700 23 -68 100 750 30 -66 100 800 32 -97 100 850 36 -93 100 900 143 -97 100 950 42 -97 100 1000 40 -99 100 1050 42 -92 100 1100 40 -94 100 1150 42 -147 100 1200 58 -107 100 1250 47 -131 100 1300 61 -114 100 1350 53 -120 100 1400 56 -155 100 1450 62 -134 100 1500 65 -136 100 1550 74 -161 100 1600 
74 -166 100 1650 80 -158 100 1700 76 -162 100 1750 80 -193 100 1800 86 -185 100 1850 201 -189 100 1900 87 -185 100 1950 82 -209 100 2000 96 -15 100 50 2 -17 100 100 3 -29 100 150 7 -31 100 200 8 -51 100 250 12 -41 100 300 15 -54 100 350 14 -90 100 400 23 -81 100 450 20 -71 100 500 18 -119 100 550 31 -111 100 600 29 -111 100 650 38 -121 100 700 37 -132 100 750 41 -128 100 800 34 -130 100 850 43 -130 100 900 39 -161 100 950 46 -164 100 1000 55 -182 100 1050 50 -165 100 1100 53 -211 100 1150 83 -188 100 1200 67 -207 100 1250 61 -246 100 1300 74 -222 100 1350 76 -266 100 1400 75 -252 100 1450 73 -279 100 1500 83 -276 100 1550 85 -289 100 1600 91 -278 100 1650 211 -290 100 1700 98 -326 100 1750 102 -298 100 1800 88 -334 100 1850 102 -304 100 1900 98 -342 100 1950 238 -338 100 2000 109 -8 100 50 1 -12 100 100 3 -17 100 150 5 -23 100 200 9 -30 100 250 7 -36 100 300 11 -38 100 350 13 -50 100 400 17 -63 100 450 25 -66 100 500 26 -63 100 550 27 -76 100 600 27 -90 100 650 35 -108 100 700 41 -106 100 750 37 -80 100 800 33 -105 100 850 40 -95 100 900 39 -112 100 950 187 -122 100 1000 47 -128 100 1050 44 -133 100 1100 49 -138 100 1150 58 -160 100 1200 58 -164 100 1250 60 -165 100 1300 66 -169 100 1350 61 -180 100 1400 68 -174 100 1450 69 -186 100 1500 73 -179 100 1550 71 -221 100 1600 82 -215 100 1650 90 -207 100 1700 79 -241 100 1750 96 -228 100 1800 231 -234 100 1850 93 -234 100 1900 108 -244 100 1950 101 -245 100 2000 96 -9 100 50 3 -18 100 100 3 -24 100 150 4 -26 100 200 6 -40 100 250 11 -47 100 300 12 -50 100 350 16 -59 100 400 17 -56 100 450 18 -71 100 500 132 -79 100 550 23 -84 100 600 24 -99 100 650 26 -89 100 700 27 -121 100 750 33 -118 100 800 34 -134 100 850 48 -130 100 900 37 -140 100 950 39 -137 100 1000 46 -165 100 1050 48 -137 100 1100 54 -166 100 1150 54 -166 100 1200 51 -201 100 1250 208 -190 100 1300 53 -203 100 1350 65 -190 100 1400 62 -189 100 1450 60 -205 100 1500 73 -231 100 1550 70 -212 100 1600 70 -229 100 1650 214 -239 100 1700 78 -267 100 1750 83 -253 100 1800 83 -241 100 1850 90 -262 100 1900 86 -300 100 1950 96 -251 100 2000 234 -4 100 50 2 -14 100 100 3 -25 100 150 5 -32 100 200 8 -46 100 250 10 -50 100 300 14 -47 100 350 13 -63 100 400 16 -60 100 450 17 -94 100 500 24 -88 100 550 23 -116 100 600 28 -109 100 650 24 -120 100 700 28 -127 100 750 31 -153 100 800 35 -135 100 850 37 -131 100 900 41 -165 100 950 41 -174 100 1000 43 -179 100 1050 47 -192 100 1100 197 -204 100 1150 54 -231 100 1200 56 -195 100 1250 57 -199 100 1300 56 -197 100 1350 55 -216 100 1400 61 -239 100 1450 63 -241 100 1500 71 -259 100 1550 214 -277 100 1600 73 -289 100 1650 83 -303 100 1700 81 -315 100 1750 86 -345 100 1800 87 -326 100 1850 88 -355 100 1900 231 -297 100 1950 85 -331 100 2000 90 -5 100 50 1 -9 100 100 3 -18 100 150 5 -26 100 200 7 -37 100 250 13 -31 100 300 13 -42 100 350 12 -42 100 400 13 -62 100 450 19 -55 100 500 19 -72 100 550 22 -72 100 600 28 -82 100 650 25 -88 100 700 28 -97 100 750 31 -93 100 800 33 -111 100 850 40 -116 100 900 176 -122 100 950 41 -143 100 1000 48 -147 100 1050 49 -134 100 1100 58 -160 100 1150 60 -139 100 1200 48 -170 100 1250 55 -147 100 1300 58 -167 100 1350 75 -177 100 1400 62 -175 100 1450 65 -182 100 1500 74 -185 100 1550 65 -212 100 1600 76 -236 100 1650 78 -207 100 1700 82 -239 100 1750 94 -197 100 1800 220 -246 100 1850 94 -240 100 1900 95 -247 100 1950 96 -258 100 2000 98 -5 100 50 1 -16 100 100 4 -14 100 150 5 -17 100 200 6 -33 100 250 8 -38 100 300 15 -34 100 350 12 -33 100 400 15 -66 100 450 17 -71 100 500 23 -70 100 550 26 -77 100 600 139 -71 100 650 30 
-75 100 700 28 -95 100 750 33 -83 100 800 26 -85 100 850 33 -113 100 900 40 -99 100 950 41 -104 100 1000 41 -149 100 1050 50 -138 100 1100 47 -120 100 1150 41 -131 100 1200 51 -178 100 1250 54 -155 100 1300 208 -188 100 1350 64 -166 100 1400 67 -167 100 1450 62 -158 100 1500 69 -206 100 1550 70 -182 100 1600 72 -229 100 1650 80 -206 100 1700 214 -218 100 1750 80 -232 100 1800 92 -239 100 1850 89 -214 100 1900 84 -192 100 1950 78 -231 100 2000 94 -5 100 50 1 -17 100 100 2 -19 100 150 4 -26 100 200 8 -49 100 250 107 -28 100 300 8 -53 100 350 13 -70 100 400 15 -75 100 450 20 -82 100 500 21 -75 100 550 20 -81 100 600 24 -96 100 650 28 -117 100 700 34 -107 100 750 36 -127 100 800 33 -122 100 850 34 -160 100 900 46 -139 100 950 39 -140 100 1000 45 -171 100 1050 52 -148 100 1100 44 -190 100 1150 198 -193 100 1200 59 -198 100 1250 53 -189 100 1300 61 -212 100 1350 60 -186 100 1400 63 -211 100 1450 64 -230 100 1500 73 -228 100 1550 64 -247 100 1600 223 -244 100 1650 82 -272 100 1700 93 -277 100 1750 88 -273 100 1800 81 -311 100 1850 95 -264 100 1900 85 -311 100 1950 235 -308 100 2000 97 -3 100 50 2 -14 100 100 3 -21 100 150 5 -20 100 200 5 -41 100 250 8 -44 100 300 10 -46 100 350 11 -48 100 400 12 -51 100 450 13 -76 100 500 17 -77 100 550 19 -84 100 600 20 -86 100 650 21 -96 100 700 25 -86 100 750 33 -127 100 800 32 -87 100 850 28 -143 100 900 34 -116 100 950 33 -120 100 1000 35 -133 100 1050 169 -133 100 1100 41 -148 100 1150 41 -139 100 1200 43 -176 100 1250 48 -198 100 1300 49 -147 100 1350 47 -181 100 1400 52 -151 100 1450 52 -190 100 1500 55 -198 100 1550 59 -225 100 1600 198 -232 100 1650 67 -213 100 1700 67 -243 100 1750 70 -221 100 1800 66 -231 100 1850 70 -231 100 1900 70 -242 100 1950 81 -257 100 2000 221 -8 100 50 1 -21 100 100 3 -24 100 150 4 -28 100 200 6 -45 100 250 8 -56 100 300 10 -66 100 350 13 -66 100 400 16 -77 100 450 17 -82 100 500 18 -102 100 550 20 -96 100 600 20 -140 100 650 25 -120 100 700 26 -141 100 750 32 -147 100 800 29 -154 100 850 32 -163 100 900 35 -162 100 950 37 -168 100 1000 36 -175 100 1050 39 -184 100 1100 41 -191 100 1150 195 -203 100 1200 47 -217 100 1250 51 -207 100 1300 49 -213 100 1350 54 -216 100 1400 56 -226 100 1450 61 -259 100 1500 63 -259 100 1550 62 -270 100 1600 65 -297 100 1650 222 -257 100 1700 70 -300 100 1750 76 -347 100 1800 79 -296 100 1850 77 -303 100 1900 84 -314 100 1950 81 -362 100 2000 226 -3 100 50 1 -10 100 100 3 -17 100 150 6 -25 100 200 7 -34 100 250 9 -29 100 300 11 -45 100 350 13 -42 100 400 13 -38 100 450 17 -52 100 500 16 -57 100 550 21 -67 100 600 22 -70 100 650 24 -72 100 700 29 -78 100 750 28 -88 100 800 30 -85 100 850 31 -101 100 900 43 -100 100 950 35 -106 100 1000 39 -107 100 1050 41 -111 100 1100 43 -114 100 1150 185 -106 100 1200 44 -124 100 1250 51 -138 100 1300 54 -127 100 1350 50 -154 100 1400 59 -147 100 1450 57 -161 100 1500 90 -150 100 1550 62 -161 100 1600 69 -181 100 1650 66 -166 100 1700 69 -180 100 1750 77 -167 100 1800 79 -195 100 1850 74 -197 100 1900 83 -201 100 1950 87 -214 100 2000 209 -6 100 50 1 -14 100 100 3 -21 100 150 6 -20 100 200 5 -25 100 250 7 -33 100 300 9 -43 100 350 12 -58 100 400 19 -59 100 450 15 -62 100 500 18 -69 100 550 19 -56 100 600 20 -75 100 650 25 -83 100 700 28 -69 100 750 25 -100 100 800 30 -85 100 850 30 -105 100 900 30 -115 100 950 36 -119 100 1000 39 -101 100 1050 38 -136 100 1100 47 -146 100 1150 189 -142 100 1200 47 -154 100 1250 47 -162 100 1300 80 -141 100 1350 53 -161 100 1400 54 -163 100 1450 55 -191 100 1500 61 -152 100 1550 57 -211 100 1600 66 -189 100 1650 201 -196 100 1700 
66 -205 100 1750 70 -212 100 1800 72 -216 100 1850 74 -227 100 1900 77 -220 100 1950 76 -229 100 2000 83 -6 100 50 1 -14 100 100 85 -24 100 150 5 -31 100 200 13 -33 100 250 10 -52 100 300 11 -59 100 350 12 -63 100 400 13 -80 100 450 17 -82 100 500 18 -92 100 550 20 -94 100 600 23 -110 100 650 24 -109 100 700 27 -122 100 750 27 -117 100 800 31 -109 100 850 30 -148 100 900 41 -148 100 950 39 -154 100 1000 39 -171 100 1050 44 -160 100 1100 41 -172 100 1150 47 -188 100 1200 197 -189 100 1250 49 -175 100 1300 50 -199 100 1350 56 -209 100 1400 57 -237 100 1450 63 -238 100 1500 60 -237 100 1550 64 -243 100 1600 67 -265 100 1650 208 -283 100 1700 71 -297 100 1750 79 -277 100 1800 77 -274 100 1850 76 -293 100 1900 80 -292 100 1950 81 -297 100 2000 83 -9 100 50 1 -10 100 100 3 -15 100 150 5 -28 100 200 9 -27 100 250 11 -38 100 300 10 -36 100 350 15 -46 100 400 17 -45 100 450 18 -70 100 500 20 -63 100 550 22 -68 100 600 26 -69 100 650 27 -74 100 700 29 -86 100 750 30 -99 100 800 36 -97 100 850 34 -84 100 900 39 -95 100 950 38 -109 100 1000 45 -97 100 1050 43 -125 100 1100 44 -131 100 1150 195 -137 100 1200 55 -133 100 1250 54 -139 100 1300 61 -142 100 1350 59 -159 100 1400 62 -160 100 1450 65 -176 100 1500 72 -181 100 1550 74 -159 100 1600 215 -188 100 1650 79 -190 100 1700 89 -181 100 1750 77 -189 100 1800 81 -207 100 1850 94 -205 100 1900 87 -214 100 1950 231 -223 100 2000 88 -7 100 50 2 -11 100 100 3 -15 100 150 4 -19 100 200 5 -26 100 250 8 -36 100 300 11 -32 100 350 11 -47 100 400 14 -41 100 450 15 -51 100 500 20 -48 100 550 20 -56 100 600 20 -67 100 650 25 -78 100 700 28 -82 100 750 32 -88 100 800 26 -81 100 850 28 -83 100 900 35 -104 100 950 39 -87 100 1000 33 -120 100 1050 157 -117 100 1100 44 -112 100 1150 43 -129 100 1200 48 -137 100 1250 49 -135 100 1300 46 -158 100 1350 52 -147 100 1400 58 -133 100 1450 56 -166 100 1500 62 -162 100 1550 61 -178 100 1600 200 -182 100 1650 71 -196 100 1700 75 -179 100 1750 71 -191 100 1800 71 -185 100 1850 70 -216 100 1900 78 -193 100 1950 76 -232 100 2000 84 -4 100 50 1 -11 100 100 5 -24 100 150 6 -30 100 200 8 -39 100 250 10 -43 100 300 13 -47 100 350 15 -47 100 400 16 -73 100 450 20 -67 100 500 19 -71 100 550 21 -56 100 600 21 -79 100 650 24 -79 100 700 28 -84 100 750 32 -91 100 800 35 -99 100 850 35 -123 100 900 39 -110 100 950 43 -127 100 1000 45 -115 100 1050 182 -126 100 1100 47 -120 100 1150 49 -156 100 1200 54 -130 100 1250 53 -146 100 1300 59 -166 100 1350 62 -155 100 1400 67 -171 100 1450 61 -203 100 1500 74 -172 100 1550 210 -181 100 1600 72 -200 100 1650 77 -215 100 1700 79 -211 100 1750 83 -201 100 1800 81 -212 100 1850 84 -217 100 1900 221 -229 100 1950 97 -239 100 2000 95 -8 100 50 1 -16 100 100 3 -23 100 150 7 -25 100 200 5 -38 100 250 13 -39 100 300 10 -43 100 350 13 -46 100 400 13 -70 100 450 20 -67 100 500 22 -73 100 550 19 -73 100 600 24 -104 100 650 31 -83 100 700 27 -86 100 750 38 -84 100 800 36 -98 100 850 32 -121 100 900 161 -134 100 950 41 -127 100 1000 40 -134 100 1050 46 -125 100 1100 42 -125 100 1150 45 -156 100 1200 57 -163 100 1250 52 -166 100 1300 56 -167 100 1350 63 -166 100 1400 62 -231 100 1450 215 -186 100 1500 66 -185 100 1550 72 -186 100 1600 69 -222 100 1650 66 -209 100 1700 97 -226 100 1750 78 -216 100 1800 76 -228 100 1850 85 -249 100 1900 82 -227 100 1950 84 -259 100 2000 101 -7 100 50 1 -19 100 100 3 -21 100 150 6 -30 100 200 8 -28 100 250 9 -43 100 300 11 -48 100 350 14 -66 100 400 14 -69 100 450 22 -79 100 500 22 -81 100 550 24 -90 100 600 26 -111 100 650 31 -117 100 700 142 -107 100 750 33 -133 100 800 37 -137 
[... several thousand removed lines of four-column numeric data from this deleted file omitted; each row has the form "<v1> 100 <v2> <v3>", with column 2 constant at 100, column 3 stepping upward in increments of 50 (roughly 50–4000), and columns 1 and 4 growing with it, apparently benchmark-style measurements ...]
-281 100 2350 244 -268 100 2400 103 -290 100 2450 124 -289 100 2500 119 -306 100 2550 125 -303 100 2600 262 -316 100 2650 127 -323 100 2700 138 -283 100 2750 129 -344 100 2800 128 -337 100 2850 270 -343 100 2900 140 -326 100 2950 136 -350 100 3000 152 -391 100 3050 287 -355 100 3100 149 -370 100 3150 156 -392 100 3200 152 -385 100 3250 292 -416 100 3300 169 -381 100 3350 165 -379 100 3400 166 -416 100 3450 302 -408 100 3500 171 -423 100 3550 177 -431 100 3600 306 -398 100 3650 189 -413 100 3700 181 -420 100 3750 181 -436 100 3800 317 -473 100 3850 202 -472 100 3900 181 -461 100 3950 321 -459 100 4000 205 -235 100 2000 85 -276 100 2050 87 -273 100 2100 98 -319 100 2150 223 -248 100 2200 106 -262 100 2250 103 -322 100 2300 121 -328 100 2350 111 -316 100 2400 245 -318 100 2450 109 -345 100 2500 119 -339 100 2550 134 -346 100 2600 122 -353 100 2650 250 -373 100 2700 130 -324 100 2750 125 -331 100 2800 125 -331 100 2850 133 -408 100 2900 289 -328 100 2950 140 -409 100 3000 139 -429 100 3050 144 -423 100 3100 281 -410 100 3150 152 -386 100 3200 157 -423 100 3250 175 -426 100 3300 293 -449 100 3350 173 -440 100 3400 160 -415 100 3450 156 -473 100 3500 165 -459 100 3550 172 -439 100 3600 167 -450 100 3650 300 -485 100 3700 178 -457 100 3750 174 -525 100 3800 186 -435 100 3850 168 -511 100 3900 206 -498 100 3950 189 -521 100 4000 326 -280 100 2000 97 -341 100 2050 104 -277 100 2100 92 -343 100 2150 137 -381 100 2200 252 -335 100 2250 121 -352 100 2300 113 -313 100 2350 100 -340 100 2400 121 -347 100 2450 253 -418 100 2500 146 -369 100 2550 130 -399 100 2600 145 -378 100 2650 272 -380 100 2700 137 -389 100 2750 140 -428 100 2800 149 -421 100 2850 141 -455 100 2900 137 -408 100 2950 135 -404 100 3000 167 -455 100 3050 155 -408 100 3100 150 -458 100 3150 168 -417 100 3200 186 -476 100 3250 289 -500 100 3300 177 -520 100 3350 189 -500 100 3400 216 -503 100 3450 186 -547 100 3500 187 -553 100 3550 192 -526 100 3600 319 -549 100 3650 192 -560 100 3700 192 -559 100 3750 330 -605 100 3800 212 -544 100 3850 203 -602 100 3900 359 -586 100 3950 247 -587 100 4000 221 -260 100 2000 211 -239 100 2050 110 -218 100 2100 105 -279 100 2150 121 -251 100 2200 105 -280 100 2250 253 -289 100 2300 116 -295 100 2350 125 -278 100 2400 117 -290 100 2450 129 -302 100 2500 260 -325 100 2550 135 -313 100 2600 127 -307 100 2650 138 -306 100 2700 144 -319 100 2750 276 -349 100 2800 151 -365 100 2850 151 -331 100 2900 151 -356 100 2950 284 -335 100 3000 146 -326 100 3050 153 -378 100 3100 164 -385 100 3150 291 -405 100 3200 171 -399 100 3250 176 -361 100 3300 301 -397 100 3350 177 -405 100 3400 188 -427 100 3450 186 -433 100 3500 327 -452 100 3550 196 -407 100 3600 192 -474 100 3650 334 -438 100 3700 201 -456 100 3750 202 -445 100 3800 342 -479 100 3850 211 -475 100 3900 224 -473 100 3950 346 -500 100 4000 217 -349 100 2000 85 -302 100 2050 87 -322 100 2100 205 -310 100 2150 91 -330 100 2200 94 -333 100 2250 97 -335 100 2300 105 -353 100 2350 100 -353 100 2400 241 -377 100 2450 114 -372 100 2500 106 -343 100 2550 109 -408 100 2600 111 -429 100 2650 118 -402 100 2700 256 -439 100 2750 126 -399 100 2800 136 -443 100 2850 124 -488 100 2900 257 -430 100 2950 127 -488 100 3000 145 -453 100 3050 141 -465 100 3100 142 -485 100 3150 134 -465 100 3200 151 -514 100 3250 149 -467 100 3300 149 -532 100 3350 278 -513 100 3400 159 -492 100 3450 160 -540 100 3500 161 -533 100 3550 156 -539 100 3600 167 -605 100 3650 175 -562 100 3700 294 -574 100 3750 178 -572 100 3800 173 -565 100 3850 180 -553 100 3900 314 -620 100 3950 185 -594 100 4000 187 
-275 100 2000 187 -269 100 2050 107 -297 100 2100 116 -293 100 2150 113 -282 100 2200 120 -269 100 2250 244 -265 100 2300 112 -298 100 2350 113 -300 100 2400 121 -304 100 2450 136 -293 100 2500 258 -326 100 2550 148 -340 100 2600 146 -347 100 2650 153 -358 100 2700 149 -371 100 2750 149 -373 100 2800 144 -363 100 2850 143 -390 100 2900 156 -405 100 2950 168 -409 100 3000 182 -404 100 3050 167 -420 100 3100 298 -414 100 3150 172 -375 100 3200 179 -452 100 3250 181 -446 100 3300 321 -423 100 3350 193 -418 100 3400 175 -433 100 3450 291 -448 100 3500 190 -454 100 3550 188 -459 100 3600 327 -441 100 3650 197 -520 100 3700 212 -479 100 3750 333 -509 100 3800 232 -450 100 3850 227 -494 100 3900 339 -522 100 3950 209 -502 100 4000 240 -374 100 2000 109 -374 100 2050 114 -362 100 2100 105 -373 100 2150 114 -419 100 2200 124 -423 100 2250 128 -426 100 2300 123 -396 100 2350 136 -427 100 2400 141 -484 100 2450 148 -423 100 2500 284 -449 100 2550 144 -478 100 2600 149 -498 100 2650 152 -493 100 2700 295 -511 100 2750 163 -479 100 2800 154 -512 100 2850 166 -556 100 2900 309 -539 100 2950 178 -545 100 3000 171 -620 100 3050 186 -539 100 3100 172 -580 100 3150 184 -589 100 3200 185 -624 100 3250 333 -599 100 3300 185 -581 100 3350 182 -605 100 3400 335 -643 100 3450 204 -555 100 3500 189 -642 100 3550 342 -651 100 3600 211 -667 100 3650 206 -634 100 3700 347 -689 100 3750 220 -675 100 3800 220 -692 100 3850 359 -709 100 3900 236 -742 100 3950 250 -719 100 4000 385 -271 100 2000 93 -266 100 2050 102 -278 100 2100 95 -287 100 2150 100 -313 100 2200 243 -309 100 2250 122 -293 100 2300 105 -314 100 2350 111 -307 100 2400 118 -322 100 2450 260 -336 100 2500 121 -326 100 2550 114 -360 100 2600 129 -351 100 2650 132 -327 100 2700 256 -344 100 2750 139 -391 100 2800 151 -355 100 2850 143 -375 100 2900 273 -393 100 2950 157 -362 100 3000 152 -407 100 3050 155 -395 100 3100 277 -427 100 3150 164 -433 100 3200 168 -461 100 3250 162 -445 100 3300 296 -438 100 3350 171 -464 100 3400 171 -395 100 3450 155 -452 100 3500 291 -469 100 3550 179 -417 100 3600 166 -500 100 3650 302 -481 100 3700 188 -495 100 3750 203 -530 100 3800 200 -452 100 3850 182 -504 100 3900 197 -502 100 3950 198 -523 100 4000 329 -185 100 2000 80 -198 100 2050 79 -223 100 2100 89 -231 100 2150 87 -247 100 2200 207 -253 100 2250 97 -218 100 2300 95 -227 100 2350 95 -229 100 2400 99 -253 100 2450 102 -273 100 2500 232 -261 100 2550 101 -263 100 2600 107 -275 100 2650 108 -268 100 2700 116 -266 100 2750 117 -331 100 2800 121 -280 100 2850 118 -296 100 2900 127 -318 100 2950 125 -315 100 3000 248 -338 100 3050 133 -341 100 3100 141 -329 100 3150 133 -334 100 3200 138 -350 100 3250 284 -337 100 3300 141 -347 100 3350 148 -372 100 3400 143 -383 100 3450 267 -386 100 3500 151 -347 100 3550 150 -347 100 3600 155 -399 100 3650 276 -396 100 3700 170 -416 100 3750 171 -416 100 3800 161 -369 100 3850 167 -386 100 3900 174 -441 100 3950 185 -410 100 4000 295 -314 100 2000 90 -300 100 2050 87 -344 100 2100 98 -355 100 2150 98 -362 100 2200 101 -364 100 2250 235 -363 100 2300 107 -344 100 2350 106 -350 100 2400 106 -395 100 2450 118 -388 100 2500 255 -379 100 2550 112 -359 100 2600 112 -416 100 2650 119 -416 100 2700 125 -469 100 2750 268 -419 100 2800 132 -447 100 2850 129 -442 100 2900 131 -457 100 2950 149 -489 100 3000 136 -501 100 3050 143 -505 100 3100 149 -415 100 3150 136 -519 100 3200 295 -507 100 3250 153 -512 100 3300 157 -522 100 3350 167 -587 100 3400 160 -546 100 3450 167 -554 100 3500 165 -538 100 3550 293 -554 100 3600 171 -603 100 3650 181 -598 
100 3700 184 -575 100 3750 325 -626 100 3800 182 -570 100 3850 176 -594 100 3900 313 -607 100 3950 188 -650 100 4000 197 -278 100 2000 94 -259 100 2050 201 -262 100 2100 96 -286 100 2150 101 -298 100 2200 107 -296 100 2250 104 -331 100 2300 250 -298 100 2350 105 -293 100 2400 111 -337 100 2450 119 -305 100 2500 121 -315 100 2550 125 -335 100 2600 122 -343 100 2650 124 -340 100 2700 132 -383 100 2750 147 -402 100 2800 251 -401 100 2850 145 -377 100 2900 143 -406 100 2950 135 -413 100 3000 304 -446 100 3050 156 -420 100 3100 161 -415 100 3150 159 -369 100 3200 280 -418 100 3250 160 -440 100 3300 159 -414 100 3350 162 -431 100 3400 304 -452 100 3450 170 -454 100 3500 172 -437 100 3550 164 -467 100 3600 298 -519 100 3650 178 -515 100 3700 184 -482 100 3750 293 -499 100 3800 188 -531 100 3850 201 -518 100 3900 317 -466 100 3950 188 -512 100 4000 193 -297 100 2000 85 -309 100 2050 93 -332 100 2100 92 -344 100 2150 100 -350 100 2200 101 -375 100 2250 107 -326 100 2300 102 -337 100 2350 223 -356 100 2400 112 -389 100 2450 136 -400 100 2500 113 -401 100 2550 117 -396 100 2600 243 -391 100 2650 122 -413 100 2700 125 -412 100 2750 128 -446 100 2800 131 -470 100 2850 262 -423 100 2900 128 -414 100 2950 128 -444 100 3000 150 -518 100 3050 279 -454 100 3100 140 -489 100 3150 156 -446 100 3200 140 -512 100 3250 278 -484 100 3300 151 -515 100 3350 163 -520 100 3400 157 -507 100 3450 289 -510 100 3500 163 -595 100 3550 172 -540 100 3600 172 -551 100 3650 297 -564 100 3700 177 -570 100 3750 193 -633 100 3800 316 -632 100 3850 184 -607 100 3900 184 -599 100 3950 184 -618 100 4000 189 -310 100 2000 91 -291 100 2050 82 -287 100 2100 83 -292 100 2150 84 -285 100 2200 211 -318 100 2250 94 -297 100 2300 100 -336 100 2350 97 -296 100 2400 95 -337 100 2450 102 -335 100 2500 232 -345 100 2550 111 -329 100 2600 100 -373 100 2650 108 -363 100 2700 124 -380 100 2750 231 -371 100 2800 125 -379 100 2850 121 -399 100 2900 128 -394 100 2950 124 -426 100 3000 250 -414 100 3050 142 -455 100 3100 132 -412 100 3150 135 -407 100 3200 131 -441 100 3250 144 -427 100 3300 137 -435 100 3350 147 -451 100 3400 142 -451 100 3450 268 -436 100 3500 154 -504 100 3550 159 -503 100 3600 156 -490 100 3650 268 -504 100 3700 160 -491 100 3750 157 -528 100 3800 271 -526 100 3850 173 -494 100 3900 182 -514 100 3950 167 -539 100 4000 297 -273 100 2000 97 -262 100 2050 96 -260 100 2100 107 -296 100 2150 102 -298 100 2200 214 -313 100 2250 99 -298 100 2300 112 -281 100 2350 110 -313 100 2400 116 -285 100 2450 114 -324 100 2500 264 -334 100 2550 119 -331 100 2600 145 -334 100 2650 126 -355 100 2700 250 -350 100 2750 133 -363 100 2800 140 -380 100 2850 145 -376 100 2900 149 -374 100 2950 141 -400 100 3000 146 -384 100 3050 138 -438 100 3100 148 -418 100 3150 261 -410 100 3200 167 -445 100 3250 161 -450 100 3300 287 -451 100 3350 184 -475 100 3400 185 -442 100 3450 168 -471 100 3500 299 -495 100 3550 171 -454 100 3600 172 -465 100 3650 306 -481 100 3700 177 -479 100 3750 201 -510 100 3800 200 -506 100 3850 180 -538 100 3900 205 -500 100 3950 201 -562 100 4000 330 -306 100 2000 97 -349 100 2050 99 -325 100 2100 97 -339 100 2150 104 -386 100 2200 230 -335 100 2250 97 -362 100 2300 102 -368 100 2350 110 -399 100 2400 118 -403 100 2450 229 -337 100 2500 110 -382 100 2550 122 -396 100 2600 127 -397 100 2650 124 -400 100 2700 255 -441 100 2750 141 -428 100 2800 132 -513 100 2850 139 -482 100 2900 134 -504 100 2950 152 -492 100 3000 152 -469 100 3050 144 -452 100 3100 151 -487 100 3150 260 -511 100 3200 153 -488 100 3250 148 -555 100 3300 170 -504 100 3350 
155 -540 100 3400 175 -543 100 3450 168 -573 100 3500 305 -609 100 3550 183 -534 100 3600 180 -575 100 3650 175 -610 100 3700 310 -585 100 3750 185 -620 100 3800 191 -662 100 3850 303 -623 100 3900 209 -661 100 3950 207 -637 100 4000 323 -322 100 2000 90 -328 100 2050 98 -333 100 2100 95 -433 100 2150 112 -362 100 2200 235 -420 100 2250 123 -373 100 2300 111 -380 100 2350 115 -410 100 2400 115 -440 100 2450 119 -389 100 2500 124 -429 100 2550 128 -430 100 2600 125 -404 100 2650 124 -476 100 2700 265 -496 100 2750 140 -446 100 2800 136 -507 100 2850 139 -495 100 2900 145 -532 100 2950 147 -516 100 3000 149 -503 100 3050 152 -552 100 3100 287 -554 100 3150 162 -537 100 3200 162 -578 100 3250 169 -577 100 3300 300 -579 100 3350 174 -588 100 3400 178 -637 100 3450 180 -657 100 3500 186 -572 100 3550 183 -562 100 3600 179 -551 100 3650 307 -618 100 3700 182 -668 100 3750 198 -676 100 3800 342 -653 100 3850 196 -635 100 3900 196 -672 100 3950 332 -684 100 4000 206 -310 100 2000 106 -310 100 2050 111 -328 100 2100 106 -386 100 2150 331 -358 100 2200 109 -366 100 2250 2235 -411 100 2300 125 -418 100 2350 120 -384 100 2400 115 -376 100 2450 117 -393 100 2500 268 -437 100 2550 141 -426 100 2600 155 -461 100 2650 151 -429 100 2700 286 -444 100 2750 147 -479 100 2800 157 -464 100 2850 164 -428 100 2900 299 -474 100 2950 148 -451 100 3000 150 -459 100 3050 165 -493 100 3100 314 -421 100 3150 151 -513 100 3200 178 -546 100 3250 183 -557 100 3300 168 -544 100 3350 195 -587 100 3400 194 -534 100 3450 319 -579 100 3500 189 -552 100 3550 193 -617 100 3600 339 -613 100 3650 198 -630 100 3700 220 -640 100 3750 349 -618 100 3800 223 -667 100 3850 230 -616 100 3900 352 -641 100 3950 227 -607 100 4000 207 -328 100 2000 228 -311 100 2050 119 -305 100 2100 108 -356 100 2150 120 -336 100 2200 139 -313 100 2250 280 -372 100 2300 132 -356 100 2350 133 -389 100 2400 150 -342 100 2450 288 -390 100 2500 152 -420 100 2550 176 -392 100 2600 154 -374 100 2650 316 -411 100 2700 149 -380 100 2750 150 -465 100 2800 167 -409 100 2850 311 -445 100 2900 177 -416 100 2950 175 -415 100 3000 172 -459 100 3050 160 -484 100 3100 188 -477 100 3150 185 -470 100 3200 319 -536 100 3250 204 -493 100 3300 200 -487 100 3350 349 -533 100 3400 217 -530 100 3450 226 -528 100 3500 364 -529 100 3550 219 -487 100 3600 201 -543 100 3650 389 -588 100 3700 234 -573 100 3750 234 -581 100 3800 243 -598 100 3850 247 -585 100 3900 369 -609 100 3950 256 -595 100 4000 252 -282 100 2000 220 -284 100 2050 99 -267 100 2100 105 -312 100 2150 110 -289 100 2200 99 -320 100 2250 256 -294 100 2300 107 -311 100 2350 116 -326 100 2400 120 -332 100 2450 123 -330 100 2500 276 -356 100 2550 127 -383 100 2600 147 -344 100 2650 139 -382 100 2700 162 -356 100 2750 295 -385 100 2800 149 -399 100 2850 150 -365 100 2900 148 -390 100 2950 288 -421 100 3000 165 -403 100 3050 155 -412 100 3100 163 -432 100 3150 162 -430 100 3200 164 -473 100 3250 173 -434 100 3300 312 -457 100 3350 180 -461 100 3400 186 -464 100 3450 328 -473 100 3500 179 -507 100 3550 197 -474 100 3600 189 -519 100 3650 360 -527 100 3700 202 -518 100 3750 199 -499 100 3800 339 -545 100 3850 224 -548 100 3900 220 -526 100 3950 209 -560 100 4000 225 -336 100 2000 102 -386 100 2050 226 -344 100 2100 107 -360 100 2150 114 -347 100 2200 120 -367 100 2250 117 -412 100 2300 284 -402 100 2350 126 -378 100 2400 123 -398 100 2450 136 -392 100 2500 138 -449 100 2550 299 -462 100 2600 150 -425 100 2650 147 -469 100 2700 158 -466 100 2750 291 -441 100 2800 153 -462 100 2850 161 -451 100 2900 164 -526 100 2950 169 -495 100 
3000 169 -529 100 3050 183 -490 100 3100 317 -538 100 3150 182 -499 100 3200 179 -513 100 3250 344 -551 100 3300 199 -559 100 3350 195 -588 100 3400 197 -551 100 3450 199 -572 100 3500 204 -607 100 3550 202 -566 100 3600 202 -569 100 3650 208 -614 100 3700 201 -635 100 3750 212 -631 100 3800 220 -636 100 3850 378 -628 100 3900 223 -628 100 3950 234 -643 100 4000 369 -289 100 2000 94 -307 100 2050 94 -337 100 2100 113 -328 100 2150 112 -288 100 2200 237 -350 100 2250 100 -350 100 2300 102 -365 100 2350 112 -366 100 2400 113 -355 100 2450 123 -424 100 2500 134 -416 100 2550 118 -422 100 2600 131 -405 100 2650 129 -401 100 2700 261 -432 100 2750 144 -428 100 2800 127 -429 100 2850 143 -452 100 2900 275 -464 100 2950 156 -450 100 3000 146 -461 100 3050 147 -477 100 3100 289 -459 100 3150 149 -478 100 3200 159 -501 100 3250 167 -512 100 3300 318 -510 100 3350 173 -525 100 3400 172 -518 100 3450 174 -519 100 3500 322 -507 100 3550 177 -564 100 3600 180 -550 100 3650 303 -549 100 3700 195 -592 100 3750 197 -637 100 3800 352 -612 100 3850 193 -624 100 3900 197 -618 100 3950 359 -599 100 4000 201 -239 100 2000 93 -247 100 2050 98 -242 100 2100 96 -246 100 2150 226 -270 100 2200 112 -269 100 2250 117 -267 100 2300 117 -285 100 2350 123 -309 100 2400 257 -269 100 2450 120 -291 100 2500 124 -309 100 2550 126 -316 100 2600 133 -330 100 2650 143 -296 100 2700 135 -313 100 2750 133 -328 100 2800 143 -364 100 2850 297 -344 100 2900 149 -325 100 2950 149 -375 100 3000 168 -373 100 3050 293 -380 100 3100 165 -364 100 3150 165 -361 100 3200 297 -379 100 3250 167 -365 100 3300 168 -379 100 3350 168 -370 100 3400 298 -416 100 3450 186 -397 100 3500 180 -393 100 3550 175 -426 100 3600 186 -393 100 3650 185 -445 100 3700 196 -442 100 3750 330 -433 100 3800 198 -464 100 3850 209 -428 100 3900 332 -441 100 3950 197 -456 100 4000 211 -273 100 2000 204 -274 100 2050 98 -249 100 2100 94 -296 100 2150 105 -276 100 2200 101 -259 100 2250 103 -277 100 2300 251 -259 100 2350 106 -282 100 2400 109 -336 100 2450 118 -344 100 2500 126 -314 100 2550 268 -344 100 2600 123 -335 100 2650 137 -310 100 2700 121 -336 100 2750 131 -349 100 2800 280 -366 100 2850 150 -340 100 2900 143 -355 100 2950 145 -380 100 3000 266 -383 100 3050 150 -353 100 3100 142 -384 100 3150 159 -418 100 3200 302 -435 100 3250 173 -362 100 3300 161 -416 100 3350 299 -404 100 3400 166 -420 100 3450 162 -436 100 3500 178 -487 100 3550 321 -421 100 3600 187 -464 100 3650 202 -414 100 3700 313 -477 100 3750 192 -459 100 3800 192 -516 100 3850 336 -477 100 3900 202 -486 100 3950 202 -532 100 4000 340 -317 100 2000 112 -309 100 2050 117 -398 100 2100 138 -356 100 2150 129 -370 100 2200 274 -380 100 2250 137 -380 100 2300 144 -374 100 2350 143 -366 100 2400 150 -380 100 2450 138 -407 100 2500 143 -415 100 2550 152 -404 100 2600 151 -422 100 2650 297 -433 100 2700 162 -428 100 2750 178 -475 100 2800 319 -490 100 2850 166 -422 100 2900 169 -481 100 2950 182 -488 100 3000 335 -479 100 3050 178 -476 100 3100 186 -493 100 3150 325 -484 100 3200 197 -514 100 3250 190 -496 100 3300 341 -540 100 3350 208 -515 100 3400 195 -560 100 3450 359 -567 100 3500 197 -562 100 3550 217 -568 100 3600 362 -605 100 3650 225 -552 100 3700 209 -592 100 3750 369 -580 100 3800 231 -649 100 3850 268 -621 100 3900 395 -587 100 3950 224 -610 100 4000 385 -195 100 2000 96 -187 100 2050 106 -216 100 2100 114 -231 100 2150 116 -217 100 2200 256 -252 100 2250 129 -198 100 2300 114 -225 100 2350 124 -239 100 2400 122 -222 100 2450 248 -231 100 2500 136 -227 100 2550 127 -245 100 2600 134 -281 
100 2650 142 -248 100 2700 274 -244 100 2750 145 -287 100 2800 158 -284 100 2850 159 -301 100 2900 287 -280 100 2950 152 -306 100 3000 169 -309 100 3050 165 -312 100 3100 159 -304 100 3150 165 -307 100 3200 164 -318 100 3250 300 -344 100 3300 179 -363 100 3350 185 -325 100 3400 187 -342 100 3450 190 -342 100 3500 187 -298 100 3550 186 -346 100 3600 324 -398 100 3650 230 -382 100 3700 210 -367 100 3750 347 -387 100 3800 203 -390 100 3850 215 -438 100 3900 364 -366 100 3950 217 -423 100 4000 350 -244 100 2000 89 -215 100 2050 83 -243 100 2100 95 -240 100 2150 93 -246 100 2200 97 -290 100 2250 237 -276 100 2300 100 -259 100 2350 102 -277 100 2400 104 -281 100 2450 115 -298 100 2500 110 -263 100 2550 250 -296 100 2600 113 -290 100 2650 115 -288 100 2700 123 -311 100 2750 121 -310 100 2800 253 -324 100 2850 126 -340 100 2900 136 -339 100 2950 142 -362 100 3000 273 -373 100 3050 144 -371 100 3100 147 -337 100 3150 142 -390 100 3200 280 -404 100 3250 159 -348 100 3300 144 -398 100 3350 155 -384 100 3400 290 -397 100 3450 166 -391 100 3500 160 -371 100 3550 158 -410 100 3600 308 -438 100 3650 172 -402 100 3700 167 -390 100 3750 170 -462 100 3800 178 -413 100 3850 171 -459 100 3900 186 -452 100 3950 317 -465 100 4000 188 -242 100 2000 94 -237 100 2050 89 -294 100 2100 103 -296 100 2150 106 -298 100 2200 111 -323 100 2250 116 -311 100 2300 130 -323 100 2350 109 -319 100 2400 233 -370 100 2450 137 -345 100 2500 136 -323 100 2550 116 -342 100 2600 117 -356 100 2650 126 -355 100 2700 137 -398 100 2750 152 -395 100 2800 150 -368 100 2850 277 -415 100 2900 140 -378 100 2950 151 -416 100 3000 156 -431 100 3050 296 -427 100 3100 165 -463 100 3150 171 -402 100 3200 161 -396 100 3250 177 -429 100 3300 176 -468 100 3350 186 -432 100 3400 305 -452 100 3450 180 -428 100 3500 179 -471 100 3550 305 -496 100 3600 200 -451 100 3650 191 -513 100 3700 198 -511 100 3750 194 -595 100 3800 210 -510 100 3850 197 -565 100 3900 210 -509 100 3950 202 -539 100 4000 211 -266 100 2000 91 -238 100 2050 95 -273 100 2100 103 -257 100 2150 102 -251 100 2200 107 -254 100 2250 237 -271 100 2300 99 -262 100 2350 110 -304 100 2400 118 -287 100 2450 128 -300 100 2500 108 -295 100 2550 109 -265 100 2600 117 -290 100 2650 131 -283 100 2700 123 -307 100 2750 260 -358 100 2800 133 -309 100 2850 129 -363 100 2900 143 -332 100 2950 129 -372 100 3000 280 -354 100 3050 155 -361 100 3100 145 -354 100 3150 147 -386 100 3200 283 -372 100 3250 146 -377 100 3300 156 -379 100 3350 152 -386 100 3400 303 -379 100 3450 162 -383 100 3500 162 -450 100 3550 284 -434 100 3600 177 -471 100 3650 187 -412 100 3700 180 -464 100 3750 308 -468 100 3800 195 -483 100 3850 196 -515 100 3900 308 -417 100 3950 191 -449 100 4000 197 -341 100 2000 201 -364 100 2050 116 -368 100 2100 107 -358 100 2150 110 -396 100 2200 122 -404 100 2250 114 -432 100 2300 120 -443 100 2350 125 -424 100 2400 128 -461 100 2450 134 -461 100 2500 263 -484 100 2550 134 -458 100 2600 132 -453 100 2650 144 -485 100 2700 143 -464 100 2750 152 -544 100 2800 154 -484 100 2850 153 -517 100 2900 290 -531 100 2950 163 -535 100 3000 153 -502 100 3050 161 -524 100 3100 311 -513 100 3150 169 -574 100 3200 191 -581 100 3250 174 -568 100 3300 198 -589 100 3350 196 -588 100 3400 183 -575 100 3450 303 -656 100 3500 198 -634 100 3550 196 -648 100 3600 328 -625 100 3650 196 -684 100 3700 209 -653 100 3750 349 -707 100 3800 213 -620 100 3850 221 -693 100 3900 355 -677 100 3950 238 -694 100 4000 228 -376 100 2000 105 -392 100 2050 109 -350 100 2100 100 -398 100 2150 122 -382 100 2200 112 -405 100 2250 259 -406 
100 2300 123 -395 100 2350 115 -410 100 2400 126 -421 100 2450 128 -454 100 2500 270 -410 100 2550 123 -441 100 2600 132 -463 100 2650 145 -451 100 2700 286 -490 100 2750 142 -497 100 2800 154 -562 100 2850 168 -501 100 2900 291 -517 100 2950 160 -542 100 3000 165 -551 100 3050 159 -517 100 3100 305 -620 100 3150 191 -515 100 3200 170 -559 100 3250 313 -580 100 3300 178 -553 100 3350 171 -628 100 3400 187 -600 100 3450 188 -573 100 3500 188 -605 100 3550 190 -606 100 3600 336 -601 100 3650 201 -679 100 3700 214 -584 100 3750 336 -642 100 3800 213 -623 100 3850 219 -677 100 3900 351 -711 100 3950 223 -708 100 4000 346 -508 100 2000 123 -544 100 2050 145 -512 100 2100 134 -512 100 2150 135 -574 100 2200 296 -564 100 2250 166 -574 100 2300 144 -596 100 2350 155 -592 100 2400 317 -596 100 2450 160 -606 100 2500 162 -568 100 2550 145 -680 100 2600 177 -702 100 2650 183 -631 100 2700 176 -705 100 2750 334 -713 100 2800 193 -705 100 2850 207 -769 100 2900 363 -718 100 2950 201 -770 100 3000 227 -668 100 3050 342 -808 100 3100 214 -775 100 3150 204 -846 100 3200 370 -821 100 3250 225 -776 100 3300 199 -821 100 3350 382 -870 100 3400 236 -801 100 3450 220 -937 100 3500 268 -909 100 3550 234 -940 100 3600 422 -920 100 3650 259 -927 100 3700 407 -1001 100 3750 291 -965 100 3800 264 -1018 100 3850 409 -1063 100 3900 269 -1107 100 3950 436 -1002 100 4000 258 -222 100 2000 119 -242 100 2050 247 -235 100 2100 133 -260 100 2150 134 -275 100 2200 135 -272 100 2250 138 -291 100 2300 275 -271 100 2350 158 -295 100 2400 167 -327 100 2450 181 -323 100 2500 159 -330 100 2550 181 -260 100 2600 133 -336 100 2650 311 -310 100 2700 158 -321 100 2750 171 -351 100 2800 193 -327 100 2850 317 -345 100 2900 200 -350 100 2950 207 -362 100 3000 325 -360 100 3050 215 -420 100 3100 208 -356 100 3150 332 -420 100 3200 224 -398 100 3250 215 -342 100 3300 321 -384 100 3350 231 -385 100 3400 209 -387 100 3450 345 -348 100 3500 198 -454 100 3550 233 -404 100 3600 242 -413 100 3650 228 -429 100 3700 351 -460 100 3750 256 -428 100 3800 238 -418 100 3850 384 -404 100 3900 238 -461 100 3950 376 -445 100 4000 269 -347 100 2000 103 -369 100 2050 118 -393 100 2100 230 -385 100 2150 119 -409 100 2200 121 -403 100 2250 126 -430 100 2300 119 -436 100 2350 262 -440 100 2400 131 -454 100 2450 127 -471 100 2500 146 -475 100 2550 274 -469 100 2600 129 -458 100 2650 143 -483 100 2700 154 -560 100 2750 173 -521 100 2800 145 -508 100 2850 166 -552 100 2900 171 -559 100 2950 305 -551 100 3000 181 -573 100 3050 182 -549 100 3100 179 -586 100 3150 317 -631 100 3200 201 -638 100 3250 185 -608 100 3300 316 -592 100 3350 201 -653 100 3400 195 -646 100 3450 324 -658 100 3500 202 -628 100 3550 187 -661 100 3600 364 -684 100 3650 216 -699 100 3700 205 -689 100 3750 350 -673 100 3800 225 -669 100 3850 232 -737 100 3900 369 -755 100 3950 251 -740 100 4000 359 -299 100 2000 111 -299 100 2050 86 -308 100 2100 86 -281 100 2150 86 -309 100 2200 92 -306 100 2250 204 -317 100 2300 92 -390 100 2350 106 -338 100 2400 104 -342 100 2450 104 -375 100 2500 109 -384 100 2550 235 -379 100 2600 110 -390 100 2650 118 -377 100 2700 143 -367 100 2750 114 -408 100 2800 235 -354 100 2850 118 -426 100 2900 126 -408 100 2950 125 -464 100 3000 275 -443 100 3050 133 -426 100 3100 134 -492 100 3150 173 -487 100 3200 142 -464 100 3250 142 -525 100 3300 146 -466 100 3350 144 -497 100 3400 149 -523 100 3450 143 -516 100 3500 150 -505 100 3550 185 -540 100 3600 275 -537 100 3650 167 -514 100 3700 159 -566 100 3750 166 -538 100 3800 289 -514 100 3850 199 -552 100 3900 175 -607 100 3950 
324 -591 100 4000 184 -325 100 2000 97 -338 100 2050 96 -308 100 2100 90 -336 100 2150 205 -373 100 2200 105 -405 100 2250 114 -399 100 2300 118 -400 100 2350 115 -394 100 2400 241 -389 100 2450 119 -424 100 2500 122 -391 100 2550 123 -427 100 2600 126 -415 100 2650 244 -461 100 2700 137 -463 100 2750 143 -446 100 2800 142 -479 100 2850 141 -528 100 2900 141 -472 100 2950 144 -510 100 3000 151 -499 100 3050 151 -495 100 3100 264 -546 100 3150 163 -524 100 3200 156 -549 100 3250 369 -548 100 3300 164 -517 100 3350 1418 -623 100 3400 182 -580 100 3450 167 -565 100 3500 174 -626 100 3550 329 -612 100 3600 190 -670 100 3650 193 -611 100 3700 335 -608 100 3750 186 -594 100 3800 188 -653 100 3850 348 -641 100 3900 200 -638 100 3950 203 -678 100 4000 361 -168 100 2000 82 -162 100 2050 91 -210 100 2100 86 -166 100 2150 80 -189 100 2200 96 -184 100 2250 95 -203 100 2300 236 -200 100 2350 99 -243 100 2400 106 -208 100 2450 105 -216 100 2500 109 -261 100 2550 247 -248 100 2600 114 -220 100 2650 111 -239 100 2700 123 -258 100 2750 130 -247 100 2800 255 -248 100 2850 121 -272 100 2900 147 -240 100 2950 130 -270 100 3000 136 -273 100 3050 270 -257 100 3100 135 -261 100 3150 128 -268 100 3200 143 -283 100 3250 268 -273 100 3300 153 -266 100 3350 142 -304 100 3400 161 -291 100 3450 295 -277 100 3500 151 -294 100 3550 168 -304 100 3600 169 -340 100 3650 318 -323 100 3700 171 -337 100 3750 165 -341 100 3800 307 -372 100 3850 187 -311 100 3900 171 -303 100 3950 175 -346 100 4000 303 -336 100 2000 97 -333 100 2050 95 -314 100 2100 99 -325 100 2150 107 -388 100 2200 254 -357 100 2250 106 -348 100 2300 108 -332 100 2350 110 -353 100 2400 110 -414 100 2450 266 -380 100 2500 122 -392 100 2550 117 -419 100 2600 129 -378 100 2650 123 -437 100 2700 276 -385 100 2750 130 -428 100 2800 137 -424 100 2850 134 -442 100 2900 140 -480 100 2950 147 -482 100 3000 154 -491 100 3050 156 -435 100 3100 154 -463 100 3150 156 -526 100 3200 161 -504 100 3250 163 -584 100 3300 306 -519 100 3350 167 -521 100 3400 172 -540 100 3450 174 -570 100 3500 330 -533 100 3550 180 -561 100 3600 186 -553 100 3650 315 -546 100 3700 183 -547 100 3750 180 -558 100 3800 190 -571 100 3850 197 -623 100 3900 197 -613 100 3950 200 -650 100 4000 189 -274 100 2000 76 -263 100 2050 79 -301 100 2100 81 -305 100 2150 85 -314 100 2200 214 -307 100 2250 87 -355 100 2300 94 -369 100 2350 96 -357 100 2400 98 -355 100 2450 100 -352 100 2500 237 -371 100 2550 110 -402 100 2600 108 -409 100 2650 117 -415 100 2700 107 -431 100 2750 117 -385 100 2800 108 -432 100 2850 129 -446 100 2900 135 -419 100 2950 120 -444 100 3000 254 -472 100 3050 129 -433 100 3100 128 -505 100 3150 142 -470 100 3200 133 -488 100 3250 270 -467 100 3300 142 -461 100 3350 137 -489 100 3400 143 -481 100 3450 283 -539 100 3500 150 -559 100 3550 150 -517 100 3600 154 -573 100 3650 301 -533 100 3700 154 -579 100 3750 162 -608 100 3800 166 -585 100 3850 301 -541 100 3900 174 -553 100 3950 177 -588 100 4000 297 -308 100 2000 128 -310 100 2050 119 -305 100 2100 119 -329 100 2150 127 -324 100 2200 278 -330 100 2250 134 -294 100 2300 124 -367 100 2350 131 -389 100 2400 292 -333 100 2450 142 -389 100 2500 147 -348 100 2550 148 -378 100 2600 147 -412 100 2650 142 -413 100 2700 175 -412 100 2750 172 -443 100 2800 293 -420 100 2850 164 -412 100 2900 176 -436 100 2950 178 -503 100 3000 314 -459 100 3050 187 -458 100 3100 202 -433 100 3150 320 -470 100 3200 187 -444 100 3250 180 -510 100 3300 197 -538 100 3350 213 -560 100 3400 206 -482 100 3450 206 -527 100 3500 209 -503 100 3550 234 -494 100 3600 357 -577 
100 3650 226 -609 100 3700 231 -566 100 3750 381 -579 100 3800 231 -548 100 3850 222 -604 100 3900 377 -597 100 3950 251 -604 100 4000 380 -298 100 2000 92 -311 100 2050 99 -297 100 2100 100 -327 100 2150 102 -333 100 2200 100 -327 100 2250 239 -361 100 2300 108 -364 100 2350 105 -387 100 2400 112 -333 100 2450 112 -362 100 2500 109 -391 100 2550 257 -377 100 2600 114 -367 100 2650 122 -403 100 2700 133 -401 100 2750 131 -404 100 2800 124 -465 100 2850 150 -452 100 2900 136 -429 100 2950 141 -446 100 3000 301 -450 100 3050 147 -486 100 3100 150 -440 100 3150 148 -478 100 3200 297 -513 100 3250 160 -483 100 3300 161 -535 100 3350 167 -503 100 3400 163 -567 100 3450 173 -516 100 3500 164 -579 100 3550 304 -518 100 3600 176 -569 100 3650 181 -568 100 3700 188 -539 100 3750 171 -621 100 3800 185 -566 100 3850 179 -581 100 3900 328 -612 100 3950 200 -619 100 4000 198 -414 100 2000 204 -420 100 2050 102 -412 100 2100 103 -409 100 2150 101 -387 100 2200 110 -465 100 2250 116 -444 100 2300 266 -456 100 2350 119 -468 100 2400 125 -516 100 2450 132 -475 100 2500 125 -506 100 2550 275 -487 100 2600 130 -469 100 2650 132 -503 100 2700 139 -571 100 2750 278 -589 100 2800 149 -552 100 2850 156 -507 100 2900 151 -586 100 2950 310 -581 100 3000 161 -606 100 3050 153 -616 100 3100 154 -583 100 3150 311 -625 100 3200 165 -636 100 3250 173 -628 100 3300 173 -683 100 3350 180 -685 100 3400 180 -635 100 3450 181 -692 100 3500 333 -692 100 3550 197 -689 100 3600 192 -698 100 3650 346 -701 100 3700 195 -735 100 3750 207 -713 100 3800 346 -761 100 3850 214 -761 100 3900 213 -747 100 3950 358 -767 100 4000 214 -304 100 2000 103 -318 100 2050 107 -327 100 2100 234 -335 100 2150 113 -354 100 2200 105 -336 100 2250 98 -354 100 2300 103 -388 100 2350 115 -369 100 2400 268 -397 100 2450 122 -352 100 2500 123 -403 100 2550 123 -437 100 2600 131 -423 100 2650 124 -428 100 2700 134 -429 100 2750 126 -471 100 2800 144 -422 100 2850 273 -482 100 2900 162 -399 100 2950 136 -463 100 3000 154 -457 100 3050 293 -447 100 3100 156 -484 100 3150 171 -508 100 3200 160 -508 100 3250 305 -509 100 3300 170 -579 100 3350 169 -538 100 3400 313 -479 100 3450 175 -549 100 3500 187 -538 100 3550 176 -594 100 3600 326 -562 100 3650 200 -609 100 3700 198 -571 100 3750 321 -613 100 3800 218 -624 100 3850 211 -606 100 3900 349 -620 100 3950 200 -612 100 4000 215 -367 100 2000 234 -366 100 2050 127 -373 100 2100 133 -376 100 2150 125 -363 100 2200 133 -387 100 2250 296 -378 100 2300 137 -398 100 2350 128 -441 100 2400 141 -404 100 2450 272 -454 100 2500 151 -437 100 2550 154 -434 100 2600 149 -451 100 2650 315 -498 100 2700 177 -533 100 2750 181 -496 100 2800 175 -519 100 2850 329 -515 100 2900 177 -514 100 2950 178 -520 100 3000 321 -537 100 3050 198 -538 100 3100 193 -550 100 3150 345 -554 100 3200 213 -567 100 3250 205 -603 100 3300 349 -581 100 3350 219 -551 100 3400 202 -575 100 3450 355 -670 100 3500 227 -647 100 3550 209 -654 100 3600 364 -664 100 3650 233 -647 100 3700 224 -689 100 3750 381 -653 100 3800 232 -666 100 3850 222 -659 100 3900 238 -658 100 3950 243 -709 100 4000 382 -234 100 2000 92 -233 100 2050 106 -271 100 2100 101 -239 100 2150 98 -243 100 2200 250 -269 100 2250 106 -298 100 2300 116 -244 100 2350 114 -252 100 2400 112 -271 100 2450 123 -314 100 2500 126 -309 100 2550 139 -292 100 2600 131 -267 100 2650 130 -331 100 2700 258 -298 100 2750 129 -306 100 2800 130 -324 100 2850 148 -337 100 2900 145 -330 100 2950 144 -376 100 3000 153 -338 100 3050 151 -368 100 3100 297 -352 100 3150 158 -386 100 3200 170 -379 100 3250 173 
-343 100 3300 294 -409 100 3350 173 -388 100 3400 178 -379 100 3450 182 -397 100 3500 181 -418 100 3550 187 -422 100 3600 188 -432 100 3650 313 -434 100 3700 188 -424 100 3750 192 -425 100 3800 329 -458 100 3850 203 -467 100 3900 208 -477 100 3950 350 -522 100 4000 220 -295 100 2000 93 -307 100 2050 98 -318 100 2100 226 -331 100 2150 100 -363 100 2200 111 -332 100 2250 115 -316 100 2300 106 -330 100 2350 106 -387 100 2400 263 -412 100 2450 130 -355 100 2500 127 -371 100 2550 123 -418 100 2600 269 -413 100 2650 127 -435 100 2700 138 -443 100 2750 138 -420 100 2800 135 -462 100 2850 291 -455 100 2900 152 -421 100 2950 137 -447 100 3000 150 -432 100 3050 284 -503 100 3100 166 -493 100 3150 172 -501 100 3200 164 -572 100 3250 166 -529 100 3300 173 -517 100 3350 171 -558 100 3400 337 -543 100 3450 173 -551 100 3500 186 -549 100 3550 326 -563 100 3600 199 -596 100 3650 200 -565 100 3700 200 -612 100 3750 205 -579 100 3800 201 -638 100 3850 206 -615 100 3900 211 -645 100 3950 212 -616 100 4000 344 -303 100 2000 81 -325 100 2050 117 -377 100 2100 97 -357 100 2150 101 -373 100 2200 95 -376 100 2250 237 -391 100 2300 105 -370 100 2350 132 -352 100 2400 112 -404 100 2450 115 -454 100 2500 253 -382 100 2550 117 -408 100 2600 124 -421 100 2650 125 -472 100 2700 124 -464 100 2750 264 -464 100 2800 161 -519 100 2850 131 -497 100 2900 136 -503 100 2950 268 -507 100 3000 141 -560 100 3050 180 -489 100 3100 148 -501 100 3150 283 -519 100 3200 150 -529 100 3250 184 -606 100 3300 163 -552 100 3350 320 -621 100 3400 170 -539 100 3450 161 -561 100 3500 298 -564 100 3550 172 -647 100 3600 178 -687 100 3650 192 -552 100 3700 309 -612 100 3750 176 -616 100 3800 192 -682 100 3850 318 -620 100 3900 196 -543 100 3950 184 -645 100 4000 344 -256 100 2000 91 -296 100 2050 103 -268 100 2100 91 -264 100 2150 94 -269 100 2200 94 -308 100 2250 238 -299 100 2300 99 -295 100 2350 105 -307 100 2400 111 -315 100 2450 113 -312 100 2500 112 -352 100 2550 122 -342 100 2600 122 -339 100 2650 125 -298 100 2700 118 -367 100 2750 270 -374 100 2800 135 -353 100 2850 138 -397 100 2900 147 -423 100 2950 148 -422 100 3000 138 -406 100 3050 140 -386 100 3100 149 -404 100 3150 159 -409 100 3200 140 -469 100 3250 160 -465 100 3300 163 -427 100 3350 284 -430 100 3400 171 -484 100 3450 186 -456 100 3500 171 -448 100 3550 298 -458 100 3600 175 -507 100 3650 193 -521 100 3700 333 -502 100 3750 196 -511 100 3800 183 -529 100 3850 198 -505 100 3900 322 -512 100 3950 192 -567 100 4000 200 -175 100 2000 159 -221 100 2050 79 -248 100 2100 83 -227 100 2150 87 -273 100 2200 92 -252 100 2250 88 -258 100 2300 92 -265 100 2350 229 -279 100 2400 97 -284 100 2450 99 -303 100 2500 110 -277 100 2550 100 -305 100 2600 108 -305 100 2650 102 -332 100 2700 112 -293 100 2750 111 -322 100 2800 121 -315 100 2850 116 -332 100 2900 252 -335 100 2950 122 -337 100 3000 121 -379 100 3050 134 -369 100 3100 254 -370 100 3150 133 -365 100 3200 135 -370 100 3250 135 -364 100 3300 134 -354 100 3350 265 -414 100 3400 150 -345 100 3450 142 -395 100 3500 150 -421 100 3550 269 -414 100 3600 147 -419 100 3650 152 -421 100 3700 160 -421 100 3750 289 -430 100 3800 165 -444 100 3850 170 -445 100 3900 283 -427 100 3950 161 -485 100 4000 173 -383 100 2000 99 -375 100 2050 96 -397 100 2100 204 -388 100 2150 101 -405 100 2200 109 -414 100 2250 105 -405 100 2300 110 -428 100 2350 255 -476 100 2400 123 -439 100 2450 119 -426 100 2500 119 -467 100 2550 127 -455 100 2600 262 -489 100 2650 134 -510 100 2700 136 -528 100 2750 140 -545 100 2800 142 -482 100 2850 133 -572 100 2900 145 -539 100 
2950 146 -543 100 3000 145 -592 100 3050 299 -539 100 3100 154 -519 100 3150 150 -571 100 3200 160 -655 100 3250 166 -644 100 3300 170 -574 100 3350 168 -603 100 3400 313 -630 100 3450 181 -623 100 3500 182 -592 100 3550 185 -667 100 3600 176 -626 100 3650 178 -652 100 3700 186 -702 100 3750 344 -688 100 3800 202 -711 100 3850 206 -715 100 3900 337 -707 100 3950 210 -743 100 4000 209 -317 100 2000 203 -281 100 2050 104 -331 100 2100 114 -304 100 2150 106 -356 100 2200 113 -324 100 2250 113 -370 100 2300 249 -344 100 2350 145 -369 100 2400 124 -369 100 2450 120 -394 100 2500 140 -379 100 2550 260 -397 100 2600 130 -392 100 2650 138 -420 100 2700 141 -403 100 2750 270 -437 100 2800 142 -386 100 2850 137 -441 100 2900 155 -424 100 2950 146 -458 100 3000 156 -409 100 3050 150 -476 100 3100 158 -479 100 3150 292 -497 100 3200 170 -515 100 3250 174 -512 100 3300 174 -521 100 3350 309 -529 100 3400 168 -500 100 3450 174 -579 100 3500 330 -475 100 3550 177 -555 100 3600 191 -549 100 3650 197 -537 100 3700 323 -594 100 3750 197 -587 100 3800 199 -529 100 3850 334 -582 100 3900 196 -586 100 3950 207 -577 100 4000 334 -274 100 2000 95 -275 100 2050 93 -267 100 2100 113 -295 100 2150 109 -260 100 2200 219 -296 100 2250 98 -272 100 2300 98 -347 100 2350 107 -317 100 2400 113 -293 100 2450 109 -306 100 2500 235 -344 100 2550 115 -339 100 2600 127 -345 100 2650 126 -353 100 2700 138 -361 100 2750 127 -407 100 2800 136 -387 100 2850 138 -368 100 2900 138 -372 100 2950 252 -407 100 3000 131 -364 100 3050 146 -388 100 3100 152 -374 100 3150 263 -449 100 3200 147 -442 100 3250 156 -475 100 3300 158 -431 100 3350 292 -521 100 3400 168 -401 100 3450 166 -510 100 3500 171 -464 100 3550 311 -469 100 3600 175 -500 100 3650 183 -471 100 3700 308 -525 100 3750 195 -501 100 3800 181 -505 100 3850 307 -536 100 3900 195 -540 100 3950 199 -516 100 4000 324 -372 100 2000 124 -357 100 2050 120 -400 100 2100 136 -370 100 2150 135 -354 100 2200 274 -373 100 2250 137 -379 100 2300 139 -414 100 2350 146 -475 100 2400 296 -443 100 2450 161 -451 100 2500 168 -451 100 2550 168 -443 100 2600 287 -425 100 2650 168 -459 100 2700 166 -485 100 2750 157 -509 100 2800 319 -513 100 2850 182 -499 100 2900 194 -532 100 2950 336 -527 100 3000 208 -546 100 3050 205 -530 100 3100 326 -478 100 3150 185 -532 100 3200 199 -567 100 3250 349 -578 100 3300 217 -549 100 3350 209 -551 100 3400 339 -588 100 3450 229 -567 100 3500 224 -605 100 3550 367 -631 100 3600 230 -591 100 3650 229 -590 100 3700 365 -658 100 3750 232 -674 100 3800 249 -707 100 3850 243 -697 100 3900 245 -677 100 3950 369 -732 100 4000 271 -269 100 2000 82 -298 100 2050 86 -286 100 2100 210 -292 100 2150 86 -277 100 2200 87 -290 100 2250 87 -292 100 2300 90 -323 100 2350 97 -323 100 2400 224 -303 100 2450 99 -320 100 2500 99 -304 100 2550 112 -320 100 2600 104 -362 100 2650 113 -327 100 2700 230 -359 100 2750 114 -386 100 2800 130 -341 100 2850 114 -367 100 2900 126 -396 100 2950 246 -396 100 3000 127 -444 100 3050 140 -410 100 3100 132 -433 100 3150 250 -386 100 3200 132 -426 100 3250 138 -419 100 3300 140 -441 100 3350 143 -483 100 3400 275 -447 100 3450 147 -434 100 3500 149 -467 100 3550 153 -498 100 3600 263 -450 100 3650 158 -478 100 3700 160 -469 100 3750 268 -485 100 3800 164 -473 100 3850 168 -438 100 3900 164 -522 100 3950 286 -529 100 4000 176 -254 100 2000 82 -259 100 2050 84 -258 100 2100 84 -252 100 2150 163 -266 100 2200 90 -343 100 2250 100 -304 100 2300 88 -278 100 2350 97 -326 100 2400 97 -329 100 2450 227 -343 100 2500 97 -354 100 2550 109 -341 100 2600 110 
-330 100 2650 101 -349 100 2700 114 -383 100 2750 240 -356 100 2800 110 -362 100 2850 120 -328 100 2900 114 -383 100 2950 116 -354 100 3000 244 -387 100 3050 121 -409 100 3100 131 -414 100 3150 134 -386 100 3200 127 -432 100 3250 139 -433 100 3300 143 -433 100 3350 153 -413 100 3400 149 -451 100 3450 145 -454 100 3500 143 -445 100 3550 148 -453 100 3600 149 -459 100 3650 140 -498 100 3700 158 -476 100 3750 157 -491 100 3800 273 -533 100 3850 169 -495 100 3900 171 -522 100 3950 170 -542 100 4000 308 -314 100 2000 118 -282 100 2050 109 -276 100 2100 113 -288 100 2150 114 -344 100 2200 252 -326 100 2250 131 -332 100 2300 141 -307 100 2350 136 -338 100 2400 263 -337 100 2450 152 -376 100 2500 132 -376 100 2550 146 -377 100 2600 145 -429 100 2650 298 -369 100 2700 153 -389 100 2750 163 -395 100 2800 163 -387 100 2850 148 -417 100 2900 168 -398 100 2950 169 -393 100 3000 308 -461 100 3050 184 -446 100 3100 180 -440 100 3150 173 -462 100 3200 313 -489 100 3250 190 -494 100 3300 178 -476 100 3350 305 -476 100 3400 202 -466 100 3450 199 -475 100 3500 329 -539 100 3550 204 -537 100 3600 220 -502 100 3650 335 -512 100 3700 214 -498 100 3750 209 -518 100 3800 356 -521 100 3850 211 -580 100 3900 238 -619 100 3950 362 -551 100 4000 230 -311 100 2000 106 -335 100 2050 105 -354 100 2100 111 -359 100 2150 119 -380 100 2200 119 -391 100 2250 121 -355 100 2300 119 -400 100 2350 115 -431 100 2400 131 -366 100 2450 135 -359 100 2500 124 -387 100 2550 296 -456 100 2600 136 -423 100 2650 142 -451 100 2700 157 -483 100 2750 141 -397 100 2800 142 -464 100 2850 158 -412 100 2900 148 -490 100 2950 311 -440 100 3000 154 -482 100 3050 149 -465 100 3100 299 -483 100 3150 168 -510 100 3200 165 -582 100 3250 175 -594 100 3300 330 -560 100 3350 179 -553 100 3400 185 -584 100 3450 340 -647 100 3500 203 -539 100 3550 187 -556 100 3600 197 -576 100 3650 195 -565 100 3700 198 -639 100 3750 219 -600 100 3800 201 -570 100 3850 203 -624 100 3900 371 -696 100 3950 223 -648 100 4000 228 -348 100 2000 92 -333 100 2050 205 -285 100 2100 90 -338 100 2150 93 -416 100 2200 102 -391 100 2250 103 -379 100 2300 106 -364 100 2350 249 -348 100 2400 102 -409 100 2450 140 -401 100 2500 112 -444 100 2550 113 -467 100 2600 288 -456 100 2650 121 -428 100 2700 147 -452 100 2750 134 -477 100 2800 295 -512 100 2850 137 -510 100 2900 133 -466 100 2950 136 -545 100 3000 286 -547 100 3050 173 -517 100 3100 144 -562 100 3150 148 -574 100 3200 298 -506 100 3250 172 -529 100 3300 147 -532 100 3350 156 -567 100 3400 328 -636 100 3450 164 -571 100 3500 167 -612 100 3550 329 -612 100 3600 187 -625 100 3650 174 -619 100 3700 179 -638 100 3750 319 -595 100 3800 180 -646 100 3850 189 -690 100 3900 328 -692 100 3950 250 -691 100 4000 194 -263 100 2000 218 -263 100 2050 97 -270 100 2100 96 -269 100 2150 95 -294 100 2200 105 -289 100 2250 131 -310 100 2300 247 -327 100 2350 115 -318 100 2400 105 -310 100 2450 147 -323 100 2500 122 -353 100 2550 257 -332 100 2600 136 -352 100 2650 139 -390 100 2700 131 -418 100 2750 278 -409 100 2800 181 -394 100 2850 137 -398 100 2900 170 -416 100 2950 288 -406 100 3000 157 -407 100 3050 148 -435 100 3100 160 -467 100 3150 295 -432 100 3200 161 -462 100 3250 163 -496 100 3300 168 -465 100 3350 198 -484 100 3400 178 -493 100 3450 167 -491 100 3500 319 -463 100 3550 240 -464 100 3600 186 -498 100 3650 320 -514 100 3700 197 -495 100 3750 200 -529 100 3800 398 -571 100 3850 201 -613 100 3900 236 -567 100 3950 344 -528 100 4000 202 -409 100 2000 97 -415 100 2050 110 -484 100 2100 235 -402 100 2150 99 -437 100 2200 111 -469 100 2250 
114 -482 100 2300 112 -491 100 2350 268 -497 100 2400 124 -493 100 2450 125 -488 100 2500 124 -505 100 2550 133 -511 100 2600 271 -542 100 2650 136 -554 100 2700 146 -523 100 2750 132 -568 100 2800 297 -612 100 2850 155 -577 100 2900 151 -597 100 2950 146 -594 100 3000 296 -610 100 3050 162 -678 100 3100 160 -710 100 3150 173 -716 100 3200 325 -708 100 3250 177 -707 100 3300 172 -685 100 3350 318 -707 100 3400 187 -654 100 3450 188 -709 100 3500 183 -693 100 3550 177 -767 100 3600 193 -720 100 3650 191 -739 100 3700 342 -784 100 3750 209 -767 100 3800 194 -802 100 3850 345 -876 100 3900 231 -869 100 3950 226 -861 100 4000 199 -338 100 2000 98 -294 100 2050 107 -334 100 2100 107 -336 100 2150 234 -341 100 2200 105 -371 100 2250 122 -319 100 2300 114 -362 100 2350 118 -398 100 2400 271 -396 100 2450 130 -372 100 2500 120 -384 100 2550 126 -417 100 2600 129 -414 100 2650 274 -371 100 2700 125 -387 100 2750 136 -408 100 2800 135 -396 100 2850 285 -423 100 2900 148 -475 100 2950 157 -469 100 3000 156 -470 100 3050 295 -455 100 3100 168 -492 100 3150 165 -514 100 3200 168 -522 100 3250 303 -464 100 3300 166 -534 100 3350 186 -495 100 3400 179 -555 100 3450 179 -509 100 3500 184 -522 100 3550 193 -578 100 3600 330 -525 100 3650 183 -556 100 3700 194 -560 100 3750 348 -580 100 3800 213 -593 100 3850 204 -614 100 3900 361 -585 100 3950 212 -614 100 4000 208 -261 100 2000 193 -279 100 2050 87 -255 100 2100 87 -295 100 2150 91 -292 100 2200 94 -273 100 2250 105 -265 100 2300 231 -335 100 2350 106 -310 100 2400 101 -312 100 2450 105 -363 100 2500 121 -317 100 2550 108 -312 100 2600 245 -329 100 2650 120 -344 100 2700 119 -328 100 2750 122 -328 100 2800 124 -364 100 2850 255 -369 100 2900 130 -369 100 2950 121 -384 100 3000 140 -363 100 3050 260 -387 100 3100 140 -385 100 3150 140 -393 100 3200 151 -412 100 3250 273 -377 100 3300 141 -418 100 3350 147 -411 100 3400 149 -403 100 3450 292 -442 100 3500 156 -427 100 3550 151 -413 100 3600 160 -448 100 3650 289 -425 100 3700 165 -472 100 3750 165 -485 100 3800 179 -459 100 3850 301 -453 100 3900 171 -469 100 3950 188 -493 100 4000 297 -345 100 2000 122 -341 100 2050 120 -363 100 2100 126 -287 100 2150 106 -364 100 2200 278 -345 100 2250 128 -393 100 2300 136 -346 100 2350 131 -317 100 2400 125 -425 100 2450 311 -388 100 2500 146 -383 100 2550 139 -358 100 2600 143 -417 100 2650 292 -409 100 2700 148 -413 100 2750 162 -459 100 2800 171 -460 100 2850 151 -446 100 2900 163 -424 100 2950 167 -467 100 3000 314 -468 100 3050 179 -492 100 3100 180 -489 100 3150 186 -530 100 3200 345 -514 100 3250 205 -497 100 3300 195 -554 100 3350 345 -554 100 3400 205 -525 100 3450 202 -567 100 3500 363 -554 100 3550 214 -505 100 3600 200 -630 100 3650 369 -581 100 3700 223 -563 100 3750 349 -630 100 3800 230 -608 100 3850 232 -640 100 3900 379 -584 100 3950 237 -629 100 4000 253 -351 100 2000 240 -345 100 2050 114 -390 100 2100 129 -406 100 2150 145 -394 100 2200 273 -428 100 2250 145 -442 100 2300 133 -469 100 2350 144 -423 100 2400 127 -445 100 2450 307 -444 100 2500 146 -491 100 2550 154 -480 100 2600 165 -467 100 2650 297 -529 100 2700 169 -558 100 2750 173 -584 100 2800 325 -511 100 2850 163 -544 100 2900 182 -529 100 2950 179 -520 100 3000 323 -529 100 3050 172 -569 100 3100 207 -607 100 3150 344 -633 100 3200 209 -559 100 3250 186 -553 100 3300 335 -668 100 3350 219 -599 100 3400 196 -659 100 3450 365 -649 100 3500 222 -632 100 3550 220 -647 100 3600 369 -688 100 3650 225 -637 100 3700 218 -633 100 3750 370 -645 100 3800 233 -690 100 3850 374 -704 100 3900 234 -703 100 
3950 239 -711 100 4000 375 -312 100 2000 101 -290 100 2050 89 -323 100 2100 102 -331 100 2150 105 -311 100 2200 239 -339 100 2250 110 -384 100 2300 111 -382 100 2350 118 -362 100 2400 122 -384 100 2450 257 -417 100 2500 125 -406 100 2550 118 -380 100 2600 128 -415 100 2650 120 -428 100 2700 270 -444 100 2750 144 -418 100 2800 136 -410 100 2850 130 -446 100 2900 149 -455 100 2950 152 -443 100 3000 145 -467 100 3050 166 -506 100 3100 295 -446 100 3150 144 -445 100 3200 179 -502 100 3250 180 -492 100 3300 293 -575 100 3350 176 -495 100 3400 154 -541 100 3450 168 -521 100 3500 189 -535 100 3550 176 -552 100 3600 178 -510 100 3650 294 -567 100 3700 218 -601 100 3750 199 -636 100 3800 328 -592 100 3850 187 -573 100 3900 204 -632 100 3950 333 -613 100 4000 192 -209 100 2000 85 -220 100 2050 78 -211 100 2100 83 -243 100 2150 216 -223 100 2200 90 -227 100 2250 91 -258 100 2300 98 -252 100 2350 96 -262 100 2400 101 -244 100 2450 223 -259 100 2500 101 -277 100 2550 105 -241 100 2600 112 -285 100 2650 116 -264 100 2700 108 -297 100 2750 254 -294 100 2800 114 -260 100 2850 106 -278 100 2900 117 -298 100 2950 120 -308 100 3000 248 -307 100 3050 120 -287 100 3100 125 -317 100 3150 134 -311 100 3200 253 -321 100 3250 135 -348 100 3300 135 -338 100 3350 138 -342 100 3400 143 -370 100 3450 152 -372 100 3500 148 -403 100 3550 155 -386 100 3600 276 -364 100 3650 155 -386 100 3700 160 -413 100 3750 161 -383 100 3800 285 -409 100 3850 174 -399 100 3900 164 -418 100 3950 174 -425 100 4000 303 -324 100 2000 121 -312 100 2050 131 -296 100 2100 118 -310 100 2150 268 -347 100 2200 146 -336 100 2250 134 -325 100 2300 129 -363 100 2350 139 -386 100 2400 281 -368 100 2450 156 -344 100 2500 152 -379 100 2550 165 -386 100 2600 300 -415 100 2650 175 -370 100 2700 160 -405 100 2750 295 -415 100 2800 184 -451 100 2850 185 -413 100 2900 165 -447 100 2950 334 -430 100 3000 183 -471 100 3050 195 -445 100 3100 339 -477 100 3150 204 -483 100 3200 199 -501 100 3250 344 -473 100 3300 195 -459 100 3350 224 -513 100 3400 348 -528 100 3450 228 -549 100 3500 223 -553 100 3550 373 -554 100 3600 224 -513 100 3650 232 -578 100 3700 361 -590 100 3750 244 -561 100 3800 380 -598 100 3850 255 -567 100 3900 260 -575 100 3950 395 -601 100 4000 263 -167 100 2000 73 -201 100 2050 186 -178 100 2100 78 -182 100 2150 73 -220 100 2200 87 -199 100 2250 88 -202 100 2300 90 -193 100 2350 89 -219 100 2400 219 -234 100 2450 93 -234 100 2500 101 -223 100 2550 96 -213 100 2600 96 -208 100 2650 101 -209 100 2700 225 -243 100 2750 110 -298 100 2800 118 -261 100 2850 114 -258 100 2900 113 -279 100 2950 257 -254 100 3000 114 -256 100 3050 120 -264 100 3100 120 -284 100 3150 133 -289 100 3200 249 -256 100 3250 125 -280 100 3300 128 -308 100 3350 137 -312 100 3400 256 -302 100 3450 135 -311 100 3500 146 -323 100 3550 144 -331 100 3600 286 -324 100 3650 151 -336 100 3700 158 -332 100 3750 146 -362 100 3800 279 -355 100 3850 163 -360 100 3900 159 -379 100 3950 170 -355 100 4000 293 -254 100 2000 77 -304 100 2050 92 -284 100 2100 93 -278 100 2150 83 -305 100 2200 89 -282 100 2250 208 -350 100 2300 97 -325 100 2350 96 -312 100 2400 107 -360 100 2450 108 -349 100 2500 113 -359 100 2550 234 -314 100 2600 107 -343 100 2650 119 -354 100 2700 133 -395 100 2750 126 -377 100 2800 246 -390 100 2850 129 -405 100 2900 131 -381 100 2950 130 -397 100 3000 130 -429 100 3050 264 -423 100 3100 136 -438 100 3150 139 -436 100 3200 141 -473 100 3250 266 -470 100 3300 147 -469 100 3350 139 -456 100 3400 147 -459 100 3450 272 -494 100 3500 162 -489 100 3550 163 -505 100 3600 162 -450 
[... several thousand deleted lines of raw numeric data omitted from this hunk: each removed row holds four whitespace-separated integer columns, with the second column fixed at 100 and the third sweeping from 2000 to 4000 in steps of 50 before repeating; the full figures carry no further structure and are not reproduced here ...]
-282 100 2350 126 -305 100 2400 119 -315 100 2450 126 -315 100 2500 141 -290 100 2550 262 -314 100 2600 149 -349 100 2650 153 -308 100 2700 134 -336 100 2750 286 -303 100 2800 147 -334 100 2850 155 -334 100 2900 143 -366 100 2950 269 -372 100 3000 158 -421 100 3050 186 -375 100 3100 173 -405 100 3150 291 -417 100 3200 174 -398 100 3250 178 -385 100 3300 287 -417 100 3350 174 -395 100 3400 182 -429 100 3450 200 -408 100 3500 319 -439 100 3550 200 -444 100 3600 209 -432 100 3650 321 -461 100 3700 219 -470 100 3750 211 -469 100 3800 326 -494 100 3850 245 -451 100 3900 223 -476 100 3950 215 -472 100 4000 226 -273 100 2000 99 -310 100 2050 209 -284 100 2100 98 -279 100 2150 96 -311 100 2200 111 -267 100 2250 99 -316 100 2300 115 -323 100 2350 270 -353 100 2400 123 -330 100 2450 127 -344 100 2500 128 -349 100 2550 131 -350 100 2600 128 -353 100 2650 139 -350 100 2700 130 -357 100 2750 142 -368 100 2800 281 -406 100 2850 142 -370 100 2900 144 -371 100 2950 141 -404 100 3000 278 -450 100 3050 168 -390 100 3100 157 -423 100 3150 153 -433 100 3200 315 -428 100 3250 163 -484 100 3300 177 -410 100 3350 174 -436 100 3400 182 -469 100 3450 191 -453 100 3500 181 -472 100 3550 324 -452 100 3600 179 -497 100 3650 193 -510 100 3700 313 -484 100 3750 188 -538 100 3800 207 -518 100 3850 338 -524 100 3900 209 -510 100 3950 206 -517 100 4000 331 -272 100 2000 105 -308 100 2050 106 -334 100 2100 118 -324 100 2150 113 -342 100 2200 259 -341 100 2250 118 -305 100 2300 114 -346 100 2350 126 -353 100 2400 126 -344 100 2450 256 -336 100 2500 137 -372 100 2550 137 -380 100 2600 139 -368 100 2650 141 -398 100 2700 143 -389 100 2750 155 -393 100 2800 152 -421 100 2850 161 -396 100 2900 150 -457 100 2950 170 -453 100 3000 149 -439 100 3050 292 -414 100 3100 172 -447 100 3150 166 -466 100 3200 189 -470 100 3250 319 -454 100 3300 178 -432 100 3350 171 -509 100 3400 319 -456 100 3450 191 -537 100 3500 205 -454 100 3550 329 -497 100 3600 209 -527 100 3650 205 -519 100 3700 344 -550 100 3750 215 -562 100 3800 217 -560 100 3850 344 -551 100 3900 220 -594 100 3950 225 -546 100 4000 344 -315 100 2000 107 -323 100 2050 103 -343 100 2100 108 -333 100 2150 120 -317 100 2200 245 -351 100 2250 131 -334 100 2300 113 -354 100 2350 124 -359 100 2400 120 -355 100 2450 260 -379 100 2500 130 -369 100 2550 132 -384 100 2600 133 -415 100 2650 149 -421 100 2700 270 -429 100 2750 158 -458 100 2800 156 -431 100 2850 154 -433 100 2900 281 -482 100 2950 162 -484 100 3000 156 -440 100 3050 159 -476 100 3100 168 -472 100 3150 175 -519 100 3200 172 -515 100 3250 300 -497 100 3300 189 -497 100 3350 178 -516 100 3400 313 -531 100 3450 189 -504 100 3500 184 -584 100 3550 203 -535 100 3600 314 -583 100 3650 209 -574 100 3700 211 -563 100 3750 329 -607 100 3800 206 -594 100 3850 220 -635 100 3900 353 -606 100 3950 218 -620 100 4000 347 -445 100 2000 114 -427 100 2050 120 -404 100 2100 122 -391 100 2150 129 -402 100 2200 258 -422 100 2250 135 -479 100 2300 140 -463 100 2350 141 -501 100 2400 276 -582 100 2450 164 -434 100 2500 153 -530 100 2550 153 -514 100 2600 288 -559 100 2650 175 -580 100 2700 171 -572 100 2750 177 -577 100 2800 301 -539 100 2850 171 -614 100 2900 202 -556 100 2950 300 -604 100 3000 184 -590 100 3050 203 -588 100 3100 197 -636 100 3150 323 -653 100 3200 221 -656 100 3250 187 -697 100 3300 339 -670 100 3350 203 -668 100 3400 217 -668 100 3450 333 -759 100 3500 225 -740 100 3550 216 -707 100 3600 235 -783 100 3650 234 -762 100 3700 350 -737 100 3750 248 -732 100 3800 253 -821 100 3850 399 -753 100 3900 259 -816 100 3950 381 -802 100 
4000 281 -348 100 2000 117 -353 100 2050 121 -400 100 2100 253 -414 100 2150 124 -394 100 2200 121 -424 100 2250 128 -448 100 2300 140 -392 100 2350 145 -395 100 2400 138 -400 100 2450 129 -453 100 2500 156 -495 100 2550 268 -436 100 2600 158 -435 100 2650 144 -508 100 2700 166 -486 100 2750 298 -496 100 2800 165 -496 100 2850 158 -522 100 2900 166 -484 100 2950 302 -511 100 3000 164 -524 100 3050 177 -586 100 3100 385 -566 100 3150 193 -552 100 3200 1870 -610 100 3250 193 -534 100 3300 191 -600 100 3350 346 -608 100 3400 211 -640 100 3450 204 -633 100 3500 374 -581 100 3550 209 -654 100 3600 215 -580 100 3650 352 -665 100 3700 254 -692 100 3750 218 -688 100 3800 381 -645 100 3850 236 -694 100 3900 231 -643 100 3950 385 -650 100 4000 235 -340 100 2000 85 -339 100 2050 106 -338 100 2100 221 -350 100 2150 112 -360 100 2200 116 -380 100 2250 106 -388 100 2300 116 -321 100 2350 257 -340 100 2400 105 -399 100 2450 115 -405 100 2500 115 -410 100 2550 120 -455 100 2600 273 -410 100 2650 130 -471 100 2700 138 -540 100 2750 133 -424 100 2800 145 -458 100 2850 152 -487 100 2900 142 -441 100 2950 151 -517 100 3000 154 -542 100 3050 165 -574 100 3100 177 -483 100 3150 153 -531 100 3200 295 -577 100 3250 174 -529 100 3300 175 -598 100 3350 202 -593 100 3400 318 -546 100 3450 169 -633 100 3500 184 -574 100 3550 301 -572 100 3600 184 -612 100 3650 177 -602 100 3700 337 -675 100 3750 210 -624 100 3800 197 -592 100 3850 195 -662 100 3900 187 -630 100 3950 198 -668 100 4000 219 -284 100 2000 96 -292 100 2050 100 -258 100 2100 97 -291 100 2150 105 -291 100 2200 105 -304 100 2250 246 -313 100 2300 117 -288 100 2350 122 -304 100 2400 117 -323 100 2450 116 -316 100 2500 301 -325 100 2550 124 -307 100 2600 125 -343 100 2650 129 -328 100 2700 135 -350 100 2750 290 -363 100 2800 147 -381 100 2850 154 -358 100 2900 144 -436 100 2950 315 -381 100 3000 166 -413 100 3050 159 -410 100 3100 169 -412 100 3150 317 -399 100 3200 153 -401 100 3250 162 -444 100 3300 171 -438 100 3350 162 -434 100 3400 172 -461 100 3450 182 -391 100 3500 302 -426 100 3550 192 -465 100 3600 193 -472 100 3650 336 -474 100 3700 197 -499 100 3750 198 -488 100 3800 188 -514 100 3850 202 -505 100 3900 207 -532 100 3950 207 -532 100 4000 341 -261 100 2000 80 -282 100 2050 80 -318 100 2100 89 -297 100 2150 89 -289 100 2200 213 -370 100 2250 95 -333 100 2300 95 -328 100 2350 100 -325 100 2400 101 -401 100 2450 114 -330 100 2500 257 -357 100 2550 104 -415 100 2600 117 -337 100 2650 113 -362 100 2700 114 -342 100 2750 251 -405 100 2800 125 -408 100 2850 119 -419 100 2900 123 -419 100 2950 128 -444 100 3000 267 -392 100 3050 130 -368 100 3100 134 -435 100 3150 136 -445 100 3200 140 -441 100 3250 276 -437 100 3300 142 -496 100 3350 152 -450 100 3400 143 -453 100 3450 270 -491 100 3500 153 -471 100 3550 150 -476 100 3600 157 -515 100 3650 293 -520 100 3700 157 -585 100 3750 170 -543 100 3800 298 -528 100 3850 171 -547 100 3900 175 -598 100 3950 177 -628 100 4000 314 -261 100 2000 97 -237 100 2050 103 -277 100 2100 102 -317 100 2150 106 -326 100 2200 243 -307 100 2250 104 -355 100 2300 111 -337 100 2350 121 -296 100 2400 111 -351 100 2450 277 -361 100 2500 123 -343 100 2550 122 -340 100 2600 123 -328 100 2650 124 -382 100 2700 280 -376 100 2750 131 -377 100 2800 141 -383 100 2850 140 -424 100 2900 286 -367 100 2950 141 -455 100 3000 154 -409 100 3050 150 -434 100 3100 286 -427 100 3150 168 -431 100 3200 153 -430 100 3250 162 -462 100 3300 309 -492 100 3350 167 -477 100 3400 167 -420 100 3450 168 -462 100 3500 322 -452 100 3550 176 -483 100 3600 182 -510 100 
3650 308 -517 100 3700 196 -501 100 3750 198 -515 100 3800 344 -537 100 3850 201 -563 100 3900 200 -550 100 3950 338 -519 100 4000 197 -273 100 2000 109 -280 100 2050 114 -299 100 2100 113 -282 100 2150 240 -316 100 2200 124 -296 100 2250 120 -293 100 2300 117 -294 100 2350 120 -353 100 2400 272 -312 100 2450 133 -340 100 2500 140 -359 100 2550 149 -343 100 2600 271 -375 100 2650 154 -339 100 2700 141 -372 100 2750 149 -371 100 2800 292 -395 100 2850 177 -383 100 2900 158 -386 100 2950 158 -374 100 3000 307 -396 100 3050 168 -412 100 3100 166 -421 100 3150 182 -443 100 3200 337 -422 100 3250 181 -428 100 3300 181 -430 100 3350 345 -440 100 3400 213 -479 100 3450 199 -491 100 3500 339 -471 100 3550 192 -496 100 3600 204 -434 100 3650 332 -524 100 3700 214 -521 100 3750 220 -535 100 3800 359 -487 100 3850 217 -544 100 3900 232 -542 100 3950 361 -512 100 4000 222 -246 100 2000 102 -229 100 2050 103 -237 100 2100 226 -262 100 2150 110 -234 100 2200 102 -257 100 2250 112 -261 100 2300 117 -233 100 2350 232 -284 100 2400 126 -308 100 2450 116 -269 100 2500 138 -271 100 2550 144 -295 100 2600 252 -299 100 2650 145 -293 100 2700 160 -323 100 2750 134 -307 100 2800 272 -303 100 2850 139 -303 100 2900 135 -333 100 2950 161 -324 100 3000 277 -334 100 3050 155 -347 100 3100 151 -316 100 3150 153 -345 100 3200 287 -340 100 3250 178 -371 100 3300 178 -380 100 3350 171 -376 100 3400 303 -408 100 3450 184 -375 100 3500 192 -410 100 3550 320 -416 100 3600 181 -449 100 3650 185 -412 100 3700 331 -426 100 3750 187 -384 100 3800 181 -451 100 3850 205 -392 100 3900 319 -414 100 3950 206 -451 100 4000 202 -354 100 2000 219 -314 100 2050 115 -342 100 2100 132 -340 100 2150 129 -335 100 2200 130 -353 100 2250 118 -350 100 2300 125 -371 100 2350 146 -377 100 2400 134 -390 100 2450 276 -396 100 2500 139 -401 100 2550 147 -420 100 2600 153 -419 100 2650 301 -414 100 2700 157 -434 100 2750 152 -414 100 2800 167 -490 100 2850 302 -459 100 2900 166 -432 100 2950 182 -451 100 3000 177 -468 100 3050 182 -501 100 3100 169 -474 100 3150 191 -512 100 3200 334 -507 100 3250 190 -559 100 3300 236 -501 100 3350 330 -535 100 3400 207 -525 100 3450 194 -555 100 3500 353 -591 100 3550 235 -553 100 3600 231 -525 100 3650 359 -558 100 3700 212 -598 100 3750 219 -621 100 3800 378 -591 100 3850 232 -593 100 3900 361 -652 100 3950 246 -599 100 4000 232 -210 100 2000 231 -216 100 2050 116 -218 100 2100 110 -256 100 2150 122 -264 100 2200 135 -262 100 2250 270 -237 100 2300 129 -268 100 2350 132 -280 100 2400 137 -270 100 2450 134 -298 100 2500 300 -328 100 2550 162 -335 100 2600 164 -264 100 2650 126 -323 100 2700 303 -332 100 2750 186 -322 100 2800 174 -358 100 2850 187 -359 100 2900 336 -376 100 2950 186 -388 100 3000 191 -357 100 3050 310 -360 100 3100 183 -386 100 3150 198 -393 100 3200 344 -372 100 3250 190 -403 100 3300 210 -390 100 3350 352 -397 100 3400 203 -396 100 3450 206 -425 100 3500 365 -413 100 3550 214 -440 100 3600 228 -434 100 3650 362 -418 100 3700 228 -434 100 3750 239 -474 100 3800 397 -466 100 3850 245 -448 100 3900 372 -454 100 3950 240 -454 100 4000 239 -285 100 2000 106 -254 100 2050 217 -300 100 2100 105 -287 100 2150 106 -285 100 2200 114 -323 100 2250 112 -312 100 2300 257 -312 100 2350 115 -313 100 2400 113 -329 100 2450 121 -313 100 2500 128 -304 100 2550 284 -338 100 2600 136 -332 100 2650 134 -380 100 2700 152 -372 100 2750 287 -373 100 2800 168 -346 100 2850 147 -404 100 2900 154 -392 100 2950 279 -380 100 3000 158 -352 100 3050 152 -400 100 3100 164 -380 100 3150 302 -424 100 3200 168 -433 100 3250 177 
-420 100 3300 163 -444 100 3350 328 -428 100 3400 172 -447 100 3450 208 -471 100 3500 310 -470 100 3550 184 -448 100 3600 184 -493 100 3650 345 -493 100 3700 201 -488 100 3750 193 -513 100 3800 347 -500 100 3850 199 -511 100 3900 209 -475 100 3950 342 -488 100 4000 201 -271 100 2000 83 -275 100 2050 95 -301 100 2100 94 -311 100 2150 93 -313 100 2200 217 -279 100 2250 90 -320 100 2300 100 -330 100 2350 108 -303 100 2400 103 -373 100 2450 243 -342 100 2500 111 -369 100 2550 118 -378 100 2600 116 -355 100 2650 123 -368 100 2700 253 -369 100 2750 121 -391 100 2800 126 -404 100 2850 127 -406 100 2900 137 -436 100 2950 267 -446 100 3000 140 -420 100 3050 137 -375 100 3100 146 -458 100 3150 283 -452 100 3200 164 -422 100 3250 151 -477 100 3300 149 -473 100 3350 289 -496 100 3400 175 -494 100 3450 183 -469 100 3500 163 -527 100 3550 303 -515 100 3600 184 -533 100 3650 171 -520 100 3700 304 -545 100 3750 190 -484 100 3800 177 -502 100 3850 181 -515 100 3900 315 -593 100 3950 196 -595 100 4000 191 -283 100 2000 195 -317 100 2050 100 -336 100 2100 105 -351 100 2150 116 -337 100 2200 118 -333 100 2250 118 -355 100 2300 258 -338 100 2350 115 -366 100 2400 115 -399 100 2450 125 -401 100 2500 125 -426 100 2550 276 -370 100 2600 134 -407 100 2650 137 -369 100 2700 136 -409 100 2750 271 -422 100 2800 137 -428 100 2850 143 -430 100 2900 143 -455 100 2950 303 -505 100 3000 165 -437 100 3050 158 -477 100 3100 162 -503 100 3150 292 -495 100 3200 159 -448 100 3250 166 -467 100 3300 171 -448 100 3350 305 -561 100 3400 183 -548 100 3450 190 -521 100 3500 315 -571 100 3550 194 -535 100 3600 191 -559 100 3650 201 -539 100 3700 192 -569 100 3750 196 -588 100 3800 209 -628 100 3850 341 -606 100 3900 213 -596 100 3950 211 -621 100 4000 208 -234 100 2000 80 -242 100 2050 85 -254 100 2100 85 -257 100 2150 87 -258 100 2200 210 -273 100 2250 87 -284 100 2300 101 -295 100 2350 101 -289 100 2400 100 -285 100 2450 100 -278 100 2500 228 -339 100 2550 107 -362 100 2600 109 -310 100 2650 107 -314 100 2700 106 -329 100 2750 114 -338 100 2800 117 -383 100 2850 115 -349 100 2900 123 -396 100 2950 128 -358 100 3000 258 -360 100 3050 135 -398 100 3100 135 -380 100 3150 129 -399 100 3200 140 -389 100 3250 265 -407 100 3300 143 -383 100 3350 132 -388 100 3400 142 -415 100 3450 267 -414 100 3500 151 -433 100 3550 151 -441 100 3600 152 -460 100 3650 279 -417 100 3700 152 -448 100 3750 163 -479 100 3800 172 -471 100 3850 297 -504 100 3900 182 -536 100 3950 185 -489 100 4000 298 -340 100 2000 108 -299 100 2050 109 -322 100 2100 108 -306 100 2150 103 -367 100 2200 276 -383 100 2250 125 -368 100 2300 129 -427 100 2350 133 -402 100 2400 140 -419 100 2450 271 -426 100 2500 137 -424 100 2550 128 -395 100 2600 146 -384 100 2650 278 -422 100 2700 150 -487 100 2750 155 -472 100 2800 192 -472 100 2850 281 -416 100 2900 171 -524 100 2950 167 -444 100 3000 179 -456 100 3050 322 -500 100 3100 190 -518 100 3150 174 -530 100 3200 308 -496 100 3250 175 -518 100 3300 202 -524 100 3350 319 -524 100 3400 195 -536 100 3450 198 -576 100 3500 201 -586 100 3550 355 -582 100 3600 246 -598 100 3650 323 -571 100 3700 201 -613 100 3750 226 -621 100 3800 350 -560 100 3850 225 -627 100 3900 256 -638 100 3950 397 -603 100 4000 233 -183 100 2000 87 -206 100 2050 94 -197 100 2100 198 -209 100 2150 102 -194 100 2200 101 -214 100 2250 106 -262 100 2300 106 -200 100 2350 107 -207 100 2400 243 -220 100 2450 117 -243 100 2500 111 -252 100 2550 125 -293 100 2600 132 -238 100 2650 242 -248 100 2700 124 -295 100 2750 142 -239 100 2800 125 -257 100 2850 255 -262 100 2900 141 
-262 100 2950 145 -251 100 3000 139 -290 100 3050 137 -271 100 3100 141 -287 100 3150 153 -302 100 3200 157 -308 100 3250 293 -316 100 3300 159 -253 100 3350 157 -302 100 3400 165 -306 100 3450 290 -312 100 3500 172 -360 100 3550 182 -343 100 3600 166 -348 100 3650 303 -331 100 3700 176 -371 100 3750 188 -328 100 3800 301 -301 100 3850 186 -309 100 3900 193 -353 100 3950 185 -405 100 4000 205 -219 100 2000 97 -192 100 2050 88 -263 100 2100 107 -238 100 2150 100 -274 100 2200 104 -272 100 2250 101 -286 100 2300 104 -278 100 2350 109 -316 100 2400 114 -319 100 2450 257 -294 100 2500 104 -306 100 2550 112 -338 100 2600 134 -345 100 2650 127 -322 100 2700 265 -334 100 2750 128 -329 100 2800 126 -360 100 2850 140 -301 100 2900 259 -368 100 2950 141 -365 100 3000 170 -378 100 3050 152 -399 100 3100 280 -364 100 3150 149 -346 100 3200 149 -384 100 3250 157 -384 100 3300 283 -380 100 3350 169 -401 100 3400 156 -431 100 3450 186 -437 100 3500 311 -422 100 3550 178 -471 100 3600 185 -448 100 3650 317 -475 100 3700 177 -425 100 3750 181 -422 100 3800 172 -443 100 3850 348 -483 100 3900 191 -469 100 3950 202 -492 100 4000 301 -231 100 2000 103 -245 100 2050 112 -273 100 2100 120 -254 100 2150 110 -251 100 2200 116 -263 100 2250 127 -287 100 2300 122 -267 100 2350 123 -287 100 2400 127 -301 100 2450 133 -301 100 2500 136 -316 100 2550 148 -324 100 2600 143 -325 100 2650 264 -324 100 2700 144 -334 100 2750 148 -342 100 2800 150 -333 100 2850 271 -342 100 2900 167 -364 100 2950 161 -383 100 3000 176 -345 100 3050 300 -360 100 3100 165 -371 100 3150 180 -383 100 3200 308 -381 100 3250 176 -388 100 3300 188 -385 100 3350 180 -426 100 3400 325 -426 100 3450 197 -415 100 3500 200 -440 100 3550 331 -441 100 3600 202 -435 100 3650 211 -464 100 3700 351 -474 100 3750 216 -459 100 3800 234 -501 100 3850 348 -465 100 3900 217 -516 100 3950 240 -464 100 4000 343 -312 100 2000 119 -308 100 2050 107 -326 100 2100 110 -332 100 2150 229 -336 100 2200 125 -355 100 2250 128 -335 100 2300 129 -307 100 2350 113 -385 100 2400 246 -355 100 2450 149 -351 100 2500 121 -371 100 2550 133 -385 100 2600 142 -396 100 2650 271 -407 100 2700 156 -379 100 2750 139 -436 100 2800 155 -403 100 2850 265 -427 100 2900 179 -412 100 2950 140 -433 100 3000 298 -377 100 3050 141 -462 100 3100 190 -412 100 3150 175 -468 100 3200 303 -474 100 3250 159 -493 100 3300 186 -459 100 3350 183 -503 100 3400 198 -496 100 3450 203 -465 100 3500 170 -490 100 3550 305 -504 100 3600 205 -508 100 3650 215 -505 100 3700 311 -526 100 3750 221 -569 100 3800 204 -551 100 3850 345 -556 100 3900 205 -520 100 3950 234 -605 100 4000 335 -438 100 2000 123 -423 100 2050 125 -457 100 2100 128 -435 100 2150 249 -449 100 2200 131 -464 100 2250 127 -491 100 2300 141 -469 100 2350 137 -485 100 2400 126 -513 100 2450 148 -545 100 2500 161 -555 100 2550 151 -534 100 2600 291 -532 100 2650 164 -507 100 2700 156 -600 100 2750 163 -587 100 2800 167 -623 100 2850 183 -601 100 2900 181 -598 100 2950 309 -589 100 3000 181 -637 100 3050 205 -640 100 3100 333 -681 100 3150 196 -624 100 3200 193 -718 100 3250 344 -641 100 3300 199 -630 100 3350 194 -668 100 3400 208 -735 100 3450 208 -756 100 3500 217 -758 100 3550 364 -820 100 3600 227 -718 100 3650 221 -763 100 3700 365 -842 100 3750 248 -791 100 3800 254 -775 100 3850 371 -802 100 3900 235 -845 100 3950 387 -810 100 4000 265 diff --git a/core/rewriting/indexing/test/results/std-tgt-varied-match-1-40.txt b/core/rewriting/indexing/test/results/std-tgt-varied-match-1-40.txt deleted file mode 100644 index abbf6a33..00000000 --- 
a/core/rewriting/indexing/test/results/std-tgt-varied-match-1-40.txt +++ /dev/null @@ -1,8000 +0,0 @@ -13 100 50 46 -21 100 100 76 -25 100 150 168 -28 100 200 95 -32 100 250 120 -33 100 300 146 -40 100 350 177 -42 100 400 252 -38 100 450 240 -38 100 500 301 -40 100 550 317 -45 100 600 377 -37 100 650 351 -44 100 700 396 -45 100 750 420 -43 100 800 467 -41 100 850 496 -46 100 900 530 -52 100 950 533 -47 100 1000 575 -49 100 1050 591 -50 100 1100 627 -48 100 1150 642 -45 100 1200 710 -47 100 1250 729 -49 100 1300 752 -50 100 1350 775 -52 100 1400 854 -48 100 1450 813 -51 100 1500 883 -63 100 1550 853 -54 100 1600 979 -50 100 1650 957 -52 100 1700 1002 -55 100 1750 1035 -54 100 1800 1093 -54 100 1850 1132 -54 100 1900 1133 -55 100 1950 1177 -53 100 2000 1143 -8 100 50 23 -13 100 100 46 -23 100 150 72 -23 100 200 133 -28 100 250 123 -28 100 300 150 -29 100 350 161 -33 100 400 247 -37 100 450 218 -36 100 500 289 -39 100 550 293 -37 100 600 362 -38 100 650 363 -44 100 700 417 -38 100 750 393 -42 100 800 473 -37 100 850 485 -41 100 900 488 -44 100 950 531 -39 100 1000 566 -43 100 1050 613 -45 100 1100 629 -40 100 1150 636 -48 100 1200 695 -48 100 1250 731 -40 100 1300 735 -45 100 1350 785 -45 100 1400 808 -52 100 1450 865 -43 100 1500 863 -45 100 1550 881 -48 100 1600 965 -49 100 1650 983 -49 100 1700 1004 -46 100 1750 1050 -48 100 1800 1079 -43 100 1850 1093 -51 100 1900 1154 -50 100 1950 1190 -49 100 2000 1180 -9 100 50 19 -13 100 100 87 -17 100 150 74 -31 100 200 95 -25 100 250 129 -32 100 300 153 -32 100 350 217 -37 100 400 192 -40 100 450 229 -37 100 500 321 -34 100 550 276 -42 100 600 351 -42 100 650 338 -42 100 700 387 -44 100 750 403 -48 100 800 468 -44 100 850 518 -50 100 900 525 -46 100 950 490 -51 100 1000 587 -52 100 1050 584 -48 100 1100 637 -52 100 1150 645 -52 100 1200 650 -54 100 1250 737 -52 100 1300 717 -56 100 1350 761 -55 100 1400 778 -52 100 1450 848 -52 100 1500 870 -50 100 1550 879 -52 100 1600 931 -53 100 1650 969 -51 100 1700 994 -58 100 1750 1017 -53 100 1800 1042 -61 100 1850 1087 -61 100 1900 1132 -55 100 1950 1115 -58 100 2000 1163 -9 100 50 22 -10 100 100 47 -19 100 150 71 -23 100 200 99 -26 100 250 158 -31 100 300 152 -32 100 350 170 -33 100 400 240 -34 100 450 233 -30 100 500 238 -34 100 550 323 -34 100 600 290 -37 100 650 366 -37 100 700 374 -38 100 750 417 -43 100 800 452 -41 100 850 454 -41 100 900 499 -39 100 950 526 -38 100 1000 569 -42 100 1050 575 -41 100 1100 599 -40 100 1150 658 -45 100 1200 694 -41 100 1250 722 -40 100 1300 725 -43 100 1350 755 -44 100 1400 775 -44 100 1450 797 -39 100 1500 830 -42 100 1550 861 -47 100 1600 939 -43 100 1650 937 -46 100 1700 983 -45 100 1750 980 -47 100 1800 1066 -45 100 1850 1011 -47 100 1900 1095 -52 100 1950 1074 -46 100 2000 1111 -12 100 50 24 -16 100 100 56 -14 100 150 69 -20 100 200 131 -32 100 250 120 -25 100 300 142 -27 100 350 161 -27 100 400 265 -28 100 450 238 -35 100 500 257 -30 100 550 335 -40 100 600 331 -36 100 650 405 -38 100 700 411 -35 100 750 402 -37 100 800 488 -38 100 850 502 -42 100 900 528 -39 100 950 504 -43 100 1000 569 -38 100 1050 651 -43 100 1100 677 -40 100 1150 638 -46 100 1200 730 -43 100 1250 726 -39 100 1300 769 -45 100 1350 805 -42 100 1400 848 -46 100 1450 876 -50 100 1500 869 -49 100 1550 914 -51 100 1600 934 -51 100 1650 1011 -44 100 1700 1037 -50 100 1750 1038 -51 100 1800 1078 -52 100 1850 1121 -45 100 1900 1130 -54 100 1950 1183 -53 100 2000 1240 -14 100 50 22 -21 100 100 45 -22 100 150 68 -29 100 200 132 -28 100 250 116 -31 100 300 133 -30 100 350 184 -39 100 400 234 -38 100 450 240 
-38 100 500 249 -42 100 550 323 -40 100 600 284 -38 100 650 390 -42 100 700 419 -38 100 750 401 -43 100 800 488 -39 100 850 473 -47 100 900 462 -39 100 950 523 -41 100 1000 571 -41 100 1050 611 -47 100 1100 623 -45 100 1150 656 -41 100 1200 662 -50 100 1250 743 -43 100 1300 749 -47 100 1350 757 -47 100 1400 784 -47 100 1450 811 -45 100 1500 894 -51 100 1550 896 -49 100 1600 913 -48 100 1650 960 -50 100 1700 986 -49 100 1750 1042 -49 100 1800 1034 -53 100 1850 1134 -50 100 1900 1129 -52 100 1950 1141 -52 100 2000 1164 -7 100 50 28 -11 100 100 40 -16 100 150 70 -22 100 200 95 -25 100 250 170 -23 100 300 147 -31 100 350 165 -27 100 400 194 -26 100 450 265 -29 100 500 261 -32 100 550 316 -37 100 600 330 -31 100 650 372 -32 100 700 360 -34 100 750 417 -37 100 800 466 -38 100 850 426 -35 100 900 523 -37 100 950 548 -37 100 1000 584 -38 100 1050 583 -41 100 1100 667 -34 100 1150 681 -40 100 1200 671 -45 100 1250 733 -43 100 1300 755 -40 100 1350 799 -43 100 1400 807 -40 100 1450 810 -39 100 1500 845 -40 100 1550 901 -47 100 1600 892 -47 100 1650 983 -44 100 1700 967 -47 100 1750 1018 -43 100 1800 1032 -42 100 1850 1134 -45 100 1900 1096 -47 100 1950 1140 -45 100 2000 1162 -12 100 50 22 -20 100 100 49 -26 100 150 76 -31 100 200 102 -29 100 250 116 -22 100 300 183 -35 100 350 206 -38 100 400 198 -40 100 450 292 -38 100 500 282 -39 100 550 344 -42 100 600 300 -46 100 650 371 -41 100 700 359 -38 100 750 424 -43 100 800 473 -46 100 850 443 -45 100 900 536 -44 100 950 558 -44 100 1000 615 -46 100 1050 649 -43 100 1100 635 -50 100 1150 685 -53 100 1200 718 -45 100 1250 743 -49 100 1300 737 -50 100 1350 791 -50 100 1400 827 -49 100 1450 859 -50 100 1500 903 -45 100 1550 925 -49 100 1600 991 -51 100 1650 1018 -54 100 1700 1039 -52 100 1750 1029 -46 100 1800 1120 -55 100 1850 1109 -51 100 1900 1159 -47 100 1950 1183 -50 100 2000 1182 -12 100 50 61 -13 100 100 46 -19 100 150 68 -22 100 200 105 -26 100 250 116 -27 100 300 138 -29 100 350 218 -26 100 400 199 -35 100 450 216 -32 100 500 327 -34 100 550 316 -37 100 600 366 -36 100 650 377 -36 100 700 395 -39 100 750 464 -33 100 800 404 -40 100 850 522 -45 100 900 529 -41 100 950 550 -40 100 1000 581 -41 100 1050 588 -43 100 1100 633 -44 100 1150 641 -42 100 1200 692 -41 100 1250 710 -46 100 1300 796 -41 100 1350 783 -47 100 1400 800 -48 100 1450 895 -45 100 1500 877 -50 100 1550 923 -52 100 1600 962 -45 100 1650 972 -45 100 1700 1008 -55 100 1750 1064 -52 100 1800 1089 -50 100 1850 1098 -47 100 1900 1044 -51 100 1950 1159 -53 100 2000 1195 -8 100 50 22 -16 100 100 44 -13 100 150 65 -18 100 200 89 -28 100 250 118 -26 100 300 181 -22 100 350 156 -23 100 400 201 -28 100 450 258 -31 100 500 248 -30 100 550 267 -36 100 600 323 -39 100 650 307 -39 100 700 391 -41 100 750 395 -38 100 800 384 -38 100 850 485 -43 100 900 495 -50 100 950 555 -43 100 1000 521 -42 100 1050 556 -41 100 1100 606 -46 100 1150 639 -44 100 1200 666 -38 100 1250 680 -45 100 1300 697 -47 100 1350 750 -48 100 1400 768 -51 100 1450 826 -50 100 1500 848 -51 100 1550 872 -42 100 1600 856 -47 100 1650 925 -43 100 1700 947 -54 100 1750 1031 -48 100 1800 1017 -47 100 1850 1028 -53 100 1900 1084 -59 100 1950 1103 -52 100 2000 1142 -7 100 50 17 -12 100 100 42 -19 100 150 74 -26 100 200 93 -30 100 250 123 -26 100 300 135 -37 100 350 194 -32 100 400 189 -39 100 450 229 -40 100 500 295 -37 100 550 265 -37 100 600 341 -36 100 650 344 -40 100 700 368 -44 100 750 426 -47 100 800 420 -45 100 850 455 -48 100 900 493 -47 100 950 510 -41 100 1000 535 -46 100 1050 558 -47 100 1100 620 -44 100 1150 622 -44 100 1200 
625 -43 100 1250 657 -50 100 1300 705 -50 100 1350 752 -47 100 1400 781 -45 100 1450 819 -51 100 1500 857 -50 100 1550 856 -48 100 1600 858 -53 100 1650 901 -55 100 1700 922 -53 100 1750 993 -50 100 1800 986 -47 100 1850 1034 -47 100 1900 1030 -51 100 1950 1076 -55 100 2000 1096 -9 100 50 19 -12 100 100 49 -18 100 150 67 -24 100 200 102 -32 100 250 162 -27 100 300 147 -24 100 350 169 -34 100 400 213 -34 100 450 252 -32 100 500 258 -29 100 550 332 -41 100 600 304 -40 100 650 374 -36 100 700 351 -39 100 750 419 -43 100 800 468 -42 100 850 461 -40 100 900 496 -41 100 950 528 -43 100 1000 583 -46 100 1050 614 -41 100 1100 674 -41 100 1150 658 -45 100 1200 691 -47 100 1250 697 -42 100 1300 739 -49 100 1350 770 -46 100 1400 778 -49 100 1450 830 -49 100 1500 889 -44 100 1550 888 -44 100 1600 876 -41 100 1650 982 -50 100 1700 984 -51 100 1750 1042 -47 100 1800 1047 -50 100 1850 1099 -53 100 1900 1137 -47 100 1950 1112 -49 100 2000 1168 -8 100 50 33 -15 100 100 48 -21 100 150 73 -27 100 200 154 -26 100 250 124 -28 100 300 143 -33 100 350 181 -35 100 400 234 -33 100 450 228 -40 100 500 292 -37 100 550 280 -35 100 600 308 -37 100 650 360 -35 100 700 405 -42 100 750 423 -40 100 800 468 -38 100 850 495 -40 100 900 533 -43 100 950 523 -46 100 1000 552 -44 100 1050 596 -40 100 1100 654 -46 100 1150 682 -43 100 1200 678 -41 100 1250 713 -44 100 1300 744 -43 100 1350 748 -47 100 1400 812 -51 100 1450 858 -45 100 1500 864 -47 100 1550 926 -42 100 1600 946 -47 100 1650 1027 -48 100 1700 1032 -56 100 1750 1014 -49 100 1800 1116 -49 100 1850 1158 -46 100 1900 1109 -51 100 1950 1160 -46 100 2000 1197 -4 100 50 17 -10 100 100 39 -18 100 150 67 -23 100 200 98 -22 100 250 121 -23 100 300 147 -29 100 350 220 -29 100 400 197 -36 100 450 224 -32 100 500 294 -34 100 550 269 -36 100 600 348 -35 100 650 325 -36 100 700 416 -33 100 750 444 -32 100 800 400 -41 100 850 478 -36 100 900 489 -43 100 950 547 -40 100 1000 533 -41 100 1050 585 -42 100 1100 622 -36 100 1150 633 -43 100 1200 683 -37 100 1250 698 -43 100 1300 722 -43 100 1350 782 -40 100 1400 815 -42 100 1450 828 -42 100 1500 830 -48 100 1550 926 -49 100 1600 960 -47 100 1650 954 -46 100 1700 966 -48 100 1750 997 -43 100 1800 1011 -48 100 1850 1122 -47 100 1900 1101 -48 100 1950 1139 -45 100 2000 1168 -10 100 50 21 -13 100 100 50 -22 100 150 78 -18 100 200 84 -31 100 250 133 -28 100 300 147 -31 100 350 221 -37 100 400 229 -29 100 450 254 -38 100 500 249 -45 100 550 295 -38 100 600 347 -43 100 650 335 -39 100 700 402 -46 100 750 442 -42 100 800 437 -43 100 850 481 -40 100 900 550 -42 100 950 543 -45 100 1000 563 -44 100 1050 561 -45 100 1100 663 -40 100 1150 637 -45 100 1200 733 -45 100 1250 746 -49 100 1300 800 -46 100 1350 808 -50 100 1400 840 -50 100 1450 887 -49 100 1500 934 -57 100 1550 874 -50 100 1600 949 -46 100 1650 966 -50 100 1700 974 -50 100 1750 1052 -56 100 1800 1080 -51 100 1850 1117 -50 100 1900 1124 -47 100 1950 1177 -51 100 2000 1170 -10 100 50 22 -18 100 100 39 -20 100 150 64 -21 100 200 136 -29 100 250 105 -28 100 300 152 -32 100 350 170 -37 100 400 238 -32 100 450 210 -38 100 500 230 -39 100 550 319 -42 100 600 311 -38 100 650 357 -43 100 700 416 -40 100 750 387 -42 100 800 433 -44 100 850 412 -41 100 900 485 -44 100 950 510 -45 100 1000 541 -39 100 1050 566 -37 100 1100 555 -42 100 1150 605 -41 100 1200 652 -52 100 1250 654 -47 100 1300 682 -46 100 1350 763 -44 100 1400 780 -50 100 1450 785 -46 100 1500 796 -48 100 1550 817 -47 100 1600 900 -44 100 1650 899 -53 100 1700 898 -50 100 1750 974 -56 100 1800 988 -47 100 1850 1017 -44 100 1900 1044 
-52 100 1950 1056 -53 100 2000 1161 -5 100 50 26 -10 100 100 46 -17 100 150 63 -26 100 200 99 -34 100 250 111 -36 100 300 143 -27 100 350 199 -36 100 400 202 -38 100 450 217 -42 100 500 286 -41 100 550 271 -43 100 600 346 -48 100 650 346 -46 100 700 393 -42 100 750 363 -49 100 800 425 -54 100 850 461 -44 100 900 472 -47 100 950 515 -44 100 1000 521 -54 100 1050 589 -50 100 1100 583 -48 100 1150 639 -55 100 1200 651 -48 100 1250 712 -50 100 1300 749 -52 100 1350 748 -55 100 1400 804 -47 100 1450 816 -59 100 1500 871 -54 100 1550 827 -53 100 1600 883 -58 100 1650 961 -54 100 1700 957 -58 100 1750 973 -52 100 1800 1028 -57 100 1850 983 -49 100 1900 1088 -63 100 1950 1093 -59 100 2000 1150 -7 100 50 20 -19 100 100 49 -12 100 150 60 -20 100 200 139 -25 100 250 115 -26 100 300 133 -27 100 350 155 -24 100 400 216 -28 100 450 218 -30 100 500 221 -32 100 550 299 -30 100 600 268 -27 100 650 335 -34 100 700 333 -32 100 750 397 -34 100 800 387 -41 100 850 463 -41 100 900 506 -34 100 950 508 -39 100 1000 501 -37 100 1050 542 -38 100 1100 594 -40 100 1150 613 -36 100 1200 639 -42 100 1250 655 -39 100 1300 707 -46 100 1350 713 -45 100 1400 776 -42 100 1450 776 -43 100 1500 793 -38 100 1550 815 -42 100 1600 830 -43 100 1650 887 -44 100 1700 912 -44 100 1750 917 -43 100 1800 952 -45 100 1850 975 -44 100 1900 1017 -42 100 1950 1040 -46 100 2000 1060 -6 100 50 57 -16 100 100 48 -26 100 150 68 -20 100 200 100 -23 100 250 108 -29 100 300 158 -27 100 350 216 -24 100 400 203 -30 100 450 227 -35 100 500 307 -37 100 550 307 -38 100 600 368 -38 100 650 362 -38 100 700 413 -39 100 750 437 -39 100 800 418 -40 100 850 495 -38 100 900 524 -38 100 950 558 -41 100 1000 590 -44 100 1050 627 -42 100 1100 661 -37 100 1150 633 -42 100 1200 723 -39 100 1250 740 -44 100 1300 802 -49 100 1350 813 -42 100 1400 835 -46 100 1450 882 -47 100 1500 938 -44 100 1550 897 -46 100 1600 923 -50 100 1650 1006 -45 100 1700 1000 -42 100 1750 1065 -43 100 1800 1105 -48 100 1850 1071 -46 100 1900 1135 -52 100 1950 1138 -50 100 2000 1217 -12 100 50 25 -13 100 100 35 -24 100 150 64 -27 100 200 78 -27 100 250 110 -39 100 300 141 -30 100 350 191 -31 100 400 191 -40 100 450 203 -38 100 500 265 -36 100 550 268 -40 100 600 313 -48 100 650 288 -43 100 700 375 -42 100 750 352 -48 100 800 430 -48 100 850 407 -53 100 900 464 -42 100 950 482 -45 100 1000 520 -50 100 1050 495 -52 100 1100 559 -46 100 1150 555 -50 100 1200 629 -54 100 1250 669 -50 100 1300 662 -53 100 1350 716 -53 100 1400 727 -51 100 1450 747 -54 100 1500 785 -52 100 1550 818 -55 100 1600 862 -55 100 1650 868 -54 100 1700 924 -53 100 1750 902 -51 100 1800 957 -55 100 1850 996 -56 100 1900 1002 -55 100 1950 1026 -55 100 2000 1018 -10 100 50 33 -10 100 100 47 -18 100 150 82 -29 100 200 127 -30 100 250 124 -37 100 300 153 -34 100 350 160 -37 100 400 252 -35 100 450 228 -38 100 500 299 -44 100 550 305 -42 100 600 367 -44 100 650 374 -46 100 700 406 -44 100 750 399 -45 100 800 457 -47 100 850 501 -52 100 900 522 -46 100 950 519 -46 100 1000 566 -48 100 1050 626 -52 100 1100 631 -49 100 1150 645 -55 100 1200 698 -48 100 1250 748 -54 100 1300 781 -50 100 1350 833 -49 100 1400 830 -51 100 1450 875 -53 100 1500 890 -48 100 1550 964 -49 100 1600 927 -56 100 1650 1007 -52 100 1700 965 -55 100 1750 1021 -53 100 1800 1110 -54 100 1850 1074 -54 100 1900 1141 -56 100 1950 1119 -54 100 2000 1201 -9 100 50 22 -13 100 100 47 -12 100 150 76 -21 100 200 101 -17 100 250 146 -29 100 300 151 -34 100 350 156 -33 100 400 194 -30 100 450 269 -30 100 500 230 -35 100 550 335 -37 100 600 311 -34 100 650 357 -39 100 
700 356 -36 100 750 463 -39 100 800 456 -39 100 850 427 -40 100 900 520 -36 100 950 541 -39 100 1000 549 -43 100 1050 622 -42 100 1100 629 -43 100 1150 600 -43 100 1200 635 -44 100 1250 662 -46 100 1300 739 -41 100 1350 744 -42 100 1400 788 -49 100 1450 827 -45 100 1500 813 -39 100 1550 881 -48 100 1600 904 -47 100 1650 918 -44 100 1700 965 -48 100 1750 976 -47 100 1800 1026 -50 100 1850 1045 -51 100 1900 1099 -47 100 1950 1077 -49 100 2000 1127 -4 100 50 22 -18 100 100 72 -16 100 150 76 -21 100 200 86 -26 100 250 105 -31 100 300 148 -23 100 350 152 -33 100 400 228 -31 100 450 209 -29 100 500 265 -32 100 550 263 -28 100 600 281 -33 100 650 335 -37 100 700 333 -39 100 750 402 -38 100 800 434 -35 100 850 423 -33 100 900 480 -30 100 950 521 -35 100 1000 538 -38 100 1050 543 -38 100 1100 550 -36 100 1150 613 -38 100 1200 611 -42 100 1250 684 -38 100 1300 683 -40 100 1350 711 -37 100 1400 753 -39 100 1450 783 -38 100 1500 816 -44 100 1550 806 -42 100 1600 834 -44 100 1650 885 -42 100 1700 931 -41 100 1750 898 -41 100 1800 974 -39 100 1850 981 -38 100 1900 1025 -45 100 1950 1034 -42 100 2000 1056 -3 100 50 19 -18 100 100 49 -17 100 150 94 -24 100 200 101 -25 100 250 170 -34 100 300 156 -31 100 350 202 -40 100 400 221 -35 100 450 296 -40 100 500 265 -39 100 550 345 -36 100 600 335 -43 100 650 391 -44 100 700 420 -47 100 750 441 -48 100 800 488 -48 100 850 503 -49 100 900 550 -48 100 950 524 -49 100 1000 604 -48 100 1050 636 -51 100 1100 638 -54 100 1150 676 -46 100 1200 694 -51 100 1250 734 -50 100 1300 807 -49 100 1350 855 -56 100 1400 870 -53 100 1450 937 -55 100 1500 905 -52 100 1550 973 -55 100 1600 994 -52 100 1650 1023 -57 100 1700 1054 -55 100 1750 1102 -52 100 1800 1082 -58 100 1850 1141 -57 100 1900 1143 -55 100 1950 1212 -54 100 2000 1209 -10 100 50 22 -11 100 100 47 -17 100 150 76 -18 100 200 82 -28 100 250 156 -23 100 300 129 -28 100 350 190 -30 100 400 199 -33 100 450 274 -35 100 500 266 -33 100 550 322 -35 100 600 298 -36 100 650 397 -39 100 700 379 -42 100 750 418 -43 100 800 468 -40 100 850 464 -39 100 900 465 -42 100 950 554 -41 100 1000 573 -41 100 1050 585 -45 100 1100 660 -49 100 1150 654 -40 100 1200 671 -47 100 1250 751 -49 100 1300 785 -47 100 1350 789 -46 100 1400 792 -44 100 1450 865 -51 100 1500 884 -50 100 1550 879 -47 100 1600 887 -47 100 1650 975 -51 100 1700 1028 -49 100 1750 1018 -50 100 1800 1025 -53 100 1850 1112 -49 100 1900 1137 -53 100 1950 1142 -54 100 2000 1142 -10 100 50 20 -15 100 100 38 -21 100 150 71 -20 100 200 120 -22 100 250 140 -29 100 300 144 -27 100 350 170 -31 100 400 240 -32 100 450 221 -31 100 500 243 -39 100 550 306 -32 100 600 292 -41 100 650 368 -43 100 700 356 -40 100 750 420 -40 100 800 448 -44 100 850 428 -46 100 900 468 -37 100 950 525 -43 100 1000 522 -42 100 1050 597 -40 100 1100 627 -50 100 1150 670 -46 100 1200 709 -41 100 1250 694 -43 100 1300 751 -43 100 1350 738 -47 100 1400 784 -47 100 1450 788 -48 100 1500 836 -49 100 1550 919 -51 100 1600 913 -46 100 1650 956 -54 100 1700 962 -51 100 1750 981 -52 100 1800 1007 -47 100 1850 1082 -52 100 1900 1096 -49 100 1950 1082 -54 100 2000 1118 -5 100 50 18 -20 100 100 78 -19 100 150 79 -21 100 200 98 -29 100 250 121 -30 100 300 131 -33 100 350 201 -38 100 400 192 -37 100 450 218 -44 100 500 271 -36 100 550 266 -40 100 600 338 -40 100 650 311 -40 100 700 369 -47 100 750 388 -42 100 800 439 -44 100 850 455 -47 100 900 468 -47 100 950 520 -42 100 1000 559 -49 100 1050 581 -47 100 1100 615 -46 100 1150 634 -44 100 1200 668 -44 100 1250 678 -51 100 1300 745 -48 100 1350 756 -50 100 1400 786 -49 
100 1450 805 -51 100 1500 798 -50 100 1550 884 -49 100 1600 874 -48 100 1650 939 -50 100 1700 938 -53 100 1750 1009 -55 100 1800 1070 -53 100 1850 1038 -54 100 1900 1052 -50 100 1950 1075 -50 100 2000 1090 -4 100 50 29 -14 100 100 55 -20 100 150 62 -17 100 200 107 -25 100 250 158 -29 100 300 162 -29 100 350 184 -28 100 400 204 -36 100 450 277 -35 100 500 288 -41 100 550 319 -33 100 600 363 -35 100 650 429 -35 100 700 426 -43 100 750 449 -43 100 800 477 -38 100 850 509 -42 100 900 584 -45 100 950 598 -45 100 1000 678 -44 100 1050 660 -44 100 1100 682 -39 100 1150 697 -47 100 1200 777 -43 100 1250 747 -45 100 1300 777 -47 100 1350 889 -46 100 1400 931 -49 100 1450 937 -45 100 1500 988 -47 100 1550 966 -44 100 1600 1009 -48 100 1650 1092 -47 100 1700 1139 -50 100 1750 1168 -51 100 1800 1162 -53 100 1850 1125 -46 100 1900 1208 -49 100 1950 1243 -50 100 2000 1338 -10 100 50 22 -8 100 100 47 -21 100 150 95 -16 100 200 112 -26 100 250 156 -24 100 300 199 -29 100 350 206 -28 100 400 255 -27 100 450 273 -30 100 500 301 -36 100 550 376 -39 100 600 376 -35 100 650 384 -37 100 700 391 -35 100 750 462 -39 100 800 496 -35 100 850 570 -40 100 900 574 -35 100 950 595 -44 100 1000 666 -42 100 1050 694 -38 100 1100 765 -44 100 1150 762 -42 100 1200 761 -45 100 1250 811 -41 100 1300 825 -46 100 1350 846 -45 100 1400 916 -48 100 1450 970 -45 100 1500 968 -42 100 1550 937 -46 100 1600 974 -43 100 1650 1048 -49 100 1700 1148 -48 100 1750 1164 -45 100 1800 1178 -47 100 1850 1228 -47 100 1900 1181 -47 100 1950 1311 -47 100 2000 1333 -9 100 50 21 -10 100 100 47 -12 100 150 68 -21 100 200 95 -18 100 250 149 -24 100 300 151 -24 100 350 175 -25 100 400 195 -29 100 450 234 -39 100 500 235 -33 100 550 303 -36 100 600 305 -33 100 650 349 -35 100 700 381 -35 100 750 391 -37 100 800 450 -41 100 850 412 -36 100 900 466 -39 100 950 514 -40 100 1000 511 -42 100 1050 550 -43 100 1100 547 -42 100 1150 639 -40 100 1200 621 -42 100 1250 686 -41 100 1300 730 -45 100 1350 722 -45 100 1400 803 -40 100 1450 775 -49 100 1500 827 -43 100 1550 873 -47 100 1600 865 -43 100 1650 905 -47 100 1700 914 -42 100 1750 955 -51 100 1800 1014 -50 100 1850 1025 -46 100 1900 1039 -53 100 1950 1112 -51 100 2000 1072 -8 100 50 24 -12 100 100 84 -18 100 150 62 -22 100 200 91 -23 100 250 119 -29 100 300 147 -26 100 350 196 -30 100 400 194 -36 100 450 239 -31 100 500 269 -39 100 550 300 -37 100 600 335 -36 100 650 321 -36 100 700 357 -39 100 750 376 -36 100 800 413 -36 100 850 482 -42 100 900 481 -40 100 950 500 -40 100 1000 557 -38 100 1050 567 -40 100 1100 566 -39 100 1150 610 -41 100 1200 690 -45 100 1250 674 -41 100 1300 721 -41 100 1350 769 -43 100 1400 764 -47 100 1450 826 -38 100 1500 847 -40 100 1550 889 -44 100 1600 861 -47 100 1650 908 -48 100 1700 936 -46 100 1750 998 -43 100 1800 1031 -52 100 1850 1040 -46 100 1900 1064 -47 100 1950 1038 -53 100 2000 1120 -11 100 50 18 -10 100 100 89 -24 100 150 79 -19 100 200 99 -24 100 250 116 -27 100 300 161 -29 100 350 231 -29 100 400 225 -29 100 450 230 -34 100 500 306 -30 100 550 296 -29 100 600 356 -37 100 650 343 -33 100 700 394 -33 100 750 421 -36 100 800 372 -41 100 850 512 -35 100 900 547 -37 100 950 586 -37 100 1000 604 -38 100 1050 650 -41 100 1100 674 -42 100 1150 709 -42 100 1200 718 -40 100 1250 747 -41 100 1300 744 -42 100 1350 796 -38 100 1400 847 -51 100 1450 871 -37 100 1500 853 -42 100 1550 953 -44 100 1600 937 -46 100 1650 983 -44 100 1700 984 -44 100 1750 1093 -46 100 1800 1110 -45 100 1850 1104 -44 100 1900 1147 -47 100 1950 1198 -40 100 2000 1164 -6 100 50 21 -10 100 100 38 -20 100 
150 68 -21 100 200 120 -30 100 250 131 -30 100 300 158 -26 100 350 182 -30 100 400 223 -37 100 450 236 -35 100 500 281 -37 100 550 297 -41 100 600 330 -35 100 650 353 -40 100 700 431 -39 100 750 396 -38 100 800 452 -37 100 850 472 -35 100 900 530 -41 100 950 553 -42 100 1000 593 -46 100 1050 598 -40 100 1100 661 -45 100 1150 691 -41 100 1200 686 -45 100 1250 724 -43 100 1300 772 -45 100 1350 818 -47 100 1400 809 -45 100 1450 876 -52 100 1500 881 -49 100 1550 879 -58 100 1600 964 -53 100 1650 912 -54 100 1700 1043 -45 100 1750 1032 -53 100 1800 1063 -47 100 1850 1173 -53 100 1900 1126 -53 100 1950 1148 -53 100 2000 1202 -11 100 50 51 -11 100 100 49 -14 100 150 77 -13 100 200 104 -21 100 250 135 -22 100 300 145 -27 100 350 209 -28 100 400 199 -33 100 450 256 -25 100 500 311 -25 100 550 324 -33 100 600 337 -28 100 650 385 -32 100 700 387 -31 100 750 433 -35 100 800 449 -31 100 850 487 -34 100 900 509 -32 100 950 575 -34 100 1000 574 -33 100 1050 652 -38 100 1100 670 -35 100 1150 672 -39 100 1200 723 -38 100 1250 749 -40 100 1300 754 -40 100 1350 781 -43 100 1400 864 -42 100 1450 836 -39 100 1500 885 -41 100 1550 921 -38 100 1600 938 -40 100 1650 934 -42 100 1700 1040 -38 100 1750 1036 -43 100 1800 1074 -39 100 1850 1109 -41 100 1900 1144 -40 100 1950 1115 -37 100 2000 1225 -6 100 50 16 -16 100 100 49 -16 100 150 66 -22 100 200 82 -28 100 250 99 -22 100 300 158 -31 100 350 152 -30 100 400 187 -26 100 450 211 -32 100 500 215 -35 100 550 238 -34 100 600 292 -41 100 650 302 -42 100 700 351 -40 100 750 327 -41 100 800 403 -38 100 850 395 -35 100 900 437 -44 100 950 477 -39 100 1000 497 -42 100 1050 527 -46 100 1100 525 -37 100 1150 563 -42 100 1200 602 -50 100 1250 650 -47 100 1300 667 -44 100 1350 655 -49 100 1400 674 -49 100 1450 741 -44 100 1500 753 -46 100 1550 774 -44 100 1600 804 -49 100 1650 838 -49 100 1700 873 -43 100 1750 878 -43 100 1800 931 -46 100 1850 950 -52 100 1900 986 -48 100 1950 992 -50 100 2000 1002 -9 100 50 27 -14 100 100 43 -21 100 150 72 -25 100 200 96 -28 100 250 119 -28 100 300 181 -32 100 350 166 -29 100 400 197 -39 100 450 254 -34 100 500 235 -37 100 550 279 -36 100 600 362 -41 100 650 332 -38 100 700 384 -38 100 750 423 -41 100 800 429 -43 100 850 460 -46 100 900 490 -43 100 950 539 -40 100 1000 538 -43 100 1050 569 -44 100 1100 602 -50 100 1150 613 -47 100 1200 672 -48 100 1250 692 -46 100 1300 713 -52 100 1350 762 -56 100 1400 827 -50 100 1450 817 -48 100 1500 864 -51 100 1550 893 -53 100 1600 922 -47 100 1650 947 -50 100 1700 969 -55 100 1750 1012 -53 100 1800 1012 -51 100 1850 1058 -52 100 1900 1034 -48 100 1950 1146 -56 100 2000 1113 -14 100 50 27 -9 100 100 82 -19 100 150 66 -17 100 200 86 -29 100 250 132 -29 100 300 143 -29 100 350 205 -32 100 400 203 -31 100 450 230 -36 100 500 280 -32 100 550 286 -39 100 600 360 -39 100 650 343 -40 100 700 380 -40 100 750 440 -38 100 800 411 -38 100 850 467 -43 100 900 505 -44 100 950 517 -44 100 1000 546 -46 100 1050 589 -48 100 1100 634 -49 100 1150 634 -50 100 1200 709 -47 100 1250 714 -46 100 1300 722 -50 100 1350 780 -54 100 1400 808 -52 100 1450 839 -49 100 1500 840 -50 100 1550 892 -48 100 1600 894 -45 100 1650 897 -51 100 1700 972 -48 100 1750 958 -52 100 1800 1014 -50 100 1850 1084 -54 100 1900 1077 -52 100 1950 1157 -48 100 2000 1143 -12 100 50 24 -14 100 100 44 -23 100 150 76 -26 100 200 90 -29 100 250 138 -31 100 300 159 -34 100 350 239 -28 100 400 214 -42 100 450 258 -37 100 500 294 -39 100 550 346 -38 100 600 321 -36 100 650 394 -43 100 700 395 -43 100 750 430 -35 100 800 478 -40 100 850 460 -43 100 900 529 -51 
100 950 549 -41 100 1000 592 -47 100 1050 628 -43 100 1100 660 -45 100 1150 681 -48 100 1200 711 -45 100 1250 756 -43 100 1300 782 -45 100 1350 848 -52 100 1400 874 -44 100 1450 901 -52 100 1500 888 -49 100 1550 967 -55 100 1600 942 -47 100 1650 1029 -56 100 1700 1009 -48 100 1750 1090 -43 100 1800 1083 -51 100 1850 1112 -47 100 1900 1155 -49 100 1950 1175 -48 100 2000 1251 -10 100 50 23 -19 100 100 43 -26 100 150 58 -34 100 200 104 -29 100 250 118 -41 100 300 158 -44 100 350 215 -45 100 400 182 -47 100 450 227 -49 100 500 252 -43 100 550 254 -48 100 600 313 -48 100 650 290 -46 100 700 398 -53 100 750 371 -52 100 800 449 -49 100 850 469 -58 100 900 450 -56 100 950 516 -55 100 1000 543 -60 100 1050 568 -58 100 1100 594 -60 100 1150 593 -55 100 1200 618 -63 100 1250 668 -50 100 1300 660 -56 100 1350 705 -58 100 1400 771 -62 100 1450 820 -63 100 1500 799 -55 100 1550 825 -66 100 1600 857 -58 100 1650 881 -57 100 1700 940 -56 100 1750 922 -57 100 1800 978 -62 100 1850 995 -61 100 1900 1042 -63 100 1950 1084 -62 100 2000 1089 -7 100 50 28 -9 100 100 58 -10 100 150 85 -17 100 200 144 -24 100 250 146 -15 100 300 184 -30 100 350 190 -22 100 400 262 -29 100 450 262 -28 100 500 328 -26 100 550 320 -33 100 600 384 -28 100 650 392 -35 100 700 444 -31 100 750 474 -32 100 800 524 -32 100 850 561 -34 100 900 541 -37 100 950 573 -31 100 1000 620 -37 100 1050 693 -35 100 1100 726 -40 100 1150 751 -35 100 1200 796 -39 100 1250 850 -40 100 1300 893 -38 100 1350 894 -42 100 1400 928 -37 100 1450 979 -37 100 1500 960 -42 100 1550 1011 -43 100 1600 1072 -42 100 1650 1123 -41 100 1700 1140 -41 100 1750 1152 -40 100 1800 1202 -42 100 1850 1236 -37 100 1900 1292 -42 100 1950 1310 -44 100 2000 1356 -8 100 50 33 -14 100 100 48 -18 100 150 78 -26 100 200 111 -15 100 250 165 -23 100 300 174 -23 100 350 197 -27 100 400 251 -27 100 450 242 -33 100 500 274 -36 100 550 362 -35 100 600 307 -30 100 650 377 -38 100 700 404 -34 100 750 429 -35 100 800 486 -33 100 850 521 -40 100 900 528 -39 100 950 543 -36 100 1000 532 -39 100 1050 620 -44 100 1100 649 -46 100 1150 712 -41 100 1200 758 -43 100 1250 778 -46 100 1300 816 -43 100 1350 840 -36 100 1400 829 -42 100 1450 895 -43 100 1500 923 -49 100 1550 950 -46 100 1600 972 -46 100 1650 985 -52 100 1700 1084 -52 100 1750 1112 -45 100 1800 1122 -45 100 1850 1200 -47 100 1900 1227 -44 100 1950 1207 -52 100 2000 1223 -9 100 50 23 -9 100 100 45 -16 100 150 72 -20 100 200 99 -21 100 250 123 -25 100 300 182 -24 100 350 175 -25 100 400 211 -32 100 450 272 -33 100 500 274 -32 100 550 328 -29 100 600 328 -31 100 650 383 -36 100 700 387 -35 100 750 451 -39 100 800 470 -36 100 850 450 -43 100 900 522 -35 100 950 531 -38 100 1000 567 -40 100 1050 587 -36 100 1100 630 -43 100 1150 665 -39 100 1200 680 -40 100 1250 700 -44 100 1300 792 -41 100 1350 782 -43 100 1400 816 -48 100 1450 832 -43 100 1500 874 -52 100 1550 901 -39 100 1600 945 -46 100 1650 964 -48 100 1700 1027 -48 100 1750 1031 -47 100 1800 1052 -47 100 1850 1090 -54 100 1900 1090 -50 100 1950 1158 -51 100 2000 1206 -10 100 50 23 -14 100 100 52 -21 100 150 75 -29 100 200 117 -30 100 250 132 -36 100 300 202 -33 100 350 205 -41 100 400 229 -42 100 450 278 -42 100 500 262 -45 100 550 332 -45 100 600 313 -49 100 650 422 -48 100 700 407 -51 100 750 466 -53 100 800 492 -49 100 850 508 -49 100 900 545 -52 100 950 591 -52 100 1000 604 -54 100 1050 629 -48 100 1100 672 -52 100 1150 684 -56 100 1200 777 -59 100 1250 797 -52 100 1300 817 -54 100 1350 822 -51 100 1400 836 -55 100 1450 893 -55 100 1500 936 -55 100 1550 969 -64 100 1600 1013 -53 
[Deleted data file contents elided: several thousand four-column numeric rows of the form "<n> 100 <size> <value>", where <size> steps from 50 to 2000 in increments of 50 and <value> grows with <size>; the raw rows (apparently repeated measurement runs) carry no further structure and are omitted here.]
560 -43 100 1000 572 -46 100 1050 611 -49 100 1100 642 -52 100 1150 688 -47 100 1200 706 -49 100 1250 736 -50 100 1300 805 -50 100 1350 770 -46 100 1400 858 -53 100 1450 882 -51 100 1500 852 -47 100 1550 900 -48 100 1600 959 -50 100 1650 947 -49 100 1700 1018 -54 100 1750 1029 -51 100 1800 1031 -55 100 1850 1095 -53 100 1900 1107 -49 100 1950 1177 -51 100 2000 1185 -6 100 50 22 -12 100 100 51 -17 100 150 83 -13 100 200 92 -16 100 250 119 -18 100 300 183 -21 100 350 169 -29 100 400 199 -29 100 450 264 -23 100 500 256 -27 100 550 283 -31 100 600 351 -32 100 650 368 -36 100 700 375 -32 100 750 447 -30 100 800 469 -37 100 850 461 -31 100 900 509 -41 100 950 555 -40 100 1000 581 -31 100 1050 591 -41 100 1100 632 -40 100 1150 679 -31 100 1200 711 -45 100 1250 777 -39 100 1300 756 -38 100 1350 767 -43 100 1400 781 -48 100 1450 891 -38 100 1500 892 -45 100 1550 877 -41 100 1600 977 -44 100 1650 927 -42 100 1700 1017 -44 100 1750 1035 -49 100 1800 1088 -41 100 1850 1081 -39 100 1900 1107 -47 100 1950 1156 -42 100 2000 1165 -5 100 50 27 -15 100 100 49 -11 100 150 72 -23 100 200 101 -24 100 250 114 -27 100 300 173 -41 100 350 210 -33 100 400 205 -37 100 450 225 -35 100 500 294 -37 100 550 284 -42 100 600 353 -43 100 650 357 -42 100 700 408 -47 100 750 437 -38 100 800 444 -37 100 850 477 -44 100 900 509 -49 100 950 533 -46 100 1000 564 -39 100 1050 571 -48 100 1100 650 -47 100 1150 652 -53 100 1200 675 -45 100 1250 700 -41 100 1300 764 -53 100 1350 792 -43 100 1400 797 -48 100 1450 855 -51 100 1500 884 -46 100 1550 904 -48 100 1600 943 -56 100 1650 970 -54 100 1700 959 -49 100 1750 1029 -48 100 1800 1055 -52 100 1850 1044 -49 100 1900 1083 -52 100 1950 1132 -53 100 2000 1158 -10 100 50 24 -13 100 100 38 -25 100 150 63 -26 100 200 103 -29 100 250 167 -34 100 300 159 -34 100 350 183 -37 100 400 225 -33 100 450 225 -50 100 500 244 -45 100 550 323 -43 100 600 316 -43 100 650 378 -50 100 700 380 -47 100 750 405 -49 100 800 420 -48 100 850 427 -51 100 900 496 -49 100 950 516 -52 100 1000 562 -55 100 1050 631 -51 100 1100 611 -58 100 1150 642 -52 100 1200 652 -57 100 1250 719 -54 100 1300 716 -56 100 1350 750 -60 100 1400 824 -57 100 1450 842 -60 100 1500 877 -62 100 1550 914 -59 100 1600 920 -58 100 1650 903 -57 100 1700 973 -61 100 1750 1041 -59 100 1800 1025 -57 100 1850 1081 -56 100 1900 1062 -60 100 1950 1116 -55 100 2000 1107 -8 100 50 30 -11 100 100 50 -17 100 150 76 -19 100 200 111 -24 100 250 126 -20 100 300 180 -24 100 350 190 -27 100 400 180 -30 100 450 273 -21 100 500 248 -28 100 550 292 -29 100 600 356 -29 100 650 395 -29 100 700 365 -37 100 750 410 -42 100 800 428 -36 100 850 471 -34 100 900 516 -33 100 950 536 -32 100 1000 589 -32 100 1050 586 -43 100 1100 669 -40 100 1150 673 -41 100 1200 713 -38 100 1250 750 -36 100 1300 789 -36 100 1350 762 -42 100 1400 767 -42 100 1450 813 -40 100 1500 905 -39 100 1550 939 -47 100 1600 945 -37 100 1650 962 -41 100 1700 1021 -46 100 1750 1043 -38 100 1800 1030 -37 100 1850 1101 -41 100 1900 1093 -46 100 1950 1157 -47 100 2000 1164 -5 100 50 30 -13 100 100 39 -19 100 150 84 -22 100 200 98 -25 100 250 119 -22 100 300 127 -33 100 350 169 -24 100 400 217 -26 100 450 218 -35 100 500 260 -27 100 550 256 -34 100 600 301 -32 100 650 359 -31 100 700 369 -34 100 750 352 -36 100 800 432 -35 100 850 443 -35 100 900 451 -38 100 950 507 -35 100 1000 524 -42 100 1050 588 -39 100 1100 587 -37 100 1150 587 -39 100 1200 636 -42 100 1250 730 -41 100 1300 736 -43 100 1350 744 -43 100 1400 770 -42 100 1450 738 -43 100 1500 851 -46 100 1550 860 -42 100 1600 896 -46 100 1650 943 
-44 100 1700 913 -44 100 1750 960 -44 100 1800 965 -45 100 1850 1010 -46 100 1900 1035 -44 100 1950 1082 -47 100 2000 1104 -13 100 50 29 -18 100 100 49 -26 100 150 82 -25 100 200 101 -23 100 250 181 -26 100 300 157 -35 100 350 210 -32 100 400 266 -33 100 450 281 -36 100 500 335 -35 100 550 339 -33 100 600 367 -38 100 650 380 -35 100 700 469 -41 100 750 480 -39 100 800 472 -39 100 850 547 -35 100 900 555 -38 100 950 599 -40 100 1000 651 -37 100 1050 660 -43 100 1100 666 -41 100 1150 746 -45 100 1200 784 -46 100 1250 823 -43 100 1300 783 -41 100 1350 870 -44 100 1400 896 -48 100 1450 943 -45 100 1500 1028 -44 100 1550 1004 -42 100 1600 1049 -47 100 1650 1109 -50 100 1700 1091 -45 100 1750 1106 -47 100 1800 1145 -51 100 1850 1217 -46 100 1900 1222 -54 100 1950 1312 -50 100 2000 1374 -10 100 50 23 -15 100 100 45 -25 100 150 66 -22 100 200 100 -32 100 250 139 -30 100 300 197 -27 100 350 181 -38 100 400 210 -34 100 450 286 -42 100 500 268 -38 100 550 316 -39 100 600 313 -39 100 650 389 -46 100 700 365 -46 100 750 422 -44 100 800 468 -40 100 850 488 -46 100 900 521 -46 100 950 543 -50 100 1000 573 -50 100 1050 600 -47 100 1100 653 -53 100 1150 658 -49 100 1200 700 -48 100 1250 725 -59 100 1300 805 -53 100 1350 822 -50 100 1400 820 -48 100 1450 853 -53 100 1500 892 -49 100 1550 909 -57 100 1600 956 -52 100 1650 994 -53 100 1700 1004 -55 100 1750 1040 -56 100 1800 1063 -53 100 1850 1102 -61 100 1900 1150 -53 100 1950 1146 -52 100 2000 1224 -9 100 50 23 -15 100 100 50 -21 100 150 63 -19 100 200 92 -24 100 250 109 -30 100 300 177 -30 100 350 184 -35 100 400 193 -36 100 450 264 -39 100 500 269 -42 100 550 308 -33 100 600 328 -41 100 650 383 -38 100 700 368 -40 100 750 432 -45 100 800 429 -44 100 850 429 -43 100 900 515 -39 100 950 530 -44 100 1000 553 -48 100 1050 596 -47 100 1100 612 -51 100 1150 595 -52 100 1200 652 -44 100 1250 675 -49 100 1300 736 -50 100 1350 751 -55 100 1400 804 -57 100 1450 825 -52 100 1500 839 -50 100 1550 865 -51 100 1600 943 -50 100 1650 934 -48 100 1700 937 -50 100 1750 981 -50 100 1800 1056 -53 100 1850 1024 -59 100 1900 1071 -55 100 1950 1115 -55 100 2000 1126 -8 100 50 54 -14 100 100 51 -20 100 150 64 -18 100 200 89 -24 100 250 110 -27 100 300 131 -25 100 350 181 -30 100 400 179 -36 100 450 217 -32 100 500 245 -34 100 550 270 -36 100 600 316 -35 100 650 325 -42 100 700 364 -38 100 750 357 -41 100 800 417 -39 100 850 402 -39 100 900 463 -42 100 950 483 -40 100 1000 487 -43 100 1050 494 -43 100 1100 563 -51 100 1150 613 -41 100 1200 628 -41 100 1250 639 -49 100 1300 650 -41 100 1350 707 -47 100 1400 720 -44 100 1450 735 -47 100 1500 751 -49 100 1550 816 -41 100 1600 856 -52 100 1650 841 -51 100 1700 917 -47 100 1750 899 -47 100 1800 934 -46 100 1850 986 -46 100 1900 983 -53 100 1950 1022 -51 100 2000 1062 -6 100 50 18 -16 100 100 53 -18 100 150 97 -20 100 200 95 -13 100 250 141 -24 100 300 163 -26 100 350 207 -28 100 400 211 -22 100 450 240 -28 100 500 264 -35 100 550 270 -36 100 600 326 -36 100 650 320 -33 100 700 380 -35 100 750 391 -35 100 800 442 -37 100 850 461 -37 100 900 482 -36 100 950 498 -40 100 1000 539 -34 100 1050 600 -42 100 1100 593 -43 100 1150 619 -44 100 1200 686 -39 100 1250 719 -42 100 1300 757 -47 100 1350 724 -40 100 1400 744 -50 100 1450 786 -41 100 1500 822 -44 100 1550 874 -46 100 1600 887 -48 100 1650 913 -51 100 1700 928 -49 100 1750 1003 -47 100 1800 1010 -44 100 1850 1010 -47 100 1900 1055 -53 100 1950 1155 -50 100 2000 1099 -6 100 50 24 -19 100 100 49 -19 100 150 76 -26 100 200 118 -22 100 250 133 -30 100 300 205 -25 100 350 204 -24 100 400 
217 -34 100 450 306 -29 100 500 265 -32 100 550 326 -38 100 600 335 -36 100 650 418 -34 100 700 420 -39 100 750 473 -37 100 800 506 -42 100 850 553 -37 100 900 538 -40 100 950 585 -47 100 1000 619 -42 100 1050 600 -41 100 1100 771 -44 100 1150 784 -46 100 1200 716 -51 100 1250 770 -47 100 1300 865 -47 100 1350 804 -47 100 1400 840 -41 100 1450 974 -49 100 1500 958 -50 100 1550 1087 -43 100 1600 998 -47 100 1650 1097 -51 100 1700 1058 -54 100 1750 1283 -53 100 1800 1214 -51 100 1850 1203 -54 100 1900 1313 -52 100 1950 1153 -53 100 2000 1334 -10 100 50 22 -16 100 100 48 -20 100 150 70 -21 100 200 100 -32 100 250 143 -31 100 300 138 -30 100 350 181 -43 100 400 239 -35 100 450 239 -41 100 500 234 -36 100 550 281 -37 100 600 292 -35 100 650 357 -44 100 700 343 -43 100 750 397 -47 100 800 453 -50 100 850 458 -49 100 900 505 -55 100 950 508 -41 100 1000 550 -47 100 1050 564 -49 100 1100 636 -57 100 1150 665 -52 100 1200 678 -55 100 1250 708 -53 100 1300 716 -47 100 1350 749 -50 100 1400 804 -50 100 1450 812 -55 100 1500 838 -53 100 1550 853 -49 100 1600 893 -50 100 1650 928 -56 100 1700 936 -53 100 1750 1000 -58 100 1800 1019 -54 100 1850 1029 -58 100 1900 1070 -62 100 1950 1116 -55 100 2000 1165 -11 100 50 28 -11 100 100 39 -20 100 150 74 -21 100 200 98 -24 100 250 115 -24 100 300 146 -23 100 350 208 -27 100 400 197 -33 100 450 222 -28 100 500 259 -34 100 550 287 -31 100 600 340 -35 100 650 338 -37 100 700 406 -38 100 750 385 -37 100 800 467 -35 100 850 480 -39 100 900 530 -37 100 950 538 -46 100 1000 515 -42 100 1050 579 -39 100 1100 623 -39 100 1150 640 -40 100 1200 666 -36 100 1250 709 -47 100 1300 699 -48 100 1350 746 -44 100 1400 793 -42 100 1450 805 -43 100 1500 866 -50 100 1550 851 -48 100 1600 908 -46 100 1650 923 -50 100 1700 988 -53 100 1750 974 -41 100 1800 987 -46 100 1850 1041 -54 100 1900 1076 -45 100 1950 1125 -47 100 2000 1142 -5 100 50 43 -12 100 100 65 -18 100 150 87 -16 100 200 138 -24 100 250 131 -29 100 300 130 -25 100 350 229 -30 100 400 278 -31 100 450 200 -32 100 500 303 -29 100 550 304 -34 100 600 391 -34 100 650 461 -43 100 700 495 -39 100 750 490 -36 100 800 518 -39 100 850 654 -42 100 900 616 -37 100 950 574 -42 100 1000 556 -43 100 1050 611 -40 100 1100 706 -44 100 1150 666 -41 100 1200 709 -41 100 1250 859 -44 100 1300 942 -43 100 1350 930 -42 100 1400 930 -44 100 1450 840 -44 100 1500 815 -41 100 1550 955 -42 100 1600 1098 -50 100 1650 1002 -41 100 1700 1130 -47 100 1750 1115 -46 100 1800 1185 -54 100 1850 1207 -40 100 1900 1303 -45 100 1950 1391 -46 100 2000 1248 -8 100 50 24 -16 100 100 46 -14 100 150 79 -17 100 200 106 -22 100 250 131 -20 100 300 193 -25 100 350 203 -25 100 400 219 -23 100 450 271 -28 100 500 273 -28 100 550 324 -32 100 600 351 -33 100 650 408 -27 100 700 440 -37 100 750 462 -31 100 800 470 -37 100 850 555 -33 100 900 564 -38 100 950 586 -38 100 1000 609 -38 100 1050 675 -34 100 1100 670 -42 100 1150 713 -37 100 1200 739 -40 100 1250 802 -42 100 1300 793 -41 100 1350 904 -42 100 1400 908 -43 100 1450 892 -38 100 1500 885 -41 100 1550 1005 -45 100 1600 1017 -42 100 1650 1095 -43 100 1700 1100 -46 100 1750 1130 -43 100 1800 1128 -44 100 1850 1152 -41 100 1900 1184 -47 100 1950 1305 -44 100 2000 1290 -9 100 50 16 -14 100 100 50 -20 100 150 69 -16 100 200 91 -17 100 250 117 -27 100 300 162 -32 100 350 164 -35 100 400 187 -32 100 450 252 -35 100 500 236 -36 100 550 258 -36 100 600 313 -32 100 650 318 -43 100 700 364 -37 100 750 413 -39 100 800 390 -37 100 850 447 -40 100 900 474 -43 100 950 477 -43 100 1000 517 -39 100 1050 529 -41 100 1100 621 -39 
100 1150 622 -42 100 1200 653 -53 100 1250 663 -45 100 1300 716 -46 100 1350 711 -46 100 1400 760 -43 100 1450 795 -40 100 1500 796 -45 100 1550 853 -46 100 1600 880 -47 100 1650 888 -51 100 1700 926 -50 100 1750 923 -49 100 1800 998 -47 100 1850 979 -48 100 1900 1058 -50 100 1950 1048 -45 100 2000 1089 -11 100 50 24 -15 100 100 53 -21 100 150 86 -24 100 200 111 -25 100 250 158 -35 100 300 173 -24 100 350 183 -22 100 400 249 -32 100 450 273 -34 100 500 314 -36 100 550 315 -29 100 600 366 -34 100 650 373 -40 100 700 440 -41 100 750 440 -41 100 800 477 -42 100 850 522 -45 100 900 569 -44 100 950 572 -44 100 1000 622 -37 100 1050 635 -41 100 1100 664 -44 100 1150 706 -43 100 1200 754 -40 100 1250 802 -43 100 1300 811 -42 100 1350 879 -43 100 1400 854 -47 100 1450 865 -47 100 1500 911 -47 100 1550 982 -45 100 1600 1020 -42 100 1650 1032 -43 100 1700 1055 -40 100 1750 1166 -51 100 1800 1125 -49 100 1850 1197 -46 100 1900 1176 -48 100 1950 1309 -50 100 2000 1299 -11 100 50 28 -23 100 100 45 -19 100 150 70 -33 100 200 98 -25 100 250 121 -29 100 300 143 -37 100 350 245 -31 100 400 198 -35 100 450 277 -45 100 500 259 -45 100 550 262 -43 100 600 342 -42 100 650 365 -43 100 700 395 -43 100 750 459 -45 100 800 510 -50 100 850 455 -49 100 900 544 -50 100 950 534 -48 100 1000 580 -55 100 1050 590 -49 100 1100 645 -49 100 1150 669 -57 100 1200 674 -52 100 1250 720 -52 100 1300 759 -54 100 1350 786 -60 100 1400 834 -50 100 1450 839 -57 100 1500 879 -54 100 1550 907 -55 100 1600 910 -57 100 1650 994 -54 100 1700 1015 -56 100 1750 1002 -57 100 1800 1091 -54 100 1850 1093 -57 100 1900 1142 -61 100 1950 1090 -57 100 2000 1138 -5 100 50 20 -13 100 100 46 -23 100 150 93 -22 100 200 88 -32 100 250 123 -30 100 300 155 -37 100 350 202 -37 100 400 209 -35 100 450 232 -38 100 500 269 -35 100 550 276 -36 100 600 321 -40 100 650 326 -41 100 700 391 -41 100 750 389 -43 100 800 428 -46 100 850 489 -38 100 900 476 -47 100 950 512 -47 100 1000 557 -49 100 1050 604 -50 100 1100 626 -50 100 1150 632 -49 100 1200 642 -45 100 1250 701 -49 100 1300 738 -49 100 1350 729 -52 100 1400 786 -45 100 1450 791 -54 100 1500 817 -46 100 1550 871 -49 100 1600 869 -49 100 1650 905 -49 100 1700 932 -51 100 1750 996 -51 100 1800 1033 -55 100 1850 1011 -54 100 1900 1121 -53 100 1950 1115 -56 100 2000 1124 -3 100 50 21 -13 100 100 44 -20 100 150 71 -21 100 200 74 -33 100 250 143 -25 100 300 135 -31 100 350 180 -30 100 400 172 -30 100 450 229 -32 100 500 238 -35 100 550 286 -38 100 600 304 -34 100 650 302 -37 100 700 364 -36 100 750 400 -38 100 800 398 -41 100 850 443 -43 100 900 481 -46 100 950 477 -49 100 1000 511 -40 100 1050 527 -46 100 1100 566 -47 100 1150 611 -47 100 1200 646 -39 100 1250 669 -51 100 1300 685 -51 100 1350 731 -46 100 1400 738 -45 100 1450 761 -45 100 1500 797 -49 100 1550 835 -46 100 1600 825 -51 100 1650 865 -46 100 1700 918 -52 100 1750 951 -51 100 1800 966 -51 100 1850 1000 -50 100 1900 1016 -48 100 1950 1032 -52 100 2000 1108 -10 100 50 22 -8 100 100 47 -18 100 150 68 -20 100 200 103 -17 100 250 115 -21 100 300 159 -28 100 350 175 -24 100 400 184 -28 100 450 206 -28 100 500 266 -29 100 550 273 -35 100 600 313 -31 100 650 311 -37 100 700 371 -37 100 750 388 -35 100 800 417 -46 100 850 469 -40 100 900 483 -43 100 950 482 -37 100 1000 531 -37 100 1050 561 -44 100 1100 563 -42 100 1150 624 -44 100 1200 679 -40 100 1250 690 -44 100 1300 687 -46 100 1350 729 -44 100 1400 733 -41 100 1450 747 -44 100 1500 829 -44 100 1550 866 -38 100 1600 853 -42 100 1650 888 -40 100 1700 951 -41 100 1750 945 -52 100 1800 952 -48 100 1850 
1037 -48 100 1900 1063 -49 100 1950 1080 -49 100 2000 1088 -6 100 50 21 -18 100 100 54 -23 100 150 102 -23 100 200 98 -28 100 250 124 -31 100 300 147 -34 100 350 255 -36 100 400 192 -42 100 450 5385 -40 100 500 252 -40 100 550 301 -43 100 600 368 -41 100 650 384 -38 100 700 370 -42 100 750 450 -42 100 800 474 -42 100 850 464 -43 100 900 527 -43 100 950 584 -50 100 1000 620 -48 100 1050 638 -44 100 1100 664 -46 100 1150 701 -54 100 1200 715 -49 100 1250 730 -45 100 1300 763 -49 100 1350 804 -48 100 1400 847 -46 100 1450 896 -56 100 1500 907 -50 100 1550 904 -50 100 1600 984 -55 100 1650 982 -47 100 1700 999 -49 100 1750 1122 -51 100 1800 1103 -51 100 1850 1125 -54 100 1900 1147 -56 100 1950 1179 -54 100 2000 1219 -4 100 50 21 -12 100 100 46 -18 100 150 64 -24 100 200 129 -26 100 250 119 -25 100 300 129 -28 100 350 178 -33 100 400 240 -33 100 450 220 -36 100 500 229 -36 100 550 296 -35 100 600 302 -34 100 650 354 -42 100 700 343 -37 100 750 400 -42 100 800 451 -40 100 850 427 -43 100 900 482 -35 100 950 487 -43 100 1000 540 -47 100 1050 516 -41 100 1100 593 -43 100 1150 604 -48 100 1200 636 -45 100 1250 666 -44 100 1300 678 -48 100 1350 737 -49 100 1400 727 -42 100 1450 753 -45 100 1500 776 -46 100 1550 823 -45 100 1600 876 -44 100 1650 833 -47 100 1700 962 -50 100 1750 943 -46 100 1800 956 -44 100 1850 993 -46 100 1900 1022 -48 100 1950 1091 -54 100 2000 1069 -11 100 50 26 -13 100 100 43 -21 100 150 74 -21 100 200 106 -27 100 250 117 -32 100 300 197 -26 100 350 174 -32 100 400 206 -35 100 450 260 -35 100 500 270 -37 100 550 336 -37 100 600 312 -35 100 650 370 -38 100 700 354 -41 100 750 426 -34 100 800 467 -43 100 850 469 -45 100 900 510 -40 100 950 529 -45 100 1000 577 -46 100 1050 619 -36 100 1100 595 -42 100 1150 609 -43 100 1200 647 -44 100 1250 725 -46 100 1300 742 -45 100 1350 784 -43 100 1400 785 -47 100 1450 805 -47 100 1500 817 -44 100 1550 930 -45 100 1600 915 -47 100 1650 997 -47 100 1700 1005 -50 100 1750 1022 -48 100 1800 1007 -49 100 1850 1099 -51 100 1900 1101 -47 100 1950 1163 -48 100 2000 1170 -6 100 50 23 -17 100 100 40 -23 100 150 70 -25 100 200 92 -31 100 250 118 -40 100 300 145 -40 100 350 205 -41 100 400 202 -40 100 450 218 -37 100 500 278 -42 100 550 254 -38 100 600 313 -41 100 650 305 -45 100 700 392 -46 100 750 352 -47 100 800 472 -45 100 850 427 -51 100 900 447 -51 100 950 516 -52 100 1000 536 -53 100 1050 611 -49 100 1100 600 -51 100 1150 642 -54 100 1200 667 -49 100 1250 690 -52 100 1300 681 -49 100 1350 748 -52 100 1400 751 -53 100 1450 788 -47 100 1500 763 -50 100 1550 877 -53 100 1600 895 -50 100 1650 938 -59 100 1700 946 -58 100 1750 1000 -55 100 1800 991 -56 100 1850 1050 -52 100 1900 1058 -59 100 1950 1050 -56 100 2000 1106 -9 100 50 25 -8 100 100 47 -19 100 150 73 -20 100 200 131 -22 100 250 118 -28 100 300 149 -22 100 350 173 -27 100 400 243 -30 100 450 253 -31 100 500 274 -27 100 550 337 -34 100 600 376 -35 100 650 353 -36 100 700 410 -31 100 750 414 -36 100 800 499 -34 100 850 528 -34 100 900 554 -34 100 950 581 -35 100 1000 628 -36 100 1050 597 -34 100 1100 629 -36 100 1150 746 -37 100 1200 698 -43 100 1250 788 -42 100 1300 756 -42 100 1350 790 -42 100 1400 833 -46 100 1450 857 -41 100 1500 908 -43 100 1550 914 -44 100 1600 947 -40 100 1650 1055 -46 100 1700 1103 -44 100 1750 1051 -49 100 1800 1118 -46 100 1850 1185 -42 100 1900 1180 -48 100 1950 1226 -39 100 2000 1221 -9 100 50 24 -14 100 100 45 -25 100 150 66 -27 100 200 134 -26 100 250 118 -34 100 300 155 -31 100 350 170 -32 100 400 243 -35 100 450 228 -31 100 500 256 -41 100 550 349 -42 100 600 312 
-43 100 650 357 -47 100 700 413 -38 100 750 389 -49 100 800 477 -46 100 850 449 -49 100 900 478 -49 100 950 534 -51 100 1000 587 -50 100 1050 596 -49 100 1100 630 -52 100 1150 676 -53 100 1200 701 -52 100 1250 709 -52 100 1300 722 -52 100 1350 745 -49 100 1400 791 -53 100 1450 912 -53 100 1500 918 -53 100 1550 891 -57 100 1600 959 -51 100 1650 962 -63 100 1700 1024 -58 100 1750 1031 -51 100 1800 1099 -53 100 1850 1091 -56 100 1900 1114 -53 100 1950 1179 -58 100 2000 1223 -4 100 50 21 -16 100 100 47 -28 100 150 72 -25 100 200 89 -29 100 250 113 -33 100 300 150 -34 100 350 216 -35 100 400 201 -36 100 450 242 -41 100 500 310 -36 100 550 293 -45 100 600 355 -44 100 650 336 -41 100 700 424 -45 100 750 456 -45 100 800 432 -46 100 850 518 -49 100 900 514 -41 100 950 529 -45 100 1000 603 -45 100 1050 587 -43 100 1100 604 -47 100 1150 628 -47 100 1200 698 -48 100 1250 751 -51 100 1300 757 -46 100 1350 803 -49 100 1400 798 -50 100 1450 872 -49 100 1500 888 -48 100 1550 933 -47 100 1600 924 -56 100 1650 963 -51 100 1700 992 -50 100 1750 1052 -56 100 1800 1049 -51 100 1850 1093 -57 100 1900 1110 -55 100 1950 1130 -54 100 2000 1158 -4 100 50 21 -16 100 100 42 -18 100 150 62 -16 100 200 129 -23 100 250 117 -28 100 300 139 -31 100 350 170 -33 100 400 235 -38 100 450 229 -36 100 500 233 -36 100 550 313 -40 100 600 296 -42 100 650 349 -39 100 700 347 -36 100 750 401 -42 100 800 451 -44 100 850 423 -46 100 900 469 -42 100 950 529 -42 100 1000 529 -41 100 1050 527 -42 100 1100 600 -48 100 1150 586 -46 100 1200 619 -39 100 1250 678 -46 100 1300 672 -51 100 1350 716 -48 100 1400 715 -48 100 1450 760 -46 100 1500 826 -52 100 1550 847 -46 100 1600 866 -52 100 1650 882 -55 100 1700 934 -52 100 1750 946 -50 100 1800 973 -48 100 1850 1056 -55 100 1900 1068 -53 100 1950 1052 -52 100 2000 1121 -13 100 50 21 -19 100 100 50 -25 100 150 74 -35 100 200 99 -34 100 250 129 -37 100 300 209 -40 100 350 165 -36 100 400 201 -39 100 450 274 -38 100 500 247 -36 100 550 309 -46 100 600 341 -45 100 650 378 -42 100 700 366 -47 100 750 421 -45 100 800 440 -47 100 850 494 -52 100 900 521 -52 100 950 532 -48 100 1000 581 -47 100 1050 623 -49 100 1100 637 -50 100 1150 703 -52 100 1200 714 -46 100 1250 732 -49 100 1300 768 -49 100 1350 832 -48 100 1400 801 -55 100 1450 834 -55 100 1500 879 -48 100 1550 947 -50 100 1600 959 -55 100 1650 978 -55 100 1700 996 -57 100 1750 1053 -55 100 1800 1127 -52 100 1850 1121 -56 100 1900 1155 -52 100 1950 1169 -63 100 2000 1160 -4 100 50 52 -15 100 100 40 -22 100 150 56 -24 100 200 85 -28 100 250 99 -25 100 300 125 -28 100 350 141 -30 100 400 210 -32 100 450 188 -32 100 500 209 -34 100 550 271 -38 100 600 253 -35 100 650 314 -38 100 700 299 -39 100 750 357 -36 100 800 350 -29 100 850 401 -41 100 900 435 -36 100 950 416 -36 100 1000 475 -44 100 1050 504 -40 100 1100 503 -45 100 1150 526 -43 100 1200 556 -45 100 1250 574 -44 100 1300 622 -48 100 1350 656 -44 100 1400 690 -45 100 1450 704 -44 100 1500 702 -46 100 1550 736 -48 100 1600 765 -49 100 1650 812 -45 100 1700 840 -52 100 1750 863 -49 100 1800 866 -48 100 1850 910 -49 100 1900 932 -47 100 1950 948 -48 100 2000 963 -4 100 50 20 -15 100 100 44 -16 100 150 71 -19 100 200 96 -20 100 250 119 -19 100 300 173 -31 100 350 186 -29 100 400 205 -30 100 450 273 -32 100 500 258 -34 100 550 319 -36 100 600 301 -29 100 650 362 -34 100 700 358 -38 100 750 417 -37 100 800 456 -46 100 850 481 -36 100 900 521 -37 100 950 529 -48 100 1000 567 -41 100 1050 584 -42 100 1100 661 -46 100 1150 670 -45 100 1200 701 -43 100 1250 750 -43 100 1300 747 -45 100 1350 778 -48 100 
1400 778 -40 100 1450 785 -43 100 1500 904 -43 100 1550 907 -53 100 1600 934 -50 100 1650 953 -48 100 1700 1006 -49 100 1750 1001 -48 100 1800 1075 -45 100 1850 1048 -42 100 1900 1123 -49 100 1950 1138 -52 100 2000 1158 -8 100 50 24 -20 100 100 52 -14 100 150 69 -18 100 200 94 -25 100 250 152 -25 100 300 150 -28 100 350 158 -27 100 400 199 -27 100 450 293 -30 100 500 239 -34 100 550 324 -35 100 600 338 -39 100 650 375 -32 100 700 366 -45 100 750 403 -37 100 800 455 -46 100 850 434 -41 100 900 540 -41 100 950 527 -40 100 1000 551 -38 100 1050 604 -42 100 1100 577 -41 100 1150 611 -42 100 1200 689 -48 100 1250 723 -45 100 1300 733 -49 100 1350 806 -51 100 1400 830 -47 100 1450 850 -41 100 1500 874 -45 100 1550 889 -55 100 1600 922 -56 100 1650 983 -43 100 1700 977 -50 100 1750 1070 -54 100 1800 1041 -45 100 1850 1069 -55 100 1900 1127 -48 100 1950 1139 -50 100 2000 1156 -5 100 50 33 -11 100 100 59 -19 100 150 95 -23 100 200 124 -24 100 250 183 -28 100 300 184 -27 100 350 259 -33 100 400 318 -30 100 450 307 -30 100 500 386 -35 100 550 336 -32 100 600 430 -41 100 650 486 -34 100 700 470 -36 100 750 513 -37 100 800 547 -38 100 850 579 -42 100 900 643 -39 100 950 680 -39 100 1000 733 -41 100 1050 804 -33 100 1100 795 -39 100 1150 899 -47 100 1200 858 -41 100 1250 916 -41 100 1300 958 -44 100 1350 934 -44 100 1400 995 -43 100 1450 1058 -44 100 1500 1211 -49 100 1550 1113 -50 100 1600 1128 -43 100 1650 1185 -45 100 1700 1364 -50 100 1750 1296 -47 100 1800 1353 -48 100 1850 1445 -49 100 1900 1403 -49 100 1950 1403 -47 100 2000 1398 -6 100 50 51 -22 100 100 51 -25 100 150 74 -20 100 200 92 -23 100 250 111 -26 100 300 138 -33 100 350 204 -30 100 400 178 -34 100 450 201 -35 100 500 283 -33 100 550 263 -39 100 600 329 -37 100 650 317 -37 100 700 384 -41 100 750 373 -39 100 800 435 -43 100 850 464 -47 100 900 453 -46 100 950 518 -39 100 1000 529 -41 100 1050 574 -43 100 1100 563 -47 100 1150 588 -42 100 1200 648 -44 100 1250 614 -45 100 1300 676 -47 100 1350 682 -50 100 1400 774 -46 100 1450 812 -47 100 1500 809 -51 100 1550 835 -47 100 1600 888 -53 100 1650 953 -50 100 1700 967 -52 100 1750 973 -49 100 1800 1071 -50 100 1850 1068 -49 100 1900 1054 -42 100 1950 1096 -51 100 2000 1095 -20 100 50 28 -22 100 100 47 -22 100 150 114 -23 100 200 94 -33 100 250 125 -24 100 300 132 -30 100 350 168 -40 100 400 239 -44 100 450 214 -43 100 500 270 -38 100 550 274 -43 100 600 299 -42 100 650 332 -42 100 700 389 -44 100 750 411 -45 100 800 426 -43 100 850 491 -41 100 900 468 -40 100 950 543 -45 100 1000 565 -44 100 1050 618 -52 100 1100 683 -50 100 1150 625 -52 100 1200 754 -55 100 1250 719 -54 100 1300 770 -54 100 1350 773 -57 100 1400 792 -49 100 1450 838 -47 100 1500 870 -55 100 1550 928 -56 100 1600 937 -57 100 1650 991 -53 100 1700 955 -59 100 1750 1017 -54 100 1800 1028 -53 100 1850 1017 -56 100 1900 1093 -57 100 1950 1127 -54 100 2000 1181 -0 100 50 18 -10 100 100 47 -16 100 150 87 -19 100 200 123 -23 100 250 141 -25 100 300 201 -25 100 350 200 -24 100 400 217 -26 100 450 288 -29 100 500 317 -28 100 550 334 -33 100 600 355 -33 100 650 417 -34 100 700 447 -32 100 750 419 -37 100 800 545 -35 100 850 549 -38 100 900 570 -36 100 950 587 -38 100 1000 684 -39 100 1050 673 -38 100 1100 655 -38 100 1150 713 -38 100 1200 771 -44 100 1250 747 -37 100 1300 795 -38 100 1350 816 -46 100 1400 896 -34 100 1450 905 -41 100 1500 935 -43 100 1550 1014 -47 100 1600 1025 -39 100 1650 1106 -39 100 1700 1067 -42 100 1750 1084 -45 100 1800 1182 -42 100 1850 1232 -46 100 1900 1196 -49 100 1950 1233 -47 100 2000 1352 -4 100 50 17 -15 
100 100 41 -20 100 150 68 -20 100 200 93 -17 100 250 111 -26 100 300 172 -25 100 350 164 -34 100 400 211 -29 100 450 248 -33 100 500 247 -32 100 550 264 -32 100 600 319 -31 100 650 334 -33 100 700 380 -34 100 750 401 -37 100 800 410 -38 100 850 467 -37 100 900 502 -36 100 950 471 -43 100 1000 538 -40 100 1050 557 -40 100 1100 579 -37 100 1150 627 -42 100 1200 630 -38 100 1250 674 -39 100 1300 704 -41 100 1350 715 -44 100 1400 749 -42 100 1450 781 -42 100 1500 786 -44 100 1550 875 -44 100 1600 919 -46 100 1650 906 -47 100 1700 967 -46 100 1750 985 -51 100 1800 1012 -45 100 1850 1011 -39 100 1900 1047 -43 100 1950 1056 -49 100 2000 1135 -8 100 50 25 -14 100 100 59 -14 100 150 85 -24 100 200 107 -25 100 250 174 -25 100 300 181 -25 100 350 184 -33 100 400 213 -37 100 450 305 -30 100 500 314 -29 100 550 338 -34 100 600 401 -33 100 650 377 -31 100 700 436 -37 100 750 500 -39 100 800 484 -36 100 850 511 -40 100 900 624 -41 100 950 615 -42 100 1000 646 -43 100 1050 660 -39 100 1100 743 -44 100 1150 763 -43 100 1200 793 -43 100 1250 832 -44 100 1300 859 -45 100 1350 885 -45 100 1400 907 -45 100 1450 967 -46 100 1500 953 -37 100 1550 1021 -45 100 1600 1100 -51 100 1650 1113 -48 100 1700 1140 -47 100 1750 1117 -48 100 1800 1150 -50 100 1850 1234 -48 100 1900 1248 -51 100 1950 1322 -49 100 2000 1297 -8 100 50 17 -14 100 100 41 -9 100 150 64 -18 100 200 96 -20 100 250 116 -23 100 300 179 -27 100 350 172 -22 100 400 190 -37 100 450 252 -30 100 500 237 -28 100 550 263 -26 100 600 311 -37 100 650 328 -34 100 700 373 -34 100 750 394 -41 100 800 417 -37 100 850 461 -38 100 900 471 -37 100 950 476 -35 100 1000 552 -37 100 1050 544 -36 100 1100 589 -41 100 1150 636 -36 100 1200 638 -44 100 1250 654 -43 100 1300 689 -42 100 1350 740 -43 100 1400 774 -42 100 1450 747 -44 100 1500 860 -50 100 1550 891 -46 100 1600 883 -44 100 1650 887 -50 100 1700 941 -45 100 1750 943 -43 100 1800 987 -46 100 1850 1009 -45 100 1900 1003 -47 100 1950 1073 -45 100 2000 1095 -10 100 50 23 -12 100 100 49 -24 100 150 68 -33 100 200 95 -32 100 250 168 -27 100 300 149 -32 100 350 174 -41 100 400 246 -35 100 450 234 -32 100 500 260 -37 100 550 306 -47 100 600 315 -44 100 650 386 -44 100 700 417 -44 100 750 381 -42 100 800 470 -43 100 850 478 -46 100 900 466 -45 100 950 538 -48 100 1000 561 -46 100 1050 580 -48 100 1100 630 -46 100 1150 631 -51 100 1200 673 -44 100 1250 707 -45 100 1300 744 -48 100 1350 742 -49 100 1400 784 -49 100 1450 829 -49 100 1500 869 -50 100 1550 893 -51 100 1600 919 -50 100 1650 938 -52 100 1700 978 -52 100 1750 1044 -51 100 1800 1073 -51 100 1850 1057 -54 100 1900 1068 -54 100 1950 1097 -52 100 2000 1155 -7 100 50 22 -20 100 100 48 -18 100 150 78 -23 100 200 91 -22 100 250 130 -28 100 300 140 -26 100 350 208 -26 100 400 181 -33 100 450 220 -34 100 500 271 -37 100 550 254 -36 100 600 318 -35 100 650 328 -33 100 700 381 -35 100 750 363 -36 100 800 425 -37 100 850 452 -36 100 900 447 -43 100 950 489 -45 100 1000 523 -47 100 1050 569 -44 100 1100 589 -47 100 1150 630 -45 100 1200 675 -44 100 1250 678 -39 100 1300 726 -46 100 1350 756 -46 100 1400 740 -47 100 1450 789 -44 100 1500 796 -46 100 1550 820 -48 100 1600 872 -53 100 1650 891 -50 100 1700 901 -44 100 1750 965 -55 100 1800 991 -46 100 1850 971 -54 100 1900 1031 -42 100 1950 1063 -47 100 2000 1123 -7 100 50 53 -13 100 100 41 -13 100 150 68 -22 100 200 82 -17 100 250 109 -26 100 300 134 -29 100 350 186 -32 100 400 191 -30 100 450 212 -27 100 500 273 -30 100 550 258 -37 100 600 289 -36 100 650 323 -32 100 700 356 -36 100 750 350 -42 100 800 427 -34 100 850 430 
-45 100 900 470 -47 100 950 503 -41 100 1000 540 -41 100 1050 529 -44 100 1100 570 -39 100 1150 601 -41 100 1200 638 -36 100 1250 643 -40 100 1300 663 -40 100 1350 688 -46 100 1400 748 -45 100 1450 805 -43 100 1500 820 -42 100 1550 809 -46 100 1600 852 -50 100 1650 882 -49 100 1700 955 -53 100 1750 948 -49 100 1800 986 -51 100 1850 980 -46 100 1900 1035 -50 100 1950 1064 -45 100 2000 1080 -10 100 50 26 -17 100 100 52 -17 100 150 81 -24 100 200 101 -30 100 250 164 -35 100 300 152 -31 100 350 169 -35 100 400 250 -35 100 450 236 -33 100 500 262 -40 100 550 317 -36 100 600 326 -44 100 650 369 -44 100 700 420 -40 100 750 404 -43 100 800 459 -47 100 850 513 -39 100 900 479 -44 100 950 552 -42 100 1000 556 -45 100 1050 646 -46 100 1100 630 -47 100 1150 651 -49 100 1200 683 -41 100 1250 688 -48 100 1300 760 -48 100 1350 744 -49 100 1400 830 -49 100 1450 896 -50 100 1500 870 -53 100 1550 953 -53 100 1600 967 -56 100 1650 955 -55 100 1700 1025 -46 100 1750 1024 -48 100 1800 1071 -48 100 1850 1100 -55 100 1900 1180 -47 100 1950 1148 -52 100 2000 1164 diff --git a/core/rewriting/indexing/test/saved/rand1.xml b/core/rewriting/indexing/test/saved/rand1.xml deleted file mode 100644 index 8ff9fe19..00000000 --- a/core/rewriting/indexing/test/saved/rand1.xml +++ /dev/null @@ -1,116 +0,0 @@ - - - - j - false - green - - \pi - - 1 - 1 - - - - - - i - false - red - - \pi - - 1 - 1 - - - - - - h - false - red - - - g - true - - - f - false - red - - \pi - - 1 - 1 - - - - - - e - false - H - - - d - false - green - - \pi - - 1 - 1 - - - - - - c - false - H - - - b - true - - - a - false - H - - - - Ejib - - - Eiff - - - Ehdg - - - Eghc - - - Efjb - - - Eejj - - - Edad - - - Ecch - - - Ebhg - - - Eagf - - - - \ No newline at end of file diff --git a/core/rewriting/indexing/treetest.ML b/core/rewriting/indexing/treetest.ML deleted file mode 100644 index 25f695cd..00000000 --- a/core/rewriting/indexing/treetest.ML +++ /dev/null @@ -1,76 +0,0 @@ -structure TreeTester -= struct - - structure G = RGGraph; - structure LS = RGGraphLoaderSaver; - structure H = Histogram; - structure HTree = HistTree; - structure AList = AdjacencyList; - structure ATree = AdjacencyTree; - structure NSet = V.NSet; - structure NTab = V.NTab; - structure Lib = Library; - structure RG = RandomGraph; - structure MSt = MatchState; - structure M = Match; - - - (*result generators*) - - (*outputs 5-tuple : (number of rules, leaves, bags (non-empty), max depth, - graphs - to ensure none are duplicated*) - fun test_atree patgen ruleset_range scale = map (fn n => - let - val pats = patgen (n*scale) - val atree= ATree.create_atree_from_graphlist pats - val (n1,n2,n3,n4) = ATree.test_tree atree - in - (n*scale,n1,n2,n3,n4) - end) ruleset_range; - (*outputs 5-tuple : (number of rules, leaves, max depth, av depth, graphs*) - fun test_htree patgen ruleset_range scale = map (fn n => - let - val pats = patgen (n*scale) - val htree= HTree.make_htree pats - val (n1,n2,n3,n4) = HTree.test_tree htree - in - (n*scale,n1,n2,n3,n4) - end) ruleset_range; - - fun std_atree_test min max = test_atree RG.std_pattern_set (min upto max) 20; - fun std_htree_test min max = test_htree RG.std_pattern_set (min upto max) 20; - fun simple_htree_test min max = test_htree RG.simple_pattern_set (min upto max) 20; - - - fun varied_atree_test min max = test_atree RG.varied_pattern_set (min upto max) 20; - (*output wrapper*) - - local - open TextIO - in - fun output results filename = - let - val dir = "indexing/test/results/" ^ filename - val ostream = openAppend dir - val show = 
Int.toString - val showreal = Real.toString - in - ostream; - map (fn (nrules, nleaves, n3, n4, ngraphs) => - outputSubstr (ostream, Substring.full ( - (show nrules) ^ "\t" ^ - (show nleaves) ^ "\t" ^ - (show n3) ^ "\t" ^ - (show n4) ^ "\t" ^ - (showreal ngraphs) ^ "\n") - ) - ) results; - closeOut ostream - end - - fun test_out thing name reps = - map (fn n => output (thing ()) name) (1 upto reps) - - end; - -end; diff --git a/core/rewriting/indexing/vkind.ML b/core/rewriting/indexing/vkind.ML deleted file mode 100644 index 34f9f119..00000000 --- a/core/rewriting/indexing/vkind.ML +++ /dev/null @@ -1,56 +0,0 @@ - -signature VKIND -= sig - structure V : RG_VERTEX - structure NSet : BASIC_NAME_SET - type T - - (*get kind of vertex passed, format (name,(data,(in,out))) comes from *) - (* result of get_vertex_list *) - val get_kind : (V.name * (V.data *(NSet.T * NSet.T))) - -> T - (*give ordering on vertex kinds*) - val kind_ord : T * T -> order - val kind_eq : T * T -> bool - - val is_boundary : T -> bool - -end; - - -(* type of vertex kinds: X/Z/H and arity, also includes boundary type*) -structure VKind : VKIND -= struct - structure V = Vertex; - structure NSet = E.NTab.NSet; - - - type data = V.IData.data; - datatype T = Kind of (data * int) (*quantum node type and arity*) - | Bound; (*boundary vertex*) - - - - fun get_kind (name,(idata,(inedges,outedges))) = - let - val arity = NSet.cardinality inedges + NSet.cardinality outedges - in case idata of - V.NVert qdata => Kind (qdata,arity) - |V.BVert => Bound - end; - - (*arities are compared first*) - fun kind_ord (Bound,Bound) = EQUAL - | kind_ord (Bound,_) = LESS - | kind_ord (_, Bound) = GREATER - | kind_ord (Kind (d1,a1),Kind (d2,a2)) = - case Int.compare (a1,a2) of - EQUAL => V.IData.data_ord (d1,d2) - | LESS => LESS - | GREATER=>GREATER; - - fun kind_eq (k1,k2) = (kind_ord (k1,k2) = EQUAL); - - fun is_boundary k = kind_eq (k,Bound) - -end; diff --git a/core/rewriting/rewriter.ML b/core/rewriting/rewriter.ML deleted file mode 100644 index b004ad68..00000000 --- a/core/rewriting/rewriter.ML +++ /dev/null @@ -1,216 +0,0 @@ -(** - * An implementation of double-pushout rewriting for string graphs - * - * Delegates to a MATCH_SEARCH implementation to find matches, then cuts - * out the matched subgraph (except for the boundary) and pastes in the - * RHS of the rule. - *) - -signature REWRITER_SHARING = -sig - structure Rule : OGRAPH_RULE_SHARING - structure Graph : OGRAPH_SHARING - type match -end - -signature REWRITER = -sig - structure Log : LOG - - (* NB: we don't require the Rule.Graph and Graph to be the same *) - structure Rule : OGRAPH_RULE - structure Graph : OGRAPH - sharing Rule.Sharing.Graph = Graph.Sharing - type match - - structure Sharing : REWRITER_SHARING - sharing Sharing.Rule = Rule.Sharing - sharing Sharing.Graph = Graph.Sharing - sharing type Sharing.match = match - - (** - * Find all matches of a rule's LHS in a graph - *) - val find_matches : Rule.T - -> Graph.T - -> match Seq.seq - - (** - * Find all matchings of a rule's LHS, given an initial mapping - * of vertices to try matching - *) - val find_matches_with_prematch : Rule.T - -> Graph.T - -> VVInj.T - -> match Seq.seq - - (** - * Find all matches of a rule's LHS in a subgraph of a graph - * - * - * The subgraph is defined by the vertex set given in the third argument. 
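[Editorial note, not part of the original diff: the signature above describes matching a rule's LHS either against the whole graph or against the subgraph spanned by a given vertex set. The following self-contained Standard ML sketch is a hedged illustration of that idea only; plain string lists stand in for V.NSet.T and for a match's vertex image, and the names subset, image_of and matches_in_subgraph are assumptions introduced here, not part of Quantomatic.]

    (* Hypothetical sketch: restricting matching to a subgraph amounts to
       keeping only those matches whose vertex image lies inside the chosen
       vertex set.  String lists stand in for V.NSet.T and for the image. *)
    fun subset xs ys =
      List.all (fn x => List.exists (fn y => y = x) ys) xs

    (* image_of : 'm -> string list  gives the target vertices hit by a match *)
    fun matches_in_subgraph image_of vset ms =
      List.filter (fn m => subset (image_of m) vset) ms

    (* e.g. matches_in_subgraph (fn m => m) ["a","b","c"] [["a","b"], ["a","d"]]
       evaluates to [["a","b"]] *)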
- *) - val find_matches_in_subgraph : Rule.T - -> Graph.T - -> V.NSet.T - -> match Seq.seq - - (** - * Find all rewrites for a rule in a graph - * - * As well as the rewritten graph, a prepared version of the rule is returned. - *) - val find_rewrites : Rule.T - -> Graph.T - -> (Rule.T * Graph.T) Seq.seq - - (** - * Find all rewrites for a rule in a graph, given an initial - * mapping of vertices to try matching - *) - val find_rewrites_with_prematch : Rule.T - -> Graph.T - -> VVInj.T - -> (Rule.T * Graph.T) Seq.seq - - (** - * Find all rewrites for a rule in a subgraph of a graph - * - * The subgraph is defined by the vertex list given in the third argument. - *) - val find_rewrites_in_subgraph : Rule.T - -> Graph.T - -> V.NSet.T - -> (Rule.T * Graph.T) Seq.seq - - val rewrite_at : Rule.T - -> match - -> (Rule.T * Graph.T) - - (* a couple of helpful fns because match is opaque *) - val print_match : match -> unit - val pretty_match : match -> Pretty.T -end - -(** - * Rewriter for !-graphs. - * - * Note that, although the rewriter signature does not require the rule graph - * type to be the actual graph type, our use of BG_MATCH_SEARCH does force them - * to agree. - *) -functor BangGraphRewriter( - structure Rule : BANG_GRAPH_RULE - structure Matcher : BG_MATCH_SEARCH - sharing Matcher.Sharing.Graph = Rule.Sharing.Graph -) : REWRITER = -struct - structure Log : LOG = Log(val init_level = 0) - - structure Rule = Rule - structure Graph = Rule.Graph - type match = Matcher.Match.T - structure Match = Matcher.Match - - fun log_p pretty_f lvl name = - Log.logf lvl (fn g => Pretty.string_of - (Pretty.chunks [Pretty.str (name^":"),(pretty_f g)])) - - val log_graph = log_p Graph.pretty - (*val log_vrnm = log_p V.Rnm.pretty - val log_ernm = log_p E.Rnm.pretty*) - - fun find_matches rule graph = Matcher.match (Rule.get_lhs rule) graph - - fun find_matches_with_prematch rule graph prematch = - Matcher.match_with_prematch (Rule.get_lhs rule) graph prematch - - fun find_matches_in_subgraph rule graph vset = - Matcher.match_subgraph (Rule.get_lhs rule) graph vset - - fun sg_rewrite_at m rhs = let - val g = Match.get_tgt m - val pat = Match.get_pat m - val (vmap,emap) = (Match.get_vmap m, Match.get_emap m) - val rhs' = snd (rhs |> Graph.apply_data_subst (Match.get_subst m)) - - (* try, whenever possible, to inherit vertex annotations (e.g. 
coords) from the target graph *) - val v_anno = VTab.compose (Graph.get_v_anno_tab g, vmap) - val rhs' = V.NSet.fold - (fn v => fn g => - case VTab.get_opt v_anno v - of SOME a => g |> Graph.set_vertex_annotation (v, a) - | NONE => g) - (Graph.get_vertices rhs') rhs' - - val shared_boundary = Graph.get_boundary pat - val lhs_interior = V.NSet.subtract (Graph.get_vertices pat) shared_boundary - - val shared_boundary_img = VVInj.img_of_set vmap shared_boundary - - val h = g |> E.NSet.fold (Graph.delete_edge o EEInj.get emap) (Graph.get_edges pat) - |> V.NSet.fold (Graph.delete_vertex o VVInj.get vmap) lhs_interior - - val _ = log_graph 4 "OGraphSubst.rewrite: pattern graph" pat - val _ = log_graph 3 "OGraphSubst.rewrite: G" g - val _ = log_graph 3 "OGraphSubst.rewrite: G-pat" h - val _ = log_graph 4 "OGraphSubst.rewrite: RHS" rhs' - - val lhs' = snd (pat |> Graph.apply_data_subst (Match.get_subst m)) - val rule = Rule.mk (Graph.minimise lhs', Graph.minimise rhs') - val vrn = VSub.empty - |> V.NSet.fold (fn b => VSub.add (b, VVInj.get vmap b)) shared_boundary - |> VSub.avoid_set_in_cod (V.NSet.subtract (Graph.get_vertices g) shared_boundary_img) - |> VSub.extend_fresh (Rule.get_vertices rule) - val ern = ESub.mk_from_avoids (Graph.get_edges h) |> ESub.extend_fresh (Rule.get_edges rule) - val brn = BSub.coerce (Match.get_bmap m) - val (_,rule') = rule |> Rule.rename_bang_graph_rule (vrn,ern,brn) - - val _ = log_graph 4 "OGraphSubst.rewrite: RHS (prepared)" (Rule.get_rhs rule') - val _ = log_graph 4 "OGraphSubst.rewrite: LHS (prepared)" (Rule.get_lhs rule') - - in - (rule', Graph.merge h (Rule.get_rhs rule')) - end - - (* TODO: this doesn't really make sense as a separate function anymore, - * since sg_rewrite_at does !-graph stuff too *) - fun rewrite_at rule m = let - val rhs = Rule.get_rhs rule - val oper = Match.get_bbox_ops m - val rhs_inst = rhs |> Graph.replay_bbox_ops oper - - val (final_rule,g') = sg_rewrite_at m rhs_inst - in - (final_rule, Graph.minimise g') - end - - fun find_rewrites r g = let - val mseq = find_matches r g - in - Seq.map (rewrite_at r) mseq - end - - fun find_rewrites_with_prematch r g prematch = let - val mseq = find_matches_with_prematch r g prematch - in - Seq.map (rewrite_at r) mseq - end - - fun find_rewrites_in_subgraph r g vset = let - val mseq = find_matches_in_subgraph r g vset - in - Seq.map (rewrite_at r) mseq - end - - val print_match = Matcher.Match.print - val pretty_match = Matcher.Match.pretty - - structure Sharing : REWRITER_SHARING - = struct - structure Rule = Rule.Sharing - structure Graph = Graph.Sharing - type match = match - end -end - diff --git a/core/rewriting/rule.ML b/core/rewriting/rule.ML deleted file mode 100644 index 9fbf37e9..00000000 --- a/core/rewriting/rule.ML +++ /dev/null @@ -1,332 +0,0 @@ -(** - * Rules for rewriting graphs - * - * We store the LHS and the RHS, making sure they are consistent - * (share a boundary, share !-boxes, etc). We also ensure that - * these are the *only* shared vertex or edge names between the - * graphs (FIXME: why?). 
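[Editorial note, not part of the original diff: the header comment above says a rule stores an LHS and an RHS and is only well formed when both sides agree on their boundary. The Standard ML sketch below is a hedged, self-contained illustration of that consistency check in the spirit of Rule.mk and bad_rule_exp; the record type and every name here (graph, same_set, BadRule, mk_rule) are assumptions introduced for illustration, not the actual Quantomatic API.]

    (* Hypothetical sketch: accept a rule only when both sides agree on their
       boundary.  A record of string lists stands in for Graph.T. *)
    type graph = { vertices : string list, boundary : string list }

    fun same_set xs ys =
      List.all (fn x => List.exists (fn y => y = x) ys) xs andalso
      List.all (fn y => List.exists (fn x => x = y) xs) ys

    exception BadRule of string

    fun mk_rule (lhs : graph, rhs : graph) =
      if same_set (#boundary lhs) (#boundary rhs)
      then { lhs = lhs, rhs = rhs, boundary = #boundary lhs }
      else raise BadRule "left and right hand side boundaries differ"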
- *) - -signature OGRAPH_RULE_SHARING = -sig - structure Graph : OGRAPH_SHARING - type T -end - -signature OGRAPH_RULE = -sig - type T - structure Graph : OGRAPH - - structure Sharing : OGRAPH_RULE_SHARING - sharing Sharing.Graph = Graph.Sharing - sharing type Sharing.T = T - - exception bad_rule_exp of string * Graph.T * Graph.T - val mk : (Graph.T * Graph.T) -> T - (* returns the renamings applied to the RHS *) - (*val mk_with_renamings : (Graph.T * Graph.T) -> T * (VSub.T * ESub.T)*) - - (* generate a boundary graph, including bboxes? *) - val get_boundary_vertices : T -> V.NSet.T - - val get_lhs : T -> Graph.T - val get_rhs : T -> Graph.T - - val get_vertices : T -> V.NSet.T - val get_edges : T -> E.NSet.T - val get_bboxes : T -> B.NSet.T - - (* swaps lhs and rhs *) - val symmetric : T -> T - - (* for pretty printing *) - val pretty : T -> Pretty.T - val print : T -> unit - - val rename_ograph_rule : (VSub.T * ESub.T) -> T -> (VSub.T * ESub.T) * T - - val get_rule_annotation : T -> Json.jobj - val set_rule_annotation : Json.jobj -> T -> T -end - -signature BANG_GRAPH_RULE = -sig - (* extends OGRAPH_RULE *) - type T - structure Graph : BANG_GRAPH - - structure Sharing : OGRAPH_RULE_SHARING - sharing Sharing.Graph = Graph.Sharing - sharing type Sharing.T = T - - exception bad_rule_exp of string * Graph.T * Graph.T - val mk : (Graph.T * Graph.T) -> T - (* returns the renamings applied to the RHS *) - (*val mk_with_renamings : (Graph.T * Graph.T) -> T * (VSub.T * ESub.T)*) - - (* generate a boundary graph, including bboxes? *) - val get_boundary_vertices : T -> V.NSet.T - - val get_lhs : T -> Graph.T - val get_rhs : T -> Graph.T - - val get_vertices : T -> V.NSet.T - val get_edges : T -> E.NSet.T - val get_bboxes : T -> B.NSet.T - - (* swaps lhs and rhs *) - val symmetric : T -> T - - (* expand a bbox on both sides *) - val expand_bbox : B.name -> T -> T - - (* kill a bbox on both sides *) - val kill_bbox : B.name -> T -> T - - (* for pretty printing *) - val pretty : T -> Pretty.T - val print : T -> unit - - val rename_ograph_rule : (VSub.T * ESub.T) -> T -> (VSub.T * ESub.T) * T - - val rename_bang_graph_rule : (VSub.T * ESub.T * BSub.T) -> T -> - (VSub.T * ESub.T * BSub.T) * T - - val get_rule_annotation : T -> Json.jobj - val set_rule_annotation : Json.jobj -> T -> T -end - - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-==-=-=-=-=-=-=-=-=-=- *) -functor BangGraphRule(Graph : BANG_GRAPH) : BANG_GRAPH_RULE = -struct - -structure Graph = Graph - -datatype T = Rule of { lhs : Graph.T, - rhs : Graph.T, - boundary_vertices : V.NSet.T, - rule_annotation : Json.jobj } - -fun update_lhs f (Rule r) = Rule {lhs=f(#lhs r),rhs= #rhs r,boundary_vertices= #boundary_vertices r,rule_annotation= #rule_annotation r} -fun update_rhs f (Rule r) = Rule {lhs= #lhs r,rhs=f(#rhs r),boundary_vertices= #boundary_vertices r,rule_annotation= #rule_annotation r} -fun update_boundary_vertices f (Rule r) = Rule {lhs= #lhs r,rhs= #rhs r,boundary_vertices=f(#boundary_vertices r),rule_annotation= #rule_annotation r} -fun update_rule_annotation f (Rule r) = Rule {lhs= #lhs r,rhs= #rhs r,boundary_vertices= #boundary_vertices r,rule_annotation=f(#rule_annotation r)} - -fun get_lhs (Rule r) = #lhs r -fun get_rhs (Rule r) = #rhs r -fun get_boundary_vertices (Rule r) = #boundary_vertices r -fun get_rule_annotation (Rule r) = #rule_annotation r - -fun set_lhs x = update_lhs (fn _ => x) -fun set_rhs x = update_rhs (fn _ => x) -fun set_boundary_vertices x = update_boundary_vertices (fn _ => x) -fun set_rule_annotation x 
= update_rule_annotation (fn _ => x) - - -exception bad_rule_exp of string * Graph.T * Graph.T - -type boundary = { - input_tab : Graph.edata VTab.T, - output_tab : Graph.edata VTab.T, - undir_bound_tab : Graph.edata VTab.T, - isolated : V.NSet.T -} - -fun update_input_tab f (r : boundary) = {input_tab=f(#input_tab r),output_tab= #output_tab r,undir_bound_tab= #undir_bound_tab r,isolated= #isolated r} -fun update_output_tab f (r : boundary) = {input_tab= #input_tab r,output_tab=f(#output_tab r),undir_bound_tab= #undir_bound_tab r,isolated= #isolated r} -fun update_undir_bound_tab f (r : boundary) = {input_tab= #input_tab r,output_tab= #output_tab r,undir_bound_tab=f(#undir_bound_tab r),isolated= #isolated r} -fun update_isolated f (r : boundary) = {input_tab= #input_tab r,output_tab= #output_tab r,undir_bound_tab= #undir_bound_tab r,isolated=f(#isolated r)} - -fun get_input_tab (r : boundary) = #input_tab r -fun get_output_tab (r : boundary) = #output_tab r -fun get_undir_bound_tab (r : boundary) = #undir_bound_tab r -fun get_isolated (r : boundary) = #isolated r - -val get_inputs = VTab.get_dom_set o get_input_tab -val get_outputs = VTab.get_dom_set o get_output_tab -val get_undir_bounds = VTab.get_dom_set o get_undir_bound_tab - -fun get_full_boundary (bnd : boundary) = - get_inputs bnd |> V.NSet.union_merge (get_outputs bnd) - |> V.NSet.union_merge (get_undir_bounds bnd) - |> V.NSet.union_merge (get_isolated bnd) - -val empty_boundary : boundary = { - input_tab = VTab.empty, output_tab = VTab.empty, - undir_bound_tab = VTab.empty, isolated = V.NSet.empty -} - -fun find_graph_boundary g = -let - fun build_boundary v (bnd : boundary) = - if Graph.is_node_vertex g v then bnd - else - case (E.NSet.tryget_singleton (Graph.get_in_edges g v), - E.NSet.tryget_singleton (Graph.get_out_edges g v)) - of (SOME _, SOME _) => bnd - | (SOME e, NONE) => - (case Graph.get_edge_dir_and_data g e - of (Directed,ed) => bnd |> update_output_tab (VTab.add (v,ed)) - | (Undirected,ed) => bnd |> update_undir_bound_tab (VTab.add (v,ed))) - | (NONE, SOME e) => - (case Graph.get_edge_dir_and_data g e - of (Directed,ed) => bnd |> update_input_tab (VTab.add (v,ed)) - | (Undirected,ed) => bnd |> update_undir_bound_tab (VTab.add (v,ed))) - | (NONE,NONE) => bnd |> update_isolated (V.NSet.add v) -in - V.NSet.fold build_boundary (Graph.get_vertices g) empty_boundary -end - -(* Internal function that makes sure that: - * - boundary names are the same - * - !-boxes names are the same - * - !-boxes in LHS bang the same boundary vertices as in RHS - * and returns the shared boundary names - *) -fun get_rule_bounds lhs rhs = - let - val lhs_bnd = find_graph_boundary lhs - val rhs_bnd = find_graph_boundary rhs - val boundary_vs = get_full_boundary lhs_bnd - - fun has_same_bboxes v sofar = sofar andalso - B.NSet.eq (Graph.get_bboxes_containing_vertex lhs v) - (Graph.get_bboxes_containing_vertex rhs v) - - fun has_same_bbox_children bbox sofar = sofar andalso - B.NSet.eq (Graph.get_bbox_children lhs bbox) - (Graph.get_bbox_children rhs bbox) - in - if V.NSet.is_empty (get_isolated lhs_bnd) andalso - V.NSet.is_empty (get_isolated rhs_bnd) andalso - VTab.tab_eq Graph.edata_eq (get_input_tab lhs_bnd, get_input_tab rhs_bnd) andalso - VTab.tab_eq Graph.edata_eq (get_output_tab lhs_bnd, get_output_tab rhs_bnd) andalso - VTab.tab_eq Graph.edata_eq (get_undir_bound_tab lhs_bnd, get_undir_bound_tab rhs_bnd) andalso - B.NSet.eq (Graph.get_bboxes lhs) (Graph.get_bboxes rhs) andalso - B.NSet.fold has_same_bbox_children (Graph.get_bboxes lhs) 
true andalso - V.NSet.fold has_same_bboxes boundary_vs true - then boundary_vs - else - raise bad_rule_exp ("get_rule_bounds: Left and right hand side boundaries are different", lhs, rhs) - end - -(*fun mk_with_renamings (lhs,rhs) = - let - val bnd = get_rule_bounds lhs rhs - val lhs_int = V.NSet.subtract (Graph.get_vertices lhs) bnd - val rhs_int = V.NSet.subtract (Graph.get_vertices rhs) bnd - (* rename rhs; sharing bvertex names with lhs, - all else must be distinct, but do minimal amount of renaming *) - val vrn = VSub.empty |> VSub.extend_fixed bnd - |> VSub.avoid_set_in_cod lhs_int - |> VSub.extend_fresh rhs_int - val ern = ESub.empty |> ESub.avoid_set_in_cod (Graph.get_edges lhs) - |> ESub.extend_fresh (Graph.get_edges rhs) - val brn = BSub.empty |> BSub.extend_fixed (Graph.get_bboxes lhs) - - val ((vrn,ern,_), rhs') = Graph.rename_bang_graph (vrn,ern,brn) rhs - in (Rule { lhs = lhs, rhs = rhs', boundary_vertices = bnd }, - (vrn,ern)) - end; -val mk = fst o mk_with_renamings -*) - -fun mk (lhs,rhs) = - let - val bnd = get_rule_bounds lhs rhs - in (Rule { lhs = lhs, rhs = rhs, boundary_vertices = bnd, - rule_annotation = Json.empty_obj }) - end - -(*fun get_lhs (Rule rep) = #lhs rep; -fun get_rhs (Rule rep) = #rhs rep; -fun get_boundary_vertices (Rule rep) = #boundary_vertices rep; - -fun update_lhs f (Rule rep) = - Rule { lhs = f (#lhs rep), rhs = #rhs rep, boundary_vertices = #boundary_vertices rep }; -fun update_rhs f (Rule rep) = - Rule { lhs = #lhs rep, rhs = f (#rhs rep), boundary_vertices = #boundary_vertices rep }; -fun update_boundary_vertices f (Rule rep) = - Rule { lhs = #lhs rep, rhs = #rhs rep, boundary_vertices = f (#boundary_vertices rep) };*) - -(* swap lhs and rhs of a rule *) -fun symmetric r = mk (get_rhs r, get_lhs r) - -fun expand_bbox b rule = -let - val (exp, lhs') = Graph.expand_bbox_op b (get_lhs rule) - val rhs' = Graph.replay_bbox_ops [exp] (get_rhs rule) -in mk (lhs', rhs') -end - -fun kill_bbox b rule = -let - val (kill, lhs') = Graph.kill_bbox_op b (get_lhs rule) - val rhs' = Graph.replay_bbox_ops [kill] (get_rhs rule) -in mk (lhs', rhs') -end - -fun get_vertices rule = - V.NSet.union_merge - (Graph.get_vertices (get_lhs rule)) - (Graph.get_vertices (get_rhs rule)) -fun get_edges rule = - E.NSet.union_merge - (Graph.get_edges (get_lhs rule)) - (Graph.get_edges (get_rhs rule)) -fun get_bboxes rule = - B.NSet.union_merge - (Graph.get_bboxes (get_lhs rule)) - (Graph.get_bboxes (get_rhs rule)) - -(* -fun try_rename1_vname n1 n2 rule = - (SOME - (rule |> (update_lhs (untryify (Graph.try_rename1_vname n1 n2))) - |> (update_rhs (untryify (Graph.try_rename1_vname n1 n2))) - |> (update_boundary_vertices (untryify (V.NSet.try_rename1 n1 n2))))) - handle tryify_failed () => NONE -(* FIXME: make sure disjoint names property is preserved *) -fun try_rename1_ename n1 n2 rule = - (SOME - (rule |> (update_lhs (untryify (Graph.try_rename1_ename n1 n2))) - |> (update_rhs (untryify (Graph.try_rename1_ename n1 n2))))) - handle tryify_failed () => NONE -fun try_rename1_bbox n1 n2 rule = - (SOME - (rule |> (update_lhs (untryify (Graph.try_rename1_bbox n1 n2))) - |> (update_rhs (untryify (Graph.try_rename1_bbox n1 n2))))) - handle tryify_failed () => NONE*) - -fun rename_bang_graph_rule (vsub,esub,bsub) rule = let - val vrnm = vsub |> VSub.extend_fixed (get_vertices rule) - val ernm = esub |> ESub.extend_fixed (get_edges rule) - val brnm = bsub |> BSub.extend_fixed (get_bboxes rule) -in ((vrnm,ernm,brnm), - rule |> update_lhs (Graph.rename_bang_graph_anon (vrnm,ernm,brnm)) - 
|> update_rhs (Graph.rename_bang_graph_anon (vrnm,ernm,brnm)) - |> update_boundary_vertices (VSub.img_of_set vrnm)) -end - -fun rename_ograph_rule (vsub,esub) rule = let - val ((vrnm,ernm,_),rule') = rename_bang_graph_rule (vsub,esub,BSub.empty) rule -in ((vrnm,ernm),rule') -end - -fun pretty (Rule rep) = - Pretty.chunks - [Pretty.block [Pretty.str "Rule: "], - Pretty.block [Pretty.str "LHS: ", Graph.pretty (#lhs rep)], - Pretty.block [Pretty.str "RHS: ", Graph.pretty (#rhs rep)]] - -val print = Pretty.writeln o pretty - - -structure Sharing = struct - structure Graph = Graph.Sharing - type T = T -end - -end diff --git a/core/rewriting/ruleset_rewriter.ML b/core/rewriting/ruleset_rewriter.ML deleted file mode 100644 index 97afbda1..00000000 --- a/core/rewriting/ruleset_rewriter.ML +++ /dev/null @@ -1,144 +0,0 @@ -signature RULESET_REWRITER_SHARING = -sig - structure Ruleset : RULESET_SHARING - type match -end - -signature RULESET_REWRITER = -sig - structure Log : LOG - - structure Ruleset : RULESET - type match - - structure Sharing : RULESET_REWRITER_SHARING - sharing Sharing.Ruleset = Ruleset.Sharing - sharing type Sharing.match = match - - (* Finds all rewrites for a graph using the active rules - * in a ruleset - *) - val apply : Ruleset.T (* ruleset to use *) - -> Ruleset.Rule.Graph.T (* graph to rewrite *) - -> ((R.name * Ruleset.Rule.T) * Ruleset.Rule.Graph.T) Seq.seq - (* instantiated rule and new graph *) - - (* Finds all rewrites for a subgraph using the active rules - * in a ruleset. Note that the match is actually performed - * on the smallest open subgraph containing the given - * vertices - *) - val apply_in : Ruleset.T (* ruleset to use *) - -> Ruleset.Rule.Graph.T (* graph to rewrite *) - -> V.NSet.T (* subgraph to match against *) - -> ((R.name * Ruleset.Rule.T) * Ruleset.Rule.Graph.T) Seq.seq - (* instantiated rule and new graph *) - - (* As apply, but gives only the first result *) - val apply_first : Ruleset.T - -> Ruleset.Rule.Graph.T - -> ((R.name * Ruleset.Rule.T) * Ruleset.Rule.Graph.T) option - (* As apply_in, but gives only the first result *) - val apply_first_in : Ruleset.T - -> Ruleset.Rule.Graph.T - -> V.NSet.T - -> ((R.name * Ruleset.Rule.T) * Ruleset.Rule.Graph.T) option - - (* Find all rewrites in a set of rules *) - val applicable_rules : Ruleset.T (* background ruleset *) - -> (Ruleset.Rule.T -> Ruleset.Rule.Graph.T -> match Seq.seq) (* matcher *) - -> R.NSet.T (* subset of rules to use *) - -> Ruleset.Rule.Graph.T (* graph to look for matches of rules in *) - -> ((R.name * Ruleset.Rule.T) * Ruleset.Rule.Graph.T) Seq.seq - (* instantiated rule and new graph *) - - (* a couple of helpful fns because match is opaque *) - val print_match : match -> unit - val pretty_match : match -> Pretty.T -end - -(* *) -functor RulesetRewriter( - structure Ruleset : BANG_GRAPH_RULESET - structure Rewriter : REWRITER - sharing Rewriter.Rule.Sharing = Ruleset.Rule.Sharing - sharing Rewriter.Graph.Sharing = Ruleset.Rule.Graph.Sharing -) : RULESET_REWRITER = -struct - structure Log : LOG = Log(val init_level = 0) - - structure Ruleset = Ruleset - type match = Rewriter.match - - structure Rule = Ruleset.BangGraphRule - structure Graph = Rule.Graph - - fun log_p pretty_f lvl name = - Log.logf lvl (fn g => Pretty.string_of - (Pretty.chunks [Pretty.str (name^":"),(pretty_f g)])) - - val log_graph = log_p Graph.pretty - val log_rule = log_p Rule.pretty - - (* apply rules to whole graph *) - (* IMPROVE: provide a single name-space for all rules, and thus avoid - multiple renamings. 
*) - fun applicable_rules rset matcher rnset g = let - val _ = log_graph 4 "RulesetRewriter.applicable_rules: graph" g - in - R.NSet.fold - (fn n => - let - val rule = (Ruleset.get_rule rset n) - val rulename = R.string_of_name n - val _ = log_rule 2 ("Trying rule "^rulename) rule - val matches = matcher rule g - fun apply_rw m = let - val _ = Log.log 1 ("Applying rule "^rulename) - val (rule,g) = Rewriter.rewrite_at rule m - in - ((n,rule),g) - end - in - Seq.append (Seq.map apply_rw matches) - end) - rnset - Seq.empty - end - - fun apply rset g = - applicable_rules rset Rewriter.find_matches - (Ruleset.get_active rset) g - - fun apply_in rset g vset = - let - fun find_matches rule graph = - Rewriter.find_matches_in_subgraph rule graph vset - in - applicable_rules rset find_matches - (Ruleset.get_active rset) g - end - - (* apply just first rule that can be applied *) - fun apply_first rset g = - case Seq.pull (apply rset g) - of NONE => NONE - | SOME (h,_) => SOME h - - fun apply_first_in rset vset g = - case Seq.pull (apply_in rset vset g) - of NONE => NONE - | SOME (h,_) => SOME h - - val print_match = Rewriter.print_match - val pretty_match = Rewriter.pretty_match - - structure Sharing : RULESET_REWRITER_SHARING - = struct - structure Ruleset = Ruleset.Sharing - type match = match - end - - -end (* *) - diff --git a/core/rewriting/simp_util.ML b/core/rewriting/simp_util.ML deleted file mode 100644 index 2f2ba868..00000000 --- a/core/rewriting/simp_util.ML +++ /dev/null @@ -1,210 +0,0 @@ -infixr 4 ++ - -signature SIMP_UTIL = -sig - structure Theory : GRAPHICAL_THEORY - type dstep = (R.name * Theory.Rule.T) * Theory.Graph.T - type simproc = Theory.Graph.T -> dstep Seq.seq - val ruleset : (R.name * Theory.Rule.T) list -> Theory.Ruleset.T - val REDUCE_ALL : Theory.Ruleset.T -> simproc - val REWRITE : (R.name * Theory.Rule.T) -> simproc - val REWRITE_TARGETED : { PATTERN : Theory.Graph.T -> V.name option, - TARGET : Theory.Graph.T -> V.name option} - -> (R.name * Theory.Rule.T) - -> simproc - - val REDUCE : (R.name * Theory.Rule.T) -> simproc - val REDUCE_WITH : { PRE : Theory.Graph.T -> bool , - POST : Theory.Graph.T * Theory.Graph.T -> bool } - -> (R.name * Theory.Rule.T) - -> simproc - - val REDUCE_WHILE : (Theory.Graph.T -> bool) - -> (R.name * Theory.Rule.T) - -> simproc - val REDUCE_METRIC : (Theory.Graph.T -> int) - -> (R.name * Theory.Rule.T) - -> simproc - - val REDUCE_METRIC_TO : int -> (Theory.Graph.T -> int) - -> (R.name * Theory.Rule.T) - -> simproc - - val ++ : simproc * simproc -> simproc - val LOOP : simproc -> simproc - val LOOP_WHILE : (Theory.Graph.T -> bool) -> simproc -> simproc - - val normalise : simproc -> Theory.Graph.T -> Theory.Graph.T option - - - val const_vertex : string -> (Theory.Graph.T -> V.name option) - val json_of_derivation : simproc -> Theory.Graph.T -> Json.json - val save_derivation : string -> simproc -> Theory.Graph.T -> unit - val load_rule : string -> (R.name * Theory.Rule.T) - val load_ruleset : string list -> Theory.Ruleset.T - val load_graph : string -> Theory.Graph.T - val save_graph : string -> Theory.Graph.T -> unit -end - -functor SimpUtil( - structure Theory : GRAPHICAL_THEORY -) : SIMP_UTIL = -struct - -structure Theory = Theory -type dstep = (R.name * Theory.Rule.T) * Theory.Graph.T -type simproc = Theory.Graph.T -> dstep Seq.seq - - -fun load_rule s = (R.mk s, Theory.RuleJSON.input (Json.read_file (s^".qrule"))) -fun load_graph s = Theory.GraphJSON.input (Json.read_file (s^".qgraph")) -fun save_graph s g = Json.write_file 
(s^".qgraph") (Theory.GraphJSON.output g) - -fun ruleset rule_list = fold (fn r => fn rs => ( - rs |> Theory.Ruleset.update_rule r - |> Theory.Ruleset.activate_rule (fst r) - )) rule_list Theory.Ruleset.empty - -fun load_ruleset rule_list = ruleset (map load_rule rule_list) - -fun const_vertex s _ = SOME (V.mk s) - -fun REDUCE_ALL ruleset graph = - Seq.make (fn () => ( - case Seq.pull (Theory.RulesetRewriter.apply ruleset graph) - of SOME ((r,g'), _) => SOME ((r,g'), REDUCE_ALL ruleset g') - | NONE => NONE - )) - -fun REWRITE rule graph = - Seq.make (fn () => ( - case Seq.pull (Theory.Rewriter.find_rewrites (snd rule) graph) - of SOME ((r,g'), _) => SOME (((fst rule, r),g'), Seq.empty) - | NONE => NONE - )) - -fun REDUCE rule graph = - Seq.make (fn () => ( - case Seq.pull (Theory.Rewriter.find_rewrites (snd rule) graph) - of SOME ((r,g'), _) => SOME (((fst rule, r),g'), REDUCE rule g') - | NONE => NONE - )) - -fun REWRITE_TARGETED {PATTERN=patternf, TARGET=targetf} rule graph = - Seq.make (fn () => ( - case (patternf (Theory.Rule.get_lhs (snd rule)), targetf graph) - of (SOME pv, SOME tv) => ( - let - val _ = TextIO.print (V.dest pv ^ " -> " ^ V.dest tv ^ "\n") - val prematch = VVInj.empty |> VVInj.add (pv,tv) - in - case Seq.pull (Theory.Rewriter.find_rewrites_with_prematch (snd rule) graph prematch) - of SOME ((r,g'), _) => SOME (((fst rule, r),g'), Seq.empty) - | NONE => NONE - end) - | _ => NONE - )) - -fun REDUCE_WITH {PRE=PRE, POST=POST} rule graph = - Seq.make (fn () => ( - if not (PRE graph) then NONE - else case Seq.pull (Seq.filter - (fn (_, g') => POST (graph, g')) - (Theory.Rewriter.find_rewrites (snd rule) graph)) - of SOME ((r,g'), _) => SOME (((fst rule, r),g'), REDUCE_WITH {PRE=PRE,POST=POST} rule g') - | NONE => NONE - )) - - -(*fun REDUCE_METRIC metric rule graph = - Seq.make (fn () => ( - let - val m = metric graph - in - case Seq.pull (Seq.filter - (fn (_, g') => metric g' < m) - (Theory.Rewriter.find_rewrites rule graph)) - of SOME ((r,g'), _) => SOME ((r,g'), REDUCE_METRIC metric rule g') - | NONE => NONE - end - ))*) - - -fun REDUCE_METRIC_TO i metric = - REDUCE_WITH { PRE = (fn g => (i <= metric g)), - POST = (fn (g,g') => (metric g' < metric g)) } - -val REDUCE_METRIC = REDUCE_METRIC_TO 0 - -fun REDUCE_WHILE b = REDUCE_WITH { PRE = b, POST = K true } - -(*fun REDUCE_METRIC_TO i metric rule graph = - Seq.make (fn () => ( - let - val m = metric graph - in - if m <= i then NONE - else case Seq.pull (Seq.filter - (fn (_, g') => metric g' < m) - (Theory.Rewriter.find_rewrites rule graph)) - of SOME ((r,g'), _) => SOME ((r,g'), REDUCE_METRIC_TO i metric rule g') - | NONE => NONE - end - ))*) - -(*fun derivation g simproc =*) - -fun json_of_derivation simp g = let - val steps = Seq.list_of (simp g) -in - Json.mk_object [ - ("root", Theory.GraphJSON.output g), - ("steps", - Json.Object (fold_index (fn (i,((rname,rule),graph)) => - Json.update ( - "s" ^ Int.toString i, - Json.mk_object [ - ("parent", if i = 0 then Json.Null else Json.String ("s" ^ Int.toString (i - 1))), - ("rule_name", Json.String (R.dest rname)), - ("rule", Theory.RuleJSON.output rule), - ("graph", Theory.GraphJSON.output graph) - ] - )) - steps Json.empty_obj)), - ("heads", Json.Array [Json.String ("s" ^ Int.toString (length steps - 1))]) - ] -end - -fun save_derivation s simp g = Json.write_file (s^".qderive") (json_of_derivation simp g) - -fun normalise simp g = let - val seq = (simp g) - fun norm prev seq = case Seq.pull seq - of SOME ((_,g), seq') => norm (SOME g) seq' - | NONE => prev -in norm NONE 
seq -end - -(* produces a rewrite sequence by feeding the last graph in seqf1 to seqf2 *) -fun op++ (seqf1, seqf2) graph = let - fun chain prev_graph s1 = - Seq.make (fn () => ( - case Seq.pull s1 - of SOME ((r,g'), s1') => SOME ((r,g'), chain (SOME g') s1') - | NONE => Seq.pull (seqf2 (case prev_graph of SOME g' => g' | NONE => graph)) - )) -in chain NONE (seqf1 graph) -end - -fun LOOP_WHILE f seqf graph = - if (not (f graph)) then Seq.empty - else - Seq.make (fn () => ( - case Seq.pull (seqf graph) - of SOME ((r,g'), s1) => SOME ((r,g'), g' |> ((fn _ => s1) ++ (LOOP_WHILE f seqf))) - | NONE => NONE - )) - -val LOOP = LOOP_WHILE (K true) - -end diff --git a/core/rewriting/test/graphs/.gitignore b/core/rewriting/test/graphs/.gitignore deleted file mode 100644 index 0842616d..00000000 --- a/core/rewriting/test/graphs/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/*.dot diff --git a/core/rewriting/test/graphs/bare-wire-rule-1-expected.graph b/core/rewriting/test/graphs/bare-wire-rule-1-expected.graph deleted file mode 100644 index c7eee6f9..00000000 --- a/core/rewriting/test/graphs/bare-wire-rule-1-expected.graph +++ /dev/null @@ -1,8 +0,0 @@ -{ - "node_vertices" : ["a","b","c"], - "dir_edges" : - { - "1" : { "src" : "a", "tgt" : "b" }, - "2" : { "src" : "b", "tgt" : "c" } - } -} diff --git a/core/rewriting/test/graphs/bare-wire-rule-1-lhs.graph b/core/rewriting/test/graphs/bare-wire-rule-1-lhs.graph deleted file mode 100644 index 00bc75b6..00000000 --- a/core/rewriting/test/graphs/bare-wire-rule-1-lhs.graph +++ /dev/null @@ -1,7 +0,0 @@ -{ - "wire_vertices" : ["a","b"], - "dir_edges" : - { - "1" : { "src" : "a", "tgt" : "b" } - } -} diff --git a/core/rewriting/test/graphs/bare-wire-rule-1-rhs.graph b/core/rewriting/test/graphs/bare-wire-rule-1-rhs.graph deleted file mode 100644 index ef24072b..00000000 --- a/core/rewriting/test/graphs/bare-wire-rule-1-rhs.graph +++ /dev/null @@ -1,9 +0,0 @@ -{ - "wire_vertices" : ["a","b"], - "node_vertices" : ["c"], - "dir_edges" : - { - "2" : { "src" : "a", "tgt" : "c" }, - "3" : { "src" : "c", "tgt" : "b" } - } -} diff --git a/core/rewriting/test/graphs/bare-wire-rule-1-target.graph b/core/rewriting/test/graphs/bare-wire-rule-1-target.graph deleted file mode 100644 index f7ec5330..00000000 --- a/core/rewriting/test/graphs/bare-wire-rule-1-target.graph +++ /dev/null @@ -1,10 +0,0 @@ -{ - "wire_vertices" : ["n","o"], - "node_vertices" : ["m","p"], - "dir_edges" : - { - "10" : { "src" : "m", "tgt" : "n" }, - "11" : { "src" : "n", "tgt" : "o" }, - "12" : { "src" : "o", "tgt" : "p" } - } -} diff --git a/core/rewriting/test/graphs/bare-wire-rule-2-expected.graph b/core/rewriting/test/graphs/bare-wire-rule-2-expected.graph deleted file mode 100644 index 6452608a..00000000 --- a/core/rewriting/test/graphs/bare-wire-rule-2-expected.graph +++ /dev/null @@ -1,8 +0,0 @@ -{ - "node_vertices" : ["Va","Vb","Vc"], - "dir_edges" : - { - "Ea" : { "src" : "Va", "tgt" : "Vb" }, - "Eb" : { "src" : "Vb", "tgt" : "Vc" } - } -} diff --git a/core/rewriting/test/graphs/bare-wire-rule-2-lhs.graph b/core/rewriting/test/graphs/bare-wire-rule-2-lhs.graph deleted file mode 100644 index 840a03e3..00000000 --- a/core/rewriting/test/graphs/bare-wire-rule-2-lhs.graph +++ /dev/null @@ -1,7 +0,0 @@ -{ - "wire_vertices" : ["Vf","Ve"], - "dir_edges" : - { - "Ea" : { "src" : "Vf", "tgt" : "Ve" } - } -} diff --git a/core/rewriting/test/graphs/bare-wire-rule-2-rhs.graph b/core/rewriting/test/graphs/bare-wire-rule-2-rhs.graph deleted file mode 100644 index d76910fd..00000000 --- 
a/core/rewriting/test/graphs/bare-wire-rule-2-rhs.graph +++ /dev/null @@ -1,9 +0,0 @@ -{ - "wire_vertices" : ["Ve","Vf"], - "node_vertices" : ["Va"], - "dir_edges" : - { - "Ea" : { "src" : "Vf", "tgt" : "Va" }, - "Eb" : { "src" : "Va", "tgt" : "Ve" } - } -} diff --git a/core/rewriting/test/graphs/bare-wire-rule-2-target.graph b/core/rewriting/test/graphs/bare-wire-rule-2-target.graph deleted file mode 100644 index dd2d7ed3..00000000 --- a/core/rewriting/test/graphs/bare-wire-rule-2-target.graph +++ /dev/null @@ -1,10 +0,0 @@ -{ - "wire_vertices" : ["Vc","Vd"], - "node_vertices" : ["Va","Vb"], - "dir_edges" : - { - "Ea" : { "src" : "Va", "tgt" : "Vc" }, - "Eb" : { "src" : "Vc", "tgt" : "Vd" }, - "Ec" : { "src" : "Vd", "tgt" : "Vb" } - } -} diff --git a/core/rewriting/test/graphs/red-spider-1-expected.graph b/core/rewriting/test/graphs/red-spider-1-expected.graph deleted file mode 100644 index 461bd4c4..00000000 --- a/core/rewriting/test/graphs/red-spider-1-expected.graph +++ /dev/null @@ -1,38 +0,0 @@ -{ - "wire_vertices": [ - "Vm", - "Vl" - ], - "node_vertices": { - "Vc": { - "data": { - "type": "VExpr2", - "angle": { - "pretty": "0" - } - } - }, - "Vy": { - "data": { - "type": "VExpr1", - "angle": { - "pretty": "0" - } - } - } - }, - "dir_edges": { - "Ea": { - "src": "Vl", - "tgt": "Vc" - }, - "Ec": { - "src": "Vc", - "tgt": "Vy" - }, - "Eu": { - "src": "Vm", - "tgt": "Vy" - } - } -} diff --git a/core/rewriting/test/graphs/red-spider-1-lhs.graph b/core/rewriting/test/graphs/red-spider-1-lhs.graph deleted file mode 100644 index e823304c..00000000 --- a/core/rewriting/test/graphs/red-spider-1-lhs.graph +++ /dev/null @@ -1,88 +0,0 @@ -{ -"wire_vertices": [ - "Vd", - "Vc", - "Vb", - "Va" -], -"node_vertices": { - "Ve": { - "data": { - "type": "VExpr1", - "angle": { - "vars": { - "a": { - "num": 1, - "denom": 1 - } - }, - "pretty": "a" - } - }, - "annotation": { - "quanto-gui:position": "62:93" - } - }, - "Vf": { - "data": { - "type": "VExpr1", - "angle": { - "vars": { - "b": { - "num": 1, - "denom": 1 - } - }, - "pretty": "b" - } - }, - "annotation": { - "quanto-gui:position": "118:104" - } - } -}, -"dir_edges": { - "Ea": { - "src": "Va", - "tgt": "Ve" - }, - "Eb": { - "src": "Ve", - "tgt": "Vc" - }, - "Ec": { - "src": "Ve", - "tgt": "Vf" - }, - "Ed": { - "src": "Vb", - "tgt": "Vf" - }, - "Ee": { - "src": "Vf", - "tgt": "Vd" - } -}, -"bang_boxes": { - "Ba": { - "contents": [ - "Va" - ] - }, - "Bb": { - "contents": [ - "Vb" - ] - }, - "Bc": { - "contents": [ - "Vc" - ] - }, - "Bd": { - "contents": [ - "Vd" - ] - } -} -} diff --git a/core/rewriting/test/graphs/red-spider-1-rhs.graph b/core/rewriting/test/graphs/red-spider-1-rhs.graph deleted file mode 100644 index 214809a8..00000000 --- a/core/rewriting/test/graphs/red-spider-1-rhs.graph +++ /dev/null @@ -1,71 +0,0 @@ -{ - "wire_vertices": [ - "Vd", - "Vc", - "Vb", - "Va" - ], - "node_vertices": { - "Vg": { - "data": { - "type": "VExpr1", - "angle": { - "vars": { - "a": { - "num": 1, - "denom": 1 - }, - "b": { - "num": 1, - "denom": 1 - } - }, - "pretty": "a + b" - } - }, - "annotation": { - "quanto-gui:position": "80:79" - } - } - }, - "dir_edges": { - "Ef": { - "src": "Va", - "tgt": "Vg" - }, - "Eg": { - "src": "Vg", - "tgt": "Vc" - }, - "Eh": { - "src": "Vb", - "tgt": "Vg" - }, - "Ei": { - "src": "Vg", - "tgt": "Vd" - } - }, - "bang_boxes": { - "Ba": { - "contents": [ - "Va" - ] - }, - "Bb": { - "contents": [ - "Vb" - ] - }, - "Bc": { - "contents": [ - "Vc" - ] - }, - "Bd": { - "contents": [ - "Vd" - ] - } - } -} diff --git 
a/core/rewriting/test/graphs/red-spider-1-tgt.graph b/core/rewriting/test/graphs/red-spider-1-tgt.graph deleted file mode 100644 index c671c47b..00000000 --- a/core/rewriting/test/graphs/red-spider-1-tgt.graph +++ /dev/null @@ -1,50 +0,0 @@ -{ - "wire_vertices": [ - "Vm", - "Vl" - ], - "node_vertices": { - "Vc": { - "data": { - "type": "VExpr2", - "angle": { - "pretty": "0" - } - } - }, - "Vr": { - "data": { - "type": "VExpr1", - "angle": { - "pretty": "0" - } - } - }, - "Vy": { - "data": { - "type": "VExpr1", - "angle": { - "pretty": "0" - } - } - } - }, - "dir_edges": { - "Ea": { - "src": "Vl", - "tgt": "Vc" - }, - "Ec": { - "src": "Vc", - "tgt": "Vy" - }, - "Ed": { - "src": "Vy", - "tgt": "Vr" - }, - "Eu": { - "src": "Vm", - "tgt": "Vy" - } - } -} diff --git a/core/rewriting/test/graphs/spider-bbox-expected.graph b/core/rewriting/test/graphs/spider-bbox-expected.graph deleted file mode 100644 index de761fca..00000000 --- a/core/rewriting/test/graphs/spider-bbox-expected.graph +++ /dev/null @@ -1,22 +0,0 @@ -{ - "node_vertices" : { - "x1":{"data":{"type":"vexpr1","angle":"c + \\pi"}}, - "z1":{"data":"vunit2"}, - "z2":{"data":"vunit2"}, - "z3":{"data":"vunit2"}, - "z4":{"data":"vunit2"}, - "z5":{"data":"vunit2"}, - "z6":{"data":"vunit2"}, - "z7":{"data":"vunit2"}, - "z8":{"data":"vunit2"} - }, - "dir_edges" : { - "e1": { "src" : "z1", "tgt" : "x1" }, - "e2": { "src" : "z2", "tgt" : "x1" }, - "e3": { "src" : "z3", "tgt" : "x1" }, - "e4": { "src" : "x1", "tgt" : "z4" }, - "e5": { "src" : "x1", "tgt" : "z5" }, - "e6": { "src" : "x1", "tgt" : "z6" }, - "e7": { "src" : "x1", "tgt" : "z7" } - } -} diff --git a/core/rewriting/test/graphs/spider-bbox-lhs.graph b/core/rewriting/test/graphs/spider-bbox-lhs.graph deleted file mode 100644 index d7f22f7c..00000000 --- a/core/rewriting/test/graphs/spider-bbox-lhs.graph +++ /dev/null @@ -1,20 +0,0 @@ -{ - "wire_vertices" : ["in1","in2","out1","out2"], - "node_vertices" : { - "x1":{"data":{"type":"vexpr1","angle":"a"}}, - "x2":{"data":{"type":"vexpr1","angle":"b"}} - }, - "dir_edges" : { - "e1": { "src" : "in1", "tgt" : "x1" }, - "e2": { "src" : "x1", "tgt" : "out1" }, - "e3": { "src" : "in2", "tgt" : "x2" }, - "e4": { "src" : "x2", "tgt" : "out2" }, - "e5": { "src" : "x1", "tgt" : "x2" } - }, - "bang_boxes" : { - "b1": { "contents" : [ "in1" ] }, - "b2": { "contents" : [ "in2" ] }, - "b3": { "contents" : [ "out1" ] }, - "b4": { "contents" : [ "out2" ] } - } -} diff --git a/core/rewriting/test/graphs/spider-bbox-rhs.graph b/core/rewriting/test/graphs/spider-bbox-rhs.graph deleted file mode 100644 index 13695acb..00000000 --- a/core/rewriting/test/graphs/spider-bbox-rhs.graph +++ /dev/null @@ -1,18 +0,0 @@ -{ - "wire_vertices" : ["in1","in2","out1","out2"], - "node_vertices" : { - "x1":{"data":{"type":"vexpr1","angle":"a + b"}} - }, - "dir_edges" : { - "e1": { "src" : "in1", "tgt" : "x1" }, - "e2": { "src" : "x1", "tgt" : "out1" }, - "e3": { "src" : "in2", "tgt" : "x1" }, - "e4": { "src" : "x1", "tgt" : "out2" } - }, - "bang_boxes" : { - "b1": { "contents" : [ "in1" ] }, - "b2": { "contents" : [ "in2" ] }, - "b3": { "contents" : [ "out1" ] }, - "b4": { "contents" : [ "out2" ] } - } -} diff --git a/core/rewriting/test/graphs/spider-bbox-target.graph b/core/rewriting/test/graphs/spider-bbox-target.graph deleted file mode 100644 index b36a9dc3..00000000 --- a/core/rewriting/test/graphs/spider-bbox-target.graph +++ /dev/null @@ -1,24 +0,0 @@ -{ - "node_vertices" : { - "x1":{"data":{"type":"vexpr1","angle":"c"}}, - 
"x2":{"data":{"type":"vexpr1","angle":"\\pi"}}, - "z1":{"data":"vunit2"}, - "z2":{"data":"vunit2"}, - "z3":{"data":"vunit2"}, - "z4":{"data":"vunit2"}, - "z5":{"data":"vunit2"}, - "z6":{"data":"vunit2"}, - "z7":{"data":"vunit2"}, - "z8":{"data":"vunit2"} - }, - "dir_edges" : { - "e1": { "src" : "z1", "tgt" : "x2" }, - "e2": { "src" : "z2", "tgt" : "x2" }, - "e3": { "src" : "z3", "tgt" : "x2" }, - "e4": { "src" : "x1", "tgt" : "z4" }, - "e5": { "src" : "x1", "tgt" : "z5" }, - "e6": { "src" : "x1", "tgt" : "z6" }, - "e7": { "src" : "x2", "tgt" : "z7" }, - "e8": { "src" : "x1", "tgt" : "x2" } - } -} diff --git a/core/rewriting/test/graphs/spider-expected.graph b/core/rewriting/test/graphs/spider-expected.graph deleted file mode 100644 index d09beb27..00000000 --- a/core/rewriting/test/graphs/spider-expected.graph +++ /dev/null @@ -1,15 +0,0 @@ -{ - "node_vertices" : { - "x1":{"data":"vunit1"}, - "z1":{"data":"vunit2"}, - "z2":{"data":"vunit2"}, - "z3":{"data":"vunit2"}, - "z4":{"data":"vunit2"} - }, - "dir_edges" : { - "e1": { "src" : "z1", "tgt" : "x1" }, - "e2": { "src" : "z2", "tgt" : "x1" }, - "e3": { "src" : "x1", "tgt" : "z3" }, - "e4": { "src" : "x1", "tgt" : "z4" } - } -} diff --git a/core/rewriting/test/graphs/spider-lhs.graph b/core/rewriting/test/graphs/spider-lhs.graph deleted file mode 100644 index c771d85e..00000000 --- a/core/rewriting/test/graphs/spider-lhs.graph +++ /dev/null @@ -1,14 +0,0 @@ -{ - "wire_vertices" : ["in1","in2","out1","out2"], - "node_vertices" : { - "x1":{"data":"vunit1"}, - "x2":{"data":"vunit1"} - }, - "dir_edges" : { - "e1": { "src" : "in1", "tgt" : "x1" }, - "e2": { "src" : "x1", "tgt" : "out1" }, - "e3": { "src" : "in2", "tgt" : "x2" }, - "e4": { "src" : "x2", "tgt" : "out2" }, - "e5": { "src" : "x1", "tgt" : "x2" } - } -} diff --git a/core/rewriting/test/graphs/spider-rhs.graph b/core/rewriting/test/graphs/spider-rhs.graph deleted file mode 100644 index 2357aae5..00000000 --- a/core/rewriting/test/graphs/spider-rhs.graph +++ /dev/null @@ -1,12 +0,0 @@ -{ - "wire_vertices" : ["in1","in2","out1","out2"], - "node_vertices" : { - "x1":{"data":"vunit1"} - }, - "dir_edges" : { - "e1": { "src" : "in1", "tgt" : "x1" }, - "e2": { "src" : "x1", "tgt" : "out1" }, - "e3": { "src" : "in2", "tgt" : "x1" }, - "e4": { "src" : "x1", "tgt" : "out2" } - } -} diff --git a/core/rewriting/test/graphs/spider-subst-expected.graph b/core/rewriting/test/graphs/spider-subst-expected.graph deleted file mode 100644 index 528ed572..00000000 --- a/core/rewriting/test/graphs/spider-subst-expected.graph +++ /dev/null @@ -1,15 +0,0 @@ -{ - "node_vertices" : { - "x1":{"data":{"type":"vexpr1","angle":"c + \\pi"}}, - "z1":{"data":"vunit2"}, - "z2":{"data":"vunit2"}, - "z3":{"data":"vunit2"}, - "z4":{"data":"vunit2"} - }, - "dir_edges" : { - "e1": { "src" : "z1", "tgt" : "x1" }, - "e2": { "src" : "z2", "tgt" : "x1" }, - "e3": { "src" : "x1", "tgt" : "z3" }, - "e4": { "src" : "x1", "tgt" : "z4" } - } -} diff --git a/core/rewriting/test/graphs/spider-subst-lhs.graph b/core/rewriting/test/graphs/spider-subst-lhs.graph deleted file mode 100644 index 364eeef0..00000000 --- a/core/rewriting/test/graphs/spider-subst-lhs.graph +++ /dev/null @@ -1,14 +0,0 @@ -{ - "wire_vertices" : ["in1","in2","out1","out2"], - "node_vertices" : { - "x1":{"data":{"type":"vexpr1","angle":"a"}}, - "x2":{"data":{"type":"vexpr1","angle":"b"}} - }, - "dir_edges" : { - "e1": { "src" : "in1", "tgt" : "x1" }, - "e2": { "src" : "x1", "tgt" : "out1" }, - "e3": { "src" : "in2", "tgt" : "x2" }, - "e4": { "src" : "x2", "tgt" : 
"out2" }, - "e5": { "src" : "x1", "tgt" : "x2" } - } -} diff --git a/core/rewriting/test/graphs/spider-subst-rhs.graph b/core/rewriting/test/graphs/spider-subst-rhs.graph deleted file mode 100644 index bd0fe4d6..00000000 --- a/core/rewriting/test/graphs/spider-subst-rhs.graph +++ /dev/null @@ -1,12 +0,0 @@ -{ - "wire_vertices" : ["in1","in2","out1","out2"], - "node_vertices" : { - "x1":{"data":{"type":"vexpr1","angle":"a + b"}} - }, - "dir_edges" : { - "e1": { "src" : "in1", "tgt" : "x1" }, - "e2": { "src" : "x1", "tgt" : "out1" }, - "e3": { "src" : "in2", "tgt" : "x1" }, - "e4": { "src" : "x1", "tgt" : "out2" } - } -} diff --git a/core/rewriting/test/graphs/spider-subst-target.graph b/core/rewriting/test/graphs/spider-subst-target.graph deleted file mode 100644 index 282bf975..00000000 --- a/core/rewriting/test/graphs/spider-subst-target.graph +++ /dev/null @@ -1,17 +0,0 @@ -{ - "node_vertices" : { - "x1":{"data":{"type":"vexpr1","angle":"c"}}, - "x2":{"data":{"type":"vexpr1","angle":"\\pi"}}, - "z1":{"data":"vunit2"}, - "z2":{"data":"vunit2"}, - "z3":{"data":"vunit2"}, - "z4":{"data":"vunit2"} - }, - "dir_edges" : { - "e1": { "src" : "z1", "tgt" : "x1" }, - "e2": { "src" : "z2", "tgt" : "x2" }, - "e3": { "src" : "x1", "tgt" : "z3" }, - "e4": { "src" : "x2", "tgt" : "z4" }, - "e5": { "src" : "x1", "tgt" : "x2" } - } -} diff --git a/core/rewriting/test/graphs/spider-target.graph b/core/rewriting/test/graphs/spider-target.graph deleted file mode 100644 index 61d14b22..00000000 --- a/core/rewriting/test/graphs/spider-target.graph +++ /dev/null @@ -1,17 +0,0 @@ -{ - "node_vertices" : { - "x1":{"data":"vunit1"}, - "x2":{"data":"vunit1"}, - "z1":{"data":"vunit2"}, - "z2":{"data":"vunit2"}, - "z3":{"data":"vunit2"}, - "z4":{"data":"vunit2"} - }, - "dir_edges" : { - "e1": { "src" : "z1", "tgt" : "x1" }, - "e2": { "src" : "z2", "tgt" : "x2" }, - "e3": { "src" : "x1", "tgt" : "z3" }, - "e4": { "src" : "x2", "tgt" : "z4" }, - "e5": { "src" : "x1", "tgt" : "x2" } - } -} diff --git a/core/rewriting/test/rewriter-test.ML b/core/rewriting/test/rewriter-test.ML deleted file mode 100644 index edc1335c..00000000 --- a/core/rewriting/test/rewriter-test.ML +++ /dev/null @@ -1,158 +0,0 @@ -structure Test_BangGraphRewriter = BangGraphRewriter( - structure Rule = Test_BG_Rule - structure Matcher = Test_GreedyMatcher -) -(* set to 4 for really detailed messages *) -(* Test_BangGraphRewriter.Log.level_ref := 2; *) - -local - structure Tools = Test_Bang_Graph_Tools(Test_Bang_Graph) - open Tools - structure RW = Test_BangGraphRewriter - - structure HomeoFinder = BangGraphHomeomorphismSearcher(Test_Bang_Graph) - - - type test_data = { - name : string, - subgraph : (string list) option, - lhs : string, - rhs : string, - target : string, - expected : string - }; - exception graph_load_exn of (string*exn); - - - - fun test_f ({ name = nm, - subgraph = subg, - lhs = lhs_f, - rhs = rhs_f, - target = tgt_f, - expected = exp_f }:test_data) = - let - fun test () = - let - val lhs = Tools.load_graph lhs_f - handle e => raise graph_load_exn (lhs_f,e); - val rhs = Tools.load_graph rhs_f - handle e => raise graph_load_exn (rhs_f,e); - val tgt = Tools.load_graph tgt_f - handle e => raise graph_load_exn (tgt_f,e); - val exp = Tools.load_graph exp_f - handle e => raise graph_load_exn (exp_f,e); - val rule = Test_BG_Rule.mk (lhs,rhs) - val rw_seq = - case subg - of SOME vs => RW.find_rewrites_in_subgraph - rule - tgt - (V.NSet.of_list (map V.mk vs)) - | NONE => RW.find_rewrites - rule - tgt - val ((_,result),rw_seq') = - case 
Seq.pull rw_seq - of SOME x => x - | NONE => raise ERROR "No matches!" - val (exp,result) = (G.normalise exp, G.normalise result) - (* make sure the remaining rewrites actually succeed *) - val _ = Seq.list_of rw_seq' - in - if HomeoFinder.is_homeomorphic exp result - then result - else ( - save_dot_graph result (tgt_f^".result.dot"); - save_dot_graph exp (exp_f^".dot"); - writeln "Result (normalised):"; - writeln (" (written to "^tgt_f^".result.dot)"); - G.print result; - writeln "Expected (normalised):"; - writeln (" (written to "^exp_f^".dot)"); - G.print exp; - raise ERROR "Got wrong rewrite result" - ) - end - in - Testing.test nm test () - end; - - fun run_tests [] = Testing.assert_no_failed_tests() - | run_tests (t::ts) = (test_f t; run_tests ts); - -in - (* Add tests here; paths are relative to this file *) - val _ = run_tests [ - { name = "spider rewrite", - subgraph = NONE, - lhs = "graphs/spider-lhs.graph", - rhs = "graphs/spider-rhs.graph", - target = "graphs/spider-target.graph", - expected = "graphs/spider-expected.graph" - }, - { name = "spider rewrite with angles", - subgraph = NONE, - lhs = "graphs/spider-subst-lhs.graph", - rhs = "graphs/spider-subst-rhs.graph", - target = "graphs/spider-subst-target.graph", - expected = "graphs/spider-subst-expected.graph" - }, - { name = "spider rewrite with !-boxes", - subgraph = NONE, - lhs = "graphs/spider-bbox-lhs.graph", - rhs = "graphs/spider-bbox-rhs.graph", - target = "graphs/spider-bbox-target.graph", - expected = "graphs/spider-bbox-expected.graph" - }, - { name = "spider rewrite with odd names", - subgraph = NONE, - lhs = "graphs/red-spider-1-lhs.graph", - rhs = "graphs/red-spider-1-rhs.graph", - target = "graphs/red-spider-1-tgt.graph", - expected = "graphs/red-spider-1-expected.graph" - }, - { name = "spider rewrite with odd names (subgraph)", - subgraph = SOME ["Vm","Vl","Vc","Vr","Vy"], - lhs = "graphs/red-spider-1-lhs.graph", - rhs = "graphs/red-spider-1-rhs.graph", - target = "graphs/red-spider-1-tgt.graph", - expected = "graphs/red-spider-1-expected.graph" - }, - { name = "bare wire rewrite", - subgraph = NONE, - lhs = "graphs/bare-wire-rule-1-lhs.graph", - rhs = "graphs/bare-wire-rule-1-rhs.graph", - target = "graphs/bare-wire-rule-1-target.graph", - expected = "graphs/bare-wire-rule-1-expected.graph" - }, - { name = "bare wire rewrite (subgraph)", - subgraph = SOME ["m","p"], - lhs = "graphs/bare-wire-rule-1-lhs.graph", - rhs = "graphs/bare-wire-rule-1-rhs.graph", - target = "graphs/bare-wire-rule-1-target.graph", - expected = "graphs/bare-wire-rule-1-expected.graph" - }, - { name = "bare wire rewrite with some initially clashing names", - subgraph = NONE, - lhs = "graphs/bare-wire-rule-2-lhs.graph", - rhs = "graphs/bare-wire-rule-2-rhs.graph", - target = "graphs/bare-wire-rule-2-target.graph", - expected = "graphs/bare-wire-rule-2-expected.graph" - }, - { name = "bare wire rewrite with some initially clashing names (subgraph)", - subgraph = SOME ["Va","Vb"], - lhs = "graphs/bare-wire-rule-2-lhs.graph", - rhs = "graphs/bare-wire-rule-2-rhs.graph", - target = "graphs/bare-wire-rule-2-target.graph", - expected = "graphs/bare-wire-rule-2-expected.graph" - } - (* TODO: more tests: - * - clashing vertex and edge names - * - clashing expression variable names (in pattern and target) - * - !-boxes with non-boundary vertices - * - nested/overlapping !-boxes - *) - ]; -end; - diff --git a/core/rewriting/test/rule-test.ML b/core/rewriting/test/rule-test.ML deleted file mode 100644 index 118642e2..00000000 --- 
a/core/rewriting/test/rule-test.ML +++ /dev/null @@ -1,183 +0,0 @@ -structure Test_BG_Rule = BangGraphRule(Test_Bang_Graph); - -local - structure Tools = Test_Bang_Graph_Tools(Test_Bang_Graph); - open Tools; - structure Rule = Test_BG_Rule; - structure HomeoFinder = BangGraphHomeomorphismSearcher(G) - - fun test_mk_bad_rule (lhs,rhs) = - (Rule.mk (lhs,rhs); - writeln "LHS:"; - G.print lhs; - writeln "RHS:"; - G.print rhs; - raise ERROR "Bad rule accepted") - handle (Rule.bad_rule_exp _) => (); - - val _ = Testing.test "empty graphs" Rule.mk (G.empty,G.empty); - - val lhs = G.empty |> add_vunit1 "z1"; - val rhs = G.empty |> add_vunit1 "z1" - |> add_vunit2 "x1" - |> add_dir_eunit1 "e1" "z1" "x1"; - val _ = Testing.test "empty boundaries" Rule.mk (lhs,rhs); - - - val lhs = G.empty |> add_wv "in1" - |> add_vunit1 "n1" - |> add_dir_eunit1 "e1" "in1" "n1"; - val rhs = G.empty |> add_wv "in1" - |> add_vunit2 "n1" - |> add_dir_eunit1 "e1" "in1" "n1"; - val _ = Testing.test "one input, no outputs" Rule.mk (lhs,rhs); - - val lhs = G.empty |> add_wv "in1" - |> add_wv "out1" - |> add_wv "out2" - |> add_vunit1 "n1" - |> add_vunit2 "n2" - |> add_dir_eunit1 "e1" "in1" "n1" - |> add_dir_eunit1 "e2" "n1" "out1" - |> add_dir_eunit1 "e3" "n2" "out2"; - val rhs = G.empty |> add_wv "in1" - |> add_wv "out1" - |> add_wv "out2" - |> add_vunit2 "n1" - |> add_vunit2 "n2" - |> add_dir_eunit1 "e1" "in1" "n1" - |> add_dir_eunit1 "e2" "n2" "out1" - |> add_dir_eunit1 "e3" "n2" "out2"; - val _ = Testing.test "one input, two outputs" Rule.mk (lhs,rhs); - - val lhs = G.empty |> add_wv "in1" - |> add_vunit1 "n1" - |> add_dir_eunit1 "e1" "in1" "n1"; - val rhs = G.empty |> add_wv "in1" - |> add_vunit1 "n1" - |> add_dir_eunit2 "e1" "in1" "n1"; - val _ = Testing.test "differing input types" test_mk_bad_rule (lhs,rhs); - - val lhs = G.empty |> add_wv "out1" - |> add_wv "out2" - |> add_vunit1 "n1" - |> add_dir_eunit1 "e1" "n1" "out1" - |> add_dir_eunit2 "e2" "n1" "out2"; - val rhs = G.empty |> add_wv "out1" - |> add_wv "out2" - |> add_vunit1 "n1" - |> add_dir_eunit2 "e1" "n1" "out1" - |> add_dir_eunit1 "e2" "n1" "out2"; - val _ = Testing.test "differing output types" test_mk_bad_rule (lhs,rhs); - - val lhs = G.empty |> add_wv "bnd1" - |> add_vunit1 "n1" - |> add_dir_eunit1 "e1" "bnd1" "n1"; - val rhs = G.empty |> add_wv "bnd1" - |> add_vunit1 "n1" - |> add_dir_eunit1 "e1" "n1" "bnd1"; - val _ = Testing.test "differing boundary directions" test_mk_bad_rule (lhs,rhs); - - - val lhs = G.empty |> add_wv "bnd1" - |> add_wv "bnd2" - |> add_vunit1 "n1" - |> add_vunit1 "n2" - |> add_vunit2 "n3" - |> add_vunit2 "n4" - |> add_undir_eunit1 "e1" "n3" "n1" - |> add_undir_eunit1 "e2" "bnd1" "n1" - |> add_undir_eunit1 "e3" "bnd2" "n1" - |> add_undir_eunit1 "e4" "n2" "n4" - val rhs = G.empty |> add_wv "bnd1" - |> add_wv "bnd2" - |> add_vunit1 "n1" - |> add_vunit2 "n2" - |> add_undir_eunit1 "e1" "n1" "bnd1" - |> add_undir_eunit1 "e2" "bnd2" "n2"; - val _ = Testing.test "two undirected boundaries" Rule.mk (lhs,rhs); - - val lhs = G.empty |> add_wv "bnd1" - |> add_wv "bnd2" - |> add_vunit1 "n1" - |> add_undir_eunit1 "e1" "n1" "bnd1" - |> add_undir_eunit1 "e2" "bnd2" "n1"; - val rhs = G.empty |> add_wv "bnd1" - |> add_vunit1 "n1" - |> add_undir_eunit1 "e1" "n1" "bnd1"; - val _ = Testing.test "differing numbers of undirected boundaries" - test_mk_bad_rule (lhs,rhs); - - val lhs = G.empty |> add_wv "bnd1" - |> add_vunit1 "n1" - |> add_undir_eunit2 "e1" "n1" "bnd1" - val rhs = G.empty |> add_wv "bnd1" - |> add_vunit1 "n1" - |> add_undir_eunit1 "e1" 
"n1" "bnd1"; - val _ = Testing.test "differing types of undirected boundaries" - test_mk_bad_rule (lhs,rhs); - - val g = G.empty - |> add_wv "in1" - |> add_wv "in2" - |> add_wv "out1" - |> add_wv "out2" - |> add_vunit1 "n1" - |> add_dir_eunit1 "e1" "in1" "n1" - |> add_dir_eunit1 "e2" "in2" "n1" - |> add_dir_eunit1 "e3" "n1" "out1" - |> add_dir_eunit1 "e4" "n1" "out2"; - - val lhs = g - |> add_bbox "b1" [] - |> add_bbox "b2" ["in1","in2"] - |> add_bbox_with_parent "b3" "b2" ["in1"]; - val rhs = lhs; - val _ = Testing.test "same bboxes" Rule.mk (lhs,rhs); - - val lhs = g - |> add_bbox "b1" []; - val rhs = g - |> add_bbox "b2" []; - val _ = Testing.test "differing !-box names" - test_mk_bad_rule (lhs,rhs); - - val lhs = g - |> add_bbox "b1" ["in1"]; - val rhs = g - |> add_bbox "b1" ["in2"]; - val _ = Testing.test "differing !-box contents" - test_mk_bad_rule (lhs,rhs); - - val lhs = g - |> add_bbox "b1" [] - |> add_bbox "b2" ["in1","in2"] - |> add_bbox_with_parent "b3" "b2" ["in1"]; - val rhs = g - |> add_bbox "b1" [] - |> add_bbox "b2" ["in1","in2"] - |> add_bbox "b3" ["in1"]; - val _ = Testing.test "differing !-box parents" - test_mk_bad_rule (lhs,rhs); - - (* Rule.symmetric *) - - - val lhs = G.empty |> add_wv "in1" - |> add_vunit1 "n1" - |> add_dir_eunit1 "e1" "in1" "n1" - val rhs = G.empty |> add_wv "in1" - |> add_vunit2 "n1" - |> add_vunit2 "n2" - |> add_dir_eunit1 "e1" "in1" "n1" - |> add_dir_eunit1 "e2" "n1" "n2" - val rule = Rule.mk (lhs,rhs) - val rule' = Rule.symmetric rule - val _ = Testing.assert "new RHS iso to old LHS" - (HomeoFinder.is_homeomorphic lhs (Rule.get_rhs rule')) - val _ = Testing.assert "new LHS iso to old RHS" - (HomeoFinder.is_homeomorphic rhs (Rule.get_lhs rule')) - -in - val _ = Testing.assert_no_failed_tests(); -end; diff --git a/core/rewriting/test/ruleset_rewriter-tests.ML b/core/rewriting/test/ruleset_rewriter-tests.ML deleted file mode 100644 index cc4b6962..00000000 --- a/core/rewriting/test/ruleset_rewriter-tests.ML +++ /dev/null @@ -1,9 +0,0 @@ -structure Test_RulesetRewriter : RULESET_REWRITER = RulesetRewriter( - structure Ruleset = Test_BG_Ruleset; - structure Rewriter = Test_BangGraphRewriter; -); -(* set to 4 for really detailed messages *) -(* Test_RulesetRewriter.Log.level_ref := 2; *) - -(* TODO: tests for rewriting against rulesets *) - diff --git a/core/run_protocol.ML b/core/run_protocol.ML deleted file mode 100644 index 3bec6348..00000000 --- a/core/run_protocol.ML +++ /dev/null @@ -1,30 +0,0 @@ -PolyML.SaveState.loadState "heaps/quanto.heap"; - -(* Futures may hang unless we allow at least 4 workers *) -if (Multithreading.max_threads_value() <= 4) -then Multithreading.max_threads_update 4 else (); - -fun run_protocol port () = -let - val _ = TextIO.print "waiting for connection..." 
- val s = TextSocket.local_connect port - val _ = TextIO.print "got connection\n" - val (ins, outs) = TextSocket.get_io_stream s - val _ = JsonControllerProtocol.parallel_run_in_textstreams (ins, outs) - val _ = TextSocket.close s -in () -end - -fun poll_future f = ( - OS.Process.sleep (Time.fromMilliseconds 100); - case Future.peek f - of SOME (Exn.Exn e) => raise e - | _ => (); - OS.Process.sleep (Time.fromMilliseconds 100); - case Future.peek f - of SOME (Exn.Exn e) => raise e - | _ => ()) - -(*val protocol_job = Future.fork (run_protocol 4321); -poll_future protocol_job;*) - diff --git a/core/scratch.thy b/core/scratch.thy deleted file mode 100644 index e37378f4..00000000 --- a/core/scratch.thy +++ /dev/null @@ -1,29 +0,0 @@ -theory scratch -imports quanto -begin - -ML {* -open RG_SimpUtil; -open RG_Theory; - -val _ = cd "/Users/alek/git/quantomatic/core/test"; -val g = load_graph "graphs/target-test"; -val r = snd (load_rule "rules/rotate-targeted"); -val l = Rule.get_lhs r; -val vs = Graph.get_vertices_in_bbox l (B.mk "bx0"); -val bb = Graph.is_bboxed l (V.mk "v0"); - -fun concrete_nhd_size g bb = - V.NSet.cardinality (V.NSet.filter - (not o Graph.is_bboxed g) - (Graph.get_adj_vertices_to_set g - (Graph.get_vertices_in_bbox g bb))) - -fun max_concrete_nhd_bbox g bset = - B.NSet.maximize (concrete_nhd_size g) bset - -val bb = max_concrete_nhd_bbox l (Graph.get_bboxes l) - -*} - -end diff --git a/core/synth.thy b/core/synth.thy deleted file mode 100644 index 7fdf32de..00000000 --- a/core/synth.thy +++ /dev/null @@ -1,15 +0,0 @@ -theory synth -imports core theories -begin - -ML_file "synth/default_gens.ML" -ML_file "synth/metric.ML" -ML_file "synth/graph_equiv.ML" -ML_file "synth/eq_class_tab.ML" -ML_file "synth/default_rws.ML" -ML_file "synth/fast_graph_enum.ML" -ML_file "synth/tensor_equiv.ML" -ML_file "synth/synth_util.ML" -ML_file "synth/theories.ML" - -end diff --git a/core/synth/default_gens.ML b/core/synth/default_gens.ML deleted file mode 100644 index df02812c..00000000 --- a/core/synth/default_gens.ML +++ /dev/null @@ -1,58 +0,0 @@ -functor DefaultGenerators( - structure Graph : BANG_GRAPH -) = -struct - structure Graph = Graph - - val id = let - val gr = Graph.empty - val (a,gr) = gr |> Graph.add_vertex (Graph.WVert) - val (b,gr) = gr |> Graph.add_vertex (Graph.WVert) - in (gr |> Graph.add_edge_anon (Directed, Graph.default_edata) a b, 1, 1) - end - - fun gen data (ins, outs) = let - (* fun addk 0 gr = (gr, V.NSet.empty) - | addk k gr = let - val (v,gr) = gr |> Graph.add_vertex Graph.OVData.WVert - val (gr,vs) = addk (k-1) gr - in (gr, V.NSet.add v vs) - end - val (iv, gr) = Graph.empty |> Graph.add_vertex (Graph.OVData.NVert data) - val (gr,invs) = addk ins gr - val (gr,outvs) = addk outs gr - val gr = V.NSet.fold (fn v => Graph.add_edge_anon (Directed, Graph.default_edata) v iv) invs gr - val gr = V.NSet.fold (fn v => Graph.add_edge_anon (Directed, Graph.default_edata) iv v) outvs gr *) - in (data, ins, outs) - end - - fun gen_list max_arity data_list = let - fun alist 0 0 = [] - | alist k 0 = (0,k)::alist (k-1) (k-1) - | alist k i = (i,k-i)::alist k (i-1) - in (fold_product (cons oo gen) data_list (alist max_arity max_arity) []) - end -end - - -(* -val gens = [ - gen GHZW_VertexData.GHZ 3 0, - gen GHZW_VertexData.W 3 0, - gen GHZW_VertexData.GHZ 2 1, - gen GHZW_VertexData.W 2 1, - gen GHZW_VertexData.GHZ 1 2, - gen GHZW_VertexData.W 1 2, - gen GHZW_VertexData.GHZ 0 3, - gen GHZW_VertexData.W 0 3, - gen GHZW_VertexData.GHZ 2 0, - gen GHZW_VertexData.W 2 0, - gen 
GHZW_VertexData.GHZ 1 1, - gen GHZW_VertexData.W 1 1, - gen GHZW_VertexData.W 0 2, - gen GHZW_VertexData.GHZ 0 2, - gen GHZW_VertexData.GHZ 1 0, - gen GHZW_VertexData.W 1 0, - gen GHZW_VertexData.GHZ 0 1, - gen GHZW_VertexData.W 0 1 -];*) \ No newline at end of file diff --git a/core/synth/default_rws.ML b/core/synth/default_rws.ML deleted file mode 100644 index 77dc97d7..00000000 --- a/core/synth/default_rws.ML +++ /dev/null @@ -1,67 +0,0 @@ -signature SPIDER_REWRITES = -sig - structure Theory : GRAPHICAL_THEORY - val frob_rules : Theory.Graph.nvdata list -> Theory.Ruleset.T - val frob_and_special_rules : Theory.Graph.nvdata list -> Theory.Ruleset.T -end - -functor SpiderRewrites ( - structure Theory : GRAPHICAL_THEORY -) : SPIDER_REWRITES = -struct - structure Theory = Theory - structure Ruleset = Theory.Ruleset - structure Rule = Theory.Rule - structure Graph = Theory.Graph - - val edge = (Directed, Graph.default_edata) - - fun bx g = let - val (vn,g) = g |> Graph.add_vertex Graph.WVert - val (bn,g) = g |> Graph.add_bbox - in (vn, #2 (Graph.add_to_bbox bn (V.NSet.single vn) g)) - end - - fun frob data = let - val rhs = Graph.empty - val (b1,rhs) = rhs |> bx - val (b2,rhs) = rhs |> bx - val (b3,rhs) = rhs |> bx - val (b4,rhs) = rhs |> bx - val (i1,rhs) = rhs |> Graph.add_vertex (Graph.NVert data) - val rhs = rhs |> Graph.add_edge_anon edge b1 i1 |> Graph.add_edge_anon edge i1 b2 - val (i2,lhs) = rhs |> Graph.add_vertex (Graph.NVert data) - val lhs = lhs |> Graph.add_edge_anon edge b3 i2 |> Graph.add_edge_anon edge i2 b4 |> Graph.add_edge_anon edge i1 i2 - val rhs = rhs |> Graph.add_edge_anon edge b3 i1 |> Graph.add_edge_anon edge i1 b4 - in Rule.mk (lhs,rhs) - end - - fun special data = let - val rhs = Graph.empty - val (b1,rhs) = rhs |> bx - val (b2,rhs) = rhs |> bx - val (i1,rhs) = rhs |> Graph.add_vertex (Graph.NVert data) - val rhs = rhs |> Graph.add_edge_anon edge b1 i1 |> Graph.add_edge_anon edge i1 b2 - val lhs = rhs |> Graph.add_edge_anon edge i1 i1 - in Rule.mk (lhs,rhs) - end - - val t_redex = TagName.mk "redex" - - fun add_frob_and_special d rs = - let - val (fr, rs) = Ruleset.add_fresh_rule (frob d) rs - val (sp, rs) = Ruleset.add_fresh_rule (special d) rs - in rs |> Ruleset.tag_rule fr t_redex - |> Ruleset.tag_rule sp t_redex - end - - fun add_frob d rs = - let - val (fr, rs) = Ruleset.add_fresh_rule (frob d) rs - in rs |> Ruleset.tag_rule fr t_redex - end - - fun frob_rules ds = fold add_frob ds Ruleset.empty - fun frob_and_special_rules ds = fold add_frob_and_special ds Ruleset.empty -end diff --git a/core/synth/eq_class_tab.ML b/core/synth/eq_class_tab.ML deleted file mode 100644 index 0b9d0785..00000000 --- a/core/synth/eq_class_tab.ML +++ /dev/null @@ -1,450 +0,0 @@ -signature GRAPH_ENTRY = -sig - structure Graph : BANG_GRAPH - structure Equiv : GRAPH_EQUIV - sharing Equiv.Graph.Sharing = Graph.Sharing - - type T - type data - val update_graph : (Graph.T -> Graph.T) -> T -> T - val update_data : (data -> data) -> T -> T - val update_edata : (Equiv.T option -> Equiv.T option) -> T -> T - val update_dirty : (bool -> bool) -> T -> T - val get_graph : T -> Graph.T - val get_data : T -> data - val get_edata : T -> Equiv.T option - val get_dirty : T -> bool - val set_graph : Graph.T -> T -> T - val set_data : data -> T -> T - val set_edata : Equiv.T option -> T -> T - val set_dirty : bool -> T -> T - val mk : Graph.T * data -> T - val mk' : (Graph.T * Equiv.T option) * data -> T -end - -signature EQ_CLASS = -sig - type T - val update_rep : (GraphName.name -> GraphName.name) 
-> T -> T - val update_congs : (GraphName.name list -> GraphName.name list) -> T -> T - val update_redexes : (GraphName.name list -> GraphName.name list) -> T -> T - val get_rep : T -> GraphName.name - val get_congs : T -> GraphName.name list - val get_redexes : T -> GraphName.name list - val set_rep : GraphName.name -> T -> T - val set_congs : GraphName.name list -> T -> T - val set_redexes : GraphName.name list -> T -> T - val mk : GraphName.name -> T -end - -signature EQ_CLASS_TAB = -sig - type T - structure Theory : GRAPHICAL_THEORY - structure EqClass : EQ_CLASS - structure GraphEntry : GRAPH_ENTRY - structure DNet : TOP_DNET - sharing GraphEntry.Graph.Sharing - = Theory.Graph.Sharing - = DNet.G.Sharing - - - val update_initial_rs : (Theory.Ruleset.T -> Theory.Ruleset.T) -> T -> T - val get_initial_rs : T -> Theory.Ruleset.T - val set_initial_rs : Theory.Ruleset.T -> T -> T - - (* also adds to DNET *) - val set_initial_ruleset : Theory.Ruleset.T -> T -> T - - val get_ruleset : T -> Theory.Ruleset.T - - val get_graph_entry : T -> GraphName.name -> GraphEntry.T - val get_graph_tab : T -> GraphEntry.T GraphName.NTab.T - - val fold_eqclasses : (EqClass.T -> 'a -> 'a) -> T -> 'a -> 'a - val fold_redexes : (GraphEntry.T -> 'a -> 'a) -> T -> 'a -> 'a - val fold_irredexes : (GraphEntry.T -> 'a -> 'a) -> T -> 'a -> 'a - - (* folds over a function which gets args "(graph, rep) is_redex" *) - (*val fold_rule_pairs : - (GraphEntry.Graph.T * GraphEntry.Graph.T -> bool -> 'a -> 'a) -> - T -> 'a -> 'a*) - - val exists_redexes : (GraphEntry.T -> bool) -> T -> bool - val exists_irredexes : (GraphEntry.T -> bool) -> T -> bool - val get_irredexes : T -> GraphEntry.T list - val get_redexes : T -> GraphEntry.T list - - - (* add one graph entry to table. Only return the table if it has actually changed. 
*) - val update_one : GraphEntry.T -> T -> T option - val update : GraphEntry.T list -> T -> GraphEntry.T list * T - - val empty : T - val mk : Theory.Ruleset.T -> T -end - -functor GraphEntry( - structure Equiv : GRAPH_EQUIV - type data -) : GRAPH_ENTRY = -struct - structure Graph = Equiv.Graph - structure Equiv = Equiv - type data = data - - exception stub_data_access of unit - datatype T = GE of { graph: Graph.T, data: data, edata: Equiv.T option, dirty: bool } - fun update_graph f (GE r) = GE {graph = f(#graph r), data = #data r, edata = #edata r, dirty = #dirty r} - fun update_data f (GE r) = GE {graph = #graph r, data = f(#data r), edata = #edata r, dirty = #dirty r} - fun update_edata f (GE r) = GE {graph = #graph r, data = #data r, edata = f(#edata r), dirty = #dirty r} - fun update_dirty f (GE r) = GE {graph = #graph r, data = #data r, edata = #edata r, dirty = f(#dirty r)} - fun get_graph (GE r) = #graph r - fun get_data (GE r) = #data r - fun get_edata (GE r) = #edata r - fun get_dirty (GE r) = #dirty r - val set_graph = update_graph o K - val set_data = update_data o K - val set_edata = update_edata o K - val set_dirty = update_dirty o K - - fun mk' ((gr, ed), d) = - GE { - graph = gr, - data = d, - edata = ed, - dirty = true - } - - fun mk (gr, d) = let - val (gr' ,ed) = Equiv.compute_equiv_data gr - in mk' ((gr', SOME ed), d) - end -end - -structure EqClass = -struct - datatype T = EQC of { rep: GraphName.name, congs: GraphName.name list, redexes: GraphName.name list } - fun update_rep f (EQC r) = EQC {rep= f(#rep r),congs= #congs r,redexes= #redexes r} - fun update_congs f (EQC r) = EQC {rep= #rep r,congs= f(#congs r),redexes= #redexes r} - fun update_redexes f (EQC r) = EQC {rep= #rep r,congs= #congs r,redexes= f(#redexes r)} - fun get_rep (EQC r) = #rep r - fun get_congs (EQC r) = #congs r - fun get_redexes (EQC r) = #redexes r - val set_rep = update_rep o K - val set_congs = update_congs o K - val set_redexes = update_redexes o K - fun mk g = EQC { rep = g, congs = [], redexes = [] } -end - - -functor EqClassTab( - structure Theory : GRAPHICAL_THEORY - structure Metric : GRAPH_METRIC - structure Equiv : GRAPH_EQUIV - sharing Theory.Graph.Sharing = - Metric.Graph.Sharing = - Equiv.Graph.Sharing - type data - val default_data : data -) : EQ_CLASS_TAB = -struct - structure Theory = Theory - structure Ruleset = Theory.Ruleset - structure EqClass = EqClass - structure G = Theory.Graph - structure GraphEntry = GraphEntry( - structure Equiv = Equiv - type data = data) - structure GE = GraphEntry - structure DNet = Top_DNet(G) - - structure ClassIndex = Table( - type key = int * int - val ord = prod_ord int_ord int_ord - ) - - datatype T = EQT of { - tab : (EqClass.T list) ClassIndex.table, - initial_rs : Theory.Ruleset.T, - dnet : DNet.T, - graph_tab : GE.T GraphName.NTab.T, - graph_tab' : GE.Graph.T GraphName.NTab.T - } - - val empty = EQT { - tab=ClassIndex.empty, - initial_rs=Theory.Ruleset.empty, - dnet=DNet.empty, - graph_tab=GraphName.NTab.empty, - graph_tab'=GraphName.NTab.empty - } - - fun update_tab f (EQT r) = EQT {tab=f(#tab r),initial_rs= #initial_rs r,dnet= #dnet r,graph_tab= #graph_tab r,graph_tab'= #graph_tab' r} - fun update_initial_rs f (EQT r) = EQT {tab= #tab r,initial_rs=f(#initial_rs r),dnet= #dnet r,graph_tab= #graph_tab r,graph_tab'= #graph_tab' r} - fun update_dnet f (EQT r) = EQT {tab= #tab r,initial_rs= #initial_rs r,dnet=f(#dnet r),graph_tab= #graph_tab r,graph_tab'= #graph_tab' r} - fun update_graph_tab f (EQT r) = EQT {tab= #tab r,initial_rs= 
#initial_rs r,dnet= #dnet r,graph_tab=f(#graph_tab r),graph_tab'= #graph_tab' r} - fun update_graph_tab' f (EQT r) = EQT {tab= #tab r,initial_rs= #initial_rs r,dnet= #dnet r,graph_tab= #graph_tab r,graph_tab'=f(#graph_tab' r)} - - fun get_tab (EQT r) = #tab r - fun get_initial_rs (EQT r) = #initial_rs r - fun get_dnet (EQT r) = #dnet r - fun get_graph_tab (EQT r) = #graph_tab r - fun get_graph_tab' (EQT r) = #graph_tab' r - - fun set_tab x = update_tab (fn _ => x) - fun set_initial_rs x = update_initial_rs (fn _ => x) - fun set_dnet x = update_dnet (fn _ => x) - fun set_graph_tab x = update_graph_tab (fn _ => x) - fun set_graph_tab' x = update_graph_tab' (fn _ => x) - - - (*fun update_tab f (EQT r) = EQT {tab=f(#tab r),initial_rs= #initial_rs r,dnet= #dnet r,graph_tab= #graph_tab r} - fun update_initial_rs f (EQT r) = EQT {tab= #tab r,initial_rs=f(#initial_rs r),dnet= #dnet r,graph_tab= #graph_tab r} - fun update_dnet f (EQT r) = EQT {tab= #tab r,initial_rs= #initial_rs r,dnet=f(#dnet r),graph_tab= #graph_tab r} - fun update_graph_tab f (EQT r) = EQT {tab= #tab r,initial_rs= #initial_rs r,dnet= #dnet r,graph_tab=f(#graph_tab r)} - - fun get_tab (EQT r) = #tab r - fun get_initial_rs (EQT r) = #initial_rs r - fun get_dnet (EQT r) = #dnet r - fun get_graph_tab (EQT r) = #graph_tab r - - val set_tab = update_tab o K - val set_initial_rs = update_initial_rs o K - val set_dnet = update_dnet o K - val set_graph_tab = update_graph_tab o K*) - - - fun set_initial_ruleset rs eqt = - let - val eqt = eqt |> set_initial_rs rs - fun addr (rn,rule) eqt = - let - val gn = GraphName.mk ("g-" ^ R.dest rn) - val g = Ruleset.Rule.get_lhs rule - in - eqt |> update_graph_tab' (GraphName.NTab.doadd (gn,g)) - |> update_dnet (DNet.add_graph (gn,g)) - end - in - RTab.fold addr (Ruleset.get_allrules rs) eqt - end - - - fun get_graph_entry eqt g = - GraphName.NTab.get (get_graph_tab eqt) g - - fun get_graph eqt g = - case GraphName.NTab.lookup (get_graph_tab eqt) g - of SOME ge => (GraphEntry.get_graph ge) - | NONE => (GraphName.NTab.get (get_graph_tab' eqt) g) - - fun set_as_redex gn eqt = - eqt |> update_dnet (DNet.add_graph (gn, GE.get_graph (get_graph_entry eqt gn))) - - - (* convenience functions for pulling out redexes and irredexes *) - local - exception found_exn of unit - fun fold_to_exists fldf f eqt = - fldf (fn x => fn _ => - (if f x then raise found_exn () else false)) eqt false - handle found_exn () => true - in - fun fold_eqclasses f = ClassIndex.fold (fn (_, classlist) => - fold f classlist - ) o get_tab - - fun fold_redexes f eqt = - fold_eqclasses (fn class => - fold (fn gn => f (GraphName.NTab.get (get_graph_tab eqt) gn)) - (EqClass.get_redexes class) - ) eqt - - fun fold_irredexes f eqt = - fold_eqclasses (fn class => - fold (fn gn => f (GraphName.NTab.get (get_graph_tab eqt) gn)) - (EqClass.get_rep class :: EqClass.get_congs class) - ) eqt - - (*fun fold_rule_pairs f = - let - fun class_fld class x = - let - val rep = GraphEntry.get_graph (EqClass.get_rep class) - in - fold - (fn redex => f (GraphEntry.get_graph redex, rep) true) - (EqClass.get_redexes class) - (fold - (fn cong => f (GraphEntry.get_graph cong, rep) false) - (EqClass.get_congs class) x) - end - in - ClassIndex.fold (fn (_, classlist) => - fold class_fld classlist) o get_tab - end*) - - fun get_redexes eqt = fold_redexes cons eqt [] - fun get_irredexes eqt = fold_irredexes cons eqt [] - val exists_redexes = fold_to_exists fold_redexes - val exists_irredexes = fold_to_exists fold_irredexes - end - - - fun get_ruleset eqt = - let - 
fun add_rule nm tag rhs lhs_n rs = - let - val lhs = GE.get_graph (GraphName.NTab.get (get_graph_tab eqt) lhs_n) - val r = Theory.Rule.mk (lhs, rhs) - val (rname, rs) = rs |> Theory.Ruleset.add_fresh_rule r - in rs |> Theory.Ruleset.tag_rule rname (TagName.mk tag) - end - - fun add_class class rs = - let - val rep = GE.get_graph (GraphName.NTab.get (get_graph_tab eqt) (EqClass.get_rep class)) - val add_redex = add_rule "r_0" "redex" rep - val add_cong = add_rule "c_0" "cong" rep - in - fold add_redex (EqClass.get_redexes class) - (fold add_cong (EqClass.get_congs class) rs) - end - in - ClassIndex.fold (fn (_, classlist) => - fold add_class classlist) (get_tab eqt) (get_initial_rs eqt) - end - - - (*fun has_match tgt pat = - is_some (Seq.pull (Theory.MatchSearch.match - (get_graph pat) (get_graph tgt) - )) - - fun rs_can_reduce rs tgt = - let - fun r_matches r_name = - is_some (Seq.pull (Theory.MatchSearch.match - (Theory.Rule.get_lhs (Ruleset.get_rule rs r_name)) (get_graph tgt) - )) - in R.NSet.exists r_matches - (Theory.Ruleset.get_rules_in_tag rs (TagName.mk "redex")) - end*) - - (* returns (SOME new_class) if graph is equivalent to the representative, - and NONE otherwise *) - - -(* fun find_and_add eqt gn = let - val ge = GraphName.NTab.get (get_graph_tab eqt) gn - - *) - - (*fun update_one ge eqt = - if (*rs_can_reduce (get_initial_rs eqt) ge orelse - exists_redexes (has_match ge) eqt*) false - then NONE - else let - val (gn, gt) = get_graph_tab eqt |> GraphName.NTab.add (GraphName.mk "g0", ge) - val eqt = eqt |> set_graph_tab gt - val gr = GE.get_graph ge - val arity = (V.NSet.cardinality (G.get_inputs gr), - V.NSet.cardinality (G.get_outputs gr)) - in SOME (eqt |> - update_tab (ClassIndex.map_default (arity, []) - (find_and_add eqt gn) - )) - end*) - - fun has_match_in_dnet eqt tgt = let - fun has_match pat_n = let - val pat = get_graph eqt pat_n - in is_some (Seq.pull ( - Theory.MatchSearch.match pat (GE.get_graph tgt) - )) - end - in GraphName.NSet.exists has_match - (DNet.get_match_candidates (get_dnet eqt) (GE.get_graph tgt)) - end - - fun update_one ge eqt = - if has_match_in_dnet eqt ge then NONE - else let - val (gn, gt) = get_graph_tab eqt |> GraphName.NTab.add (GraphName.mk "g0", ge) - val eqt' = eqt |> set_graph_tab gt - val gr = GE.get_graph ge - val arity = (V.NSet.cardinality (G.get_inputs gr), - V.NSet.cardinality (G.get_outputs gr)) - val classlist = case ClassIndex.lookup (get_tab eqt) arity - of SOME cs => cs | NONE => [] - - fun tryadd_to_class cls eqt = let - val cls_ge = GraphName.NTab.get (get_graph_tab eqt) (EqClass.get_rep cls) - val inclass = - case (GE.get_edata cls_ge, GE.get_edata ge) - of (SOME d1, SOME d2) => Equiv.eq (d1, d2) - | _ => false - in - if inclass - then - SOME ( - case Metric.ord_graph (GE.get_graph cls_ge, GE.get_graph ge) - of LESS => (cls |> EqClass.update_redexes (cons gn), eqt |> set_as_redex gn) - | GREATER => - let - val new_reds = EqClass.get_rep cls :: EqClass.get_congs cls - in (cls |> EqClass.update_redexes (fn reds => new_reds @ reds) - |> EqClass.set_congs [] - |> EqClass.set_rep gn, - fold set_as_redex new_reds eqt) - end - | EQUAL => (cls |> EqClass.update_congs (cons gn), eqt) - ) - else NONE - end - - fun tryaddf cls (found, rest, eqt) = - if found then (true, cls :: rest, eqt) - else case tryadd_to_class cls eqt - of SOME (cls', eqt') => (true, cls' :: rest, eqt') - | NONE => (false, cls :: rest, eqt) - - val (found, classlist', eqt') = fold tryaddf classlist (false, [], eqt') - in - SOME ( - eqt' |> update_tab 
(ClassIndex.update (arity, - if found then classlist' - else EqClass.mk gn :: classlist')) - ) - end - - fun update ge_list eqtab = let - fun updatef ge (ges, eqt) = - case update_one ge eqt - of SOME eqt' => (ge::ges, eqt') - | NONE => (ges, eqt) - in fold updatef ge_list ([],eqtab) - end - - - fun mk rs = let - val initial_redexes = Ruleset.get_rules_in_tag rs (TagName.mk "redex") - fun add_rule rn eqt = let - val ge = GE.mk' ((Theory.Rule.get_lhs (Ruleset.get_rule rs rn), NONE), default_data) - val (gn, tab) = (get_graph_tab eqt) |> GraphName.NTab.add (GraphName.mk "r0", ge) - in eqt |> set_graph_tab tab |> set_as_redex gn - end - in R.NSet.fold add_rule initial_redexes (empty |> set_initial_rs rs) - end -end - - - - - - - - - - diff --git a/core/synth/fast_graph_enum.ML b/core/synth/fast_graph_enum.ML deleted file mode 100644 index 9a16f240..00000000 --- a/core/synth/fast_graph_enum.ML +++ /dev/null @@ -1,343 +0,0 @@ -signature GRAPH_ENUM = -sig - structure Theory : GRAPHICAL_THEORY - structure Metric : GRAPH_METRIC - structure EqClassTab : EQ_CLASS_TAB - - type data - type generator = (Theory.Graph.vdata * int * int) - - val default_data : data - type size_param = int * int * int * int - - sharing Theory.Graph.Sharing = - Metric.Graph.Sharing - sharing Theory.Ruleset.Sharing = - EqClassTab.Theory.Ruleset.Sharing - - val tab_update : generator list -> size_param -> EqClassTab.T -> EqClassTab.T - val tab_enum : generator list -> size_param -> EqClassTab.T - val enum : generator list -> size_param -> Theory.Graph.T list -end - -functor FastGraphEnum( - structure Theory : GRAPHICAL_THEORY - structure Metric : GRAPH_METRIC - structure Equiv : GRAPH_EQUIV - sharing Theory.Graph.Sharing = - Metric.Graph.Sharing = - Equiv.Graph.Sharing -) : GRAPH_ENUM = -struct - structure Theory = Theory - structure Metric = Metric - structure Graph = Theory.Graph - - type size_param = int * int * int * int - type generator = (Theory.Graph.vdata * int * int) (* generators (data + arities) *) - (* Ordered Vertices, free inputs, free outputs, list of lists of - edges, one vertex at a time *) - type adjmat = generator list * int list * int list * (int list) list - type data = adjmat * int - val empty_graph_rep = (([],[],[],[]),0) - val default_data = empty_graph_rep - - fun generator_eq ((data1,i1,o1), (data2,i2,o2)) = - Theory.Graph.vdata_eq (data1, data2) andalso - (i1,o1) = (i2,o2) - - structure EqClassTab = EqClassTab( - structure Theory = Theory - structure Metric = Metric - structure Equiv = Equiv - type data = data - val default_data = default_data) - structure GraphEntry = EqClassTab.GraphEntry - - (****** Utility Functions ******) - - fun drop_while _ [] = [] - | drop_while p (x::xs) = - if p x then drop_while p xs - else x::xs - - fun keep_drop 0 0 ts = ts - | keep_drop 0 j (t::ts) = keep_drop 0 (j-1) ts - | keep_drop i j (t::ts) = t::(keep_drop (i-1) j ts) - | keep_drop _ _ _ = raise Match - - fun filter_max m zs = let - fun filter_max' m (x, (n,ys)) = - case (Int.compare (m x, n)) of - LESS => (n,ys) - | GREATER => ((m x),x::[]) - | EQUAL => (n,x::ys) - in snd (List.foldl (filter_max' m) (0,[]) zs) - end - - val sum = List.foldr (fn (x,y) => x+y) 0 - - - (****** adjmat functions ******) - - (* Number of: vertices, total inputs, total outputs *) - fun adjmat_total_arities (gs,_,_,_) = let - val m = sum (map (fn (_,i,_) => i) gs) - val n = sum (map (fn (_,_,j) => j) gs) - in (m, n) - end - - (* Number of: vertices, total inputs, total outputs *) - fun adjmat_arity (g as (gs,_,_,edges)) = let - val p = 
sum (map sum edges) - val (m,n) = adjmat_total_arities g - in (m-p, n-p) - end - - (* Number of: vertices, total inputs, total outputs *) - fun adjmat_pluggings (g as (_,_,_,edges)) = sum (map sum edges) - - - - fun split_edge [] = ([],[]) - | split_edge xs = let - fun split_edge' xs ys 0 = (xs,ys) - | split_edge' (x::xs) ys n = split_edge' xs (x::ys) (n-1) - | split_edge' _ _ _ = raise Match - in split_edge' xs [] ((length xs - 1) div 2) - end - - val to_block_matr = let - fun switch xs yss = let - val (cs,rs) = split_edge xs - in ListPair.map (fn (y, ys) => y::ys) (cs, rs::yss) - end - in List.foldr (uncurry switch) [] - end - - fun from_block_matr [] = [] - | from_block_matr (es::ess) = - (List.revAppend (map hd ess, es)) :: from_block_matr (map tl ess) - - val order = List.foldl (List.revAppend) [] - - fun swap_adj_matr i tss = let - fun swap_adj_list 1 (t1::t2::ts) = t2::t1::ts - | swap_adj_list i (t::ts) = t::(swap_adj_list (i-1) ts) - | swap_adj_list _ ts = ts - in swap_adj_list i (map (swap_adj_list i) tss) - end - - fun perm_matr tss = let - fun shift_matr 0 tsss = tsss - | shift_matr i tsss = tsss @ (shift_matr (i-1) (map (swap_adj_matr i) tsss)) - fun perm_matr' tss 0 = [tss] - | perm_matr' tss i = shift_matr i (perm_matr' tss (i-1)) - in perm_matr' tss (length tss-1) - end - - fun matr_canonical ess = let - val cur_order = order ess - in forall - ((fn x => (list_ord int_ord (x, cur_order) <> GREATER)) o - order o from_block_matr) - (perm_matr (to_block_matr ess)) - end - - fun compare_vert ts us = let - val k = (length ts - 1) div 2 - val d = (length us - 1) div 2 - k - val ts' = (keep_drop (k+1) d (keep_drop k d us)) - in list_ord int_ord (ts, ts') - end - - fun num_same_vert xss = let - fun num_same_vert' i (v::[]) = i - | num_same_vert' i (v1::v2::vs) = - (case (compare_vert v1 v2) - of LESS => 0 - | GREATER => num_same_vert' 1 (v2::vs) - | EQUAL => num_same_vert' (i+1) (v1::vs)) - | num_same_vert' _ [] = 0 - in num_same_vert' 1 xss - end - - fun centre [] = [] - | centre (t::ts) = let - fun centre' x i [] = [] - | centre' x i (t::ts) = take i (drop x t) :: (centre' x (i+2) ts) - in centre' ((length t - 1) div 2) 1 (t::ts) - end - - fun edges_canonical [] = true - | edges_canonical ts = let - val k = num_same_vert ts - val vs = drop (length ts - k) ts - in (matr_canonical o rev o centre) vs - end - - fun last_vert_canonical ([], _, _, _) = true - | last_vert_canonical (vs, _, _, ess) = let - fun last_vert (x::y::xs) (es1::ess) ls = if (x=y) then last_vert (x::xs) ess (es1::ls) else es1::ls - | last_vert (x::_) (es1::_) ls = es1::ls - | last_vert _ _ _ = raise Match - in edges_canonical (last_vert vs ess []) - end - - fun to_graph_idfree (vs,is,os,ess) = let - val g0 = Graph.empty - fun to_graph_vert [] g = ([],g) - | to_graph_vert ((v,_,_)::vs) g = let - val (vs', g') = to_graph_vert vs g - val (v', g'') = Graph.add_vertex v g' - in (v'::vs', g'') - end - val (vertices, g0) = to_graph_vert vs g0 - fun into_edges [] (_, []) = [] - | into_edges (vin::vins) (vout, (n::ns)) = List.tabulate (n, K (vout,vin)) @ (into_edges vins (vout, ns)) - | into_edges _ _ = raise Match - fun list_int_edges vs ess = maps (into_edges vs) (ListPair.zip (vs, ess)) - val edge_list = list_int_edges vertices (to_block_matr ess) - val g0 = List.foldr (fn ((x,y), g) => snd (g |> Graph.add_edge (Directed, Theory.Graph.default_edata) x y)) g0 edge_list - val edge_in_list = maps (fn (n,y) => List.tabulate (n, K y)) (ListPair.zip (is, vertices)) - val edge_out_list = maps (fn (n,y) => List.tabulate (n, K 
y)) (ListPair.zip (os, vertices)) - fun add_in_edge (v, g) = let - val (bound, g') = g |> Graph.add_vertex (Theory.Graph.WVert); - val (_,g'') = g' |> Graph.add_edge (Directed, Theory.Graph.default_edata) bound v - in g'' - end - fun add_out_edge (v, g) = let - val (bound, g') = g |> Graph.add_vertex (Theory.Graph.WVert); - val (_,g'') = g' |> Graph.add_edge (Directed, Theory.Graph.default_edata) v bound - in g'' - end - val g0 = List.foldr add_in_edge g0 edge_in_list - val g0 = List.foldr add_out_edge g0 edge_out_list - in g0 - end - - fun add_ident_wire g = let - val (x, g) = g |> Graph.add_vertex (Theory.Graph.WVert); - val (y, g) = g |> Graph.add_vertex (Theory.Graph.WVert); - val (_, g) = g |> Graph.add_edge (Directed, Theory.Graph.default_edata) x y - in g - end - - fun to_graph (am,i) = funpow i add_ident_wire (to_graph_idfree am) - - fun to_graph_entry ami = GraphEntry.mk (to_graph ami, ami) - - - (****** Building adjmat graphs ******) - - (* adds a specific type of vertex with no edges *) - (* add_gen : generator -> adjmat -> adjmat *) - fun add_gen (v' as (_,inp,out)) (verts, inps, outs, edges) = (v'::verts, inp::inps, out::outs, (List.tabulate ((2 * length (verts) +1), K 0)::edges)) - - (* adds any possible vertex with no edges *) - (* add_gens : [generator] -> adjmat -> adjmat list *) - fun add_gens (max_p,max_m,max_n) gens (matr as ([],_,_,_)) = let - val gens = filter (fn (_,m,n) => m <= max_m + max_p andalso n <= max_n + max_p) gens - in map (fn v => add_gen v matr) gens - end - | add_gens (max_p,max_m,max_n) gens (matr as (vert::_,_,_,_)) = let - fun is_small (_,gm,gn) = let - val (m,n) = adjmat_total_arities matr - in (m+gm <= max_m + max_p) andalso (n+gn <= max_n + max_p) - end - val gens = drop_while (fn g => not (generator_eq (g,vert))) gens - val gens = filter is_small gens - in map (fn v => add_gen v matr) gens - end - - (* adds any possible edge *) - (* add_edge : adjmat -> adjmat list *) - fun add_edge ([], _, _, _) = [] - | add_edge (_, _, _, []) = raise Match - | add_edge (verts, inps, outs, edge::edges) = let - fun new_edge_here (i2::is2) (o2::os2) (e2::es2) = if ((i2<>0) andalso (o2<>0)) - then [(i2-1::is2, o2-1::os2, e2+1::es2)] else [] - | new_edge_here _ _ _ = raise Match - fun add_edge' [i1] [o1] es = new_edge_here [i1] [o1] es - | add_edge' (i1::[]) (o1::os) (e1::es) = new_edge_here (i1::[]) (o1::os) (e1::es) @ - map (fn (x,y,z) => (x,o1::y,0::z)) (if (e1=0) then add_edge' [i1] os es else [] ) - | add_edge' (i1::is) (o1::os) (e1::es) = new_edge_here (i1::is) (o1::os) (e1::es) @ - map (fn (x,y,z) => (i1::x,y,0::z)) (if (e1=0) then add_edge' is (o1::os) es else [] ) - | add_edge' _ _ _ = raise Match - in map (fn (x,y,z) => (verts,rev x,y,z::edges)) (add_edge' (rev inps) outs edge) - end - - - (****** Enumeration with EqClassTab ******) - - (* TODO: replace fopt with (f, filt) *) - fun fold_graphs gens (max_v,max_p,max_m,max_n) fopt initial = let - fun fold_save fopt (x::xs) (ys,accum) = - (if (fn (m,n) => (m + snd x) <= max_m andalso (n + snd x) <= max_n) (adjmat_arity (fst x)) - then case fopt (to_graph_entry x) accum - of SOME accum' => fold_save fopt xs (x::ys, accum') - | NONE => fold_save fopt xs (ys,accum) - else fold_save fopt xs (x::ys, accum)) - | fold_save fopt [] (ys,accum) = (ys, accum) - (* add edges in all possible ways *) - fun fold_edges_round [] (next, accum) = (next, accum) - | fold_edges_round egs (next, accum) = - let - val (next',accum') = fold_save fopt (map (fn eg => (eg,0)) egs) ([],accum) - val egs' = filter (fn (g,_) => 
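Note: a concrete picture of the adjacency-matrix encoding that add_gen above builds. Each new generator conses its arities onto the free-input and free-output lists and adds a fresh all-zero edge row of length 2*(existing vertices)+1. A standalone sketch (not from the deleted sources), with unit standing in for the vertex data:

    (* mirrors add_gen, with unit as placeholder vertex data *)
    fun add_gen_sketch (d, inp, out) (verts, inps, outs, edges) =
      ((d, inp, out) :: verts, inp :: inps, out :: outs,
       List.tabulate (2 * length verts + 1, fn _ => 0) :: edges)

    val step0 = ([], [], [], [])
    val step1 = add_gen_sketch ((), 1, 2) step0
    (* = ([((),1,2)], [1], [2], [[0]]) *)
    val step2 = add_gen_sketch ((), 2, 1) step1
    (* = ([((),2,1), ((),1,2)], [2,1], [1,2], [[0,0,0], [0]]) *)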
(adjmat_pluggings g) < max_p) next' - val egs' = maps (add_edge o fst) egs' - in fold_edges_round (egs') (next' @ next, accum') - end - (***) - (* one round of enumeration. (prev, accum) => add one vertex => (next, accum') *) - fun fold_graphs_round (prev, accum) = - let - (* start by adding one bare wire to the output of the previous round *) - val (prev_with_bare, accum') = - fold_save fopt (map (fn (am,i) => (am,i+1)) (filter (fn (am,i) => (fst (adjmat_arity am) + i < max_m ) andalso (snd (adjmat_arity am) + i < max_n )) prev)) ([], accum) - (* only add non-trivial generators to graphs that don't have any bare wires yet *) - val prev = map fst (filter (fn (_,i)=> i = 0) prev) - val graphs = flat (map (add_gens (max_p,max_m,max_n) gens) prev) - in fold (fn g => fold_edges_round [g]) graphs (prev_with_bare, accum') - end - (***) - in - snd (funpow max_v fold_graphs_round ([empty_graph_rep], initial)) - end - - fun enum gens max_size = - fold_graphs gens max_size - (fn g => fn gs => SOME (GraphEntry.get_graph g :: gs)) [] - - fun tab_update gens max_size = fold_graphs gens max_size EqClassTab.update_one - fun tab_enum gens max_size = tab_update gens max_size EqClassTab.empty - - (* recursively adds new graphs to tab by adding one new edge to each irredex *) - (* tab_add_edges : [adjmat] -> EqClassTab -> EqClassTab *) - (*fun tab_add_edges [] tab = tab - | tab_add_edges egs tab = let - val egs' = maps add_edge egs - val (irreds,tab') = EqClassTab.update (map (fn eg => to_graph_entry (eg,0)) egs') tab - in tab_add_edges (map (fst o GraphEntry.get_data) irreds) tab' - end - - (* adds new graphs with one new vertex and any possible number of extra edges *) - (* tab_add_gens : generator list -> EqClassTab -> EqClassTab *) - fun tab_add_gens gens tab = let - val irred_graphs = filter_max (fn (g,i) => i + adjmat_size g) - (map GraphEntry.get_data (EqClassTab.get_irredexes tab)) - val (_,tab') = EqClassTab.update (map (fn (am,i) => to_graph_entry (am,i+1)) irred_graphs) tab - val irred_graphs = map fst (filter (fn (g,i) => i=0) irred_graphs) - val graphs = flat (map (add_gens gens) irred_graphs) - in List.foldl (fn (g,t) => tab_add_edges [g] t) tab' graphs - end*) - - - (*fun tab_update gens max_verts tab = - funpow max_verts (tab_add_gens gens) - (snd (EqClassTab.update [to_graph_entry (([],[],[],[]),0)] tab)) - - (* Creates a table of graphs up to a certain size *) - (* tab_enum : generator list -> Int -> EqClassTab *) - fun tab_enum gens max_verts = EqClassTab.empty |> tab_update gens max_verts*) -end diff --git a/core/synth/graph_equiv.ML b/core/synth/graph_equiv.ML deleted file mode 100644 index d115d0ad..00000000 --- a/core/synth/graph_equiv.ML +++ /dev/null @@ -1,14 +0,0 @@ -signature GRAPH_EQUIV = -sig - structure Graph : BANG_GRAPH - type T - - (* compute data that is used to check graph equivalence. This function may - also do some renaming of the graph to put it in some canonical form (e.g. - wrt. the ordering of inputs and outputs). 
*) - val compute_equiv_data : Graph.T -> Graph.T * T - val to_string : T -> string - val eq : T * T -> bool -end - - diff --git a/core/synth/metric.ML b/core/synth/metric.ML deleted file mode 100644 index b7df06be..00000000 --- a/core/synth/metric.ML +++ /dev/null @@ -1,92 +0,0 @@ -signature GRAPH_METRIC = -sig - type T - structure Graph : BANG_GRAPH - val compute : Graph.T -> T - val ord : T * T -> order - val bottom : T - - (* ASSUMED: ord_graph = ord o (apfst compute) o (apsnd compute) *) - val ord_graph : Graph.T * Graph.T -> order -end - -functor EdgeComplexityMetric( - structure Graph : BANG_GRAPH -) : GRAPH_METRIC = -struct - type T = int * int * int - structure Graph = Graph - - fun ord ((a,b,c), (d,e,f)) = (prod_ord int_ord (prod_ord int_ord int_ord)) ((a,(b,c)), (d,(e,f))) - val bottom = (0,0,0) - - fun compute graph = let - fun vert_weight v = let - val edges = E.NSet.cardinality (E.NSet.union_merge - (Graph.get_in_edges graph v) - (Graph.get_out_edges graph v)) - in if edges > 2 then edges else 0 - end - fun vfld v (ec,count) = (ec + vert_weight v, count+1) - val (ec,verts) = V.NSet.fold vfld (Graph.get_vertices graph) (0,0) - val edges = (E.NSet.cardinality o Graph.get_edges) graph - in (ec, verts, edges) - end - - val ord_graph = ord o (apfst compute) o (apsnd compute) -end - -functor WeightedArityMetric( - structure Graph : BANG_GRAPH - val weight_for_data : Graph.vdata -> int -) : GRAPH_METRIC = -struct - type T = int - structure Graph = Graph - val ord = int_ord - val bottom = 0 - - (*fun weight_for_arity 0 0 = 10 - | weight_for_arity 1 0 = 20 - | weight_for_arity 0 1 = 23 - | weight_for_arity 1 1 = 30 - | weight_for_arity 2 0 = 50 - | weight_for_arity 0 2 = 53 - | weight_for_arity 2 1 = 80 - | weight_for_arity 1 2 = 83 - | weight_for_arity 3 0 = 90 - | weight_for_arity 0 3 = 93 - | weight_for_arity 2 2 = 110 - | weight_for_arity 3 1 = 120 - | weight_for_arity 1 3 = 123 - | weight_for_arity 4 0 = 130 - | weight_for_arity 0 4 = 133 - | weight_for_arity n m = (20 * (5 + n)) + (21 * m)*) - - fun weight_for_arity 0 0 l = 10 + l - | weight_for_arity 1 0 l = 20 + l - | weight_for_arity 0 1 l = 22 + l - | weight_for_arity 1 1 l = 30 + l - | weight_for_arity 2 0 l = 40 + l - | weight_for_arity 0 2 l = 44 + l - | weight_for_arity 2 1 l = 50 + l - | weight_for_arity 1 2 l = 55 + l - | weight_for_arity m n l = 20*(m*m + n*n) + 20*(m + n)*(m + n) + l - - (*fun weight_for_arity m n l = 2*(m*m + n*n) + 2*(m + n)*(m + n) + l*) - - fun compute graph = let - fun vert_weights v tot = let - val loops = E.NSet.cardinality (Graph.get_self_loops graph v) - val ins = E.NSet.cardinality (Graph.get_in_edges graph v) - loops - val outs = E.NSet.cardinality (Graph.get_out_edges graph v) - loops - val dat = Graph.get_vertex_data graph v - in (weight_for_data dat * weight_for_arity ins outs loops) + tot - end - in V.NSet.fold vert_weights (Graph.get_vertices graph) 0 - end - - val ord_graph = ord o (apfst compute) o (apsnd compute) -end - - diff --git a/core/synth/synth_util.ML b/core/synth/synth_util.ML deleted file mode 100644 index 01060d49..00000000 --- a/core/synth/synth_util.ML +++ /dev/null @@ -1,99 +0,0 @@ -infixr 4 ++ - -signature SYNTH_UTIL = -sig - structure Theory : GRAPHICAL_THEORY - - val load_rule : string -> (R.name * Theory.Rule.T) - val load_ruleset : string list -> Theory.Ruleset.T - val load_graph : string -> Theory.Graph.T - val save_graph : string -> Theory.Graph.T -> unit -end - -functor SynthUtil( - structure Enum : GRAPH_ENUM -) = -struct - -structure Theory = Enum.Theory - 
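Note: the general case of weight_for_arity in WeightedArityMetric above, worked through by hand (a standalone sketch, not from the deleted sources). Apart from the isolated-vertex case, the special-cased small arities are all cheaper than the general formula, which biases the metric towards graphs built from low-arity nodes:

    (* general case: m inputs, n outputs, l self-loops *)
    fun weight_general m n l =
      20 * (m * m + n * n) + 20 * (m + n) * (m + n) + l

    val _ = weight_general 3 1 0
    (* = 20 * (9 + 1) + 20 * 16 + 0 = 200 + 320 = 520,
       compared with 50 for the special-cased 2-input, 1-output vertex *)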
-fun esc s = - let - fun f #"/" = "--" - | f c = String.str c - in String.translate f s - end - -fun load_rule s = (R.mk s, Theory.RuleJSON.input (Json.read_file (s^".qrule"))) -fun save_rule s r = Json.write_file (s^".qrule") (Theory.RuleJSON.output r) -fun load_graph s = Theory.GraphJSON.input (Json.read_file (s^".qgraph")) -fun save_graph s g = Json.write_file (s^".qgraph") (Theory.GraphJSON.output g) - -fun ruleset rule_list = fold (fn r => fn rs => ( - rs |> Theory.Ruleset.update_rule r - |> Theory.Ruleset.activate_rule (fst r) - )) rule_list Theory.Ruleset.empty - -fun load_ruleset rule_list = ruleset (map load_rule rule_list) - -fun save_ruleset output_dir rs = - RTab.fold - (fn (rn,r) => fn () => - (save_rule (output_dir ^ "/" ^ esc (R.dest rn)) r)) - (Theory.Ruleset.get_allrules rs) () - - -fun mk_output_dir () = let - val _ = if not (OS.FileSys.access ("synth_output", [])) - then OS.FileSys.mkDir "synth_output" - else () - fun frdir i = let - val dir = "synth_output/s" ^ (Int.toString i) - in - if not (OS.FileSys.access (dir, [])) - then (OS.FileSys.mkDir dir; dir) - else frdir (i+1) - end -in frdir 0 -end - -(*fun synth gens rs0 (sz as (i,j,k,l)) = let - val _ = TextIO.print ("Starting enumeration for ("^ - Int.toString i ^ "," ^ Int.toString j ^ "," ^ - Int.toString k ^ "," ^ Int.toString l ^ - ")\n") - val tab = Enum.EqClassTab.empty |> Enum.EqClassTab.set_initial_ruleset rs0 - val tab = tab |> Enum.tab_update gens sz - val _ = TextIO.print "done\n" - val rs = Enum.EqClassTab.get_ruleset tab - val dir = mk_output_dir () - val _ = TextIO.print ("saving output to: " ^ dir ^ "\n") -in - save_ruleset dir rs -end*) - -fun synth_run gens (sz as (i,j,k,l)) tab = let - val _ = TextIO.print ("Starting enumeration run for ("^ - Int.toString i ^ "," ^ Int.toString j ^ "," ^ - Int.toString k ^ "," ^ Int.toString l ^ - ")\n") - val tab = tab |> Enum.tab_update gens sz - val _ = TextIO.print "done\n" -in - tab -end - -fun synth gens rs0 runs = let - val _ = TextIO.print ("Initialising synthesis\n") - val tab = Enum.EqClassTab.empty |> Enum.EqClassTab.set_initial_ruleset rs0 - val _ = TextIO.print "done\n" - val tab = fold (synth_run gens) runs tab - val rs = Enum.EqClassTab.get_ruleset tab - val dir = mk_output_dir () - val _ = TextIO.print ("saving output to: " ^ dir ^ "\n") -in - save_ruleset dir rs -end - - -end diff --git a/core/synth/tensor_equiv.ML b/core/synth/tensor_equiv.ML deleted file mode 100644 index bccda10f..00000000 --- a/core/synth/tensor_equiv.ML +++ /dev/null @@ -1,91 +0,0 @@ -signature TENSOR_DATA = -sig - structure Graph : BANG_GRAPH - structure Tensor : TENSOR - - val dimension : int (* all generators are assumed to be of fixed dimension *) - (* given vertex data and arities, return a tensor *) - val tensor_for_vertex_data : Graph.vdata -> (int * int) -> Tensor.T -end - - -functor TensorEquiv( - structure TData : TENSOR_DATA -) : GRAPH_EQUIV = -struct - type T = TData.Tensor.T - structure Graph = TData.Graph - - (* offset tables assign a vertex name to the rightmost tensor index that connects - to that name. *) - - (* shifts all of the offsets >= the offset of vertex "nm" down by 1. if "nm" has - no space left, it is removed from the offset table. 
*) - fun contract_offsets_for_name nm offsets = let - val current_offset = V.NTab.get offsets nm - fun dec (nm, offset) (tab, rm) = - if offset >= current_offset - then (tab |> V.NTab.doadd (nm, offset-1), rm) - else (tab |> V.NTab.doadd (nm, offset), rm orelse current_offset-1 = offset) - val (tab, rm) = V.NTab.fold dec offsets (V.NTab.empty, current_offset = 0) - in if rm then tab |> V.NTab.delete nm else tab - end - - fun contract_edge graph edge (i_offsets, o_offsets, tensor) = let - val src = TData.Graph.get_edge_source graph edge - val tgt = TData.Graph.get_edge_target graph edge - val upper = V.NTab.get o_offsets src - val lower = V.NTab.get i_offsets tgt - in (contract_offsets_for_name tgt i_offsets, - contract_offsets_for_name src o_offsets, - TData.Tensor.contract (lower,upper) tensor) - end - - fun boundary_list offsets = map fst (sort (fn ((_,o1),(_,o2)) => int_ord (o1, o2)) (V.NTab.list_of offsets)) - - fun append_vertex graph vert (i_offsets, o_offsets, verts, tensor) = let - val data = TData.Graph.get_vertex_data graph vert - val in_edges = TData.Graph.get_in_edges graph vert - val out_edges = TData.Graph.get_out_edges graph vert - val (num_in, num_out) = - case data of TData.Graph.NVert _ => (E.NSet.cardinality in_edges, E.NSet.cardinality out_edges) - | TData.Graph.WVert => (1,1) - val (current_in, current_out) = (TData.Tensor.lower_index_count tensor, TData.Tensor.upper_index_count tensor) - val new_tensor = TData.Tensor.product tensor (TData.tensor_for_vertex_data data (num_in, num_out)) - val new_i_offsets = if num_in = 0 then i_offsets - else i_offsets |> V.NTab.doadd (vert, (TData.Tensor.lower_index_count tensor) + num_in - 1) - val new_o_offsets = if num_out = 0 then o_offsets - else o_offsets |> V.NTab.doadd (vert, (TData.Tensor.upper_index_count tensor) + num_out - 1) - val new_verts = verts |> V.NSet.add vert - val edges_to_contract = V.NSet.fold - (E.NSet.union_merge o (TData.Graph.edges_between graph vert)) - new_verts E.NSet.empty - val (new_i_offsets, new_o_offsets, new_tensor) = - E.NSet.fold (contract_edge graph) edges_to_contract (new_i_offsets,new_o_offsets,new_tensor) - in (new_i_offsets, new_o_offsets, new_verts, new_tensor) - end - - fun tensor_for_graph graph = let - val (i_offsets,o_offsets,_,t) = - V.NSet.fold_rev (append_vertex graph) - (TData.Graph.get_vertices graph) - (V.NTab.empty,V.NTab.empty,V.NSet.empty, - TData.Tensor.id TData.dimension 0) - in (boundary_list i_offsets, boundary_list o_offsets,t) - end - - fun compute_equiv_data gr = let - val (ins,outs,tens) = tensor_for_graph gr - val ((scalar,perm_outs,perm_ins),tens') = TData.Tensor.normalise tens - val norm_ins = map (fn n => V.mk ("i_"^(Int.toString n))) perm_ins - val norm_outs = map (fn n => V.mk ("o_"^(Int.toString n))) perm_outs - val gr' = fold2 Graph.rename_vertex ins norm_ins (fold2 Graph.rename_vertex outs norm_outs gr) - in - (gr', tens') - end - - val eq = TData.Tensor.eq - val to_string = TData.Tensor.to_string -end - - diff --git a/core/synth/theories.ML b/core/synth/theories.ML deleted file mode 100644 index 284387c0..00000000 --- a/core/synth/theories.ML +++ /dev/null @@ -1,146 +0,0 @@ -structure GHZW_TensorData : TENSOR_DATA = -struct - structure Graph = GHZW_Theory.Graph - structure Tensor = IntTensor - - val dimension = 2 - - fun ghz (maxi, maxj) (i,j) = if ((i=0 andalso j=0) orelse (i=maxi andalso j=maxj)) then 1 else 0 - fun w (ins,outs) (i,j) = let - val outsum = List.foldr (op+) 0 (Tensor.decompose_index 2 outs i) - val insum = List.foldr (op+) 0 
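Note: the ghz entry function above is 1 exactly on the all-zeros and all-ones index pair, i.e. the generalised |0..0><0..0| + |1..1><1..1| map. A standalone sketch for the 1-input, 2-output case (not from the deleted sources; the exact index layout used by IntTensor is not reproduced here):

    fun ghz_entry (maxi, maxj) (i, j) =
      if (i = 0 andalso j = 0) orelse (i = maxi andalso j = maxj) then 1 else 0

    (* nonzero entries for outs = 2 (i ranges over 0..3) and ins = 1 (j over 0..1) *)
    val nonzero =
      List.filter (fn (i, j) => ghz_entry (3, 1) (i, j) = 1)
        (List.concat (List.tabulate (4, fn i => List.tabulate (2, fn j => (i, j)))))
    (* = [(0, 0), (3, 1)], i.e. |00><0| and |11><1| *)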
(Tensor.decompose_index 2 ins j) - in if ((outsum = 1 andalso insum = ins) orelse (outsum = 0 andalso insum = (ins - 1))) then 1 else 0 - end - - - fun tensor_for_vertex_data (Graph.NVert GHZW_Data.GHZ) (ins, outs) = - Tensor.tensorf (2,ins,outs) (ghz ((Tensor.pow 2 outs)-1, (Tensor.pow 2 ins)-1)) - | tensor_for_vertex_data (Graph.NVert GHZW_Data.W) (ins, outs) = - Tensor.tensorf (2,ins,outs) (w (ins,outs)) - | tensor_for_vertex_data (Graph.NVert GHZW_Data.TICK) (1,1) = - Tensor.tensor (2,1,1) [0,1,1,0] - | tensor_for_vertex_data (Graph.NVert GHZW_Data.TICK) (ins,outs) = - Tensor.tensorf (2,ins,outs) (K 0) (* just return a zero tensor if bad dimensions on tick *) - | tensor_for_vertex_data (Graph.NVert GHZW_Data.ZERO) (ins,outs) = - Tensor.tensorf (2,ins,outs) (K 0) (* always return 0 tensor *) - | tensor_for_vertex_data (Graph.WVert) _ = Tensor.id dimension 1 -end - -(* -structure RG_TensorDataNoPhase : TENSOR_DATA = -struct - structure Graph = RG_Theory.Graph - structure Tensor = IntTensor - - val dimension = 2 - fun red (ins,outs) (i,j) = - if (fold (curry op+) (Tensor.decompose_index 2 outs i) 0) mod 2 = - (fold (curry op+) (Tensor.decompose_index 2 ins j) 0) mod 2 then 1 else 0 - fun green (maxi,maxj) (i,j) = if ((i=0 andalso j=0) orelse (i=maxi andalso j=maxj)) then 1 else 0 - - fun tensor_for_vertex_data (Graph.NVert (RG_Data.Znd _)) (ins, outs) = - Tensor.tensorf (2,ins,outs) (green ((Tensor.pow 2 outs)-1, (Tensor.pow 2 ins)-1)) - | tensor_for_vertex_data (Graph.NVert (RG_Data.Xnd _)) (ins, outs) = - Tensor.tensorf (2,ins,outs) (red (ins,outs)) - | tensor_for_vertex_data (Graph.WVert) _ = Tensor.id dimension 1 -end -*) - -structure RGB_TensorData : TENSOR_DATA = -struct - structure Graph = RGB_Theory.Graph - structure Tensor = CIntTensor - - val dimension = 2 - - val green_basis = (Tensor.tensor (2,0,1) [(1,0),(0,0)], Tensor.tensor (2,0,1) [(0,0),(1,0)]) - val red_basis = (Tensor.tensor (2,0,1) [(1,0),(1,0)], Tensor.tensor (2,0,1) [(0,~1),(0,1)]) - val blue_basis = (Tensor.tensor (2,0,1) [(1,0),(0,1)], Tensor.tensor (2,0,1) [(1,0),(0,~1)]) - - fun gen (ket1,ket2) (ins, outs) = let - val one = Tensor.id 2 0 - val bra1 = Tensor.conjugate_transpose ket1 - val bra2 = Tensor.conjugate_transpose ket2 - val term1 = Tensor.product (funpow ins (Tensor.product bra1) one) - (funpow outs (Tensor.product ket1) one) - val term2 = Tensor.product (funpow ins (Tensor.product bra2) one) - (funpow outs (Tensor.product ket2) one) - in Tensor.add term1 term2 - end - - fun tensor_for_vertex_data (Graph.NVert RGB_Data.Red) io = gen red_basis io - | tensor_for_vertex_data (Graph.NVert RGB_Data.Green) io = gen green_basis io - | tensor_for_vertex_data (Graph.NVert RGB_Data.Blue) io = gen blue_basis io - | tensor_for_vertex_data (Graph.WVert) _ = Tensor.id 2 1 -end - - -structure RG_TensorData : TENSOR_DATA = -struct - structure Graph = RG_Theory.Graph - structure Tensor = CIntTensor - - val dimension = 2 - - val green_basis = (Tensor.tensor (2,0,1) [(1,0),(0,0)], Tensor.tensor (2,0,1) [(0,0),(1,0)]) - val red_basis = (Tensor.tensor (2,0,1) [(1,0),(1,0)], Tensor.tensor (2,0,1) [(1,0),(~1,0)]) - - fun gen (ket1,ket2) (ins, outs) = let - val one = Tensor.id 2 0 - val bra1 = Tensor.conjugate_transpose ket1 - val bra2 = Tensor.conjugate_transpose ket2 - val term1 = Tensor.product (funpow ins (Tensor.product bra1) one) - (funpow outs (Tensor.product ket1) one) - val term2 = Tensor.product (funpow ins (Tensor.product bra2) one) - (funpow outs (Tensor.product ket2) one) - in Tensor.add term1 term2 - end - - val 
zero = LinratAngleExpr.zero - - fun tensor_for_vertex_data (Graph.NVert (RG_Data.Xnd _)) io = gen red_basis io - | tensor_for_vertex_data (Graph.NVert (RG_Data.Znd _)) io = gen green_basis io - | tensor_for_vertex_data (Graph.NVert RG_Data.Hnd) _ = raise ERROR "Cannot deal with hadamards" - | tensor_for_vertex_data (Graph.WVert) _ = Tensor.id 2 1 -end - - -(* FUNCTOR APPLICATIONS *) -structure RG_Enum = FastGraphEnum( - structure Theory = RG_Theory - - structure Metric = WeightedArityMetric( - structure Graph = RG_Theory.Graph - fun weight_for_data (Graph.NVert (RG_Data.Xnd _)) = 6 - | weight_for_data (Graph.NVert (RG_Data.Znd _)) = 5 - | weight_for_data _ = 4 - ) - - structure Equiv = TensorEquiv(structure TData = RG_TensorData) -) - -structure RG_Spiders = SpiderRewrites(structure Theory = RG_Theory) - - -structure GHZW_Enum = FastGraphEnum( - structure Theory = GHZW_Theory - - structure Metric = WeightedArityMetric( - structure Graph = GHZW_Theory.Graph - fun weight_for_data (Graph.NVert (GHZW_Data.GHZ)) = 6 - | weight_for_data (Graph.NVert (GHZW_Data.W)) = 5 - | weight_for_data _ = 4 - ) - - structure Equiv = TensorEquiv(structure TData = GHZW_TensorData) -) - -structure GHZW_Spiders = SpiderRewrites(structure Theory = GHZW_Theory) -structure GHZW_Synth = -struct - structure SU = SynthUtil(structure Enum = GHZW_Enum) - open SU - val GHZ = GHZW_Theory.Graph.NVert GHZW_Data.GHZ - val W = GHZW_Theory.Graph.NVert GHZW_Data.W -end - diff --git a/core/test/PROTOCOLTEST.ML b/core/test/PROTOCOLTEST.ML deleted file mode 100644 index 3e2b9ad4..00000000 --- a/core/test/PROTOCOLTEST.ML +++ /dev/null @@ -1,36 +0,0 @@ -val _ = PolyML.Compiler.printDepth := 0; - -val _ = TextIO.print "Testing the Quantomatic protocol:\n\n"; - -use "test/protocol/core-tester.ML"; -use "test/protocol/test-utils.ML"; - -use "test/protocol/simple-test.ML"; -use "test/protocol/theory-tests.ML"; -use "test/protocol/console-tests.ML"; -use "test/protocol/rg-graph-tests.ML"; - -fun updateCoreExec [] = setCoreExecutable "bin/quanto-core" - | updateCoreExec ("--core"::ce::ss) = setCoreExecutable ce - | updateCoreExec (s::ss) = updateCoreExec ss; - -fun updateLogFile [] = NONE - | updateLogFile ("--log"::lf::ss) = (setLogFile lf; SOME lf) - | updateLogFile (s::ss) = updateLogFile ss; - -val _ = updateCoreExec (CommandLine.arguments()); -val logfile = updateLogFile (CommandLine.arguments()); - -val _ = OS.Process.exit ( - if runRegisteredTests () then - (TextIO.print "\nAll tests passed\n"; - case logfile - of SOME file => TextIO.print ("See "^file^" for details\n") - | NONE => (); - OS.Process.success) - else - (TextIO.print "\nSome tests failed\n"; - case logfile - of SOME file => TextIO.print ("See "^file^" for details\n") - | NONE => (); - OS.Process.failure)) diff --git a/core/test/graphs/ghz_w/2 times 0 plus 2.graph b/core/test/graphs/ghz_w/2 times 0 plus 2.graph deleted file mode 100644 index fecdf5b4..00000000 --- a/core/test/graphs/ghz_w/2 times 0 plus 2.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"Vb":{}},"node_vertices":{"Va":{"data":"GHZ"},"Vc":{"data":"W"},"Vd":{"data":"W"},"Ve":{"data":"W"},"Vf":{"data":"W"},"Vg":{"data":"GHZ"},"Vh":{"data":"GHZ"},"Vj":{"data":"GHZ"},"Vk":{"data":"GHZ"}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vb"},"Eb":{"src":"Vg","tgt":"Vc"},"Ec":{"src":"Vh","tgt":"Vc"},"Ed":{"src":"Vc","tgt":"Va"},"Ee":{"src":"Ve","tgt":"Vd"},"Ef":{"src":"Vf","tgt":"Vd"},"Eg":{"src":"Vj","tgt":"Vf"},"Eh":{"src":"Vk","tgt":"Vf"},"Ei":{"src":"Vd","tgt":"Va"}}} \ No newline at end of file diff --git 
a/core/test/graphs/ghz_w/2 times 2.graph b/core/test/graphs/ghz_w/2 times 2.graph deleted file mode 100644 index 5b30fbce..00000000 --- a/core/test/graphs/ghz_w/2 times 2.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"Vb":{}},"node_vertices":{"Va":{"data":"GHZ"},"Vc":{"data":"W"},"Vg":{"data":"GHZ"},"Vh":{"data":"GHZ"},"Vj":{"data":"GHZ"},"Vk":{"data":"GHZ"},"Vw":{"data":"W","annotation":{"quanto-gui:position":"119:80"}}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vb"},"Eb":{"src":"Vg","tgt":"Vc"},"Ec":{"src":"Vh","tgt":"Vc"},"Ed":{"src":"Vc","tgt":"Va"},"Ee":{"src":"Vj","tgt":"Vw"},"Ef":{"src":"Vk","tgt":"Vw"},"Eg":{"src":"Vw","tgt":"Va"}}} \ No newline at end of file diff --git a/core/test/graphs/ghz_w/4.graph b/core/test/graphs/ghz_w/4.graph deleted file mode 100644 index e6be29f1..00000000 --- a/core/test/graphs/ghz_w/4.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"Vb":{"annotation":{"quanto-gui:position":"84:104"}}},"node_vertices":{"Va":{"data":"GHZ","annotation":{"quanto-gui:position":"29:30"}},"Vc":{"data":"W","annotation":{"quanto-gui:position":"84:67"}},"Vd":{"data":"GHZ","annotation":{"quanto-gui:position":"140:30"}},"Vg":{"data":"GHZ","annotation":{"quanto-gui:position":"103:30"}},"Vh":{"data":"GHZ","annotation":{"quanto-gui:position":"66:30"}}},"dir_edges":{"Ea":{"src":"Vc","tgt":"Vb"},"Eb":{"src":"Vg","tgt":"Vc"},"Ec":{"src":"Vh","tgt":"Vc"},"Ed":{"src":"Va","tgt":"Vc"},"Ee":{"src":"Vd","tgt":"Vc"}}} \ No newline at end of file diff --git a/core/test/graphs/ghz_w/ghz-spider-simple.graph b/core/test/graphs/ghz_w/ghz-spider-simple.graph deleted file mode 100644 index cf35cc04..00000000 --- a/core/test/graphs/ghz_w/ghz-spider-simple.graph +++ /dev/null @@ -1 +0,0 @@ -{"node_vertices":{"Va":{"data":"GHZ","annotation":{"quanto-gui:position":"56:95"}},"Vb":{"data":"GHZ","annotation":{"quanto-gui:position":"103:95"}},"Vc":{"data":"W","annotation":{"quanto-gui:position":"60:28"}},"Vd":{"data":"W","annotation":{"quanto-gui:position":"107:28"}},"Ve":{"data":"W","annotation":{"quanto-gui:position":"60:147"}},"Vf":{"data":"W","annotation":{"quanto-gui:position":"107:147"}}},"dir_edges":{"Ea":{"src":"Vc","tgt":"Va"},"Eb":{"src":"Va","tgt":"Ve"},"Ec":{"src":"Vd","tgt":"Vb"},"Ed":{"src":"Vb","tgt":"Vf"},"Ee":{"src":"Va","tgt":"Vb"}}} \ No newline at end of file diff --git a/core/test/graphs/red_green/bbox.qgraph b/core/test/graphs/red_green/bbox.qgraph deleted file mode 100644 index 1c3960ba..00000000 --- a/core/test/graphs/red_green/bbox.qgraph +++ /dev/null @@ -1 +0,0 @@ -{"bang_boxes":{"bx0":{"contents":["b0"]}},"wire_vertices":{"b0":{"annotation":{"boundary":true,"coord":[-0.22,-0.44]}}},"node_vertices":{"v0":{"data":{"type":"X","value":""},"annotation":{"coord":[-0.22,0.88]}}},"undir_edges":{"e0":{"src":"v0","tgt":"b0"}}} \ No newline at end of file diff --git a/core/test/graphs/red_green/d2-ladder_rw1.graph b/core/test/graphs/red_green/d2-ladder_rw1.graph deleted file mode 100644 index 8453f535..00000000 --- a/core/test/graphs/red_green/d2-ladder_rw1.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"a":{},"b":{},"c":{},"d":{}},"node_vertices":{"i":{"data":{"type":"X","angle":{"pretty":"0"}}},"j":{"data":{"type":"Z","angle":{"pretty":"0"}}},"s":{"data":{"type":"Z","angle":{"pretty":"0"}}},"t":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ei":{"src":"j","tgt":"i"},"Ej":{"src":"i","tgt":"d"},"Ek":{"src":"j","tgt":"c"},"El":{"src":"t","tgt":"i"},"Em":{"src":"t","tgt":"a"},"Ev":{"src":"b","tgt":"s"},"Ew":{"src":"j","tgt":"s"},"Ex":{"src":"s","tgt":"t"}}} \ No 
newline at end of file diff --git a/core/test/graphs/red_green/d2-ladder_rw2.graph b/core/test/graphs/red_green/d2-ladder_rw2.graph deleted file mode 100644 index 68749d84..00000000 --- a/core/test/graphs/red_green/d2-ladder_rw2.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"a":{},"b":{},"c":{},"d":{}},"node_vertices":{"e":{"data":{"type":"Z","angle":{"pretty":"0"}}},"i":{"data":{"type":"X","angle":{"pretty":"0"}}},"t":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ed":{"src":"e","tgt":"b"},"Ee":{"src":"i","tgt":"e"},"Ef":{"src":"c","tgt":"e"},"Eg":{"src":"e","tgt":"t"},"Ej":{"src":"i","tgt":"d"},"El":{"src":"t","tgt":"i"},"Em":{"src":"t","tgt":"a"}},"bang_boxes":{"Bc":{"contents":[]},"Bd":{"contents":[]}}} \ No newline at end of file diff --git a/core/test/graphs/red_green/d2-ladder_rw3.graph b/core/test/graphs/red_green/d2-ladder_rw3.graph deleted file mode 100644 index 2426ebca..00000000 --- a/core/test/graphs/red_green/d2-ladder_rw3.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"a":{},"b":{},"c":{},"d":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"pretty":"0"}}},"f":{"data":{"type":"Z","angle":{"pretty":"0"}}},"g":{"data":{"type":"Z","angle":{"pretty":"0"}}},"h":{"data":{"type":"X","angle":{"pretty":"0"}}},"i":{"data":{"type":"X","angle":{"pretty":"0"}}},"j":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"f","tgt":"e"},"Eb":{"src":"a","tgt":"f"},"Ec":{"src":"b","tgt":"e"},"Ed":{"src":"e","tgt":"g"},"Ee":{"src":"f","tgt":"h"},"Ef":{"src":"h","tgt":"g"},"Eg":{"src":"h","tgt":"j"},"Eh":{"src":"g","tgt":"i"},"Ei":{"src":"j","tgt":"i"},"Ej":{"src":"i","tgt":"d"},"Ek":{"src":"j","tgt":"c"}}} \ No newline at end of file diff --git a/core/test/graphs/red_green/d2-ladder_rw3_norm.graph b/core/test/graphs/red_green/d2-ladder_rw3_norm.graph deleted file mode 100644 index d6709f95..00000000 --- a/core/test/graphs/red_green/d2-ladder_rw3_norm.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"a":{"annotation":{"quanto-gui:position":"60:20"}},"b":{"annotation":{"quanto-gui:position":"107:20"}},"c":{"annotation":{"quanto-gui:position":"109:74"}},"d":{"annotation":{"quanto-gui:position":"60:81"}}},"undir_edges":{"Ea":{"src":"a","tgt":"d"},"Eb":{"src":"b","tgt":"c"}}} \ No newline at end of file diff --git a/core/test/graphs/red_green/gen-bialg-inst.qgraph b/core/test/graphs/red_green/gen-bialg-inst.qgraph deleted file mode 100644 index f38d95da..00000000 --- a/core/test/graphs/red_green/gen-bialg-inst.qgraph +++ /dev/null @@ -1 +0,0 @@ -{"bang_boxes":{"bx1":{"contents":["b1","v1"]}},"wire_vertices":{"b1":{"annotation":{"boundary":true,"coord":[-0.14,-2.14]}},"b0":{"annotation":{"boundary":true,"coord":[0.04,2.66]}},"b2":{"annotation":{"boundary":true,"coord":[1.04,2.66]}}},"node_vertices":{"v1":{"annotation":{"coord":[-0.12,-0.62]}},"v0":{"data":{"type":"X","value":""},"annotation":{"coord":[-0.02,1.24]}},"v2":{"data":{"type":"X","value":""},"annotation":{"coord":[0.98,1.24]}}},"undir_edges":{"e1":{"src":"v0","tgt":"v1"},"e0":{"src":"b0","tgt":"v0"},"e4":{"src":"v1","tgt":"v2"},"e3":{"src":"b2","tgt":"v2"},"e2":{"src":"v1","tgt":"b1"}}} \ No newline at end of file diff --git a/core/test/graphs/red_green/x-isom.graph b/core/test/graphs/red_green/x-isom.graph deleted file mode 100644 index c2278d52..00000000 --- a/core/test/graphs/red_green/x-isom.graph +++ /dev/null @@ -1 +0,0 @@ 
-{"node_vertices":{"Va":{"data":{"type":"Z","angle":{"pretty":"0"}},"annotation":{"quanto-gui:position":"60:20"}},"Vb":{"data":{"type":"Z","angle":{"pretty":"0"}},"annotation":{"quanto-gui:position":"64:179"}},"Vc":{"data":{"type":"X","angle":{"pretty":"0"}},"annotation":{"quanto-gui:position":"64:75"}},"Vd":{"data":{"type":"X","angle":{"pretty":"0"}},"annotation":{"quanto-gui:position":"63:120"}}},"undir_edges":{"Ea":{"src":"Va","tgt":"Vc"},"Eb":{"src":"Vc","tgt":"Vd"},"Ec":{"src":"Vc","tgt":"Vd"},"Ed":{"src":"Vd","tgt":"Vb"}}} \ No newline at end of file diff --git a/core/test/load_heap.ML b/core/test/load_heap.ML deleted file mode 100644 index 023bd780..00000000 --- a/core/test/load_heap.ML +++ /dev/null @@ -1,2 +0,0 @@ -PolyML.SaveState.loadState "../heaps/quanto.heap"; -PolyML.Compiler.printDepth:=10; diff --git a/core/test/old-rg-ruleset-rewriter-tests.ML b/core/test/old-rg-ruleset-rewriter-tests.ML deleted file mode 100644 index cbcbb236..00000000 --- a/core/test/old-rg-ruleset-rewriter-tests.ML +++ /dev/null @@ -1,261 +0,0 @@ -local - structure Rule = RG_Theory.Rule - structure Ruleset = RG_Theory.Ruleset - structure Rewriter = BangGraphRewriter( - structure Rule = Ruleset.BangGraphRule - structure Matcher = RG_Theory.MatchSearch - ) - structure RulesetRewriter = RG_Theory.RulesetRewriter - structure MatchSearch = RG_Theory.MatchSearch - structure G = RG_Theory.Graph - val ruleset = "rulesets/red_green/basic.rules" - |> Json.read_file - |> RG_Theory.RulesetJSON.input - - val isom_rule = Ruleset.get_rule ruleset (R.mk "isometry_red") - val x_abelian1_rule = Ruleset.get_rule ruleset (R.mk "x_abelian1") - - fun list_of_rule_matches r g = - let val matches = Rewriter.find_matches r g in - Seq.list_of matches - end - - val zero_angle = LinratAngleExpr.zero - val parse_angle = LinratAngleExpr.parse - fun mkX a = G.NVert (RG_Data.Xnd a) - fun mkZ a = G.NVert (RG_Data.Znd a) - val mkH = G.NVert RG_Data.Hnd - val undir_edge = (Undirected,()) - val dir_edge = (Directed,()) - val bvert = G.WVert -in - -val _ = Testing.test "rule2: !match(isom_rule, X - X - Z)" (fn () => let - (* Example of applications of rule that does not match *) - val g = G.empty; - val (n1, g) = g |> G.add_vertex (mkX zero_angle); - val (n2, g) = g |> G.add_vertex (mkX zero_angle); - val (n3, g) = g |> G.add_vertex (mkZ zero_angle); - val (_, g) = g |> G.add_edge undir_edge n1 n3; - val (_, g) = g |> G.add_edge undir_edge n2 n3; - (*val _ = G.print g;*) - val _ = case list_of_rule_matches isom_rule g - of [] => () - | _ => raise ERROR "Expected no matches" - in () end) (); - - -val _ = Testing.test "rule3: [g1,g2,g3,g4] = subst(isom_lhs, isom_lhs)" (fn () => let - (* test rewriting isometry lhs with isometry rule *) - val g = G.empty; - val (b1, g) = g |> G.add_vertex bvert; - val (n1, g) = g |> G.add_vertex (mkX zero_angle); - val (n2, g) = g |> G.add_vertex (mkX zero_angle); - val (b2, g) = g |> G.add_vertex bvert; - val (_, g) = g |> G.add_edge undir_edge b1 n1; - val (_, g) = g |> G.add_edge undir_edge n1 n2; - val (_, g) = g |> G.add_edge undir_edge n1 n2; - val (_, g) = g |> G.add_edge undir_edge n2 b2; - (*val _ = G.print g;*) - - val _ = - case list_of_rule_matches isom_rule g - of [m1,m2,m3,m4] => - let - (* - val _ = Rule.print r'; - val _ = Rewriter.print_match m1; - *) - (* - val inst_isom_rule = Rewriter.instantiate_rule m1 r'; - val _ = Rule.print inst_isom_rule; - *) - val _ = Rewriter.rewrite_at isom_rule m1 - val _ = Rewriter.rewrite_at isom_rule m2 - val _ = Rewriter.rewrite_at isom_rule m3 - val _ = 
Rewriter.rewrite_at isom_rule m4 - in () end - | _ => raise ERROR "Expected four matches" - in () end) (); - -val _ = Testing.test "rule4: rewriting bialgebra (self-matches)" (fn () => let - val r = Ruleset.get_rule ruleset (R.mk "bialgebra1"); - val g = Rule.get_lhs r; - val _ = - case list_of_rule_matches r g - of [m1,m2,m3,m4] => - let - (* - val _ = Rule.print r'; - val _ = Rewriter.print_match m1; - *) - (* - val inst_isom_rule = Rewriter.instantiate_rule m1 r'; - val _ = Rule.print inst_isom_rule; - *) - val _ = Rewriter.rewrite_at r m1 - val _ = Rewriter.rewrite_at r m2 - val _ = Rewriter.rewrite_at r m3 - val _ = Rewriter.rewrite_at r m4 - in () end - | _ => raise ERROR "Expected four matches" - in () end) (); - - - (* Self-Loop, we create two vertices with self-loops and try to rewrite the - * whole graph. *) - val _ = Testing.test "rule5: rewriting x_abelian1 (loop)" (fn () => let - val g = G.empty; - val (n1, g) = g |> G.add_vertex (mkX zero_angle); - val (n2, g) = g |> G.add_vertex (mkX zero_angle); - val (_, g) = g |> G.add_edge undir_edge n1 n1; - val (_, g) = g |> G.add_edge undir_edge n2 n2; - (*val _ = G.print g;*) - - val m1 = list_of_rule_matches x_abelian1_rule g - (* - val _ = Rewriter.print_match (hd m1) - *) - val (_,g) = Rewriter.rewrite_at x_abelian1_rule (hd m1) - val m1 = list_of_rule_matches x_abelian1_rule g - val (_,g) = Rewriter.rewrite_at x_abelian1_rule (hd m1) - (*val _ = G.print g*) - - in () end) (); - - -val _ = Testing.test "rule6: X(a+b)- => X(a)-X(b)-, matching X(c)-Z(b): renaming of internal variables prior to rewriting." (fn () => let - (* lhs *) - val lhs = let val g = G.empty; - val (b1, g) = g |> G.add_vertex bvert; - val (x1, g) = g |> G.add_vertex (mkX (parse_angle "a + b")); - val (_, g) = g |> G.add_edge undir_edge x1 b1; - in g end; - (*val _ = G.print lhs;*) - (* rhs *) - val rhs = let val g = G.empty; - val (b1, g) = g |> G.add_vertex bvert; - val (x1, g) = g |> G.add_vertex (mkX (parse_angle "a")); - val (x2, g) = g |> G.add_vertex (mkX (parse_angle "b")); - val (_, g) = g |> G.add_edge undir_edge x1 x2; - val (_, g) = g |> G.add_edge undir_edge x2 b1; - in g end; - (*val _ = G.print rhs;*) - val rule = Rule.mk (lhs,rhs); - (* tgt graph *) - val tgt = let val g = G.empty; - val (x1, g) = g |> G.add_vertex (mkX (parse_angle "c")); - val (x2, g) = g |> G.add_vertex (mkZ (parse_angle "b")); - val (_, g) = g |> G.add_vertex bvert; - val (_, g) = g |> G.add_edge undir_edge x1 x2; - in g end - - val _ = list_of_rule_matches rule tgt - - in () end) (); - - -val _ = Testing.test "rule7: isom_rule rewrites subgraph" (fn () => let - (* Example of applications of rule that does not match *) - val g = G.empty; - val (x1, g) = g |> G.add_vertex (mkX zero_angle); - val (x2, g) = g |> G.add_vertex (mkX zero_angle); - val (z1, g) = g |> G.add_vertex (mkZ zero_angle); - val (z2, g) = g |> G.add_vertex (mkZ zero_angle); - val (_, g) = g |> G.add_edge undir_edge z1 x1; - val (_, g) = g |> G.add_edge undir_edge x1 x2; - val (_, g) = g |> G.add_edge undir_edge x1 x2; - val (_, g) = g |> G.add_edge undir_edge x2 z2; - (*val _ = G.print g;*) - val verts = V.NSet.single x1; - val rseq = RulesetRewriter.applicable_rules ruleset - (fn r => fn g => Rewriter.find_matches_in_subgraph r g verts) - (R.NSet.single (R.mk "isometry_red")) g; - val _ = Seq.list_of rseq; - in () end) (); - - - val Ba = B.mk "Ba" - - val Va = V.mk "Va" - val Vb = V.mk "Vb" - val Vc = V.mk "Vc" - val Vd = V.mk "Vd" - val Vf = V.mk "Vf" - - val Vx = V.mk "Vx" - val Vy = V.mk "Vy" - -(* lhs *) 
- - - val lhs = G.empty - |> G.add_named_vertex Va bvert - |> G.add_named_vertex Vc (mkZ zero_angle) - |> G.add_named_bbox Ba - |> G.add_to_bbox_anon Ba (V.NSet.of_list [Va]) - |> G.add_edge_anon dir_edge Vc Va - - (* - val _ = Pretty.writeln (Pretty.str "interior node bbox rule") - val _ = G.print lhs; - *) - (* rhs *) - val rhs = G.empty - |> G.add_named_vertex Va bvert - |> G.add_named_vertex Vf (mkX zero_angle) - |> G.add_named_vertex Vd (mkH) - |> G.add_named_bbox Ba - |> G.add_to_bbox_anon Ba (V.NSet.of_list [Va,Vd]) - |> G.add_edge_anon dir_edge Vf Vd - |> G.add_edge_anon dir_edge Vd Va - - (*val rhs = G.empty - |> G.add_named_vertex Va bvert - |> G.add_named_vertex Vf (mkX zero_angle) - |> G.add_named_vertex Vd (mkH) - |> G.add_named_bbox Ba - |> G.add_to_bbox_anon Ba (V.NSet.of_list [Va,Vd]) - |> G.add_edge_anon dir_edge Vf Vd - |> G.add_edge_anon dir_edge Vd Va*) - - (*val _ = G.print rhs;*) - val rule = Rule.mk (lhs,rhs); - - (* - val _ = Pretty.writeln (Pretty.str "*** rule (before freshening):") - val _ = Rule.print rule; - *) - - (* tgt graph *) - val tgt = G.empty - |> G.add_named_vertex Va bvert - |> G.add_named_vertex Vb (mkZ zero_angle) - |> G.add_edge_anon dir_edge Vb Va - - val _ = Testing.test "rule with interior node in bbox on RHS" (fn () => let - val m1 = list_of_rule_matches rule tgt - - (* - val _ = Pretty.writeln (Pretty.str "*** match state:") - val match = hd m1 - val _ = Rewriter.print_match match - *) - (*val rhs' = (MatchSearch.BGMatchState.replay_pat_bbox_ops match (Rule.get_rhs rule)) - *) - - (*exception die_exp of unit - val _ = raise die_exp ()*) - (*val (_,g) = Rewriter.rewrite_at rule (hd m1) - val _ = G.print g;*) - - in () end) (); - - - val _ = Testing.assert_no_failed_tests(); -end - - - - - diff --git a/core/test/performance.ML b/core/test/performance.ML deleted file mode 100644 index 948fa595..00000000 --- a/core/test/performance.ML +++ /dev/null @@ -1,17 +0,0 @@ -PolyML.SaveState.loadState "../heaps/quanto.heap"; -PolyML.Compiler.printDepth:=10; - -val rsj = Json.read_file "../../examples/steane/steane.qrules"; -val rs = RG_GraphicalTheoryIO.InputRulesetJSON.input rsj; -val gj = Json.read_file "../../examples/steane/enc_dec_sp_norm.qgraph"; -val g1 = RG_GraphicalTheoryIO.InputGraphJSON.input gj; -val gj = Json.read_file "../../examples/steane/sp2legs.qgraph"; -val g2 = RG_GraphicalTheoryIO.InputGraphJSON.input gj; - -val gj = Json.read_file "../../examples/steane/splhs.qgraph"; -val pat = RG_GraphicalTheoryIO.InputGraphJSON.input gj; - - - -RG_Theory.MatchSearch.InnerLog.level_ref := 2; -RG_Theory.MatchSearch.Log.level_ref := 2; diff --git a/core/test/protocol/console-tests.ML b/core/test/protocol/console-tests.ML deleted file mode 100644 index a6e0e630..00000000 --- a/core/test/protocol/console-tests.ML +++ /dev/null @@ -1,48 +0,0 @@ -CoreTester.registerTest "Get console commands" (fn session => ( - let - val () = CoreTester.writeRequestByParts session ("CL","ada") "" - val commands = CoreTester.readNameListResponse session "ada" - fun eqTest a b = (a = b) - fun checkContains value = - if (List.find (eqTest value) commands) = NONE then - raise CoreTester.test_exp (value^" was not in the list of commands") - else () - in - checkContains "list_graphs"; - checkContains "new_graph"; - checkContains "print_graph"; - checkContains "apply_rewrite" - end -)); - -CoreTester.registerTest "Get console help" (fn session => ( - CoreTester.writeRequestByParts session ("CH","fortran") "untag_rule"; - CoreTester.demandConsoleHelpResponse session "fortran" 
("RULE TAG", "Remove TAG from RULE"); - - CoreTester.writeRequestByParts session ("CH","c++") "list_rules"; - CoreTester.demandConsoleHelpResponse session "c++" ("", "List all loaded rules") -)); - -CoreTester.registerTest "Run console commands" (fn session => ( - CoreTester.writeRequestByParts session ("TS","pascal") "red_green"; - CoreTester.demandOkResponse session "pascal"; - - CoreTester.writeRequestByParts session ("CC","js") - (CoreTester.dataChunk "new_graph"); - CoreTester.demandConsoleResponse session "js" "new-graph-1"; - - CoreTester.writeRequestByParts session ("CC","perl") - (CoreTester.dataChunk "rename_graph \"new-graph-1\" \"my-little-graph\""); - CoreTester.demandConsoleResponse session "perl" "my-little-graph"; - - (* check it has updated the state properly *) - CoreTester.writeRequestByParts session ("GL", "ruby") ""; - CoreTester.demandNameListResponse session "ruby" ["my-little-graph"]; - - CoreTester.writeRequestByParts session ("CC","objc") - (CoreTester.dataChunk "rename_graph foo bar"); - CoreTester.demandConsoleResponse session "objc" "!!! No such graph \"foo\"" -)); - -(* vi:et:sw=2:sts=2 -*) diff --git a/core/test/protocol/core-tester.ML b/core/test/protocol/core-tester.ML deleted file mode 100644 index e774f688..00000000 --- a/core/test/protocol/core-tester.ML +++ /dev/null @@ -1,467 +0,0 @@ -(* - * Infrastructure for doing tests on quanto-core - * - * Note: this will only work on Unix. - * - * Example test: - * CoreTester.registerTest "Change theory (invalid_theory)" (fn session => ( - * CoreTester.writeRequestByParts session ("TS","apple") "invalid_theory"; - * CoreTester.demandErrorResponse session "apple" "BADTHEORY" - * )); - * Throw a test_exp when you get something back you don't expect. - *) - -signature CORE_TESTER = -sig - type session; - type request; - (** Indicates a test failure *) - exception test_exp of string; - - (** - * Constructs a request - * Param: (code,requestId) - * Param: Body - *) - val createRequest: (string*string) -> string -> request; - (** - * The standard argument delimiter string. - * Value: "\u001b;" - *) - val delim: string; - (** - * Constructs a data chunk for some data - * Param: data - * Result: "\u001b["^(String.size data)^"\u001b|"^data^"\u001b]" - *) - val dataChunk: string -> string; - (** - * Escapes a string so it can be placed in a request body. - * - * Replaces all occurences of ESC ("\u001b") in its input with - * two copies of ESC ("\u001b\u001b"). - *) - val escapeStr: string -> string; - (** - * Constructs a string list - *) - val stringList: string list -> string; - (** - * Produces a message body by concatenating the given arguments with the delimiter - *) - val concatArgs: string list -> string; - - (** - * Writes a previously constructed request to the core - *) - val writeRequest: session -> request -> unit; - (** - * Creates a request and sends it to the core - *) - val writeRequestByParts: session -> (string*string) -> string -> unit; - val writeRequestByArgs: session -> (string*string) -> string list -> unit; - - - (** - * The following methods all take a session and a request id. - * - * The read* methods read in a response of that type, and return - * the parsed data. If a different response is received, a - * test_exp is raised. - * - * The demand* methods do the same, but constrain the data - * received, and raise test_exp if the data is not as expected. 
- *) - val readErrorResponse: session -> string -> (string*string); - (** Only the error code is checked *) - val demandErrorResponse: session -> string -> string -> unit; - val readOkResponse: session -> string -> unit; - (** Identical to readOkResponse *) - val demandOkResponse: session -> string -> unit; - val readConsoleResponse: session -> string -> string; - val demandConsoleResponse: session -> string -> string -> unit; - val readConsoleHelpResponse: session -> string -> (string*string); - val demandConsoleHelpResponse: session -> string -> (string*string) -> unit; - val readDataResponse: session -> string -> string; - val demandDataResponse: session -> string -> string -> unit; - val readPrettyResponse: session -> string -> string; - val demandPrettyResponse: session -> string -> string -> unit; - val readXmlResponse: session -> string -> string; - val demandXmlResponse: session -> string -> string -> unit; - val readCountResponse: session -> string -> int; - val demandCountResponse: session -> string -> int -> unit; - val readNameResponse: session -> string -> string; - val demandNameResponse: session -> string -> string -> unit; - val readNameListResponse: session -> string -> string list; - val demandNameListResponse: session -> string -> string list -> unit; - val readUserDataResponse: session -> string -> string; - val demandUserDataResponse: session -> string -> string -> unit; - val readUnknownRequestResponse: session -> string -> string; - val demandUnknownRequestResponse: session -> string -> string -> unit; - - (** Start a new session; not normally used directly (see registerTest) *) - val startSession: string -> session; - (** End a running session; not normally used directly (see registerTest) *) - val endSession: session -> unit; - (** Check the protocol version of a running session *) - val protocolVersion: session -> string; - - val setCoreExecutable: string -> unit; - val setLogFile: string -> unit; - - (** - * Register a test. - * - * The first argument is the test name, the second is the test method. A - * freshly-started session is passed. If the test fails, test_exp should - * be raised. - *) - val registerTest: string -> (session -> unit) -> unit; - (** - * Runs all the registered tests, in the order that they were registered. 
- *) - val runRegisteredTests: unit -> bool; -end - -structure CoreTester = -struct - type session = { proc: (TextIO.instream, TextIO.outstream) Unix.proc, - instream: TextIO.instream, - outstream: TextIO.outstream, - version: string - } - type request = { code: string, requestId: string, body: string }; - exception test_exp of string; - - val printableEscapes = String.map (fn #"\u001b" => #"\u00a4" | c => c) - - val coreExecutable = ref "../../bin/quanto-core"; - val logStream = ref (TextIO.openOut "/dev/null"); - - fun setCoreExecutable exec = (coreExecutable := exec); - fun setLogFile file = (logStream := TextIO.openOut file); - - fun output (stream,msg) = ( - TextIO.output (!logStream,printableEscapes msg); - TextIO.output (stream,msg)); - fun flushOut stream = ( - TextIO.flushOut (!logStream); - TextIO.flushOut stream); - fun input1 stream = - let - val ch = TextIO.input1 stream - val () = (case ch - of SOME #"\u001b" => TextIO.output ((!logStream),"\u00a4") - | SOME c => TextIO.output ((!logStream),String.str c) - | NONE => () - ) - in ch end; - fun inputN (stream,0) = "" - | inputN (stream,len) = - let - val str = TextIO.inputN (stream,len) - val strSize = String.size str - val () = TextIO.output (!logStream,printableEscapes str) - in - if (strSize = 0 orelse strSize = len) then - str - else - (* large messages sometimes get split up *) - (str^(inputN (stream,len-strSize))) - end; - fun log msg = (TextIO.output (!logStream,msg); TextIO.flushOut (!logStream)) - - fun createRequest (code,requestId) body = - { code = code, requestId = requestId, body = body }; - - fun constructRequest { code, requestId, body } = - ("\u001b<"^code^"\u001b:"^requestId^"\u001b|"^body^"\u001b>") - - val delim = "\u001b;"; - fun dataChunk data = "\u001b["^(Int.toString (String.size data))^"\u001b|"^data^"\u001b]"; - val escapeStr = String.translate (fn #"\u001b" => "\u001b\u001b" | c => String.str c); - fun stringList ss = String.concatWith "\u001b," (map escapeStr ss) - val concatArgs = String.concatWith delim - - fun writeRequest ({ outstream, ... }:session) request = - (log "\n\n"; - output (outstream,constructRequest request); - flushOut outstream; - log "\n"); - fun writeRequestByParts session header body = writeRequest session (createRequest header body); - fun writeRequestByArgs session header args = writeRequest session (createRequest header (concatArgs args)); - - fun eatChar ch instream = - case (input1 instream) - of NONE => raise test_exp ("Unexpected EOF, expecting "^(String.str ch)) - | SOME c => if ch = c then () else - raise test_exp ("Unexpected char "^(String.str c)^", expecting "^(String.str ch)); - fun eatEsc instream = - case (input1 instream) - of NONE => raise test_exp ("Unexpected EOF, expecting ESC") - | SOME #"\u001b" => () - | SOME c => raise test_exp ("Unexpected char "^(String.str c)^", expecting ESC"); - - local - fun readToEscape' (soFar: string) (terminator: char) (instream:TextIO.instream) : string = - case input1 instream - of SOME #"\u001b" => - ( - case input1 instream of - NONE => raise test_exp "End of file" - | SOME #"\u001b" => (* Escaped ESC. 
*) - readToEscape' (soFar ^ str #"\u001b") terminator instream - | SOME ch => if ch = terminator - then soFar - else raise test_exp (str ch ^ " not " ^ str terminator) - ) - | SOME ch => readToEscape' (soFar ^ str ch) terminator instream - | NONE => raise test_exp "End of file" - in - val readToEscape = readToEscape' "" - end - fun readHeader (expCode, expRequestId) instream = - let - val () = eatEsc instream - val () = eatChar #"<" instream - val code = readToEscape #":" instream - val requestId = readToEscape #"|" instream - in - if code <> expCode then - raise test_exp ("Expected a "^expCode^" response, got a "^code^" response") - else if requestId <> expRequestId then - raise test_exp ("Expected request id was \""^expRequestId^"\", got \""^requestId^"\"") - else - () - end - - fun readInt termCh instream : int = - case Int.fromString (readToEscape termCh instream) of - NONE => 0 - | SOME i => i - fun readDataChunk instream = - let - val () = (eatEsc instream; eatChar #"[" instream) - val dataLength = readInt #"|" instream - val data = inputN (instream, dataLength) - val gotLength = String.size data - val _ = if gotLength < dataLength then - raise test_exp ("Expected "^(Int.toString dataLength)^" bytes of data, got "^(Int.toString gotLength)) - else () - val () = (eatEsc instream; eatChar #"]" instream) - in - data - end; - fun assertAtBodyEnd instream = - let - val body = readToEscape #">" instream - in - if "" <> body then - raise test_exp ("Expected end of body, got \""^(printableEscapes body)^"\"") - else () - end; - fun eatEscapedChar c instream = (eatEsc instream; eatChar c instream) - - local - fun readList' _ 0 (_, terminator) instream = (eatEscapedChar terminator instream; []) - | readList' _ 1 (_, terminator) instream = [readToEscape terminator instream] - | readList' soFar count (splitter, terminator) instream = - (case input1 instream - of SOME #"\u001b" => - ( - case input1 instream - of NONE => raise test_exp "End of file" - | SOME #"\u001b" => readList' (soFar ^ "\u001b") count (splitter, terminator) instream - | SOME ch => - if ch = splitter then - soFar::(readList' "" (count-1) (splitter, terminator) instream) - else if ch = terminator then - raise test_exp ("Wrong number of elements in list") - else - raise test_exp (str ch ^ " not " ^ str terminator) - ) - | SOME ch => readList' (soFar ^ str ch) count (splitter, terminator) instream - | NONE => raise test_exp "End of file" - ) - in - fun readList (splitter:char,terminator:char) (instream:TextIO.instream) : string list = - readList' "" (readInt #":" instream) (splitter,terminator) instream - end - - fun readDataChunkResponse code ({ instream, ... }:session) requestId = - let - val () = readHeader (code,requestId) instream - val data = readDataChunk instream - val _ = assertAtBodyEnd instream - in - data - end; - fun demandDataChunkResponse code session requestId expResp = - let val resp = (readDataChunkResponse code session requestId) in - if expResp <> resp then - raise test_exp ("Expected response \""^(printableEscapes expResp)^"\", got \""^(printableEscapes resp)^"\"") - else () - end - - fun readErrorResponse ({ instream, ... 
}:session) requestId = - let - val () = readHeader ("Q",requestId) instream - val errorCode = readToEscape #";" instream - val errorMsg = readToEscape #">" instream - in - (errorCode, errorMsg) - end; - fun demandErrorResponse session requestId expErrorCode = - let val (errorCode,_) = readErrorResponse session requestId in - if expErrorCode <> errorCode then - raise test_exp ("Expected error code "^expErrorCode^", got "^errorCode) - else () - end; - - fun readOkResponse ({ instream, ... }:session) requestId = - let - val () = readHeader ("O",requestId) instream - val body = readToEscape #">" instream - in - if "" <> body then - raise test_exp ("Expected empty body, got \""^(printableEscapes body)^"\"") - else () - end; - val demandOkResponse = readOkResponse; - - val readConsoleResponse = readDataChunkResponse "C"; - val demandConsoleResponse = demandDataChunkResponse "C"; - - fun readConsoleHelpResponse ({ instream, ... }:session) requestId = - let - val () = readHeader ("H",requestId) instream - val args = readToEscape #";" instream - val help = readToEscape #">" instream - in - (args, help) - end; - fun demandConsoleHelpResponse session requestId (expArgs,expHelp) = - let val (args,help) = (readConsoleHelpResponse session requestId) in - if expArgs <> args then - raise test_exp ("Expected args list \""^(printableEscapes expArgs)^"\", got \""^(printableEscapes args)^"\"") - else if expHelp <> help then - raise test_exp ("Expected help text \""^(printableEscapes expHelp)^"\", got \""^(printableEscapes help)^"\"") - else () - end - - val readDataResponse = readDataChunkResponse "R"; - val demandDataResponse = demandDataChunkResponse "R"; - - val readPrettyResponse = readDataChunkResponse "P"; - val demandPrettyResponse = demandDataChunkResponse "P"; - - val readXmlResponse = readDataChunkResponse "X"; - val demandXmlResponse = demandDataChunkResponse "X"; - - fun readCountResponse ({ instream, ... }:session) requestId = - let val () = readHeader ("I",requestId) instream in - readInt #">" instream - end; - fun demandCountResponse session requestId expVal = - let val i = readCountResponse session requestId in - if expVal <> i then - raise test_exp ("Expected count was "^(Int.toString expVal)^", got "^(Int.toString i)) - else () - end; - - fun readNameResponse ({ instream, ... }:session) requestId = - let val () = readHeader ("N",requestId) instream in - readToEscape #">" instream - end; - fun demandNameResponse session requestId expName = - let val name = readNameResponse session requestId in - if expName <> name then - raise test_exp ("Expected response was \""^(printableEscapes expName)^"\", got \""^(printableEscapes name)^"\"") - else () - end; - - fun readNameListResponse ({ instream, ... }:session) requestId = - let val () = readHeader ("M",requestId) instream in - readList (#",",#">") instream - end - fun demandNameListResponse session requestId expNames = - let val names = readNameListResponse session requestId in - if expNames <> names then - raise test_exp ("Expected response was \""^(printableEscapes (String.concatWith "\u001b," expNames))^"\", got \""^(printableEscapes (String.concatWith "\u001b," names))^"\"") - else () - end; - - val readUserDataResponse = readDataChunkResponse "U"; - val demandUserDataResponse = demandDataChunkResponse "U"; - - fun readUnknownRequestResponse ({ instream, ... 
}:session) requestId = - let val () = readHeader ("Z",requestId) instream in - readToEscape #">" instream - end; - fun demandUnknownRequestResponse session requestId expCode = - let val code = readUnknownRequestResponse session requestId in - if expCode <> code then - raise test_exp ("Expected code was \""^(printableEscapes expCode)^"\", got \""^(printableEscapes code)^"\"") - else () - end; - - fun startSession path = - let - val proc = Unix.execute (path,["--protocol"]) - val (instream,outstream) = Unix.streamsOf proc - val () = (eatEsc instream; eatChar #"<" instream) - val () = (eatChar #"V" instream) - val () = (eatEsc instream; eatChar #"|" instream) - val version = readToEscape #">" instream - in - { proc = proc, instream = instream, outstream = outstream, version = version } - end; - fun endSession ({ proc, ... }:session) = - let - val status = Unix.reap proc - in - if (OS.Process.isSuccess status) then - () - else - raise test_exp "Core process exited with failure code" - end - fun protocolVersion ({ version, ... }:session) = version; - - val tests: ((string*(session -> unit)) list ref) = ref []; - - fun registerTest name testFun = - (tests := (name,testFun)::(!tests)); - - fun killSession ({proc,instream,outstream,...}:session) = - (TextIO.closeOut outstream; - log "<<>>"; - log (printableEscapes (TextIO.inputAll instream)); - Unix.kill (proc,Posix.Signal.kill)); - - fun runTests success [] = success - | runTests success ((name,testFun)::ts) = - (let - val () = log "========================================\n" - val () = log ("Starting test \""^name^"\"\n\n") - val () = TextIO.print (name^" ... ") - val session = startSession (!coreExecutable) - val () = (testFun session handle e => (killSession session; raise e)) - val () = endSession session - in - log ("\n\nTest \""^name^"\" passed.\n\n\n"); - TextIO.print "passed\n"; - runTests success ts - end - handle test_exp msg => - (log ("\n\nTest \""^name^"\" failed:\n "^msg^"\n\n\n"); - TextIO.print "failed:\n "; - TextIO.print msg; - TextIO.print "\n"; - runTests false ts)); - - fun runRegisteredTests () = runTests true (rev (!tests)) -end -open CoreTester; - -(* vi:et:sw=2:sts=2 -*) diff --git a/core/test/protocol/rg-graph-tests.ML b/core/test/protocol/rg-graph-tests.ML deleted file mode 100644 index 45a84bb0..00000000 --- a/core/test/protocol/rg-graph-tests.ML +++ /dev/null @@ -1,203 +0,0 @@ -let - val exampleGraphData = "edge-pointedge-pointedge-pointhadamardZ(1/2) x + iyiy11x12Z\\pi11X0X\\alpha\\alpha11unitunitunitunitunitunitunitunitbde" - - fun vertexName (n,_,_) = n - fun vertexType (_,t,_) = t - fun vertexData (_,_,d) = d - fun eq a b = a = b - exception none_exp of unit; - fun the (SOME x) = x - | the NONE = raise none_exp (); - val exampleGraphVertices = [("a","X",SOME "\\alpha"), - ("b","X",SOME "0"), - ("c","Z",SOME "\\pi"), - ("d","Z",SOME "(1/2) x + iy"), - ("e","hadamard",NONE), - ("f","edge-point",NONE), - ("g","edge-point",NONE), - ("h","edge-point",NONE)] - val exampleGraphVertexNames = map vertexName exampleGraphVertices - fun exampleGraphVertex vn = the (List.find ((eq vn) o vertexName) exampleGraphVertices) - val exampleGraphVertexType = vertexType o exampleGraphVertex - val exampleGraphVertexData = vertexData o exampleGraphVertex - - fun edgeName (n,_,_,_,_,_) = n - fun edgeIsDir (_,d,_,_,_,_) = d - fun edgeSource (_,_,s,_,_,_) = s - fun edgeTarget (_,_,_,t,_,_) = t - fun edgeType (_,_,_,_,t,_) = t - fun edgeData (_,_,_,_,_,d) = d - val exampleGraphEdges = [("a",false,"c","a","unit",NONE), - 
("b",false,"b","a","unit",NONE), - ("c",true ,"e","e","unit",NONE), - ("d",true ,"e","d","unit",NONE), - ("e",true ,"e","a","unit",NONE), - ("f",true ,"f","c","unit",NONE), - ("g",true ,"a","g","unit",NONE), - ("h",true ,"d","h","unit",NONE)] - val exampleGraphEdgeNames = map edgeName exampleGraphEdges - fun exampleGraphEdge en = the (List.find ((eq en) o edgeName) exampleGraphEdges) - val exampleGraphEdgeIsDir = edgeIsDir o exampleGraphEdge - val exampleGraphEdgeSource = edgeSource o exampleGraphEdge - val exampleGraphEdgeTarget = edgeTarget o exampleGraphEdge - val exampleGraphEdgeType = edgeType o exampleGraphEdge - val exampleGraphEdgeData = edgeData o exampleGraphEdge - - fun bangBoxName (b,_) = b - fun bangBoxVertices (_,v) = v - val exampleGraphBangBoxes = [("Ba",["b"]), - ("Bb",["d","e"]), - ("Bc",[])] - val exampleGraphBangBoxNames = map bangBoxName exampleGraphBangBoxes - fun exampleGraphBangBox bn = the (List.find ((eq bn) o bangBoxName) exampleGraphBangBoxes) - val exampleGraphBangBoxVertices = bangBoxVertices o exampleGraphBangBox -in - -registerTest "Add/list/kill graphs (red_green)" (fn session => ( -let - val _ = writeRequestByParts session ("TS","quimby") "red_green"; - val _ = demandOkResponse session "quimby"; - - fun checkGraphNames expNames = - let - val _ = writeRequestByParts session ("GL","santa's little helper") "" - val graphNames = readNameListResponse session "santa's little helper" - in - if unordered_eq graphNames expNames then () else - raise test_exp "Got wrong graph names" - end - - val _ = writeRequestByParts session ("GL","homer") "" - val _ = demandNameListResponse session "homer" [] - - val _ = writeRequestByParts session ("GOE","lisa") "" - val gr1Name = readNameResponse session "lisa" - - val _ = writeRequestByParts session ("GOE","marge") "magic-graph" - val _ = demandNameResponse session "marge" "magic-graph" - - val _ = writeRequestByParts session ("GOE","bart") "magic-graph" - val gr3Name = readNameResponse session "bart" - val _ = if "magic-graph" <> gr3Name then () else - raise test_exp "GOE overwrote an existing graph!" - - val _ = checkGraphNames [gr1Name,"magic-graph",gr3Name] - - val _ = writeRequestByArgs session ("GR","barney") ["magic-graph","ordinary graph"] - val _ = demandNameResponse session "barney" "ordinary graph" - - val _ = checkGraphNames [gr1Name,"ordinary graph",gr3Name] - - val _ = writeRequestByArgs session ("GR","monty") ["ordinary graph",gr3Name] - val gr2Name = readNameResponse session "monty" - val _ = if gr2Name <> gr3Name then () else - raise test_exp "GR overwrote an existing graph!" 
- - val _ = checkGraphNames [gr1Name,gr2Name,gr3Name] - - val _ = writeRequestByParts session ("GD","maggie") gr1Name - val _ = demandOkResponse session "maggie" - - val _ = checkGraphNames [gr2Name,gr3Name] - - val _ = writeRequestByParts session ("GOG","moe") gr2Name - val gr4Name = readNameResponse session "moe" - - val _ = checkGraphNames [gr2Name,gr3Name,gr4Name] -in () end -)); - -registerTest "Load/save graphs (red_green)" (fn session => ( -let - val _ = writeRequestByParts session ("TS","Geneva") "red_green"; - val _ = demandOkResponse session "Geneva"; - - val inputGraphFile = OS.FileSys.tmpName() - val graphOutstream = TextIO.openOut inputGraphFile - val _ = TextIO.output (graphOutstream,exampleGraphData) - val _ = TextIO.closeOut graphOutstream - val outputGraphFile = OS.FileSys.tmpName() - val dummyGraphFile = OS.FileSys.tmpName() - - val _ = writeRequestByArgs session ("GOD","London") ["example",dataChunk exampleGraphData] - val _ = demandNameResponse session "London" "example" - - val _ = writeRequestByArgs session ("GOD","Cardiff") ["example",dataChunk "gibberish"] - val _ = demandErrorResponse session "Cardiff" "BADDATA" - - val _ = writeRequestByParts session ("GOF","Paris") inputGraphFile - val graph2Name = readNameResponse session "Paris" - - val _ = writeRequestByParts session ("GOF","Dublin") dummyGraphFile - val _ = demandErrorResponse session "Dublin" "BADDATA" - - val _ = OS.FileSys.remove dummyGraphFile - val _ = writeRequestByParts session ("GOF","Madrid") dummyGraphFile - val _ = demandErrorResponse session "Madrid" "FILEACCESS" - - val _ = writeRequestByArgs session ("GS","Helsinki") ["example","/proc/dummy.graph"] - val _ = demandErrorResponse session "Helsinki" "FILEACCESS" - - val _ = writeRequestByArgs session ("GS","Stockholm") ["dummy graph",outputGraphFile] - val _ = demandErrorResponse session "Stockholm" "NOSUCHGRAPH" - - val _ = writeRequestByArgs session ("GS","Berlin") ["example",outputGraphFile] - val _ = demandOkResponse session "Berlin" - - val graphInstream = TextIO.openIn outputGraphFile - val graphData2 = TextIO.inputAll graphInstream - val _ = TextIO.closeIn graphInstream - - val _ = writeRequestByArgs session ("GE","Reykjavik") ["dummy graph","native"] - val _ = demandErrorResponse session "Reykjavik" "NOSUCHGRAPH" - - val _ = writeRequestByArgs session ("GE","Copenhagen") ["example","dummy"] - val _ = demandErrorResponse session "Copenhagen" "BADFORMAT" - - (* check that the saved data is the same as the exported data *) - val _ = writeRequestByArgs session ("GE","Edinburgh") [graph2Name,"native"] - val _ = demandDataResponse session "Edinburgh" graphData2 - - val _ = OS.FileSys.remove inputGraphFile - val _ = OS.FileSys.remove outputGraphFile -in () end -)); - -registerTest "Describe graphs (red_green)" (fn session => ( -let - val _ = writeRequestByParts session ("TS","Cuba") "red_green"; - val _ = demandOkResponse session "Cuba"; - - val _ = writeRequestByArgs session ("GOD","USA") ["example",dataChunk exampleGraphData] - val _ = demandNameResponse session "USA" "example" - - val _ = writeRequestByArgs session ("GVVL","Canada") ["dummy graph"] - val _ = demandErrorResponse session "Canada" "NOSUCHGRAPH" - - val _ = writeRequestByArgs session ("GVVL","Mexico") ["example"] - val vertices = readNameListResponse session "Mexico" - val _ = if unordered_eq vertices exampleGraphVertexNames then () else - raise test_exp "Got wrong vertex names" - - val _ = writeRequestByArgs session ("GVEL","Bahamas") ["dummy graph"] - val _ = demandErrorResponse 
session "Bahamas" "NOSUCHGRAPH" - - val _ = writeRequestByArgs session ("GVEL","Antigua and Barbuda") ["example"] - val edges = readNameListResponse session "Antigua and Barbuda" - val _ = if unordered_eq edges exampleGraphEdgeNames then () else - raise test_exp "Got wrong edge names" - - val _ = writeRequestByArgs session ("GVBL","Barbados") ["dummy graph"] - val _ = demandErrorResponse session "Barbados" "NOSUCHGRAPH" - - val _ = writeRequestByArgs session ("GVBL","Belize") ["example"] - val bangBoxes = readNameListResponse session "Belize" - val _ = if unordered_eq bangBoxes exampleGraphBangBoxNames then () else - raise test_exp "Got wrong !-box names" -in () end -)); - -() end - -(* vi:et:sw=2:sts=2 -*) diff --git a/core/test/protocol/simple-test.ML b/core/test/protocol/simple-test.ML deleted file mode 100644 index 021acc7c..00000000 --- a/core/test/protocol/simple-test.ML +++ /dev/null @@ -1,10 +0,0 @@ -CoreTester.registerTest "Simple test" (fn session => -let val version = CoreTester.protocolVersion session in - if version <> "1.0" then - raise CoreTester.test_exp ("Wrong version (got "^version^", expected 1.0)") - else () -end -); - -(* vi:et:sw=2:sts=2 -*) diff --git a/core/test/protocol/test-utils.ML b/core/test/protocol/test-utils.ML deleted file mode 100644 index 7af8be6d..00000000 --- a/core/test/protocol/test-utils.ML +++ /dev/null @@ -1,29 +0,0 @@ - -signature TEST_UTILS = -sig - val unordered_eq : ''a list -> ''a list -> bool; -end - -structure TestUtils : TEST_UTILS = -struct - fun find_and_remove x [] = NONE - | find_and_remove x (y::ys) = - if x = y then SOME ys else - case (find_and_remove x ys) - of NONE => NONE - | SOME zs => SOME (y::zs); - - fun unordered_eq' [] [] = true - | unordered_eq' (x::xs) ys = - (case (find_and_remove x ys) - of NONE => false - | SOME zs => unordered_eq' xs zs) - | unordered_eq' _ _ = false; - - fun unordered_eq xs ys = - (List.length xs) = (List.length ys) andalso (unordered_eq' xs ys) -end -open TestUtils; - -(* vi:et:sw=2:sts=2 -*) diff --git a/core/test/protocol/theory-tests.ML b/core/test/protocol/theory-tests.ML deleted file mode 100644 index 463df0ba..00000000 --- a/core/test/protocol/theory-tests.ML +++ /dev/null @@ -1,32 +0,0 @@ -registerTest "Change/list theories" (fn session => ( -let - val _ = writeRequestByParts session ("TS","apple") "invalid_theory" - val _ = demandErrorResponse session "apple" "BADTHEORY" - - val _ = writeRequestByArgs session ("TL","peach") [] - val theories = readNameListResponse session "peach" - val _ = if (List.exists (fn x => (x = "red_green")) theories) then () else - raise test_exp "red_green theory not listed" - val _ = if (List.exists (fn x => (x = "ghz_w")) theories) then () else - raise test_exp "ghz_w theory not listed" - - val _ = writeRequestByParts session ("TS","pear") "red_green" - val _ = demandOkResponse session "pear" - val _ = writeRequestByParts session ("TG","banana") "" - val _ = demandNameResponse session "banana" "red_green" - - val _ = writeRequestByParts session ("TS","orange") "red_green_blue" - val _ = demandOkResponse session "orange" - val _ = writeRequestByParts session ("TG","kiwi") "" - val _ = demandNameResponse session "kiwi" "red_green_blue" - - val _ = writeRequestByParts session ("TS","plum") "ghz_w" - val _ = demandOkResponse session "plum" - val _ = writeRequestByParts session ("TG","mango") "" - val _ = demandNameResponse session "mango" "ghz_w" -in () end -)); - -(* vi:et:sw=2:sts=2 -*) - diff --git a/core/test/regression-tests.ML b/core/test/regression-tests.ML 
deleted file mode 100644 index e87259e6..00000000 --- a/core/test/regression-tests.ML +++ /dev/null @@ -1,185 +0,0 @@ -(* Red/Green *) -local - open RG_Theory - structure G = Graph - structure HomeoFinder = BangGraphHomeomorphismSearcher(Graph) -in -val _ = Testing.test "ruleset rewriting 1: ladder with red spider (all)" (fn () => let - val ruleset = "rulesets/red_green/basic.rules" - |> Json.read_file - |> RulesetJSON.input - - val graph = "graphs/red_green/d2-ladder_rw1.graph" - |> Json.read_file - |> GraphJSON.input - - val _ = RulesetRewriter.apply_first ruleset graph - - in () end) () - -val _ = Testing.test "ruleset rewriting 2: ladder with red spider (full subgraph)" (fn () => let - val ruleset = "rulesets/red_green/basic.rules" - |> Json.read_file - |> RulesetJSON.input - - val graph = "graphs/red_green/d2-ladder_rw1.graph" - |> Json.read_file - |> GraphJSON.input - - val vset = G.get_vertices graph - val _ = RulesetRewriter.apply_first_in ruleset graph vset - - in () end) () -val _ = Testing.test "ruleset rewriting 2: ladder with red spider (rpt)" (fn () => let - val ruleset = "rulesets/red_green/basic_all.rules" - |> Json.read_file - |> RulesetJSON.input - - val graph = "graphs/red_green/d2-ladder_rw3.graph" - |> Json.read_file - |> GraphJSON.input - val exp_graph = "graphs/red_green/d2-ladder_rw3_norm.graph" - |> Json.read_file - |> GraphJSON.input - - fun do_rw g = - case RulesetRewriter.apply_first ruleset g - of NONE => g - | SOME (_,g') => do_rw g' - - val new_graph = do_rw graph - val _ = if HomeoFinder.is_homeomorphic new_graph exp_graph then () - else (writeln "Expected"; G.print exp_graph; - writeln "Got"; G.print new_graph; - raise ERROR "Rewriting gave wrong graph") - in () end) () - - - val bb_graph = "graphs/red_green/bbox.qgraph" - |> Json.read_file |> GraphJSON.input - val bb_rule = Rule.mk (bb_graph, bb_graph) - - val bb_matches = MatchSearch.match bb_graph bb_graph - val _ = Testing.test "red-green !-graph should match itself" (fn () => ( - if length (Seq.list_of bb_matches) >= 1 then () - else raise ERROR "No matches found" - )) () - - val bb_rewrites = Rewriter.find_rewrites bb_rule bb_graph - val _ = Testing.test "red-green !-graph rule should match its own LHS" (fn () => ( - if length (Seq.list_of bb_rewrites) >= 1 then () - else raise ERROR "No matches found" - )) () - - val bb_graph = "graphs/red_green/gen-bialg-inst.qgraph" - |> Json.read_file |> GraphJSON.input - val bb_rule = Rule.mk (bb_graph, bb_graph) - - val _ = PolyML.exception_trace (fn () => Seq.list_of bb_matches) - - val bb_matches = MatchSearch.match bb_graph bb_graph - val _ = Testing.test "gen-bialg !-graph should match itself" (fn () => ( - if length (Seq.list_of bb_matches) >= 1 then () - else raise ERROR "No matches found" - )) () - - - (*val bb_rewrites = Rewriter.find_rewrites bb_rule bb_graph - val _ = Testing.test "gen-bialg !-graph rule should match its own LHS" (fn () => ( - if length (Seq.list_of bb_rewrites) >= 1 then () - else raise ERROR "No matches found" - )) ()*) - -end - -(* GHZ/W *) -local - open GHZW_Theory - (*structure Controller = GHZW_Controller*) - structure G = Graph - structure HomeoFinder = BangGraphHomeomorphismSearcher(Graph) - (*fun ctrlr_assert_ok Commands.OkResponse = () - | ctrlr_assert_ok _ = raise ERROR "Unexpected response from controller" - fun ctrlr_assert_string (Commands.StringResponse { data }) = data - | ctrlr_assert_string _ = raise ERROR "Unexpected response from controller" - fun ctrlr_assert_count (Commands.CountResponse { count }) = count - | 
ctrlr_assert_count _ = raise ERROR "Unexpected response from controller"*) -in -val _ = Testing.test "GHZ/W ruleset rewriting 1: 2x2" (fn () => let - val ruleset = "rulesets/ghz_w/default.rules" - |> Json.read_file - |> RulesetJSON.input - - val graph = "graphs/ghz_w/2 times 2.graph" - |> Json.read_file - |> GraphJSON.input - val exp_graph = "graphs/ghz_w/4.graph" - |> Json.read_file - |> GraphJSON.input - - fun do_rw g = let - val vset = G.get_vertices g - val rwseq = RulesetRewriter.apply_in ruleset g vset - val rwlist = Seq.list_of rwseq - in - case rwlist - of [] => g - | ((_,g')::_) => do_rw g' - end - - val new_graph = do_rw graph - val _ = if HomeoFinder.is_homeomorphic new_graph exp_graph then () - else (writeln "Expected"; G.print exp_graph; - writeln "Got"; G.print new_graph; - raise ERROR "Rewriting gave wrong graph") - in () end) () - -(*val _ = Testing.test "GHZ/W ruleset rewriting 1: 2x2 (controller)" (fn () => let - open Commands - val _ = ctrlr_assert_ok (Controller.serviceRequest - (ImportRulesetFromFileRequest { - fileName = "rulesets/ghz_w/default.rules", - replace = true - })) - val data = ctrlr_assert_string (Controller.serviceRequest - (LoadGraphRequest { - details = LoadGraphFromFileRequest { - fileName = "graphs/ghz_w/2 times 2.graph" - } - })) - val graph_name = data - val vertex_names = ["Vk","Vh","Vg","Vw","Vc","Vj","Va","Vb"] - val _ = ctrlr_assert_count (Controller.serviceRequest - (AttachRewritesRequest { - graphName = graph_name, - vertexNames = vertex_names - })) - - in () end) ()*) - -(*val _ = Testing.test "GHZ/W ruleset rewriting 2: spider" (fn () => let - open Commands - val _ = ctrlr_assert_ok (Controller.serviceRequest - (ImportRulesetFromFileRequest { - fileName = "rulesets/ghz_w/default.rules", - replace = true - })) - val data = ctrlr_assert_string (Controller.serviceRequest - (LoadGraphRequest { - details = LoadGraphFromFileRequest { - fileName = "graphs/ghz_w/ghz-spider-simple.graph" - } - })) - val graph_name = data - val vertex_names = ["Va","Vb"] - val count = ctrlr_assert_count (Controller.serviceRequest - (AttachRewritesRequest { - graphName = graph_name, - vertexNames = vertex_names - })) - val _ = Testing.assert "Count is 1" (count = 1) - - in () end) () *) -end - -val _ = Testing.assert_no_failed_tests() diff --git a/core/test/rulesets/ghz_w/default.rules b/core/test/rulesets/ghz_w/default.rules deleted file mode 100644 index ede2c2c5..00000000 --- a/core/test/rulesets/ghz_w/default.rules +++ /dev/null @@ -1 +0,0 @@ -{"rules":{"distributivity":{"lhs":{"wire_vertices":{"Vb":{},"Vf":{},"Vg":{}},"node_vertices":{"Va":{"data":"GHZ"},"Vc":{"data":"W"},"Vd":{"data":"GHZ"},"Ve":{"data":"W"}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vb"},"Eb":{"src":"Vc","tgt":"Va"},"Ec":{"src":"Vd","tgt":"Vc"},"Ed":{"src":"Ve","tgt":"Va"},"Ee":{"src":"Vf","tgt":"Ve"},"Ef":{"src":"Vg","tgt":"Ve"}},"bang_boxes":{"Ba":{"contents":["Vd"]},"Bb":{"contents":["Vf"]}}},"rhs":{"wire_vertices":{"Vb":{},"Vf":{},"Vg":{}},"node_vertices":{"Vh":{"data":"W"},"Vi":{"data":"GHZ"},"Vj":{"data":"GHZ"},"Vk":{"data":"W"},"Vl":{"data":"GHZ"},"Vm":{"data":"GHZ"},"Vn":{"data":"W"},"Vo":{"data":"W"}},"dir_edges":{"Eg":{"src":"Vh","tgt":"Vm"},"Eh":{"src":"Vi","tgt":"Vn"},"Ei":{"src":"Vj","tgt":"Vh"},"Ej":{"src":"Vk","tgt":"Vb"},"Ek":{"src":"Vl","tgt":"Vk"},"El":{"src":"Vm","tgt":"Vk"},"Em":{"src":"Vg","tgt":"Vl"},"En":{"src":"Vf","tgt":"Vo"},"Eo":{"src":"Vn","tgt":"Vl"},"Ep":{"src":"Vo","tgt":"Vm"}},"bang_boxes":{"Ba":{"contents":["Vi","Vj"]},"Bb":{"contents":["Vf"]}}}},"ghz 
id":{"lhs":{"wire_vertices":{"Va":{},"Vb":{}},"node_vertices":{"Vc":{"data":"GHZ"}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vc"},"Eb":{"src":"Vc","tgt":"Vb"}}},"rhs":{"wire_vertices":{"Va":{},"Vb":{}},"dir_edges":{"Ec":{"src":"Va","tgt":"Vb"}}}},"ghz spider":{"lhs":{"wire_vertices":{"Va":{},"Vb":{},"Ve":{},"Vf":{}},"node_vertices":{"Vc":{"data":"GHZ"},"Vd":{"data":"GHZ"}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vc"},"Eb":{"src":"Vc","tgt":"Vd"},"Ec":{"src":"Vc","tgt":"Ve"},"Ed":{"src":"Vf","tgt":"Vd"},"Ee":{"src":"Vd","tgt":"Vb"}},"bang_boxes":{"Ba":{"contents":["Va"]},"Bb":{"contents":["Vb"]},"Bc":{"contents":["Ve"]},"Bd":{"contents":["Vf"]}}},"rhs":{"wire_vertices":{"Va":{},"Vb":{},"Ve":{},"Vf":{}},"node_vertices":{"Vg":{"data":"GHZ"}},"dir_edges":{"Ef":{"src":"Va","tgt":"Vg"},"Eg":{"src":"Vg","tgt":"Ve"},"Eh":{"src":"Vf","tgt":"Vg"},"Ei":{"src":"Vg","tgt":"Vb"}},"bang_boxes":{"Ba":{"contents":["Va"]},"Bb":{"contents":["Vb"]},"Bc":{"contents":["Ve"]},"Bd":{"contents":["Vf"]}}}},"ghz speciality":{"lhs":{"wire_vertices":{"Va":{},"Vc":{}},"node_vertices":{"Vb":{"data":"GHZ"}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vb"},"Eb":{"src":"Vb","tgt":"Vc"},"Ec":{"src":"Vb","tgt":"Vb"}},"bang_boxes":{"Ba":{"contents":["Va"]},"Bb":{"contents":["Vc"]}}},"rhs":{"wire_vertices":{"Va":{},"Vc":{}},"node_vertices":{"Vd":{"data":"GHZ"}},"dir_edges":{"Ed":{"src":"Va","tgt":"Vd"},"Ee":{"src":"Vd","tgt":"Vc"}},"bang_boxes":{"Ba":{"contents":["Va"]},"Bb":{"contents":["Vc"]}}}},"ghz copies w":{"lhs":{"wire_vertices":{"Va":{},"Vb":{}},"node_vertices":{"Vc":{"data":"GHZ"},"Vd":{"data":"W"}},"dir_edges":{"Ea":{"src":"Vd","tgt":"Vc"},"Eb":{"src":"Vc","tgt":"Va"},"Ec":{"src":"Vc","tgt":"Vb"}}},"rhs":{"wire_vertices":{"Va":{},"Vb":{}},"node_vertices":{"Ve":{"data":"W"},"Vf":{"data":"W"}},"dir_edges":{"Ed":{"src":"Ve","tgt":"Va"},"Ee":{"src":"Vf","tgt":"Vb"}}}},"w id":{"lhs":{"wire_vertices":{"Va":{},"Vb":{}},"node_vertices":{"Vc":{"data":"W"}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vc"},"Eb":{"src":"Vc","tgt":"Vb"}}},"rhs":{"wire_vertices":{"Va":{},"Vb":{}},"dir_edges":{"Ec":{"src":"Va","tgt":"Vb"}}}},"w spider":{"lhs":{"wire_vertices":{"Va":{},"Vb":{},"Ve":{},"Vf":{}},"node_vertices":{"Vc":{"data":"W"},"Vd":{"data":"W"}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vc"},"Eb":{"src":"Vc","tgt":"Vd"},"Ec":{"src":"Vc","tgt":"Ve"},"Ed":{"src":"Vd","tgt":"Vb"},"Ee":{"src":"Vf","tgt":"Vd"}},"bang_boxes":{"Ba":{"contents":["Va"]},"Bb":{"contents":["Vb"]},"Bc":{"contents":["Ve"]},"Bd":{"contents":["Vf"]}}},"rhs":{"wire_vertices":{"Va":{},"Vb":{},"Ve":{},"Vf":{}},"node_vertices":{"Vg":{"data":"W"}},"dir_edges":{"Ef":{"src":"Va","tgt":"Vg"},"Eg":{"src":"Vg","tgt":"Ve"},"Eh":{"src":"Vf","tgt":"Vg"},"Ei":{"src":"Vg","tgt":"Vb"}},"bang_boxes":{"Ba":{"contents":["Va"]},"Bb":{"contents":["Vb"]},"Bc":{"contents":["Ve"]},"Bd":{"contents":["Vf"]}}}},"w explode":{"lhs":{"wire_vertices":{"Va":{},"Vb":{}},"node_vertices":{"Vc":{"data":"W"},"Vd":{"data":"W"}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vc"},"Eb":{"src":"Vc","tgt":"Vd"},"Ec":{"src":"Vc","tgt":"Vd"},"Ed":{"src":"Vd","tgt":"Vb"}},"bang_boxes":{"Ba":{"contents":["Va"]},"Bb":{"contents":["Vb"]}}},"rhs":{"wire_vertices":{"Va":{},"Vb":{}},"node_vertices":{"Ve":{"data":"W"},"Vf":{"data":"W"}},"undir_edges":{"Eg":{"src":"Ve","tgt":"Ve"},"Eh":{"src":"Vf","tgt":"Vf"}},"dir_edges":{"Ee":{"src":"Va","tgt":"Ve"},"Ef":{"src":"Vf","tgt":"Vb"}},"bang_boxes":{"Ba":{"contents":["Va"]},"Bb":{"contents":["Vb"]}}}}},"tags":{},"active_rules":["distributivity","ghz id","ghz spider","ghz speciality","ghz copies w","w 
id","w spider"]} \ No newline at end of file diff --git a/core/test/rulesets/red_green/basic.rules b/core/test/rulesets/red_green/basic.rules deleted file mode 100644 index 45977f7c..00000000 --- a/core/test/rulesets/red_green/basic.rules +++ /dev/null @@ -1 +0,0 @@ -{"rules":{"green_antiloop":{"lhs":{"wire_vertices":{"b":{}},"node_vertices":{"a":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"a","tgt":"a"}},"bang_boxes":{"Ba":{"contents":["b"]}}},"rhs":{"wire_vertices":{"b":{}},"node_vertices":{"c":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"b"}},"bang_boxes":{"Ba":{"contents":["b"]}}}},"hh_to_id":{"lhs":{"wire_vertices":{"a":{},"d":{}},"node_vertices":{"b":{"data":{"type":"hadamard"}},"c":{"data":{"type":"hadamard"}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"},"Ec":{"src":"c","tgt":"d"}}},"rhs":{"wire_vertices":{"a":{},"d":{}},"undir_edges":{"Ed":{"src":"a","tgt":"d"}}}},"isometry_red":{"lhs":{"wire_vertices":{"c":{},"d":{}},"node_vertices":{"a":{"data":{"type":"X","angle":{"pretty":"0"}}},"b":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"a":{"src":"d","tgt":"b"},"b":{"src":"b","tgt":"a"},"c":{"src":"b","tgt":"a"},"d":{"src":"a","tgt":"c"}}},"rhs":{"wire_vertices":{"c":{},"d":{}},"undir_edges":{"e":{"src":"d","tgt":"c"}}}},"isometry_green":{"lhs":{"wire_vertices":{"c":{},"d":{}},"node_vertices":{"a":{"data":{"type":"Z","angle":{"pretty":"0"}}},"b":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"a":{"src":"d","tgt":"b"},"b":{"src":"b","tgt":"a"},"c":{"src":"b","tgt":"a"},"d":{"src":"a","tgt":"c"}}},"rhs":{"wire_vertices":{"c":{},"d":{}},"undir_edges":{"e":{"src":"d","tgt":"c"}}}},"red_antiloop":{"lhs":{"wire_vertices":{"b":{}},"node_vertices":{"a":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"a","tgt":"a"}},"bang_boxes":{"Ba":{"contents":["b"]}}},"rhs":{"wire_vertices":{"b":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"b"}},"bang_boxes":{"Ba":{"contents":["b"]}}}},"spider.red":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"pretty":"0"}}},"d":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"b"},"Eb":{"src":"a","tgt":"c"},"Ec":{"src":"c","tgt":"d"}},"bang_boxes":{"Ba":{"contents":["a"]},"Bb":{"contents":["b"]}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ed":{"src":"e","tgt":"b"},"Ee":{"src":"a","tgt":"e"}},"bang_boxes":{"Ba":{"contents":["a"]},"Bb":{"contents":["b"]}}}},"spider.green":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"Z","angle":{"pretty":"0"}}},"d":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"b"},"Eb":{"src":"a","tgt":"c"},"Ec":{"src":"c","tgt":"d"}},"bang_boxes":{"Ba":{"contents":["a"]},"Bb":{"contents":["b"]}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ed":{"src":"e","tgt":"b"},"Ee":{"src":"a","tgt":"e"}},"bang_boxes":{"Ba":{"contents":["a"]},"Bb":{"contents":["b"]}}}},"x_copy_pi":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"pretty":"0"}}},"d":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"c"},"Eb":{"src":"a","tgt":"d"},"Ec":{"sr
c":"d","tgt":"b"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"pretty":"0"}}},"f":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ed":{"src":"a","tgt":"e"},"Ee":{"src":"f","tgt":"b"}}}},"x_group1_1in":{"lhs":{"wire_vertices":{"a":{},"d":{}},"node_vertices":{"b":{"data":{"type":"Z","angle":{"pretty":"0"}}},"c":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"},"Ec":{"src":"c","tgt":"d"}}},"rhs":{"wire_vertices":{"a":{},"d":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"pretty":"0"}}},"f":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ed":{"src":"a","tgt":"e"},"Ee":{"src":"e","tgt":"f"},"Ef":{"src":"f","tgt":"d"}}}},"x_group1_2in":{"lhs":{"wire_vertices":{"a":{},"d":{},"e":{}},"node_vertices":{"b":{"data":{"type":"Z","angle":{"pretty":"0"}}},"c":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"},"Ec":{"src":"c","tgt":"d"},"Ed":{"src":"c","tgt":"e"}}},"rhs":{"wire_vertices":{"a":{},"d":{},"e":{}},"node_vertices":{"f":{"data":{"type":"Z","angle":{"pretty":"0"}}},"g":{"data":{"type":"X","angle":{"pretty":"0"}}},"h":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ee":{"src":"f","tgt":"e"},"Ef":{"src":"a","tgt":"g"},"Eg":{"src":"g","tgt":"h"},"Eh":{"src":"g","tgt":"f"},"Ei":{"src":"h","tgt":"d"}}}},"x_h_swap":{"lhs":{"wire_vertices":{"a":{}},"node_vertices":{"b":{"data":{"type":"hadamard"}},"c":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"}}},"rhs":{"wire_vertices":{"a":{}},"node_vertices":{"d":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"a","tgt":"d"}}}},"x_hh_swap":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"hadamard"}},"d":{"data":{"type":"hadamard"}},"e":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"c"},"Eb":{"src":"c","tgt":"e"},"Ec":{"src":"e","tgt":"d"},"Ed":{"src":"d","tgt":"b"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"f":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ee":{"src":"a","tgt":"f"},"Ef":{"src":"f","tgt":"b"}}}},"x_hhh_swap":{"lhs":{"wire_vertices":{"a":{},"b":{},"c":{}},"node_vertices":{"d":{"data":{"type":"hadamard"}},"e":{"data":{"type":"hadamard"}},"f":{"data":{"type":"hadamard"}},"g":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"d"},"Eb":{"src":"b","tgt":"e"},"Ec":{"src":"f","tgt":"c"},"Ed":{"src":"d","tgt":"g"},"Ee":{"src":"e","tgt":"g"},"Ef":{"src":"g","tgt":"f"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"c":{}},"node_vertices":{"h":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Eg":{"src":"a","tgt":"h"},"Eh":{"src":"b","tgt":"h"},"Ei":{"src":"h","tgt":"c"}}}},"z_copy_pi":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"Z","angle":{"pretty":"0"}}},"d":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"c"},"Eb":{"src":"a","tgt":"d"},"Ec":{"src":"d","tgt":"b"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"Z","angle":{"pretty":"0"}}},"f":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ed":{"src":"a","tgt":"e"},"Ee":{"src":"f","tgt":"b"}}}},"z_group1_1in":{"lhs":{"wire_vertices":{"a":{},"d":{}},"node_vertices":{"b":{"data":{"type":"X","angle":{"pretty":"0"}}},"c":{"data":{"type":"Z","angle":{"pretty
":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"},"Ec":{"src":"c","tgt":"d"}}},"rhs":{"wire_vertices":{"a":{},"d":{}},"node_vertices":{"e":{"data":{"type":"Z","angle":{"pretty":"0"}}},"f":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ed":{"src":"a","tgt":"e"},"Ee":{"src":"e","tgt":"f"},"Ef":{"src":"f","tgt":"d"}}}},"z_group1_2in":{"lhs":{"wire_vertices":{"a":{},"d":{},"e":{}},"node_vertices":{"b":{"data":{"type":"X","angle":{"pretty":"0"}}},"c":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"},"Ec":{"src":"c","tgt":"d"},"Ed":{"src":"c","tgt":"e"}}},"rhs":{"wire_vertices":{"a":{},"d":{},"e":{}},"node_vertices":{"f":{"data":{"type":"X","angle":{"pretty":"0"}}},"g":{"data":{"type":"Z","angle":{"pretty":"0"}}},"h":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ee":{"src":"f","tgt":"e"},"Ef":{"src":"a","tgt":"g"},"Eg":{"src":"g","tgt":"h"},"Eh":{"src":"g","tgt":"f"},"Ei":{"src":"h","tgt":"d"}}}},"z_inv_h_swap":{"lhs":{"wire_vertices":{"a":{}},"node_vertices":{"b":{"data":{"type":"hadamard"}},"c":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"}}},"rhs":{"wire_vertices":{"a":{}},"node_vertices":{"d":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"a","tgt":"d"}}}},"z_inv_hh_swap":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"hadamard"}},"d":{"data":{"type":"hadamard"}},"e":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"c"},"Eb":{"src":"c","tgt":"e"},"Ec":{"src":"e","tgt":"d"},"Ed":{"src":"d","tgt":"b"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"f":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ee":{"src":"a","tgt":"f"},"Ef":{"src":"f","tgt":"b"}}}},"z_inv_hhh_swap":{"lhs":{"wire_vertices":{"a":{},"b":{},"c":{}},"node_vertices":{"d":{"data":{"type":"hadamard"}},"e":{"data":{"type":"hadamard"}},"f":{"data":{"type":"hadamard"}},"g":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"d"},"Eb":{"src":"b","tgt":"e"},"Ec":{"src":"f","tgt":"c"},"Ed":{"src":"d","tgt":"g"},"Ee":{"src":"e","tgt":"g"},"Ef":{"src":"g","tgt":"f"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"c":{}},"node_vertices":{"h":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Eg":{"src":"a","tgt":"h"},"Eh":{"src":"b","tgt":"h"},"Ei":{"src":"h","tgt":"c"}}}},"bialgebra1":{"lhs":{"wire_vertices":{"a":{},"b":{},"g":{},"h":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"pretty":"0"}}},"d":{"data":{"type":"X","angle":{"pretty":"0"}}},"e":{"data":{"type":"Z","angle":{"pretty":"0"}}},"f":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"c"},"Eb":{"src":"b","tgt":"d"},"Ec":{"src":"e","tgt":"g"},"Ed":{"src":"f","tgt":"h"},"Ee":{"src":"c","tgt":"e"},"Ef":{"src":"c","tgt":"f"},"Eg":{"src":"d","tgt":"e"},"Eh":{"src":"d","tgt":"f"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"g":{},"h":{}},"node_vertices":{"i":{"data":{"type":"Z","angle":{"pretty":"0"}}},"j":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ei":{"src":"a","tgt":"i"},"Ej":{"src":"b","tgt":"i"},"Ek":{"src":"i","tgt":"j"},"El":{"src":"j","tgt":"g"},"Em":{"src":"j","tgt":"h"}}}},"hopf_1_1":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"pretty":"0"}}},"d":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"c"},"Eb":
{"src":"d","tgt":"b"},"Ec":{"src":"c","tgt":"d"},"Ed":{"src":"c","tgt":"d"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"pretty":"0"}}},"f":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ee":{"src":"a","tgt":"e"},"Ef":{"src":"f","tgt":"b"}}}},"hopf_1_2":{"lhs":{"wire_vertices":{"a":{},"b":{},"c":{}},"node_vertices":{"d":{"data":{"type":"X","angle":{"pretty":"0"}}},"e":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"d"},"Eb":{"src":"c","tgt":"d"},"Ec":{"src":"e","tgt":"b"},"Ed":{"src":"d","tgt":"e"},"Ee":{"src":"d","tgt":"e"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"c":{}},"node_vertices":{"f":{"data":{"type":"X","angle":{"pretty":"0"}}},"g":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ef":{"src":"a","tgt":"f"},"Eg":{"src":"c","tgt":"g"},"Eh":{"src":"g","tgt":"b"}}}},"hopf_2_1":{"lhs":{"wire_vertices":{"a":{},"b":{},"c":{}},"node_vertices":{"d":{"data":{"type":"X","angle":{"pretty":"0"}}},"e":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"d"},"Eb":{"src":"b","tgt":"d"},"Ec":{"src":"e","tgt":"c"},"Ed":{"src":"d","tgt":"e"},"Ee":{"src":"d","tgt":"e"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"c":{}},"node_vertices":{"f":{"data":{"type":"X","angle":{"pretty":"0"}}},"g":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ef":{"src":"a","tgt":"f"},"Eg":{"src":"b","tgt":"f"},"Eh":{"src":"g","tgt":"c"}}}},"hopf_2_2":{"lhs":{"wire_vertices":{"a":{},"b":{},"c":{},"d":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"pretty":"0"}}},"f":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"b","tgt":"e"},"Eb":{"src":"a","tgt":"e"},"Ec":{"src":"f","tgt":"d"},"Ed":{"src":"f","tgt":"c"},"Ee":{"src":"e","tgt":"f"},"Ef":{"src":"e","tgt":"f"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"c":{},"d":{}},"node_vertices":{"g":{"data":{"type":"X","angle":{"pretty":"0"}}},"h":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Eg":{"src":"a","tgt":"g"},"Eh":{"src":"b","tgt":"g"},"Ei":{"src":"h","tgt":"c"},"Ej":{"src":"h","tgt":"d"}}}},"x_abelian1":{"lhs":{"wire_vertices":{"a":{},"c":{}},"node_vertices":{"b":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"}}},"rhs":{"wire_vertices":{"a":{},"c":{}},"undir_edges":{"Ec":{"src":"a","tgt":"c"}}}},"x_copy_0":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"pretty":"0"}}},"d":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"c"},"Eb":{"src":"a","tgt":"d"},"Ec":{"src":"d","tgt":"b"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"pretty":"0"}}},"f":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ed":{"src":"a","tgt":"e"},"Ee":{"src":"f","tgt":"b"}}}},"x_group2":{"lhs":{"wire_vertices":{"c":{}},"node_vertices":{"a":{"data":{"type":"Z","angle":{"pretty":"0"}}},"b":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"c","tgt":"b"},"Eb":{"src":"b","tgt":"a"}}},"rhs":{"wire_vertices":{"c":{}},"node_vertices":{"d":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"d"}}}},"x_group3":{"lhs":{"wire_vertices":{"c":{}},"node_vertices":{"a":{"data":{"type":"X","angle":{"pretty":"0"}}},"b":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"c","tgt":"b"},"Eb":{"src":"a","tgt":"b"}}},"rhs":{"wire_vertices":{"c":{}},"node_vertices":
{"d":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"d"}}}},"x_group4":{"lhs":{"wire_vertices":{"c":{}},"node_vertices":{"a":{"data":{"type":"X","angle":{"pretty":"0"}}},"b":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"c","tgt":"b"},"Eb":{"src":"a","tgt":"b"}}},"rhs":{"wire_vertices":{"c":{}},"node_vertices":{"d":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"d"}}}},"z_abelian1":{"lhs":{"wire_vertices":{"a":{},"c":{}},"node_vertices":{"b":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"}}},"rhs":{"wire_vertices":{"a":{},"c":{}},"undir_edges":{"Ec":{"src":"a","tgt":"c"}}}},"z_copy_0":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"Z","angle":{"pretty":"0"}}},"d":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"c"},"Eb":{"src":"a","tgt":"d"},"Ec":{"src":"d","tgt":"b"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"Z","angle":{"pretty":"0"}}},"f":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ed":{"src":"a","tgt":"e"},"Ee":{"src":"f","tgt":"b"}}}},"z_group2":{"lhs":{"wire_vertices":{"c":{}},"node_vertices":{"a":{"data":{"type":"X","angle":{"pretty":"0"}}},"b":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"c","tgt":"b"},"Eb":{"src":"b","tgt":"a"}}},"rhs":{"wire_vertices":{"c":{}},"node_vertices":{"d":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"d"}}}},"z_group3":{"lhs":{"wire_vertices":{"c":{}},"node_vertices":{"a":{"data":{"type":"Z","angle":{"pretty":"0"}}},"b":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"c","tgt":"b"},"Eb":{"src":"a","tgt":"b"}}},"rhs":{"wire_vertices":{"c":{}},"node_vertices":{"d":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"d"}}}},"z_group4":{"lhs":{"wire_vertices":{"c":{}},"node_vertices":{"a":{"data":{"type":"Z","angle":{"pretty":"0"}}},"b":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"c","tgt":"b"},"Eb":{"src":"a","tgt":"b"}}},"rhs":{"wire_vertices":{"c":{}},"node_vertices":{"d":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"d"}}}}},"tags":{"spider":["spider.red","spider.green"],"bialgebra1":["bialgebra1"]},"active_rules":["green_antiloop","spider.red"]} \ No newline at end of file diff --git a/core/test/rulesets/red_green/basic_all.rules b/core/test/rulesets/red_green/basic_all.rules deleted file mode 100644 index bd38ee3b..00000000 --- a/core/test/rulesets/red_green/basic_all.rules +++ /dev/null @@ -1 +0,0 @@ 
-{"rules":{"hopf":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"pretty":"0"}}},"d":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"c"},"Eb":{"src":"d","tgt":"b"},"Ec":{"src":"c","tgt":"d"},"Ed":{"src":"c","tgt":"d"}},"bang_boxes":{"Ba":{"contents":["a"]},"Bb":{"contents":["b"]}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"pretty":"0"}}},"f":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ee":{"src":"a","tgt":"e"},"Ef":{"src":"f","tgt":"b"}},"bang_boxes":{"Ba":{"contents":["a"]},"Bb":{"contents":["b"]}}}},"hh_to_id":{"lhs":{"wire_vertices":{"a":{},"d":{}},"node_vertices":{"b":{"data":{"type":"hadamard"}},"c":{"data":{"type":"hadamard"}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"},"Ec":{"src":"c","tgt":"d"}}},"rhs":{"wire_vertices":{"a":{},"d":{}},"undir_edges":{"Ed":{"src":"a","tgt":"d"}}}},"x_spider":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}},"d":{"data":{"type":"X","angle":{"vars":{"\\beta":{"num":1,"denom":1}},"pretty":"\\beta"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"b"},"Eb":{"src":"a","tgt":"c"},"Ec":{"src":"c","tgt":"d"}},"bang_boxes":{"Ba":{"contents":["a"]},"Bb":{"contents":["b"]}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"vars":{"\\beta":{"num":1,"denom":1},"\\alpha":{"num":1,"denom":1}},"pretty":"\\beta + \\alpha"}}}},"undir_edges":{"Ed":{"src":"e","tgt":"b"},"Ee":{"src":"a","tgt":"e"}},"bang_boxes":{"Ba":{"contents":["a"]},"Bb":{"contents":["b"]}}}},"x_antiloop":{"lhs":{"wire_vertices":{"b":{}},"node_vertices":{"a":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"a","tgt":"a"}},"bang_boxes":{"Ba":{"contents":["b"]}}},"rhs":{"wire_vertices":{"b":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"b"}},"bang_boxes":{"Ba":{"contents":["b"]}}}},"x_isometry":{"lhs":{"wire_vertices":{"c":{},"d":{}},"node_vertices":{"a":{"data":{"type":"X","angle":{"pretty":"0"}}},"b":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"a":{"src":"d","tgt":"b"},"b":{"src":"b","tgt":"a"},"c":{"src":"b","tgt":"a"},"d":{"src":"a","tgt":"c"}}},"rhs":{"wire_vertices":{"c":{},"d":{}},"undir_edges":{"e":{"src":"d","tgt":"c"}}}},"x_copy_pi":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"d":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"c"},"Eb":{"src":"a","tgt":"d"},"Ec":{"src":"d","tgt":"b"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"f":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ed":{"src":"a","tgt":"e"},"Ee":{"src":"f","tgt":"b"}}}},"x_group1_1in":{"lhs":{"wire_vertices":{"a":{},"d":{}},"node_vertices":{"b":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"c":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"},"Ec":{"src":"c","tgt":"d"}}},"rhs":{"wire_vertices":{"a":{},"d":{}},"node_vertices":{"e
":{"data":{"type":"X","angle":{"pretty":"0"}}},"f":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ed":{"src":"a","tgt":"e"},"Ee":{"src":"e","tgt":"f"},"Ef":{"src":"f","tgt":"d"}}}},"x_group1_2in":{"lhs":{"wire_vertices":{"a":{},"d":{},"e":{}},"node_vertices":{"b":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"c":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"},"Ec":{"src":"c","tgt":"d"},"Ed":{"src":"c","tgt":"e"}}},"rhs":{"wire_vertices":{"a":{},"d":{},"e":{}},"node_vertices":{"f":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"g":{"data":{"type":"X","angle":{"pretty":"0"}}},"h":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ee":{"src":"f","tgt":"e"},"Ef":{"src":"a","tgt":"g"},"Eg":{"src":"g","tgt":"h"},"Eh":{"src":"g","tgt":"f"},"Ei":{"src":"h","tgt":"d"}}}},"x_h_swap":{"lhs":{"wire_vertices":{"a":{}},"node_vertices":{"b":{"data":{"type":"hadamard"}},"c":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"}}},"rhs":{"wire_vertices":{"a":{}},"node_vertices":{"d":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ec":{"src":"a","tgt":"d"}}}},"x_hh_swap":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"hadamard"}},"d":{"data":{"type":"hadamard"}},"e":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"c"},"Eb":{"src":"c","tgt":"e"},"Ec":{"src":"e","tgt":"d"},"Ed":{"src":"d","tgt":"b"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"f":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ee":{"src":"a","tgt":"f"},"Ef":{"src":"f","tgt":"b"}}}},"x_hhh_swap":{"lhs":{"wire_vertices":{"a":{},"b":{},"c":{}},"node_vertices":{"d":{"data":{"type":"hadamard"}},"e":{"data":{"type":"hadamard"}},"f":{"data":{"type":"hadamard"}},"g":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"d"},"Eb":{"src":"b","tgt":"e"},"Ec":{"src":"f","tgt":"c"},"Ed":{"src":"d","tgt":"g"},"Ee":{"src":"e","tgt":"g"},"Ef":{"src":"g","tgt":"f"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"c":{}},"node_vertices":{"h":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Eg":{"src":"a","tgt":"h"},"Eh":{"src":"b","tgt":"h"},"Ei":{"src":"h","tgt":"c"}}}},"z_spider":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}},"d":{"data":{"type":"Z","angle":{"vars":{"\\beta":{"num":1,"denom":1}},"pretty":"\\beta"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"b"},"Eb":{"src":"a","tgt":"c"},"Ec":{"src":"c","tgt":"d"}},"bang_boxes":{"Ba":{"contents":["a"]},"Bb":{"contents":["b"]}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"Z","angle":{"vars":{"\\beta":{"num":1,"denom":1},"\\alpha":{"num":1,"denom":1}},"pretty":"\\beta + 
\\alpha"}}}},"undir_edges":{"Ed":{"src":"e","tgt":"b"},"Ee":{"src":"a","tgt":"e"}},"bang_boxes":{"Ba":{"contents":["a"]},"Bb":{"contents":["b"]}}}},"z_antiloop":{"lhs":{"wire_vertices":{"b":{}},"node_vertices":{"a":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"a","tgt":"a"}},"bang_boxes":{"Ba":{"contents":["b"]}}},"rhs":{"wire_vertices":{"b":{}},"node_vertices":{"c":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"b"}},"bang_boxes":{"Ba":{"contents":["b"]}}}},"z_isometry":{"lhs":{"wire_vertices":{"c":{},"d":{}},"node_vertices":{"a":{"data":{"type":"Z","angle":{"pretty":"0"}}},"b":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"a":{"src":"d","tgt":"b"},"b":{"src":"b","tgt":"a"},"c":{"src":"b","tgt":"a"},"d":{"src":"a","tgt":"c"}}},"rhs":{"wire_vertices":{"c":{},"d":{}},"undir_edges":{"e":{"src":"d","tgt":"c"}}}},"z_copy_pi":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"d":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"c"},"Eb":{"src":"a","tgt":"d"},"Ec":{"src":"d","tgt":"b"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"f":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ed":{"src":"a","tgt":"e"},"Ee":{"src":"f","tgt":"b"}}}},"z_group1_1in":{"lhs":{"wire_vertices":{"a":{},"d":{}},"node_vertices":{"b":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"c":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"},"Ec":{"src":"c","tgt":"d"}}},"rhs":{"wire_vertices":{"a":{},"d":{}},"node_vertices":{"e":{"data":{"type":"Z","angle":{"pretty":"0"}}},"f":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ed":{"src":"a","tgt":"e"},"Ee":{"src":"e","tgt":"f"},"Ef":{"src":"f","tgt":"d"}}}},"z_group1_2in":{"lhs":{"wire_vertices":{"a":{},"d":{},"e":{}},"node_vertices":{"b":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"c":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"},"Ec":{"src":"c","tgt":"d"},"Ed":{"src":"c","tgt":"e"}}},"rhs":{"wire_vertices":{"a":{},"d":{},"e":{}},"node_vertices":{"f":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"g":{"data":{"type":"Z","angle":{"pretty":"0"}}},"h":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ee":{"src":"f","tgt":"e"},"Ef":{"src":"a","tgt":"g"},"Eg":{"src":"g","tgt":"h"},"Eh":{"src":"g","tgt":"f"},"Ei":{"src":"h","tgt":"d"}}}},"z_inv_h_swap":{"lhs":{"wire_vertices":{"a":{}},"node_vertices":{"b":{"data":{"type":"hadamard"}},"c":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"}}},"rhs":{"wire_vertices":{"a":{}},"node_vertices":{"d":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ec":{"src":"a","tgt":"d"}}}},"z_inv_hh_swap":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"hadamard"}},"d":{"data":{"type":"hadama
rd"}},"e":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"c"},"Eb":{"src":"c","tgt":"e"},"Ec":{"src":"e","tgt":"d"},"Ed":{"src":"d","tgt":"b"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"f":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ee":{"src":"a","tgt":"f"},"Ef":{"src":"f","tgt":"b"}}}},"z_inv_hhh_swap":{"lhs":{"wire_vertices":{"a":{},"b":{},"c":{}},"node_vertices":{"d":{"data":{"type":"hadamard"}},"e":{"data":{"type":"hadamard"}},"f":{"data":{"type":"hadamard"}},"g":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"d"},"Eb":{"src":"b","tgt":"e"},"Ec":{"src":"f","tgt":"c"},"Ed":{"src":"d","tgt":"g"},"Ee":{"src":"e","tgt":"g"},"Ef":{"src":"g","tgt":"f"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"c":{}},"node_vertices":{"h":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Eg":{"src":"a","tgt":"h"},"Eh":{"src":"b","tgt":"h"},"Ei":{"src":"h","tgt":"c"}}}},"bialgebra1":{"lhs":{"wire_vertices":{"a":{},"b":{},"g":{},"h":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"pretty":"0"}}},"d":{"data":{"type":"X","angle":{"pretty":"0"}}},"e":{"data":{"type":"Z","angle":{"pretty":"0"}}},"f":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"c"},"Eb":{"src":"b","tgt":"d"},"Ec":{"src":"e","tgt":"g"},"Ed":{"src":"f","tgt":"h"},"Ee":{"src":"c","tgt":"e"},"Ef":{"src":"c","tgt":"f"},"Eg":{"src":"d","tgt":"e"},"Eh":{"src":"d","tgt":"f"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"g":{},"h":{}},"node_vertices":{"i":{"data":{"type":"Z","angle":{"pretty":"0"}}},"j":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ei":{"src":"a","tgt":"i"},"Ej":{"src":"b","tgt":"i"},"Ek":{"src":"i","tgt":"j"},"El":{"src":"j","tgt":"g"},"Em":{"src":"j","tgt":"h"}}}},"scalar elim 1":{"lhs":{"node_vertices":{"Va":{"data":{"type":"X","angle":{"pretty":"0"}}},"Vb":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"Va","tgt":"Vb"}}},"rhs":{}},"scalar elim 2":{"lhs":{"node_vertices":{"Va":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"Vb":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"Va","tgt":"Vb"}}},"rhs":{}},"scalar elim 3":{"lhs":{"node_vertices":{"Va":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}},"Vb":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"Va","tgt":"Vb"}}},"rhs":{}},"scalar elim 
4":{"lhs":{"node_vertices":{"Va":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}},"Vb":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ea":{"src":"Va","tgt":"Vb"}}},"rhs":{}},"x_abelian1":{"lhs":{"wire_vertices":{"a":{},"c":{}},"node_vertices":{"b":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"}}},"rhs":{"wire_vertices":{"a":{},"c":{}},"undir_edges":{"Ec":{"src":"a","tgt":"c"}}}},"x_copy_0":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"pretty":"0"}}},"d":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"c"},"Eb":{"src":"a","tgt":"d"},"Ec":{"src":"d","tgt":"b"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"pretty":"0"}}},"f":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ed":{"src":"a","tgt":"e"},"Ee":{"src":"f","tgt":"b"}}}},"x_copy_0_3":{"lhs":{"wire_vertices":{"a":{},"b":{},"Va":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"pretty":"0"}}},"d":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"c"},"Eb":{"src":"a","tgt":"d"},"Ec":{"src":"d","tgt":"b"},"Ed":{"src":"d","tgt":"Va"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"Va":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"pretty":"0"}}},"f":{"data":{"type":"X","angle":{"pretty":"0"}}},"Vb":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ee":{"src":"f","tgt":"b"},"Ef":{"src":"Vb","tgt":"Va"},"Eg":{"src":"a","tgt":"e"}}}},"x_copy_pi_3":{"lhs":{"wire_vertices":{"a":{},"b":{},"Va":{}},"node_vertices":{"c":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"d":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"c"},"Eb":{"src":"a","tgt":"d"},"Ec":{"src":"d","tgt":"b"},"Ed":{"src":"d","tgt":"Va"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"Va":{}},"node_vertices":{"e":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"f":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"Vb":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ee":{"src":"f","tgt":"b"},"Ef":{"src":"Vb","tgt":"Va"},"Eg":{"src":"a","tgt":"e"}}}},"x_group2":{"lhs":{"wire_vertices":{"c":{}},"node_vertices":{"a":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}},"b":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ea":{"src":"c","tgt":"b"},"Eb":{"src":"b","tgt":"a"}}},"rhs":{"wire_vertices":{"c":{}},"node_vertices":{"d":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":-1,"denom":1}},"pretty":"-\\alpha"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"d"}}}},"x_group3":{"lhs":{"wire_vertices":{"c":{}},"node_vertices":{"a":{"data":{"type":"X","angle":{"pretty":"0"}}},"b":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"c","tgt":"b"},"Eb":{"src":"a","tgt":"b"}}},"rhs":{"wire_vertices":{"c":{}},"node_vertices":{"d":{"data":{"type":"X","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"d"}}}},"x_group4":{"lhs":{"wire_vertices":{"c":{}},"node_vertices":{"a":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi
"}}},"b":{"data":{"type":"Z","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"c","tgt":"b"},"Eb":{"src":"a","tgt":"b"}}},"rhs":{"wire_vertices":{"c":{}},"node_vertices":{"d":{"data":{"type":"X","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"d"}}}},"z_abelian1":{"lhs":{"wire_vertices":{"a":{},"c":{}},"node_vertices":{"b":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ea":{"src":"a","tgt":"b"},"Eb":{"src":"b","tgt":"c"}}},"rhs":{"wire_vertices":{"a":{},"c":{}},"undir_edges":{"Ec":{"src":"a","tgt":"c"}}}},"z_copy_0":{"lhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"c":{"data":{"type":"Z","angle":{"pretty":"0"}}},"d":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"c"},"Eb":{"src":"a","tgt":"d"},"Ec":{"src":"d","tgt":"b"}}},"rhs":{"wire_vertices":{"a":{},"b":{}},"node_vertices":{"e":{"data":{"type":"Z","angle":{"pretty":"0"}}},"f":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ed":{"src":"a","tgt":"e"},"Ee":{"src":"f","tgt":"b"}}}},"z_copy_0_3":{"lhs":{"wire_vertices":{"a":{},"b":{},"Va":{}},"node_vertices":{"c":{"data":{"type":"Z","angle":{"pretty":"0"}}},"d":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"c"},"Eb":{"src":"a","tgt":"d"},"Ec":{"src":"d","tgt":"b"},"Ed":{"src":"d","tgt":"Va"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"Va":{}},"node_vertices":{"e":{"data":{"type":"Z","angle":{"pretty":"0"}}},"f":{"data":{"type":"Z","angle":{"pretty":"0"}}},"Vb":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ee":{"src":"f","tgt":"b"},"Ef":{"src":"Vb","tgt":"Va"},"Eg":{"src":"a","tgt":"e"}}}},"z_copy_pi_3":{"lhs":{"wire_vertices":{"a":{},"b":{},"Va":{}},"node_vertices":{"c":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"d":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"d","tgt":"c"},"Eb":{"src":"a","tgt":"d"},"Ec":{"src":"d","tgt":"b"},"Ed":{"src":"d","tgt":"Va"}}},"rhs":{"wire_vertices":{"a":{},"b":{},"Va":{}},"node_vertices":{"e":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"f":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"Vb":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ee":{"src":"f","tgt":"b"},"Ef":{"src":"Vb","tgt":"Va"},"Eg":{"src":"a","tgt":"e"}}}},"z_group2":{"lhs":{"wire_vertices":{"c":{}},"node_vertices":{"a":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}},"b":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ea":{"src":"c","tgt":"b"},"Eb":{"src":"b","tgt":"a"}}},"rhs":{"wire_vertices":{"c":{}},"node_vertices":{"d":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":-1,"denom":1}},"pretty":"-\\alpha"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"d"}}}},"z_group3":{"lhs":{"wire_vertices":{"c":{}},"node_vertices":{"a":{"data":{"type":"Z","angle":{"pretty":"0"}}},"b":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"c","tgt":"b"},"Eb":{"src":"a","tgt":"b"}}},"rhs":{"wire_vertices":{"c":{}},"node_vertices":{"d":{"data":{"type":"Z","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"d"}}}},"z_group4":{"lhs":{"wire_vertices":{"c":{}},"nod
e_vertices":{"a":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}},"b":{"data":{"type":"X","angle":{"vars":{"\\alpha":{"num":1,"denom":1}},"pretty":"\\alpha"}}}},"undir_edges":{"Ea":{"src":"c","tgt":"b"},"Eb":{"src":"a","tgt":"b"}}},"rhs":{"wire_vertices":{"c":{}},"node_vertices":{"d":{"data":{"type":"Z","angle":{"pi":{"num":1,"denom":1},"pretty":"\\pi"}}}},"undir_edges":{"Ec":{"src":"c","tgt":"d"}}}}},"tags":{},"active_rules":["hopf","hh_to_id","x_spider","x_antiloop","x_copy_pi","x_group1_1in","x_group1_2in","x_h_swap","x_hh_swap","x_hhh_swap","z_spider","z_antiloop","z_copy_pi","z_group1_1in","z_group1_2in","z_inv_h_swap","z_inv_hh_swap","z_inv_hhh_swap","bialgebra1","scalar elim 1","scalar elim 2","scalar elim 3","scalar elim 4","x_abelian1","x_copy_0","x_copy_0_3","x_copy_pi_3","x_group2","x_group3","x_group4","z_abelian1","z_copy_0","z_copy_0_3","z_copy_pi_3","z_group2","z_group3","z_group4"]} \ No newline at end of file diff --git a/core/theories.thy b/core/theories.thy deleted file mode 100644 index 258ce134..00000000 --- a/core/theories.thy +++ /dev/null @@ -1,63 +0,0 @@ -theory theories -imports core -begin - -(* string vertex/edge graphs *) -ML_file "theories/string_ve/data.ML" -ML_file "theories/string_ve/io.ML" -ML_file "theories/string_ve/theory.ML" -(*ML_file "theories/string_ve/test/test.ML";*) - -(* red-green specific vertices, graphs and matching *) -(* graph-derived expressions for R-G graphs *) -ML_file "theories/red_green/data.ML" -ML_file "theories/red_green/io.ML" -ML_file "theories/red_green/theory.ML" -(*ML_file "theories/red_green/test/test.ML";*) -ML_file "theories/red_green/rg_mathematica.ML" - -(* ghz-w specific vertices, graphs, and matching *) -ML_file "theories/ghz_w/data.ML" -ML_file "theories/ghz_w/io.ML" -ML_file "theories/ghz_w/theory.ML" -(*ML_file "theories/ghz_w/test/test.ML";*) - -(* Graphs having vertices with strings as data, substring as matching *) -ML_file "theories/substrings/data.ML" -ML_file "theories/substrings/io.ML" -ML_file "theories/substrings/theory.ML" -(*ML_file "theories/substrings/test/test.ML";*) - -(* Graphs having strings as types, linrat as data and both substrings and linrat - * as matching *) -ML_file "theories/substr_linrat/data.ML" -ML_file "theories/substr_linrat/io.ML" -ML_file "theories/substr_linrat/theory.ML" -(*ML_file "theories/substr_linrat/test/test.ML";*) - -(* rgb specific vertices, graphs, and matching *) -ML_file "theories/red_green_blue/data.ML" -ML_file "theories/red_green_blue/io.ML" -ML_file "theories/red_green_blue/theory.ML" -(*ML_file "theories/red_green_blue/test/test.ML";*) - -(* petri specific vertices, graphs, and matching *) -ML_file "theories/petri/data.ML" -ML_file "theories/petri/io.ML" -ML_file "theories/petri/theory.ML" -(*ML_file "theories/petri/test/test.ML";*) - -(* Tactics as Graphs in Isabelle *) -ML_file "theories/isaplanner_rtechn/data.ML" -ML_file "theories/isaplanner_rtechn/io.ML" -ML_file "theories/isaplanner_rtechn/theory.ML" -(*ML_file "theories/isaplanner_rtechn/test/test.ML";*) - - -(* Pair of dots with rational expressions *) -ML_file "theories/rational_pair/data.ML" -ML_file "theories/rational_pair/io.ML" -ML_file "theories/rational_pair/theory.ML" -(*ML_file "theories/rational_pair/test/test.ML";*) - -end diff --git a/core/theories/generate-no-data-io.py b/core/theories/generate-no-data-io.py deleted file mode 100644 index 3c7a7862..00000000 --- a/core/theories/generate-no-data-io.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python3 - -# eg: 
./generate-no-data-theory.py {0} ghzw GHZ W TICK ZERO > ghzw/theory.ML - -import sys - -code_name = sys.argv[1] -types = sys.argv[3:] -maxlen = len(max(types,key=len)) - -print('(* Generated using {0} *)'.format(' '.join(sys.argv))) - -print('''structure {0}_ComponentDataIO : GRAPH_COMPONENT_DATA_IO -= struct - type nvdata = {0}_Data.nvdata - type edata = {0}_Data.edata - - structure IVDataInputJSON : INPUT_JSON = - struct - open JsonInputUtils - type data = nvdata - val to_lower = String.implode o (map Char.toLower) o String.explode - fun get_type t = - (case to_lower t'''.format(code_name)) -pad = ' '*(maxlen - len(types[0])) -print(' of "{2}" {3}=> {0}_Data.{1}'.format(code_name,types[0],types[0].lower(),pad)) -for t in types[1:]: - pad = ' '*(maxlen - len(t)) - print(' | "{2}" {3}=> {0}_Data.{1}'.format(code_name,t,t.lower(),pad)) -print(''' | _ => raise bad_input_exp ("Unknown vertex type "^t,"")) - fun input (Json.String t) = get_type t - | input (Json.Object obj) = - (get_type (get_string obj "type") - handle bad_input_exp (m,l) => - raise bad_input_exp (m, prepend_prop "type" l)) - | input _ = raise bad_input_exp ("Expected string","type") - end - structure IVDataOutputJSON : OUTPUT_JSON = - struct - open JsonOutputUtils - type data = nvdata'''.format(code_name)) -pad = ' '*(maxlen - len(types[0])) -print(' fun typestr {0}_Data.{1} {2}= "{1}"'.format(code_name,types[0],pad)) -for t in types[1:]: - pad = ' '*(maxlen - len(t)) - print(' | typestr {0}_Data.{1} {2}= "{1}"'.format(code_name,t,pad)) -print(''' fun output d = Json.mk_record [("type",typestr d)] - end - structure EDataInputJSON = InputUnitJSON - structure EDataOutputJSON = OutputUnitJSON - - structure DotStyle : DOT_STYLE = - struct - type nvdata = nvdata - (* TODO: alter these: *)'''.format(code_name)) -print(' fun style_for_ivertex_data {0}_Data.{1} ='.format(code_name,types[0])) -print(' "[style=filled,fillcolor=white,fontcolor=black,shape=circle]"') -for t in types[1:]: - print(' | style_for_ivertex_data {0}_Data.{1} ='.format(code_name,t)) - print(' "[style=filled,fillcolor=white,fontcolor=black,shape=circle]"') -print(''' end -end - -(* Use this for convenience if you don't need annotations *) -structure {0}_GraphicalTheoryIO = GraphicalTheoryIO( - structure Theory = {0}_Theory - structure GraphComponentDataIO = {0}_ComponentDataIO -) -'''.format(code_name)) - -sys.stderr.write("Don't forget to set the Dot styles\n") - diff --git a/core/theories/generate-no-data-theory.py b/core/theories/generate-no-data-theory.py deleted file mode 100644 index 392af337..00000000 --- a/core/theories/generate-no-data-theory.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python3 - -# eg: ./generate-no-data-theory.py GHZW ghzw GHZ W TICK ZERO > ghzw/theory.ML - -import sys - -code_name = sys.argv[1] -pretty_name = sys.argv[2] -types = sys.argv[3:] -maxlen = len(max(types,key=len)) - -print('(* Generated using {0} *)'.format(' '.join(sys.argv))) - -print('''structure {0}_Data = -struct - val pretty_theory_name = Pretty.str "{1}" - type psubst = unit - type subst = psubst - - datatype nvdata = {2} - val default_nvdata = {3} - fun nvdata_eq (a,b) = a = b - - fun match_nvdata (x,y) () = if nvdata_eq (x,y) then SOME () else NONE - fun subst_in_nvdata sub d = (sub,d) - '''.format(code_name, pretty_name, " | ".join(types), types[0], '","'.join(types))) - -pad = ' '*(maxlen - len(types[0])) -print(' fun pretty_nvdata {0} {1}= Pretty.str "{0}"'.format(types[0],pad)) -for t in types[1:]: - pad = ' '*(maxlen - len(t)) - print(' | pretty_nvdata 
{0} {1}= Pretty.str "{0}"'.format(t,pad)) - -print(' ') - -pad = ' '*(maxlen - len(types[0])) -print(' fun default_nvdata_of_typestring "{0}" {1}= {0}'.format(types[0],pad)) -for t in types[1:]: - pad = ' '*(maxlen - len(t)) - print(' | default_nvdata_of_typestring "{0}" {1}= {0}'.format(t,pad)) -print(' | default_nvdata_of_typestring s = raise unknown_typestring_exp s') - -print(''' - open EmptyEdgeData - - fun init_psubst_from_data _ _ = () - val solve_psubst = Seq.single -end - -structure {0}_Theory = GraphicalTheory(structure Data = {0}_Data) -'''.format(code_name)) - diff --git a/core/theories/ghz_w/data.ML b/core/theories/ghz_w/data.ML deleted file mode 100644 index e0aa9013..00000000 --- a/core/theories/ghz_w/data.ML +++ /dev/null @@ -1,32 +0,0 @@ -(* Generated using ./theories/generate-no-data-theory.py GHZW ghzw GHZ W TICK ZERO *) -structure GHZW_Data = -struct - val pretty_theory_name = Pretty.str "ghz_w" - type psubst = unit - type subst = psubst - - datatype nvdata = GHZ | W | TICK | ZERO | GHZ2 - val default_nvdata = GHZ - fun nvdata_eq (a,b) = a = b - - fun match_nvdata (x,y) () = if nvdata_eq (x,y) then SOME () else NONE - fun subst_in_nvdata sub d = (sub,d) - - fun pretty_nvdata GHZ = Pretty.str "GHZ" - | pretty_nvdata W = Pretty.str "W" - | pretty_nvdata GHZ2 = Pretty.str "GHZ2" - | pretty_nvdata TICK = Pretty.str "TICK" - | pretty_nvdata ZERO = Pretty.str "ZERO" - - fun default_nvdata_of_typestring "GHZ" = GHZ - | default_nvdata_of_typestring "W" = W - | default_nvdata_of_typestring "GHZ2" = GHZ2 - | default_nvdata_of_typestring "TICK" = TICK - | default_nvdata_of_typestring "ZERO" = ZERO - | default_nvdata_of_typestring s = raise unknown_typestring_exp s - - open EmptyEdgeData - - fun init_psubst_from_data _ _ = () - val solve_psubst = Seq.single -end diff --git a/core/theories/ghz_w/io.ML b/core/theories/ghz_w/io.ML deleted file mode 100644 index b81d0138..00000000 --- a/core/theories/ghz_w/io.ML +++ /dev/null @@ -1,57 +0,0 @@ -(* Generated using theories/generate-no-data-io.py GHZW ghzw GHZ W TICK ZERO *) -structure GHZW_ComponentDataIO : GRAPH_COMPONENT_DATA_IO -= struct - type nvdata = GHZW_Data.nvdata - type edata = GHZW_Data.edata - - structure IVDataInputJSON : INPUT_JSON = - struct - open JsonInputUtils - type data = nvdata - val to_lower = String.implode o (map Char.toLower) o String.explode - fun get_type t = - (case to_lower t - of "ghz" => GHZW_Data.GHZ - | "w" => GHZW_Data.W - | "ghz2" => GHZW_Data.GHZ2 - | "tick" => GHZW_Data.TICK - | "zero" => GHZW_Data.ZERO - | _ => raise bad_input_exp ("Unknown vertex type "^t,"")) - fun input (Json.String t) = get_type t - | input (Json.Object obj) = - (get_type (get_string obj "type") - handle bad_input_exp (m,l) => - raise bad_input_exp (m, prepend_prop "type" l)) - | input _ = raise bad_input_exp ("Expected string","type") - end - structure IVDataOutputJSON : OUTPUT_JSON = - struct - open JsonOutputUtils - type data = nvdata - fun typestr GHZW_Data.GHZ = "GHZ" - | typestr GHZW_Data.W = "W" - | typestr GHZW_Data.GHZ2 = "GHZ2" - | typestr GHZW_Data.TICK = "TICK" - | typestr GHZW_Data.ZERO = "ZERO" - fun output d = Json.mk_record [("type",typestr d)] - end - structure EDataInputJSON = InputUnitJSON - structure EDataOutputJSON = OutputUnitJSON - - structure DotStyle : DOT_STYLE = - struct - type nvdata = nvdata - fun style_for_ivertex_data GHZW_Data.GHZ = - "[style=filled,fillcolor=white,fontcolor=black,shape=circle]" - | style_for_ivertex_data GHZW_Data.W = - "[style=filled,fillcolor=black,fontcolor=white,shape=circle]" 
- | style_for_ivertex_data GHZW_Data.GHZ2 = - "[style=filled,fillcolor=red,fontcolor=white,shape=circle]" - | style_for_ivertex_data GHZW_Data.TICK = - "[style=filled,fillcolor=red,fontcolor=white,shape=diamond]" - | style_for_ivertex_data GHZW_Data.ZERO = - "[style=filled,fillcolor=blue,fontcolor=black,shape=square]" - end -end - - diff --git a/core/theories/ghz_w/test/graphs/current_format.graph b/core/theories/ghz_w/test/graphs/current_format.graph deleted file mode 100644 index 479fde26..00000000 --- a/core/theories/ghz_w/test/graphs/current_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"node_vertices":{"w":{"data":{"type":"W"}},"ghz":{"data":{"type":"GHZ"}},"tick":{"data":{"type":"TICK"}},"zero":{"data":{"type":"ZERO"}}}} \ No newline at end of file diff --git a/core/theories/ghz_w/test/graphs/v1_format.graph b/core/theories/ghz_w/test/graphs/v1_format.graph deleted file mode 100644 index e877029b..00000000 --- a/core/theories/ghz_w/test/graphs/v1_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"node_vertices":{"w":{"data":"W"},"ghz":{"data":"GHZ"},"tick":{"data":"tick"},"zero":{"data":"zero"}}} \ No newline at end of file diff --git a/core/theories/ghz_w/test/graphs/v2_format.graph b/core/theories/ghz_w/test/graphs/v2_format.graph deleted file mode 100644 index 479fde26..00000000 --- a/core/theories/ghz_w/test/graphs/v2_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"node_vertices":{"w":{"data":{"type":"W"}},"ghz":{"data":{"type":"GHZ"}},"tick":{"data":{"type":"TICK"}},"zero":{"data":{"type":"ZERO"}}}} \ No newline at end of file diff --git a/core/theories/ghz_w/test/test.ML b/core/theories/ghz_w/test/test.ML deleted file mode 100644 index 51b3b97b..00000000 --- a/core/theories/ghz_w/test/test.ML +++ /dev/null @@ -1,34 +0,0 @@ -local - structure Theory = GHZW_Theory - open Theory - fun assert_g_eq msg exp actual = - if Graph.exact_eq exp actual then () - else (writeln "Expected:"; - Graph.print exp; - writeln "Actual:"; - Graph.print actual; - raise ERROR (msg^": graphs differed")) -in - val _ = Testing.test (theory_name^" theory save/restore") (fn () => let - val g = Graph.empty - |> Graph.add_named_vertex (V.mk "ghz") (Graph.NVert GHZW_Data.GHZ) - |> Graph.add_named_vertex (V.mk "w") (Graph.NVert GHZW_Data.W) - |> Graph.add_named_vertex (V.mk "tick") (Graph.NVert GHZW_Data.TICK) - |> Graph.add_named_vertex (V.mk "zero") (Graph.NVert GHZW_Data.ZERO) - val g' = GraphJSON.input (GraphJSON.output g) - val _ = assert_g_eq "input (output g) = g" g g' - val _ = "graphs/v1_format.graph" - |> File_Io.read_json - |> GraphJSON.input - |> assert_g_eq "input v1_format.graph = g" g - val _ = "graphs/v2_format.graph" - |> File_Io.read_json - |> GraphJSON.input - |> assert_g_eq "input v2_format.graph = g" g - (* if the file format changes, do - * cp graphs/current_format.graph graphs/v_format.graph - * and add a new read test (see above) *) - val _ = File_Io.write_json "graphs/current_format.graph" (GraphJSON.output g) - in () end) () - val _ = Testing.assert_no_failed_tests() -end diff --git a/core/theories/ghz_w/theory.ML b/core/theories/ghz_w/theory.ML deleted file mode 100644 index 15ec55f9..00000000 --- a/core/theories/ghz_w/theory.ML +++ /dev/null @@ -1,9 +0,0 @@ -structure GHZW_Theory = GraphicalTheory( - structure Data = GHZW_Data - structure DataIO = GHZW_ComponentDataIO) - -(* Use this for convenience if you don't need annotations *) -(*structure GHZW_GraphicalTheoryIO = GraphicalTheoryIO( - structure Theory = GHZW_Theory - structure GraphComponentDataIO = GHZW_ComponentDataIO -)*) diff --git 
a/core/theories/graphical_theory.ML b/core/theories/graphical_theory.ML deleted file mode 100644 index 181e4d68..00000000 --- a/core/theories/graphical_theory.ML +++ /dev/null @@ -1,91 +0,0 @@ -signature GRAPHICAL_THEORY = -sig - val theory_name : string; - - - (* Graph *) - structure Graph : BANG_GRAPH - - (* Rules = pairs of graph + cached computed matching data *) - structure Rule : BANG_GRAPH_RULE - sharing Rule.Graph.Sharing = Graph.Sharing - - (* sets of rules for a graph *) - structure Ruleset : BANG_GRAPH_RULESET - sharing Ruleset.Sharing.Rule = Rule.Sharing - - (* A matching between two graphs *) - structure Match : BANG_GRAPH_MATCH - sharing Match.Graph.Sharing = Graph.Sharing; - - (* Matching algorithm *) - structure MatchSearch : BG_MATCH_SEARCH - sharing MatchSearch.Sharing.Match = Match.Sharing - sharing MatchSearch.Sharing.Graph = Graph.Sharing - - structure Rewriter : REWRITER - sharing Rewriter.Sharing.Rule = Rule.Sharing - sharing type Rewriter.Sharing.match = Match.T - - (* Sets of rules and matching for those sets *) - structure RulesetRewriter : RULESET_REWRITER - sharing RulesetRewriter.Sharing.Ruleset = Ruleset.Sharing - sharing type RulesetRewriter.Sharing.match = Match.T - - structure GraphJSON : GRAPH_JSON - sharing Graph.Sharing = GraphJSON.Graph.Sharing - - structure RuleJSON : RULE_JSON - sharing Rule.Sharing = RuleJSON.Rule.Sharing - - structure RulesetJSON : RULESET_JSON - sharing Ruleset.Sharing = RulesetJSON.Ruleset.Sharing -end - - - -functor GraphicalTheory( - structure Data : GRAPH_DATA - structure DataIO : GRAPH_COMPONENT_DATA_IO - sharing type Data.nvdata = DataIO.nvdata - sharing type Data.edata = DataIO.edata -) : GRAPHICAL_THEORY = -struct - val theory_name = Pretty.string_of Data.pretty_theory_name; - - structure Graph = BangGraph(structure Data = Data) - structure Rule = BangGraphRule(Graph) - structure Ruleset = BangGraphRuleset(Rule) - structure MatchSearch = GreedyMatchSearch(Graph) - structure Match = MatchSearch.Match - - structure Rewriter = BangGraphRewriter( - structure Rule = Ruleset.BangGraphRule - structure Matcher = MatchSearch - ) - - structure RulesetRewriter = RulesetRewriter( - structure Ruleset = Ruleset - structure Rewriter = Rewriter - ) - - (* NEW json I/O structures *) - structure GraphJSON = GraphJSON( - structure Graph = Graph - structure InputVertexData = DataIO.IVDataInputJSON - structure InputEdgeData = DataIO.EDataInputJSON - structure OutputVertexData = DataIO.IVDataOutputJSON - structure OutputEdgeData = DataIO.EDataOutputJSON - ) - - structure RuleJSON = RuleJSON( - structure Rule = Rule - structure GraphJSON = GraphJSON - ) - - structure RulesetJSON = RulesetJSON( - structure Ruleset = Ruleset - structure RuleJSON = RuleJSON - ) -end - diff --git a/core/theories/isaplanner_rtechn/data.ML b/core/theories/isaplanner_rtechn/data.ML deleted file mode 100644 index 79f6630b..00000000 --- a/core/theories/isaplanner_rtechn/data.ML +++ /dev/null @@ -1,53 +0,0 @@ -structure RTechn_Data = -struct - val pretty_theory_name = Pretty.str "isaplanner_rtechn" - type psubst = unit - type subst = psubst - - structure RTechnKey : NAME where type name = string = StrName; - - datatype nvdata = RTechnNode of RTechnKey.name - | GoalNode of StrName.name (* goal/wire type *) - * StrIntName.NSet.T; (* goal names of this type *) - val default_nvdata = RTechnNode "?" - - fun default_nvdata_of_typestring s = - case s of "RT" => RTechnNode "?" 
- | "Gs" => GoalNode ("?", StrIntName.NSet.empty) - | _ => raise unknown_typestring_exp s - - fun nvdata_eq (RTechnNode k1, RTechnNode k2) = RTechnKey.name_eq (k1,k2) - | nvdata_eq (GoalNode (n1,s1), GoalNode (n2,s2)) = - StrName.name_eq (n1,n2) andalso StrIntName.NSet.eq s1 s2 - | nvdata_eq _ = false - - fun pretty_nvdata (RTechnNode n) = - Pretty.block [Pretty.str "RT(", RTechnKey.pretty_name n, Pretty.str ")"] - | pretty_nvdata (GoalNode (n,s)) = - Pretty.block [Pretty.str "Gs(", StrName.pretty_name n, Pretty.str ",", - StrIntName.NSet.pretty s, Pretty.str ")"] - - fun match_nvdata (RTechnNode n1, RTechnNode n2) subst = - if String.isPrefix n1 n2 then SOME subst else NONE - | match_nvdata (GoalNode (n1,_), GoalNode (n2,_)) subst = - if String.isPrefix n1 n2 then SOME subst else NONE - | match_nvdata _ _ = NONE - - fun subst_in_nvdata sub d = (sub, d) - - type edata = string - val default_edata = "" - - fun default_edata_of_typestring _ = "" - - fun edata_eq (s1,s2) = s1 = s2 - - val pretty_edata = Pretty.str - - fun match_edata (s1,s2) u = if s1 = s2 then SOME u else NONE - - fun subst_in_edata sub d = (sub, d) - - fun init_psubst_from_data _ _ = () - val solve_psubst = Seq.single -end diff --git a/core/theories/isaplanner_rtechn/io.ML b/core/theories/isaplanner_rtechn/io.ML deleted file mode 100644 index bf71a5d8..00000000 --- a/core/theories/isaplanner_rtechn/io.ML +++ /dev/null @@ -1,99 +0,0 @@ -structure RTechn_ComponentDataIO : GRAPH_COMPONENT_DATA_IO -= struct - type nvdata = RTechn_Data.nvdata - type edata = RTechn_Data.edata - - structure IVDataInputJSON : INPUT_JSON = - struct - open JsonInputUtils - type data = nvdata - val to_lower = String.implode o (map Char.toLower) o String.explode - fun goal_name_from_json Json.Null set = set - | goal_name_from_json (Json.Object obj) set = - let - val str = get_string_easy "" obj "string" - val num = get_int_easy 0 obj "number" - in - StrIntName.NSet.ins_fresh (str,num) set - handle StrIntName.NSet.duplicate_exp _ => - raise bad_input_exp - ("goal name \""^str^"\","^ - (Int.toString num)^" is a duplicate entry","") - end - | goal_name_from_json _ _ = raise bad_input_exp ("Expected object","") - fun get_goal_names obj prop = - fold_arr_easy goal_name_from_json obj prop StrIntName.NSet.empty - fun decode_goalnode obj = - RTechn_Data.GoalNode - (get_string_easy "" obj "goal_type", - get_goal_names obj "goal_names") - handle bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "goal" l) - fun input (Json.Object obj) = - (case to_lower (get_string obj "type") - of "rt" => RTechn_Data.RTechnNode - (case Json.lookup obj "value" - of SOME (Json.String s) => s - | SOME Json.Null => "?" - | SOME _ => raise bad_input_exp ("Expected string","value") - | NONE => get_string_easy "?" 
obj "tactic") - | "gs" => - (case Json.lookup obj "value" - of SOME (Json.Object obj') => - (decode_goalnode obj' - handle bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "value" l)) - | SOME _ => raise bad_input_exp ("Expected object","value") - | NONE => decode_goalnode obj) - | t => raise bad_input_exp ("Unknown vertex type "^t,"type")) - | input _ = raise bad_input_exp ("Expected object","") - end - structure IVDataOutputJSON : OUTPUT_JSON = - struct - open JsonOutputUtils - type data = nvdata - fun output (RTechn_Data.RTechnNode n) = - Json.mk_record [("type", "RT"), ("label",n), ("value", n)] - | output (RTechn_Data.GoalNode (n,gset)) = - let - fun to_json (str,i) = - Json.Object ( - Json.empty_obj |> update ("string",Json.String str) - |> update ("number",Json.Int i) - ) - val gns = map to_json (StrIntName.NSet.list_of gset) - val value = - Json.Object ( - Json.empty_obj |> update ("goal_type",Json.String n) - |> update ("goal_names",Json.Array gns) - ) - in - Json.Object ( - Json.empty_obj |> update ("type",Json.String "Gs") - |> update ("label",Json.String n) - |> update ("value",value) - ) - end - end - structure EDataInputJSON : INPUT_JSON = - struct - open JsonInputUtils - type data = edata - fun input (Json.String str) = str - | input _ = raise bad_input_exp ("Expected string","type") - end - structure EDataOutputJSON : OUTPUT_JSON = - struct - open JsonOutputUtils - type data = edata - val output = Json.String - end - - structure DotStyle : DOT_STYLE = - struct - type nvdata = nvdata - fun style_for_ivertex_data _ = - "[style=filled,fillcolor=white,fontcolor=black,shape=circle]" - end -end - diff --git a/core/theories/isaplanner_rtechn/test/graphs/current_format.graph b/core/theories/isaplanner_rtechn/test/graphs/current_format.graph deleted file mode 100644 index 57797e37..00000000 --- a/core/theories/isaplanner_rtechn/test/graphs/current_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"value":"foo","label":"foo","type":"RT"}},"b":{"data":{"type":"Gs","label":"bar","value":{"goal_type":"bar","goal_names":[{"string":"x","number":1},{"string":"y","number":-1}]}}}},"dir_edges":{"e1":{"src":"a","tgt":"w","data":"z"},"e2":{"src":"w","tgt":"b","data":"z"}}} \ No newline at end of file diff --git a/core/theories/isaplanner_rtechn/test/graphs/rippling.graph b/core/theories/isaplanner_rtechn/test/graphs/rippling.graph deleted file mode 100644 index 935f2227..00000000 --- a/core/theories/isaplanner_rtechn/test/graphs/rippling.graph +++ /dev/null @@ -1 +0,0 @@ 
-{"wire_vertices":{"Va":{},"Ve":{},"Vf":{},"Vh":{}},"node_vertices":{"l":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"o":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"p":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"q":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"r":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"s":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"t":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"u":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"v":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"w":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"x":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"y":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"z":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"aa":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ab":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ac":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ad":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ae":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"af":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ag":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ah":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ai":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"aj":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ak":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"al":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"am":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"an":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ao":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ap":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"aq":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ar":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"as":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"at":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"au":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"av":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"aw":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ax":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ay":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"az":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"ba":{"data":{"type":"Gs","goal_type":"","goal_names":[]}},"or":{"data":{"type":"RT","tactic":"or"}},"os":{"data":{"type":"RT","tactic":"os"}},"induct":{"data":{"type":"RT","tactic":"induct"}},"merge_id":{"data":{"type":"RT","tactic":"merge_id"}},"merge_ie":{"data":{"type":"RT","tactic":"merge_ie"}},"merge_if":{"data":{"type":"RT","tactic":"merge_if"}},"merge_ig":{"data":{"type":"RT","tactic":"merge_ig"}},"merge_ih":{"data":{"type":"RT","tactic":"merge_ih"}},"merge_ii":{"data":{"type":"RT","tactic":"merge_ii"}},"merge_ij":{"data":{"type":"RT","tactic":"merge_ij"}},"rippling.fertilisation_split":{"data":{"type":"RT","tactic":"rippling.fertilisation_split"}},"rippling.filter_equality_goals":{"data":{"type":"RT","tactic":"rippling.filter_equality_goals"}},"rippling.get_and_filter_equality_skeletons":{"data":{"type":"RT","tactic":"rippling.get_and_filter_equality_skeletons"}},"rippling.id_base":{"data":{"type":"RT","tactic":"rippling.id_base"}},"rippling.ripple_step":{"data":{"type":"RT","tactic":"rippling.ripple_step"}},"rippling.start_rippling":{"data":{"type":"RT","tactic":"rippling.start_rippling"}},"rippling.strong_fert":{"data":{"type":"RT","tactic":"rippling.strong_fert"}},"ri
ppling.weak_fert_subst_eq_sym_true":{"data":{"type":"RT","tactic":"rippling.weak_fert_subst_eq_sym_true"}},"rippling.weak_fert_subst_eq_sym_false":{"data":{"type":"RT","tactic":"rippling.weak_fert_subst_eq_sym_false"}},"rippling.weak_fert_subst_in_lhs":{"data":{"type":"RT","tactic":"rippling.weak_fert_subst_in_lhs"}},"rippling.weak_fert_subst_in_rhs":{"data":{"type":"RT","tactic":"rippling.weak_fert_subst_in_rhs"}},"simp_no_asm_simp_changed":{"data":{"type":"RT","tactic":"simp_no_asm_simp_changed"}},"simp_no_asm_simp_changee":{"data":{"type":"RT","tactic":"simp_no_asm_simp_changed"}},"split_id":{"data":{"type":"RT","tactic":"split_if"}}},"dir_edges":{"goal1":{"src":"ba","tgt":"simp_no_asm_simp_changed","data":""},"goal2":{"src":"as","tgt":"simp_no_asm_simp_changee","data":""},"goal.base0":{"src":"induct","tgt":"ay","data":""},"goal.base2":{"src":"rippling.id_base","tgt":"ba","data":""},"goal.base4":{"src":"ay","tgt":"rippling.id_base","data":""},"goal.conds0":{"src":"rippling.strong_fert","tgt":"ar","data":""},"goal.conds1":{"src":"ar","tgt":"Vf","data":""},"goal.conds2":{"src":"rippling.weak_fert_subst_in_rhs","tgt":"r","data":""},"goal.conds3":{"src":"r","tgt":"merge_id","data":""},"goal.conds4":{"src":"merge_id","tgt":"t","data":""},"goal.conds5":{"src":"t","tgt":"merge_ih","data":""},"goal.conds6":{"src":"rippling.weak_fert_subst_eq_sym_true","tgt":"ah","data":""},"goal.conds7":{"src":"ah","tgt":"merge_if","data":""},"goal.conds8":{"src":"rippling.weak_fert_subst_eq_sym_false","tgt":"ab","data":""},"goal.conds9":{"src":"ab","tgt":"merge_if","data":""},"goal.conds10":{"src":"merge_if","tgt":"ad","data":""},"goal.conds11":{"src":"ad","tgt":"merge_ih","data":""},"goal.conds12":{"src":"merge_ih","tgt":"as","data":""},"goal.conds13":{"src":"rippling.weak_fert_subst_in_lhs","tgt":"x","data":""},"goal.conds14":{"src":"x","tgt":"merge_id","data":""},"goal.inductable0":{"src":"Va","tgt":"l","data":""},"goal.inductable1":{"src":"l","tgt":"induct","data":""},"goal.rippling.active0":{"src":"rippling.start_rippling","tgt":"av","data":""},"goal.rippling.active2":{"src":"rippling.ripple_step","tgt":"av","data":""},"goal.rippling.active4":{"src":"av","tgt":"rippling.ripple_step","data":""},"goal.rippling.active0.fully0":{"src":"rippling.fertilisation_split","tgt":"aq","data":""},"goal.rippling.active0.fully3":{"src":"aq","tgt":"rippling.strong_fert","data":""},"goal.rippling.active0.not_fully0":{"src":"rippling.fertilisation_split","tgt":"q","data":""},"goal.rippling.active0.not_fully2":{"src":"rippling.get_and_filter_equality_skeletons","tgt":"ap","data":""},"goal.rippling.active0.not_fully4":{"src":"q","tgt":"rippling.get_and_filter_equality_skeletons","data":""},"goal.rippling.active0.not_fully5":{"src":"ap","tgt":"rippling.filter_equality_goals","data":""},"goal.rippling.active0.not_fully0.eq0":{"src":"rippling.filter_equality_goals","tgt":"an","data":""},"goal.rippling.active0.not_fully0.eq3":{"src":"aa","tgt":"rippling.weak_fert_subst_in_rhs","data":""},"goal.rippling.active0.not_fully0.eq4":{"src":"an","tgt":"or","data":""},"goal.rippling.active0.not_fully0.eq5":{"src":"w","tgt":"rippling.weak_fert_subst_in_lhs","data":""},"goal.rippling.active0.not_fully0.eq0.fst0":{"src":"or","tgt":"w","data":""},"goal.rippling.active0.not_fully0.eq0.snd0":{"src":"or","tgt":"aa","data":""},"goal.rippling.active0.not_fully0.noteq0":{"src":"rippling.filter_equality_goals","tgt":"aw","data":""},"goal.rippling.active0.not_fully0.noteq3":{"src":"ak","tgt":"rippling.weak_fert_subst_eq_sym_false","data":""},"goal.rip
pling.active0.not_fully0.noteq4":{"src":"aw","tgt":"os","data":""},"goal.rippling.active0.not_fully0.noteq5":{"src":"ag","tgt":"rippling.weak_fert_subst_eq_sym_true","data":""},"goal.rippling.active0.not_fully0.noteq0.fst0":{"src":"os","tgt":"ag","data":""},"goal.rippling.active0.not_fully0.noteq0.snd0":{"src":"os","tgt":"ak","data":""},"goal.rippling.end0":{"src":"rippling.ripple_step","tgt":"at","data":""},"goal.rippling.end3":{"src":"at","tgt":"rippling.fertilisation_split","data":""},"goal.simp0":{"src":"simp_no_asm_simp_changed","tgt":"o","data":""},"goal.simp1":{"src":"o","tgt":"merge_ij","data":""},"goal.simp2":{"src":"simp_no_asm_simp_changee","tgt":"al","data":""},"goal.simp3":{"src":"al","tgt":"merge_ij","data":""},"goal.simp4":{"src":"merge_ij","tgt":"ax","data":""},"goal.simp5":{"src":"ax","tgt":"Vh","data":""},"goal.step0":{"src":"induct","tgt":"az","data":""},"goal.step3":{"src":"az","tgt":"rippling.start_rippling","data":""},"goal.unknown0":{"src":"rippling.ripple_step","tgt":"au","data":""},"goal.unknown1":{"src":"au","tgt":"Ve","data":""},"goal.unknown2":{"src":"rippling.weak_fert_subst_in_rhs","tgt":"s","data":""},"goal.unknown3":{"src":"s","tgt":"merge_ie","data":""},"goal.unknown4":{"src":"merge_ie","tgt":"u","data":""},"goal.unknown5":{"src":"u","tgt":"merge_ii","data":""},"goal.unknown6":{"src":"rippling.weak_fert_subst_eq_sym_true","tgt":"ai","data":""},"goal.unknown7":{"src":"ai","tgt":"merge_ig","data":""},"goal.unknown8":{"src":"rippling.weak_fert_subst_eq_sym_false","tgt":"ac","data":""},"goal.unknown9":{"src":"ac","tgt":"merge_ig","data":""},"goal.unknown10":{"src":"merge_ig","tgt":"ae","data":""},"goal.unknown11":{"src":"ae","tgt":"merge_ii","data":""},"goal.unknown12":{"src":"merge_ii","tgt":"as","data":""},"goal.unknown13":{"src":"rippling.weak_fert_subst_in_lhs","tgt":"y","data":""},"goal.unknown14":{"src":"y","tgt":"merge_ie","data":""},"result.skeleton0":{"src":"rippling.get_and_filter_equality_skeletons","tgt":"p","data":""},"result.skeleton2":{"src":"split_id","tgt":"ao","data":""},"result.skeleton3":{"src":"z","tgt":"rippling.weak_fert_subst_in_rhs","data":""},"result.skeleton4":{"src":"ao","tgt":"os","data":""},"result.skeleton5":{"src":"v","tgt":"rippling.weak_fert_subst_in_lhs","data":""},"result.skeleton6":{"src":"aj","tgt":"rippling.weak_fert_subst_eq_sym_false","data":""},"result.skeleton7":{"src":"af","tgt":"rippling.weak_fert_subst_eq_sym_true","data":""},"result.skeleton9":{"src":"p","tgt":"split_id","data":""},"result.skeleton10":{"src":"split_id","tgt":"am","data":""},"result.skeleton11":{"src":"am","tgt":"or","data":""},"result.skeleton0.fst0":{"src":"or","tgt":"v","data":""},"result.skeleton0.fst1":{"src":"os","tgt":"af","data":""},"result.skeleton0.snd0":{"src":"or","tgt":"z","data":""},"result.skeleton0.snd1":{"src":"os","tgt":"aj","data":""}}} \ No newline at end of file diff --git a/core/theories/isaplanner_rtechn/test/graphs/v1_format.graph b/core/theories/isaplanner_rtechn/test/graphs/v1_format.graph deleted file mode 100644 index e19ed63c..00000000 --- a/core/theories/isaplanner_rtechn/test/graphs/v1_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"type":"RT","tactic":"foo"}},"b":{"data":{"type":"Gs","goal_type":"bar","goal_names":[{"string":"x","number":1},{"string":"y","number":-1}]}}},"dir_edges":{"e1":{"src":"a","tgt":"w","data":"z"},"e2":{"src":"w","tgt":"b","data":"z"}}} \ No newline at end of file diff --git a/core/theories/isaplanner_rtechn/test/graphs/v2_format.graph 
b/core/theories/isaplanner_rtechn/test/graphs/v2_format.graph deleted file mode 100644 index 57797e37..00000000 --- a/core/theories/isaplanner_rtechn/test/graphs/v2_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"value":"foo","label":"foo","type":"RT"}},"b":{"data":{"type":"Gs","label":"bar","value":{"goal_type":"bar","goal_names":[{"string":"x","number":1},{"string":"y","number":-1}]}}}},"dir_edges":{"e1":{"src":"a","tgt":"w","data":"z"},"e2":{"src":"w","tgt":"b","data":"z"}}} \ No newline at end of file diff --git a/core/theories/isaplanner_rtechn/test/test.ML b/core/theories/isaplanner_rtechn/test/test.ML deleted file mode 100644 index 2c45ee72..00000000 --- a/core/theories/isaplanner_rtechn/test/test.ML +++ /dev/null @@ -1,37 +0,0 @@ -local - open RTechn_Theory - fun assert_g_eq msg exp actual = - if Graph.exact_eq exp actual then () - else (writeln "Expected:"; - Graph.print exp; - writeln "Actual:"; - Graph.print actual; - raise ERROR (msg^": graphs differed")) -in - val _ = Testing.test (theory_name^" theory save/restore") (fn () => let - val g = Graph.empty - |> Graph.add_named_vertex (V.mk "a") - (Graph.NVert (RTechn_Data.RTechnNode "foo")) - |> Graph.add_named_vertex (V.mk "b") - (Graph.NVert (RTechn_Data.GoalNode ("bar", - StrIntName.NSet.of_list [("x",1),("y",~1)]))) - |> Graph.add_named_vertex (V.mk "w") Graph.WVert - |> Graph.add_named_edge (E.mk "e1") (Directed,"z") (V.mk "a") (V.mk "w") - |> Graph.add_named_edge (E.mk "e2") (Directed,"z") (V.mk "w") (V.mk "b") - val g' = GraphJSON.input (GraphJSON.output g) - val _ = assert_g_eq "input (output g) = g" g g' - val _ = "graphs/v1_format.graph" - |> File_Io.read_json - |> GraphJSON.input - |> assert_g_eq "input v1_format.graph = g" g - val _ = "graphs/v2_format.graph" - |> File_Io.read_json - |> GraphJSON.input - |> assert_g_eq "input v2_format.graph = g" g - (* if the file format changes, do - * cp graphs/current_format.graph graphs/v_format.graph - * and add a new read test (see above) *) - val _ = File_Io.write_json "graphs/current_format.graph" (GraphJSON.output g) - in () end) () - val _ = Testing.assert_no_failed_tests() -end diff --git a/core/theories/isaplanner_rtechn/theory.ML b/core/theories/isaplanner_rtechn/theory.ML deleted file mode 100644 index 39234ee5..00000000 --- a/core/theories/isaplanner_rtechn/theory.ML +++ /dev/null @@ -1,12 +0,0 @@ - - -structure RTechn_Theory = GraphicalTheory( - structure Data = RTechn_Data - structure DataIO = RTechn_ComponentDataIO) - -(* Use this for convenience if you don't need annotations *) -(*structure RTechn_GraphicalTheoryIO = GraphicalTheoryIO( - structure Theory = RTechn_Theory - structure GraphComponentDataIO = RTechn_ComponentDataIO -)*) - diff --git a/core/theories/petri/data.ML b/core/theories/petri/data.ML deleted file mode 100644 index 00e714aa..00000000 --- a/core/theories/petri/data.ML +++ /dev/null @@ -1,30 +0,0 @@ -(* Generated using ./theories/generate-no-data-theory.py Petri petri COPY SWITCH BUF0 BUF1 *) -structure Petri_Data = -struct - val pretty_theory_name = Pretty.str "petri" - type psubst = unit - type subst = psubst - - datatype nvdata = COPY | SWITCH | BUF0 | BUF1 - val default_nvdata = COPY - fun nvdata_eq (a,b) = a = b - - fun match_nvdata (x,y) () = if nvdata_eq (x,y) then SOME () else NONE - fun subst_in_nvdata sub d = (sub,d) - - fun pretty_nvdata COPY = Pretty.str "COPY" - | pretty_nvdata SWITCH = Pretty.str "SWITCH" - | pretty_nvdata BUF0 = Pretty.str "BUF0" - | pretty_nvdata BUF1 = Pretty.str 
"BUF1" - - fun default_nvdata_of_typestring "COPY" = COPY - | default_nvdata_of_typestring "SWITCH" = SWITCH - | default_nvdata_of_typestring "BUF0" = BUF0 - | default_nvdata_of_typestring "BUF1" = BUF1 - | default_nvdata_of_typestring s = raise unknown_typestring_exp s - - open EmptyEdgeData - - fun init_psubst_from_data _ _ = () - val solve_psubst = Seq.single -end diff --git a/core/theories/petri/io.ML b/core/theories/petri/io.ML deleted file mode 100644 index 15bb2ac2..00000000 --- a/core/theories/petri/io.ML +++ /dev/null @@ -1,52 +0,0 @@ -(* Generated using theories/generate-no-data-io.py Petri petri COPY SWITCH BUF0 BUF1 *) -structure Petri_ComponentDataIO : GRAPH_COMPONENT_DATA_IO -= struct - type nvdata = Petri_Data.nvdata - type edata = Petri_Data.edata - - structure IVDataInputJSON : INPUT_JSON = - struct - open JsonInputUtils - type data = nvdata - val to_lower = String.implode o (map Char.toLower) o String.explode - fun get_type t = - (case to_lower t - of "copy" => Petri_Data.COPY - | "switch" => Petri_Data.SWITCH - | "buf0" => Petri_Data.BUF0 - | "buf1" => Petri_Data.BUF1 - | _ => raise bad_input_exp ("Unknown vertex type "^t,"")) - fun input (Json.String t) = get_type t - | input (Json.Object obj) = - (get_type (get_string obj "type") - handle bad_input_exp (m,l) => - raise bad_input_exp (m, prepend_prop "type" l)) - | input _ = raise bad_input_exp ("Expected string","type") - end - structure IVDataOutputJSON : OUTPUT_JSON = - struct - open JsonOutputUtils - type data = nvdata - fun typestr Petri_Data.COPY = "COPY" - | typestr Petri_Data.SWITCH = "SWITCH" - | typestr Petri_Data.BUF0 = "BUF0" - | typestr Petri_Data.BUF1 = "BUF1" - fun output d = Json.mk_record [("type",typestr d)] - end - structure EDataInputJSON = InputUnitJSON - structure EDataOutputJSON = OutputUnitJSON - - structure DotStyle : DOT_STYLE = - struct - type nvdata = nvdata - fun style_for_ivertex_data Petri_Data.BUF0 = - "[style=filled,fillcolor=white,fontcolor=black,shape=square]" - | style_for_ivertex_data Petri_Data.BUF1 = - "[style=filled,fillcolor=black,fontcolor=white,shape=square]" - | style_for_ivertex_data Petri_Data.COPY = - "[style=filled,fillcolor=red,fontcolor=white,shape=circle]" - | style_for_ivertex_data Petri_Data.SWITCH = - "[style=filled,fillcolor=blue,fontcolor=black,shape=circle]"; - end -end - diff --git a/core/theories/petri/test/graphs/current_format.graph b/core/theories/petri/test/graphs/current_format.graph deleted file mode 100644 index 9460e9fe..00000000 --- a/core/theories/petri/test/graphs/current_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"c":{"data":{"type":"COPY"}},"s":{"data":{"type":"SWITCH"}},"0":{"data":{"type":"BUF0"}},"1":{"data":{"type":"BUF1"}}},"dir_edges":{"e1":{"src":"c","tgt":"w"},"e2":{"src":"w","tgt":"s"}}} \ No newline at end of file diff --git a/core/theories/petri/test/graphs/v1_format.graph b/core/theories/petri/test/graphs/v1_format.graph deleted file mode 100644 index 4c729440..00000000 --- a/core/theories/petri/test/graphs/v1_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"c":{"data":"COPY"},"s":{"data":"SWITCH"},"0":{"data":"BUF0"},"1":{"data":"BUF1"}},"dir_edges":{"e1":{"src":"c","tgt":"w"},"e2":{"src":"w","tgt":"s"}}} \ No newline at end of file diff --git a/core/theories/petri/test/graphs/v2_format.graph b/core/theories/petri/test/graphs/v2_format.graph deleted file mode 100644 index 9460e9fe..00000000 --- a/core/theories/petri/test/graphs/v2_format.graph +++ /dev/null 
@@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"c":{"data":{"type":"COPY"}},"s":{"data":{"type":"SWITCH"}},"0":{"data":{"type":"BUF0"}},"1":{"data":{"type":"BUF1"}}},"dir_edges":{"e1":{"src":"c","tgt":"w"},"e2":{"src":"w","tgt":"s"}}} \ No newline at end of file diff --git a/core/theories/petri/test/test.ML b/core/theories/petri/test/test.ML deleted file mode 100644 index 45083697..00000000 --- a/core/theories/petri/test/test.ML +++ /dev/null @@ -1,36 +0,0 @@ -local - open Petri_Theory - fun assert_g_eq msg exp actual = - if Graph.exact_eq exp actual then () - else (writeln "Expected:"; - Graph.print exp; - writeln "Actual:"; - Graph.print actual; - raise ERROR (msg^": graphs differed")) -in - val _ = Testing.test (theory_name^" theory save/restore") (fn () => let - val g = Graph.empty - |> Graph.add_named_vertex (V.mk "c") (Graph.NVert Petri_Data.COPY) - |> Graph.add_named_vertex (V.mk "s") (Graph.NVert Petri_Data.SWITCH) - |> Graph.add_named_vertex (V.mk "0") (Graph.NVert Petri_Data.BUF0) - |> Graph.add_named_vertex (V.mk "1") (Graph.NVert Petri_Data.BUF1) - |> Graph.add_named_vertex (V.mk "w") Graph.WVert - |> Graph.add_named_edge (E.mk "e1") (Directed,()) (V.mk "c") (V.mk "w") - |> Graph.add_named_edge (E.mk "e2") (Directed,()) (V.mk "w") (V.mk "s") - val g' = GraphJSON.input (GraphJSON.output g) - val _ = assert_g_eq "input (output g) = g" g g' - val _ = "graphs/v1_format.graph" - |> File_Io.read_json - |> GraphJSON.input - |> assert_g_eq "input v1_format.graph = g" g - val _ = "graphs/v2_format.graph" - |> File_Io.read_json - |> GraphJSON.input - |> assert_g_eq "input v2_format.graph = g" g - (* if the file format changes, do - * cp graphs/current_format.graph graphs/v_format.graph - * and add a new read test (see above) *) - val _ = File_Io.write_json "graphs/current_format.graph" (GraphJSON.output g) - in () end) () - val _ = Testing.assert_no_failed_tests() -end diff --git a/core/theories/petri/theory.ML b/core/theories/petri/theory.ML deleted file mode 100644 index 91a25c59..00000000 --- a/core/theories/petri/theory.ML +++ /dev/null @@ -1,12 +0,0 @@ - - -structure Petri_Theory = GraphicalTheory( - structure Data = Petri_Data - structure DataIO = Petri_ComponentDataIO) - - -(* Use this for convenience if you don't need annotations *) -(*structure Petri_GraphicalTheoryIO = GraphicalTheoryIO( - structure Theory = Petri_Theory - structure GraphComponentDataIO = Petri_ComponentDataIO -)*) diff --git a/core/theories/rational_pair/data.ML b/core/theories/rational_pair/data.ML deleted file mode 100644 index 0b4fae97..00000000 --- a/core/theories/rational_pair/data.ML +++ /dev/null @@ -1,51 +0,0 @@ -structure RP_Data = -struct - val pretty_theory_name = Pretty.str "rational_pair" - type psubst = LinratMatcher.psubst - type subst = LinratMatcher.subst - - datatype nvdata = Bnd of LinratExpr.T (* Black node *) - | Wnd of LinratExpr.T (* White node *) - val default_nvdata = Bnd LinratExpr.zero - - fun default_nvdata_of_typestring s = - case s of "B" => Bnd LinratExpr.zero - | "W" => Wnd LinratExpr.zero - | _ => raise unknown_typestring_exp s - - fun nvdata_eq (Bnd a, Bnd b) = LinratExpr.eq a b - | nvdata_eq (Wnd a, Wnd b) = LinratExpr.eq a b - | nvdata_eq _ = false - - fun pretty_nvdata (Bnd a) = - Pretty.block [Pretty.str "B(", LinratExpr.pretty a, Pretty.str ")"] - | pretty_nvdata (Wnd a) = - Pretty.block [Pretty.str "W(", LinratExpr.pretty a, Pretty.str ")"] - - fun match_nvdata (Bnd a1,Bnd a2) m = LinratMatcher.match (a1,a2) m - | match_nvdata (Wnd a1,Wnd a2) m = 
LinratMatcher.match (a1,a2) m - | match_nvdata _ _ = NONE - - fun subst_in_nvdata u (Bnd a) = - let val (sub',a') = LinratMatcher.subst_in_expr u a - in (sub', Bnd a') end - | subst_in_nvdata u (Wnd a) = - let val (sub',a') = LinratMatcher.subst_in_expr u a - in (sub', Wnd a') end - - open EmptyEdgeData - - local - fun pull_names (nvtab,_) = X.NSet.empty - |> VTab.fold ( - fn (_,Bnd a) => X.NSet.union_merge (LinratExpr.free_vars a) - | (_,Wnd a) => X.NSet.union_merge (LinratExpr.free_vars a) - | _ => I - ) nvtab - in - fun init_psubst_from_data p_data t_data = - LinratMatcher.init_psubst_from_names (pull_names p_data, pull_names t_data) - end - - fun solve_psubst ps = Seq.single (LinratMatcher.solve_psubst ps) -end \ No newline at end of file diff --git a/core/theories/rational_pair/io.ML b/core/theories/rational_pair/io.ML deleted file mode 100644 index 84f4b18e..00000000 --- a/core/theories/rational_pair/io.ML +++ /dev/null @@ -1,66 +0,0 @@ -(* Data input for red-green graphs. The input strives to be as backward-compatible - * as possible, while output always outputs the newest format. As a consequence, - * the old Quantomatic GUI will no longer talk to the core. *) - -structure RP_ComponentDataIO : GRAPH_COMPONENT_DATA_IO -= struct - type nvdata = RP_Data.nvdata - type edata = RP_Data.edata - - structure IVDataInputJSON : INPUT_JSON = - struct - open JsonInputUtils - structure L = InputLinratJSON - type data = nvdata - fun get_expr obj = - case Json.lookup obj "value" - of SOME (Json.String s) => - (LinratExpr.parse s - handle LinratExpr.parse_exp => - raise bad_input_exp ("Could not parse \""^s^"\"","")) - | NONE => LinratExpr.zero - val to_lower = String.implode o (map Char.toLower) o String.explode - fun input (Json.String t) = - (case to_lower t - of "b" => RP_Data.Bnd LinratExpr.zero - | "w" => RP_Data.Wnd LinratExpr.zero - | _ => raise bad_input_exp ("Unknown vertex type "^t,"")) - | input (Json.Object obj) = - (case to_lower (get_string obj "type") - of "b" => RP_Data.Bnd (get_expr obj) - | "w" => RP_Data.Wnd (get_expr obj) - | t => raise bad_input_exp ("Unknown vertex type "^t,"type")) - | input _ = raise bad_input_exp ("Expected object","") - end - structure IVDataOutputJSON : OUTPUT_JSON = - struct - open JsonOutputUtils - structure L = OutputLinratJSON - type data = nvdata - fun expr_to_jstring a = Json.String - (case Pretty.string_of (LinratExpr.pretty a) of "0" => "" | s => s) - fun output (RP_Data.Bnd a) = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "B") - |> update ("value",expr_to_jstring a) - ) - | output (RP_Data.Wnd a) = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "W") - |> update ("value",expr_to_jstring a) - ) - end - structure EDataInputJSON = InputUnitJSON - structure EDataOutputJSON = OutputUnitJSON - - structure DotStyle : DOT_STYLE = - struct - type nvdata = nvdata - fun style_for_ivertex_data (RP_Data.Bnd _) = - "[style=filled,fillcolor=black,fontcolor=white,shape=circle]" - | style_for_ivertex_data (RP_Data.Wnd _) = - "[style=filled,fillcolor=white,fontcolor=black,shape=circle]" - end -end - - diff --git a/core/theories/rational_pair/test/graphs/current_format.graph b/core/theories/rational_pair/test/graphs/current_format.graph deleted file mode 100644 index 615435e9..00000000 --- a/core/theories/rational_pair/test/graphs/current_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"type":"B","value":""}},"b":{"data":{"type":"B","value":"a + 
b"}},"c":{"data":{"type":"W","value":""}},"d":{"data":{"type":"W","value":"c"}}},"dir_edges":{"e1":{"src":"a","tgt":"w"},"e2":{"src":"w","tgt":"b"}}} \ No newline at end of file diff --git a/core/theories/rational_pair/test/test.ML b/core/theories/rational_pair/test/test.ML deleted file mode 100644 index 141a91dd..00000000 --- a/core/theories/rational_pair/test/test.ML +++ /dev/null @@ -1,32 +0,0 @@ -local - open RP_Theory - fun assert_g_eq msg exp actual = - if Graph.exact_eq exp actual then () - else (writeln "Expected:"; - Graph.print exp; - writeln "Actual:"; - Graph.print actual; - raise ERROR (msg^": graphs differed")) -in - val _ = Testing.test (theory_name^" theory save/restore") (fn () => let - val g = Graph.empty - |> Graph.add_named_vertex (V.mk "a") - (Graph.NVert (RP_Data.Bnd (LinratExpr.zero))) - |> Graph.add_named_vertex (V.mk "b") - (Graph.NVert (RP_Data.Bnd (LinratExpr.parse "a + b"))) - |> Graph.add_named_vertex (V.mk "c") - (Graph.NVert (RP_Data.Wnd (LinratExpr.zero))) - |> Graph.add_named_vertex (V.mk "d") - (Graph.NVert (RP_Data.Wnd (LinratExpr.parse "c"))) - |> Graph.add_named_vertex (V.mk "w") Graph.WVert - |> Graph.add_named_edge (E.mk "e1") (Directed,()) (V.mk "a") (V.mk "w") - |> Graph.add_named_edge (E.mk "e2") (Directed,()) (V.mk "w") (V.mk "b") - val g' = GraphJSON.input (GraphJSON.output g) - val _ = assert_g_eq "input (output g) = g" g g' - (* if the file format changes, do - * cp graphs/current_format.graph graphs/v_format.graph - * and add a new read test (see above) *) - val _ = File_Io.write_json "graphs/current_format.graph" (GraphJSON.output g) - in () end) () - val _ = Testing.assert_no_failed_tests() -end diff --git a/core/theories/rational_pair/theory.ML b/core/theories/rational_pair/theory.ML deleted file mode 100644 index be46abc2..00000000 --- a/core/theories/rational_pair/theory.ML +++ /dev/null @@ -1,39 +0,0 @@ - - -structure RP_Theory = GraphicalTheory( - structure Data = RP_Data - structure DataIO = RP_ComponentDataIO) - -(* Use this for convenience if you don't need annotations *) -(*structure RG_GraphicalTheoryIO = GraphicalTheoryIO( - structure Theory = RG_Theory - structure GraphComponentDataIO = RG_ComponentDataIO -)*) - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-==-=-=-=-=-=-=-=-=-=- *) -(* terms used within the RG theory. 
*) -(*structure RGHilbTerm = TensorTermFun( - -structure Graph = RG_Theory.Graph -fun pretty_vertex fmt graph vert = let - val (v, (ie,oe)) = Graph.get_vertex graph vert - val (ic,oc) = (E.NSet.cardinality ie, - E.NSet.cardinality oe) - val nums = [Pretty.str (Int.toString ic), - Pretty.str ", ", - Pretty.str (Int.toString oc)] -in Pretty.block - (case v of Graph.OVData.NVert iv => - (case iv - of RG_InternVData.Xnd e => - [Pretty.str "xsp[", LinratAngleExpr.pretty_math fmt e, Pretty.str ", "] @ - nums @ [Pretty.str "]"] - | RG_InternVData.Znd e => - [Pretty.str "zsp[", LinratAngleExpr.pretty_math fmt e, Pretty.str ", "] @ - nums @ [Pretty.str "]"] - | RG_InternVData.Hnd => [Pretty.str "h"]) - | _ => [Pretty.str "id2[1]"]) -end - -) -*) diff --git a/core/theories/red_green/data.ML b/core/theories/red_green/data.ML deleted file mode 100644 index 4fbada79..00000000 --- a/core/theories/red_green/data.ML +++ /dev/null @@ -1,63 +0,0 @@ -structure RG_Data = -struct - val pretty_theory_name = Pretty.str "red_green" - type psubst = LinratAngleMatcher.psubst - type subst = LinratAngleMatcher.subst - - datatype nvdata = Xnd of LinratAngleExpr.T (* Red: defined using H of Z *) - | Znd of LinratAngleExpr.T (* Green *) - | Hnd (* Hadamard node *) - | Var of string - val default_nvdata = Znd LinratAngleExpr.zero - - fun default_nvdata_of_typestring s = - case s of "X" => Xnd LinratAngleExpr.zero - | "Z" => Znd LinratAngleExpr.zero - | "hadamard" => Hnd - | "var" => Var "" - | _ => raise unknown_typestring_exp s - - fun nvdata_eq (Hnd, Hnd) = true - | nvdata_eq (Znd a, Znd b) = LinratAngleExpr.eq a b - | nvdata_eq (Xnd a, Xnd b) = LinratAngleExpr.eq a b - | nvdata_eq (Var s, Var t) = (s = t) - | nvdata_eq _ = false - - fun pretty_nvdata (Xnd a) = - Pretty.block [Pretty.str "X(", LinratAngleExpr.pretty a, Pretty.str ")"] - | pretty_nvdata (Znd a) = - Pretty.block [Pretty.str "Z(", LinratAngleExpr.pretty a, Pretty.str ")"] - | pretty_nvdata Hnd = Pretty.str "H" - | pretty_nvdata (Var s) = Pretty.block [Pretty.str "Var(", Pretty.str s, Pretty.str ")"] - - fun match_nvdata (Hnd, Hnd) m = SOME m - | match_nvdata (Znd a1,Znd a2) m = LinratAngleMatcher.match (a1,a2) m - | match_nvdata (Xnd a1,Xnd a2) m = LinratAngleMatcher.match (a1,a2) m - | match_nvdata (Var s, Var t) m = if s = t then SOME m else NONE - | match_nvdata _ _ = NONE - - fun subst_in_nvdata sub Hnd = (sub, Hnd) - | subst_in_nvdata sub (Var s) = (sub, Var s) - | subst_in_nvdata u (Xnd a) = - let val (sub',a') = LinratAngleMatcher.subst_in_expr u a - in (sub', Xnd a') end - | subst_in_nvdata u (Znd a) = - let val (sub',a') = LinratAngleMatcher.subst_in_expr u a - in (sub', Znd a') end - - open EmptyEdgeData - - local - fun pull_names (nvtab,_) = X.NSet.empty - |> VTab.fold ( - fn (_,Znd a) => X.NSet.union_merge (LinratAngleExpr.free_vars a) - | (_,Xnd a) => X.NSet.union_merge (LinratAngleExpr.free_vars a) - | _ => I - ) nvtab - in - fun init_psubst_from_data p_data t_data = - LinratAngleMatcher.init_psubst_from_names (pull_names p_data, pull_names t_data) - end - - fun solve_psubst ps = Seq.single (LinratAngleMatcher.solve_psubst ps) -end \ No newline at end of file diff --git a/core/theories/red_green/io.ML b/core/theories/red_green/io.ML deleted file mode 100644 index 922bd2f3..00000000 --- a/core/theories/red_green/io.ML +++ /dev/null @@ -1,91 +0,0 @@ -(* Data input for red-green graphs. The input strives to be as backward-compatible - * as possible, while output always outputs the newest format. 
As a consequence, - * the old Quantomatic GUI will no longer talk to the core. *) - -structure RG_ComponentDataIO : GRAPH_COMPONENT_DATA_IO -= struct - type nvdata = RG_Data.nvdata - type edata = RG_Data.edata - - structure IVDataInputJSON : INPUT_JSON = - struct - open JsonInputUtils - structure L = InputLinratJSON - type data = nvdata - fun get_angle obj = - case Json.lookup obj "value" - (* new RG-graphs just store angle as a string, to be parsed *) - of SOME (Json.String s) => - (LinratAngleExpr.parse s - handle LinratAngleExpr.parse_exp => - raise bad_input_exp ("Could not parse \""^s^"\"","")) - (* older RG-graphs use a (redundant) JSON representation of angle data *) - | SOME (v as Json.Object _) => L.input v - (* really old (pre-Derive) graphs call this field 'angle' *) - | NONE => L.input (get_easy Json.Null obj "angle") - val to_lower = String.implode o (map Char.toLower) o String.explode - fun input (Json.String t) = - (case to_lower t - of "hadamard" => RG_Data.Hnd - | "h" => RG_Data.Hnd - | "x" => RG_Data.Xnd LinratAngleExpr.zero - | "z" => RG_Data.Znd LinratAngleExpr.zero - | "var" => RG_Data.Var "" - | _ => raise bad_input_exp ("Unknown vertex type "^t,"")) - | input (Json.Object obj) = - (case to_lower (get_string obj "type") - of "hadamard" => RG_Data.Hnd - | "h" => RG_Data.Hnd - | "z" => RG_Data.Znd (get_angle obj) - | "x" => RG_Data.Xnd (get_angle obj) - | "var" => RG_Data.Var (case Json.lookup obj "value" - of SOME (Json.String s) => s - | NONE => "") - | t => raise bad_input_exp ("Unknown vertex type "^t,"type")) - | input _ = raise bad_input_exp ("Expected object","") - end - structure IVDataOutputJSON : OUTPUT_JSON = - struct - open JsonOutputUtils - structure L = OutputLinratJSON - type data = nvdata - fun angle_to_jstring a = Json.String - (case Pretty.string_of (LinratAngleExpr.pretty a) of "0" => "" | s => s) - fun output (RG_Data.Znd a) = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "Z") - |> update ("value",angle_to_jstring a) - ) - | output (RG_Data.Xnd a) = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "X") - |> update ("value",angle_to_jstring a) - ) - | output RG_Data.Hnd = - Json.Object ( - Json.empty_obj |> update ("type",Json.String "hadamard") - ) - | output (RG_Data.Var s) = - Json.Object ( - Json.empty_obj |> update ("type", Json.String "var") - |> update ("value", Json.String s) - ) - end - structure EDataInputJSON = InputUnitJSON - structure EDataOutputJSON = OutputUnitJSON - - structure DotStyle : DOT_STYLE = - struct - type nvdata = nvdata - fun style_for_ivertex_data (RG_Data.Znd _) = - "[style=filled,fillcolor=green,fontcolor=black,shape=circle]" - | style_for_ivertex_data (RG_Data.Xnd _) = - "[style=filled,fillcolor=red,fontcolor=white,shape=circle]" - | style_for_ivertex_data (RG_Data.Var _) = - "[style=filled,fillcolor=white,fontcolor=black,shape=circle]" - | style_for_ivertex_data RG_Data.Hnd = - "[style=filled,fillcolor=yellow,fontcolor=white,shape=square]" - end -end - - diff --git a/core/theories/red_green/rg_mathematica.ML b/core/theories/red_green/rg_mathematica.ML deleted file mode 100644 index eb0e447c..00000000 --- a/core/theories/red_green/rg_mathematica.ML +++ /dev/null @@ -1,43 +0,0 @@ -(*PolyML.SaveState.loadState "../../heaps/quanto.heap"; -PolyML.Compiler.printDepth:=100;*) - -structure RG_Mathematica = -struct - -structure G = RG_Theory.Graph - -fun to_mathematica name graph = let - val g = G.minimise graph - val bnd = E.NSet.filter - (fn e => G.is_boundary g (G.get_edge_source g e) orelse - 
G.is_boundary g (G.get_edge_target g e)) - (G.get_edges g) - val interior = E.NSet.subtract (G.get_edges g) bnd - val pretty_ang = LinratAngleExpr.pretty_math AlgFormat.MATHEMATICA - fun pretty_int b = Pretty.str ("{" ^ E.dest b ^ ", 0, 1}") - fun pretty_adj v = - Pretty.list "{" "}" - (map (Pretty.str o E.dest) (E.NSet.list_of (G.get_adj_edges g v))) - fun pretty_nd v = - case G.get_vertex_data g v - of G.NVert (RG_Data.Znd angle) => - Pretty.block[Pretty.str "z[", pretty_ang angle, - Pretty.str ", ", pretty_adj v, Pretty.str "] * "] - | G.NVert (RG_Data.Xnd angle) => - Pretty.block[Pretty.str "x[", pretty_ang angle, - Pretty.str ", ", pretty_adj v, Pretty.str "] * "] - | _ => Pretty.str "" -in - Pretty.string_of (Pretty.block [ - Pretty.str (name ^ "["), - Pretty.list "{" "}" (map (fn e => Pretty.str (E.dest e ^ "_")) (E.NSet.list_of bnd)), - Pretty.str "] := ", - Pretty.str "Sum[(", - Pretty.block (map pretty_nd (V.NSet.list_of (G.get_node_vertices g))), - Pretty.str "1), ", - Pretty.list "{" "}" (map pretty_int (E.NSet.list_of interior)), - Pretty.str "];" - ]) -end - -end \ No newline at end of file diff --git a/core/theories/red_green/rg_simp_util.ML b/core/theories/red_green/rg_simp_util.ML deleted file mode 100644 index 7a8c3554..00000000 --- a/core/theories/red_green/rg_simp_util.ML +++ /dev/null @@ -1,58 +0,0 @@ -structure RG_SimpUtil = -struct - -structure SU = SimpUtil( - structure Theory = RG_Theory - structure IO = RG_ComponentDataIO) -open SU - -fun is_red g v = case RG_Theory.Graph.get_vertex_data g v - of (RG_Theory.Graph.NVert (RG_Data.Xnd _)) => true | _ => false - -fun is_green g v = case RG_Theory.Graph.get_vertex_data g v - of (RG_Theory.Graph.NVert (RG_Data.Znd _)) => true | _ => false - -fun is_boundary_red g v = (RG_Theory.Graph.is_boundary g v) andalso V.NSet.exists (is_red g) (RG_Theory.Graph.get_adj_vertices g v) - -fun num_boundary_red g = let - val gmin = RG_Theory.Graph.minimise g -in V.NSet.cardinality (V.NSet.filter (is_boundary_red gmin) (RG_Theory.Graph.get_vertices gmin)) -end - -fun is_interior_green g v = (is_green g v) andalso V.NSet.forall (is_red g) (RG_Theory.Graph.get_adj_vertices g v) - -fun arity g v = Arity.get_undir (RG_Theory.Graph.get_arity g v) - -fun min_green_arity g = let - fun min v (SOME a) = SOME (Int.min (Arity.get_undir (RG_Theory.Graph.get_arity g v), a)) - | min v NONE = SOME (Arity.get_undir (RG_Theory.Graph.get_arity g v)) -in - case V.NSet.fold min (V.NSet.filter (is_interior_green g) (RG_Theory.Graph.get_vertices g)) NONE - of SOME a => a - | NONE => 0 -end - -fun vertex_where f g = - V.NSet.get_exists (f g) (RG_Theory.Graph.get_vertices g) - -fun min_arity_vertex_where f g = let - fun min v' (SOME v) = if (arity g v' < arity g v andalso f g v') - then SOME v' else SOME v - | min v NONE = if (f g v) then SOME v else NONE -in - V.NSet.fold min (RG_Theory.Graph.get_vertices g) NONE -end - -fun min_arity_green_vertex g = min_arity_vertex_where is_green g - -fun max_arity_green_vertex g = let - fun max v (SOME v') = if (arity g v' > arity g v andalso is_green g v') - then SOME v else SOME v' - | max v NONE = if (is_green g v) then SOME v else NONE -in - V.NSet.fold max (RG_Theory.Graph.get_vertices g) NONE -end - -val register_simproc = JsonControllerRegistry.RG_Controller.register_simproc - -end diff --git a/core/theories/red_green/test/graphs/current_format.graph b/core/theories/red_green/test/graphs/current_format.graph deleted file mode 100644 index 27bd4796..00000000 --- 
a/core/theories/red_green/test/graphs/current_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"type":"Z","value":""}},"b":{"data":{"type":"Z","value":"a + b"}},"c":{"data":{"type":"X","value":""}},"d":{"data":{"type":"X","value":"c"}}},"dir_edges":{"e1":{"src":"a","tgt":"w"},"e2":{"src":"w","tgt":"b"}}} \ No newline at end of file diff --git a/core/theories/red_green/test/graphs/v1_format.graph b/core/theories/red_green/test/graphs/v1_format.graph deleted file mode 100644 index c9c2fb3a..00000000 --- a/core/theories/red_green/test/graphs/v1_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"type":"Z","angle":{"pretty":"0"}}},"b":{"data":{"type":"Z","angle":{"vars":{"a":{"denom":1,"num":1},"b":{"denom":1,"num":1}},"pretty":"a + b"}}},"c":{"data":{"type":"X","angle":{"pretty":"0"}}},"d":{"data":{"type":"X","angle":{"vars":{"c":{"denom":1,"num":1}},"pretty":"c"}}}},"dir_edges":{"e1":{"src":"a","tgt":"w"},"e2":{"src":"w","tgt":"b"}}} \ No newline at end of file diff --git a/core/theories/red_green/test/graphs/v2_format.graph b/core/theories/red_green/test/graphs/v2_format.graph deleted file mode 100644 index e7be7bda..00000000 --- a/core/theories/red_green/test/graphs/v2_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"type":"Z","label":"0","value":{"pretty":"0"}}},"b":{"data":{"type":"Z","label":"a + b","value":{"vars":{"a":{"denom":1,"num":1},"b":{"denom":1,"num":1}},"pretty":"a + b"}}},"c":{"data":{"type":"X","label":"0","value":{"pretty":"0"}}},"d":{"data":{"type":"X","label":"c","value":{"vars":{"c":{"denom":1,"num":1}},"pretty":"c"}}}},"dir_edges":{"e1":{"src":"a","tgt":"w"},"e2":{"src":"w","tgt":"b"}}} \ No newline at end of file diff --git a/core/theories/red_green/test/test.ML b/core/theories/red_green/test/test.ML deleted file mode 100644 index b5006dd8..00000000 --- a/core/theories/red_green/test/test.ML +++ /dev/null @@ -1,40 +0,0 @@ -local - open RG_Theory - fun assert_g_eq msg exp actual = - if Graph.exact_eq exp actual then () - else (writeln "Expected:"; - Graph.print exp; - writeln "Actual:"; - Graph.print actual; - raise ERROR (msg^": graphs differed")) -in - val _ = Testing.test (theory_name^" theory save/restore") (fn () => let - val g = Graph.empty - |> Graph.add_named_vertex (V.mk "a") - (Graph.NVert (RG_Data.Znd (LinratAngleExpr.zero))) - |> Graph.add_named_vertex (V.mk "b") - (Graph.NVert (RG_Data.Znd (LinratAngleExpr.parse "a + b"))) - |> Graph.add_named_vertex (V.mk "c") - (Graph.NVert (RG_Data.Xnd (LinratAngleExpr.zero))) - |> Graph.add_named_vertex (V.mk "d") - (Graph.NVert (RG_Data.Xnd (LinratAngleExpr.parse "c"))) - |> Graph.add_named_vertex (V.mk "w") Graph.WVert - |> Graph.add_named_edge (E.mk "e1") (Directed,()) (V.mk "a") (V.mk "w") - |> Graph.add_named_edge (E.mk "e2") (Directed,()) (V.mk "w") (V.mk "b") - val g' = GraphJSON.input (GraphJSON.output g) - val _ = assert_g_eq "input (output g) = g" g g' - val _ = "graphs/v1_format.graph" - |> File_Io.read_json - |> GraphJSON.input - |> assert_g_eq "input v1_format.graph = g" g - val _ = "graphs/v2_format.graph" - |> File_Io.read_json - |> GraphJSON.input - |> assert_g_eq "input v2_format.graph = g" g - (* if the file format changes, do - * cp graphs/current_format.graph graphs/v_format.graph - * and add a new read test (see above) *) - val _ = File_Io.write_json "graphs/current_format.graph" (GraphJSON.output g) - in () end) () - val _ = 
Testing.assert_no_failed_tests() -end diff --git a/core/theories/red_green/theory.ML b/core/theories/red_green/theory.ML deleted file mode 100644 index dfe12cc5..00000000 --- a/core/theories/red_green/theory.ML +++ /dev/null @@ -1,39 +0,0 @@ - - -structure RG_Theory = GraphicalTheory( - structure Data = RG_Data - structure DataIO = RG_ComponentDataIO) - -(* Use this for convenience if you don't need annotations *) -(*structure RG_GraphicalTheoryIO = GraphicalTheoryIO( - structure Theory = RG_Theory - structure GraphComponentDataIO = RG_ComponentDataIO -)*) - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-==-=-=-=-=-=-=-=-=-=- *) -(* terms used within the RG theory. *) -(*structure RGHilbTerm = TensorTermFun( - -structure Graph = RG_Theory.Graph -fun pretty_vertex fmt graph vert = let - val (v, (ie,oe)) = Graph.get_vertex graph vert - val (ic,oc) = (E.NSet.cardinality ie, - E.NSet.cardinality oe) - val nums = [Pretty.str (Int.toString ic), - Pretty.str ", ", - Pretty.str (Int.toString oc)] -in Pretty.block - (case v of Graph.OVData.NVert iv => - (case iv - of RG_InternVData.Xnd e => - [Pretty.str "xsp[", LinratAngleExpr.pretty_math fmt e, Pretty.str ", "] @ - nums @ [Pretty.str "]"] - | RG_InternVData.Znd e => - [Pretty.str "zsp[", LinratAngleExpr.pretty_math fmt e, Pretty.str ", "] @ - nums @ [Pretty.str "]"] - | RG_InternVData.Hnd => [Pretty.str "h"]) - | _ => [Pretty.str "id2[1]"]) -end - -) -*) diff --git a/core/theories/red_green_blue/data.ML b/core/theories/red_green_blue/data.ML deleted file mode 100644 index eb41ca66..00000000 --- a/core/theories/red_green_blue/data.ML +++ /dev/null @@ -1,28 +0,0 @@ -(* Generated using ./theories/generate-no-data-theory.py RGB red_green_blue Red Green Blue *) -structure RGB_Data = -struct - val pretty_theory_name = Pretty.str "red_green_blue" - type psubst = unit - type subst = psubst - - datatype nvdata = Red | Green | Blue - val default_nvdata = Red - fun nvdata_eq (a,b) = a = b - - fun match_nvdata (x,y) () = if nvdata_eq (x,y) then SOME () else NONE - fun subst_in_nvdata sub d = (sub,d) - - fun pretty_nvdata Red = Pretty.str "Red" - | pretty_nvdata Green = Pretty.str "Green" - | pretty_nvdata Blue = Pretty.str "Blue" - - fun default_nvdata_of_typestring "Red" = Red - | default_nvdata_of_typestring "Green" = Green - | default_nvdata_of_typestring "Blue" = Blue - | default_nvdata_of_typestring s = raise unknown_typestring_exp s - - open EmptyEdgeData - - fun init_psubst_from_data _ _ = () - val solve_psubst = Seq.single -end diff --git a/core/theories/red_green_blue/io.ML b/core/theories/red_green_blue/io.ML deleted file mode 100644 index 7a84dc01..00000000 --- a/core/theories/red_green_blue/io.ML +++ /dev/null @@ -1,49 +0,0 @@ -(* Generated using theories/generate-no-data-io.py RGB red_green_blue Red Green Blue *) -structure RGB_ComponentDataIO : GRAPH_COMPONENT_DATA_IO -= struct - type nvdata = RGB_Data.nvdata - type edata = RGB_Data.edata - - structure IVDataInputJSON : INPUT_JSON = - struct - open JsonInputUtils - type data = nvdata - val to_lower = String.implode o (map Char.toLower) o String.explode - fun get_type t = - (case to_lower t - of "red" => RGB_Data.Red - | "green" => RGB_Data.Green - | "blue" => RGB_Data.Blue - | _ => raise bad_input_exp ("Unknown vertex type "^t,"")) - fun input (Json.String t) = get_type t - | input (Json.Object obj) = - (get_type (get_string obj "type") - handle bad_input_exp (m,l) => - raise bad_input_exp (m, prepend_prop "type" l)) - | input _ = raise bad_input_exp ("Expected string","type") - end - 
structure IVDataOutputJSON : OUTPUT_JSON = - struct - open JsonOutputUtils - type data = nvdata - fun typestr RGB_Data.Red = "Red" - | typestr RGB_Data.Green = "Green" - | typestr RGB_Data.Blue = "Blue" - fun output d = Json.mk_record [("type",typestr d)] - end - structure EDataInputJSON = InputUnitJSON - structure EDataOutputJSON = OutputUnitJSON - - structure DotStyle : DOT_STYLE = - struct - type nvdata = nvdata - fun style_for_ivertex_data RGB_Data.Red = - "[style=filled,fillcolor=red,fontcolor=white,shape=circle]" - | style_for_ivertex_data RGB_Data.Green = - "[style=filled,fillcolor=green,fontcolor=white,shape=circle]" - | style_for_ivertex_data RGB_Data.Blue = - "[style=filled,fillcolor=blue,fontcolor=white,shape=circle]" - end -end - - diff --git a/core/theories/red_green_blue/test/graphs/current_format.graph b/core/theories/red_green_blue/test/graphs/current_format.graph deleted file mode 100644 index 309ad98d..00000000 --- a/core/theories/red_green_blue/test/graphs/current_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"b":{"data":{"type":"Blue"}},"g":{"data":{"type":"Green"}},"r":{"data":{"type":"Red"}}},"dir_edges":{"e1":{"src":"r","tgt":"w"},"e2":{"src":"w","tgt":"g"}}} \ No newline at end of file diff --git a/core/theories/red_green_blue/test/graphs/v1_format.graph b/core/theories/red_green_blue/test/graphs/v1_format.graph deleted file mode 100644 index 96dc360f..00000000 --- a/core/theories/red_green_blue/test/graphs/v1_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"b":{"data":"Blue"},"g":{"data":"Green"},"r":{"data":"Red"}},"dir_edges":{"e1":{"src":"r","tgt":"w"},"e2":{"src":"w","tgt":"g"}}} \ No newline at end of file diff --git a/core/theories/red_green_blue/test/graphs/v2_format.graph b/core/theories/red_green_blue/test/graphs/v2_format.graph deleted file mode 100644 index 309ad98d..00000000 --- a/core/theories/red_green_blue/test/graphs/v2_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"b":{"data":{"type":"Blue"}},"g":{"data":{"type":"Green"}},"r":{"data":{"type":"Red"}}},"dir_edges":{"e1":{"src":"r","tgt":"w"},"e2":{"src":"w","tgt":"g"}}} \ No newline at end of file diff --git a/core/theories/red_green_blue/test/test.ML b/core/theories/red_green_blue/test/test.ML deleted file mode 100644 index b8a3fe2f..00000000 --- a/core/theories/red_green_blue/test/test.ML +++ /dev/null @@ -1,35 +0,0 @@ -local - open RGB_Theory - fun assert_g_eq msg exp actual = - if Graph.exact_eq exp actual then () - else (writeln "Expected:"; - Graph.print exp; - writeln "Actual:"; - Graph.print actual; - raise ERROR (msg^": graphs differed")) -in - val _ = Testing.test (theory_name^" theory save/restore") (fn () => let - val g = Graph.empty - |> Graph.add_named_vertex (V.mk "r") (Graph.NVert RGB_Data.Red) - |> Graph.add_named_vertex (V.mk "g") (Graph.NVert RGB_Data.Green) - |> Graph.add_named_vertex (V.mk "b") (Graph.NVert RGB_Data.Blue) - |> Graph.add_named_vertex (V.mk "w") Graph.WVert - |> Graph.add_named_edge (E.mk "e1") (Directed,()) (V.mk "r") (V.mk "w") - |> Graph.add_named_edge (E.mk "e2") (Directed,()) (V.mk "w") (V.mk "g") - val g' = GraphJSON.input (GraphJSON.output g) - val _ = assert_g_eq "input (output g) = g" g g' - val _ = "graphs/v1_format.graph" - |> File_Io.read_json - |> GraphJSON.input - |> assert_g_eq "input v1_format.graph = g" g - val _ = "graphs/v2_format.graph" - |> File_Io.read_json - |> GraphJSON.input - |> assert_g_eq "input v2_format.graph = g" g - (* 
if the file format changes, do - * cp graphs/current_format.graph graphs/v_format.graph - * and add a new read test (see above) *) - val _ = File_Io.write_json "graphs/current_format.graph" (GraphJSON.output g) - in () end) () - val _ = Testing.assert_no_failed_tests() -end diff --git a/core/theories/red_green_blue/theory.ML b/core/theories/red_green_blue/theory.ML deleted file mode 100644 index 66a573d9..00000000 --- a/core/theories/red_green_blue/theory.ML +++ /dev/null @@ -1,12 +0,0 @@ - - -structure RGB_Theory = GraphicalTheory( - structure Data = RGB_Data - structure DataIO = RGB_ComponentDataIO) - - -(* Use this for convenience if you don't need annotations *) -(*structure RGB_GraphicalTheoryIO = GraphicalTheoryIO( - structure Theory = RGB_Theory - structure GraphComponentDataIO = RGB_ComponentDataIO -)*) diff --git a/core/theories/ruleset.ML b/core/theories/ruleset.ML deleted file mode 100644 index 6d95d216..00000000 --- a/core/theories/ruleset.ML +++ /dev/null @@ -1,248 +0,0 @@ -(* sharing types for Rulesets *) -signature RULESET_SHARING = -sig - structure Rule : OGRAPH_RULE_SHARING - type T -end - -(* *) -signature RULESET = -sig - structure Rule : OGRAPH_RULE - - type T (* a set of named rules, which can be taged, and which have - an active subset *) - - val empty : T - - (* sharing structure *) - structure Sharing : RULESET_SHARING - sharing Sharing.Rule = Rule.Sharing - sharing type Sharing.T = T - - exception no_such_rule_exp of R.name - exception rname_already_exists_exp of R.name - - (* basic getting of info about rulesets *) - val get_allrules : T -> Rule.T RTab.T - val get_all_rule_names_list : T -> R.name list - val get_tags : T -> TagName.NSet.T - val get_rules_in_tag : T -> TagName.name -> R.NSet.T - val get_active : T -> R.NSet.T - val get_tagrel : T -> RTagRel.T - val get_rule : T -> R.name -> Rule.T - val get_rule_opt : T -> R.name -> Rule.T option - - val set_brel : RTagRel.T -> T -> T - val set_allrules : Rule.T RTab.T -> T -> T - val set_active : R.NSet.T -> T -> T - - val is_activate : T -> R.name -> bool - val rule_exists : T -> R.name -> bool - val tag_exists : T -> TagName.name -> bool - - (* manipulating the ruleset *) - val merge : T -> T -> T (* rules in first replace those in second with same name *) - - val add_fresh_rule : Rule.T -> T -> R.name * T (* fresh name for rule *) - val update_rule : R.name * Rule.T -> T -> T (* replaces/inserts *) - val tag_rule : R.name -> TagName.name -> T -> T - val untag_rule : R.name -> TagName.name -> T -> T - - val activate_rule : R.name -> T -> T - val deactivate_rule : R.name -> T -> T - val delete_rule : R.name -> T -> T - val rename_rule : R.name -> R.name -> T -> T - val rename_rules : RSub.T -> T -> T - - val activate_tag : TagName.name -> T -> T - val deactivate_tag : TagName.name -> T -> T - val delete_tag : TagName.name -> T -> T - - val delete_rules_in_tag : TagName.name -> T -> T - -end (* signature *) - -signature BANG_GRAPH_RULESET = -sig - include RULESET - structure BangGraphRule : BANG_GRAPH_RULE - sharing BangGraphRule.Sharing = Rule.Sharing -end (* signature *) - - -(* a Ruleset is map from rule names to rule, - a subset of active rulenames, and a binary relation between rulenames - and tagnames. *) -functor Ruleset(Rule : OGRAPH_RULE) : RULESET = -struct - - structure Rule = Rule - structure G = Rule.Graph - - exception no_such_rule_exp of R.name - exception rname_already_exists_exp of R.name - - (* at some point, a ruleset should be a richer structure, - but this will do for now. 
Its just a rule list and a flag - for "active". - The rule list has a boolean to show if the rule is active *) - datatype T = - Ruleset of - { allrules : Rule.T RTab.T, - brel : RTagRel.T, - active : R.NSet.T } - - val empty = Ruleset { - allrules = RTab.empty, - brel = RTagRel.empty, - active = R.NSet.empty - } - - fun get_allrules (Ruleset rep) = #allrules rep - fun get_active (Ruleset rep) = #active rep - fun get_tagrel (Ruleset rep) = #brel rep - - fun update_allrules f (Ruleset rep) = - Ruleset {allrules = f (#allrules rep), active = #active rep, - brel = #brel rep } - fun update_active f (Ruleset rep) = - Ruleset {allrules = #allrules rep, active = f (#active rep), - brel = #brel rep } - fun update_brel f (Ruleset rep) = - Ruleset {allrules = #allrules rep, active = #active rep, - brel = f (#brel rep) } - - val set_allrules = update_allrules o K - val set_active = update_active o K - val set_brel = update_brel o K - - - structure Sharing = struct - structure Rule = Rule.Sharing - type T = T - end - - fun rule_exists rset rname = - RTab.dom_contains (get_allrules rset) rname - - fun is_activate rset rname = - R.NSet.contains (get_active rset) rname - - fun get_rule_opt rset rule_name = - RTab.get_opt (get_allrules rset) rule_name - - fun get_rule rset rule_name = - RTab.get (get_allrules rset) rule_name - - fun get_all_rule_names_list rset = R.NSet.list_of (RTab.get_dom_set (get_allrules rset)) - fun get_tags rset = RTagRel.get_cod_set (get_tagrel rset) - - fun get_rules_in_tag rset tagname = RTagRel.inv_img (get_tagrel rset) tagname - - fun tag_exists rset tagname = - TagName.NSet.contains (RTagRel.get_cod_set (get_tagrel rset)) tagname - - (* *) - fun set_rule_activation activate name rset = - if rule_exists rset name then - rset |> update_active ((if activate then R.NSet.add - else R.NSet.delete) name) - else raise no_such_rule_exp name - - (* these all raise UNDEF on failure *) - val activate_rule = set_rule_activation true - val deactivate_rule = set_rule_activation false - fun delete_rule name rset = - rset |> update_active (R.NSet.delete name) - |> update_brel (RTagRel.delete name) - |> update_allrules (RTab.delete name) - - - exception invalid_state_exp of unit - -fun rename_rules rsub rset = let - val rrnm = rsub |> RSub.extend_fixed (RTab.get_dom_set (get_allrules rset)) - val rrnmi = RSub.inverse_of rrnm -in rset |> update_allrules (fn m => RTab.compose (m, rrnmi)) - |> update_brel (fn m => RTagRel.compose (m, rrnmi)) - |> update_active (RSub.img_of_set rrnm) -end - -fun rename_rule old new = rename_rules (RSub.empty |> RSub.add (old,new)) - -(* fun rename_rule old new rset = - let - val rules = (get_allrules rset) - val rule = case RTab.lookup rules old - of NONE => raise no_such_rule_exp old - | SOME r => r - val rules2 = - case (rules |> RTab.delete old - |> RTab.try_ins (new,rule)) - of NONE => raise rname_already_exists_exp new - | SOME r => r - val brel = case RTagRel.try_rename1_dom old new (get_tagrel rset) - of NONE => raise invalid_state_exp () - | SOME b => b - val active = - case (R.NSet.try_rename1 old new (get_active rset)) - of NONE => raise invalid_state_exp () - | SOME r => r - in - rset |> set_active active - |> set_brel brel - |> set_allrules rules2 - end*) - - - fun activate_tag tagname rset = - R.NSet.fold activate_rule (get_rules_in_tag rset tagname) rset - fun deactivate_tag tagname rset = - R.NSet.fold deactivate_rule (get_rules_in_tag rset tagname) rset - fun delete_tag tagname rset = - R.NSet.fold delete_rule (get_rules_in_tag rset tagname) rset - 
- - fun update_rule (name, rule) rset = - rset |> update_allrules (RTab.set (name,rule)) - - - fun add_fresh_rule rule rset = - let - val (name,allrules) = RTab.store rule (get_allrules rset) - in (name, rset |> set_allrules allrules) - end - - - (* add all rules from rset1 to rset2, replaces elements in rset2 on duplicates, - unions tags contents and unions active set. *) - fun merge rset1 rset2 = - rset2 - |> update_allrules (RTab.fold RTab.set (get_allrules rset1)) - |> update_brel (RTagRel.fold RTagRel.add (get_tagrel rset1)) - |> update_active (R.NSet.union_merge (get_active rset1)) - - fun tag_rule rname tagname rset = - if rule_exists rset rname then - rset |> update_brel (RTagRel.add (rname, tagname)) - else raise no_such_rule_exp rname - - fun untag_rule rname tagname rset = - if rule_exists rset rname then - rset |> update_brel (RTagRel.unmap (rname, tagname)) - else raise no_such_rule_exp rname - - fun delete_rules_in_tag tagname rset = - rset |> R.NSet.fold delete_rule (get_rules_in_tag rset tagname) - -end (* functor Ruleset *) - -functor BangGraphRuleset(Rule : BANG_GRAPH_RULE) : BANG_GRAPH_RULESET = -struct - structure BangGraphRule = Rule - structure RS = Ruleset(Rule) - open RS -end - - diff --git a/core/theories/ruleset_annotations.ML b/core/theories/ruleset_annotations.ML deleted file mode 100644 index 958043c7..00000000 --- a/core/theories/ruleset_annotations.ML +++ /dev/null @@ -1,193 +0,0 @@ -(** - * Annotations on a ruleset - *) -signature RULESET_ANNOTATIONS = -sig - structure GraphAnnotations : GRAPH_ANNOTATIONS; - (* The annotation structure *) - type T; - type data = GraphAnnotations.data; - - (* With no annotations *) - val init: T; - - val get_ruleset_annotation : T -> data; - val get_rule_annotation : T -> R.name -> data; - val get_rule_lhs_annotation : T -> R.name -> GraphAnnotations.T; - val get_rule_rhs_annotation : T -> R.name -> GraphAnnotations.T; - - val get_rule_annotation_tab : T -> (data*(GraphAnnotations.T*GraphAnnotations.T)) RTab.T; - - val set_ruleset_annotation : data -> T -> T; - val set_rule_annotation : R.name -> data -> T -> T; - val set_rule_lhs_annotation : R.name -> GraphAnnotations.T -> T -> T; - val set_rule_rhs_annotation : R.name -> GraphAnnotations.T -> T -> T; - - val update_ruleset_annotation : (data -> data) -> T -> T; - val update_rule_annotation : R.name -> (data -> data) -> T -> T; - val update_rule_lhs_annotation : R.name -> (GraphAnnotations.T -> GraphAnnotations.T) -> T -> T; - val update_rule_rhs_annotation : R.name -> (GraphAnnotations.T -> GraphAnnotations.T) -> T -> T; - - (* first overwrites second *) - val merge : T -> T -> T; - - val remove_rule_annotation : R.name -> T -> T; - val rename_rule : R.name -> R.name -> T -> T; - - (* Arg1 is rules to retain *) - val cleanup : R.NSet.T -> T -> T; - - val pretty : T -> Pretty.T; - val print : T -> unit; - val pretty_data : data -> Pretty.T; - val print_data : data -> unit; -end; - -functor RulesetAnnotations( - structure GraphAnnotations : GRAPH_ANNOTATIONS -) : RULESET_ANNOTATIONS = -struct - structure GraphAnnotations = GraphAnnotations; - type data = GraphAnnotations.data; - structure GA = GraphAnnotations; - type T = (data * ((data * (GA.T * GA.T)) RTab.T)); - - val init = (GA.empty_data,RTab.empty); - - fun get_ruleset_annotation (rsa,_) = rsa; - fun get_rule_annotation (_,ratab) r = - case RTab.get_opt ratab r - of SOME (ra,_) => ra - | NONE => GA.empty_data; - fun get_rule_lhs_annotation (_,ratab) r = - case RTab.get_opt ratab r - of SOME (_,(ga,_)) => ga - | NONE => 
GA.init; - fun get_rule_rhs_annotation (_,ratab) r = - case RTab.get_opt ratab r - of SOME (_,(_,ga)) => ga - | NONE => GA.init; - - fun get_rule_annotation_tab (_,ratab) = ratab; - - val update_ruleset_annotation = apfst; - fun update_rule_annotation r f (rsa,rtab) = - ( - rsa, - RTab.map_default - (fn (rann,gas) => (f rann,gas)) - (GA.empty_data,(GA.init,GA.init)) r rtab - ) - - fun update_rule_lhs_annotation r f (rsa,rtab) = - ( - rsa, - RTab.map_default - (fn (rann,(lhs,rhs)) => (rann,(f lhs,rhs))) - (GA.empty_data,(GA.init,GA.init)) r rtab - ) - - fun update_rule_rhs_annotation r f (rsa,rtab) = - ( - rsa, - RTab.map_default - (fn (rann,(lhs,rhs)) => (rann,(lhs,f rhs))) - (GA.empty_data,(GA.init,GA.init)) r rtab - ) - - fun set_ruleset_annotation ann (_,ratab) = (ann,ratab) - fun set_rule_annotation r ann = update_rule_annotation r (K ann) - fun set_rule_lhs_annotation r ann = update_rule_lhs_annotation r (K ann) - fun set_rule_rhs_annotation r ann = update_rule_rhs_annotation r (K ann) - - fun merge (rsa,rtab1) (_,rtab2) = - (rsa, RTab.fold RTab.set rtab2 rtab1); - fun remove_rule_annotation r (rsa,rtab) = (rsa, RTab.delete r rtab); - fun rename_rule old new (rsa,rtab) = - if not (R.name_eq (old,new)) then - let - val ann = RTab.get rtab old - in (rsa, rtab |> RTab.delete old |> RTab.add (new,ann)) - end - else (rsa,rtab) - - fun cleanup rset (ras,rtab) = - (ras, RTab.fold - (fn (k,_) => if R.NSet.contains rset k then I else RTab.delete k) - rtab rtab); - - val pretty_data = GraphAnnotations.pretty_data; - val print_data = GraphAnnotations.print_data; - fun pretty_rule (ran,(lhs,rhs)) = - Pretty.chunks [ - Pretty.block [ - Pretty.str "Rule Annotation: ", - pretty_data ran - ], - Pretty.block [ - Pretty.str "LHS Annotations: ", - GraphAnnotations.pretty lhs - ], - Pretty.block [ - Pretty.str "RHS Annotations: ", - GraphAnnotations.pretty rhs - ] - ]; - fun pretty ann = - Pretty.chunks - [Pretty.str "Ruleset Annotations {", - Pretty.block - [Pretty.str " ", - Pretty.chunks - [Pretty.block - [Pretty.str "Ruleset Annotation: ", - pretty_data (get_ruleset_annotation ann)], - Pretty.block - [Pretty.str "Rule Annotations: ", - RTab.pretty pretty_rule (get_rule_annotation_tab ann)]]], - Pretty.str "}"]; - val print = Pretty.writeln o pretty; -end; - -(** - * Table of annotations on a ruleset - * - * Each component has an associated table of strings. 
- *) -signature RULESET_STRING_TABLE_ANNOTATIONS = -sig - include RULESET_ANNOTATIONS where type data = string Symtab.table; - - val get_ruleset_property_opt : T -> string -> string option; - val get_rule_property_opt : T -> R.name -> string -> string option; - - val set_ruleset_property : (string * string) -> T -> T; - val set_rule_property : R.name -> (string * string) -> T -> T; - - val remove_ruleset_property : string -> T -> T; - val remove_rule_property : R.name -> string -> T -> T; -end; - -signature RULESET_JSON_OBJECT_ANNOTATIONS = RULESET_ANNOTATIONS where type data = Json.jobj - -structure RulesetStringTableAnnotations : RULESET_STRING_TABLE_ANNOTATIONS = -struct - structure Ann = RulesetAnnotations( - structure GraphAnnotations = GraphStringTableAnnotations - ); - open Ann; - - val get_ruleset_property_opt = Symtab.lookup o Ann.get_ruleset_annotation; - val get_rule_property_opt = Symtab.lookup oo Ann.get_rule_annotation; - - val set_ruleset_property = Ann.update_ruleset_annotation o Symtab.update; - fun set_rule_property r = (Ann.update_rule_annotation r) o Symtab.update; - - val remove_ruleset_property = Ann.update_ruleset_annotation o Symtab.delete; - fun remove_rule_property r = (Ann.update_rule_annotation r) o Symtab.delete; -end; - -structure RulesetJsonObjectAnnotations : RULESET_JSON_OBJECT_ANNOTATIONS = RulesetAnnotations( - structure GraphAnnotations = GraphJsonObjectAnnotations -) - diff --git a/core/theories/string_data.ML b/core/theories/string_data.ML deleted file mode 100644 index 80df4ff7..00000000 --- a/core/theories/string_data.ML +++ /dev/null @@ -1,65 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-==-=-=-=-=-=-=-=-=-=- *) -(* A structure that defines vertices with strings as data and - exact string matching. 
*) -structure StringData -: UNIFIABLE_DATA -= struct - - type data = string; - - (* ordering is used for building tables, sorting, etc *) - val data_ord = String.compare; - (* equality check (should agree with order): - data_eq(x,y) <=> data_ord (x,y) = EQUAL *) - fun data_eq (xy as (x, y)) = - (case data_ord xy of EQUAL => true - | _ => false); - - (* pretty printing code *) - val pretty_data = Pretty.str; - val print_data = Pretty.writeln o pretty_data; - - (* no variables in our vertex data, so we have dummy unit subst *) - type subst = unit; - val empty_subst = (); - fun compose_subst (u1,u2) = (); - - (* matching is string prefix "abc" matches "abcd" and "abc.d" but not - "ab.cd" or "aabc" *) - fun match_data (ss as (s1, s2)) subst = - if data_eq (s1, s2) then SOME subst else NONE; - (* unification: if x matches y, and y matches x (with the same subst) *) - (* for string prefix, x and y must be the same string *) - fun unify_data xy subst = if data_eq xy then SOME subst else NONE; - (* there is not data in a vertex, subst is just the id on the data *) - fun subst_in_data subst x = x; - - (* pretty printing for subst *) - fun pretty_subst () = Pretty.str "string subst: ()"; - val print_subst = Pretty.writeln o pretty_subst; - - val default_data = ""; - - structure Sharing = struct type data = data; type subst = subst; end; -end; - - -(* *) -structure StringData_ComponentData_Param -: ELEMENT_COMPONENT_DATA_PARAM where type data = StringData.data -= struct - type data = StringData.data; - exception unknown_type_exp of string; - exception unexpected_data_exp; - fun type_of s = "String"; - fun cdata_of s = ComponentData.String s; - fun default_data "String" = "undefined" - | default_data n = raise unknown_type_exp n; - fun update f x = x; -end; - -structure StringData_ComponentData = ElementComponentData( - StringData_ComponentData_Param) - - - diff --git a/core/theories/string_ve/data.ML b/core/theories/string_ve/data.ML deleted file mode 100644 index a8ebf582..00000000 --- a/core/theories/string_ve/data.ML +++ /dev/null @@ -1,24 +0,0 @@ -structure StringVE_Data = -struct - type psubst = unit - type subst = unit - type nvdata = string - type edata = string - val pretty_theory_name = Pretty.str "string_ve"; - fun init_psubst_from_data _ _ = () - fun nvdata_eq (s1,s2) = (s1=s2) - fun edata_eq (s1,s2) = (s1=s2) - fun match_nvdata (s1,s2) _ = if nvdata_eq (s1,s2) then SOME () else NONE - fun match_edata (s1,s2) _ = if edata_eq (s1,s2) then SOME () else NONE - val default_nvdata = "" - val default_edata = "" - val solve_psubst = Seq.single - fun subst_in_nvdata _ s = ((),s) - fun subst_in_edata _ s = ((),s) - - fun default_nvdata_of_typestring _ = "" - fun default_edata_of_typestring _ = "" - - val pretty_nvdata = Pretty.str - val pretty_edata = Pretty.str -end diff --git a/core/theories/string_ve/graph.ML b/core/theories/string_ve/graph.ML deleted file mode 100644 index 053663f2..00000000 --- a/core/theories/string_ve/graph.ML +++ /dev/null @@ -1,63 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* String Vertex/Edge Theory: both verts and edges have string data *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -structure StringVE_GraphParam -: GRAPH_PARAM -= struct - val pretty_theory_name = Pretty.str "ghz_w"; - - structure VData = StringData - - (* edges have unit data, and are ignored as we use Vertex subst type *) - structure EData = StringData - - exception match_exp of unit - - (* substitutions for variables in 
vertex/edge data - (usually, we only have data on vertices) *) - type subst = unit; - - val empty_subst = (); - fun compose_subst u1 u2 = (); - fun subst_in_vertex () = I; - fun subst_in_edge () = I; - fun try_match_edge d1 d2 u = EData.match_data (d1,d2) u; - fun match_edge e1 e2 u = - (case try_match_edge e1 e2 u - of NONE => raise match_exp () - | SOME u' => u'); - fun try_match_vertex d1 d2 u = VData.match_data (d1,d2) u; - fun match_vertex v1 v2 u = - (case try_match_vertex v1 v2 u - of NONE => raise match_exp () - | SOME u' => u'); - - (* pretty printing *) - fun pretty_subst () = Pretty.str "unit" - val print_subst = Pretty.writeln o pretty_subst; - - structure NoInternalData = NoInternalDataGraphParam( - type vdata = VData.data type edata = EData.data); - open NoInternalData; - - structure Sharing = - struct - structure VData = VData.Sharing; - structure EData = EData.Sharing; - type T = T; - type subst = subst; - end; -end; - -structure StringVE_OVertex = OVertexData(StringData); -structure StringVE_OGraphParam = OGraphParam( - structure Param = StringVE_GraphParam - and OVData = StringVE_OVertex); - -structure StringVE_OVertexComponentData_Param = OVertexComponentData_Param( - structure VertexDataParam = StringData_ComponentData_Param - structure OVData = StringVE_OVertex); - -structure StringVE_OVertexComponentData = ElementComponentData( - StringVE_OVertexComponentData_Param); - diff --git a/core/theories/string_ve/io.ML b/core/theories/string_ve/io.ML deleted file mode 100644 index 3eb7082f..00000000 --- a/core/theories/string_ve/io.ML +++ /dev/null @@ -1,44 +0,0 @@ -structure StringVE_ComponentDataIO: GRAPH_COMPONENT_DATA_IO = -struct - type nvdata = StringVE_Data.nvdata - type edata = StringVE_Data.edata - - structure IVDataInputJSON = - struct - open JsonInputUtils - type data = StringVE_Data.nvdata - fun input (Json.Object obj) = get_string_easy "" obj "value" - | input (Json.String s) = s - | input _ = raise bad_input_exp ("Expected object","type") - end - - structure IVDataOutputJSON = - struct - open JsonOutputUtils - type data = StringVE_Data.nvdata - fun output s = Json.mk_record [("type", "string"), ("label",s), ("value", s)] - end - - structure EDataInputJSON = - struct - open JsonInputUtils - type data = StringVE_Data.edata - fun input (Json.Object obj) = get_string_easy "" obj "value" - | input (Json.String s) = s - | input _ = raise bad_input_exp ("Expected object","type") - end - - structure EDataOutputJSON = - struct - open JsonOutputUtils - type data = StringVE_Data.edata - fun output s = Json.mk_record [("type", "string"), ("label",s), ("value", s)] - end - - structure DotStyle = - struct - type nvdata = nvdata - fun style_for_ivertex_data _ = - "[style=filled,fillcolor=white,fontcolor=black,shape=circle]" - end -end diff --git a/core/theories/string_ve/test/graphs/current_format.graph b/core/theories/string_ve/test/graphs/current_format.graph deleted file mode 100644 index 7e9048e5..00000000 --- a/core/theories/string_ve/test/graphs/current_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"value":"foo","label":"foo","type":"string"}},"b":{"data":{"value":"bar","label":"bar","type":"string"}}},"dir_edges":{"e1":{"src":"a","tgt":"w","data":{"value":"x","label":"x","type":"string"}},"e2":{"src":"w","tgt":"b","data":{"value":"x","label":"x","type":"string"}}}} \ No newline at end of file diff --git a/core/theories/string_ve/test/graphs/v1_format.graph b/core/theories/string_ve/test/graphs/v1_format.graph deleted 
file mode 100644 index 4561f3b4..00000000 --- a/core/theories/string_ve/test/graphs/v1_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"value":"foo","type":"string"}},"b":{"data":{"value":"bar","type":"string"}}},"dir_edges":{"e1":{"src":"a","tgt":"w","data":{"value":"x","type":"string"}},"e2":{"src":"w","tgt":"b","data":{"value":"x","type":"string"}}}} \ No newline at end of file diff --git a/core/theories/string_ve/test/graphs/v2_format.graph b/core/theories/string_ve/test/graphs/v2_format.graph deleted file mode 100644 index 7e9048e5..00000000 --- a/core/theories/string_ve/test/graphs/v2_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"value":"foo","label":"foo","type":"string"}},"b":{"data":{"value":"bar","label":"bar","type":"string"}}},"dir_edges":{"e1":{"src":"a","tgt":"w","data":{"value":"x","label":"x","type":"string"}},"e2":{"src":"w","tgt":"b","data":{"value":"x","label":"x","type":"string"}}}} \ No newline at end of file diff --git a/core/theories/string_ve/test/test.ML b/core/theories/string_ve/test/test.ML deleted file mode 100644 index 9ce3e3ce..00000000 --- a/core/theories/string_ve/test/test.ML +++ /dev/null @@ -1,35 +0,0 @@ -local - (*structure IO = StringVE_IO*) - open StringVE_Theory - fun assert_g_eq msg exp actual = - if Graph.exact_eq exp actual then () - else (writeln "Expected:"; - Graph.print exp; - writeln "Actual:"; - Graph.print actual; - raise ERROR (msg^": graphs differed")) -in - val _ = Testing.test (theory_name^" theory save/restore") (fn () => let - val g = Graph.empty - |> Graph.add_named_vertex (V.mk "a") (Graph.NVert "foo") - |> Graph.add_named_vertex (V.mk "b") (Graph.NVert "bar") - |> Graph.add_named_vertex (V.mk "w") Graph.WVert - |> Graph.add_named_edge (E.mk "e1") (Directed,"x") (V.mk "a") (V.mk "w") - |> Graph.add_named_edge (E.mk "e2") (Directed,"x") (V.mk "w") (V.mk "b") - val g' = GraphJSON.input (GraphJSON.output g) - val _ = assert_g_eq "input (output g) = g" g g' - val _ = "graphs/v1_format.graph" - |> File_Io.read_json - |> GraphJSON.input - |> assert_g_eq "input v1_format.graph = g" g - val _ = "graphs/v2_format.graph" - |> File_Io.read_json - |> GraphJSON.input - |> assert_g_eq "input v2_format.graph = g" g - (* if the file format changes, do - * cp graphs/current_format.graph graphs/v_format.graph - * and add a new read test (see above) *) - val _ = File_Io.write_json "graphs/current_format.graph" (GraphJSON.output g) - in () end) () - val _ = Testing.assert_no_failed_tests() -end diff --git a/core/theories/string_ve/theory.ML b/core/theories/string_ve/theory.ML deleted file mode 100644 index bd55ebfe..00000000 --- a/core/theories/string_ve/theory.ML +++ /dev/null @@ -1,13 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* String Vertex/Edge Theory: both verts and edges have string data *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -structure StringVE_Theory = GraphicalTheory( - structure Data = StringVE_Data - structure DataIO = StringVE_ComponentDataIO -) - -(* OLD I/O structure *) -(*structure StringVE_IO = GraphicalTheoryIO( - structure Theory = StringVE_Theory - structure GraphComponentDataIO = StringVE_ComponentDataIO -)*) diff --git a/core/theories/substr_linrat/data.ML b/core/theories/substr_linrat/data.ML deleted file mode 100644 index 5086dd39..00000000 --- a/core/theories/substr_linrat/data.ML +++ /dev/null @@ -1,40 +0,0 @@ -structure Substr_Linrat_Data 
= -struct - val pretty_theory_name = Pretty.str "substr_linrat" - type psubst = LinratAngleMatcher.psubst - type subst = LinratAngleMatcher.subst - - type nvdata = string * LinratAngleExpr.T - val default_nvdata = ("",LinratAngleExpr.zero) - - fun default_nvdata_of_typestring "Default" = default_nvdata - | default_nvdata_of_typestring s = raise unknown_typestring_exp s - - fun nvdata_eq ((s1,a1),(s2,a2)) = s1 = s2 andalso LinratAngleExpr.eq a1 a2 - - fun pretty_nvdata (s,a) = - Pretty.block [Pretty.str s, Pretty.str "(", LinratAngleExpr.pretty a, Pretty.str ")"] - - fun match_nvdata ((s1,a1),(s2,a2)) m = - if String.isPrefix s1 s2 - then LinratAngleMatcher.match (a1,a2) m - else NONE - - fun subst_in_nvdata u (s,a) = - let val (sub',a') = LinratAngleMatcher.subst_in_expr u a - in (sub', (s,a')) end - - open EmptyEdgeData - - local - fun pull_names (nvtab,_) = X.NSet.empty - |> VTab.fold ( - fn (_,(_,a)) => X.NSet.union_merge (LinratAngleExpr.free_vars a) - ) nvtab - in - fun init_psubst_from_data p_data t_data = - LinratAngleMatcher.init_psubst_from_names (pull_names p_data, pull_names t_data) - end - - fun solve_psubst ps = Seq.single (LinratAngleMatcher.solve_psubst ps) -end diff --git a/core/theories/substr_linrat/io.ML b/core/theories/substr_linrat/io.ML deleted file mode 100644 index 85c08f3d..00000000 --- a/core/theories/substr_linrat/io.ML +++ /dev/null @@ -1,59 +0,0 @@ -structure Substr_Linrat_ComponentDataIO : GRAPH_COMPONENT_DATA_IO -= struct - type nvdata = Substr_Linrat_Data.nvdata - type edata = Substr_Linrat_Data.edata - - structure IVDataInputJSON : INPUT_JSON = - struct - open JsonInputUtils - type data = nvdata - structure L = InputLinratJSON - val get_angle = L.input oo (get_easy Json.Null) - fun decode_data obj = - let - val str = get_string_easy "" obj "string" - val angle = get_angle obj "angle" - in - (str,angle) - end - fun input (Json.Object obj) = - (case Json.lookup obj "value" - of SOME (Json.Object obj') => - (decode_data obj' - handle bad_input_exp (m,l) => - raise bad_input_exp (m,prepend_prop "value" l)) - | SOME _ => raise bad_input_exp ("Expected object","value") - | NONE => decode_data obj) - | input _ = raise bad_input_exp ("Expected string","") - end - structure IVDataOutputJSON : OUTPUT_JSON = - struct - open JsonOutputUtils - structure L = OutputLinratJSON - type data = nvdata - fun output (str,angle) = let - val value = - Json.Object ( - Json.empty_obj |> Json.update ("string",Json.String str) - |> Json.update ("angle",L.output angle) - ) - in - Json.Object ( - Json.empty_obj |> Json.update ("type",Json.String "string_linrat") - |> Json.update ("label",Json.String str) - |> Json.update ("value",value) - ) - end - end - structure EDataInputJSON = InputUnitJSON - structure EDataOutputJSON = OutputUnitJSON - - structure DotStyle : DOT_STYLE = - struct - type nvdata = nvdata - fun style_for_ivertex_data _ = - "[style=filled,fillcolor=green,fontcolor=black,shape=circle]" - end -end - - diff --git a/core/theories/substr_linrat/test/graphs/current_format.graph b/core/theories/substr_linrat/test/graphs/current_format.graph deleted file mode 100644 index 7c4d8671..00000000 --- a/core/theories/substr_linrat/test/graphs/current_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"type":"string_linrat","label":"foo","value":{"string":"foo","angle":{"pretty":"0"}}}},"b":{"data":{"type":"string_linrat","label":"bar","value":{"string":"bar","angle":{"vars":{"a":{"denom":1,"num":1},"b":{"denom":1,"num":1}},"pretty":"a + 
b"}}}}},"dir_edges":{"e1":{"src":"a","tgt":"w"},"e2":{"src":"w","tgt":"b"}}} \ No newline at end of file diff --git a/core/theories/substr_linrat/test/graphs/empty_graph.graph b/core/theories/substr_linrat/test/graphs/empty_graph.graph deleted file mode 100644 index 9e26dfee..00000000 --- a/core/theories/substr_linrat/test/graphs/empty_graph.graph +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/core/theories/substr_linrat/test/graphs/g3.graph b/core/theories/substr_linrat/test/graphs/g3.graph deleted file mode 100644 index c529a660..00000000 --- a/core/theories/substr_linrat/test/graphs/g3.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"Va":{}},"node_vertices":{"Vb":{"data":{"string":"blah.foo.","angle":{"pretty":"0"}}},"Vc":{"data":{"string":"blah.bar.","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"Vc","tgt":"Vc"}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vb"},"Eb":{"src":"Vb","tgt":"Vc"}}} \ No newline at end of file diff --git a/core/theories/substr_linrat/test/graphs/v1_format.graph b/core/theories/substr_linrat/test/graphs/v1_format.graph deleted file mode 100644 index 18f519f4..00000000 --- a/core/theories/substr_linrat/test/graphs/v1_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"string":"foo","angle":{"pretty":"0"}}},"b":{"data":{"string":"bar","angle":{"vars":{"a":{"denom":1,"num":1},"b":{"denom":1,"num":1}},"pretty":"a + b"}}}},"dir_edges":{"e1":{"src":"a","tgt":"w"},"e2":{"src":"w","tgt":"b"}}} \ No newline at end of file diff --git a/core/theories/substr_linrat/test/graphs/v2_format.graph b/core/theories/substr_linrat/test/graphs/v2_format.graph deleted file mode 100644 index 7c4d8671..00000000 --- a/core/theories/substr_linrat/test/graphs/v2_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"type":"string_linrat","label":"foo","value":{"string":"foo","angle":{"pretty":"0"}}}},"b":{"data":{"type":"string_linrat","label":"bar","value":{"string":"bar","angle":{"vars":{"a":{"denom":1,"num":1},"b":{"denom":1,"num":1}},"pretty":"a + b"}}}}},"dir_edges":{"e1":{"src":"a","tgt":"w"},"e2":{"src":"w","tgt":"b"}}} \ No newline at end of file diff --git a/core/theories/substr_linrat/test/rules/r1.graph b/core/theories/substr_linrat/test/rules/r1.graph deleted file mode 100644 index 78a1ff61..00000000 --- a/core/theories/substr_linrat/test/rules/r1.graph +++ /dev/null @@ -1 +0,0 @@ -{"lhs":{"wire_vertices":{"Va":{}},"node_vertices":{"Vb":{"data":{"string":"blah.foo.","angle":{"pretty":"0"}}},"Vc":{"data":{"string":"blah.bar.","angle":{"pretty":"0"}}}},"undir_edges":{"Ec":{"src":"Vc","tgt":"Vc"}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vb"},"Eb":{"src":"Vb","tgt":"Vc"}}},"rhs":{"wire_vertices":{"Va":{}},"node_vertices":{"Vd":{"data":{"string":"blah.foo.","angle":{"pretty":"0"}}},"Ve":{"data":{"string":"blah.bar.","angle":{"pretty":"0"}}}},"dir_edges":{"Ed":{"src":"Va","tgt":"Vd"},"Ee":{"src":"Vd","tgt":"Ve"}}}} \ No newline at end of file diff --git a/core/theories/substr_linrat/test/test.ML b/core/theories/substr_linrat/test/test.ML deleted file mode 100644 index 9c3f701b..00000000 --- a/core/theories/substr_linrat/test/test.ML +++ /dev/null @@ -1,37 +0,0 @@ -local - structure IO = Substr_Linrat_GraphicalTheoryIO - open IO.Theory - fun assert_g_eq msg exp actual = - if Graph.exact_eq exp actual then () - else (writeln "Expected:"; - Graph.print exp; - writeln "Actual:"; - Graph.print actual; - raise ERROR (msg^": graphs differed")) -in - val _ = Testing.test 
(theory_name^" theory save/restore") (fn () => let - val g = Graph.empty - |> Graph.add_named_vertex (V.mk "a") - (Graph.NVert ("foo",LinratAngleExpr.zero)) - |> Graph.add_named_vertex (V.mk "b") - (Graph.NVert ("bar",LinratAngleExpr.parse "a + b")) - |> Graph.add_named_vertex (V.mk "w") Graph.WVert - |> Graph.add_named_edge (E.mk "e1") (Directed,()) (V.mk "a") (V.mk "w") - |> Graph.add_named_edge (E.mk "e2") (Directed,()) (V.mk "w") (V.mk "b") - val g' = IO.InputGraphJSON.input (IO.OutputGraphJSON.output g) - val _ = assert_g_eq "input (output g) = g" g g' - val _ = "graphs/v1_format.graph" - |> File_Io.read_json - |> IO.InputGraphJSON.input - |> assert_g_eq "input v1_format.graph = g" g - val _ = "graphs/v2_format.graph" - |> File_Io.read_json - |> IO.InputGraphJSON.input - |> assert_g_eq "input v2_format.graph = g" g - (* if the file format changes, do - * cp graphs/current_format.graph graphs/v_format.graph - * and add a new read test (see above) *) - val _ = File_Io.write_json "graphs/current_format.graph" (IO.OutputGraphJSON.output g) - in () end) () - val _ = Testing.assert_no_failed_tests() -end diff --git a/core/theories/substr_linrat/theory.ML b/core/theories/substr_linrat/theory.ML deleted file mode 100644 index f759d445..00000000 --- a/core/theories/substr_linrat/theory.ML +++ /dev/null @@ -1,66 +0,0 @@ - - -structure Substr_Linrat_Theory = GraphicalTheory( - structure Data = Substr_Linrat_Data - structure DataIO = Substr_Linrat_ComponentDataIO) - - -(* Use this for convenience if you don't need annotations *) -structure Substr_Linrat_GraphicalTheoryIO = GraphicalTheoryIO( - structure Theory = Substr_Linrat_Theory - structure GraphComponentDataIO = Substr_Linrat_ComponentDataIO -) - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-==-=-=-=-=-=-=-=-=-=- *) -(* terms used within the RG theory. *) -(*structure RGHilbTerm = TensorTermFun( - -structure Graph = RG_Theory.Graph -fun pretty_vertex fmt graph vert = let - val (v, (ie,oe)) = Graph.get_vertex graph vert - val (ic,oc) = (E.NSet.cardinality ie, - E.NSet.cardinality oe) - val nums = [Pretty.str (Int.toString ic), - Pretty.str ", ", - Pretty.str (Int.toString oc)] -in Pretty.block - (case v of Graph.OVData.NVert iv => - (case iv - of RG_InternVData.Xnd e => - [Pretty.str "xsp[", LinratAngleExpr.pretty_math fmt e, Pretty.str ", "] @ - nums @ [Pretty.str "]"] - | RG_InternVData.Znd e => - [Pretty.str "zsp[", LinratAngleExpr.pretty_math fmt e, Pretty.str ", "] @ - nums @ [Pretty.str "]"] - | RG_InternVData.Hnd => [Pretty.str "h"]) - | _ => [Pretty.str "id2[1]"]) -end - -) -*) - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-==-=-=-=-=-=-=-=-=-=- *) -(* terms used within the SUBSTR_LINRAT theory. 
*) -(* -structure SUBSTR_LINRAT_HilbTerm = TensorTermFun( - -structure Graph = SUBSTR_LINRAT_Theory.Graph - -fun pretty_vertex fmt graph vert = let - val (v, (ie,oe)) = Graph.get_vertex graph vert - val (ic,oc) = (E.NSet.cardinality ie, - E.NSet.cardinality oe) - val nums = [Pretty.str (Int.toString ic), - Pretty.str ", ", - Pretty.str (Int.toString oc)] -in Pretty.block - (case v of Graph.OVData.NVert (SUBSTR_LINRAT_InternVData.S_E (s, e)) => - [Pretty.str s, Pretty.str "[", LinratAngleExpr.pretty_math fmt e, Pretty.str ","] @ - nums @ [Pretty.str "]"] - | _ => [Pretty.str "id2[1]"]) -end - - -) -*) - diff --git a/core/theories/substrings/data.ML b/core/theories/substrings/data.ML deleted file mode 100644 index c7948ef6..00000000 --- a/core/theories/substrings/data.ML +++ /dev/null @@ -1,27 +0,0 @@ -(* strings on vertices; prefix-matching *) -structure Substrings_Data = -struct - val pretty_theory_name = Pretty.str "substrings" - type psubst = unit - type subst = psubst - - type nvdata = string - val default_nvdata = "undefined" - - fun default_nvdata_of_typestring "String" = default_nvdata - | default_nvdata_of_typestring s = raise unknown_typestring_exp s - - fun nvdata_eq (s1,s2) = s1 = s2 - - val pretty_nvdata = Pretty.str - - fun match_nvdata (s1,s2) () = - if String.isPrefix s1 s2 then SOME () else NONE - - fun subst_in_nvdata sub d = (sub, d) - - open EmptyEdgeData - - fun init_psubst_from_data _ _ = () - val solve_psubst = Seq.single -end diff --git a/core/theories/substrings/io.ML b/core/theories/substrings/io.ML deleted file mode 100644 index acd169c5..00000000 --- a/core/theories/substrings/io.ML +++ /dev/null @@ -1,32 +0,0 @@ -structure Substrings_ComponentDataIO : GRAPH_COMPONENT_DATA_IO -= struct - type nvdata = Substrings_Data.nvdata - type edata = Substrings_Data.edata - - structure IVDataInputJSON : INPUT_JSON = - struct - open JsonInputUtils - type data = nvdata - fun input (Json.Object obj) = get_string_easy "" obj "value" - | input (Json.String str) = str - | input _ = raise bad_input_exp ("Expected string","type") - end - structure IVDataOutputJSON : OUTPUT_JSON = - struct - open JsonOutputUtils - type data = nvdata - fun output s = Json.mk_record [("type", "string"), ("label",s), ("value", s)] - end - structure EDataInputJSON = InputUnitJSON - structure EDataOutputJSON = OutputUnitJSON - - structure DotStyle : DOT_STYLE = - struct - type nvdata = nvdata - fun style_for_ivertex_data _ = - "[style=filled,fillcolor=white,fontcolor=black,shape=circle]" - end -end - - - diff --git a/core/theories/substrings/test/graphs/current_format.graph b/core/theories/substrings/test/graphs/current_format.graph deleted file mode 100644 index 643e26dd..00000000 --- a/core/theories/substrings/test/graphs/current_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"value":"foo","label":"foo","type":"string"}},"b":{"data":{"value":"bar","label":"bar","type":"string"}}},"dir_edges":{"e1":{"src":"a","tgt":"w"},"e2":{"src":"w","tgt":"b"}}} \ No newline at end of file diff --git a/core/theories/substrings/test/graphs/empty_graph.graph b/core/theories/substrings/test/graphs/empty_graph.graph deleted file mode 100644 index 9e26dfee..00000000 --- a/core/theories/substrings/test/graphs/empty_graph.graph +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/core/theories/substrings/test/graphs/g3.graph b/core/theories/substrings/test/graphs/g3.graph deleted file mode 100644 index eba23c93..00000000 --- 
a/core/theories/substrings/test/graphs/g3.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"Va":{}},"node_vertices":{"Vb":{"data":{"value":"blah.foo.","label":"blah.foo.","type":"string"}},"Vc":{"data":{"value":"blah.bar.","label":"blah.bar.","type":"string"}}},"undir_edges":{"Ec":{"src":"Vc","tgt":"Vc"}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vb"},"Eb":{"src":"Vb","tgt":"Vc"}}} \ No newline at end of file diff --git a/core/theories/substrings/test/graphs/v1_format.graph b/core/theories/substrings/test/graphs/v1_format.graph deleted file mode 100644 index cbc33cbe..00000000 --- a/core/theories/substrings/test/graphs/v1_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":"foo"},"b":{"data":"bar"}},"dir_edges":{"e1":{"src":"a","tgt":"w"},"e2":{"src":"w","tgt":"b"}}} \ No newline at end of file diff --git a/core/theories/substrings/test/graphs/v2_format.graph b/core/theories/substrings/test/graphs/v2_format.graph deleted file mode 100644 index 643e26dd..00000000 --- a/core/theories/substrings/test/graphs/v2_format.graph +++ /dev/null @@ -1 +0,0 @@ -{"wire_vertices":{"w":{}},"node_vertices":{"a":{"data":{"value":"foo","label":"foo","type":"string"}},"b":{"data":{"value":"bar","label":"bar","type":"string"}}},"dir_edges":{"e1":{"src":"a","tgt":"w"},"e2":{"src":"w","tgt":"b"}}} \ No newline at end of file diff --git a/core/theories/substrings/test/rules/r1.graph b/core/theories/substrings/test/rules/r1.graph deleted file mode 100644 index 1d40e9e0..00000000 --- a/core/theories/substrings/test/rules/r1.graph +++ /dev/null @@ -1 +0,0 @@ -{"lhs":{"wire_vertices":{"Va":{}},"node_vertices":{"Vb":{"data":{"value":"blah.foo.","label":"blah.foo.","type":"string"}},"Vc":{"data":{"value":"blah.bar.","label":"blah.bar.","type":"string"}}},"undir_edges":{"Ec":{"src":"Vc","tgt":"Vc"}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vb"},"Eb":{"src":"Vb","tgt":"Vc"}}},"rhs":{"wire_vertices":{"Va":{}},"node_vertices":{"Vb":{"data":{"value":"blah.foo.","label":"blah.foo.","type":"string"}},"Vc":{"data":{"value":"blah.bar.","label":"blah.bar.","type":"string"}}},"dir_edges":{"Ea":{"src":"Va","tgt":"Vb"},"Eb":{"src":"Vb","tgt":"Vc"}}}} \ No newline at end of file diff --git a/core/theories/substrings/test/test.ML b/core/theories/substrings/test/test.ML deleted file mode 100644 index 3e6e9ed3..00000000 --- a/core/theories/substrings/test/test.ML +++ /dev/null @@ -1,35 +0,0 @@ -local - structure IO = Substrings_GraphicalTheoryIO - open IO.Theory - fun assert_g_eq msg exp actual = - if Graph.exact_eq exp actual then () - else (writeln "Expected:"; - Graph.print exp; - writeln "Actual:"; - Graph.print actual; - raise ERROR (msg^": graphs differed")) -in - val _ = Testing.test (theory_name^" theory save/restore") (fn () => let - val g = Graph.empty - |> Graph.add_named_vertex (V.mk "a") (Graph.NVert "foo") - |> Graph.add_named_vertex (V.mk "b") (Graph.NVert "bar") - |> Graph.add_named_vertex (V.mk "w") Graph.WVert - |> Graph.add_named_edge (E.mk "e1") (Directed,()) (V.mk "a") (V.mk "w") - |> Graph.add_named_edge (E.mk "e2") (Directed,()) (V.mk "w") (V.mk "b") - val g' = IO.InputGraphJSON.input (IO.OutputGraphJSON.output g) - val _ = assert_g_eq "input (output g) = g" g g' - val _ = "graphs/v1_format.graph" - |> File_Io.read_json - |> IO.InputGraphJSON.input - |> assert_g_eq "input v1_format.graph = g" g - val _ = "graphs/v2_format.graph" - |> File_Io.read_json - |> IO.InputGraphJSON.input - |> assert_g_eq "input v2_format.graph = g" g - (* if the file format changes, do - * cp 
graphs/current_format.graph graphs/v_format.graph - * and add a new read test (see above) *) - val _ = File_Io.write_json "graphs/current_format.graph" (IO.OutputGraphJSON.output g) - in () end) () - val _ = Testing.assert_no_failed_tests() -end diff --git a/core/theories/substrings/theory.ML b/core/theories/substrings/theory.ML deleted file mode 100644 index 7edda6e1..00000000 --- a/core/theories/substrings/theory.ML +++ /dev/null @@ -1,12 +0,0 @@ - - -structure Substrings_Theory = GraphicalTheory( - structure Data = Substrings_Data - structure DataIO = Substrings_ComponentDataIO) - - -(* Use this for convenience if you don't need annotations *) -structure Substrings_GraphicalTheoryIO = GraphicalTheoryIO( - structure Theory = Substrings_Theory - structure GraphComponentDataIO = Substrings_ComponentDataIO -) diff --git a/core/theories/test/ruleset-test.ML b/core/theories/test/ruleset-test.ML deleted file mode 100644 index 51425d00..00000000 --- a/core/theories/test/ruleset-test.ML +++ /dev/null @@ -1,3 +0,0 @@ -(* test construction of rule and ruleset *) -structure Test_BG_Ruleset = BangGraphRuleset(Test_BG_Rule); - diff --git a/core/theories/unused/theory.ML b/core/theories/unused/theory.ML deleted file mode 100644 index c9603fd5..00000000 --- a/core/theories/unused/theory.ML +++ /dev/null @@ -1,202 +0,0 @@ -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=--=-=-=-==-=-=-=-=-=-=-=-=-=- *) -(* Theory with Fixed Logical Kernel style, UNFINISHED *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -signature THEORY -= sig - -structure Rule : RULE -structure Rn : NAME where type name = string - -type T - -val empty : T - -(* adds an assumption/axiom *) -val add_assm : Rn.name -> Rule.T -> T -> T - -exception no_such_result_exp of Rn.name * T; - -val get_assms : T -> Rule.T Rn.NTab.T -val get_assm_list : T -> (Rn.name * Rule.T) list -val get_assm : T -> Rn.name -> Rule.T - -val trivial : Rn.name -> Rule.RGGraph.T -> T -> (Rn.name * T) -val symmetric : Rn.name -> T -> (Rn.name * T) -val subst : Rn.name -> Rn.name -> T -> (Rn.name * T) Seq.seq - -end; - - -structure DB_Theory -= struct - -(* rules *) -structure Rule : RULE = Rule - -(* names of rules *) -structure Rn = StrName; - -(* dependencies: Dom = result name used in Cod result *) -structure Dep = BinRelFun(structure Dom = Rn and Cod = Rn); - -(* proofs for results/derived rules *) -datatype proof = - Proof of (Rn.name (* applied the rule named this *) - * Rn.name option (* resulting in this subgoal *) - * bool (* true = applied left to right, false = right to left *) - * Match.T (* as instantiated here *) - ) - | Refl; - -(* a derived result *) -datatype result = Result of Rule.T (* final resulting rule/conjecture *) - * proof option; (* proof *) - -(* a theory *) -datatype T = Theory of { assms: Rn.NSet.T, (* names of axioms of the theory *) - topns: Rn.NSet.T, (* names of interesting results *) - rs : result Rn.NTab.T, (* all results in the theory *) - deps : Dep.T (* cached dependencies: - dom = result name; cod = names of things that use it *) - }; - -(* basic proof functions *) - -(* basic result functions *) - - -(* basic theory functions *) -fun get_assms (Theory rep) = #assms rep; -fun update_assms (Theory rep) = Theory {assms = f (#assms rep)}; -val set_assms = update_assms o K; - -fun get_topns (Theory rep) = #topns rep; -fun update_topns (Theory rep) = Theory {topns = f (#topns rep)}; -val set_topns = update_topns o K; - -fun get_rs (Theory rep) = #rs rep; -fun update_rs (Theory rep) = Theory {rs = f
(#rs rep)}; -val set_rs = update_rs o K; - -fun get_deps (Theory rep) = #deps rep; -fun update_deps (Theory rep) = Theory {deps = f (#deps rep)}; -val set_deps = update_deps o K; - -(* implicit: thry n *) -val get_r_deps = Dep.get o get_deps; -(* implicit: n nset thry *) -val add_r_deps = update_deps oo Dep.add_to_dom; -(* implicit: n n2 thry, where n2 uses n1 *) -val add_r_dep = update_deps oo Dep.add1_to_dom; - -(* implicit: n f *) -val update_result = update_rs oo Rn.NTab.map_entry; -val set_result = update_result o K; - -(* getting all assumptions *) -val get_assm = RnTab.get o get_assms; -val get_assms_list = RnTab.list_of o get_assms; - -(* new empty theory *) -val empty = Theory { assms = Rn.NSet.empty, - topns = Rn.NSet.empty, - rs = Rn.NTab.empty, - deps = Rn.NTab.empty - }; - -(* does not note the names result *) -fun add_named_r nr thry = - let val (n2,rs2) = Rn.NTab.add nr (get_rs thry) - in (n2, set_rs rs2 thry) end; - -(* assume a new result *) -(* implicit theory *) -fun add_assm n r = - update_assms (Rn.NSet.ins_fresh n) - o update_rs (Rn.NTab.ins (n,r)); - -(* rename a result within a theory *) -fun rename_in_proof (from_n,to_n) (Proof (used_n,optto_n,lr,m)) = - Proof (if Rn.name_eq (from_n,used_n) - then to_n else used_n, - case optto_n - of NONE => NONE - | SOME n => if Rn.name_eq (n,optto_n) - then to_n else optto_n, - lr, m) - | rename_in_proof _ Refl = Refl; - -fun rename_in_result rn (Result (r,p)) = Result (r, rename_in_proof rn p); - -fun rename (rn as (n1, n2)) thry = - thry |> (NSet.fold - (update_result (rename_in_result rn)) - (get_r_deps n1 thry)) - |> update_deps (Dep.rename_dom n1 n2 o Dep.rename_cod n1 n2) - |> update_topns (Rn.NSet.rename n1 n2) - |> update_assms (Rn.NSet.rename n1 n2); - -(* for tagging top-level results you might want to refer to later *) -(* implicit: n thry *) -val note_result = update_topns o Rn.NSet.add; -val unnote_result = update_topns o Rn.NSet.del; - -(* Make a new trivial result from a graph. *) -fun trivial n g thry = add_named_r (n,(Result(Rule.mk (g,g), SOME Refl))) thry; -(* conjecture a new result, is unproved *) -fun conjecture n r thry = add_named_r (r,Result(r,NONE)) thry; - -(* implicit n thry *) -(* ? maybe we should raise an exception if result is currently closed *) -val openify_result = update_result (fn Result (r,p) => Result (r,NONE)); - -(* delete result and all applications of result *) -fun delete n thry = - thry |> update_assms (NSet.del n) - |> update_topns (NSet.del n) - |> update_rs (NTab.delete n) - (* deps of n (depn) use n; if n is a subgoal of depn, - depn becomes an open subgoal again. if n is used as a rule to rewrite - depn, then depn also becomes open. So, just openify depn.
*) - |> NSet.fold openify_result (get_r_deps thry n) - |> update_deps (Dep.del_dom n); - - -(* converts between "A = B" and "B = A" *) -fun symmetric n thry = - let val Result (r,p) = get_result thry n - val rhs = Rule.get_rhs r - val m = Match.mk_id (get_boundary rhs) (* FIXME: this is not leaf names *) - (RGGraph.get_vnames rhs) - (RGGraph.get_enames rhs); - in case p of - NONE => let val (n2,thry2) = conjecture n (Rule.symmetric r) thry; - val res = Result (r, SOME (n2,NONE,true,m)) - in (n2, thry2 |> add_r_dep n n2 - |> set_result n res) - end - | SOME prf => - let val (n2,thry2) = - add_named_r n (Result (Rule.symmetric r, SOME (n,NONE,true,m))) thry - in thry |> add_r_dep n2 n end - end - - -exception no_such_result_exp of Rn.name * T; - -(* Subst using first name, from left to right, in rhs of second name *) -(* val subst : Rn.name -> Rn.name -> T -> (Rn.name * T) Seq.seq *) -(* -fun subst rulen n thry = - let - val Result (rule,_) = get_result thry rulen - val Result (r,prf) = get_result thry n - val rhs = Rule.get_rhs - in - case Rule.rule_matches_within - - end; -*) - -end; - diff --git a/core/theories/unused/theory_dir.ML b/core/theories/unused/theory_dir.ML deleted file mode 100644 index 9832a534..00000000 --- a/core/theories/unused/theory_dir.ML +++ /dev/null @@ -1,22 +0,0 @@ -structure TheoryDir = -struct - -fun find_rewrites dir = let - val dstream = OS.FileSys.openDir (dir ^ "/rewrites") - fun ls () = case OS.FileSys.readDir dstream - of SOME f => if OS.FileSys.isDir (dir ^ "/rewrites/" ^ f) - then f :: ls () - else ls () - | NONE => [] -in ls () -end - -fun load_rule dir name = let - val lhs = RGGraphLoaderSaver.load_graph (dir ^ "/rewrites/" ^ name ^ "/lhs.graph") - val rhs = RGGraphLoaderSaver.load_graph (dir ^ "/rewrites/" ^ name ^ "/rhs.graph") -in (name, Rule.mk (lhs,rhs)) -end - -fun load_rules dir = map (load_rule dir) (find_rewrites dir) - -end diff --git a/core/use_thy.ML b/core/use_thy.ML deleted file mode 100644 index c0a1477d..00000000 --- a/core/use_thy.ML +++ /dev/null @@ -1,37 +0,0 @@ -local - - val lexicon = Scan.make_lexicon (map Symbol.explode - ["theory", "begin", "end", "imports", "ML_file", ";"]); - - fun kw k s = s |> Scan.one (Token.keyword_with (fn k' => k' = k)); - fun name s = s |> Scan.one (Token.is_name); - - fun header s = s |> kw "theory" -- name -- kw "imports" -- Scan.repeat name -- kw "begin"; - fun useline s = s |> kw "ML_file" |-- name >> - (fn t => (use (Token.content_of t); t)); - - fun thy_file s = s |> header |-- Scan.repeat (useline || kw ";") --| kw "end"; - - fun read_thy pos str = - let val res = - str - |> Source.of_string - |> Symbol.source - |> Token.source {do_recover = NONE} (K (lexicon,Scan.empty_lexicon)) pos - |> Token.source_proper - |> Source.source Token.stopper (Scan.single (Scan.error thy_file)) NONE - |> Source.get_single; - in - (case res of - SOME (h, _) => h - | NONE => error ("Unexpected end of input" ^ Position.here pos)) - end; - -in - fun use_thy file = let - val thy_str = File.read (Path.explode file) - val _ = read_thy (Position.file file) thy_str - in () - end -end - diff --git a/cosy/INSTALL b/cosy/INSTALL deleted file mode 100644 index 51e2d12b..00000000 --- a/cosy/INSTALL +++ /dev/null @@ -1,24 +0,0 @@ -================================ -== INSTALLATION FOR FIREFOX == -================================ - -Ensure that the quantomatic core heap (../core/heaps/quanto.polyml-heap) is -built, then build the polychrome heap with: - -# make - -Once the heap is built, replace polychrome's default heap with the 
symlink to -the QuantoCosy heap. - -# cd $FIREFOX_SUPPORT_DIR/Profiles/XXXXXXX.default/extensions/polychrome@ed.ac.uk/poly/bin -# mv polychrome.polyml-heap polychrome.polyml-heap-bak -# ln -s $QUANTO_DIR/cosy/chromeui/polychrome.polyml-heap polychrome.polyml-heap - - -Once that's done, open chromeui/cosy.html in Firefox and synthesise away. All -interaction is from the ML prompt. Check cosy_ghzw.sml for useful functions -and guidance for implementing other theories. - -Note, you can also run QuantoCosy from the terminal, using: - -# poly --use console.ML diff --git a/cosy/Makefile b/cosy/Makefile deleted file mode 100644 index d78a2c49..00000000 --- a/cosy/Makefile +++ /dev/null @@ -1,7 +0,0 @@ - -polychrome.: - echo 'use "make_polychrome.ML";' | poly -H 200 - -clean: - rm -f chromeui/polychrome.polyml-heap - find . -type d -name .polysave | xargs rm -rf diff --git a/cosy/ROOT.ML b/cosy/ROOT.ML deleted file mode 100644 index 737d6e5e..00000000 --- a/cosy/ROOT.ML +++ /dev/null @@ -1,32 +0,0 @@ -(*val cosyRootDir = OS.FileSys.getDir(); -(OS.FileSys.chDir (cosyRootDir ^ "/../core/"); -use "ROOT.ML"; -OS.FileSys.chDir cosyRootDir); - -(* this may get clobbered by reloading the heap *) -val cosyRootDir = OS.FileSys.getDir(); - -print_depth 3; -PolyML.Project.depend_on_files [cosyRootDir^"/../core/ROOT.ML", cosyRootDir^"/ROOT.ML"];*) - -val cosyRootDir = OS.FileSys.getDir(); - -use "default_gens.ML"; -use "metric.ML"; - -use "graph_equiv.ML"; -use "eq_class_tab.ML"; -use "default_rws.ML"; -use "fast_graph_enum.ML"; -use "tensor_equiv.ML"; - -use "theories.ML"; - - - - -(*PolyML.Project.make "enumerate.ML"; -PolyML.Project.make "synthesize.ML"; -PolyML.Project.make "ruleset_builder.ML"; - -PolyML.Project.make "theories.ML"; *) diff --git a/cosy/chromeui/cosy.css b/cosy/chromeui/cosy.css deleted file mode 100644 index 02a03ff1..00000000 --- a/cosy/chromeui/cosy.css +++ /dev/null @@ -1,68 +0,0 @@ -body { - font-size: 10pt; - font-family: "Trebuchet MS", - Tahoma, - "Nimbus Sans L", - sans-serif; -} - -.container { - background-color: #fff; - margin: 0.5em; - border: 1px solid #bbb; -} - -.container .container { background-color: #f5f5f5; } - -.container .container .container { background-color: #eee; } - -.container .title { - background-color: #ccc; - border-bottom: 1px solid #bbb; - padding: 0.3em; -} - -.container .title a { - color: #000; - text-decoration: none; -} - -.container .title { font-size: 1.1em; } -.container .container .title { font-size: 0.9em; } -.container .container .container .title { font-size: 0.7em; } - -.container .content { - padding: 0.3em; -} - -.container .footer { - height: 0.3em; - margin-top: 0.2em; - background-color: #bbb; - border-top: 1px solid #bbb; -} - -.graph, .codebox, .rule { - border: 1px solid #bbb; - margin: 0.3em; - padding: 0.3em; - background-color: white; -} - -.rule { - background-color: #eee; -} - -.rule_label { - text-align: center; - font-size: 0.7em; -} - -.rule_arrow { - float: left; - margin: 2.5em 1em; -} - -.graph, .rule { float: left; } - -h3 { margin-bottom: 0em; margin-top: 1em; } \ No newline at end of file diff --git a/cosy/chromeui/cosy.html b/cosy/chromeui/cosy.html deleted file mode 100644 index 99723260..00000000 --- a/cosy/chromeui/cosy.html +++ /dev/null @@ -1,30 +0,0 @@ - - - - QuantoCoSy - - - - - - - - - - - - - - - - -
    - - - \ No newline at end of file diff --git a/cosy/chromeui/cosy.js b/cosy/chromeui/cosy.js deleted file mode 100644 index 057c320f..00000000 --- a/cosy/chromeui/cosy.js +++ /dev/null @@ -1,60 +0,0 @@ -function addGraph(tag, svg_text) { - var svg = $('
    ' + svg_text + '
    '); - //svg.css({'display':'none'}); - svg.find('svg').attr({'width':'50pt','height':'50pt'}); - $(tag).append(svg); - //svg.fadeIn(); - - //$('html,body').animate({scrollTop: svg.offset().top}, 'slow'); - - return svg; -} - -function addRule(tag, name, lhs_svg, rhs_svg) { - var rule = $('
    ' + name + '
    ' + - '
    ' + lhs_svg + '
    ' + - '
    ' + - '
    ' + rhs_svg + '
    ' + - '
    '); - rule.find('svg').attr({'width':'50pt','height':'50pt'}); - //if (!cong) rule.find('.rule_label').css({'font-weight':'bold'}); - $(tag).append(rule); - return rule; -} - -function addContainer (tag, title, expanders) { - var container = $('
    '+ - '
    '); - container.css({'display':'none'}); - - var link = container.find('div.title a'); - var content = container.find('div.content'); - if (expanders) { - var exps = $(''); - content.append(exps); - exps.find('#expand').click(function() { content.find('> .container > .content').slideDown(); return false; }); - exps.find('#collapse').click(function() { content.find('> .container > .content').slideUp(); return false; }); - } - - link.click(function() { content.slideToggle(); return false; }); - $(tag).append(container); - container.fadeIn(); - return content; -} - -function collapseContainer(tag) { - tag.slideToggle(); -} - -function addCodebox(tag, text) { - var codebox = $('
    '+$.trim(text)+'
    '); - codebox.css({'display':'none'}); - $(tag).append(codebox); - codebox.fadeIn(); - return codebox; -} - -function clearFloats(tag) { - $(tag).append($('
    ')); -} - diff --git a/cosy/chromeui/cosy.sml b/cosy/chromeui/cosy.sml deleted file mode 100644 index 0260a537..00000000 --- a/cosy/chromeui/cosy.sml +++ /dev/null @@ -1,245 +0,0 @@ - -fun dot_to_svg (ins,outs) dot = let - val () = TextIO.output (outs, dot) - (* eat the \n" => "\n" - | SOME ln => ln ^ (read_all ()) - | NONE => "" - val svg = read_all () -in svg -end - -fun addGraph io dom_element graph = - DOM.HTMLElement (jsffi.exec_js_r "window|" "addGraph" - [jsffi.arg.reference (DOM.fptr_of_HTMLElement dom_element), - jsffi.arg.string (dot_to_svg io (Cosy.output_dot graph))]) - - -fun addRule io dom_element name rule = - DOM.HTMLElement (jsffi.exec_js_r "window|" "addRule" [ - jsffi.arg.reference (DOM.fptr_of_HTMLElement dom_element), - jsffi.arg.string (R.string_of_name name), - jsffi.arg.string (dot_to_svg io (Cosy.output_dot (Cosy.Theory.Rule.get_lhs rule))), - jsffi.arg.string (dot_to_svg io (Cosy.output_dot (Cosy.Theory.Rule.get_rhs rule))) - ]) - -fun addContainer dom_element title expanders = - DOM.HTMLElement (jsffi.exec_js_r "window|" "addContainer" - [jsffi.arg.reference (DOM.fptr_of_HTMLElement dom_element), - jsffi.arg.string title, - jsffi.arg.bool expanders]) - -fun collapseContainer dom_element = - jsffi.exec_js "window|" "collapseContainer" [jsffi.arg.reference (DOM.fptr_of_HTMLElement dom_element)] - -fun addCodebox dom_element text = - DOM.HTMLElement (jsffi.exec_js_r "window|" "addCodebox" - [jsffi.arg.reference (DOM.fptr_of_HTMLElement dom_element), - jsffi.arg.string text]) - -fun clearFloats dom_element = - jsffi.exec_js "window|" "clearFloats" - [jsffi.arg.reference (DOM.fptr_of_HTMLElement dom_element)] - -val gens = GHZW_Gens.gen_list 2 [GHZW_VertexData.GHZ,GHZW_VertexData.W]; - -val content_div = the (DOM.getElementById DOM.document "cosy_content") - handle Option => DOM.HTMLElement "NULL" - - -fun run_dot () = Unix.streamsOf (Unix.execute ("/usr/bin/env",["dot", "-Tsvg"])) -fun close_dot (ins,outs) = (TextIO.closeIn ins; TextIO.closeOut outs) - - -fun output_synth (synth as ((ins,outs,verts,plugs), class_tab)) = let - val (num_classes, num_congs, num_redexes) = Cosy.Synth.stats synth - val parent = addContainer content_div - (Cosy.Theory.theory_name ^ " Synth ("^(Int.toString ins)^","^(Int.toString outs)^","^ - (Int.toString verts)^","^(Int.toString plugs)^")") true - val details = - "SYNTHESIS RESULTS\n"^ - "-----------------------------------------\n"^ - " "^(Int.toString ins)^" inputs\n"^ - " "^(Int.toString outs)^" outputs\n"^ - " "^(Int.toString verts)^" max vertices\n"^ - " "^(Int.toString plugs)^" max pluggings\n"^ - "-----------------------------------------\n"^ - " Found "^(Int.toString num_classes)^" equivalence classes.\n"^ - " Average class size: "^(Int.toString ((num_congs + num_redexes) div num_classes))^".\n"^ - "-----------------------------------------\n" - val io = run_dot () - val _ = addCodebox parent details - fun output_class (tensor, class) i = let - val container = addContainer parent ("Class " ^ (Int.toString i)) false - val _ = addCodebox container (Cosy.Tensor.to_string tensor) - val c_container = addContainer container "Congruences" false - val r_container = addContainer container "Reducible Expressions" false - val (congruences, redexes) = (EqClass.get_congs class, EqClass.get_redexes class) - fun output_graph len c (i, gr) = if i = 100 then (clearFloats c; addCodebox c (Int.toString (len - 100) ^ " more...")) - else (if i < 100 then addGraph io c gr else c) - val _ = case (EqClass.get_rep class) of SOME rep => (addGraph io 
c_container rep; ()) | NONE => () - val _ = map_index (output_graph (length congruences) c_container) congruences - val _ = map_index (output_graph (length redexes) r_container) redexes - in i+1 - end - (*val _ = fold (output_class) ((TheoryData.get_class_list tdata) synth) 0*) - val _ = Cosy.Synth.eqclass_fold output_class synth 0 - val _ = close_dot io -in () -end - -fun output_graph graph = let - val c = addContainer content_div "GRAPH" false - val io = run_dot () - val _ = addRule io c graph - val _ = close_dot io -in () -end - -fun output_ruleset rs = let - (*val rs_pairs = (TheoryData.get_rs_pairs tdata) rs*) - val container = addContainer content_div (Cosy.Theory.theory_name ^ " Ruleset") false - val io = run_dot () - val _ = R.NTab.map_all (addRule io container) (Cosy.Theory.Ruleset.get_allrules rs) - val _ = close_dot io -in () -end - -fun output_rule rule = let - val c = addContainer content_div "Rule" false - val io = run_dot () - val _ = addRule io c (R.mk "*") rule - val _ = close_dot io -in () -end - -fun output_string string = let - val _ = addCodebox content_div string -in () -end - -fun output_graph graph = let - val c = addContainer content_div "Graph" false - val io = run_dot () - val _ = addGraph io c graph - val _ = close_dot io -in () -end - -fun output_gens () = let - val c = addContainer content_div (Cosy.Theory.theory_name ^ " Generators") false - val io = run_dot () - val _ = map (fn (gr,_,_) => addGraph io c gr) (Cosy.gens) - val _ = close_dot io -in () -end - -fun out (Cosy.SYNTH s) = output_synth s - | out (Cosy.RS rs) = output_ruleset rs - | out (Cosy.RULE r) = output_rule r - | out (Cosy.GRAPH g) = output_graph g - | out (Cosy.ERR e) = output_string e - - - - -(***********************) -(* SYNTH FUNCTIONS *) -(***********************) - -(*fun synth run = Cosy.SYNTH (Cosy.Synth.synth Cosy.gens run) -fun synth_with_rs (Cosy.RS rs) run = Cosy.SYNTH (Cosy.Synth.synth_with_rs rs Cosy.gens run) - -fun update (Cosy.SYNTH s) (Cosy.RS rs) = Cosy.RS (rs |> Cosy.RSBuilder.update s) -fun reduce (Cosy.RS rs) = Cosy.RS (Cosy.RSBuilder.reduce rs) - -fun update_redex run rs = rs |> update (synth_with_rs rs run) |> reduce -fun update_naive run rs = rs |> update (synth run) -*) - - - -(*************************) -(* RULESET FUNCTIONS *) -(*************************) - -(*fun size (Cosy.RS rs) = R.NTab.cardinality (Cosy.Theory.Ruleset.get_allrules rs) -fun rule_matches_rule (Cosy.RULE r1) (Cosy.RULE r2) = Cosy.RSBuilder.rule_matches_rule r1 r2 - -fun match_rule (Cosy.RULE rule) (Cosy.GRAPH target) = Cosy.rule_matches_graph rule target - - -fun get_rule (Cosy.RS rs) name = - case Cosy.Theory.Ruleset.lookup_rule rs (R.mk name) - of SOME r => Cosy.RULE r - | _ => Cosy.ERR "Rule not found." - -fun get_lhs (Cosy.RULE rule) = Cosy.GRAPH (Cosy.Theory.Rule.get_lhs rule) -fun get_rhs (Cosy.RULE rule) = Cosy.GRAPH (Cosy.Theory.Rule.get_rhs rule) - - -val default_runs = [(0,0,3,3),(0,1,3,3),(1,0,3,3),(0,2,3,3),(1,1,3,3), - (2,0,3,3),(0,3,3,3),(1,2,3,3),(2,1,3,3),(3,0,3,3)] -val short_runs = [(0,0,2,2),(0,1,2,2)] -val long_runs = default_runs @ [(2,2,4,4)] - -fun process updater ruleset runs = let - fun do_update (run as (r1,r2,r3,r4)) rs = let - val _ = output_string ("Updating for: ("^ - Int.toString r1^","^ - Int.toString r2^","^ - Int.toString r3^","^ - Int.toString r4^")...") - val rs' = rs |> updater run - in (size rs', rs') - end - val (sizes, final_rs) = fold_map do_update runs ruleset - val _ = output_string "Done." 
-in (sizes, final_rs) -end - -fun as_data list = fold2 - (fn i => fn d => fn str => str^"("^Int.toString i^","^Int.toString d^")\n") - (0 upto (length list - 1)) list "" - - -val cosy = process update_redex Cosy.initial_rs -*) - -(********************) -(* IO FUNCTIONS *) -(********************) - -fun escape str = let - fun esc #"<" = "<" - | esc #">" = ">" - | esc #"&" = "&" - | esc #"\"" = """ - | esc c = String.str c -in String.translate esc str -end - -fun to_xml (Cosy.RULE rule) = Cosy.Theory.IO_Xml.Output.Rule.output rule - | to_xml (Cosy.GRAPH graph) = Cosy.Theory.IO_Xml.Output.Graph.output graph - -fun xml item = output_string (escape (XMLWriter.write_to_string (to_xml item))) - -fun save file item = XMLWriter.write_to_file file (to_xml item) - -fun load file = let - val data = XMLReader.read_from_file file -in if String.isSuffix ".rule" file then - Cosy.RULE (Cosy.Theory.IO_Xml.Input.Rule.input data) - else if String.isSuffix ".graph" file then - Cosy.GRAPH (Cosy.Theory.IO_Xml.Input.Graph.input data) - else Cosy.ERR "Unknown file extension" -end - - - - - - - diff --git a/cosy/chromeui/cosy_ghzw.sml b/cosy/chromeui/cosy_ghzw.sml deleted file mode 100644 index b514a93a..00000000 --- a/cosy/chromeui/cosy_ghzw.sml +++ /dev/null @@ -1,156 +0,0 @@ - -structure Cosy = -struct - structure Theory = GHZW_Theory - structure Synth = GHZW_DefaultSynth - structure RSBuilder = GHZW_RSBuilder - structure Tensor = GHZW_TensorData.Tensor - - datatype T = - SYNTH of Synth.T | - RS of Theory.Ruleset.T | - RULE of Theory.Rule.T | - GRAPH of Theory.Graph.T | - ERR of string - - val output_dot = GHZW_OutputGraphDot.output - - val z00 = GHZW_Gens.gen GHZW_VertexData.ZERO (0,0) - val z10 = GHZW_Gens.gen GHZW_VertexData.ZERO (1,0) - val z01 = GHZW_Gens.gen GHZW_VertexData.ZERO (0,1) - - val gens = [z10,z01,GHZW_Gens.gen GHZW_VertexData.TICK (1,1)] @ - GHZW_Gens.gen_list 3 [GHZW_VertexData.GHZ, GHZW_VertexData.W] - - local - val rs' = GHZW_Theory.Ruleset.empty - val (_,rs') = rs' |> GHZW_Theory.Ruleset.add_fresh_rule - (R.mk "ghz_fr", GHZW_Rws.frob GHZW_VertexData.GHZ) - val (_,rs') = rs' |> GHZW_Theory.Ruleset.add_fresh_rule - (R.mk "ghz_sp", GHZW_Rws.special GHZW_VertexData.GHZ) - val (_,rs') = rs' |> GHZW_Theory.Ruleset.add_fresh_rule - (R.mk "w_fr", GHZW_Rws.frob GHZW_VertexData.W) - (*val (_,rs') = rs' |> GHZW_Theory.Ruleset.add_fresh_rule - (R.mk "z0", GHZW_Theory.Rule.mk (#1 z00, #1 z00)) - val (_,rs') = rs' |> GHZW_Theory.Ruleset.add_fresh_rule - (R.mk "z1", GHZW_Theory.Rule.mk (#1 z10, #1 z10)) - val (_,rs') = rs' |> GHZW_Theory.Ruleset.add_fresh_rule - (R.mk "z2", GHZW_Theory.Rule.mk (#1 z01, #1 z01)) *) - val redex = TagName.mk "r" - - val rs' = fold (fn r => GHZW_Theory.Ruleset.tag_rule (R.mk r) redex) - ["ghz_fr","ghz_sp","w_fr"] rs' - in - val initial_rs = RS rs' - end - - val rule_matches_graph = GHZW_Enum.rule_matches_graph -end - - - -(* -val ghzw_data : (GHZW_Theory.Graph.T, GHZW_Theory.Ruleset.T, GHZW_DefaultSynth.T) TheoryData.T = { - name = "GHZ/W", - dotfun = GHZW_OutputGraphDot.output, - gens = GHZW_Gens.gen GHZW_VertexData.TICK (1,1) :: (GHZW_Gens.gen_list 4 [GHZW_VertexData.GHZ, GHZW_VertexData.W]), - stats = GHZW_DefaultSynth.stats, - class_list = fn synth => GHZW_DefaultSynth.eqclass_fold (cons o (apfst GHZW_TensorData.Tensor.to_string)) synth [], - rs_pairs = - (rule_data GHZW_Theory.Rule.get_lhs GHZW_Theory.Rule.get_rhs) o - GHZW_Theory.Ruleset.get_allrules -} -*) - -(* -fun synth run = SYNTH (GHZW_DefaultSynth.synth (TheoryData.get_gens ghzw_data) run) -fun synth_with_rs (RS 
rs) run = - SYNTH (GHZW_DefaultSynth.synth_with_rs rs (TheoryData.get_gens ghzw_data) run) -fun ruleset (SYNTH s) = RS (GHZW_RSBuilder.from_synth s) -fun update (SYNTH s) (RS rs) = RS (rs |> GHZW_RSBuilder.update s) -fun reduce (RS rs) = RS (GHZW_RSBuilder.reduce rs) -fun update_redex run rs = rs |> update (synth_with_rs rs run) |> reduce -fun update_naive run rs = rs |> update (synth run) -fun size (RS rs) = R.NTab.cardinality (GHZW_Theory.Ruleset.get_allrules rs) -fun rule_matches_rule (RULE r1) (RULE r2) = GHZW_RSBuilder.rule_matches_rule r1 r2 - -fun match_rule (RULE rule) (GRAPH target) = GHZW_Enum.rule_matches_graph rule target - - -(*fun update_with run rs = rs |> update (synth run) |> reduce;*) -fun get_rule (RS rs) name = case GHZW_Theory.Ruleset.lookup_rule rs (R.mk name) - of SOME r => RULE r - | _ => ERR "Rule not found." - -fun get_lhs (RULE rule) = GRAPH (GHZW_Theory.Rule.get_lhs rule) -fun get_rhs (RULE rule) = GRAPH (GHZW_Theory.Rule.get_rhs rule) - -(*fun synth_list runs rs = fold update_redex runs rs*) - -fun escape str = let - fun esc #"<" = "<" - | esc #">" = ">" - | esc #"&" = "&" - | esc #"\"" = """ - | esc c = String.str c -in String.translate esc str -end - -fun to_xml (RULE rule) = GHZW_Theory.IO_Xml.Output.Rule.output rule - | to_xml (GRAPH graph) = GHZW_Theory.IO_Xml.Output.Graph.output graph - -fun xml item = output_string (escape (XMLWriter.write_to_string (to_xml item))) - -fun save file item = XMLWriter.write_to_file file (to_xml item) - -fun load file = let - val data = XMLReader.read_from_file file -in if String.isSuffix ".rule" file then - RULE (GHZW_Theory.IO_Xml.Input.Rule.input data) - else if String.isSuffix ".graph" file then - GRAPH (GHZW_Theory.IO_Xml.Input.Graph.input data) - else ERR "Unknown file extension" -end - -val rs' = GHZW_Theory.Ruleset.empty -val (_,rs') = rs' |> GHZW_Theory.Ruleset.add_fresh_rule (R.mk "ghz_fr", GHZW_Rws.frob GHZW_VertexData.GHZ) -val (_,rs') = rs' |> GHZW_Theory.Ruleset.add_fresh_rule (R.mk "ghz_sp", GHZW_Rws.special GHZW_VertexData.GHZ) -val (_,rs') = rs' |> GHZW_Theory.Ruleset.add_fresh_rule (R.mk "w_fr", GHZW_Rws.frob GHZW_VertexData.W) - -val redex = TagName.mk "r" -val rs' = rs' |> GHZW_Theory.Ruleset.tag_rule (R.mk "ghz_fr") redex - |> GHZW_Theory.Ruleset.tag_rule (R.mk "ghz_sp") redex - |> GHZW_Theory.Ruleset.tag_rule (R.mk "w_fr") redex - -val rs = RS rs' - -val default_runs = [(0,0,3,3),(0,1,3,3),(1,0,3,3),(0,2,3,3),(1,1,3,3), - (2,0,3,3),(0,3,3,3),(1,2,3,3),(2,1,3,3),(3,0,3,3)] -val short_runs = [(0,0,2,2),(0,1,2,2)] -val long_runs = default_runs @ [(2,2,4,4)] - -fun process updater ruleset runs = let - fun do_update (run as (r1,r2,r3,r4)) rs = let - val _ = output_string ("Updating for: ("^ - Int.toString r1^","^ - Int.toString r2^","^ - Int.toString r3^","^ - Int.toString r4^")...") - val rs' = rs |> updater run - in (size rs', rs') - end - val (sizes, final_rs) = fold_map do_update runs ruleset - val _ = output_string "Done." 
-in (sizes, final_rs) -end - -fun as_data list = fold2 (fn i => fn d => fn str => str^"("^Int.toString i^","^Int.toString d^")\n") (0 upto (length list - 1)) list "" - -fun out (SYNTH s) = output_synth ghzw_data s - | out (RS rs) = output_ruleset ghzw_data rs - | out (RULE r) = output_rule ghzw_data - (GHZW_Theory.Rule.get_lhs r) - (GHZW_Theory.Rule.get_rhs r) - | out (GRAPH g) = output_graph ghzw_data g - | out (ERR e) = output_string e -*) \ No newline at end of file diff --git a/cosy/chromeui/cosy_rg.sml b/cosy/chromeui/cosy_rg.sml deleted file mode 100644 index fef529e7..00000000 --- a/cosy/chromeui/cosy_rg.sml +++ /dev/null @@ -1,38 +0,0 @@ -structure Cosy = -struct - structure Theory = RG_Theory - structure Enum = RG_FastEnum - structure RSBuilder = RG_RSBuilder - structure Tensor = RG_TensorData.Tensor - - datatype T = - EQTAB of RG_FastEnum.EqClassTab.T | - RS of Theory.Ruleset.T | - RULE of Theory.Rule.T | - GRAPH of Theory.Graph.T | - ERR of string - - val output_dot = RG_OutputGraphDot.output - val zero = LinratAngleExpr.zero - val gens = RG_Gens.gen_list 3 [RG_InternVData.Xnd zero, RG_InternVData.Znd zero] - - local - val rs' = RG_Theory.Ruleset.empty - val (_,rs') = rs' |> RG_Theory.Ruleset.add_fresh_rule - (R.mk "r_fr", RG_Rws.frob (RG_InternVData.Xnd zero)) - val (_,rs') = rs' |> RG_Theory.Ruleset.add_fresh_rule - (R.mk "g_fr", RG_Rws.frob (RG_InternVData.Znd zero)) - val (_,rs') = rs' |> RG_Theory.Ruleset.add_fresh_rule - (R.mk "r_sp", RG_Rws.special (RG_InternVData.Xnd zero)) - val (_,rs') = rs' |> RG_Theory.Ruleset.add_fresh_rule - (R.mk "g_sp", RG_Rws.special (RG_InternVData.Znd zero)) - - val redex = TagName.mk "r" - val rs' = fold (fn s => RG_Theory.Ruleset.tag_rule (R.mk s) redex) - ["r_fr","g_fr","r_sp","g_sp"] rs' - in - val initial_rs = RS rs' - end - - val rule_matches_graph = RG_Enum.rule_matches_graph -end \ No newline at end of file diff --git a/cosy/chromeui/cosy_rgb.sml b/cosy/chromeui/cosy_rgb.sml deleted file mode 100644 index 8e7a0f91..00000000 --- a/cosy/chromeui/cosy_rgb.sml +++ /dev/null @@ -1,101 +0,0 @@ -structure Cosy = -struct - structure Theory = RGB_Theory - structure Synth = RGB_Synth - structure RSBuilder = RGB_RSBuilder - structure Tensor = RGB_TensorData.Tensor - - datatype T = - SYNTH of Synth.T | - RS of Theory.Ruleset.T | - RULE of Theory.Rule.T | - GRAPH of Theory.Graph.T | - ERR of string - - val output_dot = RGB_OutputGraphDot.output - val gens = RGB_Gens.gen_list 4 [RGB_VertexData.Red, RGB_VertexData.Green, RGB_VertexData.Blue] - - local - val rs' = RGB_Theory.Ruleset.empty - val (_,rs') = rs' |> RGB_Theory.Ruleset.add_fresh_rule - (R.mk "r_fr", RGB_Rws.frob RGB_VertexData.Red) - val (_,rs') = rs' |> RGB_Theory.Ruleset.add_fresh_rule - (R.mk "g_fr", RGB_Rws.frob RGB_VertexData.Green) - val (_,rs') = rs' |> RGB_Theory.Ruleset.add_fresh_rule - (R.mk "b_fr", RGB_Rws.frob RGB_VertexData.Blue) - val (_,rs') = rs' |> RGB_Theory.Ruleset.add_fresh_rule - (R.mk "r_sp", RGB_Rws.special RGB_VertexData.Red) - val (_,rs') = rs' |> RGB_Theory.Ruleset.add_fresh_rule - (R.mk "g_sp", RGB_Rws.special RGB_VertexData.Green) - val (_,rs') = rs' |> RGB_Theory.Ruleset.add_fresh_rule - (R.mk "b_sp", RGB_Rws.special RGB_VertexData.Blue) - - val redex = TagName.mk "r" - val rs' = fold (fn s => RGB_Theory.Ruleset.tag_rule (R.mk s) redex) - ["r_fr","g_fr","b_fr","r_sp","g_sp","b_sp"] rs' - in - val initial_rs = RS rs' - end - - val rule_matches_graph = RGB_Enum.rule_matches_graph -end - -(* -datatype cosy = - SYNTH of RGB_Synth.T | - RS of RGB_Theory.Ruleset.T | - 
RULE of RGB_Theory.Rule.T | - ERR of string - -val rgb_data : (RGB_Theory.Graph.T, RGB_Theory.Ruleset.T, RGB_Synth.T) TheoryData.T = { - name = "RGB", - dotfun = RGB_OutputGraphDot.output, - gens = RGB_Gens.gen_list 4 [RGB_VertexData.Red, RGB_VertexData.Green, RGB_VertexData.Blue], - stats = RGB_Synth.stats, - class_list = fn synth => RGB_Synth.eqclass_fold (cons o (apfst RGB_TensorData.Tensor.to_string)) synth [], - rs_pairs = - (rule_data RGB_Theory.Rule.get_lhs RGB_Theory.Rule.get_rhs) o - RGB_Theory.Ruleset.get_allrules -} - -fun synth run = SYNTH (RGB_Synth.synth (TheoryData.get_gens rgb_data) run) -fun synth_with_rs (RS rs) run = - SYNTH (RGB_Synth.synth_with_rs rs (TheoryData.get_gens rgb_data) run) -fun ruleset (SYNTH s) = RS (RGB_RSBuilder.from_synth s) -fun update (SYNTH s) (RS rs) = RS (rs |> RGB_RSBuilder.update s) -fun reduce (RS rs) = RS (RGB_RSBuilder.reduce rs) -fun update_with run rs = rs |> update (synth_with_rs rs run) |> reduce -fun size (RS rs) = R.NTab.cardinality (RGB_Theory.Ruleset.get_allrules rs) -fun rule_matches_rule (RULE r1) (RULE r2) = RGB_RSBuilder.rule_matches_rule r1 r2 - - -(*fun update_with run rs = rs |> update (synth run) |> reduce;*) -fun get_rule (RS rs) name = case RGB_Theory.Ruleset.lookup_rule rs (R.mk name) - of SOME r => RULE r - | _ => ERR "Rule not found." - -fun synth_list runs rs = fold update_with runs rs - - -val rs' = RGB_Theory.Ruleset.empty -val (_,rs') = rs' |> RGB_Theory.Ruleset.add_fresh_rule (R.mk "r_fr", RGB_Rws.frob RGB_VertexData.Red) -val (_,rs') = rs' |> RGB_Theory.Ruleset.add_fresh_rule (R.mk "g_fr", RGB_Rws.frob RGB_VertexData.Green) -val (_,rs') = rs' |> RGB_Theory.Ruleset.add_fresh_rule (R.mk "b_fr", RGB_Rws.frob RGB_VertexData.Blue) -val (_,rs') = rs' |> RGB_Theory.Ruleset.add_fresh_rule (R.mk "r_sp", RGB_Rws.special RGB_VertexData.Red) -val (_,rs') = rs' |> RGB_Theory.Ruleset.add_fresh_rule (R.mk "g_sp", RGB_Rws.special RGB_VertexData.Green) -val (_,rs') = rs' |> RGB_Theory.Ruleset.add_fresh_rule (R.mk "b_sp", RGB_Rws.special RGB_VertexData.Blue) - -val redex = TagName.mk "r" -val rs' = fold (fn s => RGB_Theory.Ruleset.tag_rule (R.mk s) redex) - ["r_fr","g_fr","b_fr","r_sp","g_sp","b_sp"] rs' - -val rs = RS rs' - -fun out (SYNTH s) = output_synth rgb_data s - | out (RS rs) = output_ruleset rgb_data rs - | out (RULE r) = output_rule rgb_data - (RGB_Theory.Rule.get_lhs r) - (RGB_Theory.Rule.get_rhs r) - | out (ERR e) = output_string e - -*) diff --git a/cosy/chromeui/cosy_util.sml b/cosy/chromeui/cosy_util.sml deleted file mode 100644 index a9228e42..00000000 --- a/cosy/chromeui/cosy_util.sml +++ /dev/null @@ -1,224 +0,0 @@ -functor CosyUtil( - structure Enum : GRAPH_ENUM - val data_list : Enum.Theory.OVData.IData.data list - val output_dot : Enum.Theory.Graph.T -> string - val initial_rs : Enum.Theory.Ruleset.T -) = -struct - -structure Enum = Enum -structure EqClassTab = Enum.EqClassTab -structure EqClass = EqClassTab.EqClass -structure GraphEntry = EqClassTab.GraphEntry -structure Theory = Enum.Theory - -(*fun gen_list max_arity data_list = let - fun alist 0 0 = [] - | alist k 0 = (0,k)::alist (k-1) (k-1) - | alist k i = (i,k-i)::alist k (i-1) - fun gen d (iw,ow) = (Theory.OVData.NVert d,iw,ow) - in (fold_product (cons oo gen) data_list (alist max_arity max_arity) []) - end*) - - -val gens = let - fun gens_for d = [ - (Theory.OVData.NVert d,1,2), - (Theory.OVData.NVert d,2,1), - (Theory.OVData.NVert d,2,0), - (Theory.OVData.NVert d,0,2), - (Theory.OVData.NVert d,1,0), - (Theory.OVData.NVert d,0,1) - ] -in maps gens_for 
data_list -end - -fun dot_to_svg (ins,outs) dot = let - val () = TextIO.output (outs, dot) - (* eat the \n" => "\n" - | SOME ln => ln ^ (read_all ()) - | NONE => "" - val svg = read_all () -in svg -end - -fun addGraph io dom_element graph = - DOM.HTMLElement (jsffi.exec_js_r "window|" "addGraph" - [jsffi.arg.reference (DOM.fptr_of_HTMLElement dom_element), - jsffi.arg.string (dot_to_svg io (output_dot graph))]) - - -fun addRule io dom_element name rule = - DOM.HTMLElement (jsffi.exec_js_r "window|" "addRule" [ - jsffi.arg.reference (DOM.fptr_of_HTMLElement dom_element), - jsffi.arg.string (R.string_of_name name), - jsffi.arg.string (dot_to_svg io (output_dot (Theory.Rule.get_lhs rule))), - jsffi.arg.string (dot_to_svg io (output_dot (Theory.Rule.get_rhs rule))) - ]) - -fun addContainer dom_element title expanders = - DOM.HTMLElement (jsffi.exec_js_r "window|" "addContainer" - [jsffi.arg.reference (DOM.fptr_of_HTMLElement dom_element), - jsffi.arg.string title, - jsffi.arg.bool expanders]) - -fun collapseContainer dom_element = - jsffi.exec_js "window|" "collapseContainer" [jsffi.arg.reference (DOM.fptr_of_HTMLElement dom_element)] - -fun addCodebox dom_element text = - DOM.HTMLElement (jsffi.exec_js_r "window|" "addCodebox" - [jsffi.arg.reference (DOM.fptr_of_HTMLElement dom_element), - jsffi.arg.string text]) - -fun clearFloats dom_element = - jsffi.exec_js "window|" "clearFloats" - [jsffi.arg.reference (DOM.fptr_of_HTMLElement dom_element)] - -(*val content_div = ref (DOM.HTMLElement "NULL")*) - - -fun run_dot () = Unix.streamsOf (Unix.execute ("/usr/bin/env",["dot", "-Tsvg"])) -fun close_dot (ins,outs) = (TextIO.closeIn ins; TextIO.closeOut outs) - - -fun output_graph content_div graph = let - val c = addContainer content_div "GRAPH" false - val io = run_dot () - val _ = addRule io c graph - val _ = close_dot io -in () -end - -fun output_ruleset content_div rs = let - (*val rs_pairs = (TheoryData.get_rs_pairs tdata) rs*) - val container = addContainer content_div (Theory.theory_name ^ " Ruleset") false - val io = run_dot () - val _ = R.NTab.map_all (addRule io container) (Theory.Ruleset.get_allrules rs) - val _ = close_dot io -in () -end - -fun output_rule content_div rule = let - val c = addContainer content_div "Rule" false - val io = run_dot () - val _ = addRule io c (R.mk "*") rule - val _ = close_dot io -in () -end - -fun output_string content_div string = let - val _ = addCodebox content_div string -in () -end - -fun output_graph content_div graph = let - val c = addContainer content_div "Graph" false - val io = run_dot () - val _ = addGraph io c graph - val _ = close_dot io -in () -end - -fun output_graph_list content_div gs = let - val c = addContainer content_div "Graph List" false - val io = run_dot () - val _ = map (addGraph io c) gs - val _ = close_dot io -in () -end - -fun output_eqtab content_div eqt (max_v,max_p,max_m,max_n) = let - val parent = addContainer content_div - (Theory.theory_name ^ " Synth " ^ - "(" ^ Int.toString max_v ^ "," ^ Int.toString max_p ^ "," ^ - Int.toString max_m ^ "," ^ Int.toString max_n ^ ")") true - val details = - "SYNTHESIS RESULTS\n"^ - "-----------------------------------------\n" -(* ^ - " "^(Int.toString ins)^" inputs\n"^ - " "^(Int.toString outs)^" outputs\n"^ - " "^(Int.toString verts)^" max vertices\n"^ - " "^(Int.toString plugs)^" max pluggings\n"^ - "-----------------------------------------\n"^ - " Found "^(Int.toString num_classes)^" equivalence classes.\n"^ - " Average class size: "^(Int.toString ((num_congs + num_redexes) div 
num_classes))^".\n"^ - "-----------------------------------------\n"*) - val io = run_dot () - val _ = addCodebox parent details - fun output_class class i = let - val container = addContainer parent ("Class " ^ (Int.toString i)) false - val rep = EqClassTab.get_graph_entry eqt (EqClass.get_rep class) - val _ = case GraphEntry.get_edata rep - of SOME d => addCodebox container (GraphEntry.Equiv.to_string d) - | NONE => addCodebox container ("No Equiv data") - val c_container = addContainer container "Congruences" false - val r_container = addContainer container "Reducible Expressions" false - val congruences = map (GraphEntry.get_graph o EqClassTab.get_graph_entry eqt) (EqClass.get_congs class) - val redexes = map (GraphEntry.get_graph o EqClassTab.get_graph_entry eqt) (EqClass.get_redexes class) - fun output_graph len c (i, gr) = if i = 100 then (clearFloats c; addCodebox c (Int.toString (len - 100) ^ " more...")) - else (if i < 100 then addGraph io c gr else c) - val _ = addGraph io c_container (GraphEntry.get_graph rep) - val _ = map_index (output_graph (length congruences) c_container) congruences - val _ = map_index (output_graph (length redexes) r_container) redexes - in i+1 - end - val _ = EqClassTab.fold_eqclasses output_class eqt 0 - val _ = close_dot io -in () -end - -fun get_rules content_div sz = -let - val eqt = Enum.tab_update gens sz (Enum.EqClassTab.mk initial_rs) -in - output_ruleset - content_div - (Enum.EqClassTab.get_ruleset eqt) -end - -fun synth content_div sz = -let - val eqt = Enum.tab_update gens sz (Enum.EqClassTab.mk initial_rs) -in - output_eqtab content_div eqt sz -end - -fun enum content_div sz = -let - val gs = Enum.enum gens sz -in output_graph_list content_div gs -end - -end - -val rg_data_list = [RG_InternVData.Xnd LinratAngleExpr.zero, - RG_InternVData.Znd LinratAngleExpr.zero] -structure RGCosy = CosyUtil( - structure Enum = RG_Enum - val data_list = rg_data_list - val output_dot = RG_GraphicalTheoryIO.OutputGraphDot.output - val initial_rs = RG_Spiders.frob_and_special_rules data_list -) - -val ghzw_data_list = [GHZW_Data.GHZ, GHZW_Data.W] -structure GHZWCosy = CosyUtil( - structure Enum = GHZW_Enum - val data_list = ghzw_data_list - val output_dot = GHZW_GraphicalTheoryIO.OutputGraphDot.output - val initial_rs = GHZW_Spiders.frob_rules data_list -) - - -(*local - open RGCosy -in -fun rg_synth content_div sz = - output_ruleset content_div - (RSBuilder.get_ruleset - (Enum.tab_update gens sz - (RG_Spiders.eq_class_tab rg_data_list))) -end*) \ No newline at end of file diff --git a/cosy/chromeui/jquery.js b/cosy/chromeui/jquery.js deleted file mode 100644 index 5d5a1d58..00000000 --- a/cosy/chromeui/jquery.js +++ /dev/null @@ -1,8936 +0,0 @@ -/*! - * jQuery JavaScript Library v1.6.1 - * http://jquery.com/ - * - * Copyright 2011, John Resig - * Dual licensed under the MIT or GPL Version 2 licenses. - * http://jquery.org/license - * - * Includes Sizzle.js - * http://sizzlejs.com/ - * Copyright 2011, The Dojo Foundation - * Released under the MIT, BSD, and GPL Licenses. 
- * - * Date: Thu May 12 15:04:36 2011 -0400 - */ -(function( window, undefined ) { - -// Use the correct document accordingly with window argument (sandbox) -var document = window.document, - navigator = window.navigator, - location = window.location; -var jQuery = (function() { - -// Define a local copy of jQuery -var jQuery = function( selector, context ) { - // The jQuery object is actually just the init constructor 'enhanced' - return new jQuery.fn.init( selector, context, rootjQuery ); - }, - - // Map over jQuery in case of overwrite - _jQuery = window.jQuery, - - // Map over the $ in case of overwrite - _$ = window.$, - - // A central reference to the root jQuery(document) - rootjQuery, - - // A simple way to check for HTML strings or ID strings - // (both of which we optimize for) - quickExpr = /^(?:[^<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/, - - // Check if a string has a non-whitespace character in it - rnotwhite = /\S/, - - // Used for trimming whitespace - trimLeft = /^\s+/, - trimRight = /\s+$/, - - // Check for digits - rdigit = /\d/, - - // Match a standalone tag - rsingleTag = /^<(\w+)\s*\/?>(?:<\/\1>)?$/, - - // JSON RegExp - rvalidchars = /^[\],:{}\s]*$/, - rvalidescape = /\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, - rvalidtokens = /"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, - rvalidbraces = /(?:^|:|,)(?:\s*\[)+/g, - - // Useragent RegExp - rwebkit = /(webkit)[ \/]([\w.]+)/, - ropera = /(opera)(?:.*version)?[ \/]([\w.]+)/, - rmsie = /(msie) ([\w.]+)/, - rmozilla = /(mozilla)(?:.*? rv:([\w.]+))?/, - - // Keep a UserAgent string for use with jQuery.browser - userAgent = navigator.userAgent, - - // For matching the engine and version of the browser - browserMatch, - - // The deferred used on DOM ready - readyList, - - // The ready event handler - DOMContentLoaded, - - // Save a reference to some core methods - toString = Object.prototype.toString, - hasOwn = Object.prototype.hasOwnProperty, - push = Array.prototype.push, - slice = Array.prototype.slice, - trim = String.prototype.trim, - indexOf = Array.prototype.indexOf, - - // [[Class]] -> type pairs - class2type = {}; - -jQuery.fn = jQuery.prototype = { - constructor: jQuery, - init: function( selector, context, rootjQuery ) { - var match, elem, ret, doc; - - // Handle $(""), $(null), or $(undefined) - if ( !selector ) { - return this; - } - - // Handle $(DOMElement) - if ( selector.nodeType ) { - this.context = this[0] = selector; - this.length = 1; - return this; - } - - // The body element only exists once, optimize finding it - if ( selector === "body" && !context && document.body ) { - this.context = document; - this[0] = document.body; - this.selector = selector; - this.length = 1; - return this; - } - - // Handle HTML strings - if ( typeof selector === "string" ) { - // Are we dealing with HTML string or an ID? - if ( selector.charAt(0) === "<" && selector.charAt( selector.length - 1 ) === ">" && selector.length >= 3 ) { - // Assume that strings that start and end with <> are HTML and skip the regex check - match = [ null, selector, null ]; - - } else { - match = quickExpr.exec( selector ); - } - - // Verify a match, and that no context was specified for #id - if ( match && (match[1] || !context) ) { - - // HANDLE: $(html) -> $(array) - if ( match[1] ) { - context = context instanceof jQuery ? context[0] : context; - doc = (context ? 
context.ownerDocument || context : document); - - // If a single string is passed in and it's a single tag - // just do a createElement and skip the rest - ret = rsingleTag.exec( selector ); - - if ( ret ) { - if ( jQuery.isPlainObject( context ) ) { - selector = [ document.createElement( ret[1] ) ]; - jQuery.fn.attr.call( selector, context, true ); - - } else { - selector = [ doc.createElement( ret[1] ) ]; - } - - } else { - ret = jQuery.buildFragment( [ match[1] ], [ doc ] ); - selector = (ret.cacheable ? jQuery.clone(ret.fragment) : ret.fragment).childNodes; - } - - return jQuery.merge( this, selector ); - - // HANDLE: $("#id") - } else { - elem = document.getElementById( match[2] ); - - // Check parentNode to catch when Blackberry 4.6 returns - // nodes that are no longer in the document #6963 - if ( elem && elem.parentNode ) { - // Handle the case where IE and Opera return items - // by name instead of ID - if ( elem.id !== match[2] ) { - return rootjQuery.find( selector ); - } - - // Otherwise, we inject the element directly into the jQuery object - this.length = 1; - this[0] = elem; - } - - this.context = document; - this.selector = selector; - return this; - } - - // HANDLE: $(expr, $(...)) - } else if ( !context || context.jquery ) { - return (context || rootjQuery).find( selector ); - - // HANDLE: $(expr, context) - // (which is just equivalent to: $(context).find(expr) - } else { - return this.constructor( context ).find( selector ); - } - - // HANDLE: $(function) - // Shortcut for document ready - } else if ( jQuery.isFunction( selector ) ) { - return rootjQuery.ready( selector ); - } - - if (selector.selector !== undefined) { - this.selector = selector.selector; - this.context = selector.context; - } - - return jQuery.makeArray( selector, this ); - }, - - // Start with an empty selector - selector: "", - - // The current version of jQuery being used - jquery: "1.6.1", - - // The default length of a jQuery object is 0 - length: 0, - - // The number of elements contained in the matched element set - size: function() { - return this.length; - }, - - toArray: function() { - return slice.call( this, 0 ); - }, - - // Get the Nth element in the matched element set OR - // Get the whole matched element set as a clean array - get: function( num ) { - return num == null ? - - // Return a 'clean' array - this.toArray() : - - // Return just the object - ( num < 0 ? this[ this.length + num ] : this[ num ] ); - }, - - // Take an array of elements and push it onto the stack - // (returning the new matched element set) - pushStack: function( elems, name, selector ) { - // Build a new jQuery matched element set - var ret = this.constructor(); - - if ( jQuery.isArray( elems ) ) { - push.apply( ret, elems ); - - } else { - jQuery.merge( ret, elems ); - } - - // Add the old object onto the stack (as a reference) - ret.prevObject = this; - - ret.context = this.context; - - if ( name === "find" ) { - ret.selector = this.selector + (this.selector ? " " : "") + selector; - } else if ( name ) { - ret.selector = this.selector + "." + name + "(" + selector + ")"; - } - - // Return the newly-formed element set - return ret; - }, - - // Execute a callback for every element in the matched set. - // (You can seed the arguments with an array of args, but this is - // only used internally.) 
- each: function( callback, args ) { - return jQuery.each( this, callback, args ); - }, - - ready: function( fn ) { - // Attach the listeners - jQuery.bindReady(); - - // Add the callback - readyList.done( fn ); - - return this; - }, - - eq: function( i ) { - return i === -1 ? - this.slice( i ) : - this.slice( i, +i + 1 ); - }, - - first: function() { - return this.eq( 0 ); - }, - - last: function() { - return this.eq( -1 ); - }, - - slice: function() { - return this.pushStack( slice.apply( this, arguments ), - "slice", slice.call(arguments).join(",") ); - }, - - map: function( callback ) { - return this.pushStack( jQuery.map(this, function( elem, i ) { - return callback.call( elem, i, elem ); - })); - }, - - end: function() { - return this.prevObject || this.constructor(null); - }, - - // For internal use only. - // Behaves like an Array's method, not like a jQuery method. - push: push, - sort: [].sort, - splice: [].splice -}; - -// Give the init function the jQuery prototype for later instantiation -jQuery.fn.init.prototype = jQuery.fn; - -jQuery.extend = jQuery.fn.extend = function() { - var options, name, src, copy, copyIsArray, clone, - target = arguments[0] || {}, - i = 1, - length = arguments.length, - deep = false; - - // Handle a deep copy situation - if ( typeof target === "boolean" ) { - deep = target; - target = arguments[1] || {}; - // skip the boolean and the target - i = 2; - } - - // Handle case when target is a string or something (possible in deep copy) - if ( typeof target !== "object" && !jQuery.isFunction(target) ) { - target = {}; - } - - // extend jQuery itself if only one argument is passed - if ( length === i ) { - target = this; - --i; - } - - for ( ; i < length; i++ ) { - // Only deal with non-null/undefined values - if ( (options = arguments[ i ]) != null ) { - // Extend the base object - for ( name in options ) { - src = target[ name ]; - copy = options[ name ]; - - // Prevent never-ending loop - if ( target === copy ) { - continue; - } - - // Recurse if we're merging plain objects or arrays - if ( deep && copy && ( jQuery.isPlainObject(copy) || (copyIsArray = jQuery.isArray(copy)) ) ) { - if ( copyIsArray ) { - copyIsArray = false; - clone = src && jQuery.isArray(src) ? src : []; - - } else { - clone = src && jQuery.isPlainObject(src) ? src : {}; - } - - // Never move original objects, clone them - target[ name ] = jQuery.extend( deep, clone, copy ); - - // Don't bring in undefined values - } else if ( copy !== undefined ) { - target[ name ] = copy; - } - } - } - } - - // Return the modified object - return target; -}; - -jQuery.extend({ - noConflict: function( deep ) { - if ( window.$ === jQuery ) { - window.$ = _$; - } - - if ( deep && window.jQuery === jQuery ) { - window.jQuery = _jQuery; - } - - return jQuery; - }, - - // Is the DOM ready to be used? Set to true once it occurs. - isReady: false, - - // A counter to track how many items to wait for before - // the ready event fires. See #6781 - readyWait: 1, - - // Hold (or release) the ready event - holdReady: function( hold ) { - if ( hold ) { - jQuery.readyWait++; - } else { - jQuery.ready( true ); - } - }, - - // Handle when the DOM is ready - ready: function( wait ) { - // Either a released hold or an DOMready/load event and not yet ready - if ( (wait === true && !--jQuery.readyWait) || (wait !== true && !jQuery.isReady) ) { - // Make sure body exists, at least, in case IE gets a little overzealous (ticket #5443). 
- if ( !document.body ) { - return setTimeout( jQuery.ready, 1 ); - } - - // Remember that the DOM is ready - jQuery.isReady = true; - - // If a normal DOM Ready event fired, decrement, and wait if need be - if ( wait !== true && --jQuery.readyWait > 0 ) { - return; - } - - // If there are functions bound, to execute - readyList.resolveWith( document, [ jQuery ] ); - - // Trigger any bound ready events - if ( jQuery.fn.trigger ) { - jQuery( document ).trigger( "ready" ).unbind( "ready" ); - } - } - }, - - bindReady: function() { - if ( readyList ) { - return; - } - - readyList = jQuery._Deferred(); - - // Catch cases where $(document).ready() is called after the - // browser event has already occurred. - if ( document.readyState === "complete" ) { - // Handle it asynchronously to allow scripts the opportunity to delay ready - return setTimeout( jQuery.ready, 1 ); - } - - // Mozilla, Opera and webkit nightlies currently support this event - if ( document.addEventListener ) { - // Use the handy event callback - document.addEventListener( "DOMContentLoaded", DOMContentLoaded, false ); - - // A fallback to window.onload, that will always work - window.addEventListener( "load", jQuery.ready, false ); - - // If IE event model is used - } else if ( document.attachEvent ) { - // ensure firing before onload, - // maybe late but safe also for iframes - document.attachEvent( "onreadystatechange", DOMContentLoaded ); - - // A fallback to window.onload, that will always work - window.attachEvent( "onload", jQuery.ready ); - - // If IE and not a frame - // continually check to see if the document is ready - var toplevel = false; - - try { - toplevel = window.frameElement == null; - } catch(e) {} - - if ( document.documentElement.doScroll && toplevel ) { - doScrollCheck(); - } - } - }, - - // See test/unit/core.js for details concerning isFunction. - // Since version 1.3, DOM methods and functions like alert - // aren't supported. They return false on IE (#2968). - isFunction: function( obj ) { - return jQuery.type(obj) === "function"; - }, - - isArray: Array.isArray || function( obj ) { - return jQuery.type(obj) === "array"; - }, - - // A crude way of determining if an object is a window - isWindow: function( obj ) { - return obj && typeof obj === "object" && "setInterval" in obj; - }, - - isNaN: function( obj ) { - return obj == null || !rdigit.test( obj ) || isNaN( obj ); - }, - - type: function( obj ) { - return obj == null ? - String( obj ) : - class2type[ toString.call(obj) ] || "object"; - }, - - isPlainObject: function( obj ) { - // Must be an Object. - // Because of IE, we also have to check the presence of the constructor property. - // Make sure that DOM nodes and window objects don't pass through, as well - if ( !obj || jQuery.type(obj) !== "object" || obj.nodeType || jQuery.isWindow( obj ) ) { - return false; - } - - // Not own constructor property must be Object - if ( obj.constructor && - !hasOwn.call(obj, "constructor") && - !hasOwn.call(obj.constructor.prototype, "isPrototypeOf") ) { - return false; - } - - // Own properties are enumerated firstly, so to speed up, - // if last one is own, then all properties are own. 
- - var key; - for ( key in obj ) {} - - return key === undefined || hasOwn.call( obj, key ); - }, - - isEmptyObject: function( obj ) { - for ( var name in obj ) { - return false; - } - return true; - }, - - error: function( msg ) { - throw msg; - }, - - parseJSON: function( data ) { - if ( typeof data !== "string" || !data ) { - return null; - } - - // Make sure leading/trailing whitespace is removed (IE can't handle it) - data = jQuery.trim( data ); - - // Attempt to parse using the native JSON parser first - if ( window.JSON && window.JSON.parse ) { - return window.JSON.parse( data ); - } - - // Make sure the incoming data is actual JSON - // Logic borrowed from http://json.org/json2.js - if ( rvalidchars.test( data.replace( rvalidescape, "@" ) - .replace( rvalidtokens, "]" ) - .replace( rvalidbraces, "")) ) { - - return (new Function( "return " + data ))(); - - } - jQuery.error( "Invalid JSON: " + data ); - }, - - // Cross-browser xml parsing - // (xml & tmp used internally) - parseXML: function( data , xml , tmp ) { - - if ( window.DOMParser ) { // Standard - tmp = new DOMParser(); - xml = tmp.parseFromString( data , "text/xml" ); - } else { // IE - xml = new ActiveXObject( "Microsoft.XMLDOM" ); - xml.async = "false"; - xml.loadXML( data ); - } - - tmp = xml.documentElement; - - if ( ! tmp || ! tmp.nodeName || tmp.nodeName === "parsererror" ) { - jQuery.error( "Invalid XML: " + data ); - } - - return xml; - }, - - noop: function() {}, - - // Evaluates a script in a global context - // Workarounds based on findings by Jim Driscoll - // http://weblogs.java.net/blog/driscoll/archive/2009/09/08/eval-javascript-global-context - globalEval: function( data ) { - if ( data && rnotwhite.test( data ) ) { - // We use execScript on Internet Explorer - // We use an anonymous function so that context is window - // rather than jQuery in Firefox - ( window.execScript || function( data ) { - window[ "eval" ].call( window, data ); - } )( data ); - } - }, - - nodeName: function( elem, name ) { - return elem.nodeName && elem.nodeName.toUpperCase() === name.toUpperCase(); - }, - - // args is for internal usage only - each: function( object, callback, args ) { - var name, i = 0, - length = object.length, - isObj = length === undefined || jQuery.isFunction( object ); - - if ( args ) { - if ( isObj ) { - for ( name in object ) { - if ( callback.apply( object[ name ], args ) === false ) { - break; - } - } - } else { - for ( ; i < length; ) { - if ( callback.apply( object[ i++ ], args ) === false ) { - break; - } - } - } - - // A special, fast, case for the most common use of each - } else { - if ( isObj ) { - for ( name in object ) { - if ( callback.call( object[ name ], name, object[ name ] ) === false ) { - break; - } - } - } else { - for ( ; i < length; ) { - if ( callback.call( object[ i ], i, object[ i++ ] ) === false ) { - break; - } - } - } - } - - return object; - }, - - // Use native String.trim function wherever possible - trim: trim ? - function( text ) { - return text == null ? - "" : - trim.call( text ); - } : - - // Otherwise use our own trimming functionality - function( text ) { - return text == null ? 
- "" : - text.toString().replace( trimLeft, "" ).replace( trimRight, "" ); - }, - - // results is for internal usage only - makeArray: function( array, results ) { - var ret = results || []; - - if ( array != null ) { - // The window, strings (and functions) also have 'length' - // The extra typeof function check is to prevent crashes - // in Safari 2 (See: #3039) - // Tweaked logic slightly to handle Blackberry 4.7 RegExp issues #6930 - var type = jQuery.type( array ); - - if ( array.length == null || type === "string" || type === "function" || type === "regexp" || jQuery.isWindow( array ) ) { - push.call( ret, array ); - } else { - jQuery.merge( ret, array ); - } - } - - return ret; - }, - - inArray: function( elem, array ) { - - if ( indexOf ) { - return indexOf.call( array, elem ); - } - - for ( var i = 0, length = array.length; i < length; i++ ) { - if ( array[ i ] === elem ) { - return i; - } - } - - return -1; - }, - - merge: function( first, second ) { - var i = first.length, - j = 0; - - if ( typeof second.length === "number" ) { - for ( var l = second.length; j < l; j++ ) { - first[ i++ ] = second[ j ]; - } - - } else { - while ( second[j] !== undefined ) { - first[ i++ ] = second[ j++ ]; - } - } - - first.length = i; - - return first; - }, - - grep: function( elems, callback, inv ) { - var ret = [], retVal; - inv = !!inv; - - // Go through the array, only saving the items - // that pass the validator function - for ( var i = 0, length = elems.length; i < length; i++ ) { - retVal = !!callback( elems[ i ], i ); - if ( inv !== retVal ) { - ret.push( elems[ i ] ); - } - } - - return ret; - }, - - // arg is for internal usage only - map: function( elems, callback, arg ) { - var value, key, ret = [], - i = 0, - length = elems.length, - // jquery objects are treated as arrays - isArray = elems instanceof jQuery || length !== undefined && typeof length === "number" && ( ( length > 0 && elems[ 0 ] && elems[ length -1 ] ) || length === 0 || jQuery.isArray( elems ) ) ; - - // Go through the array, translating each of the items to their - if ( isArray ) { - for ( ; i < length; i++ ) { - value = callback( elems[ i ], i, arg ); - - if ( value != null ) { - ret[ ret.length ] = value; - } - } - - // Go through every key on the object, - } else { - for ( key in elems ) { - value = callback( elems[ key ], key, arg ); - - if ( value != null ) { - ret[ ret.length ] = value; - } - } - } - - // Flatten any nested arrays - return ret.concat.apply( [], ret ); - }, - - // A global GUID counter for objects - guid: 1, - - // Bind a function to a context, optionally partially applying any - // arguments. - proxy: function( fn, context ) { - if ( typeof context === "string" ) { - var tmp = fn[ context ]; - context = fn; - fn = tmp; - } - - // Quick check to determine if target is callable, in the spec - // this throws a TypeError, but we will just return undefined. 
- if ( !jQuery.isFunction( fn ) ) { - return undefined; - } - - // Simulated bind - var args = slice.call( arguments, 2 ), - proxy = function() { - return fn.apply( context, args.concat( slice.call( arguments ) ) ); - }; - - // Set the guid of unique handler to the same of original handler, so it can be removed - proxy.guid = fn.guid = fn.guid || proxy.guid || jQuery.guid++; - - return proxy; - }, - - // Mutifunctional method to get and set values to a collection - // The value/s can be optionally by executed if its a function - access: function( elems, key, value, exec, fn, pass ) { - var length = elems.length; - - // Setting many attributes - if ( typeof key === "object" ) { - for ( var k in key ) { - jQuery.access( elems, k, key[k], exec, fn, value ); - } - return elems; - } - - // Setting one attribute - if ( value !== undefined ) { - // Optionally, function values get executed if exec is true - exec = !pass && exec && jQuery.isFunction(value); - - for ( var i = 0; i < length; i++ ) { - fn( elems[i], key, exec ? value.call( elems[i], i, fn( elems[i], key ) ) : value, pass ); - } - - return elems; - } - - // Getting an attribute - return length ? fn( elems[0], key ) : undefined; - }, - - now: function() { - return (new Date()).getTime(); - }, - - // Use of jQuery.browser is frowned upon. - // More details: http://docs.jquery.com/Utilities/jQuery.browser - uaMatch: function( ua ) { - ua = ua.toLowerCase(); - - var match = rwebkit.exec( ua ) || - ropera.exec( ua ) || - rmsie.exec( ua ) || - ua.indexOf("compatible") < 0 && rmozilla.exec( ua ) || - []; - - return { browser: match[1] || "", version: match[2] || "0" }; - }, - - sub: function() { - function jQuerySub( selector, context ) { - return new jQuerySub.fn.init( selector, context ); - } - jQuery.extend( true, jQuerySub, this ); - jQuerySub.superclass = this; - jQuerySub.fn = jQuerySub.prototype = this(); - jQuerySub.fn.constructor = jQuerySub; - jQuerySub.sub = this.sub; - jQuerySub.fn.init = function init( selector, context ) { - if ( context && context instanceof jQuery && !(context instanceof jQuerySub) ) { - context = jQuerySub( context ); - } - - return jQuery.fn.init.call( this, selector, context, rootjQuerySub ); - }; - jQuerySub.fn.init.prototype = jQuerySub.fn; - var rootjQuerySub = jQuerySub(document); - return jQuerySub; - }, - - browser: {} -}); - -// Populate the class2type map -jQuery.each("Boolean Number String Function Array Date RegExp Object".split(" "), function(i, name) { - class2type[ "[object " + name + "]" ] = name.toLowerCase(); -}); - -browserMatch = jQuery.uaMatch( userAgent ); -if ( browserMatch.browser ) { - jQuery.browser[ browserMatch.browser ] = true; - jQuery.browser.version = browserMatch.version; -} - -// Deprecated, use jQuery.browser.webkit instead -if ( jQuery.browser.webkit ) { - jQuery.browser.safari = true; -} - -// IE doesn't match non-breaking spaces with \s -if ( rnotwhite.test( "\xA0" ) ) { - trimLeft = /^[\s\xA0]+/; - trimRight = /[\s\xA0]+$/; -} - -// All jQuery objects should point back to these -rootjQuery = jQuery(document); - -// Cleanup functions for the document ready method -if ( document.addEventListener ) { - DOMContentLoaded = function() { - document.removeEventListener( "DOMContentLoaded", DOMContentLoaded, false ); - jQuery.ready(); - }; - -} else if ( document.attachEvent ) { - DOMContentLoaded = function() { - // Make sure body exists, at least, in case IE gets a little overzealous (ticket #5443). 
- if ( document.readyState === "complete" ) { - document.detachEvent( "onreadystatechange", DOMContentLoaded ); - jQuery.ready(); - } - }; -} - -// The DOM ready check for Internet Explorer -function doScrollCheck() { - if ( jQuery.isReady ) { - return; - } - - try { - // If IE is used, use the trick by Diego Perini - // http://javascript.nwbox.com/IEContentLoaded/ - document.documentElement.doScroll("left"); - } catch(e) { - setTimeout( doScrollCheck, 1 ); - return; - } - - // and execute any waiting functions - jQuery.ready(); -} - -// Expose jQuery to the global object -return jQuery; - -})(); - - -var // Promise methods - promiseMethods = "done fail isResolved isRejected promise then always pipe".split( " " ), - // Static reference to slice - sliceDeferred = [].slice; - -jQuery.extend({ - // Create a simple deferred (one callbacks list) - _Deferred: function() { - var // callbacks list - callbacks = [], - // stored [ context , args ] - fired, - // to avoid firing when already doing so - firing, - // flag to know if the deferred has been cancelled - cancelled, - // the deferred itself - deferred = { - - // done( f1, f2, ...) - done: function() { - if ( !cancelled ) { - var args = arguments, - i, - length, - elem, - type, - _fired; - if ( fired ) { - _fired = fired; - fired = 0; - } - for ( i = 0, length = args.length; i < length; i++ ) { - elem = args[ i ]; - type = jQuery.type( elem ); - if ( type === "array" ) { - deferred.done.apply( deferred, elem ); - } else if ( type === "function" ) { - callbacks.push( elem ); - } - } - if ( _fired ) { - deferred.resolveWith( _fired[ 0 ], _fired[ 1 ] ); - } - } - return this; - }, - - // resolve with given context and args - resolveWith: function( context, args ) { - if ( !cancelled && !fired && !firing ) { - // make sure args are available (#8421) - args = args || []; - firing = 1; - try { - while( callbacks[ 0 ] ) { - callbacks.shift().apply( context, args ); - } - } - finally { - fired = [ context, args ]; - firing = 0; - } - } - return this; - }, - - // resolve with this as context and given arguments - resolve: function() { - deferred.resolveWith( this, arguments ); - return this; - }, - - // Has this deferred been resolved? 
- isResolved: function() { - return !!( firing || fired ); - }, - - // Cancel - cancel: function() { - cancelled = 1; - callbacks = []; - return this; - } - }; - - return deferred; - }, - - // Full fledged deferred (two callbacks list) - Deferred: function( func ) { - var deferred = jQuery._Deferred(), - failDeferred = jQuery._Deferred(), - promise; - // Add errorDeferred methods, then and promise - jQuery.extend( deferred, { - then: function( doneCallbacks, failCallbacks ) { - deferred.done( doneCallbacks ).fail( failCallbacks ); - return this; - }, - always: function() { - return deferred.done.apply( deferred, arguments ).fail.apply( this, arguments ); - }, - fail: failDeferred.done, - rejectWith: failDeferred.resolveWith, - reject: failDeferred.resolve, - isRejected: failDeferred.isResolved, - pipe: function( fnDone, fnFail ) { - return jQuery.Deferred(function( newDefer ) { - jQuery.each( { - done: [ fnDone, "resolve" ], - fail: [ fnFail, "reject" ] - }, function( handler, data ) { - var fn = data[ 0 ], - action = data[ 1 ], - returned; - if ( jQuery.isFunction( fn ) ) { - deferred[ handler ](function() { - returned = fn.apply( this, arguments ); - if ( returned && jQuery.isFunction( returned.promise ) ) { - returned.promise().then( newDefer.resolve, newDefer.reject ); - } else { - newDefer[ action ]( returned ); - } - }); - } else { - deferred[ handler ]( newDefer[ action ] ); - } - }); - }).promise(); - }, - // Get a promise for this deferred - // If obj is provided, the promise aspect is added to the object - promise: function( obj ) { - if ( obj == null ) { - if ( promise ) { - return promise; - } - promise = obj = {}; - } - var i = promiseMethods.length; - while( i-- ) { - obj[ promiseMethods[i] ] = deferred[ promiseMethods[i] ]; - } - return obj; - } - }); - // Make sure only one callback list will be used - deferred.done( failDeferred.cancel ).fail( deferred.cancel ); - // Unexpose cancel - delete deferred.cancel; - // Call given func if any - if ( func ) { - func.call( deferred, deferred ); - } - return deferred; - }, - - // Deferred helper - when: function( firstParam ) { - var args = arguments, - i = 0, - length = args.length, - count = length, - deferred = length <= 1 && firstParam && jQuery.isFunction( firstParam.promise ) ? - firstParam : - jQuery.Deferred(); - function resolveFunc( i ) { - return function( value ) { - args[ i ] = arguments.length > 1 ? sliceDeferred.call( arguments, 0 ) : value; - if ( !( --count ) ) { - // Strange bug in FF4: - // Values changed onto the arguments object sometimes end up as undefined values - // outside the $.when method. Cloning the object into a fresh array solves the issue - deferred.resolveWith( deferred, sliceDeferred.call( args, 0 ) ); - } - }; - } - if ( length > 1 ) { - for( ; i < length; i++ ) { - if ( args[ i ] && jQuery.isFunction( args[ i ].promise ) ) { - args[ i ].promise().then( resolveFunc(i), deferred.reject ); - } else { - --count; - } - } - if ( !count ) { - deferred.resolveWith( deferred, args ); - } - } else if ( deferred !== firstParam ) { - deferred.resolveWith( deferred, length ? [ firstParam ] : [] ); - } - return deferred.promise(); - } -}); - - - -jQuery.support = (function() { - - var div = document.createElement( "div" ), - documentElement = document.documentElement, - all, - a, - select, - opt, - input, - marginDiv, - support, - fragment, - body, - bodyStyle, - tds, - events, - eventName, - i, - isSupported; - - // Preliminary tests - div.setAttribute("className", "t"); - div.innerHTML = "
    a"; - - all = div.getElementsByTagName( "*" ); - a = div.getElementsByTagName( "a" )[ 0 ]; - - // Can't get basic test support - if ( !all || !all.length || !a ) { - return {}; - } - - // First batch of supports tests - select = document.createElement( "select" ); - opt = select.appendChild( document.createElement("option") ); - input = div.getElementsByTagName( "input" )[ 0 ]; - - support = { - // IE strips leading whitespace when .innerHTML is used - leadingWhitespace: ( div.firstChild.nodeType === 3 ), - - // Make sure that tbody elements aren't automatically inserted - // IE will insert them into empty tables - tbody: !div.getElementsByTagName( "tbody" ).length, - - // Make sure that link elements get serialized correctly by innerHTML - // This requires a wrapper element in IE - htmlSerialize: !!div.getElementsByTagName( "link" ).length, - - // Get the style information from getAttribute - // (IE uses .cssText instead) - style: /top/.test( a.getAttribute("style") ), - - // Make sure that URLs aren't manipulated - // (IE normalizes it by default) - hrefNormalized: ( a.getAttribute( "href" ) === "/a" ), - - // Make sure that element opacity exists - // (IE uses filter instead) - // Use a regex to work around a WebKit issue. See #5145 - opacity: /^0.55$/.test( a.style.opacity ), - - // Verify style float existence - // (IE uses styleFloat instead of cssFloat) - cssFloat: !!a.style.cssFloat, - - // Make sure that if no value is specified for a checkbox - // that it defaults to "on". - // (WebKit defaults to "" instead) - checkOn: ( input.value === "on" ), - - // Make sure that a selected-by-default option has a working selected property. - // (WebKit defaults to false instead of true, IE too, if it's in an optgroup) - optSelected: opt.selected, - - // Test setAttribute on camelCase class. 
If it works, we need attrFixes when doing get/setAttribute (ie6/7) - getSetAttribute: div.className !== "t", - - // Will be defined later - submitBubbles: true, - changeBubbles: true, - focusinBubbles: false, - deleteExpando: true, - noCloneEvent: true, - inlineBlockNeedsLayout: false, - shrinkWrapBlocks: false, - reliableMarginRight: true - }; - - // Make sure checked status is properly cloned - input.checked = true; - support.noCloneChecked = input.cloneNode( true ).checked; - - // Make sure that the options inside disabled selects aren't marked as disabled - // (WebKit marks them as disabled) - select.disabled = true; - support.optDisabled = !opt.disabled; - - // Test to see if it's possible to delete an expando from an element - // Fails in Internet Explorer - try { - delete div.test; - } catch( e ) { - support.deleteExpando = false; - } - - if ( !div.addEventListener && div.attachEvent && div.fireEvent ) { - div.attachEvent( "onclick", function click() { - // Cloning a node shouldn't copy over any - // bound event handlers (IE does this) - support.noCloneEvent = false; - div.detachEvent( "onclick", click ); - }); - div.cloneNode( true ).fireEvent( "onclick" ); - } - - // Check if a radio maintains it's value - // after being appended to the DOM - input = document.createElement("input"); - input.value = "t"; - input.setAttribute("type", "radio"); - support.radioValue = input.value === "t"; - - input.setAttribute("checked", "checked"); - div.appendChild( input ); - fragment = document.createDocumentFragment(); - fragment.appendChild( div.firstChild ); - - // WebKit doesn't clone checked state correctly in fragments - support.checkClone = fragment.cloneNode( true ).cloneNode( true ).lastChild.checked; - - div.innerHTML = ""; - - // Figure out if the W3C box model works as expected - div.style.width = div.style.paddingLeft = "1px"; - - // We use our own, invisible, body - body = document.createElement( "body" ); - bodyStyle = { - visibility: "hidden", - width: 0, - height: 0, - border: 0, - margin: 0, - // Set background to avoid IE crashes when removing (#9028) - background: "none" - }; - for ( i in bodyStyle ) { - body.style[ i ] = bodyStyle[ i ]; - } - body.appendChild( div ); - documentElement.insertBefore( body, documentElement.firstChild ); - - // Check if a disconnected checkbox will retain its checked - // value of true after appended to the DOM (IE6/7) - support.appendChecked = input.checked; - - support.boxModel = div.offsetWidth === 2; - - if ( "zoom" in div.style ) { - // Check if natively block-level elements act like inline-block - // elements when setting their display to 'inline' and giving - // them layout - // (IE < 8 does this) - div.style.display = "inline"; - div.style.zoom = 1; - support.inlineBlockNeedsLayout = ( div.offsetWidth === 2 ); - - // Check if elements with layout shrink-wrap their children - // (IE 6 does this) - div.style.display = ""; - div.innerHTML = "
    "; - support.shrinkWrapBlocks = ( div.offsetWidth !== 2 ); - } - - div.innerHTML = "
    t
    "; - tds = div.getElementsByTagName( "td" ); - - // Check if table cells still have offsetWidth/Height when they are set - // to display:none and there are still other visible table cells in a - // table row; if so, offsetWidth/Height are not reliable for use when - // determining if an element has been hidden directly using - // display:none (it is still safe to use offsets if a parent element is - // hidden; don safety goggles and see bug #4512 for more information). - // (only IE 8 fails this test) - isSupported = ( tds[ 0 ].offsetHeight === 0 ); - - tds[ 0 ].style.display = ""; - tds[ 1 ].style.display = "none"; - - // Check if empty table cells still have offsetWidth/Height - // (IE < 8 fail this test) - support.reliableHiddenOffsets = isSupported && ( tds[ 0 ].offsetHeight === 0 ); - div.innerHTML = ""; - - // Check if div with explicit width and no margin-right incorrectly - // gets computed margin-right based on width of container. For more - // info see bug #3333 - // Fails in WebKit before Feb 2011 nightlies - // WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right - if ( document.defaultView && document.defaultView.getComputedStyle ) { - marginDiv = document.createElement( "div" ); - marginDiv.style.width = "0"; - marginDiv.style.marginRight = "0"; - div.appendChild( marginDiv ); - support.reliableMarginRight = - ( parseInt( ( document.defaultView.getComputedStyle( marginDiv, null ) || { marginRight: 0 } ).marginRight, 10 ) || 0 ) === 0; - } - - // Remove the body element we added - body.innerHTML = ""; - documentElement.removeChild( body ); - - // Technique from Juriy Zaytsev - // http://thinkweb2.com/projects/prototype/detecting-event-support-without-browser-sniffing/ - // We only care about the case where non-standard event systems - // are used, namely in IE. Short-circuiting here helps us to - // avoid an eval call (in setAttribute) which can cause CSP - // to go haywire. See: https://developer.mozilla.org/en/Security/CSP - if ( div.attachEvent ) { - for( i in { - submit: 1, - change: 1, - focusin: 1 - } ) { - eventName = "on" + i; - isSupported = ( eventName in div ); - if ( !isSupported ) { - div.setAttribute( eventName, "return;" ); - isSupported = ( typeof div[ eventName ] === "function" ); - } - support[ i + "Bubbles" ] = isSupported; - } - } - - return support; -})(); - -// Keep track of boxModel -jQuery.boxModel = jQuery.support.boxModel; - - - - -var rbrace = /^(?:\{.*\}|\[.*\])$/, - rmultiDash = /([a-z])([A-Z])/g; - -jQuery.extend({ - cache: {}, - - // Please use with caution - uuid: 0, - - // Unique for each copy of jQuery on the page - // Non-digits removed to match rinlinejQuery - expando: "jQuery" + ( jQuery.fn.jquery + Math.random() ).replace( /\D/g, "" ), - - // The following elements throw uncatchable exceptions if you - // attempt to add expando properties to them. - noData: { - "embed": true, - // Ban all objects except for Flash (which handle expandos) - "object": "clsid:D27CDB6E-AE6D-11cf-96B8-444553540000", - "applet": true - }, - - hasData: function( elem ) { - elem = elem.nodeType ? 
jQuery.cache[ elem[jQuery.expando] ] : elem[ jQuery.expando ]; - - return !!elem && !isEmptyDataObject( elem ); - }, - - data: function( elem, name, data, pvt /* Internal Use Only */ ) { - if ( !jQuery.acceptData( elem ) ) { - return; - } - - var internalKey = jQuery.expando, getByName = typeof name === "string", thisCache, - - // We have to handle DOM nodes and JS objects differently because IE6-7 - // can't GC object references properly across the DOM-JS boundary - isNode = elem.nodeType, - - // Only DOM nodes need the global jQuery cache; JS object data is - // attached directly to the object so GC can occur automatically - cache = isNode ? jQuery.cache : elem, - - // Only defining an ID for JS objects if its cache already exists allows - // the code to shortcut on the same path as a DOM node with no cache - id = isNode ? elem[ jQuery.expando ] : elem[ jQuery.expando ] && jQuery.expando; - - // Avoid doing any more work than we need to when trying to get data on an - // object that has no data at all - if ( (!id || (pvt && id && !cache[ id ][ internalKey ])) && getByName && data === undefined ) { - return; - } - - if ( !id ) { - // Only DOM nodes need a new unique ID for each element since their data - // ends up in the global cache - if ( isNode ) { - elem[ jQuery.expando ] = id = ++jQuery.uuid; - } else { - id = jQuery.expando; - } - } - - if ( !cache[ id ] ) { - cache[ id ] = {}; - - // TODO: This is a hack for 1.5 ONLY. Avoids exposing jQuery - // metadata on plain JS objects when the object is serialized using - // JSON.stringify - if ( !isNode ) { - cache[ id ].toJSON = jQuery.noop; - } - } - - // An object can be passed to jQuery.data instead of a key/value pair; this gets - // shallow copied over onto the existing cache - if ( typeof name === "object" || typeof name === "function" ) { - if ( pvt ) { - cache[ id ][ internalKey ] = jQuery.extend(cache[ id ][ internalKey ], name); - } else { - cache[ id ] = jQuery.extend(cache[ id ], name); - } - } - - thisCache = cache[ id ]; - - // Internal jQuery data is stored in a separate object inside the object's data - // cache in order to avoid key collisions between internal data and user-defined - // data - if ( pvt ) { - if ( !thisCache[ internalKey ] ) { - thisCache[ internalKey ] = {}; - } - - thisCache = thisCache[ internalKey ]; - } - - if ( data !== undefined ) { - thisCache[ jQuery.camelCase( name ) ] = data; - } - - // TODO: This is a hack for 1.5 ONLY. It will be removed in 1.6. Users should - // not attempt to inspect the internal events object using jQuery.data, as this - // internal data object is undocumented and subject to change. - if ( name === "events" && !thisCache[name] ) { - return thisCache[ internalKey ] && thisCache[ internalKey ].events; - } - - return getByName ? thisCache[ jQuery.camelCase( name ) ] : thisCache; - }, - - removeData: function( elem, name, pvt /* Internal Use Only */ ) { - if ( !jQuery.acceptData( elem ) ) { - return; - } - - var internalKey = jQuery.expando, isNode = elem.nodeType, - - // See jQuery.data for more information - cache = isNode ? jQuery.cache : elem, - - // See jQuery.data for more information - id = isNode ? elem[ jQuery.expando ] : jQuery.expando; - - // If there is already no cache entry for this object, there is no - // purpose in continuing - if ( !cache[ id ] ) { - return; - } - - if ( name ) { - var thisCache = pvt ? 
cache[ id ][ internalKey ] : cache[ id ]; - - if ( thisCache ) { - delete thisCache[ name ]; - - // If there is no data left in the cache, we want to continue - // and let the cache object itself get destroyed - if ( !isEmptyDataObject(thisCache) ) { - return; - } - } - } - - // See jQuery.data for more information - if ( pvt ) { - delete cache[ id ][ internalKey ]; - - // Don't destroy the parent cache unless the internal data object - // had been the only thing left in it - if ( !isEmptyDataObject(cache[ id ]) ) { - return; - } - } - - var internalCache = cache[ id ][ internalKey ]; - - // Browsers that fail expando deletion also refuse to delete expandos on - // the window, but it will allow it on all other JS objects; other browsers - // don't care - if ( jQuery.support.deleteExpando || cache != window ) { - delete cache[ id ]; - } else { - cache[ id ] = null; - } - - // We destroyed the entire user cache at once because it's faster than - // iterating through each key, but we need to continue to persist internal - // data if it existed - if ( internalCache ) { - cache[ id ] = {}; - // TODO: This is a hack for 1.5 ONLY. Avoids exposing jQuery - // metadata on plain JS objects when the object is serialized using - // JSON.stringify - if ( !isNode ) { - cache[ id ].toJSON = jQuery.noop; - } - - cache[ id ][ internalKey ] = internalCache; - - // Otherwise, we need to eliminate the expando on the node to avoid - // false lookups in the cache for entries that no longer exist - } else if ( isNode ) { - // IE does not allow us to delete expando properties from nodes, - // nor does it have a removeAttribute function on Document nodes; - // we must handle all of these cases - if ( jQuery.support.deleteExpando ) { - delete elem[ jQuery.expando ]; - } else if ( elem.removeAttribute ) { - elem.removeAttribute( jQuery.expando ); - } else { - elem[ jQuery.expando ] = null; - } - } - }, - - // For internal use only. - _data: function( elem, name, data ) { - return jQuery.data( elem, name, data, true ); - }, - - // A method for determining if a DOM node can handle the data expando - acceptData: function( elem ) { - if ( elem.nodeName ) { - var match = jQuery.noData[ elem.nodeName.toLowerCase() ]; - - if ( match ) { - return !(match === true || elem.getAttribute("classid") !== match); - } - } - - return true; - } -}); - -jQuery.fn.extend({ - data: function( key, value ) { - var data = null; - - if ( typeof key === "undefined" ) { - if ( this.length ) { - data = jQuery.data( this[0] ); - - if ( this[0].nodeType === 1 ) { - var attr = this[0].attributes, name; - for ( var i = 0, l = attr.length; i < l; i++ ) { - name = attr[i].name; - - if ( name.indexOf( "data-" ) === 0 ) { - name = jQuery.camelCase( name.substring(5) ); - - dataAttr( this[0], name, data[ name ] ); - } - } - } - } - - return data; - - } else if ( typeof key === "object" ) { - return this.each(function() { - jQuery.data( this, key ); - }); - } - - var parts = key.split("."); - parts[1] = parts[1] ? "." + parts[1] : ""; - - if ( value === undefined ) { - data = this.triggerHandler("getData" + parts[1] + "!", [parts[0]]); - - // Try to fetch any internally stored data first - if ( data === undefined && this.length ) { - data = jQuery.data( this[0], key ); - data = dataAttr( this[0], key, data ); - } - - return data === undefined && parts[1] ? 
- this.data( parts[0] ) : - data; - - } else { - return this.each(function() { - var $this = jQuery( this ), - args = [ parts[0], value ]; - - $this.triggerHandler( "setData" + parts[1] + "!", args ); - jQuery.data( this, key, value ); - $this.triggerHandler( "changeData" + parts[1] + "!", args ); - }); - } - }, - - removeData: function( key ) { - return this.each(function() { - jQuery.removeData( this, key ); - }); - } -}); - -function dataAttr( elem, key, data ) { - // If nothing was found internally, try to fetch any - // data from the HTML5 data-* attribute - if ( data === undefined && elem.nodeType === 1 ) { - var name = "data-" + key.replace( rmultiDash, "$1-$2" ).toLowerCase(); - - data = elem.getAttribute( name ); - - if ( typeof data === "string" ) { - try { - data = data === "true" ? true : - data === "false" ? false : - data === "null" ? null : - !jQuery.isNaN( data ) ? parseFloat( data ) : - rbrace.test( data ) ? jQuery.parseJSON( data ) : - data; - } catch( e ) {} - - // Make sure we set the data so it isn't changed later - jQuery.data( elem, key, data ); - - } else { - data = undefined; - } - } - - return data; -} - -// TODO: This is a hack for 1.5 ONLY to allow objects with a single toJSON -// property to be considered empty objects; this property always exists in -// order to make sure JSON.stringify does not expose internal metadata -function isEmptyDataObject( obj ) { - for ( var name in obj ) { - if ( name !== "toJSON" ) { - return false; - } - } - - return true; -} - - - - -function handleQueueMarkDefer( elem, type, src ) { - var deferDataKey = type + "defer", - queueDataKey = type + "queue", - markDataKey = type + "mark", - defer = jQuery.data( elem, deferDataKey, undefined, true ); - if ( defer && - ( src === "queue" || !jQuery.data( elem, queueDataKey, undefined, true ) ) && - ( src === "mark" || !jQuery.data( elem, markDataKey, undefined, true ) ) ) { - // Give room for hard-coded callbacks to fire first - // and eventually mark/queue something else on the element - setTimeout( function() { - if ( !jQuery.data( elem, queueDataKey, undefined, true ) && - !jQuery.data( elem, markDataKey, undefined, true ) ) { - jQuery.removeData( elem, deferDataKey, true ); - defer.resolve(); - } - }, 0 ); - } -} - -jQuery.extend({ - - _mark: function( elem, type ) { - if ( elem ) { - type = (type || "fx") + "mark"; - jQuery.data( elem, type, (jQuery.data(elem,type,undefined,true) || 0) + 1, true ); - } - }, - - _unmark: function( force, elem, type ) { - if ( force !== true ) { - type = elem; - elem = force; - force = false; - } - if ( elem ) { - type = type || "fx"; - var key = type + "mark", - count = force ? 
0 : ( (jQuery.data( elem, key, undefined, true) || 1 ) - 1 ); - if ( count ) { - jQuery.data( elem, key, count, true ); - } else { - jQuery.removeData( elem, key, true ); - handleQueueMarkDefer( elem, type, "mark" ); - } - } - }, - - queue: function( elem, type, data ) { - if ( elem ) { - type = (type || "fx") + "queue"; - var q = jQuery.data( elem, type, undefined, true ); - // Speed up dequeue by getting out quickly if this is just a lookup - if ( data ) { - if ( !q || jQuery.isArray(data) ) { - q = jQuery.data( elem, type, jQuery.makeArray(data), true ); - } else { - q.push( data ); - } - } - return q || []; - } - }, - - dequeue: function( elem, type ) { - type = type || "fx"; - - var queue = jQuery.queue( elem, type ), - fn = queue.shift(), - defer; - - // If the fx queue is dequeued, always remove the progress sentinel - if ( fn === "inprogress" ) { - fn = queue.shift(); - } - - if ( fn ) { - // Add a progress sentinel to prevent the fx queue from being - // automatically dequeued - if ( type === "fx" ) { - queue.unshift("inprogress"); - } - - fn.call(elem, function() { - jQuery.dequeue(elem, type); - }); - } - - if ( !queue.length ) { - jQuery.removeData( elem, type + "queue", true ); - handleQueueMarkDefer( elem, type, "queue" ); - } - } -}); - -jQuery.fn.extend({ - queue: function( type, data ) { - if ( typeof type !== "string" ) { - data = type; - type = "fx"; - } - - if ( data === undefined ) { - return jQuery.queue( this[0], type ); - } - return this.each(function() { - var queue = jQuery.queue( this, type, data ); - - if ( type === "fx" && queue[0] !== "inprogress" ) { - jQuery.dequeue( this, type ); - } - }); - }, - dequeue: function( type ) { - return this.each(function() { - jQuery.dequeue( this, type ); - }); - }, - // Based off of the plugin by Clint Helfers, with permission. - // http://blindsignals.com/index.php/2009/07/jquery-delay/ - delay: function( time, type ) { - time = jQuery.fx ? 
jQuery.fx.speeds[time] || time : time; - type = type || "fx"; - - return this.queue( type, function() { - var elem = this; - setTimeout(function() { - jQuery.dequeue( elem, type ); - }, time ); - }); - }, - clearQueue: function( type ) { - return this.queue( type || "fx", [] ); - }, - // Get a promise resolved when queues of a certain type - // are emptied (fx is the type by default) - promise: function( type, object ) { - if ( typeof type !== "string" ) { - object = type; - type = undefined; - } - type = type || "fx"; - var defer = jQuery.Deferred(), - elements = this, - i = elements.length, - count = 1, - deferDataKey = type + "defer", - queueDataKey = type + "queue", - markDataKey = type + "mark", - tmp; - function resolve() { - if ( !( --count ) ) { - defer.resolveWith( elements, [ elements ] ); - } - } - while( i-- ) { - if (( tmp = jQuery.data( elements[ i ], deferDataKey, undefined, true ) || - ( jQuery.data( elements[ i ], queueDataKey, undefined, true ) || - jQuery.data( elements[ i ], markDataKey, undefined, true ) ) && - jQuery.data( elements[ i ], deferDataKey, jQuery._Deferred(), true ) )) { - count++; - tmp.done( resolve ); - } - } - resolve(); - return defer.promise(); - } -}); - - - - -var rclass = /[\n\t\r]/g, - rspace = /\s+/, - rreturn = /\r/g, - rtype = /^(?:button|input)$/i, - rfocusable = /^(?:button|input|object|select|textarea)$/i, - rclickable = /^a(?:rea)?$/i, - rboolean = /^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i, - rinvalidChar = /\:/, - formHook, boolHook; - -jQuery.fn.extend({ - attr: function( name, value ) { - return jQuery.access( this, name, value, true, jQuery.attr ); - }, - - removeAttr: function( name ) { - return this.each(function() { - jQuery.removeAttr( this, name ); - }); - }, - - prop: function( name, value ) { - return jQuery.access( this, name, value, true, jQuery.prop ); - }, - - removeProp: function( name ) { - name = jQuery.propFix[ name ] || name; - return this.each(function() { - // try/catch handles cases where IE balks (such as removing a property on window) - try { - this[ name ] = undefined; - delete this[ name ]; - } catch( e ) {} - }); - }, - - addClass: function( value ) { - if ( jQuery.isFunction( value ) ) { - return this.each(function(i) { - var self = jQuery(this); - self.addClass( value.call(this, i, self.attr("class") || "") ); - }); - } - - if ( value && typeof value === "string" ) { - var classNames = (value || "").split( rspace ); - - for ( var i = 0, l = this.length; i < l; i++ ) { - var elem = this[i]; - - if ( elem.nodeType === 1 ) { - if ( !elem.className ) { - elem.className = value; - - } else { - var className = " " + elem.className + " ", - setClass = elem.className; - - for ( var c = 0, cl = classNames.length; c < cl; c++ ) { - if ( className.indexOf( " " + classNames[c] + " " ) < 0 ) { - setClass += " " + classNames[c]; - } - } - elem.className = jQuery.trim( setClass ); - } - } - } - } - - return this; - }, - - removeClass: function( value ) { - if ( jQuery.isFunction(value) ) { - return this.each(function(i) { - var self = jQuery(this); - self.removeClass( value.call(this, i, self.attr("class")) ); - }); - } - - if ( (value && typeof value === "string") || value === undefined ) { - var classNames = (value || "").split( rspace ); - - for ( var i = 0, l = this.length; i < l; i++ ) { - var elem = this[i]; - - if ( elem.nodeType === 1 && elem.className ) { - if ( value ) { - var className = (" " + elem.className + " ").replace(rclass, 
" "); - for ( var c = 0, cl = classNames.length; c < cl; c++ ) { - className = className.replace(" " + classNames[c] + " ", " "); - } - elem.className = jQuery.trim( className ); - - } else { - elem.className = ""; - } - } - } - } - - return this; - }, - - toggleClass: function( value, stateVal ) { - var type = typeof value, - isBool = typeof stateVal === "boolean"; - - if ( jQuery.isFunction( value ) ) { - return this.each(function(i) { - var self = jQuery(this); - self.toggleClass( value.call(this, i, self.attr("class"), stateVal), stateVal ); - }); - } - - return this.each(function() { - if ( type === "string" ) { - // toggle individual class names - var className, - i = 0, - self = jQuery( this ), - state = stateVal, - classNames = value.split( rspace ); - - while ( (className = classNames[ i++ ]) ) { - // check each className given, space seperated list - state = isBool ? state : !self.hasClass( className ); - self[ state ? "addClass" : "removeClass" ]( className ); - } - - } else if ( type === "undefined" || type === "boolean" ) { - if ( this.className ) { - // store className if set - jQuery._data( this, "__className__", this.className ); - } - - // toggle whole className - this.className = this.className || value === false ? "" : jQuery._data( this, "__className__" ) || ""; - } - }); - }, - - hasClass: function( selector ) { - var className = " " + selector + " "; - for ( var i = 0, l = this.length; i < l; i++ ) { - if ( (" " + this[i].className + " ").replace(rclass, " ").indexOf( className ) > -1 ) { - return true; - } - } - - return false; - }, - - val: function( value ) { - var hooks, ret, - elem = this[0]; - - if ( !arguments.length ) { - if ( elem ) { - hooks = jQuery.valHooks[ elem.nodeName.toLowerCase() ] || jQuery.valHooks[ elem.type ]; - - if ( hooks && "get" in hooks && (ret = hooks.get( elem, "value" )) !== undefined ) { - return ret; - } - - return (elem.value || "").replace(rreturn, ""); - } - - return undefined; - } - - var isFunction = jQuery.isFunction( value ); - - return this.each(function( i ) { - var self = jQuery(this), val; - - if ( this.nodeType !== 1 ) { - return; - } - - if ( isFunction ) { - val = value.call( this, i, self.val() ); - } else { - val = value; - } - - // Treat null/undefined as ""; convert numbers to string - if ( val == null ) { - val = ""; - } else if ( typeof val === "number" ) { - val += ""; - } else if ( jQuery.isArray( val ) ) { - val = jQuery.map(val, function ( value ) { - return value == null ? "" : value + ""; - }); - } - - hooks = jQuery.valHooks[ this.nodeName.toLowerCase() ] || jQuery.valHooks[ this.type ]; - - // If set returns undefined, fall back to normal setting - if ( !hooks || !("set" in hooks) || hooks.set( this, val, "value" ) === undefined ) { - this.value = val; - } - }); - } -}); - -jQuery.extend({ - valHooks: { - option: { - get: function( elem ) { - // attributes.value is undefined in Blackberry 4.7 but - // uses .value. See #6932 - var val = elem.attributes.value; - return !val || val.specified ? elem.value : elem.text; - } - }, - select: { - get: function( elem ) { - var value, - index = elem.selectedIndex, - values = [], - options = elem.options, - one = elem.type === "select-one"; - - // Nothing was selected - if ( index < 0 ) { - return null; - } - - // Loop through all the selected options - for ( var i = one ? index : 0, max = one ? 
index + 1 : options.length; i < max; i++ ) { - var option = options[ i ]; - - // Don't return options that are disabled or in a disabled optgroup - if ( option.selected && (jQuery.support.optDisabled ? !option.disabled : option.getAttribute("disabled") === null) && - (!option.parentNode.disabled || !jQuery.nodeName( option.parentNode, "optgroup" )) ) { - - // Get the specific value for the option - value = jQuery( option ).val(); - - // We don't need an array for one selects - if ( one ) { - return value; - } - - // Multi-Selects return an array - values.push( value ); - } - } - - // Fixes Bug #2551 -- select.val() broken in IE after form.reset() - if ( one && !values.length && options.length ) { - return jQuery( options[ index ] ).val(); - } - - return values; - }, - - set: function( elem, value ) { - var values = jQuery.makeArray( value ); - - jQuery(elem).find("option").each(function() { - this.selected = jQuery.inArray( jQuery(this).val(), values ) >= 0; - }); - - if ( !values.length ) { - elem.selectedIndex = -1; - } - return values; - } - } - }, - - attrFn: { - val: true, - css: true, - html: true, - text: true, - data: true, - width: true, - height: true, - offset: true - }, - - attrFix: { - // Always normalize to ensure hook usage - tabindex: "tabIndex" - }, - - attr: function( elem, name, value, pass ) { - var nType = elem.nodeType; - - // don't get/set attributes on text, comment and attribute nodes - if ( !elem || nType === 3 || nType === 8 || nType === 2 ) { - return undefined; - } - - if ( pass && name in jQuery.attrFn ) { - return jQuery( elem )[ name ]( value ); - } - - // Fallback to prop when attributes are not supported - if ( !("getAttribute" in elem) ) { - return jQuery.prop( elem, name, value ); - } - - var ret, hooks, - notxml = nType !== 1 || !jQuery.isXMLDoc( elem ); - - // Normalize the name if needed - name = notxml && jQuery.attrFix[ name ] || name; - - hooks = jQuery.attrHooks[ name ]; - - if ( !hooks ) { - // Use boolHook for boolean attributes - if ( rboolean.test( name ) && - (typeof value === "boolean" || value === undefined || value.toLowerCase() === name.toLowerCase()) ) { - - hooks = boolHook; - - // Use formHook for forms and if the name contains certain characters - } else if ( formHook && (jQuery.nodeName( elem, "form" ) || rinvalidChar.test( name )) ) { - hooks = formHook; - } - } - - if ( value !== undefined ) { - - if ( value === null ) { - jQuery.removeAttr( elem, name ); - return undefined; - - } else if ( hooks && "set" in hooks && notxml && (ret = hooks.set( elem, value, name )) !== undefined ) { - return ret; - - } else { - elem.setAttribute( name, "" + value ); - return value; - } - - } else if ( hooks && "get" in hooks && notxml ) { - return hooks.get( elem, name ); - - } else { - - ret = elem.getAttribute( name ); - - // Non-existent attributes return null, we normalize to undefined - return ret === null ? 
- undefined : - ret; - } - }, - - removeAttr: function( elem, name ) { - var propName; - if ( elem.nodeType === 1 ) { - name = jQuery.attrFix[ name ] || name; - - if ( jQuery.support.getSetAttribute ) { - // Use removeAttribute in browsers that support it - elem.removeAttribute( name ); - } else { - jQuery.attr( elem, name, "" ); - elem.removeAttributeNode( elem.getAttributeNode( name ) ); - } - - // Set corresponding property to false for boolean attributes - if ( rboolean.test( name ) && (propName = jQuery.propFix[ name ] || name) in elem ) { - elem[ propName ] = false; - } - } - }, - - attrHooks: { - type: { - set: function( elem, value ) { - // We can't allow the type property to be changed (since it causes problems in IE) - if ( rtype.test( elem.nodeName ) && elem.parentNode ) { - jQuery.error( "type property can't be changed" ); - } else if ( !jQuery.support.radioValue && value === "radio" && jQuery.nodeName(elem, "input") ) { - // Setting the type on a radio button after the value resets the value in IE6-9 - // Reset value to it's default in case type is set after value - // This is for element creation - var val = elem.value; - elem.setAttribute( "type", value ); - if ( val ) { - elem.value = val; - } - return value; - } - } - }, - tabIndex: { - get: function( elem ) { - // elem.tabIndex doesn't always return the correct value when it hasn't been explicitly set - // http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ - var attributeNode = elem.getAttributeNode("tabIndex"); - - return attributeNode && attributeNode.specified ? - parseInt( attributeNode.value, 10 ) : - rfocusable.test( elem.nodeName ) || rclickable.test( elem.nodeName ) && elem.href ? - 0 : - undefined; - } - } - }, - - propFix: { - tabindex: "tabIndex", - readonly: "readOnly", - "for": "htmlFor", - "class": "className", - maxlength: "maxLength", - cellspacing: "cellSpacing", - cellpadding: "cellPadding", - rowspan: "rowSpan", - colspan: "colSpan", - usemap: "useMap", - frameborder: "frameBorder", - contenteditable: "contentEditable" - }, - - prop: function( elem, name, value ) { - var nType = elem.nodeType; - - // don't get/set properties on text, comment and attribute nodes - if ( !elem || nType === 3 || nType === 8 || nType === 2 ) { - return undefined; - } - - var ret, hooks, - notxml = nType !== 1 || !jQuery.isXMLDoc( elem ); - - // Try to normalize/fix the name - name = notxml && jQuery.propFix[ name ] || name; - - hooks = jQuery.propHooks[ name ]; - - if ( value !== undefined ) { - if ( hooks && "set" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ) { - return ret; - - } else { - return (elem[ name ] = value); - } - - } else { - if ( hooks && "get" in hooks && (ret = hooks.get( elem, name )) !== undefined ) { - return ret; - - } else { - return elem[ name ]; - } - } - }, - - propHooks: {} -}); - -// Hook for boolean attributes -boolHook = { - get: function( elem, name ) { - // Align boolean attributes with corresponding properties - return elem[ jQuery.propFix[ name ] || name ] ? 
- name.toLowerCase() : - undefined; - }, - set: function( elem, value, name ) { - var propName; - if ( value === false ) { - // Remove boolean attributes when set to false - jQuery.removeAttr( elem, name ); - } else { - // value is true since we know at this point it's type boolean and not false - // Set boolean attributes to the same name and set the DOM property - propName = jQuery.propFix[ name ] || name; - if ( propName in elem ) { - // Only set the IDL specifically if it already exists on the element - elem[ propName ] = value; - } - - elem.setAttribute( name, name.toLowerCase() ); - } - return name; - } -}; - -// Use the value property for back compat -// Use the formHook for button elements in IE6/7 (#1954) -jQuery.attrHooks.value = { - get: function( elem, name ) { - if ( formHook && jQuery.nodeName( elem, "button" ) ) { - return formHook.get( elem, name ); - } - return elem.value; - }, - set: function( elem, value, name ) { - if ( formHook && jQuery.nodeName( elem, "button" ) ) { - return formHook.set( elem, value, name ); - } - // Does not return so that setAttribute is also used - elem.value = value; - } -}; - -// IE6/7 do not support getting/setting some attributes with get/setAttribute -if ( !jQuery.support.getSetAttribute ) { - - // propFix is more comprehensive and contains all fixes - jQuery.attrFix = jQuery.propFix; - - // Use this for any attribute on a form in IE6/7 - formHook = jQuery.attrHooks.name = jQuery.valHooks.button = { - get: function( elem, name ) { - var ret; - ret = elem.getAttributeNode( name ); - // Return undefined if nodeValue is empty string - return ret && ret.nodeValue !== "" ? - ret.nodeValue : - undefined; - }, - set: function( elem, value, name ) { - // Check form objects in IE (multiple bugs related) - // Only use nodeValue if the attribute node exists on the form - var ret = elem.getAttributeNode( name ); - if ( ret ) { - ret.nodeValue = value; - return value; - } - } - }; - - // Set width and height to auto instead of 0 on empty string( Bug #8150 ) - // This is for removals - jQuery.each([ "width", "height" ], function( i, name ) { - jQuery.attrHooks[ name ] = jQuery.extend( jQuery.attrHooks[ name ], { - set: function( elem, value ) { - if ( value === "" ) { - elem.setAttribute( name, "auto" ); - return value; - } - } - }); - }); -} - - -// Some attributes require a special call on IE -if ( !jQuery.support.hrefNormalized ) { - jQuery.each([ "href", "src", "width", "height" ], function( i, name ) { - jQuery.attrHooks[ name ] = jQuery.extend( jQuery.attrHooks[ name ], { - get: function( elem ) { - var ret = elem.getAttribute( name, 2 ); - return ret === null ? 
undefined : ret; - } - }); - }); -} - -if ( !jQuery.support.style ) { - jQuery.attrHooks.style = { - get: function( elem ) { - // Return undefined in the case of empty string - // Normalize to lowercase since IE uppercases css property names - return elem.style.cssText.toLowerCase() || undefined; - }, - set: function( elem, value ) { - return (elem.style.cssText = "" + value); - } - }; -} - -// Safari mis-reports the default selected property of an option -// Accessing the parent's selectedIndex property fixes it -if ( !jQuery.support.optSelected ) { - jQuery.propHooks.selected = jQuery.extend( jQuery.propHooks.selected, { - get: function( elem ) { - var parent = elem.parentNode; - - if ( parent ) { - parent.selectedIndex; - - // Make sure that it also works with optgroups, see #5701 - if ( parent.parentNode ) { - parent.parentNode.selectedIndex; - } - } - } - }); -} - -// Radios and checkboxes getter/setter -if ( !jQuery.support.checkOn ) { - jQuery.each([ "radio", "checkbox" ], function() { - jQuery.valHooks[ this ] = { - get: function( elem ) { - // Handle the case where in Webkit "" is returned instead of "on" if a value isn't specified - return elem.getAttribute("value") === null ? "on" : elem.value; - } - }; - }); -} -jQuery.each([ "radio", "checkbox" ], function() { - jQuery.valHooks[ this ] = jQuery.extend( jQuery.valHooks[ this ], { - set: function( elem, value ) { - if ( jQuery.isArray( value ) ) { - return (elem.checked = jQuery.inArray( jQuery(elem).val(), value ) >= 0); - } - } - }); -}); - - - - -var hasOwn = Object.prototype.hasOwnProperty, - rnamespaces = /\.(.*)$/, - rformElems = /^(?:textarea|input|select)$/i, - rperiod = /\./g, - rspaces = / /g, - rescape = /[^\w\s.|`]/g, - fcleanup = function( nm ) { - return nm.replace(rescape, "\\$&"); - }; - -/* - * A number of helper functions used for managing events. - * Many of the ideas behind this code originated from - * Dean Edwards' addEvent library. - */ -jQuery.event = { - - // Bind an event to an element - // Original by Dean Edwards - add: function( elem, types, handler, data ) { - if ( elem.nodeType === 3 || elem.nodeType === 8 ) { - return; - } - - if ( handler === false ) { - handler = returnFalse; - } else if ( !handler ) { - // Fixes bug #7229. Fix recommended by jdalton - return; - } - - var handleObjIn, handleObj; - - if ( handler.handler ) { - handleObjIn = handler; - handler = handleObjIn.handler; - } - - // Make sure that the function being executed has a unique ID - if ( !handler.guid ) { - handler.guid = jQuery.guid++; - } - - // Init the element's event structure - var elemData = jQuery._data( elem ); - - // If no elemData is found then we must be trying to bind to one of the - // banned noData elements - if ( !elemData ) { - return; - } - - var events = elemData.events, - eventHandle = elemData.handle; - - if ( !events ) { - elemData.events = events = {}; - } - - if ( !eventHandle ) { - elemData.handle = eventHandle = function( e ) { - // Discard the second event of a jQuery.event.trigger() and - // when an event is called after a page has unloaded - return typeof jQuery !== "undefined" && (!e || jQuery.event.triggered !== e.type) ? - jQuery.event.handle.apply( eventHandle.elem, arguments ) : - undefined; - }; - } - - // Add elem as a property of the handle function - // This is to prevent a memory leak with non-native events in IE. 
- eventHandle.elem = elem; - - // Handle multiple events separated by a space - // jQuery(...).bind("mouseover mouseout", fn); - types = types.split(" "); - - var type, i = 0, namespaces; - - while ( (type = types[ i++ ]) ) { - handleObj = handleObjIn ? - jQuery.extend({}, handleObjIn) : - { handler: handler, data: data }; - - // Namespaced event handlers - if ( type.indexOf(".") > -1 ) { - namespaces = type.split("."); - type = namespaces.shift(); - handleObj.namespace = namespaces.slice(0).sort().join("."); - - } else { - namespaces = []; - handleObj.namespace = ""; - } - - handleObj.type = type; - if ( !handleObj.guid ) { - handleObj.guid = handler.guid; - } - - // Get the current list of functions bound to this event - var handlers = events[ type ], - special = jQuery.event.special[ type ] || {}; - - // Init the event handler queue - if ( !handlers ) { - handlers = events[ type ] = []; - - // Check for a special event handler - // Only use addEventListener/attachEvent if the special - // events handler returns false - if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) { - // Bind the global event handler to the element - if ( elem.addEventListener ) { - elem.addEventListener( type, eventHandle, false ); - - } else if ( elem.attachEvent ) { - elem.attachEvent( "on" + type, eventHandle ); - } - } - } - - if ( special.add ) { - special.add.call( elem, handleObj ); - - if ( !handleObj.handler.guid ) { - handleObj.handler.guid = handler.guid; - } - } - - // Add the function to the element's handler list - handlers.push( handleObj ); - - // Keep track of which events have been used, for event optimization - jQuery.event.global[ type ] = true; - } - - // Nullify elem to prevent memory leaks in IE - elem = null; - }, - - global: {}, - - // Detach an event or set of events from an element - remove: function( elem, types, handler, pos ) { - // don't do events on text and comment nodes - if ( elem.nodeType === 3 || elem.nodeType === 8 ) { - return; - } - - if ( handler === false ) { - handler = returnFalse; - } - - var ret, type, fn, j, i = 0, all, namespaces, namespace, special, eventType, handleObj, origType, - elemData = jQuery.hasData( elem ) && jQuery._data( elem ), - events = elemData && elemData.events; - - if ( !elemData || !events ) { - return; - } - - // types is actually an event object here - if ( types && types.type ) { - handler = types.handler; - types = types.type; - } - - // Unbind all events for the element - if ( !types || typeof types === "string" && types.charAt(0) === "." 
) { - types = types || ""; - - for ( type in events ) { - jQuery.event.remove( elem, type + types ); - } - - return; - } - - // Handle multiple events separated by a space - // jQuery(...).unbind("mouseover mouseout", fn); - types = types.split(" "); - - while ( (type = types[ i++ ]) ) { - origType = type; - handleObj = null; - all = type.indexOf(".") < 0; - namespaces = []; - - if ( !all ) { - // Namespaced event handlers - namespaces = type.split("."); - type = namespaces.shift(); - - namespace = new RegExp("(^|\\.)" + - jQuery.map( namespaces.slice(0).sort(), fcleanup ).join("\\.(?:.*\\.)?") + "(\\.|$)"); - } - - eventType = events[ type ]; - - if ( !eventType ) { - continue; - } - - if ( !handler ) { - for ( j = 0; j < eventType.length; j++ ) { - handleObj = eventType[ j ]; - - if ( all || namespace.test( handleObj.namespace ) ) { - jQuery.event.remove( elem, origType, handleObj.handler, j ); - eventType.splice( j--, 1 ); - } - } - - continue; - } - - special = jQuery.event.special[ type ] || {}; - - for ( j = pos || 0; j < eventType.length; j++ ) { - handleObj = eventType[ j ]; - - if ( handler.guid === handleObj.guid ) { - // remove the given handler for the given type - if ( all || namespace.test( handleObj.namespace ) ) { - if ( pos == null ) { - eventType.splice( j--, 1 ); - } - - if ( special.remove ) { - special.remove.call( elem, handleObj ); - } - } - - if ( pos != null ) { - break; - } - } - } - - // remove generic event handler if no more handlers exist - if ( eventType.length === 0 || pos != null && eventType.length === 1 ) { - if ( !special.teardown || special.teardown.call( elem, namespaces ) === false ) { - jQuery.removeEvent( elem, type, elemData.handle ); - } - - ret = null; - delete events[ type ]; - } - } - - // Remove the expando if it's no longer used - if ( jQuery.isEmptyObject( events ) ) { - var handle = elemData.handle; - if ( handle ) { - handle.elem = null; - } - - delete elemData.events; - delete elemData.handle; - - if ( jQuery.isEmptyObject( elemData ) ) { - jQuery.removeData( elem, undefined, true ); - } - } - }, - - // Events that are safe to short-circuit if no handlers are attached. - // Native DOM events should not be added, they may have inline handlers. - customEvent: { - "getData": true, - "setData": true, - "changeData": true - }, - - trigger: function( event, data, elem, onlyHandlers ) { - // Event object or event type - var type = event.type || event, - namespaces = [], - exclusive; - - if ( type.indexOf("!") >= 0 ) { - // Exclusive events trigger only for the exact event (no namespaces) - type = type.slice(0, -1); - exclusive = true; - } - - if ( type.indexOf(".") >= 0 ) { - // Namespaced trigger; create a regexp to match event type in handle() - namespaces = type.split("."); - type = namespaces.shift(); - namespaces.sort(); - } - - if ( (!elem || jQuery.event.customEvent[ type ]) && !jQuery.event.global[ type ] ) { - // No jQuery handlers for this event type, and it can't have inline handlers - return; - } - - // Caller can pass in an Event, Object, or just an event type string - event = typeof event === "object" ? - // jQuery.Event object - event[ jQuery.expando ] ? 
event : - // Object literal - new jQuery.Event( type, event ) : - // Just the event type (string) - new jQuery.Event( type ); - - event.type = type; - event.exclusive = exclusive; - event.namespace = namespaces.join("."); - event.namespace_re = new RegExp("(^|\\.)" + namespaces.join("\\.(?:.*\\.)?") + "(\\.|$)"); - - // triggerHandler() and global events don't bubble or run the default action - if ( onlyHandlers || !elem ) { - event.preventDefault(); - event.stopPropagation(); - } - - // Handle a global trigger - if ( !elem ) { - // TODO: Stop taunting the data cache; remove global events and always attach to document - jQuery.each( jQuery.cache, function() { - // internalKey variable is just used to make it easier to find - // and potentially change this stuff later; currently it just - // points to jQuery.expando - var internalKey = jQuery.expando, - internalCache = this[ internalKey ]; - if ( internalCache && internalCache.events && internalCache.events[ type ] ) { - jQuery.event.trigger( event, data, internalCache.handle.elem ); - } - }); - return; - } - - // Don't do events on text and comment nodes - if ( elem.nodeType === 3 || elem.nodeType === 8 ) { - return; - } - - // Clean up the event in case it is being reused - event.result = undefined; - event.target = elem; - - // Clone any incoming data and prepend the event, creating the handler arg list - data = data ? jQuery.makeArray( data ) : []; - data.unshift( event ); - - var cur = elem, - // IE doesn't like method names with a colon (#3533, #8272) - ontype = type.indexOf(":") < 0 ? "on" + type : ""; - - // Fire event on the current element, then bubble up the DOM tree - do { - var handle = jQuery._data( cur, "handle" ); - - event.currentTarget = cur; - if ( handle ) { - handle.apply( cur, data ); - } - - // Trigger an inline bound script - if ( ontype && jQuery.acceptData( cur ) && cur[ ontype ] && cur[ ontype ].apply( cur, data ) === false ) { - event.result = false; - event.preventDefault(); - } - - // Bubble up to document, then to window - cur = cur.parentNode || cur.ownerDocument || cur === event.target.ownerDocument && window; - } while ( cur && !event.isPropagationStopped() ); - - // If nobody prevented the default action, do it now - if ( !event.isDefaultPrevented() ) { - var old, - special = jQuery.event.special[ type ] || {}; - - if ( (!special._default || special._default.call( elem.ownerDocument, event ) === false) && - !(type === "click" && jQuery.nodeName( elem, "a" )) && jQuery.acceptData( elem ) ) { - - // Call a native DOM method on the target with the same name name as the event. - // Can't use an .isFunction)() check here because IE6/7 fails that test. - // IE<9 dies on focus to hidden element (#1486), may want to revisit a try/catch. - try { - if ( ontype && elem[ type ] ) { - // Don't re-trigger an onFOO event when we call its FOO() method - old = elem[ ontype ]; - - if ( old ) { - elem[ ontype ] = null; - } - - jQuery.event.triggered = type; - elem[ type ](); - } - } catch ( ieError ) {} - - if ( old ) { - elem[ ontype ] = old; - } - - jQuery.event.triggered = undefined; - } - } - - return event.result; - }, - - handle: function( event ) { - event = jQuery.event.fix( event || window.event ); - // Snapshot the handlers list since a called handler may add/remove events. 
- var handlers = ((jQuery._data( this, "events" ) || {})[ event.type ] || []).slice(0), - run_all = !event.exclusive && !event.namespace, - args = Array.prototype.slice.call( arguments, 0 ); - - // Use the fix-ed Event rather than the (read-only) native event - args[0] = event; - event.currentTarget = this; - - for ( var j = 0, l = handlers.length; j < l; j++ ) { - var handleObj = handlers[ j ]; - - // Triggered event must 1) be non-exclusive and have no namespace, or - // 2) have namespace(s) a subset or equal to those in the bound event. - if ( run_all || event.namespace_re.test( handleObj.namespace ) ) { - // Pass in a reference to the handler function itself - // So that we can later remove it - event.handler = handleObj.handler; - event.data = handleObj.data; - event.handleObj = handleObj; - - var ret = handleObj.handler.apply( this, args ); - - if ( ret !== undefined ) { - event.result = ret; - if ( ret === false ) { - event.preventDefault(); - event.stopPropagation(); - } - } - - if ( event.isImmediatePropagationStopped() ) { - break; - } - } - } - return event.result; - }, - - props: "altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "), - - fix: function( event ) { - if ( event[ jQuery.expando ] ) { - return event; - } - - // store a copy of the original event object - // and "clone" to set read-only properties - var originalEvent = event; - event = jQuery.Event( originalEvent ); - - for ( var i = this.props.length, prop; i; ) { - prop = this.props[ --i ]; - event[ prop ] = originalEvent[ prop ]; - } - - // Fix target property, if necessary - if ( !event.target ) { - // Fixes #1925 where srcElement might not be defined either - event.target = event.srcElement || document; - } - - // check if target is a textnode (safari) - if ( event.target.nodeType === 3 ) { - event.target = event.target.parentNode; - } - - // Add relatedTarget, if necessary - if ( !event.relatedTarget && event.fromElement ) { - event.relatedTarget = event.fromElement === event.target ? event.toElement : event.fromElement; - } - - // Calculate pageX/Y if missing and clientX/Y available - if ( event.pageX == null && event.clientX != null ) { - var eventDocument = event.target.ownerDocument || document, - doc = eventDocument.documentElement, - body = eventDocument.body; - - event.pageX = event.clientX + (doc && doc.scrollLeft || body && body.scrollLeft || 0) - (doc && doc.clientLeft || body && body.clientLeft || 0); - event.pageY = event.clientY + (doc && doc.scrollTop || body && body.scrollTop || 0) - (doc && doc.clientTop || body && body.clientTop || 0); - } - - // Add which for key events - if ( event.which == null && (event.charCode != null || event.keyCode != null) ) { - event.which = event.charCode != null ? event.charCode : event.keyCode; - } - - // Add metaKey to non-Mac browsers (use ctrl for PC's and Meta for Macs) - if ( !event.metaKey && event.ctrlKey ) { - event.metaKey = event.ctrlKey; - } - - // Add which for click: 1 === left; 2 === middle; 3 === right - // Note: button is not normalized, so don't use it - if ( !event.which && event.button !== undefined ) { - event.which = (event.button & 1 ? 1 : ( event.button & 2 ? 3 : ( event.button & 4 ? 
2 : 0 ) )); - } - - return event; - }, - - // Deprecated, use jQuery.guid instead - guid: 1E8, - - // Deprecated, use jQuery.proxy instead - proxy: jQuery.proxy, - - special: { - ready: { - // Make sure the ready event is setup - setup: jQuery.bindReady, - teardown: jQuery.noop - }, - - live: { - add: function( handleObj ) { - jQuery.event.add( this, - liveConvert( handleObj.origType, handleObj.selector ), - jQuery.extend({}, handleObj, {handler: liveHandler, guid: handleObj.handler.guid}) ); - }, - - remove: function( handleObj ) { - jQuery.event.remove( this, liveConvert( handleObj.origType, handleObj.selector ), handleObj ); - } - }, - - beforeunload: { - setup: function( data, namespaces, eventHandle ) { - // We only want to do this special case on windows - if ( jQuery.isWindow( this ) ) { - this.onbeforeunload = eventHandle; - } - }, - - teardown: function( namespaces, eventHandle ) { - if ( this.onbeforeunload === eventHandle ) { - this.onbeforeunload = null; - } - } - } - } -}; - -jQuery.removeEvent = document.removeEventListener ? - function( elem, type, handle ) { - if ( elem.removeEventListener ) { - elem.removeEventListener( type, handle, false ); - } - } : - function( elem, type, handle ) { - if ( elem.detachEvent ) { - elem.detachEvent( "on" + type, handle ); - } - }; - -jQuery.Event = function( src, props ) { - // Allow instantiation without the 'new' keyword - if ( !this.preventDefault ) { - return new jQuery.Event( src, props ); - } - - // Event object - if ( src && src.type ) { - this.originalEvent = src; - this.type = src.type; - - // Events bubbling up the document may have been marked as prevented - // by a handler lower down the tree; reflect the correct value. - this.isDefaultPrevented = (src.defaultPrevented || src.returnValue === false || - src.getPreventDefault && src.getPreventDefault()) ? 
returnTrue : returnFalse; - - // Event type - } else { - this.type = src; - } - - // Put explicitly provided properties onto the event object - if ( props ) { - jQuery.extend( this, props ); - } - - // timeStamp is buggy for some events on Firefox(#3843) - // So we won't rely on the native value - this.timeStamp = jQuery.now(); - - // Mark it as fixed - this[ jQuery.expando ] = true; -}; - -function returnFalse() { - return false; -} -function returnTrue() { - return true; -} - -// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding -// http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html -jQuery.Event.prototype = { - preventDefault: function() { - this.isDefaultPrevented = returnTrue; - - var e = this.originalEvent; - if ( !e ) { - return; - } - - // if preventDefault exists run it on the original event - if ( e.preventDefault ) { - e.preventDefault(); - - // otherwise set the returnValue property of the original event to false (IE) - } else { - e.returnValue = false; - } - }, - stopPropagation: function() { - this.isPropagationStopped = returnTrue; - - var e = this.originalEvent; - if ( !e ) { - return; - } - // if stopPropagation exists run it on the original event - if ( e.stopPropagation ) { - e.stopPropagation(); - } - // otherwise set the cancelBubble property of the original event to true (IE) - e.cancelBubble = true; - }, - stopImmediatePropagation: function() { - this.isImmediatePropagationStopped = returnTrue; - this.stopPropagation(); - }, - isDefaultPrevented: returnFalse, - isPropagationStopped: returnFalse, - isImmediatePropagationStopped: returnFalse -}; - -// Checks if an event happened on an element within another element -// Used in jQuery.event.special.mouseenter and mouseleave handlers -var withinElement = function( event ) { - // Check if mouse(over|out) are still within the same parent element - var parent = event.relatedTarget; - - // set the correct event type - event.type = event.data; - - // Firefox sometimes assigns relatedTarget a XUL element - // which we cannot access the parentNode property of - try { - - // Chrome does something similar, the parentNode property - // can be accessed but is null. - if ( parent && parent !== document && !parent.parentNode ) { - return; - } - - // Traverse up the tree - while ( parent && parent !== this ) { - parent = parent.parentNode; - } - - if ( parent !== this ) { - // handle event if we actually just moused on to a non sub-element - jQuery.event.handle.apply( this, arguments ); - } - - // assuming we've left the element since we most likely mousedover a xul element - } catch(e) { } -}, - -// In case of event delegation, we only need to rename the event.type, -// liveHandler will take care of the rest. -delegate = function( event ) { - event.type = event.data; - jQuery.event.handle.apply( this, arguments ); -}; - -// Create mouseenter and mouseleave events -jQuery.each({ - mouseenter: "mouseover", - mouseleave: "mouseout" -}, function( orig, fix ) { - jQuery.event.special[ orig ] = { - setup: function( data ) { - jQuery.event.add( this, fix, data && data.selector ? delegate : withinElement, orig ); - }, - teardown: function( data ) { - jQuery.event.remove( this, fix, data && data.selector ? 
delegate : withinElement ); - } - }; -}); - -// submit delegation -if ( !jQuery.support.submitBubbles ) { - - jQuery.event.special.submit = { - setup: function( data, namespaces ) { - if ( !jQuery.nodeName( this, "form" ) ) { - jQuery.event.add(this, "click.specialSubmit", function( e ) { - var elem = e.target, - type = elem.type; - - if ( (type === "submit" || type === "image") && jQuery( elem ).closest("form").length ) { - trigger( "submit", this, arguments ); - } - }); - - jQuery.event.add(this, "keypress.specialSubmit", function( e ) { - var elem = e.target, - type = elem.type; - - if ( (type === "text" || type === "password") && jQuery( elem ).closest("form").length && e.keyCode === 13 ) { - trigger( "submit", this, arguments ); - } - }); - - } else { - return false; - } - }, - - teardown: function( namespaces ) { - jQuery.event.remove( this, ".specialSubmit" ); - } - }; - -} - -// change delegation, happens here so we have bind. -if ( !jQuery.support.changeBubbles ) { - - var changeFilters, - - getVal = function( elem ) { - var type = elem.type, val = elem.value; - - if ( type === "radio" || type === "checkbox" ) { - val = elem.checked; - - } else if ( type === "select-multiple" ) { - val = elem.selectedIndex > -1 ? - jQuery.map( elem.options, function( elem ) { - return elem.selected; - }).join("-") : - ""; - - } else if ( jQuery.nodeName( elem, "select" ) ) { - val = elem.selectedIndex; - } - - return val; - }, - - testChange = function testChange( e ) { - var elem = e.target, data, val; - - if ( !rformElems.test( elem.nodeName ) || elem.readOnly ) { - return; - } - - data = jQuery._data( elem, "_change_data" ); - val = getVal(elem); - - // the current data will be also retrieved by beforeactivate - if ( e.type !== "focusout" || elem.type !== "radio" ) { - jQuery._data( elem, "_change_data", val ); - } - - if ( data === undefined || val === data ) { - return; - } - - if ( data != null || val ) { - e.type = "change"; - e.liveFired = undefined; - jQuery.event.trigger( e, arguments[1], elem ); - } - }; - - jQuery.event.special.change = { - filters: { - focusout: testChange, - - beforedeactivate: testChange, - - click: function( e ) { - var elem = e.target, type = jQuery.nodeName( elem, "input" ) ? elem.type : ""; - - if ( type === "radio" || type === "checkbox" || jQuery.nodeName( elem, "select" ) ) { - testChange.call( this, e ); - } - }, - - // Change has to be called before submit - // Keydown will be called before keypress, which is used in submit-event delegation - keydown: function( e ) { - var elem = e.target, type = jQuery.nodeName( elem, "input" ) ? 
elem.type : ""; - - if ( (e.keyCode === 13 && !jQuery.nodeName( elem, "textarea" ) ) || - (e.keyCode === 32 && (type === "checkbox" || type === "radio")) || - type === "select-multiple" ) { - testChange.call( this, e ); - } - }, - - // Beforeactivate happens also before the previous element is blurred - // with this event you can't trigger a change event, but you can store - // information - beforeactivate: function( e ) { - var elem = e.target; - jQuery._data( elem, "_change_data", getVal(elem) ); - } - }, - - setup: function( data, namespaces ) { - if ( this.type === "file" ) { - return false; - } - - for ( var type in changeFilters ) { - jQuery.event.add( this, type + ".specialChange", changeFilters[type] ); - } - - return rformElems.test( this.nodeName ); - }, - - teardown: function( namespaces ) { - jQuery.event.remove( this, ".specialChange" ); - - return rformElems.test( this.nodeName ); - } - }; - - changeFilters = jQuery.event.special.change.filters; - - // Handle when the input is .focus()'d - changeFilters.focus = changeFilters.beforeactivate; -} - -function trigger( type, elem, args ) { - // Piggyback on a donor event to simulate a different one. - // Fake originalEvent to avoid donor's stopPropagation, but if the - // simulated event prevents default then we do the same on the donor. - // Don't pass args or remember liveFired; they apply to the donor event. - var event = jQuery.extend( {}, args[ 0 ] ); - event.type = type; - event.originalEvent = {}; - event.liveFired = undefined; - jQuery.event.handle.call( elem, event ); - if ( event.isDefaultPrevented() ) { - args[ 0 ].preventDefault(); - } -} - -// Create "bubbling" focus and blur events -if ( !jQuery.support.focusinBubbles ) { - jQuery.each({ focus: "focusin", blur: "focusout" }, function( orig, fix ) { - - // Attach a single capturing handler while someone wants focusin/focusout - var attaches = 0; - - jQuery.event.special[ fix ] = { - setup: function() { - if ( attaches++ === 0 ) { - document.addEventListener( orig, handler, true ); - } - }, - teardown: function() { - if ( --attaches === 0 ) { - document.removeEventListener( orig, handler, true ); - } - } - }; - - function handler( donor ) { - // Donor event is always a native one; fix it and switch its type. - // Let focusin/out handler cancel the donor focus/blur event. 
- var e = jQuery.event.fix( donor ); - e.type = fix; - e.originalEvent = {}; - jQuery.event.trigger( e, null, e.target ); - if ( e.isDefaultPrevented() ) { - donor.preventDefault(); - } - } - }); -} - -jQuery.each(["bind", "one"], function( i, name ) { - jQuery.fn[ name ] = function( type, data, fn ) { - var handler; - - // Handle object literals - if ( typeof type === "object" ) { - for ( var key in type ) { - this[ name ](key, data, type[key], fn); - } - return this; - } - - if ( arguments.length === 2 || data === false ) { - fn = data; - data = undefined; - } - - if ( name === "one" ) { - handler = function( event ) { - jQuery( this ).unbind( event, handler ); - return fn.apply( this, arguments ); - }; - handler.guid = fn.guid || jQuery.guid++; - } else { - handler = fn; - } - - if ( type === "unload" && name !== "one" ) { - this.one( type, data, fn ); - - } else { - for ( var i = 0, l = this.length; i < l; i++ ) { - jQuery.event.add( this[i], type, handler, data ); - } - } - - return this; - }; -}); - -jQuery.fn.extend({ - unbind: function( type, fn ) { - // Handle object literals - if ( typeof type === "object" && !type.preventDefault ) { - for ( var key in type ) { - this.unbind(key, type[key]); - } - - } else { - for ( var i = 0, l = this.length; i < l; i++ ) { - jQuery.event.remove( this[i], type, fn ); - } - } - - return this; - }, - - delegate: function( selector, types, data, fn ) { - return this.live( types, data, fn, selector ); - }, - - undelegate: function( selector, types, fn ) { - if ( arguments.length === 0 ) { - return this.unbind( "live" ); - - } else { - return this.die( types, null, fn, selector ); - } - }, - - trigger: function( type, data ) { - return this.each(function() { - jQuery.event.trigger( type, data, this ); - }); - }, - - triggerHandler: function( type, data ) { - if ( this[0] ) { - return jQuery.event.trigger( type, data, this[0], true ); - } - }, - - toggle: function( fn ) { - // Save reference to arguments for access in closure - var args = arguments, - guid = fn.guid || jQuery.guid++, - i = 0, - toggler = function( event ) { - // Figure out which function to execute - var lastToggle = ( jQuery.data( this, "lastToggle" + fn.guid ) || 0 ) % i; - jQuery.data( this, "lastToggle" + fn.guid, lastToggle + 1 ); - - // Make sure that clicks stop - event.preventDefault(); - - // and execute the function - return args[ lastToggle ].apply( this, arguments ) || false; - }; - - // link all the functions, so any of them can unbind this click handler - toggler.guid = guid; - while ( i < args.length ) { - args[ i++ ].guid = guid; - } - - return this.click( toggler ); - }, - - hover: function( fnOver, fnOut ) { - return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver ); - } -}); - -var liveMap = { - focus: "focusin", - blur: "focusout", - mouseenter: "mouseover", - mouseleave: "mouseout" -}; - -jQuery.each(["live", "die"], function( i, name ) { - jQuery.fn[ name ] = function( types, data, fn, origSelector /* Internal Use Only */ ) { - var type, i = 0, match, namespaces, preType, - selector = origSelector || this.selector, - context = origSelector ? this : jQuery( this.context ); - - if ( typeof types === "object" && !types.preventDefault ) { - for ( var key in types ) { - context[ name ]( key, data, types[key], selector ); - } - - return this; - } - - if ( name === "die" && !types && - origSelector && origSelector.charAt(0) === "." 
) { - - context.unbind( origSelector ); - - return this; - } - - if ( data === false || jQuery.isFunction( data ) ) { - fn = data || returnFalse; - data = undefined; - } - - types = (types || "").split(" "); - - while ( (type = types[ i++ ]) != null ) { - match = rnamespaces.exec( type ); - namespaces = ""; - - if ( match ) { - namespaces = match[0]; - type = type.replace( rnamespaces, "" ); - } - - if ( type === "hover" ) { - types.push( "mouseenter" + namespaces, "mouseleave" + namespaces ); - continue; - } - - preType = type; - - if ( liveMap[ type ] ) { - types.push( liveMap[ type ] + namespaces ); - type = type + namespaces; - - } else { - type = (liveMap[ type ] || type) + namespaces; - } - - if ( name === "live" ) { - // bind live handler - for ( var j = 0, l = context.length; j < l; j++ ) { - jQuery.event.add( context[j], "live." + liveConvert( type, selector ), - { data: data, selector: selector, handler: fn, origType: type, origHandler: fn, preType: preType } ); - } - - } else { - // unbind live handler - context.unbind( "live." + liveConvert( type, selector ), fn ); - } - } - - return this; - }; -}); - -function liveHandler( event ) { - var stop, maxLevel, related, match, handleObj, elem, j, i, l, data, close, namespace, ret, - elems = [], - selectors = [], - events = jQuery._data( this, "events" ); - - // Make sure we avoid non-left-click bubbling in Firefox (#3861) and disabled elements in IE (#6911) - if ( event.liveFired === this || !events || !events.live || event.target.disabled || event.button && event.type === "click" ) { - return; - } - - if ( event.namespace ) { - namespace = new RegExp("(^|\\.)" + event.namespace.split(".").join("\\.(?:.*\\.)?") + "(\\.|$)"); - } - - event.liveFired = this; - - var live = events.live.slice(0); - - for ( j = 0; j < live.length; j++ ) { - handleObj = live[j]; - - if ( handleObj.origType.replace( rnamespaces, "" ) === event.type ) { - selectors.push( handleObj.selector ); - - } else { - live.splice( j--, 1 ); - } - } - - match = jQuery( event.target ).closest( selectors, event.currentTarget ); - - for ( i = 0, l = match.length; i < l; i++ ) { - close = match[i]; - - for ( j = 0; j < live.length; j++ ) { - handleObj = live[j]; - - if ( close.selector === handleObj.selector && (!namespace || namespace.test( handleObj.namespace )) && !close.elem.disabled ) { - elem = close.elem; - related = null; - - // Those two events require additional checking - if ( handleObj.preType === "mouseenter" || handleObj.preType === "mouseleave" ) { - event.type = handleObj.preType; - related = jQuery( event.relatedTarget ).closest( handleObj.selector )[0]; - - // Make sure not to accidentally match a child element with the same selector - if ( related && jQuery.contains( elem, related ) ) { - related = elem; - } - } - - if ( !related || related !== elem ) { - elems.push({ elem: elem, handleObj: handleObj, level: close.level }); - } - } - } - } - - for ( i = 0, l = elems.length; i < l; i++ ) { - match = elems[i]; - - if ( maxLevel && match.level > maxLevel ) { - break; - } - - event.currentTarget = match.elem; - event.data = match.handleObj.data; - event.handleObj = match.handleObj; - - ret = match.handleObj.origHandler.apply( match.elem, arguments ); - - if ( ret === false || event.isPropagationStopped() ) { - maxLevel = match.level; - - if ( ret === false ) { - stop = false; - } - if ( event.isImmediatePropagationStopped() ) { - break; - } - } - } - - return stop; -} - -function liveConvert( type, selector ) { - return (type && type !== "*" ? type + "." 
: "") + selector.replace(rperiod, "`").replace(rspaces, "&"); -} - -jQuery.each( ("blur focus focusin focusout load resize scroll unload click dblclick " + - "mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " + - "change select submit keydown keypress keyup error").split(" "), function( i, name ) { - - // Handle event binding - jQuery.fn[ name ] = function( data, fn ) { - if ( fn == null ) { - fn = data; - data = null; - } - - return arguments.length > 0 ? - this.bind( name, data, fn ) : - this.trigger( name ); - }; - - if ( jQuery.attrFn ) { - jQuery.attrFn[ name ] = true; - } -}); - - - -/*! - * Sizzle CSS Selector Engine - * Copyright 2011, The Dojo Foundation - * Released under the MIT, BSD, and GPL Licenses. - * More information: http://sizzlejs.com/ - */ -(function(){ - -var chunker = /((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g, - done = 0, - toString = Object.prototype.toString, - hasDuplicate = false, - baseHasDuplicate = true, - rBackslash = /\\/g, - rNonWord = /\W/; - -// Here we check if the JavaScript engine is using some sort of -// optimization where it does not always call our comparision -// function. If that is the case, discard the hasDuplicate value. -// Thus far that includes Google Chrome. -[0, 0].sort(function() { - baseHasDuplicate = false; - return 0; -}); - -var Sizzle = function( selector, context, results, seed ) { - results = results || []; - context = context || document; - - var origContext = context; - - if ( context.nodeType !== 1 && context.nodeType !== 9 ) { - return []; - } - - if ( !selector || typeof selector !== "string" ) { - return results; - } - - var m, set, checkSet, extra, ret, cur, pop, i, - prune = true, - contextXML = Sizzle.isXML( context ), - parts = [], - soFar = selector; - - // Reset the position of the chunker regexp (start from head) - do { - chunker.exec( "" ); - m = chunker.exec( soFar ); - - if ( m ) { - soFar = m[3]; - - parts.push( m[1] ); - - if ( m[2] ) { - extra = m[3]; - break; - } - } - } while ( m ); - - if ( parts.length > 1 && origPOS.exec( selector ) ) { - - if ( parts.length === 2 && Expr.relative[ parts[0] ] ) { - set = posProcess( parts[0] + parts[1], context ); - - } else { - set = Expr.relative[ parts[0] ] ? - [ context ] : - Sizzle( parts.shift(), context ); - - while ( parts.length ) { - selector = parts.shift(); - - if ( Expr.relative[ selector ] ) { - selector += parts.shift(); - } - - set = posProcess( selector, set ); - } - } - - } else { - // Take a shortcut and set the context if the root selector is an ID - // (but not if it'll be faster if the inner selector is an ID) - if ( !seed && parts.length > 1 && context.nodeType === 9 && !contextXML && - Expr.match.ID.test(parts[0]) && !Expr.match.ID.test(parts[parts.length - 1]) ) { - - ret = Sizzle.find( parts.shift(), context, contextXML ); - context = ret.expr ? - Sizzle.filter( ret.expr, ret.set )[0] : - ret.set[0]; - } - - if ( context ) { - ret = seed ? - { expr: parts.pop(), set: makeArray(seed) } : - Sizzle.find( parts.pop(), parts.length === 1 && (parts[0] === "~" || parts[0] === "+") && context.parentNode ? context.parentNode : context, contextXML ); - - set = ret.expr ? 
- Sizzle.filter( ret.expr, ret.set ) : - ret.set; - - if ( parts.length > 0 ) { - checkSet = makeArray( set ); - - } else { - prune = false; - } - - while ( parts.length ) { - cur = parts.pop(); - pop = cur; - - if ( !Expr.relative[ cur ] ) { - cur = ""; - } else { - pop = parts.pop(); - } - - if ( pop == null ) { - pop = context; - } - - Expr.relative[ cur ]( checkSet, pop, contextXML ); - } - - } else { - checkSet = parts = []; - } - } - - if ( !checkSet ) { - checkSet = set; - } - - if ( !checkSet ) { - Sizzle.error( cur || selector ); - } - - if ( toString.call(checkSet) === "[object Array]" ) { - if ( !prune ) { - results.push.apply( results, checkSet ); - - } else if ( context && context.nodeType === 1 ) { - for ( i = 0; checkSet[i] != null; i++ ) { - if ( checkSet[i] && (checkSet[i] === true || checkSet[i].nodeType === 1 && Sizzle.contains(context, checkSet[i])) ) { - results.push( set[i] ); - } - } - - } else { - for ( i = 0; checkSet[i] != null; i++ ) { - if ( checkSet[i] && checkSet[i].nodeType === 1 ) { - results.push( set[i] ); - } - } - } - - } else { - makeArray( checkSet, results ); - } - - if ( extra ) { - Sizzle( extra, origContext, results, seed ); - Sizzle.uniqueSort( results ); - } - - return results; -}; - -Sizzle.uniqueSort = function( results ) { - if ( sortOrder ) { - hasDuplicate = baseHasDuplicate; - results.sort( sortOrder ); - - if ( hasDuplicate ) { - for ( var i = 1; i < results.length; i++ ) { - if ( results[i] === results[ i - 1 ] ) { - results.splice( i--, 1 ); - } - } - } - } - - return results; -}; - -Sizzle.matches = function( expr, set ) { - return Sizzle( expr, null, null, set ); -}; - -Sizzle.matchesSelector = function( node, expr ) { - return Sizzle( expr, null, null, [node] ).length > 0; -}; - -Sizzle.find = function( expr, context, isXML ) { - var set; - - if ( !expr ) { - return []; - } - - for ( var i = 0, l = Expr.order.length; i < l; i++ ) { - var match, - type = Expr.order[i]; - - if ( (match = Expr.leftMatch[ type ].exec( expr )) ) { - var left = match[1]; - match.splice( 1, 1 ); - - if ( left.substr( left.length - 1 ) !== "\\" ) { - match[1] = (match[1] || "").replace( rBackslash, "" ); - set = Expr.find[ type ]( match, context, isXML ); - - if ( set != null ) { - expr = expr.replace( Expr.match[ type ], "" ); - break; - } - } - } - } - - if ( !set ) { - set = typeof context.getElementsByTagName !== "undefined" ? 
- context.getElementsByTagName( "*" ) : - []; - } - - return { set: set, expr: expr }; -}; - -Sizzle.filter = function( expr, set, inplace, not ) { - var match, anyFound, - old = expr, - result = [], - curLoop = set, - isXMLFilter = set && set[0] && Sizzle.isXML( set[0] ); - - while ( expr && set.length ) { - for ( var type in Expr.filter ) { - if ( (match = Expr.leftMatch[ type ].exec( expr )) != null && match[2] ) { - var found, item, - filter = Expr.filter[ type ], - left = match[1]; - - anyFound = false; - - match.splice(1,1); - - if ( left.substr( left.length - 1 ) === "\\" ) { - continue; - } - - if ( curLoop === result ) { - result = []; - } - - if ( Expr.preFilter[ type ] ) { - match = Expr.preFilter[ type ]( match, curLoop, inplace, result, not, isXMLFilter ); - - if ( !match ) { - anyFound = found = true; - - } else if ( match === true ) { - continue; - } - } - - if ( match ) { - for ( var i = 0; (item = curLoop[i]) != null; i++ ) { - if ( item ) { - found = filter( item, match, i, curLoop ); - var pass = not ^ !!found; - - if ( inplace && found != null ) { - if ( pass ) { - anyFound = true; - - } else { - curLoop[i] = false; - } - - } else if ( pass ) { - result.push( item ); - anyFound = true; - } - } - } - } - - if ( found !== undefined ) { - if ( !inplace ) { - curLoop = result; - } - - expr = expr.replace( Expr.match[ type ], "" ); - - if ( !anyFound ) { - return []; - } - - break; - } - } - } - - // Improper expression - if ( expr === old ) { - if ( anyFound == null ) { - Sizzle.error( expr ); - - } else { - break; - } - } - - old = expr; - } - - return curLoop; -}; - -Sizzle.error = function( msg ) { - throw "Syntax error, unrecognized expression: " + msg; -}; - -var Expr = Sizzle.selectors = { - order: [ "ID", "NAME", "TAG" ], - - match: { - ID: /#((?:[\w\u00c0-\uFFFF\-]|\\.)+)/, - CLASS: /\.((?:[\w\u00c0-\uFFFF\-]|\\.)+)/, - NAME: /\[name=['"]*((?:[\w\u00c0-\uFFFF\-]|\\.)+)['"]*\]/, - ATTR: /\[\s*((?:[\w\u00c0-\uFFFF\-]|\\.)+)\s*(?:(\S?=)\s*(?:(['"])(.*?)\3|(#?(?:[\w\u00c0-\uFFFF\-]|\\.)*)|)|)\s*\]/, - TAG: /^((?:[\w\u00c0-\uFFFF\*\-]|\\.)+)/, - CHILD: /:(only|nth|last|first)-child(?:\(\s*(even|odd|(?:[+\-]?\d+|(?:[+\-]?\d*)?n\s*(?:[+\-]\s*\d+)?))\s*\))?/, - POS: /:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^\-]|$)/, - PSEUDO: /:((?:[\w\u00c0-\uFFFF\-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/ - }, - - leftMatch: {}, - - attrMap: { - "class": "className", - "for": "htmlFor" - }, - - attrHandle: { - href: function( elem ) { - return elem.getAttribute( "href" ); - }, - type: function( elem ) { - return elem.getAttribute( "type" ); - } - }, - - relative: { - "+": function(checkSet, part){ - var isPartStr = typeof part === "string", - isTag = isPartStr && !rNonWord.test( part ), - isPartStrNotTag = isPartStr && !isTag; - - if ( isTag ) { - part = part.toLowerCase(); - } - - for ( var i = 0, l = checkSet.length, elem; i < l; i++ ) { - if ( (elem = checkSet[i]) ) { - while ( (elem = elem.previousSibling) && elem.nodeType !== 1 ) {} - - checkSet[i] = isPartStrNotTag || elem && elem.nodeName.toLowerCase() === part ? 
- elem || false : - elem === part; - } - } - - if ( isPartStrNotTag ) { - Sizzle.filter( part, checkSet, true ); - } - }, - - ">": function( checkSet, part ) { - var elem, - isPartStr = typeof part === "string", - i = 0, - l = checkSet.length; - - if ( isPartStr && !rNonWord.test( part ) ) { - part = part.toLowerCase(); - - for ( ; i < l; i++ ) { - elem = checkSet[i]; - - if ( elem ) { - var parent = elem.parentNode; - checkSet[i] = parent.nodeName.toLowerCase() === part ? parent : false; - } - } - - } else { - for ( ; i < l; i++ ) { - elem = checkSet[i]; - - if ( elem ) { - checkSet[i] = isPartStr ? - elem.parentNode : - elem.parentNode === part; - } - } - - if ( isPartStr ) { - Sizzle.filter( part, checkSet, true ); - } - } - }, - - "": function(checkSet, part, isXML){ - var nodeCheck, - doneName = done++, - checkFn = dirCheck; - - if ( typeof part === "string" && !rNonWord.test( part ) ) { - part = part.toLowerCase(); - nodeCheck = part; - checkFn = dirNodeCheck; - } - - checkFn( "parentNode", part, doneName, checkSet, nodeCheck, isXML ); - }, - - "~": function( checkSet, part, isXML ) { - var nodeCheck, - doneName = done++, - checkFn = dirCheck; - - if ( typeof part === "string" && !rNonWord.test( part ) ) { - part = part.toLowerCase(); - nodeCheck = part; - checkFn = dirNodeCheck; - } - - checkFn( "previousSibling", part, doneName, checkSet, nodeCheck, isXML ); - } - }, - - find: { - ID: function( match, context, isXML ) { - if ( typeof context.getElementById !== "undefined" && !isXML ) { - var m = context.getElementById(match[1]); - // Check parentNode to catch when Blackberry 4.6 returns - // nodes that are no longer in the document #6963 - return m && m.parentNode ? [m] : []; - } - }, - - NAME: function( match, context ) { - if ( typeof context.getElementsByName !== "undefined" ) { - var ret = [], - results = context.getElementsByName( match[1] ); - - for ( var i = 0, l = results.length; i < l; i++ ) { - if ( results[i].getAttribute("name") === match[1] ) { - ret.push( results[i] ); - } - } - - return ret.length === 0 ? 
null : ret; - } - }, - - TAG: function( match, context ) { - if ( typeof context.getElementsByTagName !== "undefined" ) { - return context.getElementsByTagName( match[1] ); - } - } - }, - preFilter: { - CLASS: function( match, curLoop, inplace, result, not, isXML ) { - match = " " + match[1].replace( rBackslash, "" ) + " "; - - if ( isXML ) { - return match; - } - - for ( var i = 0, elem; (elem = curLoop[i]) != null; i++ ) { - if ( elem ) { - if ( not ^ (elem.className && (" " + elem.className + " ").replace(/[\t\n\r]/g, " ").indexOf(match) >= 0) ) { - if ( !inplace ) { - result.push( elem ); - } - - } else if ( inplace ) { - curLoop[i] = false; - } - } - } - - return false; - }, - - ID: function( match ) { - return match[1].replace( rBackslash, "" ); - }, - - TAG: function( match, curLoop ) { - return match[1].replace( rBackslash, "" ).toLowerCase(); - }, - - CHILD: function( match ) { - if ( match[1] === "nth" ) { - if ( !match[2] ) { - Sizzle.error( match[0] ); - } - - match[2] = match[2].replace(/^\+|\s*/g, ''); - - // parse equations like 'even', 'odd', '5', '2n', '3n+2', '4n-1', '-n+6' - var test = /(-?)(\d*)(?:n([+\-]?\d*))?/.exec( - match[2] === "even" && "2n" || match[2] === "odd" && "2n+1" || - !/\D/.test( match[2] ) && "0n+" + match[2] || match[2]); - - // calculate the numbers (first)n+(last) including if they are negative - match[2] = (test[1] + (test[2] || 1)) - 0; - match[3] = test[3] - 0; - } - else if ( match[2] ) { - Sizzle.error( match[0] ); - } - - // TODO: Move to normal caching system - match[0] = done++; - - return match; - }, - - ATTR: function( match, curLoop, inplace, result, not, isXML ) { - var name = match[1] = match[1].replace( rBackslash, "" ); - - if ( !isXML && Expr.attrMap[name] ) { - match[1] = Expr.attrMap[name]; - } - - // Handle if an un-quoted value was used - match[4] = ( match[4] || match[5] || "" ).replace( rBackslash, "" ); - - if ( match[2] === "~=" ) { - match[4] = " " + match[4] + " "; - } - - return match; - }, - - PSEUDO: function( match, curLoop, inplace, result, not ) { - if ( match[1] === "not" ) { - // If we're dealing with a complex expression, or a simple one - if ( ( chunker.exec(match[3]) || "" ).length > 1 || /^\w/.test(match[3]) ) { - match[3] = Sizzle(match[3], null, null, curLoop); - - } else { - var ret = Sizzle.filter(match[3], curLoop, inplace, true ^ not); - - if ( !inplace ) { - result.push.apply( result, ret ); - } - - return false; - } - - } else if ( Expr.match.POS.test( match[0] ) || Expr.match.CHILD.test( match[0] ) ) { - return true; - } - - return match; - }, - - POS: function( match ) { - match.unshift( true ); - - return match; - } - }, - - filters: { - enabled: function( elem ) { - return elem.disabled === false && elem.type !== "hidden"; - }, - - disabled: function( elem ) { - return elem.disabled === true; - }, - - checked: function( elem ) { - return elem.checked === true; - }, - - selected: function( elem ) { - // Accessing this property makes selected-by-default - // options in Safari work properly - if ( elem.parentNode ) { - elem.parentNode.selectedIndex; - } - - return elem.selected === true; - }, - - parent: function( elem ) { - return !!elem.firstChild; - }, - - empty: function( elem ) { - return !elem.firstChild; - }, - - has: function( elem, i, match ) { - return !!Sizzle( match[3], elem ).length; - }, - - header: function( elem ) { - return (/h\d/i).test( elem.nodeName ); - }, - - text: function( elem ) { - var attr = elem.getAttribute( "type" ), type = elem.type; - // IE6 and 7 will map elem.type to 
'text' for new HTML5 types (search, etc) - // use getAttribute instead to test this case - return elem.nodeName.toLowerCase() === "input" && "text" === type && ( attr === type || attr === null ); - }, - - radio: function( elem ) { - return elem.nodeName.toLowerCase() === "input" && "radio" === elem.type; - }, - - checkbox: function( elem ) { - return elem.nodeName.toLowerCase() === "input" && "checkbox" === elem.type; - }, - - file: function( elem ) { - return elem.nodeName.toLowerCase() === "input" && "file" === elem.type; - }, - - password: function( elem ) { - return elem.nodeName.toLowerCase() === "input" && "password" === elem.type; - }, - - submit: function( elem ) { - var name = elem.nodeName.toLowerCase(); - return (name === "input" || name === "button") && "submit" === elem.type; - }, - - image: function( elem ) { - return elem.nodeName.toLowerCase() === "input" && "image" === elem.type; - }, - - reset: function( elem ) { - var name = elem.nodeName.toLowerCase(); - return (name === "input" || name === "button") && "reset" === elem.type; - }, - - button: function( elem ) { - var name = elem.nodeName.toLowerCase(); - return name === "input" && "button" === elem.type || name === "button"; - }, - - input: function( elem ) { - return (/input|select|textarea|button/i).test( elem.nodeName ); - }, - - focus: function( elem ) { - return elem === elem.ownerDocument.activeElement; - } - }, - setFilters: { - first: function( elem, i ) { - return i === 0; - }, - - last: function( elem, i, match, array ) { - return i === array.length - 1; - }, - - even: function( elem, i ) { - return i % 2 === 0; - }, - - odd: function( elem, i ) { - return i % 2 === 1; - }, - - lt: function( elem, i, match ) { - return i < match[3] - 0; - }, - - gt: function( elem, i, match ) { - return i > match[3] - 0; - }, - - nth: function( elem, i, match ) { - return match[3] - 0 === i; - }, - - eq: function( elem, i, match ) { - return match[3] - 0 === i; - } - }, - filter: { - PSEUDO: function( elem, match, i, array ) { - var name = match[1], - filter = Expr.filters[ name ]; - - if ( filter ) { - return filter( elem, i, match, array ); - - } else if ( name === "contains" ) { - return (elem.textContent || elem.innerText || Sizzle.getText([ elem ]) || "").indexOf(match[3]) >= 0; - - } else if ( name === "not" ) { - var not = match[3]; - - for ( var j = 0, l = not.length; j < l; j++ ) { - if ( not[j] === elem ) { - return false; - } - } - - return true; - - } else { - Sizzle.error( name ); - } - }, - - CHILD: function( elem, match ) { - var type = match[1], - node = elem; - - switch ( type ) { - case "only": - case "first": - while ( (node = node.previousSibling) ) { - if ( node.nodeType === 1 ) { - return false; - } - } - - if ( type === "first" ) { - return true; - } - - node = elem; - - case "last": - while ( (node = node.nextSibling) ) { - if ( node.nodeType === 1 ) { - return false; - } - } - - return true; - - case "nth": - var first = match[2], - last = match[3]; - - if ( first === 1 && last === 0 ) { - return true; - } - - var doneName = match[0], - parent = elem.parentNode; - - if ( parent && (parent.sizcache !== doneName || !elem.nodeIndex) ) { - var count = 0; - - for ( node = parent.firstChild; node; node = node.nextSibling ) { - if ( node.nodeType === 1 ) { - node.nodeIndex = ++count; - } - } - - parent.sizcache = doneName; - } - - var diff = elem.nodeIndex - last; - - if ( first === 0 ) { - return diff === 0; - - } else { - return ( diff % first === 0 && diff / first >= 0 ); - } - } - }, - - ID: function( 
elem, match ) { - return elem.nodeType === 1 && elem.getAttribute("id") === match; - }, - - TAG: function( elem, match ) { - return (match === "*" && elem.nodeType === 1) || elem.nodeName.toLowerCase() === match; - }, - - CLASS: function( elem, match ) { - return (" " + (elem.className || elem.getAttribute("class")) + " ") - .indexOf( match ) > -1; - }, - - ATTR: function( elem, match ) { - var name = match[1], - result = Expr.attrHandle[ name ] ? - Expr.attrHandle[ name ]( elem ) : - elem[ name ] != null ? - elem[ name ] : - elem.getAttribute( name ), - value = result + "", - type = match[2], - check = match[4]; - - return result == null ? - type === "!=" : - type === "=" ? - value === check : - type === "*=" ? - value.indexOf(check) >= 0 : - type === "~=" ? - (" " + value + " ").indexOf(check) >= 0 : - !check ? - value && result !== false : - type === "!=" ? - value !== check : - type === "^=" ? - value.indexOf(check) === 0 : - type === "$=" ? - value.substr(value.length - check.length) === check : - type === "|=" ? - value === check || value.substr(0, check.length + 1) === check + "-" : - false; - }, - - POS: function( elem, match, i, array ) { - var name = match[2], - filter = Expr.setFilters[ name ]; - - if ( filter ) { - return filter( elem, i, match, array ); - } - } - } -}; - -var origPOS = Expr.match.POS, - fescape = function(all, num){ - return "\\" + (num - 0 + 1); - }; - -for ( var type in Expr.match ) { - Expr.match[ type ] = new RegExp( Expr.match[ type ].source + (/(?![^\[]*\])(?![^\(]*\))/.source) ); - Expr.leftMatch[ type ] = new RegExp( /(^(?:.|\r|\n)*?)/.source + Expr.match[ type ].source.replace(/\\(\d+)/g, fescape) ); -} - -var makeArray = function( array, results ) { - array = Array.prototype.slice.call( array, 0 ); - - if ( results ) { - results.push.apply( results, array ); - return results; - } - - return array; -}; - -// Perform a simple check to determine if the browser is capable of -// converting a NodeList to an array using builtin methods. -// Also verifies that the returned array holds DOM nodes -// (which is not the case in the Blackberry browser) -try { - Array.prototype.slice.call( document.documentElement.childNodes, 0 )[0].nodeType; - -// Provide a fallback method if it does not work -} catch( e ) { - makeArray = function( array, results ) { - var i = 0, - ret = results || []; - - if ( toString.call(array) === "[object Array]" ) { - Array.prototype.push.apply( ret, array ); - - } else { - if ( typeof array.length === "number" ) { - for ( var l = array.length; i < l; i++ ) { - ret.push( array[i] ); - } - - } else { - for ( ; array[i]; i++ ) { - ret.push( array[i] ); - } - } - } - - return ret; - }; -} - -var sortOrder, siblingCheck; - -if ( document.documentElement.compareDocumentPosition ) { - sortOrder = function( a, b ) { - if ( a === b ) { - hasDuplicate = true; - return 0; - } - - if ( !a.compareDocumentPosition || !b.compareDocumentPosition ) { - return a.compareDocumentPosition ? -1 : 1; - } - - return a.compareDocumentPosition(b) & 4 ? 
-1 : 1; - }; - -} else { - sortOrder = function( a, b ) { - // The nodes are identical, we can exit early - if ( a === b ) { - hasDuplicate = true; - return 0; - - // Fallback to using sourceIndex (in IE) if it's available on both nodes - } else if ( a.sourceIndex && b.sourceIndex ) { - return a.sourceIndex - b.sourceIndex; - } - - var al, bl, - ap = [], - bp = [], - aup = a.parentNode, - bup = b.parentNode, - cur = aup; - - // If the nodes are siblings (or identical) we can do a quick check - if ( aup === bup ) { - return siblingCheck( a, b ); - - // If no parents were found then the nodes are disconnected - } else if ( !aup ) { - return -1; - - } else if ( !bup ) { - return 1; - } - - // Otherwise they're somewhere else in the tree so we need - // to build up a full list of the parentNodes for comparison - while ( cur ) { - ap.unshift( cur ); - cur = cur.parentNode; - } - - cur = bup; - - while ( cur ) { - bp.unshift( cur ); - cur = cur.parentNode; - } - - al = ap.length; - bl = bp.length; - - // Start walking down the tree looking for a discrepancy - for ( var i = 0; i < al && i < bl; i++ ) { - if ( ap[i] !== bp[i] ) { - return siblingCheck( ap[i], bp[i] ); - } - } - - // We ended someplace up the tree so do a sibling check - return i === al ? - siblingCheck( a, bp[i], -1 ) : - siblingCheck( ap[i], b, 1 ); - }; - - siblingCheck = function( a, b, ret ) { - if ( a === b ) { - return ret; - } - - var cur = a.nextSibling; - - while ( cur ) { - if ( cur === b ) { - return -1; - } - - cur = cur.nextSibling; - } - - return 1; - }; -} - -// Utility function for retreiving the text value of an array of DOM nodes -Sizzle.getText = function( elems ) { - var ret = "", elem; - - for ( var i = 0; elems[i]; i++ ) { - elem = elems[i]; - - // Get the text from text nodes and CDATA nodes - if ( elem.nodeType === 3 || elem.nodeType === 4 ) { - ret += elem.nodeValue; - - // Traverse everything else, except comment nodes - } else if ( elem.nodeType !== 8 ) { - ret += Sizzle.getText( elem.childNodes ); - } - } - - return ret; -}; - -// Check to see if the browser returns elements by name when -// querying by getElementById (and provide a workaround) -(function(){ - // We're going to inject a fake input element with a specified name - var form = document.createElement("div"), - id = "script" + (new Date()).getTime(), - root = document.documentElement; - - form.innerHTML = ""; - - // Inject it into the root element, check its status, and remove it quickly - root.insertBefore( form, root.firstChild ); - - // The workaround has to do additional checks after a getElementById - // Which slows things down for other browsers (hence the branching) - if ( document.getElementById( id ) ) { - Expr.find.ID = function( match, context, isXML ) { - if ( typeof context.getElementById !== "undefined" && !isXML ) { - var m = context.getElementById(match[1]); - - return m ? - m.id === match[1] || typeof m.getAttributeNode !== "undefined" && m.getAttributeNode("id").nodeValue === match[1] ? 
- [m] : - undefined : - []; - } - }; - - Expr.filter.ID = function( elem, match ) { - var node = typeof elem.getAttributeNode !== "undefined" && elem.getAttributeNode("id"); - - return elem.nodeType === 1 && node && node.nodeValue === match; - }; - } - - root.removeChild( form ); - - // release memory in IE - root = form = null; -})(); - -(function(){ - // Check to see if the browser returns only elements - // when doing getElementsByTagName("*") - - // Create a fake element - var div = document.createElement("div"); - div.appendChild( document.createComment("") ); - - // Make sure no comments are found - if ( div.getElementsByTagName("*").length > 0 ) { - Expr.find.TAG = function( match, context ) { - var results = context.getElementsByTagName( match[1] ); - - // Filter out possible comments - if ( match[1] === "*" ) { - var tmp = []; - - for ( var i = 0; results[i]; i++ ) { - if ( results[i].nodeType === 1 ) { - tmp.push( results[i] ); - } - } - - results = tmp; - } - - return results; - }; - } - - // Check to see if an attribute returns normalized href attributes - div.innerHTML = ""; - - if ( div.firstChild && typeof div.firstChild.getAttribute !== "undefined" && - div.firstChild.getAttribute("href") !== "#" ) { - - Expr.attrHandle.href = function( elem ) { - return elem.getAttribute( "href", 2 ); - }; - } - - // release memory in IE - div = null; -})(); - -if ( document.querySelectorAll ) { - (function(){ - var oldSizzle = Sizzle, - div = document.createElement("div"), - id = "__sizzle__"; - - div.innerHTML = "
<p class='TEST'></p>
    "; - - // Safari can't handle uppercase or unicode characters when - // in quirks mode. - if ( div.querySelectorAll && div.querySelectorAll(".TEST").length === 0 ) { - return; - } - - Sizzle = function( query, context, extra, seed ) { - context = context || document; - - // Only use querySelectorAll on non-XML documents - // (ID selectors don't work in non-HTML documents) - if ( !seed && !Sizzle.isXML(context) ) { - // See if we find a selector to speed up - var match = /^(\w+$)|^\.([\w\-]+$)|^#([\w\-]+$)/.exec( query ); - - if ( match && (context.nodeType === 1 || context.nodeType === 9) ) { - // Speed-up: Sizzle("TAG") - if ( match[1] ) { - return makeArray( context.getElementsByTagName( query ), extra ); - - // Speed-up: Sizzle(".CLASS") - } else if ( match[2] && Expr.find.CLASS && context.getElementsByClassName ) { - return makeArray( context.getElementsByClassName( match[2] ), extra ); - } - } - - if ( context.nodeType === 9 ) { - // Speed-up: Sizzle("body") - // The body element only exists once, optimize finding it - if ( query === "body" && context.body ) { - return makeArray( [ context.body ], extra ); - - // Speed-up: Sizzle("#ID") - } else if ( match && match[3] ) { - var elem = context.getElementById( match[3] ); - - // Check parentNode to catch when Blackberry 4.6 returns - // nodes that are no longer in the document #6963 - if ( elem && elem.parentNode ) { - // Handle the case where IE and Opera return items - // by name instead of ID - if ( elem.id === match[3] ) { - return makeArray( [ elem ], extra ); - } - - } else { - return makeArray( [], extra ); - } - } - - try { - return makeArray( context.querySelectorAll(query), extra ); - } catch(qsaError) {} - - // qSA works strangely on Element-rooted queries - // We can work around this by specifying an extra ID on the root - // and working up from there (Thanks to Andrew Dupont for the technique) - // IE 8 doesn't work on object elements - } else if ( context.nodeType === 1 && context.nodeName.toLowerCase() !== "object" ) { - var oldContext = context, - old = context.getAttribute( "id" ), - nid = old || id, - hasParent = context.parentNode, - relativeHierarchySelector = /^\s*[+~]/.test( query ); - - if ( !old ) { - context.setAttribute( "id", nid ); - } else { - nid = nid.replace( /'/g, "\\$&" ); - } - if ( relativeHierarchySelector && hasParent ) { - context = context.parentNode; - } - - try { - if ( !relativeHierarchySelector || hasParent ) { - return makeArray( context.querySelectorAll( "[id='" + nid + "'] " + query ), extra ); - } - - } catch(pseudoError) { - } finally { - if ( !old ) { - oldContext.removeAttribute( "id" ); - } - } - } - } - - return oldSizzle(query, context, extra, seed); - }; - - for ( var prop in oldSizzle ) { - Sizzle[ prop ] = oldSizzle[ prop ]; - } - - // release memory in IE - div = null; - })(); -} - -(function(){ - var html = document.documentElement, - matches = html.matchesSelector || html.mozMatchesSelector || html.webkitMatchesSelector || html.msMatchesSelector; - - if ( matches ) { - // Check to see if it's possible to do matchesSelector - // on a disconnected node (IE 9 fails this) - var disconnectedMatch = !matches.call( document.createElement( "div" ), "div" ), - pseudoWorks = false; - - try { - // This should fail with an exception - // Gecko does not error, returns false instead - matches.call( document.documentElement, "[test!='']:sizzle" ); - - } catch( pseudoError ) { - pseudoWorks = true; - } - - Sizzle.matchesSelector = function( node, expr ) { - // Make sure that attribute 
selectors are quoted - expr = expr.replace(/\=\s*([^'"\]]*)\s*\]/g, "='$1']"); - - if ( !Sizzle.isXML( node ) ) { - try { - if ( pseudoWorks || !Expr.match.PSEUDO.test( expr ) && !/!=/.test( expr ) ) { - var ret = matches.call( node, expr ); - - // IE 9's matchesSelector returns false on disconnected nodes - if ( ret || !disconnectedMatch || - // As well, disconnected nodes are said to be in a document - // fragment in IE 9, so check for that - node.document && node.document.nodeType !== 11 ) { - return ret; - } - } - } catch(e) {} - } - - return Sizzle(expr, null, null, [node]).length > 0; - }; - } -})(); - -(function(){ - var div = document.createElement("div"); - - div.innerHTML = "
    "; - - // Opera can't find a second classname (in 9.6) - // Also, make sure that getElementsByClassName actually exists - if ( !div.getElementsByClassName || div.getElementsByClassName("e").length === 0 ) { - return; - } - - // Safari caches class attributes, doesn't catch changes (in 3.2) - div.lastChild.className = "e"; - - if ( div.getElementsByClassName("e").length === 1 ) { - return; - } - - Expr.order.splice(1, 0, "CLASS"); - Expr.find.CLASS = function( match, context, isXML ) { - if ( typeof context.getElementsByClassName !== "undefined" && !isXML ) { - return context.getElementsByClassName(match[1]); - } - }; - - // release memory in IE - div = null; -})(); - -function dirNodeCheck( dir, cur, doneName, checkSet, nodeCheck, isXML ) { - for ( var i = 0, l = checkSet.length; i < l; i++ ) { - var elem = checkSet[i]; - - if ( elem ) { - var match = false; - - elem = elem[dir]; - - while ( elem ) { - if ( elem.sizcache === doneName ) { - match = checkSet[elem.sizset]; - break; - } - - if ( elem.nodeType === 1 && !isXML ){ - elem.sizcache = doneName; - elem.sizset = i; - } - - if ( elem.nodeName.toLowerCase() === cur ) { - match = elem; - break; - } - - elem = elem[dir]; - } - - checkSet[i] = match; - } - } -} - -function dirCheck( dir, cur, doneName, checkSet, nodeCheck, isXML ) { - for ( var i = 0, l = checkSet.length; i < l; i++ ) { - var elem = checkSet[i]; - - if ( elem ) { - var match = false; - - elem = elem[dir]; - - while ( elem ) { - if ( elem.sizcache === doneName ) { - match = checkSet[elem.sizset]; - break; - } - - if ( elem.nodeType === 1 ) { - if ( !isXML ) { - elem.sizcache = doneName; - elem.sizset = i; - } - - if ( typeof cur !== "string" ) { - if ( elem === cur ) { - match = true; - break; - } - - } else if ( Sizzle.filter( cur, [elem] ).length > 0 ) { - match = elem; - break; - } - } - - elem = elem[dir]; - } - - checkSet[i] = match; - } - } -} - -if ( document.documentElement.contains ) { - Sizzle.contains = function( a, b ) { - return a !== b && (a.contains ? a.contains(b) : true); - }; - -} else if ( document.documentElement.compareDocumentPosition ) { - Sizzle.contains = function( a, b ) { - return !!(a.compareDocumentPosition(b) & 16); - }; - -} else { - Sizzle.contains = function() { - return false; - }; -} - -Sizzle.isXML = function( elem ) { - // documentElement is verified for cases where it doesn't yet exist - // (such as loading iframes in IE - #4833) - var documentElement = (elem ? elem.ownerDocument || elem : 0).documentElement; - - return documentElement ? documentElement.nodeName !== "HTML" : false; -}; - -var posProcess = function( selector, context ) { - var match, - tmpSet = [], - later = "", - root = context.nodeType ? [context] : context; - - // Position selectors must be done after the filter - // And so must :not(positional) so we move all PSEUDOs to the end - while ( (match = Expr.match.PSEUDO.exec( selector )) ) { - later += match[0]; - selector = selector.replace( Expr.match.PSEUDO, "" ); - } - - selector = Expr.relative[selector] ? 
selector + "*" : selector; - - for ( var i = 0, l = root.length; i < l; i++ ) { - Sizzle( selector, root[i], tmpSet ); - } - - return Sizzle.filter( later, tmpSet ); -}; - -// EXPOSE -jQuery.find = Sizzle; -jQuery.expr = Sizzle.selectors; -jQuery.expr[":"] = jQuery.expr.filters; -jQuery.unique = Sizzle.uniqueSort; -jQuery.text = Sizzle.getText; -jQuery.isXMLDoc = Sizzle.isXML; -jQuery.contains = Sizzle.contains; - - -})(); - - -var runtil = /Until$/, - rparentsprev = /^(?:parents|prevUntil|prevAll)/, - // Note: This RegExp should be improved, or likely pulled from Sizzle - rmultiselector = /,/, - isSimple = /^.[^:#\[\.,]*$/, - slice = Array.prototype.slice, - POS = jQuery.expr.match.POS, - // methods guaranteed to produce a unique set when starting from a unique set - guaranteedUnique = { - children: true, - contents: true, - next: true, - prev: true - }; - -jQuery.fn.extend({ - find: function( selector ) { - var self = this, - i, l; - - if ( typeof selector !== "string" ) { - return jQuery( selector ).filter(function() { - for ( i = 0, l = self.length; i < l; i++ ) { - if ( jQuery.contains( self[ i ], this ) ) { - return true; - } - } - }); - } - - var ret = this.pushStack( "", "find", selector ), - length, n, r; - - for ( i = 0, l = this.length; i < l; i++ ) { - length = ret.length; - jQuery.find( selector, this[i], ret ); - - if ( i > 0 ) { - // Make sure that the results are unique - for ( n = length; n < ret.length; n++ ) { - for ( r = 0; r < length; r++ ) { - if ( ret[r] === ret[n] ) { - ret.splice(n--, 1); - break; - } - } - } - } - } - - return ret; - }, - - has: function( target ) { - var targets = jQuery( target ); - return this.filter(function() { - for ( var i = 0, l = targets.length; i < l; i++ ) { - if ( jQuery.contains( this, targets[i] ) ) { - return true; - } - } - }); - }, - - not: function( selector ) { - return this.pushStack( winnow(this, selector, false), "not", selector); - }, - - filter: function( selector ) { - return this.pushStack( winnow(this, selector, true), "filter", selector ); - }, - - is: function( selector ) { - return !!selector && ( typeof selector === "string" ? - jQuery.filter( selector, this ).length > 0 : - this.filter( selector ).length > 0 ); - }, - - closest: function( selectors, context ) { - var ret = [], i, l, cur = this[0]; - - // Array - if ( jQuery.isArray( selectors ) ) { - var match, selector, - matches = {}, - level = 1; - - if ( cur && selectors.length ) { - for ( i = 0, l = selectors.length; i < l; i++ ) { - selector = selectors[i]; - - if ( !matches[ selector ] ) { - matches[ selector ] = POS.test( selector ) ? - jQuery( selector, context || this.context ) : - selector; - } - } - - while ( cur && cur.ownerDocument && cur !== context ) { - for ( selector in matches ) { - match = matches[ selector ]; - - if ( match.jquery ? match.index( cur ) > -1 : jQuery( cur ).is( match ) ) { - ret.push({ selector: selector, elem: cur, level: level }); - } - } - - cur = cur.parentNode; - level++; - } - } - - return ret; - } - - // String - var pos = POS.test( selectors ) || typeof selectors !== "string" ? - jQuery( selectors, context || this.context ) : - 0; - - for ( i = 0, l = this.length; i < l; i++ ) { - cur = this[i]; - - while ( cur ) { - if ( pos ? pos.index(cur) > -1 : jQuery.find.matchesSelector(cur, selectors) ) { - ret.push( cur ); - break; - - } else { - cur = cur.parentNode; - if ( !cur || !cur.ownerDocument || cur === context || cur.nodeType === 11 ) { - break; - } - } - } - } - - ret = ret.length > 1 ? 
jQuery.unique( ret ) : ret; - - return this.pushStack( ret, "closest", selectors ); - }, - - // Determine the position of an element within - // the matched set of elements - index: function( elem ) { - if ( !elem || typeof elem === "string" ) { - return jQuery.inArray( this[0], - // If it receives a string, the selector is used - // If it receives nothing, the siblings are used - elem ? jQuery( elem ) : this.parent().children() ); - } - // Locate the position of the desired element - return jQuery.inArray( - // If it receives a jQuery object, the first element is used - elem.jquery ? elem[0] : elem, this ); - }, - - add: function( selector, context ) { - var set = typeof selector === "string" ? - jQuery( selector, context ) : - jQuery.makeArray( selector && selector.nodeType ? [ selector ] : selector ), - all = jQuery.merge( this.get(), set ); - - return this.pushStack( isDisconnected( set[0] ) || isDisconnected( all[0] ) ? - all : - jQuery.unique( all ) ); - }, - - andSelf: function() { - return this.add( this.prevObject ); - } -}); - -// A painfully simple check to see if an element is disconnected -// from a document (should be improved, where feasible). -function isDisconnected( node ) { - return !node || !node.parentNode || node.parentNode.nodeType === 11; -} - -jQuery.each({ - parent: function( elem ) { - var parent = elem.parentNode; - return parent && parent.nodeType !== 11 ? parent : null; - }, - parents: function( elem ) { - return jQuery.dir( elem, "parentNode" ); - }, - parentsUntil: function( elem, i, until ) { - return jQuery.dir( elem, "parentNode", until ); - }, - next: function( elem ) { - return jQuery.nth( elem, 2, "nextSibling" ); - }, - prev: function( elem ) { - return jQuery.nth( elem, 2, "previousSibling" ); - }, - nextAll: function( elem ) { - return jQuery.dir( elem, "nextSibling" ); - }, - prevAll: function( elem ) { - return jQuery.dir( elem, "previousSibling" ); - }, - nextUntil: function( elem, i, until ) { - return jQuery.dir( elem, "nextSibling", until ); - }, - prevUntil: function( elem, i, until ) { - return jQuery.dir( elem, "previousSibling", until ); - }, - siblings: function( elem ) { - return jQuery.sibling( elem.parentNode.firstChild, elem ); - }, - children: function( elem ) { - return jQuery.sibling( elem.firstChild ); - }, - contents: function( elem ) { - return jQuery.nodeName( elem, "iframe" ) ? - elem.contentDocument || elem.contentWindow.document : - jQuery.makeArray( elem.childNodes ); - } -}, function( name, fn ) { - jQuery.fn[ name ] = function( until, selector ) { - var ret = jQuery.map( this, fn, until ), - // The variable 'args' was introduced in - // https://github.com/jquery/jquery/commit/52a0238 - // to work around a bug in Chrome 10 (Dev) and should be removed when the bug is fixed. - // http://code.google.com/p/v8/issues/detail?id=1050 - args = slice.call(arguments); - - if ( !runtil.test( name ) ) { - selector = until; - } - - if ( selector && typeof selector === "string" ) { - ret = jQuery.filter( selector, ret ); - } - - ret = this.length > 1 && !guaranteedUnique[ name ] ? jQuery.unique( ret ) : ret; - - if ( (this.length > 1 || rmultiselector.test( selector )) && rparentsprev.test( name ) ) { - ret = ret.reverse(); - } - - return this.pushStack( ret, name, args.join(",") ); - }; -}); - -jQuery.extend({ - filter: function( expr, elems, not ) { - if ( not ) { - expr = ":not(" + expr + ")"; - } - - return elems.length === 1 ? - jQuery.find.matchesSelector(elems[0], expr) ? 
[ elems[0] ] : [] : - jQuery.find.matches(expr, elems); - }, - - dir: function( elem, dir, until ) { - var matched = [], - cur = elem[ dir ]; - - while ( cur && cur.nodeType !== 9 && (until === undefined || cur.nodeType !== 1 || !jQuery( cur ).is( until )) ) { - if ( cur.nodeType === 1 ) { - matched.push( cur ); - } - cur = cur[dir]; - } - return matched; - }, - - nth: function( cur, result, dir, elem ) { - result = result || 1; - var num = 0; - - for ( ; cur; cur = cur[dir] ) { - if ( cur.nodeType === 1 && ++num === result ) { - break; - } - } - - return cur; - }, - - sibling: function( n, elem ) { - var r = []; - - for ( ; n; n = n.nextSibling ) { - if ( n.nodeType === 1 && n !== elem ) { - r.push( n ); - } - } - - return r; - } -}); - -// Implement the identical functionality for filter and not -function winnow( elements, qualifier, keep ) { - - // Can't pass null or undefined to indexOf in Firefox 4 - // Set to 0 to skip string check - qualifier = qualifier || 0; - - if ( jQuery.isFunction( qualifier ) ) { - return jQuery.grep(elements, function( elem, i ) { - var retVal = !!qualifier.call( elem, i, elem ); - return retVal === keep; - }); - - } else if ( qualifier.nodeType ) { - return jQuery.grep(elements, function( elem, i ) { - return (elem === qualifier) === keep; - }); - - } else if ( typeof qualifier === "string" ) { - var filtered = jQuery.grep(elements, function( elem ) { - return elem.nodeType === 1; - }); - - if ( isSimple.test( qualifier ) ) { - return jQuery.filter(qualifier, filtered, !keep); - } else { - qualifier = jQuery.filter( qualifier, filtered ); - } - } - - return jQuery.grep(elements, function( elem, i ) { - return (jQuery.inArray( elem, qualifier ) >= 0) === keep; - }); -} - - - - -var rinlinejQuery = / jQuery\d+="(?:\d+|null)"/g, - rleadingWhitespace = /^\s+/, - rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig, - rtagName = /<([\w:]+)/, - rtbody = /<tbody/i, - rhtml = /<|&#?\w+;/, - rnocache = /<(?:script|object|embed|option|style)/i, - // checked="checked" or checked - rchecked = /checked\s*(?:[^=]|=\s*.checked.)/i, - wrapMap = { - option: [ 1, "<select multiple='multiple'>", "</select>" ], - legend: [ 1, "<fieldset>", "</fieldset>" ], - thead: [ 1, "<table>", "</table>" ], - tr: [ 2, "<table><tbody>", "</tbody></table>" ], - td: [ 3, "<table><tbody><tr>", "</tr></tbody></table>" ], - col: [ 2, "<table><tbody></tbody><colgroup>", "</colgroup></table>" ], - area: [ 1, "<map>", "</map>" ], - _default: [ 0, "", "" ] - }; - -wrapMap.optgroup = wrapMap.option; -wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; -wrapMap.th = wrapMap.td; - -// IE can't serialize <link> and <script> tags normally - - - -

    - - - - diff --git a/cosy/console.ML b/cosy/console.ML deleted file mode 100644 index d06882bb..00000000 --- a/cosy/console.ML +++ /dev/null @@ -1,4 +0,0 @@ -OS.FileSys.chDir "chromeui/"; - -PolyML.SaveState.loadState "polychrome.polyml-heap"; - diff --git a/cosy/default_eq_class_tab.ML b/cosy/default_eq_class_tab.ML deleted file mode 100644 index 83043386..00000000 --- a/cosy/default_eq_class_tab.ML +++ /dev/null @@ -1,51 +0,0 @@ -(* pre-add spider laws *) -functor SpiderEqClasses ( - structure Theory : GRAPHICAL_THEORY - structure EqClassTab : EQ_CLASS_TAB - sharing Theory.Graph.SharingOGraph = - EqClassTab.GraphEntry.Graph.SharingOGraph - val default_data : EqClassTab.GraphEntry.data - val default_edata : EqClassTab.GraphEntry.Equiv.T -) = -struct - structure Graph = Theory.Graph - structure GraphEntry = EqClassTab.GraphEntry - - val edge = (Graph.Directed, Graph.EData.default_data) - - fun bx g = let - val (vn,g) = g |> Graph.add_vertex Graph.OVData.WVert - val (bn,g) = g |> Graph.add_bbox - in (vn, #2 (Graph.add_to_bbox bn (V.NSet.single vn) g)) - end - - fun frob data = let - val rhs = Graph.empty - val (b1,rhs) = rhs |> bx - val (b2,rhs) = rhs |> bx - val (b3,rhs) = rhs |> bx - val (b4,rhs) = rhs |> bx - val (i1,rhs) = rhs |> Graph.add_vertex (data) - val rhs = rhs |> Graph.doadd_edge edge b1 i1 |> Graph.doadd_edge edge i1 b2 - val (i2,lhs) = rhs |> Graph.add_vertex (data) - val lhs = lhs |> Graph.doadd_edge edge b3 i2 |> Graph.doadd_edge edge i2 b4 |> Graph.doadd_edge edge i1 i2 - val rhs = rhs |> Graph.doadd_edge edge b3 i1 |> Graph.doadd_edge edge i1 b4 - in (GraphEntry.mk' ((rhs, default_edata), default_data), - [GraphEntry.mk' ((lhs, default_edata), default_data)]) - end - - fun special data = let - val rhs = Graph.empty - val (b1,rhs) = rhs |> bx - val (b2,rhs) = rhs |> bx - val (i1,rhs) = rhs |> Graph.add_vertex (data) - val rhs = rhs |> Graph.doadd_edge edge b1 i1 |> Graph.doadd_edge edge i1 b2 - val lhs = rhs |> Graph.doadd_edge edge i1 i1 - in (GraphEntry.mk' ((rhs, default_edata), default_data), - [GraphEntry.mk' ((lhs, default_edata), default_data)]) - end - - fun eq_class_tab data_list = - fold (EqClassTab.add_eq_class o frob) data_list EqClassTab.empty - -end \ No newline at end of file diff --git a/cosy/default_gens.ML b/cosy/default_gens.ML deleted file mode 100644 index df02812c..00000000 --- a/cosy/default_gens.ML +++ /dev/null @@ -1,58 +0,0 @@ -functor DefaultGenerators( - structure Graph : BANG_GRAPH -) = -struct - structure Graph = Graph - - val id = let - val gr = Graph.empty - val (a,gr) = gr |> Graph.add_vertex (Graph.WVert) - val (b,gr) = gr |> Graph.add_vertex (Graph.WVert) - in (gr |> Graph.add_edge_anon (Directed, Graph.default_edata) a b, 1, 1) - end - - fun gen data (ins, outs) = let - (* fun addk 0 gr = (gr, V.NSet.empty) - | addk k gr = let - val (v,gr) = gr |> Graph.add_vertex Graph.OVData.WVert - val (gr,vs) = addk (k-1) gr - in (gr, V.NSet.add v vs) - end - val (iv, gr) = Graph.empty |> Graph.add_vertex (Graph.OVData.NVert data) - val (gr,invs) = addk ins gr - val (gr,outvs) = addk outs gr - val gr = V.NSet.fold (fn v => Graph.add_edge_anon (Directed, Graph.default_edata) v iv) invs gr - val gr = V.NSet.fold (fn v => Graph.add_edge_anon (Directed, Graph.default_edata) iv v) outvs gr *) - in (data, ins, outs) - end - - fun gen_list max_arity data_list = let - fun alist 0 0 = [] - | alist k 0 = (0,k)::alist (k-1) (k-1) - | alist k i = (i,k-i)::alist k (i-1) - in (fold_product (cons oo gen) data_list (alist max_arity max_arity) []) - end -end - - 
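For illustration, the arity enumeration that gen_list performs above can be run on its own. The snippet below is a standalone Standard ML sketch of the inner alist helper (detached from the Quantomatic structures, so it loads in any SML top level); it shows that alist m m yields every (inputs, outputs) pair with total arity between 1 and m, which gen_list then combines with every generator datum via fold_product.

(* standalone sketch of the arity enumeration used by gen_list *)
fun alist 0 0 = []
  | alist k 0 = (0, k) :: alist (k-1) (k-1)
  | alist k i = (i, k-i) :: alist k (i-1);

(* for example, alist 2 2 evaluates to [(2,0), (1,1), (0,2), (1,0), (0,1)] *)
val arities = alist 2 2;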
-(* -val gens = [ - gen GHZW_VertexData.GHZ 3 0, - gen GHZW_VertexData.W 3 0, - gen GHZW_VertexData.GHZ 2 1, - gen GHZW_VertexData.W 2 1, - gen GHZW_VertexData.GHZ 1 2, - gen GHZW_VertexData.W 1 2, - gen GHZW_VertexData.GHZ 0 3, - gen GHZW_VertexData.W 0 3, - gen GHZW_VertexData.GHZ 2 0, - gen GHZW_VertexData.W 2 0, - gen GHZW_VertexData.GHZ 1 1, - gen GHZW_VertexData.W 1 1, - gen GHZW_VertexData.W 0 2, - gen GHZW_VertexData.GHZ 0 2, - gen GHZW_VertexData.GHZ 1 0, - gen GHZW_VertexData.W 1 0, - gen GHZW_VertexData.GHZ 0 1, - gen GHZW_VertexData.W 0 1 -];*) \ No newline at end of file diff --git a/cosy/default_rws.ML b/cosy/default_rws.ML deleted file mode 100644 index 77dc97d7..00000000 --- a/cosy/default_rws.ML +++ /dev/null @@ -1,67 +0,0 @@ -signature SPIDER_REWRITES = -sig - structure Theory : GRAPHICAL_THEORY - val frob_rules : Theory.Graph.nvdata list -> Theory.Ruleset.T - val frob_and_special_rules : Theory.Graph.nvdata list -> Theory.Ruleset.T -end - -functor SpiderRewrites ( - structure Theory : GRAPHICAL_THEORY -) : SPIDER_REWRITES = -struct - structure Theory = Theory - structure Ruleset = Theory.Ruleset - structure Rule = Theory.Rule - structure Graph = Theory.Graph - - val edge = (Directed, Graph.default_edata) - - fun bx g = let - val (vn,g) = g |> Graph.add_vertex Graph.WVert - val (bn,g) = g |> Graph.add_bbox - in (vn, #2 (Graph.add_to_bbox bn (V.NSet.single vn) g)) - end - - fun frob data = let - val rhs = Graph.empty - val (b1,rhs) = rhs |> bx - val (b2,rhs) = rhs |> bx - val (b3,rhs) = rhs |> bx - val (b4,rhs) = rhs |> bx - val (i1,rhs) = rhs |> Graph.add_vertex (Graph.NVert data) - val rhs = rhs |> Graph.add_edge_anon edge b1 i1 |> Graph.add_edge_anon edge i1 b2 - val (i2,lhs) = rhs |> Graph.add_vertex (Graph.NVert data) - val lhs = lhs |> Graph.add_edge_anon edge b3 i2 |> Graph.add_edge_anon edge i2 b4 |> Graph.add_edge_anon edge i1 i2 - val rhs = rhs |> Graph.add_edge_anon edge b3 i1 |> Graph.add_edge_anon edge i1 b4 - in Rule.mk (lhs,rhs) - end - - fun special data = let - val rhs = Graph.empty - val (b1,rhs) = rhs |> bx - val (b2,rhs) = rhs |> bx - val (i1,rhs) = rhs |> Graph.add_vertex (Graph.NVert data) - val rhs = rhs |> Graph.add_edge_anon edge b1 i1 |> Graph.add_edge_anon edge i1 b2 - val lhs = rhs |> Graph.add_edge_anon edge i1 i1 - in Rule.mk (lhs,rhs) - end - - val t_redex = TagName.mk "redex" - - fun add_frob_and_special d rs = - let - val (fr, rs) = Ruleset.add_fresh_rule (frob d) rs - val (sp, rs) = Ruleset.add_fresh_rule (special d) rs - in rs |> Ruleset.tag_rule fr t_redex - |> Ruleset.tag_rule sp t_redex - end - - fun add_frob d rs = - let - val (fr, rs) = Ruleset.add_fresh_rule (frob d) rs - in rs |> Ruleset.tag_rule fr t_redex - end - - fun frob_rules ds = fold add_frob ds Ruleset.empty - fun frob_and_special_rules ds = fold add_frob_and_special ds Ruleset.empty -end diff --git a/cosy/enumerate.ML b/cosy/enumerate.ML deleted file mode 100644 index 6fd1731c..00000000 --- a/cosy/enumerate.ML +++ /dev/null @@ -1,123 +0,0 @@ -signature GRAPH_ENUMERATOR = -sig - structure Theory : GRAPHICAL_THEORY - val fold_graphs : (Theory.Graph.T -> 'a -> 'a) -> (* folder *) - (Theory.Graph.T * int * int) list * (* generators + arities *) - int * int * int * int -> (* in, out, max verts, max pluggings *) - 'a -> (* base case *) - 'a - val fold_graphs_with_rs : Theory.Ruleset.T -> (* ruleset *) - (Theory.Graph.T -> 'a -> 'a) -> (* folder *) - (Theory.Graph.T * int * int) list * (* generators + arities *) - int * int * int * int -> (* in, out, max verts, max pluggings *) 
- 'a -> (* base case *) - 'a - val rule_matches_graph : Theory.Rule.T -> Theory.Graph.T -> bool - val has_match : Theory.Ruleset.T -> R.NSet.T -> Theory.Graph.T -> bool -end - -functor GraphEnumerator ( - structure Theory : GRAPHICAL_THEORY -) : GRAPH_ENUMERATOR = -struct - structure Theory = Theory - structure Graph = Theory.Graph - - (* maintain list of available pluggings *) - structure Plugs = NameBRelFun(structure Dom=V and Cod=V) - - (* get the lowest plugging *) - fun first_plug rel = - case V.NSet.get_first (Plugs.get_domset rel) - of SOME a => SOME (a, the (V.NSet.get_first (Plugs.domf rel a))) - | NONE => NONE - - fun disjoint_union g1 g2 = let - val vrn = VSub.mk_from_avoids (Graph.get_vertices g2) - val ern = ESub.mk_from_avoids (Graph.get_edges g2) - in - Graph.merge g1 (Graph.rename_ograph_anon (vrn,ern) g2) - end - fun get_plugs gr = let - fun not_adj_to_boundary v = - V.NSet.forall (not o (Graph.is_boundary gr)) - (Graph.get_adj_vertices gr v) - val filt = V.NSet.filter not_adj_to_boundary - val (ins,outs) = (Graph.get_inputs gr, Graph.get_outputs gr) - val (ins',outs') = (filt ins, filt outs) - in if V.NSet.is_empty ins' orelse V.NSet.is_empty outs' - then Plugs.empty else Plugs.empty |> Plugs.add_many ins' outs' - end - fun delete_plugs_like gr (p,q) plugs = let (* remove all plugs that are essentially (p,q) because of commutativity *) - val ps = Graph.get_predecessor_vertices gr (the (V.NSet.get_first (Graph.get_successor_vertices gr p))) - val qs = Graph.get_successor_vertices gr (the (V.NSet.get_first (Graph.get_predecessor_vertices gr q))) - in plugs |> Plugs.del_many ps qs - end - (* fun dft (Lf s) vs = s::vs | dft (Br (s,ch)) vs = s::(fold dft ch vs); *) - - structure MatchSearch = GreedyMatchSearch(Graph) - (* precondition: g contains no !-boxes *) - fun rule_matches_concrete_graph r g = let - val lhs = Theory.Rule.get_lhs r - val gs = [lhs, g] - (*val names = - (fold Graph.add_to_vnames gs V.NSet.empty, - fold Graph.add_to_enames gs E.NSet.empty, - fold Graph.add_to_bboxes gs B.NSet.empty)*) - val matches = MatchSearch.match lhs g - in case Seq.pull matches of NONE=>false | _=>true - end - - val rule_matches_graph = rule_matches_concrete_graph - - (* matching using !-graphs as a target seems to be broken, so return false by default *) - (*fun rule_matches_graph r g = if (Graph.VtoBoxMap.is_empty o Graph.get_bboxes) g - then rule_matches_concrete_graph r g - else false*) - - (*fun has_match rs in_rules target_graph = let - val redsets = RuleName.NSet.subdivide (4 * Multithreading.max_threads_value ()) in_rules - fun search_reds redset = RuleName.NSet.exists (fn r => rule_matches_graph (Ruleset.get_rule rs r) target_graph) redset - in Par_List.exists search_reds redsets - end*) - - fun has_match rs in_rules target_graph = - R.NSet.exists (fn r => rule_matches_graph (Theory.Ruleset.get_rule rs r) target_graph) in_rules - - fun fold_graphs_with_rs rs f (generators, total_ins, total_outs, max_verts, max_plugs) base = let - (*val rule_list = RuleName.NSet.fold (cons o (Ruleset.get_rule rs)) (Ruleset.get_rules_in_tag rs (TagName.mk "r")) []*) - val all_reds = Theory.Ruleset.get_rules_in_tag rs (TagName.mk "r") - fun enum_plugs (gr, ins, plugs, plug_count) accum = - if (ins < total_ins orelse - plug_count > max_plugs orelse - has_match rs all_reds gr) then accum (* return if impossible or already redicible *) - else (if ins = total_ins then f gr else I) - (case first_plug plugs - of SOME (p,q) => - enum_plugs (gr |> Graph.plug_anon p q |> Graph.minimise, (* do plug *) - 
ins-1, - plugs |> Plugs.del_dom p |> Plugs.del_cod q, - plug_count+1) - (enum_plugs (gr, (* skip to next plug, remove any similar plugs *) - ins, - plugs |> delete_plugs_like gr (p,q), - plug_count) accum) - | NONE => accum) - - fun enum_gens (gr, gens as ((g,gin,gout)::gens'), ins, outs, verts) accum = - if (verts > max_verts orelse - ins > total_ins + max_plugs orelse - outs > total_outs + max_plugs) then accum (* return if impossible *) - else (enum_gens (disjoint_union g gr, gens, ins+gin, outs+gout, verts+1) (* add gen *) - (enum_gens (gr, gens', ins, outs, verts) accum)) (* pop and don't add gen *) - | enum_gens (gr, [], ins, outs, _) accum = (* ready for plugging *) - if ins >= total_ins andalso ins - total_ins = outs - total_outs (* plugging is possible *) - then enum_plugs (gr, ins, get_plugs gr, 0) accum (* plug until ins = total_ins *) - else accum - in enum_gens (Graph.empty, generators, 0, 0, 0) base - end - - fun fold_graphs f run base = fold_graphs_with_rs Theory.Ruleset.empty f run base -end - - diff --git a/cosy/eq_class_tab.ML b/cosy/eq_class_tab.ML deleted file mode 100644 index a078db41..00000000 --- a/cosy/eq_class_tab.ML +++ /dev/null @@ -1,405 +0,0 @@ -signature GRAPH_ENTRY = -sig - structure Graph : BANG_GRAPH - structure Equiv : GRAPH_EQUIV - sharing Equiv.Graph.Sharing = Graph.Sharing - - type T - type data - val update_graph : (Graph.T -> Graph.T) -> T -> T - val update_data : (data -> data) -> T -> T - val update_edata : (Equiv.T option -> Equiv.T option) -> T -> T - val update_dirty : (bool -> bool) -> T -> T - val get_graph : T -> Graph.T - val get_data : T -> data - val get_edata : T -> Equiv.T option - val get_dirty : T -> bool - val set_graph : Graph.T -> T -> T - val set_data : data -> T -> T - val set_edata : Equiv.T option -> T -> T - val set_dirty : bool -> T -> T - val mk : Graph.T * data -> T - val mk' : (Graph.T * Equiv.T option) * data -> T -end - -signature EQ_CLASS = -sig - type T - val update_rep : (GraphName.name -> GraphName.name) -> T -> T - val update_congs : (GraphName.name list -> GraphName.name list) -> T -> T - val update_redexes : (GraphName.name list -> GraphName.name list) -> T -> T - val get_rep : T -> GraphName.name - val get_congs : T -> GraphName.name list - val get_redexes : T -> GraphName.name list - val set_rep : GraphName.name -> T -> T - val set_congs : GraphName.name list -> T -> T - val set_redexes : GraphName.name list -> T -> T - val mk : GraphName.name -> T -end - -signature EQ_CLASS_TAB = -sig - type T - structure Theory : GRAPHICAL_THEORY - structure EqClass : EQ_CLASS - structure GraphEntry : GRAPH_ENTRY - structure DNet : TOP_DNET - sharing GraphEntry.Graph.Sharing - = Theory.Graph.Sharing - = DNet.G.Sharing - - - val update_initial_rs : (Theory.Ruleset.T -> Theory.Ruleset.T) -> T -> T - val get_initial_rs : T -> Theory.Ruleset.T - val set_initial_rs : Theory.Ruleset.T -> T -> T - - val get_ruleset : T -> Theory.Ruleset.T - - val get_graph_entry : T -> GraphName.name -> GraphEntry.T - val get_graph_tab : T -> GraphEntry.T GraphName.NTab.T - - val fold_eqclasses : (EqClass.T -> 'a -> 'a) -> T -> 'a -> 'a - val fold_redexes : (GraphEntry.T -> 'a -> 'a) -> T -> 'a -> 'a - val fold_irredexes : (GraphEntry.T -> 'a -> 'a) -> T -> 'a -> 'a - - (* folds over a function which gets args "(graph, rep) is_redex" *) - (*val fold_rule_pairs : - (GraphEntry.Graph.T * GraphEntry.Graph.T -> bool -> 'a -> 'a) -> - T -> 'a -> 'a*) - - val exists_redexes : (GraphEntry.T -> bool) -> T -> bool - val exists_irredexes : (GraphEntry.T -> 
bool) -> T -> bool - val get_irredexes : T -> GraphEntry.T list - val get_redexes : T -> GraphEntry.T list - - - (* add one graph entry to table. Only return the table if it has actually changed. *) - val update_one : GraphEntry.T -> T -> T option - val update : GraphEntry.T list -> T -> GraphEntry.T list * T - - val empty : T - val mk : Theory.Ruleset.T -> T -end - -functor GraphEntry( - structure Equiv : GRAPH_EQUIV - type data -) : GRAPH_ENTRY = -struct - structure Graph = Equiv.Graph - structure Equiv = Equiv - type data = data - - exception stub_data_access of unit - datatype T = GE of { graph: Graph.T, data: data, edata: Equiv.T option, dirty: bool } - fun update_graph f (GE r) = GE {graph = f(#graph r), data = #data r, edata = #edata r, dirty = #dirty r} - fun update_data f (GE r) = GE {graph = #graph r, data = f(#data r), edata = #edata r, dirty = #dirty r} - fun update_edata f (GE r) = GE {graph = #graph r, data = #data r, edata = f(#edata r), dirty = #dirty r} - fun update_dirty f (GE r) = GE {graph = #graph r, data = #data r, edata = #edata r, dirty = f(#dirty r)} - fun get_graph (GE r) = #graph r - fun get_data (GE r) = #data r - fun get_edata (GE r) = #edata r - fun get_dirty (GE r) = #dirty r - val set_graph = update_graph o K - val set_data = update_data o K - val set_edata = update_edata o K - val set_dirty = update_dirty o K - - fun mk' ((gr, ed), d) = - GE { - graph = gr, - data = d, - edata = ed, - dirty = true - } - - fun mk (gr, d) = let - val (gr' ,ed) = Equiv.compute_equiv_data gr - in mk' ((gr', SOME ed), d) - end -end - -structure EqClass = -struct - datatype T = EQC of { rep: GraphName.name, congs: GraphName.name list, redexes: GraphName.name list } - fun update_rep f (EQC r) = EQC {rep= f(#rep r),congs= #congs r,redexes= #redexes r} - fun update_congs f (EQC r) = EQC {rep= #rep r,congs= f(#congs r),redexes= #redexes r} - fun update_redexes f (EQC r) = EQC {rep= #rep r,congs= #congs r,redexes= f(#redexes r)} - fun get_rep (EQC r) = #rep r - fun get_congs (EQC r) = #congs r - fun get_redexes (EQC r) = #redexes r - val set_rep = update_rep o K - val set_congs = update_congs o K - val set_redexes = update_redexes o K - fun mk g = EQC { rep = g, congs = [], redexes = [] } -end - - -functor EqClassTab( - structure Theory : GRAPHICAL_THEORY - structure Metric : GRAPH_METRIC - structure Equiv : GRAPH_EQUIV - sharing Theory.Graph.Sharing = - Metric.Graph.Sharing = - Equiv.Graph.Sharing - type data - val default_data : data -) : EQ_CLASS_TAB = -struct - structure Theory = Theory - structure Ruleset = Theory.Ruleset - structure EqClass = EqClass - structure G = Theory.Graph - structure GraphEntry = GraphEntry( - structure Equiv = Equiv - type data = data) - structure GE = GraphEntry - structure DNet = Top_DNet(G) - - structure ClassIndex = Table( - type key = int * int - val ord = prod_ord int_ord int_ord - ) - - datatype T = EQT of { - tab : (EqClass.T list) ClassIndex.table, - initial_rs : Theory.Ruleset.T, - dnet : DNet.T, - graph_tab : GE.T GraphName.NTab.T - } - - val empty = EQT { - tab=ClassIndex.empty, - initial_rs=Theory.Ruleset.empty, - dnet=DNet.empty, - graph_tab=GraphName.NTab.empty - } - - fun update_tab f (EQT r) = EQT {tab=f(#tab r),initial_rs= #initial_rs r,dnet= #dnet r,graph_tab= #graph_tab r} - fun update_initial_rs f (EQT r) = EQT {tab= #tab r,initial_rs=f(#initial_rs r),dnet= #dnet r,graph_tab= #graph_tab r} - fun update_dnet f (EQT r) = EQT {tab= #tab r,initial_rs= #initial_rs r,dnet=f(#dnet r),graph_tab= #graph_tab r} - fun update_graph_tab f (EQT r) 
= EQT {tab= #tab r,initial_rs= #initial_rs r,dnet= #dnet r,graph_tab=f(#graph_tab r)} - - fun get_tab (EQT r) = #tab r - fun get_initial_rs (EQT r) = #initial_rs r - fun get_dnet (EQT r) = #dnet r - fun get_graph_tab (EQT r) = #graph_tab r - - val set_tab = update_tab o K - val set_initial_rs = update_initial_rs o K - val set_dnet = update_dnet o K - val set_graph_tab = update_graph_tab o K - - - fun get_graph_entry eqt g = - GraphName.NTab.get (get_graph_tab eqt) g - - fun set_as_redex gn eqt = - eqt |> update_dnet (DNet.add_graph (gn, GE.get_graph (get_graph_entry eqt gn))) - - - (* convenience functions for pulling out redexes and irredexes *) - local - exception found_exn of unit - fun fold_to_exists fldf f eqt = - fldf (fn x => fn _ => - (if f x then raise found_exn () else false)) eqt false - handle found_exn () => true - in - fun fold_eqclasses f = ClassIndex.fold (fn (_, classlist) => - fold f classlist - ) o get_tab - - fun fold_redexes f eqt = - fold_eqclasses (fn class => - fold (fn gn => f (GraphName.NTab.get (get_graph_tab eqt) gn)) - (EqClass.get_redexes class) - ) eqt - - fun fold_irredexes f eqt = - fold_eqclasses (fn class => - fold (fn gn => f (GraphName.NTab.get (get_graph_tab eqt) gn)) - (EqClass.get_rep class :: EqClass.get_congs class) - ) eqt - - (*fun fold_rule_pairs f = - let - fun class_fld class x = - let - val rep = GraphEntry.get_graph (EqClass.get_rep class) - in - fold - (fn redex => f (GraphEntry.get_graph redex, rep) true) - (EqClass.get_redexes class) - (fold - (fn cong => f (GraphEntry.get_graph cong, rep) false) - (EqClass.get_congs class) x) - end - in - ClassIndex.fold (fn (_, classlist) => - fold class_fld classlist) o get_tab - end*) - - fun get_redexes eqt = fold_redexes cons eqt [] - fun get_irredexes eqt = fold_irredexes cons eqt [] - val exists_redexes = fold_to_exists fold_redexes - val exists_irredexes = fold_to_exists fold_irredexes - end - - - fun get_ruleset eqt = - let - fun add_rule nm tag rhs lhs_n rs = - let - val lhs = GE.get_graph (GraphName.NTab.get (get_graph_tab eqt) lhs_n) - val r = Theory.Rule.mk (lhs, rhs) - val (rname, rs) = rs |> Theory.Ruleset.add_fresh_rule r - in rs |> Theory.Ruleset.tag_rule rname (TagName.mk tag) - end - - fun add_class class rs = - let - val rep = GE.get_graph (GraphName.NTab.get (get_graph_tab eqt) (EqClass.get_rep class)) - val add_redex = add_rule "r_0" "redex" rep - val add_cong = add_rule "c_0" "cong" rep - in - fold add_redex (EqClass.get_redexes class) - (fold add_cong (EqClass.get_congs class) rs) - end - in - ClassIndex.fold (fn (_, classlist) => - fold add_class classlist) (get_tab eqt) (get_initial_rs eqt) - end - - - (*fun has_match tgt pat = - is_some (Seq.pull (Theory.MatchSearch.match - (get_graph pat) (get_graph tgt) - )) - - fun rs_can_reduce rs tgt = - let - fun r_matches r_name = - is_some (Seq.pull (Theory.MatchSearch.match - (Theory.Rule.get_lhs (Ruleset.get_rule rs r_name)) (get_graph tgt) - )) - in R.NSet.exists r_matches - (Theory.Ruleset.get_rules_in_tag rs (TagName.mk "redex")) - end*) - - (* returns (SOME new_class) if graph is equivalent to the representative, - and NONE otherwise *) - - -(* fun find_and_add eqt gn = let - val ge = GraphName.NTab.get (get_graph_tab eqt) gn - - *) - - (*fun update_one ge eqt = - if (*rs_can_reduce (get_initial_rs eqt) ge orelse - exists_redexes (has_match ge) eqt*) false - then NONE - else let - val (gn, gt) = get_graph_tab eqt |> GraphName.NTab.add (GraphName.mk "g0", ge) - val eqt = eqt |> set_graph_tab gt - val gr = GE.get_graph ge - val 
arity = (V.NSet.cardinality (G.get_inputs gr), - V.NSet.cardinality (G.get_outputs gr)) - in SOME (eqt |> - update_tab (ClassIndex.map_default (arity, []) - (find_and_add eqt gn) - )) - end*) - - fun has_match_in_dnet eqt tgt = let - fun has_match pat_n = let - val pat = get_graph_entry eqt pat_n - in is_some (Seq.pull ( - Theory.MatchSearch.match (GE.get_graph pat) (GE.get_graph tgt) - )) - end - in GraphName.NSet.exists has_match - (DNet.get_match_candidates (get_dnet eqt) (GE.get_graph tgt)) - end - - fun update_one ge eqt = - if has_match_in_dnet eqt ge then NONE - else let - val (gn, gt) = get_graph_tab eqt |> GraphName.NTab.add (GraphName.mk "g0", ge) - val eqt' = eqt |> set_graph_tab gt - val gr = GE.get_graph ge - val arity = (V.NSet.cardinality (G.get_inputs gr), - V.NSet.cardinality (G.get_outputs gr)) - val classlist = case ClassIndex.lookup (get_tab eqt) arity - of SOME cs => cs | NONE => [] - - fun tryadd_to_class cls eqt = let - val cls_ge = GraphName.NTab.get (get_graph_tab eqt) (EqClass.get_rep cls) - val inclass = - case (GE.get_edata cls_ge, GE.get_edata ge) - of (SOME d1, SOME d2) => Equiv.eq (d1, d2) - | _ => false - in - if inclass - then - SOME ( - case Metric.ord_graph (GE.get_graph cls_ge, GE.get_graph ge) - of LESS => (cls |> EqClass.update_redexes (cons gn), eqt |> set_as_redex gn) - | GREATER => - let - val new_reds = EqClass.get_rep cls :: EqClass.get_congs cls - in (cls |> EqClass.update_redexes (fn reds => new_reds @ reds) - |> EqClass.set_congs [] - |> EqClass.set_rep gn, - fold set_as_redex new_reds eqt) - end - | EQUAL => (cls |> EqClass.update_congs (cons gn), eqt) - ) - else NONE - end - - fun tryaddf cls (found, rest, eqt) = - if found then (true, cls :: rest, eqt) - else case tryadd_to_class cls eqt - of SOME (cls', eqt') => (true, cls' :: rest, eqt') - | NONE => (false, cls :: rest, eqt) - - val (found, classlist', eqt') = fold tryaddf classlist (false, [], eqt') - in - SOME ( - eqt' |> update_tab (ClassIndex.update (arity, - if found then classlist' - else EqClass.mk gn :: classlist')) - ) - end - - fun update ge_list eqtab = let - fun updatef ge (ges, eqt) = - case update_one ge eqt - of SOME eqt' => (ge::ges, eqt') - | NONE => (ges, eqt) - in fold updatef ge_list ([],eqtab) - end - - - fun mk rs = let - val initial_redexes = Ruleset.get_rules_in_tag rs (TagName.mk "redex") - fun add_rule rn eqt = let - val ge = GE.mk' ((Theory.Rule.get_lhs (Ruleset.get_rule rs rn), NONE), default_data) - val (gn, tab) = (get_graph_tab eqt) |> GraphName.NTab.add (GraphName.mk "r0", ge) - in eqt |> set_graph_tab tab |> set_as_redex gn - end - in R.NSet.fold add_rule initial_redexes (empty |> set_initial_rs rs) - end -end - - - - - - - - - - diff --git a/cosy/fast_graph_enum.ML b/cosy/fast_graph_enum.ML deleted file mode 100644 index 9a16f240..00000000 --- a/cosy/fast_graph_enum.ML +++ /dev/null @@ -1,343 +0,0 @@ -signature GRAPH_ENUM = -sig - structure Theory : GRAPHICAL_THEORY - structure Metric : GRAPH_METRIC - structure EqClassTab : EQ_CLASS_TAB - - type data - type generator = (Theory.Graph.vdata * int * int) - - val default_data : data - type size_param = int * int * int * int - - sharing Theory.Graph.Sharing = - Metric.Graph.Sharing - sharing Theory.Ruleset.Sharing = - EqClassTab.Theory.Ruleset.Sharing - - val tab_update : generator list -> size_param -> EqClassTab.T -> EqClassTab.T - val tab_enum : generator list -> size_param -> EqClassTab.T - val enum : generator list -> size_param -> Theory.Graph.T list -end - -functor FastGraphEnum( - structure Theory : 
GRAPHICAL_THEORY - structure Metric : GRAPH_METRIC - structure Equiv : GRAPH_EQUIV - sharing Theory.Graph.Sharing = - Metric.Graph.Sharing = - Equiv.Graph.Sharing -) : GRAPH_ENUM = -struct - structure Theory = Theory - structure Metric = Metric - structure Graph = Theory.Graph - - type size_param = int * int * int * int - type generator = (Theory.Graph.vdata * int * int) (* generators (data + arities) *) - (* Ordered Vertices, free inputs, free outputs, list of lists of - edges, one vertex at a time *) - type adjmat = generator list * int list * int list * (int list) list - type data = adjmat * int - val empty_graph_rep = (([],[],[],[]),0) - val default_data = empty_graph_rep - - fun generator_eq ((data1,i1,o1), (data2,i2,o2)) = - Theory.Graph.vdata_eq (data1, data2) andalso - (i1,o1) = (i2,o2) - - structure EqClassTab = EqClassTab( - structure Theory = Theory - structure Metric = Metric - structure Equiv = Equiv - type data = data - val default_data = default_data) - structure GraphEntry = EqClassTab.GraphEntry - - (****** Utility Functions ******) - - fun drop_while _ [] = [] - | drop_while p (x::xs) = - if p x then drop_while p xs - else x::xs - - fun keep_drop 0 0 ts = ts - | keep_drop 0 j (t::ts) = keep_drop 0 (j-1) ts - | keep_drop i j (t::ts) = t::(keep_drop (i-1) j ts) - | keep_drop _ _ _ = raise Match - - fun filter_max m zs = let - fun filter_max' m (x, (n,ys)) = - case (Int.compare (m x, n)) of - LESS => (n,ys) - | GREATER => ((m x),x::[]) - | EQUAL => (n,x::ys) - in snd (List.foldl (filter_max' m) (0,[]) zs) - end - - val sum = List.foldr (fn (x,y) => x+y) 0 - - - (****** adjmat functions ******) - - (* Number of: vertices, total inputs, total outputs *) - fun adjmat_total_arities (gs,_,_,_) = let - val m = sum (map (fn (_,i,_) => i) gs) - val n = sum (map (fn (_,_,j) => j) gs) - in (m, n) - end - - (* Number of: vertices, total inputs, total outputs *) - fun adjmat_arity (g as (gs,_,_,edges)) = let - val p = sum (map sum edges) - val (m,n) = adjmat_total_arities g - in (m-p, n-p) - end - - (* Number of: vertices, total inputs, total outputs *) - fun adjmat_pluggings (g as (_,_,_,edges)) = sum (map sum edges) - - - - fun split_edge [] = ([],[]) - | split_edge xs = let - fun split_edge' xs ys 0 = (xs,ys) - | split_edge' (x::xs) ys n = split_edge' xs (x::ys) (n-1) - | split_edge' _ _ _ = raise Match - in split_edge' xs [] ((length xs - 1) div 2) - end - - val to_block_matr = let - fun switch xs yss = let - val (cs,rs) = split_edge xs - in ListPair.map (fn (y, ys) => y::ys) (cs, rs::yss) - end - in List.foldr (uncurry switch) [] - end - - fun from_block_matr [] = [] - | from_block_matr (es::ess) = - (List.revAppend (map hd ess, es)) :: from_block_matr (map tl ess) - - val order = List.foldl (List.revAppend) [] - - fun swap_adj_matr i tss = let - fun swap_adj_list 1 (t1::t2::ts) = t2::t1::ts - | swap_adj_list i (t::ts) = t::(swap_adj_list (i-1) ts) - | swap_adj_list _ ts = ts - in swap_adj_list i (map (swap_adj_list i) tss) - end - - fun perm_matr tss = let - fun shift_matr 0 tsss = tsss - | shift_matr i tsss = tsss @ (shift_matr (i-1) (map (swap_adj_matr i) tsss)) - fun perm_matr' tss 0 = [tss] - | perm_matr' tss i = shift_matr i (perm_matr' tss (i-1)) - in perm_matr' tss (length tss-1) - end - - fun matr_canonical ess = let - val cur_order = order ess - in forall - ((fn x => (list_ord int_ord (x, cur_order) <> GREATER)) o - order o from_block_matr) - (perm_matr (to_block_matr ess)) - end - - fun compare_vert ts us = let - val k = (length ts - 1) div 2 - val d = (length us - 1) 
div 2 - k - val ts' = (keep_drop (k+1) d (keep_drop k d us)) - in list_ord int_ord (ts, ts') - end - - fun num_same_vert xss = let - fun num_same_vert' i (v::[]) = i - | num_same_vert' i (v1::v2::vs) = - (case (compare_vert v1 v2) - of LESS => 0 - | GREATER => num_same_vert' 1 (v2::vs) - | EQUAL => num_same_vert' (i+1) (v1::vs)) - | num_same_vert' _ [] = 0 - in num_same_vert' 1 xss - end - - fun centre [] = [] - | centre (t::ts) = let - fun centre' x i [] = [] - | centre' x i (t::ts) = take i (drop x t) :: (centre' x (i+2) ts) - in centre' ((length t - 1) div 2) 1 (t::ts) - end - - fun edges_canonical [] = true - | edges_canonical ts = let - val k = num_same_vert ts - val vs = drop (length ts - k) ts - in (matr_canonical o rev o centre) vs - end - - fun last_vert_canonical ([], _, _, _) = true - | last_vert_canonical (vs, _, _, ess) = let - fun last_vert (x::y::xs) (es1::ess) ls = if (x=y) then last_vert (x::xs) ess (es1::ls) else es1::ls - | last_vert (x::_) (es1::_) ls = es1::ls - | last_vert _ _ _ = raise Match - in edges_canonical (last_vert vs ess []) - end - - fun to_graph_idfree (vs,is,os,ess) = let - val g0 = Graph.empty - fun to_graph_vert [] g = ([],g) - | to_graph_vert ((v,_,_)::vs) g = let - val (vs', g') = to_graph_vert vs g - val (v', g'') = Graph.add_vertex v g' - in (v'::vs', g'') - end - val (vertices, g0) = to_graph_vert vs g0 - fun into_edges [] (_, []) = [] - | into_edges (vin::vins) (vout, (n::ns)) = List.tabulate (n, K (vout,vin)) @ (into_edges vins (vout, ns)) - | into_edges _ _ = raise Match - fun list_int_edges vs ess = maps (into_edges vs) (ListPair.zip (vs, ess)) - val edge_list = list_int_edges vertices (to_block_matr ess) - val g0 = List.foldr (fn ((x,y), g) => snd (g |> Graph.add_edge (Directed, Theory.Graph.default_edata) x y)) g0 edge_list - val edge_in_list = maps (fn (n,y) => List.tabulate (n, K y)) (ListPair.zip (is, vertices)) - val edge_out_list = maps (fn (n,y) => List.tabulate (n, K y)) (ListPair.zip (os, vertices)) - fun add_in_edge (v, g) = let - val (bound, g') = g |> Graph.add_vertex (Theory.Graph.WVert); - val (_,g'') = g' |> Graph.add_edge (Directed, Theory.Graph.default_edata) bound v - in g'' - end - fun add_out_edge (v, g) = let - val (bound, g') = g |> Graph.add_vertex (Theory.Graph.WVert); - val (_,g'') = g' |> Graph.add_edge (Directed, Theory.Graph.default_edata) v bound - in g'' - end - val g0 = List.foldr add_in_edge g0 edge_in_list - val g0 = List.foldr add_out_edge g0 edge_out_list - in g0 - end - - fun add_ident_wire g = let - val (x, g) = g |> Graph.add_vertex (Theory.Graph.WVert); - val (y, g) = g |> Graph.add_vertex (Theory.Graph.WVert); - val (_, g) = g |> Graph.add_edge (Directed, Theory.Graph.default_edata) x y - in g - end - - fun to_graph (am,i) = funpow i add_ident_wire (to_graph_idfree am) - - fun to_graph_entry ami = GraphEntry.mk (to_graph ami, ami) - - - (****** Building adjmat graphs ******) - - (* adds a specific type of vertex with no edges *) - (* add_gen : generator -> adjmat -> adjmat *) - fun add_gen (v' as (_,inp,out)) (verts, inps, outs, edges) = (v'::verts, inp::inps, out::outs, (List.tabulate ((2 * length (verts) +1), K 0)::edges)) - - (* adds any possible vertex with no edges *) - (* add_gens : [generator] -> adjmat -> adjmat list *) - fun add_gens (max_p,max_m,max_n) gens (matr as ([],_,_,_)) = let - val gens = filter (fn (_,m,n) => m <= max_m + max_p andalso n <= max_n + max_p) gens - in map (fn v => add_gen v matr) gens - end - | add_gens (max_p,max_m,max_n) gens (matr as (vert::_,_,_,_)) = let - fun 
is_small (_,gm,gn) = let - val (m,n) = adjmat_total_arities matr - in (m+gm <= max_m + max_p) andalso (n+gn <= max_n + max_p) - end - val gens = drop_while (fn g => not (generator_eq (g,vert))) gens - val gens = filter is_small gens - in map (fn v => add_gen v matr) gens - end - - (* adds any possible edge *) - (* add_edge : adjmat -> adjmat list *) - fun add_edge ([], _, _, _) = [] - | add_edge (_, _, _, []) = raise Match - | add_edge (verts, inps, outs, edge::edges) = let - fun new_edge_here (i2::is2) (o2::os2) (e2::es2) = if ((i2<>0) andalso (o2<>0)) - then [(i2-1::is2, o2-1::os2, e2+1::es2)] else [] - | new_edge_here _ _ _ = raise Match - fun add_edge' [i1] [o1] es = new_edge_here [i1] [o1] es - | add_edge' (i1::[]) (o1::os) (e1::es) = new_edge_here (i1::[]) (o1::os) (e1::es) @ - map (fn (x,y,z) => (x,o1::y,0::z)) (if (e1=0) then add_edge' [i1] os es else [] ) - | add_edge' (i1::is) (o1::os) (e1::es) = new_edge_here (i1::is) (o1::os) (e1::es) @ - map (fn (x,y,z) => (i1::x,y,0::z)) (if (e1=0) then add_edge' is (o1::os) es else [] ) - | add_edge' _ _ _ = raise Match - in map (fn (x,y,z) => (verts,rev x,y,z::edges)) (add_edge' (rev inps) outs edge) - end - - - (****** Enumeration with EqClassTab ******) - - (* TODO: replace fopt with (f, filt) *) - fun fold_graphs gens (max_v,max_p,max_m,max_n) fopt initial = let - fun fold_save fopt (x::xs) (ys,accum) = - (if (fn (m,n) => (m + snd x) <= max_m andalso (n + snd x) <= max_n) (adjmat_arity (fst x)) - then case fopt (to_graph_entry x) accum - of SOME accum' => fold_save fopt xs (x::ys, accum') - | NONE => fold_save fopt xs (ys,accum) - else fold_save fopt xs (x::ys, accum)) - | fold_save fopt [] (ys,accum) = (ys, accum) - (* add edges in all possible ways *) - fun fold_edges_round [] (next, accum) = (next, accum) - | fold_edges_round egs (next, accum) = - let - val (next',accum') = fold_save fopt (map (fn eg => (eg,0)) egs) ([],accum) - val egs' = filter (fn (g,_) => (adjmat_pluggings g) < max_p) next' - val egs' = maps (add_edge o fst) egs' - in fold_edges_round (egs') (next' @ next, accum') - end - (***) - (* one round of enumeration. 
(prev, accum) => add one vertex => (next, accum') *) - fun fold_graphs_round (prev, accum) = - let - (* start by adding one bare wire to the output of the previous round *) - val (prev_with_bare, accum') = - fold_save fopt (map (fn (am,i) => (am,i+1)) (filter (fn (am,i) => (fst (adjmat_arity am) + i < max_m ) andalso (snd (adjmat_arity am) + i < max_n )) prev)) ([], accum) - (* only add non-trivial generators to graphs that don't have any bare wires yet *) - val prev = map fst (filter (fn (_,i)=> i = 0) prev) - val graphs = flat (map (add_gens (max_p,max_m,max_n) gens) prev) - in fold (fn g => fold_edges_round [g]) graphs (prev_with_bare, accum') - end - (***) - in - snd (funpow max_v fold_graphs_round ([empty_graph_rep], initial)) - end - - fun enum gens max_size = - fold_graphs gens max_size - (fn g => fn gs => SOME (GraphEntry.get_graph g :: gs)) [] - - fun tab_update gens max_size = fold_graphs gens max_size EqClassTab.update_one - fun tab_enum gens max_size = tab_update gens max_size EqClassTab.empty - - (* recursively adds new graphs to tab by adding one new edge to each irredex *) - (* tab_add_edges : [adjmat] -> EqClassTab -> EqClassTab *) - (*fun tab_add_edges [] tab = tab - | tab_add_edges egs tab = let - val egs' = maps add_edge egs - val (irreds,tab') = EqClassTab.update (map (fn eg => to_graph_entry (eg,0)) egs') tab - in tab_add_edges (map (fst o GraphEntry.get_data) irreds) tab' - end - - (* adds new graphs with one new vertex and any possible number of extra edges *) - (* tab_add_gens : generator list -> EqClassTab -> EqClassTab *) - fun tab_add_gens gens tab = let - val irred_graphs = filter_max (fn (g,i) => i + adjmat_size g) - (map GraphEntry.get_data (EqClassTab.get_irredexes tab)) - val (_,tab') = EqClassTab.update (map (fn (am,i) => to_graph_entry (am,i+1)) irred_graphs) tab - val irred_graphs = map fst (filter (fn (g,i) => i=0) irred_graphs) - val graphs = flat (map (add_gens gens) irred_graphs) - in List.foldl (fn (g,t) => tab_add_edges [g] t) tab' graphs - end*) - - - (*fun tab_update gens max_verts tab = - funpow max_verts (tab_add_gens gens) - (snd (EqClassTab.update [to_graph_entry (([],[],[],[]),0)] tab)) - - (* Creates a table of graphs up to a certain size *) - (* tab_enum : generator list -> Int -> EqClassTab *) - fun tab_enum gens max_verts = EqClassTab.empty |> tab_update gens max_verts*) -end diff --git a/cosy/generators.ML b/cosy/generators.ML deleted file mode 100644 index e062e531..00000000 --- a/cosy/generators.ML +++ /dev/null @@ -1,29 +0,0 @@ -use "tensor.ML"; - -val wunit = Tensor.tensor (2,0,1) -[ 0, - 1 ]; - -val wcounit = Tensor.tensor (2,1,0) -[ 1, 0 ]; - -val wmult = Tensor.tensor (2,2,1) -[ 0, 1, 1, 0, - 0, 0, 0, 1 ]; - -val wcomult = Tensor.tensor (2,1,2) -[ 1, 0, - 0, 1, - 0, 1, - 0, 0 ]; - -val ghzmult = Tensor.tensor (2,2,1) -[ 1, 0, 0, 0, - 0, 0, 0, 1 ]; - -val ghzcomult = Tensor.tensor (2,1,2) -[ 1, 0, - 0, 0, - 0, 0, - 0, 1 ]; - diff --git a/cosy/graph_equiv.ML b/cosy/graph_equiv.ML deleted file mode 100644 index d115d0ad..00000000 --- a/cosy/graph_equiv.ML +++ /dev/null @@ -1,14 +0,0 @@ -signature GRAPH_EQUIV = -sig - structure Graph : BANG_GRAPH - type T - - (* compute data that is used to check graph equivalence. This function may - also do some renaming of the graph to put it in some canonical form (e.g. - wrt. the ordering of inputs and outputs). 
*) - val compute_equiv_data : Graph.T -> Graph.T * T - val to_string : T -> string - val eq : T * T -> bool -end - - diff --git a/cosy/graph_equivalence.ML b/cosy/graph_equivalence.ML deleted file mode 100644 index b5ebb273..00000000 --- a/cosy/graph_equivalence.ML +++ /dev/null @@ -1,7 +0,0 @@ -signature GRAPH_EQUIVALENCE = -sig - structure Graph : BANG_GRAPH - val eq : Graph.T * Graph.T -> bool -end - - diff --git a/cosy/load_heap.ML b/cosy/load_heap.ML deleted file mode 100644 index 8af98ac6..00000000 --- a/cosy/load_heap.ML +++ /dev/null @@ -1 +0,0 @@ -PolyML.SaveState.loadState "../core/heaps/quanto.polyml-heap"; diff --git a/cosy/make_polychrome.ML b/cosy/make_polychrome.ML deleted file mode 100644 index 4a5c68ab..00000000 --- a/cosy/make_polychrome.ML +++ /dev/null @@ -1,51 +0,0 @@ -(* - -(* include quanto heap *) -PolyML.SaveState.loadState "../core/heaps/quanto.polyml-heap"; - -(* build cosy *) -PolyML.Project.use_root "ROOT.ML"; - -(* include polychrome API *) -OS.FileSys.chDir "../../PolyChrome/polychrome/poly"; -PolyML.Project.basic_use "ROOT.ML"; -OS.FileSys.chDir "../../../quantomatic/cosy"; - -(* output the heap *) -(*PolyML.Project.Log.log 2 "finished updating polysaves"; -PolyML.Project.ensure_at_latest_heap (); -PolyML.Project.Log.log 2 "finished ensure_at_latest_heap";*) -(*PolyML.shareCommonData run; PolyML.fullGC (); -PolyML.Project.Log.log 2 "finished garbage collection";*) -PolyML.SaveState.saveState "chromeui/polychrome.polyml-heap"; -PolyML.Project.Log.log 2 "finished saving global heap"; -val _ = OS.Process.exit OS.Process.success; - -*) - -(* build cosy *) -val _ = PolyML.exception_trace (fn () => PolyML.use "ROOT.ML") - handle _ => OS.Process.exit OS.Process.failure; - -PolyML.Project.Log.log 2 "finished updating polysaves"; - -PolyML.Project.ensure_at_latest_heap (); - -PolyML.Project.Log.log 2 "finished ensure_at_latest_heap"; - -(* include polychrome API *) -OS.FileSys.chDir "../../PolyChrome/polychrome/poly"; -PolyML.Project.basic_use "ROOT.ML"; -OS.FileSys.chDir "../../../quantomatic/cosy"; - -PolyML.Project.make "chromeui/cosy_util.sml"; - -PolyML.shareCommonData run; PolyML.fullGC (); - -PolyML.Project.Log.log 2 "finished garbage collection"; - -PolyML.SaveState.saveState "chromeui/polychrome.polyml-heap"; - -PolyML.Project.Log.log 2 "finished saving global heap"; - -val _ = OS.Process.exit OS.Process.success; diff --git a/cosy/metric.ML b/cosy/metric.ML deleted file mode 100644 index b7df06be..00000000 --- a/cosy/metric.ML +++ /dev/null @@ -1,92 +0,0 @@ -signature GRAPH_METRIC = -sig - type T - structure Graph : BANG_GRAPH - val compute : Graph.T -> T - val ord : T * T -> order - val bottom : T - - (* ASSUMED: ord_graph = ord o (apfst compute) o (apsnd compute) *) - val ord_graph : Graph.T * Graph.T -> order -end - -functor EdgeComplexityMetric( - structure Graph : BANG_GRAPH -) : GRAPH_METRIC = -struct - type T = int * int * int - structure Graph = Graph - - fun ord ((a,b,c), (d,e,f)) = (prod_ord int_ord (prod_ord int_ord int_ord)) ((a,(b,c)), (d,(e,f))) - val bottom = (0,0,0) - - fun compute graph = let - fun vert_weight v = let - val edges = E.NSet.cardinality (E.NSet.union_merge - (Graph.get_in_edges graph v) - (Graph.get_out_edges graph v)) - in if edges > 2 then edges else 0 - end - fun vfld v (ec,count) = (ec + vert_weight v, count+1) - val (ec,verts) = V.NSet.fold vfld (Graph.get_vertices graph) (0,0) - val edges = (E.NSet.cardinality o Graph.get_edges) graph - in (ec, verts, edges) - end - - val ord_graph = ord o (apfst compute) o (apsnd 
compute) -end - -functor WeightedArityMetric( - structure Graph : BANG_GRAPH - val weight_for_data : Graph.vdata -> int -) : GRAPH_METRIC = -struct - type T = int - structure Graph = Graph - val ord = int_ord - val bottom = 0 - - (*fun weight_for_arity 0 0 = 10 - | weight_for_arity 1 0 = 20 - | weight_for_arity 0 1 = 23 - | weight_for_arity 1 1 = 30 - | weight_for_arity 2 0 = 50 - | weight_for_arity 0 2 = 53 - | weight_for_arity 2 1 = 80 - | weight_for_arity 1 2 = 83 - | weight_for_arity 3 0 = 90 - | weight_for_arity 0 3 = 93 - | weight_for_arity 2 2 = 110 - | weight_for_arity 3 1 = 120 - | weight_for_arity 1 3 = 123 - | weight_for_arity 4 0 = 130 - | weight_for_arity 0 4 = 133 - | weight_for_arity n m = (20 * (5 + n)) + (21 * m)*) - - fun weight_for_arity 0 0 l = 10 + l - | weight_for_arity 1 0 l = 20 + l - | weight_for_arity 0 1 l = 22 + l - | weight_for_arity 1 1 l = 30 + l - | weight_for_arity 2 0 l = 40 + l - | weight_for_arity 0 2 l = 44 + l - | weight_for_arity 2 1 l = 50 + l - | weight_for_arity 1 2 l = 55 + l - | weight_for_arity m n l = 20*(m*m + n*n) + 20*(m + n)*(m + n) + l - - (*fun weight_for_arity m n l = 2*(m*m + n*n) + 2*(m + n)*(m + n) + l*) - - fun compute graph = let - fun vert_weights v tot = let - val loops = E.NSet.cardinality (Graph.get_self_loops graph v) - val ins = E.NSet.cardinality (Graph.get_in_edges graph v) - loops - val outs = E.NSet.cardinality (Graph.get_out_edges graph v) - loops - val dat = Graph.get_vertex_data graph v - in (weight_for_data dat * weight_for_arity ins outs loops) + tot - end - in V.NSet.fold vert_weights (Graph.get_vertices graph) 0 - end - - val ord_graph = ord o (apfst compute) o (apsnd compute) -end - - diff --git a/cosy/par_names.ML b/cosy/par_names.ML deleted file mode 100644 index e6441c0f..00000000 --- a/cosy/par_names.ML +++ /dev/null @@ -1,30 +0,0 @@ -signature PAR_NAMES = -sig - structure Name : NAMES - exception FOUND - exception ERROR - val process_nset : (Name.NSet.T -> 'a) -> Name.NSet.T -> ('a Exn.result) list -end - -functor Par_Name ( - structure Name : NAMES -) : PAR_NAMES = -struct - structure Name = Name - - (* general-purpose exceptions used to signal the top-level join and cancel other futures *) - exception FOUND - exception ERROR - - fun process_nset f nset = let - val nsets = Name.NSet.subdivide (Thread.numProcessors ()) nset - val group = Task_Queue.new_group (Future.worker_group ()) - (*val cancelf = fn () => Future.cancel_group group*) - val futures = Future.forks {name = "process_nset", group = SOME group, deps = [], pri = 0} - (map (fn ns => fn () => f ns) nsets) - in Future.join_results futures - end - -end - -structure Par_RuleName = Par_Name(structure Name = RuleName) diff --git a/cosy/ruleset_builder.ML b/cosy/ruleset_builder.ML deleted file mode 100644 index 4781c8f4..00000000 --- a/cosy/ruleset_builder.ML +++ /dev/null @@ -1,103 +0,0 @@ -signature RULESET_BUILDER = -sig - structure Theory : GRAPHICAL_THEORY - structure EqClassTab : EQ_CLASS_TAB - val get_ruleset : EqClassTab.T -> Theory.Ruleset.T -end - - -functor RulesetBuilder( - structure Theory : GRAPHICAL_THEORY - structure EqClassTab : EQ_CLASS_TAB - sharing Theory.Graph.SharingOGraph = - EqClassTab.GraphEntry.Equiv.Graph.SharingOGraph) = -struct - structure Theory = Theory - structure EqClassTab = EqClassTab - - fun get_ruleset eq_tab = - let - val tag_redex = TagName.mk "redex" - val tag_cong = TagName.mk "cong" - fun add_rule pair is_redex rs = - let - val (nm, rs') = rs |> Theory.Ruleset.add_fresh_rule - (R.mk (if is_redex then "r_0" else 
"c_0"), Theory.Rule.mk pair) - in - rs' |> Theory.Ruleset.tag_rule nm - (if is_redex then tag_redex else tag_cong) - end - in - EqClassTab.fold_rule_pairs add_rule eq_tab Theory.Ruleset.empty - end -end - - -(* signature RULESET_BUILDER = -sig - structure Synthesize : SYNTHESIZE - - val update : Synthesize.T -> Synthesize.GraphEnum.Theory.Ruleset.T -> Synthesize.GraphEnum.Theory.Ruleset.T - val from_synth : Synthesize.T -> Synthesize.GraphEnum.Theory.Ruleset.T - val reduce : Synthesize.GraphEnum.Theory.Ruleset.T -> Synthesize.GraphEnum.Theory.Ruleset.T - val rule_matches_rule : Synthesize.GraphEnum.Theory.Ruleset.Rule.T -> Synthesize.GraphEnum.Theory.Ruleset.Rule.T -> bool -end - -functor RulesetBuilder ( - structure Synthesize : SYNTHESIZE -) : RULESET_BUILDER = -struct - structure Synthesize = Synthesize - structure GraphEnum = Synthesize.GraphEnum - structure Theory = GraphEnum.Theory - structure Ruleset = Theory.Ruleset - structure Rule = Theory.Rule - structure HomeoFinder = BangGraphHomeomorphismSearcher(Theory.Graph) - - fun update synth ruleset = let - fun add_rule tag rhs lhs rs = let - (*val tryname = R.mk ((TagName.string_of_name tag) ^ "_a")*) - val (nm,rs') = rs |> Ruleset.add_fresh_rule (Rule.mk (lhs,rhs)) - in rs' |> Ruleset.tag_rule nm tag - end - fun process_class class rs = - case (EqClass.get_rep class) - of SOME rep => - fold (add_rule (TagName.mk "c") rep) (EqClass.get_congs class) - (fold (add_rule (TagName.mk "r") rep) (EqClass.get_redexes class) rs) - | NONE => rs - in Synthesize.eqclass_fold process_class synth ruleset - end - - fun from_synth synth = Ruleset.empty |> update synth - - fun rule_matches_rule r1 r2 = GraphEnum.rule_matches_graph r1 (Rule.get_lhs r2) - - (*fun matches_with_boundary pattern target = let - val m = Match.init (Vertex.NSet.empty, Edge.NSet.empty, Graph.BBox.NSet.empty) - pattern target - val m = Vertex.NSet.fold (fn b => Match.match_boundary b b) (Graph.get_boundary pattern) m - val gs = [pattern, target] - val names = (Vertex.NSet.empty,Edge.NSet.empty,Graph.BBox.NSet.empty) - in case Seq.pull (SimpleMatchSearch.match' m pattern target Vertex.NTab.empty) - of NONE=>false | _=>true - end*) - - (* iso of lhs and rhs need to respect boundaries *) - fun cong_trivial r = HomeoFinder.is_homeomorphic (Rule.get_lhs r) (Rule.get_rhs r) - - - fun has_match rs target_rn = - GraphEnum.has_match rs (Ruleset.get_rules_in_tag rs (TagName.mk "r") |> R.NSet.delete target_rn) - (Rule.get_lhs (Ruleset.get_rule rs target_rn)) - - fun reduce ruleset = let - val all_rules = RTab.get_dom_set o Ruleset.get_allrules - fun iso_filter rn rs = if cong_trivial (Ruleset.get_rule rs rn) then rs |> Ruleset.delete_rule rn else rs - fun match_filter rn rs = if has_match rs rn - then rs |> Ruleset.delete_rule rn else rs - val ruleset' = R.NSet.fold iso_filter (Ruleset.get_rules_in_tag ruleset (TagName.mk "c")) ruleset - in R.NSet.fold match_filter (all_rules ruleset') ruleset' - end - -end *) diff --git a/cosy/synthesize.ML b/cosy/synthesize.ML deleted file mode 100644 index e3e238fb..00000000 --- a/cosy/synthesize.ML +++ /dev/null @@ -1,124 +0,0 @@ -signature EQ_CLASS = -sig - type 'a T = { - rep : 'a option, - congs : 'a list, - redexes : 'a list } - val get_rep : 'a T -> 'a option - val get_congs : 'a T -> 'a list - val get_redexes : 'a T -> 'a list - val update_rep : ('a option -> 'a option) -> 'a T -> 'a T - val update_congs : ('a list -> 'a list) -> 'a T -> 'a T - val update_redexes : ('a list -> 'a list) -> 'a T -> 'a T - val set_rep : 'a option -> 'a T -> 'a T - 
val set_congs : 'a list -> 'a T -> 'a T - val set_redexes : 'a list -> 'a T -> 'a T - val empty : 'a T -end - -structure EqClass : EQ_CLASS = -struct - type 'a T = { - rep : 'a option, - congs : 'a list, - redexes : 'a list } - fun get_rep ({rep=x,...}:'a T) = x - fun get_congs ({congs=x,...}:'a T) = x - fun get_redexes ({redexes=x,...}:'a T) = x - fun update_rep f ({rep=rep,congs=congs,redexes=redexes}:'a T) = {rep=f rep,congs=congs,redexes=redexes} - fun update_congs f ({rep=rep,congs=congs,redexes=redexes}:'a T) = {rep=rep,congs=f congs,redexes=redexes} - fun update_redexes f ({rep=rep,congs=congs,redexes=redexes}:'a T) = {rep=rep,congs=congs,redexes=f redexes} - fun set_rep x cl = update_rep (K x) cl - fun set_congs x cl = update_congs (K x) cl - fun set_redexes x cl = update_redexes (K x) cl - - val empty = { rep = NONE, congs = [], redexes = [] } -end - -signature SYNTHESIZE = -sig - structure Graph : BANG_GRAPH - structure GraphEnum : GRAPH_ENUMERATOR - structure Metric : GRAPH_METRIC - (*structure TData : TENSOR_DATA*) - sharing Graph.Sharing = Metric.Graph.Sharing = GraphEnum.Theory.Graph.Sharing - type rundesc = int * int * int * int - type T - val synth : (Graph.T * int * int) list -> rundesc -> T - val synth_with_rs : GraphEnum.Theory.Ruleset.T -> (Graph.T * int * int) list -> rundesc -> T - val stats : T -> int * int * int - (*val eqclass_fold : (TData.Tensor.T * Graph.T EqClass.T -> 'b -> 'b) -> T -> 'b -> 'b*) - val eqclass_fold : (Graph.T EqClass.T -> 'b -> 'b) -> T -> 'b -> 'b -end - -functor TensorSynth ( - structure GraphEnum : GRAPH_ENUMERATOR - structure Metric : GRAPH_METRIC - structure TData : TENSOR_DATA - sharing GraphEnum.Theory.Graph.Sharing = - Metric.Graph.Sharing = - TData.Graph.Sharing -) : SYNTHESIZE = -struct - structure TData = TData - structure TEval = TensorEval(structure TData=TData) - structure GraphEnum = GraphEnum - structure Graph = GraphEnum.Theory.Graph - structure Metric = Metric - structure Tensor = TData.Tensor - - type rundesc = int * int * int * int - type T = rundesc * (Graph.T EqClass.T) Tensor.Tab.table - - - fun classify gr class_table = let - val (ins,outs,tens) = TEval.tensor_for_graph gr - val ((scalar,perm_outs,perm_ins),tens') = Tensor.normalise tens - val norm_ins = map (fn n => V.mk ("i_"^(Int.toString n))) perm_ins - val norm_outs = map (fn n => V.mk ("o_"^(Int.toString n))) perm_outs - val gr' = fold2 Graph.rename_vertex ins norm_ins (fold2 Graph.rename_vertex outs norm_outs gr) - in class_table |> Tensor.Tab.update (tens', - (case Tensor.Tab.lookup class_table tens' - of SOME eq => eq | NONE => EqClass.empty) - |> EqClass.update_congs (cons gr')) - end - - fun filter_class class = let - fun f graph (min,list) = let - val metric = Metric.compute graph - in case min of SOME (min_met, min_graph) => - if Metric.ord (metric, min_met) = LESS - then (SOME (metric, graph), (min_met, min_graph)::list) - else (SOME (min_met, min_graph), (metric, graph)::list) - | NONE => (SOME (metric, graph), list) - end - val (min_met, min_graph, metrics) = case fold f (EqClass.get_congs class) (NONE, []) - of (NONE, ms) => (Metric.bottom, Graph.empty, ms) - | (SOME (mm,mg), ms) => (mm,mg,ms) - val congs = fold (fn (m,g) => fn lst => - if Metric.ord (m, min_met) = EQUAL then g::lst else lst) metrics [] - val redexes = fold (fn (m,g) => fn lst => - if Metric.ord (m, min_met) = GREATER then g::lst else lst) metrics [] - in class - |> EqClass.set_rep (SOME min_graph) - |> EqClass.set_congs congs - |> EqClass.set_redexes redexes - end - - fun synth_with_rs 
rs gens (rd as (ins,outs,verts,plugs)) = (rd, - Tensor.Tab.map (K filter_class) - (GraphEnum.fold_graphs_with_rs - rs classify (gens,ins,outs,verts,plugs) Tensor.Tab.empty)) - - val synth = synth_with_rs GraphEnum.Theory.Ruleset.empty - - fun stats (_,class_tab) = - Tensor.Tab.fold (fn (_,class) => fn (i,j,k) => - (i + 1, - j + length (EqClass.get_congs class), - k + length (EqClass.get_redexes class)) - ) class_tab (0,0,0) - - fun eqclass_fold f (_,tab) base = TData.Tensor.Tab.fold (f o snd) tab base -end - diff --git a/cosy/tensor_equiv.ML b/cosy/tensor_equiv.ML deleted file mode 100644 index bccda10f..00000000 --- a/cosy/tensor_equiv.ML +++ /dev/null @@ -1,91 +0,0 @@ -signature TENSOR_DATA = -sig - structure Graph : BANG_GRAPH - structure Tensor : TENSOR - - val dimension : int (* all generators are assumed to be of fixed dimension *) - (* given vertex data and arities, return a tensor *) - val tensor_for_vertex_data : Graph.vdata -> (int * int) -> Tensor.T -end - - -functor TensorEquiv( - structure TData : TENSOR_DATA -) : GRAPH_EQUIV = -struct - type T = TData.Tensor.T - structure Graph = TData.Graph - - (* offset tables assign a vertex name to the rightmost tensor index that connects - to that name. *) - - (* shifts all of the offsets >= the offset of vertex "nm" down by 1. if "nm" has - no space left, it is removed from the offset table. *) - fun contract_offsets_for_name nm offsets = let - val current_offset = V.NTab.get offsets nm - fun dec (nm, offset) (tab, rm) = - if offset >= current_offset - then (tab |> V.NTab.doadd (nm, offset-1), rm) - else (tab |> V.NTab.doadd (nm, offset), rm orelse current_offset-1 = offset) - val (tab, rm) = V.NTab.fold dec offsets (V.NTab.empty, current_offset = 0) - in if rm then tab |> V.NTab.delete nm else tab - end - - fun contract_edge graph edge (i_offsets, o_offsets, tensor) = let - val src = TData.Graph.get_edge_source graph edge - val tgt = TData.Graph.get_edge_target graph edge - val upper = V.NTab.get o_offsets src - val lower = V.NTab.get i_offsets tgt - in (contract_offsets_for_name tgt i_offsets, - contract_offsets_for_name src o_offsets, - TData.Tensor.contract (lower,upper) tensor) - end - - fun boundary_list offsets = map fst (sort (fn ((_,o1),(_,o2)) => int_ord (o1, o2)) (V.NTab.list_of offsets)) - - fun append_vertex graph vert (i_offsets, o_offsets, verts, tensor) = let - val data = TData.Graph.get_vertex_data graph vert - val in_edges = TData.Graph.get_in_edges graph vert - val out_edges = TData.Graph.get_out_edges graph vert - val (num_in, num_out) = - case data of TData.Graph.NVert _ => (E.NSet.cardinality in_edges, E.NSet.cardinality out_edges) - | TData.Graph.WVert => (1,1) - val (current_in, current_out) = (TData.Tensor.lower_index_count tensor, TData.Tensor.upper_index_count tensor) - val new_tensor = TData.Tensor.product tensor (TData.tensor_for_vertex_data data (num_in, num_out)) - val new_i_offsets = if num_in = 0 then i_offsets - else i_offsets |> V.NTab.doadd (vert, (TData.Tensor.lower_index_count tensor) + num_in - 1) - val new_o_offsets = if num_out = 0 then o_offsets - else o_offsets |> V.NTab.doadd (vert, (TData.Tensor.upper_index_count tensor) + num_out - 1) - val new_verts = verts |> V.NSet.add vert - val edges_to_contract = V.NSet.fold - (E.NSet.union_merge o (TData.Graph.edges_between graph vert)) - new_verts E.NSet.empty - val (new_i_offsets, new_o_offsets, new_tensor) = - E.NSet.fold (contract_edge graph) edges_to_contract (new_i_offsets,new_o_offsets,new_tensor) - in (new_i_offsets, new_o_offsets, new_verts, 
new_tensor) - end - - fun tensor_for_graph graph = let - val (i_offsets,o_offsets,_,t) = - V.NSet.fold_rev (append_vertex graph) - (TData.Graph.get_vertices graph) - (V.NTab.empty,V.NTab.empty,V.NSet.empty, - TData.Tensor.id TData.dimension 0) - in (boundary_list i_offsets, boundary_list o_offsets,t) - end - - fun compute_equiv_data gr = let - val (ins,outs,tens) = tensor_for_graph gr - val ((scalar,perm_outs,perm_ins),tens') = TData.Tensor.normalise tens - val norm_ins = map (fn n => V.mk ("i_"^(Int.toString n))) perm_ins - val norm_outs = map (fn n => V.mk ("o_"^(Int.toString n))) perm_outs - val gr' = fold2 Graph.rename_vertex ins norm_ins (fold2 Graph.rename_vertex outs norm_outs gr) - in - (gr', tens') - end - - val eq = TData.Tensor.eq - val to_string = TData.Tensor.to_string -end - - diff --git a/cosy/theories.ML b/cosy/theories.ML deleted file mode 100644 index ac65871c..00000000 --- a/cosy/theories.ML +++ /dev/null @@ -1,232 +0,0 @@ -structure GHZW_TensorData : TENSOR_DATA = -struct - structure Graph = GHZW_Theory.Graph - structure Tensor = IntTensor - - val dimension = 2 - - fun ghz (maxi, maxj) (i,j) = if ((i=0 andalso j=0) orelse (i=maxi andalso j=maxj)) then 1 else 0 - fun w (ins,outs) (i,j) = let - val outsum = List.foldr (op+) 0 (Tensor.decompose_index 2 outs i) - val insum = List.foldr (op+) 0 (Tensor.decompose_index 2 ins j) - in if ((outsum = 1 andalso insum = ins) orelse (outsum = 0 andalso insum = (ins - 1))) then 1 else 0 - end - - - fun tensor_for_vertex_data (Graph.NVert GHZW_Data.GHZ) (ins, outs) = - Tensor.tensorf (2,ins,outs) (ghz ((Tensor.pow 2 outs)-1, (Tensor.pow 2 ins)-1)) - | tensor_for_vertex_data (Graph.NVert GHZW_Data.W) (ins, outs) = - Tensor.tensorf (2,ins,outs) (w (ins,outs)) - | tensor_for_vertex_data (Graph.NVert GHZW_Data.TICK) (1,1) = - Tensor.tensor (2,1,1) [0,1,1,0] - | tensor_for_vertex_data (Graph.NVert GHZW_Data.TICK) (ins,outs) = - Tensor.tensorf (2,ins,outs) (K 0) (* just return a zero tensor if bad dimensions on tick *) - | tensor_for_vertex_data (Graph.NVert GHZW_Data.ZERO) (ins,outs) = - Tensor.tensorf (2,ins,outs) (K 0) (* always return 0 tensor *) - | tensor_for_vertex_data (Graph.WVert) _ = Tensor.id dimension 1 -end - -(* -structure RG_TensorDataNoPhase : TENSOR_DATA = -struct - structure Graph = RG_Theory.Graph - structure Tensor = IntTensor - - val dimension = 2 - fun red (ins,outs) (i,j) = - if (fold (curry op+) (Tensor.decompose_index 2 outs i) 0) mod 2 = - (fold (curry op+) (Tensor.decompose_index 2 ins j) 0) mod 2 then 1 else 0 - fun green (maxi,maxj) (i,j) = if ((i=0 andalso j=0) orelse (i=maxi andalso j=maxj)) then 1 else 0 - - fun tensor_for_vertex_data (Graph.NVert (RG_Data.Znd _)) (ins, outs) = - Tensor.tensorf (2,ins,outs) (green ((Tensor.pow 2 outs)-1, (Tensor.pow 2 ins)-1)) - | tensor_for_vertex_data (Graph.NVert (RG_Data.Xnd _)) (ins, outs) = - Tensor.tensorf (2,ins,outs) (red (ins,outs)) - | tensor_for_vertex_data (Graph.WVert) _ = Tensor.id dimension 1 -end -*) - -structure RGB_TensorData : TENSOR_DATA = -struct - structure Graph = RGB_Theory.Graph - structure Tensor = CIntTensor - - val dimension = 2 - - val green_basis = (Tensor.tensor (2,0,1) [(1,0),(0,0)], Tensor.tensor (2,0,1) [(0,0),(1,0)]) - val red_basis = (Tensor.tensor (2,0,1) [(1,0),(1,0)], Tensor.tensor (2,0,1) [(0,~1),(0,1)]) - val blue_basis = (Tensor.tensor (2,0,1) [(1,0),(0,1)], Tensor.tensor (2,0,1) [(1,0),(0,~1)]) - - fun gen (ket1,ket2) (ins, outs) = let - val one = Tensor.id 2 0 - val bra1 = Tensor.conjugate_transpose ket1 - val bra2 = 
Tensor.conjugate_transpose ket2 - val term1 = Tensor.product (funpow ins (Tensor.product bra1) one) - (funpow outs (Tensor.product ket1) one) - val term2 = Tensor.product (funpow ins (Tensor.product bra2) one) - (funpow outs (Tensor.product ket2) one) - in Tensor.add term1 term2 - end - - fun tensor_for_vertex_data (Graph.NVert RGB_Data.Red) io = gen red_basis io - | tensor_for_vertex_data (Graph.NVert RGB_Data.Green) io = gen green_basis io - | tensor_for_vertex_data (Graph.NVert RGB_Data.Blue) io = gen blue_basis io - | tensor_for_vertex_data (Graph.WVert) _ = Tensor.id 2 1 -end - - -structure RG_TensorData : TENSOR_DATA = -struct - structure Graph = RG_Theory.Graph - structure Tensor = CIntTensor - - val dimension = 2 - - val green_basis = (Tensor.tensor (2,0,1) [(1,0),(0,0)], Tensor.tensor (2,0,1) [(0,0),(1,0)]) - val red_basis = (Tensor.tensor (2,0,1) [(1,0),(1,0)], Tensor.tensor (2,0,1) [(1,0),(~1,0)]) - - fun gen (ket1,ket2) (ins, outs) = let - val one = Tensor.id 2 0 - val bra1 = Tensor.conjugate_transpose ket1 - val bra2 = Tensor.conjugate_transpose ket2 - val term1 = Tensor.product (funpow ins (Tensor.product bra1) one) - (funpow outs (Tensor.product ket1) one) - val term2 = Tensor.product (funpow ins (Tensor.product bra2) one) - (funpow outs (Tensor.product ket2) one) - in Tensor.add term1 term2 - end - - val zero = LinratAngleExpr.zero - - fun tensor_for_vertex_data (Graph.NVert (RG_Data.Xnd _)) io = gen red_basis io - | tensor_for_vertex_data (Graph.NVert (RG_Data.Znd _)) io = gen green_basis io - | tensor_for_vertex_data (Graph.NVert RG_Data.Hnd) _ = raise ERROR "Cannot deal with hadamards" - | tensor_for_vertex_data (Graph.WVert) _ = Tensor.id 2 1 -end - - -(* FUNCTOR APPLICATIONS *) -structure RG_Enum = FastGraphEnum( - structure Theory = RG_Theory - - structure Metric = WeightedArityMetric( - structure Graph = RG_Theory.Graph - fun weight_for_data (Graph.NVert (RG_Data.Xnd _)) = 6 - | weight_for_data (Graph.NVert (RG_Data.Znd _)) = 5 - | weight_for_data _ = 4 - ) - - structure Equiv = TensorEquiv(structure TData = RG_TensorData) -) - -structure RG_Spiders = SpiderRewrites(structure Theory = RG_Theory) - - -structure GHZW_Enum = FastGraphEnum( - structure Theory = GHZW_Theory - - structure Metric = WeightedArityMetric( - structure Graph = GHZW_Theory.Graph - fun weight_for_data (Graph.NVert (GHZW_Data.GHZ)) = 6 - | weight_for_data (Graph.NVert (GHZW_Data.W)) = 5 - | weight_for_data _ = 4 - ) - - structure Equiv = TensorEquiv(structure TData = GHZW_TensorData) -) - -structure GHZW_Spiders = SpiderRewrites(structure Theory = GHZW_Theory) - - -(* -structure RG_Enum = GraphEnumerator(structure Theory = RG_Theory) - -structure RG_Synth = TensorSynth( - structure GraphEnum = RG_Enum - structure Metric = WeightedArityMetric(structure Graph = RG_Theory.Graph; - fun weight_for_data (Graph.NVert (RG_Data.Xnd _)) = 6 - | weight_for_data (Graph.NVert (RG_Data.Znd _)) = 5 - | weight_for_data _ = 4) - structure TData = RG_TensorData -) - -structure RG_RSBuilder = RulesetBuilder(structure Synthesize = RG_Synth) -structure RG_Gens = DefaultGenerators(structure Graph = RG_Theory.Graph) -structure RG_Rws = DefaultRewrites(structure Rule = RG_Theory.Rule) - - -structure GHZW_Enum = GraphEnumerator(structure Theory = GHZW_Theory) - -structure GHZW_Metric = WeightedArityMetric( - structure Graph = GHZW_Theory.Graph - fun weight_for_data (Graph.NVert GHZW_Data.ZERO) = 1 - | weight_for_data _ = 5 -) - -structure GHZW_DefaultSynth = TensorSynth( - structure GraphEnum = GHZW_Enum - structure Metric 
= GHZW_Metric - structure TData = GHZW_TensorData -) - -structure GHZW_RSBuilder = RulesetBuilder(structure Synthesize = GHZW_DefaultSynth) -structure GHZW_Gens = DefaultGenerators(structure Graph = GHZW_Theory.Graph) -structure GHZW_Rws = DefaultRewrites(structure Rule = GHZW_Theory.Rule) - -structure RGB_Enum = GraphEnumerator(structure Theory = RGB_Theory) - -structure RGB_Synth = TensorSynth( - structure GraphEnum = RGB_Enum - structure Metric = WeightedArityMetric(structure Graph = RGB_Theory.Graph; val weight_for_data = K 1) - structure TData = RGB_TensorData -) - -structure RGB_RSBuilder = RulesetBuilder(structure Synthesize = RGB_Synth) -structure RGB_Gens = DefaultGenerators(structure Graph = RGB_Theory.Graph) -structure RGB_Rws = DefaultRewrites(structure Rule = RGB_Theory.Rule) - - -(* -structure RG_NoPhaseSynth = TensorSynth( - structure GraphEnum = GraphEnumerator(structure Graph = RG_Theory.Graph) - structure Metric = EdgeComplexityMetric(structure Graph = RG_Theory.Graph) - structure TData = RG_TensorDataNoPhase -) - -structure RG_Gens = DefaultGenerators(structure Graph = RG_Theory.Graph) -*) -*) - -(* -structure TheoryData = -struct - type ('graph, 'ruleset, 'synth) T = { - name : string, - dotfun : 'graph -> string, - gens : ('graph * int * int) list, - stats : 'synth -> int * int * int, - class_list : 'synth -> (string * 'graph EqClass.T) list, - rs_pairs : 'ruleset -> ('graph * 'graph) R.NTab.T - } - - fun get_name ({name=x,...}:('g,'r,'s) T) = x - fun get_dotfun ({dotfun=x,...}:('g,'r,'s) T) = x - fun get_gens ({gens=x,...}:('g,'r,'s) T) = x - fun get_stats ({stats=x,...}:('g,'r,'s) T) = x - fun get_class_list ({class_list=x,...}:('g,'r,'s) T) = x - fun get_rs_pairs ({rs_pairs=x,...}:('g,'r,'s) T) = x -end - -fun rule_data flhs frhs = (R.NTab.map_all (fn _ => fn r => (flhs r, frhs r) )) -*) - -(* -val rg_data = { - name = "RG", - dotfun = RG_OutputGraphDot.output, - gens = RG_Gens.gen_list 4 [RG_Data.Znd LinratAngleExpr.zero, RG_Data.Xnd LinratAngleExpr.zero], - synth = RG_NoPhaseSynth.synth -} -*) - - diff --git a/cosy/util.ML b/cosy/util.ML deleted file mode 100644 index 00388476..00000000 --- a/cosy/util.ML +++ /dev/null @@ -1,126 +0,0 @@ -PolyML.SaveState.loadState "../core/heaps/quanto.heap"; -use "ROOT.ML"; - - -fun write_file file str = let - val outs = TextIO.openOut file - val _ = TextIO.outputSubstr (outs, Substring.full (str)) - val _ = TextIO.closeOut outs -in () -end - -(*structure GHZW_TensorData : TENSOR_DATA = -struct - structure Graph = GHZW_Theory.Graph - structure Tensor = IntTensor - - val dimension = 2 - - fun ghz (maxi, maxj) (i,j) = if ((i=0 andalso j=0) orelse (i=maxi andalso j=maxj)) then 1 else 0 - fun w (ins,outs) (i,j) = let - val outsum = List.foldr (op+) 0 (Tensor.decompose_index 2 outs i) - val insum = List.foldr (op+) 0 (Tensor.decompose_index 2 ins j) - in if ((outsum = 1 andalso insum = ins) orelse (outsum = 0 andalso insum = (ins - 1))) then 1 else 0 - end - - - fun tensor_for_vertex_data (Graph.NVert GHZW_Data.GHZ) (ins, outs) = - Tensor.tensorf (2,ins,outs) (ghz ((Tensor.pow 2 outs)-1, (Tensor.pow 2 ins)-1)) - | tensor_for_vertex_data (Graph.NVert GHZW_Data.W) (ins, outs) = - Tensor.tensorf (2,ins,outs) (w (ins,outs)) - | tensor_for_vertex_data (Graph.NVert GHZW_Data.TICK) (1,1) = - Tensor.tensor (2,1,1) [0,1,1,0] - | tensor_for_vertex_data (Graph.NVert GHZW_Data.TICK) (ins,outs) = - Tensor.tensorf (2,ins,outs) (K 0) (* just return a zero tensor if bad dimensions on tick *) - | tensor_for_vertex_data (Graph.NVert GHZW_Data.ZERO) 
(ins,outs) = - Tensor.tensorf (2,ins,outs) (K 0) (* always return 0 tensor *) - | tensor_for_vertex_data (Graph.WVert) _ = Tensor.id dimension 1 -end - -structure GHZW_Enum = GraphEnumerator(structure Theory = GHZW_Theory) - -structure GHZW_Metric = WeightedArityMetric( - structure Graph = GHZW_Theory.Graph - fun weight_for_data (Graph.NVert GHZW_Data.ZERO) = 1 - | weight_for_data _ = 5 -) - -structure GHZW_DefaultSynth = TensorSynth( - structure GraphEnum = GHZW_Enum - structure Metric = GHZW_Metric - structure TData = GHZW_TensorData -) - -structure GHZW_RSBuilder = RulesetBuilder(structure Synthesize = GHZW_DefaultSynth) -structure GHZW_Gens = DefaultGenerators(structure Graph = GHZW_Theory.Graph) -structure GHZW_Rws = DefaultRewrites(structure Rule = GHZW_Theory.Rule) - - - -structure G = GHZW_Theory.Graph; - -(*fun dot tfilenum graph = write_file - ("/tmp/dot/"^(Int.toString tfilenum)^".dot") - (GHZW_OutputGraphDot.output graph) -*) - -fun output_res res = () (*Tensor.Tab.fold - (fn (tensor, class) => fn i => ( - write_file ("output/"^(Int.toString i)^".tensor") (Tensor.to_string tensor); - map_index (fn (j,gr) => ( - write_file ("output/congruence_"^(Int.toString i)^"_"^(Int.toString j)^".dot") - (GHZW_OutputGraphDot.output gr) - )) (EqClass.get_congs class); - map_index (fn (j,gr) => ( - write_file ("output/redex_"^(Int.toString i)^"_"^(Int.toString j)^".dot") - (GHZW_OutputGraphDot.output gr) - )) (EqClass.get_redexes class); - i+1)) res 0*) - -fun synth (run as (ins,outs,verts,plugs)) = let - val gens = GHZW_Gens.gen_list 3 [GHZW_VertexData.GHZ,GHZW_VertexData.W] - val synth_result = GHZW_DefaultSynth.synth gens run - val (num_classes, num_congs, num_redexes) = GHZW_DefaultSynth.stats synth_result - val details = - "SYNTHESIS RESULTS\n"^ - "-----------------------------------------\n"^ - " "^(Int.toString ins)^" inputs\n"^ - " "^(Int.toString outs)^" outputs\n"^ - " "^(Int.toString verts)^" max vertices\n"^ - " "^(Int.toString plugs)^" max pluggings\n"^ - "-----------------------------------------\n"^ - " Found "^(Int.toString num_classes)^" equivalence classes.\n"^ - " Average class size: "^(Int.toString ((num_congs + num_redexes) div num_classes))^".\n"^ - "-----------------------------------------\n" - val _ = OS.Process.system "mkdir -p output; rm -f output/*" - val _ = write_file "output/details.txt" details - val _ = output_res result - (*val ret = OS.Process.system "./postprocess.rb"*) -in (PolyML.print details; synth_result) -end*) - -(*fun dot_to_svg dot = let - val (ins,outs) = Unix.streamsOf (Unix.execute ("/usr/bin/env",["dot", "-Tsvg"])) - val () = TextIO.output (outs, dot) - (* eat the \n" => "\n" - | SOME ln => ln ^ (read_all ()) - | NONE => "" -in read_all () -end*) - - - - - - - - - - - - - - diff --git a/gui/app_skel.zip b/gui/app_skel.zip deleted file mode 100644 index ea370dd7..00000000 Binary files a/gui/app_skel.zip and /dev/null differ diff --git a/gui/build.xml b/gui/build.xml deleted file mode 100644 index e6da354b..00000000 --- a/gui/build.xml +++ /dev/null @@ -1,248 +0,0 @@ - - - - Build the Quantomatic GUI as a JAR file. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - This target can only run inside the NetBeans IDE. 
- - - - - - - - - - - - - This target can only run inside the NetBeans IDE. - - - - - - - - - - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/blockmodel/StructurallyEquivalent.java b/gui/jung-src/edu/uci/ics/jung/algorithms/blockmodel/StructurallyEquivalent.java deleted file mode 100644 index a9d45734..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/blockmodel/StructurallyEquivalent.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * Created on Jan 28, 2004 - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.blockmodel; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.CollectionUtils; -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * Identifies sets of structurally equivalent vertices in a graph. Vertices - * i and j are structurally equivalent iff the set of i's - * neighbors is identical to the set of j's neighbors, with the - * exception of i and j themselves. This algorithm finds all - * sets of equivalent vertices in O(V^2) time. - * - *

    You can extend this class to have a different definition of equivalence (by - * overriding isStructurallyEquivalent), and may give it hints for - * accelerating the process by overriding canPossiblyCompare. - * (For example, in a bipartite graph, canPossiblyCompare may - * return false for vertices in - * different partitions. This function should be fast.) - * - * @author Danyel Fisher - */ -public class StructurallyEquivalent implements Transformer, VertexPartition> -{ - public VertexPartition transform(Graph g) - { - Set> vertex_pairs = getEquivalentPairs(g); - - Set> rv = new HashSet>(); - Map> intermediate = new HashMap>(); - for (Pair p : vertex_pairs) - { - Set res = intermediate.get(p.getFirst()); - if (res == null) - res = intermediate.get(p.getSecond()); - if (res == null) // we haven't seen this one before - res = new HashSet(); - res.add(p.getFirst()); - res.add(p.getSecond()); - intermediate.put(p.getFirst(), res); - intermediate.put(p.getSecond(), res); - } - rv.addAll(intermediate.values()); - - // pick up the vertices which don't appear in intermediate; they are - // singletons (equivalence classes of size 1) - Collection singletons = CollectionUtils.subtract(g.getVertices(), - intermediate.keySet()); - for (V v : singletons) - { - Set v_set = Collections.singleton(v); - intermediate.put(v, v_set); - rv.add(v_set); - } - - return new VertexPartition(g, intermediate, rv); - } - - /** - * For each vertex pair v, v1 in G, checks whether v and v1 are fully - * equivalent: meaning that they connect to the exact same vertices. (Is - * this regular equivalence, or whathaveyou?) - * - * Returns a Set of Pairs of vertices, where all the vertices in the inner - * Pairs are equivalent. - * - * @param g - */ - protected Set> getEquivalentPairs(Graph g) { - - Set> rv = new HashSet>(); - Set alreadyEquivalent = new HashSet(); - - List l = new ArrayList(g.getVertices()); - - for (V v1 : l) - { - if (alreadyEquivalent.contains(v1)) - continue; - - for (Iterator iterator = l.listIterator(l.indexOf(v1) + 1); iterator.hasNext();) { - V v2 = iterator.next(); - - if (alreadyEquivalent.contains(v2)) - continue; - - if (!canPossiblyCompare(v1, v2)) - continue; - - if (isStructurallyEquivalent(g, v1, v2)) { - Pair p = new Pair(v1, v2); - alreadyEquivalent.add(v2); - rv.add(p); - } - } - } - - return rv; - } - - /** - * Checks whether a pair of vertices are structurally equivalent. - * Specifically, whether v1's predecessors are equal to v2's predecessors, - * and same for successors. 
- * - * @param g the graph in which the structural equivalence comparison is to take place - * @param v1 the vertex to check for structural equivalence to v2 - * @param v2 the vertex to check for structural equivalence to v1 - */ - protected boolean isStructurallyEquivalent(Graph g, V v1, V v2) { - - if( g.degree(v1) != g.degree(v2)) { - return false; - } - - Set n1 = new HashSet(g.getPredecessors(v1)); - n1.remove(v2); - n1.remove(v1); - Set n2 = new HashSet(g.getPredecessors(v2)); - n2.remove(v1); - n2.remove(v2); - - Set o1 = new HashSet(g.getSuccessors(v1)); - Set o2 = new HashSet(g.getSuccessors(v2)); - o1.remove(v1); - o1.remove(v2); - o2.remove(v1); - o2.remove(v2); - - // this neglects self-loops and directed edges from 1 to other - boolean b = (n1.equals(n2) && o1.equals(o2)); - if (!b) - return b; - - // if there's a directed edge v1->v2 then there's a directed edge v2->v1 - b &= ( g.isSuccessor(v1, v2) == g.isSuccessor(v2, v1)); - - // self-loop check - b &= ( g.isSuccessor(v1, v1) == g.isSuccessor(v2, v2)); - - return b; - - } - - /** - * This is a space for optimizations. For example, for a bipartite graph, - * vertices from different partitions cannot possibly be compared. - * - * @param v1 - * @param v2 - */ - protected boolean canPossiblyCompare(V v1, V v2) { - return true; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/blockmodel/VertexPartition.java b/gui/jung-src/edu/uci/ics/jung/algorithms/blockmodel/VertexPartition.java deleted file mode 100644 index b5ec5831..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/blockmodel/VertexPartition.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Feb 3, 2004 - */ -package edu.uci.ics.jung.algorithms.blockmodel; - -import java.util.*; - -import edu.uci.ics.jung.graph.Graph; - - -/** - * Maintains information about a vertex partition of a graph. - * This can be built from a map from vertices to vertex sets - * or from a collection of (disjoint) vertex sets, - * such as those created by various clustering methods. - */ -public class VertexPartition -{ - private Map> vertex_partition_map; - private Collection> vertex_sets; - private Graph graph; - - /** - * Creates an instance based on the specified graph and mapping from vertices - * to vertex sets, and generates a set of partitions based on this mapping. - * @param g the graph over which the vertex partition is defined - * @param partition_map the mapping from vertices to vertex sets (partitions) - */ - public VertexPartition(Graph g, Map> partition_map) - { - this.vertex_partition_map = Collections.unmodifiableMap(partition_map); - this.graph = g; - } - - /** - * Creates an instance based on the specified graph, vertex-set mapping, - * and set of disjoint vertex sets. The vertex-set mapping and vertex - * partitions must be consistent; that is, the mapping must reflect the - * division of vertices into partitions, and each vertex must appear in - * exactly one partition. 
- * @param g the graph over which the vertex partition is defined - * @param partition_map the mapping from vertices to vertex sets (partitions) - * @param vertex_sets the set of disjoint vertex sets - */ - public VertexPartition(Graph g, Map> partition_map, - Collection> vertex_sets) - { - this.vertex_partition_map = Collections.unmodifiableMap(partition_map); - this.vertex_sets = vertex_sets; - this.graph = g; - } - - /** - * Creates an instance based on the specified graph and set of disjoint vertex sets, - * and generates a vertex-to-partition map based on these sets. - * @param g the graph over which the vertex partition is defined - * @param vertex_sets the set of disjoint vertex sets - */ - public VertexPartition(Graph g, Collection> vertex_sets) - { - this.vertex_sets = vertex_sets; - this.graph = g; - } - - /** - * Returns the graph on which the partition is defined. - * @return the graph on which the partition is defined - */ - public Graph getGraph() - { - return graph; - } - - /** - * Returns a map from each vertex in the input graph to its partition. - * This map is generated if it does not already exist. - * @return a map from each vertex in the input graph to a vertex set - */ - public Map> getVertexToPartitionMap() - { - if (vertex_partition_map == null) - { - this.vertex_partition_map = new HashMap>(); - for (Set set : this.vertex_sets) - for (V v : set) - this.vertex_partition_map.put(v, set); - } - return vertex_partition_map; - } - - /** - * Returns a collection of vertex sets, where each vertex in the - * input graph is in exactly one set. - * This collection is generated based on the vertex-to-partition map - * if it does not already exist. - * @return a collection of vertex sets such that each vertex in the - * instance's graph is in exactly one set - */ - public Collection> getVertexPartitions() - { - if (vertex_sets == null) - { - this.vertex_sets = new HashSet>(); - this.vertex_sets.addAll(vertex_partition_map.values()); - } - return vertex_sets; - } - - /** - * Returns the number of partitions. - */ - public int numPartitions() - { - return vertex_sets.size(); - } - - @Override - public String toString() - { - return "Partitions: " + vertex_partition_map; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/blockmodel/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/blockmodel/package.html deleted file mode 100644 index d1cb06ac..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/blockmodel/package.html +++ /dev/null @@ -1,33 +0,0 @@ - - - - - - - -Support for establishing and maintaining graph element equivalence (such as in blockmodeling). -

    -In blockmodeling, groups of vertices are clustered together by similarity -(as if members of a "block" appearing on the diagonal of the graph's adjacency -matrix). -

    -This support currently includes: -

      -
    • VertexPartition: A class that maintains information on a -division of the vertices of a graph into disjoint sets. -
    • StructurallyEquivalent: An algorithm that finds sets of vertices that are -structurally equivalent. -
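As an illustrative sketch of how the two classes above compose (none of this comes from the deleted files; the toy 4-cycle, its labels and the SparseMultigraph implementation are assumptions made for the example): in a 4-cycle a-b-c-d, vertices a and c have exactly the same neighbours {b, d}, and likewise b and d, so the resulting partition holds two equivalence classes.

    // Sketch only: build a 4-cycle and partition it by structural equivalence.
    Graph<String, Integer> cycle = new SparseMultigraph<String, Integer>();
    for (String v : new String[] {"a", "b", "c", "d"}) cycle.addVertex(v);
    cycle.addEdge(1, "a", "b"); cycle.addEdge(2, "b", "c");
    cycle.addEdge(3, "c", "d"); cycle.addEdge(4, "d", "a");
    VertexPartition<String, Integer> partition =
        new StructurallyEquivalent<String, Integer>().transform(cycle);
    // partition.getVertexPartitions() yields { {a, c}, {b, d} }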
    - -

    - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/BicomponentClusterer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/BicomponentClusterer.java deleted file mode 100644 index aa697c7d..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/BicomponentClusterer.java +++ /dev/null @@ -1,162 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.cluster; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.Set; -import java.util.Stack; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.UndirectedGraph; - -/** - * Finds all biconnected components (bicomponents) of an undirected graph. - * A graph is a biconnected component if - * at least 2 vertices must be removed in order to disconnect the graph. (Graphs - * consisting of one vertex, or of two connected vertices, are also biconnected.) Biconnected - * components of three or more vertices have the property that every pair of vertices in the component - * are connected by two or more vertex-disjoint paths. - *

    - * Running time: O(|V| + |E|) where |V| is the number of vertices and |E| is the number of edges - * @see "Depth first search and linear graph algorithms by R. E. Tarjan (1972), SIAM J. Comp." - * - * @author Joshua O'Madadhain - */ -public class BicomponentClusterer implements Transformer, Set>> -{ - protected Map dfs_num; - protected Map high; - protected Map parents; - protected Stack stack; - protected int converse_depth; - - /** - * Constructs a new bicomponent finder - */ - public BicomponentClusterer() { - } - - /** - * Extracts the bicomponents from the graph. - * @param theGraph the graph whose bicomponents are to be extracted - * @return the ClusterSet of bicomponents - */ - public Set> transform(UndirectedGraph theGraph) - { - Set> bicomponents = new LinkedHashSet>(); - - if (theGraph.getVertices().isEmpty()) - return bicomponents; - - // initialize DFS number for each vertex to 0 - dfs_num = new HashMap(); - for (V v : theGraph.getVertices()) - { - dfs_num.put(v, 0); - } - - for (V v : theGraph.getVertices()) - { - if (dfs_num.get(v).intValue() == 0) // if we haven't hit this vertex yet... - { - high = new HashMap(); - stack = new Stack(); - parents = new HashMap(); - converse_depth = theGraph.getVertexCount(); - // find the biconnected components for this subgraph, starting from v - findBiconnectedComponents(theGraph, v, bicomponents); - - // if we only visited one vertex, this method won't have - // ID'd it as a biconnected component, so mark it as one - if (theGraph.getVertexCount() - converse_depth == 1) - { - Set s = new HashSet(); - s.add(v); - bicomponents.add(s); - } - } - } - - return bicomponents; - } - - /** - *

    Stores, in bicomponents, all the biconnected - * components that are reachable from v.

    - * - *

    The algorithm basically proceeds as follows: do a depth-first - * traversal starting from v, marking each vertex with - * a value that indicates the order in which it was encountered (dfs_num), - * and with - * a value that indicates the highest point in the DFS tree that is known - * to be reachable from this vertex using non-DFS edges (high). (Since it - * is measured on non-DFS edges, "high" tells you how far back in the DFS - * tree you can reach by two distinct paths, hence biconnectivity.) - * Each time a new vertex w is encountered, push the edge just traversed - * on a stack, and call this method recursively. If w.high is no greater than - * v.dfs_num, then the contents of the stack down to (v,w) form a - * biconnected component (and v is an articulation point, that is, a - * component boundary). In either case, set v.high to max(v.high, w.high), - * and continue. If w has already been encountered but is - * not v's parent, set v.high to max(v.high, w.dfs_num) and continue. - * - *

    (In case anyone cares, the version of this algorithm on p. 224 of - * Udi Manber's "Introduction to Algorithms: A Creative Approach" seems to be - * wrong: the stack should be initialized outside this method, - * (v,w) should only be put on the stack if w hasn't been seen already, - * and there's no real benefit to putting v on the stack separately: just - * check for (v,w) on the stack rather than v. Had I known this, I could - * have saved myself a few days. JRTOM)
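The recursion described above can be restated more compactly. The sketch below is illustrative only and not a substitute for the findBiconnectedComponents method that follows; it assumes the dfs_num, high, parents, stack and converse_depth members of the surrounding class, with DFS numbers counting down from |V| so that ancestors carry larger numbers.

    protected void visit(UndirectedGraph<V, E> g, V v, Set<Set<V>> bicomponents) {
        int v_num = converse_depth--;                       // number v; ancestors hold larger numbers
        dfs_num.put(v, v_num);
        high.put(v, v_num);                                  // shallowest point known reachable from v
        for (V w : g.getNeighbors(v)) {
            E vw = g.findEdge(v, w);
            if (dfs_num.get(w).intValue() == 0) {            // tree edge: w not yet visited
                parents.put(w, v);
                stack.push(vw);
                visit(g, w, bicomponents);
                if (high.get(w).intValue() <= v_num) {       // w's subtree cannot climb above v, so the
                    Set<V> comp = new HashSet<V>();          // stack down to (v,w) is one bicomponent
                    E e;
                    do {
                        e = stack.pop();
                        comp.addAll(g.getIncidentVertices(e));
                    } while (e != vw);
                    bicomponents.add(comp);
                }
                high.put(v, Math.max(high.get(v).intValue(), high.get(w).intValue()));
            } else if (w != parents.get(v)) {                // back/forward edge: only its DFS number matters
                high.put(v, Math.max(high.get(v).intValue(), dfs_num.get(w).intValue()));
            }
        }
    }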

    - * - */ - protected void findBiconnectedComponents(UndirectedGraph g, V v, Set> bicomponents) - { - int v_dfs_num = converse_depth; - dfs_num.put(v, v_dfs_num); - converse_depth--; - high.put(v, v_dfs_num); - - for (V w : g.getNeighbors(v)) - { - int w_dfs_num = dfs_num.get(w).intValue();//get(w, dfs_num); - E vw = g.findEdge(v,w); - if (w_dfs_num == 0) // w hasn't yet been visited - { - parents.put(w, v); // v is w's parent in the DFS tree - stack.push(vw); - findBiconnectedComponents(g, w, bicomponents); - int w_high = high.get(w).intValue();//get(w, high); - if (w_high <= v_dfs_num) - { - // v disconnects w from the rest of the graph, - // i.e., v is an articulation point - // thus, everything between the top of the stack and - // v is part of a single biconnected component - Set bicomponent = new HashSet(); - E e; - do - { - e = stack.pop(); - bicomponent.addAll(g.getIncidentVertices(e)); - } - while (e != vw); - bicomponents.add(bicomponent); - } - high.put(v, Math.max(w_high, high.get(v).intValue())); - } - else if (w != parents.get(v)) // (v,w) is a back or a forward edge - high.put(v, Math.max(w_dfs_num, high.get(v).intValue())); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/EdgeBetweennessClusterer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/EdgeBetweennessClusterer.java deleted file mode 100644 index 59e4605e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/EdgeBetweennessClusterer.java +++ /dev/null @@ -1,109 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.cluster; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.BetweennessCentrality; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - - -/** - * An algorithm for computing clusters (community structure) in graphs based on edge betweenness. - * The betweenness of an edge is defined as the extent to which that edge lies along - * shortest paths between all pairs of nodes. - * - * This algorithm works by iteratively following the 2 step process: - *
      - *
    • Compute edge betweenness for all edges in the current graph - *
    • Remove edge with highest betweenness - *
    - *

    - * Running time is: O(kmn) where k is the number of edges to remove, m is the total number of edges, and - * n is the total number of vertices. For very sparse graphs the running time is closer to O(kn^2) and for - * graphs with strong community structure, the complexity is even lower. - *
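As a usage sketch of the two-step process above (illustrative only, not part of the original file; the graph variable is assumed to hold a populated JUNG Graph<String, Number>):

    // Remove the 10 highest-betweenness edges, then read the surviving weak components off as clusters.
    // Note that transform() re-inserts the removed edges into the graph before returning.
    EdgeBetweennessClusterer<String, Number> clusterer =
        new EdgeBetweennessClusterer<String, Number>(10);
    Set<Set<String>> clusters = clusterer.transform(graph);
    List<Number> removedEdges = clusterer.getEdgesRemoved();  // in removal order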

    - * This algorithm is a slight modification of the algorithm discussed below in that the number of edges - * to be removed is parameterized. - * @author Scott White - * @author Tom Nelson (converted to jung2) - * @see "Community structure in social and biological networks by Michelle Girvan and Mark Newman" - */ -public class EdgeBetweennessClusterer implements Transformer,Set>> { - private int mNumEdgesToRemove; - private Map> edges_removed; - - /** - * Constructs a new clusterer for the specified graph. - * @param numEdgesToRemove the number of edges to be progressively removed from the graph - */ - public EdgeBetweennessClusterer(int numEdgesToRemove) { - mNumEdgesToRemove = numEdgesToRemove; - edges_removed = new LinkedHashMap>(); - } - - /** - * Finds the set of clusters which have the strongest "community structure". - * The more edges removed the smaller and more cohesive the clusters. - * @param graph the graph - */ - public Set> transform(Graph graph) { - - if (mNumEdgesToRemove < 0 || mNumEdgesToRemove > graph.getEdgeCount()) { - throw new IllegalArgumentException("Invalid number of edges passed in."); - } - - edges_removed.clear(); - - for (int k=0;k bc = new BetweennessCentrality(graph); - E to_remove = null; - double score = 0; - for (E e : graph.getEdges()) - if (bc.getEdgeScore(e) > score) - { - to_remove = e; - score = bc.getEdgeScore(e); - } - edges_removed.put(to_remove, graph.getEndpoints(to_remove)); - graph.removeEdge(to_remove); - } - - WeakComponentClusterer wcSearch = new WeakComponentClusterer(); - Set> clusterSet = wcSearch.transform(graph); - - for (Map.Entry> entry : edges_removed.entrySet()) - { - Pair endpoints = entry.getValue(); - graph.addEdge(entry.getKey(), endpoints.getFirst(), endpoints.getSecond()); - } - return clusterSet; - } - - /** - * Retrieves the list of all edges that were removed - * (assuming extract(...) was previously called). - * The edges returned - * are stored in order in which they were removed. - * - * @return the edges in the original graph - */ - public List getEdgesRemoved() - { - return new ArrayList(edges_removed.keySet()); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/VoltageClusterer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/VoltageClusterer.java deleted file mode 100644 index 859c0630..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/VoltageClusterer.java +++ /dev/null @@ -1,366 +0,0 @@ -/* - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 12, 2004 - */ -package edu.uci.ics.jung.algorithms.cluster; - -import edu.uci.ics.jung.algorithms.scoring.VoltageScorer; -import edu.uci.ics.jung.algorithms.util.DiscreteDistribution; -import edu.uci.ics.jung.algorithms.util.KMeansClusterer; -import edu.uci.ics.jung.algorithms.util.KMeansClusterer.NotEnoughClustersException; -import edu.uci.ics.jung.graph.Graph; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; - -/** - *

    Clusters vertices of a Graph based on their ranks as - * calculated by VoltageScorer. This algorithm is based on, - * but not identical with, the method described in the paper below. - * The primary difference is that Wu and Huberman assume a priori that the clusters - * are of approximately the same size, and therefore use a more complex - * method than k-means (which is used here) for determining cluster - * membership based on co-occurrence data.

    - * - *

    The algorithm proceeds as follows: - *

      - *
    • first, generate a set of candidate clusters as follows: - *
        - *
      • pick a (widely separated) vertex pair, run VoltageScorer - *
      • group the vertices in two clusters according to their voltages - *
      • store resulting candidate clusters - *
      - *
    • second, generate k-1 clusters as follows: - *
        - *
      • pick a vertex v as a cluster 'seed' - *
        (Wu/Huberman: most frequent vertex in candidate clusters) - *
      • calculate co-occurrence over all candidate clusters of v with each other - * vertex - *
      • separate co-occurrence counts into high/low; - * high vertices constitute a cluster - *
      • remove v's vertices from candidate clusters; continue - *
      - *
    • finally, remaining unassigned vertices are assigned to the kth ("garbage") - * cluster. - *

    - * - *

    NOTE: Depending on how the co-occurrence data splits the data into - * clusters, the number of clusters returned by this algorithm may be less than the - * number of clusters requested. The number of clusters will never be more than - * the number requested, however.
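A usage sketch of the procedure above (illustrative only, not part of the original file; graph and seed are assumed to be a populated JUNG Graph<String, Number> and one of its vertices):

    // 100 candidate two-way splits feed the co-occurrence counts; then ask for at most 4 clusters.
    VoltageClusterer<String, Number> vc = new VoltageClusterer<String, Number>(graph, 100);
    Collection<Set<String>> clusters = vc.cluster(4);          // may return fewer than 4, per the NOTE above
    Collection<Set<String>> community = vc.getCommunity(seed); // two-way split centred on one vertex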

    - * - * @author Joshua O'Madadhain - * @see "'Finding communities in linear time: a physics approach', Fang Wu and Bernardo Huberman, http://www.hpl.hp.com/research/idl/papers/linear/" - * @see VoltageScorer - * @see KMeansClusterer - */ -public class VoltageClusterer -{ - protected int num_candidates; - protected KMeansClusterer kmc; - protected Random rand; - protected Graph g; - - /** - * Creates an instance of a VoltageCluster with the specified parameters. - * These are mostly parameters that are passed directly to VoltageScorer - * and KMeansClusterer. - * - * @param num_candidates the number of candidate clusters to create - */ - public VoltageClusterer(Graph g, int num_candidates) - { - if (num_candidates < 1) - throw new IllegalArgumentException("must generate >=1 candidates"); - - this.num_candidates = num_candidates; - this.kmc = new KMeansClusterer(); - rand = new Random(); - this.g = g; - } - - protected void setRandomSeed(int random_seed) - { - rand = new Random(random_seed); - } - - /** - * Returns a community (cluster) centered around v. - * @param v the vertex whose community we wish to discover - */ - public Collection> getCommunity(V v) - { - return cluster_internal(v, 2); - } - - /** - * Clusters the vertices of g into - * num_clusters clusters, based on their connectivity. - * @param num_clusters the number of clusters to identify - */ - public Collection> cluster(int num_clusters) - { - return cluster_internal(null, num_clusters); - } - - /** - * Does the work of getCommunity and cluster. - * @param origin the vertex around which clustering is to be done - * @param num_clusters the (maximum) number of clusters to find - */ - protected Collection> cluster_internal(V origin, int num_clusters) - { - // generate candidate clusters - // repeat the following 'samples' times: - // * pick (widely separated) vertex pair, run VoltageScorer - // * use k-means to identify 2 communities in ranked graph - // * store resulting candidate communities - ArrayList v_array = new ArrayList(g.getVertices()); - - LinkedList> candidates = new LinkedList>(); - - for (int j = 0; j < num_candidates; j++) - { - V source; - if (origin == null) - source = v_array.get((int)(rand.nextDouble() * v_array.size())); - else - source = origin; - V target = null; - do - { - target = v_array.get((int)(rand.nextDouble() * v_array.size())); - } - while (source == target); - VoltageScorer vs = new VoltageScorer(g, source, target); - vs.evaluate(); - - Map voltage_ranks = new HashMap(); - for (V v : g.getVertices()) - voltage_ranks.put(v, new double[] {vs.getVertexScore(v)}); - -// addOneCandidateCluster(candidates, voltage_ranks); - addTwoCandidateClusters(candidates, voltage_ranks); - } - - // repeat the following k-1 times: - // * pick a vertex v as a cluster seed - // (Wu/Huberman: most frequent vertex in candidates) - // * calculate co-occurrence (in candidate clusters) - // of this vertex with all others - // * use k-means to separate co-occurrence counts into high/low; - // high vertices are a cluster - // * remove v's vertices from candidate clusters - - Collection> clusters = new LinkedList>(); - Set remaining = new HashSet(g.getVertices()); - - List seed_candidates = getSeedCandidates(candidates); - int seed_index = 0; - - for (int j = 0; j < (num_clusters - 1); j++) - { - if (remaining.isEmpty()) - break; - - V seed; - if (seed_index == 0 && origin != null) - seed = origin; - else - { - do { seed = seed_candidates.get(seed_index++); } - while (!remaining.contains(seed)); - } - - Map occur_counts 
= getObjectCounts(candidates, seed); - if (occur_counts.size() < 2) - break; - - // now that we have the counts, cluster them... - try - { - Collection> high_low = kmc.cluster(occur_counts, 2); - // ...get the cluster with the highest-valued centroid... - Iterator> h_iter = high_low.iterator(); - Map cluster1 = h_iter.next(); - Map cluster2 = h_iter.next(); - double[] centroid1 = DiscreteDistribution.mean(cluster1.values()); - double[] centroid2 = DiscreteDistribution.mean(cluster2.values()); - Set new_cluster; - if (centroid1[0] >= centroid2[0]) - new_cluster = cluster1.keySet(); - else - new_cluster = cluster2.keySet(); - - // ...remove the elements of new_cluster from each candidate... - for (Set cluster : candidates) - cluster.removeAll(new_cluster); - clusters.add(new_cluster); - remaining.removeAll(new_cluster); - } - catch (NotEnoughClustersException nece) - { - // all remaining vertices are in the same cluster - break; - } - } - - // identify remaining vertices (if any) as a 'garbage' cluster - if (!remaining.isEmpty()) - clusters.add(remaining); - - return clusters; - } - - /** - * Do k-means with three intervals and pick the - * smaller two clusters (presumed to be on the ends); this is closer to the Wu-Huberman method. - * @param candidates - * @param voltage_ranks - */ - protected void addTwoCandidateClusters(LinkedList> candidates, - Map voltage_ranks) - { - try - { - List> clusters = new ArrayList>(kmc.cluster(voltage_ranks, 3)); - boolean b01 = clusters.get(0).size() > clusters.get(1).size(); - boolean b02 = clusters.get(0).size() > clusters.get(2).size(); - boolean b12 = clusters.get(1).size() > clusters.get(2).size(); - if (b01 && b02) - { - candidates.add(clusters.get(1).keySet()); - candidates.add(clusters.get(2).keySet()); - } - else if (!b01 && b12) - { - candidates.add(clusters.get(0).keySet()); - candidates.add(clusters.get(2).keySet()); - } - else if (!b02 && !b12) - { - candidates.add(clusters.get(0).keySet()); - candidates.add(clusters.get(1).keySet()); - } - } - catch (NotEnoughClustersException e) - { - // no valid candidates, continue - } - } - - /** - * alternative to addTwoCandidateClusters(): cluster vertices by voltages into 2 clusters. - * We only consider the smaller of the two clusters returned - * by k-means to be a 'true' cluster candidate; the other is a garbage cluster. - * @param candidates - * @param voltage_ranks - */ - protected void addOneCandidateCluster(LinkedList> candidates, - Map voltage_ranks) - { - try - { - List> clusters; - clusters = new ArrayList>(kmc.cluster(voltage_ranks, 2)); - if (clusters.get(0).size() < clusters.get(1).size()) - candidates.add(clusters.get(0).keySet()); - else - candidates.add(clusters.get(1).keySet()); - } - catch (NotEnoughClustersException e) - { - // no valid candidates, continue - } - } - - /** - * Returns an array of cluster seeds, ranked in decreasing order - * of number of appearances in the specified collection of candidate - * clusters. 
- * @param candidates - */ - protected List getSeedCandidates(Collection> candidates) - { - final Map occur_counts = getObjectCounts(candidates, null); - - ArrayList occurrences = new ArrayList(occur_counts.keySet()); - Collections.sort(occurrences, new MapValueArrayComparator(occur_counts)); - - System.out.println("occurrences: "); - for (int i = 0; i < occurrences.size(); i++) - System.out.println(occur_counts.get(occurrences.get(i))[0]); - - return occurrences; - } - - protected Map getObjectCounts(Collection> candidates, V seed) - { - Map occur_counts = new HashMap(); - for (V v : g.getVertices()) - occur_counts.put(v, new double[]{0}); - - for (Set candidate : candidates) - { - if (seed == null) - System.out.println(candidate.size()); - if (seed == null || candidate.contains(seed)) - { - for (V element : candidate) - { - double[] count = occur_counts.get(element); - count[0]++; - } - } - } - - if (seed == null) - { - System.out.println("occur_counts size: " + occur_counts.size()); - for (V v : occur_counts.keySet()) - System.out.println(occur_counts.get(v)[0]); - } - - return occur_counts; - } - - protected class MapValueArrayComparator implements Comparator - { - private Map map; - - protected MapValueArrayComparator(Map map) - { - this.map = map; - } - - public int compare(V o1, V o2) - { - double[] count0 = map.get(o1); - double[] count1 = map.get(o2); - if (count0[0] < count1[0]) - return 1; - else if (count0[0] > count1[0]) - return -1; - return 0; - } - - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/WeakComponentClusterer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/WeakComponentClusterer.java deleted file mode 100644 index cb79a784..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/WeakComponentClusterer.java +++ /dev/null @@ -1,73 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.cluster; - -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -import org.apache.commons.collections15.Buffer; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.buffer.UnboundedFifoBuffer; - -import edu.uci.ics.jung.graph.Graph; - - - -/** - * Finds all weak components in a graph as sets of vertex sets. A weak component is defined as - * a maximal subgraph in which all pairs of vertices in the subgraph are reachable from one - * another in the underlying undirected subgraph. - *

    This implementation identifies components as sets of vertex sets. - * To create the induced graphs from any or all of these vertex sets, - * see algorithms.filters.FilterUtils. - *

    - * Running time: O(|V| + |E|) where |V| is the number of vertices and |E| is the number of edges. - * @author Scott White - */ -public class WeakComponentClusterer implements Transformer, Set>> -{ - /** - * Extracts the weak components from a graph. - * @param graph the graph whose weak components are to be extracted - * @return the list of weak components - */ - public Set> transform(Graph graph) { - - Set> clusterSet = new HashSet>(); - - HashSet unvisitedVertices = new HashSet(graph.getVertices()); - - while (!unvisitedVertices.isEmpty()) { - Set cluster = new HashSet(); - V root = unvisitedVertices.iterator().next(); - unvisitedVertices.remove(root); - cluster.add(root); - - Buffer queue = new UnboundedFifoBuffer(); - queue.add(root); - - while (!queue.isEmpty()) { - V currentVertex = queue.remove(); - Collection neighbors = graph.getNeighbors(currentVertex); - - for(V neighbor : neighbors) { - if (unvisitedVertices.contains(neighbor)) { - queue.add(neighbor); - unvisitedVertices.remove(neighbor); - cluster.add(neighbor); - } - } - } - clusterSet.add(cluster); - } - return clusterSet; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/package.html deleted file mode 100644 index f8bdb227..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/cluster/package.html +++ /dev/null @@ -1,35 +0,0 @@ - - - - - - - -Mechanisms for identifying clusters in graphs. Where these clusters define disjoint sets of vertices, -they may be used to define a VertexPartition for more convenient manipulation of the vertex/set -relationships. - -Current clustering algorithms include: -

      -
    • BicomponentClusterer: finds all subsets of vertices for which at least -2 vertices must be removed in order to disconnect the induced subgraphs. -
    • EdgeBetweennessClusterer: identifies vertex clusters by removing the edges of the highest -'betweenness' scores (see the importance/scoring package). -
    • VoltageClusterer: Clusters vertices based on their ranks as -calculated by VoltageScorer. -
    • WeakComponentClusterer: Clusters vertices based on their membership in weakly -connected components of a graph. -
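To make the listing above concrete, the sketch below (illustrative only, not part of the deleted sources; graph is an assumed, populated JUNG Graph<String, Number>) runs the last clusterer and then induces one subgraph per weak component via FilterUtils, which appears further down in this diff. The intermediate list is only a precaution, since the exact generic bounds of createAllInducedSubgraphs are not visible in this stripped copy of the file.

    WeakComponentClusterer<String, Number> wcc = new WeakComponentClusterer<String, Number>();
    Set<Set<String>> components = wcc.transform(graph);
    List<Collection<String>> vertexSets = new ArrayList<Collection<String>>(components);
    Collection<Graph<String, Number>> subgraphs =
        FilterUtils.createAllInducedSubgraphs(vertexSets, graph);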
    - - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/filters/EdgePredicateFilter.java b/gui/jung-src/edu/uci/ics/jung/algorithms/filters/EdgePredicateFilter.java deleted file mode 100644 index 5e3be06d..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/filters/EdgePredicateFilter.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Created on May 19, 2008 - * - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.filters; - -import org.apache.commons.collections15.Predicate; - -import edu.uci.ics.jung.graph.Graph; - -/** - * Transforms the input graph into one which contains only those edges - * that pass the specified Predicate. The filtered graph - * is a copy of the original graph (same type, uses the same vertex and - * edge objects). All vertices from the original graph - * are copied into the new graph (even if they are not incident to any - * edges in the new graph). - * - * @author Joshua O'Madadhain - */ -public class EdgePredicateFilter implements Filter -{ - protected Predicate edge_pred; - - /** - * Creates an instance based on the specified edge Predicate. - * @param edge_pred the predicate that specifies which edges to add to the filtered graph - */ - public EdgePredicateFilter(Predicate edge_pred) - { - this.edge_pred = edge_pred; - } - - @SuppressWarnings("unchecked") - public Graph transform(Graph g) - { - Graph filtered; - try - { - filtered = g.getClass().newInstance(); - } - catch (InstantiationException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - catch (IllegalAccessException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - - for (V v : g.getVertices()) - filtered.addVertex(v); - - for (E e : g.getEdges()) - { - if (edge_pred.evaluate(e)) - filtered.addEdge(e, g.getIncidentVertices(e)); - } - - return filtered; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/filters/Filter.java b/gui/jung-src/edu/uci/ics/jung/algorithms/filters/Filter.java deleted file mode 100644 index a62895cc..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/filters/Filter.java +++ /dev/null @@ -1,26 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.filters; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; - - - -/** - * An interface for classes that return a subset of the input Graph - * as a Graph. The Graph returned may be either a - * new graph or a view into an existing graph; the documentation for the filter - * must specify which. 
- * - * @author danyelf - */ -public interface Filter extends Transformer, Graph>{ } diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/filters/FilterUtils.java b/gui/jung-src/edu/uci/ics/jung/algorithms/filters/FilterUtils.java deleted file mode 100644 index 4845c0f3..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/filters/FilterUtils.java +++ /dev/null @@ -1,98 +0,0 @@ -/** - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Jun 7, 2008 - * - */ -package edu.uci.ics.jung.algorithms.filters; - -import java.util.ArrayList; -import java.util.Collection; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Utility methods relating to filtering. - */ -public class FilterUtils -{ - /** - * Creates the induced subgraph from graph whose vertex set - * is equal to vertices. The graph returned has - * vertices as its vertex set, and includes all edges from - * graph which are incident only to elements of - * vertices. - * - * @param the vertex type - * @param the edge type - * @param vertices the subset of graph's vertices around - * which the subgraph is to be constructed - * @param graph the graph whose subgraph is to be constructed - * @return the subgraph induced by vertices - * @throws IllegalArgumentException if any vertex in - * vertices is not in graph - */ - @SuppressWarnings("unchecked") - public static > G createInducedSubgraph(Collection - vertices, G graph) - { - G subgraph = null; - try - { - subgraph = (G)graph.getClass().newInstance(); - - for (V v : vertices) - { - if (!graph.containsVertex(v)) - throw new IllegalArgumentException("Vertex " + v + - " is not an element of " + graph); - subgraph.addVertex(v); - } - - for (E e : graph.getEdges()) - { - Collection incident = graph.getIncidentVertices(e); - if (vertices.containsAll(incident)) - subgraph.addEdge(e, incident, graph.getEdgeType(e)); - } - } - catch (InstantiationException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - catch (IllegalAccessException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - return subgraph; - } - - /** - * Creates the induced subgraphs of graph associated with each - * element of vertex_collections. - * Note that these vertex collections need not be disjoint. 
- * @param the vertex type - * @param the edge type - * @param vertex_collections the collections of vertex collections to be - * used to induce the subgraphs - * @param graph the graph whose subgraphs are to be created - * @return the induced subgraphs of graph associated with each - * element of vertex_collections - */ - public static > Collection - createAllInducedSubgraphs(Collection> - vertex_collections, G graph) - { - Collection subgraphs = new ArrayList(); - - for (Collection vertex_set : vertex_collections) - subgraphs.add(createInducedSubgraph(vertex_set, graph)); - - return subgraphs; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/filters/KNeighborhoodFilter.java b/gui/jung-src/edu/uci/ics/jung/algorithms/filters/KNeighborhoodFilter.java deleted file mode 100644 index 62bcfc29..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/filters/KNeighborhoodFilter.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Dec 26, 2001 - * - */ -package edu.uci.ics.jung.algorithms.filters; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import edu.uci.ics.jung.algorithms.filters.Filter; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * A filter used to extract the k-neighborhood around one or more root node(s). - * The k-neighborhood is defined as the subgraph induced by the set of - * vertices that are k or fewer hops (unweighted shortest-path distance) - * away from the root node. - * - * @author Danyel Fisher - */ -public class KNeighborhoodFilter implements Filter { - - /** - * The type of edge to follow for defining the neighborhood. - */ - public static enum EdgeType { IN_OUT, IN, OUT } - private Set rootNodes; - private int radiusK; - private EdgeType edgeType; - - /** - * Constructs a new instance of the filter. - * @param rootNodes the set of root nodes - * @param radiusK the neighborhood radius around the root set - * @param edgeType 0 for in/out edges, 1 for in-edges, 2 for out-edges - */ - public KNeighborhoodFilter(Set rootNodes, int radiusK, EdgeType edgeType) { - this.rootNodes = rootNodes; - this.radiusK = radiusK; - this.edgeType = edgeType; - } - - /** - * Constructs a new instance of the filter. - * @param rootNode the root node - * @param radiusK the neighborhood radius around the root set - * @param edgeType 0 for in/out edges, 1 for in-edges, 2 for out-edges - */ - public KNeighborhoodFilter(V rootNode, int radiusK, EdgeType edgeType) { - this.rootNodes = new HashSet(); - this.rootNodes.add(rootNode); - this.radiusK = radiusK; - this.edgeType = edgeType; - } - - /** - * Constructs an unassembled graph containing the k-neighborhood around the root node(s). 
- */ - @SuppressWarnings("unchecked") - public Graph transform(Graph graph) { - // generate a Set of Vertices we want - // add all to the UG - int currentDepth = 0; - List currentVertices = new ArrayList(); - Set visitedVertices = new HashSet(); - Set visitedEdges = new HashSet(); - Set acceptedVertices = new HashSet(); - //Copy, mark, and add all the root nodes to the new subgraph - for (V currentRoot : rootNodes) { - - visitedVertices.add(currentRoot); - acceptedVertices.add(currentRoot); - currentVertices.add(currentRoot); - } - ArrayList newVertices = null; - //Use BFS to locate the neighborhood around the root nodes within distance k - while (currentDepth < radiusK) { - newVertices = new ArrayList(); - for (V currentVertex : currentVertices) { - - Collection edges = null; - switch (edgeType) { - case IN_OUT : - edges = graph.getIncidentEdges(currentVertex); - break; - case IN : - edges = graph.getInEdges(currentVertex); - break; - case OUT : - edges = graph.getOutEdges(currentVertex); - break; - } - for (E currentEdge : edges) { - - V currentNeighbor = - graph.getOpposite(currentVertex, currentEdge); - if (!visitedEdges.contains(currentEdge)) { - visitedEdges.add(currentEdge); - if (!visitedVertices.contains(currentNeighbor)) { - visitedVertices.add(currentNeighbor); - acceptedVertices.add(currentNeighbor); - newVertices.add(currentNeighbor); - } - } - } - } - currentVertices = newVertices; - currentDepth++; - } - Graph ug = null; - try { - ug = graph.getClass().newInstance(); - for(E edge : graph.getEdges()) { - Pair endpoints = graph.getEndpoints(edge); - if(acceptedVertices.containsAll(endpoints)) { - ug.addEdge(edge, endpoints.getFirst(), endpoints.getSecond()); - } - } - } - catch (InstantiationException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - catch (IllegalAccessException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - return ug; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/filters/VertexPredicateFilter.java b/gui/jung-src/edu/uci/ics/jung/algorithms/filters/VertexPredicateFilter.java deleted file mode 100644 index 4543b424..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/filters/VertexPredicateFilter.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Created on May 19, 2008 - * - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.filters; - -import java.util.Collection; - -import org.apache.commons.collections15.Predicate; - -import edu.uci.ics.jung.graph.Graph; - -/** - * Transforms the input graph into one which contains only those vertices - * that pass the specified Predicate. The filtered graph - * is a copy of the original graph (same type, uses the same vertex and - * edge objects). Only those edges whose entire incident vertex collection - * passes the predicate are copied into the new graph. - * - * @author Joshua O'Madadhain - */ -public class VertexPredicateFilter implements Filter -{ - protected Predicate vertex_pred; - - /** - * Creates an instance based on the specified vertex Predicate. 
- * @param vertex_pred the predicate that specifies which vertices to add to the filtered graph - */ - public VertexPredicateFilter(Predicate vertex_pred) - { - this.vertex_pred = vertex_pred; - } - - @SuppressWarnings("unchecked") - public Graph transform(Graph g) - { - Graph filtered; - try - { - filtered = g.getClass().newInstance(); - } - catch (InstantiationException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - catch (IllegalAccessException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - - for (V v : g.getVertices()) - if (vertex_pred.evaluate(v)) - filtered.addVertex(v); - - Collection filtered_vertices = filtered.getVertices(); - - for (E e : g.getEdges()) - { - Collection incident = g.getIncidentVertices(e); - if (filtered_vertices.containsAll(incident)) - filtered.addEdge(e, incident); - } - - return filtered; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/filters/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/filters/package.html deleted file mode 100644 index 0f9a018f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/filters/package.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - -Filtering mechanisms that produce subgraphs of an original graph. -Currently includes: -
      -
    • Filter: an interface for graph filters
    • {Edge,Vertex}PredicateFilter: graph filters that return the induced subgraph according to the specified edge or vertex Predicate, respectively.
    • KNeighborhoodFilter: a filter that returns the subgraph induced by vertices within (unweighted) distance k of a specified vertex. A usage sketch follows this list.
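The VertexPredicateFilter and KNeighborhoodFilter sources deleted above show their constructors and transform methods; a minimal sketch of how they might be used (the graph and the predicate are illustrative assumptions) is:

import org.apache.commons.collections15.Predicate;

import edu.uci.ics.jung.algorithms.filters.KNeighborhoodFilter;
import edu.uci.ics.jung.algorithms.filters.VertexPredicateFilter;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;

public class FilterSketch {
    public static void main(String[] args) {
        Graph<Integer, String> g = new SparseMultigraph<Integer, String>();
        for (int i = 0; i < 6; i++) g.addVertex(i);
        g.addEdge("a", 0, 1); g.addEdge("b", 1, 2);
        g.addEdge("c", 2, 3); g.addEdge("d", 3, 4); g.addEdge("e", 4, 5);

        // Keep only even-numbered vertices; an edge survives only if all of
        // its incident vertices survive (see VertexPredicateFilter.transform above).
        VertexPredicateFilter<Integer, String> evenFilter =
                new VertexPredicateFilter<Integer, String>(new Predicate<Integer>() {
                    public boolean evaluate(Integer v) { return v % 2 == 0; }
                });
        Graph<Integer, String> evens = evenFilter.transform(g);

        // Subgraph induced by vertices within 2 hops of vertex 0,
        // following edges in either direction.
        KNeighborhoodFilter<Integer, String> neighborhood =
                new KNeighborhoodFilter<Integer, String>(0, 2, KNeighborhoodFilter.EdgeType.IN_OUT);
        Graph<Integer, String> near0 = neighborhood.transform(g);

        System.out.println(evens.getVertexCount() + " " + near0.getVertexCount());
    }
}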
    - - - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/flows/EdmondsKarpMaxFlow.java b/gui/jung-src/edu/uci/ics/jung/algorithms/flows/EdmondsKarpMaxFlow.java deleted file mode 100644 index af9ee34c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/flows/EdmondsKarpMaxFlow.java +++ /dev/null @@ -1,314 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.flows; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Buffer; -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.buffer.UnboundedFifoBuffer; - -import edu.uci.ics.jung.algorithms.util.IterativeProcess; -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.util.EdgeType; - - -/** - * Implements the Edmonds-Karp maximum flow algorithm for solving the maximum flow problem. - * After the algorithm is executed, - * the input {@code Map} is populated with a {@code Number} for each edge that indicates - * the flow along that edge. - *

    - * An example of using this algorithm is as follows: - *

    - * EdmondsKarpMaxFlow ek = new EdmondsKarpMaxFlow(graph, source, sink, edge_capacities, edge_flows, 
    - * edge_factory);
    - * ek.evaluate(); // This instructs the class to compute the max flow
    - * 
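The Javadoc example above leaves graph, source, sink, edge_capacities, edge_flows, and edge_factory undefined. A small self-contained sketch that fills them in against the constructor shown below (the concrete graph, capacities, and names are illustrative assumptions) could be:

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.collections15.Factory;
import org.apache.commons.collections15.Transformer;

import edu.uci.ics.jung.algorithms.flows.EdmondsKarpMaxFlow;
import edu.uci.ics.jung.graph.DirectedGraph;
import edu.uci.ics.jung.graph.DirectedSparseMultigraph;

public class MaxFlowSketch {
    public static void main(String[] args) {
        DirectedGraph<Integer, String> graph = new DirectedSparseMultigraph<Integer, String>();
        Integer source = 0, sink = 3;
        for (int i = 0; i <= 3; i++) graph.addVertex(i);
        final Map<String, Number> capacities = new HashMap<String, Number>();
        graph.addEdge("s->1", 0, 1); capacities.put("s->1", 4);
        graph.addEdge("s->2", 0, 2); capacities.put("s->2", 2);
        graph.addEdge("1->t", 1, 3); capacities.put("1->t", 3);
        graph.addEdge("2->t", 2, 3); capacities.put("2->t", 5);

        // Capacity lookup, output map for the computed flows, and a factory
        // used by the solver to create residual ("back") edges.
        Transformer<String, Number> edge_capacities = new Transformer<String, Number>() {
            public Number transform(String e) { return capacities.get(e); }
        };
        Map<String, Number> edge_flows = new HashMap<String, Number>();
        Factory<String> edge_factory = new Factory<String>() {
            private int next = 0;
            public String create() { return "back-" + (next++); }
        };

        EdmondsKarpMaxFlow<Integer, String> ek = new EdmondsKarpMaxFlow<Integer, String>(
                graph, source, sink, edge_capacities, edge_flows, edge_factory);
        ek.evaluate();
        System.out.println("max flow = " + ek.getMaxFlow() + ", flows = " + edge_flows);
    }
}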
    - * - * @see "Introduction to Algorithms by Cormen, Leiserson, Rivest, and Stein." - * @see "Network Flows by Ahuja, Magnanti, and Orlin." - * @see "Theoretical improvements in algorithmic efficiency for network flow problems by Edmonds and Karp, 1972." - * @author Scott White, adapted to jung2 by Tom Nelson - */ -public class EdmondsKarpMaxFlow extends IterativeProcess { - - private DirectedGraph mFlowGraph; - private DirectedGraph mOriginalGraph; - private V source; - private V target; - private int mMaxFlow; - private Set mSourcePartitionNodes; - private Set mSinkPartitionNodes; - private Set mMinCutEdges; - - private Map residualCapacityMap = new HashMap(); - private Map parentMap = new HashMap(); - private Map parentCapacityMap = new HashMap(); - private Transformer edgeCapacityTransformer; - private Map edgeFlowMap; - private Factory edgeFactory; - - /** - * Constructs a new instance of the algorithm solver for a given graph, source, and sink. - * Source and sink vertices must be elements of the specified graph, and must be - * distinct. - * @param directedGraph the flow graph - * @param source the source vertex - * @param sink the sink vertex - * @param edgeCapacityTransformer the transformer that gets the capacity for each edge. - * @param edgeFlowMap the map where the solver will place the value of the flow for each edge - * @param edgeFactory used to create new edge instances for backEdges - */ - @SuppressWarnings("unchecked") - public EdmondsKarpMaxFlow(DirectedGraph directedGraph, V source, V sink, - Transformer edgeCapacityTransformer, Map edgeFlowMap, - Factory edgeFactory) { - - if(directedGraph.getVertices().contains(source) == false || - directedGraph.getVertices().contains(sink) == false) { - throw new IllegalArgumentException("source and sink vertices must be elements of the specified graph"); - } - if (source.equals(sink)) { - throw new IllegalArgumentException("source and sink vertices must be distinct"); - } - mOriginalGraph = directedGraph; - - this.source = source; - this.target = sink; - this.edgeFlowMap = edgeFlowMap; - this.edgeCapacityTransformer = edgeCapacityTransformer; - this.edgeFactory = edgeFactory; - try { - mFlowGraph = directedGraph.getClass().newInstance(); - for(E e : mOriginalGraph.getEdges()) { - mFlowGraph.addEdge(e, mOriginalGraph.getSource(e), - mOriginalGraph.getDest(e), EdgeType.DIRECTED); - } - for(V v : mOriginalGraph.getVertices()) { - mFlowGraph.addVertex(v); - } - - } catch (InstantiationException e) { - e.printStackTrace(); - } catch (IllegalAccessException e) { - e.printStackTrace(); - } - mMaxFlow = 0; - mSinkPartitionNodes = new HashSet(); - mSourcePartitionNodes = new HashSet(); - mMinCutEdges = new HashSet(); - } - - private void clearParentValues() { - parentMap.clear(); - parentCapacityMap.clear(); - parentCapacityMap.put(source, Integer.MAX_VALUE); - parentMap.put(source, source); - } - - protected boolean hasAugmentingPath() { - - mSinkPartitionNodes.clear(); - mSourcePartitionNodes.clear(); - mSinkPartitionNodes.addAll(mFlowGraph.getVertices()); - - Set visitedEdgesMap = new HashSet(); - Buffer queue = new UnboundedFifoBuffer(); - queue.add(source); - - while (!queue.isEmpty()) { - V currentVertex = queue.remove(); - mSinkPartitionNodes.remove(currentVertex); - mSourcePartitionNodes.add(currentVertex); - Number currentCapacity = parentCapacityMap.get(currentVertex); - - Collection neighboringEdges = mFlowGraph.getOutEdges(currentVertex); - - for (E neighboringEdge : neighboringEdges) { - - V neighboringVertex = 
mFlowGraph.getDest(neighboringEdge); - - Number residualCapacity = residualCapacityMap.get(neighboringEdge); - if (residualCapacity.intValue() <= 0 || visitedEdgesMap.contains(neighboringEdge)) - continue; - - V neighborsParent = parentMap.get(neighboringVertex); - Number neighborCapacity = parentCapacityMap.get(neighboringVertex); - int newCapacity = Math.min(residualCapacity.intValue(),currentCapacity.intValue()); - - if ((neighborsParent == null) || newCapacity > neighborCapacity.intValue()) { - parentMap.put(neighboringVertex, currentVertex); - parentCapacityMap.put(neighboringVertex, new Integer(newCapacity)); - visitedEdgesMap.add(neighboringEdge); - if (neighboringVertex != target) { - queue.add(neighboringVertex); - } - } - } - } - - boolean hasAugmentingPath = false; - Number targetsParentCapacity = parentCapacityMap.get(target); - if (targetsParentCapacity != null && targetsParentCapacity.intValue() > 0) { - updateResidualCapacities(); - hasAugmentingPath = true; - } - clearParentValues(); - return hasAugmentingPath; - } - - @Override - public void step() { - while (hasAugmentingPath()) { - } - computeMinCut(); -// return 0; - } - - private void computeMinCut() { - - for (E e : mOriginalGraph.getEdges()) { - - V source = mOriginalGraph.getSource(e); - V destination = mOriginalGraph.getDest(e); - if (mSinkPartitionNodes.contains(source) && mSinkPartitionNodes.contains(destination)) { - continue; - } - if (mSourcePartitionNodes.contains(source) && mSourcePartitionNodes.contains(destination)) { - continue; - } - if (mSinkPartitionNodes.contains(source) && mSourcePartitionNodes.contains(destination)) { - continue; - } - mMinCutEdges.add(e); - } - } - - /** - * Returns the value of the maximum flow from the source to the sink. - */ - public int getMaxFlow() { - return mMaxFlow; - } - - /** - * Returns the nodes which share the same partition (as defined by the min-cut edges) - * as the sink node. - */ - public Set getNodesInSinkPartition() { - return mSinkPartitionNodes; - } - - /** - * Returns the nodes which share the same partition (as defined by the min-cut edges) - * as the source node. - */ - public Set getNodesInSourcePartition() { - return mSourcePartitionNodes; - } - - /** - * Returns the edges in the minimum cut. - */ - public Set getMinCutEdges() { - return mMinCutEdges; - } - - /** - * Returns the graph for which the maximum flow is calculated. 
- */ - public DirectedGraph getFlowGraph() { - return mFlowGraph; - } - - @Override - protected void initializeIterations() { - parentCapacityMap.put(source, Integer.MAX_VALUE); - parentMap.put(source, source); - - List edgeList = new ArrayList(mFlowGraph.getEdges()); - - for (int eIdx=0;eIdx< edgeList.size();eIdx++) { - E edge = edgeList.get(eIdx); - Number capacity = edgeCapacityTransformer.transform(edge); - - if (capacity == null) { - throw new IllegalArgumentException("Edge capacities must be provided in Transformer passed to constructor"); - } - residualCapacityMap.put(edge, capacity); - - V source = mFlowGraph.getSource(edge); - V destination = mFlowGraph.getDest(edge); - - if(mFlowGraph.isPredecessor(source, destination) == false) { - E backEdge = edgeFactory.create(); - mFlowGraph.addEdge(backEdge, destination, source, EdgeType.DIRECTED); - residualCapacityMap.put(backEdge, 0); - } - } - } - - @Override - protected void finalizeIterations() { - - for (E currentEdge : mFlowGraph.getEdges()) { - Number capacity = edgeCapacityTransformer.transform(currentEdge); - - Number residualCapacity = residualCapacityMap.get(currentEdge); - if (capacity != null) { - Integer flowValue = new Integer(capacity.intValue()-residualCapacity.intValue()); - this.edgeFlowMap.put(currentEdge, flowValue); - } - } - - Set backEdges = new HashSet(); - for (E currentEdge: mFlowGraph.getEdges()) { - - if (edgeCapacityTransformer.transform(currentEdge) == null) { - backEdges.add(currentEdge); - } else { - residualCapacityMap.remove(currentEdge); - } - } - for(E e : backEdges) { - mFlowGraph.removeEdge(e); - } - } - - private void updateResidualCapacities() { - - Number augmentingPathCapacity = parentCapacityMap.get(target); - mMaxFlow += augmentingPathCapacity.intValue(); - V currentVertex = target; - V parentVertex = null; - while ((parentVertex = parentMap.get(currentVertex)) != currentVertex) { - E currentEdge = mFlowGraph.findEdge(parentVertex, currentVertex); - - Number residualCapacity = residualCapacityMap.get(currentEdge); - - residualCapacity = residualCapacity.intValue() - augmentingPathCapacity.intValue(); - residualCapacityMap.put(currentEdge, residualCapacity); - - E backEdge = mFlowGraph.findEdge(currentVertex, parentVertex); - residualCapacity = residualCapacityMap.get(backEdge); - residualCapacity = residualCapacity.intValue() + augmentingPathCapacity.intValue(); - residualCapacityMap.put(backEdge, residualCapacity); - currentVertex = parentVertex; - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/flows/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/flows/package.html deleted file mode 100644 index 1ec243d8..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/flows/package.html +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - -Methods for calculating properties relating to network flows (such as max flow/min cut). - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/EvolvingGraphGenerator.java b/gui/jung-src/edu/uci/ics/jung/algorithms/generators/EvolvingGraphGenerator.java deleted file mode 100644 index d351f9b1..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/EvolvingGraphGenerator.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- */ -package edu.uci.ics.jung.algorithms.generators; - - - -/** - * An interface for algorithms that generate graphs that evolve iteratively. - * @author Scott White - */ -public interface EvolvingGraphGenerator extends GraphGenerator { - - /** - * Instructs the algorithm to evolve the graph N steps. - * @param numSteps number of steps to iterate from the current state - */ - void evolveGraph(int numSteps); - - /** - * Retrieves the total number of steps elapsed. - * @return number of elapsed steps - */ - int numIterations(); -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/GraphGenerator.java b/gui/jung-src/edu/uci/ics/jung/algorithms/generators/GraphGenerator.java deleted file mode 100644 index a3290609..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/GraphGenerator.java +++ /dev/null @@ -1,20 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.generators; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.Graph; - -/** - * An interface for algorithms that generate graphs. - * @author Scott White - */ -public interface GraphGenerator extends Factory>{ } diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/Lattice2DGenerator.java b/gui/jung-src/edu/uci/ics/jung/algorithms/generators/Lattice2DGenerator.java deleted file mode 100644 index e84425ce..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/Lattice2DGenerator.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright (c) 2009, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ - -package edu.uci.ics.jung.algorithms.generators; - -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.EdgeType; - -/** - * Simple generator of an m x n lattice where each vertex - * is incident with each of its neighbors (to the left, right, up, and down). - * May be toroidal, in which case the vertices on the edges are connected to - * their counterparts on the opposite edges as well. - * - *

    If the graph factory supplied has a default edge type of {@code EdgeType.DIRECTED}, - * then edges will be created in both directions between adjacent vertices. - * - * @author Joshua O'Madadhain - */ -public class Lattice2DGenerator implements GraphGenerator -{ - protected int row_count; - protected int col_count; - protected boolean is_toroidal; - protected boolean is_directed; - protected Factory> graph_factory; - protected Factory vertex_factory; - protected Factory edge_factory; - private List v_array; - - /** - * Constructs a generator of square lattices of size {@code latticeSize} - * with the specified parameters. - * - * @param graph_factory used to create the {@code Graph} for the lattice - * @param vertex_factory used to create the lattice vertices - * @param edge_factory used to create the lattice edges - * @param latticeSize the number of rows and columns of the lattice - * @param isToroidal if true, the created lattice wraps from top to bottom and left to right - */ - public Lattice2DGenerator(Factory> graph_factory, Factory vertex_factory, - Factory edge_factory, int latticeSize, boolean isToroidal) - { - this(graph_factory, vertex_factory, edge_factory, latticeSize, latticeSize, isToroidal); - } - - /** - * Creates a generator of {@code row_count} x {@code col_count} lattices - * with the specified parameters. - * - * @param graph_factory used to create the {@code Graph} for the lattice - * @param vertex_factory used to create the lattice vertices - * @param edge_factory used to create the lattice edges - * @param row_count the number of rows in the lattice - * @param col_count the number of columns in the lattice - * @param isToroidal if true, the created lattice wraps from top to bottom and left to right - */ - public Lattice2DGenerator(Factory> graph_factory, Factory vertex_factory, - Factory edge_factory, int row_count, int col_count, boolean isToroidal) - { - if (row_count < 2 || col_count < 2) - { - throw new IllegalArgumentException("Row and column counts must each be at least 2."); - } - - this.row_count = row_count; - this.col_count = col_count; - this.is_toroidal = isToroidal; - this.graph_factory = graph_factory; - this.vertex_factory = vertex_factory; - this.edge_factory = edge_factory; - this.is_directed = (graph_factory.create().getDefaultEdgeType() == EdgeType.DIRECTED); - } - - /** - * @see edu.uci.ics.jung.algorithms.generators.GraphGenerator#create() - */ - @SuppressWarnings("unchecked") - public Graph create() - { - int vertex_count = row_count * col_count; - Graph graph = graph_factory.create(); - v_array = new ArrayList(vertex_count); - for (int i = 0; i < vertex_count; i++) - { - V v = vertex_factory.create(); - graph.addVertex(v); - v_array.add(i, v); - } - - int start = is_toroidal ? 0 : 1; - int end_row = is_toroidal ? row_count : row_count - 1; - int end_col = is_toroidal ? col_count : col_count - 1; - - // fill in edges - // down - for (int i = 0; i < end_row; i++) - for (int j = 0; j < col_count; j++) - graph.addEdge(edge_factory.create(), getVertex(i,j), getVertex(i+1, j)); - // right - for (int i = 0; i < row_count; i++) - for (int j = 0; j < end_col; j++) - graph.addEdge(edge_factory.create(), getVertex(i,j), getVertex(i, j+1)); - - // if the graph is directed, fill in the edges going the other direction... 
- if (graph.getDefaultEdgeType() == EdgeType.DIRECTED) - { - // up - for (int i = start; i < row_count; i++) - for (int j = 0; j < col_count; j++) - graph.addEdge(edge_factory.create(), getVertex(i,j), getVertex(i-1, j)); - // left - for (int i = 0; i < row_count; i++) - for (int j = start; j < col_count; j++) - graph.addEdge(edge_factory.create(), getVertex(i,j), getVertex(i, j-1)); - } - - return graph; - } - - /** - * Returns the number of edges found in a lattice of this generator's specifications. - * (This is useful for subclasses that may modify the generated graphs to add more edges.) - */ - public int getGridEdgeCount() - { - int boundary_adjustment = (is_toroidal ? 0 : 1); - int vertical_edge_count = col_count * (row_count - boundary_adjustment); - int horizontal_edge_count = row_count * (col_count - boundary_adjustment); - - return (vertical_edge_count + horizontal_edge_count) * (is_directed ? 2 : 1); - } - - protected int getIndex(int i, int j) - { - return ((mod(i, row_count)) * col_count) + (mod(j, col_count)); - } - - protected int mod(int i, int modulus) - { - int i_mod = i % modulus; - return i_mod >= 0 ? i_mod : i_mod + modulus; - } - - /** - * Returns the vertex at position ({@code i mod row_count, j mod col_count}). - */ - protected V getVertex(int i, int j) - { - return v_array.get(getIndex(i, j)); - } - - /** - * Returns the {@code i}th vertex (counting row-wise). - */ - protected V getVertex(int i) - { - return v_array.get(i); - } - - /** - * Returns the row in which vertex {@code i} is found. - */ - protected int getRow(int i) - { - return i / row_count; - } - - /** - * Returns the column in which vertex {@code i} is found. - */ - protected int getCol(int i) - { - return i % col_count; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/generators/package.html deleted file mode 100644 index 441922dc..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/package.html +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - -Methods for generating new (often random) graphs with various properties. - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/BarabasiAlbertGenerator.java b/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/BarabasiAlbertGenerator.java deleted file mode 100644 index 77b419b4..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/BarabasiAlbertGenerator.java +++ /dev/null @@ -1,227 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.generators.random; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.algorithms.generators.EvolvingGraphGenerator; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.MultiGraph; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - - -/** - *

    Simple evolving scale-free random graph generator. At each time step, a new vertex is created and is connected to existing vertices according to the principle of "preferential attachment", whereby vertices with higher degree have a higher probability of being selected for attachment.

    At a given timestep, the probability p of creating an edge between an existing vertex v and the newly added vertex is

        p = (degree(v) + 1) / (|E| + |V|)

    where |E| and |V| are, respectively, the number of edges and vertices currently in the network (counting neither the new vertex nor the other edges that are being attached to it).

    Note that the formula specified in the original paper (cited below) was

        p = degree(v) / |E|

    However, this would have meant that the probability of attachment for any existing isolated vertex would be 0. This version uses Lagrangian smoothing to give each existing vertex a positive attachment probability.

    The graph created may be either directed or undirected (controlled by a constructor parameter); the default is undirected. If the graph is specified to be directed, then the edges added will be directed from the newly added vertex u to the existing vertex v, with probability proportional to the indegree of v (number of edges directed towards v). If the graph is specified to be undirected, then the (undirected) edges added will connect u to v, with probability proportional to the degree of v.

    The parallel constructor parameter specifies whether parallel edges may be created.
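    A minimal usage sketch of this generator, matching the constructor, evolveGraph, and create signatures in the code below; the SparseMultigraph-based factories and the vertex/edge naming scheme are illustrative assumptions:

import java.util.HashSet;
import java.util.Set;

import org.apache.commons.collections15.Factory;

import edu.uci.ics.jung.algorithms.generators.random.BarabasiAlbertGenerator;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;

public class BarabasiAlbertSketch {
    public static void main(String[] args) {
        Factory<Graph<Integer, String>> graphFactory = new Factory<Graph<Integer, String>>() {
            public Graph<Integer, String> create() { return new SparseMultigraph<Integer, String>(); }
        };
        Factory<Integer> vertexFactory = new Factory<Integer>() {
            private int next = 0;
            public Integer create() { return next++; }
        };
        Factory<String> edgeFactory = new Factory<String>() {
            private int next = 0;
            public String create() { return "e" + (next++); }
        };

        Set<Integer> seeds = new HashSet<Integer>();
        // 4 seed vertices, 3 edges attached per time step, fixed random seed 42.
        BarabasiAlbertGenerator<Integer, String> gen =
                new BarabasiAlbertGenerator<Integer, String>(
                        graphFactory, vertexFactory, edgeFactory, 4, 3, 42, seeds);
        gen.evolveGraph(100);   // add 100 vertices by preferential attachment
        Graph<Integer, String> g = gen.create();
        System.out.println(g.getVertexCount() + " vertices, " + g.getEdgeCount() + " edges");
    }
}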

    - * - * @see "A.-L. Barabasi and R. Albert, Emergence of scaling in random networks, Science 286, 1999." - * @author Scott White - * @author Joshua O'Madadhain - * @author Tom Nelson - adapted to jung2 - */ -public class BarabasiAlbertGenerator implements EvolvingGraphGenerator { - private Graph mGraph = null; - private int mNumEdgesToAttachPerStep; - private int mElapsedTimeSteps; - private Random mRandom; - protected List vertex_index; - protected int init_vertices; - protected Map index_vertex; - protected Factory> graphFactory; - protected Factory vertexFactory; - protected Factory edgeFactory; - - /** - * Constructs a new instance of the generator. - * @param init_vertices number of unconnected 'seed' vertices that the graph should start with - * @param numEdgesToAttach the number of edges that should be attached from the - * new vertex to pre-existing vertices at each time step - * @param directed specifies whether the graph and edges to be created should be directed or not - * @param parallel specifies whether the algorithm permits parallel edges - * @param seed random number seed - */ - public BarabasiAlbertGenerator(Factory> graphFactory, - Factory vertexFactory, Factory edgeFactory, - int init_vertices, int numEdgesToAttach, - int seed, Set seedVertices) - { - assert init_vertices > 0 : "Number of initial unconnected 'seed' vertices " + - "must be positive"; - assert numEdgesToAttach > 0 : "Number of edges to attach " + - "at each time step must be positive"; - - mNumEdgesToAttachPerStep = numEdgesToAttach; - mRandom = new Random(seed); - this.graphFactory = graphFactory; - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - this.init_vertices = init_vertices; - initialize(seedVertices); - } - - - /** - * Constructs a new instance of the generator, whose output will be an undirected graph, - * and which will use the current time as a seed for the random number generation. - * @param init_vertices number of vertices that the graph should start with - * @param numEdgesToAttach the number of edges that should be attached from the - * new vertex to pre-existing vertices at each time step - */ - public BarabasiAlbertGenerator(Factory> graphFactory, - Factory vertexFactory, Factory edgeFactory, - int init_vertices, int numEdgesToAttach, Set seedVertices) { - this(graphFactory, vertexFactory, edgeFactory, init_vertices, numEdgesToAttach, (int) System.currentTimeMillis(), seedVertices); - } - - private void initialize(Set seedVertices) { - - mGraph = graphFactory.create(); - - vertex_index = new ArrayList(2*init_vertices); - index_vertex = new HashMap(2*init_vertices); - for (int i = 0; i < init_vertices; i++) { - V v = vertexFactory.create(); - mGraph.addVertex(v); - vertex_index.add(v); - index_vertex.put(v, i); - seedVertices.add(v); - } - - mElapsedTimeSteps = 0; - } - - private void createRandomEdge(Collection preexistingNodes, - V newVertex, Set> added_pairs) { - V attach_point; - boolean created_edge = false; - Pair endpoints; - do { - attach_point = vertex_index.get(mRandom.nextInt(vertex_index.size())); - - endpoints = new Pair(newVertex, attach_point); - - // if parallel edges are not allowed, skip attach_point if - // already exists; note that because of the way edges are added, we only need to check - // the list of candidate edges for duplicates. 
- if (!(mGraph instanceof MultiGraph)) - { - if (added_pairs.contains(endpoints)) - continue; - if (mGraph.getDefaultEdgeType() == EdgeType.UNDIRECTED && - added_pairs.contains(new Pair(attach_point, newVertex))) - continue; - } - - double degree = mGraph.inDegree(attach_point); - - // subtract 1 from numVertices because we don't want to count newVertex - // (which has already been added to the graph, but not to vertex_index) - double attach_prob = (degree + 1) / (mGraph.getEdgeCount() + mGraph.getVertexCount() - 1); - if (attach_prob >= mRandom.nextDouble()) - created_edge = true; - } - while (!created_edge); - - added_pairs.add(endpoints); - - if (mGraph.getDefaultEdgeType() == EdgeType.UNDIRECTED) { - added_pairs.add(new Pair(attach_point, newVertex)); - } - } - - public void evolveGraph(int numTimeSteps) { - - for (int i = 0; i < numTimeSteps; i++) { - evolveGraph(); - mElapsedTimeSteps++; - } - } - - private void evolveGraph() { - Collection preexistingNodes = mGraph.getVertices(); - V newVertex = vertexFactory.create(); - - mGraph.addVertex(newVertex); - - // generate and store the new edges; don't add them to the graph - // yet because we don't want to bias the degree calculations - // (all new edges in a timestep should be added in parallel) - Set> added_pairs = new HashSet>(mNumEdgesToAttachPerStep*3); - - for (int i = 0; i < mNumEdgesToAttachPerStep; i++) - createRandomEdge(preexistingNodes, newVertex, added_pairs); - - for (Pair pair : added_pairs) - { - V v1 = pair.getFirst(); - V v2 = pair.getSecond(); - if (mGraph.getDefaultEdgeType() != EdgeType.UNDIRECTED || - !mGraph.isNeighbor(v1, v2)) - mGraph.addEdge(edgeFactory.create(), pair); - } - // now that we're done attaching edges to this new vertex, - // add it to the index - vertex_index.add(newVertex); - index_vertex.put(newVertex, new Integer(vertex_index.size() - 1)); - } - - public int numIterations() { - return mElapsedTimeSteps; - } - - public Graph create() { - return mGraph; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/EppsteinPowerLawGenerator.java b/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/EppsteinPowerLawGenerator.java deleted file mode 100644 index e3bf04b6..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/EppsteinPowerLawGenerator.java +++ /dev/null @@ -1,128 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.generators.random; - -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.algorithms.generators.GraphGenerator; -import edu.uci.ics.jung.graph.Graph; - -/** - * Graph generator that generates undirected graphs with power-law degree distributions. - * @author Scott White - * @see "A Steady State Model for Graph Power Law by David Eppstein and Joseph Wang" - */ -public class EppsteinPowerLawGenerator implements GraphGenerator { - private int mNumVertices; - private int mNumEdges; - private int mNumIterations; - private double mMaxDegree; - private Random mRandom; - private Factory> graphFactory; - private Factory vertexFactory; - private Factory edgeFactory; - - /** - * Creates an instance with the specified factories and specifications. 
- * @param graphFactory the factory to use to generate the graph - * @param vertexFactory the factory to use to create vertices - * @param edgeFactory the factory to use to create edges - * @param numVertices the number of vertices for the generated graph - * @param numEdges the number of edges the generated graph will have, should be Theta(numVertices) - * @param r the number of iterations to use; the larger the value the better the graph's degree - * distribution will approximate a power-law - */ - public EppsteinPowerLawGenerator(Factory> graphFactory, - Factory vertexFactory, Factory edgeFactory, - int numVertices, int numEdges, int r) { - this.graphFactory = graphFactory; - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - mNumVertices = numVertices; - mNumEdges = numEdges; - mNumIterations = r; - mRandom = new Random(); - } - - protected Graph initializeGraph() { - Graph graph = null; - graph = graphFactory.create(); - for(int i=0; i vertices = new ArrayList(graph.getVertices()); - while (graph.getEdgeCount() < mNumEdges) { - V u = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - V v = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - if (!graph.isSuccessor(v,u)) { - graph.addEdge(edgeFactory.create(), u, v); - } - } - - double maxDegree = 0; - for (V v : graph.getVertices()) { - maxDegree = Math.max(graph.degree(v),maxDegree); - } - mMaxDegree = maxDegree; //(maxDegree+1)*(maxDegree)/2; - - return graph; - } - - /** - * Generates a graph whose degree distribution approximates a power-law. - * @return the generated graph - */ - public Graph create() { - Graph graph = initializeGraph(); - - List vertices = new ArrayList(graph.getVertices()); - for (int rIdx = 0; rIdx < mNumIterations; rIdx++) { - - V v = null; - int degree = 0; - do { - v = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - degree = graph.degree(v); - - } while (degree == 0); - - List edges = new ArrayList(graph.getIncidentEdges(v)); - E randomExistingEdge = edges.get((int) (mRandom.nextDouble()*degree)); - - // FIXME: look at email thread on a more efficient RNG for arbitrary distributions - - V x = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - V y = null; - do { - y = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - - } while (mRandom.nextDouble() > ((graph.degree(y)+1)/mMaxDegree)); - - if (!graph.isSuccessor(y,x) && x != y) { - graph.removeEdge(randomExistingEdge); - graph.addEdge(edgeFactory.create(), x, y); - } - } - - return graph; - } - - /** - * Sets the seed for the random number generator. - * @param seed input to the random number generator. - */ - public void setSeed(long seed) { - mRandom.setSeed(seed); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/ErdosRenyiGenerator.java b/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/ErdosRenyiGenerator.java deleted file mode 100644 index 3a337308..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/ErdosRenyiGenerator.java +++ /dev/null @@ -1,100 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. 
-*/ -package edu.uci.ics.jung.algorithms.generators.random; - -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.algorithms.generators.GraphGenerator; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; - -/** - * Generates a random graph using the Erdos-Renyi binomial model - * (each pair of vertices is connected with probability p). - * - * @author William Giordano, Scott White, Joshua O'Madadhain - */ -public class ErdosRenyiGenerator implements GraphGenerator { - private int mNumVertices; - private double mEdgeConnectionProbability; - private Random mRandom; - Factory> graphFactory; - Factory vertexFactory; - Factory edgeFactory; - - /** - * - * @param numVertices number of vertices graph should have - * @param p Connection's probability between 2 vertices - */ - public ErdosRenyiGenerator(Factory> graphFactory, - Factory vertexFactory, Factory edgeFactory, - int numVertices,double p) - { - if (numVertices <= 0) { - throw new IllegalArgumentException("A positive # of vertices must be specified."); - } - mNumVertices = numVertices; - if (p < 0 || p > 1) { - throw new IllegalArgumentException("p must be between 0 and 1."); - } - this.graphFactory = graphFactory; - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - mEdgeConnectionProbability = p; - mRandom = new Random(); - } - - /** - * Returns a graph in which each pair of vertices is connected by - * an undirected edge with the probability specified by the constructor. - */ - public Graph create() { - UndirectedGraph g = graphFactory.create(); - for(int i=0; i list = new ArrayList(g.getVertices()); - - for (int i = 0; i < mNumVertices-1; i++) { - V v_i = list.get(i); - for (int j = i+1; j < mNumVertices; j++) { - V v_j = list.get(j); - if (mRandom.nextDouble() < mEdgeConnectionProbability) { - g.addEdge(edgeFactory.create(), v_i, v_j); - } - } - } - return g; - } - - /** - * Sets the seed of the internal random number generator to {@code seed}. - * Enables consistent behavior. - */ - public void setSeed(long seed) { - mRandom.setSeed(seed); - } -} - - - - - - - - - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/KleinbergSmallWorldGenerator.java b/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/KleinbergSmallWorldGenerator.java deleted file mode 100644 index de01b69b..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/KleinbergSmallWorldGenerator.java +++ /dev/null @@ -1,184 +0,0 @@ - -package edu.uci.ics.jung.algorithms.generators.random; - -/* -* Copyright (c) 2009, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ - -import java.util.HashMap; -import java.util.Map; -import java.util.Random; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.algorithms.generators.Lattice2DGenerator; -import edu.uci.ics.jung.algorithms.util.WeightedChoice; -import edu.uci.ics.jung.graph.Graph; - -/** - * Graph generator that produces a random graph with small world properties. - * The underlying model is an mxn (optionally toroidal) lattice. 
Each node u - * has four local connections, one to each of its neighbors, and - * in addition 1+ long range connections to some node v where v is chosen randomly according to - * probability proportional to d^-alpha where d is the lattice distance between u and v and alpha - * is the clustering exponent. - * - * @see "Navigation in a small world J. Kleinberg, Nature 406(2000), 845." - * @author Joshua O'Madadhain - */ -public class KleinbergSmallWorldGenerator extends Lattice2DGenerator { - private double clustering_exponent; - private Random random; - private int num_connections = 1; - - /** - * Creates - * @param graph_factory - * @param vertex_factory - * @param edge_factory - * @param latticeSize - * @param clusteringExponent - */ - public KleinbergSmallWorldGenerator(Factory> graph_factory, Factory vertex_factory, - Factory edge_factory, int latticeSize, double clusteringExponent) - { - this(graph_factory, vertex_factory, edge_factory, latticeSize, latticeSize, clusteringExponent); - } - - /** - * @param graph_factory - * @param vertex_factory - * @param edge_factory - * @param row_count - * @param col_count - * @param clusteringExponent - */ - public KleinbergSmallWorldGenerator(Factory> graph_factory, Factory vertex_factory, - Factory edge_factory, int row_count, int col_count, double clusteringExponent) - { - super(graph_factory, vertex_factory, edge_factory, row_count, col_count, true); - clustering_exponent = clusteringExponent; - initialize(); - } - - /** - * @param graph_factory - * @param vertex_factory - * @param edge_factory - * @param row_count - * @param col_count - * @param clusteringExponent - * @param isToroidal - */ - public KleinbergSmallWorldGenerator(Factory> graph_factory, Factory vertex_factory, - Factory edge_factory, int row_count, int col_count, double clusteringExponent, - boolean isToroidal) - { - super(graph_factory, vertex_factory, edge_factory, row_count, col_count, isToroidal); - clustering_exponent = clusteringExponent; - initialize(); - } - - private void initialize() - { - this.random = new Random(); - } - - /** - * Sets the {@code Random} instance used by this instance. Useful for - * unit testing. - */ - public void setRandom(Random random) - { - this.random = random; - } - - /** - * Sets the seed of the internal random number generator. May be used to provide repeatable - * experiments. - */ - public void setRandomSeed(long seed) - { - random.setSeed(seed); - } - - /** - * Sets the number of new 'small-world' connections (outgoing edges) to be added to each vertex. - */ - public void setConnectionCount(int num_connections) - { - if (num_connections <= 0) - { - throw new IllegalArgumentException("Number of new connections per vertex must be >= 1"); - } - this.num_connections = num_connections; - } - - /** - * Returns the number of new 'small-world' connections to be made to each vertex. - */ - public int getConnectionCount() - { - return this.num_connections; - } - - /** - * Generates a random small world network according to the parameters given - * @return a random small world graph - */ - @Override - public Graph create() - { - Graph graph = super.create(); - - // TODO: For toroidal graphs, we can make this more clever by pre-creating the WeightedChoice object - // and using the output as an offset to the current vertex location. 
- WeightedChoice weighted_choice; - - // Add long range connections - for (int i = 0; i < graph.getVertexCount(); i++) - { - V source = getVertex(i); - int row = getRow(i); - int col = getCol(i); - int row_offset = row < row_count/2 ? -row_count : row_count; - int col_offset = col < col_count/2 ? -col_count : col_count; - - Map vertex_weights = new HashMap(); - for (int j = 0; j < row_count; j++) - { - for (int k = 0; k < col_count; k++) - { - if (j == row && k == col) - continue; - int v_dist = Math.abs(j - row); - int h_dist = Math.abs(k - col); - if (is_toroidal) - { - v_dist = Math.min(v_dist, Math.abs(j - row+row_offset)); - h_dist = Math.min(h_dist, Math.abs(k - col+col_offset)); - } - int distance = v_dist + h_dist; - if (distance < 2) - continue; - else - vertex_weights.put(getVertex(j,k), (float)Math.pow(distance, -clustering_exponent)); - } - } - - for (int j = 0; j < this.num_connections; j++) { - weighted_choice = new WeightedChoice(vertex_weights, random); - V target = weighted_choice.nextItem(); - graph.addEdge(edge_factory.create(), source, target); - } - } - - return graph; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/MixedRandomGraphGenerator.java b/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/MixedRandomGraphGenerator.java deleted file mode 100644 index a39a6404..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/MixedRandomGraphGenerator.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Jul 2, 2003 - * - */ -package edu.uci.ics.jung.algorithms.generators.random; - -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.EdgeType; - -/** - * - * Generates a mixed-mode random graph based on the output of BarabasiAlbertGenerator. - * Primarily intended for providing a heterogeneous sample graph for visualization testing, etc. - * - */ -public class MixedRandomGraphGenerator { - - /** - * Equivalent to generateMixedRandomGraph(edge_weight, num_vertices, true). - */ - public static Graph generateMixedRandomGraph( - Factory> graphFactory, - Factory vertexFactory, - Factory edgeFactory, - Map edge_weight, - int num_vertices, Set seedVertices) - { - return generateMixedRandomGraph(graphFactory, vertexFactory, edgeFactory, - edge_weight, num_vertices, true, seedVertices); - } - - /** - * Returns a random mixed-mode graph. Starts with a randomly generated - * Barabasi-Albert (preferential attachment) generator - * (4 initial vertices, 3 edges added at each step, and num_vertices - 4 evolution steps). - * Then takes the resultant graph, replaces random undirected edges with directed - * edges, and assigns random weights to each edge. 
- */ - public static Graph generateMixedRandomGraph( - Factory> graphFactory, - Factory vertexFactory, - Factory edgeFactory, - Map edge_weights, - int num_vertices, boolean parallel, Set seedVertices) - { - int seed = (int)(Math.random() * 10000); - BarabasiAlbertGenerator bag = - new BarabasiAlbertGenerator(graphFactory, vertexFactory, edgeFactory, - 4, 3, //false, parallel, - seed, seedVertices); - bag.evolveGraph(num_vertices - 4); - Graph ug = bag.create(); - - // create a SparseMultigraph version of g - Graph g = graphFactory.create(); - //new SparseMultigraph(); - for(V v : ug.getVertices()) { - g.addVertex(v); - } - - // randomly replace some of the edges by directed edges to - // get a mixed-mode graph, add random weights - - for(E e : ug.getEdges()) { - V v1 = ug.getEndpoints(e).getFirst(); - V v2 = ug.getEndpoints(e).getSecond(); - - E me = edgeFactory.create(); - g.addEdge(me, v1, v2, Math.random() < .5 ? EdgeType.DIRECTED : EdgeType.UNDIRECTED); - edge_weights.put(me, Math.random()); - } - - return g; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/package.html deleted file mode 100644 index 9f85614a..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/generators/random/package.html +++ /dev/null @@ -1,28 +0,0 @@ - - - - - - - -Methods for generating random graphs with various properties. These include: -
      -
    • BarabasiAlbertGenerator: scale-free graphs using the preferential attachment heuristic.
    • EppsteinPowerLawGenerator: graphs whose degree distribution approximates a power law.
    • ErdosRenyiGenerator: graphs for which edges are created with a specified probability (a usage sketch follows this list).
    • MixedRandomGraphGenerator: takes the output of BarabasiAlbertGenerator and perturbs it to generate a mixed-mode analog with both directed and undirected edges.
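For example, the ErdosRenyiGenerator deleted above could be driven as follows; the constructor, setSeed, and create calls match the deleted source, while the factories and graph type are illustrative assumptions:

import org.apache.commons.collections15.Factory;

import edu.uci.ics.jung.algorithms.generators.random.ErdosRenyiGenerator;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.UndirectedGraph;
import edu.uci.ics.jung.graph.UndirectedSparseGraph;

public class ErdosRenyiSketch {
    public static void main(String[] args) {
        Factory<UndirectedGraph<Integer, String>> graphFactory =
                new Factory<UndirectedGraph<Integer, String>>() {
                    public UndirectedGraph<Integer, String> create() {
                        return new UndirectedSparseGraph<Integer, String>();
                    }
                };
        Factory<Integer> vertexFactory = new Factory<Integer>() {
            private int next = 0;
            public Integer create() { return next++; }
        };
        Factory<String> edgeFactory = new Factory<String>() {
            private int next = 0;
            public String create() { return "e" + (next++); }
        };

        // 30 vertices; each pair is connected independently with probability 0.1.
        ErdosRenyiGenerator<Integer, String> gen =
                new ErdosRenyiGenerator<Integer, String>(graphFactory, vertexFactory, edgeFactory, 30, 0.1);
        gen.setSeed(42L);   // reproducible output
        Graph<Integer, String> g = gen.create();
        System.out.println(g.getEdgeCount() + " edges");
    }
}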
    • - - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/AbstractRanker.java b/gui/jung-src/edu/uci/ics/jung/algorithms/importance/AbstractRanker.java deleted file mode 100644 index 1e853c41..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/AbstractRanker.java +++ /dev/null @@ -1,388 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.importance; - -import java.text.DecimalFormat; -import java.text.Format; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.algorithms.util.IterativeProcess; -import edu.uci.ics.jung.graph.Graph; - -/** - * Abstract class for algorithms that rank nodes or edges by some "importance" metric. Provides a common set of - * services such as: - *
        - *
      • storing rank scores
      • getters and setters for rank scores
      • computing default edge weights
      • normalizing default or user-provided edge transition weights
      • normalizing rank scores
      • automatic cleanup of decorations
      • creation of Ranking list
      • print rankings in sorted order by rank

      - * By default, all rank scores are removed from the vertices (or edges) being ranked. - * @author Scott White - */ -public abstract class AbstractRanker extends IterativeProcess { - private Graph mGraph; - private List> mRankings; - private boolean mRemoveRankScoresOnFinalize; - private boolean mRankNodes; - private boolean mRankEdges; - private boolean mNormalizeRankings; - protected Map> vertexRankScores = - LazyMap.decorate( - new HashMap>(), - new Factory>() { - public Map create() { - return new HashMap(); - }}); - protected Map> edgeRankScores = - LazyMap.decorate( - new HashMap>(), - new Factory>() { - public Map create() { - return new HashMap(); - }}); - private Map edgeWeights = new HashMap(); - - protected void initialize(Graph graph, boolean isNodeRanker, - boolean isEdgeRanker) { - if (!isNodeRanker && !isEdgeRanker) - throw new IllegalArgumentException("Must rank edges, vertices, or both"); - mGraph = graph; - mRemoveRankScoresOnFinalize = true; - mNormalizeRankings = true; - mRankNodes = isNodeRanker; - mRankEdges = isEdgeRanker; - } - - /** - * @return all rankScores - */ - public Map> getVertexRankScores() { - return vertexRankScores; - } - - public Map> getEdgeRankScores() { - return edgeRankScores; - } - - /** - * @return the rankScores - */ - public Map getVertexRankScores(Object key) { - return vertexRankScores.get(key); - } - - public Map getEdgeRankScores(Object key) { - return edgeRankScores.get(key); - } - - protected Collection getVertices() { - return mGraph.getVertices(); - } - - protected int getVertexCount() { - return mGraph.getVertexCount(); - } - - protected Graph getGraph() { - return mGraph; - } - - @Override - public void reset() { - } - - /** - * Returns true if this ranker ranks nodes, and - * false otherwise. - */ - public boolean isRankingNodes() { - return mRankNodes; - } - - /** - * Returns true if this ranker ranks edges, and - * false otherwise. - */ - public boolean isRankingEdges() { - return mRankEdges; - } - - /** - * Instructs the ranker whether or not it should remove the rank scores from the nodes (or edges) once the ranks - * have been computed. - * @param removeRankScoresOnFinalize true if the rank scores are to be removed, false otherwise - */ - public void setRemoveRankScoresOnFinalize(boolean removeRankScoresOnFinalize) { - this.mRemoveRankScoresOnFinalize = removeRankScoresOnFinalize; - } - - protected void onFinalize(Object e) {} - - /** - * The user datum key used to store the rank score. - * @return the key - */ - abstract public Object getRankScoreKey(); - - - @Override - protected void finalizeIterations() { - List> sortedRankings = new ArrayList>(); - - int id = 1; - if (mRankNodes) { - for (V currentVertex : getVertices()) { - Ranking ranking = new Ranking(id,getVertexRankScore(currentVertex),currentVertex); - sortedRankings.add(ranking); - if (mRemoveRankScoresOnFinalize) { - this.vertexRankScores.get(getRankScoreKey()).remove(currentVertex); - } - id++; - onFinalize(currentVertex); - } - } - if (mRankEdges) { - for (E currentEdge : mGraph.getEdges()) { - - Ranking ranking = new Ranking(id,getEdgeRankScore(currentEdge),currentEdge); - sortedRankings.add(ranking); - if (mRemoveRankScoresOnFinalize) { - this.edgeRankScores.get(getRankScoreKey()).remove(currentEdge); - } - id++; - onFinalize(currentEdge); - } - } - - mRankings = sortedRankings; - Collections. 
>sort(mRankings); - } - - /** - * Retrieves the list of ranking instances in descending sorted order by rank score - * If the algorithm is ranking edges, the instances will be of type EdgeRanking, otherwise - * if the algorithm is ranking nodes the instances will be of type NodeRanking - * @return the list of rankings - */ - public List> getRankings() { - return mRankings; - } - - /** - * Return a list of the top k rank scores. - * @param topKRankings the value of k to use - * @return list of rank scores - */ - public List getRankScores(int topKRankings) { - List scores = new ArrayList(); - int count=1; - for (Ranking currentRanking : getRankings()) { - if (count > topKRankings) { - return scores; - } - scores.add(currentRanking.rankScore); - count++; - } - - return scores; - } - - /** - * Given an edge or node, returns the corresponding rank score. This is a default - * implementation of getRankScore which assumes the decorations are of type MutableDouble. - * This method only returns legal values if setRemoveRankScoresOnFinalize(false) was called - * prior to evaluate(). - * @return the rank score value - */ - public double getVertexRankScore(V v) { - Number rankScore = vertexRankScores.get(getRankScoreKey()).get(v); - if (rankScore != null) { - return rankScore.doubleValue(); - } else { - throw new RuntimeException("setRemoveRankScoresOnFinalize(false) must be called before evaluate()."); - } - } - - public double getVertexRankScore(V v, Object key) { - return vertexRankScores.get(key).get(v).doubleValue(); - } - - public double getEdgeRankScore(E e) { - Number rankScore = edgeRankScores.get(getRankScoreKey()).get(e); - if (rankScore != null) { - return rankScore.doubleValue(); - } else { - throw new RuntimeException("setRemoveRankScoresOnFinalize(false) must be called before evaluate()."); - } - } - - public double getEdgeRankScore(E e, Object key) { - return edgeRankScores.get(key).get(e).doubleValue(); - } - - protected void setVertexRankScore(V v, double rankValue, Object key) { - vertexRankScores.get(key).put(v, rankValue); - } - - protected void setEdgeRankScore(E e, double rankValue, Object key) { - edgeRankScores.get(key).put(e, rankValue); - } - - protected void setVertexRankScore(V v, double rankValue) { - setVertexRankScore(v,rankValue, getRankScoreKey()); - } - - protected void setEdgeRankScore(E e, double rankValue) { - setEdgeRankScore(e, rankValue, getRankScoreKey()); - } - - protected void removeVertexRankScore(V v, Object key) { - vertexRankScores.get(key).remove(v); - } - - protected void removeEdgeRankScore(E e, Object key) { - edgeRankScores.get(key).remove(e); - } - - protected void removeVertexRankScore(V v) { - vertexRankScores.get(getRankScoreKey()).remove(v); - } - - protected void removeEdgeRankScore(E e) { - edgeRankScores.get(getRankScoreKey()).remove(e); - } - - protected double getEdgeWeight(E e) { - return edgeWeights.get(e).doubleValue(); - } - - protected void setEdgeWeight(E e, double weight) { - edgeWeights.put(e, weight); - } - - public void setEdgeWeights(Map edgeWeights) { - this.edgeWeights = edgeWeights; - } - - /** - * @return the edgeWeights - */ - public Map getEdgeWeights() { - return edgeWeights; - } - - protected void assignDefaultEdgeTransitionWeights() { - - for (V currentVertex : getVertices()) { - - Collection outgoingEdges = mGraph.getOutEdges(currentVertex); - - double numOutEdges = outgoingEdges.size(); - for (E currentEdge : outgoingEdges) { - setEdgeWeight(currentEdge,1.0/numOutEdges); - } - } - } - - protected void 
normalizeEdgeTransitionWeights() { - - for (V currentVertex : getVertices()) { - - Collection outgoingEdges = mGraph.getOutEdges(currentVertex); - - double totalEdgeWeight = 0; - for (E currentEdge : outgoingEdges) { - totalEdgeWeight += getEdgeWeight(currentEdge); - } - - for (E currentEdge : outgoingEdges) { - setEdgeWeight(currentEdge,getEdgeWeight(currentEdge)/totalEdgeWeight); - } - } - } - - protected void normalizeRankings() { - if (!mNormalizeRankings) { - return; - } - double totalWeight = 0; - - for (V currentVertex : getVertices()) { - totalWeight += getVertexRankScore(currentVertex); - } - - for (V currentVertex : getVertices()) { - setVertexRankScore(currentVertex,getVertexRankScore(currentVertex)/totalWeight); - } - } - - /** - * Print the rankings to standard out in descending order of rank score - * @param verbose if true, include information about the actual rank order as well as - * the original position of the vertex before it was ranked - * @param printScore if true, include the actual value of the rank score - */ - public void printRankings(boolean verbose,boolean printScore) { - double total = 0; - Format formatter = new DecimalFormat("#0.#######"); - int rank = 1; - - for (Ranking currentRanking : getRankings()) { - double rankScore = currentRanking.rankScore; - if (verbose) { - System.out.print("Rank " + rank + ": "); - if (printScore) { - System.out.print(formatter.format(rankScore)); - } - System.out.print("\tVertex Id: " + currentRanking.originalPos); - System.out.print(" (" + currentRanking.getRanked() + ")"); - System.out.println(); - } else { - System.out.print(rank + "\t"); - if (printScore) { - System.out.print(formatter.format(rankScore)); - } - System.out.println("\t" + currentRanking.originalPos); - - } - total += rankScore; - rank++; - } - - if (verbose) { - System.out.println("Total: " + formatter.format(total)); - } - } - - /** - * Allows the user to specify whether or not s/he wants the rankings to be normalized. - * In some cases, this will have no effect since the algorithm doesn't allow normalization - * as an option - * @param normalizeRankings - */ - public void setNormalizeRankings(boolean normalizeRankings) { - mNormalizeRankings = normalizeRankings; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/BetweennessCentrality.java b/gui/jung-src/edu/uci/ics/jung/algorithms/importance/BetweennessCentrality.java deleted file mode 100644 index 25906f20..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/BetweennessCentrality.java +++ /dev/null @@ -1,190 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.importance; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Stack; - -import org.apache.commons.collections15.Buffer; -import org.apache.commons.collections15.buffer.UnboundedFifoBuffer; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; - -/** - * Computes betweenness centrality for each vertex and edge in the graph. The result is that each vertex - * and edge has a UserData element of type MutableDouble whose key is 'centrality.BetweennessCentrality'. 
      - * Note: Many social network researchers like to normalize the betweenness values by dividing the values by
      - * (n-1)(n-2)/2. The values given here are unnormalized.

      - *
      - * A simple example of usage is:
      - *

      - * BetweennessCentrality ranker = new BetweennessCentrality(someGraph);
      - * ranker.evaluate();
      - * ranker.printRankings();
      - * 
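A minimal, self-contained version of that usage pattern is sketched below. It assumes the JUNG 2 generic signatures and an UndirectedSparseGraph with String vertices and Integer edge ids (none of which appear in this diff), and it calls setRemoveRankScoresOnFinalize(false) before evaluate() so that getVertexRankScore can be read afterwards, as required by that method's documentation.

    import edu.uci.ics.jung.algorithms.importance.BetweennessCentrality;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.UndirectedSparseGraph;

    public class BetweennessDemo {
        public static void main(String[] args) {
            // Triangle a-b-c plus a pendant vertex d; Integer edge ids.
            Graph<String, Integer> g = new UndirectedSparseGraph<String, Integer>();
            g.addEdge(0, "a", "b");
            g.addEdge(1, "b", "c");
            g.addEdge(2, "c", "a");
            g.addEdge(3, "c", "d");

            BetweennessCentrality<String, Integer> ranker =
                    new BetweennessCentrality<String, Integer>(g);
            ranker.setRemoveRankScoresOnFinalize(false); // keep scores after the run
            ranker.evaluate();

            for (String v : g.getVertices()) {
                System.out.println(v + " -> " + ranker.getVertexRankScore(v));
            }
        }
    }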
      - * - * Running time is: O(n^2 + nm). - * @see "Ulrik Brandes: A Faster Algorithm for Betweenness Centrality. Journal of Mathematical Sociology 25(2):163-177, 2001." - * @author Scott White - * @author Tom Nelson converted to jung2 - */ - -public class BetweennessCentrality extends AbstractRanker { - - public static final String CENTRALITY = "centrality.BetweennessCentrality"; - - /** - * Constructor which initializes the algorithm - * @param g the graph whose nodes are to be analyzed - */ - public BetweennessCentrality(Graph g) { - initialize(g, true, true); - } - - public BetweennessCentrality(Graph g, boolean rankNodes) { - initialize(g, rankNodes, true); - } - - public BetweennessCentrality(Graph g, boolean rankNodes, boolean rankEdges) - { - initialize(g, rankNodes, rankEdges); - } - - protected void computeBetweenness(Graph graph) { - - Map decorator = new HashMap(); - Map bcVertexDecorator = - vertexRankScores.get(getRankScoreKey()); - bcVertexDecorator.clear(); - Map bcEdgeDecorator = - edgeRankScores.get(getRankScoreKey()); - bcEdgeDecorator.clear(); - - Collection vertices = graph.getVertices(); - - for (V s : vertices) { - - initializeData(graph,decorator); - - decorator.get(s).numSPs = 1; - decorator.get(s).distance = 0; - - Stack stack = new Stack(); - Buffer queue = new UnboundedFifoBuffer(); - queue.add(s); - - while (!queue.isEmpty()) { - V v = queue.remove(); - stack.push(v); - - for(V w : getGraph().getSuccessors(v)) { - - if (decorator.get(w).distance < 0) { - queue.add(w); - decorator.get(w).distance = decorator.get(v).distance + 1; - } - - if (decorator.get(w).distance == decorator.get(v).distance + 1) { - decorator.get(w).numSPs += decorator.get(v).numSPs; - decorator.get(w).predecessors.add(v); - } - } - } - - while (!stack.isEmpty()) { - V w = stack.pop(); - - for (V v : decorator.get(w).predecessors) { - - double partialDependency = (decorator.get(v).numSPs / decorator.get(w).numSPs); - partialDependency *= (1.0 + decorator.get(w).dependency); - decorator.get(v).dependency += partialDependency; - E currentEdge = getGraph().findEdge(v, w); - double edgeValue = bcEdgeDecorator.get(currentEdge).doubleValue(); - edgeValue += partialDependency; - bcEdgeDecorator.put(currentEdge, edgeValue); - } - if (w != s) { - double bcValue = bcVertexDecorator.get(w).doubleValue(); - bcValue += decorator.get(w).dependency; - bcVertexDecorator.put(w, bcValue); - } - } - } - - if(graph instanceof UndirectedGraph) { - for (V v : vertices) { - double bcValue = bcVertexDecorator.get(v).doubleValue(); - bcValue /= 2.0; - bcVertexDecorator.put(v, bcValue); - } - for (E e : graph.getEdges()) { - double bcValue = bcEdgeDecorator.get(e).doubleValue(); - bcValue /= 2.0; - bcEdgeDecorator.put(e, bcValue); - } - } - - for (V vertex : vertices) { - decorator.remove(vertex); - } - } - - private void initializeData(Graph g, Map decorator) { - for (V vertex : g.getVertices()) { - - Map bcVertexDecorator = vertexRankScores.get(getRankScoreKey()); - if(bcVertexDecorator.containsKey(vertex) == false) { - bcVertexDecorator.put(vertex, 0.0); - } - decorator.put(vertex, new BetweennessData()); - } - for (E e : g.getEdges()) { - - Map bcEdgeDecorator = edgeRankScores.get(getRankScoreKey()); - if(bcEdgeDecorator.containsKey(e) == false) { - bcEdgeDecorator.put(e, 0.0); - } - } - } - - /** - * the user datum key used to store the rank scores - * @return the key - */ - @Override - public String getRankScoreKey() { - return CENTRALITY; - } - - @Override - public void step() { - 
computeBetweenness(getGraph()); - } - - class BetweennessData { - double distance; - double numSPs; - List predecessors; - double dependency; - - BetweennessData() { - distance = -1; - numSPs = 0; - predecessors = new ArrayList(); - dependency = 0; - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/KStepMarkov.java b/gui/jung-src/edu/uci/ics/jung/algorithms/importance/KStepMarkov.java deleted file mode 100644 index 9ee4030c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/KStepMarkov.java +++ /dev/null @@ -1,135 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.importance; - -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - -import edu.uci.ics.jung.graph.DirectedGraph; - - -/** - * Algorithm variant of PageRankWithPriors that computes the importance of a node based upon taking fixed-length random - * walks out from the root set and then computing the stationary probability of being at each node. Specifically, it computes - * the relative probability that the markov chain will spend at any particular node, given that it start in the root - * set and ends after k steps. - *

      - * A simple example of usage is:
      - *

      - * KStepMarkov ranker = new KStepMarkov(someGraph,rootSet,6,null);
      - * ranker.evaluate();
      - * ranker.printRankings();
      - * 
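A hedged, self-contained sketch of that call follows: the DirectedSparseGraph, the vertex names, and k = 6 are illustrative only, and passing null for the edge-weight map selects the default uniform transition weights described in the constructor documentation.

    import java.util.Collections;
    import java.util.Set;

    import edu.uci.ics.jung.algorithms.importance.KStepMarkov;
    import edu.uci.ics.jung.algorithms.importance.Ranking;
    import edu.uci.ics.jung.graph.DirectedGraph;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;

    public class KStepMarkovDemo {
        public static void main(String[] args) {
            DirectedGraph<String, Integer> g = new DirectedSparseGraph<String, Integer>();
            g.addEdge(0, "root", "a");
            g.addEdge(1, "a", "b");
            g.addEdge(2, "b", "root");

            Set<String> rootSet = Collections.singleton("root");
            // k between 4 and 8 is suggested; null falls back to default edge weights.
            KStepMarkov<String, Integer> ranker =
                    new KStepMarkov<String, Integer>(g, rootSet, 6, null);
            ranker.evaluate();

            for (Ranking<?> r : ranker.getRankings()) {
                System.out.println(r.getRanked() + " scored " + r.rankScore);
            }
        }
    }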
      - *

      - * - * @author Scott White - * @author Tom Nelson - adapter to jung2 - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" - */ -public class KStepMarkov extends RelativeAuthorityRanker { - public final static String RANK_SCORE = "jung.algorithms.importance.KStepMarkovExperimental.RankScore"; - private final static String CURRENT_RANK = "jung.algorithms.importance.KStepMarkovExperimental.CurrentRank"; - private int mNumSteps; - HashMap mPreviousRankingsMap; - - /** - * Construct the algorihm instance and initializes the algorithm. - * @param graph the graph to be analyzed - * @param priors the set of root nodes - * @param k positive integer parameter which controls the relative tradeoff between a distribution "biased" towards - * R and the steady-state distribution which is independent of where the Markov-process started. Generally values - * between 4-8 are reasonable - * @param edgeWeights the weight for each edge - */ - public KStepMarkov(DirectedGraph graph, Set priors, int k, Map edgeWeights) { - super.initialize(graph,true,false); - mNumSteps = k; - setPriors(priors); - initializeRankings(); - if (edgeWeights == null) { - assignDefaultEdgeTransitionWeights(); - } else { - setEdgeWeights(edgeWeights); - } - normalizeEdgeTransitionWeights(); - } - - /** - * The user datum key used to store the rank scores. - * @return the key - */ - @Override - public String getRankScoreKey() { - return RANK_SCORE; - } - - protected void incrementRankScore(V v, double rankValue) { - double value = getVertexRankScore(v, RANK_SCORE); - value += rankValue; - setVertexRankScore(v, value, RANK_SCORE); - } - - protected double getCurrentRankScore(V v) { - return getVertexRankScore(v, CURRENT_RANK); - } - - protected void setCurrentRankScore(V v, double rankValue) { - setVertexRankScore(v, rankValue, CURRENT_RANK); - } - - protected void initializeRankings() { - mPreviousRankingsMap = new HashMap(); - for (V v : getVertices()) { - Set priors = getPriors(); - double numPriors = priors.size(); - - if (getPriors().contains(v)) { - setVertexRankScore(v, 1.0/ numPriors); - setCurrentRankScore(v, 1.0/ numPriors); - mPreviousRankingsMap.put(v,1.0/numPriors); - } else { - setVertexRankScore(v, 0); - setCurrentRankScore(v, 0); - mPreviousRankingsMap.put(v, 0); - } - } - } - @Override - public void step() { - - for (int i=0;i incomingEdges = getGraph().getInEdges(v); - - double currentPageRankSum = 0; - for (E e : incomingEdges) { - double currentWeight = getEdgeWeight(e); - currentPageRankSum += - mPreviousRankingsMap.get(getGraph().getOpposite(v,e)).doubleValue()*currentWeight; - } - setCurrentRankScore(v,currentPageRankSum); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/MarkovCentrality.java b/gui/jung-src/edu/uci/ics/jung/algorithms/importance/MarkovCentrality.java deleted file mode 100644 index e84e63bc..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/MarkovCentrality.java +++ /dev/null @@ -1,118 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. 
-*/ -package edu.uci.ics.jung.algorithms.importance; - -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.BidiMap; -import org.apache.commons.collections15.functors.MapTransformer; - -import cern.colt.matrix.DoubleMatrix1D; -import cern.colt.matrix.DoubleMatrix2D; -import cern.colt.matrix.impl.DenseDoubleMatrix1D; -import cern.colt.matrix.impl.SparseDoubleMatrix1D; -import edu.uci.ics.jung.algorithms.matrix.GraphMatrixOperations; -import edu.uci.ics.jung.algorithms.scoring.PageRank; -import edu.uci.ics.jung.algorithms.util.Indexer; -import edu.uci.ics.jung.graph.DirectedGraph; - -/** - * @author Scott White and Joshua O'Madadhain - * @author Tom Nelson - adapted to jung2 - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" - */ -public class MarkovCentrality extends RelativeAuthorityRanker { - public final static String MEAN_FIRST_PASSAGE_TIME = "jung.algorithms.importance.mean_first_passage_time"; - private DoubleMatrix1D mRankings; - private BidiMap mIndexer; - - public MarkovCentrality(DirectedGraph graph, Set rootNodes) { - this(graph,rootNodes,null); - } - - public MarkovCentrality(DirectedGraph graph, Set rootNodes, Map edgeWeightKey) { - super.initialize(graph, true, false); - setPriors(rootNodes); - if (edgeWeightKey == null) - assignDefaultEdgeTransitionWeights(); - else - setEdgeWeights(edgeWeightKey); - normalizeEdgeTransitionWeights(); - - mIndexer = Indexer.create(graph.getVertices()); - mRankings = new SparseDoubleMatrix1D(graph.getVertexCount()); - } - - /** - * @see edu.uci.ics.jung.algorithms.importance.AbstractRanker#getRankScoreKey() - */ - @Override - public String getRankScoreKey() { - return MEAN_FIRST_PASSAGE_TIME; - } - - /** - * @see edu.uci.ics.jung.algorithms.importance.AbstractRanker#getVertexRankScore(Object) - */ - @Override - public double getVertexRankScore(V vert) { - return mRankings.get(mIndexer.get(vert)); - } - - /** - * @see edu.uci.ics.jung.algorithms.util.IterativeProcess#step() - */ - @Override - public void step() { - DoubleMatrix2D mFPTMatrix = GraphMatrixOperations.computeMeanFirstPassageMatrix(getGraph(), getEdgeWeights(), getStationaryDistribution()); - - mRankings.assign(0); - - for (V p : getPriors()) { - int p_id = mIndexer.get(p); - for (V v : getVertices()) { - int v_id = mIndexer.get(v); - mRankings.set(v_id, mRankings.get(v_id) + mFPTMatrix.get(p_id, v_id)); - } - } - - for (V v : getVertices()) { - int v_id = mIndexer.get(v); - mRankings.set(v_id, 1 / (mRankings.get(v_id) / getPriors().size())); - } - - double total = mRankings.zSum(); - - for (V v : getVertices()) { - int v_id = mIndexer.get(v); - mRankings.set(v_id, mRankings.get(v_id) / total); - } - } - - - /** - * Loads the stationary distribution into a vector if it was passed in, - * or calculates it if not. 
- * - * @return DoubleMatrix1D - */ - private DoubleMatrix1D getStationaryDistribution() { - DoubleMatrix1D piVector = new DenseDoubleMatrix1D(getVertexCount()); - PageRank pageRank = new PageRank(getGraph(), - MapTransformer.getInstance(getEdgeWeights()), 0); - pageRank.evaluate(); - - for (V v : getGraph().getVertices()) - piVector.set(mIndexer.get(v), pageRank.getVertexScore(v)); - return piVector; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/RandomWalkBetweenness.java b/gui/jung-src/edu/uci/ics/jung/algorithms/importance/RandomWalkBetweenness.java deleted file mode 100644 index e6bd396e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/RandomWalkBetweenness.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.importance; - -import edu.uci.ics.jung.graph.UndirectedGraph; - - -/** - * Computes betweenness centrality for each vertex in the graph. The betweenness values in this case - * are based on random walks, measuring the expected number of times a node is traversed by a random walk - * averaged over all pairs of nodes. The result is that each vertex has a UserData element of type - * MutableDouble whose key is 'centrality.RandomWalkBetweennessCentrality' - * - * A simple example of usage is:
      - * RandomWalkBetweenness ranker = new RandomWalkBetweenness(someGraph);
      - * ranker.evaluate();
      - * ranker.printRankings();
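The same pattern, made self-contained under the assumption of an UndirectedSparseGraph with String vertices and Integer edge ids (this ranker only accepts undirected graphs):

    import edu.uci.ics.jung.algorithms.importance.RandomWalkBetweenness;
    import edu.uci.ics.jung.graph.UndirectedGraph;
    import edu.uci.ics.jung.graph.UndirectedSparseGraph;

    public class RandomWalkBetweennessDemo {
        public static void main(String[] args) {
            UndirectedGraph<String, Integer> g = new UndirectedSparseGraph<String, Integer>();
            g.addEdge(0, "a", "b");
            g.addEdge(1, "b", "c");
            g.addEdge(2, "c", "a");
            g.addEdge(3, "c", "d");

            RandomWalkBetweenness<String, Integer> ranker =
                    new RandomWalkBetweenness<String, Integer>(g);
            ranker.evaluate();
            ranker.printRankings(true, true); // rank, score, and the ranked vertex
        }
    }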

      - * - * Running time is: O((m+n)*n^2). - * @see "Mark Newman: A measure of betweenness centrality based on random walks, 2002." - - * @author Scott White - */ -public class RandomWalkBetweenness extends RandomWalkSTBetweenness { - - public static final String CENTRALITY = "centrality.RandomWalkBetweennessCentrality"; - - /** - * Constructor which initializes the algorithm - * @param g the graph whose nodes are to be analyzed - */ - public RandomWalkBetweenness(UndirectedGraph g) { - super(g,null,null); - } - - @Override - protected void computeBetweenness() { - setUp(); - - int numVertices = getGraph().getVertexCount(); - double normalizingConstant = numVertices*(numVertices-1)/2.0; - - for (V ithVertex : getGraph().getVertices()) { - - double ithBetweenness = 0; - for (int t=0;t - * RandomWalkSTBetweenness ranker = new RandomWalkBetweenness(someGraph,someSource,someTarget);
      - * ranker.evaluate();
      - * ranker.printRankings();
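Besides the evaluate()/printRankings() pattern, the class exposes computeSTBetweenness for a single intermediate vertex. A small sketch under the same graph assumptions, with illustrative vertex names:

    import edu.uci.ics.jung.algorithms.importance.RandomWalkSTBetweenness;
    import edu.uci.ics.jung.graph.UndirectedGraph;
    import edu.uci.ics.jung.graph.UndirectedSparseGraph;

    public class StBetweennessDemo {
        public static void main(String[] args) {
            UndirectedGraph<String, Integer> g = new UndirectedSparseGraph<String, Integer>();
            g.addEdge(0, "s", "v");
            g.addEdge(1, "v", "t");
            g.addEdge(2, "s", "t");

            RandomWalkSTBetweenness<String, Integer> ranker =
                    new RandomWalkSTBetweenness<String, Integer>(g, "s", "t");
            // Expected random-walk current through "v" on walks from "s" to "t".
            System.out.println(ranker.computeSTBetweenness("v", "s", "t"));
        }
    }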

      - * - * Running time is: O(n^3). - * @see "Mark Newman: A measure of betweenness centrality based on random walks, 2002." - - * @author Scott White - */ -public class RandomWalkSTBetweenness extends AbstractRanker { - - public static final String CENTRALITY = "centrality.RandomWalkSTBetweennessCentrality"; - private DoubleMatrix2D mVoltageMatrix; - private BidiMap mIndexer; - V mSource; - V mTarget; - - /** - * Constructor which initializes the algorithm - * @param g the graph whose nodes are to be analyzed - * @param s the source vertex - * @param t the target vertex - */ - public RandomWalkSTBetweenness(UndirectedGraph g, V s, V t) { - initialize(g, true, false); - mSource = s; - mTarget = t; - } - - protected BidiMap getIndexer() { - return mIndexer; - } - - protected DoubleMatrix2D getVoltageMatrix() { - return mVoltageMatrix; - } - - protected void setUp() { - mVoltageMatrix = GraphMatrixOperations.computeVoltagePotentialMatrix((UndirectedGraph) getGraph()); - mIndexer = Indexer.create(getGraph().getVertices()); - } - - protected void computeBetweenness() { - setUp(); - - for (V v : getGraph().getVertices()) { - setVertexRankScore(v,computeSTBetweenness(v,mSource, mTarget)); - } - } - - public double computeSTBetweenness(V ithVertex, V source, V target) { - if (ithVertex == source || ithVertex == target) return 1; - if (mVoltageMatrix == null) { - setUp(); - } - int i = mIndexer.get(ithVertex); - int s = mIndexer.get(source); - int t = mIndexer.get(target); - - double betweenness = 0; - for (V jthVertex : getGraph().getSuccessors(ithVertex)) { - int j = mIndexer.get(jthVertex); - double currentFlow = 0; - currentFlow += mVoltageMatrix.get(i,s); - currentFlow -= mVoltageMatrix.get(i,t); - currentFlow -= mVoltageMatrix.get(j,s); - currentFlow += mVoltageMatrix.get(j,t); - betweenness += Math.abs(currentFlow); - } - return betweenness/2.0; - } - - /** - * the user datum key used to store the rank scores - * @return the key - */ - @Override - public String getRankScoreKey() { - return CENTRALITY; - } - - @Override - public void step() { - computeBetweenness(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/Ranking.java b/gui/jung-src/edu/uci/ics/jung/algorithms/importance/Ranking.java deleted file mode 100644 index 80d91b3a..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/Ranking.java +++ /dev/null @@ -1,76 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.importance; - - -/** - * Abstract data container for ranking objects. Stores common data relevant to both node and edge rankings, namely, - * the original position of the instance in the list and the actual ranking score. 
- * @author Scott White - */ -public class Ranking implements Comparable { - /** - * The original (0-indexed) position of the instance being ranked - */ - public int originalPos; - /** - * The actual rank score (normally between 0 and 1) - */ - public double rankScore; - - /** - * what is being ranked - */ - private V ranked; - - /** - * Constructor which allows values to be set on construction - * @param originalPos The original (0-indexed) position of the instance being ranked - * @param rankScore The actual rank score (normally between 0 and 1) - */ - public Ranking(int originalPos, double rankScore, V ranked) { - this.originalPos = originalPos; - this.rankScore = rankScore; - this.ranked = ranked; - } - - /** - * Compares two ranking based on the rank score. - * @param o The other ranking - * @return -1 if the other ranking is higher, 0 if they are equal, and 1 if this ranking is higher - */ - public int compareTo(Ranking otherRanking) { - - return Double.compare(otherRanking.rankScore,rankScore); - } - - /** - * Returns the rank score as a string. - * @return the stringified rank score - */ - @Override - public String toString() { - return String.valueOf(rankScore); - } - - /** - * @return the ranked - */ - public V getRanked() { - return ranked; - } - - /** - * @param ranked the ranked to set - */ - public void setRanked(V ranked) { - this.ranked = ranked; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/RelativeAuthorityRanker.java b/gui/jung-src/edu/uci/ics/jung/algorithms/importance/RelativeAuthorityRanker.java deleted file mode 100644 index b40ba8d4..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/RelativeAuthorityRanker.java +++ /dev/null @@ -1,73 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.importance; - -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - - -/** - * This class provides basic infrastructure for relative authority algorithms that compute the importance of nodes - * relative to one or more root nodes. The services provided are: - *

      • The set of root nodes (priors) is stored and maintained
      • Getters and setters for the prior rank score are provided
      - * - * @author Scott White - */ -public abstract class RelativeAuthorityRanker extends AbstractRanker { - private Set mPriors; - /** - * The default key used for the user datum key corresponding to prior rank scores. - */ - - protected Map priorRankScoreMap = new HashMap(); - /** - * Cleans up all of the prior rank scores on finalize. - */ - @Override - protected void finalizeIterations() { - super.finalizeIterations(); - priorRankScoreMap.clear(); - } - - /** - * Retrieves the value of the prior rank score. - * @param v the root node (prior) - * @return the prior rank score - */ - protected double getPriorRankScore(V v) { - return priorRankScoreMap.get(v).doubleValue(); - - } - - /** - * Allows the user to specify a value to set for the prior rank score - * @param v the root node (prior) - * @param value the score to set to - */ - public void setPriorRankScore(V v, double value) { - this.priorRankScoreMap.put(v, value); - } - - /** - * Retrieves the set of priors. - * @return the set of root nodes (priors) - */ - protected Set getPriors() { return mPriors; } - - /** - * Specifies which vertices are root nodes (priors). - * @param priors the root nodes - */ - protected void setPriors(Set priors) { mPriors = priors; } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/WeightedNIPaths.java b/gui/jung-src/edu/uci/ics/jung/algorithms/importance/WeightedNIPaths.java deleted file mode 100644 index bd715ce7..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/importance/WeightedNIPaths.java +++ /dev/null @@ -1,194 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.importance; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.DirectedGraph; - - - -/** - * This algorithm measures the importance of nodes based upon both the number and length of disjoint paths that lead - * to a given node from each of the nodes in the root set. Specifically the formula for measuring the importance of a - * node is given by: I(t|R) = sum_i=1_|P(r,t)|_{alpha^|p_i|} where alpha is the path decay coefficient, p_i is path i - * and P(r,t) is a set of maximum-sized node-disjoint paths from r to t. - *

      - * This algorithm uses heuristic breadth-first search to try and find the maximum-sized set of node-disjoint paths
      - * between two nodes. As such, it is not guaranteed to give exact answers.
      - *

      - * A simple example of usage is:
      - *

      - * WeightedNIPaths ranker = new WeightedNIPaths(someGraph,2.0,6,rootSet);
      - * ranker.evaluate();
      - * ranker.printRankings();
      - * 
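The constructor in the class below additionally takes vertex and edge factories (it temporarily inserts a virtual vertex and edge during the search), so the snippet above does not match the actual signature. A hedged sketch with Integer vertices and edges, where the factories, root set, and ids are illustrative:

    import java.util.Collections;
    import java.util.Set;

    import org.apache.commons.collections15.Factory;

    import edu.uci.ics.jung.algorithms.importance.WeightedNIPaths;
    import edu.uci.ics.jung.graph.DirectedGraph;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;

    public class WeightedNIPathsDemo {
        public static void main(String[] args) {
            DirectedGraph<Integer, Integer> g = new DirectedSparseGraph<Integer, Integer>();
            g.addEdge(100, 1, 2);
            g.addEdge(101, 2, 3);
            g.addEdge(102, 1, 3);

            // Factories hand out fresh ids for the temporary virtual vertex/edge.
            Factory<Integer> vertexFactory = new Factory<Integer>() {
                private int next = 1000;
                public Integer create() { return next++; }
            };
            Factory<Integer> edgeFactory = new Factory<Integer>() {
                private int next = 2000;
                public Integer create() { return next++; }
            };

            Set<Integer> rootSet = Collections.singleton(1);
            WeightedNIPaths<Integer, Integer> ranker = new WeightedNIPaths<Integer, Integer>(
                    g, vertexFactory, edgeFactory, 2.0, 6, rootSet);
            ranker.evaluate();
            ranker.printRankings(true, true);
        }
    }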
      - * - * @author Scott White - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" - */ -public class WeightedNIPaths extends AbstractRanker { - public final static String WEIGHTED_NIPATHS_KEY = "jung.algorithms.importance.WEIGHTED_NIPATHS_KEY"; - private double mAlpha; - private int mMaxDepth; - private Set mPriors; - private Map pathIndices = new HashMap(); - private Map roots = new HashMap(); - private Map> pathsSeenMap = new HashMap>(); - private Factory vertexFactory; - private Factory edgeFactory; - - /** - * Constructs and initializes the algorithm. - * @param graph the graph whose nodes are being measured for their importance - * @param alpha the path decay coefficient (>= 1); 2 is recommended - * @param maxDepth the maximal depth to search out from the root set - * @param priors the root set (starting vertices) - */ - public WeightedNIPaths(DirectedGraph graph, Factory vertexFactory, - Factory edgeFactory, double alpha, int maxDepth, Set priors) { - super.initialize(graph, true,false); - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - mAlpha = alpha; - mMaxDepth = maxDepth; - mPriors = priors; - for (V v : graph.getVertices()) { - super.setVertexRankScore(v, 0.0); - } - } - - protected void incrementRankScore(V v, double rankValue) { - setVertexRankScore(v, getVertexRankScore(v) + rankValue); - } - - protected void computeWeightedPathsFromSource(V root, int depth) { - - int pathIdx = 1; - - for (E e : getGraph().getOutEdges(root)) { - this.pathIndices.put(e, pathIdx); - this.roots.put(e, root); - newVertexEncountered(pathIdx, getGraph().getEndpoints(e).getSecond(), root); - pathIdx++; - } - - List edges = new ArrayList(); - - V virtualNode = vertexFactory.create(); - getGraph().addVertex(virtualNode); - E virtualSinkEdge = edgeFactory.create(); - - getGraph().addEdge(virtualSinkEdge, virtualNode, root); - edges.add(virtualSinkEdge); - - int currentDepth = 0; - while (currentDepth <= depth) { - - double currentWeight = Math.pow(mAlpha, -1.0 * currentDepth); - for (E currentEdge : edges) { - incrementRankScore(getGraph().getEndpoints(currentEdge).getSecond(),// - currentWeight); - } - - if ((currentDepth == depth) || (edges.size() == 0)) break; - - List newEdges = new ArrayList(); - - for (E currentSourceEdge : edges) { //Iterator sourceEdgeIt = edges.iterator(); sourceEdgeIt.hasNext();) { - Number sourcePathIndex = this.pathIndices.get(currentSourceEdge); - - // from the currentSourceEdge, get its opposite end - // then iterate over the out edges of that opposite end - V newDestVertex = getGraph().getEndpoints(currentSourceEdge).getSecond(); - Collection outs = getGraph().getOutEdges(newDestVertex); - for (E currentDestEdge : outs) { - V destEdgeRoot = this.roots.get(currentDestEdge); - V destEdgeDest = getGraph().getEndpoints(currentDestEdge).getSecond(); - - if (currentSourceEdge == virtualSinkEdge) { - newEdges.add(currentDestEdge); - continue; - } - if (destEdgeRoot == root) { - continue; - } - if (destEdgeDest == getGraph().getEndpoints(currentSourceEdge).getFirst()) {//currentSourceEdge.getSource()) { - continue; - } - Set pathsSeen = this.pathsSeenMap.get(destEdgeDest); - - if (pathsSeen == null) { - newVertexEncountered(sourcePathIndex.intValue(), destEdgeDest, root); - } else if (roots.get(destEdgeDest) != root) { - roots.put(destEdgeDest,root); - pathsSeen.clear(); - pathsSeen.add(sourcePathIndex); - } else if (!pathsSeen.contains(sourcePathIndex)) { - pathsSeen.add(sourcePathIndex); - } 
else { - continue; - } - - this.pathIndices.put(currentDestEdge, sourcePathIndex); - this.roots.put(currentDestEdge, root); - newEdges.add(currentDestEdge); - } - } - - edges = newEdges; - currentDepth++; - } - - getGraph().removeVertex(virtualNode); - } - - private void newVertexEncountered(int sourcePathIndex, V dest, V root) { - Set pathsSeen = new HashSet(); - pathsSeen.add(sourcePathIndex); - this.pathsSeenMap.put(dest, pathsSeen); - roots.put(dest, root); - } - - @Override - public void step() { - for (V v : mPriors) { - computeWeightedPathsFromSource(v, mMaxDepth); - } - - normalizeRankings(); -// return 0; - } - - /** - * Given a node, returns the corresponding rank score. This implementation of getRankScore assumes - * the decoration representing the rank score is of type MutableDouble. - * @return the rank score for this node - */ - @Override - public String getRankScoreKey() { - return WEIGHTED_NIPATHS_KEY; - } - - @Override - protected void onFinalize(Object udc) { - pathIndices.remove(udc); - roots.remove(udc); - pathsSeenMap.remove(udc); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/AbstractLayout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/AbstractLayout.java deleted file mode 100644 index b59dcfaa..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/AbstractLayout.java +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 7, 2003 - * - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ChainedTransformer; -import org.apache.commons.collections15.functors.CloneTransformer; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.graph.Graph; - -/** - * Abstract class for implementations of {@code Layout}. It handles some of the - * basic functions: storing coordinates, maintaining the dimensions, initializing - * the locations, maintaining locked vertices. - * - * @author Danyel Fisher, Scott White - * @author Tom Nelson - converted to jung2 - * @param the vertex type - * @param the edge type - */ -abstract public class AbstractLayout implements Layout { - - /** - * a set of vertices that should not move in relation to the - * other vertices - */ - private Set dontmove = new HashSet(); - - protected Dimension size; - protected Graph graph; - protected boolean initialized; - - protected Map locations = - LazyMap.decorate(new HashMap(), - new Transformer() { - public Point2D transform(V arg0) { - return new Point2D.Double(); - }}); - - - /** - * Creates an instance which does not initialize the vertex locations. - * - * @param graph the graph for which the layout algorithm is to be created. 
- */ - protected AbstractLayout(Graph graph) { - if (graph == null) - { - throw new IllegalArgumentException("Graph must be non-null"); - } - this.graph = graph; - } - - @SuppressWarnings("unchecked") - protected AbstractLayout(Graph graph, Transformer initializer) { - this.graph = graph; - Transformer chain = - ChainedTransformer.getInstance(initializer, CloneTransformer.getInstance()); - this.locations = LazyMap.decorate(new HashMap(), (Transformer)chain); - initialized = true; - } - - protected AbstractLayout(Graph graph, Dimension size) { - this.graph = graph; - this.size = size; - } - - @SuppressWarnings("unchecked") - protected AbstractLayout(Graph graph, Transformer initializer, Dimension size) { - this.graph = graph; - Transformer chain = - ChainedTransformer.getInstance(initializer, CloneTransformer.getInstance()); - this.locations = LazyMap.decorate(new HashMap(), (Transformer)chain); - this.size = size; - } - - public void setGraph(Graph graph) { - this.graph = graph; - if(size != null && graph != null) { - initialize(); - } - } - - /** - * When a visualization is resized, it presumably wants to fix the - * locations of the vertices and possibly to reinitialize its data. The - * current method calls initializeLocations followed by initialize_local. - */ - public void setSize(Dimension size) { - - if(size != null && graph != null) { - - Dimension oldSize = this.size; - this.size = size; - initialize(); - - if(oldSize != null) { - adjustLocations(oldSize, size); - } - } - } - - private void adjustLocations(Dimension oldSize, Dimension size) { - - int xOffset = (size.width - oldSize.width) / 2; - int yOffset = (size.height - oldSize.height) / 2; - - // now, move each vertex to be at the new screen center - while(true) { - try { - for(V v : getGraph().getVertices()) { - offsetVertex(v, xOffset, yOffset); - } - break; - } catch(ConcurrentModificationException cme) { - } - } - } - - public boolean isLocked(V v) { - return dontmove.contains(v); - } - - @SuppressWarnings("unchecked") - public void setInitializer(Transformer initializer) { - if(this.equals(initializer)) { - throw new IllegalArgumentException("Layout cannot be initialized with itself"); - } - Transformer chain = - ChainedTransformer.getInstance(initializer, CloneTransformer.getInstance()); - this.locations = LazyMap.decorate(new HashMap(), (Transformer)chain); - initialized = true; - } - - /** - * Returns the current size of the visualization space, accoring to the - * last call to resize(). - * - * @return the current size of the screen - */ - public Dimension getSize() { - return size; - } - - /** - * Returns the Coordinates object that stores the vertex' x and y location. - * - * @param v - * A Vertex that is a part of the Graph being visualized. - * @return A Coordinates object with x and y locations. - */ - private Point2D getCoordinates(V v) { - return locations.get(v); - } - - public Point2D transform(V v) { - return getCoordinates(v); - } - - /** - * Returns the x coordinate of the vertex from the Coordinates object. - * in most cases you will be better off calling transform(v). - */ - public double getX(V v) { - assert getCoordinates(v) != null : "Cannot getX for an unmapped vertex "+v; - return getCoordinates(v).getX(); - } - - /** - * Returns the y coordinate of the vertex from the Coordinates object. - * In most cases you will be better off calling transform(v). 
- */ - public double getY(V v) { - assert getCoordinates(v) != null : "Cannot getY for an unmapped vertex "+v; - return getCoordinates(v).getY(); - } - - /** - * @param v - * @param xOffset - * @param yOffset - */ - protected void offsetVertex(V v, double xOffset, double yOffset) { - Point2D c = getCoordinates(v); - c.setLocation(c.getX()+xOffset, c.getY()+yOffset); - setLocation(v, c); - } - - /** - * Accessor for the graph that represets all vertices. - * - * @return the graph that contains all vertices. - */ - public Graph getGraph() { - return graph; - } - - /** - * Forcibly moves a vertex to the (x,y) location by setting its x and y - * locations to the inputted location. Does not add the vertex to the - * "dontmove" list, and (in the default implementation) does not make any - * adjustments to the rest of the graph. - */ - public void setLocation(V picked, double x, double y) { - Point2D coord = getCoordinates(picked); - coord.setLocation(x, y); - } - - public void setLocation(V picked, Point2D p) { - Point2D coord = getCoordinates(picked); - coord.setLocation(p); - } - - /** - * Locks {@code v} in place if {@code state} is {@code true}, otherwise unlocks it. - */ - public void lock(V v, boolean state) { - if(state == true) - dontmove.add(v); - else - dontmove.remove(v); - } - - /** - * Locks all vertices in place if {@code lock} is {@code true}, otherwise unlocks all vertices. - */ - public void lock(boolean lock) { - for(V v : graph.getVertices()) { - lock(v, lock); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/AggregateLayout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/AggregateLayout.java deleted file mode 100644 index 38058377..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/AggregateLayout.java +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - * - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; - -/** - * A {@code Layout} implementation that combines - * multiple other layouts so that they may be manipulated - * as one layout. The relaxer thread will step each layout - * in sequence. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param the vertex type - * @param the edge type - */ -public class AggregateLayout implements Layout, IterativeContext { - - protected Layout delegate; - protected Map,Point2D> layouts = new HashMap,Point2D>(); - - /** - * Creates an instance backed by the specified {@code delegate}. 
- * @param delegate - */ - public AggregateLayout(Layout delegate) { - this.delegate = delegate; - } - - /** - * @return the delegate - */ - public Layout getDelegate() { - return delegate; - } - - /** - * @param delegate the delegate to set - */ - public void setDelegate(Layout delegate) { - this.delegate = delegate; - } - - /** - * adds the passed layout as a sublayout, also specifying - * the center of where this sublayout should appear - * @param layout - * @param center - */ - public void put(Layout layout, Point2D center) { - layouts.put(layout,center); - } - - /** - * Returns the center of the passed layout. - * @param layout - * @return the center of the passed layout - */ - public Point2D get(Layout layout) { - return layouts.get(layout); - } - - /** - * Removes {@code layout} from this instance. - */ - public void remove(Layout layout) { - layouts.remove(layout); - } - - /** - * Removes all layouts from this instance. - */ - public void removeAll() { - layouts.clear(); - } - - /** - * Returns the graph for which this layout is defined. - * @return the graph for which this layout is defined - * @see edu.uci.ics.jung.algorithms.layout.Layout#getGraph() - */ - public Graph getGraph() { - return delegate.getGraph(); - } - - /** - * Returns the size of the underlying layout. - * @return the size of the underlying layout - * @see edu.uci.ics.jung.algorithms.layout.Layout#getSize() - */ - public Dimension getSize() { - return delegate.getSize(); - } - - /** - * - * @see edu.uci.ics.jung.algorithms.layout.Layout#initialize() - */ - public void initialize() { - delegate.initialize(); - for(Layout layout : layouts.keySet()) { - layout.initialize(); - } - } - - /** - * Override to test if the passed vertex is locked in - * any of the layouts. - * @param v - * @return true if v is locked in any of the layouts, and false otherwise - * @see edu.uci.ics.jung.algorithms.layout.Layout#isLocked(java.lang.Object) - */ - public boolean isLocked(V v) { - boolean locked = false; - for(Layout layout : layouts.keySet()) { - locked |= layout.isLocked(v); - } - locked |= delegate.isLocked(v); - return locked; - } - - /** - * override to lock or unlock this vertex in any layout with - * a subgraph containing it - * @param v - * @param state - * @see edu.uci.ics.jung.algorithms.layout.Layout#lock(java.lang.Object, boolean) - */ - public void lock(V v, boolean state) { - for(Layout layout : layouts.keySet()) { - if(layout.getGraph().getVertices().contains(v)) { - layout.lock(v, state); - } - } - delegate.lock(v, state); - } - - /** - * - * @see edu.uci.ics.jung.algorithms.layout.Layout#reset() - */ - public void reset() { - for(Layout layout : layouts.keySet()) { - layout.reset(); - } - delegate.reset(); - } - - /** - * @param graph - * @see edu.uci.ics.jung.algorithms.layout.Layout#setGraph(edu.uci.ics.jung.graph.Graph) - */ - public void setGraph(Graph graph) { - delegate.setGraph(graph); - } - - /** - * @param initializer - * @see edu.uci.ics.jung.algorithms.layout.Layout#setInitializer(org.apache.commons.collections15.Transformer) - */ - public void setInitializer(Transformer initializer) { - delegate.setInitializer(initializer); - } - - /** - * @param v - * @param location - * @see edu.uci.ics.jung.algorithms.layout.Layout#setLocation(java.lang.Object, java.awt.geom.Point2D) - */ - public void setLocation(V v, Point2D location) { - boolean wasInSublayout = false; - for(Layout layout : layouts.keySet()) { - if(layout.getGraph().getVertices().contains(v)) { - Point2D center = layouts.get(layout); - // 
transform by the layout itself, but offset to the - // center of the sublayout - Dimension d = layout.getSize(); - - AffineTransform at = - AffineTransform.getTranslateInstance(-center.getX()+d.width/2,-center.getY()+d.height/2); - Point2D localLocation = at.transform(location, null); - layout.setLocation(v, localLocation); - wasInSublayout = true; - } - } - if(wasInSublayout == false && getGraph().getVertices().contains(v)) { - delegate.setLocation(v, location); - } - } - - /** - * @param d - * @see edu.uci.ics.jung.algorithms.layout.Layout#setSize(java.awt.Dimension) - */ - public void setSize(Dimension d) { - delegate.setSize(d); - } - - /** - * Returns a map from each {@code Layout} instance to its center point. - */ - public Map,Point2D> getLayouts() { - return layouts; - } - - /** - * Returns the location of the vertex. The location is specified first - * by the sublayouts, and then by the base layout if no sublayouts operate - * on this vertex. - * @return the location of the vertex - * @see org.apache.commons.collections15.Transformer#transform(java.lang.Object) - */ - public Point2D transform(V v) { - boolean wasInSublayout = false; - for(Layout layout : layouts.keySet()) { - if(layout.getGraph().getVertices().contains(v)) { - wasInSublayout = true; - Point2D center = layouts.get(layout); - // transform by the layout itself, but offset to the - // center of the sublayout - Dimension d = layout.getSize(); - AffineTransform at = - AffineTransform.getTranslateInstance(center.getX()-d.width/2, - center.getY()-d.height/2); - return at.transform(layout.transform(v),null); - } - } - if(wasInSublayout == false) { - return delegate.transform(v); - } - return null; - - } - - /** - * Check all sublayouts.keySet() and the delegate layout, returning - * done == true iff all are done. - */ - public boolean done() { - boolean done = true; - for(Layout layout : layouts.keySet()) { - if(layout instanceof IterativeContext) { - done &= ((IterativeContext)layout).done(); - } - } - if(delegate instanceof IterativeContext) { - done &= ((IterativeContext)delegate).done(); - } - return done; - } - - /** - * call step on any sublayout that is also an IterativeContext - * and is not done - */ - public void step() { - for(Layout layout : layouts.keySet()) { - if(layout instanceof IterativeContext) { - IterativeContext context = (IterativeContext)layout; - if(context.done() == false) { - context.step(); - } - } - } - if(delegate instanceof IterativeContext) { - IterativeContext context = (IterativeContext)delegate; - if(context.done() == false) { - context.step(); - } - } - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/BalloonLayout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/BalloonLayout.java deleted file mode 100644 index 1d9f384a..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/BalloonLayout.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Jul 9, 2005 - */ - -package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.graph.Forest; -import edu.uci.ics.jung.graph.util.TreeUtils; - -/** - * A {@code Layout} implementation that assigns positions to {@code Tree} or - * {@code Forest} vertices using associations with nested circles ("balloons"). - * A balloon is nested inside another balloon if the first balloon's subtree - * is a subtree of the second balloon's subtree. - * - * @author Tom Nelson - * - */ -public class BalloonLayout extends TreeLayout { - - protected Map polarLocations = - LazyMap.decorate(new HashMap(), - new Transformer() { - public PolarPoint transform(V arg0) { - return new PolarPoint(); - }}); - - protected Map radii = new HashMap(); - - /** - * Creates an instance based on the input forest. - */ - public BalloonLayout(Forest g) - { - super(g); - } - - protected void setRootPolars() - { - List roots = TreeUtils.getRoots(graph); - if(roots.size() == 1) { - // its a Tree - V root = roots.get(0); - setRootPolar(root); - setPolars(new ArrayList(graph.getChildren(root)), - getCenter(), getSize().width/2); - } else if (roots.size() > 1) { - // its a Forest - setPolars(roots, getCenter(), getSize().width/2); - } - } - - protected void setRootPolar(V root) { - PolarPoint pp = new PolarPoint(0,0); - Point2D p = getCenter(); - polarLocations.put(root, pp); - locations.put(root, p); - } - - - protected void setPolars(List kids, Point2D parentLocation, double parentRadius) { - - int childCount = kids.size(); - if(childCount == 0) return; - // handle the 1-child case with 0 limit on angle. - double angle = Math.max(0, Math.PI / 2 * (1 - 2.0/childCount)); - double childRadius = parentRadius*Math.cos(angle) / (1 + Math.cos(angle)); - double radius = parentRadius - childRadius; - - double rand = Math.random(); - - for(int i=0; i< childCount; i++) { - V child = kids.get(i); - double theta = i* 2*Math.PI/childCount + rand; - radii.put(child, childRadius); - - PolarPoint pp = new PolarPoint(theta, radius); - polarLocations.put(child, pp); - - Point2D p = PolarPoint.polarToCartesian(pp); - p.setLocation(p.getX()+parentLocation.getX(), p.getY()+parentLocation.getY()); - locations.put(child, p); - setPolars(new ArrayList(graph.getChildren(child)), p, childRadius); - } - } - - @Override - public void setSize(Dimension size) { - this.size = size; - setRootPolars(); - } - - /** - * Returns the coordinates of {@code v}'s parent, or the - * center of this layout's area if it's a root. 
- */ - public Point2D getCenter(V v) { - V parent = graph.getParent(v); - if(parent == null) { - return getCenter(); - } - return locations.get(parent); - } - - @Override - public void setLocation(V v, Point2D location) { - Point2D c = getCenter(v); - Point2D pv = new Point2D.Double(location.getX()-c.getX(),location.getY()-c.getY()); - PolarPoint newLocation = PolarPoint.cartesianToPolar(pv); - polarLocations.get(v).setLocation(newLocation); - - Point2D center = getCenter(v); - pv.setLocation(pv.getX()+center.getX(), pv.getY()+center.getY()); - locations.put(v, pv); - } - - @Override - public Point2D transform(V v) { - return locations.get(v); - } - - /** - * @return the radii - */ - public Map getRadii() { - return radii; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/CircleLayout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/CircleLayout.java deleted file mode 100644 index 8cafb778..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/CircleLayout.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Dec 4, 2003 - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.graph.Graph; - - - -/** - * A {@code Layout} implementation that positions vertices equally spaced on a regular circle. - * - * @author Masanori Harada - */ -public class CircleLayout extends AbstractLayout { - - private double radius; - private List vertex_ordered_list; - - Map circleVertexDataMap = - LazyMap.decorate(new HashMap(), - new Factory() { - public CircleVertexData create() { - return new CircleVertexData(); - }}); - - /** - * Creates an instance for the specified graph. - */ - public CircleLayout(Graph g) { - super(g); - } - - /** - * Returns the radius of the circle. - */ - public double getRadius() { - return radius; - } - - /** - * Sets the radius of the circle. Must be called before - * {@code initialize()} is called. - */ - public void setRadius(double radius) { - this.radius = radius; - } - - /** - * Sets the order of the vertices in the layout according to the ordering - * specified by {@code comparator}. - */ - public void setVertexOrder(Comparator comparator) - { - if (vertex_ordered_list == null) - vertex_ordered_list = new ArrayList(getGraph().getVertices()); - Collections.sort(vertex_ordered_list, comparator); - } - - /** - * Sets the order of the vertices in the layout according to the ordering - * of {@code vertex_list}. 
- */ - public void setVertexOrder(List vertex_list) - { - if (!vertex_list.containsAll(getGraph().getVertices())) - throw new IllegalArgumentException("Supplied list must include " + - "all vertices of the graph"); - this.vertex_ordered_list = vertex_list; - } - - public void reset() { - initialize(); - } - - public void initialize() - { - Dimension d = getSize(); - - if (d != null) - { - if (vertex_ordered_list == null) - setVertexOrder(new ArrayList(getGraph().getVertices())); - - double height = d.getHeight(); - double width = d.getWidth(); - - if (radius <= 0) { - radius = 0.45 * (height < width ? height : width); - } - - int i = 0; - for (V v : vertex_ordered_list) - { - Point2D coord = transform(v); - - double angle = (2 * Math.PI * i) / vertex_ordered_list.size(); - - coord.setLocation(Math.cos(angle) * radius + width / 2, - Math.sin(angle) * radius + height / 2); - - CircleVertexData data = getCircleData(v); - data.setAngle(angle); - i++; - } - } - } - - protected CircleVertexData getCircleData(V v) { - return circleVertexDataMap.get(v); - } - - protected static class CircleVertexData { - private double angle; - - protected double getAngle() { - return angle; - } - - protected void setAngle(double angle) { - this.angle = angle; - } - - @Override - public String toString() { - return "CircleVertexData: angle=" + angle; - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/DAGLayout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/DAGLayout.java deleted file mode 100644 index 97d3ee6b..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/DAGLayout.java +++ /dev/null @@ -1,347 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * - * Created on Dec 4, 2003 - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of {@code Layout} suitable for tree-like directed - * acyclic graphs. Parts of it will probably not terminate if the graph is - * cyclic! The layout will result in directed edges pointing generally upwards. - * Any vertices with no successors are considered to be level 0, and tend - * towards the top of the layout. Any vertex has a level one greater than the - * maximum level of all its successors. - * - * - * @author John Yesberg - */ -public class DAGLayout extends SpringLayout { - - /** - * Each vertex has a minimumLevel. Any vertex with no successors has - * minimumLevel of zero. The minimumLevel of any vertex must be strictly - * greater than the minimumLevel of its parents. (Vertex A is a parent of - * Vertex B iff there is an edge from B to A.) Typically, a vertex will - * have a minimumLevel which is one greater than the minimumLevel of its - * parent's. However, if the vertex has two parents, its minimumLevel will - * be one greater than the maximum of the parents'. We need to calculate - * the minimumLevel for each vertex. When we layout the graph, vertices - * cannot be drawn any higher than the minimumLevel. The graphHeight of a - * graph is the greatest minimumLevel that is used. 
We will modify the - * SpringLayout calculations so that nodes cannot move above their assigned - * minimumLevel. - */ - private Map minLevels = new HashMap(); - // Simpler than the "pair" technique. - static int graphHeight; - static int numRoots; - final double SPACEFACTOR = 1.3; - // How much space do we allow for additional floating at the bottom. - final double LEVELATTRACTIONRATE = 0.8; - - /** - * A bunch of parameters to help work out when to stop quivering. - * - * If the MeanSquareVel(ocity) ever gets below the MSV_THRESHOLD, then we - * will start a final cool-down phase of COOL_DOWN_INCREMENT increments. If - * the MeanSquareVel ever exceeds the threshold, we will exit the cool down - * phase, and continue looking for another opportunity. - */ - final double MSV_THRESHOLD = 10.0; - double meanSquareVel; - boolean stoppingIncrements = false; - int incrementsLeft; - final int COOL_DOWN_INCREMENTS = 200; - - /** - * Creates an instance for the specified graph. - */ - public DAGLayout(Graph g) { - super(g); - } - - /** - * setRoot calculates the level of each vertex in the graph. Level 0 is - * allocated to any vertex with no successors. Level n+1 is allocated to - * any vertex whose successors' maximum level is n. - */ - public void setRoot(Graph g) { - numRoots = 0; - for(V v : g.getVertices()) { - Collection successors = getGraph().getSuccessors(v); - if (successors.size() == 0) { - setRoot(v); - numRoots++; - } - } - } - - /** - * Set vertex v to be level 0. - */ - public void setRoot(V v) { - minLevels.put(v, new Integer(0)); - // set all the levels. - propagateMinimumLevel(v); - } - - /** - * A recursive method for allocating the level for each vertex. Ensures - * that all predecessors of v have a level which is at least one greater - * than the level of v. - * - * @param v - */ - public void propagateMinimumLevel(V v) { - int level = minLevels.get(v).intValue(); - for(V child : getGraph().getPredecessors(v)) { - int oldLevel, newLevel; - Number o = minLevels.get(child); - if (o != null) - oldLevel = o.intValue(); - else - oldLevel = 0; - newLevel = Math.max(oldLevel, level + 1); - minLevels.put(child, new Integer(newLevel)); - - if (newLevel > graphHeight) - graphHeight = newLevel; - propagateMinimumLevel(child); - } - } - - /** - * Sets random locations for a vertex within the dimensions of the space. - * This overrides the method in AbstractLayout - * - * @param coord - * @param d - */ - private void initializeLocation( - V v, - Point2D coord, - Dimension d) { - - int level = minLevels.get(v).intValue(); - int minY = (int) (level * d.getHeight() / (graphHeight * SPACEFACTOR)); - double x = Math.random() * d.getWidth(); - double y = Math.random() * (d.getHeight() - minY) + minY; - coord.setLocation(x,y); - } - - @Override - public void setSize(Dimension size) { - super.setSize(size); - for(V v : getGraph().getVertices()) { - initializeLocation(v,transform(v),getSize()); - } - } - - /** - * Had to override this one as well, to ensure that setRoot() is called. - */ - @Override - public void initialize() { - super.initialize(); - setRoot(getGraph()); - } - - /** - * Override the moveNodes() method from SpringLayout. The only change we - * need to make is to make sure that nodes don't float higher than the minY - * coordinate, as calculated by their minimumLevel. 
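The level rule sketched above can be written down independently of the layout machinery. A minimal standalone sketch, assuming the graph is supplied as a successor map and is acyclic (the class and method names here are illustrative, not part of this file):

    import java.util.Collection;
    import java.util.HashMap;
    import java.util.Map;

    // Sketch of the rule described above: a vertex with no successors has level 0;
    // every other vertex sits one level above the maximum level of its successors.
    final class DagLevelsSketch {
        static <V> Map<V, Integer> levels(Map<V, ? extends Collection<V>> successors) {
            Map<V, Integer> level = new HashMap<V, Integer>();
            for (V v : successors.keySet()) {
                levelOf(v, successors, level);
            }
            return level;
        }

        private static <V> int levelOf(V v, Map<V, ? extends Collection<V>> successors,
                                       Map<V, Integer> level) {
            Integer known = level.get(v);
            if (known != null) {
                return known;                      // already computed (memoised)
            }
            int max = -1;                          // so that sinks end up at level 0
            Collection<V> succ = successors.get(v);
            if (succ != null) {
                for (V s : succ) {
                    max = Math.max(max, levelOf(s, successors, level));
                }
            }
            level.put(v, max + 1);
            return max + 1;
        }
    }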
- */ - @Override - protected void moveNodes() { - // Dimension d = currentSize; - double oldMSV = meanSquareVel; - meanSquareVel = 0; - - synchronized (getSize()) { - - for(V v : getGraph().getVertices()) { - if (isLocked(v)) - continue; - SpringLayout.SpringVertexData vd = springVertexData.get(v); - Point2D xyd = transform(v); - - int width = getSize().width; - int height = getSize().height; - - // (JY addition: three lines are new) - int level = - minLevels.get(v).intValue(); - int minY = (int) (level * height / (graphHeight * SPACEFACTOR)); - int maxY = - level == 0 - ? (int) (height / (graphHeight * SPACEFACTOR * 2)) - : height; - - // JY added 2* - double the sideways repulsion. - vd.dx += 2 * vd.repulsiondx + vd.edgedx; - vd.dy += vd.repulsiondy + vd.edgedy; - - // JY Addition: Attract the vertex towards it's minimumLevel - // height. - double delta = xyd.getY() - minY; - vd.dy -= delta * LEVELATTRACTIONRATE; - if (level == 0) - vd.dy -= delta * LEVELATTRACTIONRATE; - // twice as much at the top. - - // JY addition: - meanSquareVel += (vd.dx * vd.dx + vd.dy * vd.dy); - - // keeps nodes from moving any faster than 5 per time unit - xyd.setLocation(xyd.getX()+Math.max(-5, Math.min(5, vd.dx)) , xyd.getY()+Math.max(-5, Math.min(5, vd.dy)) ); - - if (xyd.getX() < 0) { - xyd.setLocation(0, xyd.getY()); - } else if (xyd.getX() > width) { - xyd.setLocation(width, xyd.getY()); - } - - // (JY addition: These two lines replaced 0 with minY) - if (xyd.getY() < minY) { - xyd.setLocation(xyd.getX(), minY); - // (JY addition: replace height with maxY) - } else if (xyd.getY() > maxY) { - xyd.setLocation(xyd.getX(), maxY); - } - - // (JY addition: if there's only one root, anchor it in the - // middle-top of the screen) - if (numRoots == 1 && level == 0) { - xyd.setLocation(width/2, xyd.getY()); - } - } - } - //System.out.println("MeanSquareAccel="+meanSquareVel); - if (!stoppingIncrements - && Math.abs(meanSquareVel - oldMSV) < MSV_THRESHOLD) { - stoppingIncrements = true; - incrementsLeft = COOL_DOWN_INCREMENTS; - } else if ( - stoppingIncrements - && Math.abs(meanSquareVel - oldMSV) <= MSV_THRESHOLD) { - incrementsLeft--; - if (incrementsLeft <= 0) - incrementsLeft = 0; - } - } - - /** - * Override incrementsAreDone so that we can eventually stop. - */ - @Override - public boolean done() { - if (stoppingIncrements && incrementsLeft == 0) - return true; - else - return false; - } - - /** - * Override forceMove so that if someone moves a node, we can re-layout - * everything. - */ - @Override - public void setLocation(V picked, double x, double y) { - Point2D coord = transform(picked); - coord.setLocation(x,y); - stoppingIncrements = false; - } - - /** - * Override forceMove so that if someone moves a node, we can re-layout - * everything. - */ - @Override - public void setLocation(V picked, Point2D p) { - Point2D coord = transform(picked); - coord.setLocation(p); - stoppingIncrements = false; - } - - /** - * Overridden relaxEdges. This one reduces the effect of edges between - * greatly different levels. - * - */ - @Override - protected void relaxEdges() { - for(E e : getGraph().getEdges()) { - Pair endpoints = getGraph().getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - - Point2D p1 = transform(v1); - Point2D p2 = transform(v2); - double vx = p1.getX() - p2.getX(); - double vy = p1.getY() - p2.getY(); - double len = Math.sqrt(vx * vx + vy * vy); - - // JY addition. 
- int level1 = - minLevels.get(v1).intValue(); - int level2 = - minLevels.get(v2).intValue(); - - // desiredLen *= Math.pow( 1.1, (v1.degree() + v2.degree()) ); -// double desiredLen = getLength(e); - double desiredLen = lengthFunction.transform(e); - - // round from zero, if needed [zero would be Bad.]. - len = (len == 0) ? .0001 : len; - - // force factor: optimal length minus actual length, - // is made smaller as the current actual length gets larger. - // why? - - // System.out.println("Desired : " + getLength( e )); - double f = force_multiplier * (desiredLen - len) / len; - - f = f * Math.pow(stretch / 100.0, - (getGraph().degree(v1) + getGraph().degree(v2) -2)); - - // JY addition. If this is an edge which stretches a long way, - // don't be so concerned about it. - if (level1 != level2) - f = f / Math.pow(Math.abs(level2 - level1), 1.5); - - // f= Math.min( 0, f ); - - // the actual movement distance 'dx' is the force multiplied by the - // distance to go. - double dx = f * vx; - double dy = f * vy; - SpringVertexData v1D, v2D; - v1D = springVertexData.get(v1); - v2D = springVertexData.get(v2); - -// SpringEdgeData sed = getSpringEdgeData(e); -// sed.f = f; - - v1D.edgedx += dx; - v1D.edgedy += dy; - v2D.edgedx += -dx; - v2D.edgedy += -dy; - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/FRLayout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/FRLayout.java deleted file mode 100644 index c8a2a24a..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/FRLayout.java +++ /dev/null @@ -1,333 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout; - -import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.map.LazyMap; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; -import java.util.HashMap; -import java.util.Map; - -/** - * Implements the Fruchterman-Reingold force-directed algorithm for node layout. - * - *

Behavior is determined by the following settable parameters: - *
      • attraction multiplier: how much edges try to keep their vertices together - *
      • repulsion multiplier: how much vertices try to push each other apart - *
      • maximum iterations: how many iterations this algorithm will use before stopping - *
      - * Each of the first two defaults to 0.75; the maximum number of iterations defaults to 700. - * - * @see "Fruchterman and Reingold, 'Graph Drawing by Force-directed Placement'" - * @see "http://i11www.ilkd.uni-karlsruhe.de/teaching/SS_04/visualisierung/papers/fruchterman91graph.pdf" - * @author Scott White, Yan-Biao Boey, Danyel Fisher - */ -public class FRLayout extends AbstractLayout implements IterativeContext { - - private double forceConstant; - - private double temperature; - - private int currentIteration; - - private int mMaxIterations = 700; - - private Map frVertexData = - LazyMap.decorate(new HashMap(), new Factory() { - public FRVertexData create() { - return new FRVertexData(); - }}); - - private double attraction_multiplier = 0.75; - - private double attraction_constant; - - private double repulsion_multiplier = 0.75; - - private double repulsion_constant; - - private double max_dimension; - - /** - * Creates an instance for the specified graph. - */ - public FRLayout(Graph g) { - super(g); - } - - /** - * Creates an instance of size {@code d} for the specified graph. - */ - public FRLayout(Graph g, Dimension d) { - super(g, new RandomLocationTransformer(d), d); - initialize(); - max_dimension = Math.max(d.height, d.width); - } - - @Override - public void setSize(Dimension size) { - if(initialized == false) { - setInitializer(new RandomLocationTransformer(size)); - } - super.setSize(size); - max_dimension = Math.max(size.height, size.width); - } - - /** - * Sets the attraction multiplier. - */ - public void setAttractionMultiplier(double attraction) { - this.attraction_multiplier = attraction; - } - - /** - * Sets the repulsion multiplier. - */ - public void setRepulsionMultiplier(double repulsion) { - this.repulsion_multiplier = repulsion; - } - - public void reset() { - doInit(); - } - - public void initialize() { - doInit(); - } - - private void doInit() { - Graph graph = getGraph(); - Dimension d = getSize(); - if(graph != null && d != null) { - currentIteration = 0; - temperature = d.getWidth() / 10; - - forceConstant = - Math - .sqrt(d.getHeight() - * d.getWidth() - / graph.getVertexCount()); - - attraction_constant = attraction_multiplier * forceConstant; - repulsion_constant = repulsion_multiplier * forceConstant; - } - } - - private double EPSILON = 0.000001D; - - /** - * Moves the iteration forward one notch, calculation attraction and - * repulsion between vertices and edges and cooling the temperature. 
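For reference, the two per-pair force magnitudes that calcAttraction and calcRepulsion compute below can be summarised on their own. A small sketch, where k stands in for the attraction or repulsion constant derived from the force constant sqrt(area / vertexCount); the class name and the sample value are illustrative only:

    // Force magnitudes used by the Fruchterman-Reingold step below.
    final class FrForcesSketch {
        // Attractive force along an edge of current length d: grows quadratically with d.
        static double attraction(double d, double kA) {
            return (d * d) / kA;
        }

        // Repulsive force between any two vertices at distance d: falls off as 1/d.
        static double repulsion(double d, double kR) {
            return (kR * kR) / d;
        }

        public static void main(String[] args) {
            double k = 50.0;                              // e.g. sqrt(area / vertexCount)
            System.out.println(attraction(100.0, k));     // 200.0
            System.out.println(repulsion(100.0, k));      // 25.0
        }
    }

Each force is then applied as a displacement along the unit vector between the two points, which is why the methods below divide by deltaLength before scaling.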
- */ - public synchronized void step() { - currentIteration++; - - /** - * Calculate repulsion - */ - while(true) { - - try { - for(V v1 : getGraph().getVertices()) { - calcRepulsion(v1); - } - break; - } catch(ConcurrentModificationException cme) {} - } - - /** - * Calculate attraction - */ - while(true) { - try { - for(E e : getGraph().getEdges()) { - - calcAttraction(e); - } - break; - } catch(ConcurrentModificationException cme) {} - } - - - while(true) { - try { - for(V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - calcPositions(v); - } - break; - } catch(ConcurrentModificationException cme) {} - } - cool(); - } - - protected synchronized void calcPositions(V v) { - FRVertexData fvd = getFRData(v); - if(fvd == null) return; - Point2D xyd = transform(v); - double deltaLength = Math.max(EPSILON, fvd.norm()); - - double newXDisp = fvd.getX() / deltaLength - * Math.min(deltaLength, temperature); - - if (Double.isNaN(newXDisp)) { - throw new IllegalArgumentException( - "Unexpected mathematical result in FRLayout:calcPositions [xdisp]"); } - - double newYDisp = fvd.getY() / deltaLength - * Math.min(deltaLength, temperature); - xyd.setLocation(xyd.getX()+newXDisp, xyd.getY()+newYDisp); - - double borderWidth = getSize().getWidth() / 50.0; - double newXPos = xyd.getX(); - if (newXPos < borderWidth) { - newXPos = borderWidth + Math.random() * borderWidth * 2.0; - } else if (newXPos > (getSize().getWidth() - borderWidth)) { - newXPos = getSize().getWidth() - borderWidth - Math.random() - * borderWidth * 2.0; - } - - double newYPos = xyd.getY(); - if (newYPos < borderWidth) { - newYPos = borderWidth + Math.random() * borderWidth * 2.0; - } else if (newYPos > (getSize().getHeight() - borderWidth)) { - newYPos = getSize().getHeight() - borderWidth - - Math.random() * borderWidth * 2.0; - } - - xyd.setLocation(newXPos, newYPos); - } - - protected void calcAttraction(E e) { - Pair endpoints = getGraph().getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - boolean v1_locked = isLocked(v1); - boolean v2_locked = isLocked(v2); - - if(v1_locked && v2_locked) { - // both locked, do nothing - return; - } - Point2D p1 = transform(v1); - Point2D p2 = transform(v2); - if(p1 == null || p2 == null) return; - double xDelta = p1.getX() - p2.getX(); - double yDelta = p1.getY() - p2.getY(); - - double deltaLength = Math.max(EPSILON, Math.sqrt((xDelta * xDelta) - + (yDelta * yDelta))); - - double force = (deltaLength * deltaLength) / attraction_constant; - - if (Double.isNaN(force)) { throw new IllegalArgumentException( - "Unexpected mathematical result in FRLayout:calcPositions [force]"); } - - double dx = (xDelta / deltaLength) * force; - double dy = (yDelta / deltaLength) * force; - if(v1_locked == false) { - FRVertexData fvd1 = getFRData(v1); - fvd1.offset(-dx, -dy); - } - if(v2_locked == false) { - FRVertexData fvd2 = getFRData(v2); - fvd2.offset(dx, dy); - } - } - - protected void calcRepulsion(V v1) { - FRVertexData fvd1 = getFRData(v1); - if(fvd1 == null) - return; - fvd1.setLocation(0, 0); - - try { - for(V v2 : getGraph().getVertices()) { - -// if (isLocked(v2)) continue; - if (v1 != v2) { - Point2D p1 = transform(v1); - Point2D p2 = transform(v2); - if(p1 == null || p2 == null) continue; - double xDelta = p1.getX() - p2.getX(); - double yDelta = p1.getY() - p2.getY(); - - double deltaLength = Math.max(EPSILON, Math - .sqrt((xDelta * xDelta) + (yDelta * yDelta))); - - double force = (repulsion_constant * repulsion_constant) / deltaLength; - - if 
(Double.isNaN(force)) { throw new RuntimeException( - "Unexpected mathematical result in FRLayout:calcPositions [repulsion]"); } - - fvd1.offset((xDelta / deltaLength) * force, - (yDelta / deltaLength) * force); - } - } - } catch(ConcurrentModificationException cme) { - calcRepulsion(v1); - } - } - - private void cool() { - temperature *= (1.0 - currentIteration / (double) mMaxIterations); - } - - /** - * Sets the maximum number of iterations. - */ - public void setMaxIterations(int maxIterations) { - mMaxIterations = maxIterations; - } - - protected FRVertexData getFRData(V v) { - return frVertexData.get(v); - } - - /** - * This one is an incremental visualization. - */ - public boolean isIncremental() { - return true; - } - - /** - * Returns true once the current iteration has passed the maximum count, - * MAX_ITERATIONS. - */ - public boolean done() { - if (currentIteration > mMaxIterations || temperature < 1.0/max_dimension) - { - return true; - } - return false; - } - - protected static class FRVertexData extends Point2D.Double - { - protected void offset(double x, double y) - { - this.x += x; - this.y += y; - } - - protected double norm() - { - return Math.sqrt(x*x + y*y); - } - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/FRLayout2.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/FRLayout2.java deleted file mode 100644 index 0f5b05ea..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/FRLayout2.java +++ /dev/null @@ -1,331 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.ConcurrentModificationException; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * Implements the Fruchterman-Reingold force-directed algorithm for node layout. - * This is an experimental attempt at optimizing {@code FRLayout}; if it is successful - * it will be folded back into {@code FRLayout} (and this class will disappear). - * - *

Behavior is determined by the following settable parameters: - *
      • attraction multiplier: how much edges try to keep their vertices together - *
      • repulsion multiplier: how much vertices try to push each other apart - *
      • maximum iterations: how many iterations this algorithm will use before stopping - *
      - * Each of the first two defaults to 0.75; the maximum number of iterations defaults to 700. - - * - * @see "Fruchterman and Reingold, 'Graph Drawing by Force-directed Placement'" - * @see http://i11www.ilkd.uni-karlsruhe.de/teaching/SS_04/visualisierung/papers/fruchterman91graph.pdf - * - * @author Tom Nelson - * @author Scott White, Yan-Biao Boey, Danyel Fisher - */ -public class FRLayout2 extends AbstractLayout implements IterativeContext { - - private double forceConstant; - - private double temperature; - - private int currentIteration; - - private int maxIterations = 700; - - private Map frVertexData = - LazyMap.decorate(new HashMap(), new Factory() { - public Point2D create() { - return new Point2D.Double(); - }}); - - private double attraction_multiplier = 0.75; - - private double attraction_constant; - - private double repulsion_multiplier = 0.75; - - private double repulsion_constant; - - private double max_dimension; - - private Rectangle2D innerBounds = new Rectangle2D.Double(); - - private boolean checked = false; - - /** - * Creates an instance for the specified graph. - */ - public FRLayout2(Graph g) { - super(g); - } - - /** - * Creates an instance of size {@code d} for the specified graph. - */ - public FRLayout2(Graph g, Dimension d) { - super(g, new RandomLocationTransformer(d), d); - max_dimension = Math.max(d.height, d.width); - initialize(); - } - - @Override - public void setSize(Dimension size) { - if(initialized == false) - setInitializer(new RandomLocationTransformer(size)); - super.setSize(size); - double t = size.width/50.0; - innerBounds.setFrameFromDiagonal(t,t,size.width-t,size.height-t); - max_dimension = Math.max(size.height, size.width); - } - - /** - * Sets the attraction multiplier. - */ - public void setAttractionMultiplier(double attraction) { - this.attraction_multiplier = attraction; - } - - /** - * Sets the repulsion multiplier. - */ - public void setRepulsionMultiplier(double repulsion) { - this.repulsion_multiplier = repulsion; - } - - public void reset() { - doInit(); - } - - public void initialize() { - doInit(); - } - - private void doInit() { - Graph graph = getGraph(); - Dimension d = getSize(); - if(graph != null && d != null) { - currentIteration = 0; - temperature = d.getWidth() / 10; - - forceConstant = - Math - .sqrt(d.getHeight() - * d.getWidth() - / graph.getVertexCount()); - - attraction_constant = attraction_multiplier * forceConstant; - repulsion_constant = repulsion_multiplier * forceConstant; - } - } - - private double EPSILON = 0.000001D; - - /** - * Moves the iteration forward one notch, calculation attraction and - * repulsion between vertices and edges and cooling the temperature. 
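The cooling schedule and the stopping test shared with done() can be simulated in isolation. A small sketch using assumed sample dimensions (a 600x600 layout area); the iteration cap and the initial temperature follow the defaults in the code below:

    // Simulates the multiplicative cooling used by this layout: the run stops once
    // the temperature falls below one unit per max dimension or the cap is reached.
    final class CoolingSketch {
        public static void main(String[] args) {
            int maxIterations = 700;
            double maxDimension = 600.0;          // max(width, height)
            double temperature = 600.0 / 10;      // doInit(): width / 10
            int iteration = 0;
            while (iteration <= maxIterations && temperature >= 1.0 / maxDimension) {
                iteration++;
                temperature *= (1.0 - iteration / (double) maxIterations);
            }
            System.out.println("stopped after " + iteration + " iterations, T = " + temperature);
        }
    }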
- */ - public synchronized void step() { - currentIteration++; - - /** - * Calculate repulsion - */ - while(true) { - - try { - for(V v1 : getGraph().getVertices()) { - calcRepulsion(v1); - } - break; - } catch(ConcurrentModificationException cme) {} - } - - /** - * Calculate attraction - */ - while(true) { - try { - for(E e : getGraph().getEdges()) { - calcAttraction(e); - } - break; - } catch(ConcurrentModificationException cme) {} - } - - - while(true) { - try { - for(V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - calcPositions(v); - } - break; - } catch(ConcurrentModificationException cme) {} - } - cool(); - } - - protected synchronized void calcPositions(V v) { - Point2D fvd = this.frVertexData.get(v); - if(fvd == null) return; - Point2D xyd = transform(v); - double deltaLength = Math.max(EPSILON, - Math.sqrt(fvd.getX()*fvd.getX()+fvd.getY()*fvd.getY())); - - double newXDisp = fvd.getX() / deltaLength - * Math.min(deltaLength, temperature); - - assert Double.isNaN(newXDisp) == false : "Unexpected mathematical result in FRLayout:calcPositions [xdisp]"; - - double newYDisp = fvd.getY() / deltaLength - * Math.min(deltaLength, temperature); - double newX = xyd.getX()+Math.max(-5, Math.min(5,newXDisp)); - double newY = xyd.getY()+Math.max(-5, Math.min(5,newYDisp)); - - newX = Math.max(innerBounds.getMinX(), Math.min(newX, innerBounds.getMaxX())); - newY = Math.max(innerBounds.getMinY(), Math.min(newY, innerBounds.getMaxY())); - - xyd.setLocation(newX, newY); - - } - - protected void calcAttraction(E e) { - Pair endpoints = getGraph().getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - boolean v1_locked = isLocked(v1); - boolean v2_locked = isLocked(v2); - - if(v1_locked && v2_locked) { - // both locked, do nothing - return; - } - Point2D p1 = transform(v1); - Point2D p2 = transform(v2); - if(p1 == null || p2 == null) return; - double xDelta = p1.getX() - p2.getX(); - double yDelta = p1.getY() - p2.getY(); - - double deltaLength = Math.max(EPSILON, p1.distance(p2)); - - double force = deltaLength / attraction_constant; - - assert Double.isNaN(force) == false : "Unexpected mathematical result in FRLayout:calcPositions [force]"; - - double dx = xDelta * force; - double dy = yDelta * force; - Point2D fvd1 = frVertexData.get(v1); - Point2D fvd2 = frVertexData.get(v2); - if(v2_locked) { - // double the offset for v1, as v2 will not be moving in - // the opposite direction - fvd1.setLocation(fvd1.getX()-2*dx, fvd1.getY()-2*dy); - } else { - fvd1.setLocation(fvd1.getX()-dx, fvd1.getY()-dy); - } - if(v1_locked) { - // double the offset for v2, as v1 will not be moving in - // the opposite direction - fvd2.setLocation(fvd2.getX()+2*dx, fvd2.getY()+2*dy); - } else { - fvd2.setLocation(fvd2.getX()+dx, fvd2.getY()+dy); - } - } - - protected void calcRepulsion(V v1) { - Point2D fvd1 = frVertexData.get(v1); - if(fvd1 == null) return; - fvd1.setLocation(0, 0); - boolean v1_locked = isLocked(v1); - - try { - for(V v2 : getGraph().getVertices()) { - - boolean v2_locked = isLocked(v2); - if (v1_locked && v2_locked) continue; - if (v1 != v2) { - Point2D p1 = transform(v1); - Point2D p2 = transform(v2); - if(p1 == null || p2 == null) continue; - double xDelta = p1.getX() - p2.getX(); - double yDelta = p1.getY() - p2.getY(); - - double deltaLength = Math.max(EPSILON, p1.distanceSq(p2)); - - double force = (repulsion_constant * repulsion_constant);// / deltaLength; - - double forceOverDeltaLength = force / deltaLength; - - assert Double.isNaN(force) == false : 
"Unexpected mathematical result in FRLayout:calcPositions [repulsion]"; - - if(v2_locked) { - // double the offset for v1, as v2 will not be moving in - // the opposite direction - fvd1.setLocation(fvd1.getX()+2 * xDelta * forceOverDeltaLength, - fvd1.getY()+ 2 * yDelta * forceOverDeltaLength); - } else { - fvd1.setLocation(fvd1.getX()+xDelta * forceOverDeltaLength, - fvd1.getY()+yDelta * forceOverDeltaLength); - } - } - } - } catch(ConcurrentModificationException cme) { - calcRepulsion(v1); - } - } - - private void cool() { - temperature *= (1.0 - currentIteration / (double) maxIterations); - } - - /** - * Sets the maximum number of iterations. - */ - public void setMaxIterations(int maxIterations) { - this.maxIterations = maxIterations; - } - - /** - * This one is an incremental visualization. - */ - public boolean isIncremental() { - return true; - } - - /** - * Returns true once the current iteration has passed the maximum count, - * MAX_ITERATIONS. - */ - public boolean done() { - if (currentIteration > maxIterations || temperature < 1.0/max_dimension) { - if (!checked) - { -// System.out.println("current iteration: " + currentIteration); -// System.out.println("temperature: " + temperature); - checked = true; - } - return true; - } - return false; - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/GraphElementAccessor.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/GraphElementAccessor.java deleted file mode 100644 index 4cf1c51c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/GraphElementAccessor.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - * Created on Apr 12, 2005 - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Shape; -import java.util.Collection; - -/** - * Interface for coordinate-based selection of graph components. - * @author Tom Nelson - * @author Joshua O'Madadhain - */ -public interface GraphElementAccessor -{ - /** - * Returns a vertex which is associated with the - * location (x,y). This is typically determined - * with respect to the vertex's location as specified - * by a Layout. - */ - V getVertex(Layout layout, double x, double y); - - /** - * Returns the vertices contained within {@code rectangle} relative - * to {@code layout}. - */ - Collection getVertices(Layout layout, Shape rectangle); - - /** - * Returns an edge which is associated with the - * location (x,y). This is typically determined - * with respect to the edge's location as specified - * by a {@code Layout}. - */ - E getEdge(Layout layout, double x, double y); - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/ISOMLayout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/ISOMLayout.java deleted file mode 100644 index bea8edaa..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/ISOMLayout.java +++ /dev/null @@ -1,231 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. 
-*/ -package edu.uci.ics.jung.algorithms.layout; - -import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.map.LazyMap; - -import java.awt.geom.Point2D; -import java.util.ArrayList; -import java.util.Collection; -import java.util.ConcurrentModificationException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * Implements a self-organizing map layout algorithm, based on Meyer's - * self-organizing graph methods. - * - * @author Yan Biao Boey - */ -public class ISOMLayout extends AbstractLayout implements IterativeContext { - - Map isomVertexData = - LazyMap.decorate(new HashMap(), - new Factory() { - public ISOMVertexData create() { - return new ISOMVertexData(); - }}); - - private int maxEpoch; - private int epoch; - - private int radiusConstantTime; - private int radius; - private int minRadius; - - private double adaption; - private double initialAdaption; - private double minAdaption; - - protected GraphElementAccessor elementAccessor = - new RadiusGraphElementAccessor(); - - private double coolingFactor; - - private List queue = new ArrayList(); - private String status = null; - - /** - * Returns the current number of epochs and execution status, as a string. - */ - public String getStatus() { - return status; - } - - /** - * Creates an ISOMLayout instance for the specified graph g. - * @param g - */ - public ISOMLayout(Graph g) { - super(g); - } - - public void initialize() { - - setInitializer(new RandomLocationTransformer(getSize())); - maxEpoch = 2000; - epoch = 1; - - radiusConstantTime = 100; - radius = 5; - minRadius = 1; - - initialAdaption = 90.0D / 100.0D; - adaption = initialAdaption; - minAdaption = 0; - - //factor = 0; //Will be set later on - coolingFactor = 2; - - //temperature = 0.03; - //initialJumpRadius = 100; - //jumpRadius = initialJumpRadius; - - //delay = 100; - } - - - /** - * Advances the current positions of the graph elements. 
- */ - public void step() { - status = "epoch: " + epoch + "; "; - if (epoch < maxEpoch) { - adjust(); - updateParameters(); - status += " status: running"; - - } else { - status += "adaption: " + adaption + "; "; - status += "status: done"; -// done = true; - } - } - - private synchronized void adjust() { - //Generate random position in graph space - Point2D tempXYD = new Point2D.Double(); - - // creates a new XY data location - tempXYD.setLocation(10 + Math.random() * getSize().getWidth(), - 10 + Math.random() * getSize().getHeight()); - - //Get closest vertex to random position - V winner = elementAccessor.getVertex(this, tempXYD.getX(), tempXYD.getY()); - - while(true) { - try { - for(V v : getGraph().getVertices()) { - ISOMVertexData ivd = getISOMVertexData(v); - ivd.distance = 0; - ivd.visited = false; - } - break; - } catch(ConcurrentModificationException cme) {} - } - adjustVertex(winner, tempXYD); - } - - private synchronized void updateParameters() { - epoch++; - double factor = Math.exp(-1 * coolingFactor * (1.0 * epoch / maxEpoch)); - adaption = Math.max(minAdaption, factor * initialAdaption); - //jumpRadius = (int) factor * jumpRadius; - //temperature = factor * temperature; - if ((radius > minRadius) && (epoch % radiusConstantTime == 0)) { - radius--; - } - } - - private synchronized void adjustVertex(V v, Point2D tempXYD) { - queue.clear(); - ISOMVertexData ivd = getISOMVertexData(v); - ivd.distance = 0; - ivd.visited = true; - queue.add(v); - V current; - - while (!queue.isEmpty()) { - current = queue.remove(0); - ISOMVertexData currData = getISOMVertexData(current); - Point2D currXYData = transform(current); - - double dx = tempXYD.getX() - currXYData.getX(); - double dy = tempXYD.getY() - currXYData.getY(); - double factor = adaption / Math.pow(2, currData.distance); - - currXYData.setLocation(currXYData.getX()+(factor*dx), currXYData.getY()+(factor*dy)); - - if (currData.distance < radius) { - Collection s = getGraph().getNeighbors(current); - while(true) { - try { - for(V child : s) { - ISOMVertexData childData = getISOMVertexData(child); - if (childData != null && !childData.visited) { - childData.visited = true; - childData.distance = currData.distance + 1; - queue.add(child); - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - } - } - } - - protected ISOMVertexData getISOMVertexData(V v) { - return isomVertexData.get(v); - } - - /** - * This one is an incremental visualization. - * @return true is the layout algorithm is incremental, false otherwise - */ - public boolean isIncremental() { - return true; - } - - /** - * Returns true if the vertex positions are no longer being - * updated. Currently ISOMLayout stops updating vertex - * positions after a certain number of iterations have taken place. - * @return true if the vertex position updates have stopped, - * false otherwise - */ - public boolean done() { - return epoch >= maxEpoch; - } - - protected static class ISOMVertexData { - int distance; - boolean visited; - - protected ISOMVertexData() { - distance = 0; - visited = false; - } - } - - /** - * Resets the layout iteration count to 0, which allows the layout algorithm to - * continue updating vertex positions. 
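The adjustment rule in adjustVertex above can be seen in isolation: the winning vertex (BFS distance 0) moves towards the random target by the full adaption factor, and each neighbour at BFS distance d moves by adaption / 2^d. A small sketch with illustrative sample values:

    // One ISOM-style update for a single vertex at a given BFS distance from the winner.
    final class IsomUpdateSketch {
        public static void main(String[] args) {
            double adaption = 0.9;                 // current adaption factor
            int bfsDistance = 2;                   // hops from the winning vertex
            double[] vertex = {100.0, 100.0};      // current position
            double[] target = {160.0, 40.0};       // random point in layout space

            double factor = adaption / Math.pow(2, bfsDistance);
            vertex[0] += factor * (target[0] - vertex[0]);
            vertex[1] += factor * (target[1] - vertex[1]);

            // With adaption 0.9 at distance 2, the vertex covers 22.5% of the gap.
            System.out.printf("new position: (%.2f, %.2f)%n", vertex[0], vertex[1]);
        }
    }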
- */ - public void reset() { - epoch = 0; - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/KKLayout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/KKLayout.java deleted file mode 100644 index a1b9f402..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/KKLayout.java +++ /dev/null @@ -1,433 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout; -/* - * This source is under the same license with JUNG. - * http://jung.sourceforge.net/license.txt for a description. - */ - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; - -import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; -import edu.uci.ics.jung.algorithms.shortestpath.Distance; -import edu.uci.ics.jung.algorithms.shortestpath.DistanceStatistics; -import edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; - -/** - * Implements the Kamada-Kawai algorithm for node layout. - * Does not respect filter calls, and sometimes crashes when the view changes to it. - * - * @see "Tomihisa Kamada and Satoru Kawai: An algorithm for drawing general indirect graphs. Information Processing Letters 31(1):7-15, 1989" - * @see "Tomihisa Kamada: On visualization of abstract objects and relations. Ph.D. dissertation, Dept. of Information Science, Univ. of Tokyo, Dec. 1988." - * - * @author Masanori Harada - */ -public class KKLayout extends AbstractLayout implements IterativeContext { - - private double EPSILON = 0.1d; - - private int currentIteration; - private int maxIterations = 2000; - private String status = "KKLayout"; - - private double L; // the ideal length of an edge - private double K = 1; // arbitrary const number - private double[][] dm; // distance matrix - - private boolean adjustForGravity = true; - private boolean exchangeVertices = true; - - private V[] vertices; - private Point2D[] xydata; - - /** - * Retrieves graph distances between vertices of the visible graph - */ - protected Distance distance; - - /** - * The diameter of the visible graph. In other words, the maximum over all pairs - * of vertices of the length of the shortest path between a and bf the visible graph. - */ - protected double diameter; - - /** - * A multiplicative factor which partly specifies the "preferred" length of an edge (L). - */ - private double length_factor = 0.9; - - /** - * A multiplicative factor which specifies the fraction of the graph's diameter to be - * used as the inter-vertex distance between disconnected vertices. - */ - private double disconnected_multiplier = 0.5; - - /** - * Creates an instance for the specified graph. - */ - public KKLayout(Graph g) - { - this(g, new UnweightedShortestPath(g)); - } - - /** - * Creates an instance for the specified graph and distance metric. - */ - public KKLayout(Graph g, Distance distance){ - super(g); - this.distance = distance; - } - - /** - * Sets a multiplicative factor which - * partly specifies the "preferred" length of an edge (L). 
- */ - public void setLengthFactor(double length_factor){ - this.length_factor = length_factor; - } - - /** - * Sets a multiplicative factor that specifies the fraction of the graph's diameter to be - * used as the inter-vertex distance between disconnected vertices. - */ - public void setDisconnectedDistanceMultiplier(double disconnected_multiplier){ - this.disconnected_multiplier = disconnected_multiplier; - } - - /** - * Returns a string with information about the current status of the algorithm. - */ - public String getStatus() { - return status + this.getSize(); - } - - /** - * Sets the maximum number of iterations. - */ - public void setMaxIterations(int maxIterations) { - this.maxIterations = maxIterations; - } - - /** - * This one is an incremental visualization. - */ - public boolean isIncremental() { - return true; - } - - /** - * Returns true once the current iteration has passed the maximum count. - */ - public boolean done() { - if (currentIteration > maxIterations) { - return true; - } - return false; - } - - @SuppressWarnings("unchecked") - public void initialize() { - currentIteration = 0; - - if(graph != null && size != null) { - - double height = size.getHeight(); - double width = size.getWidth(); - - int n = graph.getVertexCount(); - dm = new double[n][n]; - vertices = (V[])graph.getVertices().toArray(); - xydata = new Point2D[n]; - - // assign IDs to all visible vertices - while(true) { - try { - int index = 0; - for(V v : graph.getVertices()) { - Point2D xyd = transform(v); - vertices[index] = v; - xydata[index] = xyd; - index++; - } - break; - } catch(ConcurrentModificationException cme) {} - } - - diameter = DistanceStatistics.diameter(graph, distance, true); - - double L0 = Math.min(height, width); - L = (L0 / diameter) * length_factor; // length_factor used to be hardcoded to 0.9 - //L = 0.75 * Math.sqrt(height * width / n); - - for (int i = 0; i < n - 1; i++) { - for (int j = i + 1; j < n; j++) { - Number d_ij = distance.getDistance(vertices[i], vertices[j]); - Number d_ji = distance.getDistance(vertices[j], vertices[i]); - double dist = diameter * disconnected_multiplier; - if (d_ij != null) - dist = Math.min(d_ij.doubleValue(), dist); - if (d_ji != null) - dist = Math.min(d_ji.doubleValue(), dist); - dm[i][j] = dm[j][i] = dist; - } - } - } - } - - public void step() { - try { - currentIteration++; - double energy = calcEnergy(); - status = "Kamada-Kawai V=" + getGraph().getVertexCount() - + "(" + getGraph().getVertexCount() + ")" - + " IT: " + currentIteration - + " E=" + energy - ; - - int n = getGraph().getVertexCount(); - if (n == 0) - return; - - double maxDeltaM = 0; - int pm = -1; // the node having max deltaM - for (int i = 0; i < n; i++) { - if (isLocked(vertices[i])) - continue; - double deltam = calcDeltaM(i); - - if (maxDeltaM < deltam) { - maxDeltaM = deltam; - pm = i; - } - } - if (pm == -1) - return; - - for (int i = 0; i < 100; i++) { - double[] dxy = calcDeltaXY(pm); - xydata[pm].setLocation(xydata[pm].getX()+dxy[0], xydata[pm].getY()+dxy[1]); - - double deltam = calcDeltaM(pm); - if (deltam < EPSILON) - break; - } - - if (adjustForGravity) - adjustForGravity(); - - if (exchangeVertices && maxDeltaM < EPSILON) { - energy = calcEnergy(); - for (int i = 0; i < n - 1; i++) { - if (isLocked(vertices[i])) - continue; - for (int j = i + 1; j < n; j++) { - if (isLocked(vertices[j])) - continue; - double xenergy = calcEnergyIfExchanged(i, j); - if (energy > xenergy) { - double sx = xydata[i].getX(); - double sy = xydata[i].getY(); - 
xydata[i].setLocation(xydata[j]); - xydata[j].setLocation(sx, sy); - return; - } - } - } - } - } - finally { -// fireStateChanged(); - } - } - - /** - * Shift all vertices so that the center of gravity is located at - * the center of the screen. - */ - public void adjustForGravity() { - Dimension d = getSize(); - double height = d.getHeight(); - double width = d.getWidth(); - double gx = 0; - double gy = 0; - for (int i = 0; i < xydata.length; i++) { - gx += xydata[i].getX(); - gy += xydata[i].getY(); - } - gx /= xydata.length; - gy /= xydata.length; - double diffx = width / 2 - gx; - double diffy = height / 2 - gy; - for (int i = 0; i < xydata.length; i++) { - xydata[i].setLocation(xydata[i].getX()+diffx, xydata[i].getY()+diffy); - } - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.layout.AbstractLayout#setSize(java.awt.Dimension) - */ - @Override - public void setSize(Dimension size) { - if(initialized == false) - setInitializer(new RandomLocationTransformer(size)); - super.setSize(size); - } - - /** - * Enable or disable gravity point adjusting. - */ - public void setAdjustForGravity(boolean on) { - adjustForGravity = on; - } - - /** - * Returns true if gravity point adjusting is enabled. - */ - public boolean getAdjustForGravity() { - return adjustForGravity; - } - - /** - * Enable or disable the local minimum escape technique by - * exchanging vertices. - */ - public void setExchangeVertices(boolean on) { - exchangeVertices = on; - } - - /** - * Returns true if the local minimum escape technique by - * exchanging vertices is enabled. - */ - public boolean getExchangeVertices() { - return exchangeVertices; - } - - /** - * Determines a step to new position of the vertex m. - */ - private double[] calcDeltaXY(int m) { - double dE_dxm = 0; - double dE_dym = 0; - double d2E_d2xm = 0; - double d2E_dxmdym = 0; - double d2E_dymdxm = 0; - double d2E_d2ym = 0; - - for (int i = 0; i < vertices.length; i++) { - if (i != m) { - - double dist = dm[m][i]; - double l_mi = L * dist; - double k_mi = K / (dist * dist); - double dx = xydata[m].getX() - xydata[i].getX(); - double dy = xydata[m].getY() - xydata[i].getY(); - double d = Math.sqrt(dx * dx + dy * dy); - double ddd = d * d * d; - - dE_dxm += k_mi * (1 - l_mi / d) * dx; - dE_dym += k_mi * (1 - l_mi / d) * dy; - d2E_d2xm += k_mi * (1 - l_mi * dy * dy / ddd); - d2E_dxmdym += k_mi * l_mi * dx * dy / ddd; - d2E_d2ym += k_mi * (1 - l_mi * dx * dx / ddd); - } - } - // d2E_dymdxm equals to d2E_dxmdym. - d2E_dymdxm = d2E_dxmdym; - - double denomi = d2E_d2xm * d2E_d2ym - d2E_dxmdym * d2E_dymdxm; - double deltaX = (d2E_dxmdym * dE_dym - d2E_d2ym * dE_dxm) / denomi; - double deltaY = (d2E_dymdxm * dE_dxm - d2E_d2xm * dE_dym) / denomi; - return new double[]{deltaX, deltaY}; - } - - /** - * Calculates the gradient of energy function at the vertex m. - */ - private double calcDeltaM(int m) { - double dEdxm = 0; - double dEdym = 0; - for (int i = 0; i < vertices.length; i++) { - if (i != m) { - double dist = dm[m][i]; - double l_mi = L * dist; - double k_mi = K / (dist * dist); - - double dx = xydata[m].getX() - xydata[i].getX(); - double dy = xydata[m].getY() - xydata[i].getY(); - double d = Math.sqrt(dx * dx + dy * dy); - - double common = k_mi * (1 - l_mi / d); - dEdxm += common * dx; - dEdym += common * dy; - } - } - return Math.sqrt(dEdxm * dEdxm + dEdym * dEdym); - } - - /** - * Calculates the energy function E. 
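Note that the per-pair term accumulated below, k_ij / 2 * (dx*dx + dy*dy + l_ij*l_ij - 2 * l_ij * d), is algebraically k_ij / 2 * (d - l_ij)^2, since dx*dx + dy*dy = d*d. Each vertex pair therefore acts like a spring with rest length l_ij = L * d_ij and stiffness k_ij = K / (d_ij * d_ij), and the energy penalises drawn distances that deviate from the ideal distances derived from the graph metric.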
- */ - private double calcEnergy() { - double energy = 0; - for (int i = 0; i < vertices.length - 1; i++) { - for (int j = i + 1; j < vertices.length; j++) { - double dist = dm[i][j]; - double l_ij = L * dist; - double k_ij = K / (dist * dist); - double dx = xydata[i].getX() - xydata[j].getX(); - double dy = xydata[i].getY() - xydata[j].getY(); - double d = Math.sqrt(dx * dx + dy * dy); - - - energy += k_ij / 2 * (dx * dx + dy * dy + l_ij * l_ij - - 2 * l_ij * d); - } - } - return energy; - } - - /** - * Calculates the energy function E as if positions of the - * specified vertices are exchanged. - */ - private double calcEnergyIfExchanged(int p, int q) { - if (p >= q) - throw new RuntimeException("p should be < q"); - double energy = 0; // < 0 - for (int i = 0; i < vertices.length - 1; i++) { - for (int j = i + 1; j < vertices.length; j++) { - int ii = i; - int jj = j; - if (i == p) ii = q; - if (j == q) jj = p; - - double dist = dm[i][j]; - double l_ij = L * dist; - double k_ij = K / (dist * dist); - double dx = xydata[ii].getX() - xydata[jj].getX(); - double dy = xydata[ii].getY() - xydata[jj].getY(); - double d = Math.sqrt(dx * dx + dy * dy); - - energy += k_ij / 2 * (dx * dx + dy * dy + l_ij * l_ij - - 2 * l_ij * d); - } - } - return energy; - } - - public void reset() { - currentIteration = 0; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/Layout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/Layout.java deleted file mode 100644 index 5162ac59..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/Layout.java +++ /dev/null @@ -1,93 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; - -/** - * A generalized interface is a mechanism for returning (x,y) coordinates - * from vertices. In general, most of these methods are used to both control and - * get information from the layout algorithm. - *

      - * @author danyelf - * @author tom nelson - */ -public interface Layout extends Transformer { - - /** - * Initializes fields in the node that may not have - * been set during the constructor. Must be called before - * the iterations begin. - */ - void initialize(); - - /** - * provides initial locations for all vertices. - * @param initializer - */ - void setInitializer(Transformer initializer); - - /** - * setter for graph - * @param graph - */ - void setGraph(Graph graph); - - /** - * Returns the full graph (the one that was passed in at - * construction time) that this Layout refers to. - * - */ - Graph getGraph(); - - /** - * - * - */ - void reset(); - - /** - * @param d - */ - void setSize(Dimension d); - - /** - * Returns the current size of the visualization's space. - */ - Dimension getSize(); - - - /** - * Sets a flag which fixes this vertex in place. - * - * @param v vertex - */ - void lock(V v, boolean state); - - /** - * Returns true if the position of vertex v - * is locked. - */ - boolean isLocked(V v); - - /** - * set the location of a vertex - * @param v - * @param location - */ - void setLocation(V v, Point2D location); - - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/LayoutDecorator.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/LayoutDecorator.java deleted file mode 100644 index b1f25958..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/LayoutDecorator.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 23, 2005 - */ - -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; - -/** - * a pure decorator for the Layout interface. Intended to be overridden - * to provide specific behavior decoration - * - * @author Tom Nelson - * - */ -public abstract class LayoutDecorator implements Layout, IterativeContext { - - protected Layout delegate; - - /** - * Creates an instance backed by the specified delegate layout. - */ - public LayoutDecorator(Layout delegate) { - this.delegate = delegate; - } - - /** - * Returns the backing (delegate) layout. - */ - public Layout getDelegate() { - return delegate; - } - - /** - * Sets the backing (delegate) layout. 
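As a usage illustration of the decorator idea described above, a hypothetical subclass (not part of JUNG; the type parameters follow the generic declarations of Layout and LayoutDecorator) that mirrors the delegate horizontally while forwarding everything else:

    import java.awt.geom.Point2D;

    // Hypothetical decorator: flips the delegate layout left-to-right.
    class MirroredLayout<V, E> extends LayoutDecorator<V, E> {

        public MirroredLayout(Layout<V, E> delegate) {
            super(delegate);
        }

        @Override
        public Point2D transform(V v) {
            Point2D p = delegate.transform(v);
            double width = delegate.getSize().getWidth();
            return new Point2D.Double(width - p.getX(), p.getY());
        }

        @Override
        public void setLocation(V v, Point2D location) {
            double width = delegate.getSize().getWidth();
            delegate.setLocation(v, new Point2D.Double(width - location.getX(), location.getY()));
        }
    }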
- */ - public void setDelegate(Layout delegate) { - this.delegate = delegate; - } - - /** - * @see edu.uci.ics.jung.algorithms.util.IterativeContext#done() - */ - public void step() { - if(delegate instanceof IterativeContext) { - ((IterativeContext)delegate).step(); - } - } - - /** - * - * @see edu.uci.ics.jung.algorithms.layout.Layout#initialize() - */ - public void initialize() { - delegate.initialize(); - } - - /** - * @param initializer - * @see edu.uci.ics.jung.algorithms.layout.Layout#setInitializer(org.apache.commons.collections15.Transformer) - */ - public void setInitializer(Transformer initializer) { - delegate.setInitializer(initializer); - } - - /** - * @param v - * @param location - * @see edu.uci.ics.jung.algorithms.layout.Layout#setLocation(java.lang.Object, java.awt.geom.Point2D) - */ - public void setLocation(V v, Point2D location) { - delegate.setLocation(v, location); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#getSize() - */ - public Dimension getSize() { - return delegate.getSize(); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#getGraph() - */ - public Graph getGraph() { - return delegate.getGraph(); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#transform(Object) - */ - public Point2D transform(V v) { - return delegate.transform(v); - } - - /** - * @see edu.uci.ics.jung.algorithms.util.IterativeContext#done() - */ - public boolean done() { - if(delegate instanceof IterativeContext) { - return ((IterativeContext)delegate).done(); - } - return true; - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#lock(Object, boolean) - */ - public void lock(V v, boolean state) { - delegate.lock(v, state); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#isLocked(Object) - */ - public boolean isLocked(V v) { - return delegate.isLocked(v); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#setSize(Dimension) - */ - public void setSize(Dimension d) { - delegate.setSize(d); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#reset() - */ - public void reset() { - delegate.reset(); - } - - public void setGraph(Graph graph) { - delegate.setGraph(graph); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/PolarPoint.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/PolarPoint.java deleted file mode 100644 index aa3dc7b4..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/PolarPoint.java +++ /dev/null @@ -1,103 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.geom.Point2D; - -/** - * Represents a point in polar coordinates: distance and angle from the origin. - * Includes conversions between polar and Cartesian - * coordinates (Point2D). - * - * @author Tom Nelson - tomnelson@dev.java.net - */ -public class PolarPoint -{ - double theta; - double radius; - - /** - * Creates a new instance with radius and angle each 0. - */ - public PolarPoint() { - this(0,0); - } - - /** - * Creates a new instance with radius {@code radius} and angle {@code theta}. - */ - public PolarPoint(double theta, double radius) { - this.theta = theta; - this.radius = radius; - } - - /** - * Returns the angle for this point. 
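A round-trip usage example of the conversions provided by this class (the demo class name is illustrative; it assumes edu.uci.ics.jung.algorithms.layout.PolarPoint is on the classpath):

    import java.awt.geom.Point2D;
    import edu.uci.ics.jung.algorithms.layout.PolarPoint;

    final class PolarRoundTrip {
        public static void main(String[] args) {
            Point2D p = new Point2D.Double(3.0, 4.0);
            PolarPoint polar = PolarPoint.cartesianToPolar(p);   // radius 5.0, theta = atan2(4, 3)
            Point2D back = PolarPoint.polarToCartesian(polar);   // approximately (3.0, 4.0) again
            System.out.println(polar);                           // PolarPoint[5.0,0.927...]
            System.out.println(back);
        }
    }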
- */ - public double getTheta() { return theta; } - - /** - * Returns the radius for this point. - */ - public double getRadius() { return radius; } - - /** - * Sets the angle for this point to {@code theta}. - */ - public void setTheta(double theta) { this.theta = theta; } - - /** - * Sets the radius for this point to {@code theta}. - */ - public void setRadius(double radius) { this.radius = radius; } - - /** - * Returns the result of converting polar to Cartesian coordinates. - */ - public static Point2D polarToCartesian(PolarPoint polar) { - return polarToCartesian(polar.getTheta(), polar.getRadius()); - } - - /** - * Returns the result of converting (theta, radius) to Cartesian coordinates. - */ - public static Point2D polarToCartesian(double theta, double radius) { - return new Point2D.Double(radius*Math.cos(theta), radius*Math.sin(theta)); - } - - /** - * Returns the result of converting point to polar coordinates. - */ - public static PolarPoint cartesianToPolar(Point2D point) { - return cartesianToPolar(point.getX(), point.getY()); - } - - /** - * Returns the result of converting (x, y) to polar coordinates. - */ - public static PolarPoint cartesianToPolar(double x, double y) { - double theta = Math.atan2(y,x); - double radius = Math.sqrt(x*x+y*y); - return new PolarPoint(theta, radius); - } - - @Override - public String toString() { - return "PolarPoint[" + radius + "," + theta +"]"; - } - - /** - * Sets the angle and radius of this point to those of {@code p}. - */ - public void setLocation(PolarPoint p) { - this.theta = p.getTheta(); - this.radius = p.getRadius(); - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/RadialTreeLayout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/RadialTreeLayout.java deleted file mode 100644 index 457bd961..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/RadialTreeLayout.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 9, 2005 - */ - -package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.HashMap; -import java.util.Map; - -import edu.uci.ics.jung.graph.Forest; - -/** - * A radial layout for Tree or Forest graphs. - * - * @author Tom Nelson - * - */ -public class RadialTreeLayout extends TreeLayout { - - protected Map polarLocations; - - /** - * Creates an instance for the specified graph with default X and Y distances. - */ - public RadialTreeLayout(Forest g) { - this(g, DEFAULT_DISTX, DEFAULT_DISTY); - } - - /** - * Creates an instance for the specified graph and X distance with - * default Y distance. - */ - public RadialTreeLayout(Forest g, int distx) { - this(g, distx, DEFAULT_DISTY); - } - - /** - * Creates an instance for the specified graph, X distance, and Y distance. 
- */ - public RadialTreeLayout(Forest g, int distx, int disty) { - super(g, distx, disty); - } - - @Override - protected void buildTree() { - super.buildTree(); - this.polarLocations = new HashMap(); - setRadialLocations(); - } - - @Override - public void setSize(Dimension size) { - this.size = size; - buildTree(); - } - - @Override - protected void setCurrentPositionFor(V vertex) { - locations.get(vertex).setLocation(m_currentPoint); - } - - @Override - public void setLocation(V v, Point2D location) - { - Point2D c = getCenter(); - Point2D pv = new Point2D.Double(location.getX() - c.getX(), - location.getY() - c.getY()); - PolarPoint newLocation = PolarPoint.cartesianToPolar(pv); - PolarPoint currentLocation = polarLocations.get(v); - if (currentLocation == null) - polarLocations.put(v, newLocation); - else - currentLocation.setLocation(newLocation); - } - - /** - * Returns the map from vertices to their locations in polar coordinates. - */ - public Map getPolarLocations() { - return polarLocations; - } - - @Override - public Point2D transform(V v) { - PolarPoint pp = polarLocations.get(v); - double centerX = getSize().getWidth()/2; - double centerY = getSize().getHeight()/2; - Point2D cartesian = PolarPoint.polarToCartesian(pp); - cartesian.setLocation(cartesian.getX()+centerX,cartesian.getY()+centerY); - return cartesian; - } - - private Point2D getMaxXY() { - double maxx = 0; - double maxy = 0; - for(Point2D p : locations.values()) { - maxx = Math.max(maxx, p.getX()); - maxy = Math.max(maxy, p.getY()); - } - return new Point2D.Double(maxx,maxy); - } - - private void setRadialLocations() { - Point2D max = getMaxXY(); - double maxx = max.getX(); - double maxy = max.getY(); - maxx = Math.max(maxx, size.width); - double theta = 2*Math.PI/maxx; - - double deltaRadius = size.width/2/maxy; - for(Map.Entry entry : locations.entrySet()) { - V v = entry.getKey(); - Point2D p = entry.getValue(); - PolarPoint polarPoint = new PolarPoint(p.getX()*theta, (p.getY() - this.distY)*deltaRadius); - polarLocations.put(v, polarPoint); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/RadiusGraphElementAccessor.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/RadiusGraphElementAccessor.java deleted file mode 100644 index 5f12c3ca..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/RadiusGraphElementAccessor.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - * Created on Apr 12, 2005 - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Shape; -import java.awt.geom.Point2D; -import java.util.Collection; -import java.util.ConcurrentModificationException; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Set; - -import edu.uci.ics.jung.graph.Graph; - - -/** - * Simple implementation of PickSupport that returns the vertex or edge - * that is closest to the specified location. This implementation - * provides the same picking options that were available in - * previous versions of AbstractLayout. - * - *

      No element will be returned that is farther away than the specified - * maximum distance. - * - * @author Tom Nelson - * @author Joshua O'Madadhain - */ -public class RadiusGraphElementAccessor implements GraphElementAccessor { - - protected double maxDistance; - - /** - * Creates an instance with an effectively infinite default maximum distance. - */ - public RadiusGraphElementAccessor() { - this(Math.sqrt(Double.MAX_VALUE - 1000)); - } - - /** - * Creates an instance with the specified default maximum distance. - */ - public RadiusGraphElementAccessor(double maxDistance) { - this.maxDistance = maxDistance; - } - - /** - * Gets the vertex nearest to the location of the (x,y) location selected, - * within a distance of maxDistance. Iterates through all - * visible vertices and checks their distance from the click. Override this - * method to provde a more efficient implementation. - */ - public V getVertex(Layout layout, double x, double y) { - return getVertex(layout, x, y, this.maxDistance); - } - - /** - * Gets the vertex nearest to the location of the (x,y) location selected, - * within a distance of maxDistance. Iterates through all - * visible vertices and checks their distance from the click. Override this - * method to provde a more efficient implementation. - * @param x - * @param y - * @param maxDistance temporarily overrides member maxDistance - */ - public V getVertex(Layout layout, double x, double y, double maxDistance) { - double minDistance = maxDistance * maxDistance; - V closest = null; - while(true) { - try { - for(V v : layout.getGraph().getVertices()) { - - Point2D p = layout.transform(v); - double dx = p.getX() - x; - double dy = p.getY() - y; - double dist = dx * dx + dy * dy; - if (dist < minDistance) { - minDistance = dist; - closest = v; - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return closest; - } - - public Collection getVertices(Layout layout, Shape rectangle) { - Set pickedVertices = new HashSet(); - while(true) { - try { - for(V v : layout.getGraph().getVertices()) { - - Point2D p = layout.transform(v); - if(rectangle.contains(p)) { - pickedVertices.add(v); - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return pickedVertices; - } - - /** - * Gets the edge nearest to the location of the (x,y) location selected. - * Calls the longer form of the call. - */ - public E getEdge(Layout layout, double x, double y) { - return getEdge(layout, x, y, this.maxDistance); - } - - /** - * Gets the edge nearest to the location of the (x,y) location selected, - * within a distance of maxDistance, Iterates through all - * visible edges and checks their distance from the click. Override this - * method to provide a more efficient implementation. - * - * @param x - * @param y - * @param maxDistance temporarily overrides member maxDistance - * @return Edge closest to the click. - */ - public E getEdge(Layout layout, double x, double y, double maxDistance) { - double minDistance = maxDistance * maxDistance; - E closest = null; - while(true) { - try { - for(E e : layout.getGraph().getEdges()) { - - // Could replace all this set stuff with getFrom_internal() etc. 
- Graph graph = layout.getGraph(); - Collection vertices = graph.getIncidentVertices(e); - Iterator vertexIterator = vertices.iterator(); - V v1 = vertexIterator.next(); - V v2 = vertexIterator.next(); - // Get coords - Point2D p1 = layout.transform(v1); - Point2D p2 = layout.transform(v2); - double x1 = p1.getX(); - double y1 = p1.getY(); - double x2 = p2.getX(); - double y2 = p2.getY(); - // Calculate location on line closest to (x,y) - // First, check that v1 and v2 are not coincident. - if (x1 == x2 && y1 == y2) - continue; - double b = - ((y - y1) * (y2 - y1) + (x - x1) * (x2 - x1)) - / ((x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1)); - // - double distance2; // square of the distance - if (b <= 0) - distance2 = (x - x1) * (x - x1) + (y - y1) * (y - y1); - else if (b >= 1) - distance2 = (x - x2) * (x - x2) + (y - y2) * (y - y2); - else { - double x3 = x1 + b * (x2 - x1); - double y3 = y1 + b * (y2 - y1); - distance2 = (x - x3) * (x - x3) + (y - y3) * (y - y3); - } - - if (distance2 < minDistance) { - minDistance = distance2; - closest = e; - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return closest; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/SmoothLayoutDecorator.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/SmoothLayoutDecorator.java deleted file mode 100644 index 08ca4eb6..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/SmoothLayoutDecorator.java +++ /dev/null @@ -1,133 +0,0 @@ -package edu.uci.ics.jung.algorithms.layout; - -import edu.uci.ics.jung.algorithms.layout.util.Relaxer; -import edu.uci.ics.jung.algorithms.layout.util.VisRunner; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import java.awt.geom.Point2D; -import org.apache.commons.collections15.Transformer; - -public class SmoothLayoutDecorator extends LayoutDecorator { - private double lastTick; - private double speed; - private StaticLayout currentState; - private OriginTransformer origin; - public SmoothLayoutDecorator(Layout delegate) { - super(delegate); - speed = 0.0002; - origin = new OriginTransformer(); - } - - @Override - public void initialize() { - super.initialize(); - - if (currentState == null) { - currentState = new StaticLayout(delegate.getGraph(), origin); - } - - lastTick = -1.0; - - if (getDelegate() instanceof IterativeContext) { - Relaxer relaxer = new VisRunner((IterativeContext)getDelegate()); - relaxer.prerelax(); - } - } - - @Override - public void reset() { - super.reset(); - lastTick = -1.0; - } - - public void setOrigin(Point2D o) { - origin.setOrigin(o); - } - - public void setOrigin(double x, double y) { - setOrigin(new Point2D.Double(x,y)); - } - - @Override - public Point2D transform(V v) { - return currentState.transform(v); - } - - @Override - public void setLocation(V v, Point2D location) { - super.setLocation(v, location); - currentState.setLocation(v, location); - } - - // check out strange condition here - @Override - public void step() { - synchronized (getGraph()) { - boolean moved = false; - for (V v : getGraph().getVertices()) { - moved = tick(v) || moved; - } - //done = ! moved; - } - } - - public void getRelaxer(){ - - } - - @Override - public boolean done() { - synchronized (getGraph()) { - for (V v : getGraph().getVertices()) { - if (! 
currentState.transform(v).equals(getDelegate().transform(v))) { - return false; - } - } - } - return true; - } - - private double millis() { - return (double)System.currentTimeMillis(); - } - - private boolean tick(V v) { - Point2D source = currentState.transform(v); - Point2D dest = getDelegate().transform(v); - if (lastTick == -1.0) { - lastTick = millis(); - } - - double thisTick = millis(); - double rate = (thisTick - lastTick) * speed; - if (rate>1) { - rate = 1; - } - double dx = dest.getX() - source.getX(); - double dy = dest.getY() - source.getY(); - - if (Math.floor(dx)==0 && Math.floor(dy)==0) { - currentState.setLocation(v, dest); - return false; - } else { - currentState.setLocation(v, new Point2D.Double( - source.getX() + (dx*rate), - source.getY() + (dy*rate) - )); - return true; - } - } - - private class OriginTransformer implements Transformer { - private Point2D origin; - public Point2D transform(V input) { - return origin; - } - public void setOrigin(Point2D origin) { - this.origin = origin; - } - public OriginTransformer() { - this.origin = new Point2D.Double(0.0,0.0); - } - - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/SpringLayout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/SpringLayout.java deleted file mode 100644 index d21c2a15..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/SpringLayout.java +++ /dev/null @@ -1,336 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout; - -import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; -import org.apache.commons.collections15.map.LazyMap; - -import java.awt.Dimension; -import java.awt.event.ComponentAdapter; -import java.awt.event.ComponentEvent; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; -import java.util.HashMap; -import java.util.Map; - -/** - * The SpringLayout package represents a visualization of a set of nodes. The - * SpringLayout, which is initialized with a Graph, assigns X/Y locations to - * each node. When called relax(), the SpringLayout moves the - * visualization forward one step. - * - * @author Danyel Fisher - * @author Joshua O'Madadhain - */ -public class SpringLayout extends AbstractLayout implements IterativeContext { - - protected double stretch = 0.70; - protected Transformer lengthFunction; - protected int repulsion_range_sq = 100 * 100; - protected double force_multiplier = 1.0 / 3.0; - - protected Map springVertexData = - LazyMap.decorate(new HashMap(), - new Factory() { - public SpringVertexData create() { - return new SpringVertexData(); - }}); - - /** - * Constructor for a SpringLayout for a raw graph with associated - * dimension--the input knows how big the graph is. Defaults to the unit - * length function. - */ - @SuppressWarnings("unchecked") - public SpringLayout(Graph g) { - this(g, new ConstantTransformer(30)); - } - - /** - * Constructor for a SpringLayout for a raw graph with associated component. 
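The edge-picking code in RadiusGraphElementAccessor.getEdge above projects the click onto each edge's segment via the parameter b and then compares squared distances. A self-contained sketch of that projection step (the method and class names are mine, not part of JUNG):

    import java.awt.geom.Point2D;

    public class SegmentDistanceDemo {
        // Squared distance from (x, y) to the segment (x1, y1)-(x2, y2),
        // using the same projection parameter b as getEdge.
        static double distanceSqToSegment(double x, double y,
                                          double x1, double y1, double x2, double y2) {
            if (x1 == x2 && y1 == y2) {                 // degenerate segment: a single point
                return Point2D.distanceSq(x, y, x1, y1);
            }
            double b = ((y - y1) * (y2 - y1) + (x - x1) * (x2 - x1))
                     / ((x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1));
            if (b <= 0) return Point2D.distanceSq(x, y, x1, y1);   // closest to endpoint 1
            if (b >= 1) return Point2D.distanceSq(x, y, x2, y2);   // closest to endpoint 2
            double x3 = x1 + b * (x2 - x1);             // foot of the perpendicular
            double y3 = y1 + b * (y2 - y1);
            return Point2D.distanceSq(x, y, x3, y3);
        }

        public static void main(String[] args) {
            // Point (5, 5) against the horizontal segment (0,0)-(10,0): squared distance 25.
            System.out.println(distanceSqToSegment(5, 5, 0, 0, 10, 0));
        }
    }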
- * - * @param g the {@code Graph} to lay out - * @param length_function provides a length for each edge - */ - public SpringLayout(Graph g, Transformer length_function) - { - super(g); - this.lengthFunction = length_function; - } - - /** - * Returns the current value for the stretch parameter. - * @see #setStretch(double) - */ - public double getStretch() { - return stretch; - } - - /** - * Sets the dimensions of the available space for layout to {@code size}. - */ - @Override - public void setSize(Dimension size) { - if(initialized == false) - setInitializer(new RandomLocationTransformer(size)); - super.setSize(size); - } - - /** - *

Sets the stretch parameter for this instance. This value - * specifies how much the degrees of an edge's incident vertices - * should influence how easily the endpoints of that edge - * can move (that is, that edge's tendency to change its length). - * - * The default value is 0.70. Positive values less than 1 cause - * high-degree vertices to move less than low-degree vertices, and - * values > 1 cause high-degree vertices to move more than - * low-degree vertices. Negative values will have unpredictable - * and inconsistent results.
      - * @param stretch - */ - public void setStretch(double stretch) { - this.stretch = stretch; - } - - /** - * Returns the current value for the node repulsion range. - * @see #setRepulsionRange(int) - */ - public int getRepulsionRange() { - return (int)(Math.sqrt(repulsion_range_sq)); - } - - /** - * Sets the node repulsion range (in drawing area units) for this instance. - * Outside this range, nodes do not repel each other. The default value - * is 100. Negative values are treated as their positive equivalents. - * @param range - */ - public void setRepulsionRange(int range) { - this.repulsion_range_sq = range * range; - } - - /** - * Returns the current value for the edge length force multiplier. - * @see #setForceMultiplier(double) - */ - public double getForceMultiplier() { - return force_multiplier; - } - - /** - * Sets the force multiplier for this instance. This value is used to - * specify how strongly an edge "wants" to be its default length - * (higher values indicate a greater attraction for the default length), - * which affects how much its endpoints move at each timestep. - * The default value is 1/3. A value of 0 turns off any attempt by the - * layout to cause edges to conform to the default length. Negative - * values cause long edges to get longer and short edges to get shorter; use - * at your own risk. - */ - public void setForceMultiplier(double force) { - this.force_multiplier = force; - } - - public void initialize() { - } - - /** - * Relaxation step. Moves all nodes a smidge. - */ - public void step() { - try { - for(V v : getGraph().getVertices()) { - SpringVertexData svd = springVertexData.get(v); - if (svd == null) { - continue; - } - svd.dx /= 4; - svd.dy /= 4; - svd.edgedx = svd.edgedy = 0; - svd.repulsiondx = svd.repulsiondy = 0; - } - } catch(ConcurrentModificationException cme) { - step(); - } - - relaxEdges(); - calculateRepulsion(); - moveNodes(); - } - - protected void relaxEdges() { - try { - for(E e : getGraph().getEdges()) { - Pair endpoints = getGraph().getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - - Point2D p1 = transform(v1); - Point2D p2 = transform(v2); - if(p1 == null || p2 == null) continue; - double vx = p1.getX() - p2.getX(); - double vy = p1.getY() - p2.getY(); - double len = Math.sqrt(vx * vx + vy * vy); - - double desiredLen = lengthFunction.transform(e); - - // round from zero, if needed [zero would be Bad.]. - len = (len == 0) ? .0001 : len; - - double f = force_multiplier * (desiredLen - len) / len; - - f = f * Math.pow(stretch, (getGraph().degree(v1) + getGraph().degree(v2) - 2)); - - // the actual movement distance 'dx' is the force multiplied by the - // distance to go. 
- double dx = f * vx; - double dy = f * vy; - SpringVertexData v1D, v2D; - v1D = springVertexData.get(v1); - v2D = springVertexData.get(v2); - - v1D.edgedx += dx; - v1D.edgedy += dy; - v2D.edgedx += -dx; - v2D.edgedy += -dy; - } - } catch(ConcurrentModificationException cme) { - relaxEdges(); - } - } - - protected void calculateRepulsion() { - try { - for (V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - - SpringVertexData svd = springVertexData.get(v); - if(svd == null) continue; - double dx = 0, dy = 0; - - for (V v2 : getGraph().getVertices()) { - if (v == v2) continue; - Point2D p = transform(v); - Point2D p2 = transform(v2); - if(p == null || p2 == null) continue; - double vx = p.getX() - p2.getX(); - double vy = p.getY() - p2.getY(); - double distanceSq = p.distanceSq(p2); - if (distanceSq == 0) { - dx += Math.random(); - dy += Math.random(); - } else if (distanceSq < repulsion_range_sq) { - double factor = 1; - dx += factor * vx / distanceSq; - dy += factor * vy / distanceSq; - } - } - double dlen = dx * dx + dy * dy; - if (dlen > 0) { - dlen = Math.sqrt(dlen) / 2; - svd.repulsiondx += dx / dlen; - svd.repulsiondy += dy / dlen; - } - } - } catch(ConcurrentModificationException cme) { - calculateRepulsion(); - } - } - - protected void moveNodes() - { - synchronized (getSize()) { - try { - for (V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - SpringVertexData vd = springVertexData.get(v); - if(vd == null) continue; - Point2D xyd = transform(v); - - vd.dx += vd.repulsiondx + vd.edgedx; - vd.dy += vd.repulsiondy + vd.edgedy; - - // keeps nodes from moving any faster than 5 per time unit - xyd.setLocation(xyd.getX()+Math.max(-5, Math.min(5, vd.dx)), - xyd.getY()+Math.max(-5, Math.min(5, vd.dy))); - - Dimension d = getSize(); - int width = d.width; - int height = d.height; - - if (xyd.getX() < 0) { - xyd.setLocation(0, xyd.getY()); - } else if (xyd.getX() > width) { - xyd.setLocation(width, xyd.getY()); - } - if (xyd.getY() < 0) { - xyd.setLocation(xyd.getX(), 0); - } else if (xyd.getY() > height) { - xyd.setLocation(xyd.getX(), height); - } - - } - } catch(ConcurrentModificationException cme) { - moveNodes(); - } - } - } - - protected static class SpringVertexData { - protected double edgedx; - protected double edgedy; - protected double repulsiondx; - protected double repulsiondy; - - /** movement speed, x */ - protected double dx; - - /** movement speed, y */ - protected double dy; - } - - - /** - * Used for changing the size of the layout in response to a component's size. - */ - public class SpringDimensionChecker extends ComponentAdapter { - @Override - public void componentResized(ComponentEvent e) { - setSize(e.getComponent().getSize()); - } - } - - /** - * This one is an incremental visualization - */ - public boolean isIncremental() { - return true; - } - - /** - * For now, we pretend it never finishes. - */ - public boolean done() { - return false; - } - - /** - * No effect. - */ - public void reset() { - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/SpringLayout2.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/SpringLayout2.java deleted file mode 100644 index e62a30c1..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/SpringLayout2.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. 
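To make the relaxEdges step above concrete, here is the same edge-attraction arithmetic on sample numbers (the two vertex degrees and the displacement are made-up values; the constants are the defaults shown in SpringLayout):

    public class SpringForceDemo {
        public static void main(String[] args) {
            double forceMultiplier = 1.0 / 3.0;   // default force_multiplier
            double stretch = 0.70;                // default stretch
            int degreeV1 = 3, degreeV2 = 2;       // hypothetical endpoint degrees

            double desiredLen = 30.0;             // default edge length (ConstantTransformer(30))
            double vx = 40.0, vy = 0.0;           // displacement p1 - p2
            double len = Math.sqrt(vx * vx + vy * vy);

            // Attraction toward the desired length, damped for high-degree endpoints
            // by stretch^(deg(v1) + deg(v2) - 2), as in relaxEdges() above.
            double f = forceMultiplier * (desiredLen - len) / len;
            f *= Math.pow(stretch, degreeV1 + degreeV2 - 2);

            double dx = f * vx;                   // added to v1's edge force, subtracted from v2's
            double dy = f * vy;
            System.out.println("f=" + f + " dx=" + dx + " dy=" + dy);
        }
    }

With these numbers the edge is longer than desired (length 40 versus 30), so f comes out negative and the endpoints are pulled toward each other.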
- * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; - -/** - * The SpringLayout package represents a visualization of a set of nodes. The - * SpringLayout, which is initialized with a Graph, assigns X/Y locations to - * each node. When called relax(), the SpringLayout moves the - * visualization forward one step. - * - * - * - * @author Danyel Fisher - * @author Joshua O'Madadhain - */ -public class SpringLayout2 extends SpringLayout -{ - protected int currentIteration; - protected int averageCounter; - protected int loopCountMax = 4; - protected boolean done; - - protected Point2D averageDelta = new Point2D.Double(); - - /** - * Constructor for a SpringLayout for a raw graph with associated - * dimension--the input knows how big the graph is. Defaults to the unit - * length function. - */ - @SuppressWarnings("unchecked") - public SpringLayout2(Graph g) { - super(g); - } - - /** - * Constructor for a SpringLayout for a raw graph with associated component. - * - * @param g the {@code Graph} to lay out - * @param length_function provides a length for each edge - */ - public SpringLayout2(Graph g, Transformer length_function) - { - super(g, length_function); - } - - /** - * Relaxation step. Moves all nodes a smidge. - */ - @Override - public void step() { - super.step(); - currentIteration++; - testAverageDeltas(); - } - - private void testAverageDeltas() { - double dx = this.averageDelta.getX(); - double dy = this.averageDelta.getY(); - if(Math.abs(dx) < .001 && Math.abs(dy) < .001) { - done = true; - System.err.println("done, dx="+dx+", dy="+dy); - } - if(currentIteration > loopCountMax) { - this.averageDelta.setLocation(0,0); - averageCounter = 0; - currentIteration = 0; - } - } - - @Override - protected void moveNodes() { - synchronized (getSize()) { - try { - for (V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - SpringVertexData vd = springVertexData.get(v); - if(vd == null) continue; - Point2D xyd = transform(v); - - vd.dx += vd.repulsiondx + vd.edgedx; - vd.dy += vd.repulsiondy + vd.edgedy; - -// int currentCount = currentIteration % this.loopCountMax; -// System.err.println(averageCounter+" --- vd.dx="+vd.dx+", vd.dy="+vd.dy); -// System.err.println("averageDelta was "+averageDelta); - - averageDelta.setLocation( - ((averageDelta.getX() * averageCounter) + vd.dx) / (averageCounter+1), - ((averageDelta.getY() * averageCounter) + vd.dy) / (averageCounter+1) - ); -// System.err.println("averageDelta now "+averageDelta); -// System.err.println(); - averageCounter++; - - // keeps nodes from moving any faster than 5 per time unit - xyd.setLocation(xyd.getX()+Math.max(-5, Math.min(5, vd.dx)), - xyd.getY()+Math.max(-5, Math.min(5, vd.dy))); - - Dimension d = getSize(); - int width = d.width; - int height = d.height; - - if (xyd.getX() < 0) { - xyd.setLocation(0, xyd.getY());// setX(0); - } else if (xyd.getX() > width) { - xyd.setLocation(width, xyd.getY()); //setX(width); - } - if (xyd.getY() < 0) { - xyd.setLocation(xyd.getX(),0);//setY(0); - } else if (xyd.getY() > height) { - xyd.setLocation(xyd.getX(), height); //setY(height); - } - - } - } catch(ConcurrentModificationException cme) { - moveNodes(); - } - } - } - - 
@Override - public boolean done() { - return done; - } - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/StaticLayout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/StaticLayout.java deleted file mode 100644 index 31b32554..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/StaticLayout.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Created on Jul 21, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; - -/** - * StaticLayout places the vertices in the locations specified by its Transformer - * initializer. Vertex locations can be placed in a Map and then supplied to - * this layout as follows: - * - Transformer vertexLocations = - TransformerUtils.mapTransformer(map); - * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param - * @param - */ -public class StaticLayout extends AbstractLayout { - - /** - * Creates an instance for the specified graph, locations, and size. - */ - public StaticLayout(Graph graph, Transformer initializer, Dimension size) { - super(graph, initializer, size); - } - - /** - * Creates an instance for the specified graph and locations, with default size. - */ - public StaticLayout(Graph graph, Transformer initializer) { - super(graph, initializer); - } - - /** - * Creates an instance for the specified graph and default size; vertex locations - * are randomly assigned. - */ - public StaticLayout(Graph graph) { - super(graph); - } - - /** - * Creates an instance for the specified graph and size. - */ - public StaticLayout(Graph graph, Dimension size) { - super(graph, size); - } - - public void initialize() {} - - public void reset() {} - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/TreeLayout.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/TreeLayout.java deleted file mode 100644 index 4bebd3a9..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/TreeLayout.java +++ /dev/null @@ -1,252 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
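The StaticLayout javadoc above sketches how fixed positions are supplied through a map-backed Transformer. A fuller version of that sketch, assuming the JUNG 2 SparseGraph implementation and Commons Collections 15 are on the classpath (the vertex and edge names are arbitrary):

    import java.awt.Dimension;
    import java.awt.geom.Point2D;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.commons.collections15.TransformerUtils;

    import edu.uci.ics.jung.algorithms.layout.StaticLayout;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.SparseGraph;

    public class StaticLayoutDemo {
        public static void main(String[] args) {
            Graph<String, String> graph = new SparseGraph<String, String>();
            graph.addVertex("a");
            graph.addVertex("b");
            graph.addEdge("a-b", "a", "b");

            // Fixed vertex positions, wrapped as the layout's initializer.
            Map<String, Point2D> positions = new HashMap<String, Point2D>();
            positions.put("a", new Point2D.Double(50, 50));
            positions.put("b", new Point2D.Double(150, 100));

            StaticLayout<String, String> layout = new StaticLayout<String, String>(
                    graph, TransformerUtils.mapTransformer(positions), new Dimension(200, 200));

            System.out.println(layout.transform("a"));   // the fixed location (50, 50)
        }
    }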
- * - * Created on Jul 9, 2005 - */ - -package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.Point; -import java.awt.geom.Point2D; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.graph.Forest; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.TreeUtils; - -/** - * @author Karlheinz Toni - * @author Tom Nelson - converted to jung2 - * - */ - -public class TreeLayout implements Layout { - - protected Dimension size = new Dimension(600,600); - protected Forest graph; - protected Map basePositions = new HashMap(); - - protected Map locations = - LazyMap.decorate(new HashMap(), - new Transformer() { - public Point2D transform(V arg0) { - return new Point2D.Double(); - }}); - - protected transient Set alreadyDone = new HashSet(); - - /** - * The default horizontal vertex spacing. Initialized to 50. - */ - public static int DEFAULT_DISTX = 50; - - /** - * The default vertical vertex spacing. Initialized to 50. - */ - public static int DEFAULT_DISTY = 50; - - /** - * The horizontal vertex spacing. Defaults to {@code DEFAULT_XDIST}. - */ - protected int distX = 50; - - /** - * The vertical vertex spacing. Defaults to {@code DEFAULT_YDIST}. - */ - protected int distY = 50; - - protected transient Point m_currentPoint = new Point(); - - /** - * Creates an instance for the specified graph with default X and Y distances. - */ - public TreeLayout(Forest g) { - this(g, DEFAULT_DISTX, DEFAULT_DISTY); - } - - /** - * Creates an instance for the specified graph and X distance with - * default Y distance. - */ - public TreeLayout(Forest g, int distx) { - this(g, distx, DEFAULT_DISTY); - } - - /** - * Creates an instance for the specified graph, X distance, and Y distance. 
- */ - public TreeLayout(Forest g, int distx, int disty) { - if (g == null) - throw new IllegalArgumentException("Graph must be non-null"); - if (distx < 1 || disty < 1) - throw new IllegalArgumentException("X and Y distances must each be positive"); - this.graph = g; - this.distX = distx; - this.distY = disty; - buildTree(); - } - - protected void buildTree() { - this.m_currentPoint = new Point(0, 20); - Collection roots = TreeUtils.getRoots(graph); - if (roots.size() > 0 && graph != null) { - calculateDimensionX(roots); - for(V v : roots) { - calculateDimensionX(v); - m_currentPoint.x += this.basePositions.get(v)/2 + this.distX; - buildTree(v, this.m_currentPoint.x); - } - } - int width = 0; - for(V v : roots) { - width += basePositions.get(v); - } - } - - protected void buildTree(V v, int x) { - - if (!alreadyDone.contains(v)) { - alreadyDone.add(v); - - //go one level further down - this.m_currentPoint.y += this.distY; - this.m_currentPoint.x = x; - - this.setCurrentPositionFor(v); - - int sizeXofCurrent = basePositions.get(v); - - int lastX = x - sizeXofCurrent / 2; - - int sizeXofChild; - int startXofChild; - - for (V element : graph.getSuccessors(v)) { - sizeXofChild = this.basePositions.get(element); - startXofChild = lastX + sizeXofChild / 2; - buildTree(element, startXofChild); - lastX = lastX + sizeXofChild + distX; - } - this.m_currentPoint.y -= this.distY; - } - } - - private int calculateDimensionX(V v) { - - int size = 0; - int childrenNum = graph.getSuccessors(v).size(); - - if (childrenNum != 0) { - for (V element : graph.getSuccessors(v)) { - size += calculateDimensionX(element) + distX; - } - } - size = Math.max(0, size - distX); - basePositions.put(v, size); - - return size; - } - - private int calculateDimensionX(Collection roots) { - - int size = 0; - for(V v : roots) { - int childrenNum = graph.getSuccessors(v).size(); - - if (childrenNum != 0) { - for (V element : graph.getSuccessors(v)) { - size += calculateDimensionX(element) + distX; - } - } - size = Math.max(0, size - distX); - basePositions.put(v, size); - } - - return size; - } - - /** - * This method is not supported by this class. The size of the layout - * is determined by the topology of the tree, and by the horizontal - * and vertical spacing (optionally set by the constructor). - */ - public void setSize(Dimension size) { - throw new UnsupportedOperationException("Size of TreeLayout is set" + - " by vertex spacing in constructor"); - } - - protected void setCurrentPositionFor(V vertex) { - int x = m_currentPoint.x; - int y = m_currentPoint.y; - if(x < 0) size.width -= x; - - if(x > size.width-distX) - size.width = x + distX; - - if(y < 0) size.height -= y; - if(y > size.height-distY) - size.height = y + distY; - locations.get(vertex).setLocation(m_currentPoint); - - } - - public Graph getGraph() { - return graph; - } - - public Dimension getSize() { - return size; - } - - public void initialize() { - - } - - public boolean isLocked(V v) { - return false; - } - - public void lock(V v, boolean state) { - } - - public void reset() { - } - - public void setGraph(Graph graph) { - if(graph instanceof Forest) { - this.graph = (Forest)graph; - buildTree(); - } else { - throw new IllegalArgumentException("graph must be a Forest"); - } - } - - public void setInitializer(Transformer initializer) { - } - - /** - * Returns the center of this layout's area. 
- */ - public Point2D getCenter() { - return new Point2D.Double(size.getWidth()/2,size.getHeight()/2); - } - - public void setLocation(V v, Point2D location) { - locations.get(v).setLocation(location); - } - - public Point2D transform(V v) { - return locations.get(v); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/package.html deleted file mode 100644 index a5ed0d05..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/package.html +++ /dev/null @@ -1,40 +0,0 @@ - - - - - - - -Algorithms for assigning 2D coordinates (typically used for graph visualizations) -to vertices. -Current layout algorithms include: -
        -
      • Layout, AbstractLayout: interface and abstract class defining the Layout contract and handling -some common implementation details -
      • AggregateLayout: allows multiple layouts to be combined and manipulated as one layout -
      • BalloonLayout: places vertices on nested circles (trees/forests only) -
      • CircleLayout: places vertices on a circle -
      • DAGLayout: places vertices in a hierarchy (directed acyclic graphs only) -
      • FRLayout: Fruchterman-Reingold algorithm (force-directed) -
      • ISOMLayout: self-organizing map layout -
      • KKLayout: Kamada-Kawai algorithm (tries to maintain specified distances) -
      • RadialTreeLayout: places vertices on concentric circles (trees only) -
      • SpringLayout: simple force-directed layout -
      • StaticLayout: places vertices at user-specified locations -
      • TreeLayout: simple tree/forest layout -
      - -Rendering and other aspects of visualization are handled in the visualization package. - - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/util/RandomLocationTransformer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/util/RandomLocationTransformer.java deleted file mode 100644 index 34428b18..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/util/RandomLocationTransformer.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Created on Jul 19, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.layout.util; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.Date; -import java.util.Random; - -import org.apache.commons.collections15.Transformer; - -/** - * Transforms the input type into a random location within - * the bounds of the Dimension property. - * This is used as the backing Transformer for the LazyMap - * for many Layouts, - * and provides a random location for unmapped vertices - * the first time they are accessed. - * - * @author Tom Nelson - * - * @param - */ -public class RandomLocationTransformer implements Transformer { - - Dimension d; - Random random; - - /** - * Creates an instance with the specified size which uses the current time - * as the random seed. - */ - public RandomLocationTransformer(Dimension d) { - this(d, new Date().getTime()); - } - - /** - * Creates an instance with the specified dimension and random seed. - * @param d - * @param seed - */ - public RandomLocationTransformer(final Dimension d, long seed) { - this.d = d; - this.random = new Random(seed); - } - - public Point2D transform(V v) { - return new Point2D.Double(random.nextDouble() * d.width, random.nextDouble() * d.height); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/util/Relaxer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/util/Relaxer.java deleted file mode 100644 index a31113f1..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/util/Relaxer.java +++ /dev/null @@ -1,43 +0,0 @@ -package edu.uci.ics.jung.algorithms.layout.util; - -/** - * Interface for operating the relax iterations on a layout. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public interface Relaxer { - - /** - * Execute a loop of steps in a new Thread, - * firing an event after each step. - */ - void relax(); - - /** - * Execute a loop of steps in the calling - * thread, firing no events. - */ - void prerelax(); - - /** - * Make the relaxer thread wait. - */ - void pause(); - - /** - * Make the relaxer thread resume. - * - */ - void resume(); - - /** - * Set flags to stop the relaxer thread. - */ - void stop(); - - /** - * Sets the sleep time. - */ - void setSleepTime(long i); -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/util/VisRunner.java b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/util/VisRunner.java deleted file mode 100644 index 14f6dfc8..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/util/VisRunner.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. 
- * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - */ -package edu.uci.ics.jung.algorithms.layout.util; - -import edu.uci.ics.jung.algorithms.util.IterativeContext; - -/** - * - * Implementation of a relaxer thread for layouts. - * Extracted from the {@code VisualizationModel} in previous - * versions of JUNG. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public class VisRunner implements Relaxer, Runnable { - - protected boolean running; - protected IterativeContext process; - protected boolean stop; - protected boolean manualSuspend; - protected Thread thread; - - /** - * how long the relaxer thread pauses between iteration loops. - */ - protected long sleepTime = 100L; - - - /** - * Creates an instance for the specified process. - */ - public VisRunner(IterativeContext process) { - this.process = process; - } - - /** - * @return the relaxerThreadSleepTime - */ - public long getSleepTime() { - return sleepTime; - } - - /** - * @param sleepTime the sleep time to set for this thread - */ - public void setSleepTime(long sleepTime) { - this.sleepTime = sleepTime; - } - - public void prerelax() { - manualSuspend = true; - long timeNow = System.currentTimeMillis(); - while (System.currentTimeMillis() - timeNow < 500 && !process.done()) { - process.step(); - } - manualSuspend = false; - } - - public void pause() { - manualSuspend = true; - } - - public void relax() { - // in case its running - stop(); - stop = false; - thread = new Thread(this); - thread.setPriority(Thread.MIN_PRIORITY); - thread.start(); - } - - /** - * Used for synchronization. - */ - public Object pauseObject = new String("PAUSE OBJECT"); - - public void resume() { - manualSuspend = false; - if(running == false) { - prerelax(); - relax(); - } else { - synchronized(pauseObject) { - pauseObject.notifyAll(); - } - } - } - - public synchronized void stop() { - if(thread != null) { - manualSuspend = false; - stop = true; - // interrupt the relaxer, in case it is paused or sleeping - // this should ensure that visRunnerIsRunning gets set to false - try { thread.interrupt(); } - catch(Exception ex) { - // the applet security manager may have prevented this. - // just sleep for a second to let the thread stop on its own - try { Thread.sleep(1000); } - catch(InterruptedException ie) {} // ignore - } - synchronized (pauseObject) { - pauseObject.notifyAll(); - } - } - } - - public void run() { - running = true; - try { - while (!process.done() && !stop) { - synchronized (pauseObject) { - while (manualSuspend && !stop) { - try { - pauseObject.wait(); - } catch (InterruptedException e) { - // ignore - } - } - } - process.step(); - - if (stop) - return; - - try { - Thread.sleep(sleepTime); - } catch (InterruptedException ie) { - // ignore - } - } - - } finally { - running = false; - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/util/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/layout/util/package.html deleted file mode 100644 index 356f7d5f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/layout/util/package.html +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - -Utility classes for updating layout positions. 
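A small usage note for the RandomLocationTransformer above: passing an explicit seed gives reproducible "random" starting positions, which is convenient when testing a layout (the vertex labels below are placeholders):

    import java.awt.Dimension;

    import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer;

    public class RandomStartDemo {
        public static void main(String[] args) {
            // Fixed seed => the same sequence of starting points on every run.
            RandomLocationTransformer<String> init =
                    new RandomLocationTransformer<String>(new Dimension(400, 300), 42L);

            System.out.println(init.transform("v0"));   // some point inside 400 x 300
            System.out.println(init.transform("v1"));
        }
    }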
- - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/matrix/GraphMatrixOperations.java b/gui/jung-src/edu/uci/ics/jung/algorithms/matrix/GraphMatrixOperations.java deleted file mode 100644 index 6e6e775f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/matrix/GraphMatrixOperations.java +++ /dev/null @@ -1,378 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. This software is open-source under the BSD - * license; see either "license.txt" or http://jung.sourceforge.net/license.txt - * for a description. - */ -package edu.uci.ics.jung.algorithms.matrix; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; - -import org.apache.commons.collections15.BidiMap; -import org.apache.commons.collections15.Factory; - -import cern.colt.matrix.DoubleMatrix1D; -import cern.colt.matrix.DoubleMatrix2D; -import cern.colt.matrix.impl.DenseDoubleMatrix1D; -import cern.colt.matrix.impl.SparseDoubleMatrix2D; -import cern.colt.matrix.linalg.Algebra; -import edu.uci.ics.jung.algorithms.util.ConstantMap; -import edu.uci.ics.jung.algorithms.util.Indexer; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; - - -/** - * Contains methods for performing the analogues of certain matrix operations on - * graphs. - *

      - * These implementations are efficient on sparse graphs, but may not be the best - * implementations for very dense graphs. - * - * @author Joshua O'Madadhain - * @see MatrixElementOperations - */ -public class GraphMatrixOperations -{ - /** - * Returns the graph that corresponds to the square of the (weighted) - * adjacency matrix that the specified graph g encodes. The - * implementation of MatrixElementOperations that is furnished to the - * constructor specifies the implementation of the dot product, which is an - * integral part of matrix multiplication. - * - * @param g - * the graph to be squared - * @return the result of squaring g - */ - @SuppressWarnings("unchecked") - public static Graph square(Graph g, - Factory edgeFactory, MatrixElementOperations meo) - { - // create new graph of same type - Graph squaredGraph = null; - try { - squaredGraph = g.getClass().newInstance(); - } catch (InstantiationException e3) { - e3.printStackTrace(); - } catch (IllegalAccessException e3) { - e3.printStackTrace(); - } - - Collection vertices = g.getVertices(); - for (V v : vertices) - { - squaredGraph.addVertex(v); - } - for (V v : vertices) - { - for (V src : g.getPredecessors(v)) - { - // get the edge connecting src to v in G - E e1 = g.findEdge(src,v); - for (V dest : g.getSuccessors(v)) - { - // get edge connecting v to dest in G - E e2 = g.findEdge(v,dest); - // collect data on path composed of e1 and e2 - Number pathData = meo.computePathData(e1, e2); - E e = squaredGraph.findEdge(src,dest); - // if no edge from src to dest exists in G2, create one - if (e == null) { - e = edgeFactory.create(); - squaredGraph.addEdge(e, src, dest); - } - meo.mergePaths(e, pathData); - } - } - } - return squaredGraph; - } - - /** - * Creates a graph from a square (weighted) adjacency matrix. If - * nev is non-null then it will be used to store the edge weights. - * - *

      Notes on implementation: - *

        - *
      • The matrix indices will be mapped onto vertices in the order in which the - * vertex factory generates the vertices. This means the user is responsible - *
      • The type of edges created (directed or undirected) depends - * entirely on the graph factory supplied, regardless of whether the - * matrix is symmetric or not. The Colt {@code Property.isSymmetric} - * method may be used to find out whether the matrix - * is symmetric prior to making this call. - *
      • The matrix supplied need not be square. If it is not square, then - * the - * - * @return a representation of matrix as a JUNG - * Graph - */ - public static Graph matrixToGraph(DoubleMatrix2D matrix, - Factory> graphFactory, - Factory vertexFactory, Factory edgeFactory, - Map nev) - { - if (matrix.rows() != matrix.columns()) - { - throw new IllegalArgumentException("Matrix must be square."); - } - int size = matrix.rows(); - - Graph graph = graphFactory.create(); - - for(int i = 0; i < size; i++) - { - V vertex = vertexFactory.create(); - graph.addVertex(vertex); - } - - List vertices = new ArrayList(graph.getVertices()); - for (int i = 0; i < size; i++) - { - for (int j = 0; j < size; j++) - { - double value = matrix.getQuick(i, j); - if (value != 0) - { - E e = edgeFactory.create(); - if (graph.addEdge(e, vertices.get(i), vertices.get(j))) - { - if (e != null && nev != null) - nev.put(e, value); - } - } - } - } - - - return graph; - } - - - /** - * Creates a graph from a square (weighted) adjacency matrix. - * - * @return a representation of matrix as a JUNG Graph - */ - public static Graph matrixToGraph(DoubleMatrix2D matrix, - Factory> graphFactory, - Factory vertexFactory, Factory edgeFactory) - { - return GraphMatrixOperations.matrixToGraph(matrix, - graphFactory, vertexFactory, edgeFactory, null); - } - - /** - * Returns an unweighted (0-1) adjacency matrix based on the specified graph. - * @param the vertex type - * @param the edge type - * @param g the graph to convert to a matrix - */ - public static SparseDoubleMatrix2D graphToSparseMatrix(Graph g) - { - return graphToSparseMatrix(g, null); - } - - /** - * Returns a SparseDoubleMatrix2D whose entries represent the edge weights for the - * edges in g, as specified by nev. - * - *

        The (i,j) entry of the matrix returned will be equal to the sum - * of the weights of the edges connecting the vertex with index i to - * j. - * - *

        If nev is null, then a constant edge weight of 1 is used. - * - * @param g - * @param nev - */ - public static SparseDoubleMatrix2D graphToSparseMatrix(Graph g, Map nev) - { - if (nev == null) - nev = new ConstantMap(1); - int numVertices = g.getVertexCount(); - SparseDoubleMatrix2D matrix = new SparseDoubleMatrix2D(numVertices, - numVertices); - - BidiMap indexer = Indexer.create(g.getVertices()); - int i=0; - - for(V v : g.getVertices()) - { - for (E e : g.getOutEdges(v)) - { - V w = g.getOpposite(v,e); - int j = indexer.get(w); - matrix.set(i, j, matrix.getQuick(i,j) + nev.get(e).doubleValue()); - } - i++; - } - return matrix; - } - - /** - * Returns a diagonal matrix whose diagonal entries contain the degree for - * the corresponding node. - * - *

        NOTE: the vertices will be traversed in the order given by the graph's vertex - * collection. If you want to be assured of a particular ordering, use a graph - * implementation that guarantees such an ordering (see the implementations with {@code Ordered} - * or {@code Sorted} in their name). - * - * @return SparseDoubleMatrix2D - */ - public static SparseDoubleMatrix2D createVertexDegreeDiagonalMatrix(Graph graph) - { - int numVertices = graph.getVertexCount(); - SparseDoubleMatrix2D matrix = new SparseDoubleMatrix2D(numVertices, - numVertices); - int i = 0; - for (V v : graph.getVertices()) - { - matrix.set(i, i, graph.degree(v)); - i++; - } - return matrix; - } - - /** - * The idea here is based on the metaphor of an electric circuit. We assume - * that an undirected graph represents the structure of an electrical - * circuit where each edge has unit resistance. One unit of current is - * injected into any arbitrary vertex s and one unit of current is extracted - * from any arbitrary vertex t. The voltage at some vertex i for source - * vertex s and target vertex t can then be measured according to the - * equation: V_i^(s,t) = T_is - T-it where T is the voltage potential matrix - * returned by this method. * - * - * @param graph - * an undirected graph representing an electrical circuit - * @return the voltage potential matrix - * @see "P. Doyle and J. Snell, 'Random walks and electric networks,', 1989" - * @see "M. Newman, 'A measure of betweenness centrality based on random walks', pp. 5-7, 2003" - */ - public static DoubleMatrix2D computeVoltagePotentialMatrix( - UndirectedGraph graph) - { - int numVertices = graph.getVertexCount(); - //create adjacency matrix from graph - DoubleMatrix2D A = GraphMatrixOperations.graphToSparseMatrix(graph, - null); - //create diagonal matrix of vertex degrees - DoubleMatrix2D D = GraphMatrixOperations - .createVertexDegreeDiagonalMatrix(graph); - DoubleMatrix2D temp = new SparseDoubleMatrix2D(numVertices - 1, - numVertices - 1); - //compute D - A except for last row and column - for (int i = 0; i < numVertices - 1; i++) - { - for (int j = 0; j < numVertices - 1; j++) - { - temp.set(i, j, D.get(i, j) - A.get(i, j)); - } - } - Algebra algebra = new Algebra(); - DoubleMatrix2D tempInverse = algebra.inverse(temp); - DoubleMatrix2D T = new SparseDoubleMatrix2D(numVertices, numVertices); - //compute "voltage" matrix - for (int i = 0; i < numVertices - 1; i++) - { - for (int j = 0; j < numVertices - 1; j++) - { - T.set(i, j, tempInverse.get(i, j)); - } - } - return T; - } - - /** - * Converts a Map of (Vertex, Double) pairs to a DoubleMatrix1D. - * - *

        Note: the vertices will appear in the output array in the order given - * by {@code map}'s iterator. If you want a particular ordering, use a {@code Map} - * implementation that provides such an ordering ({@code SortedMap, LinkedHashMap}, etc.). - */ - public static DoubleMatrix1D mapTo1DMatrix(Map map) - { - int numVertices = map.size(); - DoubleMatrix1D vector = new DenseDoubleMatrix1D(numVertices); - int i = 0; - for (V v : map.keySet()) - { - vector.set(i, map.get(v).doubleValue()); - i++; - } - return vector; - } - - /** - * Computes the all-pairs mean first passage time for the specified graph, - * given an existing stationary probability distribution. - *

        - * The mean first passage time from vertex v to vertex w is defined, for a - * Markov network (in which the vertices represent states and the edge - * weights represent state->state transition probabilities), as the expected - * number of steps required to travel from v to w if the steps occur - * according to the transition probabilities. - *

        - * The stationary distribution is the fraction of time, in the limit as the - * number of state transitions approaches infinity, that a given state will - * have been visited. Equivalently, it is the probability that a given state - * will be the current state after an arbitrarily large number of state - * transitions. - * - * @param G - * the graph on which the MFPT will be calculated - * @param edgeWeights - * the edge weights - * @param stationaryDistribution - * the asymptotic state probabilities - * @return the mean first passage time matrix - */ - public static DoubleMatrix2D computeMeanFirstPassageMatrix(Graph G, - Map edgeWeights, DoubleMatrix1D stationaryDistribution) - { - DoubleMatrix2D temp = GraphMatrixOperations.graphToSparseMatrix(G, - edgeWeights); - for (int i = 0; i < temp.rows(); i++) - { - for (int j = 0; j < temp.columns(); j++) - { - double value = -1 * temp.get(i, j) - + stationaryDistribution.get(j); - if (i == j) - value += 1; - if (value != 0) - temp.set(i, j, value); - } - } - Algebra algebra = new Algebra(); - DoubleMatrix2D fundamentalMatrix = algebra.inverse(temp); - temp = new SparseDoubleMatrix2D(temp.rows(), temp.columns()); - for (int i = 0; i < temp.rows(); i++) - { - for (int j = 0; j < temp.columns(); j++) - { - double value = -1.0 * fundamentalMatrix.get(i, j); - value += fundamentalMatrix.get(j, j); - if (i == j) - value += 1; - if (value != 0) - temp.set(i, j, value); - } - } - DoubleMatrix2D stationaryMatrixDiagonal = new SparseDoubleMatrix2D(temp - .rows(), temp.columns()); - int numVertices = stationaryDistribution.size(); - for (int i = 0; i < numVertices; i++) - stationaryMatrixDiagonal.set(i, i, 1.0 / stationaryDistribution - .get(i)); - DoubleMatrix2D meanFirstPassageMatrix = algebra.mult(temp, - stationaryMatrixDiagonal); - return meanFirstPassageMatrix; - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/matrix/MatrixElementOperations.java b/gui/jung-src/edu/uci/ics/jung/algorithms/matrix/MatrixElementOperations.java deleted file mode 100644 index 1124bdf0..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/matrix/MatrixElementOperations.java +++ /dev/null @@ -1,73 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.matrix; - -import java.util.Map; - - -/** - * An interface for specifying the behavior of graph/matrix operations - * for a particular element type. - *

        - * Graph/matrix multiplication requires the definition of two operations: - *

        - *

          - *
1. Calculating an aggregate property of paths of length 2 between two - * vertices v1 and v2 (analogous to element multiplication in matrix - * arithmetic); this is handled by computePathData(). - *
2. Aggregating the properties of all such paths, and assigning the result to - * a new edge in the output graph (analogous to element addition in matrix - * arithmetic); this is handled by mergePaths(). - *

        - * Together, computePathData() and mergePaths() specify how the equivalent of - * the vector inner (dot) product is to function. - *

        - * For instance, to implement the equivalent of standard matrix multiplication - * on two graphs, computePathData() should return the products of the - * weights of a two-edge path, and mergePaths() should add - * the output of computePathData() to an existing edge (or possibly create such - * an edge if none exists). - * - * @author Joshua O'Madadhain - */ -public interface MatrixElementOperations -{ - /** - * If either e or pathData is null, the effect of mergePaths() is - * implementation-dependent. - * - * @param e (possibly) existing edge in the output graph which - * represents a path in the input graph(s) - * - * @param pathData data (which represents another path with the same source - * and destination as e in the input graphs) which is to be merged into e - */ - public void mergePaths(E e, Object pathData); - - /** - * If either e1 or e2 is null, the Object reference returned should be null. - * - * @param e1 first edge from 2-edge path in input graph(s) - * @param e2 second edge from 2-edge path in input graph(s) - * @return aggregation of data from the edges of the 2-edge path - * (from source of e1 to destination of e2) comprised of (e1, e2) - */ - public Number computePathData(E e1, E e2); - - /** - * Returns a map from edges to values. - */ - public Map getEdgeData(); -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/matrix/RealMatrixElementOperations.java b/gui/jung-src/edu/uci/ics/jung/algorithms/matrix/RealMatrixElementOperations.java deleted file mode 100644 index ada1406c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/matrix/RealMatrixElementOperations.java +++ /dev/null @@ -1,68 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.matrix; - - -import java.util.HashMap; -import java.util.Map; - -/** - * Implements the basic matrix operations on double-precision values. Assumes - * that the edges have a MutableDouble value. - * - * @author Joshua O'Madadhain - */ -public class RealMatrixElementOperations implements MatrixElementOperations -{ - private Map edgeData = new HashMap(); - - /** - * Creates an instance using the specified edge values. - */ - public RealMatrixElementOperations(Map edgeData) - { - this.edgeData = edgeData; - } - - /** - * @see MatrixElementOperations#mergePaths(Object, Object) - */ - public void mergePaths(E e, Object pathData) - { - - Number pd = (Number)pathData; - Number ed = edgeData.get(e); - if (ed == null) { - edgeData.put(e, pd); - - } else { - edgeData.put(e, ed.doubleValue()+pd.doubleValue()); - - } - - } - - /** - * @see MatrixElementOperations#computePathData(Object, Object) - */ - public Number computePathData(E e1, E e2) - { - double d1 = edgeData.get(e1).doubleValue(); - double d2 = edgeData.get(e2).doubleValue(); - return d1*d2; - } - - /** - * @return the edgeData - */ - public Map getEdgeData() { - return edgeData; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/matrix/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/matrix/package.html deleted file mode 100644 index 6025a412..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/matrix/package.html +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - -Mechanisms for dealing with graphs as matrices. 
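To see what computePathData/mergePaths amount to in the RealMatrixElementOperations case above, here is one entry of a "squared" weighted adjacency matrix computed by hand in plain Java (the 3-vertex matrix is made up for illustration):

    public class PathProductDemo {
        public static void main(String[] args) {
            // Weighted adjacency matrix over vertices 0..2: 0 -> 1 with weight 2, 1 -> 2 with weight 3.
            double[][] w = {
                {0, 2, 0},
                {0, 0, 3},
                {0, 0, 0},
            };
            int n = w.length;

            // Entry (0, 2) of the squared graph: for each intermediate vertex k,
            // computePathData() multiplies w[0][k] * w[k][2]; mergePaths() sums the products.
            double entry = 0;
            for (int k = 0; k < n; k++) {
                entry += w[0][k] * w[k][2];
            }
            System.out.println(entry);   // 2 * 3 = 6
        }
    }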
These include conversion to and -from Colt matrices, and some matrix algorithms. - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/metrics/Metrics.java b/gui/jung-src/edu/uci/ics/jung/algorithms/metrics/Metrics.java deleted file mode 100644 index 1dfcf123..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/metrics/Metrics.java +++ /dev/null @@ -1,70 +0,0 @@ -/** - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Jun 7, 2008 - * - */ -package edu.uci.ics.jung.algorithms.metrics; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Map; - -import edu.uci.ics.jung.graph.Graph; - -/** - * A class consisting of static methods for calculating graph metrics. - */ -public class Metrics -{ - /** - * Returns a Map of vertices to their clustering coefficients. - * The clustering coefficient cc(v) of a vertex v is defined as follows: - *

          - *
        • degree(v) == {0,1}: 0 - *
• degree(v) == n, n >= 2: given S, the set of neighbors - * of v: cc(v) = (the sum over all w in S of the number of - * other elements of S that are neighbors of w) / (|S| * (|S| - 1) / 2). - * Less formally, the fraction of v's neighbors that are also - * neighbors of each other. -

          Note: This algorithm treats its argument as an undirected graph; - * edge direction is ignored. - * @param graph the graph whose clustering coefficients are to be calculated - * @see "The structure and function of complex networks, M.E.J. Newman, aps.arxiv.org/abs/cond-mat/0303516" - */ - public static Map clusteringCoefficients(Graph graph) - { - Map coefficients = new HashMap(); - - for (V v : graph.getVertices()) - { - int n = graph.getNeighborCount(v); - if (n < 2) - coefficients.put(v, new Double(0)); - else - { - // how many of v's neighbors are connected to each other? - ArrayList neighbors = new ArrayList(graph.getNeighbors(v)); - double edge_count = 0; - for (int i = 0; i < n; i++) - { - V w = neighbors.get(i); - for (int j = i+1; j < n; j++ ) - { - V x = neighbors.get(j); - edge_count += graph.isNeighbor(w, x) ? 1 : 0; - } - } - double possible_edges = (n * (n - 1))/2.0; - coefficients.put(v, new Double(edge_count / possible_edges)); - } - } - - return coefficients; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/metrics/StructuralHoles.java b/gui/jung-src/edu/uci/ics/jung/algorithms/metrics/StructuralHoles.java deleted file mode 100644 index aec84b9b..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/metrics/StructuralHoles.java +++ /dev/null @@ -1,310 +0,0 @@ -/* - * Created on Sep 19, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.metrics; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; - -/** - * Calculates some of the measures from Burt's text "Structural Holes: - * The Social Structure of Competition". - * - *
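A small worked example of the clustering-coefficient definition above, computed directly from an adjacency matrix in plain Java (the 4-vertex graph is made up; Metrics.clusteringCoefficients should report the same value for vertex 0):

    import java.util.ArrayList;
    import java.util.List;

    public class ClusteringCoefficientDemo {
        public static void main(String[] args) {
            // Undirected graph: vertex 0 is adjacent to 1, 2 and 3; among those
            // neighbours only the pair (1, 2) is connected.
            boolean[][] adj = {
                {false, true,  true,  true },
                {true,  false, true,  false},
                {true,  true,  false, false},
                {true,  false, false, false},
            };

            int v = 0;
            List<Integer> neighbours = new ArrayList<Integer>();
            for (int u = 0; u < adj.length; u++) {
                if (adj[v][u]) neighbours.add(u);
            }

            int n = neighbours.size();
            double edges = 0;
            for (int i = 0; i < n; i++) {
                for (int j = i + 1; j < n; j++) {
                    if (adj[neighbours.get(i)][neighbours.get(j)]) edges++;
                }
            }
            double cc = (n < 2) ? 0 : edges / (n * (n - 1) / 2.0);
            System.out.println(cc);   // 1 connected pair out of 3 possible => 0.333...
        }
    }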

          Notes: - *

            - *
          • Each of these measures assumes that each edge has an associated - * non-null weight whose value is accessed through the specified - * Transformer instance. - *
          • Nonexistent edges are treated as edges with weight 0 for purposes - * of edge weight calculations. - *
          - * - *

          Based on code donated by Jasper Voskuilen and - * Diederik van Liere of the Department of Information and Decision Sciences - * at Erasmus University.
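- * 
- * A minimal usage sketch (the graph g, the edge-weight Transformer edgeWeights,
- * and the vertex v are assumed to be supplied by the caller; names are illustrative):
- * 
- *   StructuralHoles<V, E> sh = new StructuralHoles<V, E>(g, edgeWeights);
- *   double size       = sh.effectiveSize(v);
- *   double efficiency = sh.efficiency(v);
- *   double constraint = sh.constraint(v);
- * 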

          - * - * @author Joshua O'Madadhain - * @author Jasper Voskuilen - * @see "Ronald Burt, Structural Holes: The Social Structure of Competition" - * @author Tom Nelson - converted to jung2 - */ -public class StructuralHoles { - - protected Transformer edge_weight; - protected Graph g; - - /** - * Creates a StructuralHoles instance based on the - * edge weights specified by nev. - */ - public StructuralHoles(Graph graph, Transformer nev) - { - this.g = graph; - this.edge_weight = nev; - } - - /** - * Burt's measure of the effective size of a vertex's network. Essentially, the - * number of neighbors minus the average degree of those in v's neighbor set, - * not counting ties to v. Formally: - *
          -     * effectiveSize(v) = v.degree() - (sum_{u in N(v)} sum_{w in N(u), w !=u,v} p(v,w)*m(u,w))
          -     * 
          - * where - *
            - *
          • N(a) = a.getNeighbors() - *
          • p(v,w) = normalized mutual edge weight of v and w - *
          • m(u,w) = maximum-scaled mutual edge weight of u and w - *
          - * @see #normalizedMutualEdgeWeight(Object, Object) - * @see #maxScaledMutualEdgeWeight(Object, Object) - */ - public double effectiveSize(V v) - { - double result = g.degree(v); - for(V u : g.getNeighbors(v)) { - - for(V w : g.getNeighbors(u)) { - - if (w != v && w != u) - result -= normalizedMutualEdgeWeight(v,w) * - maxScaledMutualEdgeWeight(u,w); - } - } - return result; - } - - /** - * Returns the effective size of v divided by the number of - * alters in v's network. (In other words, - * effectiveSize(v) / v.degree().) - * If v.degree() == 0, returns 0. - */ - public double efficiency(V v) { - double degree = g.degree(v); - - if (degree == 0) - return 0; - else - return effectiveSize(v) / degree; - } - - /** - * Burt's constraint measure (equation 2.4, page 55 of Burt, 1992). Essentially a - * measure of the extent to which v is invested in people who are invested in - * other of v's alters (neighbors). The "constraint" is characterized - * by a lack of primary holes around each neighbor. Formally: - *
          -     * constraint(v) = sum_{w in MP(v), w != v} localConstraint(v,w)
          -     * 
          - * where MP(v) is the subset of v's neighbors that are both predecessors and successors of v. - * @see #localConstraint(Object, Object) - */ - public double constraint(V v) { - double result = 0; - for(V w : g.getSuccessors(v)) { - - if (v != w && g.isPredecessor(v,w)) - { - result += localConstraint(v, w); - } - } - - return result; - } - - - /** - * Calculates the hierarchy value for a given vertex. Returns NaN when - * v's degree is 0, and 1 when v's degree is 1. - * Formally: - *
-     * hierarchy(v) = (sum_{w in N(v), w != v} s(v,w) * log(s(v,w))) / (v.degree() * Math.log(v.degree()))
          -     * 
          - * where - *
            - *
          • N(v) = v.getNeighbors() - *
          • s(v,w) = localConstraint(v,w) / (aggregateConstraint(v) / v.degree()) - *
          - * @see #localConstraint(Object, Object) - * @see #aggregateConstraint(Object) - */ - public double hierarchy(V v) - { - double v_degree = g.degree(v); - - if (v_degree == 0) - return Double.NaN; - if (v_degree == 1) - return 1; - - double v_constraint = aggregateConstraint(v); - - double numerator = 0; - for (V w : g.getNeighbors(v)) { - - if (v != w) - { - double sl_constraint = localConstraint(v, w) / (v_constraint / v_degree); - numerator += sl_constraint * Math.log(sl_constraint); - } - } - - return numerator / (v_degree * Math.log(v_degree)); - } - - /** - * Returns the local constraint on v from a lack of primary holes - * around its neighbor v2. - * Based on Burt's equation 2.4. Formally: - *
          -     * localConstraint(v1, v2) = ( p(v1,v2) + ( sum_{w in N(v)} p(v1,w) * p(w, v2) ) )^2
          -     * 
          - * where - *
            - *
          • N(v) = v.getNeighbors() - *
          • p(v,w) = normalized mutual edge weight of v and w - *
          - * @see #normalizedMutualEdgeWeight(Object, Object) - */ - public double localConstraint(V v1, V v2) - { - double nmew_vw = normalizedMutualEdgeWeight(v1, v2); - double inner_result = 0; - for (V w : g.getNeighbors(v1)) { - - inner_result += normalizedMutualEdgeWeight(v1,w) * - normalizedMutualEdgeWeight(w,v2); - } - return (nmew_vw + inner_result) * (nmew_vw + inner_result); - } - - /** - * The aggregate constraint on v. Based on Burt's equation 2.7. - * Formally: - *
          -     * aggregateConstraint(v) = sum_{w in N(v)} localConstraint(v,w) * O(w)
          -     * 
          - * where - *
            - *
          • N(v) = v.getNeighbors() - *
          • O(w) = organizationalMeasure(w) - *
          - */ - public double aggregateConstraint(V v) - { - double result = 0; - for (V w : g.getNeighbors(v)) { - - result += localConstraint(v, w) * organizationalMeasure(g, w); - } - return result; - } - - /** - * A measure of the organization of individuals within the subgraph - * centered on v. Burt's text suggests that this is - * in some sense a measure of how "replaceable" v is by - * some other element of this subgraph. Should be a number in the - * closed interval [0,1]. - * - *

          This implementation returns 1. Users may wish to override this - * method in order to define their own behavior.
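- * For example, a subclass might substitute a precomputed per-vertex measure
- * (a sketch; the map orgScores is hypothetical and must hold values in [0,1]):
- * 
- *   @Override
- *   protected double organizationalMeasure(Graph<V,E> g, V v) {
- *       return orgScores.get(v);  // user-supplied value in [0,1]
- *   }
- * 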

          - */ - protected double organizationalMeasure(Graph g, V v) { - return 1.0; - } - - - /** - * Returns the proportion of v1's network time and energy invested - * in the relationship with v2. Formally: - *
          -     * normalizedMutualEdgeWeight(a,b) = mutual_weight(a,b) / (sum_c mutual_weight(a,c))
          -     * 
          - * Returns 0 if either numerator or denominator = 0, or if v1 == v2. - * @see #mutualWeight(Object, Object) - */ - protected double normalizedMutualEdgeWeight(V v1, V v2) - { - if (v1 == v2) - return 0; - - double numerator = mutualWeight(v1, v2); - - if (numerator == 0) - return 0; - - double denominator = 0; - for (V v : g.getNeighbors(v1)) { - denominator += mutualWeight(v1, v); - } - if (denominator == 0) - return 0; - - return numerator / denominator; - } - - /** - * Returns the weight of the edge from v1 to v2 - * plus the weight of the edge from v2 to v1; - * if either edge does not exist, it is treated as an edge with weight 0. - * Undirected edges are treated as two antiparallel directed edges (that - * is, if there is one undirected edge with weight w connecting - * v1 to v2, the value returned is 2w). - * Ignores parallel edges; if there are any such, one is chosen at random. - * Throws NullPointerException if either edge is - * present but not assigned a weight by the constructor-specified - * NumberEdgeValue. - */ - protected double mutualWeight(V v1, V v2) - { - E e12 = g.findEdge(v1,v2); - E e21 = g.findEdge(v2,v1); - double w12 = (e12 != null ? edge_weight.transform(e12).doubleValue() : 0); - double w21 = (e21 != null ? edge_weight.transform(e21).doubleValue() : 0); - - return w12 + w21; - } - - /** - * The marginal strength of v1's relation with contact vertex2. - * Formally: - *
-     * max_scaled_mutual_weight(a,b) = mutual_weight(a,b) / (max_c mutual_weight(a,c))
          -     * 
          - * Returns 0 if either numerator or denominator is 0, or if v1 == v2. - * @see #mutualWeight(Object, Object) - */ - protected double maxScaledMutualEdgeWeight(V v1, V v2) - { - if (v1 == v2) - return 0; - - double numerator = mutualWeight(v1, v2); - - if (numerator == 0) - return 0; - - double denominator = 0; - for (V w : g.getNeighbors(v1)) { - - if (v2 != w) - denominator = Math.max(numerator, mutualWeight(v1, w)); - } - - if (denominator == 0) - return 0; - - return numerator / denominator; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/metrics/TriadicCensus.java b/gui/jung-src/edu/uci/ics/jung/algorithms/metrics/TriadicCensus.java deleted file mode 100644 index 634eb3bc..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/metrics/TriadicCensus.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.metrics; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.apache.commons.collections15.CollectionUtils; - -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.Graph; - - -/** - * TriadicCensus is a standard social network tool that counts, for each of the - * different possible configurations of three vertices, the number of times - * that that configuration occurs in the given graph. - * This may then be compared to the set of expected counts for this particular - * graph or to an expected sample. This is often used in p* modeling. - *

          - * To use this class, - *

- * long[] triad_counts = TriadicCensus.getCounts(dg);
          - * 
- * where dg is a DirectedGraph. - * The ith element of the array (for i in [1,16]) is the number of - * occurrences of the corresponding triad type. - * (The 0th element is not meaningful; this array is effectively 1-based.) - * To get the name of the ith triad (e.g. "003"), - * look at the constant array TriadicCensus.TRIAD_NAMES[i]. - *
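- * For example, to print the full census (a sketch; dg is assumed to be an
- * existing DirectedGraph):
- * 
- *   long[] counts = TriadicCensus.getCounts(dg);
- *   for (int i = 1; i < TriadicCensus.MAX_TRIADS; i++)
- *       System.out.println(TriadicCensus.TRIAD_NAMES[i] + ": " + counts[i]);
- * 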

- * Triads are named as - * (number of pairs that are mutually tied) - * (number of pairs that are one-way tied) - * (number of non-tied pairs) - * in the triple. Since there are only three pairs, there is a finite - * set of these possible triads. - *

          - * In fact, there are exactly 16, conventionally sorted by the number of - * realized edges in the triad: - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
- * Number  Configuration  Notes
- * 1       003            The empty triad
- * 2       012
- * 3       102
- * 4       021D           "Down": the directed edges point away
- * 5       021U           "Up": the directed edges meet
- * 6       021C           "Circle": one in, one out
- * 7       111D           "Down": 021D but one edge is mutual
- * 8       111U           "Up": 021U but one edge is mutual
- * 9       030T           "Transitive": two point to the same vertex
- * 10      030C           "Circle": A->B->C->A
- * 11      201
- * 12      120D           "Down": 021D but the third edge is mutual
- * 13      120U           "Up": 021U but the third edge is mutual
- * 14      120C           "Circle": 021C but the third edge is mutual
- * 15      210
- * 16      300            The complete triad
          - *

- * This implementation takes O(m) time, where m is the number of edges in the graph. - *
          - * It is based on - * - * A subquadratic triad census algorithm for large sparse networks - * with small maximum degree - * Vladimir Batagelj and Andrej Mrvar, University of Ljubljana - * Published in Social Networks. - * @author Danyel Fisher - * @author Tom Nelson - converted to jung2 - * - */ -public class TriadicCensus { - - // NOTE THAT THIS RETURNS STANDARD 1-16 COUNT! - - // and their types - public static final String[] TRIAD_NAMES = { "N/A", "003", "012", "102", "021D", - "021U", "021C", "111D", "111U", "030T", "030C", "201", "120D", - "120U", "120C", "210", "300" }; - - public static final int MAX_TRIADS = TRIAD_NAMES.length; - - /** - * Returns an array whose ith element (for i in [1,16]) is the number of - * occurrences of the corresponding triad type in g. - * (The 0th element is not meaningful; this array is effectively 1-based.) - * - * @param g - */ - public static long[] getCounts(DirectedGraph g) { - long[] count = new long[MAX_TRIADS]; - - List id = new ArrayList(g.getVertices()); - - // apply algorithm to each edge, one at at time - for (int i_v = 0; i_v < g.getVertexCount(); i_v++) { - V v = id.get(i_v); - for(V u : g.getNeighbors(v)) { - int triType = -1; - if (id.indexOf(u) <= i_v) - continue; - Set neighbors = new HashSet(CollectionUtils.union(g.getNeighbors(u), g.getNeighbors(v))); - neighbors.remove(u); - neighbors.remove(v); - if (g.isSuccessor(v,u) && g.isSuccessor(u,v)) { - triType = 3; - } else { - triType = 2; - } - count[triType] += g.getVertexCount() - neighbors.size() - 2; - for (V w : neighbors) { - if (shouldCount(g, id, u, v, w)) { - count [ triType ( triCode(g, u, v, w) ) ] ++; - } - } - } - } - int sum = 0; - for (int i = 2; i <= 16; i++) { - sum += count[i]; - } - int n = g.getVertexCount(); - count[1] = n * (n-1) * (n-2) / 6 - sum; - return count; - } - - /** - * This is the core of the technique in the paper. Returns an int from 0 to - * 65 based on: WU -> 32 UW -> 16 WV -> 8 VW -> 4 UV -> 2 VU -> 1 - * - */ - public static int triCode(Graph g, V u, V v, V w) { - int i = 0; - i += link(g, v, u ) ? 1 : 0; - i += link(g, u, v ) ? 2 : 0; - i += link(g, v, w ) ? 4 : 0; - i += link(g, w, v ) ? 8 : 0; - i += link(g, u, w ) ? 16 : 0; - i += link(g, w, u ) ? 32 : 0; - return i; - } - - protected static boolean link(Graph g, V a, V b) { - return g.isPredecessor(b, a); - } - - - /** - * Simply returns the triCode. - * @param triCode - * @return the string code associated with the numeric type - */ - public static int triType( int triCode ) { - return codeToType[ triCode ]; - } - - /** - * For debugging purposes, this is copied straight out of the paper which - * means that they refer to triad types 1-16. 
- */ - protected static final int[] codeToType = { 1, 2, 2, 3, 2, 4, 6, 8, 2, 6, 5, 7, 3, 8, - 7, 11, 2, 6, 4, 8, 5, 9, 9, 13, 6, 10, 9, 14, 7, 14, 12, 15, 2, 5, - 6, 7, 6, 9, 10, 14, 4, 9, 9, 12, 8, 13, 14, 15, 3, 7, 8, 11, 7, 12, - 14, 15, 8, 14, 13, 15, 11, 15, 15, 16 }; - - /** - * Make sure we have a canonical ordering: Returns true if u < w, or v < w < - * u and v doesn't link to w - * - * @param id - * @param u - * @param v - * @param w - * @return true if u < w, or if v < w < u and v doesn't link to w; false otherwise - */ - protected static boolean shouldCount(Graph g, List id, V u, V v, V w) { - int i_u = id.indexOf(u); - int i_w = id.indexOf(w); - if (i_u < i_w) - return true; - int i_v = id.indexOf(v); - if ((i_v < i_w) && (i_w < i_u) && (!g.isNeighbor(w,v))) - return true; - return false; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/metrics/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/metrics/package.html deleted file mode 100644 index ce5144b9..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/metrics/package.html +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - - -Specialized measures for graph properties. These currently include: - -

            -
          • StructuralHoles: calculates some of Burt's 'structural holes' -measures (e.g. efficiency, hierarchy, constraint). -
          • TriadicCensus: returns counts for each triad type found in a -graph. -
          - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/package.html deleted file mode 100644 index f9d2e250..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/package.html +++ /dev/null @@ -1,61 +0,0 @@ - - - - - - - -

          Algorithms for graphs and networks.

          - -

          These algorithms are divided into categories as follows: -

            -
          • blockmodel: dividing graph elements (typically vertices) into -equivalence classes, -generally by topological properties (e.g. structural equivalence) -
          • cluster: identifying coherent (not necessarily disjoint) groups of elements -(e.g. weakly connected components, edge betweenness clustering) -
          • filters: removing parts of a graph according to specified criteria -
          • flows: calculating properties relating to network flows -(e.g. max flow/min cut) -
          • generators: creating graphs with certain properties -
          • importance (deprecated): assigning values to vertices/edges -based on topological properties -
          • layout: arrangement of graph elements, generally for visualization -
          • metrics: calculating structural properties (triad census, structural -holes) -
          • scoring: assigning values (denoting significance, influence, -centrality, etc.) to vertices/edges based on topological properties, -e.g. PageRank, HITS, betweenness centrality (replaces "importance", above) -
          • shortestpath: calculation of shortest paths between vertices -
          • util: low-level utility classes used in a variety of algorithms -
          - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorer.java deleted file mode 100644 index 70d677b5..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorer.java +++ /dev/null @@ -1,368 +0,0 @@ -/* - * Created on Jul 6, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.util.DelegateToEdgeTransformer; -import edu.uci.ics.jung.algorithms.scoring.util.VEPair; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * An abstract class for algorithms that assign scores to vertices based on iterative methods. - * Generally, any (concrete) subclass will function by creating an instance, and then either calling - * evaluate (if the user wants to iterate until the algorithms is 'done') or - * repeatedly call step (if the user wants to observe the values at each step). - */ -public abstract class AbstractIterativeScorer implements IterativeContext, VertexScorer -{ - /** - * Maximum number of iterations to use before terminating. Defaults to 100. - */ - protected int max_iterations; - - /** - * Minimum change from one step to the next; if all changes are <= tolerance, - * no further updates will occur. - * Defaults to 0.001. - */ - protected double tolerance; - - /** - * The graph on which the calculations are to be made. - */ - protected Hypergraph graph; - - /** - * The total number of iterations used so far. - */ - protected int total_iterations; - - /** - * The edge weights used by this algorithm. - */ - protected Transformer, ? extends Number> edge_weights; - - /** - * Indicates whether the output and current values are in a 'swapped' state. - * Intended for internal use only. - */ - protected boolean output_reversed; - - /** - * The map in which the output values are stored. - */ - private Map output; - - /** - * The map in which the current values are stored. - */ - private Map current_values; - - /** - * A flag representing whether this instance tolerates disconnected graphs. - * Instances that do not accept disconnected graphs may have unexpected behavior - * on disconnected graphs; they are not guaranteed to do an explicit check. - * Defaults to true. - */ - private boolean accept_disconnected_graph; - - - protected boolean hyperedges_are_self_loops = false; - - /** - * Sets the output value for this vertex. - * @param v the vertex whose output value is to be set - * @param value the value to set - */ - protected void setOutputValue(V v, T value) - { - output.put(v, value); - } - - /** - * Gets the output value for this vertex. 
- * @param v the vertex whose output value is to be retrieved - * @return the output value for this vertex - */ - protected T getOutputValue(V v) - { - return output.get(v); - } - - /** - * Gets the current value for this vertex - * @param v the vertex whose current value is to be retrieved - * @return the current value for this vertex - */ - protected T getCurrentValue(V v) - { - return current_values.get(v); - } - - /** - * Sets the current value for this vertex. - * @param v the vertex whose current value is to be set - * @param value the current value to set - */ - protected void setCurrentValue(V v, T value) - { - current_values.put(v, value); - } - - /** - * The largest change seen so far among all vertex scores. - */ - protected double max_delta; - - /** - * Creates an instance for the specified graph and edge weights. - * @param g the graph for which the instance is to be created - * @param edge_weights the edge weights for this instance - */ - public AbstractIterativeScorer(Hypergraph g, Transformer edge_weights) - { - this.graph = g; - this.max_iterations = 100; - this.tolerance = 0.001; - this.accept_disconnected_graph = true; - setEdgeWeights(edge_weights); - } - - /** - * Creates an instance for the specified graph g. - * NOTE: This constructor does not set the internal - * edge_weights variable. If this variable is used by - * the subclass which invoked this constructor, it must be initialized - * by that subclass. - * @param g the graph for which the instance is to be created - */ - public AbstractIterativeScorer(Hypergraph g) - { - this.graph = g; - this.max_iterations = 100; - this.tolerance = 0.001; - this.accept_disconnected_graph = true; - } - - /** - * Initializes the internal state for this instance. - */ - protected void initialize() - { - this.total_iterations = 0; - this.max_delta = Double.MIN_VALUE; - this.output_reversed = true; - this.current_values = new HashMap(); - this.output = new HashMap(); - } - - /** - * Steps through this scoring algorithm until a termination condition is reached. - */ - public void evaluate() - { - do - step(); - while (!done()); - } - - /** - * Returns true if the total number of iterations is greater than or equal to - * max_iterations - * or if the maximum value change observed is less than tolerance. - */ - public boolean done() - { - return total_iterations >= max_iterations || max_delta < tolerance; - } - - /** - * Performs one step of this algorithm; updates the state (value) for each vertex. - */ - public void step() - { - swapOutputForCurrent(); - - for (V v : graph.getVertices()) - { - double diff = update(v); - updateMaxDelta(v, diff); - } - total_iterations++; - afterStep(); - } - - /** - * - */ - protected void swapOutputForCurrent() - { - Map tmp = output; - output = current_values; - current_values = tmp; - output_reversed = !output_reversed; - } - - /** - * Updates the value for v. - * This is the key - * @param v the vertex whose value is to be updated - * @return - */ - protected abstract double update(V v); - - protected void updateMaxDelta(V v, double diff) - { - max_delta = Math.max(max_delta, diff); - } - - protected void afterStep() {} - - public T getVertexScore(V v) - { - if (!graph.containsVertex(v)) - throw new IllegalArgumentException("Vertex " + v + " not an element of this graph"); - - return output.get(v); - } - - /** - * Returns the maximum number of iterations that this instance will use. 
- * @return the maximum number of iterations that evaluate will use - * prior to terminating - */ - public int getMaxIterations() - { - return max_iterations; - } - - /** - * Returns the number of iterations that this instance has used so far. - * @return the number of iterations that this instance has used so far - */ - public int getIterations() - { - return total_iterations; - } - - /** - * Sets the maximum number of times that evaluate will call step. - * @param max_iterations the maximum - */ - public void setMaxIterations(int max_iterations) - { - this.max_iterations = max_iterations; - } - - /** - * Gets the size of the largest change (difference between the current and previous values) - * for any vertex that can be tolerated. Once all changes are less than this value, - * evaluate will terminate. - * @return the size of the largest change that evaluate() will permit - */ - public double getTolerance() - { - return tolerance; - } - - /** - * Sets the size of the largest change (difference between the current and previous values) - * for any vertex that can be tolerated. - * @param tolerance the size of the largest change that evaluate() will permit - */ - public void setTolerance(double tolerance) - { - this.tolerance = tolerance; - } - - /** - * Returns the Transformer that this instance uses to associate edge weights with each edge. - * @return the Transformer that associates an edge weight with each edge - */ - public Transformer, ? extends Number> getEdgeWeights() - { - return edge_weights; - } - - /** - * Sets the Transformer that this instance uses to associate edge weights with each edge - * @param edge_weights the Transformer to use to associate an edge weight with each edge - * @see edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight - */ - public void setEdgeWeights(Transformer edge_weights) - { - this.edge_weights = new DelegateToEdgeTransformer(edge_weights); - } - - /** - * Gets the edge weight for e in the context of its (incident) vertex v. - * @param v the vertex incident to e as a context in which the edge weight is to be calculated - * @param e the edge whose weight is to be returned - * @return the edge weight for e in the context of its (incident) vertex v - */ - protected Number getEdgeWeight(V v, E e) - { - return edge_weights.transform(new VEPair(v,e)); - } - - /** - * Collects the 'potential' from v (its current value) if it has no outgoing edges; this - * can then be redistributed among the other vertices as a means of normalization. - * @param v - */ - protected void collectDisappearingPotential(V v) {} - - /** - * Specifies whether this instance should accept vertices with no outgoing edges. - * @param accept true if this instance should accept vertices with no outgoing edges, false otherwise - */ - public void acceptDisconnectedGraph(boolean accept) - { - this.accept_disconnected_graph = accept; - } - - /** - * Returns true if this instance accepts vertices with no outgoing edges, and false otherwise. - * @return true if this instance accepts vertices with no outgoing edges, otherwise false - */ - public boolean isDisconnectedGraphOK() - { - return this.accept_disconnected_graph; - } - - /** - * Specifies whether hyperedges are to be treated as self-loops. If they - * are, then potential will flow along a hyperedge a vertex to itself, - * just as it does to all other vertices incident to that hyperedge. 
- * @param arg if {@code true}, hyperedges are treated as self-loops - */ - public void setHyperedgesAreSelfLoops(boolean arg) - { - this.hyperedges_are_self_loops = arg; - } - - /** - * Returns the effective number of vertices incident to this edge. If - * the graph is a binary relation or if hyperedges are treated as self-loops, - * the value returned is {@code graph.getIncidentCount(e)}; otherwise it is - * {@code graph.getIncidentCount(e) - 1}. - */ - protected int getAdjustedIncidentCount(E e) - { - return graph.getIncidentCount(e) - (hyperedges_are_self_loops ? 0 : 1); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorerWithPriors.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorerWithPriors.java deleted file mode 100644 index 6883e263..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorerWithPriors.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Created on Jul 14, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * An abstract class for iterative random-walk-based vertex scoring algorithms - * that have a - * fixed probability, for each vertex, of 'jumping' to that vertex at each - * step in the algorithm (rather than following a link out of that vertex). - * - * @param the vertex type - * @param the edge type - * @param the score type - */ -public abstract class AbstractIterativeScorerWithPriors extends - AbstractIterativeScorer implements VertexScorer -{ - /** - * The prior probability of each vertex being visited on a given - * 'jump' (non-link-following) step. - */ - protected Transformer vertex_priors; - - /** - * The probability of making a 'jump' at each step. - */ - protected double alpha; - - /** - * Creates an instance for the specified graph, edge weights, vertex - * priors, and jump probability. - * @param g the graph whose vertices are to be assigned scores - * @param edge_weights the edge weights to use in the score assignment - * @param vertex_priors the prior probabilities of each vertex being 'jumped' to - * @param alpha the probability of making a 'jump' at each step - */ - public AbstractIterativeScorerWithPriors(Hypergraph g, - Transformer edge_weights, - Transformer vertex_priors, double alpha) - { - super(g, edge_weights); - this.vertex_priors = vertex_priors; - this.alpha = alpha; - initialize(); - } - - /** - * Creates an instance for the specified graph, vertex priors, and jump - * probability, with edge weights specified by the subclass. - * @param g the graph whose vertices are to be assigned scores - * @param vertex_priors the prior probabilities of each vertex being 'jumped' to - * @param alpha the probability of making a 'jump' at each step - */ - public AbstractIterativeScorerWithPriors(Hypergraph g, - Transformer vertex_priors, double alpha) - { - super(g); - this.vertex_priors = vertex_priors; - this.alpha = alpha; - initialize(); - } - - /** - * Initializes the state of this instance. 
- */ - @Override - public void initialize() - { - super.initialize(); - // initialize output values to priors - // (output and current are swapped before each step(), so current will - // have priors when update()s start happening) - for (V v : graph.getVertices()) - setOutputValue(v, getVertexPrior(v)); - } - - /** - * Returns the prior probability for v. - * @param v the vertex whose prior probability is being queried - * @return the prior probability for v - */ - protected S getVertexPrior(V v) - { - return vertex_priors.transform(v); - } - - /** - * Returns a Transformer which maps each vertex to its prior probability. - * @return a Transformer which maps each vertex to its prior probability - */ - public Transformer getVertexPriors() - { - return vertex_priors; - } - - /** - * Returns the probability of making a 'jump' (non-link-following step). - * @return the probability of making a 'jump' (non-link-following step) - */ - public double getAlpha() - { - return alpha; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/BarycenterScorer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/BarycenterScorer.java deleted file mode 100644 index 1c9c178c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/BarycenterScorer.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Created on Jul 12, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.shortestpath.Distance; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Assigns scores to each vertex according to the sum of its distances to all other vertices. - */ -public class BarycenterScorer extends DistanceCentralityScorer -{ - /** - * Creates an instance with the specified graph and distance metric. - * @param graph the input graph - * @param distance the distance metric to use - */ - public BarycenterScorer(Hypergraph graph, Distance distance) - { - super(graph, distance, false); - } - - /** - * Creates an instance with the specified graph and edge weights. - * Will generate a Distance metric internally based on the edge weights. - * @param graph the input graph - * @param edge_weights the edge weights to use to calculate vertex/vertex distances - */ - public BarycenterScorer(Hypergraph graph, Transformer edge_weights) - { - super(graph, edge_weights, false); - } - - /** - * Creates an instance with the specified graph. - * Will generate a Distance metric internally assuming that the - * graph is unweighted. - * @param graph the input graph - */ - public BarycenterScorer(Hypergraph graph) - { - super(graph, false); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/BetweennessCentrality.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/BetweennessCentrality.java deleted file mode 100644 index 5cfeb164..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/BetweennessCentrality.java +++ /dev/null @@ -1,351 +0,0 @@ -/** - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- * Created on Sep 16, 2008 - * - */ -package edu.uci.ics.jung.algorithms.scoring; - -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Queue; -import java.util.Stack; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; - -import edu.uci.ics.jung.algorithms.util.MapBinaryHeap; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; - -/** - * Computes betweenness centrality for each vertex and edge in the graph. - * - * @see "Ulrik Brandes: A Faster Algorithm for Betweenness Centrality. Journal of Mathematical Sociology 25(2):163-177, 2001." - */ -public class BetweennessCentrality - implements VertexScorer, EdgeScorer -{ - protected Graph graph; - protected Map vertex_scores; - protected Map edge_scores; - protected Map vertex_data; - - /** - * Calculates betweenness scores based on the all-pairs unweighted shortest paths - * in the graph. - * @param graph the graph for which the scores are to be calculated - */ - @SuppressWarnings("unchecked") - public BetweennessCentrality(Graph graph) - { - initialize(graph); - computeBetweenness(new LinkedList(), new ConstantTransformer(1)); - } - - /** - * Calculates betweenness scores based on the all-pairs weighted shortest paths in the - * graph. - * - *

          NOTE: This version of the algorithm may not work correctly on all graphs; we're still - * working out the bugs. Use at your own risk. - * @param graph the graph for which the scores are to be calculated - * @param edge_weights the edge weights to be used in the path length calculations - */ - public BetweennessCentrality(Graph graph, - Transformer edge_weights) - { - // reject negative-weight edges up front - for (E e : graph.getEdges()) - { - double e_weight = edge_weights.transform(e).doubleValue(); - if (e_weight < 0) - throw new IllegalArgumentException(String.format( - "Weight for edge '%s' is < 0: %d", e, e_weight)); - } - - initialize(graph); - computeBetweenness(new MapBinaryHeap(new BetweennessComparator()), - edge_weights); - } - - protected void initialize(Graph graph) - { - this.graph = graph; - this.vertex_scores = new HashMap(); - this.edge_scores = new HashMap(); - this.vertex_data = new HashMap(); - - for (V v : graph.getVertices()) - this.vertex_scores.put(v, 0.0); - - for (E e : graph.getEdges()) - this.edge_scores.put(e, 0.0); - } - - protected void computeBetweenness(Queue queue, - Transformer edge_weights) - { - for (V v : graph.getVertices()) - { - // initialize the betweenness data for this new vertex - for (V s : graph.getVertices()) - this.vertex_data.put(s, new BetweennessData()); - -// if (v.equals(new Integer(0))) -// System.out.println("pause"); - - vertex_data.get(v).numSPs = 1; - vertex_data.get(v).distance = 0; - - Stack stack = new Stack(); -// Buffer queue = new UnboundedFifoBuffer(); -// queue.add(v); - queue.offer(v); - - while (!queue.isEmpty()) - { -// V w = queue.remove(); - V w = queue.poll(); - stack.push(w); - BetweennessData w_data = vertex_data.get(w); - - for (E e : graph.getOutEdges(w)) - { - // TODO (jrtom): change this to getOtherVertices(w, e) - V x = graph.getOpposite(w, e); - if (x.equals(w)) - continue; - double wx_weight = edge_weights.transform(e).doubleValue(); - - -// for(V x : graph.getSuccessors(w)) -// { -// if (x.equals(w)) -// continue; - - // FIXME: the other problem is that I need to - // keep putting the neighbors of things we've just - // discovered in the queue, if they're undiscovered or - // at greater distance. - - // FIXME: this is the problem, right here, I think: - // need to update position in queue if distance changes - // (which can only happen with weighted edges). 
- // for each outgoing edge e from w, get other end x - // if x not already visited (dist x < 0) - // set x's distance to w's dist + edge weight - // add x to queue; pri in queue is x's dist - // if w's dist + edge weight < x's dist - // update x's dist - // update x in queue (MapBinaryHeap) - // clear x's incoming edge list - // if w's dist + edge weight = x's dist - // add e to x's incoming edge list - - BetweennessData x_data = vertex_data.get(x); - double x_potential_dist = w_data.distance + wx_weight; - - if (x_data.distance < 0) - { -// queue.add(x); -// vertex_data.get(x).distance = vertex_data.get(w).distance + 1; - x_data.distance = x_potential_dist; - queue.offer(x); - } - - // note: - // (1) this can only happen with weighted edges - // (2) x's SP count and incoming edges are updated below - if (x_data.distance > x_potential_dist) - { - x_data.distance = x_potential_dist; - // invalidate previously identified incoming edges - // (we have a new shortest path distance to x) - x_data.incomingEdges.clear(); - // update x's position in queue - ((MapBinaryHeap)queue).update(x); - } -// if (vertex_data.get(x).distance == vertex_data.get(w).distance + 1) - // -// if (x_data.distance == x_potential_dist) -// { -// x_data.numSPs += w_data.numSPs; -//// vertex_data.get(x).predecessors.add(w); -// x_data.incomingEdges.add(e); -// } - } - for (E e: graph.getOutEdges(w)) - { - V x = graph.getOpposite(w, e); - if (x.equals(w)) - continue; - double e_weight = edge_weights.transform(e).doubleValue(); - BetweennessData x_data = vertex_data.get(x); - double x_potential_dist = w_data.distance + e_weight; - if (x_data.distance == x_potential_dist) - { - x_data.numSPs += w_data.numSPs; -// vertex_data.get(x).predecessors.add(w); - x_data.incomingEdges.add(e); - } - } - } - while (!stack.isEmpty()) - { - V x = stack.pop(); - -// for (V w : vertex_data.get(x).predecessors) - for (E e : vertex_data.get(x).incomingEdges) - { - V w = graph.getOpposite(x, e); - double partialDependency = - vertex_data.get(w).numSPs / vertex_data.get(x).numSPs * - (1.0 + vertex_data.get(x).dependency); - vertex_data.get(w).dependency += partialDependency; -// E w_x = graph.findEdge(w, x); -// double w_x_score = edge_scores.get(w_x).doubleValue(); -// w_x_score += partialDependency; -// edge_scores.put(w_x, w_x_score); - double e_score = edge_scores.get(e).doubleValue(); - edge_scores.put(e, e_score + partialDependency); - } - if (!x.equals(v)) - { - double x_score = vertex_scores.get(x).doubleValue(); - x_score += vertex_data.get(x).dependency; - vertex_scores.put(x, x_score); - } - } - } - - if(graph instanceof UndirectedGraph) - { - for (V v : graph.getVertices()) { - double v_score = vertex_scores.get(v).doubleValue(); - v_score /= 2.0; - vertex_scores.put(v, v_score); - } - for (E e : graph.getEdges()) { - double e_score = edge_scores.get(e).doubleValue(); - e_score /= 2.0; - edge_scores.put(e, e_score); - } - } - - vertex_data.clear(); - } - -// protected void computeWeightedBetweenness(Transformer edge_weights) -// { -// for (V v : graph.getVertices()) -// { -// // initialize the betweenness data for this new vertex -// for (V s : graph.getVertices()) -// this.vertex_data.put(s, new BetweennessData()); -// vertex_data.get(v).numSPs = 1; -// vertex_data.get(v).distance = 0; -// -// Stack stack = new Stack(); -//// Buffer queue = new UnboundedFifoBuffer(); -// SortedSet pqueue = new TreeSet(new BetweennessComparator()); -//// queue.add(v); -// pqueue.add(v); -// -//// while (!queue.isEmpty()) -// while 
(!pqueue.isEmpty()) -// { -//// V w = queue.remove(); -// V w = pqueue.first(); -// pqueue.remove(w); -// stack.push(w); -// -//// for(V x : graph.getSuccessors(w)) -// for (E e : graph.getOutEdges(w)) -// { -// // TODO (jrtom): change this to getOtherVertices(w, e) -// V x = graph.getOpposite(w, e); -// if (x.equals(w)) -// continue; -// double e_weight = edge_weights.transform(e).doubleValue(); -// -// if (vertex_data.get(x).distance < 0) -// { -//// queue.add(x); -// pqueue.add(v); -//// vertex_data.get(x).distance = vertex_data.get(w).distance + 1; -// vertex_data.get(x).distance = -// vertex_data.get(w).distance + e_weight; -// } -// -//// if (vertex_data.get(x).distance == vertex_data.get(w).distance + 1) -// if (vertex_data.get(x).distance == -// vertex_data.get(w).distance + e_weight) -// { -// vertex_data.get(x).numSPs += vertex_data.get(w).numSPs; -// vertex_data.get(x).predecessors.add(w); -// } -// } -// } -// updateScores(v, stack); -// } -// -// if(graph instanceof UndirectedGraph) -// adjustUndirectedScores(); -// -// vertex_data.clear(); -// } - - public Double getVertexScore(V v) - { - return vertex_scores.get(v); - } - - public Double getEdgeScore(E e) - { - return edge_scores.get(e); - } - - private class BetweennessData - { - double distance; - double numSPs; -// List predecessors; - List incomingEdges; - double dependency; - - BetweennessData() - { - distance = -1; - numSPs = 0; -// predecessors = new ArrayList(); - incomingEdges = new ArrayList(); - dependency = 0; - } - - @Override - public String toString() - { - return "[d:" + distance + ", sp:" + numSPs + - ", p:" + incomingEdges + ", d:" + dependency + "]\n"; -// ", p:" + predecessors + ", d:" + dependency + "]\n"; - } - } - - private class BetweennessComparator implements Comparator - { - public int compare(V v1, V v2) - { - return vertex_data.get(v1).distance > vertex_data.get(v2).distance ? 1 : -1; - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/ClosenessCentrality.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/ClosenessCentrality.java deleted file mode 100644 index d64f01ed..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/ClosenessCentrality.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Created on Jul 12, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.shortestpath.Distance; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Assigns scores to each vertex based on the mean distance to each other vertex. - * - * @author Joshua O'Madadhain - */ -public class ClosenessCentrality extends DistanceCentralityScorer -{ - - /** - * Creates an instance using the specified vertex/vertex distance metric. - * @param graph the input - * @param distance the vertex/vertex distance metric. - */ - public ClosenessCentrality(Hypergraph graph, Distance distance) - { - super(graph, distance, true); - } - - /** - * Creates an instance which measures distance using the specified edge weights. 
- * @param graph the input graph - * @param edge_weights the edge weights to be used to determine vertex/vertex distances - */ - public ClosenessCentrality(Hypergraph graph, Transformer edge_weights) - { - super(graph, edge_weights, true); - } - - /** - * Creates an instance which measures distance on the graph without edge weights. - * @param graph - */ - public ClosenessCentrality(Hypergraph graph) - { - super(graph, true); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/DegreeScorer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/DegreeScorer.java deleted file mode 100644 index 2ec31481..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/DegreeScorer.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Created on Jul 6, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Assigns a score to each vertex equal to its degree. - * - * @param the vertex type - */ -public class DegreeScorer implements VertexScorer -{ - /** - * The graph for which scores are to be generated. - */ - protected Hypergraph graph; - - /** - * Creates an instance for the specified graph. - * @param graph the input graph - */ - public DegreeScorer(Hypergraph graph) - { - this.graph = graph; - } - - /** - * Returns the degree of the vertex. - * @return the degree of the vertex - */ - public Integer getVertexScore(V v) - { - return graph.degree(v); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/DistanceCentralityScorer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/DistanceCentralityScorer.java deleted file mode 100644 index 16dd8621..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/DistanceCentralityScorer.java +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Created on Jul 10, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.shortestpath.DijkstraDistance; -import edu.uci.ics.jung.algorithms.shortestpath.Distance; -import edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Assigns scores to vertices based on their distances to each other vertex - * in the graph. - * - * This class optionally normalizes its results based on the value of its - * 'averaging' constructor parameter. If it is true, - * then the value returned for vertex v is 1 / (_average_ distance from v to all other vertices); - * this is sometimes called closeness centrality. - * If it is false, then the value returned is 1 / (_total_ distance from - * v to all other vertices); this is sometimes referred to as barycenter centrality. - * (If the average/total distance is 0, the value returned is {@code Double.POSITIVE_INFINITY}.) 
- * - * @see BarycenterScorer - * @see ClosenessCentrality - */ -public class DistanceCentralityScorer implements VertexScorer -{ - /** - * The graph on which the vertex scores are to be calculated. - */ - protected Hypergraph graph; - - /** - * The metric to use for specifying the distance between pairs of vertices. - */ - protected Distance distance; - - /** - * The cache for the output results. Null encodes "not yet calculated", - * < 0 encodes "no such distance exists". - */ - protected Map output; - - /** - * Specifies whether the values returned are the sum of the v-distances - * or the mean v-distance. - */ - protected boolean averaging; - - /** - * Specifies whether, for a vertex v with missing (null) distances, - * v's score should ignore the missing values or be set to 'null'. - * Defaults to 'true'. - */ - protected boolean ignore_missing; - - /** - * Specifies whether the values returned should ignore self-distances - * (distances from v to itself). - * Defaults to 'true'. - */ - protected boolean ignore_self_distances; - - /** - * Creates an instance with the specified graph, distance metric, and - * averaging behavior. - * - * @param graph The graph on which the vertex scores are to be calculated. - * @param distance The metric to use for specifying the distance between - * pairs of vertices. - * @param averaging Specifies whether the values returned is the sum of all - * v-distances or the mean v-distance. - * @param ignore_missing Specifies whether scores for missing distances - * are to ignore missing distances or be set to null. - * @param ignore_self_distances Specifies whether distances from a vertex - * to itself should be included in its score. - */ - public DistanceCentralityScorer(Hypergraph graph, Distance distance, - boolean averaging, boolean ignore_missing, - boolean ignore_self_distances) - { - this.graph = graph; - this.distance = distance; - this.averaging = averaging; - this.ignore_missing = ignore_missing; - this.ignore_self_distances = ignore_self_distances; - this.output = new HashMap(); - } - - /** - * Equivalent to this(graph, distance, averaging, true, true). - * - * @param graph The graph on which the vertex scores are to be calculated. - * @param distance The metric to use for specifying the distance between - * pairs of vertices. - * @param averaging Specifies whether the values returned is the sum of all - * v-distances or the mean v-distance. - */ - public DistanceCentralityScorer(Hypergraph graph, Distance distance, - boolean averaging) - { - this(graph, distance, averaging, true, true); - } - - /** - * Creates an instance with the specified graph and averaging behavior - * whose vertex distances are calculated based on the specified edge - * weights. - * - * @param graph The graph on which the vertex scores are to be - * calculated. - * @param edge_weights The edge weights to use for specifying the distance - * between pairs of vertices. - * @param averaging Specifies whether the values returned is the sum of - * all v-distances or the mean v-distance. - * @param ignore_missing Specifies whether scores for missing distances - * are to ignore missing distances or be set to null. - * @param ignore_self_distances Specifies whether distances from a vertex - * to itself should be included in its score. 
- */ - public DistanceCentralityScorer(Hypergraph graph, - Transformer edge_weights, boolean averaging, - boolean ignore_missing, boolean ignore_self_distances) - { - this(graph, new DijkstraDistance(graph, edge_weights), averaging, - ignore_missing, ignore_self_distances); - } - - /** - * Equivalent to this(graph, edge_weights, averaging, true, true). - * @param graph The graph on which the vertex scores are to be - * calculated. - * @param edge_weights The edge weights to use for specifying the distance - * between pairs of vertices. - * @param averaging Specifies whether the values returned is the sum of - * all v-distances or the mean v-distance. - */ - public DistanceCentralityScorer(Hypergraph graph, - Transformer edge_weights, boolean averaging) - { - this(graph, new DijkstraDistance(graph, edge_weights), averaging, - true, true); - } - - /** - * Creates an instance with the specified graph and averaging behavior - * whose vertex distances are calculated on the unweighted graph. - * - * @param graph The graph on which the vertex scores are to be - * calculated. - * @param averaging Specifies whether the values returned is the sum of - * all v-distances or the mean v-distance. - * @param ignore_missing Specifies whether scores for missing distances - * are to ignore missing distances or be set to null. - * @param ignore_self_distances Specifies whether distances from a vertex - * to itself should be included in its score. - */ - public DistanceCentralityScorer(Hypergraph graph, boolean averaging, - boolean ignore_missing, boolean ignore_self_distances) - { - this(graph, new UnweightedShortestPath(graph), averaging, - ignore_missing, ignore_self_distances); - } - - /** - * Equivalent to this(graph, averaging, true, true). - * @param graph The graph on which the vertex scores are to be - * calculated. - * @param averaging Specifies whether the values returned is the sum of - * all v-distances or the mean v-distance. - */ - public DistanceCentralityScorer(Hypergraph graph, boolean averaging) - { - this(graph, new UnweightedShortestPath(graph), averaging, true, true); - } - - /** - * Calculates the score for the specified vertex. Returns {@code null} if - * there are missing distances and such are not ignored by this instance. - */ - public Double getVertexScore(V v) - { - Double value = output.get(v); - if (value != null) - { - if (value < 0) - return null; - return value; - } - - Map v_distances = new HashMap(distance.getDistanceMap(v)); - if (ignore_self_distances) - v_distances.remove(v); - - // if we don't ignore missing distances and there aren't enough - // distances, output null (shortcut) - if (!ignore_missing) - { - int num_dests = graph.getVertexCount() - - (ignore_self_distances ? 1 : 0); - if (v_distances.size() != num_dests) - { - output.put(v, -1.0); - return null; - } - } - - Double sum = 0.0; - for (V w : graph.getVertices()) - { - if (w.equals(v) && ignore_self_distances) - continue; - Number w_distance = v_distances.get(w); - if (w_distance == null) - if (ignore_missing) - continue; - else - { - output.put(v, -1.0); - return null; - } - else - sum += w_distance.doubleValue(); - } - value = sum; - if (averaging) - value /= v_distances.size(); - - double score = value == 0 ? 
- Double.POSITIVE_INFINITY : - 1.0 / value; - output.put(v, score); - - return score; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/EdgeScorer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/EdgeScorer.java deleted file mode 100644 index 7e648746..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/EdgeScorer.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Created on Jul 6, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - - -/** - * An interface for algorithms that assign scores to edges. - * - * @param the edge type - * @param the score type - */ -public interface EdgeScorer -{ - /** - * Returns the algorithm's score for this edge. - * @return the algorithm's score for this edge - */ - public S getEdgeScore(E e); -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/EigenvectorCentrality.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/EigenvectorCentrality.java deleted file mode 100644 index 87d7f3ae..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/EigenvectorCentrality.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Created on Jul 12, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Calculates eigenvector centrality for each vertex in the graph. - * The 'eigenvector centrality' for a vertex is defined as the fraction of - * time that a random walk(er) will spend at that vertex over an infinite - * time horizon. - * Assumes that the graph is strongly connected. - */ -public class EigenvectorCentrality extends PageRank -{ - /** - * Creates an instance with the specified graph and edge weights. - * The outgoing edge weights for each edge must sum to 1. - * (See UniformDegreeWeight for one way to handle this for - * undirected graphs.) - * @param graph the graph for which the centrality is to be calculated - * @param edge_weights the edge weights - */ - public EigenvectorCentrality(Hypergraph graph, - Transformer edge_weights) - { - super(graph, edge_weights, 0); - acceptDisconnectedGraph(false); - } - - /** - * Creates an instance with the specified graph and default edge weights. - * (Default edge weights: UniformDegreeWeight.) - * @param graph the graph for which the centrality is to be calculated. - */ - public EigenvectorCentrality(Hypergraph graph) - { - super(graph, 0); - acceptDisconnectedGraph(false); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/HITS.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/HITS.java deleted file mode 100644 index b1b4f428..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/HITS.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Created on Jul 15, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. 
- * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; -import edu.uci.ics.jung.graph.Graph; - -import org.apache.commons.collections15.Transformer; - -/** - * Assigns hub and authority scores to each vertex depending on the topology of - * the network. The essential idea is that a vertex is a hub to the extent - * that it links to authoritative vertices, and is an authority to the extent - * that it links to 'hub' vertices. - * - *

- * The classic HITS algorithm essentially proceeds as follows:
- *
- * assign equal initial hub and authority values to each vertex
- * repeat
- *   for each vertex w:
- *     w.hub = sum over successors x of x.authority
- *     w.authority = sum over predecessors v of v.hub
- *   normalize hub and authority scores so that the sum of the squares of each = 1
- * until scores converge
- *
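For readers skimming this deleted javadoc, the pseudocode above is the whole algorithm. Below is a minimal standalone sketch of that loop; it is illustrative only and is not the JUNG code being removed here, and the HitsSketch class, the adjacency maps and the toy edges are all invented for the example.

    // Illustrative sketch only; not the JUNG HITS class deleted above.
    // Runs the classic iteration from the pseudocode on a three-vertex directed graph.
    import java.util.*;

    public class HitsSketch {
        public static void main(String[] args) {
            // directed edges: a -> b, a -> c, b -> c, c -> a
            Map<String, List<String>> succ = new HashMap<>();
            succ.put("a", Arrays.asList("b", "c"));
            succ.put("b", Arrays.asList("c"));
            succ.put("c", Arrays.asList("a"));

            // derive predecessor lists from the successor lists
            Map<String, List<String>> pred = new HashMap<>();
            for (String v : succ.keySet()) pred.put(v, new ArrayList<>());
            for (Map.Entry<String, List<String>> e : succ.entrySet())
                for (String w : e.getValue()) pred.get(w).add(e.getKey());

            Map<String, Double> hub = new HashMap<>(), auth = new HashMap<>();
            for (String v : succ.keySet()) { hub.put(v, 1.0); auth.put(v, 1.0); }

            for (int iter = 0; iter < 50; iter++) {
                Map<String, Double> newHub = new HashMap<>(), newAuth = new HashMap<>();
                for (String w : succ.keySet()) {
                    double h = 0, a = 0;
                    for (String x : succ.get(w)) h += auth.get(x);  // hub: sum of successors' authority
                    for (String v : pred.get(w)) a += hub.get(v);   // authority: sum of predecessors' hub
                    newHub.put(w, h);
                    newAuth.put(w, a);
                }
                normalize(newHub);   // sum of squares = 1, as in the pseudocode
                normalize(newAuth);
                hub = newHub;
                auth = newAuth;
            }
            System.out.println("hub=" + hub + " authority=" + auth);
        }

        // scale values so that the sum of their squares is 1
        private static void normalize(Map<String, Double> m) {
            double ss = 0;
            for (double x : m.values()) ss += x * x;
            double norm = Math.sqrt(ss);
            if (norm == 0) return;
            for (Map.Entry<String, Double> e : m.entrySet()) e.setValue(e.getValue() / norm);
        }
    }

The normalize step here is the same sum-of-squares normalization that HITSWithPriors.normalizeScores(), further down in this diff, applies to its hub and authority scores.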
          - * - * HITS is somewhat different from random walk/eigenvector-based algorithms - * such as PageRank in that: - *
            - *
- *   • there are two mutually recursive scores being calculated, rather than
- *     a single value
- *   • the edge weights are effectively all 1, i.e., they can't be interpreted
- *     as transition probabilities. This means that the more inlinks and outlinks
- *     that a vertex has, the better, since adding an inlink (or outlink) does
- *     not dilute the influence of the other inlinks (or outlinks) as in
- *     random walk-based algorithms.
- *   • the scores cannot be interpreted as posterior probabilities (due to the
- *     different normalization)
- *
- * This implementation has the classic behavior by default. However, it has
- * been generalized somewhat so that it can act in a more "PageRank-like" fashion:
- *   • this implementation has an optional 'random jump probability' parameter analogous
- *     to the 'alpha' parameter used by PageRank. Varying this value between 0 and 1
- *     allows the user to vary between the classic HITS behavior and one in which the
- *     scores are smoothed to a uniform distribution.
- *     The default value for this parameter is 0 (no random jumps possible).
- *   • the edge weights can be set to anything the user likes, and in
- *     particular they can be set up (e.g. using UniformDegreeWeight)
- *     so that the weights of the relevant edges incident to a vertex sum to 1.
- *   • The vertex score normalization has been factored into its own method
- *     so that it can be overridden by a subclass. Thus, for example,
- *     since the vertices' values are set to sum to 1 initially, if the weights of the
- *     relevant edges incident to a vertex sum to 1, then the vertices' values
- *     will continue to sum to 1 if the "sum-of-squares" normalization code
- *     is overridden to a no-op. (Other normalization methods may also be employed.)
          - * - * @param the vertex type - * @param the edge type - * - * @see "'Authoritative sources in a hyperlinked environment' by Jon Kleinberg, 1997" - */ -public class HITS extends HITSWithPriors -{ - - /** - * Creates an instance for the specified graph, edge weights, and alpha - * (random jump probability) parameter. - * @param g the input graph - * @param edge_weights the weights to use for each edge - * @param alpha the probability of a hub giving some authority to all vertices, - * and of an authority increasing the score of all hubs (not just those connected - * via links) - */ - public HITS(Graph g, Transformer edge_weights, double alpha) - { - super(g, edge_weights, ScoringUtils.getHITSUniformRootPrior(g.getVertices()), alpha); - } - - /** - * Creates an instance for the specified graph and alpha (random jump probability) - * parameter. The edge weights are all set to 1. - * @param g the input graph - * @param alpha the probability of a hub giving some authority to all vertices, - * and of an authority increasing the score of all hubs (not just those connected - * via links) - */ - public HITS(Graph g, double alpha) - { - super(g, ScoringUtils.getHITSUniformRootPrior(g.getVertices()), alpha); - } - - /** - * Creates an instance for the specified graph. The edge weights are all set to 1 - * and alpha is set to 0. - * @param g the input graph - */ - public HITS(Graph g) - { - this(g, 0.0); - } - - - /** - * Maintains hub and authority score information for a vertex. - */ - public static class Scores - { - /** - * The hub score for a vertex. - */ - public double hub; - - /** - * The authority score for a vertex. - */ - public double authority; - - /** - * Creates an instance with the specified hub and authority score. - */ - public Scores(double hub, double authority) - { - this.hub = hub; - this.authority = authority; - } - - @Override - public String toString() - { - return String.format("[h:%.4f,a:%.4f]", this.hub, this.authority); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/HITSWithPriors.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/HITSWithPriors.java deleted file mode 100644 index 51ba7190..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/HITSWithPriors.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Created on Jul 14, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * A generalization of HITS that permits non-uniformly-distributed random jumps. - * The 'vertex_priors' (that is, prior probabilities for each vertex) may be - * thought of as the fraction of the total 'potential' (hub or authority score) - * that is assigned to that vertex out of the portion that is assigned according - * to random jumps. - * - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" - */ -public class HITSWithPriors - extends AbstractIterativeScorerWithPriors -{ - /** - * The sum of the potential, at each step, associated with vertices with no outedges (authority) - * or no inedges (hub). 
- */ - protected HITS.Scores disappearing_potential; - - /** - * Creates an instance for the specified graph, edge weights, vertex prior probabilities, - * and random jump probability (alpha). - * @param g the input graph - * @param edge_weights the edge weights - * @param vertex_priors the prior probability for each vertex - * @param alpha the probability of a random jump at each step - */ - public HITSWithPriors(Hypergraph g, - Transformer edge_weights, - Transformer vertex_priors, double alpha) - { - super(g, edge_weights, vertex_priors, alpha); - disappearing_potential = new HITS.Scores(0,0); - } - - /** - * Creates an instance for the specified graph, vertex priors, and random - * jump probability (alpha). The edge weights default to 1.0. - * @param g the input graph - * @param vertex_priors the prior probability for each vertex - * @param alpha the probability of a random jump at each step - */ - @SuppressWarnings("unchecked") - public HITSWithPriors(Hypergraph g, - Transformer vertex_priors, double alpha) - { - super(g, new ConstantTransformer(1.0), vertex_priors, alpha); - disappearing_potential = new HITS.Scores(0,0); - } - - /** - * Updates the value for this vertex. - */ - @Override - protected double update(V v) - { - collectDisappearingPotential(v); - - double v_auth = 0; - for (E e : graph.getInEdges(v)) - { - int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - v_auth += (getCurrentValue(w).hub * - getEdgeWeight(w,e).doubleValue() / incident_count); - } -// V w = graph.getOpposite(v, e); -// auth += (getCurrentValue(w).hub * getEdgeWeight(w, e).doubleValue()); - } - - double v_hub = 0; - for (E e : graph.getOutEdges(v)) - { - int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - v_hub += (getCurrentValue(w).authority * - getEdgeWeight(w,e).doubleValue() / incident_count); - } -// V x = graph.getOpposite(v,e); -// hub += (getCurrentValue(x).authority * getEdgeWeight(x, e).doubleValue()); - } - - // modify total_input according to alpha - if (alpha > 0) - { - v_auth = v_auth * (1 - alpha) + getVertexPrior(v).authority * alpha; - v_hub = v_hub * (1 - alpha) + getVertexPrior(v).hub * alpha; - } - setOutputValue(v, new HITS.Scores(v_hub, v_auth)); - - return Math.max(Math.abs(getCurrentValue(v).hub - v_hub), - Math.abs(getCurrentValue(v).authority - v_auth)); - } - - /** - * Code which is executed after each step. In this case, deals with the - * 'disappearing potential', normalizes the scores, and then calls - * super.afterStep(). - * @see #collectDisappearingPotential(Object) - */ - @Override - protected void afterStep() - { - if (disappearing_potential.hub > 0 || disappearing_potential.authority > 0) - { - for (V v : graph.getVertices()) - { - double new_hub = getOutputValue(v).hub + - (1 - alpha) * (disappearing_potential.hub * getVertexPrior(v).hub); - double new_auth = getOutputValue(v).authority + - (1 - alpha) * (disappearing_potential.authority * getVertexPrior(v).authority); - setOutputValue(v, new HITS.Scores(new_hub, new_auth)); - } - disappearing_potential.hub = 0; - disappearing_potential.authority = 0; - } - - normalizeScores(); - - super.afterStep(); - } - - /** - * Normalizes scores so that sum of their squares = 1. - * This method may be overridden so as to yield different - * normalizations. 
- */ - protected void normalizeScores() { - double hub_ssum = 0; - double auth_ssum = 0; - for (V v : graph.getVertices()) - { - double hub_val = getOutputValue(v).hub; - double auth_val = getOutputValue(v).authority; - hub_ssum += (hub_val * hub_val); - auth_ssum += (auth_val * auth_val); - } - - hub_ssum = Math.sqrt(hub_ssum); - auth_ssum = Math.sqrt(auth_ssum); - - for (V v : graph.getVertices()) - { - HITS.Scores values = getOutputValue(v); - setOutputValue(v, new HITS.Scores( - values.hub / hub_ssum, - values.authority / auth_ssum)); - } - } - - /** - * Collects the "disappearing potential" associated with vertices that have either - * no incoming edges, no outgoing edges, or both. Vertices that have no incoming edges - * do not directly contribute to the hub scores of other vertices; similarly, vertices - * that have no outgoing edges do not directly contribute to the authority scores of - * other vertices. These values are collected at each step and then distributed across all vertices - * as a part of the normalization process. (This process is not required for, and does - * not affect, the 'sum-of-squares'-style normalization.) - */ - @Override - protected void collectDisappearingPotential(V v) - { - if (graph.outDegree(v) == 0) - { - if (isDisconnectedGraphOK()) - disappearing_potential.hub += getCurrentValue(v).authority; - else - throw new IllegalArgumentException("Outdegree of " + v + " must be > 0"); - } - if (graph.inDegree(v) == 0) - { - if (isDisconnectedGraphOK()) - disappearing_potential.authority += getCurrentValue(v).hub; - else - throw new IllegalArgumentException("Indegree of " + v + " must be > 0"); - } - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/KStepMarkov.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/KStepMarkov.java deleted file mode 100644 index e640b1b3..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/KStepMarkov.java +++ /dev/null @@ -1,156 +0,0 @@ -/** - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Aug 22, 2008 - * - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * A special case of {@code PageRankWithPriors} in which the final scores - * represent a probability distribution over position assuming a random (Markovian) - * walk of exactly k steps, based on the initial distribution specified by the priors. - * - *

- * NOTE: The version of {@code KStepMarkov} in {@code algorithms.importance}
- * (and in JUNG 1.x) is believed to be incorrect: rather than returning
- * a score which represents a probability distribution over position assuming
- * a k-step random walk, it returns a score which represents the sum over all steps
- * of the probability for each step. If you want that behavior, set the
- * 'cumulative' flag as follows before calling {@code evaluate()}:
- *
- *     KStepMarkov ksm = new KStepMarkov(...);
- *     ksm.setCumulative(true);
- *     ksm.evaluate();
- *
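For intuition, the non-cumulative KStepMarkov score is simply the prior distribution pushed forward through the transition probabilities exactly k times. A rough sketch under that reading follows; it is illustrative only, and the method name, the nested-map representation and the absence of random jumps or hypergraph handling are all simplifications, not the deleted implementation.

    // Illustrative sketch: distribution over position after exactly k Markov steps.
    import java.util.*;

    public class KStepSketch {
        // transition.get(v) maps each successor w of v to the probability of moving v -> w
        public static Map<String, Double> kStep(Map<String, Map<String, Double>> transition,
                                                Map<String, Double> prior, int k) {
            Map<String, Double> dist = new HashMap<>(prior);
            for (int step = 0; step < k; step++) {
                Map<String, Double> next = new HashMap<>();
                for (Map.Entry<String, Double> e : dist.entrySet()) {
                    Map<String, Double> out = transition.getOrDefault(e.getKey(), Collections.emptyMap());
                    for (Map.Entry<String, Double> t : out.entrySet())
                        next.merge(t.getKey(), e.getValue() * t.getValue(), Double::sum);
                }
                dist = next;   // distribution over position after (step + 1) steps
            }
            return dist;       // the non-cumulative, KStepMarkov-style score
        }
    }

The cumulative variant described in the note above would instead add each intermediate dist into a running total rather than returning only the final one.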
          - * - * By default, the 'cumulative' flag is set to false. - * - * NOTE: THIS CLASS IS NOT YET COMPLETE. USE AT YOUR OWN RISK. (The original behavior - * is captured by the version still available in {@code algorithms.importance}.) - * - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" - * @see PageRank - * @see PageRankWithPriors - */ -public class KStepMarkov extends PageRankWithPriors -{ - private boolean cumulative; - - /** - * Creates an instance based on the specified graph, edge weights, vertex - * priors (initial scores), and number of steps to take. - * @param graph the input graph - * @param edge_weights the edge weights (transition probabilities) - * @param vertex_priors the initial probability distribution (score assignment) - * @param steps the number of times that {@code step()} will be called by {@code evaluate} - */ - public KStepMarkov(Hypergraph graph, Transformer edge_weights, - Transformer vertex_priors, int steps) - { - super(graph, edge_weights, vertex_priors, 0); - initialize(steps); - } - - /** - * Creates an instance based on the specified graph, vertex - * priors (initial scores), and number of steps to take. The edge - * weights (transition probabilities) are set to default values (a uniform - * distribution over all outgoing edges). - * @param graph the input graph - * @param vertex_priors the initial probability distribution (score assignment) - * @param steps the number of times that {@code step()} will be called by {@code evaluate} - */ - public KStepMarkov(Hypergraph graph, Transformer vertex_priors, int steps) - { - super(graph, vertex_priors, 0); - initialize(steps); - } - - /** - * Creates an instance based on the specified graph and number of steps to - * take. The edge weights (transition probabilities) and vertex initial scores - * (prior probabilities) are set to default values (a uniform - * distribution over all outgoing edges, and a uniform distribution over - * all vertices, respectively). - * @param graph the input graph - * @param steps the number of times that {@code step()} will be called by {@code evaluate} - */ - public KStepMarkov(Hypergraph graph, int steps) - { - super(graph, ScoringUtils.getUniformRootPrior(graph.getVertices()), 0); - initialize(steps); - } - - private void initialize(int steps) - { - this.acceptDisconnectedGraph(false); - - if (steps <= 0) - throw new IllegalArgumentException("Number of steps must be > 0"); - - this.max_iterations = steps; - this.tolerance = -1.0; - - this.cumulative = false; - } - - /** - * Specifies whether this instance should assign a score to each vertex - * based on the - * @param cumulative - */ - public void setCumulative(boolean cumulative) - { - this.cumulative = cumulative; - } - - /** - * Updates the value for this vertex. Called by step(). - */ - @Override - public double update(V v) - { - if (!cumulative) - return super.update(v); - - collectDisappearingPotential(v); - - double v_input = 0; - for (E e : graph.getInEdges(v)) - { - // For graphs, the code below is equivalent to -// V w = graph.getOpposite(v, e); -// total_input += (getCurrentValue(w) * getEdgeWeight(w,e).doubleValue()); - // For hypergraphs, this divides the potential coming from w - // by the number of vertices in the connecting edge e. 
- int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - v_input += (getCurrentValue(w) * - getEdgeWeight(w,e).doubleValue() / incident_count); - } - } - - // modify total_input according to alpha - double new_value = alpha > 0 ? - v_input * (1 - alpha) + getVertexPrior(v) * alpha : - v_input; - setOutputValue(v, new_value + getCurrentValue(v)); - - // FIXME: DO WE NEED TO CHANGE HOW DISAPPEARING IS COUNTED? NORMALIZE? - - return Math.abs(getCurrentValue(v) - new_value); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/PageRank.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/PageRank.java deleted file mode 100644 index ca7266d5..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/PageRank.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Created on Jul 12, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Assigns scores to each vertex according to the PageRank algorithm. - * - *

- * PageRank is an eigenvector-based algorithm. The score for a given vertex may be thought of
- * as the fraction of time spent 'visiting' that vertex (measured over all time)
- * in a random walk over the vertices (following outgoing edges from each vertex).
- * PageRank modifies this random walk by adding to the model a probability (specified as 'alpha'
- * in the constructor) of jumping to any vertex. If alpha is 0, this is equivalent to the
- * eigenvector centrality algorithm; if alpha is 1, all vertices will receive the same score
- * (1/|V|). Thus, alpha acts as a sort of score smoothing parameter.
- *
- * The original algorithm assumed that, for a given vertex, the probability of following any
- * outgoing edge was the same; this is the default if edge weights are not specified.
- * This implementation generalizes the original by permitting
- * the user to specify edge weights; in order to maintain the original semantics, however,
- * the weights on the outgoing edges for a given vertex must represent transition probabilities;
- * that is, they must sum to 1.
- *
- * If a vertex has no outgoing edges, then the probability of taking a random jump from that
- * vertex is (by default) effectively 1. If the user wishes to instead throw an exception when
- * this happens, call acceptDisconnectedGraph(false) on this instance.
- *
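Concretely, one power-iteration sweep of the walk described above pushes each vertex's current score along its out-edges and then mixes in the random-jump mass; vertices with no out-edges donate their mass uniformly, matching the "effectively 1" remark just above. The compact sketch below is not the deleted JUNG implementation; the PageRankSketch name, the adjacency-map representation and the uniform handling of dangling mass are assumptions made for the example.

    // Illustrative PageRank power-iteration sketch on a plain adjacency-map graph.
    import java.util.*;

    public class PageRankSketch {
        // succ: outgoing adjacency lists (every vertex must appear as a key, possibly with
        // an empty list); alpha: random-jump probability
        public static Map<String, Double> pageRank(Map<String, List<String>> succ,
                                                   double alpha, int iterations) {
            List<String> vertices = new ArrayList<>(succ.keySet());
            int n = vertices.size();
            Map<String, Double> rank = new HashMap<>();
            for (String v : vertices) rank.put(v, 1.0 / n);

            for (int it = 0; it < iterations; it++) {
                Map<String, Double> next = new HashMap<>();
                for (String v : vertices) next.put(v, 0.0);
                double dangling = 0.0;                       // mass held by vertices with no out-edges
                for (String v : vertices) {
                    List<String> out = succ.get(v);
                    if (out == null || out.isEmpty()) { dangling += rank.get(v); continue; }
                    double share = rank.get(v) / out.size(); // uniform transition probabilities
                    for (String w : out) next.merge(w, share, Double::sum);
                }
                for (String v : vertices)                    // random jump plus dangling redistribution
                    next.put(v, alpha / n + (1 - alpha) * (next.get(v) + dangling / n));
                rank = next;
            }
            return rank;
        }
    }

With alpha = 0 this reduces to the eigenvector-centrality iteration; with alpha = 1 every vertex ends up at 1/n, as the javadoc notes.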

          Typical values for alpha (according to the original paper) are in the range [0.1, 0.2] - * but may be any value between 0 and 1 inclusive. - * - * @see "The Anatomy of a Large-Scale Hypertextual Web Search Engine by L. Page and S. Brin, 1999" - */ -public class PageRank extends PageRankWithPriors -{ - - /** - * Creates an instance for the specified graph, edge weights, and random jump probability. - * @param graph the input graph - * @param edge_weight the edge weights (transition probabilities) - * @param alpha the probability of taking a random jump to an arbitrary vertex - */ - public PageRank(Hypergraph graph, Transformer edge_weight, double alpha) - { - super(graph, edge_weight, ScoringUtils.getUniformRootPrior(graph.getVertices()), alpha); - } - - /** - * Creates an instance for the specified graph and random jump probability; the probability - * of following any outgoing edge from a given vertex is the same. - * @param graph the input graph - * @param alpha the probability of taking a random jump to an arbitrary vertex - */ - public PageRank(Hypergraph graph, double alpha) - { - super(graph, ScoringUtils.getUniformRootPrior(graph.getVertices()), alpha); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/PageRankWithPriors.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/PageRankWithPriors.java deleted file mode 100644 index 717d5eaf..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/PageRankWithPriors.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Created on Jul 6, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * A generalization of PageRank that permits non-uniformly-distributed random jumps. - * The 'vertex_priors' (that is, prior probabilities for each vertex) may be - * thought of as the fraction of the total 'potential' that is assigned to that - * vertex at each step out of the portion that is assigned according - * to random jumps (this portion is specified by 'alpha'). - * - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" - * @see PageRank - */ -public class PageRankWithPriors - extends AbstractIterativeScorerWithPriors -{ - /** - * Maintains the amount of potential associated with vertices with no out-edges. - */ - protected double disappearing_potential = 0.0; - - /** - * Creates an instance with the specified graph, edge weights, vertex priors, and - * 'random jump' probability (alpha). - * @param graph the input graph - * @param edge_weights the edge weights, denoting transition probabilities from source to destination - * @param vertex_priors the prior probabilities for each vertex - * @param alpha the probability of executing a 'random jump' at each step - */ - public PageRankWithPriors(Hypergraph graph, - Transformer edge_weights, - Transformer vertex_priors, double alpha) - { - super(graph, edge_weights, vertex_priors, alpha); - } - - /** - * Creates an instance with the specified graph, vertex priors, and - * 'random jump' probability (alpha). 
The outgoing edge weights for each - * vertex will be equal and sum to 1. - * @param graph the input graph - * @param vertex_priors the prior probabilities for each vertex - * @param alpha the probability of executing a 'random jump' at each step - */ - public PageRankWithPriors(Hypergraph graph, - Transformer vertex_priors, double alpha) - { - super(graph, vertex_priors, alpha); - this.edge_weights = new UniformDegreeWeight(graph); - } - - /** - * Updates the value for this vertex. Called by step(). - */ - @Override - public double update(V v) - { - collectDisappearingPotential(v); - - double v_input = 0; - for (E e : graph.getInEdges(v)) - { - // For graphs, the code below is equivalent to -// V w = graph.getOpposite(v, e); -// total_input += (getCurrentValue(w) * getEdgeWeight(w,e).doubleValue()); - // For hypergraphs, this divides the potential coming from w - // by the number of vertices in the connecting edge e. - int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - v_input += (getCurrentValue(w) * - getEdgeWeight(w,e).doubleValue() / incident_count); - } - } - - // modify total_input according to alpha - double new_value = alpha > 0 ? - v_input * (1 - alpha) + getVertexPrior(v) * alpha : - v_input; - setOutputValue(v, new_value); - - return Math.abs(getCurrentValue(v) - new_value); - } - - /** - * Cleans up after each step. In this case that involves allocating the disappearing - * potential (thus maintaining normalization of the scores) according to the vertex - * probability priors, and then calling - * super.afterStep. - */ - @Override - protected void afterStep() - { - // distribute disappearing potential according to priors - if (disappearing_potential > 0) - { - for (V v : graph.getVertices()) - { - setOutputValue(v, getOutputValue(v) + - (1 - alpha) * (disappearing_potential * getVertexPrior(v))); - } - disappearing_potential = 0; - } - - super.afterStep(); - } - - /** - * Collects the "disappearing potential" associated with vertices that have - * no outgoing edges. Vertices that have no outgoing edges do not directly - * contribute to the scores of other vertices. These values are collected - * at each step and then distributed across all vertices - * as a part of the normalization process. - */ - @Override - protected void collectDisappearingPotential(V v) - { - if (graph.outDegree(v) == 0) - { - if (isDisconnectedGraphOK()) - disappearing_potential += getCurrentValue(v); - else - throw new IllegalArgumentException("Outdegree of " + v + " must be > 0"); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/VertexScorer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/VertexScorer.java deleted file mode 100644 index 610de6b5..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/VertexScorer.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Created on Jul 6, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - - -/** - * An interface for algorithms that assign scores to vertices. - * - * @param the vertex type - * @param the score type - */ -public interface VertexScorer -{ - /** - * Returns the algorithm's score for this vertex. 
- * @return the algorithm's score for this vertex - */ - public S getVertexScore(V v); -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/VoltageScorer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/VoltageScorer.java deleted file mode 100644 index f05b9110..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/VoltageScorer.java +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Created on Jul 15, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Assigns scores to vertices according to their 'voltage' in an approximate - * solution to the Kirchoff equations. This is accomplished by tying "source" - * vertices to specified positive voltages, "sink" vertices to 0 V, and - * iteratively updating the voltage of each other vertex to the (weighted) - * average of the voltages of its neighbors. - * - *

- * The resultant voltages will all be in the range [0, max]
- * where max is the largest voltage of any source vertex (in the
- * absence of negative source voltages; see below).
- *
- * A few notes about this algorithm's interpretation of the graph data:
- *   • Higher edge weights are interpreted as indicative of greater
- *     influence/effect than lower edge weights.
- *   • Negative edge weights (and negative "source" voltages) invalidate
- *     the interpretation of the resultant values as voltages. However, this
- *     algorithm will not reject graphs with negative edge weights or source voltages.
- *   • Parallel edges are equivalent to a single edge whose weight is the
- *     sum of the weights on the parallel edges.
- *   • Current flows along undirected edges in both directions,
- *     but only flows along directed edges in the direction of the edge.
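The relaxation this class javadoc describes is easy to state: hold the sources and sinks fixed and repeatedly replace every other vertex's value with the (weighted) average of its neighbours' values. The toy sketch below uses unit weights and an undirected adjacency map; it is illustrative only, ignores edge weights, edge direction and convergence testing, and all names in it are invented rather than taken from the deleted class.

    // Illustrative voltage-relaxation sketch on an undirected, unit-weight graph.
    import java.util.*;

    public class VoltageSketch {
        // adj: undirected adjacency lists (every vertex appears as a key);
        // sources: fixed positive voltages; sinks: vertices fixed at 0
        public static Map<String, Double> voltages(Map<String, List<String>> adj,
                                                   Map<String, Double> sources,
                                                   Set<String> sinks, int sweeps) {
            Map<String, Double> volt = new HashMap<>();
            for (String v : adj.keySet()) volt.put(v, 0.0);
            volt.putAll(sources);                              // tie sources to their voltages

            for (int s = 0; s < sweeps; s++) {
                Map<String, Double> next = new HashMap<>(volt);
                for (String v : adj.keySet()) {
                    if (sources.containsKey(v) || sinks.contains(v)) continue;  // held fixed
                    List<String> nbrs = adj.get(v);
                    if (nbrs.isEmpty()) continue;
                    double sum = 0;
                    for (String w : nbrs) sum += volt.get(w);
                    next.put(v, sum / nbrs.size());            // unweighted average of neighbours
                }
                volt = next;
            }
            return volt;
        }
    }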
          - *

          - */ -public class VoltageScorer extends AbstractIterativeScorer - implements VertexScorer -{ - protected Map source_voltages; - protected Collection sinks; - - /** - * Creates an instance with the specified graph, edge weights, source voltages, - * and sinks. - * @param g the input graph - * @param edge_weights the edge weights, representing conductivity - * @param source_voltages the (fixed) voltage for each source - * @param sinks the vertices whose voltages are tied to 0 - */ - public VoltageScorer(Hypergraph g, Transformer edge_weights, - Map source_voltages, Collection sinks) - { - super(g, edge_weights); - this.source_voltages = source_voltages; - this.sinks = sinks; - initialize(); - } - - /** - * Creates an instance with the specified graph, edge weights, source vertices - * (each of whose 'voltages' are tied to 1), and sinks. - * @param g the input graph - * @param edge_weights the edge weights, representing conductivity - * @param sources the vertices whose voltages are tied to 1 - * @param sinks the vertices whose voltages are tied to 0 - */ - public VoltageScorer(Hypergraph g, Transformer edge_weights, - Collection sources, Collection sinks) - { - super(g, edge_weights); - - Map unit_voltages = new HashMap(); - for(V v : sources) - unit_voltages.put(v, new Double(1.0)); - this.source_voltages = unit_voltages; - this.sinks = sinks; - initialize(); - } - - /** - * Creates an instance with the specified graph, source vertices - * (each of whose 'voltages' are tied to 1), and sinks. - * The outgoing edges for each vertex are assigned - * weights that sum to 1. - * @param g the input graph - * @param sources the vertices whose voltages are tied to 1 - * @param sinks the vertices whose voltages are tied to 0 - */ - public VoltageScorer(Hypergraph g, Collection sources, Collection sinks) - { - super(g); - - Map unit_voltages = new HashMap(); - for(V v : sources) - unit_voltages.put(v, new Double(1.0)); - this.source_voltages = unit_voltages; - this.sinks = sinks; - initialize(); - } - - /** - * Creates an instance with the specified graph, source voltages, - * and sinks. The outgoing edges for each vertex are assigned - * weights that sum to 1. - * @param g the input graph - * @param source_voltages the (fixed) voltage for each source - * @param sinks the vertices whose voltages are tied to 0 - */ - public VoltageScorer(Hypergraph g, Map source_voltages, - Collection sinks) - { - super(g); - this.source_voltages = source_voltages; - this.sinks = sinks; - this.edge_weights = new UniformDegreeWeight(g); - initialize(); - } - - /** - * Creates an instance with the specified graph, edge weights, source, and - * sink. The source vertex voltage is tied to 1. - * @param g the input graph - * @param edge_weights the edge weights, representing conductivity - * @param source the vertex whose voltage is tied to 1 - * @param sink the vertex whose voltage is tied to 0 - */ - public VoltageScorer(Hypergraph g, Transformer edge_weights, - V source, V sink) - { - this(g, edge_weights, Collections.singletonMap(source, 1.0), Collections.singletonList(sink)); - initialize(); - } - - /** - * Creates an instance with the specified graph, edge weights, source, and - * sink. The source vertex voltage is tied to 1. - * The outgoing edges for each vertex are assigned - * weights that sum to 1. 
- * @param g the input graph - * @param source the vertex whose voltage is tied to 1 - * @param sink the vertex whose voltage is tied to 0 - */ - public VoltageScorer(Hypergraph g, V source, V sink) - { - this(g, Collections.singletonMap(source, 1.0), Collections.singletonList(sink)); - initialize(); - } - - - /** - * Initializes the state of this instance. - */ - @Override - public void initialize() - { - super.initialize(); - - // sanity check - if (source_voltages.isEmpty() || sinks.isEmpty()) - throw new IllegalArgumentException("Both sources and sinks (grounds) must be defined"); - - if (source_voltages.size() + sinks.size() > graph.getVertexCount()) - throw new IllegalArgumentException("Source/sink sets overlap, or contain vertices not in graph"); - - for (Map.Entry entry : source_voltages.entrySet()) - { - V v = entry.getKey(); - if (sinks.contains(v)) - throw new IllegalArgumentException("Vertex " + v + " is incorrectly specified as both source and sink"); - double value = entry.getValue().doubleValue(); - if (value <= 0) - throw new IllegalArgumentException("Source vertex " + v + " has negative voltage"); - } - - // set up initial voltages - for (V v : graph.getVertices()) - { - if (source_voltages.containsKey(v)) - setOutputValue(v, source_voltages.get(v).doubleValue()); - else - setOutputValue(v, 0.0); - } - } - - /** - * @see edu.uci.ics.jung.algorithms.scoring.AbstractIterativeScorer#update(Object) - */ - @Override - public double update(V v) - { - // if it's a voltage source or sink, we're done - Number source_volts = source_voltages.get(v); - if (source_volts != null) - { - setOutputValue(v, source_volts.doubleValue()); - return 0.0; - } - if (sinks.contains(v)) - { - setOutputValue(v, 0.0); - return 0.0; - } - - Collection edges = graph.getInEdges(v); - double voltage_sum = 0; - double weight_sum = 0; - for (E e: edges) - { - int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - { - double weight = getEdgeWeight(w,e).doubleValue() / incident_count; - voltage_sum += getCurrentValue(w).doubleValue() * weight; - weight_sum += weight; - } - } -// V w = graph.getOpposite(v, e); -// double weight = getEdgeWeight(w,e).doubleValue(); -// voltage_sum += getCurrentValue(w).doubleValue() * weight; -// weight_sum += weight; - } - - // if either is 0, new value is 0 - if (voltage_sum == 0 || weight_sum == 0) - { - setOutputValue(v, 0.0); - return getCurrentValue(v).doubleValue(); - } - - setOutputValue(v, voltage_sum / weight_sum); - return Math.abs(getCurrentValue(v).doubleValue() - voltage_sum / weight_sum); - } - -} - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/package.html deleted file mode 100644 index a1f8196e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/package.html +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - - -Mechanisms for assigning values (denoting significance, influence, centrality, etc.) -to graph elements based on topological properties. These include: - -
            -
-  • BarycenterScorer: assigns a score to each vertex according to
-    the sum of the distances to all other vertices
-  • ClosenessCentrality: assigns a score to each vertex based on
-    the mean distance to each other vertex
-  • DegreeScorer: assigns a score to each vertex based on its degree
-  • EigenvectorCentrality: assigns vertex scores based on
-    long-term probabilities of random walks passing through the vertex at time t
-  • PageRank: like EigenvectorCentrality, but with a constant probability of the
-    random walk restarting at a uniform-randomly chosen vertex
-  • PageRankWithPriors: like PageRank, but with a constant probability of the random
-    walk restarting at a vertex drawn from an arbitrary distribution
-  • HITS: assigns hubs-and-authorities scores to vertices based on
-    complementary random walk processes
-  • HITSWithPriors: analogous to HITS (see PageRankWithPriors)
-  • VoltageScorer: assigns scores to vertices based on simulated
-    current flow along edges
          - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/DelegateToEdgeTransformer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/DelegateToEdgeTransformer.java deleted file mode 100644 index f836a9b4..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/DelegateToEdgeTransformer.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Created on Jul 11, 2008 - * - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring.util; - -import org.apache.commons.collections15.Transformer; - -/** - * A Transformer that delegates its operation to a - * Transformer. Mainly useful for technical reasons inside - * AbstractIterativeScorer; in essence it allows the edge weight instance - * variable to be of type VEPair,W even if the edge weight - * Transformer only operates on edges. - */ -public class DelegateToEdgeTransformer implements - Transformer,Number> -{ - /** - * The transformer to which this instance delegates its function. - */ - protected Transformer delegate; - - /** - * Creates an instance with the specified delegate transformer. - * @param delegate the Transformer to which this instance will delegate - */ - public DelegateToEdgeTransformer(Transformer delegate) - { - this.delegate = delegate; - } - - /** - * @see Transformer#transform(Object) - */ - public Number transform(VEPair arg0) - { - return delegate.transform(arg0.getE()); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/ScoringUtils.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/ScoringUtils.java deleted file mode 100644 index 793944b2..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/ScoringUtils.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Created on Jul 12, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring.util; - -import java.util.Collection; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.HITS; - -/** - * Methods for assigning values (to be interpreted as prior probabilities) to vertices in the context - * of random-walk-based scoring algorithms. - */ -public class ScoringUtils -{ - /** - * Assigns a probability of 1/roots.size() to each of the elements of roots. - * @param the vertex type - * @param roots the vertices to be assigned nonzero prior probabilities - * @return - */ - public static Transformer getUniformRootPrior(Collection roots) - { - final Collection inner_roots = roots; - Transformer distribution = new Transformer() - { - public Double transform(V input) - { - if (inner_roots.contains(input)) - return new Double(1.0 / inner_roots.size()); - else - return 0.0; - } - }; - - return distribution; - } - - /** - * Returns a Transformer that hub and authority values of 1/roots.size() to each - * element of roots. 
- * @param the vertex type - * @param roots the vertices to be assigned nonzero scores - * @return a Transformer that assigns uniform prior hub/authority probabilities to each root - */ - public static Transformer getHITSUniformRootPrior(Collection roots) - { - final Collection inner_roots = roots; - Transformer distribution = - new Transformer() - { - public HITS.Scores transform(V input) - { - if (inner_roots.contains(input)) - return new HITS.Scores(1.0 / inner_roots.size(), 1.0 / inner_roots.size()); - else - return new HITS.Scores(0.0, 0.0); - } - }; - return distribution; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/UniformDegreeWeight.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/UniformDegreeWeight.java deleted file mode 100644 index f22bfcc8..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/UniformDegreeWeight.java +++ /dev/null @@ -1,58 +0,0 @@ -/** - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Jul 14, 2008 - * - */ -package edu.uci.ics.jung.algorithms.scoring.util; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Hypergraph; -import edu.uci.ics.jung.graph.util.EdgeType; - -/** - * An edge weight function that assigns weights as uniform - * transition probabilities. - * For undirected edges, returns 1/degree(v) (where 'v' is the - * vertex in the VEPair. - * For directed edges, returns 1/outdegree(source(e)) (where 'e' - * is the edge in the VEPair). - * Throws an IllegalArgumentException if the input - * edge is neither EdgeType.UNDIRECTED nor EdgeType.DIRECTED. - * - */ -public class UniformDegreeWeight implements - Transformer, Double> -{ - private Hypergraph graph; - - /** - * Creates an instance for the specified graph. - */ - public UniformDegreeWeight(Hypergraph graph) - { - this.graph = graph; - } - - /** - * @see org.apache.commons.collections15.Transformer#transform(java.lang.Object) - */ - public Double transform(VEPair ve_pair) - { - E e = ve_pair.getE(); - V v = ve_pair.getV(); - EdgeType edge_type = graph.getEdgeType(e); - if (edge_type == EdgeType.UNDIRECTED) - return 1.0 / graph.degree(v); - if (edge_type == EdgeType.DIRECTED) - return 1.0 / graph.outDegree(graph.getSource(e)); - throw new IllegalArgumentException("can't handle edge type: " + edge_type); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/UniformInOut.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/UniformInOut.java deleted file mode 100644 index 7853f002..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/UniformInOut.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Created on Jul 11, 2008 - * - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- */ -package edu.uci.ics.jung.algorithms.scoring.util; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.EdgeType; - -/** - * Assigns weights to directed edges (the edge of the vertex/edge pair) depending on - * whether the vertex is the edge's source or its destination. - * If the vertex v is the edge's source, assigns 1/outdegree(v). - * Otherwise, assigns 1/indegree(w). - * Throws IllegalArgumentException if the edge is not directed. - */ -public class UniformInOut implements Transformer, Double> -{ - /** - * The graph for which the edge weights are defined. - */ - protected Graph graph; - - /** - * Creates an instance for the specified graph. - * @param graph the graph for which the edge weights will be defined - */ - public UniformInOut(Graph graph) - { - this.graph = graph; - } - - /** - * @see org.apache.commons.collections15.Transformer#transform(Object) - * @throws IllegalArgumentException - */ - public Double transform(VEPair ve_pair) - { - V v = ve_pair.getV(); - E e = ve_pair.getE(); - if (graph.getEdgeType(e) != EdgeType.DIRECTED) - throw new IllegalArgumentException("This transformer only" + - " operates on directed edges"); - return 1.0 / (graph.isSource(v, e) ? - graph.outDegree(v) : - graph.inDegree(v)); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/VEPair.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/VEPair.java deleted file mode 100644 index ad902935..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/VEPair.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Created on Jul 8, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.scoring.util; - -/** - * Convenience class for associating a vertex and an edge. Used, for example, - * in contexts in which it is necessary to know the origin for an edge traversal - * (that is, the direction in which an (undirected) edge is being traversed). - * - * @param the vertex type - * @param the edge type - */ -public class VEPair -{ - private V v; - private E e; - - /** - * Creates an instance with the specified vertex and edge - * @param v the vertex to add - * @param e the edge to add - */ - public VEPair(V v, E e) - { - if (v == null || e == null) - throw new IllegalArgumentException("elements must be non-null"); - - this.v = v; - this.e = e; - } - - /** - * Returns the vertex of this pair. - */ - public V getV() - { - return v; - } - - /** - * Returns the edge of this pair. - */ - public E getE() - { - return e; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/VertexScoreTransformer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/VertexScoreTransformer.java deleted file mode 100644 index 851c08ee..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/VertexScoreTransformer.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Created on Jul 18, 2008 - * - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- */ -package edu.uci.ics.jung.algorithms.scoring.util; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.VertexScorer; - -/** - * A Transformer convenience wrapper around VertexScorer. - */ -public class VertexScoreTransformer implements Transformer -{ - /** - * The VertexScorer instance that provides the values returned by transform. - */ - protected VertexScorer vs; - - /** - * Creates an instance based on the specified VertexScorer. - */ - public VertexScoreTransformer(VertexScorer vs) - { - this.vs = vs; - } - - /** - * Returns the score for this vertex. - */ - public S transform(V v) - { - return vs.getVertexScore(v); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/package.html deleted file mode 100644 index 3bf18f35..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/scoring/util/package.html +++ /dev/null @@ -1,35 +0,0 @@ - - - - - - - -Utility functions for assigning scores to graph elements. These include: -
            -
-  • EdgeWeight: interface for classes that associate numeric values with edges
-  • ScoringUtils: methods for calculating transition probabilities
-    for random-walk-based algorithms.
-  • UniformOut: an edge weight function that assigns weights as uniform
-    transition probabilities to all outgoing edges of a vertex.
-  • UniformIncident: an edge weight function that assigns weights as uniform
-    transition probabilities to all incident edges of a vertex (useful for undirected graphs).
-  • VEPair: analogous to Pair but specifically containing an associated vertex and edge.
-  • VertexEdgeWeight: a subtype of EdgeWeight that assigns edge weights
-    with respect to a specified 'source' vertex.
          - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/BFSDistanceLabeler.java b/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/BFSDistanceLabeler.java deleted file mode 100644 index 38d3b001..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/BFSDistanceLabeler.java +++ /dev/null @@ -1,169 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.shortestpath; - - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Labels each node in the graph according to the BFS distance from the start node(s). If nodes are unreachable, then - * they are assigned a distance of -1. - * All nodes traversed at step k are marked as predecessors of their successors traversed at step k+1. - *

          - * Running time is: O(m) - * @author Scott White - */ -public class BFSDistanceLabeler { - - private Map distanceDecorator = new HashMap(); - private List mCurrentList; - private Set mUnvisitedVertices; - private List mVerticesInOrderVisited; - private Map> mPredecessorMap; - - /** - * Creates a new BFS labeler for the specified graph and root set - * The distances are stored in the corresponding Vertex objects and are of type MutableInteger - */ - public BFSDistanceLabeler() { - mPredecessorMap = new HashMap>(); - } - - /** - * Returns the list of vertices visited in order of traversal - * @return the list of vertices - */ - public List getVerticesInOrderVisited() { - return mVerticesInOrderVisited; - } - - /** - * Returns the set of all vertices that were not visited - * @return the list of unvisited vertices - */ - public Set getUnvisitedVertices() { - return mUnvisitedVertices; - } - - /** - * Given a vertex, returns the shortest distance from any node in the root set to v - * @param v the vertex whose distance is to be retrieved - * @return the shortest distance from any node in the root set to v - */ - public int getDistance(Hypergraph g, V v) { - if (!g.getVertices().contains(v)) { - throw new IllegalArgumentException("Vertex is not contained in the graph."); - } - - return distanceDecorator.get(v).intValue(); - } - - /** - * Returns set of predecessors of the given vertex - * @param v the vertex whose predecessors are to be retrieved - * @return the set of predecessors - */ - public Set getPredecessors(V v) { - return mPredecessorMap.get(v); - } - - protected void initialize(Hypergraph g, Set rootSet) { - mVerticesInOrderVisited = new ArrayList(); - mUnvisitedVertices = new HashSet(); - for(V currentVertex : g.getVertices()) { - mUnvisitedVertices.add(currentVertex); - mPredecessorMap.put(currentVertex,new HashSet()); - } - - mCurrentList = new ArrayList(); - for(V v : rootSet) { - distanceDecorator.put(v, new Integer(0)); - mCurrentList.add(v); - mUnvisitedVertices.remove(v); - mVerticesInOrderVisited.add(v); - } - } - - private void addPredecessor(V predecessor,V sucessor) { - HashSet predecessors = mPredecessorMap.get(sucessor); - predecessors.add(predecessor); - } - - /** - * Computes the distances of all the node from the starting root nodes. If there is more than one root node - * the minimum distance from each root node is used as the designated distance to a given node. Also keeps track - * of the predecessors of each node traversed as well as the order of nodes traversed. - * @param graph the graph to label - * @param rootSet the set of starting vertices to traverse from - */ - public void labelDistances(Hypergraph graph, Set rootSet) { - - initialize(graph,rootSet); - - int distance = 1; - while (true) { - List newList = new ArrayList(); - for(V currentVertex : mCurrentList) { - if(graph.containsVertex(currentVertex)) { - for(V next : graph.getSuccessors(currentVertex)) { - visitNewVertex(currentVertex,next, distance, newList); - } - } - } - if (newList.size() == 0) break; - mCurrentList = newList; - distance++; - } - - for(V v : mUnvisitedVertices) { - distanceDecorator.put(v,new Integer(-1)); - } - } - - /** - * Computes the distances of all the node from the specified root node. Also keeps track - * of the predecessors of each node traversed as well as the order of nodes traversed. 
- * @param graph the graph to label - * @param root the single starting vertex to traverse from - */ - public void labelDistances(Hypergraph graph, V root) { - labelDistances(graph, Collections.singleton(root)); - } - - private void visitNewVertex(V predecessor, V neighbor, int distance, List newList) { - if (mUnvisitedVertices.contains(neighbor)) { - distanceDecorator.put(neighbor, new Integer(distance)); - newList.add(neighbor); - mVerticesInOrderVisited.add(neighbor); - mUnvisitedVertices.remove(neighbor); - } - int predecessorDistance = distanceDecorator.get(predecessor).intValue(); - int successorDistance = distanceDecorator.get(neighbor).intValue(); - if (predecessorDistance < successorDistance) { - addPredecessor(predecessor,neighbor); - } - } - - /** - * Returns a map from vertices to minimum distances from the original source(s). - * Must be called after {@code labelDistances} in order to contain valid data. - */ - public Map getDistanceDecorator() { - return distanceDecorator; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/DijkstraDistance.java b/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/DijkstraDistance.java deleted file mode 100644 index a91ed894..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/DijkstraDistance.java +++ /dev/null @@ -1,524 +0,0 @@ -/* - * Created on Jul 9, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.Collection; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; - -import edu.uci.ics.jung.algorithms.util.BasicMapEntry; -import edu.uci.ics.jung.algorithms.util.MapBinaryHeap; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - *

- * Calculates distances in a specified graph, using
- * Dijkstra's single-source-shortest-path algorithm. All edge weights
- * in the graph must be nonnegative; if any edge with negative weight is
- * found in the course of calculating distances, an
- * IllegalArgumentException will be thrown.
- * (Note: this exception will only be thrown when such an edge would be
- * used to update a given tentative distance;
- * the algorithm does not check for negative-weight edges "up front".)
- *
- * Distances and partial results are optionally cached (by this instance)
- * for later reference. Thus, if the 10 closest vertices to a specified source
- * vertex are known, calculating the 20 closest vertices does not require
- * starting Dijkstra's algorithm over from scratch.
- *
- * Distances are stored as double-precision values.
- * If a vertex is not reachable from the specified source vertex, no
- * distance is stored. This is new behavior with version 1.4;
- * the previous behavior was to store a value of
- * Double.POSITIVE_INFINITY. This change gives the algorithm
- * an approximate complexity of O(kD log k), where k is either the number of
- * requested targets or the number of reachable vertices (whichever is smaller),
- * and D is the average degree of a vertex.
- *
- * The elements in the maps returned by getDistanceMap
- * are ordered (that is, returned by the iterator) by nondecreasing distance from source.
- *
- * Users are cautioned that distances calculated should be assumed to
- * be invalidated by changes to the graph, and should invoke reset()
- * when appropriate so that the distances can be recalculated.
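A hypothetical usage sketch of the class whose javadoc appears above, written against the JUNG-2-style API visible in this diff; the graph-construction calls, the vertex and edge types, and the printed values are assumptions for illustration rather than something this patch documents.

    // Hypothetical usage sketch; API details are assumed, not confirmed by this diff.
    import edu.uci.ics.jung.algorithms.shortestpath.DijkstraDistance;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;
    import java.util.Map;

    public class DijkstraUsageSketch {
        public static void main(String[] args) {
            DirectedSparseGraph<String, Integer> g = new DirectedSparseGraph<String, Integer>();
            g.addVertex("a"); g.addVertex("b"); g.addVertex("c");
            g.addEdge(1, "a", "b");   // edge objects are just Integers in this toy graph
            g.addEdge(2, "b", "c");

            // unweighted constructor: all edge weights treated as 1, results cached
            DijkstraDistance<String, Integer> dd = new DijkstraDistance<String, Integer>(g);
            Number d = dd.getDistance("a", "c");              // expected 2.0 on this graph
            Map<String, Number> all = dd.getDistanceMap("a"); // ordered by nondecreasing distance
            System.out.println(d + " " + all);

            dd.reset();   // discard cached distances if the graph is later modified
        }
    }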

          - * - * @author Joshua O'Madadhain - * @author Tom Nelson converted to jung2 - */ -public class DijkstraDistance implements Distance -{ - protected Hypergraph g; - protected Transformer nev; - protected Map sourceMap; // a map of source vertices to an instance of SourceData - protected boolean cached; - protected double max_distance; - protected int max_targets; - - /** - *

          Creates an instance of DijkstraShortestPath for - * the specified graph and the specified method of extracting weights - * from edges, which caches results locally if and only if - * cached is true. - * - * @param g the graph on which distances will be calculated - * @param nev the class responsible for returning weights for edges - * @param cached specifies whether the results are to be cached - */ - public DijkstraDistance(Hypergraph g, Transformer nev, boolean cached) { - this.g = g; - this.nev = nev; - this.sourceMap = new HashMap(); - this.cached = cached; - this.max_distance = Double.POSITIVE_INFINITY; - this.max_targets = Integer.MAX_VALUE; - } - - /** - *

          Creates an instance of DijkstraShortestPath for - * the specified graph and the specified method of extracting weights - * from edges, which caches results locally. - * - * @param g the graph on which distances will be calculated - * @param nev the class responsible for returning weights for edges - */ - public DijkstraDistance(Hypergraph g, Transformer nev) { - this(g, nev, true); - } - - /** - *

          Creates an instance of DijkstraShortestPath for - * the specified unweighted graph (that is, all weights 1) which - * caches results locally. - * - * @param g the graph on which distances will be calculated - */ - @SuppressWarnings("unchecked") - public DijkstraDistance(Graph g) { - this(g, new ConstantTransformer(1), true); - } - - /** - *

          Creates an instance of DijkstraShortestPath for - * the specified unweighted graph (that is, all weights 1) which - * caches results locally. - * - * @param g the graph on which distances will be calculated - * @param cached specifies whether the results are to be cached - */ - @SuppressWarnings("unchecked") - public DijkstraDistance(Graph g, boolean cached) { - this(g, new ConstantTransformer(1), cached); - } - - /** - * Implements Dijkstra's single-source shortest-path algorithm for - * weighted graphs. Uses a MapBinaryHeap as the priority queue, - * which gives this algorithm a time complexity of O(m lg n) (m = # of edges, n = - * # of vertices). - * This algorithm will terminate when any of the following have occurred (in order - * of priority): - *

            - *
          • the distance to the specified target (if any) has been found - *
          • no more vertices are reachable - *
          • the specified # of distances have been found, or the maximum distance - * desired has been exceeded - *
          • all distances have been found - *
          - * - * @param source the vertex from which distances are to be measured - * @param numDests the number of distances to measure - * @param targets the set of vertices to which distances are to be measured - */ - protected LinkedHashMap singleSourceShortestPath(V source, Collection targets, int numDests) - { - SourceData sd = getSourceData(source); - - Set to_get = new HashSet(); - if (targets != null) { - to_get.addAll(targets); - Set existing_dists = sd.distances.keySet(); - for(V o : targets) { - if (existing_dists.contains(o)) - to_get.remove(o); - } - } - - // if we've exceeded the max distance or max # of distances we're willing to calculate, or - // if we already have all the distances we need, - // terminate - if (sd.reached_max || - (targets != null && to_get.isEmpty()) || - (sd.distances.size() >= numDests)) - { - return sd.distances; - } - - while (!sd.unknownVertices.isEmpty() && (sd.distances.size() < numDests || !to_get.isEmpty())) - { - Map.Entry p = sd.getNextVertex(); - V v = p.getKey(); - double v_dist = p.getValue().doubleValue(); - to_get.remove(v); - if (v_dist > this.max_distance) - { - // we're done; put this vertex back in so that we're not including - // a distance beyond what we specified - sd.restoreVertex(v, v_dist); - sd.reached_max = true; - break; - } - sd.dist_reached = v_dist; - - if (sd.distances.size() >= this.max_targets) - { - sd.reached_max = true; - break; - } - - for (E e : getEdgesToCheck(v) ) - { - for (V w : g.getIncidentVertices(e)) - { - if (!sd.distances.containsKey(w)) - { - double edge_weight = nev.transform(e).doubleValue(); - if (edge_weight < 0) - throw new IllegalArgumentException("Edges weights must be non-negative"); - double new_dist = v_dist + edge_weight; - if (!sd.estimatedDistances.containsKey(w)) - { - sd.createRecord(w, e, new_dist); - } - else - { - double w_dist = ((Double)sd.estimatedDistances.get(w)).doubleValue(); - if (new_dist < w_dist) // update tentative distance & path for w - sd.update(w, e, new_dist); - } - } - } - } - } - return sd.distances; - } - - protected SourceData getSourceData(V source) - { - SourceData sd = sourceMap.get(source); - if (sd == null) - sd = new SourceData(source); - return sd; - } - - /** - * Returns the set of edges incident to v that should be tested. - * By default, this is the set of outgoing edges for instances of Graph, - * the set of incident edges for instances of Hypergraph, - * and is otherwise undefined. - */ - protected Collection getEdgesToCheck(V v) - { - if (g instanceof Graph) - return ((Graph)g).getOutEdges(v); - else - return g.getIncidentEdges(v); - - } - - - /** - * Returns the length of a shortest path from the source to the target vertex, - * or null if the target is not reachable from the source. - * If either vertex is not in the graph for which this instance - * was created, throws IllegalArgumentException. 
- * - * @see #getDistanceMap(Object) - * @see #getDistanceMap(Object,int) - */ - public Number getDistance(V source, V target) - { - if (g.containsVertex(target) == false) - throw new IllegalArgumentException("Specified target vertex " + - target + " is not part of graph " + g); - if (g.containsVertex(source) == false) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - Set targets = new HashSet(); - targets.add(target); - Map distanceMap = getDistanceMap(source, targets); - return distanceMap.get(target); - } - - - /** - * Returns a {@code Map} from each element {@code t} of {@code targets} to the - * shortest-path distance from {@code source} to {@code t}. - */ - public Map getDistanceMap(V source, Collection targets) - { - if (g.containsVertex(source) == false) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - if (targets.size() > max_targets) - throw new IllegalArgumentException("size of target set exceeds maximum " + - "number of targets allowed: " + this.max_targets); - - Map distanceMap = - singleSourceShortestPath(source, targets, - Math.min(g.getVertexCount(), max_targets)); - if (!cached) - reset(source); - - return distanceMap; - } - - /** - *

          Returns a LinkedHashMap which maps each vertex - * in the graph (including the source vertex) - * to its distance from the source vertex. - * The map's iterator will return the elements in order of - * increasing distance from source.

          - * - *

          The size of the map returned will be the number of - * vertices reachable from source.

          - * - * @see #getDistanceMap(Object,int) - * @see #getDistance(Object,Object) - * @param source the vertex from which distances are measured - */ - public Map getDistanceMap(V source) - { - return getDistanceMap(source, Math.min(g.getVertexCount(), max_targets)); - } - - - - /** - *

          Returns a LinkedHashMap which maps each of the closest - * numDests vertices to the source vertex - * in the graph (including the source vertex) - * to its distance from the source vertex. Throws - * an IllegalArgumentException if source - * is not in this instance's graph, or if numDests is - * either less than 1 or greater than the number of vertices in the - * graph.

          - * - *

          The size of the map returned will be the smaller of - * numDests and the number of vertices reachable from - * source. - * - * @see #getDistanceMap(Object) - * @see #getDistance(Object,Object) - * @param source the vertex from which distances are measured - * @param numDests the number of vertices for which to measure distances - */ - public LinkedHashMap getDistanceMap(V source, int numDests) - { - - if(g.getVertices().contains(source) == false) { - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - } - if (numDests < 1 || numDests > g.getVertexCount()) - throw new IllegalArgumentException("numDests must be >= 1 " + - "and <= g.numVertices()"); - - if (numDests > max_targets) - throw new IllegalArgumentException("numDests must be <= the maximum " + - "number of targets allowed: " + this.max_targets); - - LinkedHashMap distanceMap = - singleSourceShortestPath(source, null, numDests); - - if (!cached) - reset(source); - - return distanceMap; - } - - /** - * Allows the user to specify the maximum distance that this instance will calculate. - * Any vertices past this distance will effectively be unreachable from the source, in - * the sense that the algorithm will not calculate the distance to any vertices which - * are farther away than this distance. A negative value for max_dist - * will ensure that no further distances are calculated. - * - *

          This can be useful for limiting the amount of time and space used by this algorithm - * if the graph is very large.

          - * - *

          Note: if this instance has already calculated distances greater than max_dist, - * and the results are cached, those results will still be valid and available; this limit - * applies only to subsequent distance calculations.

          - * @see #setMaxTargets(int) - */ - public void setMaxDistance(double max_dist) - { - this.max_distance = max_dist; - for (V v : sourceMap.keySet()) - { - SourceData sd = sourceMap.get(v); - sd.reached_max = (this.max_distance <= sd.dist_reached) || (sd.distances.size() >= max_targets); - } - } - - /** - * Allows the user to specify the maximum number of target vertices per source vertex - * for which this instance will calculate distances. Once this threshold is reached, - * any further vertices will effectively be unreachable from the source, in - * the sense that the algorithm will not calculate the distance to any more vertices. - * A negative value for max_targets will ensure that no further distances are calculated. - * - *

          This can be useful for limiting the amount of time and space used by this algorithm - * if the graph is very large.

          - * - *

          Note: if this instance has already calculated distances to a greater number of - * targets than max_targets, and the results are cached, those results - * will still be valid and available; this limit applies only to subsequent distance - * calculations.
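Taken together, setMaxDistance and setMaxTargets bound how much work a single query performs. A hypothetical continuation of the DijkstraDistance sketch above, reusing g and nev from there; the limit values are assumptions chosen only for illustration.

DijkstraDistance<String, String> bounded =
        new DijkstraDistance<String, String>(g, nev);
bounded.setMaxDistance(3.0);  // vertices farther than 3.0 are treated as unreachable
bounded.setMaxTargets(5);     // stop once the 5 nearest targets have been settled
Map<String, Number> near = bounded.getDistanceMap("a");  // at most 5 entries, all within 3.0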

          - * @see #setMaxDistance(double) - */ - public void setMaxTargets(int max_targets) - { - this.max_targets = max_targets; - for (V v : sourceMap.keySet()) - { - SourceData sd = sourceMap.get(v); - sd.reached_max = (this.max_distance <= sd.dist_reached) || (sd.distances.size() >= max_targets); - } - } - - /** - * Clears all stored distances for this instance. - * Should be called whenever the graph is modified (edge weights - * changed or edges added/removed). If the user knows that - * some currently calculated distances are unaffected by a - * change, reset(V) may be appropriate instead. - * - * @see #reset(Object) - */ - public void reset() - { - sourceMap = new HashMap(); - } - - /** - * Specifies whether or not this instance of DijkstraShortestPath - * should cache its results (final and partial) for future reference. - * - * @param enable true if the results are to be cached, and - * false otherwise - */ - public void enableCaching(boolean enable) - { - this.cached = enable; - } - - /** - * Clears all stored distances for the specified source vertex - * source. Should be called whenever the stored distances - * from this vertex are invalidated by changes to the graph. - * - * @see #reset() - */ - public void reset(V source) - { - sourceMap.put(source, null); - } - - /** - * Compares according to distances, so that the BinaryHeap knows how to - * order the tree. - */ - protected static class VertexComparator implements Comparator - { - private Map distances; - - protected VertexComparator(Map distances) - { - this.distances = distances; - } - - public int compare(V o1, V o2) - { - return ((Double) distances.get(o1)).compareTo((Double) distances.get(o2)); - } - } - - /** - * For a given source vertex, holds the estimated and final distances, - * tentative and final assignments of incoming edges on the shortest path from - * the source vertex, and a priority queue (ordered by estimated distance) - * of the vertices for which distances are unknown. 
- * - * @author Joshua O'Madadhain - */ - protected class SourceData - { - protected LinkedHashMap distances; - protected Map estimatedDistances; - protected MapBinaryHeap unknownVertices; - protected boolean reached_max = false; - protected double dist_reached = 0; - - protected SourceData(V source) - { - distances = new LinkedHashMap(); - estimatedDistances = new HashMap(); - unknownVertices = new MapBinaryHeap(new VertexComparator(estimatedDistances)); - - sourceMap.put(source, this); - - // initialize priority queue - estimatedDistances.put(source, new Double(0)); // distance from source to itself is 0 - unknownVertices.add(source); - reached_max = false; - dist_reached = 0; - } - - protected Map.Entry getNextVertex() - { - V v = unknownVertices.remove(); - Double dist = (Double)estimatedDistances.remove(v); - distances.put(v, dist); - return new BasicMapEntry(v, dist); - } - - protected void update(V dest, E tentative_edge, double new_dist) - { - estimatedDistances.put(dest, new_dist); - unknownVertices.update(dest); - } - - protected void createRecord(V w, E e, double new_dist) - { - estimatedDistances.put(w, new_dist); - unknownVertices.add(w); - } - - protected void restoreVertex(V v, double dist) - { - estimatedDistances.put(v, dist); - unknownVertices.add(v); - distances.remove(v); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/DijkstraShortestPath.java b/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/DijkstraShortestPath.java deleted file mode 100644 index 749cf0bf..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/DijkstraShortestPath.java +++ /dev/null @@ -1,282 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; - -/** - *

          Calculates distances and shortest paths using Dijkstra's - * single-source-shortest-path algorithm. This is a lightweight - * extension of DijkstraDistance that also stores - * path information, so that the shortest paths can be reconstructed.

          - * - *

          The elements in the maps returned by - * getIncomingEdgeMap are ordered (that is, returned - * by the iterator) by nondecreasing distance from source.
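A hypothetical sketch of path reconstruction with this class follows; it is not from the deleted sources, and the unweighted toy graph and edge names are assumptions.

import java.util.List;

import edu.uci.ics.jung.algorithms.shortestpath.DijkstraShortestPath;
import edu.uci.ics.jung.graph.DirectedSparseGraph;
import edu.uci.ics.jung.graph.Graph;

public class DijkstraPathSketch {
    public static void main(String[] args) {
        Graph<String, String> g = new DirectedSparseGraph<String, String>();
        g.addEdge("ab", "a", "b");
        g.addEdge("bc", "b", "c");

        // unweighted constructor: every edge counts as 1
        DijkstraShortestPath<String, String> dsp =
                new DijkstraShortestPath<String, String>(g);
        List<String> path = dsp.getPath("a", "c");
        System.out.println(path);                      // [ab, bc]
        System.out.println(dsp.getDistance("a", "c")); // 2.0
    }
}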

          - * - * @author Joshua O'Madadhain - * @author Tom Nelson converted to jung2 - * @see DijkstraDistance - */ -public class DijkstraShortestPath extends DijkstraDistance implements ShortestPath -{ - /** - *

          Creates an instance of DijkstraShortestPath for - * the specified graph and the specified method of extracting weights - * from edges, which caches results locally if and only if - * cached is true. - * - * @param g the graph on which distances will be calculated - * @param nev the class responsible for returning weights for edges - * @param cached specifies whether the results are to be cached - */ - public DijkstraShortestPath(Graph g, Transformer nev, boolean cached) - { - super(g, nev, cached); - } - - /** - *

          Creates an instance of DijkstraShortestPath for - * the specified graph and the specified method of extracting weights - * from edges, which caches results locally. - * - * @param g the graph on which distances will be calculated - * @param nev the class responsible for returning weights for edges - */ - public DijkstraShortestPath(Graph g, Transformer nev) - { - super(g, nev); - } - - /** - *

          Creates an instance of DijkstraShortestPath for - * the specified unweighted graph (that is, all weights 1) which - * caches results locally. - * - * @param g the graph on which distances will be calculated - */ - public DijkstraShortestPath(Graph g) - { - super(g); - } - - /** - *

          Creates an instance of DijkstraShortestPath for - * the specified unweighted graph (that is, all weights 1) which - * caches results locally. - * - * @param g the graph on which distances will be calculated - * @param cached specifies whether the results are to be cached - */ - public DijkstraShortestPath(Graph g, boolean cached) - { - super(g, cached); - } - - @Override - protected SourceData getSourceData(V source) - { - SourceData sd = sourceMap.get(source); - if (sd == null) - sd = new SourcePathData(source); - return sd; - } - - /** - *

          Returns the last edge on a shortest path from source - * to target, or null if target is not - * reachable from source.

          - * - *

          If either vertex is not in the graph for which this instance - * was created, throws IllegalArgumentException.

          - */ - public E getIncomingEdge(V source, V target) - { - if (!g.containsVertex(source)) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - if (!g.containsVertex(target)) - throw new IllegalArgumentException("Specified target vertex " + - target + " is not part of graph " + g); - - Set targets = new HashSet(); - targets.add(target); - singleSourceShortestPath(source, targets, g.getVertexCount()); - Map incomingEdgeMap = - ((SourcePathData)sourceMap.get(source)).incomingEdges; - E incomingEdge = incomingEdgeMap.get(target); - - if (!cached) - reset(source); - - return incomingEdge; - } - - /** - *

          Returns a LinkedHashMap which maps each vertex - * in the graph (including the source vertex) - * to the last edge on the shortest path from the - * source vertex. - * The map's iterator will return the elements in order of - * increasing distance from source.
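Continuing the sketch above, the incoming-edge map is the structure that getPath walks backwards over; a hypothetical fragment (assumes java.util.Map is imported and dsp and the toy graph are as defined there):

// maps each reachable vertex to the last edge on its shortest path from "a"
Map<String, String> incoming = dsp.getIncomingEdgeMap("a");  // {a=null, b=ab, c=bc}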

          - * - * @see DijkstraDistance#getDistanceMap(Object,int) - * @see DijkstraDistance#getDistance(Object,Object) - * @param source the vertex from which distances are measured - */ - public Map getIncomingEdgeMap(V source) - { - return getIncomingEdgeMap(source, g.getVertexCount()); - } - - /** - * Returns a List of the edges on the shortest path from - * source to target, in order of their - * occurrence on this path. - * If either vertex is not in the graph for which this instance - * was created, throws IllegalArgumentException. - */ - public List getPath(V source, V target) - { - if(!g.containsVertex(source)) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - if(!g.containsVertex(target)) - throw new IllegalArgumentException("Specified target vertex " + - target + " is not part of graph " + g); - - LinkedList path = new LinkedList(); - - // collect path data; must use internal method rather than - // calling getIncomingEdge() because getIncomingEdge() may - // wipe out results if results are not cached - Set targets = new HashSet(); - targets.add(target); - singleSourceShortestPath(source, targets, g.getVertexCount()); - Map incomingEdges = - ((SourcePathData)sourceMap.get(source)).incomingEdges; - - if (incomingEdges.isEmpty() || incomingEdges.get(target) == null) - return path; - V current = target; - while (!current.equals(source)) - { - E incoming = incomingEdges.get(current); - path.addFirst(incoming); - current = ((Graph)g).getOpposite(current, incoming); - } - return path; - } - - - /** - *

          Returns a LinkedHashMap which maps each of the closest - * numDist vertices to the source vertex - * in the graph (including the source vertex) - * to the incoming edge along the path from that vertex. Throws - * an IllegalArgumentException if source - * is not in this instance's graph, or if numDests is - * either less than 1 or greater than the number of vertices in the - * graph. - * - * @see #getIncomingEdgeMap(Object) - * @see #getPath(Object,Object) - * @param source the vertex from which distances are measured - * @param numDests the number of vertics for which to measure distances - */ - public LinkedHashMap getIncomingEdgeMap(V source, int numDests) - { - if (g.getVertices().contains(source) == false) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - if (numDests < 1 || numDests > g.getVertexCount()) - throw new IllegalArgumentException("numDests must be >= 1 " + - "and <= g.numVertices()"); - - singleSourceShortestPath(source, null, numDests); - - LinkedHashMap incomingEdgeMap = - ((SourcePathData)sourceMap.get(source)).incomingEdges; - - if (!cached) - reset(source); - - return incomingEdgeMap; - } - - - /** - * For a given source vertex, holds the estimated and final distances, - * tentative and final assignments of incoming edges on the shortest path from - * the source vertex, and a priority queue (ordered by estimaed distance) - * of the vertices for which distances are unknown. - * - * @author Joshua O'Madadhain - */ - protected class SourcePathData extends SourceData - { - protected Map tentativeIncomingEdges; - protected LinkedHashMap incomingEdges; - - protected SourcePathData(V source) - { - super(source); - incomingEdges = new LinkedHashMap(); - tentativeIncomingEdges = new HashMap(); - } - - @Override - public void update(V dest, E tentative_edge, double new_dist) - { - super.update(dest, tentative_edge, new_dist); - tentativeIncomingEdges.put(dest, tentative_edge); - } - - @Override - public Map.Entry getNextVertex() - { - Map.Entry p = super.getNextVertex(); - V v = p.getKey(); - E incoming = tentativeIncomingEdges.remove(v); - incomingEdges.put(v, incoming); - return p; - } - - @Override - public void restoreVertex(V v, double dist) - { - super.restoreVertex(v, dist); - E incoming = incomingEdges.get(v); - tentativeIncomingEdges.put(v, incoming); - } - - @Override - public void createRecord(V w, E e, double new_dist) - { - super.createRecord(w, e, new_dist); - tentativeIncomingEdges.put(w, e); - } - - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/Distance.java b/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/Distance.java deleted file mode 100644 index 85820d14..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/Distance.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Created on Apr 2, 2004 - * - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.Map; - - -/** - * An interface for classes which calculate the distance between - * one vertex and another. - * - * @author Joshua O'Madadhain - */ -public interface Distance -{ - /** - * Returns the distance from the source vertex - * to the target vertex. If target - * is not reachable from source, returns null. 
- */ - Number getDistance(V source, V target); - - /** - *

          Returns a Map which maps each vertex - * in the graph (including the source vertex) - * to its distance (represented as a Number) - * from source. If any vertex - * is not reachable from source, no - * distance is stored for that vertex. - */ - Map getDistanceMap(V source); -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/DistanceStatistics.java b/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/DistanceStatistics.java deleted file mode 100644 index f0f20a32..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/DistanceStatistics.java +++ /dev/null @@ -1,136 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.shortestpath; -import java.util.Collection; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.scoring.ClosenessCentrality; -import edu.uci.ics.jung.algorithms.scoring.util.VertexScoreTransformer; -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Statistics relating to vertex-vertex distances in a graph. - * - *

          Formerly known as GraphStatistics in JUNG 1.x.

          - * - * @author Scott White - * @author Joshua O'Madadhain - */ -public class DistanceStatistics -{ - /** - * For each vertex v in graph, - * calculates the average shortest path length from v - * to all other vertices in graph using the metric - * specified by d, and returns the results in a - * Map from vertices to Double values. - * If there exists an ordered pair <u,v> - * for which d.getDistance(u,v) returns null, - * then the average distance value for u will be stored - * as Double.POSITIVE_INFINITY). - * - *

          Does not include self-distances (path lengths from v - * to v). - * - *

          To calculate the average distances, ignoring edge weights if any: - *

          -     * Map distances = DistanceStatistics.averageDistances(g, new UnweightedShortestPath(g));
          -     * 
          - * To calculate the average distances respecting edge weights: - *
          -     * DijkstraShortestPath dsp = new DijkstraShortestPath(g, nev);
          -     * Map distances = DistanceStatistics.averageDistances(g, dsp);
          -     * 
          - * where nev is an instance of Transformer that - * is used to fetch the weight for each edge. - * - * @see edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath - * @see edu.uci.ics.jung.algorithms.shortestpath.DijkstraDistance - */ - public static Transformer averageDistances(Hypergraph graph, Distance d) - { - final ClosenessCentrality cc = new ClosenessCentrality(graph, d); - return new VertexScoreTransformer(cc); - } - - /** - * For each vertex v in g, - * calculates the average shortest path length from v - * to all other vertices in g, ignoring edge weights. - * @see #diameter(Hypergraph) - * @see edu.uci.ics.jung.algorithms.scoring.ClosenessCentrality - */ - public static Transformer averageDistances(Hypergraph g) - { - final ClosenessCentrality cc = new ClosenessCentrality(g, - new UnweightedShortestPath(g)); - return new VertexScoreTransformer(cc); - } - - /** - * Returns the diameter of g using the metric - * specified by d. The diameter is defined to be - * the maximum, over all pairs of vertices u,v, - * of the length of the shortest path from u to - * v. If the graph is disconnected (that is, not - * all pairs of vertices are reachable from one another), the - * value returned will depend on use_max: - * if use_max == true, the value returned - * will be the the maximum shortest path length over all pairs of connected - * vertices; otherwise it will be Double.POSITIVE_INFINITY. - */ - public static double diameter(Hypergraph g, Distance d, boolean use_max) - { - double diameter = 0; - Collection vertices = g.getVertices(); - for(V v : vertices) { - for(V w : vertices) { - - if (v.equals(w) == false) // don't include self-distances - { - Number dist = d.getDistance(v, w); - if (dist == null) - { - if (!use_max) - return Double.POSITIVE_INFINITY; - } - else - diameter = Math.max(diameter, dist.doubleValue()); - } - } - } - return diameter; - } - - /** - * Returns the diameter of g using the metric - * specified by d. The diameter is defined to be - * the maximum, over all pairs of vertices u,v, - * of the length of the shortest path from u to - * v, or Double.POSITIVE_INFINITY - * if any of these distances do not exist. - * @see #diameter(Hypergraph, Distance, boolean) - */ - public static double diameter(Hypergraph g, Distance d) - { - return diameter(g, d, false); - } - - /** - * Returns the diameter of g, ignoring edge weights. - * @see #diameter(Hypergraph, Distance, boolean) - */ - public static double diameter(Hypergraph g) - { - return diameter(g, new UnweightedShortestPath(g)); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest.java b/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest.java deleted file mode 100644 index 18cb0fe0..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest.java +++ /dev/null @@ -1,165 +0,0 @@ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.functors.ConstantTransformer; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.graph.Forest; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * For the input Graph, creates a MinimumSpanningTree - * using a variation of Prim's algorithm. 
- * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param - * @param - */ -public class MinimumSpanningForest { - - protected Graph graph; - protected Forest forest; - protected Map weights; - - /** - * Creates a Forest from the supplied Graph and supplied Factory, which - * is used to create a new, empty Forest. If non-null, the supplied root - * will be used as the root of the tree/forest. If the supplied root is - * null, or not present in the Graph, then an arbitrary Graph vertex - * will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created. - * @param graph the input graph - * @param factory the factory to use to create the new forest - * @param root the vertex of the graph to be used as the root of the forest - * @param weights edge weights - */ - public MinimumSpanningForest(Graph graph, Factory> factory, - V root, Map weights) { - this(graph, factory.create(), root, weights); - } - - /** - * Creates a minimum spanning forest from the supplied graph, populating the - * supplied Forest, which must be empty. - * If the supplied root is null, or not present in the Graph, - * then an arbitrary Graph vertex will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created - * @param graph the Graph to find MST in - * @param forest the Forest to populate. Must be empty - * @param root first Tree root, may be null - * @param weights edge weights, may be null - */ - public MinimumSpanningForest(Graph graph, Forest forest, - V root, Map weights) { - - if(forest.getVertexCount() != 0) { - throw new IllegalArgumentException("Supplied Forest must be empty"); - } - this.graph = graph; - this.forest = forest; - if(weights != null) { - this.weights = weights; - } - Set unfinishedEdges = new HashSet(graph.getEdges()); - if(graph.getVertices().contains(root)) { - this.forest.addVertex(root); - } - updateForest(forest.getVertices(), unfinishedEdges); - } - - /** - * Creates a minimum spanning forest from the supplied graph, populating the - * supplied Forest, which must be empty. - * If the supplied root is null, or not present in the Graph, - * then an arbitrary Graph vertex will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created - * @param graph the Graph to find MST in - * @param forest the Forest to populate. Must be empty - * @param root first Tree root, may be null - */ - @SuppressWarnings("unchecked") - public MinimumSpanningForest(Graph graph, Forest forest, - V root) { - - if(forest.getVertexCount() != 0) { - throw new IllegalArgumentException("Supplied Forest must be empty"); - } - this.graph = graph; - this.forest = forest; - this.weights = LazyMap.decorate(new HashMap(), - new ConstantTransformer(1.0)); - Set unfinishedEdges = new HashSet(graph.getEdges()); - if(graph.getVertices().contains(root)) { - this.forest.addVertex(root); - } - updateForest(forest.getVertices(), unfinishedEdges); - } - - /** - * Returns the generated forest. 
- */ - public Forest getForest() { - return forest; - } - - protected void updateForest(Collection tv, Collection unfinishedEdges) { - double minCost = Double.MAX_VALUE; - E nextEdge = null; - V nextVertex = null; - V currentVertex = null; - for(E e : unfinishedEdges) { - - if(forest.getEdges().contains(e)) continue; - // find the lowest cost edge, get its opposite endpoint, - // and then update forest from its Successors - Pair endpoints = graph.getEndpoints(e); - V first = endpoints.getFirst(); - V second = endpoints.getSecond(); - if(tv.contains(first) == true && tv.contains(second) == false) { - if(weights.get(e) < minCost) { - minCost = weights.get(e); - nextEdge = e; - currentVertex = first; - nextVertex = second; - } - } - if(graph.getEdgeType(e) == EdgeType.UNDIRECTED && - tv.contains(second) == true && tv.contains(first) == false) { - if(weights.get(e) < minCost) { - minCost = weights.get(e); - nextEdge = e; - currentVertex = second; - nextVertex = first; - } - } - } - - if(nextVertex != null && nextEdge != null) { - unfinishedEdges.remove(nextEdge); - forest.addEdge(nextEdge, currentVertex, nextVertex); - updateForest(forest.getVertices(), unfinishedEdges); - } - Collection leftovers = new HashSet(graph.getVertices()); - leftovers.removeAll(forest.getVertices()); - if(leftovers.size() > 0) { - V anotherRoot = leftovers.iterator().next(); - forest.addVertex(anotherRoot); - updateForest(forest.getVertices(), unfinishedEdges); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest2.java b/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest2.java deleted file mode 100644 index 13e800c7..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest2.java +++ /dev/null @@ -1,104 +0,0 @@ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.Collection; -import java.util.Set; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; - -import edu.uci.ics.jung.algorithms.cluster.WeakComponentClusterer; -import edu.uci.ics.jung.algorithms.filters.FilterUtils; -import edu.uci.ics.jung.graph.Forest; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.Tree; -import edu.uci.ics.jung.graph.util.TreeUtils; - -/** - * For the input Graph, creates a MinimumSpanningTree - * using a variation of Prim's algorithm. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param - * @param - */ -@SuppressWarnings("unchecked") -public class MinimumSpanningForest2 { - - protected Graph graph; - protected Forest forest; - protected Transformer weights = - (Transformer)new ConstantTransformer(1.0); - - /** - * create a Forest from the supplied Graph and supplied Factory, which - * is used to create a new, empty Forest. If non-null, the supplied root - * will be used as the root of the tree/forest. If the supplied root is - * null, or not present in the Graph, then an arbitary Graph vertex - * will be selected as the root. 
- * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created - * @param graph - * @param factory - * @param weights - */ - public MinimumSpanningForest2(Graph graph, - Factory> factory, - Factory> treeFactory, - Transformer weights) { - this(graph, factory.create(), - treeFactory, - weights); - } - - /** - * create a forest from the supplied graph, populating the - * supplied Forest, which must be empty. - * If the supplied root is null, or not present in the Graph, - * then an arbitary Graph vertex will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created - * @param graph the Graph to find MST in - * @param forest the Forest to populate. Must be empty - * @param weights edge weights, may be null - */ - public MinimumSpanningForest2(Graph graph, - Forest forest, - Factory> treeFactory, - Transformer weights) { - - if(forest.getVertexCount() != 0) { - throw new IllegalArgumentException("Supplied Forest must be empty"); - } - this.graph = graph; - this.forest = forest; - if(weights != null) { - this.weights = weights; - } - - WeakComponentClusterer wcc = - new WeakComponentClusterer(); - Set> component_vertices = wcc.transform(graph); - Collection> components = - FilterUtils.createAllInducedSubgraphs(component_vertices, graph); - - for(Graph component : components) { - PrimMinimumSpanningTree mst = - new PrimMinimumSpanningTree(treeFactory, this.weights); - Graph subTree = mst.transform(component); - if(subTree instanceof Tree) { - TreeUtils.addSubTree(forest, (Tree)subTree, null, null); - } - } - } - - /** - * Returns the generated forest. - */ - public Forest getForest() { - return forest; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/PrimMinimumSpanningTree.java b/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/PrimMinimumSpanningTree.java deleted file mode 100644 index b029dda7..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/PrimMinimumSpanningTree.java +++ /dev/null @@ -1,116 +0,0 @@ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * For the input Graph, creates a MinimumSpanningTree - * using a variation of Prim's algorithm. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param the vertex type - * @param the edge type - */ -@SuppressWarnings("unchecked") -public class PrimMinimumSpanningTree implements Transformer,Graph> { - - protected Factory> treeFactory; - protected Transformer weights; - - /** - * Creates an instance which generates a minimum spanning tree assuming constant edge weights. - */ - public PrimMinimumSpanningTree(Factory> factory) { - this(factory, new ConstantTransformer(1.0)); - } - - /** - * Creates an instance which generates a minimum spanning tree using the input edge weights. 
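A hypothetical sketch of extracting a minimum spanning tree with PrimMinimumSpanningTree; the DelegateForest and UndirectedSparseGraph implementations, the toy graph, and the weight values are assumptions, not part of the deleted sources.

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.collections15.Factory;
import org.apache.commons.collections15.Transformer;

import edu.uci.ics.jung.algorithms.shortestpath.PrimMinimumSpanningTree;
import edu.uci.ics.jung.graph.DelegateForest;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.UndirectedSparseGraph;

public class PrimSketch {
    public static void main(String[] args) {
        Graph<String, String> g = new UndirectedSparseGraph<String, String>();
        g.addEdge("ab", "a", "b");
        g.addEdge("bc", "b", "c");
        g.addEdge("ac", "a", "c");

        final Map<String, Double> w = new HashMap<String, Double>();
        w.put("ab", 1.0);
        w.put("bc", 1.0);
        w.put("ac", 10.0);

        // each call must produce a fresh, empty tree/forest for the result
        Factory<Graph<String, String>> treeFactory = new Factory<Graph<String, String>>() {
            public Graph<String, String> create() {
                return new DelegateForest<String, String>();
            }
        };
        Transformer<String, Double> weights = new Transformer<String, Double>() {
            public Double transform(String e) { return w.get(e); }
        };

        Graph<String, String> mst =
                new PrimMinimumSpanningTree<String, String>(treeFactory, weights).transform(g);
        System.out.println(mst.getEdges()); // ab and bc; the heavy edge ac is left out
    }
}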
- */ - public PrimMinimumSpanningTree(Factory> factory, - Transformer weights) { - this.treeFactory = factory; - if(weights != null) { - this.weights = weights; - } - } - - /** - * @param graph the Graph to find MST in - */ - public Graph transform(Graph graph) { - Set unfinishedEdges = new HashSet(graph.getEdges()); - Graph tree = treeFactory.create(); - V root = findRoot(graph); - if(graph.getVertices().contains(root)) { - tree.addVertex(root); - } else if(graph.getVertexCount() > 0) { - // pick an arbitrary vertex to make root - tree.addVertex(graph.getVertices().iterator().next()); - } - updateTree(tree, graph, unfinishedEdges); - - return tree; - } - - protected V findRoot(Graph graph) { - for(V v : graph.getVertices()) { - if(graph.getInEdges(v).size() == 0) { - return v; - } - } - // if there is no obvious root, pick any vertex - if(graph.getVertexCount() > 0) { - return graph.getVertices().iterator().next(); - } - // this graph has no vertices - return null; - } - - protected void updateTree(Graph tree, Graph graph, Collection unfinishedEdges) { - Collection tv = tree.getVertices(); - double minCost = Double.MAX_VALUE; - E nextEdge = null; - V nextVertex = null; - V currentVertex = null; - for(E e : unfinishedEdges) { - - if(tree.getEdges().contains(e)) continue; - // find the lowest cost edge, get its opposite endpoint, - // and then update forest from its Successors - Pair endpoints = graph.getEndpoints(e); - V first = endpoints.getFirst(); - V second = endpoints.getSecond(); - if((tv.contains(first) == true && tv.contains(second) == false)) { - if(weights.transform(e) < minCost) { - minCost = weights.transform(e); - nextEdge = e; - currentVertex = first; - nextVertex = second; - } - } else if((tv.contains(second) == true && tv.contains(first) == false)) { - if(weights.transform(e) < minCost) { - minCost = weights.transform(e); - nextEdge = e; - currentVertex = second; - nextVertex = first; - } - } - } - - if(nextVertex != null && nextEdge != null) { - unfinishedEdges.remove(nextEdge); - tree.addEdge(nextEdge, currentVertex, nextVertex); - updateTree(tree, graph, unfinishedEdges); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/ShortestPath.java b/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/ShortestPath.java deleted file mode 100644 index a922cdd3..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/ShortestPath.java +++ /dev/null @@ -1,29 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -* -* Created on Feb 12, 2004 -*/ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.Map; - - -/** - * An interface for algorithms that calculate shortest paths. - */ -public interface ShortestPath -{ - /** - *

          Returns a Map which maps each vertex - * in the graph (including the source vertex) - * to the last edge on the shortest path from the - * source vertex. - */ - Map getIncomingEdgeMap(V source); -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/ShortestPathUtils.java b/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/ShortestPathUtils.java deleted file mode 100644 index d3e59ebe..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/ShortestPathUtils.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Created on Jul 10, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.LinkedList; -import java.util.List; -import java.util.Map; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * Utilities relating to the shortest paths in a graph. - */ -public class ShortestPathUtils -{ - /** - * Returns a List of the edges on the shortest path from - * source to target, in order of their - * occurrence on this path. - */ - public static List getPath(Graph graph, ShortestPath sp, V source, V target) - { - LinkedList path = new LinkedList(); - - Map incomingEdges = sp.getIncomingEdgeMap(source); - - if (incomingEdges.isEmpty() || incomingEdges.get(target) == null) - return path; - V current = target; - while (!current.equals(source)) - { - E incoming = incomingEdges.get(current); - path.addFirst(incoming); - Pair endpoints = graph.getEndpoints(incoming); - if(endpoints.getFirst().equals(current)) { - current = endpoints.getSecond(); - } else { - current = endpoints.getFirst(); - } - //incoming.getOpposite(current); - } - return path; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/UnweightedShortestPath.java b/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/UnweightedShortestPath.java deleted file mode 100644 index 1d3390c0..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/UnweightedShortestPath.java +++ /dev/null @@ -1,151 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.shortestpath; - -import java.util.HashMap; -import java.util.Map; - -import edu.uci.ics.jung.graph.Hypergraph; - -/** - * Computes the shortest path distances for graphs whose edges are not weighted (using BFS). 
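A hypothetical sketch combining UnweightedShortestPath (BFS hop counts) with ShortestPathUtils.getPath; the toy graph and edge names are assumptions made for illustration.

import java.util.List;

import edu.uci.ics.jung.algorithms.shortestpath.ShortestPathUtils;
import edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath;
import edu.uci.ics.jung.graph.DirectedSparseGraph;
import edu.uci.ics.jung.graph.Graph;

public class UnweightedPathSketch {
    public static void main(String[] args) {
        Graph<String, String> g = new DirectedSparseGraph<String, String>();
        g.addEdge("ab", "a", "b");
        g.addEdge("bc", "b", "c");

        UnweightedShortestPath<String, String> usp =
                new UnweightedShortestPath<String, String>(g);
        System.out.println(usp.getDistance("a", "c")); // 2 (hop count found by BFS)

        List<String> path = ShortestPathUtils.getPath(g, usp, "a", "c");
        System.out.println(path);                      // [ab, bc]
    }
}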
- * - * @author Scott White - */ -public class UnweightedShortestPath - implements ShortestPath, Distance -{ - private Map> mDistanceMap; - private Map> mIncomingEdgeMap; - private Hypergraph mGraph; - private Map distances = new HashMap(); - - /** - * Constructs and initializes algorithm - * @param g the graph - */ - public UnweightedShortestPath(Hypergraph g) - { - mDistanceMap = new HashMap>(); - mIncomingEdgeMap = new HashMap>(); - mGraph = g; - } - - /** - * @see edu.uci.ics.jung.algorithms.shortestpath.Distance#getDistance(Object, Object) - */ - public Number getDistance(V source, V target) - { - Map sourceSPMap = getDistanceMap(source); - return sourceSPMap.get(target); - } - - /** - * @see edu.uci.ics.jung.algorithms.shortestpath.Distance#getDistanceMap(Object) - */ - public Map getDistanceMap(V source) - { - Map sourceSPMap = mDistanceMap.get(source); - if (sourceSPMap == null) - { - computeShortestPathsFromSource(source); - sourceSPMap = mDistanceMap.get(source); - } - return sourceSPMap; - } - - /** - * @see edu.uci.ics.jung.algorithms.shortestpath.ShortestPath#getIncomingEdgeMap(Object) - */ - public Map getIncomingEdgeMap(V source) - { - Map sourceIEMap = mIncomingEdgeMap.get(source); - if (sourceIEMap == null) - { - computeShortestPathsFromSource(source); - sourceIEMap = mIncomingEdgeMap.get(source); - } - return sourceIEMap; - } - - - /** - * Computes the shortest path distances from a given node to all other nodes. - * @param source the source node - */ - private void computeShortestPathsFromSource(V source) - { - BFSDistanceLabeler labeler = new BFSDistanceLabeler(); - labeler.labelDistances(mGraph, source); - distances = labeler.getDistanceDecorator(); - Map currentSourceSPMap = new HashMap(); - Map currentSourceEdgeMap = new HashMap(); - - for(V vertex : mGraph.getVertices()) { - - Number distanceVal = distances.get(vertex); - // BFSDistanceLabeler uses -1 to indicate unreachable vertices; - // don't bother to store unreachable vertices - if (distanceVal != null && distanceVal.intValue() >= 0) - { - currentSourceSPMap.put(vertex, distanceVal); - int minDistance = distanceVal.intValue(); - for(E incomingEdge : mGraph.getInEdges(vertex)) - { - for (V neighbor : mGraph.getIncidentVertices(incomingEdge)) - { - if (neighbor.equals(vertex)) - continue; -// V neighbor = mGraph.getOpposite(vertex, incomingEdge); - - Number predDistanceVal = distances.get(neighbor); - - int pred_distance = predDistanceVal.intValue(); - if (pred_distance < minDistance && pred_distance >= 0) - { - minDistance = predDistanceVal.intValue(); - currentSourceEdgeMap.put(vertex, incomingEdge); - } - } - } - } - } - mDistanceMap.put(source, currentSourceSPMap); - mIncomingEdgeMap.put(source, currentSourceEdgeMap); - } - - /** - * Clears all stored distances for this instance. - * Should be called whenever the graph is modified (edge weights - * changed or edges added/removed). If the user knows that - * some currently calculated distances are unaffected by a - * change, reset(V) may be appropriate instead. - * - * @see #reset(Object) - */ - public void reset() - { - mDistanceMap.clear(); - mIncomingEdgeMap.clear(); - } - - /** - * Clears all stored distances for the specified source vertex - * source. Should be called whenever the stored distances - * from this vertex are invalidated by changes to the graph. 
- * - * @see #reset() - */ - public void reset(V v) - { - mDistanceMap.remove(v); - mIncomingEdgeMap.remove(v); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/package.html deleted file mode 100644 index 01f27b5f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/shortestpath/package.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - -Provides interfaces and classes for calculating (geodesic) distances and shortest paths. Currently includes: -

            -
          • DijkstraDistance: finds the distances from a specified source vertex to other vertices in a -weighted graph with no negative cycles -
          • DijkstraShortestPath: extends DijkstraDistance, also finds shortest paths -
          • Distance: an interface for defining vertex-vertex distances -
          • PrimMinimumSpanningTree: identifies the spanning tree for a graph of least total edge weight -
          • ShortestPath: an interface for shortest-path algorithms -
          • ShortestPathUtils: utility functions for manipulating shortest paths -
          • UnweightedShortestPath: finds the distances from a specified source vertex to other vertices in an -unweighted graph -
          - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/transformation/DirectionTransformer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/transformation/DirectionTransformer.java deleted file mode 100644 index 8fa33b88..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/transformation/DirectionTransformer.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Apr 21, 2004 - */ -package edu.uci.ics.jung.algorithms.transformation; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - *

          Functions for transforming graphs into directed or undirected graphs.

          - * - * - * @author Danyel Fisher - * @author Joshua O'Madadhain - */ -public class DirectionTransformer -{ - - /** - * Transforms graph (which may be of any directionality) - * into an undirected graph. (This may be useful for - * visualization tasks). - * Specifically: - *
            - *
          • Vertices are copied from graph. - *
          • Directed edges are 'converted' into a single new undirected edge in the new graph. - *
          • Each undirected edge (if any) in graph is 'recreated' with a new undirected edge in the new - * graph if create_new is true, or copied from graph otherwise. - *
          - * - * @param graph the graph to be transformed - * @param create_new specifies whether existing undirected edges are to be copied or recreated - * @param graph_factory used to create the new graph object - * @param edge_factory used to create new edges - * @return the transformed Graph - */ - public static UndirectedGraph toUndirected(Graph graph, - Factory> graph_factory, - Factory edge_factory, boolean create_new) - { - UndirectedGraph out = graph_factory.create(); - - for (V v : graph.getVertices()) - out.addVertex(v); - - for (E e : graph.getEdges()) - { - Pair endpoints = graph.getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - E to_add; - if (graph.getEdgeType(e) == EdgeType.DIRECTED || create_new) - to_add = edge_factory.create(); - else - to_add = e; - out.addEdge(to_add, v1, v2, EdgeType.UNDIRECTED); - } - return out; - } - - /** - * Transforms graph (which may be of any directionality) - * into a directed graph. - * Specifically: - *
            - *
          • Vertices are copied from graph. - *
          • Undirected edges are 'converted' into two new antiparallel directed edges in the new graph. - *
          • Each directed edge (if any) in graph is 'recreated' with a new edge in the new - * graph if create_new is true, or copied from graph otherwise. - *
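A hypothetical sketch of the undirected-to-directed conversion just described; the UndirectedSparseGraph and DirectedSparseGraph implementations, the String edge names, and the counter-based edge factory are assumptions for illustration.

import org.apache.commons.collections15.Factory;

import edu.uci.ics.jung.algorithms.transformation.DirectionTransformer;
import edu.uci.ics.jung.graph.DirectedGraph;
import edu.uci.ics.jung.graph.DirectedSparseGraph;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.UndirectedSparseGraph;

public class DirectionTransformerSketch {
    public static void main(String[] args) {
        Graph<String, String> undirected = new UndirectedSparseGraph<String, String>();
        undirected.addEdge("ab", "a", "b");

        Factory<DirectedGraph<String, String>> graphFactory =
                new Factory<DirectedGraph<String, String>>() {
                    public DirectedGraph<String, String> create() {
                        return new DirectedSparseGraph<String, String>();
                    }
                };
        // each call produces a fresh, uniquely named edge for the new graph
        Factory<String> edgeFactory = new Factory<String>() {
            private int count = 0;
            public String create() { return "e" + count++; }
        };

        // every undirected edge becomes two antiparallel directed edges
        Graph<String, String> directed =
                DirectionTransformer.toDirected(undirected, graphFactory, edgeFactory, true);
        System.out.println(directed.getEdgeCount()); // 2
    }
}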
          - * - * @param graph the graph to be transformed - * @param create_new specifies whether existing directed edges are to be copied or recreated - * @param graph_factory used to create the new graph object - * @param edge_factory used to create new edges - * @return the transformed Graph - */ - public static Graph toDirected(Graph graph, Factory> graph_factory, - Factory edge_factory, boolean create_new) - { - DirectedGraph out = graph_factory.create(); - - for (V v : graph.getVertices()) - out.addVertex(v); - - for (E e : graph.getEdges()) - { - Pair endpoints = graph.getEndpoints(e); - if (graph.getEdgeType(e) == EdgeType.UNDIRECTED) - { - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - out.addEdge(edge_factory.create(), v1, v2, EdgeType.DIRECTED); - out.addEdge(edge_factory.create(), v2, v1, EdgeType.DIRECTED); - } - else // if the edge is directed, just add it - { - V source = graph.getSource(e); - V dest = graph.getDest(e); - E to_add = create_new ? edge_factory.create() : e; - out.addEdge(to_add, source, dest, EdgeType.DIRECTED); - } - - } - return out; - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/transformation/FoldingTransformer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/transformation/FoldingTransformer.java deleted file mode 100644 index 21933194..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/transformation/FoldingTransformer.java +++ /dev/null @@ -1,325 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Apr 21, 2004 - */ -package edu.uci.ics.jung.algorithms.transformation; - -import java.util.ArrayList; -import java.util.Collection; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.Predicate; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.Hypergraph; -import edu.uci.ics.jung.graph.KPartiteGraph; - -/** - * Methods for creating a "folded" graph based on a k-partite graph or a - * hypergraph. - * - *

          A "folded" graph is derived from a k-partite graph by identifying - * a partition of vertices which will become the vertices of the new graph, copying - * these vertices into the new graph, and then connecting those vertices whose - * original analogues were connected indirectly through elements - * of other partitions.

          - * - *

          A "folded" graph is derived from a hypergraph by creating vertices based on - * either the vertices or the hyperedges of the original graph, and connecting - * vertices in the new graph if their corresponding vertices/hyperedges share a - * connection with a common hyperedge/vertex.

          - * - * @author Danyel Fisher - * @author Joshua O'Madadhain - */ -public class FoldingTransformer -{ - - /** - * Converts g into a unipartite graph whose vertex set is the - * vertices of g's partition p. For vertices - * a and b in this partition, the resultant - * graph will include the edge (a,b) if the original graph - * contains edges (a,c) and (c,b) for at least - * one vertex c. - * - *

          The vertices of the new graph are the same as the vertices of the - * appropriate partition in the old graph; the edges in the new graph are - * created by the input edge Factory.

          - * - *

          If there is more than 1 such vertex c for a given pair - * (a,b), the type of the output graph will determine whether - * it will contain parallel edges or not.

          - * - *

          This function will not create self-loops.

          - * - * @param vertex type - * @param input edge type - * @param g input k-partite graph - * @param p predicate specifying vertex partition - * @param graph_factory factory used to create the output graph - * @param edge_factory factory used to create the edges in the new graph - * @return a copy of the input graph folded with respect to the input partition - */ - public static Graph foldKPartiteGraph(KPartiteGraph g, Predicate p, - Factory> graph_factory, Factory edge_factory) - { - Graph newGraph = graph_factory.create(); - - // get vertices for the specified partition - Collection vertices = g.getVertices(p); - for (V v : vertices) - { - newGraph.addVertex(v); - for (V s : g.getSuccessors(v)) - { - for (V t : g.getSuccessors(s)) - { - if (!vertices.contains(t) || t.equals(v)) - continue; - newGraph.addVertex(t); - newGraph.addEdge(edge_factory.create(), v, t); - } - } - } - return newGraph; - } - - /** - * Converts g into a unipartite graph whose vertices are the - * vertices of g's partition p, and whose edges - * consist of collections of the intermediate vertices from other partitions. - * For vertices - * a and b in this partition, the resultant - * graph will include the edge (a,b) if the original graph - * contains edges (a,c) and (c,b) for at least - * one vertex c. - * - *

          The vertices of the new graph are the same as the vertices of the - * appropriate partition in the old graph; the edges in the new graph are - * collections of the intermediate vertices c.

          - * - *

          This function will not create self-loops.

          - * - * @param vertex type - * @param input edge type - * @param g input k-partite graph - * @param p predicate specifying vertex partition - * @param graph_factory factory used to create the output graph - * @return the result of folding g into unipartite graph whose vertices - * are those of the p partition of g - */ - public static Graph> foldKPartiteGraph(KPartiteGraph g, Predicate p, - Factory>> graph_factory) - { - Graph> newGraph = graph_factory.create(); - - // get vertices for the specified partition, copy into new graph - Collection vertices = g.getVertices(p); - - for (V v : vertices) - { - newGraph.addVertex(v); - for (V s : g.getSuccessors(v)) - { - for (V t : g.getSuccessors(s)) - { - if (!vertices.contains(t) || t.equals(v)) - continue; - newGraph.addVertex(t); - Collection v_coll = newGraph.findEdge(v, t); - if (v_coll == null) - { - v_coll = new ArrayList(); - newGraph.addEdge(v_coll, v, t); - } - v_coll.add(s); - } - } - } - return newGraph; - } - - /** - * Creates a Graph which is an edge-folded version of h, where - * hyperedges are replaced by k-cliques in the output graph. - * - *

          The vertices of the new graph are the same objects as the vertices of - * h, and a - * is connected to b in the new graph if the corresponding vertices - * in h are connected by a hyperedge. Thus, each hyperedge with - * k vertices in h induces a k-clique in the new graph.

          - * - *

          The edges of the new graph consist of collections of each hyperedge that connected - * the corresponding vertex pair in the original graph.

          - * - * @param vertex type - * @param input edge type - * @param h hypergraph to be folded - * @param graph_factory factory used to generate the output graph - * @return a copy of the input graph where hyperedges are replaced by cliques - */ - public static Graph> foldHypergraphEdges(Hypergraph h, - Factory>> graph_factory) - { - Graph> target = graph_factory.create(); - - for (V v : h.getVertices()) - target.addVertex(v); - - for (E e : h.getEdges()) - { - ArrayList incident = new ArrayList(h.getIncidentVertices(e)); - populateTarget(target, e, incident); - } - return target; - } - - - /** - * Creates a Graph which is an edge-folded version of h, where - * hyperedges are replaced by k-cliques in the output graph. - * - *

          The vertices of the new graph are the same objects as the vertices of - * h, and a - * is connected to b in the new graph if the corresponding vertices - * in h are connected by a hyperedge. Thus, each hyperedge with - * k vertices in h induces a k-clique in the new graph.

          - * - *

          The edges of the new graph are generated by the specified edge factory.

          - * - * @param vertex type - * @param input edge type - * @param h hypergraph to be folded - * @param graph_factory factory used to generate the output graph - * @param edge_factory factory used to create the new edges - * @return a copy of the input graph where hyperedges are replaced by cliques - */ - public static Graph foldHypergraphEdges(Hypergraph h, - Factory> graph_factory, Factory edge_factory) - { - Graph target = graph_factory.create(); - - for (V v : h.getVertices()) - target.addVertex(v); - - for (E e : h.getEdges()) - { - ArrayList incident = new ArrayList(h.getIncidentVertices(e)); - for (int i = 0; i < incident.size(); i++) - for (int j = i+1; j < incident.size(); j++) - target.addEdge(edge_factory.create(), incident.get(i), incident.get(j)); - } - return target; - } - - /** - * Creates a Graph which is a vertex-folded version of h, whose - * vertices are the input's hyperedges and whose edges are induced by adjacent hyperedges - * in the input. - * - *

          The vertices of the new graph are the same objects as the hyperedges of - * h, and a - * is connected to b in the new graph if the corresponding edges - * in h have a vertex in common. Thus, each vertex incident to - * k edges in h induces a k-clique in the new graph.

          - * - *

          The edges of the new graph are created by the specified factory.

          - * - * @param vertex type - * @param input edge type - * @param output edge type - * @param h hypergraph to be folded - * @param graph_factory factory used to generate the output graph - * @param edge_factory factory used to generate the output edges - * @return a transformation of the input graph whose vertices correspond to the input's hyperedges - * and edges are induced by hyperedges sharing vertices in the input - */ - public static Graph foldHypergraphVertices(Hypergraph h, - Factory> graph_factory, Factory edge_factory) - { - Graph target = graph_factory.create(); - - for (E e : h.getEdges()) - target.addVertex(e); - - for (V v : h.getVertices()) - { - ArrayList incident = new ArrayList(h.getIncidentEdges(v)); - for (int i = 0; i < incident.size(); i++) - for (int j = i+1; j < incident.size(); j++) - target.addEdge(edge_factory.create(), incident.get(i), incident.get(j)); - } - - return target; - } - - /** - * Creates a Graph which is a vertex-folded version of h, whose - * vertices are the input's hyperedges and whose edges are induced by adjacent hyperedges - * in the input. - * - *

          The vertices of the new graph are the same objects as the hyperedges of - * h, and a - * is connected to b in the new graph if the corresponding edges - * in h have a vertex in common. Thus, each vertex incident to - * k edges in h induces a k-clique in the new graph.

          - * - *

          The edges of the new graph consist of collections of each vertex incident to - * the corresponding hyperedge pair in the original graph.

          - * - * @param h hypergraph to be folded - * @param graph_factory factory used to generate the output graph - * @return a transformation of the input graph whose vertices correspond to the input's hyperedges - * and edges are induced by hyperedges sharing vertices in the input - */ - public Graph> foldHypergraphVertices(Hypergraph h, - Factory>> graph_factory) - { - Graph> target = graph_factory.create(); - - for (E e : h.getEdges()) - target.addVertex(e); - - for (V v : h.getVertices()) - { - ArrayList incident = new ArrayList(h.getIncidentEdges(v)); - populateTarget(target, v, incident); - } - return target; - } - - /** - * @param target - * @param e - * @param incident - */ - private static void populateTarget(Graph> target, T e, - ArrayList incident) - { - for (int i = 0; i < incident.size(); i++) - { - S v1 = incident.get(i); - for (int j = i+1; j < incident.size(); j++) - { - S v2 = incident.get(j); - Collection e_coll = target.findEdge(v1, v2); - if (e_coll == null) - { - e_coll = new ArrayList(); - target.addEdge(e_coll, v1, v2); - } - e_coll.add(e); - } - } - } - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/transformation/VertexPartitionCollapser.java b/gui/jung-src/edu/uci/ics/jung/algorithms/transformation/VertexPartitionCollapser.java deleted file mode 100644 index e44d05d4..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/transformation/VertexPartitionCollapser.java +++ /dev/null @@ -1,103 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.transformation; - -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.MapTransformer; - -import edu.uci.ics.jung.algorithms.blockmodel.VertexPartition; -import edu.uci.ics.jung.graph.Graph; - -/** - * This class transforms a graph with a known vertex partitioning into a graph whose - * vertices correspond to the input graph's partitions. Two vertices in the output graph - * are connected if and only if there exists at least one edge between vertices in the - * corresponding partitions of the input graph. If the output graph permits parallel edges, - * there will be an edge connecting two vertices in the new graph for each such - * edge connecting constituent vertices in the input graph. - * - *
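
[Illustrative aside, not part of the diff: a small sketch of the hyperedge-folding variant of FoldingTransformer shown above, where each hyperedge becomes a clique over its incident vertices. It assumes JUNG 2.0's SetHypergraph implementation and the usual generic signatures; the vertex and edge names are invented.]

    import java.util.Arrays;

    import org.apache.commons.collections15.Factory;

    import edu.uci.ics.jung.algorithms.transformation.FoldingTransformer;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.Hypergraph;
    import edu.uci.ics.jung.graph.SetHypergraph;
    import edu.uci.ics.jung.graph.SparseMultigraph;

    public class FoldHypergraphSketch {
        public static void main(String[] args) {
            Hypergraph<String, String> h = new SetHypergraph<String, String>();
            for (String v : new String[] { "a", "b", "c", "d" }) {
                h.addVertex(v);
            }
            h.addEdge("e1", Arrays.asList("a", "b", "c")); // 3-vertex hyperedge
            h.addEdge("e2", Arrays.asList("c", "d"));      // ordinary pair

            Factory<Graph<String, String>> graphFactory = new Factory<Graph<String, String>>() {
                public Graph<String, String> create() {
                    return new SparseMultigraph<String, String>();
                }
            };
            Factory<String> edgeFactory = new Factory<String>() {
                private int next = 0;
                public String create() { return "f" + next++; }
            };

            // "e1" induces the triangle {a,b,c}; "e2" contributes the edge {c,d}.
            Graph<String, String> folded =
                    FoldingTransformer.foldHypergraphEdges(h, graphFactory, edgeFactory);
            System.out.println(folded.getEdgeCount()); // 4
        }
    }
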

          Concept based on Danyel Fisher's GraphCollapser in JUNG 1.x. - * - */ -public class VertexPartitionCollapser -{ - protected Factory> graph_factory; - protected Factory vertex_factory; - protected Factory edge_factory; - protected Map, CV> set_collapsedv; - - /** - * Creates an instance with the specified graph and element factories. - * @param vertex_factory used to construct the vertices of the new graph - * @param edge_factory used to construct the edges of the new graph - * @param graph_factory used to construct the new graph - */ - public VertexPartitionCollapser(Factory> graph_factory, - Factory vertex_factory, Factory edge_factory) - { - this.graph_factory = graph_factory; - this.vertex_factory = vertex_factory; - this.edge_factory = edge_factory; - this.set_collapsedv = new HashMap, CV>(); - } - - /** - * Creates a new graph whose vertices correspond to the partitions of the supplied graph. - * @param partitioning - * @return a new graph whose vertices correspond to the partitions of the supplied graph - */ - public Graph collapseVertexPartitions(VertexPartition partitioning) - { - Graph original = partitioning.getGraph(); - Graph collapsed = graph_factory.create(); - - // create vertices in new graph corresponding to equivalence sets in the original graph - for (Set set : partitioning.getVertexPartitions()) - { - CV cv = vertex_factory.create(); - collapsed.addVertex(vertex_factory.create()); - set_collapsedv.put(set, cv); - } - - // create edges in new graph corresponding to edges in original graph - for (E e : original.getEdges()) - { - Collection incident = original.getIncidentVertices(e); - Collection collapsed_vertices = new HashSet(); - Map> vertex_partitions = partitioning.getVertexToPartitionMap(); - // collect the collapsed vertices corresponding to the original incident vertices - for (V v : incident) - collapsed_vertices.add(set_collapsedv.get(vertex_partitions.get(v))); - // if there's only one collapsed vertex, continue (no edges to create) - if (collapsed_vertices.size() > 1) - { - CE ce = edge_factory.create(); - collapsed.addEdge(ce, collapsed_vertices); - } - } - return collapsed; - } - - /** - * Returns a transformer from vertex sets in the original graph to collapsed vertices - * in the transformed graph. - */ - public Transformer, CV> getSetToCollapsedVertexTransformer() - { - return MapTransformer.getInstance(set_collapsedv); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/transformation/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/transformation/package.html deleted file mode 100644 index 6680095f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/transformation/package.html +++ /dev/null @@ -1,29 +0,0 @@ - - - - - - - -Mechanisms for graph transformation. These currently include: -

-  • DirectionTransformer: generates graphs where input undirected edges have been converted to directed edges, or vice versa
-  • FoldingTransformer: transforms k-partite graphs or hypergraphs into unipartite graphs
-  • VertexPartitionCollapser: transforms a graph, given a partition of its vertices into disjoint sets, into a graph in which each of these disjoint sets has been 'collapsed' into a single new vertex (a usage sketch for this transformer follows the list).
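
[Illustrative aside: a rough sketch of collapsing a two-set partition with VertexPartitionCollapser. It assumes VertexPartition accepts a (graph, Collection of vertex sets) constructor, as in JUNG 2.0; all names are invented. Incidentally, the deleted collapseVertexPartitions above appears to call collapsed.addVertex(vertex_factory.create()) where addVertex(cv) was presumably intended, so the collapsed vertices recorded in the map are never themselves added explicitly.]

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    import org.apache.commons.collections15.Factory;

    import edu.uci.ics.jung.algorithms.blockmodel.VertexPartition;
    import edu.uci.ics.jung.algorithms.transformation.VertexPartitionCollapser;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.UndirectedSparseGraph;

    public class CollapseSketch {
        public static void main(String[] args) {
            Graph<String, Integer> g = new UndirectedSparseGraph<String, Integer>();
            for (String v : new String[] { "a", "b", "c", "d" }) g.addVertex(v);
            g.addEdge(0, "a", "b"); // inside partition {a,b}
            g.addEdge(1, "b", "c"); // crosses the two partitions
            g.addEdge(2, "c", "d"); // inside partition {c,d}

            Set<Set<String>> parts = new HashSet<Set<String>>();
            parts.add(new HashSet<String>(Arrays.asList("a", "b")));
            parts.add(new HashSet<String>(Arrays.asList("c", "d")));
            VertexPartition<String, Integer> partition =
                    new VertexPartition<String, Integer>(g, parts);

            VertexPartitionCollapser<String, Integer, String, Integer> collapser =
                    new VertexPartitionCollapser<String, Integer, String, Integer>(
                            new Factory<Graph<String, Integer>>() {
                                public Graph<String, Integer> create() {
                                    return new UndirectedSparseGraph<String, Integer>();
                                }
                            },
                            new Factory<String>() {
                                private int next = 0;
                                public String create() { return "cluster" + next++; }
                            },
                            new Factory<Integer>() {
                                private int next = 100;
                                public Integer create() { return next++; }
                            });

            // Only the b-c edge spans two partitions, so one collapsed edge is created.
            Graph<String, Integer> collapsed = collapser.collapseVertexPartitions(partition);
            System.out.println(collapsed.getEdgeCount()); // 1
        }
    }
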
          - - - diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/BasicMapEntry.java b/gui/jung-src/edu/uci/ics/jung/algorithms/util/BasicMapEntry.java deleted file mode 100644 index a82aea6b..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/BasicMapEntry.java +++ /dev/null @@ -1,80 +0,0 @@ -package edu.uci.ics.jung.algorithms.util; - -import java.util.HashMap; -import java.util.Map; - -/** - * An simple minimal implementation of Map.Entry. - * - * @param the key type - * @param the value type - */ -public class BasicMapEntry implements Map.Entry { - final K key; - V value; - - /** - * Create new entry. - */ - public BasicMapEntry(K k, V v) { - value = v; - key = k; - } - - public K getKey() { - return key; - } - - public V getValue() { - return value; - } - - public V setValue(V newValue) { - V oldValue = value; - value = newValue; - return oldValue; - } - - @SuppressWarnings("unchecked") - @Override - public boolean equals(Object o) { - if (!(o instanceof Map.Entry)) - return false; - Map.Entry e = (Map.Entry)o; - Object k1 = getKey(); - Object k2 = e.getKey(); - if (k1 == k2 || (k1 != null && k1.equals(k2))) { - Object v1 = getValue(); - Object v2 = e.getValue(); - if (v1 == v2 || (v1 != null && v1.equals(v2))) - return true; - } - return false; - } - - @Override - public int hashCode() { - return (key==null ? 0 : key.hashCode()) ^ - (value==null ? 0 : value.hashCode()); - } - - @Override - public String toString() { - return getKey() + "=" + getValue(); - } - - /** - * This method is invoked whenever the value in an entry is - * overwritten by an invocation of put(k,v) for a key k that's already - * in the HashMap. - */ - void recordAccess(HashMap m) { - } - - /** - * This method is invoked whenever the entry is - * removed from the table. - */ - void recordRemoval(HashMap m) { - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/ConstantMap.java b/gui/jung-src/edu/uci/ics/jung/algorithms/util/ConstantMap.java deleted file mode 100644 index 53054d71..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/ConstantMap.java +++ /dev/null @@ -1,93 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ - -package edu.uci.ics.jung.algorithms.util; - -import java.util.Collection; -import java.util.Collections; -import java.util.Map; -import java.util.Set; - -/** - * An implementation of Map that returns the constructor-supplied - * value for any input. - * - * @param the key type - * @param the value type - */ -public class ConstantMap implements Map { - - private Map delegate; - - /** - * Creates an instance whose {@code get} method always returns {@code value}. 
- */ - public ConstantMap(V value) { - delegate = Collections.unmodifiableMap(Collections.singletonMap(null, value)); - } - - public V get(Object key) { - return delegate.get(null); - } - - public void clear() { - delegate.clear(); - } - - public boolean containsKey(Object key) { - return true; - } - - public boolean containsValue(Object value) { - return delegate.containsValue(value); - } - - public Set> entrySet() { - return delegate.entrySet(); - } - - @Override - public boolean equals(Object o) { - return delegate.equals(o); - } - - @Override - public int hashCode() { - return delegate.hashCode(); - } - - public boolean isEmpty() { - return delegate.isEmpty(); - } - - public Set keySet() { - return delegate.keySet(); - } - - public V put(K key, V value) { - return delegate.put(key, value); - } - - public void putAll(Map t) { - delegate.putAll(t); - } - - public V remove(Object key) { - return delegate.remove(key); - } - - public int size() { - return delegate.size(); - } - - public Collection values() { - return delegate.values(); - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/DiscreteDistribution.java b/gui/jung-src/edu/uci/ics/jung/algorithms/util/DiscreteDistribution.java deleted file mode 100644 index 84eadc07..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/DiscreteDistribution.java +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * - * Created on Feb 18, 2004 - */ -package edu.uci.ics.jung.algorithms.util; - -import java.util.Collection; -import java.util.Iterator; - -/** - * A utility class for calculating properties of discrete distributions. - * Generally, these distributions are represented as arrays of - * double values, which are assumed to be normalized - * such that the entries in a single array sum to 1. - * - * @author Joshua O'Madadhain - */ -public class DiscreteDistribution -{ - - /** - * Returns the Kullback-Leibler divergence between the - * two specified distributions, which must have the same - * number of elements. This is defined as - * the sum over all i of - * dist[i] * Math.log(dist[i] / reference[i]). - * Note that this value is not symmetric; see - * symmetricKL for a symmetric variant. - * @see #symmetricKL(double[], double[]) - */ - public static double KullbackLeibler(double[] dist, double[] reference) - { - double distance = 0; - - checkLengths(dist, reference); - - for (int i = 0; i < dist.length; i++) - { - if (dist[i] > 0 && reference[i] > 0) - distance += dist[i] * Math.log(dist[i] / reference[i]); - } - return distance; - } - - /** - * Returns KullbackLeibler(dist, reference) + KullbackLeibler(reference, dist). - * @see #KullbackLeibler(double[], double[]) - */ - public static double symmetricKL(double[] dist, double[] reference) - { - return KullbackLeibler(dist, reference) - + KullbackLeibler(reference, dist); - } - - /** - * Returns the squared difference between the - * two specified distributions, which must have the same - * number of elements. This is defined as - * the sum over all i of the square of - * (dist[i] - reference[i]). 
- */ - public static double squaredError(double[] dist, double[] reference) - { - double error = 0; - - checkLengths(dist, reference); - - for (int i = 0; i < dist.length; i++) - { - double difference = dist[i] - reference[i]; - error += difference * difference; - } - return error; - } - - /** - * Returns the cosine distance between the two - * specified distributions, which must have the same number - * of elements. The distributions are treated as vectors - * in dist.length-dimensional space. - * Given the following definitions - *
- *   • v = the sum over all i of dist[i] * dist[i]
- *   • w = the sum over all i of reference[i] * reference[i]
- *   • vw = the sum over all i of dist[i] * reference[i]
          - * the value returned is defined as vw / (Math.sqrt(v) * Math.sqrt(w)). - */ - public static double cosine(double[] dist, double[] reference) - { - double v_prod = 0; // dot product x*x - double w_prod = 0; // dot product y*y - double vw_prod = 0; // dot product x*y - - checkLengths(dist, reference); - - for (int i = 0; i < dist.length; i++) - { - vw_prod += dist[i] * reference[i]; - v_prod += dist[i] * dist[i]; - w_prod += reference[i] * reference[i]; - } - // cosine distance between v and w - return vw_prod / (Math.sqrt(v_prod) * Math.sqrt(w_prod)); - } - - /** - * Returns the entropy of this distribution. - * High entropy indicates that the distribution is - * close to uniform; low entropy indicates that the - * distribution is close to a Dirac delta (i.e., if - * the probability mass is concentrated at a single - * point, this method returns 0). Entropy is defined as - * the sum over all i of - * -(dist[i] * Math.log(dist[i])) - */ - public static double entropy(double[] dist) - { - double total = 0; - - for (int i = 0; i < dist.length; i++) - { - if (dist[i] > 0) - total += dist[i] * Math.log(dist[i]); - } - return -total; - } - - /** - * Throws an IllegalArgumentException if the two arrays are not of the same length. - */ - protected static void checkLengths(double[] dist, double[] reference) - { - if (dist.length != reference.length) - throw new IllegalArgumentException("Arrays must be of the same length"); - } - - /** - * Normalizes, with Lagrangian smoothing, the specified double - * array, so that the values sum to 1 (i.e., can be treated as probabilities). - * The effect of the Lagrangian smoothing is to ensure that all entries - * are nonzero; effectively, a value of alpha is added to each - * entry in the original array prior to normalization. - * @param counts - * @param alpha - */ - public static void normalize(double[] counts, double alpha) - { - double total_count = 0; - - for (int i = 0; i < counts.length; i++) - total_count += counts[i]; - - for (int i = 0; i < counts.length; i++) - counts[i] = (counts[i] + alpha) - / (total_count + counts.length * alpha); - } - - /** - * Returns the mean of the specified Collection of - * distributions, which are assumed to be normalized arrays of - * double values. - * @see #mean(double[][]) - */ - public static double[] mean(Collection distributions) - { - if (distributions.isEmpty()) - throw new IllegalArgumentException("Distribution collection must be non-empty"); - Iterator iter = distributions.iterator(); - double[] first = iter.next(); - double[][] d_array = new double[distributions.size()][first.length]; - d_array[0] = first; - for (int i = 1; i < d_array.length; i++) - d_array[i] = iter.next(); - - return mean(d_array); - } - - /** - * Returns the mean of the specified array of distributions, - * represented as normalized arrays of double values. - * Will throw an "index out of bounds" exception if the - * distribution arrays are not all of the same length. 
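
[Illustrative aside, not from the sources: a short numeric check of the distance and entropy helpers defined above. All methods use natural logarithms, so the entropy values are in nats; the example arrays are invented.]

    import edu.uci.ics.jung.algorithms.util.DiscreteDistribution;

    public class DistributionSketch {
        public static void main(String[] args) {
            double[] p = { 0.5, 0.25, 0.25 };
            double[] q = { 0.25, 0.5, 0.25 };

            // KL(p||q) = 0.5*ln(2) - 0.25*ln(2) = 0.25*ln(2), about 0.1733
            System.out.println(DiscreteDistribution.KullbackLeibler(p, q));
            // symmetric variant: KL(p||q) + KL(q||p), about 0.3466
            System.out.println(DiscreteDistribution.symmetricKL(p, q));
            // cosine similarity: 0.3125 / 0.375, about 0.8333
            System.out.println(DiscreteDistribution.cosine(p, q));
            // entropy of p: about 1.0397 nats (uniform over 3 outcomes would be ln 3, about 1.0986)
            System.out.println(DiscreteDistribution.entropy(p));

            // normalize() adds alpha to each raw count before renormalizing
            double[] counts = { 3, 1, 0 };
            DiscreteDistribution.normalize(counts, 1.0);
            // counts is now {4/7, 2/7, 1/7}
            System.out.println(counts[0] + " " + counts[1] + " " + counts[2]);
        }
    }
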
- */ - public static double[] mean(double[][] distributions) - { - double[] d_mean = new double[distributions[0].length]; - for (int j = 0; j < d_mean.length; j++) - d_mean[j] = 0; - - for (int i = 0; i < distributions.length; i++) - for (int j = 0; j < d_mean.length; j++) - d_mean[j] += distributions[i][j] / distributions.length; - - return d_mean; - } - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/Indexer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/util/Indexer.java deleted file mode 100644 index b8a215e3..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/Indexer.java +++ /dev/null @@ -1,56 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.util; - -import java.util.Collection; - -import org.apache.commons.collections15.BidiMap; -import org.apache.commons.collections15.bidimap.DualHashBidiMap; - -/** - * A class providing static methods useful for improving the - * performance of graph algorithms. - * - * @author Tom Nelson - * - */ -public class Indexer { - - /** - * Returns a BidiMap mapping each element of the collection to its - * index as encountered while iterating over the collection. The purpose - * of the index operation is to supply an O(1) replacement operation for the - * O(n) indexOf(element) method of a List - * @param - * @param collection - * @return a bidirectional map from collection elements to 0-based indices - */ - public static BidiMap create(Collection collection) { - return create(collection, 0); - } - /** - * Returns a BidiMap mapping each element of the collection to its - * index as encountered while iterating over the collection. The purpose - * of the index operation is to supply an O(1) replacement operation for the - * O(n) indexOf(element) method of a List - * @param - * @param collection - * @param start start index - * @return a bidirectional map from collection elements to start-based indices - */ - public static BidiMap create(Collection collection, int start) { - BidiMap map = new DualHashBidiMap(); - int i=start; - for(T t : collection) { - map.put(t,i++); - } - return map; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/IterativeContext.java b/gui/jung-src/edu/uci/ics/jung/algorithms/util/IterativeContext.java deleted file mode 100644 index 92bd45d3..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/IterativeContext.java +++ /dev/null @@ -1,28 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.util; - - -/** - * An interface for algorithms that proceed iteratively. - * - */ -public interface IterativeContext -{ - /** - * Advances one step. - */ - void step(); - - /** - * Returns true if this iterative process is finished, and false otherwise. 
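
[Illustrative aside: typical use of the Indexer class shown a little earlier in this diff, giving O(1) element-to-index lookups (and back) via a commons-collections15 BidiMap. The vertex names are invented.]

    import java.util.Arrays;
    import java.util.List;

    import org.apache.commons.collections15.BidiMap;

    import edu.uci.ics.jung.algorithms.util.Indexer;

    public class IndexerSketch {
        public static void main(String[] args) {
            List<String> vertices = Arrays.asList("u", "v", "w");

            BidiMap<String, Integer> index = Indexer.create(vertices);
            System.out.println(index.get("w"));   // 2  (0-based position)
            System.out.println(index.getKey(0));  // "u" (reverse lookup)

            // The two-argument form starts counting from the given index.
            BidiMap<String, Integer> oneBased = Indexer.create(vertices, 1);
            System.out.println(oneBased.get("u")); // 1
        }
    }
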
- */ - boolean done(); -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/IterativeProcess.java b/gui/jung-src/edu/uci/ics/jung/algorithms/util/IterativeProcess.java deleted file mode 100644 index fbe07f4e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/IterativeProcess.java +++ /dev/null @@ -1,174 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.algorithms.util; - - - -/** - * Provides basic infrastructure for iterative algorithms. Services provided include: - *
- *   • storage of current and max iteration count
- *   • framework for initialization, iterative evaluation, and finalization
- *   • test for convergence
- *   • etc.
- *
- * Algorithms that subclass this class are typically used in the following way:
- *
- *   FooAlgorithm foo = new FooAlgorithm(...)
- *   foo.setMaximumIterations(100); //set up conditions
- *   ...
- *   foo.evaluate(); //key method which initiates iterative process
- *   foo.getSomeResult();
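
[To make the pattern above concrete, a hypothetical subclass (the class and everything it computes are invented for this sketch): a Newton iteration for the square root of 2 that reports its own precision so evaluate()/hasConverged() can stop it.]

    import edu.uci.ics.jung.algorithms.util.IterativeProcess;

    public class SqrtProcess extends IterativeProcess {
        private double x = 1.0;                   // current estimate of sqrt(2)

        @Override
        public void step() {
            double next = 0.5 * (x + 2.0 / x);    // Newton update for x^2 = 2
            setPrecision(Math.abs(next - x));     // drives hasConverged()/done()
            x = next;
        }

        public double getResult() {
            return x;
        }

        public static void main(String[] args) {
            SqrtProcess p = new SqrtProcess();
            p.setDesiredPrecision(1e-12);
            p.setMaximumIterations(50);
            p.evaluate();                          // runs step() until converged or capped
            System.out.println(p.getResult());     // ~1.4142135623730951
            System.out.println(p.getIterations()); // a handful of Newton steps
        }
    }
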
          - * - * @author Scott White (originally written by Didier Besset) - */ -public abstract class IterativeProcess implements IterativeContext { - /** - * Number of iterations performed. - */ - private int iterations; - /** - * Maximum allowed number of iterations. - */ - private int maximumIterations = 50; - /** - * Desired precision. - */ - private double desiredPrecision = Double.MIN_VALUE; - /** - * Achieved precision. - */ - private double precision; - - - /** - * Generic constructor. - */ - public IterativeProcess() { - } - - /** - * Performs the iterative process. - * Note: this method does not return anything because Java does not - * allow mixing double, int, or objects - */ - public void evaluate() { - iterations = 0; - initializeIterations(); - while (iterations++ < maximumIterations) { - step(); - precision = getPrecision(); - if (hasConverged()) - break; - } - finalizeIterations(); - } - - /** - * Evaluate the result of the current iteration. - */ - abstract public void step(); - - /** - * Perform eventual clean-up operations - * (must be implement by subclass when needed). - */ - protected void finalizeIterations() { - } - - /** - * Returns the desired precision. - */ - public double getDesiredPrecision() { - return desiredPrecision; - } - - /** - * Returns the number of iterations performed. - */ - public int getIterations() { - return iterations; - } - - /** - * Returns the maximum allowed number of iterations. - */ - public int getMaximumIterations() { - return maximumIterations; - } - - /** - * Returns the attained precision. - */ - public double getPrecision() { - return precision; - } - - /** - * @param precision the precision to set - */ - public void setPrecision(double precision) { - this.precision = precision; - } - - /** - * - * Check to see if the result has been attained. - * @return boolean - */ - public boolean hasConverged() { - return precision < desiredPrecision; - } - - public boolean done() { - return hasConverged(); - } - - /** - * Initializes internal parameters to start the iterative process. - */ - protected void initializeIterations() { - } - - /** - * - */ - public void reset() { - } - - /** - * @return double - * @param epsilon double - * @param x double - */ - public double relativePrecision(double epsilon, double x) { - return x > desiredPrecision ? epsilon / x: epsilon; - } - - /** - * Defines the desired precision. - */ - public void setDesiredPrecision(double prec) throws IllegalArgumentException { - if (prec <= 0) - throw new IllegalArgumentException("Non-positive precision: " + prec); - desiredPrecision = prec; - } - - /** - * Defines the maximum allowed number of iterations. - */ - public void setMaximumIterations(int maxIter) throws IllegalArgumentException { - if (maxIter < 1) - throw new IllegalArgumentException("Non-positive maximum iteration: " + maxIter); - maximumIterations = maxIter; - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/KMeansClusterer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/util/KMeansClusterer.java deleted file mode 100644 index dce550f4..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/KMeansClusterer.java +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- */ -/* - * Created on Aug 9, 2004 - * - */ -package edu.uci.ics.jung.algorithms.util; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; -import java.util.Random; -import java.util.Set; - - - -/** - * Groups items into a specified number of clusters, based on their proximity in - * d-dimensional space, using the k-means algorithm. Calls to - * cluster will terminate when either of the two following - * conditions is true: - *
- *   • the number of iterations is > max_iterations
- *   • none of the centroids has moved as much as convergence_threshold since the previous iteration
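
[Illustrative aside, not part of the deleted sources: clustering four invented 2-D points into two groups with the class below; the seed is fixed only to make the run repeatable.]

    import java.util.Collection;
    import java.util.HashMap;
    import java.util.Map;

    import edu.uci.ics.jung.algorithms.util.KMeansClusterer;

    public class KMeansSketch {
        public static void main(String[] args) {
            Map<String, double[]> points = new HashMap<String, double[]>();
            points.put("a", new double[] { 0.0, 0.0 });
            points.put("b", new double[] { 0.1, 0.0 });
            points.put("c", new double[] { 5.0, 5.0 });
            points.put("d", new double[] { 5.1, 5.0 });

            KMeansClusterer<String> kmeans = new KMeansClusterer<String>(100, 0.001);
            kmeans.setSeed(42); // reproducible centroid initialization

            // May throw NotEnoughClustersException if there are fewer distinct
            // points than requested clusters.
            Collection<Map<String, double[]>> clusters = kmeans.cluster(points, 2);
            for (Map<String, double[]> cluster : clusters) {
                System.out.println(cluster.keySet()); // expect {a, b} and {c, d}
            }
        }
    }
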
          - * - * @author Joshua O'Madadhain - */ -public class KMeansClusterer -{ - protected int max_iterations; - protected double convergence_threshold; - protected Random rand; - - /** - * Creates an instance whose termination conditions are set according - * to the parameters. - */ - public KMeansClusterer(int max_iterations, double convergence_threshold) - { - this.max_iterations = max_iterations; - this.convergence_threshold = convergence_threshold; - this.rand = new Random(); - } - - /** - * Creates an instance with max iterations of 100 and convergence threshold - * of 0.001. - */ - public KMeansClusterer() - { - this(100, 0.001); - } - - /** - * Returns the maximum number of iterations. - */ - public int getMaxIterations() - { - return max_iterations; - } - - /** - * Sets the maximum number of iterations. - */ - public void setMaxIterations(int max_iterations) - { - if (max_iterations < 0) - throw new IllegalArgumentException("max iterations must be >= 0"); - - this.max_iterations = max_iterations; - } - - /** - * Returns the convergence threshold. - */ - public double getConvergenceThreshold() - { - return convergence_threshold; - } - - /** - * Sets the convergence threshold. - * @param convergence_threshold - */ - public void setConvergenceThreshold(double convergence_threshold) - { - if (convergence_threshold <= 0) - throw new IllegalArgumentException("convergence threshold " + - "must be > 0"); - - this.convergence_threshold = convergence_threshold; - } - - /** - * Returns a Collection of clusters, where each cluster is - * represented as a Map of Objects to locations - * in d-dimensional space. - * @param object_locations a map of the Objects to cluster, to - * double arrays that specify their locations in d-dimensional space. - * @param num_clusters the number of clusters to create - * @throws NotEnoughClustersException - */ - @SuppressWarnings("unchecked") - public Collection> cluster(Map object_locations, int num_clusters) - { - if (object_locations == null || object_locations.isEmpty()) - throw new IllegalArgumentException("'objects' must be non-empty"); - - if (num_clusters < 2 || num_clusters > object_locations.size()) - throw new IllegalArgumentException("number of clusters " + - "must be >= 2 and <= number of objects (" + - object_locations.size() + ")"); - - - Set centroids = new HashSet(); - - Object[] obj_array = object_locations.keySet().toArray(); - Set tried = new HashSet(); - - // create the specified number of clusters - while (centroids.size() < num_clusters && tried.size() < object_locations.size()) - { - T o = (T)obj_array[(int)(rand.nextDouble() * obj_array.length)]; - tried.add(o); - double[] mean_value = object_locations.get(o); - boolean duplicate = false; - for (double[] cur : centroids) - { - if (Arrays.equals(mean_value, cur)) - duplicate = true; - } - if (!duplicate) - centroids.add(mean_value); - } - - if (tried.size() >= object_locations.size()) - throw new NotEnoughClustersException(); - - // put items in their initial clusters - Map> clusterMap = assignToClusters(object_locations, centroids); - - // keep reconstituting clusters until either - // (a) membership is stable, or - // (b) number of iterations passes max_iterations, or - // (c) max movement of any centroid is <= convergence_threshold - int iterations = 0; - double max_movement = Double.POSITIVE_INFINITY; - while (iterations++ < max_iterations && max_movement > convergence_threshold) - { - max_movement = 0; - Set new_centroids = new HashSet(); - // calculate new mean for each cluster 
- for (Map.Entry> entry : clusterMap.entrySet()) - { - double[] centroid = entry.getKey(); - Map elements = entry.getValue(); - ArrayList locations = new ArrayList(elements.values()); - - double[] mean = DiscreteDistribution.mean(locations); - max_movement = Math.max(max_movement, - Math.sqrt(DiscreteDistribution.squaredError(centroid, mean))); - new_centroids.add(mean); - } - - // TODO: check membership of clusters: have they changed? - - // regenerate cluster membership based on means - clusterMap = assignToClusters(object_locations, new_centroids); - } - return clusterMap.values(); - } - - /** - * Assigns each object to the cluster whose centroid is closest to the - * object. - * @param object_locations a map of objects to locations - * @param centroids the centroids of the clusters to be formed - * @return a map of objects to assigned clusters - */ - protected Map> assignToClusters(Map object_locations, Set centroids) - { - Map> clusterMap = new HashMap>(); - for (double[] centroid : centroids) - clusterMap.put(centroid, new HashMap()); - - for (Map.Entry object_location : object_locations.entrySet()) - { - T object = object_location.getKey(); - double[] location = object_location.getValue(); - - // find the cluster with the closest centroid - Iterator c_iter = centroids.iterator(); - double[] closest = c_iter.next(); - double distance = DiscreteDistribution.squaredError(location, closest); - - while (c_iter.hasNext()) - { - double[] centroid = c_iter.next(); - double dist_cur = DiscreteDistribution.squaredError(location, centroid); - if (dist_cur < distance) - { - distance = dist_cur; - closest = centroid; - } - } - clusterMap.get(closest).put(object, location); - } - - return clusterMap; - } - - /** - * Sets the seed used by the internal random number generator. - * Enables consistent outputs. - */ - public void setSeed(int random_seed) - { - this.rand = new Random(random_seed); - } - - /** - * An exception that indicates that the specified data points cannot be - * clustered into the number of clusters requested by the user. - * This will happen if and only if there are fewer distinct points than - * requested clusters. (If there are fewer total data points than - * requested clusters, IllegalArgumentException will be thrown.) - * - * @author Joshua O'Madadhain - */ - @SuppressWarnings("serial") - public static class NotEnoughClustersException extends RuntimeException - { - @Override - public String getMessage() - { - return "Not enough distinct points in the input data set to form " + - "the requested number of clusters"; - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/MapBinaryHeap.java b/gui/jung-src/edu/uci/ics/jung/algorithms/util/MapBinaryHeap.java deleted file mode 100644 index bd00a828..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/MapBinaryHeap.java +++ /dev/null @@ -1,389 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- */ -/* - * - * Created on Oct 29, 2003 - */ -package edu.uci.ics.jung.algorithms.util; - -import java.util.AbstractCollection; -import java.util.Collection; -import java.util.Comparator; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.NoSuchElementException; -import java.util.Queue; -import java.util.Vector; - -import org.apache.commons.collections15.IteratorUtils; - -/** - * An array-based binary heap implementation of a priority queue, - * which also provides - * efficient update() and contains operations. - * It contains extra infrastructure (a hash table) to keep track of the - * position of each element in the array; thus, if the key value of an element - * changes, it may be "resubmitted" to the heap via update - * so that the heap can reposition it efficiently, as necessary. - * - * @author Joshua O'Madadhain - */ -public class MapBinaryHeap - extends AbstractCollection - implements Queue -{ - private Vector heap = new Vector(); // holds the heap as an implicit binary tree - private Map object_indices = new HashMap(); // maps each object in the heap to its index in the heap - private Comparator comp; - private final static int TOP = 0; // the index of the top of the heap - - /** - * Creates a MapBinaryHeap whose heap ordering - * is based on the ordering of the elements specified by c. - */ - public MapBinaryHeap(Comparator comp) - { - initialize(comp); - } - - /** - * Creates a MapBinaryHeap whose heap ordering - * will be based on the natural ordering of the elements, - * which must be Comparable. - */ - public MapBinaryHeap() - { - initialize(new ComparableComparator()); - } - - /** - * Creates a MapBinaryHeap based on the specified - * collection whose heap ordering - * will be based on the natural ordering of the elements, - * which must be Comparable. - */ - public MapBinaryHeap(Collection c) - { - this(); - addAll(c); - } - - /** - * Creates a MapBinaryHeap based on the specified collection - * whose heap ordering - * is based on the ordering of the elements specified by c. - */ - public MapBinaryHeap(Collection c, Comparator comp) - { - this(comp); - addAll(c); - } - - private void initialize(Comparator comp) - { - this.comp = comp; - clear(); - } - - /** - * @see Collection#clear() - */ - @Override - public void clear() - { - object_indices.clear(); - heap.clear(); - } - - /** - * Inserts o into this collection. - */ - @Override - public boolean add(T o) - { - int i = heap.size(); // index 1 past the end of the heap - heap.setSize(i+1); - percolateUp(i, o); - return true; - } - - /** - * Returns true if this collection contains no elements, and - * false otherwise. - */ - @Override - public boolean isEmpty() - { - return heap.isEmpty(); - } - - /** - * Returns the element at the top of the heap; does not - * alter the heap. - */ - public T peek() - { - if (heap.size() > 0) - return heap.elementAt(TOP); - else - return null; - } - - /** - * Removes the element at the top of this heap, and returns it. - * @deprecated Use {@link MapBinaryHeap#poll()} - * or {@link MapBinaryHeap#remove()} instead. - */ - @Deprecated - public T pop() throws NoSuchElementException - { - return this.remove(); - } - - /** - * Returns the size of this heap. - */ - @Override - public int size() - { - return heap.size(); - } - - /** - * Informs the heap that this object's internal key value has been - * updated, and that its place in the heap may need to be shifted - * (up or down). 
- * @param o - */ - public void update(T o) - { - // Since we don't know whether the key value increased or - // decreased, we just percolate up followed by percolating down; - // one of the two will have no effect. - - int cur = object_indices.get(o).intValue(); // current index - int new_idx = percolateUp(cur, o); - percolateDown(new_idx); - } - - /** - * @see Collection#contains(java.lang.Object) - */ - @Override - public boolean contains(Object o) - { - return object_indices.containsKey(o); - } - - /** - * Moves the element at position cur closer to - * the bottom of the heap, or returns if no further motion is - * necessary. Calls itself recursively if further motion is - * possible. - */ - private void percolateDown(int cur) - { - int left = lChild(cur); - int right = rChild(cur); - int smallest; - - if ((left < heap.size()) && - (comp.compare(heap.elementAt(left), heap.elementAt(cur)) < 0)) { - smallest = left; - } else { - smallest = cur; - } - - if ((right < heap.size()) && - (comp.compare(heap.elementAt(right), heap.elementAt(smallest)) < 0)) { - smallest = right; - } - - if (cur != smallest) - { - swap(cur, smallest); - percolateDown(smallest); - } - } - - /** - * Moves the element o at position cur - * as high as it can go in the heap. Returns the new position of the - * element in the heap. - */ - private int percolateUp(int cur, T o) - { - int i = cur; - - while ((i > TOP) && (comp.compare(heap.elementAt(parent(i)), o) > 0)) - { - T parentElt = heap.elementAt(parent(i)); - heap.setElementAt(parentElt, i); - object_indices.put(parentElt, new Integer(i)); // reset index to i (new location) - i = parent(i); - } - - // place object in heap at appropriate place - object_indices.put(o, new Integer(i)); - heap.setElementAt(o, i); - - return i; - } - - /** - * Returns the index of the left child of the element at - * index i of the heap. - * @param i - * @return the index of the left child of the element at - * index i of the heap - */ - private int lChild(int i) - { - return (i<<1) + 1; - } - - /** - * Returns the index of the right child of the element at - * index i of the heap. - * @param i - * @return the index of the right child of the element at - * index i of the heap - */ - private int rChild(int i) - { - return (i<<1) + 2; - } - - /** - * Returns the index of the parent of the element at - * index i of the heap. - * @param i - * @return the index of the parent of the element at index i of the heap - */ - private int parent(int i) - { - return (i-1)>>1; - } - - /** - * Swaps the positions of the elements at indices i - * and j of the heap. - * @param i - * @param j - */ - private void swap(int i, int j) - { - T iElt = heap.elementAt(i); - T jElt = heap.elementAt(j); - - heap.setElementAt(jElt, i); - object_indices.put(jElt, new Integer(i)); - - heap.setElementAt(iElt, j); - object_indices.put(iElt, new Integer(j)); - } - - /** - * Comparator used if none is specified in the constructor. - * @author Joshua O'Madadhain - */ - private class ComparableComparator implements Comparator - { - /** - * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object) - */ - @SuppressWarnings("unchecked") - public int compare(T arg0, T arg1) - { - if (!(arg0 instanceof Comparable) || !(arg1 instanceof Comparable)) - throw new IllegalArgumentException("Arguments must be Comparable"); - - return ((Comparable)arg0).compareTo(arg1); - } - } - - /** - * Returns an Iterator that does not support modification - * of the heap. 
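
[Illustrative aside: the usual update() idiom for the heap above, with the ordering supplied by an external priority map, as in Dijkstra-style algorithms. The map and element names are invented for this sketch.]

    import java.util.Comparator;
    import java.util.HashMap;
    import java.util.Map;

    import edu.uci.ics.jung.algorithms.util.MapBinaryHeap;

    public class HeapSketch {
        public static void main(String[] args) {
            final Map<String, Integer> priority = new HashMap<String, Integer>();
            priority.put("a", 5);
            priority.put("b", 2);
            priority.put("c", 9);

            MapBinaryHeap<String> heap = new MapBinaryHeap<String>(new Comparator<String>() {
                public int compare(String x, String y) {
                    return priority.get(x).compareTo(priority.get(y));
                }
            });
            heap.addAll(priority.keySet());
            System.out.println(heap.peek()); // "b" (smallest key)

            priority.put("c", 0);            // c's key changed outside the heap...
            heap.update("c");                // ...so ask the heap to reposition it
            System.out.println(heap.poll()); // "c"
        }
    }
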
- */ - @Override - public Iterator iterator() - { - return IteratorUtils.unmodifiableIterator(heap.iterator()); - } - - /** - * This data structure does not support the removal of arbitrary elements. - */ - @Override - public boolean remove(Object o) - { - throw new UnsupportedOperationException(); - } - - /** - * This data structure does not support the removal of arbitrary elements. - */ - @Override - public boolean removeAll(Collection c) - { - throw new UnsupportedOperationException(); - } - - /** - * This data structure does not support the removal of arbitrary elements. - */ - @Override - public boolean retainAll(Collection c) - { - throw new UnsupportedOperationException(); - } - - public T element() throws NoSuchElementException - { - T top = this.peek(); - if (top == null) - throw new NoSuchElementException(); - return top; - } - - public boolean offer(T o) - { - return add(o); - } - - public T poll() - { - T top = this.peek(); - if (top != null) - { - T bottom_elt = heap.lastElement(); - heap.setElementAt(bottom_elt, TOP); - object_indices.put(bottom_elt, new Integer(TOP)); - - heap.setSize(heap.size() - 1); // remove the last element - if (heap.size() > 1) - percolateDown(TOP); - - object_indices.remove(top); - } - return top; - } - - public T remove() - { - T top = this.poll(); - if (top == null) - throw new NoSuchElementException(); - return top; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/MapSettableTransformer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/util/MapSettableTransformer.java deleted file mode 100644 index 1aa7d507..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/MapSettableTransformer.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Created on Aug 5, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.util; - -import java.util.Map; - - -/** - * A SettableTransformer that operates on an underlying Map instance. - * Similar to MapTransformer. - * - * @author Joshua O'Madadhain - */ -public class MapSettableTransformer implements SettableTransformer -{ - protected Map map; - - /** - * Creates an instance based on m. - */ - public MapSettableTransformer(Map m) - { - this.map = m; - } - - public O transform(I input) - { - return map.get(input); - } - - public void set(I input, O output) - { - map.put(input, output); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/SelfLoopEdgePredicate.java b/gui/jung-src/edu/uci/ics/jung/algorithms/util/SelfLoopEdgePredicate.java deleted file mode 100644 index a92c3b8d..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/SelfLoopEdgePredicate.java +++ /dev/null @@ -1,23 +0,0 @@ -package edu.uci.ics.jung.algorithms.util; - -import org.apache.commons.collections15.Predicate; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * A Predicate that returns true if the input edge's - * endpoints in the input graph are identical. (Thus, an edge which connects - * its sole incident vertex to itself). 
- * - * @param - * @param - */ -public class SelfLoopEdgePredicate implements Predicate,E>> { - - public boolean evaluate(Context,E> context) { - Pair endpoints = context.graph.getEndpoints(context.element); - return endpoints.getFirst().equals(endpoints.getSecond()); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/SettableTransformer.java b/gui/jung-src/edu/uci/ics/jung/algorithms/util/SettableTransformer.java deleted file mode 100644 index 5e5168ab..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/SettableTransformer.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Created on Aug 5, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.algorithms.util; - -import org.apache.commons.collections15.Transformer; - -/** - * An interface for classes that can set the value to be returned (from transform()) - * when invoked on a given input. - * - * @author Joshua O'Madadhain - */ -public interface SettableTransformer extends Transformer -{ - /** - * Sets the value (output) to be returned by a call to - * transform(input)). - * @param input - * @param output - */ - public void set(I input, O output); -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/WeightedChoice.java b/gui/jung-src/edu/uci/ics/jung/algorithms/util/WeightedChoice.java deleted file mode 100644 index d9590b26..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/WeightedChoice.java +++ /dev/null @@ -1,193 +0,0 @@ -/** - * Copyright (c) 2009, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Jan 8, 2009 - * - */ -package edu.uci.ics.jung.algorithms.util; - -import java.util.ArrayList; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Queue; -import java.util.Random; - -/** - * Selects items according to their probability in an arbitrary probability - * distribution. The distribution is specified by a {@code Map} from - * items (of type {@code T}) to weights of type {@code Number}, supplied - * to the constructor; these weights are normalized internally to act as - * probabilities. - * - *
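
[Illustrative aside: MapSettableTransformer, shown a little earlier in this diff, is the Map-backed implementation of the SettableTransformer interface above. A sketch of the set/transform round trip, with invented keys and values:]

    import java.util.HashMap;
    import java.util.Map;

    import edu.uci.ics.jung.algorithms.util.MapSettableTransformer;
    import edu.uci.ics.jung.algorithms.util.SettableTransformer;

    public class SettableTransformerSketch {
        public static void main(String[] args) {
            Map<String, Double> weights = new HashMap<String, Double>();
            SettableTransformer<String, Double> t =
                    new MapSettableTransformer<String, Double>(weights);

            t.set("e1", 2.5);                       // record an output for input "e1"
            System.out.println(t.transform("e1"));  // 2.5
            System.out.println(t.transform("e2"));  // null: nothing set for "e2"
        }
    }
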

          This implementation selects items in O(1) time, and requires O(n) space. - * - * @author Joshua O'Madadhain - */ -public class WeightedChoice -{ - private List item_pairs; - private Random random; - - /** - * The default minimum value that is treated as a valid probability - * (as opposed to rounding error from floating-point operations). - */ - public static final double DEFAULT_THRESHOLD = 0.00000000001; - - /** - * Equivalent to {@code this(item_weights, new Random(), DEFAULT_THRESHOLD)}. - * @param item_weights - */ - public WeightedChoice(Map item_weights) - { - this(item_weights, new Random(), DEFAULT_THRESHOLD); - } - - /** - * Equivalent to {@code this(item_weights, new Random(), threshold)}. - */ - public WeightedChoice(Map item_weights, double threshold) - { - this(item_weights, new Random(), threshold); - } - - /** - * Equivalent to {@code this(item_weights, random, DEFAULT_THRESHOLD)}. - */ - public WeightedChoice(Map item_weights, Random random) - { - this(item_weights, random, DEFAULT_THRESHOLD); - } - - /** - * Creates an instance with the specified mapping from items to weights, - * random number generator, and threshold value. - * - *

          The mapping defines the weight for each item to be selected; this - * will be proportional to the probability of its selection. - *

          The random number generator specifies the mechanism which will be - * used to provide uniform integer and double values. - *

          The threshold indicates default minimum value that is treated as a valid - * probability (as opposed to rounding error from floating-point operations). - */ - public WeightedChoice(Map item_weights, Random random, - double threshold) - { - if (item_weights.isEmpty()) - throw new IllegalArgumentException("Item weights must be non-empty"); - - int item_count = item_weights.size(); - item_pairs = new ArrayList(item_count); - - double sum = 0; - for (Map.Entry entry : item_weights.entrySet()) - { - double value = entry.getValue().doubleValue(); - if (value <= 0) - throw new IllegalArgumentException("Weights must be > 0"); - sum += value; - } - double bucket_weight = 1.0 / item_weights.size(); - - Queue light_weights = new LinkedList(); - Queue heavy_weights = new LinkedList(); - for (Map.Entry entry : item_weights.entrySet()) - { - double value = entry.getValue().doubleValue() / sum; - enqueueItem(entry.getKey(), value, bucket_weight, light_weights, heavy_weights); - } - - // repeat until both queues empty - while (!heavy_weights.isEmpty() || !light_weights.isEmpty()) - { - ItemPair heavy_item = heavy_weights.poll(); - ItemPair light_item = light_weights.poll(); - double light_weight = 0; - T light = null; - T heavy = null; - if (light_item != null) - { - light_weight = light_item.weight; - light = light_item.light; - } - if (heavy_item != null) - { - heavy = heavy_item.heavy; - // put the 'left over' weight from the heavy item--what wasn't - // needed to make up the difference between the light weight and - // 1/n--back in the appropriate queue - double new_weight = heavy_item.weight - (bucket_weight - light_weight); - if (new_weight > threshold) - enqueueItem(heavy, new_weight, bucket_weight, light_weights, heavy_weights); - } - light_weight *= item_count; - - item_pairs.add(new ItemPair(light, heavy, light_weight)); - } - - this.random = random; - } - - /** - * Adds key/value to the appropriate queue. Keys with values less than - * the threshold get added to {@code light_weights}, all others get added - * to {@code heavy_weights}. - */ - private void enqueueItem(T key, double value, double threshold, - Queue light_weights, Queue heavy_weights) - { - if (value < threshold) - light_weights.offer(new ItemPair(key, null, value)); - else - heavy_weights.offer(new ItemPair(null, key, value)); - } - - /** - * Sets the seed used by the internal random number generator. - */ - public void setRandomSeed(long seed) - { - this.random.setSeed(seed); - } - - /** - * Retrieves an item with probability proportional to its weight in the - * {@code Map} provided in the input. - */ - public T nextItem() - { - ItemPair item_pair = item_pairs.get(random.nextInt(item_pairs.size())); - if (random.nextDouble() < item_pair.weight) - return item_pair.light; - return item_pair.heavy; - } - - /** - * Manages light object/heavy object/light conditional probability tuples. 
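
[Illustrative aside, not from the sources: a hypothetical sampling run against the class above. The item names, weights, and counts are invented; since weights are normalized internally, drawing many items should roughly reproduce their relative proportions.]

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Random;

    import edu.uci.ics.jung.algorithms.util.WeightedChoice;

    public class WeightedChoiceSketch {
        public static void main(String[] args) {
            Map<String, Double> weights = new HashMap<String, Double>();
            weights.put("common", 8.0);
            weights.put("uncommon", 1.5);
            weights.put("rare", 0.5);

            WeightedChoice<String> chooser =
                    new WeightedChoice<String>(weights, new Random(7));

            Map<String, Integer> counts = new HashMap<String, Integer>();
            for (int i = 0; i < 10000; i++) {
                String item = chooser.nextItem();
                Integer c = counts.get(item);
                counts.put(item, c == null ? 1 : c + 1);
            }
            // Roughly 8000 / 1500 / 500, up to sampling noise.
            System.out.println(counts);
        }
    }
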
- */ - private class ItemPair - { - T light; - T heavy; - double weight; - - private ItemPair(T light, T heavy, double weight) - { - this.light = light; - this.heavy = heavy; - this.weight = weight; - } - - @Override - public String toString() - { - return String.format("[L:%s, H:%s, %.3f]", light, heavy, weight); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/algorithms/util/package.html b/gui/jung-src/edu/uci/ics/jung/algorithms/util/package.html deleted file mode 100644 index 58c5f591..00000000 --- a/gui/jung-src/edu/uci/ics/jung/algorithms/util/package.html +++ /dev/null @@ -1,32 +0,0 @@ - - - - - - - -Provides general algorithmic utilities. These include: -

• DiscreteDistribution: calculates statistical measures on discrete probability distributions represented as double arrays
• KMeansClusterer: uses the k-means algorithm to cluster points in d-dimensional space into k clusters
• MapBinaryHeap: a binary heap implementation that permits efficient element access and update operations
• RandomLocationTransformer: a class that randomly assigns 2D coordinates to items (default initializer for iterative Layouts)
• SettableTransformer: an extension of Transformer that allows mutation of the transformation
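As a concrete illustration of the DiscreteDistribution entry above (an illustration only, not the actual JUNG API), a measure of this kind on double[] distributions might look like:

    // Illustrative only; the real DiscreteDistribution may use different method names.
    final class DistributionMeasures {
        /** Kullback-Leibler divergence D(p || q) of two discrete distributions. */
        static double klDivergence(double[] p, double[] q) {
            double kl = 0.0;
            for (int i = 0; i < p.length; i++) {
                if (p[i] > 0.0 && q[i] > 0.0) {
                    kl += p[i] * Math.log(p[i] / q[i]);   // contribution of outcome i
                }
            }
            return kl;
        }
    }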
          - - - diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/AKDotLayout.java b/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/AKDotLayout.java deleted file mode 100644 index dc418dd2..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/AKDotLayout.java +++ /dev/null @@ -1,1240 +0,0 @@ -package edu.uci.ics.jung.contrib.algorithms.layout; - -import edu.uci.ics.jung.algorithms.layout.AbstractLayout; -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.DirectedSparseGraph; -import edu.uci.ics.jung.graph.DirectedSparseMultigraph; -import edu.uci.ics.jung.graph.Graph; -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.*; -import java.util.logging.Level; -import java.util.logging.Logger; -import org.apache.commons.collections15.SortedBag; -import org.apache.commons.collections15.bag.TreeBag; - -public class AKDotLayout extends AbstractLayout { - - private final static Logger logger = - Logger.getLogger("edu.uci.ics.jung.contrib.algorithms.layout"); - protected double vertexSpacing = 20.0; - List vertexTable; - Map inverseVertexTable; - Graph dag; - Graph tree; - Graph virtualGraph; - Queue edgeQueue; - int edgeCounter; - double width, height; - int[] xBestCoords, yBestCoords; - Map ranks; - int maxRank; - Map cutValues; - List edgesCopy; - int badness; - Ordering bestOrdering; - int xBestLength; - public int nodeSeparation; - public int rankSeparation; - private int coordPrecision; - public static final int MAX_RANK_ITERATIONS = 10; - public static final int CYCLE_SIZE = 5; - public static final int MAX_ORDER_ITERATIONS = 8; - public static final int MAX_POSITION_ITERATIONS = 16; - public static final int OMEGA_RR = 1; - public static final int OMEGA_VR = 2; - public static final int OMEGA_VV = 8; - - public AKDotLayout(DirectedGraph graph) { - super(graph, new Dimension(600, 600)); - } - - public void initialize() { - long tm = System.currentTimeMillis(); - - nodeSeparation = 75; - rankSeparation = 60; - - DirectedGraph digraph = (DirectedGraph) getGraph(); - List> components = getComponentsForCut(digraph, null); - - logger.log(Level.FINEST, "Processing {0} component(s).", components.size()); - - //List> subLayouts = new ArrayList>(); - double minX = (double) nodeSeparation; - for (Set comp : components) { - DirectedGraph gr = getSubgraphWithVertices(digraph, comp); - AKDotLayout layout = new AKDotLayout(gr); - layout.rankSeparation = rankSeparation; - layout.nodeSeparation = nodeSeparation; - //subLayouts.add(layout); - layout.doLayout(); - - for (V v : comp) { - Point2D p = layout.transform(v); - p.setLocation(p.getX() + minX, p.getY() + (double) rankSeparation); - setLocation(v, p); - } - - minX += width + (double) nodeSeparation; - } - - logger.log(Level.FINEST, "Layout took {0} milliseconds", - System.currentTimeMillis() - tm); - } - - public void reset() { - initialize(); - } - - @Override - public Dimension getSize() { - return new Dimension((int) Math.ceil(width), (int) Math.ceil(height)); - } - - @Override - public void setSize(Dimension size) { - throw new UnsupportedOperationException("Size of dot layout is determined by dot!"); - } - - public static DirectedGraph getSubgraphWithVertices(DirectedGraph gr, Collection verts) { - DirectedGraph sub = new DirectedSparseMultigraph(); - - for (V v : verts) { - sub.addVertex(v); - } - - for (E e : gr.getEdges()) { - V s = gr.getSource(e); - V d = gr.getDest(e); - if (sub.containsVertex(s) && sub.containsVertex(d)) { - sub.addEdge(e, s, d); - 
} - } - - return sub; - } - - public static List> getComponentsForCut(Graph gr, E cut) { - List> comps = new ArrayList>(); - Set verts = new HashSet(gr.getVertices()); - - while (!verts.isEmpty()) { - Set comp = new HashSet(); - addToComponent(verts.iterator().next(), gr, comp, cut); - comps.add(comp); - verts.removeAll(comp); - } - - return comps; - } - - public static List> getComponents(Graph gr) { - return getComponentsForCut(gr, null); - } - - public static void addToComponent(V vert, Graph gr, Set comp, E cut) { - comp.add(vert); - for (E e : gr.getIncidentEdges(vert)) { - if (e != cut) { - V next = gr.getOpposite(vert, e); - if (!comp.contains(next)) { - addToComponent(next, gr, comp, cut); - } - } - } - } - - public static Collection trySortedSet(Collection coll) { - Collection sorted; - try { - sorted = new TreeSet(coll); - } catch (ClassCastException e) { - sorted = coll; - } - return sorted; - } - - private void doLayout() { - if (graph.getVertexCount() == 0) { - return; - } - logger.log(Level.FINEST, - "Laying out graph ({0}, {1})", - new Object[]{graph.getVertexCount(), graph.getEdgeCount()}); - - dagify(); - normalize(); // for debug purposes - logger.log(Level.FINEST, "init ranks: {0}", ranks); - - rank(); - logger.log(Level.FINEST, "final ranks: {0}", ranks); - logger.log(Level.FINEST, "max rank: {0}", maxRank); - - ordering(); - position(); - } - - private void dagify() { - dag = new DirectedSparseGraph(); - ranks = new HashMap(); - vertexTable = new ArrayList(getGraph().getVertexCount()); - inverseVertexTable = new HashMap(); - - edgeCounter = 0; - - Collection srt = trySortedSet(getGraph().getVertices()); - V root = srt.iterator().next(); - - inverseVertexTable.put(root, 0); - vertexTable.add(root); - dag.addVertex(0); - addToDag(root, 0, 0); - - logger.log(Level.FINEST, "dag ({0}, {1})", - new Object[]{dag.getVertexCount(), dag.getEdgeCount()}); - logger.log(Level.FINEST, "{0}", dag); - } - - private void addToDag(V vertex, int vertexId, int rnk) { - ranks.put(vertexId, rnk); - for (E in : trySortedSet(graph.getInEdges(vertex))) { - V src = graph.getSource(in); - if (inverseVertexTable.containsKey(src)) { - int srcId = inverseVertexTable.get(src); - if (srcId != vertexId - && dag.findEdge(srcId, vertexId) == null - && dag.findEdge(vertexId, srcId) == null) { // merge multi-edges, ignore self-loops - if (ranks.get(srcId) > rnk) { - dag.addEdge(edgeCounter++, vertexId, srcId); // reverse edge - } else { - dag.addEdge(edgeCounter++, srcId, vertexId); - } - } - } else { - int srcId = vertexTable.size(); - inverseVertexTable.put(src, srcId); - vertexTable.add(src); - dag.addVertex(srcId); - dag.addEdge(edgeCounter++, srcId, vertexId); - addToDag(src, srcId, rnk - 1); - } - } - - for (E out : trySortedSet(graph.getOutEdges(vertex))) { - V dest = graph.getDest(out); - if (inverseVertexTable.containsKey(dest)) { - int destId = inverseVertexTable.get(dest); - if (destId != vertexId - && dag.findEdge(vertexId, destId) == null - && dag.findEdge(destId, vertexId) == null) { // merge multi-edges, ignore self-loops - if (ranks.get(destId) < rnk) { - dag.addEdge(edgeCounter++, destId, vertexId); // reverse edge - } else { - dag.addEdge(edgeCounter++, vertexId, destId); - } - } - } else { - int destId = vertexTable.size(); - inverseVertexTable.put(dest, destId); - vertexTable.add(dest); - dag.addVertex(destId); - dag.addEdge(edgeCounter++, vertexId, destId); - addToDag(dest, destId, rnk + 1); - } -// if (dag.containsVertex(d)) { -// if (dag.findEdge(v,d)==null) { // merge multi-edges 
-// if (ranks.get(d) < rnk) { -// dag.addEdge(out, d, v); // reverse edge -// } else { -// dag.addEdge(out, v, d); -// } -// } -// } else { -// dag.addVertex(d); -// dag.addEdge(out, v, d); -// addToDag(d, rnk+1); -// } - } - } - - private void rank() { - feasibleTree(); - logger.log(Level.FINEST, "init tree ({0}, {1}) (comps: {2})", - new Object[]{ - tree.getVertexCount(), - tree.getEdgeCount(), - getComponents(tree).size()}); - - int iter = 0; - int minBadness = Integer.MAX_VALUE; - - edgeQueue = new LinkedList(); - for (int e : dag.getEdges()) { - if (!tree.containsEdge(e)) { - edgeQueue.add(e); - } - } - - while (updateCutValues()) { - logger.log(Level.FINEST, "Cut values: {0}", cutValues); - - // anti-cycling: once iter hits MAX_ITERATIONS, watch for CYCLE_SIZE - // more iterations to find a local minimum. Try to break on that local - // min in the next CYCLE_SIZE iterations. Otherwise, break anyway. - if (iter > MAX_RANK_ITERATIONS) { - //badness = cutValues.size(); - if (iter < MAX_RANK_ITERATIONS + CYCLE_SIZE) { - if (badness < minBadness) { - minBadness = badness; - } - } else if (iter < MAX_RANK_ITERATIONS + CYCLE_SIZE + CYCLE_SIZE) { - if (badness <= minBadness) { - break; - } - } else { - break; - } - } - - ++iter; - } - logger.log(Level.FINEST, "rank(): cut values: {0}", cutValues); - - logger.log(Level.FINEST, "final tree ({0}, {1}) (comps: {2})", - new Object[]{ - tree.getVertexCount(), - tree.getEdgeCount(), - getComponents(tree).size()}); - - ranksFromFeasibleTree(); - normalize(); - balance(); - } - - private void feasibleTree() { - initRank(); - - Graph gr = dag; - int root = gr.getVertices().iterator().next(); - int numVerts = gr.getVertexCount(); - - - while (tightTree(root) < numVerts) { - int minSlack = Integer.MAX_VALUE; - boolean head, tail; - boolean incidentHead = false; - int src, dest; - for (int e : gr.getEdges()) { - if (!tree.containsEdge(e)) { - src = gr.getSource(e); - dest = gr.getDest(e); - head = tree.containsVertex(dest); - tail = tree.containsVertex(src); - - if (head != tail) { - int slack = - ranks.get(dest) - - ranks.get(src) - 1; - if (slack < minSlack) { - minSlack = slack; - incidentHead = head; - } - } - } - } - - int delta = (incidentHead) ? 
-minSlack : minSlack; - - for (int v : tree.getVertices()) { - ranks.put(v, ranks.get(v) + delta); - } - - logger.log(Level.FINEST, "Ranks: {0}", ranks); - } - - logger.log(Level.FINEST, "Tree: {0}", tree); - } - - private void initRank() { - ranks = new HashMap(); - - Set verts = new HashSet(dag.getVertices()); - Set marked = new HashSet(); - - int rnk = 0; - - // vertices marked for removal - Collection rem = new ArrayList(verts.size()); - - while (!verts.isEmpty()) { - for (int v : verts) { - boolean allMarked = true; - for (int e : dag.getInEdges(v)) { - if (!marked.contains(e)) { - allMarked = false; - break; - } - } - - if (allMarked) { - rem.add(v); - } - } - - for (int v : rem) { - for (int e : dag.getOutEdges(v)) { - marked.add(e); - } - ranks.put(v, rnk); - verts.remove(v); - } - - ++rnk; - rem.clear(); - } - } - - private int tightTree(int root) { - tree = new DirectedSparseMultigraph(); - tree.addVertex(root); - tightTreeForVert(root); - return tree.getVertexCount(); - } - - private void tightTreeForVert(int v) { - for (int e : dag.getInEdges(v)) { - int src = dag.getSource(e); - if (!tree.containsVertex(src) - && (ranks.get(v) - ranks.get(src)) == 1) { - tree.addVertex(src); - tree.addEdge(e, src, v); - tightTreeForVert(src); - } - } - - for (int e : dag.getOutEdges(v)) { - int dest = dag.getDest(e); - if (!tree.containsVertex(dest) - && (ranks.get(dest) - ranks.get(v)) == 1) { - tree.addVertex(dest); - tree.addEdge(e, v, dest); - tightTreeForVert(dest); - } - } - } - - private boolean updateCutValues() { - cutValues = new HashMap(); - Graph gr = dag; - - int treeEdge = -1, outsideEdge = -1; - int outsideSrc = -1, outsideDest = -1; - - badness = 0; - - for (int e : tree.getEdges()) { - List> comps = getComponentsForCut(tree, e); - int hd, tl; - if (comps.get(0).contains(gr.getSource(e))) { - tl = 0; - hd = 1; - } else { - tl = 1; - hd = 0; - } - - int cut = 1; - int minSlack = Integer.MAX_VALUE; - int slack; - //Collections.shuffle(edgesCopy); - - - for (int e1 : edgeQueue) { - int s = gr.getSource(e1); - int d = gr.getDest(e1); - if (comps.get(tl).contains(s) - && comps.get(hd).contains(d)) { - ++cut; - } else if (comps.get(tl).contains(d) - && comps.get(hd).contains(s)) { - --cut; - - if (!tree.containsEdge(e1)) { - slack = ranks.get(d) - ranks.get(s) - 1; - if (slack < minSlack) { - if (treeEdge == -1) { - outsideEdge = e1; - outsideSrc = s; - outsideDest = d; - } - minSlack = slack; - } - } - } - } - - if (cut < 0) { - if (treeEdge == -1) { - treeEdge = e; - } - badness -= cut; - cutValues.put(e, cut); - } - - - } - - if (treeEdge != -1 && outsideEdge != -1) { - tree.removeEdge(treeEdge); - edgeQueue.add(treeEdge); - - edgeQueue.remove(outsideEdge); - tree.addEdge(outsideEdge, outsideSrc, outsideDest); - return true; - } else { - return false; - } - } - - private void ranksFromFeasibleTree() { - // use the feasible tree to reconstruct ranks - ranks = new HashMap(); - - setRank(tree.getVertices().iterator().next(), 0); - } - - private void setRank(int v, int rnk) { - if (!ranks.containsKey(v)) { - ranks.put(v, rnk); - for (int in : tree.getInEdges(v)) { - setRank(tree.getSource(in), rnk - 1); - } - - for (int out : tree.getOutEdges(v)) { - setRank(tree.getDest(out), rnk + 1); - } - } - } - - private void normalize() { - // normalize ranks - int minRank = Integer.MAX_VALUE; - maxRank = Integer.MIN_VALUE; - for (int r : ranks.values()) { - if (r < minRank) { - minRank = r; - } - if (r > maxRank) { - maxRank = r; - } - } - - if (minRank != 0) { - maxRank -= minRank; - for (int 
v : ranks.keySet()) { - ranks.put(v, ranks.get(v) - minRank); - } - } - } - - private void balance() { - // TODO Auto-generated method stub - } - - private void ordering() { - initVirtualGraph(); - - Ordering ord; - - ord = initOrdering(true); - bestOrdering = new Ordering(ord); - findBestOrdering(ord); - - ord = initOrdering(false); - findBestOrdering(ord); - - for (int i = 0; i < 8; ++i) { - bestOrdering.transpose(i); - } - } - - private void initVirtualGraph() { - virtualGraph = new DirectedSparseGraph(); - for (int v : dag.getVertices()) { - virtualGraph.addVertex(v); - } - - int vertexCounter = vertexTable.size(); - - for (int e : dag.getEdges()) { - int s = dag.getSource(e); - int t = dag.getDest(e); - int rs = ranks.get(s); - int rt = ranks.get(t); - if (rt - rs == 1) { - virtualGraph.addEdge(e, s, t); - } else { - if (tree.containsEdge(e)) { - tree.removeEdge(e); - } - - for (int i = rs + 1; i < rt; ++i) { - virtualGraph.addVertex(vertexCounter); - tree.addVertex(vertexCounter); - ranks.put(vertexCounter, i); - if (i == rs + 1) { - virtualGraph.addEdge(edgeCounter, s, vertexCounter); - tree.addEdge(edgeCounter, s, vertexCounter); - } else { - virtualGraph.addEdge(edgeCounter, vertexCounter - 1, vertexCounter); - tree.addEdge(edgeCounter, vertexCounter - 1, vertexCounter); - } - - ++edgeCounter; - ++vertexCounter; - } - - virtualGraph.addEdge(edgeCounter++, vertexCounter - 1, t); - // if we don't add the final edge to the tree, it will always stay a tree - } - } - } - - private void findBestOrdering(Ordering ord) { - for (int i = 0; i < MAX_ORDER_ITERATIONS; ++i) { - ord.wmedian(i); - ord.transpose(i); - ord.updateCrossings(); - logger.log(Level.FINEST, "findBestOrdering(): crossings: {0}", - ord.crossings); - if (ord.crossings < bestOrdering.crossings) { - bestOrdering = new Ordering(ord); - logger.log(Level.FINEST, "best crossings: {0}", - bestOrdering.crossings); - } - } - } - - private Ordering initOrdering(boolean direction) { - Ordering ord = new Ordering(maxRank + 1, virtualGraph); - Queue q = new LinkedList(); - Collection adjacent; - for (Integer max : tree.getVertices()) { - adjacent = (direction) ? tree.getInEdges(max) : tree.getOutEdges(max); - if (adjacent.isEmpty()) { // minimal (or maximal) vertex - q.add(max); - Integer v; - while ((v = q.poll()) != null) { - if (!ord.contains(v)) { - ord.add(ranks.get(v), v); - adjacent = (direction) ? 
tree.getOutEdges(v) : tree.getInEdges(v); - - for (Integer e : adjacent) { - Integer v1 = tree.getOpposite(v, e); - - // note this check is performed when v1 is added _and_ removed - if (!ord.contains(v1)) { - q.add(v1); - } - } - } - } - } - } - - //orderTreeVertex(root, ord); - ord.updateCrossings(); - - return ord; - } - -// private void orderTreeVertex(int v, Ordering ord) { -// if (!ord.contains(v)) { -// ord.add(ranks.get(v), v); -// for (int v1 : tree.getNeighbors(v)) orderTreeVertex(v1, ord); -// } -// } - private void position() { - yCoordinate(); - xCoordinate(); - - applyPosition(); - } - - private void yCoordinate() { - yBestCoords = new int[bestOrdering.numRanks()]; - for (int i = 0; i < yBestCoords.length; ++i) { - yBestCoords[i] = rankSeparation * i; - } - // TODO: tweak to fix slope abuse - } - - private void xCoordinate() { - int[] xCoords = initXCoord(); - - coordPrecision = 4; - - for (int i = 0; i < xCoords.length; ++i) { - xCoords[i] <<= coordPrecision; - } - - xBestCoords = new int[xCoords.length]; - for (int i = 0; i < xBestCoords.length; ++i) { - xBestCoords[i] = xCoords[i]; - } - xBestLength = xLength(xBestCoords); - int len; - for (int i = 0; i < MAX_POSITION_ITERATIONS; ++i) { - medianPos(i, xCoords); - minEdge(i, xCoords); - minNode(i, xCoords); - minPath(i, xCoords); - packCut(i, xCoords); - - len = xLength(xCoords); - if (len <= xBestLength) { - xBestLength = len; - for (int j = 0; j < xCoords.length; ++j) { - xBestCoords[j] = xCoords[j]; - } - } - } - - //snapToGrid(); - - for (int i = 0; i < xCoords.length; ++i) { - xBestCoords[i] >>= coordPrecision; - } - - coordPrecision = 0; - - normaliseXCoords(); - } - -// private void snapToGrid() { -// int gridsize = (nodeSeparation< rank : bestOrdering.lists) { -// int minpos = 0; -// for (int v : rank) { -// int pos = xBestCoords[v]; -// int gridBelow = (xBestCoords[v] / gridsize) * gridsize; -// -// pos = gridBelow; -// -// if (pos < minpos) pos = minpos; -// xBestCoords[v] = pos; -// minpos = pos + (nodeSeparation< maxPos) { - maxPos = c; - } - } - - for (int i = 0; i < xBestCoords.length; ++i) { - xBestCoords[i] -= minPos; - } - width = maxPos - minPos; - } - - private int[] initXCoord() { - int[] xCoords = new int[virtualGraph.getVertexCount()]; - - int maxRankWidth = 0; - for (List rank : bestOrdering.lists) { - if (rank.size() > maxRankWidth) { - maxRankWidth = rank.size(); - } - } - - width = (maxRankWidth - 1) * (nodeSeparation); - height = (bestOrdering.numRanks() - 1) * (rankSeparation); - - for (int i = 0; i < bestOrdering.numRanks(); ++i) { - List rank = bestOrdering.lists[i]; - int offset = (maxRankWidth - rank.size()) * (nodeSeparation << coordPrecision) / 2; - for (int j = 0; j < rank.size(); ++j) { - xCoords[rank.get(j)] = offset + ((nodeSeparation << coordPrecision) * j); - } - } - - return xCoords; - } - - private int median(List lst) { - if (lst.size() % 2 == 0) { - return (lst.get(lst.size() / 2 - 1) + lst.get(lst.size() / 2)) / 2; - } else { - return lst.get(lst.size() / 2); - } - } - - private int mean(List lst) { - int tot = 0; - for (int val : lst) { - tot += val; - } - return tot / lst.size(); - } - - private void medianPos(int iter, int[] coords) { - int start, end, dir; - if (iter % 4 < 2) { - start = 0; - end = bestOrdering.numRanks() - 1; - dir = 1; - } else { - start = bestOrdering.numRanks() - 1; - end = 0; - dir = -1; - } - - int nudgeSize = 4; - - for (int r = start; r * dir <= end * dir; r += dir) { - List rnk = bestOrdering.lists[r]; -// int totalOffset = 0; -// int num = 0; - - 
int minPos; - int maxPos; - int mid; - if (rnk.size() % 2 == 0) { - mid = rnk.size() / 2 - iter % 2; - } else { - mid = rnk.size() / 2; - } - - // place the center node - List ch = new ArrayList(); - int v = rnk.get(mid); - for (int n : virtualGraph.getNeighbors(v)) { - ch.add(coords[n]); - } - Collections.sort(ch); - - int newPos; - int nudge; - int goal; - if (!ch.isEmpty()) { - goal = (iter % 2 == 0) ? median(ch) : mean(ch); - nudge = (goal - coords[v]) / nudgeSize; - coords[v] += nudge; - } - - - maxPos = coords[v] - (nodeSeparation << coordPrecision); - minPos = coords[v] + (nodeSeparation << coordPrecision); - - for (int i = 1; i < rnk.size() / 2 + 1; ++i) { - - // place a node to the left of mid - - if (mid - i > 0) { - v = rnk.get(mid - i); - - ch = new ArrayList(); - for (int n : virtualGraph.getNeighbors(v)) { - ch.add(coords[n]); - } - Collections.sort(ch); - - newPos = coords[v]; - if (!ch.isEmpty()) { - goal = (iter % 2 == 0) ? median(ch) : mean(ch); - nudge = (goal - coords[v]) / nudgeSize; - newPos += nudge; - } - - if (newPos > maxPos) { - newPos = maxPos; - } - coords[v] = newPos; - maxPos = newPos - (nodeSeparation << coordPrecision); - } - - // place a node to the right of mid - - if (mid + i < rnk.size()) { - v = rnk.get(mid + i); - - ch = new ArrayList(); - for (int n : virtualGraph.getNeighbors(v)) { - ch.add(coords[n]); - } - Collections.sort(ch); - - newPos = coords[v]; - if (!ch.isEmpty()) { - goal = (iter % 2 == 0) ? median(ch) : mean(ch); - nudge = (goal - coords[v]) / nudgeSize; - newPos += nudge; - } - - if (newPos < minPos) { - newPos = minPos; - } - coords[v] = newPos; - minPos = newPos + (nodeSeparation << coordPrecision); - } - } - } - - // normalise, so no negative coords -// int minCoord = Integer.MAX_VALUE; -// for (int x : coords) if (x < minCoord) minCoord = x; -// -// if (minCoord != 0) -// for (int i = 0; i < coords.length; ++i) coords[i] -= minCoord; - } - - private void minEdge(int iter, int[] coords) { - // TODO Auto-generated method stub - } - - private void minNode(int iter, int[] coords) { - // TODO Auto-generated method stub - } - - private void minPath(int iter, int[] coords) { - // TODO Auto-generated method stub - } - - private void packCut(int iter, int[] coords) { - // TODO Auto-generated method stub - } - - private int xLength(int[] coords) { - int s, d; - int firstVirtual = vertexTable.size(); - int len = 0; - int omega; - for (int e : virtualGraph.getEdges()) { - s = virtualGraph.getSource(e); - d = virtualGraph.getDest(e); - if (s < firstVirtual && d < firstVirtual) { - omega = OMEGA_RR; - } else if (s < firstVirtual != d < firstVirtual) { - omega = OMEGA_VR; - } else { - omega = OMEGA_VV; - } - - len += Math.abs(coords[d] - coords[s]) * omega; -// int xdist = coords[d] - coords[s]; -// int ydist = (rankSeparation< gr = - new DirectedSparseMultigraph(); - - Random rand = new Random(156); - int numVerts = 20; - - // random graph - - int numEdges = 30; - - for (int i = 0; i < numVerts; ++i) { - gr.addVertex(i); - } - - for (int i = numVerts; i < numVerts + numEdges; ++i) { - gr.addEdge(i, - rand.nextInt(numVerts), - rand.nextInt(numVerts)); - } - - - // random (undirected) tree - - DirectedGraph tr = - new DirectedSparseMultigraph(); - tr.addVertex(0); - - for (int i = 1; i < numVerts; ++i) { - int src = (i == 1) ? 
0 : rand.nextInt(i - 1); - gr.addVertex(i); - if (rand.nextBoolean()) { - tr.addEdge(numVerts + i - 1, src, i); - } else { - tr.addEdge(numVerts + i - 1, i, src); - } - } - - tr.removeEdge(numVerts + 7); - - System.out.println(gr); - - AKDotLayout layout = - new AKDotLayout(gr); - - layout.initialize(); - } - - static class Ordering { - - public List[] lists; - public Map indices; - public Map ranks; - public int crossings; - private Graph graph; - - @SuppressWarnings("unchecked") - public Ordering(int numRanks, Graph graph) { - this.graph = graph; - crossings = 0; - lists = new List/**/[numRanks]; - int rankSizeGuess = 2 * (graph.getVertexCount() / numRanks); - for (int i = 0; i < numRanks; ++i) { - lists[i] = new ArrayList(rankSizeGuess); - } - indices = new HashMap(); - ranks = new HashMap(); - } - - // deep copying constructor - @SuppressWarnings("unchecked") - public Ordering(Ordering ord) { - graph = ord.graph; - crossings = ord.crossings; - - lists = new List/**/[ord.lists.length]; - for (int i = 0; i < ord.lists.length; ++i) { - lists[i] = (new ArrayList(ord.lists[i])); - } - indices = new HashMap(ord.indices); - ranks = new HashMap(ord.ranks); - } - - public void add(int rnk, int v) { - indices.put(v, lists[rnk].size()); - ranks.put(v, rnk); - lists[rnk].add(v); - } - - private void setVertex(int rnk, int pos, int v) { - lists[rnk].set(pos, v); - indices.put(v, pos); - } - - private int getVertex(int rank, int j) { - return lists[rank].get(j); - } - - public boolean contains(int v) { - return indices.keySet().contains(v); - } - - public int numRanks() { - return lists.length; - } - - @Override - public String toString() { - return "ordering: " + lists.toString(); - } - - public void updateCrossings() { - int cr = 0; - for (int i = 0; i < lists.length - 1; ++i) { - cr += crossingsForRank(i); - } - crossings = cr; - } - - private int crossingsForRank(int rnk) { - int[] edgesAfter = new int[lists[rnk + 1].size()]; - int idx; - int rk_crossings = 0; - SortedBag markIdx = new TreeBag(); - - for (int i = 0; i < edgesAfter.length; ++i) { - edgesAfter[i] = 0; - } - for (int v : lists[rnk]) { - for (int n : graph.getNeighbors(v)) { - if (ranks.get(n) == rnk + 1) { - idx = indices.get(n); - rk_crossings += edgesAfter[idx]; - markIdx.add(idx); - } - } - - for (int i = 0; i < edgesAfter.length; ++i) { - while (markIdx.size() > 0 && markIdx.first() <= i) { - markIdx.remove(markIdx.first()); - } - edgesAfter[i] += markIdx.size(); - } - } - - return rk_crossings; - } - - public void wmedian(int iter) { - int dir, startRank, endRank; - if (iter % 2 == 0) { - dir = 1; - startRank = 1; - endRank = lists.length - 1; - } else { - dir = -1; - startRank = lists.length - 2; - endRank = 0; - } - - List rnk; - MedianEntry[] medians; - for (int r = startRank; dir * r <= dir * endRank; r += dir) { - rnk = lists[r]; - medians = new MedianEntry[rnk.size()]; - for (int j = 0; j < rnk.size(); ++j) { - int vert = rnk.get(j); - medians[j] = new MedianEntry(vert, medianValue(vert, r - dir), iter); - } - - Arrays.sort(medians); - for (int j = 0; j < rnk.size(); ++j) { - setVertex(r, j, medians[j].vertex); - } - } - } - - private static class MedianEntry implements Comparable { - - public int median; - public int vertex; - public int iteration; - - public MedianEntry(int vertex, int median, int iteration) { - this.vertex = vertex; - this.median = median; - this.iteration = iteration; - } - - public int compareTo(MedianEntry o) { - if (median == -1 || o.median == -1) { - return 0; - } else { - if (median > o.median) 
{ - return 1; - } else if (median < o.median) { - return -1; - } else { - return 0; - } - } - } - } - - // TODO: change to fixed point arithmetic for speed - private int medianValue(int v, int adjRank) { - List pos = adjPositions(v, adjRank); - int mid = pos.size() / 2; - if (pos.isEmpty()) { - return -1; - } else if (pos.size() % 2 == 1) { - return pos.get(mid); - } else if (pos.size() == 2) { - return (pos.get(0) + pos.get(1)) / 2; - } else { - int left = pos.get(mid - 1) - pos.get(0); - int right = pos.get(pos.size() - 1) - pos.get(mid + 1); - return (pos.get(mid - 1) * left + pos.get(mid) * right) / (left + right); - } - } - - private List adjPositions(int v, int adjRank) { - List pos = new ArrayList(); - Collection neigh = graph.getNeighbors(v); - for (int i = 0; i < lists[adjRank].size(); ++i) { - if (neigh.contains(lists[adjRank].get(i))) { - pos.add(i); - } - } - return pos; - } - - public void transpose(int iter) { - int cross1, cross2; - int tmp; - boolean improved = true; - - int start, end, dir; - - if (iter % 4 < 2) { - start = 0; - end = numRanks() - 1; - dir = 1; - } else { - start = numRanks() - 1; - end = 0; - dir = -1; - } - - while (improved) { - improved = false; - for (int rank = start; dir * rank <= dir * end; rank += dir) { -// updateCrossings(); -// cross1 = crossings; - cross1 = 0; - if (rank > 0) { - cross1 += crossingsForRank(rank - 1); - } - if (rank < numRanks() - 1) { - cross1 += crossingsForRank(rank); - } - - for (int j = 0; j < lists[rank].size() - 1; ++j) { - tmp = getVertex(rank, j + 1); - setVertex(rank, j + 1, getVertex(rank, j)); - setVertex(rank, j, tmp); - -// updateCrossings(); -// cross2 = crossings; - cross2 = 0; - if (rank > 0) { - cross2 += crossingsForRank(rank - 1); - } - if (rank < numRanks() - 1) { - cross2 += crossingsForRank(rank); - } - - - if (cross2 < cross1) { - improved = true; - cross1 = cross2; - } else { - tmp = getVertex(rank, j + 1); - setVertex(rank, j + 1, getVertex(rank, j)); - setVertex(rank, j, tmp); - } - } - } - } - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/AbstractDotBangBoxLayout.java b/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/AbstractDotBangBoxLayout.java deleted file mode 100644 index e0284f77..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/AbstractDotBangBoxLayout.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
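For readers tracing AKDotLayout above: ordering() reduces edge crossings with the classic median-plus-transpose heuristics. transpose() repeatedly swaps adjacent vertices within a rank and keeps a swap only if crossingsForRank() reports fewer crossings afterwards, iterating until a full pass makes no improvement. A small worked example: with rank 0 ordered (a, b), rank 1 ordered (x, y), and edges a->y and b->x, the two edges cross once; swapping a and b (or, equivalently, x and y) removes the crossing, so that swap is kept and the pass continues.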
- */ - -package edu.uci.ics.jung.contrib.algorithms.layout; - -import edu.uci.ics.jung.contrib.graph.DirectedBangBoxGraph; -import java.util.Collection; - -/** - * - * @author alemer - */ -public abstract class AbstractDotBangBoxLayout extends AbstractDotLayout { - - - public AbstractDotBangBoxLayout(DirectedBangBoxGraph graph, double vertexSpacing) { - super(graph, vertexSpacing); - } - - @Override - protected boolean isWorkToDo() { - return super.isWorkToDo() || (getGraph().getBangBoxCount() > 0); - } - - @Override - @SuppressWarnings("unchecked") - public DirectedBangBoxGraph getGraph() { - return (DirectedBangBoxGraph)super.getGraph(); - } - - protected void addBangBoxLines(StringBuilder g) { - int i = 0; - for (B b : getGraph().getBangBoxes()) { - Collection contents = getGraph().getBoxedVertices(b); - if (!contents.isEmpty()) { - g.append("subgraph \"cluster "); - g.append(i); - g.append("\" {\n"); - for (V v : getGraph().getBoxedVertices(b)) { - g.append("\""); - g.append(getVertexDotKey(v)); - g.append("\"; "); - } - } - g.append("\n}\n"); - } - } - - @Override - protected void addGraphContents(StringBuilder g) { - addVertexLines(g); - addEdgeLines(g); - addBangBoxLines(g); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/AbstractDotLayout.java b/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/AbstractDotLayout.java deleted file mode 100644 index 2b768796..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/AbstractDotLayout.java +++ /dev/null @@ -1,302 +0,0 @@ -package edu.uci.ics.jung.contrib.algorithms.layout; - -import edu.uci.ics.jung.algorithms.layout.AbstractLayout; -import edu.uci.ics.jung.graph.DirectedGraph; -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.OutputStreamWriter; -import java.util.HashMap; -import java.util.Map; -import java.util.StringTokenizer; -import java.util.logging.Level; -import java.util.logging.Logger; - -public abstract class AbstractDotLayout extends AbstractLayout implements DynamicBoundsLayout { - - private final static Logger logger = - Logger.getLogger("edu.uci.ics.jung.contrib.algorithms.layout"); - public static String dotProgram = "dot"; - public final static double DOT_SCALE = 50.0; - protected Map vertexPositions = null; - protected double vertexSpacing = 20.0; - - public AbstractDotLayout(DirectedGraph graph, double vertexSpacing) { - super(graph, new Dimension((int) Math.ceil(2 * vertexSpacing), (int) Math.ceil(2 * vertexSpacing))); - this.vertexSpacing = vertexSpacing; - } - - protected void beginLayout() { - } - - protected void endLayout() { - } - - /** - * Get the vertex key to use to describe a vertex. - * - * This must be unique (within the graph) for each vertex, and must always - * return the same value for any given vertex between a call to - * beginLayout() and the corresponding call to endLayout(). 
- * - * @return a string containing no double quote characters - */ - protected abstract String getVertexDotKey(V vertex); - - @Override - public Dimension getSize() { - return size; - } - - @Override - public void setSize(Dimension size) { - throw new UnsupportedOperationException("Size of dot layout is determined by dot!"); - } - - public void recalculateSize() { - double right = vertexSpacing; - double bottom = vertexSpacing; - for (V v : getGraph().getVertices()) { - Point2D point = transform(v); - right = Math.max(right, point.getX() + vertexWidth(v) / 2.0); - bottom = Math.max(bottom, point.getY() + vertexHeight(v) / 2.0); - } - right += vertexSpacing; - bottom += vertexSpacing; - size.setSize(Math.ceil(right), Math.ceil(bottom)); - } - - @Override - public void setLocation(V picked, Point2D p) { - if (p.getX() < 20) { - p.setLocation(20, p.getY()); - } - if (p.getY() < 20) { - p.setLocation(p.getX(), 20); - } - super.setLocation(picked, p); - if (p.getX() + vertexSpacing > size.width) { - size.width = (int) Math.ceil(p.getX() + vertexSpacing); - } - if (p.getY() + vertexSpacing > size.height) { - size.height = (int) Math.ceil(p.getY() + vertexSpacing); - } - } - - protected boolean isWorkToDo() { - return getGraph().getVertexCount() > 0; - } - - /** - * (Re-)initialize the layout. - */ - public void initialize() { - try { - synchronized (getGraph()) { - if (!isWorkToDo()) { - return; - } - - vertexPositions = new HashMap(); - beginLayout(); - - String viz = graphToDot(); - calculateNodePositions(viz); - adjustPositions(); - layoutGraph(); - - endLayout(); - //recalculateSize(); - } - } catch (IOException e) { - logger.log(Level.SEVERE, "Failed to run dot", e); - } finally { - vertexPositions = null; - } - } - - public void reset() { - initialize(); - } - - protected void calculateNodePositions(String dot) throws IOException { - Process dotProcess = Runtime.getRuntime().exec(dotProgram + " -Tplain"); - BufferedReader dotIn = new BufferedReader(new InputStreamReader(dotProcess - .getInputStream())); - - OutputStreamWriter dotOut = new OutputStreamWriter(dotProcess - .getOutputStream()); - - dotOut.write(dot); - dotOut.close(); - - String ln = dotIn.readLine(); - - while (!ln.equals("stop")) { - logger.log(Level.FINEST, "Processing line: {0}", ln); - if (ln.startsWith("graph ")) { - StringTokenizer tok = new StringTokenizer(ln); - tok.nextToken(); // "graph" - tok.nextToken(); // scale - double width = Double.parseDouble(tok.nextToken()); - double height = Double.parseDouble(tok.nextToken()); - size.setSize(width * DOT_SCALE + 2 * vertexSpacing, height * DOT_SCALE + 2 * vertexSpacing); - } - if (ln.startsWith("node ")) { - // ad-hoc parsing, as we know exactly what the - // format will be - StringBuilder tok = new StringBuilder(); - int p = "node ".length(); - if (ln.charAt(p) == '"') { - ++p; - while (ln.charAt(p) != '"') { - tok.append(ln.charAt(p)); - ++p; - } - ++p; - } else { - while (ln.charAt(p) != ' ') { - tok.append(ln.charAt(p)); - ++p; - } - } - String name = tok.toString(); - ++p; - - tok = new StringBuilder(); - while (ln.charAt(p) != ' ') { - tok.append(ln.charAt(p)); - ++p; - } - double x = Double.parseDouble(tok.toString()) * DOT_SCALE + vertexSpacing; - ++p; - - tok = new StringBuilder(); - while (ln.charAt(p) != ' ') { - tok.append(ln.charAt(p)); - ++p; - } - double y = Double.parseDouble(tok.toString()) * DOT_SCALE + vertexSpacing; - y = size.height - y; - - vertexPositions.put(name, new Point2D.Double(x, y)); - } - ln = dotIn.readLine(); - if (ln == null) { - throw 
new DotException("Bad dot output: no 'stop' received"); - } - } - - dotIn.close(); - } - - protected void adjustPositions() { - /*double top=0; - for (Point2D c : vertexPositions.values()) { - c.setLocation(c.getX() * DOT_SCALE + vertexSpacing, c.getY() * -DOT_SCALE); - if (c.getY()\""); - g.append(getVertexDotKey(graph.getDest(e))); - g.append("\""); - g.append(" [arrowhead=none];\n"); - } - } - - protected void addGraphContents(StringBuilder g) { - addVertexLines(g); - addEdgeLines(g); - } - - /** - * Converts a graph to a DOT string. ALWAYS run in the context of - * synchronized(getGraph()) {...}. - */ - protected String graphToDot() { - StringBuilder g = new StringBuilder(); - g.append("digraph {\n"); - addGraphAttrs(g); - g.append("\n"); - addGraphContents(g); - g.append("}\n"); - logger.log(Level.FINEST, "Dot output: {0}", g); - return g.toString(); - } - - /** - * Sets the location without updating bang box rects. - * - * For use in layoutGraph() - * - * @param picked - * @param p - */ - protected void setLocationNoUpdates(V picked, Point2D p) { - super.setLocation(picked, p); - } - - /** - * Sets the location without updating bang box rects. - * - * For use in layoutGraph() - * - * @param picked - * @param p - */ - protected void setLocationNoUpdates(V picked, double x, double y) { - super.setLocation(picked, x, y); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/AbstractForceLayout.java b/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/AbstractForceLayout.java deleted file mode 100644 index 1d2306ee..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/AbstractForceLayout.java +++ /dev/null @@ -1,166 +0,0 @@ -package edu.uci.ics.jung.contrib.algorithms.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; - -import quanto.core.data.Vertex; -import quanto.gui.QuantoAutoLayout; - - -import edu.uci.ics.jung.algorithms.layout.AbstractLayout; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.DirectedGraph; - -public abstract class AbstractForceLayout extends AbstractLayout implements DynamicBoundsLayout{ - - protected Map vertexVelocities; - protected Map vertexPositions; - protected double vertexSpacing = 20.0; - Transformer initializer; - - protected AbstractForceLayout(DirectedGraph graph, Transformer initializer, - double vertexSpacing ) { - super(graph, new Dimension((int)Math.ceil(2*vertexSpacing), (int)Math.ceil(2*vertexSpacing))); - this.initializer=initializer; - } - - - protected void beginLayout() {} - - protected void endLayout() {} - - @Override - public Dimension getSize() { - return size; - } - - - @Override - public void setLocation(V picked, Point2D p) { - if (p.getX() < 20) - p.setLocation(20, p.getY()); - if (p.getY() < 20) - p.setLocation(p.getX(), 20); - super.setLocation(picked, p); - if (p.getX() + vertexSpacing > size.width) { - size.width = (int)Math.ceil(p.getX() + vertexSpacing); - } - if (p.getY() + vertexSpacing > size.height) { - size.height = (int)Math.ceil(p.getY() + vertexSpacing); - } - } - - protected Point2D coulombRepulsion(V v1, V v2){ - Point2D p1= locations.get(v1); - Point2D p2= locations.get(v2); - double distSq=(p1.getX()-p2.getX())*(p1.getX()-p2.getX())+ - (p1.getY()-p2.getY())*(p1.getY()-p2.getY()); - //distSq=Math.sqrt(distSq); - return new Point2D.Double(200*(p1.getX()-p2.getX())/distSq, 200*(p1.getY()-p2.getY())/distSq ); - } - - 
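To illustrate the round trip implemented by graphToDot() and calculateNodePositions() above: the layout writes a digraph to "dot -Tplain" and reads back one "graph" line, one "node" line per vertex, and a final "stop" (other lines, such as "edge", are ignored). Both samples below are illustrative; the exact node attributes emitted by addVertexLines() are not recoverable from this hunk.

Input sent to dot (vertex keys come from getVertexDotKey()):

    digraph {
      "1";
      "2";
      "1"->"2" [arrowhead=none];
    }

Plain-format reply parsed by calculateNodePositions() (coordinates illustrative):

    graph 1 0.75 1.5
    node 1 0.375 1.25 0.75 0.5 1 solid ellipse black lightgrey
    node 2 0.375 0.25 0.75 0.5 2 solid ellipse black lightgrey
    edge 1 2 4 0.375 1.1 0.375 0.9 0.375 0.6 0.375 0.4 solid black
    stop

Each node's x and y are then multiplied by DOT_SCALE (50.0), shifted by vertexSpacing, and the y axis is flipped (y = height - y) before the point is stored in vertexPositions.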
protected Point2D hookeAttraction(V v1, V v2){ - Point2D p1= locations.get(v1); - Point2D p2= locations.get(v2); - double atr=0.06; - return new Point2D.Double(atr*(p1.getX()-p2.getX()), atr*(p1.getY()-p2.getY())); - } - - protected void forceLayout(){ - double kineticEnergy=graph.getVertices().size(); - double damping=0.65; - double timestep=0.01; - vertexVelocities= new HashMap(); - vertexPositions= new HashMap(); - for (V v : graph.getVertices()) - vertexVelocities.put(v, new Point2D.Double(0, 0)); - int i=0; - //int sq=graph.getVertices().size()*graph.getVertices().size(); - while(i<10000){ - kineticEnergy=0; - for (V v : graph.getVertices()) { - if (!isLocked(v)){ - Point2D netForce=new Point2D.Double(0, 0); - vertexVelocities.put(v, new Point2D.Double(0, 0)); - for (V u : graph.getVertices()) { - if(v!=u){ - Point2D q=coulombRepulsion(u, v); - netForce.setLocation(netForce.getX()-q.getX(), netForce.getY()-q.getY()); - } - } - for(V u : graph.getSuccessors(v)){ - Point2D q=hookeAttraction(u, v); - netForce.setLocation(netForce.getX()+q.getX(), netForce.getY()+q.getY()); - } - Point2D p=vertexVelocities.get(v); - vertexVelocities.put(v, new Point2D.Double((p.getX()+netForce.getX()*timestep)*damping , - (p.getY()+netForce.getY()*timestep)*damping )); - p=vertexVelocities.get(v); - Point2D q=locations.get(v); - Point2D r= new Point2D.Double(q.getX()+p.getX()*timestep, q.getY()+p.getY()*timestep); - setLocation(v, r); - kineticEnergy+=p.getX()*p.getX()+p.getY()*p.getY(); - } - } - i++; - } - } - - - protected boolean isWorkToDo() { - return getGraph().getVertexCount() > 0; - } - - public void initialize() { - setInitializer(initializer); - - - /* - Point2D p= new Point2D.Double(0, 0); - for(V v : graph.getVertices()) - if(!transform(v).equals(p)) - - for (V v : graph.getVertices()) - if(transform(v).equals(p)) - setLocation(v, initializer.transform(v)); - */ - } - - - public void reset() { - if (!isWorkToDo()) return; - beginLayout(); - forceLayout(); - //layoutGraph(); - endLayout(); - recalculateSize(); - } - - - - protected void layoutGraph() { - for(V v : graph.getVertices()) - setLocation(v, vertexPositions.get(v)); - } - - protected double vertexWidth(V v) { return 14; } - protected double vertexHeight(V v) { return 14; } - - public void recalculateSize() { - double right = vertexSpacing; - double bottom = vertexSpacing; - for (V v : getGraph().getVertices()) { - Point2D point = transform(v); - right = Math.max(right, point.getX() + vertexWidth(v)/2.0); - bottom = Math.max(bottom, point.getY() + vertexHeight(v)/2.0); - } - right += vertexSpacing; - bottom += vertexSpacing; - size.setSize(Math.ceil(right), Math.ceil(bottom)); - } - - } diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/DotException.java b/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/DotException.java deleted file mode 100644 index 7b974d75..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/DotException.java +++ /dev/null @@ -1,10 +0,0 @@ -package edu.uci.ics.jung.contrib.algorithms.layout; - -public class DotException extends RuntimeException { - - private static final long serialVersionUID = 8173148319748759814L; - - public DotException(String s) { - super(s); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/DotLayout.java b/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/DotLayout.java deleted file mode 100644 index 311ec864..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/DotLayout.java +++ /dev/null @@ 
-1,43 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package edu.uci.ics.jung.contrib.algorithms.layout; - -import edu.uci.ics.jung.graph.DirectedGraph; -import java.awt.Dimension; -import java.util.HashMap; -import java.util.Map; - -/** - * - * @author alex - */ -public class DotLayout extends AbstractDotLayout { - private Map vertexKeys = null; - - public DotLayout(DirectedGraph graph, double vertexPadding) { - super(graph, vertexPadding); - } - - @Override - protected void beginLayout() { - vertexKeys = new HashMap(); - int i = 1; - for (V v : graph.getVertices()) { - vertexKeys.put(v, Integer.toString(i)); - ++i; - } - } - - @Override - protected String getVertexDotKey(V vertex) { - return vertexKeys.get(vertex); - } - - @Override - protected void endLayout() { - vertexKeys = null; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/DynamicBoundsLayout.java b/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/DynamicBoundsLayout.java deleted file mode 100644 index dcbbf170..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/algorithms/layout/DynamicBoundsLayout.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package edu.uci.ics.jung.contrib.algorithms.layout; - -/** - * - * @author alex - */ -public interface DynamicBoundsLayout { - public void recalculateSize(); -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/graph/BangBoxGraph.java b/gui/jung-src/edu/uci/ics/jung/contrib/graph/BangBoxGraph.java deleted file mode 100644 index b876752d..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/graph/BangBoxGraph.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package edu.uci.ics.jung.contrib.graph; - -import edu.uci.ics.jung.graph.Graph; -import java.util.Collection; - -/** - * Interface for a graph with a collection of subgraphs, known as !-boxes. - * - * @author alemer - */ -public interface BangBoxGraph extends Graph { - /** - * Adds a !-box to this graph. - * - * Fails if @p bangbox is already an element of this graph, or - * @p bangbox or @p vertices is null. - * - * @param bangbox the object representing the !-box - * @param vertices the vertices that will be contained in the !-box - * @return @c true if the add is successful, and @c false otherwise - * @throws IllegalArgumentException if bangbox or vertices is null - */ - public boolean addBangBox(B bangbox, Collection vertices); - /** - * Whether this graph contains a particular !-box. - * - * Equivalent to getBangBoxes().contains(bangbox). - * - * @param bangbox the object representing the !-box - * @return @c true if the graph contains @p bangbox, @c false otherwise - */ - public boolean containsBangBox(B bangbox); - /** - * The number of !-boxes in this graph - * - * Equivalent to getBangBoxes().size(). - * - * @return the number of !-boxes in this graph - */ - public int getBangBoxCount(); - /** - * Get !-boxes in the graph. - * - * In general, this obeys the Collection contract, and therefore makes - * no guarantees about the ordering of the !-boxes within the set. - * - * @return a view on the !-boxes in this graph - */ - public Collection getBangBoxes(); - /** - * Get the contents of a !-box. - * - * In general, this obeys the Collection contract, and therefore makes - * no guarantees about the ordering of the vertices within the set. 
- * - * @param bangbox the object representing the !-box - * @return a view on a subset of vertices of this graph - */ - public Collection getBoxedVertices(B bangbox); - /** - * Add some vertices to a !-box - * - * @param bangbox The !-box to add it to - * @param vertices The vertices to add - */ - public void addVerticesToBangBox(B bangbox, Collection vertices); - /** - * Remove some vertices from a !-box - * - * @param bangbox The !-box to add it to - * @param vertices The vertices to remove - */ - public void removeVerticesFromBangBox(B bangbox, Collection vertices); - /** - * Changes the contents of a !-box in this graph. - * - * Fails if @p bangbox is not an element of this graph, or - * @p bangbox or @p vertices is null. - * - * @param bangbox the object representing the !-box - * @param vertices the vertices that will be contained in the !-box - * @return the original contents of the bangbox - * @throws IllegalArgumentException if bangbox or vertices is null, or - * bangbox is not in the graph - */ - public void setBoxedVertices(B bangbox, Collection vertices); - /** - * Removes a !-box from this graph. - * - * Fails if @p bangbox is not an element of this graph, or - * @p bangbox is null. - * - * This does not affect the vertices contained in the !-box. - * @param bangbox the object representing the !-box - * @return @c true if the remove is successful, and @c false otherwise - */ - public boolean removeBangBox(B bangbox); -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/graph/DirectedBangBoxGraph.java b/gui/jung-src/edu/uci/ics/jung/contrib/graph/DirectedBangBoxGraph.java deleted file mode 100644 index 9d372ee6..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/graph/DirectedBangBoxGraph.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package edu.uci.ics.jung.contrib.graph; - -import edu.uci.ics.jung.graph.DirectedGraph; - -/** - * Interface for a directed graph with a collection of subgraphs, known as - * !-boxes. - * - * @author alex - */ -public interface DirectedBangBoxGraph - extends BangBoxGraph, - DirectedGraph { -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/graph/DirectedSparseBangBoxMultigraph.java b/gui/jung-src/edu/uci/ics/jung/contrib/graph/DirectedSparseBangBoxMultigraph.java deleted file mode 100644 index 5ff5f213..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/graph/DirectedSparseBangBoxMultigraph.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
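Before the DirectedSparseBangBoxMultigraph implementation that follows, a hedged usage sketch of the !-box API defined by BangBoxGraph above; the <V, E, B> type-parameter order, the demo class and the demo values are assumptions, while the method names are taken from the interface and implementation in this diff.

    import java.util.Arrays;
    import java.util.Collections;
    import edu.uci.ics.jung.contrib.graph.DirectedSparseBangBoxMultigraph;

    class BangBoxDemo {                                        // hypothetical demo class
        public static void main(String[] args) {
            DirectedSparseBangBoxMultigraph<String, Integer, String> g =
                    new DirectedSparseBangBoxMultigraph<String, Integer, String>();
            g.addVertex("u");
            g.addVertex("v");
            g.addEdge(1, "u", "v");                            // edge with key 1 from u to v
            g.addBangBox("b0", Arrays.asList("u", "v"));       // !-box containing both vertices
            g.removeVerticesFromBangBox("b0", Collections.singleton("v"));
            System.out.println(g.getBoxedVertices("b0"));      // unmodifiable view: prints [u]
        }
    }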
- */ - -package edu.uci.ics.jung.contrib.graph; - -import edu.uci.ics.jung.graph.DirectedSparseMultigraph; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -/** - * - * @author alex - */ -public class DirectedSparseBangBoxMultigraph - extends DirectedSparseMultigraph - implements DirectedBangBoxGraph { - - protected Map> bangBoxes; - - public DirectedSparseBangBoxMultigraph() { - bangBoxes = new HashMap>(); - } - - public Collection getBangBoxes() { - return Collections.unmodifiableCollection(bangBoxes.keySet()); - } - - public boolean containsBangBox(B b) { - return bangBoxes.containsKey(b); - } - - public boolean addBangBox(B bangbox, Collection vertices) { - if (bangbox == null) - throw new NullPointerException("bangbox"); - if (vertices == null) - throw new NullPointerException("vertices"); - if (containsBangBox(bangbox)) - return false; - - bangBoxes.put(bangbox, new HashSet(vertices)); - return true; - } - - public int getBangBoxCount() { - return bangBoxes.size(); - } - - public boolean removeBangBox(B bangbox) { - if (!containsBangBox(bangbox)) - return false; - - bangBoxes.remove(bangbox); - - return true; - } - - public Collection getBoxedVertices(B bangbox) { - if (!containsBangBox(bangbox)) - return null; - - return Collections.unmodifiableCollection(bangBoxes.get(bangbox)); - } - - public void addVerticesToBangBox(B bangbox, Collection vertices) { - if (bangbox == null) - throw new NullPointerException("bangbox"); - if (vertices == null) - throw new NullPointerException("vertices"); - if (!containsBangBox(bangbox)) - throw new IllegalArgumentException("bangbox is not in this graph"); - - bangBoxes.get(bangbox).addAll(vertices); - } - - public void removeVerticesFromBangBox(B bangbox, Collection vertices) { - if (bangbox == null) - throw new NullPointerException("bangbox"); - if (vertices == null) - throw new NullPointerException("vertices"); - if (!containsBangBox(bangbox)) - throw new IllegalArgumentException("bangbox is not in this graph"); - - bangBoxes.get(bangbox).removeAll(vertices); - } - - public void setBoxedVertices(B bangbox, Collection vertices) { - if (bangbox == null) - throw new NullPointerException("bangbox"); - if (vertices == null) - throw new NullPointerException("vertices"); - if (!containsBangBox(bangbox)) - throw new IllegalArgumentException("bangbox is not in this graph"); - - bangBoxes.put(bangbox, new HashSet(vertices)); - } - - @Override - public boolean removeVertex(V vertex) { - if (super.removeVertex(vertex)) { - for (Set contents : bangBoxes.values()) { - contents.remove(vertex); - } - return true; - } else { - return false; - } - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(super.toString()); - sb.append("\n!-boxes:"); - for (B bangbox : getBangBoxes()) { - sb.append(bangbox); - sb.append("["); - Collection contents = getBoxedVertices(bangbox); - if (contents.size() > 0) { - for (V v : contents) { - sb.append(v); - sb.append(','); - } - sb.setLength(sb.length()-1); - } - sb.append("] "); - } - sb.setLength(sb.length()-1); - return sb.toString(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/BangBoxGraphElementAccessor.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/BangBoxGraphElementAccessor.java deleted file mode 100644 index da893561..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/BangBoxGraphElementAccessor.java +++ /dev/null @@ -1,19 
+0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package edu.uci.ics.jung.contrib.visualization; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; - -/** - * - * @author alemer - */ -public interface BangBoxGraphElementAccessor - extends GraphElementAccessor -{ - B getBangBox(Layout layout, double x, double y); -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/BangBoxGraphRenderContext.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/BangBoxGraphRenderContext.java deleted file mode 100644 index 4dbf7746..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/BangBoxGraphRenderContext.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package edu.uci.ics.jung.contrib.visualization; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.contrib.graph.BangBoxGraph; -import edu.uci.ics.jung.contrib.visualization.renderers.BangBoxLabelRenderer; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.picking.PickedState; - -import java.awt.Font; -import java.awt.Paint; -import java.awt.Shape; -import java.awt.Stroke; -import org.apache.commons.collections15.Predicate; -import org.apache.commons.collections15.Transformer; - -/** - * - * @author alemer - */ -public interface BangBoxGraphRenderContext extends RenderContext { - - PickedState getPickedBangBoxState(); - void setPickedBangBoxState(PickedState pickedBangBoxState); - - BangBoxGraphElementAccessor getPickSupport(); - void setPickSupport(BangBoxGraphElementAccessor pickSupport); - - Transformer getBangBoxStrokeTransformer(); - void setBangBoxStrokeTransformer(Transformer bangBoxStrokeTransformer); - - Transformer getBangBoxLabelTransformer(); - void setBangBoxLabelTransformer(Transformer bangBoxLabelTransformer); - - BangBoxLabelRenderer getBangBoxLabelRenderer(); - void setBangBoxLabelRenderer(BangBoxLabelRenderer bangBoxLabelRenderer); - Transformer getBangBoxFontTransformer(); - void setBangBoxFontTransformer(Transformer bangBoxFontTransformer); - - Predicate, B>> getBangBoxIncludePredicate(); - void setBangBoxIncludePredicate(Predicate, B>> bangBoxIncludePredicate); - - // NB: unlike the vertex and edge shape transforms, this one will not be - // translated or scaled - Transformer, B>, Shape> getBangBoxShapeTransformer(); - void setBangBoxShapeTransformer(Transformer, B>, Shape> bangBoxShapeTransformer); - - Transformer getBangBoxFillPaintTransformer(); - void setBangBoxFillPaintTransformer(Transformer bangBoxFillPaintTransformer); - - Transformer getBangBoxDrawPaintTransformer(); - void setBangBoxDrawPaintTransformer(Transformer bangBoxDrawPaintTransformer); -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/BangBoxGraphVisualizationServer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/BangBoxGraphVisualizationServer.java deleted file mode 100644 index 5fe9c4b3..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/BangBoxGraphVisualizationServer.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
- */ - -package edu.uci.ics.jung.contrib.visualization; - -import edu.uci.ics.jung.contrib.visualization.renderers.BangBoxGraphRenderer; -import edu.uci.ics.jung.visualization.VisualizationServer; -import edu.uci.ics.jung.visualization.picking.PickedState; - -/** - * - * @author alemer - */ -public interface BangBoxGraphVisualizationServer extends VisualizationServer -{ - PickedState getPickedBangBoxState(); - void setPickedBangBoxState(PickedState pickedBangBoxState); - BangBoxGraphElementAccessor getPickSupport(); - void setPickSupport(BangBoxGraphElementAccessor pickSupport); - BangBoxGraphRenderContext getRenderContext(); - void setRenderContext(BangBoxGraphRenderContext renderContext); - BangBoxGraphRenderer getRenderer(); - void setRenderer(BangBoxGraphRenderer renderer); -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/BangBoxGraphVisualizationViewer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/BangBoxGraphVisualizationViewer.java deleted file mode 100644 index 5c43faa6..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/BangBoxGraphVisualizationViewer.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package edu.uci.ics.jung.contrib.visualization; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.contrib.visualization.decorators.PickableBangBoxPaintTransformer; -import edu.uci.ics.jung.contrib.visualization.decorators.PickableElementStrokeTransformer; -import edu.uci.ics.jung.contrib.visualization.renderers.BangBoxGraphRenderer; -import edu.uci.ics.jung.contrib.visualization.renderers.BasicBangBoxGraphRenderer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.VisualizationModel; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.decorators.PickableEdgePaintTransformer; -import edu.uci.ics.jung.visualization.decorators.PickableVertexPaintTransformer; -import edu.uci.ics.jung.visualization.picking.MultiPickedState; -import edu.uci.ics.jung.visualization.picking.PickedState; -import edu.uci.ics.jung.visualization.renderers.Renderer; -import java.awt.BasicStroke; -import java.awt.Color; -import java.awt.Dimension; -import java.awt.event.ItemEvent; -import java.awt.event.ItemListener; - -/** - * - * @author alemer - */ -public class BangBoxGraphVisualizationViewer - extends VisualizationViewer - implements BangBoxGraphVisualizationServer { - - protected PickedState pickedBangBoxState = new MultiPickedState(); - - public BangBoxGraphVisualizationViewer(Layout layout) { - super(layout); - init(); - } - - public BangBoxGraphVisualizationViewer(Layout layout, Dimension preferredSize) { - super(layout, preferredSize); - init(); - } - - public BangBoxGraphVisualizationViewer(VisualizationModel model) { - super(model); - init(); - } - - public BangBoxGraphVisualizationViewer(VisualizationModel model, Dimension preferredSize) { - super(model, preferredSize); - init(); - } - - private void init() { - PluggableBangBoxGraphRenderContext prc = new PluggableBangBoxGraphRenderContext(); - renderContext = prc; - renderer = new BasicBangBoxGraphRenderer(); - setPickSupport(new ShapeBangBoxPickSupport(this)); - setPickedVertexState(new MultiPickedState()); - setPickedEdgeState(new MultiPickedState()); - setPickedBangBoxState(new MultiPickedState()); - 
renderContext.setEdgeDrawPaintTransformer(new PickableEdgePaintTransformer(getPickedEdgeState(), Color.black, Color.cyan)); - renderContext.setVertexFillPaintTransformer(new PickableVertexPaintTransformer(getPickedVertexState(), - Color.red, Color.yellow)); - prc.setBangBoxDrawPaintTransformer(new PickableBangBoxPaintTransformer(getPickedBangBoxState(), Color.gray, Color.blue)); - prc.setBangBoxStrokeTransformer(new PickableElementStrokeTransformer(getPickedBangBoxState(), new BasicStroke(1), new BasicStroke(2))); - renderContext.getMultiLayerTransformer().addChangeListener(this); - } - - public PickedState getPickedBangBoxState() { - return pickedBangBoxState; - } - - public void setPickedBangBoxState(PickedState pickedBangBoxState) { - if (pickEventListener != null && this.pickedBangBoxState != null) { - this.pickedBangBoxState.removeItemListener(pickEventListener); - } - this.pickedBangBoxState = pickedBangBoxState; - getRenderContext().setPickedBangBoxState(pickedBangBoxState); - if (pickEventListener == null) { - pickEventListener = new ItemListener() { - - public void itemStateChanged(ItemEvent e) { - repaint(); - } - }; - } - pickedBangBoxState.addItemListener(pickEventListener); - } - - @Override - public BangBoxGraphElementAccessor getPickSupport() { - return getRenderContext().getPickSupport(); - } - - public void setPickSupport(BangBoxGraphElementAccessor pickSupport) { - super.setPickSupport(pickSupport); - } - - @Override - public void setPickSupport(GraphElementAccessor pickSupport) { - if (pickSupport instanceof BangBoxGraphElementAccessor) { - super.setPickSupport(pickSupport); - } - else if (super.getPickSupport() != null) { - // ^^ this is for when the BVS constructor calls this - throw new IllegalArgumentException("renderer must be a BangBoxGraphElementAccessor"); - } - } - - @Override - @SuppressWarnings("unchecked") - public BangBoxGraphRenderContext getRenderContext() { - return (BangBoxGraphRenderContext) renderContext; - } - - public void setRenderContext(BangBoxGraphRenderContext renderContext) { - super.setRenderContext(renderContext); - } - - @Override - public void setRenderContext(RenderContext renderContext) { - if (!(renderContext instanceof RenderContext)) { - throw new IllegalArgumentException("renderer must be a RenderContext"); - } - super.setRenderContext(renderContext); - } - - @Override - @SuppressWarnings("unchecked") - public BangBoxGraphRenderer getRenderer() { - return (BangBoxGraphRenderer) renderer; - } - - public void setRenderer(BangBoxGraphRenderer renderer) { - super.setRenderer(renderer); - } - - @Override - public void setRenderer(Renderer renderer) { - if (!(renderer instanceof BangBoxGraphRenderer)) { - throw new IllegalArgumentException("renderer must be a BangBoxGraphRenderer"); - } - super.setRenderer(renderer); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/DefaultBangBoxLabelRenderer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/DefaultBangBoxLabelRenderer.java deleted file mode 100644 index 23ed21de..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/DefaultBangBoxLabelRenderer.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Apr 14, 2005 - */ - -package edu.uci.ics.jung.contrib.visualization; - -import java.awt.Color; -import java.awt.Component; -import java.awt.Font; -import java.awt.Rectangle; -import java.io.Serializable; - -import javax.swing.JComponent; -import javax.swing.JLabel; -import javax.swing.border.Border; -import javax.swing.border.EmptyBorder; - -import edu.uci.ics.jung.contrib.visualization.renderers.BangBoxLabelRenderer; - -/** - * DefaultVertexLabelRenderer is similar to the cell renderers - * used by the JTable and JTree jfc classes. - * - * @author Tom Nelson - * - * - */ -@SuppressWarnings("serial") -public class DefaultBangBoxLabelRenderer extends JLabel implements - BangBoxLabelRenderer, Serializable { - - protected static Border noFocusBorder = new EmptyBorder(0,0,0,0); - - /** - * Creates a default table cell renderer. - */ - public DefaultBangBoxLabelRenderer() { - setOpaque(true); - setBorder(noFocusBorder); - } - - /** - * Overrides JComponent.setForeground to assign - * the unselected-foreground color to the specified color. - * - * @param c set the foreground color to this value - */ - @Override - public void setForeground(Color c) { - super.setForeground(c); - } - - /** - * Overrides JComponent.setBackground to assign - * the unselected-background color to the specified color. - * - * @param c set the background color to this value - */ - @Override - public void setBackground(Color c) { - super.setBackground(c); - } - - /** - * Notification from the UIManager that the look and feel - * [L&F] has changed. - * Replaces the current UI object with the latest version from the - * UIManager. - * - * @see JComponent#updateUI - */ - @Override - public void updateUI() { - super.updateUI(); - setForeground(null); - setBackground(null); - } - - /** - * - * Returns the default label renderer for a Vertex - * - * @param vv the VisualizationViewer to render on - * @param value the value to assign to the label for - * Vertex - * @param vertex the Vertex - * @return the default label renderer - */ - public Component getBangBoxLabelRendererComponent(JComponent vv, - Object value, Font font, boolean isSelected, B bangBox) { - super.setForeground(vv.getForeground()); - super.setBackground(vv.getBackground()); - if(font != null) { - setFont(font); - } else { - setFont(vv.getFont()); - } - setIcon(null); - setBorder(noFocusBorder); - setValue(value); - return this; - } - - /* - * The following methods are overridden as a performance measure to - * to prune code-paths are often called in the case of renders - * but which we know are unnecessary. Great care should be taken - * when writing your own renderer to weigh the benefits and - * drawbacks of overriding methods like these. - */ - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public boolean isOpaque() { - Color back = getBackground(); - Component p = getParent(); - if (p != null) { - p = p.getParent(); - } - boolean colorMatch = (back != null) && (p != null) && - back.equals(p.getBackground()) && - p.isOpaque(); - return !colorMatch && super.isOpaque(); - } - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void validate() {} - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void revalidate() {} - - /** - * Overridden for performance reasons. 
- * See the Implementation Note - * for more information. - */ - @Override - public void repaint(long tm, int x, int y, int width, int height) {} - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void repaint(Rectangle r) { } - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - protected void firePropertyChange(String propertyName, Object oldValue, Object newValue) { - // Strings get interned... - if (propertyName=="text") { - super.firePropertyChange(propertyName, oldValue, newValue); - } - } - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void firePropertyChange(String propertyName, boolean oldValue, boolean newValue) { } - - /** - * Sets the String object for the cell being rendered to - * value. - * - * @param value the string value for this cell; if value is - * null it sets the text value to an empty string - * @see JLabel#setText - * - */ - protected void setValue(Object value) { - setText((value == null) ? "" : value.toString()); - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/DefaultBangBoxShapeTransformer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/DefaultBangBoxShapeTransformer.java deleted file mode 100644 index 378b354e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/DefaultBangBoxShapeTransformer.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package edu.uci.ics.jung.contrib.visualization; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.contrib.graph.BangBoxGraph; -import edu.uci.ics.jung.visualization.Layer; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.Collection; -import org.apache.commons.collections15.Transformer; - -/** - * - * @author alemer - */ -public class DefaultBangBoxShapeTransformer - implements Transformer, B>, Shape> { - - protected double padding; - protected BangBoxGraphRenderContext renderContext; - - public DefaultBangBoxShapeTransformer(BangBoxGraphRenderContext renderContext, double padding) { - this.renderContext = renderContext; - this.padding = padding; - } - - public Shape transform(LayoutContext, B> context) { - @SuppressWarnings("unchecked") - BangBoxGraph graph = (BangBoxGraph) context.layout.getGraph(); - Rectangle2D rect = vertexBounds(context.layout, graph.getBoxedVertices(context.element)); - if (rect != null) { - rect.setRect(rect.getX() - padding, - rect.getY() - padding, - rect.getWidth() + 2*padding, - rect.getHeight() + 2*padding); - } else { - rect = new Rectangle2D.Double(0, 0, 0, 0); - } - return rect; - } - - protected Rectangle2D vertexBounds(Layout layout, Collection vertices) { - Rectangle2D rect = null; - for (V v : vertices) { - Point2D p = layout.transform(v); - p = renderContext.getMultiLayerTransformer().transform(Layer.LAYOUT, p); - AffineTransform xform = AffineTransform.getTranslateInstance(p.getX(), p.getY()); - Shape shape = xform.createTransformedShape(renderContext.getVertexShapeTransformer().transform(v)); - if (rect == null) - rect = shape.getBounds2D(); - else - rect.add(shape.getBounds2D()); - } - return rect; - } - -} diff --git 
a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/LayerTransformer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/LayerTransformer.java deleted file mode 100644 index 7d14e2b2..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/LayerTransformer.java +++ /dev/null @@ -1,21 +0,0 @@ -package edu.uci.ics.jung.contrib.visualization; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import java.awt.geom.Point2D; -import org.apache.commons.collections15.Transformer; - -public class LayerTransformer implements Transformer { - - private RenderContext rc; - private Layer layer; - - public LayerTransformer(RenderContext rc, Layer layer) { - this.rc = rc; - this.layer = layer; - } - - public Point2D transform(Point2D i) { - return rc.getMultiLayerTransformer().transform(layer, i); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/LayoutContext.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/LayoutContext.java deleted file mode 100644 index ee013d4f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/LayoutContext.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package edu.uci.ics.jung.contrib.visualization; - -/** - * - * @author alemer - */ -public class LayoutContext { - public L layout; - public E element; - - public static LayoutContext getInstance(L l, E e) { - LayoutContext context = new LayoutContext(); - context.layout = l; - context.element = e; - return context; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/PluggableBangBoxGraphRenderContext.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/PluggableBangBoxGraphRenderContext.java deleted file mode 100644 index 7dc07c0c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/PluggableBangBoxGraphRenderContext.java +++ /dev/null @@ -1,700 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
- */ -package edu.uci.ics.jung.contrib.visualization; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.contrib.graph.BangBoxGraph; -import edu.uci.ics.jung.contrib.visualization.renderers.BangBoxLabelRenderer; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.DefaultParallelEdgeIndexFunction; -import edu.uci.ics.jung.graph.util.EdgeIndexFunction; -import edu.uci.ics.jung.graph.util.IncidentEdgeIndexFunction; -import edu.uci.ics.jung.visualization.BasicTransformer; -import edu.uci.ics.jung.visualization.MultiLayerTransformer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.decorators.ConstantDirectionalEdgeValueTransformer; -import edu.uci.ics.jung.visualization.decorators.DirectionalEdgeArrowTransformer; -import edu.uci.ics.jung.visualization.decorators.EdgeShape; -import edu.uci.ics.jung.visualization.picking.PickedState; -import edu.uci.ics.jung.visualization.renderers.DefaultEdgeLabelRenderer; -import edu.uci.ics.jung.visualization.renderers.DefaultVertexLabelRenderer; -import edu.uci.ics.jung.visualization.renderers.EdgeLabelRenderer; -import edu.uci.ics.jung.visualization.renderers.VertexLabelRenderer; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; -import java.awt.BasicStroke; -import java.awt.Color; -import java.awt.Font; -import java.awt.Paint; -import java.awt.Shape; -import java.awt.Stroke; -import java.awt.geom.Ellipse2D; -import javax.swing.CellRendererPane; -import javax.swing.Icon; -import javax.swing.JComponent; -import org.apache.commons.collections15.Predicate; -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; -import org.apache.commons.collections15.functors.TruePredicate; - -/** - * - * @author alemer - */ -@SuppressWarnings("unchecked") -public class PluggableBangBoxGraphRenderContext - implements BangBoxGraphRenderContext { - - /** - * pluggable support for picking graph elements by - * finding them based on their coordinates. 
- */ - protected BangBoxGraphElementAccessor pickSupport; - protected PickedState pickedBangBoxState; - protected Transformer bangBoxStrokeTransformer = new ConstantTransformer(new BasicStroke(1)); - protected Predicate, B>> bangBoxIncludePredicate = TruePredicate., B>>getInstance(); - protected Transformer, B>, Shape> bangBoxShapeTransformer = new DefaultBangBoxShapeTransformer(this, 5.0); - protected Transformer bangBoxFillPaintTransformer = new ConstantTransformer(Color.lightGray); - protected Transformer bangBoxDrawPaintTransformer = new ConstantTransformer(Color.gray); - protected Transformer bangBoxLabelTransformer = new ConstantTransformer(null); - protected BangBoxLabelRenderer bangBoxLabelRenderer = new DefaultBangBoxLabelRenderer(); - // code copied from PluggableGraphRenderContext, since we can't usefully - // subclass it, because they didn't make the constructor protected - protected PluggableBangBoxGraphRenderContext() { - this.setEdgeShapeTransformer(new EdgeShape.QuadCurve()); - } - - public PickedState getPickedBangBoxState() { - return pickedBangBoxState; - } - - public void setPickedBangBoxState(PickedState pickedBangBoxState) { - this.pickedBangBoxState = pickedBangBoxState; - } - - public Transformer getBangBoxStrokeTransformer() { - return bangBoxStrokeTransformer; - } - - public void setBangBoxStrokeTransformer(Transformer bangBoxStrokeTransformer) { - this.bangBoxStrokeTransformer = bangBoxStrokeTransformer; - } - - public Predicate, B>> getBangBoxIncludePredicate() { - return bangBoxIncludePredicate; - } - - public void setBangBoxIncludePredicate(Predicate, B>> bangBoxIncludePredicate) { - this.bangBoxIncludePredicate = bangBoxIncludePredicate; - } - - public Transformer, B>, Shape> getBangBoxShapeTransformer() { - return bangBoxShapeTransformer; - } - - public void setBangBoxShapeTransformer(Transformer, B>, Shape> bangBoxShapeTransformer) { - this.bangBoxShapeTransformer = bangBoxShapeTransformer; - } - - public Transformer getBangBoxFillPaintTransformer() { - return bangBoxFillPaintTransformer; - } - - public void setBangBoxFillPaintTransformer(Transformer bangBoxFillPaintTransformer) { - this.bangBoxFillPaintTransformer = bangBoxFillPaintTransformer; - } - - public Transformer getBangBoxDrawPaintTransformer() { - return bangBoxDrawPaintTransformer; - } - - public void setBangBoxDrawPaintTransformer(Transformer bangBoxDrawPaintTransformer) { - this.bangBoxDrawPaintTransformer = bangBoxDrawPaintTransformer; - } - - /** - * @return the pickSupport - */ - public BangBoxGraphElementAccessor getPickSupport() { - return pickSupport; - } - - /** - * @param pickSupport the pickSupport to set - */ - public void setPickSupport(GraphElementAccessor pickSupport) { - if (pickSupport instanceof BangBoxGraphElementAccessor) { - this.pickSupport = (BangBoxGraphElementAccessor)pickSupport; - } else { - throw new IllegalArgumentException("pickSupport must be BangBoxGraphElementAccessor"); - } - } - public void setPickSupport(BangBoxGraphElementAccessor pickSupport) { - this.pickSupport = pickSupport; - } - - // everything below copied from PluggableGraphRenderContext - - protected float arrowPlacementTolerance = 1; - protected Predicate, V>> vertexIncludePredicate = TruePredicate.getInstance(); - protected Transformer vertexStrokeTransformer = - new ConstantTransformer(new BasicStroke(1.0f)); - protected Transformer vertexShapeTransformer = - new ConstantTransformer( - new Ellipse2D.Float(-10, -10, 20, 20)); - protected Transformer vertexLabelTransformer = new 
ConstantTransformer(null); - protected Transformer vertexIconTransformer; - protected Transformer vertexFontTransformer = - new ConstantTransformer(new Font("Helvetica", Font.PLAIN, 12)); - protected Transformer vertexDrawPaintTransformer = new ConstantTransformer(Color.BLACK); - protected Transformer vertexFillPaintTransformer = new ConstantTransformer(Color.RED); - protected Transformer edgeLabelTransformer = new ConstantTransformer(null); - protected Transformer edgeStrokeTransformer = new ConstantTransformer(new BasicStroke(1.0f)); - protected Transformer edgeArrowStrokeTransformer = new ConstantTransformer(new BasicStroke(1.0f)); - protected Transformer, E>, Shape> edgeArrowTransformer = - new DirectionalEdgeArrowTransformer(10, 8, 4); - protected Predicate, E>> edgeArrowPredicate = new DirectedEdgeArrowPredicate(); - protected Predicate, E>> edgeIncludePredicate = TruePredicate.getInstance(); - protected Transformer edgeFontTransformer = - new ConstantTransformer(new Font("Helvetica", Font.PLAIN, 12)); - protected Transformer, E>, Number> edgeLabelClosenessTransformer = - new ConstantDirectionalEdgeValueTransformer(0.5, 0.65); - protected Transformer, E>, Shape> edgeShapeTransformer; - protected Transformer edgeFillPaintTransformer = - new ConstantTransformer(null); - protected Transformer edgeDrawPaintTransformer = - new ConstantTransformer(Color.black); - protected Transformer arrowFillPaintTransformer = - new ConstantTransformer(Color.black); - protected Transformer arrowDrawPaintTransformer = - new ConstantTransformer(Color.black); - protected EdgeIndexFunction parallelEdgeIndexFunction = - DefaultParallelEdgeIndexFunction.getInstance(); - protected EdgeIndexFunction incidentEdgeIndexFunction = - IncidentEdgeIndexFunction.getInstance(); - protected MultiLayerTransformer multiLayerTransformer = new BasicTransformer(); - protected int labelOffset = LABEL_OFFSET; - /** - * the JComponent that this Renderer will display the graph on - */ - protected JComponent screenDevice; - protected PickedState pickedVertexState; - protected PickedState pickedEdgeState; - /** - * The CellRendererPane is used here just as it is in JTree - * and JTable, to allow a pluggable JLabel-based renderer for - * Vertex and Edge label strings and icons. 
- */ - protected CellRendererPane rendererPane = new CellRendererPane(); - /** - * A default GraphLabelRenderer - picked Vertex labels are - * blue, picked edge labels are cyan - */ - protected VertexLabelRenderer vertexLabelRenderer = - new DefaultVertexLabelRenderer(Color.blue); - protected EdgeLabelRenderer edgeLabelRenderer = new DefaultEdgeLabelRenderer(Color.cyan); - protected GraphicsDecorator graphicsContext; - - /** - * @return the vertexShapeTransformer - */ - public Transformer getVertexShapeTransformer() { - return vertexShapeTransformer; - } - - /** - * @param vertexShapeTransformer the vertexShapeTransformer to set - */ - public void setVertexShapeTransformer( - Transformer vertexShapeTransformer) { - this.vertexShapeTransformer = vertexShapeTransformer; - } - - /** - * @return the vertexStrokeTransformer - */ - public Transformer getVertexStrokeTransformer() { - return vertexStrokeTransformer; - } - - /** - * @param vertexStrokeTransformer the vertexStrokeTransformer to set - */ - public void setVertexStrokeTransformer( - Transformer vertexStrokeTransformer) { - this.vertexStrokeTransformer = vertexStrokeTransformer; - } - - public static float[] getDashing() { - return dashing; - } - - public static float[] getDotting() { - return dotting; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getArrow_placement_tolerance() - */ - public float getArrowPlacementTolerance() { - return arrowPlacementTolerance; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setArrow_placement_tolerance(float) - */ - public void setArrowPlacementTolerance(float arrow_placement_tolerance) { - this.arrowPlacementTolerance = arrow_placement_tolerance; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeArrowTransformer() - */ - public Transformer, E>, Shape> getEdgeArrowTransformer() { - return edgeArrowTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeArrowTransformer(edu.uci.ics.jung.visualization.decorators.EdgeArrowTransformer) - */ - public void setEdgeArrowTransformer(Transformer, E>, Shape> edgeArrowTransformer) { - this.edgeArrowTransformer = edgeArrowTransformer; - } - - /** - * @see RenderContext#getEdgeArrowPredicate() - */ - public Predicate, E>> getEdgeArrowPredicate() { - return edgeArrowPredicate; - } - - /** - * @see RenderContext#setEdgeArrowPredicate(Predicate) - */ - public void setEdgeArrowPredicate(Predicate, E>> edgeArrowPredicate) { - this.edgeArrowPredicate = edgeArrowPredicate; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeFontTransformer() - */ - public Transformer getEdgeFontTransformer() { - return edgeFontTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeFontTransformer(edu.uci.ics.jung.visualization.decorators.EdgeFontTransformer) - */ - public void setEdgeFontTransformer(Transformer edgeFontTransformer) { - this.edgeFontTransformer = edgeFontTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeIncludePredicate() - */ - public Predicate, E>> getEdgeIncludePredicate() { - return edgeIncludePredicate; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeIncludePredicate(org.apache.commons.collections15.Predicate) - */ - public void setEdgeIncludePredicate(Predicate, E>> edgeIncludePredicate) { - this.edgeIncludePredicate = edgeIncludePredicate; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeLabelClosenessTransformer() - */ - public Transformer, 
E>, Number> getEdgeLabelClosenessTransformer() { - return edgeLabelClosenessTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeLabelClosenessTransformer(edu.uci.ics.jung.visualization.decorators.NumberDirectionalEdgeValue) - */ - public void setEdgeLabelClosenessTransformer( - Transformer, E>, Number> edgeLabelClosenessTransformer) { - this.edgeLabelClosenessTransformer = edgeLabelClosenessTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeLabelRenderer() - */ - public EdgeLabelRenderer getEdgeLabelRenderer() { - return edgeLabelRenderer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeLabelRenderer(edu.uci.ics.jung.visualization.EdgeLabelRenderer) - */ - public void setEdgeLabelRenderer(EdgeLabelRenderer edgeLabelRenderer) { - this.edgeLabelRenderer = edgeLabelRenderer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgePaintTransformer() - */ - public Transformer getEdgeFillPaintTransformer() { - return edgeFillPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgePaintTransformer(edu.uci.ics.jung.visualization.decorators.EdgePaintTransformer) - */ - public void setEdgeDrawPaintTransformer(Transformer edgeDrawPaintTransformer) { - this.edgeDrawPaintTransformer = edgeDrawPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgePaintTransformer() - */ - public Transformer getEdgeDrawPaintTransformer() { - return edgeDrawPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgePaintTransformer(edu.uci.ics.jung.visualization.decorators.EdgePaintTransformer) - */ - public void setEdgeFillPaintTransformer(Transformer edgeFillPaintTransformer) { - this.edgeFillPaintTransformer = edgeFillPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeShapeTransformer() - */ - public Transformer, E>, Shape> getEdgeShapeTransformer() { - return edgeShapeTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeShapeTransformer(edu.uci.ics.jung.visualization.decorators.EdgeShapeTransformer) - */ - public void setEdgeShapeTransformer(Transformer, E>, Shape> edgeShapeTransformer) { - this.edgeShapeTransformer = edgeShapeTransformer; - if (edgeShapeTransformer instanceof EdgeShape.Orthogonal) { - ((EdgeShape.IndexedRendering) edgeShapeTransformer).setEdgeIndexFunction(this.incidentEdgeIndexFunction); - } - else if (edgeShapeTransformer instanceof EdgeShape.IndexedRendering) { - ((EdgeShape.IndexedRendering) edgeShapeTransformer).setEdgeIndexFunction(this.parallelEdgeIndexFunction); - } - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeLabelTransformer() - */ - public Transformer getEdgeLabelTransformer() { - return edgeLabelTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeLabelTransformer(edu.uci.ics.jung.visualization.decorators.EdgeLabelTransformer) - */ - public void setEdgeLabelTransformer(Transformer edgeLabelTransformer) { - this.edgeLabelTransformer = edgeLabelTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeStrokeTransformer() - */ - public Transformer getEdgeStrokeTransformer() { - return edgeStrokeTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeStrokeTransformer(edu.uci.ics.jung.visualization.decorators.EdgeStrokeTransformer) - */ - public void setEdgeStrokeTransformer(Transformer 
edgeStrokeTransformer) { - this.edgeStrokeTransformer = edgeStrokeTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeStrokeTransformer() - */ - public Transformer getEdgeArrowStrokeTransformer() { - return edgeArrowStrokeTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeStrokeTransformer(edu.uci.ics.jung.visualization.decorators.EdgeStrokeTransformer) - */ - public void setEdgeArrowStrokeTransformer(Transformer edgeArrowStrokeTransformer) { - this.edgeArrowStrokeTransformer = edgeArrowStrokeTransformer; - } - - /** - * @see RenderContext#getGraphicsContext() - */ - public GraphicsDecorator getGraphicsContext() { - return graphicsContext; - } - - /** - * @see RenderContext#setGraphicsContext(GraphicsDecorator) - */ - public void setGraphicsContext(GraphicsDecorator graphicsContext) { - this.graphicsContext = graphicsContext; - } - - /** - * @see RenderContext#getLabelOffset() - */ - public int getLabelOffset() { - return labelOffset; - } - - /** - * @see RenderContext#setLabelOffset(int) - */ - public void setLabelOffset(int labelOffset) { - this.labelOffset = labelOffset; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getParallelEdgeIndexTransformer() - */ - public EdgeIndexFunction getParallelEdgeIndexFunction() { - return parallelEdgeIndexFunction; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setParallelEdgeIndexFunction(edu.uci.ics.graph.util.ParallelEdgeIndexFunction) - */ - public void setParallelEdgeIndexFunction( - EdgeIndexFunction parallelEdgeIndexFunction) { - this.parallelEdgeIndexFunction = parallelEdgeIndexFunction; - // reset the edge shape transformer, as the parallel edge index function - // is used by it - this.setEdgeShapeTransformer(getEdgeShapeTransformer()); - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getPickedEdgeState() - */ - public PickedState getPickedEdgeState() { - return pickedEdgeState; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setPickedEdgeState(edu.uci.ics.jung.visualization.picking.PickedState) - */ - public void setPickedEdgeState(PickedState pickedEdgeState) { - this.pickedEdgeState = pickedEdgeState; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getPickedVertexState() - */ - public PickedState getPickedVertexState() { - return pickedVertexState; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setPickedVertexState(edu.uci.ics.jung.visualization.picking.PickedState) - */ - public void setPickedVertexState(PickedState pickedVertexState) { - this.pickedVertexState = pickedVertexState; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getRendererPane() - */ - public CellRendererPane getRendererPane() { - return rendererPane; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setRendererPane(javax.swing.CellRendererPane) - */ - public void setRendererPane(CellRendererPane rendererPane) { - this.rendererPane = rendererPane; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getScreenDevice() - */ - public JComponent getScreenDevice() { - return screenDevice; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setScreenDevice(edu.uci.ics.jung.visualization.VisualizationViewer) - */ - public void setScreenDevice(JComponent screenDevice) { - this.screenDevice = screenDevice; - screenDevice.add(rendererPane); - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexFontTransformer() - */ - public 
Transformer getVertexFontTransformer() { - return vertexFontTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexFontTransformer(edu.uci.ics.jung.visualization.decorators.VertexFontTransformer) - */ - public void setVertexFontTransformer(Transformer vertexFontTransformer) { - this.vertexFontTransformer = vertexFontTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexIconTransformer() - */ - public Transformer getVertexIconTransformer() { - return vertexIconTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexIconTransformer(edu.uci.ics.jung.visualization.decorators.VertexIconTransformer) - */ - public void setVertexIconTransformer(Transformer vertexIconTransformer) { - this.vertexIconTransformer = vertexIconTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexIncludePredicate() - */ - public Predicate, V>> getVertexIncludePredicate() { - return vertexIncludePredicate; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexIncludePredicate(org.apache.commons.collections15.Predicate) - */ - public void setVertexIncludePredicate(Predicate, V>> vertexIncludePredicate) { - this.vertexIncludePredicate = vertexIncludePredicate; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexLabelRenderer() - */ - public VertexLabelRenderer getVertexLabelRenderer() { - return vertexLabelRenderer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexLabelRenderer(edu.uci.ics.jung.visualization.VertexLabelRenderer) - */ - public void setVertexLabelRenderer(VertexLabelRenderer vertexLabelRenderer) { - this.vertexLabelRenderer = vertexLabelRenderer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexPaintTransformer() - */ - public Transformer getVertexFillPaintTransformer() { - return vertexFillPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexPaintTransformer(edu.uci.ics.jung.visualization.decorators.VertexPaintTransformer) - */ - public void setVertexFillPaintTransformer(Transformer vertexFillPaintTransformer) { - this.vertexFillPaintTransformer = vertexFillPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexPaintTransformer() - */ - public Transformer getVertexDrawPaintTransformer() { - return vertexDrawPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexPaintTransformer(edu.uci.ics.jung.visualization.decorators.VertexPaintTransformer) - */ - public void setVertexDrawPaintTransformer(Transformer vertexDrawPaintTransformer) { - this.vertexDrawPaintTransformer = vertexDrawPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexLabelTransformer() - */ - public Transformer getVertexLabelTransformer() { - return vertexLabelTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexLabelTransformer(edu.uci.ics.jung.visualization.decorators.VertexLabelTransformer) - */ - public void setVertexLabelTransformer(Transformer vertexLabelTransformer) { - this.vertexLabelTransformer = vertexLabelTransformer; - } - - /** - * @return the basicTransformer - */ - public MultiLayerTransformer getMultiLayerTransformer() { - return multiLayerTransformer; - } - - /** - * @param basicTransformer the basicTransformer to set - */ - public void setMultiLayerTransformer(MultiLayerTransformer basicTransformer) { - 
this.multiLayerTransformer = basicTransformer; - } - - /** - * @see RenderContext#getArrowDrawPaintTransformer() - */ - public Transformer getArrowDrawPaintTransformer() { - return arrowDrawPaintTransformer; - } - - /** - * @see RenderContext#getArrowFillPaintTransformer() - */ - public Transformer getArrowFillPaintTransformer() { - return arrowFillPaintTransformer; - } - - /** - * @see RenderContext#setArrowDrawPaintTransformer(Transformer) - */ - public void setArrowDrawPaintTransformer(Transformer arrowDrawPaintTransformer) { - this.arrowDrawPaintTransformer = arrowDrawPaintTransformer; - - } - - /** - * @see RenderContext#setArrowFillPaintTransformer(Transformer) - */ - public void setArrowFillPaintTransformer(Transformer arrowFillPaintTransformer) { - this.arrowFillPaintTransformer = arrowFillPaintTransformer; - - } - /* Never Used */ - public Transformer getBangBoxLabelTransformer() { - return this.bangBoxLabelTransformer; - } - /* Never Used */ - public void setBangBoxLabelTransformer( - Transformer bangBoxLabelTransformer) { - this.bangBoxLabelTransformer = bangBoxLabelTransformer; - } - - public BangBoxLabelRenderer getBangBoxLabelRenderer() { - return this.bangBoxLabelRenderer; - } - - public void setBangBoxLabelRenderer( - BangBoxLabelRenderer bangBoxLabelRenderer) { - this.bangBoxLabelRenderer = bangBoxLabelRenderer; - } - - /* Never used */ - public Transformer getBangBoxFontTransformer() { - return null; - } - - /* Never Used */ - public void setBangBoxFontTransformer( - Transformer bangBoxFontTransformer) { - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/ShapeBangBoxPickSupport.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/ShapeBangBoxPickSupport.java deleted file mode 100644 index 4aa212db..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/ShapeBangBoxPickSupport.java +++ /dev/null @@ -1,139 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
- */ -package edu.uci.ics.jung.contrib.visualization; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.contrib.graph.BangBoxGraph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.picking.ShapePickSupport; -import java.awt.Shape; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.Collection; -import java.util.ConcurrentModificationException; -import java.util.LinkedHashSet; -import org.apache.commons.collections15.Predicate; -import org.apache.commons.collections15.functors.ChainedTransformer; -import org.apache.commons.collections15.functors.TruePredicate; - -/** - * - * @author alemer - */ -public class ShapeBangBoxPickSupport - extends ShapePickSupport - implements BangBoxGraphElementAccessor { - - protected BangBoxGraphVisualizationServer bvv; - - public ShapeBangBoxPickSupport(BangBoxGraphVisualizationServer vv) { - super(vv); - this.bvv = vv; - } - - public ShapeBangBoxPickSupport(BangBoxGraphVisualizationServer vv, float pickSize) { - super(vv, pickSize); - this.bvv = vv; - } - - public B getBangBox(Layout layout, double x, double y) { - if (!(layout.getGraph() instanceof BangBoxGraph)) { - return null; - } - @SuppressWarnings("unchecked") - BangBoxGraph graph = (BangBoxGraph) layout.getGraph(); - - B closest = null; - double minDistance = Double.MAX_VALUE; - Point2D ip = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.VIEW, - new Point2D.Double(x, y)); - x = ip.getX(); - y = ip.getY(); - - while (true) { - try { - for (B b : getFilteredBangBoxes(graph)) { - - Shape shape = bvv.getRenderContext().getBangBoxShapeTransformer().transform( - LayoutContext.,B>getInstance(layout, b)); - - - if (shape.contains(x, y)) { - - if (style == Style.LOWEST) { - // return the first match - return b; - } - else if (style == Style.HIGHEST) { - // will return the last match - closest = b; - } - else { - - // return the !-box closest to the - // center of a !-box shape - Rectangle2D bounds = shape.getBounds2D(); - double dx = bounds.getCenterX() - x; - double dy = bounds.getCenterY() - y; - double dist = dx * dx + dy * dy; - if (dist < minDistance) { - minDistance = dist; - closest = b; - } - } - } - } - break; - } - catch (ConcurrentModificationException cme) { - } - } - return closest; - - } - - protected Collection getFilteredBangBoxes(BangBoxGraph graph) { - if (bangBoxesAreFiltered()) { - Collection unfiltered = graph.getBangBoxes(); - Collection filtered = new LinkedHashSet(); - for (B b : unfiltered) { - if (isBangBoxRendered(Context., B>getInstance(graph, b))) { - filtered.add(b); - } - } - return filtered; - } - else { - return graph.getBangBoxes(); - } - } - - /** - * Quick test to allow optimization of getFilteredVertices(). - * @return true if there is an active vertex filtering - * mechanism for this visualization, false otherwise - */ - protected boolean bangBoxesAreFiltered() { - Predicate, B>> bangBoxIncludePredicate = - bvv.getRenderContext().getBangBoxIncludePredicate(); - return bangBoxIncludePredicate != null - && bangBoxIncludePredicate instanceof TruePredicate == false; - } - - /** - * Returns true if this !-box in this graph is included - * in the collections of elements to be rendered, and false otherwise. - * @param context the vertex and graph to be queried - * @return true if this !-box is - * included in the collections of elements to be rendered, false - * otherwise. 
- */ - protected boolean isBangBoxRendered(Context, B> context) { - Predicate, B>> vertexIncludePredicate = - bvv.getRenderContext().getBangBoxIncludePredicate(); - return vertexIncludePredicate == null || vertexIncludePredicate.evaluate(context); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/ViewZoomScrollPane.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/ViewZoomScrollPane.java deleted file mode 100644 index 876019c5..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/ViewZoomScrollPane.java +++ /dev/null @@ -1,331 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package edu.uci.ics.jung.contrib.visualization; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import java.awt.BorderLayout; -import java.awt.Dimension; -import java.awt.Rectangle; -import java.awt.event.AdjustmentEvent; -import java.awt.event.AdjustmentListener; -import java.awt.event.ComponentAdapter; -import java.awt.event.ComponentEvent; -import java.awt.geom.AffineTransform; -import java.awt.geom.Line2D; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.Set; - -import javax.swing.BoundedRangeModel; -import javax.swing.JComponent; -import javax.swing.JPanel; -import javax.swing.JScrollBar; -import javax.swing.event.ChangeEvent; -import javax.swing.event.ChangeListener; - -import edu.uci.ics.jung.visualization.transform.BidirectionalTransformer; -import edu.uci.ics.jung.visualization.transform.shape.Intersector; - - - -/** - * ViewZoomScrollPane is a Container for the Graph's VisualizationViewer - * and includes custom horizontal and vertical scrollbars. - * GraphZoomScrollPane listens for changes in the scale and - * translation of the VisualizationViewer, and will update the - * scrollbar positions and sizes accordingly. Changes in the - * scrollbar positions will cause the corresponding change in - * the translation component (offset) of the VisualizationViewer. - * The scrollbars are modified so that they will allow panning - * of the graph when the scale has been changed (e.g. zoomed-in - * or zoomed-out). - * - * Works just like GraphZoomScrollPane, but operates on the View layer, - * rather than the Layout layer. - * - * The lower-right corner of this component is available to - * use as a small button or menu. - * - * samples.graph.GraphZoomScrollPaneDemo shows the use of this component. 
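Aside: the ViewZoomScrollPane being removed here wraps a VisualizationViewer and keeps its scrollbars in sync with view-layer zoom and pan. The following is a hypothetical usage sketch, not part of the removed file: the class and package names come from the deleted sources above, while the two-vertex graph and the frame title are invented for illustration.

    import edu.uci.ics.jung.algorithms.layout.FRLayout;
    import edu.uci.ics.jung.contrib.visualization.ViewZoomScrollPane;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;
    import edu.uci.ics.jung.visualization.VisualizationViewer;
    import javax.swing.JFrame;

    public class ViewZoomScrollPaneDemo {
        public static void main(String[] args) {
            // Invented two-vertex graph, purely for illustration.
            DirectedSparseGraph<String, Integer> graph = new DirectedSparseGraph<String, Integer>();
            graph.addVertex("a");
            graph.addVertex("b");
            graph.addEdge(1, "a", "b");

            VisualizationViewer<String, Integer> vv =
                    new VisualizationViewer<String, Integer>(new FRLayout<String, Integer>(graph));

            // The pane listens for changes to the viewer's view-layer transform and
            // updates its scrollbars, so the graph stays pannable after zooming.
            JFrame frame = new JFrame("ViewZoomScrollPane demo");
            frame.getContentPane().add(new ViewZoomScrollPane(vv));
            frame.pack();
            frame.setVisible(true);
        }
    }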
- * - * @author Tom Nelson - * - * - */ -@SuppressWarnings("serial") -public class ViewZoomScrollPane extends JPanel { - protected VisualizationViewer vv; - protected JScrollBar horizontalScrollBar; - protected JScrollBar verticalScrollBar; - protected JComponent corner; - protected boolean scrollBarsMayControlAdjusting = true; - protected JPanel south; - - /** - * Create an instance of the ViewZoomScrollPane to contain the - * VisualizationViewer - * @param vv - */ - public ViewZoomScrollPane(VisualizationViewer vv) { - super(new BorderLayout()); - this.vv = vv; - addComponentListener(new ResizeListener()); - Dimension d = vv.getGraphLayout().getSize(); - verticalScrollBar = new JScrollBar(JScrollBar.VERTICAL, 0, d.height, 0, d.height); - horizontalScrollBar = new JScrollBar(JScrollBar.HORIZONTAL, 0, d.width, 0, d.width); - verticalScrollBar.addAdjustmentListener(new VerticalAdjustmentListenerImpl()); - horizontalScrollBar.addAdjustmentListener(new HorizontalAdjustmentListenerImpl()); - verticalScrollBar.setUnitIncrement(20); - horizontalScrollBar.setUnitIncrement(20); - // respond to changes in the VisualizationViewer's transform - // and set the scroll bar parameters appropriately - vv.addChangeListener( - new ChangeListener(){ - public void stateChanged(ChangeEvent evt) { - VisualizationViewer vv = - (VisualizationViewer)evt.getSource(); - setScrollBars(vv); - } - }); - add(vv); - add(verticalScrollBar, BorderLayout.EAST); - south = new JPanel(new BorderLayout()); - south.add(horizontalScrollBar); - setCorner(new JPanel()); - add(south, BorderLayout.SOUTH); - } - - /** - * listener for adjustment of the horizontal scroll bar. - * Sets the translation of the VisualizationViewer - */ - class HorizontalAdjustmentListenerImpl implements AdjustmentListener { - int previous = 0; - public void adjustmentValueChanged(AdjustmentEvent e) { - int hval = e.getValue(); - float dh = previous - hval; - previous = hval; - if(dh != 0 && scrollBarsMayControlAdjusting) { - // get the uniform scale of all transforms - float layoutScale = (float) vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW).getScale(); - dh *= layoutScale; - AffineTransform at = AffineTransform.getTranslateInstance(dh, 0); - vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW).preConcatenate(at); - } - } - } - - /** - * Listener for adjustment of the vertical scroll bar. - * Sets the translation of the VisualizationViewer - */ - class VerticalAdjustmentListenerImpl implements AdjustmentListener { - int previous = 0; - public void adjustmentValueChanged(AdjustmentEvent e) { - JScrollBar sb = (JScrollBar)e.getSource(); - BoundedRangeModel m = sb.getModel(); - int vval = m.getValue(); - float dv = previous - vval; - previous = vval; - if(dv != 0 && scrollBarsMayControlAdjusting) { - - // get the uniform scale of all transforms - float viewScale = (float) vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW).getScale(); - dv *= viewScale; - AffineTransform at = AffineTransform.getTranslateInstance(0, dv); - vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW).preConcatenate(at); - } - } - } - - /** - * use the supplied vv characteristics to set the position and - * dimensions of the scroll bars. 
Called in response to - * a ChangeEvent from the VisualizationViewer - * @param xform the transform of the VisualizationViewer - */ - private void setScrollBars(VisualizationViewer vv) { - Dimension d = vv.getGraphLayout().getSize(); - Rectangle2D vvBounds = vv.getBounds(); - - // a rectangle representing the layout - Rectangle layoutRectangle = - new Rectangle(0,0,d.width,d.height); - //-d.width/2, -d.height/2, 2*d.width, 2*d.height); - - BidirectionalTransformer viewTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW); - BidirectionalTransformer layoutTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - - Point2D h0 = new Point2D.Double(vvBounds.getMinX(), vvBounds.getCenterY()); - Point2D h1 = new Point2D.Double(vvBounds.getMaxX(), vvBounds.getCenterY()); - Point2D v0 = new Point2D.Double(vvBounds.getCenterX(), vvBounds.getMinY()); - Point2D v1 = new Point2D.Double(vvBounds.getCenterX(), vvBounds.getMaxY()); - - h0 = viewTransformer.inverseTransform(h0); - h0 = layoutTransformer.inverseTransform(h0); - h1 = viewTransformer.inverseTransform(h1); - h1 = layoutTransformer.inverseTransform(h1); - v0 = viewTransformer.inverseTransform(v0); - v0 = layoutTransformer.inverseTransform(v0); - v1 = viewTransformer.inverseTransform(v1); - v1 = layoutTransformer.inverseTransform(v1); - - scrollBarsMayControlAdjusting = false; - setScrollBarValues(layoutRectangle, h0, h1, v0, v1); - scrollBarsMayControlAdjusting = true; - } - - @SuppressWarnings("unchecked") - protected void setScrollBarValues(Rectangle rectangle, - Point2D h0, Point2D h1, - Point2D v0, Point2D v1) { - boolean containsH0 = rectangle.contains(h0); - boolean containsH1 = rectangle.contains(h1); - boolean containsV0 = rectangle.contains(v0); - boolean containsV1 = rectangle.contains(v1); - - // horizontal scrollbar: - - Intersector intersector = new Intersector(rectangle, new Line2D.Double(h0, h1)); - - int min = 0; - int ext; - int val = 0; - int max; - - Set points = intersector.getPoints(); - Point2D first = null; - Point2D second = null; - - Point2D[] pointArray = (Point2D[])points.toArray(new Point2D[points.size()]); - if(pointArray.length > 1) { - first = pointArray[0]; - second = pointArray[1]; - } else if(pointArray.length > 0) { - first = second = pointArray[0]; - } - - if(first != null && second != null) { - // correct direction of intersect points - if((h0.getX() - h1.getX()) * (first.getX() - second.getX()) < 0) { - // swap them - Point2D temp = first; - first = second; - second = temp; - } - - if(containsH0 && containsH1) { - max = (int)first.distance(second); - val = (int)first.distance(h0); - ext = (int)h0.distance(h1); - - } else if(containsH0) { - max = (int)first.distance(second); - val = (int)first.distance(h0); - ext = (int)h0.distance(second); - - } else if(containsH1) { - max = (int) first.distance(second); - val = 0; - ext = (int) first.distance(h1); - - } else { - max = ext = rectangle.width; - val = min; - } - horizontalScrollBar.setValues(val, ext+1, min, max); - } - - // vertical scroll bar - min = val = 0; - - intersector.intersectLine(new Line2D.Double(v0, v1)); - points = intersector.getPoints(); - - pointArray = (Point2D[])points.toArray(new Point2D[points.size()]); - if(pointArray.length > 1) { - first = pointArray[0]; - second = pointArray[1]; - } else if(pointArray.length > 0) { - first = second = pointArray[0]; - } - - if(first != null && second != null) { - - // arrange for direction - if((v0.getY() - v1.getY()) * 
(first.getY() - second.getY()) < 0) { - // swap them - Point2D temp = first; - first = second; - second = temp; - } - - if(containsV0 && containsV1) { - max = (int)first.distance(second); - val = (int)first.distance(v0); - ext = (int)v0.distance(v1); - - } else if(containsV0) { - max = (int)first.distance(second); - val = (int)first.distance(v0); - ext = (int)v0.distance(second); - - } else if(containsV1) { - max = (int) first.distance(second); - val = 0; - ext = (int) first.distance(v1); - - } else { - max = ext = rectangle.height; - val = min; - } - verticalScrollBar.setValues(val, ext+1, min, max); - } - } - - /** - * Listener to adjust the scroll bar parameters when the window - * is resized - */ - protected class ResizeListener extends ComponentAdapter { - - public void componentHidden(ComponentEvent e) { - } - - public void componentResized(ComponentEvent e) { - setScrollBars(vv); - } - public void componentShown(ComponentEvent e) { - } - } - - /** - * @return Returns the corner component. - */ - public JComponent getCorner() { - return corner; - } - - /** - * @param corner The cornerButton to set. - */ - public void setCorner(JComponent corner) { - this.corner = corner; - corner.setPreferredSize(new Dimension(verticalScrollBar.getPreferredSize().width, - horizontalScrollBar.getPreferredSize().height)); - south.add(this.corner, BorderLayout.EAST); - } - - public JScrollBar getHorizontalScrollBar() { - return horizontalScrollBar; - } - - public JScrollBar getVerticalScrollBar() { - return verticalScrollBar; - } -} - diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/AddEdgeGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/AddEdgeGraphMousePlugin.java deleted file mode 100644 index 40b3fa8b..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/AddEdgeGraphMousePlugin.java +++ /dev/null @@ -1,89 +0,0 @@ -package edu.uci.ics.jung.contrib.visualization.control; - -import java.awt.Color; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.event.MouseMotionListener; -import java.awt.geom.Ellipse2D; -import java.awt.geom.Line2D; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.visualization.VisualizationServer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.control.AbstractGraphMousePlugin; - -public class AddEdgeGraphMousePlugin extends AbstractGraphMousePlugin -implements MouseListener, MouseMotionListener { - protected VisualizationViewer vis; - protected Adder adder; - private Point2D current; - - public static interface Adder { - void addEdge(V s, V t); - } - - public AddEdgeGraphMousePlugin(VisualizationViewer vis, - Adder adder, int modifiers) { - super(modifiers); - this.vis = vis; - this.adder = adder; - vis.addPostRenderPaintable(new LinePaintable()); - } - public void mouseClicked(MouseEvent e) {} - public void mouseEntered(MouseEvent e) {} - public void mouseExited(MouseEvent e) {} - public void mouseMoved(MouseEvent e) {} - - public void mousePressed(MouseEvent e) { - if (checkModifiers(e)) down = e.getPoint(); - } - - private V vertexAt(Point2D p) { - GraphElementAccessor ps = vis.getPickSupport(); - if (p == null) return null; - if (ps == null) return null; - return ps.getVertex(vis.getGraphLayout(), p.getX(), p.getY()); - } - - public void mouseReleased(MouseEvent e) { - if 
(checkModifiers(e)) { - V s = vertexAt(down); - V t = vertexAt(e.getPoint()); - if (s!=null && t!=null) { - adder.addEdge(s, t); - } - } - down = null; - current = null; - vis.repaint(); - } - public void mouseDragged(MouseEvent e) { - if (checkModifiers(e)) { - current = e.getPoint(); - vis.repaint(); - } - } - - class LinePaintable implements VisualizationServer.Paintable { - public void paint(Graphics g) { - if(down != null && current != null) { - Color oldColor = g.getColor(); - g.setColor(Color.red); - ((Graphics2D)g).fill(new Ellipse2D.Double - (down.getX()-5,down.getY()-5,10,10)); - ((Graphics2D)g).draw(new Line2D.Double(down,current)); - ((Graphics2D)g).fill(new Ellipse2D.Double - (current.getX()-5,current.getY()-5,10,10)); - g.setColor(oldColor); - } - } - - public boolean useTransform() { - return false; - } - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/ConstrainedPickingBangBoxGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/ConstrainedPickingBangBoxGraphMousePlugin.java deleted file mode 100644 index 5ffee117..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/ConstrainedPickingBangBoxGraphMousePlugin.java +++ /dev/null @@ -1,214 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package edu.uci.ics.jung.contrib.visualization.control; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.contrib.visualization.BangBoxGraphVisualizationViewer; -import edu.uci.ics.jung.visualization.picking.PickedState; -import java.awt.Point; -import java.awt.event.MouseEvent; -import java.awt.geom.Point2D; -import java.util.Set; -import java.util.logging.Level; -import java.util.logging.Logger; - -/** - * - * @author alemer - */ -public class ConstrainedPickingBangBoxGraphMousePlugin - extends PickingBangBoxMousePlugin { - - private static final Logger logger = Logger.getLogger( - "edu.uci.ics.jung.contrib.visualization.control.ConstrainedPickingBangBoxGraphMousePlugin"); - - protected double leftConstraint = 0.0; - protected double topConstraint = 0.0; - protected double xDragBounce = 0.0; - protected double yDragBounce = 0.0; - protected ConstrainingAction constrainingAction = ConstrainingAction.StopMovement; - - /** - * What to do when a movement is inhibited because of the constraint - */ - public enum ConstrainingAction - { - /** - * Simply stops the movement from taking place - */ - StopMovement, - /** - * Moves the rest of the graph away - */ - MoveOthers - } - - public ConstrainedPickingBangBoxGraphMousePlugin() { - } - - /** - * Create a ConstrainedPickingGraphMousePlugin with a particular - * action to perform when movement is inhibited - * - * @param constrainingAction The action to perform when movement is - * inhibited by the constraints - */ - public ConstrainedPickingBangBoxGraphMousePlugin(ConstrainingAction constrainingAction) { - this.constrainingAction = constrainingAction; - } - - /** - * Create a ConstrainedPickingGraphMousePlugin with specific left and - * top constraints (by default they are both 0.0). - * - * This is useful to provide a padding box around vertices. 
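To illustrate how the padding-box constructors of the !-box plugin were meant to be wired up, here is a hypothetical sketch, not part of the removed file: the plugin, viewer, and enum names come from the deleted sources, the 20-pixel margins and the choice of MoveOthers are invented, and the type parameters are omitted because they are not recoverable from this diff.

    import edu.uci.ics.jung.contrib.visualization.BangBoxGraphVisualizationViewer;
    import edu.uci.ics.jung.contrib.visualization.control.ConstrainedPickingBangBoxGraphMousePlugin;
    import edu.uci.ics.jung.visualization.control.PluggableGraphMouse;

    final class ConstrainedBangBoxMouseSetup {
        // Installs picking that keeps dragged vertices at least 20px from the
        // left/top edge; instead of blocking the drag, the rest of the graph is
        // pushed away (ConstrainingAction.MoveOthers). The margins are invented.
        @SuppressWarnings({"rawtypes", "unchecked"})
        static void install(BangBoxGraphVisualizationViewer viewer) {
            PluggableGraphMouse graphMouse = new PluggableGraphMouse();
            graphMouse.add(new ConstrainedPickingBangBoxGraphMousePlugin(
                    ConstrainedPickingBangBoxGraphMousePlugin.ConstrainingAction.MoveOthers,
                    20.0, 20.0));
            viewer.setGraphMouse(graphMouse);
        }
    }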
- * - * @param leftConstraint The furthest left a vertex may be dragged - * @param topConstraint The highest a vertex may be dragged - */ - public ConstrainedPickingBangBoxGraphMousePlugin(double leftConstraint, - double topConstraint) { - this.leftConstraint = leftConstraint; - this.topConstraint = topConstraint; - } - - /** - * Create a ConstrainedPickingGraphMousePlugin with specific left and - * top constraints (by default they are both 0.0) and a particular - * action to perform when movement is inhibited - * - * @param constrainingAction The action to perform when movement is - * inhibited by the constraints - * @param leftConstraint The furthest left a vertex may be dragged - * @param topConstraint The highest a vertex may be dragged - */ - public ConstrainedPickingBangBoxGraphMousePlugin( - ConstrainingAction constrainingAction, - double leftConstraint, - double topConstraint) - { - this.constrainingAction = constrainingAction; - this.leftConstraint = leftConstraint; - this.topConstraint = topConstraint; - } - - private void moveNodes(BangBoxGraphVisualizationViewer vv, - double dx, double dy) - { - Layout layout = vv.getGraphLayout(); - double odx = 0.0; - double ody = 0.0; - logger.log(Level.FINEST, - "Asked to move nodes by ({0},{1})", - new Object[]{dx, dy}); - // if the mouse has moved without taking nodes - // with it, because of the constraints, let it - // move back to its starting point (relative to - // the nodes) before moving again. - if (dx > 0 && xDragBounce > 0) - { - double xfer = Math.min(dx, xDragBounce); - logger.log(Level.FINEST, - "We have an x bounce of {0}; unwinding by {1}", - new Object[]{xDragBounce, xfer}); - dx -= xfer; - xDragBounce -= xfer; - odx -= xfer; - } - if (dy > 0 && yDragBounce > 0) - { - double xfer = Math.min(dy, yDragBounce); - logger.log(Level.FINEST, - "We have a y bounce of {0}; unwinding by {1}", - new Object[]{yDragBounce, xfer}); - dy -= xfer; - yDragBounce -= xfer; - ody -= xfer; - } - PickedState ps = vv.getPickedVertexState(); - Set picked = ps.getPicked(); - - double farLeft = Double.MAX_VALUE; - double farTop = Double.MAX_VALUE; - if (dx < 0 || dy < 0) { - for (V v : picked) { - Point2D vp = layout.transform(v); - farLeft = Math.min(farLeft, vp.getX()); - farTop = Math.min(farTop, vp.getY()); - } - } - logger.log(Level.FINEST, - "Top left of selected nodes is ({0},{1}); constraints start at ({2},{3})", - new Object[]{farLeft, farTop,leftConstraint,topConstraint}); - // record how far we moved without taking nodes - // with us, so we can bounce back later - if (farLeft + dx < leftConstraint) { - double diff = leftConstraint - (farLeft + dx); - xDragBounce += diff; - dx += diff; - odx += diff; - } - if (farTop + dy < topConstraint) { - double diff = topConstraint - (farTop + dy); - yDragBounce += diff; - dy += diff; - ody += diff; - } - logger.log(Level.FINEST, - "Final adjustment is ({0},{1})", - new Object[]{dx, dy}); - if (constrainingAction == ConstrainingAction.StopMovement || - (odx == 0.0 && ody == 0.0)) - { - for (V v : ps.getPicked()) { - Point2D vp = layout.transform(v); - vp.setLocation(vp.getX() + dx, vp.getY() + dy); - layout.setLocation(v, vp); - } - } - else - { - for (V v : vv.getGraphLayout().getGraph().getVertices()) { - Point2D vp = layout.transform(v); - if (picked.contains(v)) - vp.setLocation(vp.getX() + dx, vp.getY() + dy); - else - vp.setLocation(vp.getX() + odx, vp.getY() + ody); - layout.setLocation(v, vp); - } - } - } - - @Override - @SuppressWarnings("unchecked") - public void mouseDragged(MouseEvent e) 
{ - if (locked == false) { - BangBoxGraphVisualizationViewer vv = (BangBoxGraphVisualizationViewer) e.getSource(); - if (vertex != null) { - Point p = e.getPoint(); - Point2D graphPoint = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(p); - Point2D graphDown = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(down); - double dx = graphPoint.getX() - graphDown.getX(); - double dy = graphPoint.getY() - graphDown.getY(); - moveNodes(vv, dx, dy); - down = p; - vv.revalidate(); - } - else if (down != null) { - Point2D out = e.getPoint(); - if (e.getModifiers() == this.addToSelectionModifiers - || e.getModifiers() == modifiers) { - rect.setFrameFromDiagonal(down, out); - } - } - if (vertex != null) { - e.consume(); - } - vv.repaint(); - } - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/ConstrainedPickingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/ConstrainedPickingGraphMousePlugin.java deleted file mode 100644 index 099ceffb..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/ConstrainedPickingGraphMousePlugin.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package edu.uci.ics.jung.contrib.visualization.control; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.control.PickingGraphMousePlugin; -import edu.uci.ics.jung.visualization.picking.PickedState; -import java.awt.Point; -import java.awt.event.MouseEvent; -import java.awt.geom.Point2D; -import java.util.Set; - -/** - * Constrains the left and top drag movements of the picking graph mouse - * plugin, to prevent nodes being dragged into negative co-ordinates. - * - * @author Alex Merry - */ -public class ConstrainedPickingGraphMousePlugin - extends PickingGraphMousePlugin -{ - protected double leftConstraint = 0.0; - protected double topConstraint = 0.0; - protected double xDragBounce = 0.0; - protected double yDragBounce = 0.0; - protected ConstrainingAction constrainingAction = ConstrainingAction.StopMovement; - - /** - * What to do when a movement is inhibited because of the constraint - */ - public enum ConstrainingAction - { - /** - * Simply stops the movement from taking place - */ - StopMovement, - /** - * Moves the rest of the graph away - */ - MoveOthers - } - - public ConstrainedPickingGraphMousePlugin() { - } - - /** - * Create a ConstrainedPickingGraphMousePlugin with a particular - * action to perform when movement is inhibited - * - * @param constrainingAction The action to perform when movement is - * inhibited by the constraints - */ - public ConstrainedPickingGraphMousePlugin(ConstrainingAction constrainingAction) { - this.constrainingAction = constrainingAction; - } - - /** - * Create a ConstrainedPickingGraphMousePlugin with specific left and - * top constraints (by default they are both 0.0). - * - * This is useful to provide a padding box around vertices. 
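As a rough illustration of how these constrained picking plugins were meant to be used, the sketch below installs the plain ConstrainedPickingGraphMousePlugin on a JUNG VisualizationViewer through a PluggableGraphMouse. The constructor arguments and the ConstrainingAction enum follow the deleted source above; the <String, Integer> type parameters are assumed (the generics were lost in this listing), and the sample graph, layout and frame scaffolding are invented for the example. PluggableGraphMouse, CircleLayout and DirectedSparseGraph are stock JUNG classes.

    import javax.swing.JFrame;

    import edu.uci.ics.jung.algorithms.layout.CircleLayout;
    import edu.uci.ics.jung.algorithms.layout.Layout;
    import edu.uci.ics.jung.contrib.visualization.control.ConstrainedPickingGraphMousePlugin;
    import edu.uci.ics.jung.graph.DirectedSparseGraph;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.visualization.VisualizationViewer;
    import edu.uci.ics.jung.visualization.control.PluggableGraphMouse;

    public class ConstrainedPickingDemo {
        public static void main(String[] args) {
            Graph<String, Integer> graph = new DirectedSparseGraph<String, Integer>();
            graph.addVertex("a");
            graph.addVertex("b");
            graph.addEdge(1, "a", "b");

            Layout<String, Integer> layout = new CircleLayout<String, Integer>(graph);
            VisualizationViewer<String, Integer> vv =
                    new VisualizationViewer<String, Integer>(layout);

            // keep vertices at least 20px away from the left and top edges;
            // when the constraint bites, push the rest of the graph away
            // (MoveOthers) instead of silently dropping the drag.
            // Type parameters are assumed; they do not appear in the listing above.
            ConstrainedPickingGraphMousePlugin<String, Integer> picking =
                    new ConstrainedPickingGraphMousePlugin<String, Integer>(
                            ConstrainedPickingGraphMousePlugin.ConstrainingAction.MoveOthers,
                            20.0, 20.0);

            PluggableGraphMouse mouse = new PluggableGraphMouse();
            mouse.add(picking);
            vv.setGraphMouse(mouse);

            JFrame frame = new JFrame("constrained picking demo");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.getContentPane().add(vv);
            frame.pack();
            frame.setVisible(true);
        }
    }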
- * - * @param leftConstraint The furthest left a vertex may be dragged - * @param topConstraint The highest a vertex may be dragged - */ - public ConstrainedPickingGraphMousePlugin(double leftConstraint, - double topConstraint) { - this.leftConstraint = leftConstraint; - this.topConstraint = topConstraint; - } - - /** - * Create a ConstrainedPickingGraphMousePlugin with specific left and - * top constraints (by default they are both 0.0) and a particular - * action to perform when movement is inhibited - * - * @param constrainingAction The action to perform when movement is - * inhibited by the constraints - * @param leftConstraint The furthest left a vertex may be dragged - * @param topConstraint The highest a vertex may be dragged - */ - public ConstrainedPickingGraphMousePlugin( - ConstrainingAction constrainingAction, - double leftConstraint, - double topConstraint) - { - this.constrainingAction = constrainingAction; - this.leftConstraint = leftConstraint; - this.topConstraint = topConstraint; - } - - private void moveNodes(VisualizationViewer vv, - double dx, double dy) - { - Layout layout = vv.getGraphLayout(); - double odx = 0.0; - double ody = 0.0; - // if the mouse has moved without taking nodes - // with it, because of the constraints, let it - // move back to its starting point (relative to - // the nodes) before moving again. - if (dx > 0 && xDragBounce < 0) - { - double xfer = Math.min(dx, -xDragBounce); - dx -= xfer; - xDragBounce += xfer; - odx -= xfer; - } - if (dy > 0 && yDragBounce < 0) - { - double xfer = Math.min(dy, -yDragBounce); - dy -= xfer; - yDragBounce += xfer; - ody -= xfer; - } - PickedState ps = vv.getPickedVertexState(); - Set picked = ps.getPicked(); - - double farLeft = Double.MAX_VALUE; - double farTop = Double.MAX_VALUE; - if (dx < 0 || dy < 0) { - for (V v : picked) { - Point2D vp = layout.transform(v); - farLeft = Math.min(farLeft, vp.getX()); - farTop = Math.min(farTop, vp.getY()); - } - } - // record how far we moved without taking nodes - // with us, so we can bounce back later - if (farLeft + dx < leftConstraint) { - double diff = leftConstraint - (farLeft + dx); - xDragBounce -= diff; - dx += diff; - odx += diff; - } - if (farTop + dy < topConstraint) { - double diff = topConstraint - (farTop + dy); - yDragBounce -= diff; - dy += diff; - ody += diff; - } - if (constrainingAction == ConstrainingAction.StopMovement || - (odx == 0.0 && ody == 0.0)) - { - for (V v : ps.getPicked()) { - Point2D vp = layout.transform(v); - vp.setLocation(vp.getX() + dx, vp.getY() + dy); - layout.setLocation(v, vp); - } - } - else - { - for (V v : vv.getGraphLayout().getGraph().getVertices()) { - Point2D vp = layout.transform(v); - if (picked.contains(v)) - vp.setLocation(vp.getX() + dx, vp.getY() + dy); - else - vp.setLocation(vp.getX() + odx, vp.getY() + ody); - layout.setLocation(v, vp); - } - } - } - - @Override - @SuppressWarnings("unchecked") - public void mouseDragged(MouseEvent e) { - if (locked == false) { - VisualizationViewer vv = (VisualizationViewer) e.getSource(); - if (vertex != null) { - Point p = e.getPoint(); - Point2D graphPoint = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(p); - Point2D graphDown = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(down); - double dx = graphPoint.getX() - graphDown.getX(); - double dy = graphPoint.getY() - graphDown.getY(); - moveNodes(vv, dx, dy); - down = p; - vv.revalidate(); - } - else { - Point2D out = e.getPoint(); - if (e.getModifiers() == this.addToSelectionModifiers - || 
e.getModifiers() == modifiers) { - rect.setFrameFromDiagonal(down, out); - } - } - if (vertex != null) { - e.consume(); - } - vv.repaint(); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/PickingBangBoxMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/PickingBangBoxMousePlugin.java deleted file mode 100644 index 1d43d0cb..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/PickingBangBoxMousePlugin.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package edu.uci.ics.jung.contrib.visualization.control; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.contrib.visualization.BangBoxGraphElementAccessor; -import edu.uci.ics.jung.contrib.visualization.BangBoxGraphVisualizationViewer; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.control.PickingGraphMousePlugin; -import edu.uci.ics.jung.visualization.picking.PickedState; -import java.awt.event.MouseEvent; -import java.awt.geom.Point2D; - -/** - * !-box aware version of PickingGraphMousePlugin - * - * @author alemer - */ -public class PickingBangBoxMousePlugin - extends PickingGraphMousePlugin { - - B bangBox = null; - - @SuppressWarnings("unchecked") - @Override - public void mousePressed(MouseEvent e) { - down = e.getPoint(); - BangBoxGraphVisualizationViewer vv = (BangBoxGraphVisualizationViewer) e.getSource(); - BangBoxGraphElementAccessor pickSupport = vv.getPickSupport(); - PickedState pickedVertexState = vv.getPickedVertexState(); - PickedState pickedEdgeState = vv.getPickedEdgeState(); - PickedState pickedBangBoxState = vv.getPickedBangBoxState(); - if (pickSupport != null && pickedVertexState != null) { - Layout layout = vv.getGraphLayout(); - if (e.getModifiers() == modifiers) { - rect.setFrameFromDiagonal(down, down); - // p is the screen point for the mouse event - Point2D ip = e.getPoint(); - - vertex = pickSupport.getVertex(layout, ip.getX(), ip.getY()); - if (vertex != null) { - if (pickedVertexState.isPicked(vertex) == false) { - pickedVertexState.clear(); - pickedEdgeState.clear(); - pickedBangBoxState.clear(); - pickedVertexState.pick(vertex, true); - } - // layout.getLocation applies the layout transformer so - // q is transformed by the layout transformer only - Point2D q = layout.transform(vertex); - // transform the mouse point to graph coordinate system - Point2D gp = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.LAYOUT, ip); - - offsetx = (float) (gp.getX() - q.getX()); - offsety = (float) (gp.getY() - q.getY()); - } - else if ((edge = pickSupport.getEdge(layout, ip.getX(), ip.getY())) != null) { - pickedVertexState.clear(); - pickedEdgeState.clear(); - pickedBangBoxState.clear(); - pickedEdgeState.pick(edge, true); - } - else if ((bangBox = pickSupport.getBangBox(layout, ip.getX(), ip.getY())) != null) { - pickedVertexState.clear(); - pickedEdgeState.clear(); - pickedBangBoxState.clear(); - pickedBangBoxState.pick(bangBox, true); - } - else { - vv.addPostRenderPaintable(lensPaintable); - pickedVertexState.clear(); - pickedEdgeState.clear(); - pickedBangBoxState.clear(); - } - - } - else if (e.getModifiers() == addToSelectionModifiers) { - vv.addPostRenderPaintable(lensPaintable); - rect.setFrameFromDiagonal(down, down); - Point2D ip = e.getPoint(); - vertex = pickSupport.getVertex(layout, ip.getX(), ip.getY()); - if (vertex != null) { - boolean wasThere 
= pickedVertexState.pick(vertex, !pickedVertexState.isPicked(vertex)); - if (wasThere) { - vertex = null; - } - else { - - // layout.getLocation applies the layout transformer so - // q is transformed by the layout transformer only - Point2D q = layout.transform(vertex); - // translate mouse point to graph coord system - Point2D gp = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.LAYOUT, ip); - - offsetx = (float) (gp.getX() - q.getX()); - offsety = (float) (gp.getY() - q.getY()); - } - } - else if ((edge = pickSupport.getEdge(layout, ip.getX(), ip.getY())) != null) { - pickedEdgeState.pick(edge, !pickedEdgeState.isPicked(edge)); - } - else if ((bangBox = pickSupport.getBangBox(layout, ip.getX(), ip.getY())) != null) { - pickedBangBoxState.pick(bangBox, !pickedBangBoxState.isPicked(bangBox)); - } - } - } - if (vertex != null) { - e.consume(); - } - } - - @SuppressWarnings("unchecked") - @Override - public void mouseReleased(MouseEvent e) { - super.mouseReleased(e); - bangBox = null; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/ViewScrollingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/ViewScrollingGraphMousePlugin.java deleted file mode 100644 index 385327b7..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/control/ViewScrollingGraphMousePlugin.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package edu.uci.ics.jung.contrib.visualization.control; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.control.AbstractGraphMousePlugin; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; -import java.awt.event.MouseEvent; -import java.awt.event.MouseWheelEvent; -import java.awt.event.MouseWheelListener; - -/** - * - * @author alex - */ -public class ViewScrollingGraphMousePlugin extends AbstractGraphMousePlugin - implements MouseWheelListener { - - public enum ModiferStrictness { - Exact, - All, - Any - } - private ModiferStrictness strictness; - private double shift = 1.0; - - public ViewScrollingGraphMousePlugin() { - this(0, ModiferStrictness.Exact); - } - - public ViewScrollingGraphMousePlugin(int modifiers) { - this(modifiers, ModiferStrictness.Exact); - } - - public ViewScrollingGraphMousePlugin(int modifiers, ModiferStrictness strictness) { - super(modifiers); - this.strictness = strictness; - } - - @Override - public boolean checkModifiers(MouseEvent e) { - switch (strictness) { - case Exact: - return e.getModifiers() == modifiers; - case All: - return (e.getModifiers() & modifiers) == modifiers; - case Any: - return (e.getModifiers() & modifiers) != 0; - } - // shouldn't get this: - return false; - } - - /** - * Get the amount a mouse wheel "click" moves the view by - * - * @return The per-click shift amount - */ - public double getShift() { - return shift; - } - - /** - * Set the amount a mouse wheel "click" moves the view by - * - * @param shift The per-click shift amount - */ - public void setShift(double shift) { - this.shift = shift; - } - - public void mouseWheelMoved(MouseWheelEvent e) { - if (checkModifiers(e)) { - VisualizationViewer vv = (VisualizationViewer) e.getSource(); - MutableTransformer viewTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW); - - int amount = e.getWheelRotation(); - // negative wheel rotation, so 
that scrolling - // down shifts the view up - double dy = shift * (-amount); - viewTransformer.translate(0, dy); - - e.consume(); - vv.repaint(); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/decorators/GridPaintable.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/decorators/GridPaintable.java deleted file mode 100644 index 6df37013..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/decorators/GridPaintable.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package edu.uci.ics.jung.contrib.visualization.decorators; - -import edu.uci.ics.jung.visualization.VisualizationServer; -import java.awt.BasicStroke; -import java.awt.Color; -import java.awt.Font; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.Stroke; -import java.awt.geom.Rectangle2D; - -/** - * Shows a grid behind the graph, for debugging purposes - * - * @author alemer - */ -public class GridPaintable implements VisualizationServer.Paintable -{ - public interface BoundsCalculator { - Rectangle2D getBounds(); - } - - private static final int SPACING = 20; - private boolean useTransform = true; - private Color color = Color.green; - private BoundsCalculator boundsCalc; - - public GridPaintable(BoundsCalculator boundsCalc) { - this.boundsCalc = boundsCalc; - } - - public Color getColor() { - return color; - } - - public void setColor(Color color) { - this.color = color; - } - - public boolean isSubjectToTransform() { - return useTransform; - } - - public void setSubjectToTransform(boolean useTransform) { - this.useTransform = useTransform; - } - - public BoundsCalculator getBoundsCalculator() { - return boundsCalc; - } - - public void setBoundsCalculator(BoundsCalculator boundsCalc) { - this.boundsCalc = boundsCalc; - } - - public void paint(Graphics g) { - Color oldColor = g.getColor(); - Stroke oldStroke = ((Graphics2D) g).getStroke(); - Font oldfont = g.getFont(); - Rectangle2D bounds = boundsCalc.getBounds(); - if (bounds.getHeight() > 0 && bounds.getWidth() > 0) - { - g.setFont(oldfont.deriveFont(2)); - ((Graphics2D)g).setStroke(new BasicStroke(1)); - int left = (int)bounds.getMinX(); - int right = (int)bounds.getMaxX() + 1; - int top = (int)bounds.getMinY(); - int bottom = (int)bounds.getMaxY() + 1; - for (int row = top; row <= bottom; row += SPACING) - { - g.setColor(color); - g.drawLine(left, row, right, row); - g.setColor(Color.black); - g.drawString(String.valueOf(row), right+2, row+6); - g.drawString(String.valueOf(row), left-30, row+6); - } - g.setColor(color); - for (int col = left; col <= right; col += SPACING) - { - g.drawLine(col, top, col, bottom); - } - g.setColor(Color.black); - for (int col = left; col <= right; col += SPACING) - { - g.translate(col-3, bottom+5); - ((Graphics2D)g).rotate(Math.PI/2); - g.drawString(String.valueOf(col), 0, 0); - ((Graphics2D)g).rotate(-Math.PI/2); - g.translate(0, (top-30)-(bottom+5)); - ((Graphics2D)g).rotate(Math.PI/2); - g.drawString(String.valueOf(col), 0, 0); - ((Graphics2D)g).rotate(-Math.PI/2); - g.translate(-(col-3), -(top-30)); - } - } - g.setColor(oldColor); - ((Graphics2D) g).setStroke(oldStroke); - g.setFont(oldfont); - } - - public boolean useTransform() { - return useTransform; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/decorators/MixedShapeTransformer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/decorators/MixedShapeTransformer.java deleted file mode 
100644 index 9a28adb4..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/decorators/MixedShapeTransformer.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package edu.uci.ics.jung.contrib.visualization.decorators; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.EdgeIndexFunction; -import edu.uci.ics.jung.visualization.decorators.AbstractEdgeShapeTransformer; -import edu.uci.ics.jung.visualization.decorators.EdgeShape; -import edu.uci.ics.jung.visualization.decorators.EdgeShape.IndexedRendering; -import java.awt.Shape; - -/** - * Swing seems to have trouble with bezier curves with no inflection, - * so we use the line transformer to draw straight edges and the - * QuadCurve transformer otherwise. - */ -public class MixedShapeTransformer - extends AbstractEdgeShapeTransformer - implements IndexedRendering -{ - EdgeShape.QuadCurve quad = new EdgeShape.QuadCurve(); - EdgeShape.Line line = new EdgeShape.Line(); - private EdgeIndexFunction peif = null; - - public Shape transform(Context, E> input) { - // if we have no index function, or the index is -1 (straight) - // then draw a straight line. - if (peif == null - || peif.getIndex(input.graph, input.element) == -1) { - return line.transform(input); - } - // otherwise draw a quadratic curve - else { - return quad.transform(input); - } - } - - public EdgeIndexFunction getEdgeIndexFunction() { - return peif; - } - - public void setEdgeIndexFunction( - EdgeIndexFunction peif) { - this.peif = peif; - quad.setEdgeIndexFunction(peif); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/decorators/PickableBangBoxPaintTransformer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/decorators/PickableBangBoxPaintTransformer.java deleted file mode 100644 index 027aafe6..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/decorators/PickableBangBoxPaintTransformer.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package edu.uci.ics.jung.contrib.visualization.decorators; - -import edu.uci.ics.jung.visualization.picking.PickedInfo; -import java.awt.Paint; -import org.apache.commons.collections15.Transformer; - -/** - * - * @author alemer - */ -public class PickableBangBoxPaintTransformer implements Transformer { - - protected PickedInfo pi; - protected Paint draw_paint; - protected Paint picked_paint; - - public PickableBangBoxPaintTransformer(PickedInfo pi, Paint draw_paint, Paint picked_paint) { - if (pi == null) { - throw new IllegalArgumentException("PickedInfo instance must be non-null"); - } - - this.pi = pi; - this.draw_paint = draw_paint; - this.picked_paint = picked_paint; - } - - public Paint transform(B b) { - if (pi.isPicked(b)) { - return picked_paint; - } - else { - return draw_paint; - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/decorators/PickableElementStrokeTransformer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/decorators/PickableElementStrokeTransformer.java deleted file mode 100644 index 152b3b57..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/decorators/PickableElementStrokeTransformer.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
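The MixedShapeTransformer just removed draws an edge as a straight line whenever its parallel-edge index is -1 (or when no index function has been set) and as a quadratic curve otherwise. A minimal, hedged sketch of wiring it into a viewer's render context follows: setEdgeShapeTransformer is the standard JUNG RenderContext setter, while getParallelEdgeIndexFunction is assumed to be available on the same render context; the helper class name is invented.

    import edu.uci.ics.jung.contrib.visualization.decorators.MixedShapeTransformer;
    import edu.uci.ics.jung.visualization.RenderContext;
    import edu.uci.ics.jung.visualization.VisualizationViewer;

    public final class MixedEdgeShapes {
        private MixedEdgeShapes() {}

        public static <V, E> void install(VisualizationViewer<V, E> vv) {
            RenderContext<V, E> rc = vv.getRenderContext();
            MixedShapeTransformer<V, E> shapes = new MixedShapeTransformer<V, E>();
            // share the viewer's parallel-edge index function so parallel edges
            // still fan out as curves; with no index function every edge would
            // be drawn straight (assumed accessor on RenderContext)
            shapes.setEdgeIndexFunction(rc.getParallelEdgeIndexFunction());
            rc.setEdgeShapeTransformer(shapes);
            vv.repaint();
        }
    }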
- */ - -package edu.uci.ics.jung.contrib.visualization.decorators; - -import edu.uci.ics.jung.visualization.picking.PickedInfo; -import java.awt.Stroke; -import org.apache.commons.collections15.Transformer; - -/** - * - * @author alemer - */ -public class PickableElementStrokeTransformer implements Transformer -{ - protected PickedInfo pi; - protected Stroke normal_stroke; - protected Stroke picked_stroke; - - public PickableElementStrokeTransformer(PickedInfo pi, Stroke normal_stroke, Stroke picked_stroke) { - if (pi == null) { - throw new IllegalArgumentException("PickedInfo instance must be non-null"); - } - - this.pi = pi; - this.normal_stroke = normal_stroke; - this.picked_stroke = picked_stroke; - } - - public Stroke transform(E b) { - if (pi.isPicked(b)) { - return picked_stroke; - } - else { - return normal_stroke; - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BangBoxGraphRenderer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BangBoxGraphRenderer.java deleted file mode 100644 index 2eecd1dc..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BangBoxGraphRenderer.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package edu.uci.ics.jung.contrib.visualization.renderers; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.contrib.visualization.BangBoxGraphRenderContext; -import edu.uci.ics.jung.visualization.renderers.Renderer; - -/** - * - * @author alemer - */ -public interface BangBoxGraphRenderer extends Renderer -{ - interface BangBox { - class NOOP implements BangBox { - public void paintBangBox(BangBoxGraphRenderContext rc, Layout layout, Object b) { - } - } - void paintBangBox(BangBoxGraphRenderContext rc, Layout layout, B b); - } - interface BangBoxLabel { - class NOOP implements BangBoxLabel { - public void labelBangBox(BangBoxGraphRenderContext rc, - Layout layout, Object e, String label) { - } - } - void labelBangBox(BangBoxGraphRenderContext rc, Layout layout, B e, String label); - } - - void render(BangBoxGraphRenderContext renderContext, Layout layout); - - BangBox getBangBoxRenderer(); - void setBangBoxRenderer(BangBox bangBoxRenderer); - - BangBoxLabel getBangBoxLabelRenderer(); - void setBangBoxLabelRenderer(BangBoxLabel bangBoxLabelRenderer); - void renderBangBox(BangBoxGraphRenderContext rc, Layout layout, B b); - - void renderBangBoxLabel(BangBoxGraphRenderContext rc, Layout layout, B b); -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BangBoxLabelRenderer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BangBoxLabelRenderer.java deleted file mode 100644 index 28564804..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BangBoxLabelRenderer.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package edu.uci.ics.jung.contrib.visualization.renderers; - -import java.awt.Component; -import java.awt.Font; -import javax.swing.JComponent; - -/** - * - * @author alemer - */ -public interface BangBoxLabelRenderer { - - /** - * Returns the component used for drawing the label. This method is - * used to configure the renderer appropriately before drawing. - * - * @param vv the JComponent that is asking the - * renderer to draw; can be null - * @param value the value of the cell to be rendered. 
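The two pickable decorators above (PickableBangBoxPaintTransformer and PickableElementStrokeTransformer) simply return one Paint or Stroke while an element is picked and another otherwise. A hedged usage sketch for the stroke variant is below: setEdgeStrokeTransformer and getPickedEdgeState are standard JUNG API, the <E> type parameter is assumed (the generics were lost in this listing), and the corresponding !-box paint setter on the contrib render context is not shown because its exact name does not appear in this diff.

    import java.awt.BasicStroke;

    import edu.uci.ics.jung.contrib.visualization.decorators.PickableElementStrokeTransformer;
    import edu.uci.ics.jung.visualization.VisualizationViewer;

    public final class PickedEdgeHighlighting {
        private PickedEdgeHighlighting() {}

        public static <V, E> void install(VisualizationViewer<V, E> vv) {
            // thin stroke normally, thick stroke while the edge is picked
            // (the <E> parameter is assumed; it was stripped from the listing)
            PickableElementStrokeTransformer<E> strokes =
                    new PickableElementStrokeTransformer<E>(
                            vv.getPickedEdgeState(),
                            new BasicStroke(1.0f),
                            new BasicStroke(3.0f));
            vv.getRenderContext().setEdgeStrokeTransformer(strokes);
            vv.repaint();
        }
    }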
It is - * up to the specific renderer to interpret - * and draw the value. For example, if - * value - * is the string "true", it could be rendered as a - * string or it could be rendered as a check - * box that is checked. null is a - * valid value - * @param vertex the vertex for the label being drawn. - */ - Component getBangBoxLabelRendererComponent(JComponent vv, Object value, - Font font, boolean isSelected, B bangBox); -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BasicBangBoxGraphRenderer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BasicBangBoxGraphRenderer.java deleted file mode 100644 index ed541e42..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BasicBangBoxGraphRenderer.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package edu.uci.ics.jung.contrib.visualization.renderers; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.contrib.graph.BangBoxGraph; -import edu.uci.ics.jung.contrib.visualization.BangBoxGraphRenderContext; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.renderers.BasicRenderer; -import java.util.ConcurrentModificationException; - -/** - * - * @author alemer - */ -public class BasicBangBoxGraphRenderer - extends BasicRenderer - implements BangBoxGraphRenderer { - - protected BangBox bangBoxRenderer = new BasicBangBoxRenderer(); - protected BangBoxLabel bangBoxLabelRenderer = new BasicBangBoxLabelRenderer(); - @Override - @SuppressWarnings("unchecked") - public void render(RenderContext renderContext, Layout layout) { - if (renderContext instanceof BangBoxGraphRenderContext) { - render((BangBoxGraphRenderContext)renderContext, layout); - } else { - super.render(renderContext, layout); - } - } - - public void render(BangBoxGraphRenderContext renderContext, Layout layout) { - if (layout.getGraph() instanceof BangBoxGraph) { - // paint all the !-boxes - @SuppressWarnings("unchecked") - BangBoxGraph graph = (BangBoxGraph)layout.getGraph(); - try { - for (B b : graph.getBangBoxes()) { - - renderBangBox( - renderContext, - layout, - b); - renderBangBoxLabel( - renderContext, - layout, - b); - } - } - catch (ConcurrentModificationException cme) { - renderContext.getScreenDevice().repaint(); - } - } - - // paint all the edges - try { - for (E e : layout.getGraph().getEdges()) { - - renderEdge( - renderContext, - layout, - e); - renderEdgeLabel( - renderContext, - layout, - e); - } - } - catch (ConcurrentModificationException cme) { - renderContext.getScreenDevice().repaint(); - } - - // paint all the vertices - try { - for (V v : layout.getGraph().getVertices()) { - - renderVertex( - renderContext, - layout, - v); - renderVertexLabel( - renderContext, - layout, - v); - } - } - catch (ConcurrentModificationException cme) { - renderContext.getScreenDevice().repaint(); - } - } - - public void renderBangBox(BangBoxGraphRenderContext rc, Layout layout, B b) { - bangBoxRenderer.paintBangBox(rc, layout, b); - } - - public void renderBangBoxLabel(BangBoxGraphRenderContext rc, Layout layout, B b) { - bangBoxLabelRenderer.labelBangBox(rc, layout, b, rc.getBangBoxLabelTransformer().transform(b)); - } - - public BangBox getBangBoxRenderer() { - return bangBoxRenderer; - } - - public void setBangBoxRenderer(BangBox bangBoxRenderer) { - this.bangBoxRenderer = bangBoxRenderer; - } - - public BangBoxLabel getBangBoxLabelRenderer() { - 
return this.bangBoxLabelRenderer; - } - - public void setBangBoxLabelRenderer(BangBoxLabel bangBoxLabelRenderer) { - this.bangBoxLabelRenderer = bangBoxLabelRenderer; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BasicBangBoxLabelRenderer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BasicBangBoxLabelRenderer.java deleted file mode 100644 index db5ef1f8..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BasicBangBoxLabelRenderer.java +++ /dev/null @@ -1,58 +0,0 @@ -package edu.uci.ics.jung.contrib.visualization.renderers; - -import java.awt.Component; -import java.awt.Dimension; -import java.awt.Point; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.contrib.graph.BangBoxGraph; -import edu.uci.ics.jung.contrib.visualization.BangBoxGraphRenderContext; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.renderers.VertexLabelRenderer; -import edu.uci.ics.jung.visualization.renderers.BasicVertexLabelRenderer.OutsidePositioner; -import edu.uci.ics.jung.visualization.renderers.Renderer.VertexLabel.Position; -import edu.uci.ics.jung.visualization.renderers.Renderer.VertexLabel.Positioner; -import edu.uci.ics.jung.visualization.transform.BidirectionalTransformer; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; -import edu.uci.ics.jung.visualization.transform.shape.ShapeTransformer; -import edu.uci.ics.jung.visualization.transform.shape.TransformingGraphics; - -public class BasicBangBoxLabelRenderer -implements BangBoxGraphRenderer.BangBoxLabel { - - protected Position position = Position.SE; - private Positioner positioner = new OutsidePositioner(); - - public BasicBangBoxLabelRenderer() { - super(); - } - - public Component prepareRenderer(BangBoxGraphRenderContext rc, BangBoxLabelRenderer bangLabelRenderer, Object value, - boolean isSelected, B bangBox) { - return rc.getBangBoxLabelRenderer().getBangBoxLabelRendererComponent(rc.getScreenDevice(), value, - null, isSelected, bangBox); - } - - /** - * Labels the specified vertex with the specified label. - * Uses the font specified by this instance's - * VertexFontFunction. (If the font is unspecified, the existing - * font for the graphics context is used.) If vertex label centering - * is active, the label is centered on the position of the vertex; otherwise - * the label is offset slightly. - */ - public void labelBangBox(BangBoxGraphRenderContext rc, Layout layout, B b, String label) { - Graph graph = layout.getGraph(); - - prepareRenderer(rc, rc.getBangBoxLabelRenderer(), label, - rc.getPickedBangBoxState().isPicked(b), b); - GraphicsDecorator g = rc.getGraphicsContext(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BasicBangBoxRenderer.java b/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BasicBangBoxRenderer.java deleted file mode 100644 index dc4f7477..00000000 --- a/gui/jung-src/edu/uci/ics/jung/contrib/visualization/renderers/BasicBangBoxRenderer.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
- */ -package edu.uci.ics.jung.contrib.visualization.renderers; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.contrib.graph.BangBoxGraph; -import edu.uci.ics.jung.contrib.visualization.LayoutContext; -import edu.uci.ics.jung.contrib.visualization.BangBoxGraphRenderContext; -import edu.uci.ics.jung.contrib.visualization.renderers.BangBoxGraphRenderer.BangBox; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; -import edu.uci.ics.jung.visualization.transform.MutableTransformerDecorator; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; -import java.awt.Dimension; -import java.awt.Paint; -import java.awt.Rectangle; -import java.awt.Shape; -import java.awt.Stroke; -import javax.swing.JComponent; - -/** - * - * @author alemer - */ -public class BasicBangBoxRenderer - implements BangBoxGraphRenderer.BangBox { - - public void paintBangBox(BangBoxGraphRenderContext rc, Layout layout, B b) { - @SuppressWarnings("unchecked") - BangBoxGraph graph = (BangBoxGraph) layout.getGraph(); - if (!rc.getBangBoxIncludePredicate().evaluate(Context., B>getInstance(graph, b))) { - return; - } - Shape shape = rc.getBangBoxShapeTransformer().transform(LayoutContext.,B>getInstance(layout, b)); - if (isOnScreen(rc, shape)) { - paintShapeForBangBox(rc, b, shape); - } - } - - protected boolean isOnScreen(RenderContext rc, Shape s) { - JComponent vv = rc.getScreenDevice(); - Rectangle deviceRectangle = null; - if (vv != null) { - Dimension d = vv.getSize(); - deviceRectangle = new Rectangle( - 0, 0, - d.width, d.height); - } - MutableTransformer vt = rc.getMultiLayerTransformer().getTransformer(Layer.VIEW); - if (vt instanceof MutableTransformerDecorator) { - vt = ((MutableTransformerDecorator) vt).getDelegate(); - } - return vt.transform(s).intersects(deviceRectangle); - } - - protected void paintShapeForBangBox(BangBoxGraphRenderContext rc, B b, Shape shape) { - GraphicsDecorator g = rc.getGraphicsContext(); - Paint oldPaint = g.getPaint(); - Paint fillPaint = rc.getBangBoxFillPaintTransformer().transform(b); - if (fillPaint != null) { - g.setPaint(fillPaint); - g.fill(shape); - g.setPaint(oldPaint); - } - Paint drawPaint = rc.getBangBoxDrawPaintTransformer().transform(b); - if (drawPaint != null) { - g.setPaint(drawPaint); - Stroke oldStroke = g.getStroke(); - Stroke stroke = rc.getBangBoxStrokeTransformer().transform(b); - if (stroke != null) { - g.setStroke(stroke); - } - g.draw(shape); - g.setPaint(oldPaint); - g.setStroke(oldStroke); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/AbstractGraph.java b/gui/jung-src/edu/uci/ics/jung/graph/AbstractGraph.java deleted file mode 100644 index 98bbd5ea..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/AbstractGraph.java +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Created on Apr 2, 2006 - * - * Copyright (c) 2006, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- */ -package edu.uci.ics.jung.graph; - -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * Abstract implementation of the Graph interface. - * Designed to simplify implementation of new graph classes. - * - * @author Joshua O'Madadhain - */ -@SuppressWarnings("serial") -public abstract class AbstractGraph implements Graph, Serializable -{ - public boolean addEdge(E edge, Collection vertices) - { - return addEdge(edge, vertices, this.getDefaultEdgeType()); - } - - @SuppressWarnings("unchecked") - public boolean addEdge(E edge, Collection vertices, EdgeType edgeType) { - if (vertices == null) - throw new IllegalArgumentException("'vertices' parameter must not be null"); - if (vertices.size() == 2) - return addEdge(edge, - vertices instanceof Pair ? (Pair)vertices : new Pair(vertices), - edgeType); - else if (vertices.size() == 1) - { - V vertex = vertices.iterator().next(); - return addEdge(edge, new Pair(vertex, vertex), edgeType); - } - else - throw new IllegalArgumentException("Graph objects connect 1 or 2 vertices; vertices arg has " + vertices.size()); - } - - public boolean addEdge(E e, V v1, V v2) - { - return addEdge(e, v1, v2, this.getDefaultEdgeType()); - } - - public boolean addEdge(E e, V v1, V v2, EdgeType edge_type) - { - return addEdge(e, new Pair(v1, v2), edge_type); - } - - /** - * Adds {@code edge} to this graph with the specified {@code endpoints}, - * with the default edge type. - * @return {@code} true iff the graph was modified as a result of this call - */ - public boolean addEdge(E edge, Pair endpoints) - { - return addEdge(edge, endpoints, this.getDefaultEdgeType()); - } - - /** - * Adds {@code edge} to this graph with the specified {@code endpoints} - * and {@code EdgeType}. 
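The addEdge overloads above normalise their input: a two-element vertex collection is wrapped into a Pair, a one-element collection becomes a self-loop, and everything funnels into the abstract addEdge(edge, endpoints, edgeType). A small, hedged sketch using DirectedSparseMultigraph (the concrete implementation referenced elsewhere in this diff) shows the three call forms; the demo class name and sample data are invented.

    import java.util.Arrays;
    import java.util.Collections;

    import edu.uci.ics.jung.graph.DirectedSparseMultigraph;
    import edu.uci.ics.jung.graph.Graph;

    public class AddEdgeForms {
        public static void main(String[] args) {
            Graph<String, String> g = new DirectedSparseMultigraph<String, String>();

            // two-element collection: wrapped into a Pair, first -> second
            g.addEdge("a->b", Arrays.asList("a", "b"));

            // one-element collection: becomes a self-loop on that vertex
            g.addEdge("loop", Collections.singletonList("c"));

            // explicit endpoints, default edge type
            g.addEdge("b->c", "b", "c");

            System.out.println(g.getEdgeCount()); // 3
            System.out.println(g.inDegree("c"));  // 2: the self-loop plus b->c
        }
    }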
- * @return {@code} true iff the graph was modified as a result of this call - */ - public abstract boolean addEdge(E edge, Pair endpoints, EdgeType edgeType); - - protected Pair getValidatedEndpoints(E edge, Pair endpoints) - { - if (edge == null) - throw new IllegalArgumentException("input edge may not be null"); - - if (endpoints == null) - throw new IllegalArgumentException("endpoints may not be null"); - - Pair new_endpoints = new Pair(endpoints.getFirst(), endpoints.getSecond()); - if (containsEdge(edge)) - { - Pair existing_endpoints = getEndpoints(edge); - if (!existing_endpoints.equals(new_endpoints)) { - throw new IllegalArgumentException("edge " + edge + - " already exists in this graph with endpoints " + existing_endpoints + - " and cannot be added with endpoints " + endpoints); - } else { - return null; - } - } - return new_endpoints; - } - - public int inDegree(V vertex) - { - return this.getInEdges(vertex).size(); - } - - public int outDegree(V vertex) - { - return this.getOutEdges(vertex).size(); - } - - public boolean isPredecessor(V v1, V v2) - { - return this.getPredecessors(v1).contains(v2); - } - - public boolean isSuccessor(V v1, V v2) - { - return this.getSuccessors(v1).contains(v2); - } - - public int getPredecessorCount(V vertex) - { - return this.getPredecessors(vertex).size(); - } - - public int getSuccessorCount(V vertex) - { - return this.getSuccessors(vertex).size(); - } - - public boolean isNeighbor(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - throw new IllegalArgumentException("At least one of these not in this graph: " + v1 + ", " + v2); - return this.getNeighbors(v1).contains(v2); - } - - public boolean isIncident(V vertex, E edge) - { - if (!containsVertex(vertex) || !containsEdge(edge)) - throw new IllegalArgumentException("At least one of these not in this graph: " + vertex + ", " + edge); - return this.getIncidentEdges(vertex).contains(edge); - } - - public int getNeighborCount(V vertex) - { - if (!containsVertex(vertex)) - throw new IllegalArgumentException(vertex + " is not a vertex in this graph"); - return this.getNeighbors(vertex).size(); - } - - public int degree(V vertex) - { - if (!containsVertex(vertex)) - throw new IllegalArgumentException(vertex + " is not a vertex in this graph"); - return this.getIncidentEdges(vertex).size(); - } - - public int getIncidentCount(E edge) - { - Pair incident = this.getEndpoints(edge); - if (incident == null) - return 0; - if (incident.getFirst() == incident.getSecond()) - return 1; - else - return 2; - } - - public V getOpposite(V vertex, E edge) - { - Pair incident = this.getEndpoints(edge); - V first = incident.getFirst(); - V second = incident.getSecond(); - if (vertex.equals(first)) - return second; - else if (vertex.equals(second)) - return first; - else - throw new IllegalArgumentException(vertex + " is not incident to " + edge + " in this graph"); - } - - public E findEdge(V v1, V v2) - { - for (E e : getOutEdges(v1)) - { - if (getOpposite(v1, e).equals(v2)) - return e; - } - return null; - } - - public Collection findEdgeSet(V v1, V v2) - { - if (!getVertices().contains(v1)) - throw new IllegalArgumentException(v1 + " is not an element of this graph"); - - if (!getVertices().contains(v2)) - throw new IllegalArgumentException(v2 + " is not an element of this graph"); - - Collection edges = new ArrayList(); - for (E e : getOutEdges(v1)) - { - if (getOpposite(v1, e).equals(v2)) - edges.add(e); - } - return Collections.unmodifiableCollection(edges); - } - - public Collection 
getIncidentVertices(E edge) - { - Pair endpoints = this.getEndpoints(edge); - Collection incident = new ArrayList(); - incident.add(endpoints.getFirst()); - incident.add(endpoints.getSecond()); - - return Collections.unmodifiableCollection(incident); - } - - @Override - public String toString() { - StringBuffer sb = new StringBuffer("Vertices:"); - for(V v : getVertices()) { - sb.append(v+","); - } - sb.setLength(sb.length()-1); - sb.append("\nEdges:"); - for(E e : getEdges()) { - Pair ep = getEndpoints(e); - sb.append(e+"["+ep.getFirst()+","+ep.getSecond()+"] "); - } - return sb.toString(); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/AbstractTypedGraph.java b/gui/jung-src/edu/uci/ics/jung/graph/AbstractTypedGraph.java deleted file mode 100644 index 76379a6a..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/AbstractTypedGraph.java +++ /dev/null @@ -1,97 +0,0 @@ -/** - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Sep 1, 2008 - * - */ -package edu.uci.ics.jung.graph; - -import java.util.Collection; -import java.util.Collections; - -import edu.uci.ics.jung.graph.util.EdgeType; - -/** - * An abstract class for graphs whose edges all have the same {@code EdgeType}. - * Intended to simplify the implementation of such graph classes. - */ -@SuppressWarnings("serial") -public abstract class AbstractTypedGraph extends AbstractGraph -{ - /** - * The edge type for all edges in this graph. - */ - protected final EdgeType edge_type; - - /** - * Creates an instance with the specified edge type. - * @param edge_type the type of edges that this graph accepts - */ - public AbstractTypedGraph(EdgeType edge_type) - { - this.edge_type = edge_type; - } - - /** - * Returns this graph's edge type. - */ - public EdgeType getDefaultEdgeType() - { - return this.edge_type; - } - - /** - * Returns this graph's edge type, or {@code null} if {@code e} is not - * in this graph. - */ - public EdgeType getEdgeType(E e) - { - return hasEqualEdgeType(edge_type) ? this.edge_type : null; - } - - /** - * Returns the edge set for this graph if {@code edgeType} matches the - * edge type for this graph, and an empty set otherwise. - */ - public Collection getEdges(EdgeType edge_type) - { - return hasEqualEdgeType(edge_type) ? this.getEdges() : Collections.emptySet(); - } - - /** - * Returns the edge count for this graph if {@code edge_type} matches - * the edge type for this graph, and 0 otherwise. - */ - public int getEdgeCount(EdgeType edge_type) - { - return hasEqualEdgeType(edge_type) ? this.getEdgeCount() : 0; - } - - /** - * Returns {@code true} if {@code edge_type} matches the default edge type for - * this graph, and {@code false} otherwise. - * @param edge_type the edge type to compare to this instance's default edge type - */ - protected boolean hasEqualEdgeType(EdgeType edge_type) - { - return this.edge_type.equals(edge_type); - } - - /** - * Throws an {@code IllegalArgumentException} if {@code edge_type} does not - * match the default edge type for this graph. 
- * @param edge_type the edge type to compare to this instance's default edge type - */ - protected void validateEdgeType(EdgeType edge_type) - { - if (!hasEqualEdgeType(edge_type)) - throw new IllegalArgumentException("Edge type '" + edge_type + - "' does not match the default edge type for this graph: '" + - this.edge_type + "'"); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/DelegateForest.java b/gui/jung-src/edu/uci/ics/jung/graph/DelegateForest.java deleted file mode 100644 index a334f23d..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/DelegateForest.java +++ /dev/null @@ -1,327 +0,0 @@ -package edu.uci.ics.jung.graph; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; -import edu.uci.ics.jung.graph.util.TreeUtils; - -/** - * An implementation of Forest that delegates to a specified DirectedGraph - * instance. - * @author Tom Nelson - * - * @param the vertex type - * @param the edge type - */ -@SuppressWarnings("serial") -public class DelegateForest extends GraphDecorator implements Forest -{ - /** - * Creates an instance backed by a new {@code DirectedSparseGraph} instance. - */ - public DelegateForest() { - this(new DirectedSparseGraph()); - } - - /** - * Creates an instance backed by the input {@code DirectedGraph} i - */ - public DelegateForest(DirectedGraph delegate) { - super(delegate); - } - - /** - * Add an edge to the tree, connecting v1, the parent and v2, the child. - * v1 must already exist in the tree, and v2 must not already exist - * the passed edge must be unique in the tree. Passing an edgeType - * other than EdgeType.DIRECTED may cause an illegal argument exception - * in the delegate graph. - * - * @param e a unique edge to add - * @param v1 the parent node - * @param v2 the child node - * @param edgeType should be EdgeType.DIRECTED - * @return true if this call mutates the underlying graph - * @see edu.uci.ics.jung.graph.Graph#addEdge(java.lang.Object, java.lang.Object, java.lang.Object, edu.uci.ics.jung.graph.util.EdgeType) - */ - @Override - public boolean addEdge(E e, V v1, V v2, EdgeType edgeType) { - if(delegate.getVertices().contains(v1) == false) { - throw new IllegalArgumentException("Tree must already contain "+v1); - } - if(delegate.getVertices().contains(v2)) { - throw new IllegalArgumentException("Tree must not already contain "+v2); - } - return delegate.addEdge(e, v1, v2, edgeType); - } - - /** - * Add vertex as a root of the tree - * - * @param vertex the tree root to add - * @return true if this call mutates the underlying graph - * @see edu.uci.ics.jung.graph.Graph#addVertex(java.lang.Object) - */ - @Override - public boolean addVertex(V vertex) { - setRoot(vertex); - return true; - } - - /** - * Removes edge from this tree, and the subtree rooted - * at the child vertex incident to edge. - * (The subtree is removed to ensure that the tree in which the edge - * was found is still a tree rather than a forest. To change this - * behavior so that the - * @param edge the edge to remove - * @return true iff the tree was modified - * @see edu.uci.ics.jung.graph.Hypergraph#removeEdge(java.lang.Object) - */ - @Override - public boolean removeEdge(E edge) { - return removeEdge(edge, true); - } - - /** - * Removes edge from this tree. - * If remove_subtree is true, removes - * the subtree rooted at the child vertex incident to edge. 
- * Otherwise, leaves the subtree intact as a new component tree of this - * forest. - * @param edge the edge to remove - * @param remove_subtree if true, remove the subtree - * @return true iff the tree was modified - */ - public boolean removeEdge(E edge, boolean remove_subtree) - { - if (!delegate.containsEdge(edge)) - return false; - V child = getDest(edge); - if (remove_subtree) - return removeVertex(child); - else - { - delegate.removeEdge(edge); - return false; - } - } - - /** - * Removes vertex from this tree, and the subtree - * rooted at vertex. - * @param vertex the vertex to remove - * @return true iff the tree was modified - * @see edu.uci.ics.jung.graph.Hypergraph#removeVertex(java.lang.Object) - */ - @Override - public boolean removeVertex(V vertex) { - return removeVertex(vertex, true); - } - - /** - * Removes vertex from this tree. - * If remove_subtrees is true, removes - * the subtrees rooted at the children of vertex. - * Otherwise, leaves these subtrees intact as new component trees of this - * forest. - * @param vertex the vertex to remove - * @param remove_subtrees if true, remove the subtrees - * rooted at vertex's children - * @return true iff the tree was modified - */ - public boolean removeVertex(V vertex, boolean remove_subtrees) - { - if (!delegate.containsVertex(vertex)) - return false; - if (remove_subtrees) - for(V v : new ArrayList(delegate.getSuccessors(vertex))) - removeVertex(v, true); - return delegate.removeVertex(vertex); - } - - /** - * returns an ordered list of the nodes beginning at the root - * and ending at the passed child node, including all intermediate - * nodes. - * @param child the last node in the path from the root - * @return an ordered list of the nodes from root to child - */ - public List getPath(V child) { - if (!delegate.containsVertex(child)) - return null; - List list = new ArrayList(); - list.add(child); - V parent = getParent(child); - while(parent != null) { - list.add(list.size(), parent); - parent = getParent(parent); - } - return list; - } - - public V getParent(V child) { - if (!delegate.containsVertex(child)) - return null; - Collection parents = delegate.getPredecessors(child); - if(parents.size() > 0) { - return parents.iterator().next(); - } - return null; - } - - /** - * getter for the root of the tree - * returns null, as this tree has >1 roots - * @return the root - */ - public V getRoot() { - return null; - } - - /** - * adds root as a root of the tree - * @param root the initial tree root - */ - public void setRoot(V root) { - delegate.addVertex(root); - } - - /** - * removes a node from the tree, causing all descendants of - * the removed node also to be removed - * @param orphan the node to remove - * @return whether this call mutates the underlying graph - */ - public boolean removeChild(V orphan) { - return removeVertex(orphan); - } - - /** - * computes and returns the depth of the tree from the - * root to the passed vertex - * - * @param v the node who's depth is computed - * @return the depth to the passed node. - */ - public int getDepth(V v) { - return getPath(v).size(); - } - - /** - * computes and returns the height of the tree - * - * @return the height - */ - public int getHeight() { - int height = 0; - for(V v : getVertices()) { - height = Math.max(height, getDepth(v)); - } - return height; - } - - /** - * computes and returns whether the passed node is - * neither the root, nor a leaf node. 
- * @return true if v is neither a leaf - * nor a root - */ - public boolean isInternal(V v) { - return isLeaf(v) == false && isRoot(v) == false; - } - - /** - * Returns true if {@code v} has no child nodes. - */ - public boolean isLeaf(V v) { - return getChildren(v).size() == 0; - } - - /** - * Returns the children of {@code v}. - */ - public Collection getChildren(V v) { - return delegate.getSuccessors(v); - } - - /** - * Returns true if {@code v} has no parent node. - */ - public boolean isRoot(V v) { - return getParent(v) == null; - } - - @Override - public int getIncidentCount(E edge) - { - return 2; - } - - @SuppressWarnings("unchecked") - @Override - public boolean addEdge(E edge, Collection vertices) { - Pair pair = null; - if(vertices instanceof Pair) { - pair = (Pair)vertices; - } else { - pair = new Pair(vertices); - } - return addEdge(edge, pair.getFirst(), pair.getSecond()); - } - - /** - * Returns the root of each tree of this forest as a {@code Collection}. - */ - public Collection getRoots() { - Collection roots = new HashSet(); - for(V v : delegate.getVertices()) { - if(delegate.getPredecessorCount(v) == 0) { - roots.add(v); - } - } - return roots; - } - - public Collection> getTrees() { - Collection> trees = new HashSet>(); - for(V v : getRoots()) { - Tree tree = new DelegateTree(); - tree.addVertex(v); - TreeUtils.growSubTree(this, tree, v); - trees.add(tree); - } - return trees; - } - - /** - * Adds {@code tree} to this graph as an element of this forest. - * - * @param tree the tree to add to this forest as a component - */ - public void addTree(Tree tree) { - TreeUtils.addSubTree(this, tree, null, null); - } - - public int getChildCount(V vertex) - { - return delegate.getSuccessorCount(vertex); - } - - public Collection getChildEdges(V vertex) - { - return delegate.getOutEdges(vertex); - } - - public E getParentEdge(V vertex) - { - if (isRoot(vertex)) - return null; - return delegate.getInEdges(vertex).iterator().next(); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/DelegateTree.java b/gui/jung-src/edu/uci/ics/jung/graph/DelegateTree.java deleted file mode 100644 index daf00380..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/DelegateTree.java +++ /dev/null @@ -1,365 +0,0 @@ -package edu.uci.ics.jung.graph; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of Tree that delegates to - * a specified instance of DirectedGraph. - * @author Tom Nelson - * - * @param the vertex type - * @param the edge type - */ -@SuppressWarnings("serial") -public class DelegateTree extends GraphDecorator implements Tree -{ - /** - * Returns a {@code Factory} that creates an instance of this graph type. - * @param the vertex type for the graph factory - * @param the edge type for the graph factory - */ - public static final Factory> getFactory() { - return new Factory> () { - public Tree create() { - return new DelegateTree(new DirectedSparseMultigraph()); - } - }; - } - - protected V root; - protected Map vertex_depths; - - /** - * Creates an instance. - */ - public DelegateTree() { - this(DirectedSparseMultigraph.getFactory()); - } - - /** - * create an instance with passed values. 
- * @param graphFactory must create a DirectedGraph to use as a delegate - */ - public DelegateTree(Factory> graphFactory) { - super(graphFactory.create()); - this.vertex_depths = new HashMap(); - } - - /** - * Creates a new DelegateTree which delegates to graph. - * Assumes that graph is already a tree; if it's not, future behavior - * of this instance is undefined. - */ - public DelegateTree(DirectedGraph graph) { - super(graph); -// if(graph.getVertexCount() != 0) throw new IllegalArgumentException( -// "Passed DirectedGraph must be empty"); - this.vertex_depths = new HashMap(); - } - - /** - * Add an edge to the tree, connecting v1, the parent and v2, the child. - * v1 must already exist in the tree, and v2 must not already exist - * the passed edge must be unique in the tree. Passing an edgeType - * other than EdgeType.DIRECTED may cause an illegal argument exception - * in the delegate graph. - * - * @param e a unique edge to add - * @param v1 the parent node - * @param v2 the child node - * @param edgeType should be EdgeType.DIRECTED - * @return true if this call mutates the underlying graph - * @see edu.uci.ics.jung.graph.Graph#addEdge(java.lang.Object, java.lang.Object, java.lang.Object, edu.uci.ics.jung.graph.util.EdgeType) - */ - @Override - public boolean addEdge(E e, V v1, V v2, EdgeType edgeType) { - return addChild(e, v1, v2, edgeType); - } - - /** - * Add an edge to the tree, connecting v1, the parent and v2, the child. - * v1 must already exist in the tree, and v2 must not already exist - * the passed edge must be unique in the tree. - * - * @param e a unique edge to add - * @param v1 the parent node - * @param v2 the child node - * @return true if this call mutates the underlying graph - * @see edu.uci.ics.jung.graph.Graph#addEdge(java.lang.Object, java.lang.Object, java.lang.Object) - */ - @Override - public boolean addEdge(E e, V v1, V v2) { - return addChild(e, v1, v2); - } - - /** - * Will set the root of the Tree, only if the Tree is empty and the - * root is currently unset. - * - * @param vertex the tree root to set - * @return true if this call mutates the underlying graph - * @see edu.uci.ics.jung.graph.Graph#addVertex(java.lang.Object) - * @throws UnsupportedOperationException if the root was previously set - */ - @Override - public boolean addVertex(V vertex) { - if(root == null) { - this.root = vertex; - vertex_depths.put(vertex, 0); - return delegate.addVertex(vertex); - } else { - throw new UnsupportedOperationException("Unless you are setting the root, use addChild()"); - } - } - - /** - * remove the passed node, and all nodes that are descendants of the - * passed node. - * @param vertex - * @return true iff the tree was modified - * @see edu.uci.ics.jung.graph.Graph#removeVertex(java.lang.Object) - */ - @Override - public boolean removeVertex(V vertex) { - if (!delegate.containsVertex(vertex)) - return false; - for(V v : getChildren(vertex)) { - removeVertex(v); - vertex_depths.remove(v); - } - - // recalculate height - vertex_depths.remove(vertex); - return delegate.removeVertex(vertex); - } - - /** - * add the passed child node as a child of parent. - * parent must exist in the tree, and child must not already exist. 
- * - * @param edge the unique edge to connect the parent and child nodes - * @param parent the existing parent to attach the child to - * @param child the new child to add to the tree as a child of parent - * @param edgeType must be EdgeType.DIRECTED or the underlying graph may throw an exception - * @return whether this call mutates the underlying graph - */ - public boolean addChild(E edge, V parent, V child, EdgeType edgeType) { - Collection vertices = delegate.getVertices(); - if(vertices.contains(parent) == false) { - throw new IllegalArgumentException("Tree must already contain parent "+parent); - } - if(vertices.contains(child)) { - throw new IllegalArgumentException("Tree must not already contain child "+child); - } - vertex_depths.put(child, vertex_depths.get(parent) + 1); - return delegate.addEdge(edge, parent, child, edgeType); - } - - /** - * add the passed child node as a child of parent. - * parent must exist in the tree, and child must not already exist - * @param edge the unique edge to connect the parent and child nodes - * @param parent the existing parent to attach the child to - * @param child the new child to add to the tree as a child of parent - * @return whether this call mutates the underlying graph - */ - public boolean addChild(E edge, V parent, V child) { - Collection vertices = delegate.getVertices(); - if(vertices.contains(parent) == false) { - throw new IllegalArgumentException("Tree must already contain parent "+parent); - } - if(vertices.contains(child)) { - throw new IllegalArgumentException("Tree must not already contain child "+child); - } - vertex_depths.put(child, vertex_depths.get(parent) + 1); - return delegate.addEdge(edge, parent, child); - } - - /** - * get the number of children of the passed parent node - */ - public int getChildCount(V parent) { - if (!delegate.containsVertex(parent)) - return 0; - return getChildren(parent).size(); - } - - /** - * get the immediate children nodes of the passed parent - */ - public Collection getChildren(V parent) { - if (!delegate.containsVertex(parent)) - return null; - return delegate.getSuccessors(parent); - } - - /** - * get the single parent node of the passed child - */ - public V getParent(V child) { - if (!delegate.containsVertex(child)) - return null; - Collection predecessors = delegate.getPredecessors(child); - if(predecessors.size() == 0) { - return null; - } - return predecessors.iterator().next(); - } - - /** - * Returns an ordered list of the nodes beginning at the root - * and ending at {@code vertex}, including all intermediate - * nodes. 
- * @param vertex the last node in the path from the root - * @return an ordered list of the nodes from root to child - */ - public List getPath(V vertex) { - if (!delegate.containsVertex(vertex)) - return null; - List vertex_to_root = new ArrayList(); - vertex_to_root.add(vertex); - V parent = getParent(vertex); - while(parent != null) { - vertex_to_root.add(parent); - parent = getParent(parent); - } - // reverse list so that it goes from root to child - List root_to_vertex = new ArrayList(vertex_to_root.size()); - for (int i = vertex_to_root.size() - 1; i >= 0; i--) - root_to_vertex.add(vertex_to_root.get(i)); - return root_to_vertex; - } - - /** - * getter for the root of the tree - * @return the root - */ - public V getRoot() { - return root; - } - - /** - * sets the root to the passed value, only if the root is - * previously unset - * @param root the initial tree root - */ - public void setRoot(V root) { - addVertex(root); - } - - /** - * removes a node from the tree, causing all descendants of - * the removed node also to be removed - * @param orphan the node to remove - * @return whether this call mutates the underlying graph - */ - public boolean removeChild(V orphan) { - return removeVertex(orphan); - } - - /** - * computes and returns the depth of the tree from the - * root to the passed vertex - * - * @param v the node who's depth is computed - * @return the depth to the passed node. - */ - public int getDepth(V v) { - return this.vertex_depths.get(v); - } - - /** - * Computes and returns the height of the tree. - * - * @return the height - */ - public int getHeight() { - int height = 0; - for(V v : getVertices()) { - height = Math.max(height, getDepth(v)); - } - return height; - } - - /** - * Returns true if v is neither - * a leaf nor the root of this tree. - * @return true if v is neither - * a leaf nor the root of this tree - */ - public boolean isInternal(V v) { - if (!delegate.containsVertex(v)) - return false; - return isLeaf(v) == false && isRoot(v) == false; - } - - /** - * Returns true if the passed node has no - * children. 
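A minimal usage sketch of the DelegateTree API documented above (setRoot, addChild, getDepth, getHeight, getPath, isLeaf), assuming the JUNG classes being removed in this diff are on the classpath; the vertex labels and edge numbers are illustrative only.

    import edu.uci.ics.jung.graph.DelegateTree;

    public class DelegateTreeSketch {
        public static void main(String[] args) {
            DelegateTree<String, Integer> tree = new DelegateTree<String, Integer>();
            tree.setRoot("root");                    // only legal while the tree is empty
            tree.addChild(1, "root", "a");           // edge 1 connects root -> a
            tree.addChild(2, "root", "b");
            tree.addChild(3, "a", "a1");
            System.out.println(tree.getDepth("a1")); // 2 (root -> a -> a1)
            System.out.println(tree.getHeight());    // 2 (maximum depth over all vertices)
            System.out.println(tree.getPath("a1"));  // [root, a, a1], ordered from the root
            System.out.println(tree.isLeaf("b"));    // true: b has no children
        }
    }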
- * @return true if the passed node has no - * children - */ - public boolean isLeaf(V v) { - if (!delegate.containsVertex(v)) - return false; - return getChildren(v).size() == 0; - } - - /** - * computes whether the passed node is a root node - * (has no children) - */ - public boolean isRoot(V v) { - if (!delegate.containsVertex(v)) - return false; - return getParent(v) == null; - } - - @Override - public int getIncidentCount(E edge) - { - if (!delegate.containsEdge(edge)) - return 0; - // all edges in a tree connect exactly 2 vertices - return 2; - } - - @SuppressWarnings("unchecked") - @Override - public boolean addEdge(E edge, Collection vertices) { - Pair pair = null; - if(vertices instanceof Pair) { - pair = (Pair)vertices; - } else { - pair = new Pair(vertices); - } - return addEdge(edge, pair.getFirst(), pair.getSecond()); - } - - @Override - public String toString() { - return "Tree of "+delegate.toString(); - } - - public Collection> getTrees() { - return Collections.>singleton(this); - } - - public Collection getChildEdges(V vertex) { - return getOutEdges(vertex); - } - - public E getParentEdge(V vertex) { - return getInEdges(vertex).iterator().next(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/DirectedGraph.java b/gui/jung-src/edu/uci/ics/jung/graph/DirectedGraph.java deleted file mode 100644 index ee54f3b7..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/DirectedGraph.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Created on Oct 17, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -/** - * A tagging interface for implementations of Graph - * that accept only directed edges. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param type specification for vertices - * @param type specification for edges - */ -public interface DirectedGraph extends Graph { -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/DirectedOrderedSparseMultigraph.java b/gui/jung-src/edu/uci/ics/jung/graph/DirectedOrderedSparseMultigraph.java deleted file mode 100644 index 7d425954..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/DirectedOrderedSparseMultigraph.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Created on Oct 17, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.LinkedHashSet; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.util.Pair; - - -/** - * An implementation of DirectedGraph, suitable for sparse graphs, - * that orders its vertex and edge collections - * according to insertion time. - */ -@SuppressWarnings("serial") -public class DirectedOrderedSparseMultigraph - extends DirectedSparseMultigraph - implements DirectedGraph, MultiGraph -{ - /** - * Returns a {@code Factory} that creates an instance of this graph type. 
- * @param the vertex type for the graph factory - * @param the edge type for the graph factory - */ - public static Factory> getFactory() { - return new Factory> () { - public DirectedGraph create() { - return new DirectedOrderedSparseMultigraph(); - } - }; - } - - /** - * Creates a new instance. - */ - public DirectedOrderedSparseMultigraph() { - vertices = new LinkedHashMap>>(); - edges = new LinkedHashMap>(); - } - - @Override - public boolean addVertex(V vertex) { - if(vertex == null) { - throw new IllegalArgumentException("vertex may not be null"); - } - if (!containsVertex(vertex)) { - vertices.put(vertex, new Pair>(new LinkedHashSet(), new LinkedHashSet())); - return true; - } else { - return false; - } - } - - @Override - public Collection getPredecessors(V vertex) { - if (!containsVertex(vertex)) - return null; - Set preds = new LinkedHashSet(); - for (E edge : getIncoming_internal(vertex)) - preds.add(this.getSource(edge)); - - return Collections.unmodifiableCollection(preds); - } - - @Override - public Collection getSuccessors(V vertex) { - if (!containsVertex(vertex)) - return null; - Set succs = new LinkedHashSet(); - for (E edge : getOutgoing_internal(vertex)) - succs.add(this.getDest(edge)); - - return Collections.unmodifiableCollection(succs); - } - - @Override - public Collection getNeighbors(V vertex) { - if (!containsVertex(vertex)) - return null; - Collection neighbors = new LinkedHashSet(); - for (E edge : getIncoming_internal(vertex)) - neighbors.add(this.getSource(edge)); - for (E edge : getOutgoing_internal(vertex)) - neighbors.add(this.getDest(edge)); - return Collections.unmodifiableCollection(neighbors); - } - - @Override - public Collection getIncidentEdges(V vertex) { - if (!containsVertex(vertex)) - return null; - Collection incident = new LinkedHashSet(); - incident.addAll(getIncoming_internal(vertex)); - incident.addAll(getOutgoing_internal(vertex)); - return incident; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/DirectedSparseGraph.java b/gui/jung-src/edu/uci/ics/jung/graph/DirectedSparseGraph.java deleted file mode 100644 index 8d95caff..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/DirectedSparseGraph.java +++ /dev/null @@ -1,289 +0,0 @@ -/* - * Created on Mar 26, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of DirectedGraph suitable for sparse graphs. - */ -@SuppressWarnings("serial") -public class DirectedSparseGraph extends AbstractTypedGraph implements - DirectedGraph -{ - /** - * Returns a {@code Factory} that creates an instance of this graph type. 
- * @param the vertex type for the graph factory - * @param the edge type for the graph factory - */ - public static final Factory> getFactory() { - return new Factory> () { - public DirectedGraph create() { - return new DirectedSparseGraph(); - } - }; - } - - protected Map>> vertices; // Map of vertices to Pair of adjacency maps {incoming, outgoing} - // of neighboring vertices to incident edges - protected Map> edges; // Map of edges to incident vertex pairs - - /** - * Creates an instance. - */ - public DirectedSparseGraph() - { - super(EdgeType.DIRECTED); - vertices = new HashMap>>(); - edges = new HashMap>(); - } - - @Override - public boolean addEdge(E edge, Pair endpoints, EdgeType edgeType) - { - this.validateEdgeType(edgeType); - Pair new_endpoints = getValidatedEndpoints(edge, endpoints); - if (new_endpoints == null) - return false; - - V source = new_endpoints.getFirst(); - V dest = new_endpoints.getSecond(); - - if (findEdge(source, dest) != null) - return false; - - edges.put(edge, new_endpoints); - - if (!vertices.containsKey(source)) - this.addVertex(source); - - if (!vertices.containsKey(dest)) - this.addVertex(dest); - - // map source of this edge to and vice versa - vertices.get(source).getSecond().put(dest, edge); - vertices.get(dest).getFirst().put(source, edge); - - return true; - } - - @Override - public E findEdge(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - return null; - return vertices.get(v1).getSecond().get(v2); - } - - @Override - public Collection findEdgeSet(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - return null; - ArrayList edge_collection = new ArrayList(1); - E e = findEdge(v1, v2); - if (e == null) - return edge_collection; - edge_collection.add(e); - return edge_collection; - } - - protected Collection getIncoming_internal(V vertex) - { - return vertices.get(vertex).getFirst().values(); - } - - protected Collection getOutgoing_internal(V vertex) - { - return vertices.get(vertex).getSecond().values(); - } - - protected Collection getPreds_internal(V vertex) - { - return vertices.get(vertex).getFirst().keySet(); - } - - protected Collection getSuccs_internal(V vertex) - { - return vertices.get(vertex).getSecond().keySet(); - } - - public Collection getInEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - return Collections.unmodifiableCollection(getIncoming_internal(vertex)); - } - - public Collection getOutEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - return Collections.unmodifiableCollection(getOutgoing_internal(vertex)); - } - - public Collection getPredecessors(V vertex) - { - if (!containsVertex(vertex)) - return null; - return Collections.unmodifiableCollection(getPreds_internal(vertex)); - } - - public Collection getSuccessors(V vertex) - { - if (!containsVertex(vertex)) - return null; - return Collections.unmodifiableCollection(getSuccs_internal(vertex)); - } - - public Pair getEndpoints(E edge) - { - if (!containsEdge(edge)) - return null; - return edges.get(edge); - } - - public V getSource(E directed_edge) - { - if (!containsEdge(directed_edge)) - return null; - return edges.get(directed_edge).getFirst(); - } - - public V getDest(E directed_edge) - { - if (!containsEdge(directed_edge)) - return null; - return edges.get(directed_edge).getSecond(); - } - - public boolean isSource(V vertex, E edge) - { - if (!containsEdge(edge) || !containsVertex(vertex)) - return false; - return vertex.equals(this.getEndpoints(edge).getFirst()); - } - - public boolean isDest(V 
vertex, E edge) - { - if (!containsEdge(edge) || !containsVertex(vertex)) - return false; - return vertex.equals(this.getEndpoints(edge).getSecond()); - } - - public Collection getEdges() - { - return Collections.unmodifiableCollection(edges.keySet()); - } - - public Collection getVertices() - { - return Collections.unmodifiableCollection(vertices.keySet()); - } - - public boolean containsVertex(V vertex) - { - return vertices.containsKey(vertex); - } - - public boolean containsEdge(E edge) - { - return edges.containsKey(edge); - } - - public int getEdgeCount() - { - return edges.size(); - } - - public int getVertexCount() - { - return vertices.size(); - } - - public Collection getNeighbors(V vertex) - { - if (!containsVertex(vertex)) - return null; - - Collection neighbors = new HashSet(); - neighbors.addAll(getPreds_internal(vertex)); - neighbors.addAll(getSuccs_internal(vertex)); - return Collections.unmodifiableCollection(neighbors); - } - - public Collection getIncidentEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - - Collection incident_edges = new HashSet(); - incident_edges.addAll(getIncoming_internal(vertex)); - incident_edges.addAll(getOutgoing_internal(vertex)); - return Collections.unmodifiableCollection(incident_edges); - } - - public boolean addVertex(V vertex) - { - if(vertex == null) { - throw new IllegalArgumentException("vertex may not be null"); - } - if (!containsVertex(vertex)) { - vertices.put(vertex, new Pair>(new HashMap(), new HashMap())); - return true; - } else { - return false; - } - } - - public boolean removeVertex(V vertex) { - if (!containsVertex(vertex)) - return false; - - // copy to avoid concurrent modification in removeEdge - ArrayList incident = new ArrayList(getIncoming_internal(vertex)); - incident.addAll(getOutgoing_internal(vertex)); - - for (E edge : incident) - removeEdge(edge); - - vertices.remove(vertex); - - return true; - } - - public boolean removeEdge(E edge) { - if (!containsEdge(edge)) - return false; - - Pair endpoints = this.getEndpoints(edge); - V source = endpoints.getFirst(); - V dest = endpoints.getSecond(); - - // remove vertices from each others' adjacency maps - vertices.get(source).getSecond().remove(dest); - vertices.get(dest).getFirst().remove(source); - - edges.remove(edge); - return true; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/DirectedSparseMultigraph.java b/gui/jung-src/edu/uci/ics/jung/graph/DirectedSparseMultigraph.java deleted file mode 100644 index bbbe2f7e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/DirectedSparseMultigraph.java +++ /dev/null @@ -1,262 +0,0 @@ -/* - * Created on Oct 17, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - - -/** - * An implementation of DirectedGraph, suitable for sparse graphs, - * that permits parallel edges. 
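The practical difference between the two implementations above can be seen in a short sketch (illustrative vertex and edge labels; assumes the JUNG classes in this diff are available): DirectedSparseGraph rejects a second edge between the same ordered vertex pair, while DirectedSparseMultigraph keeps both.

    import edu.uci.ics.jung.graph.DirectedSparseGraph;
    import edu.uci.ics.jung.graph.DirectedSparseMultigraph;

    public class ParallelEdgeSketch {
        public static void main(String[] args) {
            DirectedSparseGraph<String, String> simple = new DirectedSparseGraph<String, String>();
            System.out.println(simple.addEdge("e1", "a", "b")); // true; missing endpoints are added
            System.out.println(simple.addEdge("e2", "a", "b")); // false; a -> b already has an edge

            DirectedSparseMultigraph<String, String> multi = new DirectedSparseMultigraph<String, String>();
            multi.addEdge("e1", "a", "b");
            multi.addEdge("e2", "a", "b");                   // accepted: parallel edges are permitted
            System.out.println(multi.getEdgeCount());        // 2
            System.out.println(multi.findEdgeSet("a", "b")); // both parallel edges, per findEdgeSet's contract
        }
    }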
- */ -@SuppressWarnings("serial") -public class DirectedSparseMultigraph - extends AbstractTypedGraph - implements DirectedGraph, MultiGraph { - - /** - * Returns a {@code Factory} that creates an instance of this graph type. - * @param the vertex type for the graph factory - * @param the edge type for the graph factory - */ - public static Factory> getFactory() { - return new Factory> () { - public DirectedGraph create() { - return new DirectedSparseMultigraph(); - } - }; - } - - protected Map>> vertices; // Map of vertices to Pair of adjacency sets {incoming, outgoing} - protected Map> edges; // Map of edges to incident vertex pairs - - /** - * Creates a new instance. - */ - public DirectedSparseMultigraph() { - super(EdgeType.DIRECTED); - vertices = new HashMap>>(); - edges = new HashMap>(); - } - - public Collection getEdges() { - return Collections.unmodifiableCollection(edges.keySet()); - } - - public Collection getVertices() { - return Collections.unmodifiableCollection(vertices.keySet()); - } - - public boolean containsVertex(V vertex) { - return vertices.keySet().contains(vertex); - } - - public boolean containsEdge(E edge) { - return edges.keySet().contains(edge); - } - - protected Collection getIncoming_internal(V vertex) - { - return vertices.get(vertex).getFirst(); - } - - protected Collection getOutgoing_internal(V vertex) - { - return vertices.get(vertex).getSecond(); - } - - public boolean addVertex(V vertex) { - if(vertex == null) { - throw new IllegalArgumentException("vertex may not be null"); - } - if (!containsVertex(vertex)) { - vertices.put(vertex, new Pair>(new HashSet(), new HashSet())); - return true; - } else { - return false; - } - } - - public boolean removeVertex(V vertex) { - if (!containsVertex(vertex)) - return false; - - // copy to avoid concurrent modification in removeEdge - Set incident = new HashSet(getIncoming_internal(vertex)); - incident.addAll(getOutgoing_internal(vertex)); - - for (E edge : incident) - removeEdge(edge); - - vertices.remove(vertex); - - return true; - } - - public boolean removeEdge(E edge) { - if (!containsEdge(edge)) - return false; - - Pair endpoints = this.getEndpoints(edge); - V source = endpoints.getFirst(); - V dest = endpoints.getSecond(); - - // remove edge from incident vertices' adjacency sets - getOutgoing_internal(source).remove(edge); - getIncoming_internal(dest).remove(edge); - - edges.remove(edge); - return true; - } - - - public Collection getInEdges(V vertex) { - if (!containsVertex(vertex)) - return null; - - return Collections.unmodifiableCollection(getIncoming_internal(vertex)); - } - - public Collection getOutEdges(V vertex) { - if (!containsVertex(vertex)) - return null; - - return Collections.unmodifiableCollection(getOutgoing_internal(vertex)); - } - - public Collection getPredecessors(V vertex) { - if (!containsVertex(vertex)) - return null; - - Set preds = new HashSet(); - for (E edge : getIncoming_internal(vertex)) - preds.add(this.getSource(edge)); - - return Collections.unmodifiableCollection(preds); - } - - public Collection getSuccessors(V vertex) { - if (!containsVertex(vertex)) - return null; - - Set succs = new HashSet(); - for (E edge : getOutgoing_internal(vertex)) - succs.add(this.getDest(edge)); - - return Collections.unmodifiableCollection(succs); - } - - public Collection getNeighbors(V vertex) { - if (!containsVertex(vertex)) - return null; - - Collection neighbors = new HashSet(); - for (E edge : getIncoming_internal(vertex)) - neighbors.add(this.getSource(edge)); - for (E edge : 
getOutgoing_internal(vertex)) - neighbors.add(this.getDest(edge)); - return Collections.unmodifiableCollection(neighbors); - } - - public Collection getIncidentEdges(V vertex) { - if (!containsVertex(vertex)) - return null; - - Collection incident = new HashSet(); - incident.addAll(getIncoming_internal(vertex)); - incident.addAll(getOutgoing_internal(vertex)); - return incident; - } - - @Override - public E findEdge(V v1, V v2) { - if (!containsVertex(v1) || !containsVertex(v2)) - return null; - for (E edge : getOutgoing_internal(v1)) - if (this.getDest(edge).equals(v2)) - return edge; - - return null; - } - - @Override - public boolean addEdge(E edge, Pair endpoints, EdgeType edgeType) - { - this.validateEdgeType(edgeType); - Pair new_endpoints = getValidatedEndpoints(edge, endpoints); - if (new_endpoints == null) - return false; - - edges.put(edge, new_endpoints); - - V source = new_endpoints.getFirst(); - V dest = new_endpoints.getSecond(); - - if (!containsVertex(source)) - this.addVertex(source); - - if (!containsVertex(dest)) - this.addVertex(dest); - - getIncoming_internal(dest).add(edge); - getOutgoing_internal(source).add(edge); - - return true; - } - - - public V getSource(E edge) { - if (!containsEdge(edge)) - return null; - return this.getEndpoints(edge).getFirst(); - } - - public V getDest(E edge) { - if (!containsEdge(edge)) - return null; - return this.getEndpoints(edge).getSecond(); - } - - public boolean isSource(V vertex, E edge) { - if (!containsEdge(edge) || !containsVertex(vertex)) - return false; - return vertex.equals(this.getEndpoints(edge).getFirst()); - } - - public boolean isDest(V vertex, E edge) { - if (!containsEdge(edge) || !containsVertex(vertex)) - return false; - return vertex.equals(this.getEndpoints(edge).getSecond()); - } - - public Pair getEndpoints(E edge) { - return edges.get(edge); - } - - public int getEdgeCount() { - return edges.size(); - } - - public int getVertexCount() { - return vertices.size(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/Forest.java b/gui/jung-src/edu/uci/ics/jung/graph/Forest.java deleted file mode 100644 index bab2124d..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/Forest.java +++ /dev/null @@ -1,91 +0,0 @@ -package edu.uci.ics.jung.graph; - -import java.util.Collection; - -/** - * An interface for a graph which consists of a collection of rooted - * directed acyclic graphs. - * - * @author Joshua O'Madadhain - */ -public interface Forest extends DirectedGraph { - - /** - * Returns a view of this graph as a collection of Tree instances. - * @return a view of this graph as a collection of Trees - */ - Collection> getTrees(); - - /** - * Returns the parent of vertex in this tree. - * (If vertex is the root, returns null.) - * The parent of a vertex is defined as being its predecessor in the - * (unique) shortest path from the root to this vertex. - * This is a convenience method which is equivalent to - * Graph.getPredecessors(vertex).iterator().next(). - * @return the parent of vertex in this tree - * @see Graph#getPredecessors(Object) - * @see #getParentEdge(Object) - */ - public V getParent(V vertex); - - /** - * Returns the edge connecting vertex to its parent in - * this tree. - * (If vertex is the root, returns null.) - * The parent of a vertex is defined as being its predecessor in the - * (unique) shortest path from the root to this vertex. - * This is a convenience method which is equivalent to - * Graph.getInEdges(vertex).iterator().next(), - * and also to Graph.findEdge(vertex, getParent(vertex)). 
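As a rough usage sketch for the Forest interface and the DelegateForest implementation earlier in this diff: a forest is typically populated by grafting whole trees via addTree and then navigated with the parent/child accessors described here. The sketch assumes DelegateForest's no-argument constructor (defined in the portion of that file above this excerpt) and uses made-up labels.

    import edu.uci.ics.jung.graph.DelegateForest;
    import edu.uci.ics.jung.graph.DelegateTree;

    public class ForestSketch {
        public static void main(String[] args) {
            DelegateForest<String, Integer> forest = new DelegateForest<String, Integer>();

            DelegateTree<String, Integer> t1 = new DelegateTree<String, Integer>();
            t1.setRoot("r1");
            t1.addChild(1, "r1", "x");

            DelegateTree<String, Integer> t2 = new DelegateTree<String, Integer>();
            t2.setRoot("r2");

            forest.addTree(t1);  // graft each tree in as a separate component
            forest.addTree(t2);

            System.out.println(forest.getRoots());         // contains r1 and r2 (unordered)
            System.out.println(forest.getTrees().size());  // 2 component trees
            System.out.println(forest.getParent("x"));     // r1
            System.out.println(forest.getParent("r1"));    // null: r1 is a root
        }
    }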
- * @return the edge connecting vertex to its parent, or - * null if vertex is the root - * @see Graph#getInEdges(Object) - * @see #getParent(Object) - */ - public E getParentEdge(V vertex); - - /** - * Returns the children of vertex in this tree. - * The children of a vertex are defined as being the successors of - * that vertex on the respective (unique) shortest paths from the root to - * those vertices. - * This is syntactic (maple) sugar for getSuccessors(vertex). - * @param vertex the vertex whose children are to be returned - * @return the Collection of children of vertex - * in this tree - * @see Graph#getSuccessors(Object) - * @see #getChildEdges(Object) - */ - public Collection getChildren(V vertex); - - /** - * Returns the edges connecting vertex to its children - * in this tree. - * The children of a vertex are defined as being the successors of - * that vertex on the respective (unique) shortest paths from the root to - * those vertices. - * This is syntactic (maple) sugar for getOutEdges(vertex). - * @param vertex the vertex whose child edges are to be returned - * @return the Collection of edges connecting - * vertex to its children in this tree - * @see Graph#getOutEdges(Object) - * @see #getChildren(Object) - */ - public Collection getChildEdges(V vertex); - - /** - * Returns the number of children that vertex has in this tree. - * The children of a vertex are defined as being the successors of - * that vertex on the respective (unique) shortest paths from the root to - * those vertices. - * This is syntactic (maple) sugar for getSuccessorCount(vertex). - * @param vertex the vertex whose child edges are to be returned - * @return the Collection of edges connecting - * vertex to its children in this tree - * @see #getChildEdges(Object) - * @see #getChildren(Object) - * @see Graph#getSuccessorCount(Object) - */ - public int getChildCount(V vertex); -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/Graph.java b/gui/jung-src/edu/uci/ics/jung/graph/Graph.java deleted file mode 100644 index 24003e7a..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/Graph.java +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Created on Oct 17, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.util.Collection; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - - -/** - * A graph consisting of a set of vertices of type V - * set and a set of edges of type E. Edges of this - * graph type have exactly two endpoints; whether these endpoints - * must be distinct depends on the implementation. - *

- * This interface permits, but does not enforce, any of the following
- * common variations of graphs:
- *   • directed and undirected edges
- *   • vertices and edges with attributes (for example, weighted edges)
- *   • vertices and edges of different types (for example, bipartite or multimodal graphs)
- *   • parallel edges (multiple edges which connect a single set of vertices)
- *   • representations as matrices or as adjacency lists or adjacency maps
- * Extensions or implementations of this interface may enforce or disallow
- * any or all of these variations.
- *
- * Definitions (with respect to a given vertex v):
- *   • incoming edge of v: an edge that can be traversed from a neighbor of v to reach v
- *   • outgoing edge of v: an edge that can be traversed from v to reach some neighbor of v
- *   • predecessor of v: a vertex at the other end of an incoming edge of v
- *   • successor of v: a vertex at the other end of an outgoing edge of v
          - * - * @author Joshua O'Madadhain - */ -public interface Graph extends Hypergraph -{ - /** - * Returns a Collection view of the incoming edges incident to vertex - * in this graph. - * @param vertex the vertex whose incoming edges are to be returned - * @return a Collection view of the incoming edges incident - * to vertex in this graph - */ - Collection getInEdges(V vertex); - - /** - * Returns a Collection view of the outgoing edges incident to vertex - * in this graph. - * @param vertex the vertex whose outgoing edges are to be returned - * @return a Collection view of the outgoing edges incident - * to vertex in this graph - */ - Collection getOutEdges(V vertex); - - /** - * Returns a Collection view of the predecessors of vertex - * in this graph. A predecessor of vertex is defined as a vertex v - * which is connected to - * vertex by an edge e, where e is an outgoing edge of - * v and an incoming edge of vertex. - * @param vertex the vertex whose predecessors are to be returned - * @return a Collection view of the predecessors of - * vertex in this graph - */ - Collection getPredecessors(V vertex); - - /** - * Returns a Collection view of the successors of vertex - * in this graph. A successor of vertex is defined as a vertex v - * which is connected to - * vertex by an edge e, where e is an incoming edge of - * v and an outgoing edge of vertex. - * @param vertex the vertex whose predecessors are to be returned - * @return a Collection view of the successors of - * vertex in this graph - */ - Collection getSuccessors(V vertex); - - /** - * Returns the number of incoming edges incident to vertex. - * Equivalent to getInEdges(vertex).size(). - * @param vertex the vertex whose indegree is to be calculated - * @return the number of incoming edges incident to vertex - */ - int inDegree(V vertex); - - /** - * Returns the number of outgoing edges incident to vertex. - * Equivalent to getOutEdges(vertex).size(). - * @param vertex the vertex whose outdegree is to be calculated - * @return the number of outgoing edges incident to vertex - */ - int outDegree(V vertex); - - /** - * Returns true if v1 is a predecessor of v2 in this graph. - * Equivalent to v1.getPredecessors().contains(v2). - * @param v1 the first vertex to be queried - * @param v2 the second vertex to be queried - * @return true if v1 is a predecessor of v2, and false otherwise. - */ - boolean isPredecessor(V v1, V v2); - - /** - * Returns true if v1 is a successor of v2 in this graph. - * Equivalent to v1.getSuccessors().contains(v2). - * @param v1 the first vertex to be queried - * @param v2 the second vertex to be queried - * @return true if v1 is a successor of v2, and false otherwise. - */ - boolean isSuccessor(V v1, V v2); - - /** - * Returns the number of predecessors that vertex has in this graph. - * Equivalent to vertex.getPredecessors().size(). - * @param vertex the vertex whose predecessor count is to be returned - * @return the number of predecessors that vertex has in this graph - */ - int getPredecessorCount(V vertex); - - /** - * Returns the number of successors that vertex has in this graph. - * Equivalent to vertex.getSuccessors().size(). - * @param vertex the vertex whose successor count is to be returned - * @return the number of successors that vertex has in this graph - */ - int getSuccessorCount(V vertex); - - /** - * If directed_edge is a directed edge in this graph, returns the source; - * otherwise returns null. 
- * The source of a directed edge d is defined to be the vertex for which - * d is an outgoing edge. - * directed_edge is guaranteed to be a directed edge if - * its EdgeType is DIRECTED. - * @param directed_edge - * @return the source of directed_edge if it is a directed edge in this graph, or null otherwise - */ - V getSource(E directed_edge); - - /** - * If directed_edge is a directed edge in this graph, returns the destination; - * otherwise returns null. - * The destination of a directed edge d is defined to be the vertex - * incident to d for which - * d is an incoming edge. - * directed_edge is guaranteed to be a directed edge if - * its EdgeType is DIRECTED. - * @param directed_edge - * @return the destination of directed_edge if it is a directed edge in this graph, or null otherwise - */ - V getDest(E directed_edge); - - /** - * Returns true if vertex is the source of edge. - * Equivalent to getSource(edge).equals(vertex). - * @param vertex the vertex to be queried - * @param edge the edge to be queried - * @return true iff vertex is the source of edge - */ - boolean isSource(V vertex, E edge); - - /** - * Returns true if vertex is the destination of edge. - * Equivalent to getDest(edge).equals(vertex). - * @param vertex the vertex to be queried - * @param edge the edge to be queried - * @return true iff vertex is the destination of edge - */ - boolean isDest(V vertex, E edge); - - /** - * Adds edge e to this graph such that it connects - * vertex v1 to v2. - * Equivalent to addEdge(e, new Pair(v1, v2)). - * If this graph does not contain v1, v2, - * or both, implementations may choose to either silently add - * the vertices to the graph or throw an IllegalArgumentException. - * If this graph assigns edge types to its edges, the edge type of - * e will be the default for this graph. - * See Hypergraph.addEdge() for a listing of possible reasons - * for failure. - * @param e the edge to be added - * @param v1 the first vertex to be connected - * @param v2 the second vertex to be connected - * @return true if the add is successful, false otherwise - * @see Hypergraph#addEdge(Object, Collection) - * @see #addEdge(Object, Object, Object, EdgeType) - */ - boolean addEdge(E e, V v1, V v2); - - /** - * Adds edge e to this graph such that it connects - * vertex v1 to v2. - * Equivalent to addEdge(e, new Pair(v1, v2)). - * If this graph does not contain v1, v2, - * or both, implementations may choose to either silently add - * the vertices to the graph or throw an IllegalArgumentException. - * If edgeType is not legal for this graph, this method will - * throw IllegalArgumentException. - * See Hypergraph.addEdge() for a listing of possible reasons - * for failure. - * @param e the edge to be added - * @param v1 the first vertex to be connected - * @param v2 the second vertex to be connected - * @param edgeType the type to be assigned to the edge - * @return true if the add is successful, false otherwise - * @see Hypergraph#addEdge(Object, Collection) - * @see #addEdge(Object, Object, Object) - */ - boolean addEdge(E e, V v1, V v2, EdgeType edgeType); - - /** - * Returns the endpoints of edge as a Pair. - * @param edge the edge whose endpoints are to be returned - * @return the endpoints (incident vertices) of edge - */ - Pair getEndpoints(E edge); - - /** - * Returns the vertex at the other end of edge from vertex. - * (That is, returns the vertex incident to edge which is not vertex.) 
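A compact illustration of the directed-edge accessors described above, using the DirectedSparseGraph implementation from this same diff; the vertex and edge labels are illustrative, and the expected values in the comments follow the documented contracts.

    import edu.uci.ics.jung.graph.DirectedSparseGraph;
    import edu.uci.ics.jung.graph.Graph;

    public class GraphAccessorSketch {
        public static void main(String[] args) {
            Graph<String, String> g = new DirectedSparseGraph<String, String>();
            g.addEdge("a->b", "a", "b");  // endpoints are added implicitly if absent
            g.addEdge("b->c", "b", "c");

            System.out.println(g.getSource("a->b"));        // a
            System.out.println(g.getDest("a->b"));          // b
            System.out.println(g.getEndpoints("a->b"));     // the (a, b) endpoint pair
            System.out.println(g.getOpposite("a", "a->b")); // b
            System.out.println(g.getPredecessors("b"));     // [a]
            System.out.println(g.getSuccessors("b"));       // [c]
            System.out.println(g.inDegree("b") + "/" + g.outDegree("b")); // 1/1
        }
    }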
- * @param vertex the vertex to be queried - * @param edge the edge to be queried - * @return the vertex at the other end of edge from vertex - */ - V getOpposite(V vertex, E edge); -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/GraphDecorator.java b/gui/jung-src/edu/uci/ics/jung/graph/GraphDecorator.java deleted file mode 100644 index bc8529f0..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/GraphDecorator.java +++ /dev/null @@ -1,332 +0,0 @@ -package edu.uci.ics.jung.graph; - -import java.io.Serializable; -import java.util.Collection; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of Graph that delegates its method calls to a - * constructor-specified Graph instance. This is useful for adding - * additional behavior (such as synchronization or unmodifiability) to an existing - * instance. - */ -@SuppressWarnings("serial") -public class GraphDecorator implements Graph, Serializable { - - protected Graph delegate; - - /** - * Creates a new instance based on the provided {@code delegate}. - * @param delegate - */ - public GraphDecorator(Graph delegate) { - this.delegate = delegate; - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#addEdge(java.lang.Object, java.util.Collection) - */ - public boolean addEdge(E edge, Collection vertices) { - return delegate.addEdge(edge, vertices); - } - - /** - * @see Hypergraph#addEdge(Object, Collection, EdgeType) - */ - public boolean addEdge(E edge, Collection vertices, EdgeType - edge_type) - { - return delegate.addEdge(edge, vertices, edge_type); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#addEdge(java.lang.Object, java.lang.Object, java.lang.Object, edu.uci.ics.jung.graph.util.EdgeType) - */ - public boolean addEdge(E e, V v1, V v2, EdgeType edgeType) { - return delegate.addEdge(e, v1, v2, edgeType); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#addEdge(java.lang.Object, java.lang.Object, java.lang.Object) - */ - public boolean addEdge(E e, V v1, V v2) { - return delegate.addEdge(e, v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#addVertex(java.lang.Object) - */ - public boolean addVertex(V vertex) { - return delegate.addVertex(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#isIncident(java.lang.Object, java.lang.Object) - */ - public boolean isIncident(V vertex, E edge) { - return delegate.isIncident(vertex, edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#isNeighbor(java.lang.Object, java.lang.Object) - */ - public boolean isNeighbor(V v1, V v2) { - return delegate.isNeighbor(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#degree(java.lang.Object) - */ - public int degree(V vertex) { - return delegate.degree(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#findEdge(java.lang.Object, java.lang.Object) - */ - public E findEdge(V v1, V v2) { - return delegate.findEdge(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#findEdgeSet(java.lang.Object, java.lang.Object) - */ - public Collection findEdgeSet(V v1, V v2) { - return delegate.findEdgeSet(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getDest(java.lang.Object) - */ - public V getDest(E directed_edge) { - return delegate.getDest(directed_edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getEdgeCount() - */ - public int getEdgeCount() { - return delegate.getEdgeCount(); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getEdgeCount(EdgeType) - */ - public int getEdgeCount(EdgeType 
edge_type) - { - return delegate.getEdgeCount(edge_type); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getEdges() - */ - public Collection getEdges() { - return delegate.getEdges(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getEdges(edu.uci.ics.jung.graph.util.EdgeType) - */ - public Collection getEdges(EdgeType edgeType) { - return delegate.getEdges(edgeType); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getEdgeType(java.lang.Object) - */ - public EdgeType getEdgeType(E edge) { - return delegate.getEdgeType(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getDefaultEdgeType() - */ - public EdgeType getDefaultEdgeType() - { - return delegate.getDefaultEdgeType(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getEndpoints(java.lang.Object) - */ - public Pair getEndpoints(E edge) { - return delegate.getEndpoints(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentCount(java.lang.Object) - */ - public int getIncidentCount(E edge) { - return delegate.getIncidentCount(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentEdges(java.lang.Object) - */ - public Collection getIncidentEdges(V vertex) { - return delegate.getIncidentEdges(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentVertices(java.lang.Object) - */ - public Collection getIncidentVertices(E edge) { - return delegate.getIncidentVertices(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getInEdges(java.lang.Object) - */ - public Collection getInEdges(V vertex) { - return delegate.getInEdges(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getNeighborCount(java.lang.Object) - */ - public int getNeighborCount(V vertex) { - return delegate.getNeighborCount(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getNeighbors(java.lang.Object) - */ - public Collection getNeighbors(V vertex) { - return delegate.getNeighbors(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getOpposite(java.lang.Object, java.lang.Object) - */ - public V getOpposite(V vertex, E edge) { - return delegate.getOpposite(vertex, edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getOutEdges(java.lang.Object) - */ - public Collection getOutEdges(V vertex) { - return delegate.getOutEdges(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getPredecessorCount(java.lang.Object) - */ - public int getPredecessorCount(V vertex) { - return delegate.getPredecessorCount(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getPredecessors(java.lang.Object) - */ - public Collection getPredecessors(V vertex) { - return delegate.getPredecessors(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getSource(java.lang.Object) - */ - public V getSource(E directed_edge) { - return delegate.getSource(directed_edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getSuccessorCount(java.lang.Object) - */ - public int getSuccessorCount(V vertex) { - return delegate.getSuccessorCount(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getSuccessors(java.lang.Object) - */ - public Collection getSuccessors(V vertex) { - return delegate.getSuccessors(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getVertexCount() - */ - public int getVertexCount() { - return delegate.getVertexCount(); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getVertices() - */ - public Collection getVertices() { - return delegate.getVertices(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#inDegree(java.lang.Object) - 
*/ - public int inDegree(V vertex) { - return delegate.inDegree(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isDest(java.lang.Object, java.lang.Object) - */ - public boolean isDest(V vertex, E edge) { - return delegate.isDest(vertex, edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isPredecessor(java.lang.Object, java.lang.Object) - */ - public boolean isPredecessor(V v1, V v2) { - return delegate.isPredecessor(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isSource(java.lang.Object, java.lang.Object) - */ - public boolean isSource(V vertex, E edge) { - return delegate.isSource(vertex, edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isSuccessor(java.lang.Object, java.lang.Object) - */ - public boolean isSuccessor(V v1, V v2) { - return delegate.isSuccessor(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#outDegree(java.lang.Object) - */ - public int outDegree(V vertex) { - return delegate.outDegree(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#removeEdge(java.lang.Object) - */ - public boolean removeEdge(E edge) { - return delegate.removeEdge(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#removeVertex(java.lang.Object) - */ - public boolean removeVertex(V vertex) { - return delegate.removeVertex(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#containsEdge(java.lang.Object) - */ - public boolean containsEdge(E edge) { - return delegate.containsEdge(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#containsVertex(java.lang.Object) - */ - public boolean containsVertex(V vertex) { - return delegate.containsVertex(vertex); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/Hypergraph.java b/gui/jung-src/edu/uci/ics/jung/graph/Hypergraph.java deleted file mode 100644 index 40945501..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/Hypergraph.java +++ /dev/null @@ -1,436 +0,0 @@ -/* - * Created on Oct 17, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.util.Collection; - -import edu.uci.ics.jung.graph.util.EdgeType; - -/** - * A hypergraph, consisting of a set of vertices of type V - * and a set of hyperedges of type E which connect the vertices. - * This is the base interface for all JUNG graph types. - *

- * This interface permits, but does not enforce, any of the following
- * common variations of graphs:
- *   • hyperedges (edges which connect a set of vertices of any size)
- *   • edges (these have exactly two endpoints, which may or may not be distinct)
- *   • self-loops (edges which connect exactly one vertex)
- *   • directed and undirected edges
- *   • vertices and edges with attributes (for example, weighted edges)
- *   • vertices and edges with different constraints or properties (for example, bipartite or multimodal graphs)
- *   • parallel edges (multiple edges which connect a single set of vertices)
- *   • internal representations as matrices or as adjacency lists or adjacency maps
- * Extensions or implementations of this interface may enforce or disallow
- * any or all of these variations.
- *
- * Notes:
- *   • The collections returned by Hypergraph instances should be treated
- *     in general as if read-only. While they are not contractually guaranteed
- *     (or required) to be immutable, this interface does not define the outcome
- *     if they are mutated. Mutations should be done via {add,remove}{Edge,Vertex},
- *     or in the constructor (a brief usage sketch follows below).
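A small sketch of the note above, using the concrete DirectedSparseMultigraph from this diff and illustrative labels: mutate only through the graph's own methods, and copy any returned collection before changing it.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    import edu.uci.ics.jung.graph.DirectedSparseMultigraph;
    import edu.uci.ics.jung.graph.Hypergraph;

    public class ReadOnlyViewSketch {
        public static void main(String[] args) {
            Hypergraph<String, Integer> g = new DirectedSparseMultigraph<String, Integer>();
            g.addVertex("a");
            g.addVertex("b");
            g.addEdge(1, Arrays.asList("a", "b"));   // mutate via addVertex/addEdge only

            // Treat returned collections as read-only views: copy before changing.
            List<String> snapshot = new ArrayList<String>(g.getVertices());
            snapshot.add("c");                       // only the local copy changes
            System.out.println(g.getVertexCount());  // still 2
        }
    }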
          - * - * @author Joshua O'Madadhain - */ -public interface Hypergraph -{ - /** - * Returns a view of all edges in this graph. In general, this - * obeys the Collection contract, and therefore makes no guarantees - * about the ordering of the vertices within the set. - * @return a Collection view of all edges in this graph - */ - Collection getEdges(); - - /** - * Returns a view of all vertices in this graph. In general, this - * obeys the Collection contract, and therefore makes no guarantees - * about the ordering of the vertices within the set. - * @return a Collection view of all vertices in this graph - */ - Collection getVertices(); - - /** - * Returns true if this graph's vertex collection contains vertex. - * Equivalent to getVertices().contains(vertex). - * @param vertex the vertex whose presence is being queried - * @return true iff this graph contains a vertex vertex - */ - boolean containsVertex(V vertex); - - /** - * Returns true if this graph's edge collection contains edge. - * Equivalent to getEdges().contains(edge). - * @param edge the edge whose presence is being queried - * @return true iff this graph contains an edge edge - */ - boolean containsEdge(E edge); - - /** - * Returns the number of edges in this graph. - * @return the number of edges in this graph - */ - int getEdgeCount(); - - /** - * Returns the number of vertices in this graph. - * @return the number of vertices in this graph - */ - int getVertexCount(); - - /** - * Returns the collection of vertices which are connected to vertex - * via any edges in this graph. - * If vertex is connected to itself with a self-loop, then - * it will be included in the collection returned. - * - * @param vertex the vertex whose neighbors are to be returned - * @return the collection of vertices which are connected to vertex, - * or null if vertex is not present - */ - Collection getNeighbors(V vertex); - - /** - * Returns the collection of edges in this graph which are connected to vertex. - * - * @param vertex the vertex whose incident edges are to be returned - * @return the collection of edges which are connected to vertex, - * or null if vertex is not present - */ - Collection getIncidentEdges(V vertex); - - /** - * Returns the collection of vertices in this graph which are connected to edge. - * Note that for some graph types there are guarantees about the size of this collection - * (i.e., some graphs contain edges that have exactly two endpoints, which may or may - * not be distinct). Implementations for those graph types may provide alternate methods - * that provide more convenient access to the vertices. - * - * @param edge the edge whose incident vertices are to be returned - * @return the collection of vertices which are connected to edge, - * or null if edge is not present - */ - Collection getIncidentVertices(E edge); - - /** - * Returns an edge that connects this vertex to v. - * If this edge is not uniquely - * defined (that is, if the graph contains more than one edge connecting - * v1 to v2), any of these edges - * may be returned. findEdgeSet(v1, v2) may be - * used to return all such edges. - * Returns null if either of the following is true: - *
- *   • v2 is not connected to v1
- *   • either v1 or v2 are not present in this graph

          Note: for purposes of this method, v1 is only considered to be connected to - * v2 via a given directed edge e if - * v1 == e.getSource() && v2 == e.getDest() evaluates to true. - * (v1 and v2 are connected by an undirected edge u if - * u is incident to both v1 and v2.) - * - * @return an edge that connects v1 to v2, - * or null if no such edge exists (or either vertex is not present) - * @see Hypergraph#findEdgeSet(Object, Object) - */ - E findEdge(V v1, V v2); - - /** - * Returns all edges that connects this vertex to v. - * If this edge is not uniquely - * defined (that is, if the graph contains more than one edge connecting - * v1 to v2), any of these edges - * may be returned. findEdgeSet(v1, v2) may be - * used to return all such edges. - * Returns null if v2 is not connected to v1. - *
          Returns an empty collection if either v1 or v2 are not present in this graph. - * - *

          Note: for purposes of this method, v1 is only considered to be connected to - * v2 via a given directed edge d if - * v1 == d.getSource() && v2 == d.getDest() evaluates to true. - * (v1 and v2 are connected by an undirected edge u if - * u is incident to both v1 and v2.) - * - * @return a collection containing all edges that connect v1 to v2, - * or null if either vertex is not present - * @see Hypergraph#findEdge(Object, Object) - */ - Collection findEdgeSet(V v1, V v2); - - /** - * Adds vertex to this graph. - * Fails if vertex is null or already in the graph. - * - * @param vertex the vertex to add - * @return true if the add is successful, and false otherwise - * @throws IllegalArgumentException if vertex is null - */ - boolean addVertex(V vertex); - - /** - * Adds edge to this graph. - * Fails under the following circumstances: - *

- *   • edge is already an element of the graph
- *   • either edge or vertices is null
- *   • vertices has the wrong number of vertices for the graph type
- *   • vertices are already connected by another edge in this graph,
- *     and this graph does not accept parallel edges
          - * - * @param edge - * @param vertices - * @return true if the add is successful, and false otherwise - * @throws IllegalArgumentException if edge or vertices is null, - * or if a different vertex set in this graph is already connected by edge, - * or if vertices are not a legal vertex set for edge - */ - boolean addEdge(E edge, Collection vertices); - - /** - * Adds edge to this graph with type edge_type. - * Fails under the following circumstances: - *
- *   • edge is already an element of the graph
- *   • either edge or vertices is null
- *   • vertices has the wrong number of vertices for the graph type
- *   • vertices are already connected by another edge in this graph,
- *     and this graph does not accept parallel edges
- *   • edge_type is not legal for this graph
          - * - * @param edge - * @param vertices - * @return true if the add is successful, and false otherwise - * @throws IllegalArgumentException if edge or vertices is null, - * or if a different vertex set in this graph is already connected by edge, - * or if vertices are not a legal vertex set for edge - */ - boolean addEdge(E edge, Collection vertices, EdgeType - edge_type); - - /** - * Removes vertex from this graph. - * As a side effect, removes any edges e incident to vertex if the - * removal of vertex would cause e to be incident to an illegal - * number of vertices. (Thus, for example, incident hyperedges are not removed, but - * incident edges--which must be connected to a vertex at both endpoints--are removed.) - * - *

- * Fails under the following circumstances:
- *   • vertex is not an element of this graph
- *   • vertex is null
          - * - * @param vertex the vertex to remove - * @return true if the removal is successful, false otherwise - */ - boolean removeVertex(V vertex); - - /** - * Removes edge from this graph. - * Fails if edge is null, or is otherwise not an element of this graph. - * - * @param edge the edge to remove - * @return true if the removal is successful, false otherwise - */ - boolean removeEdge(E edge); - - - /** - * Returns true if v1 and v2 share an incident edge. - * Equivalent to getNeighbors(v1).contains(v2). - * - * @param v1 the first vertex to test - * @param v2 the second vertex to test - * @return true if v1 and v2 share an incident edge - */ - boolean isNeighbor(V v1, V v2); - - /** - * Returns true if vertex and edge - * are incident to each other. - * Equivalent to getIncidentEdges(vertex).contains(edge) and to - * getIncidentVertices(edge).contains(vertex). - * @param vertex - * @param edge - * @return true if vertex and edge - * are incident to each other - */ - boolean isIncident(V vertex, E edge); - - /** - * Returns the number of edges incident to vertex. - * Special cases of interest: - *
- *   • Incident self-loops are counted once.
- *   • If there is only one edge that connects this vertex to each of its
- *     neighbors (and vice versa), then the value returned will also be equal
- *     to the number of neighbors that this vertex has (that is, the output
- *     of getNeighborCount).
- *   • If the graph is directed, then the value returned will be the sum of
- *     this vertex's indegree (the number of edges whose destination is this
- *     vertex) and its outdegree (the number of edges whose source is this
- *     vertex), minus the number of incident self-loops (to avoid double-counting).

          Equivalent to getIncidentEdges(vertex).size(). - * - * @param vertex the vertex whose degree is to be returned - * @return the degree of this node - * @see Hypergraph#getNeighborCount(Object) - */ - int degree(V vertex); - - /** - * Returns the number of vertices that are adjacent to vertex - * (that is, the number of vertices that are incident to edges in vertex's - * incident edge set). - * - *

          Equivalent to getNeighbors(vertex).size(). - * @param vertex the vertex whose neighbor count is to be returned - * @return the number of neighboring vertices - */ - int getNeighborCount(V vertex); - - /** - * Returns the number of vertices that are incident to edge. - * For hyperedges, this can be any nonnegative integer; for edges this - * must be 2 (or 1 if self-loops are permitted). - * - *
          Equivalent to getIncidentVertices(edge).size(). - * @param edge the edge whose incident vertex count is to be returned - * @return the number of vertices that are incident to edge. - */ - int getIncidentCount(E edge); - - /** - * Returns the edge type of edge in this graph. - * @param edge - * @return the EdgeType of edge, or null if edge has no defined type - */ - EdgeType getEdgeType(E edge); - - /** - * Returns the default edge type for this graph. - * - * @return the default edge type for this graph - */ - EdgeType getDefaultEdgeType(); - - /** - * Returns the collection of edges in this graph which are of type edge_type. - * @param edge_type the type of edges to be returned - * @return the collection of edges which are of type edge_type, or - * null if the graph does not accept edges of this type - * @see EdgeType - */ - Collection getEdges(EdgeType edge_type); - - /** - * Returns the number of edges of type edge_type in this graph. - * @param edge_type the type of edge for which the count is to be returned - * @return the number of edges of type edge_type in this graph - */ - int getEdgeCount(EdgeType edge_type); - - /** - * Returns a Collection view of the incoming edges incident to vertex - * in this graph. - * @param vertex the vertex whose incoming edges are to be returned - * @return a Collection view of the incoming edges incident - * to vertex in this graph - */ - Collection getInEdges(V vertex); - - /** - * Returns a Collection view of the outgoing edges incident to vertex - * in this graph. - * @param vertex the vertex whose outgoing edges are to be returned - * @return a Collection view of the outgoing edges incident - * to vertex in this graph - */ - Collection getOutEdges(V vertex); - - /** - * Returns the number of incoming edges incident to vertex. - * Equivalent to getInEdges(vertex).size(). - * @param vertex the vertex whose indegree is to be calculated - * @return the number of incoming edges incident to vertex - */ - int inDegree(V vertex); - - /** - * Returns the number of outgoing edges incident to vertex. - * Equivalent to getOutEdges(vertex).size(). - * @param vertex the vertex whose outdegree is to be calculated - * @return the number of outgoing edges incident to vertex - */ - int outDegree(V vertex); - - /** - * If directed_edge is a directed edge in this graph, returns the source; - * otherwise returns null. - * The source of a directed edge d is defined to be the vertex for which - * d is an outgoing edge. - * directed_edge is guaranteed to be a directed edge if - * its EdgeType is DIRECTED. - * @param directed_edge - * @return the source of directed_edge if it is a directed edge in this graph, or null otherwise - */ - V getSource(E directed_edge); - - /** - * If directed_edge is a directed edge in this graph, returns the destination; - * otherwise returns null. - * The destination of a directed edge d is defined to be the vertex - * incident to d for which - * d is an incoming edge. - * directed_edge is guaranteed to be a directed edge if - * its EdgeType is DIRECTED. - * @param directed_edge - * @return the destination of directed_edge if it is a directed edge in this graph, or null otherwise - */ - V getDest(E directed_edge); - - /** - * Returns a Collection view of the predecessors of vertex - * in this graph. A predecessor of vertex is defined as a vertex v - * which is connected to - * vertex by an edge e, where e is an outgoing edge of - * v and an incoming edge of vertex. 
- * @param vertex the vertex whose predecessors are to be returned - * @return a Collection view of the predecessors of - * vertex in this graph - */ - Collection getPredecessors(V vertex); - - /** - * Returns a Collection view of the successors of vertex - * in this graph. A successor of vertex is defined as a vertex v - * which is connected to - * vertex by an edge e, where e is an incoming edge of - * v and an outgoing edge of vertex. - * @param vertex the vertex whose predecessors are to be returned - * @return a Collection view of the successors of - * vertex in this graph - */ - Collection getSuccessors(V vertex); -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/KPartiteGraph.java b/gui/jung-src/edu/uci/ics/jung/graph/KPartiteGraph.java deleted file mode 100644 index 74c0631a..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/KPartiteGraph.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Created on May 24, 2008 - * - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.util.Collection; - -import org.apache.commons.collections15.Predicate; - -/** - * An interface for graphs whose vertices are each members of one of 2 or more - * disjoint sets (partitions), and whose edges connect only vertices in distinct - * partitions. - * - * @author Joshua O'Madadhain - */ -public interface KPartiteGraph extends Graph -{ - /** - * Returns all vertices which satisfy the specified partition predicate. - * @param partition Predicate which defines a partition - * @return all vertices satisfying partition - */ - public Collection getVertices(Predicate partition); - - /** - * Returns the set of Predicate instances which define this graph's partitions. - * @return the set of Predicate instances which define this graph's partitions - */ - public Collection> getPartitions(); - -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/MultiGraph.java b/gui/jung-src/edu/uci/ics/jung/graph/MultiGraph.java deleted file mode 100644 index ff309195..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/MultiGraph.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Created on Aug 31, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -/** - * A tagging interface which indicates that the implementing graph accepts - * parallel edges. 
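The degree and neighbour-count methods above are documented as simple equivalences: degree(v) is getIncidentEdges(v).size(), getNeighborCount(v) is getNeighbors(v).size(), and isNeighbor(v1, v2) is getNeighbors(v1).contains(v2). A minimal illustrative sketch of those contracts, assuming a JUNG 2.x classpath and using the SparseMultigraph implementation that appears further down in this diff (the snippet is not part of the removed sources; vertex and edge types are arbitrary):

    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.SparseMultigraph;

    public class HypergraphContractDemo {
        public static void main(String[] args) {
            Graph<String, Integer> g = new SparseMultigraph<String, Integer>();
            g.addVertex("a"); g.addVertex("b"); g.addVertex("c");
            g.addEdge(1, "a", "b");   // undirected by default (getDefaultEdgeType)
            g.addEdge(2, "a", "c");
            // degree(v) is documented as getIncidentEdges(v).size()
            System.out.println(g.degree("a") == g.getIncidentEdges("a").size());       // true
            // getNeighborCount(v) is documented as getNeighbors(v).size()
            System.out.println(g.getNeighborCount("a") == g.getNeighbors("a").size()); // true
            // isNeighbor(v1, v2) is documented as getNeighbors(v1).contains(v2)
            System.out.println(g.isNeighbor("b", "a"));                                // true
        }
    }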
- * - * @author Joshua O'Madadhain - */ -public interface MultiGraph {} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/ObservableGraph.java b/gui/jung-src/edu/uci/ics/jung/graph/ObservableGraph.java deleted file mode 100644 index 1e066e64..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/ObservableGraph.java +++ /dev/null @@ -1,136 +0,0 @@ -package edu.uci.ics.jung.graph; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; - -import edu.uci.ics.jung.graph.event.GraphEvent; -import edu.uci.ics.jung.graph.event.GraphEventListener; -import edu.uci.ics.jung.graph.util.EdgeType; - -/** - * A decorator class for graphs which generates events - * - * @author Joshua O'Madadhain - */ -@SuppressWarnings("serial") -public class ObservableGraph extends GraphDecorator { - - List> listenerList = - Collections.synchronizedList(new LinkedList>()); - - /** - * Creates a new instance based on the provided {@code delegate}. - */ - public ObservableGraph(Graph delegate) { - super(delegate); - } - - /** - * Adds {@code l} as a listener to this graph. - */ - public void addGraphEventListener(GraphEventListener l) { - listenerList.add(l); - } - - /** - * Removes {@code l} as a listener to this graph. - */ - public void removeGraphEventListener(GraphEventListener l) { - listenerList.remove(l); - } - - protected void fireGraphEvent(GraphEvent evt) { - for(GraphEventListener listener : listenerList) { - listener.handleGraphEvent(evt); - } - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#addEdge(java.lang.Object, java.util.Collection) - */ - @Override - public boolean addEdge(E edge, Collection vertices) { - boolean state = super.addEdge(edge, vertices); - if(state) { - GraphEvent evt = new GraphEvent.Edge(delegate, GraphEvent.Type.EDGE_ADDED, edge); - fireGraphEvent(evt); - } - return state; - } - - /** - * @see edu.uci.ics.jung.graph.Graph#addEdge(java.lang.Object, java.lang.Object, java.lang.Object, edu.uci.ics.jung.graph.util.EdgeType) - */ - @Override - public boolean addEdge(E e, V v1, V v2, EdgeType edgeType) { - boolean state = super.addEdge(e, v1, v2, edgeType); - if(state) { - GraphEvent evt = new GraphEvent.Edge(delegate, GraphEvent.Type.EDGE_ADDED, e); - fireGraphEvent(evt); - } - return state; - } - - /** - * @see edu.uci.ics.jung.graph.Graph#addEdge(java.lang.Object, java.lang.Object, java.lang.Object) - */ - @Override - public boolean addEdge(E e, V v1, V v2) { - boolean state = super.addEdge(e, v1, v2); - if(state) { - GraphEvent evt = new GraphEvent.Edge(delegate, GraphEvent.Type.EDGE_ADDED, e); - fireGraphEvent(evt); - } - return state; - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#addVertex(java.lang.Object) - */ - @Override - public boolean addVertex(V vertex) { - boolean state = super.addVertex(vertex); - if(state) { - GraphEvent evt = new GraphEvent.Vertex(delegate, GraphEvent.Type.VERTEX_ADDED, vertex); - fireGraphEvent(evt); - } - return state; - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#removeEdge(java.lang.Object) - */ - @Override - public boolean removeEdge(E edge) { - boolean state = delegate.removeEdge(edge); - if(state) { - GraphEvent evt = new GraphEvent.Edge(delegate, GraphEvent.Type.EDGE_REMOVED, edge); - fireGraphEvent(evt); - } - return state; - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#removeVertex(java.lang.Object) - */ - @Override - public boolean removeVertex(V vertex) { - // remove all incident edges first, so that the appropriate events will - 
// be fired (otherwise they'll be removed inside {@code delegate.removeVertex} - // and the events will not be fired) - Collection incident_edges = new ArrayList(delegate.getIncidentEdges(vertex)); - for (E e : incident_edges) - this.removeEdge(e); - - boolean state = delegate.removeVertex(vertex); - if(state) { - GraphEvent evt = new GraphEvent.Vertex(delegate, GraphEvent.Type.VERTEX_REMOVED, vertex); - fireGraphEvent(evt); - } - return state; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/OrderedKAryTree.java b/gui/jung-src/edu/uci/ics/jung/graph/OrderedKAryTree.java deleted file mode 100644 index 7980f42e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/OrderedKAryTree.java +++ /dev/null @@ -1,795 +0,0 @@ -/* - * Created on May 8, 2008 - * - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.collections15.CollectionUtils; -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of Tree in which each vertex has - * <= k children. The value of 'k' is specified by the constructor - * parameter. A specific child (edge) can be retrieved directly by specifying the - * index at which the child is located. By default, new (child) vertices - * are added at the lowest index available, if no index is specified. - * - */ -@SuppressWarnings("serial") -public class OrderedKAryTree extends AbstractTypedGraph implements Tree -{ - protected Map> edge_vpairs; - protected Map vertex_data; - protected int height; - protected V root; - protected int order; - - /** - * Returns a {@code Factory} that creates an instance of this graph type. - * @param the vertex type for the graph factory - * @param the edge type for the graph factory - */ - public static Factory> getFactory(final int order) { - return new Factory> () { - public DirectedGraph create() { - return new OrderedKAryTree(order); - } - }; - } - - /** - * Creates a new instance with the specified order (maximum number of children). - */ - public OrderedKAryTree(int order) - { - super(EdgeType.DIRECTED); - this.order = order; - this.height = -1; - this.edge_vpairs = new HashMap>(); - this.vertex_data = new HashMap(); - } - - /** - * Returns the number of children that {@code vertex} has. - * @see edu.uci.ics.jung.graph.Tree#getChildCount(java.lang.Object) - */ - public int getChildCount(V vertex) { - if (!containsVertex(vertex)) - return 0; - List edges = vertex_data.get(vertex).child_edges; - if (edges == null) - return 0; - int count = 0; - for (E edge : edges) - count += edge == null ? 0 : 1; - - return count; - } - - /** - * Returns the child edge of the vertex at index index. 
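Before the OrderedKAryTree listing continues, a short usage sketch for the ObservableGraph decorator removed just above: it forwards every mutation to its delegate and notifies registered GraphEventListeners, firing EDGE_REMOVED events before VERTEX_REMOVED when a vertex with incident edges is deleted. Illustrative only, assuming the JUNG 2.x event classes referenced in the imports above:

    import edu.uci.ics.jung.graph.ObservableGraph;
    import edu.uci.ics.jung.graph.SparseMultigraph;
    import edu.uci.ics.jung.graph.event.GraphEvent;
    import edu.uci.ics.jung.graph.event.GraphEventListener;

    public class ObservableGraphDemo {
        public static void main(String[] args) {
            ObservableGraph<String, Integer> og =
                new ObservableGraph<String, Integer>(new SparseMultigraph<String, Integer>());
            og.addGraphEventListener(new GraphEventListener<String, Integer>() {
                public void handleGraphEvent(GraphEvent<String, Integer> evt) {
                    System.out.println("graph changed: " + evt);  // called from fireGraphEvent
                }
            });
            og.addVertex("a");        // VERTEX_ADDED
            og.addVertex("b");
            og.addEdge(1, "a", "b");  // EDGE_ADDED
            og.removeVertex("a");     // EDGE_REMOVED first, then VERTEX_REMOVED
        }
    }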
- * @param vertex - * @param index - * @return the child edge of the vertex at index index - */ - public E getChildEdge(V vertex, int index) - { - if (!containsVertex(vertex)) - return null; - List edges = vertex_data.get(vertex).child_edges; - if (edges == null) - return null; - return edges.get(index); - } - - /** - * @see edu.uci.ics.jung.graph.Tree#getChildEdges(java.lang.Object) - */ - public Collection getChildEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - List edges = vertex_data.get(vertex).child_edges; - return edges == null ? Collections.emptySet() : - CollectionUtils.unmodifiableCollection(edges); - } - - /** - * Returns an ordered list of {@code vertex}'s child vertices. - * If there is no child in position i, then the list will contain - * {@code null} in position i. If {@code vertex} has no children - * then the empty set will be returned. - * @see edu.uci.ics.jung.graph.Tree#getChildren(java.lang.Object) - */ - public Collection getChildren(V vertex) - { - if (!containsVertex(vertex)) - return null; - List edges = vertex_data.get(vertex).child_edges; - if (edges == null) - return Collections.emptySet(); - Collection children = new ArrayList(order); - for (E edge : edges) - children.add(this.getOpposite(vertex, edge)); - return CollectionUtils.unmodifiableCollection(children); - } - - /** - * @see edu.uci.ics.jung.graph.Tree#getDepth(java.lang.Object) - * @return the depth of the vertex in this tree, or -1 if the vertex is - * not present in this tree - */ - public int getDepth(V vertex) - { - if (!containsVertex(vertex)) - return -1; - return vertex_data.get(vertex).depth; - } - - /** - * Returns the height of the tree, or -1 if the tree is empty. - * @see edu.uci.ics.jung.graph.Tree#getHeight() - */ - public int getHeight() - { - return height; - } - - /** - * @see edu.uci.ics.jung.graph.Tree#getParent(java.lang.Object) - */ - public V getParent(V vertex) - { - if (!containsVertex(vertex)) - return null; - else if (vertex.equals(root)) - return null; - return edge_vpairs.get(vertex_data.get(vertex).parent_edge).getFirst(); - } - - /** - * @see edu.uci.ics.jung.graph.Tree#getParentEdge(java.lang.Object) - */ - public E getParentEdge(V vertex) - { - if (!containsVertex(vertex)) - return null; - return vertex_data.get(vertex).parent_edge; - } - - /** - * @see edu.uci.ics.jung.graph.Tree#getRoot() - */ - public V getRoot() - { - return root; - } - - /** - * @see edu.uci.ics.jung.graph.Forest#getTrees() - */ - public Collection> getTrees() - { - Collection> forest = new ArrayList>(1); - forest.add(this); - return forest; - } - - /** - * Adds the specified {@code child} vertex and edge {@code e} to the graph - * with the specified parent vertex {@code parent}. If {@code index} is - * greater than or equal to 0, then the child is placed at position - * {@code index}; if it is less than 0, the child is placed at the lowest - * available position; if it is greater than or equal to the order of this - * tree, an exception is thrown. 
- * - * @see edu.uci.ics.jung.graph.Graph#addEdge(java.lang.Object, java.lang.Object, java.lang.Object) - */ - public boolean addEdge(E e, V parent, V child, int index) - { - if (e == null || child == null || parent == null) - throw new IllegalArgumentException("Inputs may not be null"); - if (!containsVertex(parent)) - throw new IllegalArgumentException("Tree must already " + - "include parent: " + parent); - if (containsVertex(child)) - throw new IllegalArgumentException("Tree must not already " + - "include child: " + child); - if (parent.equals(child)) - throw new IllegalArgumentException("Input vertices must be distinct"); - if (index < 0 || index >= order) - throw new IllegalArgumentException("'index' must be in [0, [order-1]]"); - - Pair endpoints = new Pair(parent, child); - if (containsEdge(e)) - if (!endpoints.equals(edge_vpairs.get(e))) - throw new IllegalArgumentException("Tree already includes edge" + - e + " with different endpoints " + edge_vpairs.get(e)); - else - return false; - - VertexData parent_data = vertex_data.get(parent); - List outedges = parent_data.child_edges; - - if (outedges == null) - outedges = new ArrayList(this.order); - - boolean edge_placed = false; - if (index >= 0) - if (outedges.get(index) != null) - throw new IllegalArgumentException("Parent " + parent + - " already has a child at index " + index + " in this tree"); - else - outedges.set(index, e); - for (int i = 0; i < order; i++) - { - if (outedges.get(i) == null) - { - outedges.set(i, e); - edge_placed = true; - break; - } - } - if (!edge_placed) - throw new IllegalArgumentException("Parent " + parent + " already" + - " has " + order + " children in this tree"); - - // initialize VertexData for child; leave child's child_edges null for now - VertexData child_data = new VertexData(e, parent_data.depth + 1); - vertex_data.put(child, child_data); - - height = child_data.depth > height ? 
child_data.depth : height; - edge_vpairs.put(e, endpoints); - - return true; - } - - /** - * @see edu.uci.ics.jung.graph.Graph#addEdge(java.lang.Object, java.lang.Object, java.lang.Object) - */ - @Override - public boolean addEdge(E e, V parent, V child) - { - return addEdge(e, parent, child, -1); - } - - - /** - * @see edu.uci.ics.jung.graph.Graph#addEdge(java.lang.Object, java.lang.Object, java.lang.Object, edu.uci.ics.jung.graph.util.EdgeType) - */ - @Override - public boolean addEdge(E e, V v1, V v2, EdgeType edge_type) - { - this.validateEdgeType(edge_type); - return addEdge(e, v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getDest(java.lang.Object) - */ - public V getDest(E directed_edge) - { - if (!containsEdge(directed_edge)) - return null; - return edge_vpairs.get(directed_edge).getSecond(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getEndpoints(java.lang.Object) - */ - public Pair getEndpoints(E edge) - { - if (!containsEdge(edge)) - return null; - return edge_vpairs.get(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getInEdges(java.lang.Object) - */ - public Collection getInEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - else if (vertex.equals(root)) - return Collections.emptySet(); - else - return Collections.singleton(getParentEdge(vertex)); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getOpposite(java.lang.Object, java.lang.Object) - */ - @Override - public V getOpposite(V vertex, E edge) - { - if (!containsVertex(vertex) || !containsEdge(edge)) - return null; - Pair endpoints = edge_vpairs.get(edge); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - return v1.equals(vertex) ? v2 : v1; - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getOutEdges(java.lang.Object) - */ - public Collection getOutEdges(V vertex) - { - return getChildEdges(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getPredecessorCount(java.lang.Object) - * @return 0 if vertex is the root, -1 if the vertex is - * not an element of this tree, and 1 otherwise - */ - @Override - public int getPredecessorCount(V vertex) - { - if (!containsVertex(vertex)) - return -1; - return vertex.equals(root) ? 
0 : 1; - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getPredecessors(java.lang.Object) - */ - public Collection getPredecessors(V vertex) - { - if (!containsVertex(vertex)) - return null; - if (vertex.equals(root)) - return Collections.emptySet(); - return Collections.singleton(getParent(vertex)); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getSource(java.lang.Object) - */ - public V getSource(E directed_edge) - { - if (!containsEdge(directed_edge)) - return null; - return edge_vpairs.get(directed_edge).getSecond(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getSuccessorCount(java.lang.Object) - */ - @Override - public int getSuccessorCount(V vertex) - { - return getChildCount(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getSuccessors(java.lang.Object) - */ - public Collection getSuccessors(V vertex) - { - return getChildren(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#inDegree(java.lang.Object) - */ - @Override - public int inDegree(V vertex) - { - if (!containsVertex(vertex)) - return 0; - if (vertex.equals(root)) - return 0; - return 1; - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isDest(java.lang.Object, java.lang.Object) - */ - public boolean isDest(V vertex, E edge) - { - if (!containsEdge(edge) || !containsVertex(vertex)) - return false; - return edge_vpairs.get(edge).getSecond().equals(vertex); - } - - /** - * Returns true if vertex is a leaf of this tree, - * i.e., if it has no children. - * @param vertex the vertex to be queried - * @return true if outDegree(vertex)==0 - */ - public boolean isLeaf(V vertex) - { - if (!containsVertex(vertex)) - return false; - return outDegree(vertex) == 0; - } - - /** - * Returns true iff v1 is the parent of v2. - * Note that if v2 is the root and v1 is null, - * this method returns true. - * - * @see edu.uci.ics.jung.graph.Graph#isPredecessor(java.lang.Object, java.lang.Object) - */ - @Override - public boolean isPredecessor(V v1, V v2) - { - if (!containsVertex(v2)) - return false; - return getParent(v2).equals(v1); - } - - /** - * Returns true if vertex is a leaf of this tree, - * i.e., if it has no children. - * @param vertex the vertex to be queried - * @return true if outDegree(vertex)==0 - */ - public boolean isRoot(V vertex) - { - if (root == null) - return false; - return root.equals(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isSource(java.lang.Object, java.lang.Object) - */ - public boolean isSource(V vertex, E edge) - { - if (!containsEdge(edge) || !containsVertex(vertex)) - return false; - return edge_vpairs.get(edge).getFirst().equals(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isSuccessor(java.lang.Object, java.lang.Object) - */ - @Override - public boolean isSuccessor(V v1, V v2) - { - if (!containsVertex(v2)) - return false; - if (containsVertex(v1)) - return getParent(v1).equals(v2); - return isLeaf(v2) && v1 == null; - } - - /** - * @see edu.uci.ics.jung.graph.Graph#outDegree(java.lang.Object) - */ - @Override - public int outDegree(V vertex) - { - if (!containsVertex(vertex)) - return 0; - List out_edges = vertex_data.get(vertex).child_edges; - if (out_edges == null) - return 0; - int degree = 0; - for (E e : out_edges) - degree += (e == null) ? 
0 : 1; - return degree; - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#addEdge(java.lang.Object, java.util.Collection) - */ - @Override - @SuppressWarnings("unchecked") - public boolean addEdge(E edge, Collection vertices, EdgeType edge_type) - { - if (edge == null || vertices == null) - throw new IllegalArgumentException("inputs may not be null"); - if (vertices.size() != 2) - throw new IllegalArgumentException("'vertices' must contain " + - "exactly 2 distinct vertices"); - this.validateEdgeType(edge_type); - Pair endpoints; - if (vertices instanceof Pair) - endpoints = (Pair)vertices; - else - endpoints = new Pair(vertices); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - if (v1.equals(v2)) - throw new IllegalArgumentException("Input vertices must be distinct"); - return addEdge(edge, v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#addVertex(java.lang.Object) - */ - public boolean addVertex(V vertex) - { - if(root == null) - { - this.root = vertex; - vertex_data.put(vertex, new VertexData(null, 0)); - this.height = 0; - return true; - } - else - { - throw new UnsupportedOperationException("Unless you are setting " + - "the root, use addEdge() or addChild()"); - } - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#isIncident(java.lang.Object, java.lang.Object) - */ - @Override - public boolean isIncident(V vertex, E edge) - { - if (!containsVertex(vertex) || !containsEdge(edge)) - return false; - return edge_vpairs.get(edge).contains(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#isNeighbor(java.lang.Object, java.lang.Object) - */ - @Override - public boolean isNeighbor(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - return false; - return getNeighbors(v1).contains(v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#containsEdge(java.lang.Object) - */ - public boolean containsEdge(E edge) - { - return edge_vpairs.containsKey(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#containsVertex(java.lang.Object) - */ - public boolean containsVertex(V vertex) - { - return vertex_data.containsKey(vertex); - } - - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#findEdge(java.lang.Object, java.lang.Object) - */ - @Override - public E findEdge(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - return null; - VertexData v1_data = vertex_data.get(v1); - if (edge_vpairs.get(v1_data.parent_edge).getFirst().equals(v2)) - return v1_data.parent_edge; - List edges = v1_data.child_edges; - if (edges == null) - return null; - for (E edge : edges) - if (edge != null && edge_vpairs.get(edge).getSecond().equals(v2)) - return edge; - return null; - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#findEdgeSet(java.lang.Object, java.lang.Object) - */ - @Override - public Collection findEdgeSet(V v1, V v2) - { - E edge = findEdge(v1, v2); - if (edge == null) - return Collections.emptySet(); - else - return Collections.singleton(edge); - } - - /** - * Returns the child of vertex at position index - * in this tree, or null if it has no child at that position. 
- * @param vertex the vertex to query - * @return the child of vertex at position index - * in this tree, or null if it has no child at that position - * @throws ArrayIndexOutOfBoundsException if index is not in - * the range {@code [0, order-1]} - */ - public V getChild(V vertex, int index) - { - if (index < 0 || index >= order) - throw new ArrayIndexOutOfBoundsException(index + " is not in [0, order-1]"); - if (!containsVertex(vertex)) - return null; - List edges = vertex_data.get(vertex).child_edges; - if (edges == null) - return null; - E edge = edges.get(index); - return edge == null ? null : edge_vpairs.get(edge).getSecond(); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getEdgeCount() - */ - public int getEdgeCount() - { - return edge_vpairs.size(); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getEdges() - */ - public Collection getEdges() - { - return CollectionUtils.unmodifiableCollection(edge_vpairs.keySet()); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentCount(java.lang.Object) - */ - @Override - public int getIncidentCount(E edge) - { - return 2; // all tree edges have 2 incident vertices - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentEdges(java.lang.Object) - */ - public Collection getIncidentEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - ArrayList edges = new ArrayList(order+1); - VertexData v_data = vertex_data.get(vertex); - if (v_data.parent_edge != null) - edges.add(v_data.parent_edge); - if (v_data.child_edges != null) - { - for (E edge : v_data.child_edges) - if (edge != null) - edges.add(edge); - } - if (edges.isEmpty()) - return Collections.emptySet(); - return Collections.unmodifiableCollection(edges); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentVertices(java.lang.Object) - */ - @Override - public Collection getIncidentVertices(E edge) - { - return edge_vpairs.get(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getNeighborCount(java.lang.Object) - */ - @Override - public int getNeighborCount(V vertex) - { - if (!containsVertex(vertex)) - return 0; - return (vertex.equals(root) ? 
0 : 1) + this.getChildCount(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getNeighbors(java.lang.Object) - */ - public Collection getNeighbors(V vertex) - { - if (!containsVertex(vertex)) - return null; - ArrayList vertices = new ArrayList(order+1); - VertexData v_data = vertex_data.get(vertex); - if (v_data.parent_edge != null) - vertices.add(edge_vpairs.get(v_data.parent_edge).getFirst()); - if (v_data.child_edges != null) - { - for (E edge : v_data.child_edges) - if (edge != null) - vertices.add(edge_vpairs.get(edge).getSecond()); - } - if (vertices.isEmpty()) - return Collections.emptySet(); - return Collections.unmodifiableCollection(vertices); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getVertexCount() - */ - public int getVertexCount() - { - return vertex_data.size(); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getVertices() - */ - public Collection getVertices() - { - return CollectionUtils.unmodifiableCollection(vertex_data.keySet()); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#removeEdge(java.lang.Object) - */ - public boolean removeEdge(E edge) - { - if (!containsEdge(edge)) - return false; - - removeVertex(edge_vpairs.get(edge).getSecond()); - edge_vpairs.remove(edge); - - return true; - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#removeVertex(java.lang.Object) - */ - public boolean removeVertex(V vertex) - { - if (!containsVertex(vertex)) - return false; - - // recursively remove all of vertex's children - for(V v : getChildren(vertex)) - removeVertex(v); - - E parent_edge = getParentEdge(vertex); - edge_vpairs.remove(parent_edge); - List edges = vertex_data.get(vertex).child_edges; - if (edges != null) - for (E edge : edges) - edge_vpairs.remove(edge); - vertex_data.remove(vertex); - - return true; - } - - protected class VertexData - { - List child_edges; - E parent_edge; - int depth; - - protected VertexData(E parent_edge, int depth) - { - this.parent_edge = parent_edge; - this.depth = depth; - } - } - - @Override - public boolean addEdge(E edge, Pair endpoints, EdgeType edgeType) - { - if (edge == null || endpoints == null) - throw new IllegalArgumentException("inputs must not be null"); - return addEdge(edge, endpoints.getFirst(), endpoints.getSecond(), edgeType); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/OrderedSparseMultigraph.java b/gui/jung-src/edu/uci/ics/jung/graph/OrderedSparseMultigraph.java deleted file mode 100644 index 823ec7a1..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/OrderedSparseMultigraph.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Created on Oct 18, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.LinkedHashSet; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of Graph that orders its vertex and edge collections - * according to insertion time, is suitable for sparse graphs, and - * permits directed, undirected, and parallel edges. 
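As the class comment above says, OrderedSparseMultigraph differs from the plain SparseMultigraph only in iteration order: its LinkedHashMap/LinkedHashSet internals return vertices, edges, and neighbours in insertion order. A minimal sketch of that behaviour (illustrative, not part of the removed sources):

    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.OrderedSparseMultigraph;

    public class InsertionOrderDemo {
        public static void main(String[] args) {
            Graph<String, Integer> g = new OrderedSparseMultigraph<String, Integer>();
            g.addVertex("c");
            g.addVertex("a");
            g.addVertex("b");
            g.addEdge(1, "c", "a");
            g.addEdge(2, "c", "b");
            System.out.println(g.getVertices());      // [c, a, b] -- insertion order
            System.out.println(g.getNeighbors("c"));  // [a, b]   -- also insertion order
        }
    }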
- */ -@SuppressWarnings("serial") -public class OrderedSparseMultigraph - extends SparseMultigraph - implements MultiGraph { - - /** - * Returns a {@code Factory} that creates an instance of this graph type. - * @param the vertex type for the graph factory - * @param the edge type for the graph factory - */ - public static Factory> getFactory() { - return new Factory> () { - public Graph create() { - return new OrderedSparseMultigraph(); - } - }; - } - - /** - * Creates a new instance. - */ - public OrderedSparseMultigraph() - { - vertices = new LinkedHashMap>>(); - edges = new LinkedHashMap>(); - directedEdges = new LinkedHashSet(); - } - - @Override - public boolean addVertex(V vertex) { - if(vertex == null) { - throw new IllegalArgumentException("vertex may not be null"); - } - if (!containsVertex(vertex)) { - vertices.put(vertex, new Pair>(new LinkedHashSet(), new LinkedHashSet())); - return true; - } else { - return false; - } - } - - - @Override - public Collection getPredecessors(V vertex) - { - if (!containsVertex(vertex)) - return null; - - Set preds = new LinkedHashSet(); - for (E edge : getIncoming_internal(vertex)) { - if(getEdgeType(edge) == EdgeType.DIRECTED) { - preds.add(this.getSource(edge)); - } else { - preds.add(getOpposite(vertex, edge)); - } - } - return Collections.unmodifiableCollection(preds); - } - - @Override - public Collection getSuccessors(V vertex) - { - if (!containsVertex(vertex)) - return null; - - Set succs = new LinkedHashSet(); - for (E edge : getOutgoing_internal(vertex)) { - if(getEdgeType(edge) == EdgeType.DIRECTED) { - succs.add(this.getDest(edge)); - } else { - succs.add(getOpposite(vertex, edge)); - } - } - return Collections.unmodifiableCollection(succs); - } - - @Override - public Collection getNeighbors(V vertex) - { - if (!containsVertex(vertex)) - return null; - - Collection out = new LinkedHashSet(); - out.addAll(this.getPredecessors(vertex)); - out.addAll(this.getSuccessors(vertex)); - return out; - } - - @Override - public Collection getIncidentEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - - Collection out = new LinkedHashSet(); - out.addAll(this.getInEdges(vertex)); - out.addAll(this.getOutEdges(vertex)); - return out; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/SetHypergraph.java b/gui/jung-src/edu/uci/ics/jung/graph/SetHypergraph.java deleted file mode 100644 index e2a23c13..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/SetHypergraph.java +++ /dev/null @@ -1,344 +0,0 @@ -/* - * Created on Feb 4, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.util.EdgeType; - -/** - * An implementation of Hypergraph that is suitable for sparse graphs and - * permits parallel edges. 
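SetHypergraph, described above, is the one removed implementation that accepts true hyperedges: an edge attaches to an arbitrary vertex collection, and vertices named in the collection are added on the fly. A minimal sketch, assuming a JUNG 2.x classpath (types chosen arbitrarily):

    import java.util.Arrays;
    import edu.uci.ics.jung.graph.Hypergraph;
    import edu.uci.ics.jung.graph.SetHypergraph;

    public class HyperedgeDemo {
        public static void main(String[] args) {
            Hypergraph<String, String> h = new SetHypergraph<String, String>();
            // a hyperedge connects an arbitrary vertex set; unknown vertices are added automatically
            h.addEdge("meeting", Arrays.asList("alice", "bob", "carol"));
            h.addEdge("email", Arrays.asList("alice", "bob"));
            System.out.println(h.getIncidentCount("meeting"));   // 3
            System.out.println(h.degree("alice"));                // 2 incident hyperedges
            System.out.println(h.isIncident("carol", "email"));   // false
        }
    }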
- */ -@SuppressWarnings("serial") -public class SetHypergraph - implements Hypergraph, MultiGraph, Serializable -{ - protected Map> vertices; // Map of vertices to incident hyperedge sets - protected Map> edges; // Map of hyperedges to incident vertex sets - - /** - * Returns a Factory which creates instances of this class. - * @param vertex type of the hypergraph to be created - * @param edge type of the hypergraph to be created - * @return a Factory which creates instances of this class - */ - public static Factory> getFactory() { - return new Factory> () { - public Hypergraph create() { - return new SetHypergraph(); - } - }; - } - - /** - * Creates a SetHypergraph and initializes the internal data structures. - */ - public SetHypergraph() - { - vertices = new HashMap>(); - edges = new HashMap>(); - } - - /** - * Adds hyperedge to this graph and connects them to the vertex collection to_attach. - * Any vertices in to_attach that appear more than once will only appear once in the - * incident vertex collection for hyperedge, that is, duplicates will be ignored. - * - * @see Hypergraph#addEdge(Object, Collection) - */ - public boolean addEdge(H hyperedge, Collection to_attach) - { - if (hyperedge == null) - throw new IllegalArgumentException("input hyperedge may not be null"); - - if (to_attach == null) - throw new IllegalArgumentException("endpoints may not be null"); - - if(to_attach.contains(null)) - throw new IllegalArgumentException("cannot add an edge with a null endpoint"); - - Set new_endpoints = new HashSet(to_attach); - if (edges.containsKey(hyperedge)) - { - Collection attached = edges.get(hyperedge); - if (!attached.equals(new_endpoints)) - { - throw new IllegalArgumentException("Edge " + hyperedge + - " exists in this graph with endpoints " + attached); - } - else - return false; - } - edges.put(hyperedge, new_endpoints); - for (V v : to_attach) - { - // add v if it's not already in the graph - addVertex(v); - - // associate v with hyperedge - vertices.get(v).add(hyperedge); - } - return true; - } - - /** - * @see Hypergraph#addEdge(Object, Collection, EdgeType) - */ - public boolean addEdge(H hyperedge, Collection to_attach, - EdgeType edge_type) - { - if (edge_type != EdgeType.UNDIRECTED) - throw new IllegalArgumentException("Edge type for this " + - "implementation must be EdgeType.HYPER, not " + - edge_type); - return addEdge(hyperedge, to_attach); - } - - /** - * @see Hypergraph#getEdgeType(Object) - */ - public EdgeType getEdgeType(H edge) - { - if (containsEdge(edge)) - return EdgeType.UNDIRECTED; - else - return null; - } - - public boolean containsVertex(V vertex) { - return vertices.keySet().contains(vertex); - } - - public boolean containsEdge(H edge) { - return edges.keySet().contains(edge); - } - - public Collection getEdges() - { - return edges.keySet(); - } - - public Collection getVertices() - { - return vertices.keySet(); - } - - public int getEdgeCount() - { - return edges.size(); - } - - public int getVertexCount() - { - return vertices.size(); - } - - public Collection getNeighbors(V vertex) - { - if (!containsVertex(vertex)) - return null; - - Set neighbors = new HashSet(); - for (H hyperedge : vertices.get(vertex)) - { - neighbors.addAll(edges.get(hyperedge)); - } - return neighbors; - } - - public Collection getIncidentEdges(V vertex) - { - return vertices.get(vertex); - } - - public Collection getIncidentVertices(H edge) - { - return edges.get(edge); - } - - public H findEdge(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - return 
null; - - for (H h : getIncidentEdges(v1)) - { - if (isIncident(v2, h)) - return h; - } - return null; - } - - public Collection findEdgeSet(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - return null; - - Collection edges = new ArrayList(); - for (H h : getIncidentEdges(v1)) - { - if (isIncident(v2, h)) - edges.add(h); - } - return Collections.unmodifiableCollection(edges); - } - - public boolean addVertex(V vertex) - { - if(vertex == null) - throw new IllegalArgumentException("cannot add a null vertex"); - if (containsVertex(vertex)) - return false; - vertices.put(vertex, new HashSet()); - return true; - } - - public boolean removeVertex(V vertex) - { - if (!containsVertex(vertex)) - return false; - for (H hyperedge : vertices.get(vertex)) - { - edges.get(hyperedge).remove(vertex); - } - vertices.remove(vertex); - return true; - } - - public boolean removeEdge(H hyperedge) - { - if (!containsEdge(hyperedge)) - return false; - for (V vertex : edges.get(hyperedge)) - { - vertices.get(vertex).remove(hyperedge); - } - edges.remove(hyperedge); - return true; - } - - public boolean isNeighbor(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - return false; - - if (vertices.get(v2).isEmpty()) - return false; - for (H hyperedge : vertices.get(v1)) - { - if (edges.get(hyperedge).contains(v2)) - return true; - } - return false; - } - - public boolean isIncident(V vertex, H edge) - { - if (!containsVertex(vertex) || !containsEdge(edge)) - return false; - - return vertices.get(vertex).contains(edge); - } - - public int degree(V vertex) - { - if (!containsVertex(vertex)) - return 0; - - return vertices.get(vertex).size(); - } - - public int getNeighborCount(V vertex) - { - if (!containsVertex(vertex)) - return 0; - - return getNeighbors(vertex).size(); - } - - public int getIncidentCount(H edge) - { - if (!containsEdge(edge)) - return 0; - - return edges.get(edge).size(); - } - - public int getEdgeCount(EdgeType edge_type) - { - if (edge_type == EdgeType.UNDIRECTED) - return edges.size(); - return 0; - } - - public Collection getEdges(EdgeType edge_type) - { - if (edge_type == EdgeType.UNDIRECTED) - return edges.keySet(); - return null; - } - - public EdgeType getDefaultEdgeType() - { - return EdgeType.UNDIRECTED; - } - - public Collection getInEdges(V vertex) - { - return getIncidentEdges(vertex); - } - - public Collection getOutEdges(V vertex) - { - return getIncidentEdges(vertex); - } - - public int inDegree(V vertex) - { - return degree(vertex); - } - - public int outDegree(V vertex) - { - return degree(vertex); - } - - public V getDest(H directed_edge) - { - return null; - } - - public V getSource(H directed_edge) - { - return null; - } - - public Collection getPredecessors(V vertex) - { - return getNeighbors(vertex); - } - - public Collection getSuccessors(V vertex) - { - return getNeighbors(vertex); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/SortedSparseMultigraph.java b/gui/jung-src/edu/uci/ics/jung/graph/SortedSparseMultigraph.java deleted file mode 100644 index 6b6d66a0..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/SortedSparseMultigraph.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Created on Oct 18, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- */ -package edu.uci.ics.jung.graph; - -import java.util.Comparator; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; -import java.util.TreeSet; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.comparators.ComparableComparator; - -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of Graph that is suitable for sparse graphs, - * orders its vertex and edge collections according to either specified Comparator - * instances or the natural ordering of their elements, and permits directed, undirected, - * and parallel edges. - * - * @author Joshua O'Madadhain - */ -@SuppressWarnings("serial") -public class SortedSparseMultigraph - extends OrderedSparseMultigraph - implements MultiGraph -{ - /** - * Returns a {@code Factory} that creates an instance of this graph type. - * @param the vertex type for the graph factory - * @param the edge type for the graph factory - */ - public static Factory> getFactory() - { - return new Factory> () - { - public Graph create() - { - return new SortedSparseMultigraph(); - } - }; - } - - /** - * Comparator used in ordering vertices. Defaults to util.ComparableComparator - * if no comparators are specified in the constructor. - */ - protected Comparator vertex_comparator; - - /** - * Comparator used in ordering edges. Defaults to util.ComparableComparator - * if no comparators are specified in the constructor. - */ - protected Comparator edge_comparator; - - /** - * Creates a new instance which sorts its vertices and edges according to the - * specified {@code Comparator}s. - */ - public SortedSparseMultigraph(Comparator vertex_comparator, Comparator edge_comparator) - { - this.vertex_comparator = vertex_comparator; - this.edge_comparator = edge_comparator; - vertices = new TreeMap>>(vertex_comparator); - edges = new TreeMap>(edge_comparator); - directedEdges = new TreeSet(edge_comparator); - } - - /** - * Creates a new instance which sorts its vertices and edges according to - * their natural ordering. - */ - @SuppressWarnings("unchecked") - public SortedSparseMultigraph() - { - this(new ComparableComparator(), new ComparableComparator()); - } - - /** - * Provides a new {@code Comparator} to be used in sorting the vertices. - * @param vertex_comparator the comparator that defines the new ordering - */ - public void setVertexComparator(Comparator vertex_comparator) - { - this.vertex_comparator = vertex_comparator; - Map>> tmp_vertices = new TreeMap>>(vertex_comparator); - for (Map.Entry>> entry : vertices.entrySet()) - tmp_vertices.put(entry.getKey(), entry.getValue()); - this.vertices = tmp_vertices; - } - - @Override - public boolean addVertex(V vertex) { - if(vertex == null) { - throw new IllegalArgumentException("vertex may not be null"); - } - if (!containsVertex(vertex)) - { - vertices.put(vertex, new Pair>(new TreeSet(edge_comparator), - new TreeSet(edge_comparator))); - return true; - } - else - { - return false; - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/SparseGraph.java b/gui/jung-src/edu/uci/ics/jung/graph/SparseGraph.java deleted file mode 100644 index 276a173f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/SparseGraph.java +++ /dev/null @@ -1,373 +0,0 @@ -/* - * Created on Apr 15, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. 
- * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of Graph that is suitable for sparse graphs and - * permits both directed and undirected edges. - */ -@SuppressWarnings("serial") -public class SparseGraph - extends AbstractGraph - implements Graph -{ - /** - * Returns a {@code Factory} that creates an instance of this graph type. - * @param the vertex type for the graph factory - * @param the edge type for the graph factory - */ - public static Factory> getFactory() - { - return new Factory> () - { - public Graph create() - { - return new SparseGraph(); - } - }; - } - - protected static final int INCOMING = 0; - protected static final int OUTGOING = 1; - protected static final int INCIDENT = 2; - - protected Map[]> vertex_maps; // Map of vertices to adjacency maps of vertices to {incoming, outgoing, incident} edges - protected Map> directed_edges; // Map of directed edges to incident vertex sets - protected Map> undirected_edges; // Map of undirected edges to incident vertex sets - - /** - * Creates an instance. - */ - public SparseGraph() - { - vertex_maps = new HashMap[]>(); - directed_edges = new HashMap>(); - undirected_edges = new HashMap>(); - } - - @Override - public E findEdge(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - return null; - E edge = vertex_maps.get(v1)[OUTGOING].get(v2); - if (edge == null) - edge = vertex_maps.get(v1)[INCIDENT].get(v2); - return edge; - } - - @Override - public Collection findEdgeSet(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - return null; - Collection edges = new ArrayList(2); - E e1 = vertex_maps.get(v1)[OUTGOING].get(v2); - if (e1 != null) - edges.add(e1); - E e2 = vertex_maps.get(v1)[INCIDENT].get(v2); - if (e1 != null) - edges.add(e2); - return edges; - } - - @Override - public boolean addEdge(E edge, Pair endpoints, EdgeType edgeType) - { - Pair new_endpoints = getValidatedEndpoints(edge, endpoints); - if (new_endpoints == null) - return false; - - V v1 = new_endpoints.getFirst(); - V v2 = new_endpoints.getSecond(); - - // undirected edges and directed edges are not considered to be parallel to each other, - // so as long as anything that's returned by findEdge is not of the same type as - // edge, we're fine - E connection = findEdge(v1, v2); - if (connection != null && getEdgeType(connection) == edgeType) - return false; - - if (!containsVertex(v1)) - this.addVertex(v1); - - if (!containsVertex(v2)) - this.addVertex(v2); - - // map v1 to and vice versa - if (edgeType == EdgeType.DIRECTED) - { - vertex_maps.get(v1)[OUTGOING].put(v2, edge); - vertex_maps.get(v2)[INCOMING].put(v1, edge); - directed_edges.put(edge, new_endpoints); - } - else - { - vertex_maps.get(v1)[INCIDENT].put(v2, edge); - vertex_maps.get(v2)[INCIDENT].put(v1, edge); - undirected_edges.put(edge, new_endpoints); - } - - return true; - } - - - - public Collection getInEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - - // combine directed inedges and undirected - Collection in = new HashSet(vertex_maps.get(vertex)[INCOMING].values()); - 
in.addAll(vertex_maps.get(vertex)[INCIDENT].values()); - return Collections.unmodifiableCollection(in); - } - - public Collection getOutEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - - // combine directed outedges and undirected - Collection out = new HashSet(vertex_maps.get(vertex)[OUTGOING].values()); - out.addAll(vertex_maps.get(vertex)[INCIDENT].values()); - return Collections.unmodifiableCollection(out); - } - - public Collection getPredecessors(V vertex) - { - if (!containsVertex(vertex)) - return null; - - // consider directed inedges and undirected - Collection preds = new HashSet(vertex_maps.get(vertex)[INCOMING].keySet()); - preds.addAll(vertex_maps.get(vertex)[INCIDENT].keySet()); - return Collections.unmodifiableCollection(preds); - } - - public Collection getSuccessors(V vertex) - { - if (!containsVertex(vertex)) - return null; - - // consider directed outedges and undirected - Collection succs = new HashSet(vertex_maps.get(vertex)[OUTGOING].keySet()); - succs.addAll(vertex_maps.get(vertex)[INCIDENT].keySet()); - return Collections.unmodifiableCollection(succs); - } - - public Collection getEdges(EdgeType edgeType) - { - if (edgeType == EdgeType.DIRECTED) - return Collections.unmodifiableCollection(directed_edges.keySet()); - else if (edgeType == EdgeType.UNDIRECTED) - return Collections.unmodifiableCollection(undirected_edges.keySet()); - else - return null; - } - - public Pair getEndpoints(E edge) - { - Pair endpoints; - endpoints = directed_edges.get(edge); - if (endpoints == null) - return undirected_edges.get(edge); - else - return endpoints; - } - - public EdgeType getEdgeType(E edge) - { - if (directed_edges.containsKey(edge)) - return EdgeType.DIRECTED; - else if (undirected_edges.containsKey(edge)) - return EdgeType.UNDIRECTED; - else - return null; - } - - public V getSource(E directed_edge) - { - if (getEdgeType(directed_edge) == EdgeType.DIRECTED) - return directed_edges.get(directed_edge).getFirst(); - else - return null; - } - - public V getDest(E directed_edge) - { - if (getEdgeType(directed_edge) == EdgeType.DIRECTED) - return directed_edges.get(directed_edge).getSecond(); - else - return null; - } - - public boolean isSource(V vertex, E edge) - { - if (!containsVertex(vertex) || !containsEdge(edge)) - return false; - - V source = getSource(edge); - if (source != null) - return source.equals(vertex); - else - return false; - } - - public boolean isDest(V vertex, E edge) - { - if (!containsVertex(vertex) || !containsEdge(edge)) - return false; - - V dest = getDest(edge); - if (dest != null) - return dest.equals(vertex); - else - return false; - } - - public Collection getEdges() - { - Collection edges = new ArrayList(directed_edges.keySet()); - edges.addAll(undirected_edges.keySet()); - return Collections.unmodifiableCollection(edges); - } - - public Collection getVertices() - { - return Collections.unmodifiableCollection(vertex_maps.keySet()); - } - - public boolean containsVertex(V vertex) - { - return vertex_maps.containsKey(vertex); - } - - public boolean containsEdge(E edge) - { - return directed_edges.containsKey(edge) || undirected_edges.containsKey(edge); - } - - public int getEdgeCount() - { - return directed_edges.size() + undirected_edges.size(); - } - - public int getVertexCount() - { - return vertex_maps.size(); - } - - public Collection getNeighbors(V vertex) - { - if (!containsVertex(vertex)) - return null; - // consider directed edges and undirected edges - Collection neighbors = new 
HashSet(vertex_maps.get(vertex)[INCOMING].keySet()); - neighbors.addAll(vertex_maps.get(vertex)[OUTGOING].keySet()); - neighbors.addAll(vertex_maps.get(vertex)[INCIDENT].keySet()); - return Collections.unmodifiableCollection(neighbors); - } - - public Collection getIncidentEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - Collection incident = new HashSet(vertex_maps.get(vertex)[INCOMING].values()); - incident.addAll(vertex_maps.get(vertex)[OUTGOING].values()); - incident.addAll(vertex_maps.get(vertex)[INCIDENT].values()); - return Collections.unmodifiableCollection(incident); - } - - @SuppressWarnings("unchecked") - public boolean addVertex(V vertex) - { - if(vertex == null) { - throw new IllegalArgumentException("vertex may not be null"); - } - if (!containsVertex(vertex)) { - vertex_maps.put(vertex, new HashMap[]{new HashMap(), new HashMap(), new HashMap()}); - return true; - } else { - return false; - } - } - - public boolean removeVertex(V vertex) - { - if (!containsVertex(vertex)) - return false; - - // copy to avoid concurrent modification in removeEdge - Collection incident = new ArrayList(getIncidentEdges(vertex)); - - for (E edge : incident) - removeEdge(edge); - - vertex_maps.remove(vertex); - - return true; - } - - public boolean removeEdge(E edge) - { - if (!containsEdge(edge)) - return false; - - Pair endpoints = getEndpoints(edge); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - - // remove edge from incident vertices' adjacency maps - if (getEdgeType(edge) == EdgeType.DIRECTED) - { - vertex_maps.get(v1)[OUTGOING].remove(v2); - vertex_maps.get(v2)[INCOMING].remove(v1); - directed_edges.remove(edge); - } - else - { - vertex_maps.get(v1)[INCIDENT].remove(v2); - vertex_maps.get(v2)[INCIDENT].remove(v1); - undirected_edges.remove(edge); - } - - return true; - } - - public int getEdgeCount(EdgeType edge_type) - { - if (edge_type == EdgeType.DIRECTED) - return directed_edges.size(); - if (edge_type == EdgeType.UNDIRECTED) - return undirected_edges.size(); - return 0; - } - - public EdgeType getDefaultEdgeType() - { - return EdgeType.UNDIRECTED; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/SparseMultigraph.java b/gui/jung-src/edu/uci/ics/jung/graph/SparseMultigraph.java deleted file mode 100644 index f2d2b636..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/SparseMultigraph.java +++ /dev/null @@ -1,320 +0,0 @@ -/* - * Created on Oct 18, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of Graph that is suitable for sparse graphs - * and permits directed, undirected, and parallel edges. - */ -@SuppressWarnings("serial") -public class SparseMultigraph - extends AbstractGraph - implements MultiGraph { - - /** - * Returns a {@code Factory} that creates an instance of this graph type. 
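The addEdge comment in SparseGraph above notes that a directed and an undirected edge between the same endpoints are not treated as parallel, so one of each may coexist, while a second edge of the same type is rejected. A minimal sketch of that rule (illustrative only, not part of the removed sources):

    import edu.uci.ics.jung.graph.SparseGraph;
    import edu.uci.ics.jung.graph.util.EdgeType;

    public class MixedEdgeDemo {
        public static void main(String[] args) {
            SparseGraph<String, String> g = new SparseGraph<String, String>();
            // one directed and one undirected edge between the same pair of vertices
            g.addEdge("d", "a", "b", EdgeType.DIRECTED);
            g.addEdge("u", "a", "b", EdgeType.UNDIRECTED);
            System.out.println(g.getEdgeCount());                   // 2
            System.out.println(g.getEdgeCount(EdgeType.DIRECTED));  // 1
            System.out.println(g.findEdgeSet("a", "b"));            // [d, u]
            // a second directed a->b edge is considered parallel and rejected
            System.out.println(g.addEdge("d2", "a", "b", EdgeType.DIRECTED)); // false
        }
    }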
- * @param the vertex type for the graph factory - * @param the edge type for the graph factory - */ - public static Factory> getFactory() { - return new Factory> () { - public Graph create() { - return new SparseMultigraph(); - } - }; - } - - // TODO: refactor internal representation: right now directed edges each have two references (in vertices and directedEdges) - // and undirected also have two (incoming and outgoing). - protected Map>> vertices; // Map of vertices to Pair of adjacency sets {incoming, outgoing} - protected Map> edges; // Map of edges to incident vertex pairs - protected Set directedEdges; - - /** - * Creates a new instance. - */ - public SparseMultigraph() - { - vertices = new HashMap>>(); - edges = new HashMap>(); - directedEdges = new HashSet(); - } - - public Collection getEdges() - { - return Collections.unmodifiableCollection(edges.keySet()); - } - - public Collection getVertices() - { - return Collections.unmodifiableCollection(vertices.keySet()); - } - - public boolean containsVertex(V vertex) { - return vertices.keySet().contains(vertex); - } - - public boolean containsEdge(E edge) { - return edges.keySet().contains(edge); - } - - protected Collection getIncoming_internal(V vertex) - { - return vertices.get(vertex).getFirst(); - } - - protected Collection getOutgoing_internal(V vertex) - { - return vertices.get(vertex).getSecond(); - } - - public boolean addVertex(V vertex) { - if(vertex == null) { - throw new IllegalArgumentException("vertex may not be null"); - } - if (!vertices.containsKey(vertex)) { - vertices.put(vertex, new Pair>(new HashSet(), new HashSet())); - return true; - } else { - return false; - } - } - - public boolean removeVertex(V vertex) { - if (!containsVertex(vertex)) - return false; - - // copy to avoid concurrent modification in removeEdge - Set incident = new HashSet(getIncoming_internal(vertex)); - incident.addAll(getOutgoing_internal(vertex)); - - for (E edge : incident) - removeEdge(edge); - - vertices.remove(vertex); - - return true; - } - - @Override - public boolean addEdge(E edge, Pair endpoints, EdgeType edgeType) { - - Pair new_endpoints = getValidatedEndpoints(edge, endpoints); - if (new_endpoints == null) - return false; - - V v1 = new_endpoints.getFirst(); - V v2 = new_endpoints.getSecond(); - - if (!vertices.containsKey(v1)) - this.addVertex(v1); - - if (!vertices.containsKey(v2)) - this.addVertex(v2); - - - vertices.get(v1).getSecond().add(edge); - vertices.get(v2).getFirst().add(edge); - edges.put(edge, new_endpoints); - if(edgeType == EdgeType.DIRECTED) { - directedEdges.add(edge); - } else { - vertices.get(v1).getFirst().add(edge); - vertices.get(v2).getSecond().add(edge); - } - return true; - } - - public boolean removeEdge(E edge) - { - if (!containsEdge(edge)) { - return false; - } - - Pair endpoints = getEndpoints(edge); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - - // remove edge from incident vertices' adjacency sets - vertices.get(v1).getSecond().remove(edge); - vertices.get(v2).getFirst().remove(edge); - - if(directedEdges.remove(edge) == false) { - - // its an undirected edge, remove the other ends - vertices.get(v2).getSecond().remove(edge); - vertices.get(v1).getFirst().remove(edge); - } - edges.remove(edge); - return true; - } - - public Collection getInEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - return Collections.unmodifiableCollection(vertices.get(vertex).getFirst()); - } - - public Collection getOutEdges(V vertex) - { - if (!containsVertex(vertex)) - return 
null; - return Collections.unmodifiableCollection(vertices.get(vertex).getSecond()); - } - - // TODO: this will need to get changed if we modify the internal representation - public Collection getPredecessors(V vertex) - { - if (!containsVertex(vertex)) - return null; - - Set preds = new HashSet(); - for (E edge : getIncoming_internal(vertex)) { - if(getEdgeType(edge) == EdgeType.DIRECTED) { - preds.add(this.getSource(edge)); - } else { - preds.add(getOpposite(vertex, edge)); - } - } - return Collections.unmodifiableCollection(preds); - } - - // TODO: this will need to get changed if we modify the internal representation - public Collection getSuccessors(V vertex) - { - if (!containsVertex(vertex)) - return null; - Set succs = new HashSet(); - for (E edge : getOutgoing_internal(vertex)) { - if(getEdgeType(edge) == EdgeType.DIRECTED) { - succs.add(this.getDest(edge)); - } else { - succs.add(getOpposite(vertex, edge)); - } - } - return Collections.unmodifiableCollection(succs); - } - - public Collection getNeighbors(V vertex) - { - if (!containsVertex(vertex)) - return null; - Collection out = new HashSet(); - out.addAll(this.getPredecessors(vertex)); - out.addAll(this.getSuccessors(vertex)); - return out; - } - - public Collection getIncidentEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - Collection out = new HashSet(); - out.addAll(this.getInEdges(vertex)); - out.addAll(this.getOutEdges(vertex)); - return out; - } - - @Override - public E findEdge(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - return null; - for (E edge : getOutgoing_internal(v1)) - if (this.getOpposite(v1, edge).equals(v2)) - return edge; - - return null; - } - - public Pair getEndpoints(E edge) - { - return edges.get(edge); - } - - public V getSource(E edge) { - if(directedEdges.contains(edge)) { - return this.getEndpoints(edge).getFirst(); - } - return null; - } - - public V getDest(E edge) { - if(directedEdges.contains(edge)) { - return this.getEndpoints(edge).getSecond(); - } - return null; - } - - public boolean isSource(V vertex, E edge) { - if (!containsEdge(edge) || !containsVertex(vertex)) - return false; - return getSource(edge).equals(vertex); - } - - public boolean isDest(V vertex, E edge) { - if (!containsEdge(edge) || !containsVertex(vertex)) - return false; - return getDest(edge).equals(vertex); - } - - public EdgeType getEdgeType(E edge) { - return directedEdges.contains(edge) ? 
- EdgeType.DIRECTED : - EdgeType.UNDIRECTED; - } - - @SuppressWarnings("unchecked") - public Collection getEdges(EdgeType edgeType) { - if(edgeType == EdgeType.DIRECTED) { - return Collections.unmodifiableSet(this.directedEdges); - } else if(edgeType == EdgeType.UNDIRECTED) { - Collection edges = new HashSet(getEdges()); - edges.removeAll(directedEdges); - return edges; - } else { - return Collections.EMPTY_SET; - } - - } - - public int getEdgeCount() { - return edges.keySet().size(); - } - - public int getVertexCount() { - return vertices.keySet().size(); - } - - public int getEdgeCount(EdgeType edge_type) - { - return getEdges(edge_type).size(); - } - - public EdgeType getDefaultEdgeType() - { - return EdgeType.UNDIRECTED; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/Tree.java b/gui/jung-src/edu/uci/ics/jung/graph/Tree.java deleted file mode 100644 index b56199ec..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/Tree.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Created on Feb 3, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - - -/** - * A subtype of Graph which is a (directed, rooted) tree. - * What we refer to as a "tree" here is actually (in the terminology of graph theory) a - * rooted tree. (That is, there is a designated single vertex--the root--from which we measure - * the shortest path to each vertex, which we call its depth; the maximum over all such - * depths is the tree's height. Note that for a tree, there is exactly - * one unique path from the root to any vertex.) - * - * @author Joshua O'Madadhain - */ -public interface Tree extends Forest -{ - /** - * Returns the (unweighted) distance of vertex - * from the root of this tree. - * @param vertex the vertex whose depth is to be returned. - * @return the length of the shortest unweighted path - * from vertex to the root of this tree - * @see #getHeight() - */ - public int getDepth(V vertex); - - /** - * Returns the maximum depth in this tree. - * @return the maximum depth in this tree - * @see #getDepth(Object) - */ - public int getHeight(); - - /** - * Returns the root of this tree. - * The root is defined to be the vertex (designated either at the tree's - * creation time, or as the first vertex to be added) with respect to which - * vertex depth is measured. - * @return the root of this tree - */ - public V getRoot(); -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/UndirectedGraph.java b/gui/jung-src/edu/uci/ics/jung/graph/UndirectedGraph.java deleted file mode 100644 index 61a0b461..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/UndirectedGraph.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Created on Oct 17, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -/** - * A tagging interface for extensions of Graph that - * accept only undirected edges. 
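// A minimal usage sketch (not part of the original sources) for the SparseMultigraph
// implementation deleted above; the class and method names are taken from the JUNG API
// shown in this diff, the vertex/edge labels are illustrative only.
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;
import edu.uci.ics.jung.graph.util.EdgeType;

public class SparseMultigraphSketch {
    public static void main(String[] args) {
        Graph<String, String> g = new SparseMultigraph<String, String>();
        g.addVertex("a");
        g.addVertex("b");
        g.addVertex("c");
        // Directed and undirected edges can coexist in the same SparseMultigraph.
        g.addEdge("a->b", "a", "b", EdgeType.DIRECTED);
        g.addEdge("b--c", "b", "c", EdgeType.UNDIRECTED);
        System.out.println(g.getNeighbors("b"));                  // [a, c] in some order
        System.out.println(g.getEdgeCount(EdgeType.DIRECTED));    // 1
        System.out.println(g.getEdgeCount(EdgeType.UNDIRECTED));  // 1
    }
}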
- */ -public interface UndirectedGraph extends Graph {} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/UndirectedOrderedSparseMultigraph.java b/gui/jung-src/edu/uci/ics/jung/graph/UndirectedOrderedSparseMultigraph.java deleted file mode 100644 index 9f6b4141..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/UndirectedOrderedSparseMultigraph.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Created on Oct 18, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.LinkedHashSet; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of UndirectedGraph that is suitable for sparse graphs, - * orders its vertex and edge collections according to insertion time, and permits - * parallel edges. - */ -@SuppressWarnings("serial") -public class UndirectedOrderedSparseMultigraph - extends UndirectedSparseMultigraph - implements UndirectedGraph { - - /** - * Returns a {@code Factory} that creates an instance of this graph type. - * @param the vertex type for the graph factory - * @param the edge type for the graph factory - */ - public static Factory> getFactory() { - return new Factory> () { - public UndirectedGraph create() { - return new UndirectedOrderedSparseMultigraph(); - } - }; - } - - /** - * Creates a new instance. - */ - public UndirectedOrderedSparseMultigraph() { - vertices = new LinkedHashMap>(); - edges = new LinkedHashMap>(); - } - - @Override - public boolean addVertex(V vertex) { - if(vertex == null) { - throw new IllegalArgumentException("vertex may not be null"); - } - if (!containsVertex(vertex)) - { - vertices.put(vertex, new LinkedHashSet()); - return true; - } else { - return false; - } - } - - @Override - public Collection getNeighbors(V vertex) { - if (!containsVertex(vertex)) - return null; - - Set neighbors = new LinkedHashSet(); - for (E edge : getIncident_internal(vertex)) - { - Pair endpoints = this.getEndpoints(edge); - V e_a = endpoints.getFirst(); - V e_b = endpoints.getSecond(); - if (vertex.equals(e_a)) - neighbors.add(e_b); - else - neighbors.add(e_a); - } - - return Collections.unmodifiableCollection(neighbors); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/UndirectedSparseGraph.java b/gui/jung-src/edu/uci/ics/jung/graph/UndirectedSparseGraph.java deleted file mode 100644 index 2299c243..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/UndirectedSparseGraph.java +++ /dev/null @@ -1,243 +0,0 @@ -/* - * Created on Apr 1, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- */ -package edu.uci.ics.jung.graph; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of UndirectedGraph that is suitable - * for sparse graphs. - */ -@SuppressWarnings("serial") -public class UndirectedSparseGraph extends AbstractTypedGraph - implements UndirectedGraph -{ - - /** - * Returns a {@code Factory} that creates an instance of this graph type. - * @param the vertex type for the graph factory - * @param the edge type for the graph factory - */ - public static Factory> getFactory() { - return new Factory> () { - - public UndirectedGraph create() { - return new UndirectedSparseGraph(); - } - }; - } - - protected Map> vertices; // Map of vertices to adjacency maps of vertices to incident edges - protected Map> edges; // Map of edges to incident vertex sets - - /** - * Creates an instance. - */ - public UndirectedSparseGraph() { - super(EdgeType.UNDIRECTED); - vertices = new HashMap>(); - edges = new HashMap>(); - } - - @Override - public boolean addEdge(E edge, Pair endpoints, EdgeType edgeType) - { - this.validateEdgeType(edgeType); - Pair new_endpoints = getValidatedEndpoints(edge, endpoints); - if (new_endpoints == null) - return false; - - V v1 = new_endpoints.getFirst(); - V v2 = new_endpoints.getSecond(); - - if (findEdge(v1, v2) != null) - return false; - - edges.put(edge, new_endpoints); - - if (!vertices.containsKey(v1)) - this.addVertex(v1); - - if (!vertices.containsKey(v2)) - this.addVertex(v2); - - // map v1 to and vice versa - vertices.get(v1).put(v2, edge); - vertices.get(v2).put(v1, edge); - - return true; - } - - public Collection getInEdges(V vertex) - { - return this.getIncidentEdges(vertex); - } - - public Collection getOutEdges(V vertex) - { - return this.getIncidentEdges(vertex); - } - - public Collection getPredecessors(V vertex) - { - return this.getNeighbors(vertex); - } - - public Collection getSuccessors(V vertex) - { - return this.getNeighbors(vertex); - } - - @Override - public E findEdge(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - return null; - return vertices.get(v1).get(v2); - } - - @Override - public Collection findEdgeSet(V v1, V v2) - { - if (!containsVertex(v1) || !containsVertex(v2)) - return null; - ArrayList edge_collection = new ArrayList(1); -// if (!containsVertex(v1) || !containsVertex(v2)) -// return edge_collection; - E e = findEdge(v1, v2); - if (e == null) - return edge_collection; - edge_collection.add(e); - return edge_collection; - } - - public Pair getEndpoints(E edge) - { - return edges.get(edge); - } - - public V getSource(E directed_edge) - { - return null; - } - - public V getDest(E directed_edge) - { - return null; - } - - public boolean isSource(V vertex, E edge) - { - return false; - } - - public boolean isDest(V vertex, E edge) - { - return false; - } - - public Collection getEdges() - { - return Collections.unmodifiableCollection(edges.keySet()); - } - - public Collection getVertices() - { - return Collections.unmodifiableCollection(vertices.keySet()); - } - - public boolean containsVertex(V vertex) - { - return vertices.containsKey(vertex); - } - - public boolean containsEdge(E edge) - { - return edges.containsKey(edge); - } - - public int getEdgeCount() - { - return edges.size(); - } - - public int getVertexCount() - { - return 
vertices.size(); - } - - public Collection getNeighbors(V vertex) - { - if (!containsVertex(vertex)) - return null; - return Collections.unmodifiableCollection(vertices.get(vertex).keySet()); - } - - public Collection getIncidentEdges(V vertex) - { - if (!containsVertex(vertex)) - return null; - return Collections.unmodifiableCollection(vertices.get(vertex).values()); - } - - public boolean addVertex(V vertex) - { - if(vertex == null) { - throw new IllegalArgumentException("vertex may not be null"); - } - if (!containsVertex(vertex)) { - vertices.put(vertex, new HashMap()); - return true; - } else { - return false; - } - } - - public boolean removeVertex(V vertex) - { - if (!containsVertex(vertex)) - return false; - - // iterate over copy of incident edge collection - for (E edge : new ArrayList(vertices.get(vertex).values())) - removeEdge(edge); - - vertices.remove(vertex); - return true; - } - - public boolean removeEdge(E edge) - { - if (!containsEdge(edge)) - return false; - - Pair endpoints = getEndpoints(edge); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - - // remove incident vertices from each others' adjacency maps - vertices.get(v1).remove(v2); - vertices.get(v2).remove(v1); - - edges.remove(edge); - return true; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/UndirectedSparseMultigraph.java b/gui/jung-src/edu/uci/ics/jung/graph/UndirectedSparseMultigraph.java deleted file mode 100644 index b5f8815f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/UndirectedSparseMultigraph.java +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Created on Mar 6, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Oct 18, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; - -/** - * An implementation of UndirectedGraph that is suitable for - * sparse graphs and permits parallel edges. - */ -@SuppressWarnings("serial") -public class UndirectedSparseMultigraph - extends AbstractTypedGraph - implements UndirectedGraph, MultiGraph -{ - /** - * Returns a {@code Factory} that creates an instance of this graph type. - * @param the vertex type for the graph factory - * @param the edge type for the graph factory - */ - public static Factory> getFactory() { - return new Factory> () { - - public UndirectedGraph create() { - return new UndirectedSparseMultigraph(); - } - }; - } - - protected Map> vertices; // Map of vertices to adjacency sets - protected Map> edges; // Map of edges to incident vertex sets - - /** - * Creates a new instance. 
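// Illustrative sketch (not part of the original sources): UndirectedSparseGraph, deleted
// above, rejects a second edge between the same pair of endpoints, while the multigraph
// variants permit parallel edges.
import edu.uci.ics.jung.graph.UndirectedSparseGraph;
import edu.uci.ics.jung.graph.UndirectedSparseMultigraph;

public class ParallelEdgeSketch {
    public static void main(String[] args) {
        UndirectedSparseGraph<String, String> simple = new UndirectedSparseGraph<String, String>();
        System.out.println(simple.addEdge("e1", "a", "b")); // true (endpoints are added implicitly)
        System.out.println(simple.addEdge("e2", "a", "b")); // false: an a-b edge already exists

        UndirectedSparseMultigraph<String, String> multi = new UndirectedSparseMultigraph<String, String>();
        System.out.println(multi.addEdge("e1", "a", "b"));  // true
        System.out.println(multi.addEdge("e2", "a", "b"));  // true: parallel edges are allowed
    }
}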
- */ - public UndirectedSparseMultigraph() { - super(EdgeType.UNDIRECTED); - vertices = new HashMap>(); - edges = new HashMap>(); - } - - public Collection getEdges() { - return Collections.unmodifiableCollection(edges.keySet()); - } - - public Collection getVertices() { - return Collections.unmodifiableCollection(vertices.keySet()); - } - - public boolean containsVertex(V vertex) { - return vertices.keySet().contains(vertex); - } - - public boolean containsEdge(E edge) { - return edges.keySet().contains(edge); - } - - protected Collection getIncident_internal(V vertex) - { - return vertices.get(vertex); - } - - public boolean addVertex(V vertex) { - if(vertex == null) { - throw new IllegalArgumentException("vertex may not be null"); - } - if (!containsVertex(vertex)) - { - vertices.put(vertex, new HashSet()); - return true; - } else { - return false; - } - } - - public boolean removeVertex(V vertex) { - if (!containsVertex(vertex)) - return false; - - for (E edge : new ArrayList(getIncident_internal(vertex))) - removeEdge(edge); - - vertices.remove(vertex); - return true; - } - - @Override - public boolean addEdge(E edge, V v1, V v2, EdgeType edgeType) { - return addEdge(edge, new Pair(v1, v2), edgeType); - } - - @Override - public boolean addEdge(E edge, Pair endpoints, EdgeType edge_type) - { - validateEdgeType(edge_type); - - Pair new_endpoints = getValidatedEndpoints(edge, endpoints); - if (new_endpoints == null) - return false; - - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - - edges.put(edge, new_endpoints); - - if (!containsVertex(v1)) - this.addVertex(v1); - - if (!containsVertex(v2)) - this.addVertex(v2); - - vertices.get(v1).add(edge); - vertices.get(v2).add(edge); - - return true; - } - - public boolean removeEdge(E edge) { - if (!containsEdge(edge)) - return false; - - Pair endpoints = getEndpoints(edge); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - - // remove edge from incident vertices' adjacency sets - vertices.get(v1).remove(edge); - vertices.get(v2).remove(edge); - - edges.remove(edge); - return true; - } - - public Collection getInEdges(V vertex) { - return this.getIncidentEdges(vertex); - } - - public Collection getOutEdges(V vertex) { - return this.getIncidentEdges(vertex); - } - - public Collection getPredecessors(V vertex) { - return this.getNeighbors(vertex); - } - - public Collection getSuccessors(V vertex) { - return this.getNeighbors(vertex); - } - - public Collection getNeighbors(V vertex) { - if (!containsVertex(vertex)) - return null; - - Set neighbors = new HashSet(); - for (E edge : getIncident_internal(vertex)) - { - Pair endpoints = this.getEndpoints(edge); - V e_a = endpoints.getFirst(); - V e_b = endpoints.getSecond(); - if (vertex.equals(e_a)) - neighbors.add(e_b); - else - neighbors.add(e_a); - } - - return Collections.unmodifiableCollection(neighbors); - } - - public Collection getIncidentEdges(V vertex) { - if (!containsVertex(vertex)) - return null; - - return Collections.unmodifiableCollection(getIncident_internal(vertex)); - } - - @Override - public E findEdge(V v1, V v2) { - if (!containsVertex(v1) || !containsVertex(v2)) - return null; - for (E edge : getIncident_internal(v1)) { - Pair endpoints = this.getEndpoints(edge); - V e_a = endpoints.getFirst(); - V e_b = endpoints.getSecond(); - if ((v1.equals(e_a) && v2.equals(e_b)) || (v1.equals(e_b) && v2.equals(e_a))) - return edge; - } - return null; - } - - public Pair getEndpoints(E edge) { - return edges.get(edge); - } - - public V getDest(E directed_edge) 
{ - return null; - } - - public V getSource(E directed_edge) { - return null; - } - - public boolean isDest(V vertex, E edge) { - return false; - } - - public boolean isSource(V vertex, E edge) { - return false; - } - - public int getEdgeCount() { - return edges.size(); - } - - public int getVertexCount() { - return vertices.size(); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/event/GraphEvent.java b/gui/jung-src/edu/uci/ics/jung/graph/event/GraphEvent.java deleted file mode 100644 index 0c398c6c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/event/GraphEvent.java +++ /dev/null @@ -1,106 +0,0 @@ -package edu.uci.ics.jung.graph.event; - -import edu.uci.ics.jung.graph.Graph; - -/** - * - * - * @author tom nelson - * - * @param the vertex type - * @param the edge type - */ -public abstract class GraphEvent { - - protected Graph source; - protected Type type; - - /** - * Creates an instance with the specified {@code source} graph and {@code Type} - * (vertex/edge addition/removal). - */ - public GraphEvent(Graph source, Type type) { - this.source = source; - this.type = type; - } - - /** - * Types of graph events. - */ - public static enum Type { - VERTEX_ADDED, - VERTEX_REMOVED, - EDGE_ADDED, - EDGE_REMOVED - } - - /** - * An event type pertaining to graph vertices. - */ - public static class Vertex extends GraphEvent { - protected V vertex; - - /** - * Creates a graph event for the specified graph, vertex, and type. - */ - public Vertex(Graph source, Type type, V vertex) { - super(source,type); - this.vertex = vertex; - } - - /** - * Retrieves the vertex associated with this event. - */ - public V getVertex() { - return vertex; - } - - @Override - public String toString() { - return "GraphEvent type:"+type+" for "+vertex; - } - - } - - /** - * An event type pertaining to graph edges. - */ - public static class Edge extends GraphEvent { - protected E edge; - - /** - * Creates a graph event for the specified graph, edge, and type. - */ - public Edge(Graph source, Type type, E edge) { - super(source,type); - this.edge = edge; - } - - /** - * Retrieves the edge associated with this event. - */ - public E getEdge() { - return edge; - } - - @Override - public String toString() { - return "GraphEvent type:"+type+" for "+edge; - } - - } - - /** - * @return the source - */ - public Graph getSource() { - return source; - } - - /** - * @return the type - */ - public Type getType() { - return type; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/event/GraphEventListener.java b/gui/jung-src/edu/uci/ics/jung/graph/event/GraphEventListener.java deleted file mode 100644 index 943c3004..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/event/GraphEventListener.java +++ /dev/null @@ -1,16 +0,0 @@ -package edu.uci.ics.jung.graph.event; - -import java.util.EventListener; - -/** - * An interface for classes that listen for graph events. - */ -public interface GraphEventListener extends EventListener -{ - /** - * Method called by the process generating a graph event to which - * this instance is listening. The implementor of this interface - * is responsible for deciding what behavior is appropriate. - */ - void handleGraphEvent(GraphEvent evt); -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/event/package.html b/gui/jung-src/edu/uci/ics/jung/graph/event/package.html deleted file mode 100644 index 2aeb7ad4..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/event/package.html +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - -

          Support for generating events in response to graph actions, especially mutations. - - - diff --git a/gui/jung-src/edu/uci/ics/jung/graph/package.html b/gui/jung-src/edu/uci/ics/jung/graph/package.html deleted file mode 100644 index 849e0d2e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/package.html +++ /dev/null @@ -1,72 +0,0 @@ - - - - - - - -
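A rough illustration (not part of the original package documentation) of how these event types are consumed: a listener implements GraphEventListener and receives GraphEvent instances describing vertex and edge additions and removals. Events are normally fired by a decorating graph such as the ObservableGraph mentioned in the notes further down; here an event is built and dispatched by hand simply to show the types involved.

import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;
import edu.uci.ics.jung.graph.event.GraphEvent;
import edu.uci.ics.jung.graph.event.GraphEventListener;

public class LoggingListener implements GraphEventListener<String, String> {
    public void handleGraphEvent(GraphEvent<String, String> evt) {
        // GraphEvent.Type is VERTEX_ADDED, VERTEX_REMOVED, EDGE_ADDED or EDGE_REMOVED.
        System.out.println(evt.getType() + " in graph " + evt.getSource());
    }

    public static void main(String[] args) {
        Graph<String, String> g = new SparseMultigraph<String, String>();
        g.addVertex("a");
        // Constructed by hand here; a decorating graph would normally fire this.
        GraphEvent<String, String> evt =
            new GraphEvent.Vertex<String, String>(g, GraphEvent.Type.VERTEX_ADDED, "a");
        new LoggingListener().handleGraphEvent(evt);
    }
}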

          Interfaces for the JUNG graph types, and some representative implementations.

          - -

          A graph consists of a set of vertices and a set of edges which connect the -vertices. The base interface is Hypergraph, which defines the most -general type of graph; other interfaces (Graph, DirectedGraph, etc.) -define more restrictive graph types. -

          Vertex and edge types are specified at compile time using Java 1.5 generics. - -
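For instance (an illustrative snippet, not part of the original package description), a graph with String vertices and Integer edge labels fixes both type parameters at compile time:

import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;

public class TypedGraphSketch {
    public static void main(String[] args) {
        Graph<String, Integer> g = new SparseMultigraph<String, Integer>();
        g.addVertex("a");
        g.addVertex("b");
        g.addEdge(1, "a", "b");   // the Integer 1 labels the a-b edge
        // g.addVertex(42);       // rejected at compile time: 42 is not a String
    }
}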

          Types of graphs which are supported include (but are not limited to) -

            -
          • edges (these have exactly two endpoints, which may or may not be distinct) -
          • self-loops (edges which connect exactly one vertex) -
          • directed and undirected edges -
          • vertices and edges with attributes (for example, weighted edges) -
          • vertices and edges with different constraints or properties (examples: trees, bipartite - graphs, or multimodal graphs) -
          • parallel edges (multiple edges which connect a single set of vertices) -
          • internal representations as matrices or as adjacency lists or adjacency maps -
          • internal representations that order their elements according to insertion time, -natural ordering, or a specified Comparator -
          -Extensions or implementations of this interface -may enforce or disallow any or all of these variations. -
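As a concrete illustration of a few of these variations (a sketch that is not part of the original package description), the implementations deleted in this patch support, for example, self-loops, parallel edges, and insertion-ordered element collections:

import edu.uci.ics.jung.graph.UndirectedOrderedSparseMultigraph;

public class VariationsSketch {
    public static void main(String[] args) {
        UndirectedOrderedSparseMultigraph<String, String> g =
                new UndirectedOrderedSparseMultigraph<String, String>();
        g.addEdge("loop", "a", "a");          // a self-loop on vertex "a"
        g.addEdge("e1", "a", "b");
        g.addEdge("e2", "a", "b");            // a parallel a-b edge
        System.out.println(g.getVertices());      // [a, b] -- insertion order is preserved
        System.out.println(g.getEdgeCount());     // 3
        System.out.println(g.getNeighbors("a"));  // contains both a (via the loop) and b
    }
}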

          Notes: -

            -
          • The collections returned by graph instances -should in general be treated as read-only. While they are not contractually -guaranteed (or required) to be immutable, -these interfaces do not define the outcome if they are mutated. -Mutations should be done via the {add,remove}{Edge,Vertex} methods, or -in the constructor. -
          • "Wrapper" graphs are available through GraphDecorator; these are useful -if you want to create a graph implementation that uses another implementation to do the work, -and adds some extra behavior. (One example: ObservableGraph, which notifies -registered listeners when graph mutations occur.) -
          - - diff --git a/gui/jung-src/edu/uci/ics/jung/graph/util/BalancedEdgeIndexFunction.java b/gui/jung-src/edu/uci/ics/jung/graph/util/BalancedEdgeIndexFunction.java deleted file mode 100644 index 54c0e4d8..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/util/BalancedEdgeIndexFunction.java +++ /dev/null @@ -1,85 +0,0 @@ -package edu.uci.ics.jung.graph.util; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import edu.uci.ics.jung.graph.Graph; - -/** - * EdgeIndexFunction that draws parallel edges in a balanced way. Note - * this returns indices in [-n/2, n/2], rather that [0, n-1]. - * @author aleks - * - */ -public class BalancedEdgeIndexFunction implements EdgeIndexFunction { - private Map indexer; - private BalancedEdgeIndexFunction() { - indexer = new HashMap(); - } - - public static BalancedEdgeIndexFunction getInstance() { - return new BalancedEdgeIndexFunction(); - } - - public int getIndex(Graph graph, E e) { - Integer idx = indexer.get(e); - if (idx==null) { - reset(graph, e); - idx = indexer.get(e); - } - return idx; - } - - public void reset() { - indexer = new HashMap(); - } - - public void reset(Graph g, E edge) { - synchronized (g) { - V v1 = g.getSource(edge); - V v2 = g.getDest(edge); - List out = new ArrayList(); - List in = new ArrayList(); - - // Use two loops to make sure one direction comes - // before the other. - for (E e : g.getOutEdges(v1)) { - if (g.getDest(e)==v2) { - out.add(e); - } - } - - for (E e : g.getInEdges(v1)) { - if (g.getSource(e)==v2) { - in.add(e); - } - } - - int index; - if (v1==v2) { - index = 0; - for (E e : in) indexer.put(e, index++); - for (E e : out) indexer.put(e, index++); - } else { - int edge_count = in.size()+out.size(); - index = -edge_count/2 - 1; - for (E e : in) { - indexer.put(e,index); - index++; - if (index == -1 && edge_count%2==0) index++; - } - index+=2; - for (E e : out) { - // these edges are drawn in the opposite orientation, - // so bend backwards. - indexer.put(e,-index); - index++; - if (index == 1 && edge_count%2==0) index++; - } - } - } - - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/util/Context.java b/gui/jung-src/edu/uci/ics/jung/graph/util/Context.java deleted file mode 100644 index 641feebe..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/util/Context.java +++ /dev/null @@ -1,48 +0,0 @@ -package edu.uci.ics.jung.graph.util; - -/** - * A class that is used to link together a graph element and a specific graph. - * Provides appropriate implementations of hashCode and equals. - */ -public class Context -{ - @SuppressWarnings("unchecked") - private static Context instance = new Context(); - - /** - * The graph element which defines this context. - */ - public G graph; - - /** - * The edge element which defines this context. - */ - public E element; - - /** - * Returns an instance of this type for the specified graph and element. 
- * @param the graph type - * @param the element type - */ - @SuppressWarnings("unchecked") - public static Context getInstance(G graph, E element) { - instance.graph = graph; - instance.element = element; - return instance; - } - - @Override - public int hashCode() { - return graph.hashCode() ^ element.hashCode(); - } - - @SuppressWarnings("unchecked") - @Override - public boolean equals(Object o) { - if (!(o instanceof Context)) - return false; - Context context = (Context)o; - return context.graph.equals(graph) && context.element.equals(element); - } -} - diff --git a/gui/jung-src/edu/uci/ics/jung/graph/util/DefaultParallelEdgeIndexFunction.java b/gui/jung-src/edu/uci/ics/jung/graph/util/DefaultParallelEdgeIndexFunction.java deleted file mode 100644 index 8b7a760f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/util/DefaultParallelEdgeIndexFunction.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Created on Sep 24, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph.util; - -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; - -import edu.uci.ics.jung.graph.Graph; - -/** - * A class which creates and maintains indices for parallel edges. - * Parallel edges are defined here to be the collection of edges - * that are returned by v.findEdgeSet(w) for some - * v and w. - * - *

          At this time, users are responsible for resetting the indices - * (by calling reset()) if changes to the - * graph make it appropriate.

          - * - * @author Joshua O'Madadhain - * @author Tom Nelson - * - */ -public class DefaultParallelEdgeIndexFunction implements EdgeIndexFunction -{ - protected Map,E>, Integer> edge_index = new HashMap,E>, Integer>(); - - private DefaultParallelEdgeIndexFunction() { - } - - /** - * Returns an instance of this class. - * @param the vertex type - * @param the edge type - */ - public static DefaultParallelEdgeIndexFunction getInstance() { - return new DefaultParallelEdgeIndexFunction(); - } - - /** - * Returns the index for e in graph. - * Calculates the indices for e and for all edges parallel - * to e, if they are not already assigned. - */ - public int getIndex(Graph graph, E e) - { - - Integer index = edge_index.get(Context.,E>getInstance(graph,e)); - //edge_index.get(e); - if(index == null) { - Pair endpoints = graph.getEndpoints(e); - V u = endpoints.getFirst(); - V v = endpoints.getSecond(); - if(u.equals(v)) { - index = getIndex(graph, e, v); - } else { - index = getIndex(graph, e, u, v); - } - } - return index.intValue(); - } - - protected int getIndex(Graph graph, E e, V v, V u) { - Collection commonEdgeSet = new HashSet(graph.getIncidentEdges(u)); - commonEdgeSet.retainAll(graph.getIncidentEdges(v)); - for(Iterator iterator=commonEdgeSet.iterator(); iterator.hasNext(); ) { - E edge = iterator.next(); - Pair ep = graph.getEndpoints(edge); - V first = ep.getFirst(); - V second = ep.getSecond(); - // remove loops - if(first.equals(second) == true) { - iterator.remove(); - } - // remove edges in opposite direction - if(first.equals(v) == false) { - iterator.remove(); - } - } - int count=0; - for(E other : commonEdgeSet) { - if(e.equals(other) == false) { - edge_index.put(Context.,E>getInstance(graph,other), count); - count++; - } - } - edge_index.put(Context.,E>getInstance(graph,e), count); - return count; - } - - protected int getIndex(Graph graph, E e, V v) { - Collection commonEdgeSet = new HashSet(); - for(E another : graph.getIncidentEdges(v)) { - V u = graph.getOpposite(v, another); - if(u.equals(v)) { - commonEdgeSet.add(another); - } - } - int count=0; - for(E other : commonEdgeSet) { - if(e.equals(other) == false) { - edge_index.put(Context.,E>getInstance(graph,other), count); - count++; - } - } - edge_index.put(Context.,E>getInstance(graph,e), count); - return count; - } - - - /** - * Resets the indices for this edge and its parallel edges. - * Should be invoked when an edge parallel to e - * has been added or removed. - * @param e - */ - public void reset(Graph graph, E e) { - Pair endpoints = graph.getEndpoints(e); - getIndex(graph, e, endpoints.getFirst()); - getIndex(graph, e, endpoints.getFirst(), endpoints.getSecond()); - } - - /** - * Clears all edge indices for all edges in all graphs. - * Does not recalculate the indices. - */ - public void reset() - { - edge_index.clear(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/util/EdgeIndexFunction.java b/gui/jung-src/edu/uci/ics/jung/graph/util/EdgeIndexFunction.java deleted file mode 100644 index 07ad5080..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/util/EdgeIndexFunction.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Created on Sep 24, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- */ -package edu.uci.ics.jung.graph.util; - -import edu.uci.ics.jung.graph.Graph; - - -/** - * An interface for a service to access the index of a given edge (in a given graph) - * into the set formed by the given edge and all the other edges it is parallel to. - * - *

          Note that in current use, this index is assumed to be an integer value in - * the interval [0,n-1], where n-1 is the number of edges parallel to e. - * - * @author Tom Nelson - * - */ -public interface EdgeIndexFunction { - - /** - * Returns e's index in graph. - * The index of e is defined as its position in some - * consistent ordering of e and all edges parallel to e. - * @param graph the graph in which the edge is to be queried - * @param e the edge whose index is to be queried - * @return e's index in graph - */ - int getIndex(Graph graph, E e); - - /** - * Resets the indices for edge and its parallel edges in graph. - * Should be invoked when an edge parallel to edge - * has been added or removed. - * - * @param g the graph in which edge's index is to be reset - * @param edge the edge whose index is to be reset - */ - void reset(Graph g, E edge); - - /** - * Clears all edge indices for all edges in all graphs. - * Does not recalculate the indices. - */ - void reset(); -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/util/EdgeType.java b/gui/jung-src/edu/uci/ics/jung/graph/util/EdgeType.java deleted file mode 100644 index f2c68edf..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/util/EdgeType.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Created on February 27, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ - -package edu.uci.ics.jung.graph.util; - -/** - * Defines the possible edge types for graphs which assign types to edges. - */ -public enum EdgeType -{ - DIRECTED, - UNDIRECTED -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/util/Graphs.java b/gui/jung-src/edu/uci/ics/jung/graph/util/Graphs.java deleted file mode 100644 index ea217c4a..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/util/Graphs.java +++ /dev/null @@ -1,989 +0,0 @@ -package edu.uci.ics.jung.graph.util; - -import java.io.Serializable; -import java.util.Collection; - -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.Forest; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.Tree; -import edu.uci.ics.jung.graph.UndirectedGraph; -/** - * Provides specialized implementations of GraphDecorator. Currently these - * wrapper types include "synchronized" and "unmodifiable". - * - *

          The methods of this class may each throw a NullPointerException - * if the graphs or class objects provided to them are null. - * - * @author Tom Nelson - */ - -public class Graphs { - - /** - * Returns a synchronized graph backed by the passed argument graph. - * @param the vertex type - * @param the edge type - * @param graph the graph for which a synchronized wrapper is to be created - * @return a synchronized graph backed by the passed argument graph - */ - public static Graph synchronizedGraph(Graph graph) { - return new SynchronizedGraph(graph); - } - - /** - * Returns a synchronized DirectedGraph backed by the passed DirectedGraph. - * @param the vertex type - * @param the edge type - * @param graph the graph for which a synchronized wrapper is to be created - * @return a synchronized DirectedGraph backed by the passed DirectedGraph - */ - public static DirectedGraph synchronizedDirectedGraph(DirectedGraph graph) { - return new SynchronizedDirectedGraph(graph); - } - - /** - * Returns a synchronized UndirectedGraph backed by the passed UndirectedGraph. - * @param the vertex type - * @param the edge type - * @param graph the graph for which a synchronized wrapper is to be created - * @return a synchronized UndirectedGraph backed by the passed UndirectedGraph - */ - public static UndirectedGraph synchronizedUndirectedGraph(UndirectedGraph graph) { - return new SynchronizedUndirectedGraph(graph); - } - - /** - * Returns a synchronized Forest backed by the passed Forest. - * @param the vertex type - * @param the edge type - * @param forest the forest for which a synchronized wrapper is to be created - * @return a synchronized Forest backed by the passed Forest - */ - public static Forest synchronizedForest(Forest forest) { - return new SynchronizedForest(forest); - } - - /** - * Returns a synchronized Tree backed by the passed Tree. - * @param the vertex type - * @param the edge type - * @param tree the tree for which a synchronized wrapper is to be created - * @return a synchronized Tree backed by the passed Tree - */ - public static Tree synchronizedTree(Tree tree) { - return new SynchronizedTree(tree); - } - - /** - * Returns an unmodifiable Graph backed by the passed Graph. - * @param the vertex type - * @param the edge type - * @param graph the graph for which the unmodifiable wrapper is to be returned - * @return an unmodifiable Graph backed by the passed Graph - */ - public static Graph unmodifiableGraph(Graph graph) { - return new UnmodifiableGraph(graph); - } - - /** - * Returns an unmodifiable DirectedGraph backed by the passed graph. - * @param the vertex type - * @param the edge type - * @param graph the graph for which the unmodifiable wrapper is to be returned - * @return an unmodifiable DirectedGraph backed by the passed graph - */ - public static DirectedGraph unmodifiableDirectedGraph(DirectedGraph graph) { - return new UnmodifiableDirectedGraph(graph); - } - - /** - * Returns an unmodifiable UndirectedGraph backed by the passed graph. - * @param the vertex type - * @param the edge type - * @param graph the graph for which the unmodifiable wrapper is to be returned - * @return an unmodifiable UndirectedGraph backed by the passed graph - */ - public static UndirectedGraph unmodifiableUndirectedGraph(UndirectedGraph graph) { - return new UnmodifiableUndirectedGraph(graph); - } - - /** - * Returns an unmodifiable Tree backed by the passed tree. 
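// Illustrative sketch (not part of the original sources) of the wrapper factories defined
// in this Graphs utility class: a synchronized view delegates every call under a lock,
// and an unmodifiable view rejects mutating calls.
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;
import edu.uci.ics.jung.graph.util.Graphs;

public class WrapperSketch {
    public static void main(String[] args) {
        Graph<String, String> g = new SparseMultigraph<String, String>();
        g.addEdge("e1", "a", "b");

        Graph<String, String> sync = Graphs.synchronizedGraph(g);
        sync.addVertex("c");                          // every method is synchronized on the wrapper

        Graph<String, String> frozen = Graphs.unmodifiableGraph(g);
        System.out.println(frozen.getVertexCount());  // 3 -- reads go through to the delegate
        try {
            frozen.addVertex("d");                    // mutators throw
        } catch (UnsupportedOperationException expected) {
            System.out.println("unmodifiable view rejected the mutation");
        }
    }
}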
- * @param the vertex type - * @param the edge type - * @param tree the tree for which the unmodifiable wrapper is to be returned - * @return an unmodifiable Tree backed by the passed tree - */ - public static Tree unmodifiableTree(Tree tree) { - return new UnmodifiableTree(tree); - } - - /** - * Returns an unmodifiable Forest backed by the passed forest. - * @param the vertex type - * @param the edge type - * @param forest the forest for which the unmodifiable wrapper is to be returned - * @return an unmodifiable Forest backed by the passed forest - */ - public static Forest unmodifiableForest(Forest forest) { - return new UnmodifiableForest(forest); - } - - - @SuppressWarnings("serial") - static abstract class SynchronizedAbstractGraph implements Graph, Serializable { - protected Graph delegate; - - private SynchronizedAbstractGraph(Graph delegate) { - if(delegate == null) { - throw new NullPointerException(); - } - this.delegate = delegate; - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getDefaultEdgeType() - */ - public EdgeType getDefaultEdgeType() - { - return delegate.getDefaultEdgeType(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#addEdge(Object, Object, Object, EdgeType) - */ - public synchronized boolean addEdge(E e, V v1, V v2, EdgeType edgeType) { - return delegate.addEdge(e, v1, v2, edgeType); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#addEdge(Object, Collection, EdgeType) - */ - public synchronized boolean addEdge(E e, Collection - vertices, EdgeType edgeType) - { - return delegate.addEdge(e, vertices, edgeType); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#addEdge(Object, Object, Object) - */ - public synchronized boolean addEdge(E e, V v1, V v2) { - return delegate.addEdge(e, v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#addVertex(java.lang.Object) - */ - public synchronized boolean addVertex(V vertex) { - return delegate.addVertex(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#isIncident(java.lang.Object, java.lang.Object) - */ - public synchronized boolean isIncident(V vertex, E edge) { - return delegate.isIncident(vertex, edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#isNeighbor(java.lang.Object, java.lang.Object) - */ - public synchronized boolean isNeighbor(V v1, V v2) { - return delegate.isNeighbor(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#degree(java.lang.Object) - */ - public synchronized int degree(V vertex) { - return delegate.degree(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#findEdge(java.lang.Object, java.lang.Object) - */ - public synchronized E findEdge(V v1, V v2) { - return delegate.findEdge(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#findEdgeSet(java.lang.Object, java.lang.Object) - */ - public synchronized Collection findEdgeSet(V v1, V v2) - { - return delegate.findEdgeSet(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getEdges() - */ - public synchronized Collection getEdges() { - return delegate.getEdges(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getEdges(EdgeType) - */ - public synchronized Collection getEdges(EdgeType edgeType) { - return delegate.getEdges(edgeType); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getEndpoints(java.lang.Object) - */ - public synchronized Pair getEndpoints(E edge) { - return delegate.getEndpoints(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentEdges(java.lang.Object) - */ - public synchronized Collection getIncidentEdges(V 
vertex) { - return delegate.getIncidentEdges(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentVertices(java.lang.Object) - */ - public synchronized Collection getIncidentVertices(E edge) { - return delegate.getIncidentVertices(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getInEdges(java.lang.Object) - */ - public synchronized Collection getInEdges(V vertex) { - return delegate.getInEdges(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getNeighbors(java.lang.Object) - */ - public synchronized Collection getNeighbors(V vertex) { - return delegate.getNeighbors(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getOpposite(java.lang.Object, java.lang.Object) - */ - public synchronized V getOpposite(V vertex, E edge) { - return delegate.getOpposite(vertex, edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getOutEdges(java.lang.Object) - */ - public synchronized Collection getOutEdges(V vertex) { - return delegate.getOutEdges(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getPredecessors(java.lang.Object) - */ - public synchronized Collection getPredecessors(V vertex) { - return delegate.getPredecessors(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getSuccessors(java.lang.Object) - */ - public synchronized Collection getSuccessors(V vertex) { - return delegate.getSuccessors(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getVertices() - */ - public synchronized Collection getVertices() { - return delegate.getVertices(); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getEdgeCount() - */ - public synchronized int getEdgeCount() { - return delegate.getEdgeCount(); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getEdgeCount(EdgeType) - */ - public synchronized int getEdgeCount(EdgeType edge_type) - { - return delegate.getEdgeCount(edge_type); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getVertexCount() - */ - public synchronized int getVertexCount() { - return delegate.getVertexCount(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#inDegree(java.lang.Object) - */ - public synchronized int inDegree(V vertex) { - return delegate.inDegree(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getEdgeType(java.lang.Object) - */ - public synchronized EdgeType getEdgeType(E edge) { - return delegate.getEdgeType(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isPredecessor(java.lang.Object, java.lang.Object) - */ - public synchronized boolean isPredecessor(V v1, V v2) { - return delegate.isPredecessor(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isSuccessor(java.lang.Object, java.lang.Object) - */ - public synchronized boolean isSuccessor(V v1, V v2) { - return delegate.isSuccessor(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getNeighborCount(java.lang.Object) - */ - public synchronized int getNeighborCount(V vertex) { - return delegate.getNeighborCount(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getPredecessorCount(java.lang.Object) - */ - public synchronized int getPredecessorCount(V vertex) { - return delegate.getPredecessorCount(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getSuccessorCount(java.lang.Object) - */ - public synchronized int getSuccessorCount(V vertex) { - return delegate.getSuccessorCount(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#outDegree(java.lang.Object) - */ - public synchronized int outDegree(V vertex) { - return delegate.outDegree(vertex); - } - - /** - 
* @see edu.uci.ics.jung.graph.Hypergraph#removeEdge(java.lang.Object) - */ - public synchronized boolean removeEdge(E edge) { - return delegate.removeEdge(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#removeVertex(java.lang.Object) - */ - public synchronized boolean removeVertex(V vertex) { - return delegate.removeVertex(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getDest(java.lang.Object) - */ - public synchronized V getDest(E directed_edge) { - return delegate.getDest(directed_edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getSource(java.lang.Object) - */ - public synchronized V getSource(E directed_edge) { - return delegate.getSource(directed_edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isDest(java.lang.Object, java.lang.Object) - */ - public synchronized boolean isDest(V vertex, E edge) { - return delegate.isDest(vertex, edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isSource(java.lang.Object, java.lang.Object) - */ - public synchronized boolean isSource(V vertex, E edge) { - return delegate.isSource(vertex, edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentCount(Object) - */ - public synchronized int getIncidentCount(E edge) - { - return delegate.getIncidentCount(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#addEdge(java.lang.Object, java.util.Collection) - */ - public synchronized boolean addEdge(E hyperedge, Collection vertices) { - return delegate.addEdge(hyperedge, vertices); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#containsEdge(java.lang.Object) - */ - public synchronized boolean containsEdge(E edge) { - return delegate.containsEdge(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#containsVertex(java.lang.Object) - */ - public synchronized boolean containsVertex(V vertex) { - return delegate.containsVertex(vertex); - } - - } - - @SuppressWarnings("serial") - static class SynchronizedGraph extends SynchronizedAbstractGraph implements Serializable { - - private SynchronizedGraph(Graph delegate) { - super(delegate); - } - } - - @SuppressWarnings("serial") - static class SynchronizedUndirectedGraph extends SynchronizedAbstractGraph - implements UndirectedGraph, Serializable { - private SynchronizedUndirectedGraph(UndirectedGraph delegate) { - super(delegate); - } - } - - @SuppressWarnings("serial") - static class SynchronizedDirectedGraph extends SynchronizedAbstractGraph - implements DirectedGraph, Serializable { - - private SynchronizedDirectedGraph(DirectedGraph delegate) { - super(delegate); - } - - @Override - public synchronized V getDest(E directed_edge) { - return ((DirectedGraph)delegate).getDest(directed_edge); - } - - @Override - public synchronized V getSource(E directed_edge) { - return ((DirectedGraph)delegate).getSource(directed_edge); - } - - @Override - public synchronized boolean isDest(V vertex, E edge) { - return ((DirectedGraph)delegate).isDest(vertex, edge); - } - - @Override - public synchronized boolean isSource(V vertex, E edge) { - return ((DirectedGraph)delegate).isSource(vertex, edge); - } - } - - @SuppressWarnings("serial") - static class SynchronizedTree extends SynchronizedForest implements Tree { - - /** - * Creates a new instance based on the provided {@code delegate}. 
- * @param delegate - */ - public SynchronizedTree(Tree delegate) { - super(delegate); - } - - public synchronized int getDepth(V vertex) { - return ((Tree)delegate).getDepth(vertex); - } - - public synchronized int getHeight() { - return ((Tree)delegate).getHeight(); - } - - public synchronized V getRoot() { - return ((Tree)delegate).getRoot(); - } - } - - @SuppressWarnings("serial") - static class SynchronizedForest extends SynchronizedDirectedGraph implements Forest { - - /** - * Creates a new instance based on the provided {@code delegate}. - * @param delegate - */ - public SynchronizedForest(Forest delegate) { - super(delegate); - } - - public synchronized Collection> getTrees() { - return ((Forest)delegate).getTrees(); - } - - public int getChildCount(V vertex) - { - return ((Forest)delegate).getChildCount(vertex); - } - - public Collection getChildEdges(V vertex) - { - return ((Forest)delegate).getChildEdges(vertex); - } - - public Collection getChildren(V vertex) - { - return ((Forest)delegate).getChildren(vertex); - } - - public V getParent(V vertex) - { - return ((Forest)delegate).getParent(vertex); - } - - public E getParentEdge(V vertex) - { - return ((Forest)delegate).getParentEdge(vertex); - } - } - - @SuppressWarnings("serial") - static abstract class UnmodifiableAbstractGraph implements Graph, Serializable { - protected Graph delegate; - - - private UnmodifiableAbstractGraph(Graph delegate) { - if(delegate == null) { - throw new NullPointerException(); - } - this.delegate = delegate; - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getDefaultEdgeType() - */ - public EdgeType getDefaultEdgeType() - { - return delegate.getDefaultEdgeType(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#addEdge(Object, Object, Object, EdgeType) - */ - public boolean addEdge(E e, V v1, V v2, EdgeType edgeType) { - throw new UnsupportedOperationException(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#addEdge(Object, Collection, EdgeType) - */ - public boolean addEdge(E e, Collection vertices, - EdgeType edgeType) - { - throw new UnsupportedOperationException(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#addEdge(Object, Object, Object) - */ - public boolean addEdge(E e, V v1, V v2) { - throw new UnsupportedOperationException(); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#addVertex(java.lang.Object) - */ - public boolean addVertex(V vertex) { - throw new UnsupportedOperationException(); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#isIncident(java.lang.Object, java.lang.Object) - */ - public boolean isIncident(V vertex, E edge) { - return delegate.isIncident(vertex, edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#isNeighbor(java.lang.Object, java.lang.Object) - */ - public boolean isNeighbor(V v1, V v2) { - return delegate.isNeighbor(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#degree(java.lang.Object) - */ - public int degree(V vertex) { - return delegate.degree(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#findEdge(java.lang.Object, java.lang.Object) - */ - public E findEdge(V v1, V v2) { - return delegate.findEdge(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#findEdgeSet(java.lang.Object, java.lang.Object) - */ - public Collection findEdgeSet(V v1, V v2) - { - return delegate.findEdgeSet(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getEdges() - */ - public Collection getEdges() { - return delegate.getEdges(); - } - - /** - * @see 
edu.uci.ics.jung.graph.Hypergraph#getEdgeCount() - */ - public int getEdgeCount() { - return delegate.getEdgeCount(); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getEdgeCount(EdgeType) - */ - public int getEdgeCount(EdgeType edge_type) - { - return delegate.getEdgeCount(edge_type); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getVertexCount() - */ - public int getVertexCount() { - return delegate.getVertexCount(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getEdges(edu.uci.ics.jung.graph.util.EdgeType) - */ - public Collection getEdges(EdgeType edgeType) { - return delegate.getEdges(edgeType); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getEndpoints(java.lang.Object) - */ - public Pair getEndpoints(E edge) { - return delegate.getEndpoints(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentEdges(java.lang.Object) - */ - public Collection getIncidentEdges(V vertex) { - return delegate.getIncidentEdges(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentVertices(java.lang.Object) - */ - public Collection getIncidentVertices(E edge) { - return delegate.getIncidentVertices(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getInEdges(java.lang.Object) - */ - public Collection getInEdges(V vertex) { - return delegate.getInEdges(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getNeighbors(java.lang.Object) - */ - public Collection getNeighbors(V vertex) { - return delegate.getNeighbors(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getOpposite(java.lang.Object, java.lang.Object) - */ - public V getOpposite(V vertex, E edge) { - return delegate.getOpposite(vertex, edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getOutEdges(java.lang.Object) - */ - public Collection getOutEdges(V vertex) { - return delegate.getOutEdges(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getPredecessors(java.lang.Object) - */ - public Collection getPredecessors(V vertex) { - return delegate.getPredecessors(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getSuccessors(java.lang.Object) - */ - public Collection getSuccessors(V vertex) { - return delegate.getSuccessors(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getVertices() - */ - public Collection getVertices() { - return delegate.getVertices(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#inDegree(java.lang.Object) - */ - public int inDegree(V vertex) { - return delegate.inDegree(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getEdgeType(java.lang.Object) - */ - public EdgeType getEdgeType(E edge) { - return delegate.getEdgeType(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isPredecessor(java.lang.Object, java.lang.Object) - */ - public boolean isPredecessor(V v1, V v2) { - return delegate.isPredecessor(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isSuccessor(java.lang.Object, java.lang.Object) - */ - public boolean isSuccessor(V v1, V v2) { - return delegate.isSuccessor(v1, v2); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getNeighborCount(java.lang.Object) - */ - public int getNeighborCount(V vertex) { - return delegate.getNeighborCount(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getPredecessorCount(java.lang.Object) - */ - public int getPredecessorCount(V vertex) { - return delegate.getPredecessorCount(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getSuccessorCount(java.lang.Object) - */ - public int getSuccessorCount(V 
vertex) { - return delegate.getSuccessorCount(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#outDegree(java.lang.Object) - */ - public int outDegree(V vertex) { - return delegate.outDegree(vertex); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#removeEdge(java.lang.Object) - */ - public boolean removeEdge(E edge) { - throw new UnsupportedOperationException(); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#removeVertex(java.lang.Object) - */ - public boolean removeVertex(V vertex) { - throw new UnsupportedOperationException(); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getDest(java.lang.Object) - */ - public V getDest(E directed_edge) { - return delegate.getDest(directed_edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#getSource(java.lang.Object) - */ - public V getSource(E directed_edge) { - return delegate.getSource(directed_edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isDest(java.lang.Object, java.lang.Object) - */ - public boolean isDest(V vertex, E edge) { - return delegate.isDest(vertex, edge); - } - - /** - * @see edu.uci.ics.jung.graph.Graph#isSource(java.lang.Object, java.lang.Object) - */ - public boolean isSource(V vertex, E edge) { - return delegate.isSource(vertex, edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentCount(Object) - */ - public int getIncidentCount(E edge) - { - return delegate.getIncidentCount(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#addEdge(java.lang.Object, java.util.Collection) - */ - public boolean addEdge(E hyperedge, Collection vertices) { - return delegate.addEdge(hyperedge, vertices); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#containsEdge(java.lang.Object) - */ - public boolean containsEdge(E edge) { - return delegate.containsEdge(edge); - } - - /** - * @see edu.uci.ics.jung.graph.Hypergraph#containsVertex(java.lang.Object) - */ - public boolean containsVertex(V vertex) { - return delegate.containsVertex(vertex); - } - } - - @SuppressWarnings("serial") - static class UnmodifiableGraph extends UnmodifiableAbstractGraph implements Serializable { - private UnmodifiableGraph(Graph delegate) { - super(delegate); - } - } - - @SuppressWarnings("serial") - static class UnmodifiableDirectedGraph extends UnmodifiableAbstractGraph - implements DirectedGraph, Serializable { - private UnmodifiableDirectedGraph(DirectedGraph delegate) { - super(delegate); - } - - @Override - public V getDest(E directed_edge) { - return ((DirectedGraph)delegate).getDest(directed_edge); - } - - @Override - public V getSource(E directed_edge) { - return ((DirectedGraph)delegate).getSource(directed_edge); - } - - @Override - public boolean isDest(V vertex, E edge) { - return ((DirectedGraph)delegate).isDest(vertex, edge); - } - - @Override - public boolean isSource(V vertex, E edge) { - return ((DirectedGraph)delegate).isSource(vertex, edge); - } - } - - @SuppressWarnings("serial") - static class UnmodifiableUndirectedGraph extends UnmodifiableAbstractGraph - implements UndirectedGraph, Serializable { - private UnmodifiableUndirectedGraph(UndirectedGraph delegate) { - super(delegate); - } - } - - @SuppressWarnings("serial") - static class UnmodifiableForest extends UnmodifiableGraph - implements Forest, Serializable { - private UnmodifiableForest(Forest delegate) { - super(delegate); - } - - public Collection> getTrees() { - return ((Forest)delegate).getTrees(); - } - - public int getChildCount(V vertex) - { - return ((Forest)delegate).getChildCount(vertex); - } - - public 
Collection getChildEdges(V vertex) - { - return ((Forest)delegate).getChildEdges(vertex); - } - - public Collection getChildren(V vertex) - { - return ((Forest)delegate).getChildren(vertex); - } - - public V getParent(V vertex) - { - return ((Forest)delegate).getParent(vertex); - } - - public E getParentEdge(V vertex) - { - return ((Forest)delegate).getParentEdge(vertex); - } - } - - @SuppressWarnings("serial") - static class UnmodifiableTree extends UnmodifiableForest - implements Tree, Serializable { - private UnmodifiableTree(Tree delegate) { - super(delegate); - } - - public int getDepth(V vertex) { - return ((Tree)delegate).getDepth(vertex); - } - - public int getHeight() { - return ((Tree)delegate).getHeight(); - } - - public V getRoot() { - return ((Tree)delegate).getRoot(); - } - - @Override - public Collection> getTrees() { - return ((Tree)delegate).getTrees(); - } - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/util/IncidentEdgeIndexFunction.java b/gui/jung-src/edu/uci/ics/jung/graph/util/IncidentEdgeIndexFunction.java deleted file mode 100644 index 9b9bcbc1..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/util/IncidentEdgeIndexFunction.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Created on Sep 24, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph.util; - -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; - -import edu.uci.ics.jung.graph.Graph; - -/** - * A class which creates and maintains indices for incident edges. - * - * @author Tom Nelson - * - */ -public class IncidentEdgeIndexFunction implements EdgeIndexFunction -{ - protected Map edge_index = new HashMap(); - - private IncidentEdgeIndexFunction() { - } - - /** - * Returns an instance of this type. - * @param the vertex type - * @param the edge type - */ - public static IncidentEdgeIndexFunction getInstance() { - return new IncidentEdgeIndexFunction(); - } - - /** - * Returns the index for the specified edge. - * Calculates the indices for e and for all edges parallel - * to e. - */ - public int getIndex(Graph graph, E e) - { - Integer index = edge_index.get(e); - if(index == null) { - Pair endpoints = graph.getEndpoints(e); - V u = endpoints.getFirst(); - V v = endpoints.getSecond(); - if(u.equals(v)) { - index = getIndex(graph, e, v); - } else { - index = getIndex(graph, e, u, v); - } - } - return index.intValue(); - } - - protected int getIndex(Graph graph, E e, V u, V v) { - Collection commonEdgeSet = new HashSet(graph.getIncidentEdges(u)); - int count=0; - for(E other : commonEdgeSet) { - if(e.equals(other) == false) { - edge_index.put(other, count); - count++; - } - } - edge_index.put(e, count); - return count; - } - - protected int getIndex(Graph graph, E e, V v) { - Collection commonEdgeSet = new HashSet(); - for(E another : graph.getIncidentEdges(v)) { - V u = graph.getOpposite(v, another); - if(u.equals(v)) { - commonEdgeSet.add(another); - } - } - int count=0; - for(E other : commonEdgeSet) { - if(e.equals(other) == false) { - edge_index.put(other, count); - count++; - } - } - edge_index.put(e, count); - return count; - } - - - /** - * Resets the indices for this edge and its parallel edges. - * Should be invoked when an edge parallel to e - * has been added or removed. 
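For context, a minimal usage sketch of the IncidentEdgeIndexFunction deleted above. The class, its getInstance() factory and getIndex(Graph, E) method come from the source shown; the generic signatures (stripped by the extraction) and the vertex/edge labels are assumptions for illustration only.

import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;
import edu.uci.ics.jung.graph.util.EdgeIndexFunction;
import edu.uci.ics.jung.graph.util.IncidentEdgeIndexFunction;

public class EdgeIndexDemo {
    public static void main(String[] args) {
        Graph<String, Integer> g = new SparseMultigraph<String, Integer>();
        g.addVertex("a");
        g.addVertex("b");
        g.addEdge(1, "a", "b");  // first parallel edge
        g.addEdge(2, "a", "b");  // second parallel edge

        EdgeIndexFunction<String, Integer> index =
            IncidentEdgeIndexFunction.<String, Integer>getInstance();

        // Edges incident to the same endpoints receive distinct indices
        // (0 and 1 here, in some order), which renderers can use to fan them out.
        System.out.println(index.getIndex(g, 1));
        System.out.println(index.getIndex(g, 2));
    }
}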
- * @param e - */ - public void reset(Graph graph, E e) { - Pair endpoints = graph.getEndpoints(e); - getIndex(graph, e, endpoints.getFirst()); - getIndex(graph, e, endpoints.getFirst(), endpoints.getSecond()); - } - - /** - * Clears all edge indices for all edges in all graphs. - * Does not recalculate the indices. - */ - public void reset() - { - edge_index.clear(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/util/Pair.java b/gui/jung-src/edu/uci/ics/jung/graph/util/Pair.java deleted file mode 100644 index d147af59..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/util/Pair.java +++ /dev/null @@ -1,247 +0,0 @@ -/* - * Created on Apr 2, 2006 - * - * Copyright (c) 2006, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph.util; - -import java.io.Serializable; -import java.util.Collection; -import java.util.Iterator; - - -/** - * An implementation of Collection that stores exactly - * 2 non-null objects and is not mutable. They respect equals - * and may be used as indices or map keys.

          - * Note that they do not protect from malevolent behavior: if one or another - * object in the tuple is mutable, then it can be changed with the usual bad - * effects. - */ -@SuppressWarnings("serial") -public final class Pair implements Collection, Serializable -{ - private T first; - private T second; - - /** - * Creates a Pair from the specified elements. - * @param value1 the first value in the new Pair - * @param value2 the second value in the new Pair - * @throws IllegalArgumentException if either argument is null - */ - public Pair(T value1, T value2) - { - if(value1 == null || value2 == null) - throw new IllegalArgumentException("Pair cannot contain null values"); - first = value1; - second = value2; - } - - /** - * Creates a Pair from the passed Collection. - * The size of the Collection must be 2. - * @param values the elements of the new Pair - * @throws IllegalArgumentException if the input collection is null, - * contains null values, or has != 2 elements. - */ - public Pair(Collection values) - { - if (values == null) - throw new IllegalArgumentException("Input collection cannot be null"); - if (values.size() == 2) - { - if(values.contains(null)) - throw new IllegalArgumentException("Pair cannot contain null values"); - Iterator iter = values.iterator(); - first = iter.next(); - second = iter.next(); - } - else - throw new IllegalArgumentException("Pair may only be created from a Collection of exactly 2 elements"); - - } - - /** - * Creates a Pair from the passed array. - * The size of the array must be 2. - * @throws IllegalArgumentException if the input array is null, - * contains null values, or has != 2 elements. - */ - public Pair(T[] values) - { - if (values == null) - throw new IllegalArgumentException("Input array cannot be null"); - if (values.length == 2) - { - if(values[0] == null || values[1] == null) - throw new IllegalArgumentException("Pair cannot contain null values"); - first = values[0]; - second = values[1]; - } - else - throw new IllegalArgumentException("Pair may only be created from an " + - "array of 2 elements"); - } - - /** - * Returns the first element. - */ - public T getFirst() - { - return first; - } - - /** - * Returns the second element. - */ - public T getSecond() - { - return second; - } - - @SuppressWarnings("unchecked") - @Override - public boolean equals( Object o ) { - if (o == this) - return true; - - if (o instanceof Pair) { - Pair otherPair = (Pair) o; - Object otherFirst = otherPair.getFirst(); - Object otherSecond = otherPair.getSecond(); - return - (this.first == otherFirst || - (this.first != null && this.first.equals(otherFirst))) - && - (this.second == otherSecond || - (this.second != null && this.second.equals(otherSecond))); - } else { - return false; - } - } - - @Override - public int hashCode() - { - int hashCode = 1; - hashCode = 31*hashCode + (first==null ? 0 : first.hashCode()); - hashCode = 31*hashCode + (second==null ? 
0 : second.hashCode()); - return hashCode; - } - - @Override - public String toString() - { - return "<" + first.toString() + ", " + second.toString() + ">"; - } - - public boolean add(T o) { - throw new UnsupportedOperationException("Pairs cannot be mutated"); - } - - public boolean addAll(Collection c) { - throw new UnsupportedOperationException("Pairs cannot be mutated"); - } - - public void clear() { - throw new UnsupportedOperationException("Pairs cannot be mutated"); - } - - public boolean contains(Object o) { - return (first == o || first.equals(o) || second == o || second.equals(o)); - } - - public boolean containsAll(Collection c) { - if (c.size() > 2) - return false; - Iterator iter = c.iterator(); - Object c_first = iter.next(); - Object c_second = iter.next(); - return this.contains(c_first) && this.contains(c_second); - } - - public boolean isEmpty() { - return false; - } - - public Iterator iterator() { - return new PairIterator(); - } - - public boolean remove(Object o) { - throw new UnsupportedOperationException("Pairs cannot be mutated"); - } - - public boolean removeAll(Collection c) { - throw new UnsupportedOperationException("Pairs cannot be mutated"); - } - - public boolean retainAll(Collection c) { - throw new UnsupportedOperationException("Pairs cannot be mutated"); - } - - public int size() { - return 2; - } - - public Object[] toArray() { - Object[] to_return = new Object[2]; - to_return[0] = first; - to_return[1] = second; - return to_return; - } - - @SuppressWarnings("unchecked") - public S[] toArray(S[] a) { - S[] to_return = a; - Class type = a.getClass().getComponentType(); - if (a.length < 2) - to_return = (S[])java.lang.reflect.Array.newInstance(type, 2); - to_return[0] = (S)first; - to_return[1] = (S)second; - - if (to_return.length > 2) - to_return[2] = null; - return to_return; - } - - private class PairIterator implements Iterator - { - int position; - - private PairIterator() - { - position = 0; - } - - public boolean hasNext() - { - return position < 2; - } - - public T next() - { - position++; - if (position == 1) - return first; - else if (position == 2) - return second; - else - return null; - } - - public void remove() - { - throw new UnsupportedOperationException("Pairs cannot be mutated"); - } - } -} - - diff --git a/gui/jung-src/edu/uci/ics/jung/graph/util/TestGraphs.java b/gui/jung-src/edu/uci/ics/jung/graph/util/TestGraphs.java deleted file mode 100644 index 50774c2e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/util/TestGraphs.java +++ /dev/null @@ -1,241 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Jul 2, 2003 - * - */ -package edu.uci.ics.jung.graph.util; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.DirectedSparseMultigraph; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.SparseMultigraph; -import edu.uci.ics.jung.graph.UndirectedGraph; -import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; - -/** - * Provides generators for several different test graphs. - */ -public class TestGraphs { - - /** - * A series of pairs that may be useful for generating graphs. 
The - * miniature graph consists of 8 edges, 10 nodes, and is formed of two - * connected components, one of 8 nodes, the other of 2. - * - */ - public static String[][] pairs = { { "a", "b", "3" }, { - "a", "c", "4" }, { - "a", "d", "5" }, { - "d", "c", "6" }, { - "d", "e", "7" }, { - "e", "f", "8" }, { - "f", "g", "9" }, { - "h", "i", "1" } - }; - - /** - * Creates a small sample graph that can be used for testing purposes. The - * graph is as described in the section on {@link #pairs pairs}. If isDirected, - * the graph is a {@link DirectedSparseMultigraph DirectedSparseMultigraph}, - * otherwise, it is an {@link UndirectedSparseMultigraph UndirectedSparseMultigraph}. - * - * @return a graph consisting of eight edges and ten nodes. - */ - public static Graph createTestGraph(boolean directed) { - Graph graph = null; - if(directed) { - graph = new DirectedSparseMultigraph(); - } else { - graph = new UndirectedSparseMultigraph(); - } - - for (int i = 0; i < pairs.length; i++) { - String[] pair = pairs[i]; - graph.addEdge(Integer.parseInt(pair[2]), pair[0], pair[1]); - } - return graph; - } - - /** - * Returns a graph consisting of a chain of vertex_count - 1 vertices - * plus one isolated vertex. - */ - public static Graph createChainPlusIsolates(int chain_length, int isolate_count) - { - Graph g = new UndirectedSparseMultigraph(); - if (chain_length > 0) - { - String[] v = new String[chain_length]; - v[0] = "v"+0; - g.addVertex(v[0]); - for (int i = 1; i < chain_length; i++) - { - v[i] = "v"+i; - g.addVertex(v[i]); - g.addEdge(new Double(Math.random()), v[i], v[i-1]); - } - } - for (int i = 0; i < isolate_count; i++) { - String v = "v"+(chain_length+i); - g.addVertex(v); - } - return g; - } - - /** - * Creates a sample directed acyclic graph by generating several "layers", - * and connecting nodes (randomly) to nodes in earlier (but never later) - * layers. Each layer has some random number of nodes in it 1 less than n - * less than maxNodesPerLayer. - * - * @return the created graph - */ - public static Graph createDirectedAcyclicGraph( - int layers, - int maxNodesPerLayer, - double linkprob) { - - DirectedGraph dag = new DirectedSparseMultigraph(); - Set previousLayers = new HashSet(); - Set inThisLayer = new HashSet(); - for (int i = 0; i < layers; i++) { - - int nodesThisLayer = (int) (Math.random() * maxNodesPerLayer) + 1; - for (int j = 0; j < nodesThisLayer; j++) { - String v = i+":"+j; - dag.addVertex(v); - inThisLayer.add(v); - // for each previous node... - for(String v2 : previousLayers) { - if (Math.random() < linkprob) { - Double de = new Double(Math.random()); - dag.addEdge(de, v, v2); - } - } - } - - previousLayers.addAll(inThisLayer); - inThisLayer.clear(); - } - return dag; - } - - private static void createEdge( - Graph g, - String v1Label, - String v2Label, - int weight) { - g.addEdge(new Double(Math.random()), v1Label, v2Label); - } - - /** - * Returns a bigger, undirected test graph with a just one component. This - * graph consists of a clique of ten edges, a partial clique (randomly - * generated, with edges of 0.6 probability), and one series of edges - * running from the first node to the last. 
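As a quick illustration of the generators in this file, a hedged sketch follows; the <String, Number> type parameters are assumed from the usual JUNG 2 signatures, which the extraction above has stripped.

import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.util.TestGraphs;

public class TestGraphsDemo {
    public static void main(String[] args) {
        // Directed version of the 10-node / 8-edge sample described above.
        Graph<String, Number> directed = TestGraphs.createTestGraph(true);
        System.out.println(directed.getVertexCount() + " vertices, "
                + directed.getEdgeCount() + " edges");

        // A 5-vertex chain plus 2 isolated vertices: 7 vertices in total.
        Graph<String, Number> chain = TestGraphs.createChainPlusIsolates(5, 2);
        System.out.println(chain.getVertexCount());
    }
}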
- * - * @return the testgraph - */ - public static Graph getOneComponentGraph() { - - UndirectedGraph g = new UndirectedSparseMultigraph(); - // let's throw in a clique, too - for (int i = 1; i <= 10; i++) { - for (int j = i + 1; j <= 10; j++) { - String i1 = "" + i; - String i2 = "" + j; - g.addEdge(Math.pow(i+2,j), i1, i2); - } - } - - // and, last, a partial clique - for (int i = 11; i <= 20; i++) { - for (int j = i + 1; j <= 20; j++) { - if (Math.random() > 0.6) - continue; - String i1 = "" + i; - String i2 = "" + j; - g.addEdge(Math.pow(i+2,j), i1, i2); - } - } - - List index = new ArrayList(); - index.addAll(g.getVertices()); - // and one edge to connect them all - for (int i = 0; i < index.size() - 1; i++) - g.addEdge(new Integer(i), index.get(i), index.get(i+1)); - - return g; - } - - /** - * Returns a bigger test graph with a clique, several components, and other - * parts. - * - * @return a demonstration graph of type UndirectedSparseMultigraph - * with 28 vertices. - */ - public static Graph getDemoGraph() { - UndirectedGraph g = - new UndirectedSparseMultigraph(); - - for (int i = 0; i < pairs.length; i++) { - String[] pair = pairs[i]; - createEdge(g, pair[0], pair[1], Integer.parseInt(pair[2])); - } - - // let's throw in a clique, too - for (int i = 1; i <= 10; i++) { - for (int j = i + 1; j <= 10; j++) { - String i1 = "c" + i; - String i2 = "c" + j; - g.addEdge(Math.pow(i+2,j), i1, i2); - } - } - - // and, last, a partial clique - for (int i = 11; i <= 20; i++) { - for (int j = i + 1; j <= 20; j++) { - if (Math.random() > 0.6) - continue; - String i1 = "p" + i; - String i2 = "p" + j; - g.addEdge(Math.pow(i+2,j), i1, i2); - } - } - return g; - } - - /** - * Returns a small graph with directed and undirected edges, and parallel edges. - */ - public static Graph getSmallGraph() { - Graph graph = - new SparseMultigraph(); - String[] v = new String[3]; - for (int i = 0; i < 3; i++) { - v[i] = String.valueOf(i); - graph.addVertex(v[i]); - } - graph.addEdge(new Double(0), v[0], v[1], EdgeType.DIRECTED); - graph.addEdge(new Double(.1), v[0], v[1], EdgeType.DIRECTED); - graph.addEdge(new Double(.2), v[0], v[1], EdgeType.DIRECTED); - graph.addEdge(new Double(.3), v[1], v[0], EdgeType.DIRECTED); - graph.addEdge(new Double(.4), v[1], v[0], EdgeType.DIRECTED); - graph.addEdge(new Double(.5), v[1], v[2]); - graph.addEdge(new Double(.6), v[1], v[2]); - - return graph; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/util/TreeUtils.java b/gui/jung-src/edu/uci/ics/jung/graph/util/TreeUtils.java deleted file mode 100644 index 172fd80f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/util/TreeUtils.java +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Created on Mar 3, 2007 - * - * Copyright (c) 2007, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.graph.util; - -import edu.uci.ics.jung.graph.Forest; -import edu.uci.ics.jung.graph.Tree; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -/** - * Contains static methods for operating on instances of Tree. - */ -public class TreeUtils -{ - /** - * Returns the roots of this forest. 
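A small sketch of getRoots in use. TreeUtils itself is the deleted class shown here; DelegateTree and its setRoot/addChild methods are assumptions taken from the wider JUNG 2 API (not part of this diff), and the labels are illustrative.

import java.util.List;

import edu.uci.ics.jung.graph.DelegateTree;
import edu.uci.ics.jung.graph.util.TreeUtils;

public class TreeUtilsDemo {
    public static void main(String[] args) {
        DelegateTree<String, Integer> tree = new DelegateTree<String, Integer>();
        tree.setRoot("root");
        tree.addChild(1, "root", "left");
        tree.addChild(2, "root", "right");

        // A single tree is also a Forest with exactly one root.
        List<String> roots = TreeUtils.getRoots(tree);
        System.out.println(roots);  // [root]
    }
}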
- * @param the vertex type - * @param the edge type - */ - public static List getRoots(Forest forest) - { - List roots = new ArrayList(); - for(Tree tree : forest.getTrees()) { - roots.add(tree.getRoot()); - } - return roots; - } - - /** - * Returns the subtree of tree which is rooted at root as a Forest. - * The tree returned is an independent entity, although it uses the same vertex and edge objects. - * @param the vertex type - * @param the edge type - * @param forest the tree whose subtree is to be extracted - * @param root the root of the subtree to be extracted - * @return the subtree of tree which is rooted at root - * @throws InstantiationException if a new tree of the same type cannot be created - * @throws IllegalAccessException - */ - @SuppressWarnings("unchecked") - public static Tree getSubTree(Forest forest, V root) throws InstantiationException, IllegalAccessException - { - if (!forest.containsVertex(root)) - throw new IllegalArgumentException("Specified tree does not contain the specified root as a vertex"); - Forest subforest = forest.getClass().newInstance(); - subforest.addVertex(root); - growSubTree(forest, subforest, root); - - return subforest.getTrees().iterator().next(); - } - - /** - * Populates subtree with the subtree of tree - * which is rooted at root. - * @param the vertex type - * @param the edge type - * @param tree the tree whose subtree is to be extracted - * @param subTree the tree instance which is to be populated with the subtree of tree - * @param root the root of the subtree to be extracted - */ - public static void growSubTree(Forest tree, Forest subTree, V root) { - if(tree.getSuccessorCount(root) > 0) { - Collection edges = tree.getOutEdges(root); - for(E e : edges) { - subTree.addEdge(e, tree.getEndpoints(e)); - } - Collection kids = tree.getSuccessors(root); - for(V kid : kids) { - growSubTree(tree, subTree, kid); - } - } - } - - /** - * Connects subTree to tree by attaching it as a child - * of node with edge connectingEdge. - * @param the vertex type - * @param the edge type - * @param tree the tree to which subTree is to be added - * @param subTree the tree which is to be grafted on to tree - * @param node the parent of subTree in its new position in tree - * @param connectingEdge the edge used to connect subtree's root as a child of node - */ - public static void addSubTree(Forest tree, Forest subTree, - V node, E connectingEdge) { - if (node != null && !tree.containsVertex(node)) - throw new IllegalArgumentException("Specified tree does not contain the specified node as a vertex"); - V root = subTree.getTrees().iterator().next().getRoot(); - addFromSubTree(tree, subTree, connectingEdge, node, root); - } - - /** - * Adds the trees in source to destination. - * source is left unchanged. The vertex and edge objects - * in source will also be used in destination, - * in the same (structural) roles. - * @param the vertex type - * @param the edge type - * @param destination the forest to which the trees in source - * will be added - * @param source the forest whose trees will be added to - * destination - * FIXME also note that this is redundant with DelegateForest.addTree() - * - */ -// public static void mergeForests(Forest destination, -// Forest source) -// { -// for (Tree tree : source.getTrees()) -// { -// V root = tree.getRoot(); -// // FIXME this is not done: addChildrenToForest is not yet complete -// // also still need to integrate into MST2, etc. 
(see email thread) -//// addChildrenToForest(destination, tree, root); -// for (E e: tree.getOutEdges(root)) -// { -// V child = tree.getOpposite(root, e); -// addFromSubTree(destination, source, e, root, child); -// } -// } -// } - - public static void addFromSubTree(Forest tree, Forest subTree, - E edge, V parent, V root) { - - // add edge connecting parent and root to tree - if(edge != null && parent != null) { - tree.addEdge(edge, parent, root); - } else { - tree.addVertex(root); - } - - Collection outEdges = subTree.getOutEdges(root); - for(E e : outEdges) { - V opposite = subTree.getOpposite(root, e); - addFromSubTree(tree, subTree, e, root, opposite); - } - } - - // FIXME: not done or integrated yet -// private static void addChildrenToForest(Forest forest, Tree tree, -// V subtree_root) -// { -// V parent = tree.getPredecessors(subtree_root).iterator().next(); -// for (E e : tree.getOutEdges(subtree_root)) -// { -// V child = tree.getOpposite(subtree_root, e); -// addChildrenToForest(forest, tree, child); -// } -// } -} diff --git a/gui/jung-src/edu/uci/ics/jung/graph/util/package.html b/gui/jung-src/edu/uci/ics/jung/graph/util/package.html deleted file mode 100644 index 38627e37..00000000 --- a/gui/jung-src/edu/uci/ics/jung/graph/util/package.html +++ /dev/null @@ -1,47 +0,0 @@ - - - - - - - -

Utility interfaces and classes for the JUNG API. These include:
          • Context: a wrapper for an element in the context of a specific graph
          • classes for maintaining edge indices (primarily for rendering)
          • Pair<T>: an implementation of Collection designed for two-element immutable collections
          • Graphs: facilitates the creation of special delegate types such as synchronized and unmodifiable graphs (see the sketch after this list)
          • TreeUtils: utilities for trees and forests (subtree extraction, grafting, merging, etc.)
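The synchronized and unmodifiable wrappers deleted earlier in this change have private constructors, so they are normally obtained through static factories on edu.uci.ics.jung.graph.util.Graphs. A minimal sketch, assuming the usual JUNG 2 factory names synchronizedGraph and unmodifiableGraph:

import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;
import edu.uci.ics.jung.graph.util.Graphs;

public class GraphsWrapperDemo {
    public static void main(String[] args) {
        Graph<String, Integer> g = new SparseMultigraph<String, Integer>();
        g.addVertex("a");
        g.addVertex("b");
        g.addEdge(1, "a", "b");

        // Thread-safe view: the wrapper's accessors are synchronized.
        Graph<String, Integer> safe = Graphs.synchronizedGraph(g);
        System.out.println(safe.degree("a"));       // 1

        // Read-only view: all mutators throw UnsupportedOperationException.
        Graph<String, Integer> frozen = Graphs.unmodifiableGraph(g);
        System.out.println(frozen.getEdgeCount());  // 1
        frozen.addVertex("c");                      // throws UnsupportedOperationException
    }
}

Both wrappers delegate every read to the underlying graph, so later changes made through the original instance are visible through the views.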
          - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/BasicTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/BasicTransformer.java deleted file mode 100644 index a0b39000..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/BasicTransformer.java +++ /dev/null @@ -1,235 +0,0 @@ -package edu.uci.ics.jung.visualization; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; - -import javax.swing.event.ChangeEvent; -import javax.swing.event.ChangeListener; - -import edu.uci.ics.jung.visualization.transform.MutableAffineTransformer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; -import edu.uci.ics.jung.visualization.transform.shape.ShapeTransformer; -import edu.uci.ics.jung.visualization.util.ChangeEventSupport; -import edu.uci.ics.jung.visualization.util.DefaultChangeEventSupport; - -/** - * A basic implementation of the MultiLayerTransformer interface that - * provides two Layers: VIEW and LAYOUT. It also provides ChangeEventSupport - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public class BasicTransformer implements MultiLayerTransformer, - ShapeTransformer, ChangeListener, ChangeEventSupport { - - protected ChangeEventSupport changeSupport = - new DefaultChangeEventSupport(this); - - protected MutableTransformer viewTransformer = - new MutableAffineTransformer(new AffineTransform()); - - protected MutableTransformer layoutTransformer = - new MutableAffineTransformer(new AffineTransform()); - - /** - * Creates an instance and notifies the view and layout transformers to listen to - * changes published by this instance. - */ - public BasicTransformer() { - super(); - viewTransformer.addChangeListener(this); - layoutTransformer.addChangeListener(this); - } - - /** - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#setViewTransformer(edu.uci.ics.jung.visualization.transform.MutableTransformer) - */ - protected void setViewTransformer(MutableTransformer transformer) { - this.viewTransformer.removeChangeListener(this); - this.viewTransformer = transformer; - this.viewTransformer.addChangeListener(this); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#setLayoutTransformer(edu.uci.ics.jung.visualization.transform.MutableTransformer) - */ - protected void setLayoutTransformer(MutableTransformer transformer) { - this.layoutTransformer.removeChangeListener(this); - this.layoutTransformer = transformer; - this.layoutTransformer.addChangeListener(this); - } - - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#getLayoutTransformer() - */ - protected MutableTransformer getLayoutTransformer() { - return layoutTransformer; - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#getViewTransformer() - */ - protected MutableTransformer getViewTransformer() { - return viewTransformer; - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#inverseTransform(java.awt.geom.Point2D) - */ - public Point2D inverseTransform(Point2D p) { - return inverseLayoutTransform(inverseViewTransform(p)); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#inverseViewTransform(java.awt.geom.Point2D) - */ - protected Point2D inverseViewTransform(Point2D p) { - return viewTransformer.inverseTransform(p); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#inverseLayoutTransform(java.awt.geom.Point2D) - */ - 
protected Point2D inverseLayoutTransform(Point2D p) { - return layoutTransformer.inverseTransform(p); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#transform(java.awt.geom.Point2D) - */ - public Point2D transform(Point2D p) { - return viewTransform(layoutTransform(p)); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#viewTransform(java.awt.geom.Point2D) - */ - protected Point2D viewTransform(Point2D p) { - return viewTransformer.transform(p); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#layoutTransform(java.awt.geom.Point2D) - */ - protected Point2D layoutTransform(Point2D p) { - return layoutTransformer.transform(p); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#inverseTransform(java.awt.Shape) - */ - public Shape inverseTransform(Shape shape) { - return inverseLayoutTransform(inverseViewTransform(shape)); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#inverseViewTransform(java.awt.Shape) - */ - protected Shape inverseViewTransform(Shape shape) { - return viewTransformer.inverseTransform(shape); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#inverseLayoutTransform(java.awt.Shape) - */ - protected Shape inverseLayoutTransform(Shape shape) { - return layoutTransformer.inverseTransform(shape); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#transform(java.awt.Shape) - */ - public Shape transform(Shape shape) { - return viewTransform(layoutTransform(shape)); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#viewTransform(java.awt.Shape) - */ - protected Shape viewTransform(Shape shape) { - return viewTransformer.transform(shape); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#layoutTransform(java.awt.Shape) - */ - protected Shape layoutTransform(Shape shape) { - return layoutTransformer.transform(shape); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.MultiLayerTransformer#setToIdentity() - */ - public void setToIdentity() { - layoutTransformer.setToIdentity(); - viewTransformer.setToIdentity(); - } - - /* (non-Javadoc) - */ - public void addChangeListener(ChangeListener l) { - changeSupport.addChangeListener(l); - } - - /* (non-Javadoc) - */ - public void removeChangeListener(ChangeListener l) { - changeSupport.removeChangeListener(l); - } - - /* (non-Javadoc) - */ - public ChangeListener[] getChangeListeners() { - return changeSupport.getChangeListeners(); - } - - /* (non-Javadoc) - */ - public void fireStateChanged() { - changeSupport.fireStateChanged(); - } - - /* (non-Javadoc) - */ - public void stateChanged(ChangeEvent e) { - fireStateChanged(); - } - - public MutableTransformer getTransformer(Layer layer) { - if(layer == Layer.LAYOUT) return layoutTransformer; - if(layer == Layer.VIEW) return viewTransformer; - return null; - } - - public Point2D inverseTransform(Layer layer, Point2D p) { - if(layer == Layer.LAYOUT) return inverseLayoutTransform(p); - if(layer == Layer.VIEW) return inverseViewTransform(p); - return null; - } - - public void setTransformer(Layer layer, MutableTransformer transformer) { - if(layer == Layer.LAYOUT) setLayoutTransformer(transformer); - if(layer == Layer.VIEW) setViewTransformer(transformer); - - } - - public Point2D transform(Layer layer, Point2D p) { - if(layer == Layer.LAYOUT) return 
layoutTransform(p); - if(layer == Layer.VIEW) return viewTransform(p); - return null; - } - - public Shape transform(Layer layer, Shape shape) { - if(layer == Layer.LAYOUT) return layoutTransform(shape); - if(layer == Layer.VIEW) return viewTransform(shape); - return null; - } - - public Shape inverseTransform(Layer layer, Shape shape) { - if(layer == Layer.LAYOUT) return inverseLayoutTransform(shape); - if(layer == Layer.VIEW) return inverseViewTransform(shape); - return null; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/BasicVisualizationServer.java b/gui/jung-src/edu/uci/ics/jung/visualization/BasicVisualizationServer.java deleted file mode 100644 index 33882ba2..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/BasicVisualizationServer.java +++ /dev/null @@ -1,546 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.visualization; - -import java.awt.Color; -import java.awt.Dimension; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.RenderingHints; -import java.awt.RenderingHints.Key; -import java.awt.event.ComponentAdapter; -import java.awt.event.ComponentEvent; -import java.awt.event.ItemEvent; -import java.awt.event.ItemListener; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; -import java.awt.image.BufferedImage; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import javax.swing.JPanel; -import javax.swing.event.ChangeEvent; -import javax.swing.event.ChangeListener; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.control.ScalingControl; -import edu.uci.ics.jung.visualization.decorators.PickableEdgePaintTransformer; -import edu.uci.ics.jung.visualization.decorators.PickableVertexPaintTransformer; -import edu.uci.ics.jung.visualization.picking.MultiPickedState; -import edu.uci.ics.jung.visualization.picking.PickedState; -import edu.uci.ics.jung.visualization.picking.ShapePickSupport; -import edu.uci.ics.jung.visualization.renderers.BasicRenderer; -import edu.uci.ics.jung.visualization.renderers.Renderer; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; -import edu.uci.ics.jung.visualization.util.Caching; -import edu.uci.ics.jung.visualization.util.ChangeEventSupport; -import edu.uci.ics.jung.visualization.util.DefaultChangeEventSupport; - -/** - * A class that maintains many of the details necessary for creating - * visualizations of graphs. - * This is the old VisualizationViewer without tooltips and mouse behaviors. Its purpose is - * to be a base class that can also be used on the server side of a multi-tiered application. 
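A minimal sketch of standing this component up, using the two-argument constructor defined below; CircleLayout, TestGraphs.getDemoGraph() and the Swing scaffolding are illustrative choices, and the generic parameters are assumed from the JUNG 2 signatures.

import java.awt.Dimension;

import javax.swing.JFrame;

import edu.uci.ics.jung.algorithms.layout.CircleLayout;
import edu.uci.ics.jung.algorithms.layout.Layout;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.util.TestGraphs;
import edu.uci.ics.jung.visualization.BasicVisualizationServer;

public class BasicServerDemo {
    public static void main(String[] args) {
        Graph<String, Number> graph = TestGraphs.getDemoGraph();
        Layout<String, Number> layout = new CircleLayout<String, Number>(graph);
        layout.setSize(new Dimension(400, 400));

        BasicVisualizationServer<String, Number> vv =
            new BasicVisualizationServer<String, Number>(layout, new Dimension(400, 400));

        // The server is a JPanel, so it drops into any Swing container.
        JFrame frame = new JFrame("BasicVisualizationServer sketch");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.getContentPane().add(vv);
        frame.pack();
        frame.setVisible(true);
    }
}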
- * - * @author Joshua O'Madadhain - * @author Tom Nelson - * @author Danyel Fisher - */ -@SuppressWarnings("serial") -public class BasicVisualizationServer extends JPanel - implements ChangeListener, ChangeEventSupport, VisualizationServer{ - - protected ChangeEventSupport changeSupport = - new DefaultChangeEventSupport(this); - - /** - * holds the state of this View - */ - protected VisualizationModel model; - - /** - * handles the actual drawing of graph elements - */ - protected Renderer renderer = new BasicRenderer(); - - /** - * rendering hints used in drawing. Anti-aliasing is on - * by default - */ - protected Map renderingHints = new HashMap(); - - /** - * holds the state of which vertices of the graph are - * currently 'picked' - */ - protected PickedState pickedVertexState; - - /** - * holds the state of which edges of the graph are - * currently 'picked' - */ - protected PickedState pickedEdgeState; - - /** - * a listener used to cause pick events to result in - * repaints, even if they come from another view - */ - protected ItemListener pickEventListener; - - /** - * an offscreen image to render the graph - * Used if doubleBuffered is set to true - */ - protected BufferedImage offscreen; - - /** - * graphics context for the offscreen image - * Used if doubleBuffered is set to true - */ - protected Graphics2D offscreenG2d; - - /** - * user-settable choice to use the offscreen image - * or not. 'false' by default - */ - protected boolean doubleBuffered; - - /** - * a collection of user-implementable functions to render under - * the topology (before the graph is rendered) - */ - protected List preRenderers = new ArrayList(); - - /** - * a collection of user-implementable functions to render over the - * topology (after the graph is rendered) - */ - protected List postRenderers = new ArrayList(); - - protected RenderContext renderContext = new PluggableRenderContext(); - - /** - * Create an instance with passed parameters. - * - * @param layout The Layout to apply, with its associated Graph - * @param renderer The Renderer to draw it with - */ - public BasicVisualizationServer(Layout layout) { - this(new DefaultVisualizationModel(layout)); - } - - /** - * Create an instance with passed parameters. - * - * @param layout The Layout to apply, with its associated Graph - * @param renderer The Renderer to draw it with - * @param preferredSize the preferred size of this View - */ - public BasicVisualizationServer(Layout layout, Dimension preferredSize) { - this(new DefaultVisualizationModel(layout, preferredSize), preferredSize); - } - - /** - * Create an instance with passed parameters. - * - * @param model - * @param renderer - */ - public BasicVisualizationServer(VisualizationModel model) { - this(model, new Dimension(600,600)); - } - /** - * Create an instance with passed parameters. 
- * - * @param model - * @param renderer - * @param preferredSize initial preferred size of the view - */ - @SuppressWarnings("unchecked") - public BasicVisualizationServer(VisualizationModel model, - Dimension preferredSize) { - this.model = model; -// renderContext.setScreenDevice(this); - model.addChangeListener(this); - setDoubleBuffered(false); - this.addComponentListener(new VisualizationListener(this)); - - setPickSupport(new ShapePickSupport(this)); - setPickedVertexState(new MultiPickedState()); - setPickedEdgeState(new MultiPickedState()); - - renderContext.setEdgeDrawPaintTransformer(new PickableEdgePaintTransformer(getPickedEdgeState(), Color.black, Color.cyan)); - renderContext.setVertexFillPaintTransformer(new PickableVertexPaintTransformer(getPickedVertexState(), - Color.red, Color.yellow)); - - setPreferredSize(preferredSize); - renderingHints.put(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); - - renderContext.getMultiLayerTransformer().addChangeListener(this); - } - - @Override - public void setDoubleBuffered(boolean doubleBuffered) { - this.doubleBuffered = doubleBuffered; - } - - @Override - public boolean isDoubleBuffered() { - return doubleBuffered; - } - - /** - * Always sanity-check getSize so that we don't use a - * value that is improbable - * @see java.awt.Component#getSize() - */ - @Override - public Dimension getSize() { - Dimension d = super.getSize(); - if(d.width <= 0 || d.height <= 0) { - d = getPreferredSize(); - } - return d; - } - - /** - * Ensure that, if doubleBuffering is enabled, the offscreen - * image buffer exists and is the correct size. - * @param d - */ - protected void checkOffscreenImage(Dimension d) { - if(doubleBuffered) { - if(offscreen == null || offscreen.getWidth() != d.width || offscreen.getHeight() != d.height) { - offscreen = new BufferedImage(d.width, d.height, BufferedImage.TYPE_INT_ARGB); - offscreenG2d = offscreen.createGraphics(); - } - } - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.VisualizationServer#getModel() - */ - public VisualizationModel getModel() { - return model; - } - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.VisualizationServer#setModel(edu.uci.ics.jung.visualization.VisualizationModel) - */ - public void setModel(VisualizationModel model) { - this.model = model; - } - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.VisualizationServer#stateChanged(javax.swing.event.ChangeEvent) - */ - public void stateChanged(ChangeEvent e) { - repaint(); - fireStateChanged(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.VisualizationServer#setRenderer(edu.uci.ics.jung.visualization.Renderer) - */ - public void setRenderer(Renderer r) { - this.renderer = r; - repaint(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.VisualizationServer#getRenderer() - */ - public Renderer getRenderer() { - return renderer; - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.VisualizationServer#setGraphLayout(edu.uci.ics.jung.visualization.layout.Layout) - */ - public void setGraphLayout(Layout layout) { - Dimension viewSize = getPreferredSize(); - if(this.isShowing()) { - viewSize = getSize(); - } - model.setGraphLayout(layout, viewSize); - } - - public void scaleToLayout(ScalingControl scaler) { - Dimension vd = getPreferredSize(); - if(this.isShowing()) { - vd = getSize(); - } - Dimension ld = getGraphLayout().getSize(); - if(vd.equals(ld) == false) { - scaler.scale(this, (float)(vd.getWidth()/ld.getWidth()), new Point2D.Double()); 
- } - } - - public Layout getGraphLayout() { - return model.getGraphLayout(); - } - - @Override - public void setVisible(boolean aFlag) { - super.setVisible(aFlag); - if(aFlag == true) { - Dimension d = this.getSize(); - if(d.width <= 0 || d.height <= 0) { - d = this.getPreferredSize(); - } - model.getGraphLayout().setSize(d); - } - } - - public Map getRenderingHints() { - return renderingHints; - } - - public void setRenderingHints(Map renderingHints) { - this.renderingHints = renderingHints; - } - - @Override - protected void paintComponent(Graphics g) { - super.paintComponent(g); - - Graphics2D g2d = (Graphics2D)g; - if(doubleBuffered) { - checkOffscreenImage(getSize()); - renderGraph(offscreenG2d); - g2d.drawImage(offscreen, null, 0, 0); - } else { - renderGraph(g2d); - } - } - - protected void renderGraph(Graphics2D g2d) { - if(renderContext.getGraphicsContext() == null) { - renderContext.setGraphicsContext(new GraphicsDecorator(g2d)); - } else { - renderContext.getGraphicsContext().setDelegate(g2d); - } - renderContext.setScreenDevice(this); - Layout layout = model.getGraphLayout(); - - g2d.setRenderingHints(renderingHints); - - // the size of the VisualizationViewer - Dimension d = getSize(); - - // clear the offscreen image - g2d.setColor(getBackground()); - g2d.fillRect(0,0,d.width,d.height); - - AffineTransform oldXform = g2d.getTransform(); - AffineTransform newXform = new AffineTransform(oldXform); - newXform.concatenate( - renderContext.getMultiLayerTransformer().getTransformer(Layer.VIEW).getTransform()); -// viewTransformer.getTransform()); - - g2d.setTransform(newXform); - - // if there are preRenderers set, paint them - for(Paintable paintable : preRenderers) { - - if(paintable.useTransform()) { - paintable.paint(g2d); - } else { - g2d.setTransform(oldXform); - paintable.paint(g2d); - g2d.setTransform(newXform); - } - } - - if(layout instanceof Caching) { - ((Caching)layout).clear(); - } - - renderer.render(renderContext, layout); - - // if there are postRenderers set, do it - for(Paintable paintable : postRenderers) { - - if(paintable.useTransform()) { - paintable.paint(g2d); - } else { - g2d.setTransform(oldXform); - paintable.paint(g2d); - g2d.setTransform(newXform); - } - } - g2d.setTransform(oldXform); - } - - /** - * VisualizationListener reacts to changes in the size of the - * VisualizationViewer. When the size changes, it ensures - * that the offscreen image is sized properly. - * If the layout is locked to this view size, then the layout - * is also resized to be the same as the view size. 
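The pre- and post-render hooks consumed in renderGraph above take Paintable callbacks. A hypothetical overlay sketch, assuming the callback interface is the nested VisualizationServer.Paintable with the paint/useTransform methods invoked in the loop above:

import java.awt.Color;
import java.awt.Graphics;

import edu.uci.ics.jung.visualization.VisualizationServer;

// Draws a fixed banner on top of the rendered graph, ignoring the view transform.
public class BannerOverlay implements VisualizationServer.Paintable {
    public void paint(Graphics g) {
        g.setColor(Color.DARK_GRAY);
        g.drawString("rendered by BasicVisualizationServer", 10, 15);
    }

    public boolean useTransform() {
        return false;  // paint in screen coordinates, not layout coordinates
    }
}

// Registration, with vv as in the earlier sketch:
//   vv.addPostRenderPaintable(new BannerOverlay());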
- * - * - */ - protected class VisualizationListener extends ComponentAdapter { - protected BasicVisualizationServer vv; - public VisualizationListener(BasicVisualizationServer vv) { - this.vv = vv; - } - - /** - * create a new offscreen image for the graph - * whenever the window is resied - */ - @Override - public void componentResized(ComponentEvent e) { - Dimension d = vv.getSize(); - if(d.width <= 0 || d.height <= 0) return; - checkOffscreenImage(d); - repaint(); - } - } - - public void addPreRenderPaintable(Paintable paintable) { - if(preRenderers == null) { - preRenderers = new ArrayList(); - } - preRenderers.add(paintable); - } - - public void prependPreRenderPaintable(Paintable paintable) { - if(preRenderers == null) { - preRenderers = new ArrayList(); - } - preRenderers.add(0,paintable); - } - - public void removePreRenderPaintable(Paintable paintable) { - if(preRenderers != null) { - preRenderers.remove(paintable); - } - } - - public void addPostRenderPaintable(Paintable paintable) { - if(postRenderers == null) { - postRenderers = new ArrayList(); - } - postRenderers.add(paintable); - } - - public void prependPostRenderPaintable(Paintable paintable) { - if(postRenderers == null) { - postRenderers = new ArrayList(); - } - postRenderers.add(0,paintable); - } - - public void removePostRenderPaintable(Paintable paintable) { - if(postRenderers != null) { - postRenderers.remove(paintable); - } - } - - public void addChangeListener(ChangeListener l) { - changeSupport.addChangeListener(l); - } - - public void removeChangeListener(ChangeListener l) { - changeSupport.removeChangeListener(l); - } - - public ChangeListener[] getChangeListeners() { - return changeSupport.getChangeListeners(); - } - - public void fireStateChanged() { - changeSupport.fireStateChanged(); - } - - public PickedState getPickedVertexState() { - return pickedVertexState; - } - - public PickedState getPickedEdgeState() { - return pickedEdgeState; - } - - public void setPickedVertexState(PickedState pickedVertexState) { - if(pickEventListener != null && this.pickedVertexState != null) { - this.pickedVertexState.removeItemListener(pickEventListener); - } - this.pickedVertexState = pickedVertexState; - this.renderContext.setPickedVertexState(pickedVertexState); - if(pickEventListener == null) { - pickEventListener = new ItemListener() { - - public void itemStateChanged(ItemEvent e) { - repaint(); - } - }; - } - pickedVertexState.addItemListener(pickEventListener); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.VisualizationServer#setPickedEdgeState(edu.uci.ics.jung.visualization.picking.PickedState) - */ - public void setPickedEdgeState(PickedState pickedEdgeState) { - if(pickEventListener != null && this.pickedEdgeState != null) { - this.pickedEdgeState.removeItemListener(pickEventListener); - } - this.pickedEdgeState = pickedEdgeState; - this.renderContext.setPickedEdgeState(pickedEdgeState); - if(pickEventListener == null) { - pickEventListener = new ItemListener() { - - public void itemStateChanged(ItemEvent e) { - repaint(); - } - }; - } - pickedEdgeState.addItemListener(pickEventListener); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.VisualizationServer#getPickSupport() - */ - public GraphElementAccessor getPickSupport() { - return renderContext.getPickSupport(); - } - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.VisualizationServer#setPickSupport(edu.uci.ics.jung.visualization.GraphElementAccessor) - */ - public void setPickSupport(GraphElementAccessor 
pickSupport) { - renderContext.setPickSupport(pickSupport); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.VisualizationServer#getCenter() - */ - public Point2D getCenter() { - Dimension d = getSize(); - return new Point2D.Float(d.width/2, d.height/2); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.VisualizationServer#getRenderContext() - */ - public RenderContext getRenderContext() { - return renderContext; - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.VisualizationServer#setRenderContext(edu.uci.ics.jung.visualization.RenderContext) - */ - public void setRenderContext(RenderContext renderContext) { - this.renderContext = renderContext; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/DefaultVisualizationModel.java b/gui/jung-src/edu/uci/ics/jung/visualization/DefaultVisualizationModel.java deleted file mode 100644 index 77e2b2a7..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/DefaultVisualizationModel.java +++ /dev/null @@ -1,188 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.visualization; - -import java.awt.Dimension; - -import javax.swing.event.ChangeEvent; -import javax.swing.event.ChangeListener; -import javax.swing.event.EventListenerList; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.algorithms.layout.util.Relaxer; -import edu.uci.ics.jung.algorithms.layout.util.VisRunner; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.visualization.layout.ObservableCachingLayout; -import edu.uci.ics.jung.visualization.util.ChangeEventSupport; -import edu.uci.ics.jung.visualization.util.DefaultChangeEventSupport; - -/** - * The model containing state values for - * visualizations of graphs. - * Refactored and extracted from the 1.6.0 version of VisualizationViewer - * - * @author Tom Nelson - */ -public class DefaultVisualizationModel implements VisualizationModel, ChangeEventSupport { - - ChangeEventSupport changeSupport = new DefaultChangeEventSupport(this); - - /** - * manages the thread that applies the current layout algorithm - */ - protected Relaxer relaxer; - - /** - * the layout algorithm currently in use - */ - protected Layout layout; - - /** - * listens for changes in the layout, forwards to the viewer - * - */ - protected ChangeListener changeListener; - - /** - * - * @param layout The Layout to apply, with its associated Graph - */ - public DefaultVisualizationModel(Layout layout) { - this(layout, null); - } - - /** - * - * @param layout - * @param d The preferred size of the View that will display this graph - */ - public DefaultVisualizationModel(Layout layout, Dimension d) { - if(changeListener == null) { - changeListener = new ChangeListener() { - public void stateChanged(ChangeEvent e) { - fireStateChanged(); - } - }; - } - setGraphLayout(layout, d); - } - - /** - * Removes the current graph layout, and adds a new one. 
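For instance, swapping the layout at runtime and observing the resulting change events could look like the sketch below. FRLayout and the <String, Number> type parameters are assumptions; the constructor, setGraphLayout and addChangeListener calls are the ones declared in this file.

import java.awt.Dimension;

import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import edu.uci.ics.jung.algorithms.layout.FRLayout;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.util.TestGraphs;
import edu.uci.ics.jung.visualization.DefaultVisualizationModel;

public class ModelDemo {
    public static void main(String[] args) {
        Graph<String, Number> graph = TestGraphs.getDemoGraph();
        DefaultVisualizationModel<String, Number> model =
            new DefaultVisualizationModel<String, Number>(
                new FRLayout<String, Number>(graph), new Dimension(600, 600));

        // Views register here so they can repaint when the layout changes.
        model.addChangeListener(new ChangeListener() {
            public void stateChanged(ChangeEvent e) {
                System.out.println("layout changed");
            }
        });

        // Replacing the layout fires a change event to all registered listeners.
        model.setGraphLayout(new FRLayout<String, Number>(graph), new Dimension(600, 600));
    }
}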
- * @param layout the new layout to use - * @param viewSize the size of the View that will display this layout - */ - public void setGraphLayout(Layout layout, Dimension viewSize) { - // remove listener from old layout - if(this.layout != null && this.layout instanceof ChangeEventSupport) { - ((ChangeEventSupport)this.layout).removeChangeListener(changeListener); - } - // set to new layout - if(layout instanceof ChangeEventSupport) { - this.layout = layout; - } else { - this.layout = new ObservableCachingLayout(layout); - } - - ((ChangeEventSupport)this.layout).addChangeListener(changeListener); - - if(viewSize == null) { - viewSize = new Dimension(600,600); - } - Dimension layoutSize = layout.getSize(); - // if the layout has NOT been initialized yet, initialize its size - // now to the size of the VisualizationViewer window - if(layoutSize == null) { - layout.setSize(viewSize); - } - if(relaxer != null) { - relaxer.stop(); - relaxer = null; - } - if(layout instanceof IterativeContext) { - layout.initialize(); - if(relaxer == null) { - relaxer = new VisRunner((IterativeContext)this.layout); - relaxer.prerelax(); - relaxer.relax(); - } - } - fireStateChanged(); - } - - /** - * set the graph Layout and if it is not already initialized, initialize - * it to the default VisualizationViewer preferred size of 600x600 - */ - public void setGraphLayout(Layout layout) { - setGraphLayout(layout, null); - } - - /** - * Returns the current graph layout. - */ - public Layout getGraphLayout() { - return layout; - } - - /** - * @return the relaxer - */ - public Relaxer getRelaxer() { - return relaxer; - } - - /** - * @param relaxer the relaxer to set - */ - public void setRelaxer(VisRunner relaxer) { - this.relaxer = relaxer; - } - - /** - * Adds a ChangeListener. - * @param l the listener to be added - */ - public void addChangeListener(ChangeListener l) { - changeSupport.addChangeListener(l); - } - - /** - * Removes a ChangeListener. - * @param l the listener to be removed - */ - public void removeChangeListener(ChangeListener l) { - changeSupport.removeChangeListener(l); - } - - /** - * Returns an array of all the ChangeListeners added - * with addChangeListener(). - * - * @return all of the ChangeListeners added or an empty - * array if no listeners have been added - */ - public ChangeListener[] getChangeListeners() { - return changeSupport.getChangeListeners(); - } - - /** - * Notifies all listeners that have registered interest for - * notification on this event type. The event instance - * is lazily created. - * The primary listeners will be views that need to be repainted - * because of changes in this model instance - * @see EventListenerList - */ - public void fireStateChanged() { - changeSupport.fireStateChanged(); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/FourPassImageShaper.java b/gui/jung-src/edu/uci/ics/jung/visualization/FourPassImageShaper.java deleted file mode 100644 index 0f31792b..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/FourPassImageShaper.java +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Jun 17, 2005 - */ - -package edu.uci.ics.jung.visualization; - -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.Image; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Area; -import java.awt.geom.GeneralPath; -import java.awt.geom.Line2D; -import java.awt.geom.Point2D; -import java.awt.image.BufferedImage; -import java.io.IOException; - -import javax.imageio.ImageIO; - -/** - * Provides factory methods that, given a BufferedImage, an Image, - * or the fileName of an image, will return a java.awt.Shape that - * is the contiguous traced outline of the opaque part of the image. - * This could be used to define an image for use in a Vertex, where - * the shape used for picking and edge-arrow placement follows the - * opaque part of an image that has a transparent background. - * The methods try to detect lines in order to minimize points - * in the path - * - * @author Tom Nelson - * - * - */ -public class FourPassImageShaper { - - /** - * given the fileName of an image, possibly with a transparent - * background, return the Shape of the opaque part of the image - * @param fileName name of the image, loaded from the classpath - * @return the Shape - */ - public static Shape getShape(String fileName) { - return getShape(fileName, Integer.MAX_VALUE); - } - public static Shape getShape(String fileName, int max) { - BufferedImage image = null; - try { - image = ImageIO.read(FourPassImageShaper.class.getResource(fileName)); - } catch(IOException ex) { - ex.printStackTrace(); - } - return getShape(image, max); - } - - /** - * Given an image, possibly with a transparent background, return - * the Shape of the opaque part of the image - * @param image - * @return the Shape - */ - public static Shape getShape(Image image) { - return getShape(image, Integer.MAX_VALUE); - } - public static Shape getShape(Image image, int max) { - BufferedImage bi = - new BufferedImage(image.getWidth(null), image.getHeight(null), - BufferedImage.TYPE_INT_ARGB); - Graphics g = bi.createGraphics(); - g.drawImage(image, 0, 0, null); - g.dispose(); - return getShape(bi, max); - } - - /** - * Given an image, possibly with a transparent background, return - * the Shape of the opaque part of the image - * - * If the image is larger than max in either direction, scale the - * image down to max-by-max, do the trace (on fewer points) then - * scale the resulting shape back up to the size of the original - * image. 
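A quick sketch of tracing an icon's opaque outline with the helpers in this file; the image path is hypothetical, and the two-argument getShape overload is the one defined below.

import java.awt.Shape;
import java.awt.image.BufferedImage;
import java.io.File;

import javax.imageio.ImageIO;

import edu.uci.ics.jung.visualization.FourPassImageShaper;

public class ImageShapeDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical icon with a transparent background.
        BufferedImage icon = ImageIO.read(new File("icons/vertex.png"));

        // Trace on at most a 50x50 downscale, then scale the outline back up.
        Shape outline = FourPassImageShaper.getShape(icon, 50);
        System.out.println(outline.getBounds());
    }
}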
- * - * @param image the image to trace - * @param max used to restrict number of points in the resulting shape - * @return the Shape - */ - public static Shape getShape(BufferedImage image, int max) { - - float width = image.getWidth(); - float height = image.getHeight(); - if(width > max || height > max) { - BufferedImage smaller = - new BufferedImage(max, max, BufferedImage.TYPE_INT_ARGB); - Graphics g = smaller.createGraphics(); - AffineTransform at = AffineTransform.getScaleInstance(max/width,max/height); - AffineTransform back = AffineTransform.getScaleInstance(width/max,height/max); - Graphics2D g2 = (Graphics2D)g; - g2.drawImage(image, at, null); - g2.dispose(); - return back.createTransformedShape(getShape(smaller)); - } else { - return getShape(image); - } - } - - public static Shape getShape(BufferedImage image) { - Area area = new Area(leftEdge(image)); - area.intersect(new Area(bottomEdge(image))); - area.intersect(new Area(rightEdge(image))); - area.intersect(new Area(topEdge(image))); - return area; - } - /** - * Checks to see if point p is on a line that passes thru - * points p1 and p2. If p is on the line, extend the line - * segment so that it is from p1 to the location of p. - * If the point p is not on the line, update my shape - * with a line extending to the old p2 location, make - * the old p2 the new p1, and make p2 the old p - * @param p1 - * @param p2 - * @param p - * @param line - * @param path - * @return - */ - private static Point2D detectLine(Point2D p1, Point2D p2, Point2D p, - Line2D line, GeneralPath path) { - if(p2 == null) { - p2 = p; - line.setLine(p1,p2); - } - // check for line - else if(line.ptLineDistSq(p) < 1) { // its on the line - // make it p2 - p2.setLocation(p); - } else { // its not on the current line - p1.setLocation(p2); - p2.setLocation(p); - line.setLine(p1,p2); - path.lineTo((float)p1.getX(), (float)p1.getY()); - } - return p2; - } - /** - * trace the left side of the image - * @param image - * @param path - * @return - */ - private static Shape leftEdge(BufferedImage image) { - GeneralPath path = new GeneralPath(); - Point2D p1 = new Point2D.Float(image.getWidth()-1, 0); - Point2D p2 = null; - Line2D line = new Line2D.Float(); - Point2D p = new Point2D.Float(); - path.moveTo(image.getWidth()-1, 0); - - for(int i=0; i=0; j--) { - if((image.getRGB(i,j) & 0xff000000) != 0) { - // this is a point I want - p.setLocation(i,j); - break; - } - } - p2 = detectLine(p1, p2, p, line, path); - } - p.setLocation(image.getWidth()-1, 0); - detectLine(p1, p2, p, line, path); - path.closePath(); - return path; - } - - /** - * trace the right side of the image - * @param image - * @param path - * @param start - * @return - */ - private static Shape rightEdge(BufferedImage image) { - GeneralPath path = new GeneralPath(); - Point2D p1 = new Point2D.Float(0, image.getHeight()-1); - Point2D p2 = null; - Line2D line = new Line2D.Float(); - Point2D p = new Point2D.Float(); - path.moveTo(0, image.getHeight()-1); - - for(int i=image.getHeight()-1; i>=0; i--) { - p.setLocation(0, i); - for(int j=image.getWidth()-1; j>=0; j--) { - if((image.getRGB(j,i) & 0xff000000) != 0) { - // this is a point I want - p.setLocation(j,i); - break; - } - } - p2 = detectLine(p1, p2, p, line, path); - } - p.setLocation(0, 0); - detectLine(p1, p2, p,line, path); - path.closePath(); - return path; - } - - /** - * trace the top of the image - * @param image - * @param path - * @param start - * @return - */ - private static Shape topEdge(BufferedImage image) { - GeneralPath path = new 
GeneralPath(); - Point2D p1 = new Point2D.Float(image.getWidth()-1, image.getHeight()-1); - Point2D p2 = null; - Line2D line = new Line2D.Float(); - Point2D p = new Point2D.Float(); - path.moveTo(image.getWidth()-1, image.getHeight()-1); - - for(int i=image.getWidth()-1; i>=0; i--) { - p.setLocation(i, image.getHeight()-1); - for(int j=0; j 1) { - first = pointArray[0]; - second = pointArray[1]; - } else if(pointArray.length > 0) { - first = second = pointArray[0]; - } - - if(first != null && second != null) { - // correct direction of intersect points - if((h0.getX() - h1.getX()) * (first.getX() - second.getX()) < 0) { - // swap them - Point2D temp = first; - first = second; - second = temp; - } - - if(containsH0 && containsH1) { - max = (int)first.distance(second); - val = (int)first.distance(h0); - ext = (int)h0.distance(h1); - - } else if(containsH0) { - max = (int)first.distance(second); - val = (int)first.distance(h0); - ext = (int)h0.distance(second); - - } else if(containsH1) { - max = (int) first.distance(second); - val = 0; - ext = (int) first.distance(h1); - - } else { - max = ext = rectangle.width; - val = min; - } - horizontalScrollBar.setValues(val, ext+1, min, max); - } - - // vertical scroll bar - min = val = 0; - - intersector.intersectLine(new Line2D.Double(v0, v1)); - points = intersector.getPoints(); - - pointArray = (Point2D[])points.toArray(new Point2D[points.size()]); - if(pointArray.length > 1) { - first = pointArray[0]; - second = pointArray[1]; - } else if(pointArray.length > 0) { - first = second = pointArray[0]; - } - - if(first != null && second != null) { - - // arrange for direction - if((v0.getY() - v1.getY()) * (first.getY() - second.getY()) < 0) { - // swap them - Point2D temp = first; - first = second; - second = temp; - } - - if(containsV0 && containsV1) { - max = (int)first.distance(second); - val = (int)first.distance(v0); - ext = (int)v0.distance(v1); - - } else if(containsV0) { - max = (int)first.distance(second); - val = (int)first.distance(v0); - ext = (int)v0.distance(second); - - } else if(containsV1) { - max = (int) first.distance(second); - val = 0; - ext = (int) first.distance(v1); - - } else { - max = ext = rectangle.height; - val = min; - } - verticalScrollBar.setValues(val, ext+1, min, max); - } - } - - /** - * Listener to adjust the scroll bar parameters when the window - * is resized - */ - protected class ResizeListener extends ComponentAdapter { - - public void componentHidden(ComponentEvent e) { - } - - public void componentResized(ComponentEvent e) { - setScrollBars(vv); - } - public void componentShown(ComponentEvent e) { - } - } - - /** - * @return Returns the corner component. - */ - public JComponent getCorner() { - return corner; - } - - /** - * @param corner The cornerButton to set. 
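The scroll-bar bookkeeping above (recomputing values and extents from the intersection of the view with the laid-out graph) appears to belong to JUNG's GraphZoomScrollPane; the class header was lost from this hunk, so treat the name as an inference. Typical usage is simply to wrap the viewer, as in this sketch (graph and layout classes as in the earlier sketches):

    import java.awt.Dimension;

    import javax.swing.JFrame;

    import edu.uci.ics.jung.algorithms.layout.CircleLayout;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.SparseMultigraph;
    import edu.uci.ics.jung.visualization.GraphZoomScrollPane;
    import edu.uci.ics.jung.visualization.VisualizationViewer;

    public class ScrollPaneSketch {
        public static void main(String[] args) {
            Graph<String, String> g = new SparseMultigraph<String, String>();
            g.addVertex("a"); g.addVertex("b"); g.addEdge("a-b", "a", "b");

            VisualizationViewer<String, String> vv =
                new VisualizationViewer<String, String>(
                    new CircleLayout<String, String>(g), new Dimension(400, 400));

            // The scroll pane's ResizeListener triggers the setScrollBars logic
            // shown above whenever the window is resized.
            JFrame frame = new JFrame("scroll pane sketch");
            frame.getContentPane().add(new GraphZoomScrollPane(vv));
            frame.pack();
            frame.setVisible(true);
        }
    }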
- */ - public void setCorner(JComponent corner) { - this.corner = corner; - corner.setPreferredSize(new Dimension(verticalScrollBar.getPreferredSize().width, - horizontalScrollBar.getPreferredSize().height)); - south.add(this.corner, BorderLayout.EAST); - } - - public JScrollBar getHorizontalScrollBar() { - return horizontalScrollBar; - } - - public JScrollBar getVerticalScrollBar() { - return verticalScrollBar; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/Layer.java b/gui/jung-src/edu/uci/ics/jung/visualization/Layer.java deleted file mode 100644 index 708e980e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/Layer.java +++ /dev/null @@ -1,5 +0,0 @@ -package edu.uci.ics.jung.visualization; - -public enum Layer { - LAYOUT, VIEW -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/LayeredIcon.java b/gui/jung-src/edu/uci/ics/jung/visualization/LayeredIcon.java deleted file mode 100644 index 65bb12cb..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/LayeredIcon.java +++ /dev/null @@ -1,47 +0,0 @@ -package edu.uci.ics.jung.visualization; - -import java.awt.Component; -import java.awt.Dimension; -import java.awt.Graphics; -import java.awt.Image; -import java.util.LinkedHashSet; -import java.util.Set; - -import javax.swing.Icon; -import javax.swing.ImageIcon; - -/** - * An icon that is made up of a collection of Icons. - * They are rendered in layers starting with the first - * Icon added (from the constructor). - * - * @author Tom Nelson - * - */ -public class LayeredIcon extends ImageIcon { - - Set iconSet = new LinkedHashSet(); - - public LayeredIcon(Image image) { - super(image); - } - - public void paintIcon(Component c, Graphics g, int x, int y) { - super.paintIcon(c, g, x, y); - Dimension d = new Dimension(getIconWidth(), getIconHeight()); - for (Icon icon : iconSet) { - Dimension id = new Dimension(icon.getIconWidth(), icon.getIconHeight()); - int dx = (d.width - id.width)/2; - int dy = (d.height - id.height)/2; - icon.paintIcon(c, g, x+dx, y+dy); - } - } - - public void add(Icon icon) { - iconSet.add(icon); - } - - public boolean remove(Icon icon) { - return iconSet.remove(icon); - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/MultiLayerTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/MultiLayerTransformer.java deleted file mode 100644 index 99f65a86..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/MultiLayerTransformer.java +++ /dev/null @@ -1,40 +0,0 @@ -package edu.uci.ics.jung.visualization; - -import java.awt.Shape; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.transform.BidirectionalTransformer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; -import edu.uci.ics.jung.visualization.transform.shape.ShapeTransformer; -import edu.uci.ics.jung.visualization.util.ChangeEventSupport; - -public interface MultiLayerTransformer extends BidirectionalTransformer, ShapeTransformer, ChangeEventSupport { - - - /** - * @see edu.uci.ics.jung.visualization.VisualizationServer#setViewTransformer(edu.uci.ics.jung.visualization.transform.MutableTransformer) - */ - void setTransformer(Layer layer, MutableTransformer transformer); - - /** - * @return the layoutTransformer - */ - MutableTransformer getTransformer(Layer layer); - - /** - */ - Point2D inverseTransform(Layer layer, Point2D p); - - /** - */ - Point2D transform(Layer layer, Point2D p); - - /** - */ - Shape transform(Layer layer, Shape shape); - - Shape inverseTransform(Layer layer, 
Shape shape); - - void setToIdentity(); - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/PivotingImageShaper.java b/gui/jung-src/edu/uci/ics/jung/visualization/PivotingImageShaper.java deleted file mode 100644 index 00f7f322..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/PivotingImageShaper.java +++ /dev/null @@ -1,271 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jun 17, 2005 - */ - -package edu.uci.ics.jung.visualization; - -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.Image; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.GeneralPath; -import java.awt.geom.Line2D; -import java.awt.geom.Point2D; -import java.awt.image.BufferedImage; -import java.io.IOException; - -import javax.imageio.ImageIO; - - -/** - * Provides factory methods that, given a BufferedImage, an Image, - * or the fileName of an image, will return a java.awt.Shape that - * is the contiguous traced outline of the opaque part of the image. - * This could be used to define an image for use in a Vertex, where - * the shape used for picking and edge-arrow placement follows the - * opaque part of an image that has a transparent background. - * The methods try to detect lines in order to minimize points - * in the path - * - * @author Tom Nelson - * - * - */ -public class PivotingImageShaper { - - /** - * the number of pixels to skip while sampling the - * images edges - */ - static int sample = 1; - /** - * the first x coordinate of the shape. 
Used to discern - * when we are done - */ - static int firstx = 0; - - public static Shape getShape(String fileName) { - return getShape(fileName, Integer.MAX_VALUE); - } - public static Shape getShape(String fileName, int max) { - BufferedImage image = null; - try { - image = ImageIO.read(FourPassImageShaper.class.getResource(fileName)); - } catch(IOException ex) { - ex.printStackTrace(); - } - return getShape(image, max); - } - - /** - * Given an image, possibly with a transparent background, return - * the Shape of the opaque part of the image - * @param image - * @return the Shape - */ - public static Shape getShape(Image image) { - return getShape(image, Integer.MAX_VALUE); - } - public static Shape getShape(Image image, int max) { - BufferedImage bi = - new BufferedImage(image.getWidth(null), image.getHeight(null), - BufferedImage.TYPE_INT_ARGB); - Graphics g = bi.createGraphics(); - g.drawImage(image, 0, 0, null); - g.dispose(); - return getShape(bi, max); - } - - /** - * Given an image, possibly with a transparent background, return - * the Shape of the opaque part of the image - * @param image - * @return the Shape - */ - public static Shape getShape(BufferedImage image, int max) { - - float width = image.getWidth(); - float height = image.getHeight(); - if(width > max || height > max) { - BufferedImage smaller = - new BufferedImage(max, max, BufferedImage.TYPE_INT_ARGB); - Graphics g = smaller.createGraphics(); - AffineTransform at = AffineTransform.getScaleInstance(max/width,max/height); - AffineTransform back = AffineTransform.getScaleInstance(width/max,height/max); - Graphics2D g2 = (Graphics2D)g; - g2.drawImage(image, at, null); - g2.dispose(); - return back.createTransformedShape(getShape(smaller)); - } else { - return getShape(image); - } - } - - /** - * Given an image, possibly with a transparent background, return - * the Shape of the opaque part of the image - * @param image - * @return the Shape - */ - public static Shape getShape(BufferedImage image) { - firstx = 0; - return leftEdge(image, new GeneralPath()); - } - - private static Point2D detectLine(Point2D p1, Point2D p2, Point2D p, - Line2D line, GeneralPath path) { - if(p2 == null) { - p2 = p; - line.setLine(p1,p2); - } - // check for line - else if(line.ptLineDistSq(p) < 1) { // its on the line - // make it p2 - p2.setLocation(p); - } else { // its not on the current line - p1.setLocation(p2); - p2.setLocation(p); - line.setLine(p1,p2); - path.lineTo((float)p1.getX(), (float)p1.getY()); - } - return p2; - } - /** - * trace the left side of the image - * @param image - * @param path - * @return - */ - private static Shape leftEdge(BufferedImage image, GeneralPath path) { - int lastj = 0; - Point2D p1 = null; - Point2D p2 = null; - Line2D line = new Line2D.Float(); - for(int i=0; i=0; j-=sample) { - if((image.getRGB(i,j) & 0xff000000) != 0) { - // this is a point I want - Point2D p = new Point2D.Float(i,j); - aPointExistsOnThisLine = true; - p2 = detectLine(p1,p2,p,line,path); - lastj = j; - break; - } - } - if(aPointExistsOnThisLine == false) { - break; - } - } - return rightEdge(image, path, lastj); - } - - /** - * trace the right side of the image - * @param image - * @param path - * @param start - * @return - */ - private static Shape rightEdge(BufferedImage image, GeneralPath path, int start) { - int lastj = 0; - Point2D p1 = path.getCurrentPoint(); - Point2D p2 = null; - Line2D line = new Line2D.Float(); - for(int i=start; i>=0; i-=sample) { - boolean aPointExistsOnThisLine = false; - - for(int 
j=image.getWidth()-1; j>=0; j-=sample) { - if((image.getRGB(j,i) & 0xff000000) != 0) { - // this is a point I want - Point2D p = new Point2D.Float(j,i); - aPointExistsOnThisLine = true; - p2 = detectLine(p1,p2,p,line,path); - lastj=j; - break; - } - } - if(aPointExistsOnThisLine == false) { - break; - } - } - return topEdge(image, path, lastj); - } - - /** - * trace the top of the image - * @param image - * @param path - * @param start - * @return - */ - private static Shape topEdge(BufferedImage image, GeneralPath path, int start) { - Point2D p1 = path.getCurrentPoint(); - Point2D p2 = null; - Line2D line = new Line2D.Float(); - for(int i=start; i>=firstx; i-=sample) { - boolean aPointExistsOnThisLine = false; - for(int j=0; j implements RenderContext { - - protected float arrowPlacementTolerance = 1; - protected Predicate,V>> vertexIncludePredicate = TruePredicate.getInstance(); - protected Transformer vertexStrokeTransformer = - new ConstantTransformer(new BasicStroke(1.0f)); - - protected Transformer vertexShapeTransformer = - new ConstantTransformer( - new Ellipse2D.Float(-10,-10,20,20)); - - protected Transformer vertexLabelTransformer = new ConstantTransformer(null); - protected Transformer vertexIconTransformer; - protected Transformer vertexFontTransformer = - new ConstantTransformer(new Font("Helvetica", Font.PLAIN, 12)); - - protected Transformer vertexDrawPaintTransformer = new ConstantTransformer(Color.BLACK); - protected Transformer vertexFillPaintTransformer = new ConstantTransformer(Color.RED); - - protected Transformer edgeLabelTransformer = new ConstantTransformer(null); - protected Transformer edgeStrokeTransformer = new ConstantTransformer(new BasicStroke(1.0f)); - protected Transformer edgeArrowStrokeTransformer = new ConstantTransformer(new BasicStroke(1.0f)); - - protected Transformer,E>,Shape> edgeArrowTransformer = - new DirectionalEdgeArrowTransformer(10, 8, 4); - - protected Predicate,E>> edgeArrowPredicate = new DirectedEdgeArrowPredicate(); - protected Predicate,E>> edgeIncludePredicate = TruePredicate.getInstance(); - protected Transformer edgeFontTransformer = - new ConstantTransformer(new Font("Helvetica", Font.PLAIN, 12)); - protected Transformer,E>,Number> edgeLabelClosenessTransformer = - new ConstantDirectionalEdgeValueTransformer(0.5, 0.65); - protected Transformer,E>,Shape> edgeShapeTransformer = - new EdgeShape.QuadCurve(); - protected Transformer edgeFillPaintTransformer = - new ConstantTransformer(null); - protected Transformer edgeDrawPaintTransformer = - new ConstantTransformer(Color.black); - protected Transformer arrowFillPaintTransformer = - new ConstantTransformer(Color.black); - protected Transformer arrowDrawPaintTransformer = - new ConstantTransformer(Color.black); - - protected EdgeIndexFunction parallelEdgeIndexFunction = - DefaultParallelEdgeIndexFunction.getInstance(); - - protected EdgeIndexFunction incidentEdgeIndexFunction = - IncidentEdgeIndexFunction.getInstance(); - - protected MultiLayerTransformer multiLayerTransformer = new BasicTransformer(); - - /** - * pluggable support for picking graph elements by - * finding them based on their coordinates. 
- */ - protected GraphElementAccessor pickSupport; - - - protected int labelOffset = LABEL_OFFSET; - - /** - * the JComponent that this Renderer will display the graph on - */ - protected JComponent screenDevice; - - protected PickedState pickedVertexState; - protected PickedState pickedEdgeState; - - /** - * The CellRendererPane is used here just as it is in JTree - * and JTable, to allow a pluggable JLabel-based renderer for - * Vertex and Edge label strings and icons. - */ - protected CellRendererPane rendererPane = new CellRendererPane(); - - /** - * A default GraphLabelRenderer - picked Vertex labels are - * blue, picked edge labels are cyan - */ - protected VertexLabelRenderer vertexLabelRenderer = - new DefaultVertexLabelRenderer(Color.blue); - - protected EdgeLabelRenderer edgeLabelRenderer = new DefaultEdgeLabelRenderer(Color.cyan); - - protected GraphicsDecorator graphicsContext; - - PluggableRenderContext() { - this.setEdgeShapeTransformer(new EdgeShape.QuadCurve()); - } - - /** - * @return the vertexShapeTransformer - */ - public Transformer getVertexShapeTransformer() { - return vertexShapeTransformer; - } - - /** - * @param vertexShapeTransformer the vertexShapeTransformer to set - */ - public void setVertexShapeTransformer( - Transformer vertexShapeTransformer) { - this.vertexShapeTransformer = vertexShapeTransformer; - } - - /** - * @return the vertexStrokeTransformer - */ - public Transformer getVertexStrokeTransformer() { - return vertexStrokeTransformer; - } - - /** - * @param vertexStrokeTransformer the vertexStrokeTransformer to set - */ - public void setVertexStrokeTransformer( - Transformer vertexStrokeTransformer) { - this.vertexStrokeTransformer = vertexStrokeTransformer; - } - - public static float[] getDashing() { - return dashing; - } - - public static float[] getDotting() { - return dotting; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getArrow_placement_tolerance() - */ - public float getArrowPlacementTolerance() { - return arrowPlacementTolerance; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setArrow_placement_tolerance(float) - */ - public void setArrowPlacementTolerance(float arrow_placement_tolerance) { - this.arrowPlacementTolerance = arrow_placement_tolerance; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeArrowTransformer() - */ - public Transformer,E>,Shape> getEdgeArrowTransformer() { - return edgeArrowTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeArrowTransformer(edu.uci.ics.jung.visualization.decorators.EdgeArrowTransformer) - */ - public void setEdgeArrowTransformer(Transformer,E>,Shape> edgeArrowTransformer) { - this.edgeArrowTransformer = edgeArrowTransformer; - } - - /** - * @see RenderContext#getEdgeArrowPredicate() - */ - public Predicate,E>> getEdgeArrowPredicate() { - return edgeArrowPredicate; - } - - /** - * @see RenderContext#setEdgeArrowPredicate(Predicate) - */ - public void setEdgeArrowPredicate(Predicate,E>> edgeArrowPredicate) { - this.edgeArrowPredicate = edgeArrowPredicate; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeFontTransformer() - */ - public Transformer getEdgeFontTransformer() { - return edgeFontTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeFontTransformer(edu.uci.ics.jung.visualization.decorators.EdgeFontTransformer) - */ - public void setEdgeFontTransformer(Transformer edgeFontTransformer) { - this.edgeFontTransformer = edgeFontTransformer; - } - - 
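Most of this class is a bag of pluggable Transformers, so configuration amounts to swapping them out on the viewer's render context. A short sketch, assuming String vertices and edges (the generic parameters have been stripped from the diff) and the commons-collections15 Transformer interface already imported by this file:

    import java.awt.Color;
    import java.awt.Paint;

    import org.apache.commons.collections15.Transformer;

    import edu.uci.ics.jung.visualization.VisualizationViewer;
    import edu.uci.ics.jung.visualization.picking.PickedState;

    public class RenderContextConfigSketch {
        public static void configure(final VisualizationViewer<String, String> vv) {
            // Label each vertex with its own value.
            vv.getRenderContext().setVertexLabelTransformer(
                new Transformer<String, String>() {
                    public String transform(String v) { return v; }
                });

            // Fill picked vertices yellow; the default above is a constant red fill.
            final PickedState<String> picked = vv.getPickedVertexState();
            vv.getRenderContext().setVertexFillPaintTransformer(
                new Transformer<String, Paint>() {
                    public Paint transform(String v) {
                        return picked.isPicked(v) ? Color.YELLOW : Color.RED;
                    }
                });

            vv.repaint();
        }
    }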
/** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeIncludePredicate() - */ - public Predicate,E>> getEdgeIncludePredicate() { - return edgeIncludePredicate; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeIncludePredicate(org.apache.commons.collections15.Predicate) - */ - public void setEdgeIncludePredicate(Predicate,E>> edgeIncludePredicate) { - this.edgeIncludePredicate = edgeIncludePredicate; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeLabelClosenessTransformer() - */ - public Transformer,E>,Number> getEdgeLabelClosenessTransformer() { - return edgeLabelClosenessTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeLabelClosenessTransformer(edu.uci.ics.jung.visualization.decorators.NumberDirectionalEdgeValue) - */ - public void setEdgeLabelClosenessTransformer( - Transformer,E>,Number> edgeLabelClosenessTransformer) { - this.edgeLabelClosenessTransformer = edgeLabelClosenessTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeLabelRenderer() - */ - public EdgeLabelRenderer getEdgeLabelRenderer() { - return edgeLabelRenderer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeLabelRenderer(edu.uci.ics.jung.visualization.EdgeLabelRenderer) - */ - public void setEdgeLabelRenderer(EdgeLabelRenderer edgeLabelRenderer) { - this.edgeLabelRenderer = edgeLabelRenderer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgePaintTransformer() - */ - public Transformer getEdgeFillPaintTransformer() { - return edgeFillPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgePaintTransformer(edu.uci.ics.jung.visualization.decorators.EdgePaintTransformer) - */ - public void setEdgeDrawPaintTransformer(Transformer edgeDrawPaintTransformer) { - this.edgeDrawPaintTransformer = edgeDrawPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgePaintTransformer() - */ - public Transformer getEdgeDrawPaintTransformer() { - return edgeDrawPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgePaintTransformer(edu.uci.ics.jung.visualization.decorators.EdgePaintTransformer) - */ - public void setEdgeFillPaintTransformer(Transformer edgeFillPaintTransformer) { - this.edgeFillPaintTransformer = edgeFillPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeShapeTransformer() - */ - public Transformer,E>,Shape> getEdgeShapeTransformer() { - return edgeShapeTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeShapeTransformer(edu.uci.ics.jung.visualization.decorators.EdgeShapeTransformer) - */ - public void setEdgeShapeTransformer(Transformer,E>,Shape> edgeShapeTransformer) { - this.edgeShapeTransformer = edgeShapeTransformer; - if(edgeShapeTransformer instanceof EdgeShape.Orthogonal) { - ((EdgeShape.IndexedRendering)edgeShapeTransformer).setEdgeIndexFunction(this.incidentEdgeIndexFunction); - } else - if(edgeShapeTransformer instanceof EdgeShape.IndexedRendering) { - ((EdgeShape.IndexedRendering)edgeShapeTransformer).setEdgeIndexFunction(this.parallelEdgeIndexFunction); - } - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeLabelTransformer() - */ - public Transformer getEdgeLabelTransformer() { - return edgeLabelTransformer; - } - - /** - * @see 
edu.uci.ics.jung.visualization.RenderContext#setEdgeLabelTransformer(edu.uci.ics.jung.visualization.decorators.EdgeLabelTransformer) - */ - public void setEdgeLabelTransformer(Transformer edgeLabelTransformer) { - this.edgeLabelTransformer = edgeLabelTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeStrokeTransformer() - */ - public Transformer getEdgeStrokeTransformer() { - return edgeStrokeTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeStrokeTransformer(edu.uci.ics.jung.visualization.decorators.EdgeStrokeTransformer) - */ - public void setEdgeStrokeTransformer(Transformer edgeStrokeTransformer) { - this.edgeStrokeTransformer = edgeStrokeTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getEdgeStrokeTransformer() - */ - public Transformer getEdgeArrowStrokeTransformer() { - return edgeArrowStrokeTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setEdgeStrokeTransformer(edu.uci.ics.jung.visualization.decorators.EdgeStrokeTransformer) - */ - public void setEdgeArrowStrokeTransformer(Transformer edgeArrowStrokeTransformer) { - this.edgeArrowStrokeTransformer = edgeArrowStrokeTransformer; - } - - /** - * @see RenderContext#getGraphicsContext() - */ - public GraphicsDecorator getGraphicsContext() { - return graphicsContext; - } - - /** - * @see RenderContext#setGraphicsContext(GraphicsDecorator) - */ - public void setGraphicsContext(GraphicsDecorator graphicsContext) { - this.graphicsContext = graphicsContext; - } - - /** - * @see RenderContext#getLabelOffset() - */ - public int getLabelOffset() { - return labelOffset; - } - - /** - * @see RenderContext#setLabelOffset(int) - */ - public void setLabelOffset(int labelOffset) { - this.labelOffset = labelOffset; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getParallelEdgeIndexTransformer() - */ - public EdgeIndexFunction getParallelEdgeIndexFunction() { - return parallelEdgeIndexFunction; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setParallelEdgeIndexFunction(edu.uci.ics.graph.util.ParallelEdgeIndexFunction) - */ - public void setParallelEdgeIndexFunction( - EdgeIndexFunction parallelEdgeIndexFunction) { - this.parallelEdgeIndexFunction = parallelEdgeIndexFunction; - // reset the edge shape transformer, as the parallel edge index function - // is used by it - this.setEdgeShapeTransformer(getEdgeShapeTransformer()); - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getPickedEdgeState() - */ - public PickedState getPickedEdgeState() { - return pickedEdgeState; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setPickedEdgeState(edu.uci.ics.jung.visualization.picking.PickedState) - */ - public void setPickedEdgeState(PickedState pickedEdgeState) { - this.pickedEdgeState = pickedEdgeState; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getPickedVertexState() - */ - public PickedState getPickedVertexState() { - return pickedVertexState; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setPickedVertexState(edu.uci.ics.jung.visualization.picking.PickedState) - */ - public void setPickedVertexState(PickedState pickedVertexState) { - this.pickedVertexState = pickedVertexState; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getRendererPane() - */ - public CellRendererPane getRendererPane() { - return rendererPane; - } - - /** - * @see 
edu.uci.ics.jung.visualization.RenderContext#setRendererPane(javax.swing.CellRendererPane) - */ - public void setRendererPane(CellRendererPane rendererPane) { - this.rendererPane = rendererPane; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getScreenDevice() - */ - public JComponent getScreenDevice() { - return screenDevice; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setScreenDevice(edu.uci.ics.jung.visualization.VisualizationViewer) - */ - public void setScreenDevice(JComponent screenDevice) { - this.screenDevice = screenDevice; - screenDevice.add(rendererPane); - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexFontTransformer() - */ - public Transformer getVertexFontTransformer() { - return vertexFontTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexFontTransformer(edu.uci.ics.jung.visualization.decorators.VertexFontTransformer) - */ - public void setVertexFontTransformer(Transformer vertexFontTransformer) { - this.vertexFontTransformer = vertexFontTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexIconTransformer() - */ - public Transformer getVertexIconTransformer() { - return vertexIconTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexIconTransformer(edu.uci.ics.jung.visualization.decorators.VertexIconTransformer) - */ - public void setVertexIconTransformer(Transformer vertexIconTransformer) { - this.vertexIconTransformer = vertexIconTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexIncludePredicate() - */ - public Predicate,V>> getVertexIncludePredicate() { - return vertexIncludePredicate; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexIncludePredicate(org.apache.commons.collections15.Predicate) - */ - public void setVertexIncludePredicate(Predicate,V>> vertexIncludePredicate) { - this.vertexIncludePredicate = vertexIncludePredicate; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexLabelRenderer() - */ - public VertexLabelRenderer getVertexLabelRenderer() { - return vertexLabelRenderer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexLabelRenderer(edu.uci.ics.jung.visualization.VertexLabelRenderer) - */ - public void setVertexLabelRenderer(VertexLabelRenderer vertexLabelRenderer) { - this.vertexLabelRenderer = vertexLabelRenderer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexPaintTransformer() - */ - public Transformer getVertexFillPaintTransformer() { - return vertexFillPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexPaintTransformer(edu.uci.ics.jung.visualization.decorators.VertexPaintTransformer) - */ - public void setVertexFillPaintTransformer(Transformer vertexFillPaintTransformer) { - this.vertexFillPaintTransformer = vertexFillPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#getVertexPaintTransformer() - */ - public Transformer getVertexDrawPaintTransformer() { - return vertexDrawPaintTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexPaintTransformer(edu.uci.ics.jung.visualization.decorators.VertexPaintTransformer) - */ - public void setVertexDrawPaintTransformer(Transformer vertexDrawPaintTransformer) { - this.vertexDrawPaintTransformer = vertexDrawPaintTransformer; - } - - /** - * @see 
edu.uci.ics.jung.visualization.RenderContext#getVertexLabelTransformer() - */ - public Transformer getVertexLabelTransformer() { - return vertexLabelTransformer; - } - - /** - * @see edu.uci.ics.jung.visualization.RenderContext#setVertexLabelTransformer(edu.uci.ics.jung.visualization.decorators.VertexLabelTransformer) - */ - public void setVertexLabelTransformer(Transformer vertexLabelTransformer) { - this.vertexLabelTransformer = vertexLabelTransformer; - } - - /** - * @return the pickSupport - */ - public GraphElementAccessor getPickSupport() { - return pickSupport; - } - - /** - * @param pickSupport the pickSupport to set - */ - public void setPickSupport(GraphElementAccessor pickSupport) { - this.pickSupport = pickSupport; - } - - /** - * @return the basicTransformer - */ - public MultiLayerTransformer getMultiLayerTransformer() { - return multiLayerTransformer; - } - - /** - * @param basicTransformer the basicTransformer to set - */ - public void setMultiLayerTransformer(MultiLayerTransformer basicTransformer) { - this.multiLayerTransformer = basicTransformer; - } - - /** - * @see RenderContext#getArrowDrawPaintTransformer() - */ - public Transformer getArrowDrawPaintTransformer() { - return arrowDrawPaintTransformer; - } - - /** - * @see RenderContext#getArrowFillPaintTransformer() - */ - public Transformer getArrowFillPaintTransformer() { - return arrowFillPaintTransformer; - } - - /** - * @see RenderContext#setArrowDrawPaintTransformer(Transformer) - */ - public void setArrowDrawPaintTransformer(Transformer arrowDrawPaintTransformer) { - this.arrowDrawPaintTransformer = arrowDrawPaintTransformer; - - } - - /** - * @see RenderContext#setArrowFillPaintTransformer(Transformer) - */ - public void setArrowFillPaintTransformer(Transformer arrowFillPaintTransformer) { - this.arrowFillPaintTransformer = arrowFillPaintTransformer; - - } -} - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/RenderContext.java b/gui/jung-src/edu/uci/ics/jung/visualization/RenderContext.java deleted file mode 100644 index 52e09e79..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/RenderContext.java +++ /dev/null @@ -1,214 +0,0 @@ -package edu.uci.ics.jung.visualization; - -import java.awt.BasicStroke; -import java.awt.Font; -import java.awt.Paint; -import java.awt.Shape; -import java.awt.Stroke; - -import javax.swing.CellRendererPane; -import javax.swing.Icon; -import javax.swing.JComponent; - -import org.apache.commons.collections15.Predicate; -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.EdgeIndexFunction; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.visualization.picking.PickedState; -import edu.uci.ics.jung.visualization.renderers.EdgeLabelRenderer; -import edu.uci.ics.jung.visualization.renderers.VertexLabelRenderer; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; - -public interface RenderContext { - - float[] dotting = {1.0f, 3.0f}; - float[] dashing = {5.0f}; - - /** - * A stroke for a dotted line: 1 pixel width, round caps, round joins, and an - * array of {1.0f, 3.0f}. - */ - Stroke DOTTED = new BasicStroke(1.0f, - BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND, 1.0f, dotting, 0f); - - /** - * A stroke for a dashed line: 1 pixel width, square caps, beveled joins, and an - * array of {5.0f}. 
- */ - Stroke DASHED = new BasicStroke(1.0f, - BasicStroke.CAP_SQUARE, BasicStroke.JOIN_BEVEL, 1.0f, dashing, 0f); - - /** - * Specifies the offset for the edge labels. - */ - int LABEL_OFFSET = 10; - - int getLabelOffset(); - - void setLabelOffset(int labelOffset); - - float getArrowPlacementTolerance(); - - void setArrowPlacementTolerance(float arrow_placement_tolerance); - - Transformer,E>,Shape> getEdgeArrowTransformer(); - - void setEdgeArrowTransformer(Transformer,E>,Shape> edgeArrowTransformer); - - Predicate,E>> getEdgeArrowPredicate() ; - - void setEdgeArrowPredicate(Predicate,E>> edgeArrowPredicate); - - Transformer getEdgeFontTransformer(); - - void setEdgeFontTransformer(Transformer edgeFontTransformer); - - Predicate,E>> getEdgeIncludePredicate(); - - void setEdgeIncludePredicate(Predicate,E>> edgeIncludePredicate); - - Transformer,E>,Number> getEdgeLabelClosenessTransformer(); - - void setEdgeLabelClosenessTransformer( - Transformer,E>,Number> edgeLabelClosenessTransformer); - - EdgeLabelRenderer getEdgeLabelRenderer(); - - void setEdgeLabelRenderer(EdgeLabelRenderer edgeLabelRenderer); - - Transformer getEdgeFillPaintTransformer(); - - void setEdgeFillPaintTransformer(Transformer edgePaintTransformer); - - Transformer getEdgeDrawPaintTransformer(); - - void setEdgeDrawPaintTransformer(Transformer edgeDrawPaintTransformer); - - Transformer getArrowDrawPaintTransformer(); - - void setArrowDrawPaintTransformer(Transformer arrowDrawPaintTransformer); - - Transformer getArrowFillPaintTransformer(); - - void setArrowFillPaintTransformer(Transformer arrowFillPaintTransformer); - - Transformer,E>,Shape> getEdgeShapeTransformer(); - - void setEdgeShapeTransformer(Transformer,E>,Shape> edgeShapeTransformer); - - Transformer getEdgeLabelTransformer(); - - void setEdgeLabelTransformer(Transformer edgeStringer); - - Transformer getEdgeStrokeTransformer(); - - void setEdgeStrokeTransformer(Transformer edgeStrokeTransformer); - - Transformer getEdgeArrowStrokeTransformer(); - - void setEdgeArrowStrokeTransformer(Transformer edgeArrowStrokeTransformer); - - GraphicsDecorator getGraphicsContext(); - - void setGraphicsContext(GraphicsDecorator graphicsContext); - - EdgeIndexFunction getParallelEdgeIndexFunction(); - - void setParallelEdgeIndexFunction( - EdgeIndexFunction parallelEdgeIndexFunction); - - PickedState getPickedEdgeState(); - - void setPickedEdgeState(PickedState pickedEdgeState); - - PickedState getPickedVertexState(); - - void setPickedVertexState(PickedState pickedVertexState); - - CellRendererPane getRendererPane(); - - void setRendererPane(CellRendererPane rendererPane); - - JComponent getScreenDevice(); - - void setScreenDevice(JComponent screenDevice); - - Transformer getVertexFontTransformer(); - - void setVertexFontTransformer(Transformer vertexFontTransformer); - - Transformer getVertexIconTransformer(); - - void setVertexIconTransformer(Transformer vertexIconTransformer); - - Predicate,V>> getVertexIncludePredicate(); - - void setVertexIncludePredicate(Predicate,V>> vertexIncludePredicate); - - VertexLabelRenderer getVertexLabelRenderer(); - - void setVertexLabelRenderer(VertexLabelRenderer vertexLabelRenderer); - - Transformer getVertexFillPaintTransformer(); - - void setVertexFillPaintTransformer(Transformer vertexFillPaintTransformer); - - Transformer getVertexDrawPaintTransformer(); - - void setVertexDrawPaintTransformer(Transformer vertexDrawPaintTransformer); - - Transformer getVertexShapeTransformer(); - - void setVertexShapeTransformer(Transformer 
vertexShapeTransformer); - - Transformer getVertexLabelTransformer(); - - void setVertexLabelTransformer(Transformer vertexStringer); - - Transformer getVertexStrokeTransformer(); - - void setVertexStrokeTransformer(Transformer vertexStrokeTransformer); - -// MutableTransformer getViewTransformer(); - -// void setViewTransformer(MutableTransformer viewTransformer); - - class DirectedEdgeArrowPredicate - implements Predicate,E>> { - - public boolean evaluate(Context,E> c) { - return c.graph.getEdgeType(c.element) == EdgeType.DIRECTED; - } - - } - - class UndirectedEdgeArrowPredicate - implements Predicate,E>> { - //extends AbstractGraphPredicate { - - public boolean evaluate(Context,E> c) { - return c.graph.getEdgeType(c.element) == EdgeType.UNDIRECTED; - } - - } - - MultiLayerTransformer getMultiLayerTransformer(); - - void setMultiLayerTransformer(MultiLayerTransformer basicTransformer); - - /** - * @return the pickSupport - */ - GraphElementAccessor getPickSupport(); - - /** - * @param pickSupport the pickSupport to set - */ - void setPickSupport(GraphElementAccessor pickSupport); - - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/VisualizationImageServer.java b/gui/jung-src/edu/uci/ics/jung/visualization/VisualizationImageServer.java deleted file mode 100644 index 8a76e8f0..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/VisualizationImageServer.java +++ /dev/null @@ -1,68 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.visualization; - -import java.awt.Dimension; -import java.awt.Graphics2D; -import java.awt.Image; -import java.awt.RenderingHints; -import java.awt.geom.Point2D; -import java.awt.image.BufferedImage; -import java.util.HashMap; -import java.util.Map; - -import edu.uci.ics.jung.algorithms.layout.Layout; - -/** - * A class that could be used on the server side of a thin-client application. It creates the jung - * visualization, then produces an image of it. - * @author tom - * - * @param - * @param - */ -@SuppressWarnings("serial") -public class VisualizationImageServer extends BasicVisualizationServer { - - Map renderingHints = new HashMap(); - - /** - * Creates a new instance the specified layout and preferred size. 
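The RenderContext interface removed just above also exposes the shared DOTTED and DASHED strokes as constants, which plug straight into setEdgeStrokeTransformer. A minimal sketch under the same String/String assumption as the earlier sketches:

    import java.awt.Stroke;

    import org.apache.commons.collections15.Transformer;

    import edu.uci.ics.jung.visualization.RenderContext;
    import edu.uci.ics.jung.visualization.VisualizationViewer;

    public class DashedEdgeSketch {
        public static void useDashedEdges(VisualizationViewer<String, String> vv) {
            // Draw every edge with the {5.0f} dash pattern defined on the interface.
            vv.getRenderContext().setEdgeStrokeTransformer(
                new Transformer<String, Stroke>() {
                    public Stroke transform(String e) { return RenderContext.DASHED; }
                });
            vv.repaint();
        }
    }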
- */ - public VisualizationImageServer(Layout layout, Dimension preferredSize) { - super(layout, preferredSize); - setSize(preferredSize); - renderingHints.put(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); - addNotify(); - } - - public Image getImage(Point2D center, Dimension d) - { - int width = getWidth(); - int height = getHeight(); - - float scalex = (float)width/d.width; - float scaley = (float)height/d.height; - try - { - renderContext.getMultiLayerTransformer().getTransformer(Layer.VIEW).scale(scalex, scaley, center); - - BufferedImage bi = new BufferedImage(width, height, - BufferedImage.TYPE_INT_RGB); - Graphics2D graphics = bi.createGraphics(); - graphics.setRenderingHints(renderingHints); - paint(graphics); - graphics.dispose(); - return bi; - } finally { - renderContext.getMultiLayerTransformer().getTransformer(Layer.VIEW).setToIdentity(); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/VisualizationModel.java b/gui/jung-src/edu/uci/ics/jung/visualization/VisualizationModel.java deleted file mode 100644 index db3807f2..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/VisualizationModel.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on May 4, 2005 - */ - -package edu.uci.ics.jung.visualization; - -import java.awt.Dimension; - -import javax.swing.event.ChangeListener; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.algorithms.layout.util.Relaxer; -import edu.uci.ics.jung.visualization.util.ChangeEventSupport; - -/** - * Interface for the state holding model of the VisualizationViewer. - * Refactored and extracted from the 1.6.0 version of VisualizationViewer - * - * @author Tom Nelson - */ -public interface VisualizationModel extends ChangeEventSupport { - - - Relaxer getRelaxer(); - /** - * set the graph Layout - * @param layout - */ - void setGraphLayout(Layout layout); - - /** - * Sets the graph Layout and initialize the Layout size to - * the passed dimensions. The passed Dimension will often be - * the size of the View that will display the graph. - * @param layout - * @param d - */ - void setGraphLayout(Layout layout, Dimension d); - - /** - * Returns the current graph layout. - */ - Layout getGraphLayout(); - - /** - * Register l as a listeners to changes in the model. The View registers - * in order to repaint itself when the model changes. - */ - void addChangeListener(ChangeListener l); - - /** - * Removes a ChangeListener. - * @param l the listener to be removed - */ - void removeChangeListener(ChangeListener l); - - /** - * Returns an array of all the ChangeListeners added - * with addChangeListener(). - * - * @return all of the ChangeListeners added or an empty - * array if no listeners have been added - */ - ChangeListener[] getChangeListeners(); - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/VisualizationServer.java b/gui/jung-src/edu/uci/ics/jung/visualization/VisualizationServer.java deleted file mode 100644 index 6b03e6c6..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/VisualizationServer.java +++ /dev/null @@ -1,251 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. 
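VisualizationImageServer, removed just above, is the off-screen rendering entry point: build it like a viewer, then ask it for an Image. A sketch under the same assumptions as before (String graph, bundled layout classes, placeholder output path; it still instantiates Swing components, so a graphics environment may be required):

    import java.awt.Dimension;
    import java.awt.image.BufferedImage;
    import java.io.File;

    import javax.imageio.ImageIO;

    import edu.uci.ics.jung.algorithms.layout.CircleLayout;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.SparseMultigraph;
    import edu.uci.ics.jung.visualization.VisualizationImageServer;

    public class OffscreenRenderSketch {
        public static void main(String[] args) throws Exception {
            Graph<String, String> g = new SparseMultigraph<String, String>();
            g.addVertex("a"); g.addVertex("b"); g.addEdge("a-b", "a", "b");

            Dimension size = new Dimension(300, 300);
            VisualizationImageServer<String, String> server =
                new VisualizationImageServer<String, String>(
                    new CircleLayout<String, String>(g), size);

            // getImage(...) paints into a BufferedImage and resets the view
            // transform afterwards, so the cast is safe for this implementation.
            BufferedImage img = (BufferedImage) server.getImage(server.getCenter(), size);
            ImageIO.write(img, "png", new File("graph.png"));
        }
    }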
-* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.visualization; - -import java.awt.Graphics; -import java.awt.RenderingHints.Key; -import java.awt.geom.Point2D; -import java.util.Map; - -import javax.swing.event.ChangeEvent; -import javax.swing.event.ChangeListener; -import javax.swing.event.EventListenerList; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.picking.PickedState; -import edu.uci.ics.jung.visualization.renderers.Renderer; - -/** - * @author tom - * - * @param - * @param - */ -public interface VisualizationServer { - - /** - * set whether this class uses its offscreen image or not. If - * true, then doubleBuffering in the superclass is set to 'false' - */ - void setDoubleBuffered(boolean doubleBuffered); - - /** - * whether this class uses double buffering. The superclass - * will be the opposite state. - */ - boolean isDoubleBuffered(); - - /** - * @return Returns the model. - */ - VisualizationModel getModel(); - - /** - * @param model The model to set. - */ - void setModel(VisualizationModel model); - - /** - * In response to changes from the model, repaint the - * view, then fire an event to any listeners. - * Examples of listeners are the GraphZoomScrollPane and - * the BirdsEyeVisualizationViewer - */ - void stateChanged(ChangeEvent e); - - /** - * Sets the showing Renderer to be the input Renderer. Also - * tells the Renderer to refer to this visualizationviewer - * as a PickedKey. (Because Renderers maintain a small - * amount of state, such as the PickedKey, it is important - * to create a separate instance for each VV instance.) - */ - void setRenderer(Renderer r); - - /** - * Returns the renderer used by this instance. - */ - Renderer getRenderer(); - - /** - * Removes the current graph layout, and adds a new one. - * @param layout the new layout to set - */ - void setGraphLayout(Layout layout); - - /** - * Removes the current graph layout, and adds a new one, - * optionally re-scaling the view to show the entire layout - * @param layout the new layout to set - * @param scaleToLayout whether to scale the view to show the whole layout - */ -// void setGraphLayout(Layout layout, boolean scaleToLayout); - - /** - * Returns the current graph layout. - * Passes thru to the model - */ - Layout getGraphLayout(); - - /** - * - * @see javax.swing.JComponent#setVisible(boolean) - */ - void setVisible(boolean aFlag); - - /** - * Returns a flag that says whether the visRunner thread is running. If - * it is not, then you may need to restart the thread. - */ -// boolean isVisRunnerRunning(); - - /** - * Transform the mouse point with the inverse transform - * of the VisualizationViewer. This maps from screen coordinates - * to graph coordinates. - * @param p the point to transform (typically, a mouse point) - * @return a transformed Point2D - */ -// Point2D inverseTransform(Point2D p); -// -// Point2D inverseViewTransform(Point2D p); -// -// Point2D inverseLayoutTransform(Point2D p); - - /** - * Transform the mouse point with the current transform - * of the VisualizationViewer. This maps from graph coordinates - * to screen coordinates. 
- * @param p the point to transform - * @return a transformed Point2D - */ -// Point2D transform(Point2D p); -// -// Point2D viewTransform(Point2D p); -// -// Point2D layoutTransform(Point2D p); - - /** - * @param transformer The transformer to set. - */ -// void setViewTransformer(MutableTransformer transformer); -// -// void setLayoutTransformer(MutableTransformer transformer); -// -// MutableTransformer getViewTransformer(); -// -// MutableTransformer getLayoutTransformer(); - - /** - * @return Returns the renderingHints. - */ - Map getRenderingHints(); - - /** - * @param renderingHints The renderingHints to set. - */ - void setRenderingHints(Map renderingHints); - - /** - * @param paintable The paintable to add. - */ - void addPreRenderPaintable(Paintable paintable); - - /** - * @param paintable The paintable to remove. - */ - void removePreRenderPaintable(Paintable paintable); - - /** - * @param paintable The paintable to add. - */ - void addPostRenderPaintable(Paintable paintable); - - /** - * @param paintable The paintable to remove. - */ - void removePostRenderPaintable(Paintable paintable); - - /** - * Adds a ChangeListener. - * @param l the listener to be added - */ - void addChangeListener(ChangeListener l); - - /** - * Removes a ChangeListener. - * @param l the listener to be removed - */ - void removeChangeListener(ChangeListener l); - - /** - * Returns an array of all the ChangeListeners added - * with addChangeListener(). - * - * @return all of the ChangeListeners added or an empty - * array if no listeners have been added - */ - ChangeListener[] getChangeListeners(); - - /** - * Notifies all listeners that have registered interest for - * notification on this event type. The event instance - * is lazily created. - * @see EventListenerList - */ - void fireStateChanged(); - - /** - * @return Returns the pickedState. - */ - PickedState getPickedVertexState(); - - /** - * @return Returns the pickedState. - */ - PickedState getPickedEdgeState(); - - /** - * @param pickedState The pickedState to set. - */ - void setPickedVertexState(PickedState pickedVertexState); - - void setPickedEdgeState(PickedState pickedEdgeState); - - /** - * @return Returns the GraphElementAccessor. - */ - GraphElementAccessor getPickSupport(); - - /** - * @param pickSupport The pickSupport to set. - */ - void setPickSupport(GraphElementAccessor pickSupport); - - Point2D getCenter(); - - RenderContext getRenderContext(); - - void setRenderContext(RenderContext renderContext); - - void repaint(); - - /** - * an interface for the preRender and postRender - */ - interface Paintable { - public void paint(Graphics g); - public boolean useTransform(); - } - - - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/VisualizationViewer.java b/gui/jung-src/edu/uci/ics/jung/visualization/VisualizationViewer.java deleted file mode 100644 index 33f72141..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/VisualizationViewer.java +++ /dev/null @@ -1,219 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. 
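The Paintable callback declared at the end of the VisualizationServer interface above is the standard way to draw decorations before or after the graph itself. A small sketch that stamps a caption in screen coordinates (String/String viewer as in the earlier sketches):

    import java.awt.Color;
    import java.awt.Graphics;

    import edu.uci.ics.jung.visualization.VisualizationServer;
    import edu.uci.ics.jung.visualization.VisualizationViewer;

    public class OverlaySketch {
        public static void addCaption(VisualizationViewer<String, String> vv) {
            vv.addPostRenderPaintable(new VisualizationServer.Paintable() {
                public void paint(Graphics g) {
                    g.setColor(Color.DARK_GRAY);
                    g.drawString("rendered with JUNG", 10, 15);
                }
                // false means the overlay ignores the view transform and stays
                // fixed in screen coordinates while the user pans and zooms.
                public boolean useTransform() {
                    return false;
                }
            });
            vv.repaint();
        }
    }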
-*/ -package edu.uci.ics.jung.visualization; - -import java.awt.Dimension; -import java.awt.event.KeyListener; -import java.awt.event.MouseAdapter; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.event.MouseMotionListener; -import java.awt.event.MouseWheelListener; -import java.awt.geom.Point2D; - -import javax.swing.ToolTipManager; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.control.GraphMouseListener; -import edu.uci.ics.jung.visualization.control.MouseListenerTranslator; - -/** - * Adds mouse behaviors and tooltips to the graph visualization base class - * - * @author Joshua O'Madadhain - * @author Tom Nelson - * @author Danyel Fisher - */ -@SuppressWarnings("serial") -public class VisualizationViewer extends BasicVisualizationServer { - - protected Transformer vertexToolTipTransformer; - protected Transformer edgeToolTipTransformer; - protected Transformer mouseEventToolTipTransformer; - - /** - * provides MouseListener, MouseMotionListener, and MouseWheelListener - * events to the graph - */ - protected GraphMouse graphMouse; - - protected MouseListener requestFocusListener = new MouseAdapter() { - public void mouseClicked(MouseEvent e) { - requestFocusInWindow(); - } - }; - - - /** - * Create an instance with passed parameters. - * - * @param layout The Layout to apply, with its associated Graph - * @param renderer The Renderer to draw it with - */ - public VisualizationViewer(Layout layout) { - this(new DefaultVisualizationModel(layout)); - } - - /** - * Create an instance with passed parameters. - * - * @param layout The Layout to apply, with its associated Graph - * @param renderer The Renderer to draw it with - * @param preferredSize the preferred size of this View - */ - public VisualizationViewer(Layout layout, Dimension preferredSize) { - this(new DefaultVisualizationModel(layout, preferredSize), preferredSize); - } - - /** - * Create an instance with passed parameters. - * - * @param model - * @param renderer - */ - public VisualizationViewer(VisualizationModel model) { - this(model, new Dimension(600,600)); - } - /** - * Create an instance with passed parameters. - * - * @param model - * @param renderer - * @param preferredSize initial preferred size of the view - */ - @SuppressWarnings("unchecked") - public VisualizationViewer(VisualizationModel model, - Dimension preferredSize) { - super(model, preferredSize); - setFocusable(true); - addMouseListener(requestFocusListener); - } - - /** - * a setter for the GraphMouse. This will remove any - * previous GraphMouse (including the one that - * is added in the initMouseClicker method. - * @param graphMouse new value - */ - public void setGraphMouse(GraphMouse graphMouse) { - this.graphMouse = graphMouse; - MouseListener[] ml = getMouseListeners(); - for(int i=0; iGraphMouse - */ - public GraphMouse getGraphMouse() { - return graphMouse; - } - - /** - * This is the interface for adding a mouse listener. The GEL - * will be called back with mouse clicks on vertices. 
- * @param gel - */ - public void addGraphMouseListener( GraphMouseListener gel ) { - addMouseListener( new MouseListenerTranslator( gel, this )); - } - - /** - * Override to request focus on mouse enter, if a key listener is added - * @see java.awt.Component#addKeyListener(java.awt.event.KeyListener) - */ - @Override - public synchronized void addKeyListener(KeyListener l) { - super.addKeyListener(l); -// setFocusable(true); -// addMouseListener(requestFocusListener); - } - - /** - * @param edgeToolTipTransformer the edgeToolTipTransformer to set - */ - public void setEdgeToolTipTransformer( - Transformer edgeToolTipTransformer) { - this.edgeToolTipTransformer = edgeToolTipTransformer; - ToolTipManager.sharedInstance().registerComponent(this); - } - - /** - * @param mouseEventToolTipTransformer the mouseEventToolTipTransformer to set - */ - public void setMouseEventToolTipTransformer( - Transformer mouseEventToolTipTransformer) { - this.mouseEventToolTipTransformer = mouseEventToolTipTransformer; - ToolTipManager.sharedInstance().registerComponent(this); - } - - /** - * @param vertexToolTipTransformer the vertexToolTipTransformer to set - */ - public void setVertexToolTipTransformer( - Transformer vertexToolTipTransformer) { - this.vertexToolTipTransformer = vertexToolTipTransformer; - ToolTipManager.sharedInstance().registerComponent(this); - } - - /** - * called by the superclass to display tooltips - */ - public String getToolTipText(MouseEvent event) { - Layout layout = getGraphLayout(); - Point2D p = null; - if(vertexToolTipTransformer != null) { - p = event.getPoint(); - //renderContext.getBasicTransformer().inverseViewTransform(event.getPoint()); - V vertex = getPickSupport().getVertex(layout, p.getX(), p.getY()); - if(vertex != null) { - return vertexToolTipTransformer.transform(vertex); - } - } - if(edgeToolTipTransformer != null) { - if(p == null) p = renderContext.getMultiLayerTransformer().inverseTransform(Layer.VIEW, event.getPoint()); - E edge = getPickSupport().getEdge(layout, p.getX(), p.getY()); - if(edge != null) { - return edgeToolTipTransformer.transform(edge); - } - } - if(mouseEventToolTipTransformer != null) { - return mouseEventToolTipTransformer.transform(event); - } - return super.getToolTipText(event); - } - - /** - * a convenience type to represent a class that - * processes all types of mouse events for the graph - */ - public interface GraphMouse extends MouseListener, MouseMotionListener, MouseWheelListener {} - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotatingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotatingGraphMousePlugin.java deleted file mode 100644 index bbf4753c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotatingGraphMousePlugin.java +++ /dev/null @@ -1,301 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
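VisualizationViewer, removed just above, wires tooltips through the ToolTipManager and takes a GraphMouse for interaction. A sketch of both, assuming String vertices and the DefaultModalGraphMouse from the control package of this same source tree:

    import org.apache.commons.collections15.Transformer;

    import edu.uci.ics.jung.visualization.VisualizationViewer;
    import edu.uci.ics.jung.visualization.control.DefaultModalGraphMouse;
    import edu.uci.ics.jung.visualization.control.ModalGraphMouse;

    public class MouseAndTooltipSketch {
        public static void wire(VisualizationViewer<String, String> vv) {
            // Registering a vertex tooltip transformer also registers the viewer
            // with the ToolTipManager, as the setter above shows.
            vv.setVertexToolTipTransformer(new Transformer<String, String>() {
                public String transform(String v) { return "vertex " + v; }
            });

            // A modal graph mouse supplies picking plus pan/zoom behaviour.
            DefaultModalGraphMouse<String, String> graphMouse =
                new DefaultModalGraphMouse<String, String>();
            graphMouse.setMode(ModalGraphMouse.Mode.PICKING);
            vv.setGraphMouse(graphMouse);
        }
    }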
- * - */ -package edu.uci.ics.jung.visualization.annotations; - -import java.awt.Color; -import java.awt.Cursor; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.Shape; -import java.awt.event.InputEvent; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.event.MouseMotionListener; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.awt.geom.RectangularShape; - -import javax.swing.JComponent; -import javax.swing.JOptionPane; - -import edu.uci.ics.jung.visualization.MultiLayerTransformer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.VisualizationServer.Paintable; -import edu.uci.ics.jung.visualization.control.AbstractGraphMousePlugin; - -/** - * AnnotatingGraphMousePlugin can create Shape and Text annotations - * in a layer of the graph visualization. - * - * @author Tom Nelson - */ -public class AnnotatingGraphMousePlugin extends AbstractGraphMousePlugin - implements MouseListener, MouseMotionListener { - - /** - * additional modifiers for the action of adding to an existing - * selection - */ - protected int additionalModifiers; - - /** - * used to draw a Shape annotation - */ - protected RectangularShape rectangularShape = new Rectangle2D.Float(); - - /** - * the Paintable for the Shape annotation - */ - protected Paintable lensPaintable; - - /** - * a Paintable to store all Annotations - */ - protected AnnotationManager annotationManager; - - /** - * color for annotations - */ - protected Color annotationColor = Color.cyan; - - /** - * layer for annotations - */ - protected Annotation.Layer layer = Annotation.Layer.LOWER; - - protected boolean fill; - - /** - * holds rendering transforms - */ - protected MultiLayerTransformer basicTransformer; - - /** - * holds rendering settings - */ - protected RenderContext rc; - - /** - * set to true when the AnnotationPaintable has been - * added to the view component - */ - protected boolean added = false; - - /** - * create an instance with default settings - */ - public AnnotatingGraphMousePlugin(RenderContext rc) { - this(rc, InputEvent.BUTTON1_MASK, - InputEvent.BUTTON1_MASK | InputEvent.SHIFT_MASK); - } - - /** - * create an instance with overides - * @param selectionModifiers for primary selection - * @param additionalModifiers for additional selection - */ - public AnnotatingGraphMousePlugin(RenderContext rc, - int selectionModifiers, int additionalModifiers) { - super(selectionModifiers); - this.rc = rc; - this.basicTransformer = rc.getMultiLayerTransformer(); - this.additionalModifiers = additionalModifiers; - this.lensPaintable = new LensPaintable(); -// this.annotationPaintable = new AnnotationPaintable(rc); - this.annotationManager = new AnnotationManager(rc); - this.cursor = Cursor.getPredefinedCursor(Cursor.HAND_CURSOR); - } - - /** - * @return Returns the lensColor. - */ - public Color getAnnotationColor() { - return annotationColor; - } - - /** - * @param lensColor The lensColor to set. 
- */ - public void setAnnotationColor(Color lensColor) { - this.annotationColor = lensColor; - } - - /** - * the Paintable that draws a Shape annotation - * only while it is being created - * - */ - class LensPaintable implements Paintable { - - public void paint(Graphics g) { - Color oldColor = g.getColor(); - g.setColor(annotationColor); - ((Graphics2D)g).draw(rectangularShape); - g.setColor(oldColor); - } - - public boolean useTransform() { - return false; - } - } - - /** - * Sets the location for an Annotation. - * Will either pop up a dialog to prompt for text - * input for a text annotation, or begin the process - * of drawing a Shape annotation - * - * @param e the event - */ - @SuppressWarnings("unchecked") - public void mousePressed(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - down = e.getPoint(); - - if(added == false) { - vv.addPreRenderPaintable(annotationManager.getLowerAnnotationPaintable()); - vv.addPostRenderPaintable(annotationManager.getUpperAnnotationPaintable()); - added = true; - } - - - if(e.isPopupTrigger()) { - String annotationString = JOptionPane.showInputDialog(vv,"Annotation:"); - if(annotationString != null && annotationString.length() > 0) { - Point2D p = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(down); - Annotation annotation = - new Annotation(annotationString, layer, annotationColor, fill, p); - annotationManager.add(layer, annotation); - } - } else if(e.getModifiers() == additionalModifiers) { - Annotation annotation = annotationManager.getAnnotation(down); - annotationManager.remove(annotation); - } else if(e.getModifiers() == modifiers) { - rectangularShape.setFrameFromDiagonal(down,down); - vv.addPostRenderPaintable(lensPaintable); - } - vv.repaint(); - } - - /** - * Completes the process of adding a Shape annotation - * and removed the transient paintable - * - */ - @SuppressWarnings("unchecked") - public void mouseReleased(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - if(e.isPopupTrigger()) { - String annotationString = JOptionPane.showInputDialog(vv,"Annotation:"); - if(annotationString != null && annotationString.length() > 0) { - Point2D p = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(down); - Annotation annotation = - new Annotation(annotationString, layer, annotationColor, fill, p); - annotationManager.add(layer, annotation); - } - } else if(e.getModifiers() == modifiers) { - if(down != null) { - Point2D out = e.getPoint(); - RectangularShape arect = (RectangularShape)rectangularShape.clone(); - arect.setFrameFromDiagonal(down,out); - Shape s = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(arect); - Annotation annotation = - new Annotation(s, layer, annotationColor, fill, out); - annotationManager.add(layer, annotation); - } - } - down = null; - vv.removePostRenderPaintable(lensPaintable); - vv.repaint(); - } - - /** - * Draws the transient Paintable that will become - * a Shape annotation when the mouse button is - * released - * - */ - @SuppressWarnings("unchecked") - public void mouseDragged(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - - Point2D out = e.getPoint(); - if(e.getModifiers() == additionalModifiers) { - rectangularShape.setFrameFromDiagonal(down,out); - - } else if(e.getModifiers() == modifiers) { - rectangularShape.setFrameFromDiagonal(down,out); - - } - rectangularShape.setFrameFromDiagonal(down,out); - vv.repaint(); - } - - public void mouseClicked(MouseEvent 
e) { - } - - public void mouseEntered(MouseEvent e) { - JComponent c = (JComponent)e.getSource(); - c.setCursor(cursor); - } - - public void mouseExited(MouseEvent e) { - JComponent c = (JComponent)e.getSource(); - c.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); - } - - public void mouseMoved(MouseEvent e) { - } - - /** - * @return the rect - */ - public RectangularShape getRectangularShape() { - return rectangularShape; - } - - /** - * @param rect the rect to set - */ - public void setRectangularShape(RectangularShape rect) { - this.rectangularShape = rect; - } - - /** - * @return the layer - */ - public Annotation.Layer getLayer() { - return layer; - } - - /** - * @param layer the layer to set - */ - public void setLayer(Annotation.Layer layer) { - this.layer = layer; - } - - /** - * @return the fill - */ - public boolean isFill() { - return fill; - } - - /** - * @param fill the fill to set - */ - public void setFill(boolean fill) { - this.fill = fill; - } - - } diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotatingModalGraphMouse.java b/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotatingModalGraphMouse.java deleted file mode 100644 index 17eb0b1a..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotatingModalGraphMouse.java +++ /dev/null @@ -1,264 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - */ -package edu.uci.ics.jung.visualization.annotations; - -import java.awt.Component; -import java.awt.Cursor; -import java.awt.Dimension; -import java.awt.ItemSelectable; -import java.awt.event.InputEvent; -import java.awt.event.ItemEvent; -import java.awt.event.ItemListener; -import java.awt.event.KeyAdapter; -import java.awt.event.KeyEvent; - -import javax.swing.ButtonGroup; -import javax.swing.Icon; -import javax.swing.JComboBox; -import javax.swing.JMenu; -import javax.swing.JRadioButtonMenuItem; -import javax.swing.plaf.basic.BasicIconFactory; - -import edu.uci.ics.jung.visualization.MultiLayerTransformer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.control.AbstractModalGraphMouse; -import edu.uci.ics.jung.visualization.control.AnimatedPickingGraphMousePlugin; -import edu.uci.ics.jung.visualization.control.CrossoverScalingControl; -import edu.uci.ics.jung.visualization.control.ModalGraphMouse; -import edu.uci.ics.jung.visualization.control.PickingGraphMousePlugin; -import edu.uci.ics.jung.visualization.control.RotatingGraphMousePlugin; -import edu.uci.ics.jung.visualization.control.ScalingGraphMousePlugin; -import edu.uci.ics.jung.visualization.control.ShearingGraphMousePlugin; -import edu.uci.ics.jung.visualization.control.TranslatingGraphMousePlugin; - -/** - * a graph mouse that supplies an annotations mode - * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param - * @param - */ -public class AnnotatingModalGraphMouse extends AbstractModalGraphMouse - implements ModalGraphMouse, ItemSelectable { - - protected AnnotatingGraphMousePlugin annotatingPlugin; - protected MultiLayerTransformer basicTransformer; - protected RenderContext rc; - - /** - * create an instance with default values - * - */ - public AnnotatingModalGraphMouse(RenderContext rc, - AnnotatingGraphMousePlugin annotatingPlugin) { - this(rc, 
annotatingPlugin, 1.1f, 1/1.1f); - } - - /** - * create an instance with passed values - * @param in override value for scale in - * @param out override value for scale out - */ - public AnnotatingModalGraphMouse(RenderContext rc, - AnnotatingGraphMousePlugin annotatingPlugin, - float in, float out) { - super(in,out); - this.rc = rc; - this.basicTransformer = rc.getMultiLayerTransformer(); - this.annotatingPlugin = annotatingPlugin; - loadPlugins(); - setModeKeyListener(new ModeKeyAdapter(this)); - } - - /** - * create the plugins, and load the plugins for TRANSFORMING mode - * - */ - @Override - protected void loadPlugins() { - this.pickingPlugin = new PickingGraphMousePlugin(); - this.animatedPickingPlugin = new AnimatedPickingGraphMousePlugin(); - this.translatingPlugin = new TranslatingGraphMousePlugin(InputEvent.BUTTON1_MASK); - this.scalingPlugin = new ScalingGraphMousePlugin(new CrossoverScalingControl(), 0, in, out); - this.rotatingPlugin = new RotatingGraphMousePlugin(); - this.shearingPlugin = new ShearingGraphMousePlugin(); - add(scalingPlugin); - setMode(Mode.TRANSFORMING); - } - - /** - * setter for the Mode. - */ - @Override - public void setMode(Mode mode) { - if(this.mode != mode) { - fireItemStateChanged(new ItemEvent(this, ItemEvent.ITEM_STATE_CHANGED, - this.mode, ItemEvent.DESELECTED)); - this.mode = mode; - if(mode == Mode.TRANSFORMING) { - setTransformingMode(); - } else if(mode == Mode.PICKING) { - setPickingMode(); - } else if(mode == Mode.ANNOTATING) { - setAnnotatingMode(); - } - if(modeBox != null) { - modeBox.setSelectedItem(mode); - } - fireItemStateChanged(new ItemEvent(this, ItemEvent.ITEM_STATE_CHANGED, mode, ItemEvent.SELECTED)); - } - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.control.ModalGraphMouse#setPickingMode() - */ - @Override - protected void setPickingMode() { - remove(translatingPlugin); - remove(rotatingPlugin); - remove(shearingPlugin); - remove(annotatingPlugin); - add(pickingPlugin); - add(animatedPickingPlugin); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.control.ModalGraphMouse#setTransformingMode() - */ - @Override - protected void setTransformingMode() { - remove(pickingPlugin); - remove(animatedPickingPlugin); - remove(annotatingPlugin); - add(translatingPlugin); - add(rotatingPlugin); - add(shearingPlugin); - } - - protected void setEditingMode() { - remove(pickingPlugin); - remove(animatedPickingPlugin); - remove(translatingPlugin); - remove(rotatingPlugin); - remove(shearingPlugin); - remove(annotatingPlugin); - } - - protected void setAnnotatingMode() { - remove(pickingPlugin); - remove(animatedPickingPlugin); - remove(translatingPlugin); - remove(rotatingPlugin); - remove(shearingPlugin); - add(annotatingPlugin); - } - - - /** - * @return Returns the modeBox. 
- */ - @Override - public JComboBox getModeComboBox() { - if(modeBox == null) { - modeBox = new JComboBox(new Mode[]{Mode.TRANSFORMING, Mode.PICKING, Mode.ANNOTATING}); - modeBox.addItemListener(getModeListener()); - } - modeBox.setSelectedItem(mode); - return modeBox; - } - - /** - * create (if necessary) and return a menu that will change - * the mode - * @return the menu - */ - @Override - public JMenu getModeMenu() { - if(modeMenu == null) { - modeMenu = new JMenu();// { - Icon icon = BasicIconFactory.getMenuArrowIcon(); - modeMenu.setIcon(BasicIconFactory.getMenuArrowIcon()); - modeMenu.setPreferredSize(new Dimension(icon.getIconWidth()+10, - icon.getIconHeight()+10)); - - final JRadioButtonMenuItem transformingButton = - new JRadioButtonMenuItem(Mode.TRANSFORMING.toString()); - transformingButton.addItemListener(new ItemListener() { - public void itemStateChanged(ItemEvent e) { - if(e.getStateChange() == ItemEvent.SELECTED) { - setMode(Mode.TRANSFORMING); - } - }}); - - final JRadioButtonMenuItem pickingButton = - new JRadioButtonMenuItem(Mode.PICKING.toString()); - pickingButton.addItemListener(new ItemListener() { - public void itemStateChanged(ItemEvent e) { - if(e.getStateChange() == ItemEvent.SELECTED) { - setMode(Mode.PICKING); - } - }}); - - ButtonGroup radio = new ButtonGroup(); - radio.add(transformingButton); - radio.add(pickingButton); - transformingButton.setSelected(true); - modeMenu.add(transformingButton); - modeMenu.add(pickingButton); - modeMenu.setToolTipText("Menu for setting Mouse Mode"); - addItemListener(new ItemListener() { - public void itemStateChanged(ItemEvent e) { - if(e.getStateChange() == ItemEvent.SELECTED) { - if(e.getItem() == Mode.TRANSFORMING) { - transformingButton.setSelected(true); - } else if(e.getItem() == Mode.PICKING) { - pickingButton.setSelected(true); - } - } - }}); - } - return modeMenu; - } - - public static class ModeKeyAdapter extends KeyAdapter { - private char t = 't'; - private char p = 'p'; - private char a = 'a'; - protected ModalGraphMouse graphMouse; - - public ModeKeyAdapter(ModalGraphMouse graphMouse) { - this.graphMouse = graphMouse; - } - - public ModeKeyAdapter(char t, char p, char a, ModalGraphMouse graphMouse) { - this.t = t; - this.p = p; - this.a = a; - this.graphMouse = graphMouse; - } - - @Override - public void keyTyped(KeyEvent event) { - char keyChar = event.getKeyChar(); - if(keyChar == t) { - ((Component)event.getSource()).setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); - graphMouse.setMode(Mode.TRANSFORMING); - } else if(keyChar == p) { - ((Component)event.getSource()).setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR)); - graphMouse.setMode(Mode.PICKING); - } else if(keyChar == a) { - ((Component)event.getSource()).setCursor(Cursor.getPredefinedCursor(Cursor.CROSSHAIR_CURSOR)); - graphMouse.setMode(Mode.ANNOTATING); - } - } - } -} - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/Annotation.java b/gui/jung-src/edu/uci/ics/jung/visualization/annotations/Annotation.java deleted file mode 100644 index 9796e4fc..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/Annotation.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * - */ -package edu.uci.ics.jung.visualization.annotations; - -import java.awt.Paint; -import java.awt.geom.Point2D; - -/** - * stores an annotation, either a shape or a string - * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param - */ -public class Annotation { - - protected T annotation; - protected Paint paint; - protected Point2D location; - protected Layer layer; - protected boolean fill; - public static enum Layer { LOWER, UPPER } - - - - public Annotation(T annotation, Layer layer, Paint paint, - boolean fill, Point2D location) { - this.annotation = annotation; - this.layer = layer; - this.paint = paint; - this.fill = fill; - this.location = location; - } - /** - * @return the annotation - */ - public T getAnnotation() { - return annotation; - } - /** - * @param annotation the annotation to set - */ - public void setAnnotation(T annotation) { - this.annotation = annotation; - } - /** - * @return the location - */ - public Point2D getLocation() { - return location; - } - /** - * @return the layer - */ - public Layer getLayer() { - return layer; - } - /** - * @param layer the layer to set - */ - public void setLayer(Layer layer) { - this.layer = layer; - } - /** - * @param location the location to set - */ - public void setLocation(Point2D location) { - this.location = location; - } - /** - * @return the paint - */ - public Paint getPaint() { - return paint; - } - /** - * @param paint the paint to set - */ - public void setPaint(Paint paint) { - this.paint = paint; - } - /** - * @return the fill - */ - public boolean isFill() { - return fill; - } - /** - * @param fill the fill to set - */ - public void setFill(boolean fill) { - this.fill = fill; - } - - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotationControls.java b/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotationControls.java deleted file mode 100644 index d5088029..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotationControls.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - */ -package edu.uci.ics.jung.visualization.annotations; - -import java.awt.Color; -import java.awt.Component; -import java.awt.Shape; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.awt.event.ItemEvent; -import java.awt.event.ItemListener; -import java.awt.geom.Ellipse2D; -import java.awt.geom.Rectangle2D; -import java.awt.geom.RectangularShape; -import java.awt.geom.RoundRectangle2D; - -import javax.swing.DefaultListCellRenderer; -import javax.swing.JButton; -import javax.swing.JColorChooser; -import javax.swing.JComboBox; -import javax.swing.JList; -import javax.swing.JToggleButton; -import javax.swing.JToolBar; - -/** - * a collection of controls for annotations. 
- * allows selection of colors, shapes, etc - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public class AnnotationControls { - - protected AnnotatingGraphMousePlugin annotatingPlugin; - - public AnnotationControls(AnnotatingGraphMousePlugin annotatingPlugin) { - this.annotatingPlugin = annotatingPlugin; - } - - @SuppressWarnings("serial") - public JComboBox getShapeBox() { - JComboBox shapeBox = new JComboBox( - new Shape[] { - new Rectangle2D.Double(), - new RoundRectangle2D.Double(0,0,0,0,50,50), - new Ellipse2D.Double() - }); - shapeBox.setRenderer(new DefaultListCellRenderer() { - @Override - public Component getListCellRendererComponent(JList list, Object value, - int index, boolean isSelected, boolean hasFocus) { - String valueString = value.toString(); - valueString = valueString.substring(0,valueString.indexOf("2D")); - valueString = valueString.substring(valueString.lastIndexOf('.')+1); - return super.getListCellRendererComponent(list, valueString, index, - isSelected, hasFocus); - } - }); - shapeBox.addItemListener(new ItemListener() { - - public void itemStateChanged(ItemEvent e) { - if(e.getStateChange() == ItemEvent.SELECTED) { - annotatingPlugin.setRectangularShape((RectangularShape)e.getItem()); - } - - }}); - return shapeBox; - } - - public JButton getColorChooserButton() { - final JButton colorChooser = new JButton("Color"); - colorChooser.setForeground(annotatingPlugin.getAnnotationColor()); - colorChooser.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - Color color = JColorChooser.showDialog(colorChooser, "Annotation Color", - colorChooser.getForeground()); - annotatingPlugin.setAnnotationColor(color); - colorChooser.setForeground(color); - }}); - return colorChooser; - } - - public JComboBox getLayerBox() { - final JComboBox layerBox = new JComboBox( - new Annotation.Layer[] { - Annotation.Layer.LOWER, Annotation.Layer.UPPER - }); - layerBox.addItemListener(new ItemListener() { - - public void itemStateChanged(ItemEvent e) { - if(e.getStateChange() == ItemEvent.SELECTED) { - annotatingPlugin.setLayer((Annotation.Layer)e.getItem()); - } - - }}); - - return layerBox; - } - - public JToggleButton getFillButton() { - JToggleButton fillButton = new JToggleButton("Fill"); - fillButton.addItemListener(new ItemListener() { - - public void itemStateChanged(ItemEvent e) { - annotatingPlugin.setFill(e.getStateChange() == ItemEvent.SELECTED); - - }}); - return fillButton; - } - - public JToolBar getAnnotationsToolBar() { - JToolBar toolBar = new JToolBar(); - toolBar.add(this.getShapeBox()); - toolBar.add(this.getColorChooserButton()); - toolBar.add(this.getFillButton()); - toolBar.add(this.getLayerBox()); - return toolBar; - - } - - - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotationManager.java b/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotationManager.java deleted file mode 100644 index d033902e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotationManager.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
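The annotation classes above are designed to be used together: AnnotatingGraphMousePlugin creates annotations from mouse gestures, AnnotatingModalGraphMouse adds an ANNOTATING mode, and AnnotationControls builds the matching toolbar. A hedged sketch of that wiring, reusing the hypothetical `vv` viewer from the earlier sketch (imports omitted):

    // Illustrative fragment; `vv` is the hypothetical viewer from the earlier sketch.
    RenderContext<String, Integer> rc = vv.getRenderContext();
    AnnotatingGraphMousePlugin<String, Integer> annotatingPlugin =
        new AnnotatingGraphMousePlugin<String, Integer>(rc);
    AnnotatingModalGraphMouse<String, Integer> graphMouse =
        new AnnotatingModalGraphMouse<String, Integer>(rc, annotatingPlugin);
    vv.setGraphMouse(graphMouse);
    vv.addKeyListener(graphMouse.getModeKeyListener()); // 't'/'p'/'a' switch modes

    // AnnotationControls supplies Swing controls for shape, color, fill and layer.
    AnnotationControls<String, Integer> controls =
        new AnnotationControls<String, Integer>(annotatingPlugin);
    JToolBar annotationToolBar = controls.getAnnotationsToolBar();

Once the graph mouse is in ANNOTATING mode, dragging draws a shape annotation and the popup trigger prompts for a text annotation, as implemented in AnnotatingGraphMousePlugin above.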
- * - * - */ -package edu.uci.ics.jung.visualization.annotations; - -import java.awt.Component; -import java.awt.Dimension; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.transform.AffineTransformer; -import edu.uci.ics.jung.visualization.transform.LensTransformer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * handles the selection of annotations, and the support for the - * tools to draw them at specific layers. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public class AnnotationManager { - - protected AnnotationRenderer annotationRenderer = new AnnotationRenderer(); - protected AnnotationPaintable lowerAnnotationPaintable; - protected AnnotationPaintable upperAnnotationPaintable; - - protected RenderContext rc; - protected AffineTransformer transformer; - - public AnnotationManager(RenderContext rc) { - this.rc = rc; - this.lowerAnnotationPaintable = new AnnotationPaintable(rc, annotationRenderer); - this.upperAnnotationPaintable = new AnnotationPaintable(rc, annotationRenderer); - - MutableTransformer mt = rc.getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - if(mt instanceof AffineTransformer) { - transformer = (AffineTransformer)mt; - } else if(mt instanceof LensTransformer) { - transformer = (AffineTransformer)((LensTransformer)mt).getDelegate(); - } - - } - - public AnnotationPaintable getAnnotationPaintable(Annotation.Layer layer) { - if(layer == Annotation.Layer.LOWER) { - return this.lowerAnnotationPaintable; - } - if(layer == Annotation.Layer.UPPER) { - return this.upperAnnotationPaintable; - } - return null; - } - - public void add(Annotation.Layer layer, Annotation annotation) { - if(layer == Annotation.Layer.LOWER) { - this.lowerAnnotationPaintable.add(annotation); - } - if(layer == Annotation.Layer.UPPER) { - this.upperAnnotationPaintable.add(annotation); - } - } - - public void remove(Annotation annotation) { - this.lowerAnnotationPaintable.remove(annotation); - this.upperAnnotationPaintable.remove(annotation); - } - - protected AnnotationPaintable getLowerAnnotationPaintable() { - return lowerAnnotationPaintable; - } - - protected AnnotationPaintable getUpperAnnotationPaintable() { - return upperAnnotationPaintable; - } - - @SuppressWarnings("unchecked") - public Annotation getAnnotation(Point2D p) { - Set annotations = new HashSet(lowerAnnotationPaintable.getAnnotations()); - annotations.addAll(upperAnnotationPaintable.getAnnotations()); - return getAnnotation(p, annotations); - } - - @SuppressWarnings("unchecked") - public Annotation getAnnotation(Point2D p, Collection annotations) { - double closestDistance = Double.MAX_VALUE; - Annotation closestAnnotation = null; - for(Annotation annotation : annotations) { - Object ann = annotation.getAnnotation(); - if(ann instanceof Shape) { - Point2D ip = rc.getMultiLayerTransformer().inverseTransform(p); - Shape shape = (Shape)ann; - if(shape.contains(ip)) { - - Rectangle2D shapeBounds = shape.getBounds2D(); - Point2D shapeCenter = new Point2D.Double(shapeBounds.getCenterX(), shapeBounds.getCenterY()); - double distanceSq = shapeCenter.distanceSq(ip); - if(distanceSq < closestDistance) { - closestDistance = distanceSq; - closestAnnotation = annotation; - } - } - } else 
if(ann instanceof String) { - - Point2D ip = rc.getMultiLayerTransformer().inverseTransform(Layer.VIEW, p); - Point2D ap = annotation.getLocation(); - String label = (String)ann; - Component component = prepareRenderer(rc, annotationRenderer, label); - - AffineTransform base = new AffineTransform(transformer.getTransform()); - double rotation = transformer.getRotation(); - // unrotate the annotation - AffineTransform unrotate = - AffineTransform.getRotateInstance(-rotation, ap.getX(), ap.getY()); - base.concatenate(unrotate); - - Dimension d = component.getPreferredSize(); - Rectangle2D componentBounds = new Rectangle2D.Double(ap.getX(), ap.getY(), d.width, d.height); - - Shape componentBoundsShape = base.createTransformedShape(componentBounds); - Point2D componentCenter = new Point2D.Double(componentBoundsShape.getBounds().getCenterX(), - componentBoundsShape.getBounds().getCenterY()); - if(componentBoundsShape.contains(ip)) { - double distanceSq = componentCenter.distanceSq(ip); - if(distanceSq < closestDistance) { - closestDistance = distanceSq; - closestAnnotation = annotation; - } - } - - } - } - return closestAnnotation; - } - - public Component prepareRenderer(RenderContext rc, AnnotationRenderer annotationRenderer, Object value) { - return annotationRenderer.getAnnotationRendererComponent(rc.getScreenDevice(), value); - } - - - - - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotationPaintable.java b/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotationPaintable.java deleted file mode 100644 index a37fcc7a..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotationPaintable.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
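AnnotationManager, shown above, can also be driven programmatically rather than through the mouse plugin. A minimal sketch, again assuming the hypothetical `vv` viewer from the earlier fragments; the annotation text, color and location are invented for illustration:

    // Illustrative fragment; `vv` is the hypothetical viewer from the earlier sketch.
    AnnotationManager manager = new AnnotationManager(vv.getRenderContext());

    // A text annotation; the parameters mirror what the mouse plugin passes
    // when the user creates one interactively.
    Annotation<String> note = new Annotation<String>(
        "interesting region", Annotation.Layer.UPPER, Color.red, false,
        new Point2D.Double(120, 80));
    manager.add(Annotation.Layer.UPPER, note);

    // The paintables must be installed on the viewer before annotations are drawn.
    vv.addPreRenderPaintable(manager.getAnnotationPaintable(Annotation.Layer.LOWER));
    vv.addPostRenderPaintable(manager.getAnnotationPaintable(Annotation.Layer.UPPER));
    vv.repaint();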
- * - * - */ -package edu.uci.ics.jung.visualization.annotations; - -import java.awt.Color; -import java.awt.Component; -import java.awt.Dimension; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.Paint; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; - -import javax.swing.JComponent; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.VisualizationServer.Paintable; -import edu.uci.ics.jung.visualization.transform.AffineTransformer; -import edu.uci.ics.jung.visualization.transform.LensTransformer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * handles the actual drawing of annotations - * - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public class AnnotationPaintable implements Paintable { - - @SuppressWarnings("unchecked") - protected Set annotations = new HashSet(); - protected AnnotationRenderer annotationRenderer; - - protected RenderContext rc; - protected AffineTransformer transformer; - - public AnnotationPaintable(RenderContext rc, AnnotationRenderer annotationRenderer) { - this.rc = rc; - this.annotationRenderer = annotationRenderer; - MutableTransformer mt = rc.getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - if(mt instanceof AffineTransformer) { - transformer = (AffineTransformer)mt; - } else if(mt instanceof LensTransformer) { - transformer = (AffineTransformer)((LensTransformer)mt).getDelegate(); - } - } - - @SuppressWarnings("unchecked") - public void add(Annotation annotation) { - annotations.add(annotation); - } - - @SuppressWarnings("unchecked") - public void remove(Annotation annotation) { - annotations.remove(annotation); - } - - /** - * @return the annotations - */ - @SuppressWarnings("unchecked") - public Set getAnnotations() { - return Collections.unmodifiableSet(annotations); - } - - @SuppressWarnings("unchecked") - public void paint(Graphics g) { - Graphics2D g2d = (Graphics2D)g; - Color oldColor = g.getColor(); - for(Annotation annotation : annotations) { - Object ann = annotation.getAnnotation(); - if(ann instanceof Shape) { - Shape shape = (Shape)ann; - Paint paint = annotation.getPaint(); - Shape s = transformer.transform(shape); - g2d.setPaint(paint); - if(annotation.isFill()) { - g2d.fill(s); - } else { - g2d.draw(s); - } - } else if(ann instanceof String) { - Point2D p = annotation.getLocation(); - String label = (String)ann; - Component component = prepareRenderer(rc, annotationRenderer, label); - component.setForeground((Color)annotation.getPaint()); - if(annotation.isFill()) { - ((JComponent)component).setOpaque(true); - component.setBackground((Color)annotation.getPaint()); - component.setForeground(Color.black); - } - Dimension d = component.getPreferredSize(); - AffineTransform old = g2d.getTransform(); - AffineTransform base = new AffineTransform(old); - AffineTransform xform = transformer.getTransform(); - - double rotation = transformer.getRotation(); - // unrotate the annotation - AffineTransform unrotate = - AffineTransform.getRotateInstance(-rotation, p.getX(), p.getY()); - base.concatenate(xform); - base.concatenate(unrotate); - g2d.setTransform(base); - rc.getRendererPane().paintComponent(g, component, rc.getScreenDevice(), - (int)p.getX(), (int)p.getY(), - d.width, d.height, true); - g2d.setTransform(old); - } - } - g.setColor(oldColor); - } - - public 
Component prepareRenderer(RenderContext rc, AnnotationRenderer annotationRenderer, Object value) { - return annotationRenderer.getAnnotationRendererComponent(rc.getScreenDevice(), value); - } - - public boolean useTransform() { - return true; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotationRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotationRenderer.java deleted file mode 100644 index a82a3560..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/AnnotationRenderer.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - */ - -package edu.uci.ics.jung.visualization.annotations; - -import java.awt.Color; -import java.awt.Component; -import java.awt.Rectangle; -import java.io.Serializable; - -import javax.swing.JComponent; -import javax.swing.JLabel; -import javax.swing.border.Border; -import javax.swing.border.EmptyBorder; - -/** - * AnnotationRenderer is similar to the cell renderers - * used by the JTable and JTree jfc classes. - * - * @author Tom Nelson - * - * - */ -@SuppressWarnings("serial") -public class AnnotationRenderer extends JLabel implements - Serializable { - - protected static Border noFocusBorder = new EmptyBorder(0,0,0,0); - - /** - * Creates a default table cell renderer. - */ - public AnnotationRenderer() { - setOpaque(true); - setBorder(noFocusBorder); - } - - /** - * Overrides JComponent.setForeground to assign - * the unselected-foreground color to the specified color. - * - * @param c set the foreground color to this value - */ - @Override - public void setForeground(Color c) { - super.setForeground(c); - } - - /** - * Overrides JComponent.setBackground to assign - * the unselected-background color to the specified color. - * - * @param c set the background color to this value - */ - @Override - public void setBackground(Color c) { - super.setBackground(c); - } - - /** - * Notification from the UIManager that the look and feel - * [L&F] has changed. - * Replaces the current UI object with the latest version from the - * UIManager. - * - * @see JComponent#updateUI - */ - @Override - public void updateUI() { - super.updateUI(); - setForeground(null); - setBackground(null); - } - - /** - * - * Returns the default label renderer for an Edge - * - * @param vv the VisualizationViewer to render on - * @param value the value to assign to the label for - * Edge - * @param edge the Edge - * @return the default label renderer - */ - public Component getAnnotationRendererComponent(JComponent vv, Object value) { - - super.setForeground(vv.getForeground()); - super.setBackground(vv.getBackground()); - -// if(font != null) { -// setFont(font); -// } else { - setFont(vv.getFont()); -// } - setIcon(null); - setBorder(noFocusBorder); - setValue(value); - return this; - } - - /* - * The following methods are overridden as a performance measure to - * to prune code-paths are often called in the case of renders - * but which we know are unnecessary. Great care should be taken - * when writing your own renderer to weigh the benefits and - * drawbacks of overriding methods like these. - */ - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. 
- */ - @Override - public boolean isOpaque() { - Color back = getBackground(); - Component p = getParent(); - if (p != null) { - p = p.getParent(); - } - boolean colorMatch = (back != null) && (p != null) && - back.equals(p.getBackground()) && - p.isOpaque(); - return !colorMatch && super.isOpaque(); - } - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void validate() {} - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void revalidate() {} - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void repaint(long tm, int x, int y, int width, int height) {} - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void repaint(Rectangle r) { } - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - protected void firePropertyChange(String propertyName, Object oldValue, Object newValue) { - // Strings get interned... - if (propertyName=="text") { - super.firePropertyChange(propertyName, oldValue, newValue); - } - } - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void firePropertyChange(String propertyName, boolean oldValue, boolean newValue) { } - - /** - * Sets the String object for the cell being rendered to - * value. - * - * @param value the string value for this cell; if value is - * null it sets the text value to an empty string - * @see JLabel#setText - * - */ - protected void setValue(Object value) { - setText((value == null) ? "" : value.toString()); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/package.html b/gui/jung-src/edu/uci/ics/jung/visualization/annotations/package.html deleted file mode 100644 index 5430eb2a..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/annotations/package.html +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - -
          Classes which support creating visual annotations for graphs. - - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/AbsoluteCrossoverScalingControl.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/AbsoluteCrossoverScalingControl.java deleted file mode 100644 index b2dd2cbc..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/AbsoluteCrossoverScalingControl.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * scales to the absolute value passed as an argument. - * It first resets the scaling transformers, then uses - * the relative CrossoverScalingControl to achieve the - * absolute value. - * - * @author Tom Nelson - * - */ -public class AbsoluteCrossoverScalingControl extends CrossoverScalingControl - implements ScalingControl { - - /** - * scale to the absolute value passed as 'amount'. - * - */ - public void scale(VisualizationViewer vv, float amount, Point2D at) { - MutableTransformer layoutTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - MutableTransformer viewTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW); - double modelScale = layoutTransformer.getScale(); - double viewScale = viewTransformer.getScale(); - double inverseModelScale = Math.sqrt(crossover)/modelScale; - double inverseViewScale = Math.sqrt(crossover)/viewScale; - - Point2D transformedAt = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.VIEW, at); - - // return the transformers to 1.0 - layoutTransformer.scale(inverseModelScale, inverseModelScale, transformedAt); - viewTransformer.scale(inverseViewScale, inverseViewScale, at); - - super.scale(vv, amount, at); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/AbstractGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/AbstractGraphMousePlugin.java deleted file mode 100644 index d6513a58..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/AbstractGraphMousePlugin.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 6, 2005 - */ - -package edu.uci.ics.jung.visualization.control; - -import java.awt.Cursor; -import java.awt.Point; -import java.awt.event.MouseEvent; - -/** - * a base class for GraphMousePlugin instances. 
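The two scaling controls above differ in how the zoom amount is interpreted: CrossoverScalingControl (which AbsoluteCrossoverScalingControl extends) scales relative to the current zoom, while the absolute variant first resets both transformers and then applies the requested factor outright. A hedged usage fragment, again assuming the hypothetical `vv` viewer:

    // Relative zoom: multiply the current scale by 1.1 around the view center.
    CrossoverScalingControl relative = new CrossoverScalingControl();
    relative.scale(vv, 1.1f, vv.getCenter());

    // Absolute zoom: reset the transformers, then scale to exactly 2x.
    // Note the absolute overload takes a VisualizationViewer, so keep the concrete type.
    AbsoluteCrossoverScalingControl absolute = new AbsoluteCrossoverScalingControl();
    absolute.scale(vv, 2.0f, vv.getCenter());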
Holds some members - * common to all GraphMousePlugins - * @author thomasnelson - * - */ -public abstract class AbstractGraphMousePlugin implements GraphMousePlugin { - - /** - * modifiers to compare against mouse event modifiers - */ - protected int modifiers; - - /** - * the location in the View where the mouse was pressed - */ - protected Point down; - - /** - * the special cursor that plugins may display - */ - protected Cursor cursor; - - /** - * create an instance with passed values - * @param modifiers - */ - public AbstractGraphMousePlugin(int modifiers) { - this.modifiers = modifiers; - } - - /** - * getter for mouse modifiers - */ - public int getModifiers() { - return modifiers; - } - - /** - * setter for mouse modifiers - */ - public void setModifiers(int modifiers) { - this.modifiers = modifiers; - } - - /** - * check the mouse event modifiers against the - * instance member modifiers. Default implementation - * test with a mask, and accepts both normal modifiers - * and extended modifiers. Override for more control. - */ - public boolean checkModifiers(MouseEvent e) { - return((e.getModifiersEx() | e.getModifiers()) & modifiers) == modifiers; - } - - /** - * @return Returns the cursor. - */ - public Cursor getCursor() { - return cursor; - } - - /** - * @param cursor The cursor to set. - */ - public void setCursor(Cursor cursor) { - this.cursor = cursor; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/AbstractModalGraphMouse.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/AbstractModalGraphMouse.java deleted file mode 100644 index b295d87e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/AbstractModalGraphMouse.java +++ /dev/null @@ -1,305 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.Dimension; -import java.awt.ItemSelectable; -import java.awt.event.ItemEvent; -import java.awt.event.ItemListener; -import java.awt.event.KeyListener; - -import javax.swing.ButtonGroup; -import javax.swing.Icon; -import javax.swing.JComboBox; -import javax.swing.JMenu; -import javax.swing.JRadioButtonMenuItem; -import javax.swing.event.EventListenerList; -import javax.swing.plaf.basic.BasicIconFactory; - - -/** - * - * AbstractModalGraphMouse is a PluggableGraphMouse class that - * manages a collection of plugins for picking and - * transforming the graph. Additionally, it carries the notion - * of a Mode: Picking or Translating. Switching between modes - * allows for a more natural choice of mouse modifiers to - * be used for the various plugins. The default modifiers are - * intended to mimick those of mainstream software applications - * in order to be intuitive to users. - * - * To change between modes, two different controls are offered, - * a combo box and a menu system. These controls are lazily created - * in their respective 'getter' methods so they don't impact - * code that does not intend to use them. - * The menu control can be placed in an unused corner of the - * GraphZoomScrollPane, which is a common location for mouse - * mode selection menus in mainstream applications. - * - * Users must implement the loadPlugins() method to create and - * install the GraphMousePlugins. 
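AbstractGraphMousePlugin, shown just above, only stores the modifier mask, the press location and an optional cursor; concrete behavior comes from subclasses. A hypothetical subclass, sketched purely to illustrate the base-class contract (it is not part of JUNG or this repository; imports omitted):

    public class LoggingGraphMousePlugin extends AbstractGraphMousePlugin
            implements MouseListener {

        public LoggingGraphMousePlugin() {
            super(InputEvent.BUTTON1_MASK);        // react to plain left-button presses
        }

        public void mousePressed(MouseEvent e) {
            if (checkModifiers(e)) {               // inherited modifier-mask test
                down = e.getPoint();               // inherited press location
                System.out.println("pressed at " + down);
            }
        }

        public void mouseReleased(MouseEvent e) { down = null; }
        public void mouseClicked(MouseEvent e) {}
        public void mouseEntered(MouseEvent e) {}
        public void mouseExited(MouseEvent e) {}
    }

Such a plugin would be installed with PluggableGraphMouse.add(plugin), the same mechanism the modal graph mice use in their loadPlugins() implementations.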
The order of the plugins is - * important, as they are evaluated against the mask parameters - * in the order that they are added. - * - * @author Tom Nelson - */ -public abstract class AbstractModalGraphMouse extends PluggableGraphMouse - implements ModalGraphMouse, ItemSelectable { - - /** - * used by the scaling plugins for zoom in - */ - protected float in; - /** - * used by the scaling plugins for zoom out - */ - protected float out; - /** - * a listener for mode changes - */ - protected ItemListener modeListener; - /** - * a JComboBox control available to set the mode - */ - protected JComboBox modeBox; - /** - * a menu available to set the mode - */ - protected JMenu modeMenu; - /** - * the current mode - */ - protected Mode mode; - /** - * listeners for mode changes - */ - protected EventListenerList listenerList = new EventListenerList(); - - protected GraphMousePlugin pickingPlugin; - protected GraphMousePlugin translatingPlugin; - protected GraphMousePlugin animatedPickingPlugin; - protected GraphMousePlugin scalingPlugin; - protected GraphMousePlugin rotatingPlugin; - protected GraphMousePlugin shearingPlugin; - protected KeyListener modeKeyListener; - - - protected AbstractModalGraphMouse(float in, float out) { - this.in = in; - this.out = out; - } - - /** - * create the plugins, and load the plugins for TRANSFORMING mode - * - */ - protected abstract void loadPlugins(); - - /** - * setter for the Mode. - */ - public void setMode(Mode mode) { - if(this.mode != mode) { - fireItemStateChanged(new ItemEvent(this, ItemEvent.ITEM_STATE_CHANGED, - this.mode, ItemEvent.DESELECTED)); - this.mode = mode; - if(mode == Mode.TRANSFORMING) { - setTransformingMode(); - } else if(mode == Mode.PICKING) { - setPickingMode(); - } - if(modeBox != null) { - modeBox.setSelectedItem(mode); - } - fireItemStateChanged(new ItemEvent(this, ItemEvent.ITEM_STATE_CHANGED, mode, ItemEvent.SELECTED)); - } - } - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.control.ModalGraphMouse#setPickingMode() - */ - protected void setPickingMode() { - remove(translatingPlugin); - remove(rotatingPlugin); - remove(shearingPlugin); - add(pickingPlugin); - add(animatedPickingPlugin); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.control.ModalGraphMouse#setTransformingMode() - */ - protected void setTransformingMode() { - remove(pickingPlugin); - remove(animatedPickingPlugin); - add(translatingPlugin); - add(rotatingPlugin); - add(shearingPlugin); - } - - /** - * @param zoomAtMouse The zoomAtMouse to set. - */ - public void setZoomAtMouse(boolean zoomAtMouse) { - ((ScalingGraphMousePlugin) scalingPlugin).setZoomAtMouse(zoomAtMouse); - } - - /** - * listener to set the mode from an external event source - */ - class ModeListener implements ItemListener { - public void itemStateChanged(ItemEvent e) { - setMode((Mode) e.getItem()); - } - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.control.ModalGraphMouse#getModeListener() - */ - public ItemListener getModeListener() { - if (modeListener == null) { - modeListener = new ModeListener(); - } - return modeListener; - } - - /** - * @return the modeKeyListener - */ - public KeyListener getModeKeyListener() { - return modeKeyListener; - } - - /** - * @param modeKeyListener the modeKeyListener to set - */ - public void setModeKeyListener(KeyListener modeKeyListener) { - this.modeKeyListener = modeKeyListener; - } - - /** - * @return Returns the modeBox. 
- */ - public JComboBox getModeComboBox() { - if(modeBox == null) { - modeBox = new JComboBox(new Mode[]{Mode.TRANSFORMING, Mode.PICKING}); - modeBox.addItemListener(getModeListener()); - } - modeBox.setSelectedItem(mode); - return modeBox; - } - - /** - * create (if necessary) and return a menu that will change - * the mode - * @return the menu - */ - public JMenu getModeMenu() { - if(modeMenu == null) { - modeMenu = new JMenu();// { - Icon icon = BasicIconFactory.getMenuArrowIcon(); - modeMenu.setIcon(BasicIconFactory.getMenuArrowIcon()); - modeMenu.setPreferredSize(new Dimension(icon.getIconWidth()+10, - icon.getIconHeight()+10)); - - final JRadioButtonMenuItem transformingButton = - new JRadioButtonMenuItem(Mode.TRANSFORMING.toString()); - transformingButton.addItemListener(new ItemListener() { - public void itemStateChanged(ItemEvent e) { - if(e.getStateChange() == ItemEvent.SELECTED) { - setMode(Mode.TRANSFORMING); - } - }}); - - final JRadioButtonMenuItem pickingButton = - new JRadioButtonMenuItem(Mode.PICKING.toString()); - pickingButton.addItemListener(new ItemListener() { - public void itemStateChanged(ItemEvent e) { - if(e.getStateChange() == ItemEvent.SELECTED) { - setMode(Mode.PICKING); - } - }}); - ButtonGroup radio = new ButtonGroup(); - radio.add(transformingButton); - radio.add(pickingButton); - transformingButton.setSelected(true); - modeMenu.add(transformingButton); - modeMenu.add(pickingButton); - modeMenu.setToolTipText("Menu for setting Mouse Mode"); - addItemListener(new ItemListener() { - public void itemStateChanged(ItemEvent e) { - if(e.getStateChange() == ItemEvent.SELECTED) { - if(e.getItem() == Mode.TRANSFORMING) { - transformingButton.setSelected(true); - } else if(e.getItem() == Mode.PICKING) { - pickingButton.setSelected(true); - } - } - }}); - } - return modeMenu; - } - - /** - * add a listener for mode changes - */ - public void addItemListener(ItemListener aListener) { - listenerList.add(ItemListener.class,aListener); - } - - /** - * remove a listener for mode changes - */ - public void removeItemListener(ItemListener aListener) { - listenerList.remove(ItemListener.class,aListener); - } - - /** - * Returns an array of all the ItemListeners added - * to this JComboBox with addItemListener(). - * - * @return all of the ItemListeners added or an empty - * array if no listeners have been added - * @since 1.4 - */ - public ItemListener[] getItemListeners() { - return listenerList.getListeners(ItemListener.class); - } - - public Object[] getSelectedObjects() { - if ( mode == null ) - return new Object[0]; - else { - Object result[] = new Object[1]; - result[0] = mode; - return result; - } - } - - /** - * Notifies all listeners that have registered interest for - * notification on this event type. 
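Mode handling in AbstractModalGraphMouse can be driven programmatically, from the lazily created Swing controls, or via the mode key listener; changes are broadcast as ItemEvents. An illustrative fragment, assuming `graphMouse` is a modal graph mouse already installed on `vv` and `controlPanel` is an assumed JPanel:

    graphMouse.setMode(ModalGraphMouse.Mode.PICKING);

    JComboBox modeBox = graphMouse.getModeComboBox();   // lazily created on first call
    controlPanel.add(modeBox);

    graphMouse.addItemListener(new ItemListener() {
        public void itemStateChanged(ItemEvent e) {
            if (e.getStateChange() == ItemEvent.SELECTED) {
                System.out.println("mouse mode is now " + e.getItem());
            }
        }
    });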
- * @param e the event of interest - * - * @see EventListenerList - */ - protected void fireItemStateChanged(ItemEvent e) { - // Guaranteed to return a non-null array - Object[] listeners = listenerList.getListenerList(); - // Process the listeners last to first, notifying - // those that are interested in this event - for ( int i = listeners.length-2; i>=0; i-=2 ) { - if ( listeners[i]==ItemListener.class ) { - ((ItemListener)listeners[i+1]).itemStateChanged(e); - } - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/AbstractPopupGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/AbstractPopupGraphMousePlugin.java deleted file mode 100644 index 08611225..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/AbstractPopupGraphMousePlugin.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; - -public abstract class AbstractPopupGraphMousePlugin extends AbstractGraphMousePlugin - implements MouseListener { - - public AbstractPopupGraphMousePlugin() { - this(MouseEvent.BUTTON3_MASK); - } - public AbstractPopupGraphMousePlugin(int modifiers) { - super(modifiers); - } - public void mousePressed(MouseEvent e) { - if(e.isPopupTrigger()) { - handlePopup(e); - e.consume(); - } - } - - /** - * if this is the popup trigger, process here, otherwise - * defer to the superclass - */ - public void mouseReleased(MouseEvent e) { - if(e.isPopupTrigger()) { - handlePopup(e); - e.consume(); - } - } - - /** - * @param e - */ - protected abstract void handlePopup(MouseEvent e); - - public void mouseClicked(MouseEvent e) { - } - - public void mouseEntered(MouseEvent e) { - } - - public void mouseExited(MouseEvent e) { - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/AnimatedPickingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/AnimatedPickingGraphMousePlugin.java deleted file mode 100644 index 57151e06..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/AnimatedPickingGraphMousePlugin.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.Cursor; -import java.awt.event.InputEvent; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.event.MouseMotionListener; -import java.awt.geom.Point2D; - -import javax.swing.JComponent; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.picking.PickedState; - -/** - * AnimatedPickingGraphMousePlugin supports the picking of one Graph - * Vertex. When the mouse is released, the graph is translated so that - * the picked Vertex is moved to the center of the view. 
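AbstractPopupGraphMousePlugin, shown above, leaves only handlePopup to implement; it already routes the platform-specific popup trigger from both mousePressed and mouseReleased. A hypothetical subclass, with the menu contents invented for demonstration (imports omitted):

    public class SimplePopupPlugin extends AbstractPopupGraphMousePlugin {
        protected void handlePopup(final MouseEvent e) {
            JPopupMenu menu = new JPopupMenu();
            menu.add(new AbstractAction("Print location") {
                public void actionPerformed(ActionEvent a) {
                    System.out.println("popup requested at " + e.getPoint());
                }
            });
            menu.show((Component) e.getSource(), e.getX(), e.getY());
        }
    }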
This translateion - * is conducted in an animation Thread so that the graph slides to its - * new position - * - * @author Tom Nelson - */ -public class AnimatedPickingGraphMousePlugin extends AbstractGraphMousePlugin - implements MouseListener, MouseMotionListener { - - /** - * the picked Vertex - */ - protected V vertex; - - /** - * create an instance with default modifiers - * - */ - public AnimatedPickingGraphMousePlugin() { - this(InputEvent.BUTTON1_MASK | InputEvent.CTRL_MASK); - } - - /** - * create an instance, overriding the default modifiers - * @param selectionModifiers - */ - public AnimatedPickingGraphMousePlugin(int selectionModifiers) { - super(selectionModifiers); - this.cursor = Cursor.getPredefinedCursor(Cursor.HAND_CURSOR); - } - - /** - * If the event occurs on a Vertex, pick that single Vertex - * @param e the event - */ - @SuppressWarnings("unchecked") - public void mousePressed(MouseEvent e) { - if (e.getModifiers() == modifiers) { - VisualizationViewer vv = (VisualizationViewer) e.getSource(); - GraphElementAccessor pickSupport = vv.getPickSupport(); - PickedState pickedVertexState = vv.getPickedVertexState(); - Layout layout = vv.getGraphLayout(); - if (pickSupport != null && pickedVertexState != null) { - // p is the screen point for the mouse event - Point2D p = e.getPoint(); - vertex = pickSupport.getVertex(layout, p.getX(), p.getY()); - if (vertex != null) { - if (pickedVertexState.isPicked(vertex) == false) { - pickedVertexState.clear(); - pickedVertexState.pick(vertex, true); - } - } - } - e.consume(); - } - } - - -/** - * If a Vertex was picked in the mousePressed event, start a Thread - * to animate the translation of the graph so that the picked Vertex - * moves to the center of the view - * - * @param e the event - */ - @SuppressWarnings("unchecked") - public void mouseReleased(MouseEvent e) { - if (e.getModifiers() == modifiers) { - final VisualizationViewer vv = (VisualizationViewer) e.getSource(); - if (vertex != null) { - Layout layout = vv.getGraphLayout(); - Point2D q = layout.transform(vertex); - Point2D lvc = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(vv.getCenter()); - final double dx = (lvc.getX() - q.getX()) / 10; - final double dy = (lvc.getY() - q.getY()) / 10; - - Runnable animator = new Runnable() { - - public void run() { - for (int i = 0; i < 10; i++) { - vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT).translate(dx, dy); - try { - Thread.sleep(100); - } catch (InterruptedException ex) { - } - } - } - }; - Thread thread = new Thread(animator); - thread.start(); - } - } - } - - public void mouseClicked(MouseEvent e) { - } - - /** - * show a special cursor while the mouse is inside the window - */ - public void mouseEntered(MouseEvent e) { - JComponent c = (JComponent)e.getSource(); - c.setCursor(cursor); - } - - /** - * revert to the default cursor when the mouse leaves this window - */ - public void mouseExited(MouseEvent e) { - JComponent c = (JComponent)e.getSource(); - c.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); - } - - public void mouseMoved(MouseEvent e) { - } - - public void mouseDragged(MouseEvent arg0) { - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/CrossoverScalingControl.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/CrossoverScalingControl.java deleted file mode 100644 index 0084a666..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/CrossoverScalingControl.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * 
Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationServer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * A scaling control that has a crossover point. - * When the overall scale of the view and - * model is less than the crossover point, the scaling is applied - * to the view's transform and the graph nodes, labels, etc grow - * smaller. This preserves the overall shape of the graph. - * When the scale is larger than the crossover, the scaling is - * applied to the graph layout. The graph spreads out, but the - * vertices and labels grow no larger than their original size. - * - * @author Tom Nelson - */ -public class CrossoverScalingControl implements ScalingControl { - - /** - * Point where scale crosses over from view to layout. - */ - protected double crossover = 1.0; - - /** - * Sets the crossover point to the specified value. - */ - public void setCrossover(double crossover) { - this.crossover = crossover; - } - - /** - * Returns the current crossover value. - */ - public double getCrossover() { - return crossover; - } - - /** - * @see edu.uci.ics.jung.visualization.control.ScalingControl#scale(VisualizationViewer, float, Point2D) - */ - public void scale(VisualizationServer vv, float amount, Point2D at) { - - MutableTransformer layoutTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - MutableTransformer viewTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW); - double modelScale = layoutTransformer.getScale(); - double viewScale = viewTransformer.getScale(); - double inverseModelScale = Math.sqrt(crossover)/modelScale; - double inverseViewScale = Math.sqrt(crossover)/viewScale; - double scale = modelScale * viewScale; - - Point2D transformedAt = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.VIEW, at); - - if((scale*amount - crossover)*(scale*amount - crossover) < 0.001) { - // close to the control point, return both transformers to a scale of sqrt crossover value - layoutTransformer.scale(inverseModelScale, inverseModelScale, transformedAt); - viewTransformer.scale(inverseViewScale, inverseViewScale, at); - } else if(scale*amount < crossover) { - // scale the viewTransformer, return the layoutTransformer to sqrt crossover value - viewTransformer.scale(amount, amount, at); - layoutTransformer.scale(inverseModelScale, inverseModelScale, transformedAt); - } else { - // scale the layoutTransformer, return the viewTransformer to crossover value - layoutTransformer.scale(amount, amount, transformedAt); - viewTransformer.scale(inverseViewScale, inverseViewScale, at); - } - vv.repaint(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/DefaultModalGraphMouse.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/DefaultModalGraphMouse.java deleted file mode 100644 index ad5ef8de..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/DefaultModalGraphMouse.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Copyright (c) 2005, the 
JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.Component; -import java.awt.Cursor; -import java.awt.ItemSelectable; -import java.awt.event.InputEvent; -import java.awt.event.KeyAdapter; -import java.awt.event.KeyEvent; - - -/** - * - * DefaultModalGraphMouse is a PluggableGraphMouse class that - * pre-installs a large collection of plugins for picking and - * transforming the graph. Additionally, it carries the notion - * of a Mode: Picking or Translating. Switching between modes - * allows for a more natural choice of mouse modifiers to - * be used for the various plugins. The default modifiers are - * intended to mimick those of mainstream software applications - * in order to be intuitive to users. - * - * To change between modes, two different controls are offered, - * a combo box and a menu system. These controls are lazily created - * in their respective 'getter' methods so they don't impact - * code that does not intend to use them. - * The menu control can be placed in an unused corner of the - * GraphZoomScrollPane, which is a common location for mouse - * mode selection menus in mainstream applications. - * - * @author Tom Nelson - */ -public class DefaultModalGraphMouse extends AbstractModalGraphMouse - implements ModalGraphMouse, ItemSelectable { - - /** - * create an instance with default values - * - */ - public DefaultModalGraphMouse() { - this(1.1f, 1/1.1f); - } - - /** - * create an instance with passed values - * @param in override value for scale in - * @param out override value for scale out - */ - public DefaultModalGraphMouse(float in, float out) { - super(in,out); - loadPlugins(); - setModeKeyListener(new ModeKeyAdapter(this)); - } - - /** - * create the plugins, and load the plugins for TRANSFORMING mode - * - */ - @Override - protected void loadPlugins() { - pickingPlugin = new PickingGraphMousePlugin(); - animatedPickingPlugin = new AnimatedPickingGraphMousePlugin(); - translatingPlugin = new TranslatingGraphMousePlugin(InputEvent.BUTTON1_MASK); - scalingPlugin = new ScalingGraphMousePlugin(new CrossoverScalingControl(), 0, in, out); - rotatingPlugin = new RotatingGraphMousePlugin(); - shearingPlugin = new ShearingGraphMousePlugin(); - - add(scalingPlugin); - setMode(Mode.TRANSFORMING); - } - - public static class ModeKeyAdapter extends KeyAdapter { - private char t = 't'; - private char p = 'p'; - protected ModalGraphMouse graphMouse; - - public ModeKeyAdapter(ModalGraphMouse graphMouse) { - this.graphMouse = graphMouse; - } - - public ModeKeyAdapter(char t, char p, ModalGraphMouse graphMouse) { - this.t = t; - this.p = p; - this.graphMouse = graphMouse; - } - - @Override - public void keyTyped(KeyEvent event) { - char keyChar = event.getKeyChar(); - if(keyChar == t) { - ((Component)event.getSource()).setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); - graphMouse.setMode(Mode.TRANSFORMING); - } else if(keyChar == p) { - ((Component)event.getSource()).setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR)); - graphMouse.setMode(Mode.PICKING); - } - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/EditingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/EditingGraphMousePlugin.java deleted file mode 
100644 index 9a7c3126..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/EditingGraphMousePlugin.java +++ /dev/null @@ -1,263 +0,0 @@ -package edu.uci.ics.jung.visualization.control; - -import java.awt.Color; -import java.awt.Cursor; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.Shape; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.event.MouseMotionListener; -import java.awt.geom.AffineTransform; -import java.awt.geom.CubicCurve2D; -import java.awt.geom.Point2D; - -import javax.swing.JComponent; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.visualization.VisualizationServer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.util.ArrowFactory; - -/** - * A plugin that can create vertices, undirected edges, and directed edges - * using mouse gestures. - * - * @author Tom Nelson - * - */ -public class EditingGraphMousePlugin extends AbstractGraphMousePlugin implements - MouseListener, MouseMotionListener { - - protected V startVertex; - protected Point2D down; - - protected CubicCurve2D rawEdge = new CubicCurve2D.Float(); - protected Shape edgeShape; - protected Shape rawArrowShape; - protected Shape arrowShape; - protected VisualizationServer.Paintable edgePaintable; - protected VisualizationServer.Paintable arrowPaintable; - protected EdgeType edgeIsDirected; - protected Factory vertexFactory; - protected Factory edgeFactory; - - public EditingGraphMousePlugin(Factory vertexFactory, Factory edgeFactory) { - this(MouseEvent.BUTTON1_MASK, vertexFactory, edgeFactory); - } - - /** - * create instance and prepare shapes for visual effects - * @param modifiers - */ - public EditingGraphMousePlugin(int modifiers, Factory vertexFactory, Factory edgeFactory) { - super(modifiers); - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - rawEdge.setCurve(0.0f, 0.0f, 0.33f, 100, .66f, -50, - 1.0f, 0.0f); - rawArrowShape = ArrowFactory.getNotchedArrow(20, 16, 8); - edgePaintable = new EdgePaintable(); - arrowPaintable = new ArrowPaintable(); - this.cursor = Cursor.getPredefinedCursor(Cursor.CROSSHAIR_CURSOR); - } - - /** - * Overridden to be more flexible, and pass events with - * key combinations. The default responds to both ButtonOne - * and ButtonOne+Shift - */ - @Override - public boolean checkModifiers(MouseEvent e) { - return (e.getModifiers() & modifiers) != 0; - } - - /** - * If the mouse is pressed in an empty area, create a new vertex there. 
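The editing plugin above mints new graph elements through two org.apache.commons.collections15.Factory instances. A hypothetical sketch of a suitable pair of factories and of the plugin's construction (the counter-based factories, element types and class name are assumptions for illustration only):

import org.apache.commons.collections15.Factory;
import edu.uci.ics.jung.visualization.control.EditingGraphMousePlugin;

class EditingPluginExample {
    // Hypothetical counter-based factories; any Factory<V> / Factory<E> pair will do.
    static Factory<String> vertexFactory = new Factory<String>() {
        private int count = 0;
        public String create() { return "v" + count++; }
    };
    static Factory<Integer> edgeFactory = new Factory<Integer>() {
        private int count = 0;
        public Integer create() { return count++; }
    };

    // Pressing on empty space then creates a vertex; dragging from an existing
    // vertex to another creates an edge, as the comments in this class describe.
    static EditingGraphMousePlugin<String, Integer> editingPlugin =
            new EditingGraphMousePlugin<String, Integer>(vertexFactory, edgeFactory);
}

In practice the plugin is rarely added by hand; EditingModalGraphMouse, deleted further below, builds it from the same pair of factories and swaps it in when the mouse is switched to EDITING mode.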
- * If the mouse is pressed on an existing vertex, prepare to create - * an edge from that vertex to another - */ - @SuppressWarnings("unchecked") - public void mousePressed(MouseEvent e) { - if(checkModifiers(e)) { - final VisualizationViewer vv = - (VisualizationViewer)e.getSource(); - final Point2D p = e.getPoint(); - GraphElementAccessor pickSupport = vv.getPickSupport(); - if(pickSupport != null) { - Graph graph = vv.getModel().getGraphLayout().getGraph(); - // set default edge type - if(graph instanceof DirectedGraph) { - edgeIsDirected = EdgeType.DIRECTED; - } else { - edgeIsDirected = EdgeType.UNDIRECTED; - } - - final V vertex = pickSupport.getVertex(vv.getModel().getGraphLayout(), p.getX(), p.getY()); - if(vertex != null) { // get ready to make an edge - startVertex = vertex; - down = e.getPoint(); - transformEdgeShape(down, down); - vv.addPostRenderPaintable(edgePaintable); - if((e.getModifiers() & MouseEvent.SHIFT_MASK) != 0 - && vv.getModel().getGraphLayout().getGraph() instanceof UndirectedGraph == false) { - edgeIsDirected = EdgeType.DIRECTED; - } - if(edgeIsDirected == EdgeType.DIRECTED) { - transformArrowShape(down, e.getPoint()); - vv.addPostRenderPaintable(arrowPaintable); - } - } else { // make a new vertex - - V newVertex = vertexFactory.create(); - Layout layout = vv.getModel().getGraphLayout(); - graph.addVertex(newVertex); - layout.setLocation(newVertex, vv.getRenderContext().getMultiLayerTransformer().inverseTransform(e.getPoint())); - } - } - vv.repaint(); - } - } - - /** - * If startVertex is non-null, and the mouse is released over an - * existing vertex, create an undirected edge from startVertex to - * the vertex under the mouse pointer. If shift was also pressed, - * create a directed edge instead. - */ - @SuppressWarnings("unchecked") - public void mouseReleased(MouseEvent e) { - if(checkModifiers(e)) { - final VisualizationViewer vv = - (VisualizationViewer)e.getSource(); - final Point2D p = e.getPoint(); - Layout layout = vv.getModel().getGraphLayout(); - GraphElementAccessor pickSupport = vv.getPickSupport(); - if(pickSupport != null) { - final V vertex = pickSupport.getVertex(layout, p.getX(), p.getY()); - if(vertex != null && startVertex != null) { - Graph graph = - vv.getGraphLayout().getGraph(); - graph.addEdge(edgeFactory.create(), - startVertex, vertex, edgeIsDirected); - vv.repaint(); - } - } - startVertex = null; - down = null; - edgeIsDirected = EdgeType.UNDIRECTED; - vv.removePostRenderPaintable(edgePaintable); - vv.removePostRenderPaintable(arrowPaintable); - } - } - - /** - * If startVertex is non-null, stretch an edge shape between - * startVertex and the mouse pointer to simulate edge creation - */ - @SuppressWarnings("unchecked") - public void mouseDragged(MouseEvent e) { - if(checkModifiers(e)) { - if(startVertex != null) { - transformEdgeShape(down, e.getPoint()); - if(edgeIsDirected == EdgeType.DIRECTED) { - transformArrowShape(down, e.getPoint()); - } - } - VisualizationViewer vv = - (VisualizationViewer)e.getSource(); - vv.repaint(); - } - } - - /** - * code lifted from PluggableRenderer to move an edge shape into an - * arbitrary position - */ - private void transformEdgeShape(Point2D down, Point2D out) { - float x1 = (float) down.getX(); - float y1 = (float) down.getY(); - float x2 = (float) out.getX(); - float y2 = (float) out.getY(); - - AffineTransform xform = AffineTransform.getTranslateInstance(x1, y1); - - float dx = x2-x1; - float dy = y2-y1; - float thetaRadians = (float) Math.atan2(dy, dx); - xform.rotate(thetaRadians); - 
float dist = (float) Math.sqrt(dx*dx + dy*dy); - xform.scale(dist / rawEdge.getBounds().getWidth(), 1.0); - edgeShape = xform.createTransformedShape(rawEdge); - } - - private void transformArrowShape(Point2D down, Point2D out) { - float x1 = (float) down.getX(); - float y1 = (float) down.getY(); - float x2 = (float) out.getX(); - float y2 = (float) out.getY(); - - AffineTransform xform = AffineTransform.getTranslateInstance(x2, y2); - - float dx = x2-x1; - float dy = y2-y1; - float thetaRadians = (float) Math.atan2(dy, dx); - xform.rotate(thetaRadians); - arrowShape = xform.createTransformedShape(rawArrowShape); - } - - /** - * Used for the edge creation visual effect during mouse drag - */ - class EdgePaintable implements VisualizationServer.Paintable { - - public void paint(Graphics g) { - if(edgeShape != null) { - Color oldColor = g.getColor(); - g.setColor(Color.black); - ((Graphics2D)g).draw(edgeShape); - g.setColor(oldColor); - } - } - - public boolean useTransform() { - return false; - } - } - - /** - * Used for the directed edge creation visual effect during mouse drag - */ - class ArrowPaintable implements VisualizationServer.Paintable { - - public void paint(Graphics g) { - if(arrowShape != null) { - Color oldColor = g.getColor(); - g.setColor(Color.black); - ((Graphics2D)g).fill(arrowShape); - g.setColor(oldColor); - } - } - - public boolean useTransform() { - return false; - } - } - public void mouseClicked(MouseEvent e) {} - public void mouseEntered(MouseEvent e) { - JComponent c = (JComponent)e.getSource(); - c.setCursor(cursor); - } - public void mouseExited(MouseEvent e) { - JComponent c = (JComponent)e.getSource(); - c.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); - } - public void mouseMoved(MouseEvent e) {} -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/EditingModalGraphMouse.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/EditingModalGraphMouse.java deleted file mode 100644 index 2b9cdec8..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/EditingModalGraphMouse.java +++ /dev/null @@ -1,304 +0,0 @@ -package edu.uci.ics.jung.visualization.control; - -import java.awt.Component; -import java.awt.Cursor; -import java.awt.Dimension; -import java.awt.ItemSelectable; -import java.awt.event.InputEvent; -import java.awt.event.ItemEvent; -import java.awt.event.ItemListener; -import java.awt.event.KeyAdapter; -import java.awt.event.KeyEvent; - -import javax.swing.ButtonGroup; -import javax.swing.Icon; -import javax.swing.JComboBox; -import javax.swing.JMenu; -import javax.swing.JRadioButtonMenuItem; -import javax.swing.plaf.basic.BasicIconFactory; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.visualization.MultiLayerTransformer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.annotations.AnnotatingGraphMousePlugin; - -public class EditingModalGraphMouse extends AbstractModalGraphMouse - implements ModalGraphMouse, ItemSelectable { - - protected Factory vertexFactory; - protected Factory edgeFactory; - protected EditingGraphMousePlugin editingPlugin; - protected LabelEditingGraphMousePlugin labelEditingPlugin; - protected EditingPopupGraphMousePlugin popupEditingPlugin; - protected AnnotatingGraphMousePlugin annotatingPlugin; - protected MultiLayerTransformer basicTransformer; - protected RenderContext rc; - - /** - * create an instance with default values - * - */ - public EditingModalGraphMouse(RenderContext rc, - Factory vertexFactory, 
Factory edgeFactory) { - this(rc, vertexFactory, edgeFactory, 1.1f, 1/1.1f); - } - - /** - * create an instance with passed values - * @param in override value for scale in - * @param out override value for scale out - */ - public EditingModalGraphMouse(RenderContext rc, - Factory vertexFactory, Factory edgeFactory, float in, float out) { - super(in,out); - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - this.rc = rc; - this.basicTransformer = rc.getMultiLayerTransformer(); - loadPlugins(); - setModeKeyListener(new ModeKeyAdapter(this)); - } - - /** - * create the plugins, and load the plugins for TRANSFORMING mode - * - */ - @Override - protected void loadPlugins() { - pickingPlugin = new PickingGraphMousePlugin(); - animatedPickingPlugin = new AnimatedPickingGraphMousePlugin(); - translatingPlugin = new TranslatingGraphMousePlugin(InputEvent.BUTTON1_MASK); - scalingPlugin = new ScalingGraphMousePlugin(new CrossoverScalingControl(), 0, in, out); - rotatingPlugin = new RotatingGraphMousePlugin(); - shearingPlugin = new ShearingGraphMousePlugin(); - editingPlugin = new EditingGraphMousePlugin(vertexFactory, edgeFactory); - labelEditingPlugin = new LabelEditingGraphMousePlugin(); - annotatingPlugin = new AnnotatingGraphMousePlugin(rc); - popupEditingPlugin = new EditingPopupGraphMousePlugin(vertexFactory, edgeFactory); - add(scalingPlugin); - setMode(Mode.EDITING); - } - - /** - * setter for the Mode. - */ - @Override - public void setMode(Mode mode) { - if(this.mode != mode) { - fireItemStateChanged(new ItemEvent(this, ItemEvent.ITEM_STATE_CHANGED, - this.mode, ItemEvent.DESELECTED)); - this.mode = mode; - if(mode == Mode.TRANSFORMING) { - setTransformingMode(); - } else if(mode == Mode.PICKING) { - setPickingMode(); - } else if(mode == Mode.EDITING) { - setEditingMode(); - } else if(mode == Mode.ANNOTATING) { - setAnnotatingMode(); - } - if(modeBox != null) { - modeBox.setSelectedItem(mode); - } - fireItemStateChanged(new ItemEvent(this, ItemEvent.ITEM_STATE_CHANGED, mode, ItemEvent.SELECTED)); - } - } - - @Override - protected void setPickingMode() { - remove(translatingPlugin); - remove(rotatingPlugin); - remove(shearingPlugin); - remove(editingPlugin); - remove(annotatingPlugin); - add(pickingPlugin); - add(animatedPickingPlugin); - add(labelEditingPlugin); - add(popupEditingPlugin); - } - - @Override - protected void setTransformingMode() { - remove(pickingPlugin); - remove(animatedPickingPlugin); - remove(editingPlugin); - remove(annotatingPlugin); - add(translatingPlugin); - add(rotatingPlugin); - add(shearingPlugin); - add(labelEditingPlugin); - add(popupEditingPlugin); - } - - protected void setEditingMode() { - remove(pickingPlugin); - remove(animatedPickingPlugin); - remove(translatingPlugin); - remove(rotatingPlugin); - remove(shearingPlugin); - remove(labelEditingPlugin); - remove(annotatingPlugin); - add(editingPlugin); - add(popupEditingPlugin); - } - - protected void setAnnotatingMode() { - remove(pickingPlugin); - remove(animatedPickingPlugin); - remove(translatingPlugin); - remove(rotatingPlugin); - remove(shearingPlugin); - remove(labelEditingPlugin); - remove(editingPlugin); - remove(popupEditingPlugin); - add(annotatingPlugin); - } - - - /** - * @return the modeBox. 
- */ - @Override - public JComboBox getModeComboBox() { - if(modeBox == null) { - modeBox = new JComboBox(new Mode[]{Mode.TRANSFORMING, Mode.PICKING, Mode.EDITING, Mode.ANNOTATING}); - modeBox.addItemListener(getModeListener()); - } - modeBox.setSelectedItem(mode); - return modeBox; - } - - /** - * create (if necessary) and return a menu that will change - * the mode - * @return the menu - */ - @Override - public JMenu getModeMenu() { - if(modeMenu == null) { - modeMenu = new JMenu();// { - Icon icon = BasicIconFactory.getMenuArrowIcon(); - modeMenu.setIcon(BasicIconFactory.getMenuArrowIcon()); - modeMenu.setPreferredSize(new Dimension(icon.getIconWidth()+10, - icon.getIconHeight()+10)); - - final JRadioButtonMenuItem transformingButton = - new JRadioButtonMenuItem(Mode.TRANSFORMING.toString()); - transformingButton.addItemListener(new ItemListener() { - public void itemStateChanged(ItemEvent e) { - if(e.getStateChange() == ItemEvent.SELECTED) { - setMode(Mode.TRANSFORMING); - } - }}); - - final JRadioButtonMenuItem pickingButton = - new JRadioButtonMenuItem(Mode.PICKING.toString()); - pickingButton.addItemListener(new ItemListener() { - public void itemStateChanged(ItemEvent e) { - if(e.getStateChange() == ItemEvent.SELECTED) { - setMode(Mode.PICKING); - } - }}); - - final JRadioButtonMenuItem editingButton = - new JRadioButtonMenuItem(Mode.EDITING.toString()); - editingButton.addItemListener(new ItemListener() { - public void itemStateChanged(ItemEvent e) { - if(e.getStateChange() == ItemEvent.SELECTED) { - setMode(Mode.EDITING); - } - }}); - - ButtonGroup radio = new ButtonGroup(); - radio.add(transformingButton); - radio.add(pickingButton); - radio.add(editingButton); - transformingButton.setSelected(true); - modeMenu.add(transformingButton); - modeMenu.add(pickingButton); - modeMenu.add(editingButton); - modeMenu.setToolTipText("Menu for setting Mouse Mode"); - addItemListener(new ItemListener() { - public void itemStateChanged(ItemEvent e) { - if(e.getStateChange() == ItemEvent.SELECTED) { - if(e.getItem() == Mode.TRANSFORMING) { - transformingButton.setSelected(true); - } else if(e.getItem() == Mode.PICKING) { - pickingButton.setSelected(true); - } else if(e.getItem() == Mode.EDITING) { - editingButton.setSelected(true); - } - } - }}); - } - return modeMenu; - } - - public static class ModeKeyAdapter extends KeyAdapter { - private char t = 't'; - private char p = 'p'; - private char e = 'e'; - private char a = 'a'; - protected ModalGraphMouse graphMouse; - - public ModeKeyAdapter(ModalGraphMouse graphMouse) { - this.graphMouse = graphMouse; - } - - public ModeKeyAdapter(char t, char p, char e, char a, ModalGraphMouse graphMouse) { - this.t = t; - this.p = p; - this.e = e; - this.a = a; - this.graphMouse = graphMouse; - } - - @Override - public void keyTyped(KeyEvent event) { - char keyChar = event.getKeyChar(); - if(keyChar == t) { - ((Component)event.getSource()).setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); - graphMouse.setMode(Mode.TRANSFORMING); - } else if(keyChar == p) { - ((Component)event.getSource()).setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR)); - graphMouse.setMode(Mode.PICKING); - } else if(keyChar == e) { - ((Component)event.getSource()).setCursor(Cursor.getPredefinedCursor(Cursor.CROSSHAIR_CURSOR)); - graphMouse.setMode(Mode.EDITING); - } else if(keyChar == a) { - ((Component)event.getSource()).setCursor(Cursor.getPredefinedCursor(Cursor.CROSSHAIR_CURSOR)); - graphMouse.setMode(Mode.ANNOTATING); - } - } - } - - /** - * @return the 
annotatingPlugin - */ - public AnnotatingGraphMousePlugin getAnnotatingPlugin() { - return annotatingPlugin; - } - - /** - * @return the editingPlugin - */ - public EditingGraphMousePlugin getEditingPlugin() { - return editingPlugin; - } - - /** - * @return the labelEditingPlugin - */ - public LabelEditingGraphMousePlugin getLabelEditingPlugin() { - return labelEditingPlugin; - } - - /** - * @return the popupEditingPlugin - */ - public EditingPopupGraphMousePlugin getPopupEditingPlugin() { - return popupEditingPlugin; - } -} - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/EditingPopupGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/EditingPopupGraphMousePlugin.java deleted file mode 100644 index c7ac2aba..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/EditingPopupGraphMousePlugin.java +++ /dev/null @@ -1,116 +0,0 @@ -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.ActionEvent; -import java.awt.event.MouseEvent; -import java.awt.geom.Point2D; -import java.util.Set; - -import javax.swing.AbstractAction; -import javax.swing.JMenu; -import javax.swing.JPopupMenu; - -import org.apache.commons.collections15.Factory; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.picking.PickedState; - -/** - * a plugin that uses popup menus to create vertices, undirected edges, - * and directed edges. - * - * @author Tom Nelson - * - */ -public class EditingPopupGraphMousePlugin extends AbstractPopupGraphMousePlugin { - - protected Factory vertexFactory; - protected Factory edgeFactory; - protected JPopupMenu popup = new JPopupMenu(); - - public EditingPopupGraphMousePlugin(Factory vertexFactory, Factory edgeFactory) { - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - } - - @SuppressWarnings({ "unchecked", "serial", "serial" }) - protected void handlePopup(MouseEvent e) { - final VisualizationViewer vv = - (VisualizationViewer)e.getSource(); - final Layout layout = vv.getGraphLayout(); - final Graph graph = layout.getGraph(); - final Point2D p = e.getPoint(); - final Point2D ivp = p; - GraphElementAccessor pickSupport = vv.getPickSupport(); - if(pickSupport != null) { - - final V vertex = pickSupport.getVertex(layout, ivp.getX(), ivp.getY()); - final E edge = pickSupport.getEdge(layout, ivp.getX(), ivp.getY()); - final PickedState pickedVertexState = vv.getPickedVertexState(); - final PickedState pickedEdgeState = vv.getPickedEdgeState(); - - if(vertex != null) { - Set picked = pickedVertexState.getPicked(); - if(picked.size() > 0) { - if(graph instanceof UndirectedGraph == false) { - JMenu directedMenu = new JMenu("Create Directed Edge"); - popup.add(directedMenu); - for(final V other : picked) { - directedMenu.add(new AbstractAction("["+other+","+vertex+"]") { - public void actionPerformed(ActionEvent e) { - graph.addEdge(edgeFactory.create(), - other, vertex, EdgeType.DIRECTED); - vv.repaint(); - } - }); - } - } - if(graph instanceof DirectedGraph == false) { - JMenu undirectedMenu = new JMenu("Create Undirected Edge"); - popup.add(undirectedMenu); - for(final V other : picked) { - undirectedMenu.add(new AbstractAction("[" + other+","+vertex+"]") { - 
public void actionPerformed(ActionEvent e) { - graph.addEdge(edgeFactory.create(), - other, vertex); - vv.repaint(); - } - }); - } - } - } - popup.add(new AbstractAction("Delete Vertex") { - public void actionPerformed(ActionEvent e) { - pickedVertexState.pick(vertex, false); - graph.removeVertex(vertex); - vv.repaint(); - }}); - } else if(edge != null) { - popup.add(new AbstractAction("Delete Edge") { - public void actionPerformed(ActionEvent e) { - pickedEdgeState.pick(edge, false); - graph.removeEdge(edge); - vv.repaint(); - }}); - } else { - popup.add(new AbstractAction("Create Vertex") { - public void actionPerformed(ActionEvent e) { - V newVertex = vertexFactory.create(); - graph.addVertex(newVertex); - layout.setLocation(newVertex, vv.getRenderContext().getMultiLayerTransformer().inverseTransform(p)); - vv.repaint(); - } - }); - } - if(popup.getComponentCount() > 0) { - popup.show(vv, e.getX(), e.getY()); - } - } - } -} - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/GraphMouseAdapter.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/GraphMouseAdapter.java deleted file mode 100644 index ab4132ac..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/GraphMouseAdapter.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 6, 2005 - */ - -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.MouseAdapter; -import java.awt.event.MouseEvent; - -/** - * Simple extension of MouseAdapter that supplies modifier - * checking - * - * @author Tom Nelson - * - */ -public class GraphMouseAdapter extends MouseAdapter { - - protected int modifiers; - - public GraphMouseAdapter(int modifiers) { - this.modifiers = modifiers; - } - - public int getModifiers() { - return modifiers; - } - - public void setModifiers(int modifiers) { - this.modifiers = modifiers; - } - - protected boolean checkModifiers(MouseEvent e) { - return e.getModifiers() == modifiers; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/GraphMouseListener.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/GraphMouseListener.java deleted file mode 100644 index 56360095..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/GraphMouseListener.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Feb 17, 2004 - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.MouseEvent; - -/** - * This interface allows users to register listeners to register to receive - * vertex clicks. 
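A small usage sketch for this interface, assuming a VisualizationViewer with String vertices and the standard addGraphMouseListener registration (the MouseListenerTranslator deleted further down is the adapter behind that call); the helper name and element types are illustrative:

import java.awt.event.MouseEvent;
import edu.uci.ics.jung.visualization.VisualizationViewer;
import edu.uci.ics.jung.visualization.control.GraphMouseListener;

class ClickLogger {
    static void install(VisualizationViewer<String, Integer> vv) {
        vv.addGraphMouseListener(new GraphMouseListener<String>() {
            public void graphClicked(String v, MouseEvent me)  { System.out.println("clicked " + v); }
            public void graphPressed(String v, MouseEvent me)  { System.out.println("pressed " + v); }
            public void graphReleased(String v, MouseEvent me) { System.out.println("released " + v); }
        });
    }
}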
- * - * @author danyelf - */ -public interface GraphMouseListener { - - void graphClicked(V v, MouseEvent me); - void graphPressed(V v, MouseEvent me); - void graphReleased(V v, MouseEvent me); - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/GraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/GraphMousePlugin.java deleted file mode 100644 index b95cd4cf..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/GraphMousePlugin.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 6, 2005 - */ - -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.MouseEvent; - -/** - * the interface for all plugins to the PluggableGraphMouse - * @author Tom Nelson - * - */ -public interface GraphMousePlugin { - - /** - * return the mouse event modifiers that will activate this plugin - * @return modifiers - */ - int getModifiers(); - - /** - * set the mouse event modifiers that will activate this plugin - * @param modifiers - */ - void setModifiers(int modifiers); - - /** - * compare the set modifiers against those of the supplied event - * @param e an event to compare to - * @return whether the member modifers match the event modifiers - */ - boolean checkModifiers(MouseEvent e); - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/LabelEditingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/LabelEditingGraphMousePlugin.java deleted file mode 100644 index 6dc5fc36..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/LabelEditingGraphMousePlugin.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.Cursor; -import java.awt.event.InputEvent; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.geom.Point2D; -import java.util.Map; - -import javax.swing.JOptionPane; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.MapTransformer; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; - -/** - * - * - * @author Tom Nelson - */ -public class LabelEditingGraphMousePlugin extends AbstractGraphMousePlugin - implements MouseListener { - - /** - * the picked Vertex, if any - */ - protected V vertex; - - /** - * the picked Edge, if any - */ - protected E edge; - - /** - * create an instance with default settings - */ - public LabelEditingGraphMousePlugin() { - this(InputEvent.BUTTON1_MASK); - } - - /** - * create an instance with overides - * @param selectionModifiers for primary selection - * @param addToSelectionModifiers for additional selection - */ - public LabelEditingGraphMousePlugin(int selectionModifiers) { - super(selectionModifiers); - this.cursor = Cursor.getPredefinedCursor(Cursor.HAND_CURSOR); - } - - /** - * For primary modifiers (default, MouseButton1): - * pick a single Vertex or Edge that - * is under the mouse pointer. If no Vertex or edge is under - * the pointer, unselect all picked Vertices and edges, and - * set up to draw a rectangle for multiple selection - * of contained Vertices. - * For additional selection (default Shift+MouseButton1): - * Add to the selection, a single Vertex or Edge that is - * under the mouse pointer. If a previously picked Vertex - * or Edge is under the pointer, it is un-picked. - * If no vertex or Edge is under the pointer, set up - * to draw a multiple selection rectangle (as above) - * but do not unpick previously picked elements. 
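A usage note on this plugin: the double-click editing it implements only takes effect when the viewer's label transformers are MapTransformer instances, because the text entered in the dialog is written back into the transformer's underlying map (see the mouseClicked body below). A sketch of that wiring, with an assumed viewer vv, an assumed PluggableGraphMouse, and illustrative names:

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.collections15.functors.MapTransformer;
import edu.uci.ics.jung.visualization.VisualizationViewer;
import edu.uci.ics.jung.visualization.control.LabelEditingGraphMousePlugin;
import edu.uci.ics.jung.visualization.control.PluggableGraphMouse;

class LabelEditingExample {
    static void install(VisualizationViewer<String, Integer> vv, PluggableGraphMouse graphMouse) {
        // Backing map: double-clicking a vertex writes the dialog text into this map.
        Map<String, String> vertexLabels = new HashMap<String, String>();
        vv.getRenderContext().setVertexLabelTransformer(
                MapTransformer.<String, String>getInstance(vertexLabels));
        graphMouse.add(new LabelEditingGraphMousePlugin<String, Integer>());
    }
}

DefaultModalGraphMouse does not install this plugin; EditingModalGraphMouse, shown earlier in this patch, adds it in its picking and transforming modes.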
- * - * @param e the event - */ - @SuppressWarnings("unchecked") - public void mouseClicked(MouseEvent e) { - if(e.getModifiers() == modifiers && e.getClickCount() == 2) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - GraphElementAccessor pickSupport = vv.getPickSupport(); - if(pickSupport != null) { - Transformer vs = vv.getRenderContext().getVertexLabelTransformer(); - if(vs instanceof MapTransformer) { - Map map = ((MapTransformer)vs).getMap(); - Layout layout = vv.getGraphLayout(); - // p is the screen point for the mouse event - Point2D p = e.getPoint(); - - V vertex = pickSupport.getVertex(layout, p.getX(), p.getY()); - if(vertex != null) { - String newLabel = vs.transform(vertex); - newLabel = JOptionPane.showInputDialog("New Vertex Label for "+vertex); - if(newLabel != null) { - map.put(vertex, newLabel); - vv.repaint(); - } - return; - } - } - Transformer es = vv.getRenderContext().getEdgeLabelTransformer(); - if(es instanceof MapTransformer) { - Map map = ((MapTransformer)es).getMap(); - Layout layout = vv.getGraphLayout(); - // p is the screen point for the mouse event - Point2D p = e.getPoint(); - // take away the view transform - Point2D ip = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.VIEW, p); - E edge = pickSupport.getEdge(layout, ip.getX(), ip.getY()); - if(edge != null) { - String newLabel = JOptionPane.showInputDialog("New Edge Label for "+edge); - if(newLabel != null) { - map.put(edge, newLabel); - vv.repaint(); - } - return; - } - } - } - e.consume(); - } - } - - /** - * If the mouse is dragging a rectangle, pick the - * Vertices contained in that rectangle - * - * clean up settings from mousePressed - */ - @SuppressWarnings("unchecked") - public void mouseReleased(MouseEvent e) { - } - - /** - * If the mouse is over a picked vertex, drag all picked - * vertices with the mouse. - * If the mouse is not over a Vertex, draw the rectangle - * to select multiple Vertices - * - */ - - public void mousePressed(MouseEvent e) { - } - - public void mouseEntered(MouseEvent e) { - // TODO Auto-generated method stub - - } - - public void mouseExited(MouseEvent e) { - // TODO Auto-generated method stub - - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/LayoutScalingControl.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/LayoutScalingControl.java deleted file mode 100644 index 612518c9..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/LayoutScalingControl.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationServer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * LayoutScalingControl applies a scaling transformation to the graph layout. - * The Vertices get closer or farther apart, but do not themselves change - * size. ScalingGraphMouse uses MouseWheelEvents to apply the scaling. - * - * @author Tom Nelson - */ -public class LayoutScalingControl implements ScalingControl { - - /** - * zoom the display in or out, depending on the direction of the - * mouse wheel motion. 
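Both LayoutScalingControl and the CrossoverScalingControl deleted above implement the ScalingControl interface, so the same call can back zoom buttons, keyboard shortcuts or the mouse wheel. A sketch of a programmatic zoom, assuming a viewer vv; the 1.1 factor matches the default used by the graph mice in this package but is otherwise arbitrary:

import edu.uci.ics.jung.visualization.VisualizationViewer;
import edu.uci.ics.jung.visualization.control.CrossoverScalingControl;
import edu.uci.ics.jung.visualization.control.ScalingControl;

class ZoomExample {
    static void zoom(VisualizationViewer<String, Integer> vv, boolean zoomIn) {
        ScalingControl scaler = new CrossoverScalingControl();
        // Scale about the centre of the view; amounts above 1 zoom in, below 1 zoom out.
        scaler.scale(vv, zoomIn ? 1.1f : 1 / 1.1f, vv.getCenter());
    }
}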
- */ - public void scale(VisualizationServer vv, float amount, Point2D from) { - - Point2D ivtfrom = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.VIEW, from); - MutableTransformer modelTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - modelTransformer.scale(amount, amount, ivtfrom); - vv.repaint(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/LensMagnificationGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/LensMagnificationGraphMousePlugin.java deleted file mode 100644 index c78c9677..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/LensMagnificationGraphMousePlugin.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.MouseEvent; -import java.awt.event.MouseWheelEvent; -import java.awt.event.MouseWheelListener; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.transform.LensTransformer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * HyperbolicMagnificationGraphMousePlugin changes the magnification - * within the Hyperbolic projection of the HyperbolicTransformer. - * - * @author Tom Nelson - */ -public class LensMagnificationGraphMousePlugin extends AbstractGraphMousePlugin - implements MouseWheelListener { - - protected float floor = 1.0f; - - protected float ceiling = 5.0f; - - protected float delta = .2f; - - /** - * create an instance with default zoom in/out values - */ - public LensMagnificationGraphMousePlugin() { - this(MouseEvent.CTRL_MASK); - } - - /** - * create an instance with passed modifiers - * @param modifiers - */ - public LensMagnificationGraphMousePlugin(float floor, float ceiling, float delta) { - this(MouseEvent.CTRL_MASK, floor, ceiling, delta); - } - - public LensMagnificationGraphMousePlugin(int modifiers) { - this(modifiers, 1.0f, 4.0f, .2f); - } - public LensMagnificationGraphMousePlugin(int modifiers, float floor, float ceiling, float delta) { - super(modifiers); - this.floor = floor; - this.ceiling = ceiling; - this.delta = delta; - } - /** - * override to check equality with a mask - */ - public boolean checkModifiers(MouseEvent e) { - return (e.getModifiers() & modifiers) != 0; - } - - private void changeMagnification(MutableTransformer transformer, float delta) { - if(transformer instanceof LensTransformer) { - LensTransformer ht = (LensTransformer)transformer; - float magnification = ht.getMagnification() + delta; - magnification = Math.max(floor, magnification); - magnification = Math.min(magnification, ceiling); - ht.setMagnification(magnification); - } - } - /** - * zoom the display in or out, depending on the direction of the - * mouse wheel motion. 
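A construction sketch for this plugin: the floor, ceiling and delta arguments bound and step the lens magnification, and the plugin is typically handed to the ModalLensGraphMouse deleted further below. The numeric values here are arbitrary examples:

import edu.uci.ics.jung.visualization.control.LensMagnificationGraphMousePlugin;
import edu.uci.ics.jung.visualization.control.ModalLensGraphMouse;

class LensMouseExample {
    static ModalLensGraphMouse newLensMouse() {
        // Magnification clamped to [1.0, 6.0], stepped by 0.25 per wheel notch
        // (CTRL is the default modifier for this constructor, per the code above).
        LensMagnificationGraphMousePlugin magnification =
                new LensMagnificationGraphMousePlugin(1.0f, 6.0f, 0.25f);
        return new ModalLensGraphMouse(magnification);
    }
}

As with the other graph mice, the result is installed with vv.setGraphMouse(...); the lens itself is provided by JUNG's LensTransformer/HyperbolicTransformer machinery, which lies outside this file set.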
- */ - public void mouseWheelMoved(MouseWheelEvent e) { - boolean accepted = checkModifiers(e); - float delta = this.delta; - if(accepted == true) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - MutableTransformer modelTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - MutableTransformer viewTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW); - int amount = e.getWheelRotation(); - if(amount < 0) { - delta = -delta; - } - changeMagnification(modelTransformer, delta); - changeMagnification(viewTransformer, delta); - vv.repaint(); - e.consume(); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/LensTranslatingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/LensTranslatingGraphMousePlugin.java deleted file mode 100644 index 273a642f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/LensTranslatingGraphMousePlugin.java +++ /dev/null @@ -1,184 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.Cursor; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.event.MouseMotionListener; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.transform.LensTransformer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * Extends TranslatingGraphMousePlugin and adds the capability - * to drag and resize the viewing - * lens in the graph view. Mouse1 in the center moves the lens, - * mouse1 on the edge resizes the lens. The default mouse button and - * modifiers can be overridden in the constructor. - * - * - * @author Tom Nelson - */ -public class LensTranslatingGraphMousePlugin extends TranslatingGraphMousePlugin -implements MouseListener, MouseMotionListener { - - protected boolean dragOnLens; - protected boolean dragOnEdge; - protected double edgeOffset; - /** - * create an instance with default modifiers - */ - public LensTranslatingGraphMousePlugin() { - this(MouseEvent.BUTTON1_MASK); - } - - /** - * create an instance with passed modifer value - * @param modifiers the mouse event modifier to activate this function - */ - public LensTranslatingGraphMousePlugin(int modifiers) { - super(modifiers); - } - - /** - * Check the event modifiers. Set the 'down' point for later - * use. 
If this event satisfies the modifiers, change the cursor - * to the system 'move cursor' - * @param e the event - */ - public void mousePressed(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - MutableTransformer vt = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW); - if(vt instanceof LensTransformer) { - vt = ((LensTransformer)vt).getDelegate(); - } - Point2D p = vt.inverseTransform(e.getPoint()); - boolean accepted = checkModifiers(e); - if(accepted) { - vv.setCursor(Cursor.getPredefinedCursor(Cursor.MOVE_CURSOR)); - testViewCenter(vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT), p); - testViewCenter(vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW), p); - vv.repaint(); - } - super.mousePressed(e); - } - - /** - * called to change the location of the lens - * @param transformer - * @param point - */ - private void setViewCenter(MutableTransformer transformer, Point2D point) { - if(transformer instanceof LensTransformer) { - LensTransformer ht = - (LensTransformer)transformer; - ht.setViewCenter(point); - } - } - - /** - * called to change the radius of the lens - * @param transformer - * @param point - */ - private void setViewRadius(MutableTransformer transformer, Point2D point) { - if(transformer instanceof LensTransformer) { - LensTransformer ht = - (LensTransformer)transformer; - double distanceFromCenter = ht.getDistanceFromCenter(point); - ht.setViewRadius(distanceFromCenter+edgeOffset); - } - } - - /** - * called to set up translating the lens center or changing the size - * @param transformer - * @param point - */ - private void testViewCenter(MutableTransformer transformer, Point2D point) { - if(transformer instanceof LensTransformer) { - LensTransformer ht = - (LensTransformer)transformer; - double distanceFromCenter = ht.getDistanceFromCenter(point); - if(distanceFromCenter < 10) { - ht.setViewCenter(point); - dragOnLens = true; - } else if(Math.abs(distanceFromCenter - ht.getViewRadius()) < 10) { - edgeOffset = ht.getViewRadius() - distanceFromCenter; - ht.setViewRadius(distanceFromCenter+edgeOffset); - dragOnEdge = true; - } - } - } - - /** - * unset the 'down' point and change the cursoe back to the system - * default cursor - */ - public void mouseReleased(MouseEvent e) { - super.mouseReleased(e); - dragOnLens = false; - dragOnEdge = false; - edgeOffset = 0; - } - - /** - * check the modifiers. 
If accepted, move or resize the lens according - * to the dragging of the mouse pointer - * @param e the event - */ - public void mouseDragged(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - MutableTransformer vt = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW); - if(vt instanceof LensTransformer) { - vt = ((LensTransformer)vt).getDelegate(); - } - Point2D p = vt.inverseTransform(e.getPoint()); - boolean accepted = checkModifiers(e); - - if(accepted ) { - MutableTransformer modelTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - vv.setCursor(Cursor.getPredefinedCursor(Cursor.MOVE_CURSOR)); - if(dragOnLens) { - setViewCenter(modelTransformer, p); - setViewCenter(vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW), p); - e.consume(); - vv.repaint(); - - } else if(dragOnEdge) { - - setViewRadius(modelTransformer, p); - setViewRadius(vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW), p); - e.consume(); - vv.repaint(); - - } else { - - MutableTransformer mt = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - Point2D iq = vt.inverseTransform(down); - iq = mt.inverseTransform(iq); - Point2D ip = vt.inverseTransform(e.getPoint()); - ip = mt.inverseTransform(ip); - float dx = (float) (ip.getX()-iq.getX()); - float dy = (float) (ip.getY()-iq.getY()); - - modelTransformer.translate(dx, dy); - down.x = e.getX(); - down.y = e.getY(); - } - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/ModalGraphMouse.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/ModalGraphMouse.java deleted file mode 100644 index b0fdacac..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/ModalGraphMouse.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 26, 2005 - */ - -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.ItemListener; - -import edu.uci.ics.jung.visualization.VisualizationViewer.GraphMouse; - -/** - * Interface for a GraphMouse that supports modality. - * - * @author Tom Nelson - * - */ -public interface ModalGraphMouse extends GraphMouse { - - void setMode(Mode mode); - - /** - * @return Returns the modeListener. - */ - ItemListener getModeListener(); - - /** - */ - enum Mode { TRANSFORMING, PICKING, ANNOTATING, EDITING } - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/ModalLensGraphMouse.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/ModalLensGraphMouse.java deleted file mode 100644 index a0c4512e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/ModalLensGraphMouse.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Aug 26, 2005 - */ - -package edu.uci.ics.jung.visualization.control; - -import java.awt.Component; -import java.awt.Cursor; -import java.awt.event.InputEvent; -import java.awt.event.KeyAdapter; -import java.awt.event.KeyEvent; - -/** - * an implementation of the AbstractModalGraphMouse that includes plugins for - * manipulating a view that is using a LensTransformer. - * - * @author Tom Nelson - * - */ -public class ModalLensGraphMouse extends AbstractModalGraphMouse implements - ModalGraphMouse { - - /** - * not included in the base class - */ - protected LensMagnificationGraphMousePlugin magnificationPlugin; - - public ModalLensGraphMouse() { - this(1.1f, 1/1.1f); - } - - public ModalLensGraphMouse(float in, float out) { - this(in, out, new LensMagnificationGraphMousePlugin()); - } - - public ModalLensGraphMouse(LensMagnificationGraphMousePlugin magnificationPlugin) { - this(1.1f, 1/1.1f, magnificationPlugin); - } - - public ModalLensGraphMouse(float in, float out, LensMagnificationGraphMousePlugin magnificationPlugin) { - super(in,out); - this.in = in; - this.out = out; - this.magnificationPlugin = magnificationPlugin; - loadPlugins(); - setModeKeyListener(new ModeKeyAdapter(this)); - } - - protected void loadPlugins() { - pickingPlugin = new PickingGraphMousePlugin(); - animatedPickingPlugin = new AnimatedPickingGraphMousePlugin(); - translatingPlugin = new LensTranslatingGraphMousePlugin(InputEvent.BUTTON1_MASK); - scalingPlugin = new ScalingGraphMousePlugin(new CrossoverScalingControl(), 0, in, out); - rotatingPlugin = new RotatingGraphMousePlugin(); - shearingPlugin = new ShearingGraphMousePlugin(); - - add(magnificationPlugin); - add(scalingPlugin); - - setMode(Mode.TRANSFORMING); - } - public static class ModeKeyAdapter extends KeyAdapter { - private char t = 't'; - private char p = 'p'; - protected ModalGraphMouse graphMouse; - - public ModeKeyAdapter(ModalGraphMouse graphMouse) { - this.graphMouse = graphMouse; - } - - public ModeKeyAdapter(char t, char p, ModalGraphMouse graphMouse) { - this.t = t; - this.p = p; - this.graphMouse = graphMouse; - } - - public void keyTyped(KeyEvent event) { - char keyChar = event.getKeyChar(); - if(keyChar == t) { - ((Component)event.getSource()).setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); - graphMouse.setMode(Mode.TRANSFORMING); - } else if(keyChar == p) { - ((Component)event.getSource()).setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR)); - graphMouse.setMode(Mode.PICKING); - } - } - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/ModalSatelliteGraphMouse.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/ModalSatelliteGraphMouse.java deleted file mode 100644 index e1959d3c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/ModalSatelliteGraphMouse.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Aug 26, 2005 - */ - -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.InputEvent; -/** - * - * @author Tom Nelson - * - */ -public class ModalSatelliteGraphMouse extends DefaultModalGraphMouse implements - ModalGraphMouse { - - public ModalSatelliteGraphMouse() { - this(1.1f, 1/1.1f); - } - - public ModalSatelliteGraphMouse(float in, float out) { - super(in, out); - } - - protected void loadPlugins() { - pickingPlugin = new PickingGraphMousePlugin(); - animatedPickingPlugin = new SatelliteAnimatedPickingGraphMousePlugin(); - translatingPlugin = new SatelliteTranslatingGraphMousePlugin(InputEvent.BUTTON1_MASK); - scalingPlugin = new SatelliteScalingGraphMousePlugin(new CrossoverScalingControl(), 0); - rotatingPlugin = new SatelliteRotatingGraphMousePlugin(); - shearingPlugin = new SatelliteShearingGraphMousePlugin(); - - add(scalingPlugin); - - setMode(Mode.TRANSFORMING); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/MouseListenerTranslator.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/MouseListenerTranslator.java deleted file mode 100644 index 54771452..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/MouseListenerTranslator.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Feb 17, 2004 - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.MouseAdapter; -import java.awt.event.MouseEvent; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.VisualizationViewer; - -/** - * This class translates mouse clicks into vertex clicks - * - * @author danyelf - */ -public class MouseListenerTranslator extends MouseAdapter { - - private VisualizationViewer vv; - private GraphMouseListener gel; - - /** - * @param gel - * @param vv - */ - public MouseListenerTranslator(GraphMouseListener gel, VisualizationViewer vv) { - this.gel = gel; - this.vv = vv; - } - - /** - * Transform the point to the coordinate system in the - * VisualizationViewer, then use either PickSuuport - * (if available) or Layout to find a Vertex - * @param point - * @return - */ - private V getVertex(Point2D point) { - // adjust for scale and offset in the VisualizationViewer - Point2D p = point; - //vv.getRenderContext().getBasicTransformer().inverseViewTransform(point); - GraphElementAccessor pickSupport = vv.getPickSupport(); - Layout layout = vv.getGraphLayout(); - V v = null; - if(pickSupport != null) { - v = pickSupport.getVertex(layout, p.getX(), p.getY()); - } - return v; - } - /** - * @see java.awt.event.MouseListener#mouseClicked(java.awt.event.MouseEvent) - */ - public void mouseClicked(MouseEvent e) { - V v = getVertex(e.getPoint()); - if ( v != null ) { - gel.graphClicked(v, e ); - } - } - - /** - * @see java.awt.event.MouseListener#mousePressed(java.awt.event.MouseEvent) - */ - public void mousePressed(MouseEvent e) { - V v = getVertex(e.getPoint()); - if ( v != null ) { - gel.graphPressed(v, e ); - } - } - - /** - * @see java.awt.event.MouseListener#mouseReleased(java.awt.event.MouseEvent) - */ - public void mouseReleased(MouseEvent e) { - V v = getVertex(e.getPoint()); - if ( v != 
null ) { - gel.graphReleased(v, e ); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/PickingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/PickingGraphMousePlugin.java deleted file mode 100644 index 3ba13f18..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/PickingGraphMousePlugin.java +++ /dev/null @@ -1,372 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.Color; -import java.awt.Cursor; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.Point; -import java.awt.event.InputEvent; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.event.MouseMotionListener; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.Collection; - -import javax.swing.JComponent; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.VisualizationServer.Paintable; -import edu.uci.ics.jung.visualization.picking.PickedState; - -/** - * PickingGraphMousePlugin supports the picking of graph elements - * with the mouse. MouseButtonOne picks a single vertex - * or edge, and MouseButtonTwo adds to the set of selected Vertices - * or EdgeType. If a Vertex is selected and the mouse is dragged while - * on the selected Vertex, then that Vertex will be repositioned to - * follow the mouse until the button is released. 
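The picking behaviour described above records its results in the viewer's PickedState objects, which is how application code normally reads the current selection. A small sketch, assuming a viewer vv with String vertices and an illustrative helper name:

import edu.uci.ics.jung.visualization.VisualizationViewer;
import edu.uci.ics.jung.visualization.picking.PickedState;

class SelectionExample {
    static void dumpSelection(VisualizationViewer<String, Integer> vv) {
        PickedState<String> pickedVertices = vv.getPickedVertexState();
        for (String v : pickedVertices.getPicked()) {
            System.out.println("picked vertex: " + v);
        }
        // Programmatic selection goes through the same state object
        // ("a" is just a hypothetical vertex id).
        pickedVertices.pick("a", true);
    }
}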
- * - * @author Tom Nelson - */ -public class PickingGraphMousePlugin extends AbstractGraphMousePlugin - implements MouseListener, MouseMotionListener { - - /** - * the picked Vertex, if any - */ - protected V vertex; - - /** - * the picked Edge, if any - */ - protected E edge; - - /** - * the x distance from the picked vertex center to the mouse point - */ - protected double offsetx; - - /** - * the y distance from the picked vertex center to the mouse point - */ - protected double offsety; - - /** - * controls whether the Vertices may be moved with the mouse - */ - protected boolean locked; - - /** - * additional modifiers for the action of adding to an existing - * selection - */ - protected int addToSelectionModifiers; - - /** - * used to draw a rectangle to contain picked vertices - */ - protected Rectangle2D rect = new Rectangle2D.Float(); - - /** - * the Paintable for the lens picking rectangle - */ - protected Paintable lensPaintable; - - /** - * color for the picking rectangle - */ - protected Color lensColor = Color.cyan; - - /** - * create an instance with default settings - */ - public PickingGraphMousePlugin() { - this(InputEvent.BUTTON1_MASK, InputEvent.BUTTON1_MASK | InputEvent.SHIFT_MASK); - } - - /** - * create an instance with overides - * @param selectionModifiers for primary selection - * @param addToSelectionModifiers for additional selection - */ - public PickingGraphMousePlugin(int selectionModifiers, int addToSelectionModifiers) { - super(selectionModifiers); - this.addToSelectionModifiers = addToSelectionModifiers; - this.lensPaintable = new LensPaintable(); - this.cursor = Cursor.getPredefinedCursor(Cursor.HAND_CURSOR); - } - - /** - * @return Returns the lensColor. - */ - public Color getLensColor() { - return lensColor; - } - - /** - * @param lensColor The lensColor to set. - */ - public void setLensColor(Color lensColor) { - this.lensColor = lensColor; - } - - /** - * a Paintable to draw the rectangle used to pick multiple - * Vertices - * @author Tom Nelson - * - */ - class LensPaintable implements Paintable { - - public void paint(Graphics g) { - Color oldColor = g.getColor(); - g.setColor(lensColor); - ((Graphics2D)g).draw(rect); - g.setColor(oldColor); - } - - public boolean useTransform() { - return false; - } - } - - /** - * For primary modifiers (default, MouseButton1): - * pick a single Vertex or Edge that - * is under the mouse pointer. If no Vertex or edge is under - * the pointer, unselect all picked Vertices and edges, and - * set up to draw a rectangle for multiple selection - * of contained Vertices. - * For additional selection (default Shift+MouseButton1): - * Add to the selection, a single Vertex or Edge that is - * under the mouse pointer. If a previously picked Vertex - * or Edge is under the pointer, it is un-picked. - * If no vertex or Edge is under the pointer, set up - * to draw a multiple selection rectangle (as above) - * but do not unpick previously picked elements. 
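For reference, a minimal self-contained sketch of how this plugin is usually installed so that BUTTON1 picks a single element and SHIFT+BUTTON1 adds to or toggles the selection, as described above. The class name, graph contents and layout choice are illustrative assumptions, not part of the deleted sources; the plugin and viewer calls are the JUNG 2.0 ones visible in this diff.

    import java.awt.Dimension;
    import java.awt.event.InputEvent;
    import javax.swing.JFrame;
    import edu.uci.ics.jung.algorithms.layout.CircleLayout;
    import edu.uci.ics.jung.graph.Graph;
    import edu.uci.ics.jung.graph.SparseMultigraph;
    import edu.uci.ics.jung.visualization.VisualizationViewer;
    import edu.uci.ics.jung.visualization.control.PickingGraphMousePlugin;
    import edu.uci.ics.jung.visualization.control.PluggableGraphMouse;

    public class PickingSketch {
        public static void main(String[] args) {
            // Tiny placeholder graph to pick from.
            Graph<String, Integer> g = new SparseMultigraph<String, Integer>();
            g.addVertex("a");
            g.addVertex("b");
            g.addEdge(1, "a", "b");

            VisualizationViewer<String, Integer> vv =
                new VisualizationViewer<String, Integer>(
                    new CircleLayout<String, Integer>(g), new Dimension(300, 300));

            // BUTTON1 picks a single vertex or edge; SHIFT+BUTTON1 toggles membership
            // in the existing selection, matching the javadoc above.
            PluggableGraphMouse gm = new PluggableGraphMouse();
            gm.add(new PickingGraphMousePlugin<String, Integer>(
                    InputEvent.BUTTON1_MASK,
                    InputEvent.BUTTON1_MASK | InputEvent.SHIFT_MASK));
            vv.setGraphMouse(gm);

            JFrame frame = new JFrame("picking sketch");
            frame.getContentPane().add(vv);
            frame.pack();
            frame.setVisible(true);
        }
    }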
- * - * @param e the event - */ - @SuppressWarnings("unchecked") - public void mousePressed(MouseEvent e) { - down = e.getPoint(); - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - GraphElementAccessor pickSupport = vv.getPickSupport(); - PickedState pickedVertexState = vv.getPickedVertexState(); - PickedState pickedEdgeState = vv.getPickedEdgeState(); - if(pickSupport != null && pickedVertexState != null) { - Layout layout = vv.getGraphLayout(); - if(e.getModifiers() == modifiers) { - rect.setFrameFromDiagonal(down,down); - // p is the screen point for the mouse event - Point2D ip = e.getPoint(); - - vertex = pickSupport.getVertex(layout, ip.getX(), ip.getY()); - if(vertex != null) { - if(pickedVertexState.isPicked(vertex) == false) { - pickedVertexState.clear(); - pickedVertexState.pick(vertex, true); - } - // layout.getLocation applies the layout transformer so - // q is transformed by the layout transformer only - Point2D q = layout.transform(vertex); - // transform the mouse point to graph coordinate system - Point2D gp = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.LAYOUT, ip); - - offsetx = (float) (gp.getX()-q.getX()); - offsety = (float) (gp.getY()-q.getY()); - } else if((edge = pickSupport.getEdge(layout, ip.getX(), ip.getY())) != null) { - pickedEdgeState.clear(); - pickedEdgeState.pick(edge, true); - } else { - vv.addPostRenderPaintable(lensPaintable); - pickedEdgeState.clear(); - pickedVertexState.clear(); - } - - } else if(e.getModifiers() == addToSelectionModifiers) { - vv.addPostRenderPaintable(lensPaintable); - rect.setFrameFromDiagonal(down,down); - Point2D ip = e.getPoint(); - vertex = pickSupport.getVertex(layout, ip.getX(), ip.getY()); - if(vertex != null) { - boolean wasThere = pickedVertexState.pick(vertex, !pickedVertexState.isPicked(vertex)); - if(wasThere) { - vertex = null; - } else { - - // layout.getLocation applies the layout transformer so - // q is transformed by the layout transformer only - Point2D q = layout.transform(vertex); - // translate mouse point to graph coord system - Point2D gp = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.LAYOUT, ip); - - offsetx = (float) (gp.getX()-q.getX()); - offsety = (float) (gp.getY()-q.getY()); - } - } else if((edge = pickSupport.getEdge(layout, ip.getX(), ip.getY())) != null) { - pickedEdgeState.pick(edge, !pickedEdgeState.isPicked(edge)); - } - } - } - if(vertex != null) e.consume(); - } - - /** - * If the mouse is dragging a rectangle, pick the - * Vertices contained in that rectangle - * - * clean up settings from mousePressed - */ - @SuppressWarnings("unchecked") - public void mouseReleased(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - if(e.getModifiers() == modifiers) { - if(down != null) { - Point2D out = e.getPoint(); - - if(vertex == null && heyThatsTooClose(down, out, 5) == false) { - pickContainedVertices(vv, down, out, true); - } - } - } else if(e.getModifiers() == this.addToSelectionModifiers) { - if(down != null) { - Point2D out = e.getPoint(); - - if(vertex == null && heyThatsTooClose(down,out,5) == false) { - pickContainedVertices(vv, down, out, false); - } - } - } - down = null; - vertex = null; - edge = null; - rect.setFrame(0,0,0,0); - vv.removePostRenderPaintable(lensPaintable); - vv.repaint(); - } - - /** - * If the mouse is over a picked vertex, drag all picked - * vertices with the mouse. 
- * If the mouse is not over a Vertex, draw the rectangle - * to select multiple Vertices - * - */ - @SuppressWarnings("unchecked") - public void mouseDragged(MouseEvent e) { - if(locked == false) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - if(vertex != null) { - Point p = e.getPoint(); - Point2D graphPoint = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(p); - Point2D graphDown = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(down); - Layout layout = vv.getGraphLayout(); - double dx = graphPoint.getX()-graphDown.getX(); - double dy = graphPoint.getY()-graphDown.getY(); - PickedState ps = vv.getPickedVertexState(); - - for(V v : ps.getPicked()) { - Point2D vp = layout.transform(v); - vp.setLocation(vp.getX()+dx, vp.getY()+dy); - layout.setLocation(v, vp); - } - down = p; - - } else { - Point2D out = e.getPoint(); - if(e.getModifiers() == this.addToSelectionModifiers || - e.getModifiers() == modifiers) { - rect.setFrameFromDiagonal(down,out); - } - } - if(vertex != null) e.consume(); - vv.repaint(); - } - } - - /** - * rejects picking if the rectangle is too small, like - * if the user meant to select one vertex but moved the - * mouse slightly - * @param p - * @param q - * @param min - * @return - */ - private boolean heyThatsTooClose(Point2D p, Point2D q, double min) { - return Math.abs(p.getX()-q.getX()) < min && - Math.abs(p.getY()-q.getY()) < min; - } - - /** - * pick the vertices inside the rectangle created from points - * 'down' and 'out' - * - */ - protected void pickContainedVertices(VisualizationViewer vv, Point2D down, Point2D out, boolean clear) { - - Layout layout = vv.getGraphLayout(); - PickedState pickedVertexState = vv.getPickedVertexState(); - - Rectangle2D pickRectangle = new Rectangle2D.Double(); - pickRectangle.setFrameFromDiagonal(down,out); - - if(pickedVertexState != null) { - if(clear) { - pickedVertexState.clear(); - } - GraphElementAccessor pickSupport = vv.getPickSupport(); - - Collection picked = pickSupport.getVertices(layout, pickRectangle); - for(V v : picked) { - pickedVertexState.pick(v, true); - } - } - } - - public void mouseClicked(MouseEvent e) { - } - - public void mouseEntered(MouseEvent e) { - JComponent c = (JComponent)e.getSource(); - c.setCursor(cursor); - } - - public void mouseExited(MouseEvent e) { - JComponent c = (JComponent)e.getSource(); - c.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); - } - - public void mouseMoved(MouseEvent e) { - } - - /** - * @return Returns the locked. - */ - public boolean isLocked() { - return locked; - } - - /** - * @param locked The locked to set. - */ - public void setLocked(boolean locked) { - this.locked = locked; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/PluggableGraphMouse.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/PluggableGraphMouse.java deleted file mode 100644 index e4afc2f6..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/PluggableGraphMouse.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Jul 7, 2005 - */ - -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.event.MouseMotionListener; -import java.awt.event.MouseWheelEvent; -import java.awt.event.MouseWheelListener; -import java.util.LinkedHashSet; -import java.util.Set; - -import edu.uci.ics.jung.visualization.VisualizationViewer; - -/** - * a GraphMouse that accepts plugins for various mouse events. - * - * @author Tom Nelson - * - * - */ -public class PluggableGraphMouse implements VisualizationViewer.GraphMouse { - - MouseListener[] mouseListeners; - MouseMotionListener[] mouseMotionListeners; - MouseWheelListener[] mouseWheelListeners; - Set mousePluginList = new LinkedHashSet(); - Set mouseMotionPluginList = new LinkedHashSet(); - Set mouseWheelPluginList = new LinkedHashSet(); - - public void add(GraphMousePlugin plugin) { - if(plugin instanceof MouseListener) { - mousePluginList.add(plugin); - mouseListeners = null; - } - if(plugin instanceof MouseMotionListener) { - mouseMotionPluginList.add((MouseMotionListener)plugin); - mouseMotionListeners = null; - } - if(plugin instanceof MouseWheelListener) { - mouseWheelPluginList.add((MouseWheelListener)plugin); - mouseWheelListeners = null; - } - } - - public void remove(GraphMousePlugin plugin) { - if(plugin instanceof MouseListener) { - boolean wasThere = mousePluginList.remove(plugin); - if(wasThere) mouseListeners = null; - } - if(plugin instanceof MouseMotionListener) { - boolean wasThere = mouseMotionPluginList.remove(plugin); - if(wasThere) mouseMotionListeners = null; - } - if(plugin instanceof MouseWheelListener) { - boolean wasThere = mouseWheelPluginList.remove(plugin); - if(wasThere) mouseWheelListeners = null; - } - } - - private void checkMouseListeners() { - if(mouseListeners == null) { - mouseListeners = (MouseListener[]) - mousePluginList.toArray(new MouseListener[mousePluginList.size()]); - } - } - - private void checkMouseMotionListeners() { - if(mouseMotionListeners == null){ - mouseMotionListeners = (MouseMotionListener[]) - mouseMotionPluginList.toArray(new MouseMotionListener[mouseMotionPluginList.size()]); - } - } - - private void checkMouseWheelListeners() { - if(mouseWheelListeners == null) { - mouseWheelListeners = (MouseWheelListener[]) - mouseWheelPluginList.toArray(new MouseWheelListener[mouseWheelPluginList.size()]); - } - } - - public void mouseClicked(MouseEvent e) { - checkMouseListeners(); - for(int i=0; i 0) { - cw = -1; - } - // dot product for angle - double angle = - cw*Math.acos( ( x1*x2 + y1*y2 ) / - ( Math.sqrt( x1*x1 + y1*y1 ) * - Math.sqrt( x2*x2 + y2*y2 ) ) ); - if(Double.isNaN(angle)) { - angle = 0; - } - return angle; - } - - public void mouseClicked(MouseEvent e) { - } - - public void mouseEntered(MouseEvent e) { - } - - public void mouseExited(MouseEvent e) { - } - - public void mouseMoved(MouseEvent e) { - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteAnimatedPickingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteAnimatedPickingGraphMousePlugin.java deleted file mode 100644 index a68dd7d3..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteAnimatedPickingGraphMousePlugin.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. 
- * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.InputEvent; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.event.MouseMotionListener; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; - -/** - * A version of the AnimatedPickingGraphMousePlugin that is for - * the SatelliteVisualizationViewer. The difference it that when - * you pick a Vertex in the Satellite View, the 'master view' is - * translated to move that Vertex to the center. - * @see AnimatedPickingGraphMousePlugin - * @author Tom Nelson - */ -public class SatelliteAnimatedPickingGraphMousePlugin extends AnimatedPickingGraphMousePlugin - implements MouseListener, MouseMotionListener { - - /** - * create an instance - * - */ - public SatelliteAnimatedPickingGraphMousePlugin() { - this(InputEvent.BUTTON1_MASK | InputEvent.CTRL_MASK); - } - - public SatelliteAnimatedPickingGraphMousePlugin(int selectionModifiers) { - super(selectionModifiers); - } - - /** - * override subclass method to translate the master view instead - * of this satellite view - * - */ - @SuppressWarnings("unchecked") - public void mouseReleased(MouseEvent e) { - if (e.getModifiers() == modifiers) { - final VisualizationViewer vv = (VisualizationViewer) e.getSource(); - if (vv instanceof SatelliteVisualizationViewer) { - final VisualizationViewer vvMaster = - ((SatelliteVisualizationViewer) vv).getMaster(); - - if (vertex != null) { - Layout layout = vvMaster.getGraphLayout(); - Point2D q = layout.transform(vertex); - Point2D lvc = - vvMaster.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.LAYOUT, vvMaster.getCenter()); - final double dx = (lvc.getX() - q.getX()) / 10; - final double dy = (lvc.getY() - q.getY()) / 10; - - Runnable animator = new Runnable() { - - public void run() { - for (int i = 0; i < 10; i++) { - vvMaster.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT).translate(dx, - dy); - try { - Thread.sleep(100); - } catch (InterruptedException ex) { - } - } - } - }; - Thread thread = new Thread(animator); - thread.start(); - } - } - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteRotatingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteRotatingGraphMousePlugin.java deleted file mode 100644 index 34d54b1b..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteRotatingGraphMousePlugin.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Aug 15, 2005 - */ - -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.MouseEvent; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * Mouse events in the SatelliteView that match the modifiers - * will cause the Main view to rotate - * @see RotatingGraphMousePlugin - * @author Tom Nelson - * - */ -public class SatelliteRotatingGraphMousePlugin extends RotatingGraphMousePlugin { - - public SatelliteRotatingGraphMousePlugin() { - super(); - } - - public SatelliteRotatingGraphMousePlugin(int modifiers) { - super(modifiers); - } - /** - * check the modifiers. If accepted, use the mouse drag motion - * to rotate the graph in the master view - */ - public void mouseDragged(MouseEvent e) { - if(down == null) return; - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - boolean accepted = checkModifiers(e); - if(accepted) { - if(vv instanceof SatelliteVisualizationViewer) { - VisualizationViewer vvMaster = - ((SatelliteVisualizationViewer)vv).getMaster(); - - MutableTransformer modelTransformerMaster = - vvMaster.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - - // rotate - vv.setCursor(cursor); - // I want to compute rotation based on the view coordinates of the - // lens center in the satellite view. - // translate the master view center to layout coords, then translate - // that point to the satellite view's view coordinate system.... - Point2D center = vv.getRenderContext().getMultiLayerTransformer().transform(vvMaster.getRenderContext().getMultiLayerTransformer().inverseTransform(vvMaster.getCenter())); - Point2D q = down; - Point2D p = e.getPoint(); - Point2D v1 = new Point2D.Double(center.getX()-p.getX(), center.getY()-p.getY()); - Point2D v2 = new Point2D.Double(center.getX()-q.getX(), center.getY()-q.getY()); - double theta = angleBetween(v1, v2); - modelTransformerMaster.rotate(-theta, - vvMaster.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.VIEW, vvMaster.getCenter())); - down.x = e.getX(); - down.y = e.getY(); - } - e.consume(); - } - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteScalingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteScalingGraphMousePlugin.java deleted file mode 100644 index 56643dd3..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteScalingGraphMousePlugin.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Aug 15, 2005 - */ - -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.MouseWheelEvent; - -import edu.uci.ics.jung.visualization.VisualizationViewer; - -/** - * Overrides ScalingGraphMousePlugin so that mouse events in the - * satellite view will cause scaling in the main view - * - * @see ScalingGraphMousePlugin - * @author Tom Nelson - * - */ -public class SatelliteScalingGraphMousePlugin extends ScalingGraphMousePlugin { - - public SatelliteScalingGraphMousePlugin(ScalingControl scaler, int modifiers) { - super(scaler, modifiers); - } - - public SatelliteScalingGraphMousePlugin(ScalingControl scaler, int modifiers, float in, float out) { - super(scaler, modifiers, in, out); - } - - /** - * zoom the master view display in or out, depending on the direction of the - * mouse wheel motion. - */ - public void mouseWheelMoved(MouseWheelEvent e) { - boolean accepted = checkModifiers(e); - if(accepted == true) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - - if(vv instanceof SatelliteVisualizationViewer) { - VisualizationViewer vvMaster = - ((SatelliteVisualizationViewer)vv).getMaster(); - - int amount = e.getWheelRotation(); - - if(amount > 0) { - scaler.scale(vvMaster, in, vvMaster.getCenter()); - - } else if(amount < 0) { - scaler.scale(vvMaster, out, vvMaster.getCenter()); - } - e.consume(); - vv.repaint(); - } - } - } - - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteShearingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteShearingGraphMousePlugin.java deleted file mode 100644 index 2b3a6163..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteShearingGraphMousePlugin.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Aug 15, 2005 - */ - -package edu.uci.ics.jung.visualization.control; - -import java.awt.Dimension; -import java.awt.event.MouseEvent; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * Overrides ShearingGraphMousePlugin so that mouse events in the - * satellite view cause shearing of the main view - * - * @see ShearingGraphMousePlugin - * @author Tom Nelson - * - */ -public class SatelliteShearingGraphMousePlugin extends ShearingGraphMousePlugin { - - public SatelliteShearingGraphMousePlugin() { - super(); - } - - public SatelliteShearingGraphMousePlugin(int modifiers) { - super(modifiers); - } - - /** - * overridden to shear the main view - */ - public void mouseDragged(MouseEvent e) { - if(down == null) return; - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - boolean accepted = checkModifiers(e); - if(accepted) { - if(vv instanceof SatelliteVisualizationViewer) { - VisualizationViewer vvMaster = - ((SatelliteVisualizationViewer)vv).getMaster(); - - MutableTransformer modelTransformerMaster = - vvMaster.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - - vv.setCursor(cursor); - Point2D q = down; - Point2D p = e.getPoint(); - float dx = (float) (p.getX()-q.getX()); - float dy = (float) (p.getY()-q.getY()); - - Dimension d = vv.getSize(); - float shx = 2.f*dx/d.height; - float shy = 2.f*dy/d.width; - // I want to compute shear based on the view coordinates of the - // lens center in the satellite view. - // translate the master view center to layout coords, then translate - // that point to the satellite view's view coordinate system.... - Point2D center = vv.getRenderContext().getMultiLayerTransformer().transform(vvMaster.getRenderContext().getMultiLayerTransformer().inverseTransform(vvMaster.getCenter())); - if(p.getX() < center.getX()) { - shy = -shy; - } - if(p.getY() < center.getY()) { - shx = -shx; - } - modelTransformerMaster.shear(-shx, -shy, vvMaster.getCenter()); - - down.x = e.getX(); - down.y = e.getY(); - } - e.consume(); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteTranslatingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteTranslatingGraphMousePlugin.java deleted file mode 100644 index 34c42e99..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteTranslatingGraphMousePlugin.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Aug 15, 2005 - */ - -package edu.uci.ics.jung.visualization.control; - -import java.awt.Cursor; -import java.awt.event.MouseEvent; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * Overrides TranslatingGraphMousePlugin so that mouse events in - * the satellite view cause translating of the main view - * - * @see TranslatingGraphMousePlugin - * @author Tom Nelson - * - */ -public class SatelliteTranslatingGraphMousePlugin extends - TranslatingGraphMousePlugin { - - public SatelliteTranslatingGraphMousePlugin() { - super(); - } - - public SatelliteTranslatingGraphMousePlugin(int modifiers) { - super(modifiers); - } - - /** - * chack the modifiers. If accepted, translate the main view according - * to the dragging of the mouse pointer in the satellite view - * @param e the event - */ - public void mouseDragged(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - boolean accepted = checkModifiers(e); - if(accepted) { - if(vv instanceof SatelliteVisualizationViewer) { - VisualizationViewer vvMaster = - ((SatelliteVisualizationViewer)vv).getMaster(); - - MutableTransformer modelTransformerMaster = - vvMaster.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - vv.setCursor(Cursor.getPredefinedCursor(Cursor.MOVE_CURSOR)); - try { - Point2D q = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(down); - Point2D p = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(e.getPoint()); - float dx = (float) (p.getX()-q.getX()); - float dy = (float) (p.getY()-q.getY()); - - modelTransformerMaster.translate(-dx, -dy); - down.x = e.getX(); - down.y = e.getY(); - } catch(RuntimeException ex) { - System.err.println("down = "+down+", e = "+e); - throw ex; - } - } - e.consume(); - } - } - - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteVisualizationViewer.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteVisualizationViewer.java deleted file mode 100644 index 4ba83096..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/SatelliteVisualizationViewer.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 15, 2005 - */ - -package edu.uci.ics.jung.visualization.control; - -import java.awt.Color; -import java.awt.Dimension; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.Shape; -import java.awt.geom.AffineTransform; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationModel; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.transform.MutableAffineTransformer; -import edu.uci.ics.jung.visualization.transform.shape.ShapeTransformer; - -/** - * A VisualizationViewer that can act as a satellite view for another - * (master) VisualizationViewer. In this view, the full graph is always visible - * and all mouse actions affect the graph in the master view. - * - * A rectangular shape in the satellite view shows the visible bounds of - * the master view. 
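A short sketch of attaching such a satellite view to an existing master viewer. The helper name and the BorderLayout arrangement are assumptions; the (master, preferredSize) constructor is the one defined in this class.

    import java.awt.BorderLayout;
    import java.awt.Dimension;
    import javax.swing.JComponent;
    import javax.swing.JPanel;
    import edu.uci.ics.jung.visualization.VisualizationViewer;
    import edu.uci.ics.jung.visualization.control.SatelliteVisualizationViewer;

    public class SatelliteSketch {
        /** Returns a panel holding the master viewer plus a small overview of the same graph. */
        public static <V, E> JComponent withOverview(VisualizationViewer<V, E> vvMaster) {
            // The satellite shares the master's model and picked state and installs a
            // ModalSatelliteGraphMouse, so gestures in the overview act on the master view.
            SatelliteVisualizationViewer<V, E> satellite =
                new SatelliteVisualizationViewer<V, E>(vvMaster, new Dimension(200, 200));
            JPanel panel = new JPanel(new BorderLayout());
            panel.add(vvMaster, BorderLayout.CENTER);
            panel.add(satellite, BorderLayout.EAST);
            return panel;
        }
    }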
- * - * @author Tom Nelson - * - * - */ -@SuppressWarnings("serial") -public class SatelliteVisualizationViewer - extends VisualizationViewer { - - /** - * the master VisualizationViewer that this is a satellite view for - */ - protected VisualizationViewer master; - - /** - * @param layout - * @param renderer - */ - public SatelliteVisualizationViewer(VisualizationViewer master) { - this(master, master.getModel()); - } - - /** - * @param layout - * @param renderer - * @param preferredSize - */ - public SatelliteVisualizationViewer(VisualizationViewer master, - Dimension preferredSize) { - this(master, master.getModel(), preferredSize); - } - - /** - * used internally, as the sattellite should always share the model of - * the master - * @param model - * @param renderer - */ - protected SatelliteVisualizationViewer(VisualizationViewer master, VisualizationModel model) { - this(master, model, new Dimension(300,300)); - } - - /** - * Used internally, as the satellite should always share the model of the master - * @param master the master view - * @param model - * @param renderer - * @param preferredSize - */ - protected SatelliteVisualizationViewer(VisualizationViewer master, VisualizationModel model, - Dimension preferredSize) { - super(model, preferredSize); - this.master = master; - - // create a graph mouse with custom plugins to affect the master view - ModalGraphMouse gm = new ModalSatelliteGraphMouse(); - setGraphMouse(gm); - - // this adds the Lens to the satellite view - addPreRenderPaintable(new ViewLens(this, master)); - - // get a copy of the current layout transform - // it may have been scaled to fit the graph - AffineTransform modelLayoutTransform = - new AffineTransform(master.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT).getTransform()); - - // I want no layout transformations in the satellite view - // this resets the auto-scaling that occurs in the super constructor - getRenderContext().getMultiLayerTransformer().setTransformer(Layer.LAYOUT, new MutableAffineTransformer(modelLayoutTransform)); - - // make sure the satellite listens for changes in the master - master.addChangeListener(this); - - // share the picked state of the master - setPickedVertexState(master.getPickedVertexState()); - setPickedEdgeState(master.getPickedEdgeState()); - } - - /** - * @return Returns the master. 
- */ - public VisualizationViewer getMaster() { - return master; - } - - /** - * A four-sided shape that represents the visible part of the - * master view and is drawn in the satellite view - * - * @author Tom Nelson - * - * - */ - static class ViewLens implements Paintable { - - VisualizationViewer master; - VisualizationViewer vv; - - public ViewLens(VisualizationViewer vv, VisualizationViewer master) { - this.vv = vv; - this.master = master; - } - public void paint(Graphics g) { - ShapeTransformer masterViewTransformer = - master.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW); - ShapeTransformer masterLayoutTransformer = master.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - ShapeTransformer vvLayoutTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - - Shape lens = master.getBounds(); - lens = masterViewTransformer.inverseTransform(lens); - lens = masterLayoutTransformer.inverseTransform(lens); - lens = vvLayoutTransformer.transform(lens); - Graphics2D g2d = (Graphics2D)g; - Color old = g.getColor(); - Color lensColor = master.getBackground(); - vv.setBackground(lensColor.darker()); - g.setColor(lensColor); - g2d.fill(lens); - g.setColor(Color.gray); - g2d.draw(lens); - g.setColor(old); - } - - public boolean useTransform() { - return true; - } - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/ScalingControl.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/ScalingControl.java deleted file mode 100644 index 75292f90..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/ScalingControl.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.VisualizationServer; - -public interface ScalingControl { - - /** - * zoom the display in or out - * @param vv the VisualizationViewer - * @param amount how much to adjust scale by - * @param at where to adjust scale from - */ - void scale(VisualizationServer vv, float amount, Point2D at); - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/ScalingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/ScalingGraphMousePlugin.java deleted file mode 100644 index d08ffac2..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/ScalingGraphMousePlugin.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.event.MouseEvent; -import java.awt.event.MouseWheelEvent; -import java.awt.event.MouseWheelListener; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.VisualizationViewer; - -/** - * ScalingGraphMouse applies a scaling transformation to the graph layout. - * The Vertices get closer or farther apart, but do not themselves change - * size. 
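One way to get this wheel-driven zooming on a viewer is sketched below. The helper name and the choice of CrossoverScalingControl (the control also used by ModalSatelliteGraphMouse earlier in this diff) are assumptions; any other ScalingControl implementation could be substituted.

    import edu.uci.ics.jung.visualization.VisualizationViewer;
    import edu.uci.ics.jung.visualization.control.CrossoverScalingControl;
    import edu.uci.ics.jung.visualization.control.PluggableGraphMouse;
    import edu.uci.ics.jung.visualization.control.ScalingGraphMousePlugin;

    public class WheelZoomSketch {
        /** Installs modifier-free mouse-wheel zooming on the given viewer. */
        public static void install(VisualizationViewer<?, ?> vv) {
            PluggableGraphMouse gm = new PluggableGraphMouse();
            // 0 = no modifier keys required; 1.1f and 1/1.1f are the zoom-in/out factors.
            gm.add(new ScalingGraphMousePlugin(new CrossoverScalingControl(), 0, 1.1f, 1 / 1.1f));
            vv.setGraphMouse(gm);
        }
    }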
ScalingGraphMouse uses MouseWheelEvents to apply the scaling. - * - * @author Tom Nelson - */ -public class ScalingGraphMousePlugin extends AbstractGraphMousePlugin - implements MouseWheelListener { - - /** - * the amount to zoom in by - */ - protected float in = 1.1f; - /** - * the amount to zoom out by - */ - protected float out = 1/1.1f; - - /** - * whether to center the zoom at the current mouse position - */ - protected boolean zoomAtMouse = true; - - /** - * controls scaling operations - */ - protected ScalingControl scaler; - - public ScalingGraphMousePlugin(ScalingControl scaler, int modifiers) { - this(scaler, modifiers, 1.1f, 1/1.1f); - } - - public ScalingGraphMousePlugin(ScalingControl scaler, int modifiers, float in, float out) { - super(modifiers); - this.scaler = scaler; - this.in = in; - this.out = out; - } - /** - * @param zoomAtMouse The zoomAtMouse to set. - */ - public void setZoomAtMouse(boolean zoomAtMouse) { - this.zoomAtMouse = zoomAtMouse; - } - - /** - * zoom the display in or out, depending on the direction of the - * mouse wheel motion. - */ - public void mouseWheelMoved(MouseWheelEvent e) { - boolean accepted = checkModifiers(e); - if(accepted == true) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - Point2D mouse = e.getPoint(); - Point2D center = vv.getCenter(); - int amount = e.getWheelRotation(); - if(zoomAtMouse) { - if(amount > 0) { - scaler.scale(vv, in, mouse); - } else if(amount < 0) { - scaler.scale(vv, out, mouse); - } - } else { - if(amount > 0) { - scaler.scale(vv, in, center); - } else if(amount < 0) { - scaler.scale(vv, out, center); - } - } - e.consume(); - vv.repaint(); - } - } - /** - * @return Returns the zoom in value. - */ - public float getIn() { - return in; - } - /** - * @param in The zoom in value to set. - */ - public void setIn(float in) { - this.in = in; - } - /** - * @return Returns the zoom out value. - */ - public float getOut() { - return out; - } - /** - * @param out The zoom out value to set. - */ - public void setOut(float out) { - this.out = out; - } - - public ScalingControl getScaler() { - return scaler; - } - - public void setScaler(ScalingControl scaler) { - this.scaler = scaler; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/ShearingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/ShearingGraphMousePlugin.java deleted file mode 100644 index 46d6db8c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/ShearingGraphMousePlugin.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.BasicStroke; -import java.awt.Color; -import java.awt.Cursor; -import java.awt.Dimension; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.Point; -import java.awt.RenderingHints; -import java.awt.Toolkit; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.event.MouseMotionListener; -import java.awt.geom.Point2D; -import java.awt.image.BufferedImage; -import java.util.Collections; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * ShearingGraphMousePlugin allows the user to drag with the mouse - * to shear the transform either in the horizontal or vertical direction. - * By default, the control or meta key must be depressed to activate - * shearing. - * - * - * @author Tom Nelson - */ -public class ShearingGraphMousePlugin extends AbstractGraphMousePlugin - implements MouseListener, MouseMotionListener { - - private static int mask = MouseEvent.CTRL_MASK; - - static { - if(System.getProperty("os.name").startsWith("Mac")) { - mask = MouseEvent.META_MASK; - } - } - /** - * create an instance with default modifier values - */ - public ShearingGraphMousePlugin() { - this(MouseEvent.BUTTON1_MASK | mask); - } - - /** - * create an instance with passed modifier values - * @param modifiers the mouse modifiers to use - */ - public ShearingGraphMousePlugin(int modifiers) { - super(modifiers); - Dimension cd = Toolkit.getDefaultToolkit().getBestCursorSize(16,16); - BufferedImage cursorImage = - new BufferedImage(cd.width,cd.height,BufferedImage.TYPE_INT_ARGB); - Graphics g = cursorImage.createGraphics(); - Graphics2D g2 = (Graphics2D)g; - g2.addRenderingHints(Collections.singletonMap(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON)); - g.setColor(new Color(0,0,0,0)); - g.fillRect(0,0,16,16); - - int left = 0; - int top = 0; - int right = 15; - int bottom = 15; - - g.setColor(Color.white); - g2.setStroke(new BasicStroke(3)); - g.drawLine(left+2,top+5,right-2,top+5); - g.drawLine(left+2,bottom-5,right-2,bottom-5); - g.drawLine(left+2,top+5,left+4,top+3); - g.drawLine(left+2,top+5,left+4,top+7); - g.drawLine(right-2,bottom-5,right-4,bottom-3); - g.drawLine(right-2,bottom-5,right-4,bottom-7); - - g.setColor(Color.black); - g2.setStroke(new BasicStroke(1)); - g.drawLine(left+2,top+5,right-2,top+5); - g.drawLine(left+2,bottom-5,right-2,bottom-5); - g.drawLine(left+2,top+5,left+4,top+3); - g.drawLine(left+2,top+5,left+4,top+7); - g.drawLine(right-2,bottom-5,right-4,bottom-3); - g.drawLine(right-2,bottom-5,right-4,bottom-7); - g.dispose(); - cursor = Toolkit.getDefaultToolkit().createCustomCursor(cursorImage, new Point(), "RotateCursor"); - - } - - /** - * - * @param e the event - */ - public void mousePressed(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - boolean accepted = checkModifiers(e); - down = e.getPoint(); - if(accepted) { - vv.setCursor(cursor); - } - } - - /** - * - */ - public void mouseReleased(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - down = null; - vv.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); - } - - /** - * - * - * - * - */ - public void mouseDragged(MouseEvent e) { - if(down == null) return; - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - boolean accepted = 
checkModifiers(e); - if(accepted) { - MutableTransformer modelTransformer = - vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - vv.setCursor(cursor); - Point2D q = down; - Point2D p = e.getPoint(); - float dx = (float) (p.getX()-q.getX()); - float dy = (float) (p.getY()-q.getY()); - - Dimension d = vv.getSize(); - float shx = 2.f*dx/d.height; - float shy = 2.f*dy/d.width; - Point2D center = vv.getCenter(); - if(p.getX() < center.getX()) { - shy = -shy; - } - if(p.getY() < center.getY()) { - shx = -shx; - } - modelTransformer.shear(shx, shy, center); - down.x = e.getX(); - down.y = e.getY(); - - e.consume(); - } - } - - public void mouseClicked(MouseEvent e) { - // TODO Auto-generated method stub - - } - - public void mouseEntered(MouseEvent e) { - // TODO Auto-generated method stub - - } - - public void mouseExited(MouseEvent e) { - // TODO Auto-generated method stub - - } - - public void mouseMoved(MouseEvent e) { - // TODO Auto-generated method stub - - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/TranslatingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/TranslatingGraphMousePlugin.java deleted file mode 100644 index 55c443d5..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/TranslatingGraphMousePlugin.java +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.Cursor; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.event.MouseMotionListener; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * TranslatingGraphMousePlugin uses a MouseButtonOne press and - * drag gesture to translate the graph display in the x and y - * direction. The default MouseButtonOne modifier can be overridden - * to cause a different mouse gesture to translate the display. - * - * - * @author Tom Nelson - */ -public class TranslatingGraphMousePlugin extends AbstractGraphMousePlugin - implements MouseListener, MouseMotionListener { - - /** - */ - public TranslatingGraphMousePlugin() { - this(MouseEvent.BUTTON1_MASK); - } - - /** - * create an instance with passed modifer value - * @param modifiers the mouse event modifier to activate this function - */ - public TranslatingGraphMousePlugin(int modifiers) { - super(modifiers); - this.cursor = Cursor.getPredefinedCursor(Cursor.MOVE_CURSOR); - } - - /** - * Check the event modifiers. Set the 'down' point for later - * use. 
If this event satisfies the modifiers, change the cursor - * to the system 'move cursor' - * @param e the event - */ - public void mousePressed(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - boolean accepted = checkModifiers(e); - down = e.getPoint(); - if(accepted) { - vv.setCursor(cursor); - } - } - - /** - * unset the 'down' point and change the cursoe back to the system - * default cursor - */ - public void mouseReleased(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - down = null; - vv.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); - } - - /** - * chack the modifiers. If accepted, translate the graph according - * to the dragging of the mouse pointer - * @param e the event - */ - public void mouseDragged(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - boolean accepted = checkModifiers(e); - if(accepted) { - MutableTransformer modelTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - vv.setCursor(cursor); - try { - Point2D q = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(down); - Point2D p = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(e.getPoint()); - float dx = (float) (p.getX()-q.getX()); - float dy = (float) (p.getY()-q.getY()); - - modelTransformer.translate(dx, dy); - down.x = e.getX(); - down.y = e.getY(); - } catch(RuntimeException ex) { - System.err.println("down = "+down+", e = "+e); - throw ex; - } - - e.consume(); - vv.repaint(); - } - } - - public void mouseClicked(MouseEvent e) { - // TODO Auto-generated method stub - - } - - public void mouseEntered(MouseEvent e) { - // TODO Auto-generated method stub - - } - - public void mouseExited(MouseEvent e) { - // TODO Auto-generated method stub - - } - - public void mouseMoved(MouseEvent e) { - // TODO Auto-generated method stub - - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/ViewScalingControl.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/ViewScalingControl.java deleted file mode 100644 index f1acb78c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/ViewScalingControl.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationServer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * ViewScalingGraphMouse applies a scaling transform to the View - * of the graph. This causes all elements of the graph to grow - * larger or smaller. ViewScalingGraphMouse, by default, is activated - * by the MouseWheel when the control key is pressed. The control - * key modifier can be overridden in the contstructor. - * - * @author Tom Nelson - */ -public class ViewScalingControl implements ScalingControl { - - /** - * zoom the display in or out, depending on the direction of the - * mouse wheel motion. 
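Because ViewScalingControl is a plain ScalingControl, it can also be driven programmatically, for example from toolbar zoom buttons. A sketch under that assumption (the helper name and factor values are illustrative, not part of the deleted sources):

    import edu.uci.ics.jung.visualization.VisualizationViewer;
    import edu.uci.ics.jung.visualization.control.ScalingControl;
    import edu.uci.ics.jung.visualization.control.ViewScalingControl;

    public class ViewZoomSketch {
        private static final ScalingControl VIEW_SCALER = new ViewScalingControl();

        /** Scales only the view transform, so every graph element grows or shrinks uniformly. */
        public static void zoom(VisualizationViewer<?, ?> vv, float factor) {
            // factor > 1 zooms in, factor < 1 zooms out, centred on the middle of the viewer.
            VIEW_SCALER.scale(vv, factor, vv.getCenter());
        }
    }

Typical calls would be zoom(vv, 1.1f) from a zoom-in button and zoom(vv, 1/1.1f) from a zoom-out button.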
- */ - public void scale(VisualizationServer vv, float amount, Point2D from) { - MutableTransformer viewTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW); - viewTransformer.scale(amount, amount, from); - vv.repaint(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/ViewTranslatingGraphMousePlugin.java b/gui/jung-src/edu/uci/ics/jung/visualization/control/ViewTranslatingGraphMousePlugin.java deleted file mode 100644 index bc3489c8..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/ViewTranslatingGraphMousePlugin.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 8, 2005 - * - */ -package edu.uci.ics.jung.visualization.control; - -import java.awt.Cursor; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.event.MouseMotionListener; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * ViewTranslatingGraphMousePlugin uses a MouseButtonOne press and - * drag gesture to translate the graph display in the x and y - * direction by changing the AffineTransform applied to the Graphics2D. - * The default MouseButtonOne modifier can be overridden - * to cause a different mouse gesture to translate the display. - * - * - * @author Tom Nelson - */ -public class ViewTranslatingGraphMousePlugin extends AbstractGraphMousePlugin - implements MouseListener, MouseMotionListener { - - /** - */ - public ViewTranslatingGraphMousePlugin() { - this(MouseEvent.BUTTON1_MASK); - } - - /** - * create an instance with passed modifer value - * @param modifiers the mouse event modifier to activate this function - */ - public ViewTranslatingGraphMousePlugin(int modifiers) { - super(modifiers); - this.cursor = Cursor.getPredefinedCursor(Cursor.MOVE_CURSOR); - } - - /** - * Check the event modifiers. Set the 'down' point for later - * use. If this event satisfies the modifiers, change the cursor - * to the system 'move cursor' - * @param e the event - */ - public void mousePressed(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - boolean accepted = checkModifiers(e); - down = e.getPoint(); - if(accepted) { - vv.setCursor(cursor); - } - } - - /** - * unset the 'down' point and change the cursoe back to the system - * default cursor - */ - public void mouseReleased(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - down = null; - vv.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); - } - - /** - * chack the modifiers. 
If accepted, translate the graph according - * to the dragging of the mouse pointer - * @param e the event - */ - public void mouseDragged(MouseEvent e) { - VisualizationViewer vv = (VisualizationViewer)e.getSource(); - boolean accepted = checkModifiers(e); - if(accepted) { - MutableTransformer viewTransformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW); - vv.setCursor(cursor); - try { - Point2D q = viewTransformer.inverseTransform(down); - Point2D p = viewTransformer.inverseTransform(e.getPoint()); - float dx = (float) (p.getX()-q.getX()); - float dy = (float) (p.getY()-q.getY()); - - viewTransformer.translate(dx, dy); - down.x = e.getX(); - down.y = e.getY(); - } catch(RuntimeException ex) { - System.err.println("down = "+down+", e = "+e); - throw ex; - } - - e.consume(); - } - } - - public void mouseClicked(MouseEvent e) { - } - - public void mouseEntered(MouseEvent e) { - } - - public void mouseExited(MouseEvent e) { - } - - public void mouseMoved(MouseEvent e) { - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/control/package.html b/gui/jung-src/edu/uci/ics/jung/visualization/control/package.html deleted file mode 100644 index 394e2776..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/control/package.html +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - -

          Mechanisms for manipulating and controlling a graph visualization, largely -in terms of mouse plugins. - - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/AbstractEdgeShapeTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/AbstractEdgeShapeTransformer.java deleted file mode 100644 index 811f0516..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/AbstractEdgeShapeTransformer.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on March 10, 2005 - */ -package edu.uci.ics.jung.visualization.decorators; - -import java.awt.Shape; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; - - - - -/** - * An interface for decorators that return a - * Shape for a specified edge. - * - * @author Tom Nelson - */ -public abstract class AbstractEdgeShapeTransformer implements Transformer,E>,Shape> { - - /** - * Specifies how far apart to place the control points for edges being - * drawn in parallel. - */ - protected float control_offset_increment = 20.f; - - /** - * Sets the value of control_offset_increment. - */ - public void setControlOffsetIncrement(float y) { - control_offset_increment = y; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/AbstractVertexShapeTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/AbstractVertexShapeTransformer.java deleted file mode 100644 index a503f970..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/AbstractVertexShapeTransformer.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Created on Jul 16, 2004 - * - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
- */ -package edu.uci.ics.jung.visualization.decorators; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; - -import edu.uci.ics.jung.visualization.util.VertexShapeFactory; - - - -/** - * - * @author Joshua O'Madadhain - */ -public abstract class AbstractVertexShapeTransformer implements SettableVertexShapeTransformer -{ - protected Transformer vsf; - protected Transformer varf; - protected VertexShapeFactory factory; - public final static int DEFAULT_SIZE = 8; - public final static float DEFAULT_ASPECT_RATIO = 1.0f; - - public AbstractVertexShapeTransformer(Transformer vsf, Transformer varf) - { - this.vsf = vsf; - this.varf = varf; - factory = new VertexShapeFactory(vsf, varf); - } - - @SuppressWarnings("unchecked") - public AbstractVertexShapeTransformer() - { - this(new ConstantTransformer(DEFAULT_SIZE), - new ConstantTransformer(DEFAULT_ASPECT_RATIO)); - } - - public void setSizeTransformer(Transformer vsf) - { - this.vsf = vsf; - factory = new VertexShapeFactory(vsf, varf); - } - - public void setAspectRatioTransformer(Transformer varf) - { - this.varf = varf; - factory = new VertexShapeFactory(vsf, varf); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/ConstantDirectionalEdgeValueTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/ConstantDirectionalEdgeValueTransformer.java deleted file mode 100644 index a53aadb5..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/ConstantDirectionalEdgeValueTransformer.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Created on Oct 21, 2004 - * - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.visualization.decorators; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.EdgeType; - - -/** - * Returns the constructor-specified value for each edge type. - * - * @author Joshua O'Madadhain - */ -public class ConstantDirectionalEdgeValueTransformer implements Transformer,E>,Number> -{ - protected Double undirected_value; - protected Double directed_value; - - /** - * - * @param undirected - * @param directed - */ - public ConstantDirectionalEdgeValueTransformer(double undirected, double directed) - { - this.undirected_value = new Double(undirected); - this.directed_value = new Double(directed); - } - - /** - * @see Transformer#transform(Object) - */ - public Number transform(Context,E> context) { - Graph graph = context.graph; - E e = context.element; - if (graph.getEdgeType(e) == EdgeType.DIRECTED) - return directed_value; - else - return undirected_value; - } - - /** - * Sets the value returned for undirected edges to value. - * @param value the new value to return for undirected edges - */ - public void setUndirectedValue(double value) - { - this.undirected_value = value; - } - - /** - * Sets the value returned for directed edges to value. 
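This transformer is typically handed to the render context to control where labels sit along an edge. A sketch, assuming the setEdgeLabelClosenessTransformer setter on the JUNG 2.0 RenderContext and a viewer to install it on (neither is shown in this diff):

    import edu.uci.ics.jung.visualization.VisualizationViewer;
    import edu.uci.ics.jung.visualization.decorators.ConstantDirectionalEdgeValueTransformer;

    public class EdgeLabelPlacementSketch {
        /** Places labels 65% of the way along undirected edges and 80% along directed ones. */
        public static <V, E> void install(VisualizationViewer<V, E> vv) {
            vv.getRenderContext().setEdgeLabelClosenessTransformer(
                new ConstantDirectionalEdgeValueTransformer<V, E>(0.65, 0.8));
        }
    }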
- * @param value the new value to return for directed edges - */ - public void setDirectedValue(double value) - { - this.directed_value = value; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/DefaultVertexIconTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/DefaultVertexIconTransformer.java deleted file mode 100644 index 691ac9e7..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/DefaultVertexIconTransformer.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 1, 2005 - */ - -package edu.uci.ics.jung.visualization.decorators; - -import java.util.HashMap; -import java.util.Map; - -import javax.swing.Icon; - -import org.apache.commons.collections15.Transformer; - -/** - * A simple, stateful VertexIconFunction. - * Stores icons in a Map keyed on the Vertex - * - * @author Tom Nelson - * - * - */ -public class DefaultVertexIconTransformer implements Transformer { - - /** - * icon storage - */ - protected Map iconMap = new HashMap(); - - /** - * Returns the icon storage as a Map. - */ - public Map getIconMap() { - return iconMap; - } - - /** - * Sets the icon storage to the specified Map. - */ - public void setIconMap(Map iconMap) { - this.iconMap = iconMap; - } - - /** - * Returns the Icon associated with v. - */ - public Icon transform(V v) { - return iconMap.get(v); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/DirectionalEdgeArrowTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/DirectionalEdgeArrowTransformer.java deleted file mode 100644 index ef3574b1..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/DirectionalEdgeArrowTransformer.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Created on Jul 18, 2004 - * - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.visualization.decorators; - -import java.awt.Shape; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.visualization.util.ArrowFactory; - -/** - * Returns wedge arrows for undirected edges and notched arrows - * for directed edges, of the specified dimensions. 
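A sketch of plugging this transformer into a viewer's render context so directed and undirected edges get the arrow heads just described. The arrow dimensions and the setEdgeArrowTransformer setter (from the JUNG 2.0 RenderContext) are assumptions insofar as they do not appear in this diff:

    import edu.uci.ics.jung.visualization.VisualizationViewer;
    import edu.uci.ics.jung.visualization.decorators.DirectionalEdgeArrowTransformer;

    public class EdgeArrowSketch {
        /** Uses arrows 10 units long and 8 wide, with a 4-unit notch on directed edges. */
        public static <V, E> void install(VisualizationViewer<V, E> vv) {
            vv.getRenderContext().setEdgeArrowTransformer(
                new DirectionalEdgeArrowTransformer<V, E>(10, 8, 4));
        }
    }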
- * - * @author Joshua O'Madadhain - */ -public class DirectionalEdgeArrowTransformer implements Transformer,E>,Shape> { - protected Shape undirected_arrow; - protected Shape directed_arrow; - - public DirectionalEdgeArrowTransformer(int length, int width, int notch_depth) - { - directed_arrow = ArrowFactory.getNotchedArrow(width, length, notch_depth); - undirected_arrow = ArrowFactory.getWedgeArrow(width, length); - } - - /** - * - */ - public Shape transform(Context,E> context) - { - if (context.graph.getEdgeType(context.element) == EdgeType.DIRECTED) - return directed_arrow; - else - return undirected_arrow; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/EdgeShape.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/EdgeShape.java deleted file mode 100644 index 1754a5ee..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/EdgeShape.java +++ /dev/null @@ -1,482 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on March 10, 2005 - */ -package edu.uci.ics.jung.visualization.decorators; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.CubicCurve2D; -import java.awt.geom.Ellipse2D; -import java.awt.geom.GeneralPath; -import java.awt.geom.Line2D; -import java.awt.geom.QuadCurve2D; -import java.awt.geom.Rectangle2D; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.EdgeIndexFunction; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; -import edu.uci.ics.jung.visualization.util.ArrowFactory; - - -/** - * An interface for decorators that return a - * Shape for a specified edge. - * - * All edge shapes must be defined so that their endpoints are at - * (0,0) and (1,0). They will be scaled, rotated and translated into - * position by the PluggableRenderer. - * - * @author Tom Nelson - * @param - */ -public class EdgeShape { - - /** - * a convenience instance for other edge shapes to use - * for self-loop edges where parallel instances will not - * overlay each other. - */ - @SuppressWarnings("unchecked") - protected static Loop loop = new Loop(); - - /** - * a convenience instance for other edge shapes to use - * for self-loop edges where parallel instances overlay each - * other - */ - @SuppressWarnings("unchecked") - protected static SimpleLoop simpleLoop = new SimpleLoop(); - - @SuppressWarnings("unchecked") - protected static Box box = new Box(); - - /** - * An edge shape that renders as a straight line between - * the vertex endpoints. - */ - public static class Line extends AbstractEdgeShapeTransformer { - - /** - * Singleton instance of the Line2D edge shape - */ - private static Line2D instance = new Line2D.Float(0.0f, 0.0f, 1.0f, 0.0f); - /** - * Get the shape for this edge, returning either the - * shared instance or, in the case of self-loop edges, the - * SimpleLoop shared instance. 
- */ - @SuppressWarnings("unchecked") - public Shape transform(Context,E> context) { - Graph graph = context.graph; - E e = context.element; - - Pair endpoints = graph.getEndpoints(e); - if(endpoints != null) { - boolean isLoop = endpoints.getFirst().equals(endpoints.getSecond()); - if (isLoop) { - return loop.transform(context); - } - } - return instance; - } - } - - /** - * An edge shape that renders as a bent-line between the - * vertex endpoints. - */ - public static class BentLine - extends AbstractEdgeShapeTransformer implements IndexedRendering { - - /** - * singleton instance of the BentLine shape - */ - private static GeneralPath instance = new GeneralPath(); - - protected EdgeIndexFunction parallelEdgeIndexFunction; - - @SuppressWarnings("unchecked") - public void setEdgeIndexFunction(EdgeIndexFunction parallelEdgeIndexFunction) { - this.parallelEdgeIndexFunction = parallelEdgeIndexFunction; - loop.setEdgeIndexFunction(parallelEdgeIndexFunction); - } - - - - /** - * @return the parallelEdgeIndexFunction - */ - public EdgeIndexFunction getEdgeIndexFunction() { - return parallelEdgeIndexFunction; - } - - - - /** - * Get the shape for this edge, returning either the - * shared instance or, in the case of self-loop edges, the - * Loop shared instance. - */ - @SuppressWarnings("unchecked") - public Shape transform(Context,E> context) { - Graph graph = context.graph; - E e = context.element; - Pair endpoints = graph.getEndpoints(e); - if(endpoints != null) { - boolean isLoop = endpoints.getFirst().equals(endpoints.getSecond()); - if (isLoop) { - return loop.transform(context); - } - } - - int index = 1; - if(parallelEdgeIndexFunction != null) { - index = parallelEdgeIndexFunction.getIndex(graph, e); - } - float controlY = control_offset_increment + control_offset_increment*index; - instance.reset(); - instance.moveTo(0.0f, 0.0f); - instance.lineTo(0.5f, controlY); - instance.lineTo(1.0f, 1.0f); - return instance; - } - - } - - /** - * An edge shape that renders as a QuadCurve between vertex - * endpoints. - */ - public static class QuadCurve - extends AbstractEdgeShapeTransformer implements IndexedRendering { - - /** - * singleton instance of the QuadCurve shape - */ - private static QuadCurve2D instance = new QuadCurve2D.Float(); - - protected EdgeIndexFunction parallelEdgeIndexFunction; - - @SuppressWarnings("unchecked") - public void setEdgeIndexFunction(EdgeIndexFunction parallelEdgeIndexFunction) { - this.parallelEdgeIndexFunction = parallelEdgeIndexFunction; - loop.setEdgeIndexFunction(parallelEdgeIndexFunction); - } - - /** - * @return the parallelEdgeIndexFunction - */ - public EdgeIndexFunction getEdgeIndexFunction() { - return parallelEdgeIndexFunction; - } - - /** - * Get the shape for this edge, returning either the - * shared instance or, in the case of self-loop edges, the - * Loop shared instance. 
- */ - @SuppressWarnings("unchecked") - public Shape transform(Context,E> context) { - Graph graph = context.graph; - E e = context.element; - Pair endpoints = graph.getEndpoints(e); - if(endpoints != null) { - boolean isLoop = endpoints.getFirst().equals(endpoints.getSecond()); - if (isLoop) { - return loop.transform(context); - } - } - - int index = 1; - if(parallelEdgeIndexFunction != null) { - index = parallelEdgeIndexFunction.getIndex(graph, e); - } - - float controlY = control_offset_increment + - control_offset_increment * index; - instance.setCurve(0.0f, 0.0f, 0.5f, controlY, 1.0f, 0.0f); - return instance; - } - } - - /** - * An edge shape that renders as a CubicCurve between vertex - * endpoints. The two control points are at - * (1/3*length, 2*controlY) and (2/3*length, controlY) - * giving a 'spiral' effect. - */ - public static class CubicCurve - extends AbstractEdgeShapeTransformer implements IndexedRendering { - - /** - * singleton instance of the CubicCurve edge shape - */ - private static CubicCurve2D instance = new CubicCurve2D.Float(); - - protected EdgeIndexFunction parallelEdgeIndexFunction; - - @SuppressWarnings("unchecked") - public void setEdgeIndexFunction(EdgeIndexFunction parallelEdgeIndexFunction) { - this.parallelEdgeIndexFunction = parallelEdgeIndexFunction; - loop.setEdgeIndexFunction(parallelEdgeIndexFunction); - } - - /** - * @return the parallelEdgeIndexFunction - */ - public EdgeIndexFunction getEdgeIndexFunction() { - return parallelEdgeIndexFunction; - } - - /** - * Get the shape for this edge, returning either the - * shared instance or, in the case of self-loop edges, the - * Loop shared instance. - */ - @SuppressWarnings("unchecked") - public Shape transform(Context,E> context) { - Graph graph = context.graph; - E e = context.element; - Pair endpoints = graph.getEndpoints(e); - if(endpoints != null) { - boolean isLoop = endpoints.getFirst().equals(endpoints.getSecond()); - if (isLoop) { - return loop.transform(context); - } - } - - int index = 1; - if(parallelEdgeIndexFunction != null) { - index = parallelEdgeIndexFunction.getIndex(graph, e); - } - - float controlY = control_offset_increment - + control_offset_increment * index; - instance.setCurve(0.0f, 0.0f, 0.33f, 2 * controlY, .66f, -controlY, - 1.0f, 0.0f); - return instance; - } - } - - /** - * An edge shape that renders as a loop with its nadir at the center of the - * vertex. Parallel instances will overlap. - * - * @author Tom Nelson - */ - public static class SimpleLoop extends AbstractEdgeShapeTransformer { - - /** - * singleton instance of the SimpleLoop shape - */ - private static Ellipse2D instance = new Ellipse2D.Float(-.5f, -.5f, 1, 1); - - /** - * getter for the shape - * @return the shared instance - */ - public Shape transform(Context,E> context) { - return instance; - } - } - - /** - * An edge shape that renders as a loop with its nadir at the - * center of the vertex. Parallel instances will not overlap. 
- */ - public static class Loop - extends AbstractEdgeShapeTransformer implements IndexedRendering { - - /** - * singleton instance of the Loop shape - */ - private static Ellipse2D instance = new Ellipse2D.Float(); - - protected EdgeIndexFunction parallelEdgeIndexFunction; - - public void setEdgeIndexFunction(EdgeIndexFunction parallelEdgeIndexFunction) { - this.parallelEdgeIndexFunction = parallelEdgeIndexFunction; - } - - - /** - * @return the parallelEdgeIndexFunction - */ - public EdgeIndexFunction getEdgeIndexFunction() { - return parallelEdgeIndexFunction; - } - - - /** - * Get the shape for this edge, modifying the diameter in the - * case of parallel edges, so they do not overlap - */ - public Shape transform(Context,E> context) { - Graph graph = context.graph; - E e = context.element; - int count = 1; - if(parallelEdgeIndexFunction != null) { - count = parallelEdgeIndexFunction.getIndex(graph, e); - } - - float x = -.5f; - float y = -.5f; - float diam = 1.f; - diam += diam*count/2; - x += x*count/2; - y += y*count/2; - instance.setFrame(x,y,diam,diam); - return instance; - } - } - - /** - * An edge shape that renders as an isosceles triangle whose - * apex is at the destination vertex for directed edges, - * and as a "bowtie" shape for undirected edges. - * @author Joshua O'Madadhain - */ - public static class Wedge extends AbstractEdgeShapeTransformer { - private static GeneralPath triangle; - private static GeneralPath bowtie; - - public Wedge(int width) { - triangle = ArrowFactory.getWedgeArrow(width, 1); - triangle.transform(AffineTransform.getTranslateInstance(1,0)); - bowtie = new GeneralPath(GeneralPath.WIND_EVEN_ODD); - bowtie.moveTo(0, width/2); - bowtie.lineTo(1, -width/2); - bowtie.lineTo(1, width/2); - bowtie.lineTo(0, -width/2); - bowtie.closePath(); - } - - public Shape transform(Context,E> context) { - Graph graph = context.graph; - E e = context.element; - - Pair endpoints = graph.getEndpoints(e); - if(endpoints != null) { - boolean isLoop = endpoints.getFirst().equals(endpoints.getSecond()); - if (isLoop) { - return Loop.instance; - } - } - if (graph.getEdgeType(e) == EdgeType.DIRECTED) - return triangle; - else - return bowtie; - } - } - - /** - * An edge shape that renders as a loop with its nadir at the - * center of the vertex. Parallel instances will not overlap. - */ - public static class Box - extends AbstractEdgeShapeTransformer implements IndexedRendering { - - /** - * singleton instance of the Loop shape - */ - private static Rectangle2D instance = new Rectangle2D.Float(); - - protected EdgeIndexFunction parallelEdgeIndexFunction; - - public void setEdgeIndexFunction(EdgeIndexFunction parallelEdgeIndexFunction) { - this.parallelEdgeIndexFunction = parallelEdgeIndexFunction; - } - - /** - * @return the parallelEdgeIndexFunction - */ - public EdgeIndexFunction getEdgeIndexFunction() { - return parallelEdgeIndexFunction; - } - - /** - * Get the shape for this edge, modifying the diameter in the - * case of parallel edges, so they do not overlap - */ - public Shape transform(Context,E> context) { - Graph graph = context.graph; - E e = context.element; - int count = 1; - if(parallelEdgeIndexFunction != null) { - count = parallelEdgeIndexFunction.getIndex(graph, e); - } - - float x = -.5f; - float y = -.5f; - float diam = 1.f; - diam += diam*count/2; - x += x*count/2; - y += y*count/2; - instance.setFrame(x,y,diam,diam); - return instance; - } - } - - - /** - * An edge shape that renders as a bent-line between the - * vertex endpoints. 
- */ - public static class Orthogonal - extends AbstractEdgeShapeTransformer implements IndexedRendering { - - /** - * singleton instance of the BentLine shape - */ - private static Line2D instance = new Line2D.Float(0.0f, 0.0f, 1.0f, 0.0f); - - protected EdgeIndexFunction edgeIndexFunction; - - @SuppressWarnings("unchecked") - public void setEdgeIndexFunction(EdgeIndexFunction edgeIndexFunction) { - this.edgeIndexFunction = edgeIndexFunction; - box.setEdgeIndexFunction(edgeIndexFunction); - } - - /** - * @return the parallelEdgeIndexFunction - */ - public EdgeIndexFunction getEdgeIndexFunction() { - return edgeIndexFunction; - } - - /** - * Get the shape for this edge, returning either the - * shared instance or, in the case of self-loop edges, the - * Loop shared instance. - */ - @SuppressWarnings("unchecked") - public Shape transform(Context,E> context) { - Graph graph = context.graph; - E e = context.element; - Pair endpoints = graph.getEndpoints(e); - if(endpoints != null) { - boolean isLoop = endpoints.getFirst().equals(endpoints.getSecond()); - if (isLoop) { - return box.transform(context); - } - } - return instance; - } - } - - public static interface IndexedRendering { - void setEdgeIndexFunction(EdgeIndexFunction peif); - EdgeIndexFunction getEdgeIndexFunction(); - } -} - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/EllipseVertexShapeTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/EllipseVertexShapeTransformer.java deleted file mode 100644 index 58690371..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/EllipseVertexShapeTransformer.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Created on Jul 16, 2004 - * - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.visualization.decorators; - -import java.awt.Shape; - -import org.apache.commons.collections15.Transformer; - -/** - * - * @author Joshua O'Madadhain - */ -public class EllipseVertexShapeTransformer extends AbstractVertexShapeTransformer - implements Transformer -{ - public EllipseVertexShapeTransformer() - { - } - public EllipseVertexShapeTransformer(Transformer vsf, Transformer varf) - { - super(vsf, varf); - } - - public Shape transform(V v) - { - return factory.getEllipse(v); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/GradientEdgePaintTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/GradientEdgePaintTransformer.java deleted file mode 100644 index 21b7d3a8..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/GradientEdgePaintTransformer.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Created on Apr 8, 2005 - * - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
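Usage note for the EdgeShape transformers above, continuing the same illustrative viewer and graph (vv and g from the earlier sketch); EdgeShape.QuadCurve is a drop-in for the render context's edge shape transformer.

    // A parallel edge between the same endpoints; QuadCurve offsets parallel edges
    // when an EdgeIndexFunction is installed, and self-loops fall back to the shared
    // Loop instance, as described in the transform() methods above.
    g.addEdge(2, "a", "b", EdgeType.DIRECTED);
    vv.getRenderContext().setEdgeShapeTransformer(new EdgeShape.QuadCurve<String, Integer>());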
- */ -package edu.uci.ics.jung.visualization.decorators; - -import java.awt.Color; -import java.awt.GradientPaint; -import java.awt.Paint; -import java.awt.geom.Point2D; - -import org.apache.commons.collections15.Predicate; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.algorithms.util.SelfLoopEdgePredicate; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.transform.BidirectionalTransformer; - -/** - * Creates GradientPaint instances which can be used - * to paint an Edge. For DirectedEdges, - * the color will blend from c1 (source) to - * c2 (destination); for UndirectedEdges, - * the color will be c1 at each end and c2 - * in the middle. - * - * @author Joshua O'Madadhain - */ -public class GradientEdgePaintTransformer - implements org.apache.commons.collections15.Transformer -{ - protected Color c1; - protected Color c2; - protected VisualizationViewer vv; - protected BidirectionalTransformer transformer; - protected Predicate,E>> selfLoop = new SelfLoopEdgePredicate(); - - public GradientEdgePaintTransformer(Color c1, Color c2, - VisualizationViewer vv) - { - this.c1 = c1; - this.c2 = c2; - this.vv = vv; - this.transformer = vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT); - } - - public Paint transform(E e) - { - Layout layout = vv.getGraphLayout(); - Pair p = layout.getGraph().getEndpoints(e); - V b = p.getFirst(); - V f = p.getSecond(); - Point2D pb = transformer.transform(layout.transform(b)); - Point2D pf = transformer.transform(layout.transform(f)); - float xB = (float) pb.getX(); - float yB = (float) pb.getY(); - float xF = (float) pf.getX(); - float yF = (float) pf.getY(); - if ((layout.getGraph().getEdgeType(e)) == EdgeType.UNDIRECTED) { - xF = (xF + xB) / 2; - yF = (yF + yB) / 2; - } - if(selfLoop.evaluate(Context.,E>getInstance(layout.getGraph(), e))) { - yF += 50; - xF += 50; - } - - return new GradientPaint(xB, yB, getColor1(e), xF, yF, getColor2(e), true); - } - - /** - * Returns c1. Subclasses may override - * this method to enable more complex behavior (e.g., for - * picked edges). - */ - protected Color getColor1(E e) - { - return c1; - } - - /** - * Returns c2. Subclasses may override - * this method to enable more complex behavior (e.g., for - * picked edges). - */ - protected Color getColor2(E e) - { - return c2; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/InterpolatingVertexSizeTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/InterpolatingVertexSizeTransformer.java deleted file mode 100644 index e873d4f3..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/InterpolatingVertexSizeTransformer.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Created on Nov 3, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. 
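For the GradientEdgePaintTransformer just shown, a one-line installation sketch on the same illustrative viewer (Color is java.awt.Color; the call names assume the stock JUNG 2 RenderContext).

    // Blend each edge from white at the source to black at the destination (directed),
    // or white at the ends and black in the middle (undirected), per the class comment above.
    vv.getRenderContext().setEdgeDrawPaintTransformer(
            new GradientEdgePaintTransformer<String, Integer>(Color.WHITE, Color.BLACK, vv));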
- */ -package edu.uci.ics.jung.visualization.decorators; - -import org.apache.commons.collections15.Transformer; - - -/** - * Provides vertex sizes that are spaced proportionally between - * min_size and max_size depending on - * - * @author Joshua O'Madadhain - */ -public class InterpolatingVertexSizeTransformer implements Transformer -{ - protected double min; - protected double max; - protected Transformer values; - protected int min_size; - protected int size_diff; - - public InterpolatingVertexSizeTransformer(Transformer values, - int min_size, int max_size) - { - super(); - if (min_size < 0 || max_size < 0) - throw new IllegalArgumentException("sizes must be non-negative"); - if (min_size > max_size) - throw new IllegalArgumentException("min_size must be <= max_size"); - this.min = 0; - this.max = 0; - this.values = values; - setMinSize(min_size); - setMaxSize(max_size); - } - - public Integer transform(V v) - { - Number n = values.transform(v); - double value = min; - if (n != null) - value = n.doubleValue(); - min = Math.min(this.min, value); - max = Math.max(this.max, value); - - if (min == max) - return min_size; - - // interpolate between min and max sizes based on how big value is - // with respect to min and max values - return min_size + (int)(((value - min) / (max - min)) * size_diff); - } - - public void setMinSize(int min_size) - { - this.min_size = min_size; - } - - public void setMaxSize(int max_size) - { - this.size_diff = max_size - this.min_size; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/NumberFormattingTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/NumberFormattingTransformer.java deleted file mode 100644 index ce89abf3..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/NumberFormattingTransformer.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Created on Feb 16, 2009 - * - * Copyright (c) 2009, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.visualization.decorators; - -import java.text.NumberFormat; - -import org.apache.commons.collections15.Transformer; - -/** - * Transforms inputs to String representations by chaining an input - * {@code Number}-generating {@code Transformer} with an internal - * {@code NumberFormat} instance. - * @author Joshua O'Madadhain - */ -public class NumberFormattingTransformer implements Transformer -{ - private Transformer values; - private NumberFormat formatter = NumberFormat.getInstance(); - - public NumberFormattingTransformer(Transformer values) - { - this.values = values; - } - - /** - * Returns a formatted string for the input. - */ - public String transform(T input) - { - return formatter.format(values.transform(input)); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/PickableEdgePaintTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/PickableEdgePaintTransformer.java deleted file mode 100644 index c574816b..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/PickableEdgePaintTransformer.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Created on Mar 10, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. 
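A sketch of how the interpolating size transformer below is typically combined with EllipseVertexShapeTransformer on the same illustrative viewer. The degree-based value transformer is an assumption for illustration; Transformer and ConstantTransformer come from commons-collections15, and the raw ConstantTransformer mirrors the default constructor shown earlier.

    // Vertex sizes interpolate between 20 and 40 pixels according to vertex degree.
    final Graph<String, Integer> graph = g;
    Transformer<String, Number> degreeValues = new Transformer<String, Number>() {
        public Number transform(String v) { return graph.degree(v); }
    };
    vv.getRenderContext().setVertexShapeTransformer(
            new EllipseVertexShapeTransformer<String>(
                    new InterpolatingVertexSizeTransformer<String>(degreeValues, 20, 40),
                    new ConstantTransformer(1.0f)));   // aspect ratio 1.0 -> circles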
- * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.visualization.decorators; - -import java.awt.Paint; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.visualization.picking.PickedInfo; - -/** - * Paints each edge according to the Paint - * parameters given in the constructor, so that picked and - * non-picked edges can be made to look different. - * - * @author Tom Nelson - * @author Joshua O'Madadhain - * - */ -public class PickableEdgePaintTransformer implements Transformer { - protected PickedInfo pi; - protected Paint draw_paint; - protected Paint picked_paint; - - /** - * - * @param pi specifies which vertices report as "picked" - * @param draw_paint Paint used to draw edge shapes - * @param picked_paint Paint used to draw picked edge shapes - */ - public PickableEdgePaintTransformer(PickedInfo pi, Paint draw_paint, Paint picked_paint) { - if (pi == null) - throw new IllegalArgumentException("PickedInfo instance must be non-null"); - this.pi = pi; - this.draw_paint = draw_paint; - this.picked_paint = picked_paint; - } - - /** - * - */ - public Paint transform(E e) { - if (pi.isPicked(e)) { - return picked_paint; - } - else { - return draw_paint; - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/PickableVertexIconTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/PickableVertexIconTransformer.java deleted file mode 100644 index ecb063eb..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/PickableVertexIconTransformer.java +++ /dev/null @@ -1,55 +0,0 @@ -/* -* Created on Mar 10, 2005 -* -* Copyright (c) 2005, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.visualization.decorators; - -import javax.swing.Icon; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.visualization.picking.PickedInfo; - -/** - * Supplies an Icon for each vertex according to the Icon - * parameters given in the constructor, so that picked and - * non-picked vertices can be made to look different. - */ -public class PickableVertexIconTransformer implements Transformer { - - protected Icon icon; - protected Icon picked_icon; - protected PickedInfo pi; - - /** - * - * @param pi specifies which vertices report as "picked" - * @param icon Icon used to represent vertices - * @param picked_icon Icon used to represent picked vertices - */ - public PickableVertexIconTransformer(PickedInfo pi, Icon icon, Icon picked_icon) - { - if (pi == null) - throw new IllegalArgumentException("PickedInfo instance must be non-null"); - this.pi = pi; - this.icon = icon; - this.picked_icon = picked_icon; - } - - /** - * Returns the appropriate Icon, depending on picked state. 
- */ - public Icon transform(V v) { - if (pi.isPicked(v)) - return picked_icon; - else - return icon; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/PickableVertexPaintTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/PickableVertexPaintTransformer.java deleted file mode 100644 index c033f1d4..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/PickableVertexPaintTransformer.java +++ /dev/null @@ -1,56 +0,0 @@ -/* -* Created on Mar 10, 2005 -* -* Copyright (c) 2005, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.visualization.decorators; - -import java.awt.Paint; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.visualization.picking.PickedInfo; - -/** - * Paints each vertex according to the Paint - * parameters given in the constructor, so that picked and - * non-picked vertices can be made to look different. - */ -public class PickableVertexPaintTransformer implements Transformer { - - protected Paint fill_paint; - protected Paint picked_paint; - protected PickedInfo pi; - - /** - * - * @param pi specifies which vertices report as "picked" - * @param draw_paint Paint used to draw vertex shapes - * @param fill_paint Paint used to fill vertex shapes - * @param picked_paint Paint used to fill picked vertex shapes - */ - public PickableVertexPaintTransformer(PickedInfo pi, - Paint fill_paint, Paint picked_paint) - { - if (pi == null) - throw new IllegalArgumentException("PickedInfo instance must be non-null"); - this.pi = pi; - this.fill_paint = fill_paint; - this.picked_paint = picked_paint; - } - - public Paint transform(V v) - { - if (pi.isPicked(v)) - return picked_paint; - else - return fill_paint; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/SettableVertexShapeTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/SettableVertexShapeTransformer.java deleted file mode 100644 index 4e67f29c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/SettableVertexShapeTransformer.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Created on Jul 18, 2004 - * - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.visualization.decorators; - -import java.awt.Shape; - -import org.apache.commons.collections15.Transformer; - - - -/** - * - * @author Joshua O'Madadhain - */ -public interface SettableVertexShapeTransformer extends Transformer -{ - public abstract void setSizeTransformer(Transformer vsf); - - public abstract void setAspectRatioTransformer(Transformer varf); -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/ToStringLabeller.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/ToStringLabeller.java deleted file mode 100644 index 6fadc53e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/ToStringLabeller.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. 
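The two pickable paint transformers above slot into the same render context; this sketch replaces the gradient edge paint from the earlier example (colors and type parameters are illustrative assumptions).

    // Picked vertices fill yellow, others red; picked edges draw cyan, others black.
    vv.getRenderContext().setVertexFillPaintTransformer(
            new PickableVertexPaintTransformer<String>(
                    vv.getPickedVertexState(), Color.RED, Color.YELLOW));
    vv.getRenderContext().setEdgeDrawPaintTransformer(
            new PickableEdgePaintTransformer<Integer>(
                    vv.getPickedEdgeState(), Color.BLACK, Color.CYAN));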
- * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Apr 13, 2004 - */ -package edu.uci.ics.jung.visualization.decorators; - -import org.apache.commons.collections15.Transformer; - - - -/** - * Labels vertices by their toString. This class functions as a drop-in - * replacement for the default StringLabeller method. This class does not - * guarantee unique labels; or even consistent ones; as a result, - * getVertexByLabel will always return NULL. - * - * @author danyelf - */ -public class ToStringLabeller implements Transformer { - - /** - * Retunrs v.toString() - */ - public String transform(V v) { - return v.toString(); - } - - } \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/VertexIconShapeTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/VertexIconShapeTransformer.java deleted file mode 100644 index 03ee48a9..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/VertexIconShapeTransformer.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 1, 2005 - */ - -package edu.uci.ics.jung.visualization.decorators; - -import java.awt.Image; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.util.HashMap; -import java.util.Map; - -import javax.swing.Icon; -import javax.swing.ImageIcon; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.visualization.FourPassImageShaper; - -/** - * A default implementation that stores images in a Map keyed on the - * vertex. Also applies a shaping function to images to extract the - * shape of the opaque part of a transparent image. - * - * @author Tom Nelson - * - * - */public class VertexIconShapeTransformer implements Transformer { - - protected Map shapeMap = new HashMap(); - protected Map iconMap; - protected Transformer delegate; - /** - * - * - */ - public VertexIconShapeTransformer(Transformer delegate) { - this.delegate = delegate; - } - - /** - * @return Returns the delegate. - */ - public Transformer getDelegate() { - return delegate; - } - - /** - * @param delegate The delegate to set. - */ - public void setDelegate(Transformer delegate) { - this.delegate = delegate; - } - - /** - * get the shape from the image. 
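A sketch combining ToStringLabeller with the icon transformers in this group, again on the illustrative viewer; the icon path is purely hypothetical.

    vv.getRenderContext().setVertexLabelTransformer(new ToStringLabeller<String>());

    // Draw vertices as icons; VertexIconShapeTransformer reuses the icon map so the
    // opaque outline of each image also becomes the vertex's pickable shape.
    DefaultVertexIconTransformer<String> icons = new DefaultVertexIconTransformer<String>();
    icons.getIconMap().put("a", new ImageIcon("a.png"));   // hypothetical icon file
    VertexIconShapeTransformer<String> iconShapes =
            new VertexIconShapeTransformer<String>(new EllipseVertexShapeTransformer<String>());
    iconShapes.setIconMap(icons.getIconMap());
    vv.getRenderContext().setVertexIconTransformer(icons);
    vv.getRenderContext().setVertexShapeTransformer(iconShapes);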
If not available, get - * the shape from the delegate VertexShapeFunction - */ - public Shape transform(V v) { - Icon icon = iconMap.get(v); - if (icon != null && icon instanceof ImageIcon) { - Image image = ((ImageIcon) icon).getImage(); - Shape shape = (Shape) shapeMap.get(image); - if (shape == null) { - shape = FourPassImageShaper.getShape(image, 30); - if(shape.getBounds().getWidth() > 0 && - shape.getBounds().getHeight() > 0) { - // don't cache a zero-sized shape, wait for the image - // to be ready - int width = image.getWidth(null); - int height = image.getHeight(null); - AffineTransform transform = AffineTransform - .getTranslateInstance(-width / 2, -height / 2); - shape = transform.createTransformedShape(shape); - shapeMap.put(image, shape); - } - } - return shape; - } else { - return delegate.transform(v); - } - } - - /** - * @return the iconMap - */ - public Map getIconMap() { - return iconMap; - } - - /** - * @param iconMap the iconMap to set - */ - public void setIconMap(Map iconMap) { - this.iconMap = iconMap; - } - - /** - * @return the shapeMap - */ - public Map getShapeMap() { - return shapeMap; - } - - /** - * @param shapeMap the shapeMap to set - */ - public void setShapeMap(Map shapeMap) { - this.shapeMap = shapeMap; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/package.html b/gui/jung-src/edu/uci/ics/jung/visualization/decorators/package.html deleted file mode 100644 index f6062e3f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/decorators/package.html +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - -

          Mechanisms for associating data (shapes, colors, values, strings, etc.) with -graph elements. - - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/layout/BoundingRectangleCollector.java b/gui/jung-src/edu/uci/ics/jung/visualization/layout/BoundingRectangleCollector.java deleted file mode 100644 index 187c916b..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/layout/BoundingRectangleCollector.java +++ /dev/null @@ -1,87 +0,0 @@ -package edu.uci.ics.jung.visualization.layout; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.ArrayList; -import java.util.List; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.Pair; -import edu.uci.ics.jung.visualization.RenderContext; - -public class BoundingRectangleCollector { - - protected RenderContext rc; - protected Graph graph; - protected Layout layout; - protected List rectangles = new ArrayList(); - - public BoundingRectangleCollector(RenderContext rc, Layout layout) { - this.rc = rc; - this.layout = layout; - this.graph = layout.getGraph(); - compute(); - } - - /** - * @return the rectangles - */ - public List getRectangles() { - return rectangles; - } - - public void compute() { - rectangles.clear(); -// Graphics2D g2d = (Graphics2D)g; -// g.setColor(Color.cyan); - - for(E e : graph.getEdges()) { - Pair endpoints = graph.getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - Point2D p1 = layout.transform(v1); - Point2D p2 = layout.transform(v2); - float x1 = (float)p1.getX(); - float y1 = (float)p1.getY(); - float x2 = (float)p2.getX(); - float y2 = (float)p2.getY(); - - boolean isLoop = v1.equals(v2); - Shape s2 = rc.getVertexShapeTransformer().transform(v2); - Shape edgeShape = rc.getEdgeShapeTransformer().transform(Context.,E>getInstance(graph,e)); - - AffineTransform xform = AffineTransform.getTranslateInstance(x1,y1); - - if(isLoop) { - Rectangle2D s2Bounds = s2.getBounds2D(); - xform.scale(s2Bounds.getWidth(), s2Bounds.getHeight()); - xform.translate(0, -edgeShape.getBounds2D().getWidth()/2); - } else { - float dx = x2-x1; - float dy = y2-y1; - float theta = (float)Math.atan2(dy,dx); - xform.rotate(theta); - float dist = (float)p1.distance(p2); - xform.scale(dist, 1.0); - } - edgeShape = xform.createTransformedShape(edgeShape); -// edgeShape = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, edgeShape); - rectangles.add(edgeShape.getBounds2D()); - } - - for(V v : graph.getVertices()) { - Shape shape = rc.getVertexShapeTransformer().transform(v); - Point2D p = layout.transform(v); -// p = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, p); - float x = (float)p.getX(); - float y = (float)p.getY(); - AffineTransform xform = AffineTransform.getTranslateInstance(x, y); - shape = xform.createTransformedShape(shape); - rectangles.add(shape.getBounds2D()); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/layout/BoundingRectanglePaintable.java b/gui/jung-src/edu/uci/ics/jung/visualization/layout/BoundingRectanglePaintable.java deleted file mode 100644 index 06eb35f2..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/layout/BoundingRectanglePaintable.java +++ /dev/null @@ -1,56 +0,0 @@ -package edu.uci.ics.jung.visualization.layout; - -import java.awt.Color; -import java.awt.Graphics; -import java.awt.Graphics2D; -import 
java.awt.geom.Rectangle2D; -import java.util.List; - -import javax.swing.event.ChangeEvent; -import javax.swing.event.ChangeListener; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.VisualizationServer; -import edu.uci.ics.jung.visualization.util.ChangeEventSupport; - -public class BoundingRectanglePaintable implements VisualizationServer.Paintable { - - protected RenderContext rc; - protected Graph graph; - protected Layout layout; - protected List rectangles; - - public BoundingRectanglePaintable(RenderContext rc, Layout layout) { - super(); - this.rc = rc; - this.layout = layout; - this.graph = layout.getGraph(); - final BoundingRectangleCollector brc = new BoundingRectangleCollector(rc, layout); - this.rectangles = brc.getRectangles(); - if(layout instanceof ChangeEventSupport) { - ((ChangeEventSupport)layout).addChangeListener(new ChangeListener() { - - public void stateChanged(ChangeEvent e) { - brc.compute(); - rectangles = brc.getRectangles(); - }}); - } - } - - public void paint(Graphics g) { - Graphics2D g2d = (Graphics2D)g; - g.setColor(Color.cyan); - - for(Rectangle2D r : rectangles) { - g2d.draw(rc.getMultiLayerTransformer().transform(Layer.LAYOUT, r)); - } - } - - public boolean useTransform() { - return true; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/layout/CachingLayout.java b/gui/jung-src/edu/uci/ics/jung/visualization/layout/CachingLayout.java deleted file mode 100644 index 6fdd98fb..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/layout/CachingLayout.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 23, 2005 - */ - -package edu.uci.ics.jung.visualization.layout; - -import java.awt.geom.Point2D; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ChainedTransformer; -import org.apache.commons.collections15.functors.CloneTransformer; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.algorithms.layout.LayoutDecorator; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.visualization.util.Caching; - -/** - * A LayoutDecorator that caches locations in a clearable Map. This can be used to ensure that - * edge endpoints are always the same as vertex locations when they are drawn in the render loop - * during the time that the layout's relaxer thread is changing the locations. 
- * - * @see LayoutDecorator - * @author Tom Nelson - * - */ -public class CachingLayout extends LayoutDecorator implements Caching { - - protected Map locationMap; - - public CachingLayout(Layout delegate) { - super(delegate); - this.locationMap = LazyMap.decorate(new HashMap(), - new ChainedTransformer(new Transformer[]{delegate, CloneTransformer.getInstance()})); - } - - @Override - public void setGraph(Graph graph) { - delegate.setGraph(graph); - } - - public void clear() { - this.locationMap.clear(); - } - - public void init() { - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.layout.LayoutDecorator#transform(java.lang.Object) - */ - @Override - public Point2D transform(V v) { - return locationMap.get(v); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/layout/LayoutTransition.java b/gui/jung-src/edu/uci/ics/jung/visualization/layout/LayoutTransition.java deleted file mode 100644 index 628fe747..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/layout/LayoutTransition.java +++ /dev/null @@ -1,61 +0,0 @@ -package edu.uci.ics.jung.visualization.layout; - -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.algorithms.layout.StaticLayout; -import edu.uci.ics.jung.algorithms.layout.util.Relaxer; -import edu.uci.ics.jung.algorithms.layout.util.VisRunner; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.visualization.VisualizationViewer; - -public class LayoutTransition implements IterativeContext { - - protected Layout startLayout; - protected Layout endLayout; - protected Layout transitionLayout; - protected boolean done = false; - protected int count = 20; - protected int counter = 0; - protected VisualizationViewer vv; - - /** - * @param startLayout - * @param endLayout - */ - public LayoutTransition(VisualizationViewer vv, Layout startLayout, Layout endLayout) { - this.vv = vv; - this.startLayout = startLayout; - this.endLayout = endLayout; - if(endLayout instanceof IterativeContext) { - Relaxer relaxer = new VisRunner((IterativeContext)endLayout); - relaxer.prerelax(); - } - this.transitionLayout = - new StaticLayout(startLayout.getGraph(), startLayout); - vv.setGraphLayout(transitionLayout); - } - - public boolean done() { - return done; - } - - public void step() { - Graph g = transitionLayout.getGraph(); - for(V v : g.getVertices()) { - Point2D tp = transitionLayout.transform(v); - Point2D fp = endLayout.transform(v); - double dx = (fp.getX()-tp.getX())/(count-counter); - double dy = (fp.getY()-tp.getY())/(count-counter); - transitionLayout.setLocation(v, - new Point2D.Double(tp.getX()+dx,tp.getY()+dy)); - } - counter++; - if(counter >= count) { - done = true; - vv.setGraphLayout(endLayout); - } - vv.repaint(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/layout/ObservableCachingLayout.java b/gui/jung-src/edu/uci/ics/jung/visualization/layout/ObservableCachingLayout.java deleted file mode 100644 index c8af7d93..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/layout/ObservableCachingLayout.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
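The LayoutTransition above is usually driven by the Animator from edu.uci.ics.jung.algorithms.layout.util (an assumption here, since that class is not part of this diff); continuing the same illustrative vv and g:

    // Animate from the viewer's current layout to a fresh FRLayout over the
    // transition's fixed number of steps.
    Layout<String, Integer> destination = new FRLayout<String, Integer>(g);
    LayoutTransition<String, Integer> lt =
            new LayoutTransition<String, Integer>(vv, vv.getGraphLayout(), destination);
    Animator animator = new Animator(lt);
    animator.start();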
- * - * Created on Aug 23, 2005 - */ - -package edu.uci.ics.jung.visualization.layout; - -import java.awt.geom.Point2D; -import java.util.HashMap; -import java.util.Map; - -import javax.swing.event.ChangeListener; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ChainedTransformer; -import org.apache.commons.collections15.functors.CloneTransformer; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.algorithms.layout.LayoutDecorator; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.visualization.util.Caching; -import edu.uci.ics.jung.visualization.util.ChangeEventSupport; -import edu.uci.ics.jung.visualization.util.DefaultChangeEventSupport; - -/** - * A LayoutDecorator that fires ChangeEvents when certain methods - * are called. Used to wrap a Layout so that the visualization - * components can be notified of changes. - * - * @see LayoutDecorator - * @author Tom Nelson - * - */ -public class ObservableCachingLayout extends LayoutDecorator implements ChangeEventSupport, Caching { - - protected ChangeEventSupport changeSupport = - new DefaultChangeEventSupport(this); - - protected Map locationMap; - - public ObservableCachingLayout(Layout delegate) { - super(delegate); - this.locationMap = LazyMap.decorate(new HashMap(), - new ChainedTransformer(new Transformer[]{delegate, CloneTransformer.getInstance()})); - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.Layout#step() - */ - @Override - public void step() { - super.step(); - fireStateChanged(); - } - - /** - * - * @see edu.uci.ics.jung.algorithms.layout.Layout#initialize() - */ - @Override - public void initialize() { - super.initialize(); - fireStateChanged(); - } - - /** - * @see edu.uci.ics.jung.algorithms.util.IterativeContext#done() - */ - @Override - public boolean done() { - if(delegate instanceof IterativeContext) { - return ((IterativeContext)delegate).done(); - } - return true; - } - - - /** - * @param v - * @param location - * @see edu.uci.ics.jung.algorithms.layout.Layout#setLocation(java.lang.Object, java.awt.geom.Point2D) - */ - @Override - public void setLocation(V v, Point2D location) { - super.setLocation(v, location); - fireStateChanged(); - } - - public void addChangeListener(ChangeListener l) { - changeSupport.addChangeListener(l); - } - - public void removeChangeListener(ChangeListener l) { - changeSupport.removeChangeListener(l); - } - - public ChangeListener[] getChangeListeners() { - return changeSupport.getChangeListeners(); - } - - public void fireStateChanged() { - changeSupport.fireStateChanged(); - } - - @Override - public void setGraph(Graph graph) { - delegate.setGraph(graph); - } - - public void clear() { - this.locationMap.clear(); - } - - public void init() { - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.layout.LayoutDecorator#transform(java.lang.Object) - */ - @Override - public Point2D transform(V v) { - return locationMap.get(v); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/layout/PersistentLayout.java b/gui/jung-src/edu/uci/ics/jung/visualization/layout/PersistentLayout.java deleted file mode 100644 index d8108ed4..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/layout/PersistentLayout.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. 
- * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Oct 9, 2004 - * - */ -package edu.uci.ics.jung.visualization.layout; - -import java.awt.geom.Point2D; -import java.io.IOException; -import java.io.Serializable; - -import edu.uci.ics.jung.algorithms.layout.Layout; - -/** - * interface for PersistentLayout - * Also holds a nested class Point to serialize the - * Vertex locations - * - * @author Tom Nelson - */ -public interface PersistentLayout extends Layout { - - void persist(String fileName) throws IOException; - - void restore(String fileName) throws IOException, ClassNotFoundException; - - void lock(boolean state); - - /** - * a serializable class to save locations - */ - @SuppressWarnings("serial") - static class Point implements Serializable { - public double x; - public double y; - public Point(double x, double y) { - this.x=x; - this.y=y; - } - public Point(Point2D p) { - this.x = p.getX(); - this.y = p.getY(); - } - } - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/layout/PersistentLayoutImpl.java b/gui/jung-src/edu/uci/ics/jung/visualization/layout/PersistentLayoutImpl.java deleted file mode 100644 index 0b1d99eb..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/layout/PersistentLayoutImpl.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Oct 8, 2004 - * - */ -package edu.uci.ics.jung.visualization.layout; - -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.Serializable; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.apache.commons.collections15.Factory; -import org.apache.commons.collections15.map.LazyMap; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.util.Caching; -import edu.uci.ics.jung.visualization.util.ChangeEventSupport; - - -/** - * Implementation of PersistentLayout. - * Defers to another layout until 'restore' is called, - * then it uses the saved vertex locations - * - * @author Tom Nelson - * - * - */ -public class PersistentLayoutImpl extends ObservableCachingLayout - implements PersistentLayout, ChangeEventSupport, Caching { - - /** - * a container for Vertices - */ - protected Map map; - - /** - * a collection of Vertices that should not move - */ - protected Set dontmove; - - /** - * whether the graph is locked (stops the VisualizationViewer rendering thread) - */ - protected boolean locked; - - /** - * create an instance with a passed layout - * create containers for graph components - * @param layout - */ - public PersistentLayoutImpl(Layout layout) { - super(layout); - this.map = LazyMap.decorate(new HashMap(), new RandomPointFactory(getSize())); - - this.dontmove = new HashSet(); - } - - /** - * This method calls initialize_local_vertex for each vertex, and - * also adds initial coordinate information for each vertex. (The vertex's - * initial location is set by calling initializeLocation. 
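A sketch of the persist/restore cycle declared by the interface above, using the PersistentLayoutImpl that follows (same illustrative vv and g; the file name is hypothetical).

    PersistentLayout<String, Integer> layout =
            new PersistentLayoutImpl<String, Integer>(new FRLayout<String, Integer>(g));
    vv.setGraphLayout(layout);
    try {
        layout.persist("layout.ser");   // saves a Map of vertex -> Point to disk
        layout.restore("layout.ser");   // later: reload and lock the saved positions
    } catch (Exception e) {
        e.printStackTrace();
    }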
- */ - protected void initializeLocations() { - for(V v : getGraph().getVertices()) { - Point2D coord = delegate.transform(v); - if (!dontmove.contains(v)) - initializeLocation(v, coord, getSize()); - } - } - - - /** - * Sets persisted location for a vertex within the dimensions of the space. - * If the vertex has not been persisted, sets a random location. If you want - * to initialize in some different way, override this method. - * - * @param v - * @param coord - * @param d - */ - protected void initializeLocation(V v, Point2D coord, Dimension d) { - - Point point = map.get(v); - coord.setLocation(point.x, point.y); - } - - /** - * save the Vertex locations to a file - * @param fileName the file to save to - * @throws an IOException if the file cannot be used - */ - public void persist(String fileName) throws IOException { - - for(V v : getGraph().getVertices()) { - Point p = new Point(transform(v)); - map.put(v, p); - } - ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream( - fileName)); - oos.writeObject(map); - oos.close(); - } - - /** - * Restore the graph Vertex locations from a file - * @param fileName the file to use - * @throws IOException for file problems - * @throws ClassNotFoundException for classpath problems - */ - @SuppressWarnings("unchecked") - public void restore(String fileName) throws IOException, - ClassNotFoundException { - ObjectInputStream ois = new ObjectInputStream(new FileInputStream( - fileName)); - map = (Map) ois.readObject(); - ois.close(); - initializeLocations(); - locked = true; - fireStateChanged(); - } - - public void lock(boolean locked) { - this.locked = locked; - } - - /* - * (non-Javadoc) - * - * @see edu.uci.ics.jung.visualization.Layout#incrementsAreDone() - */ - public boolean done() { - return super.done() || locked; - } - - /* - * (non-Javadoc) - * - * @see edu.uci.ics.jung.visualization.Layout#lockVertex(edu.uci.ics.jung.graph.Vertex) - */ - public void lock(V v, boolean state) { - dontmove.add(v); - delegate.lock(v, state); - } - - @SuppressWarnings("serial") - public static class RandomPointFactory implements Factory, Serializable { - - Dimension d; - public RandomPointFactory(Dimension d) { - this.d = d; - } - public edu.uci.ics.jung.visualization.layout.PersistentLayout.Point create() { - double x = Math.random() * d.width; - double y = Math.random() * d.height; - return new Point(x,y); - } - } - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/layout/package.html b/gui/jung-src/edu/uci/ics/jung/visualization/layout/package.html deleted file mode 100644 index 0ca9b8c2..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/layout/package.html +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - -

          Visualization mechanisms related to graph layout: caching, persistence, -event-emitting, etc. - - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/package.html b/gui/jung-src/edu/uci/ics/jung/visualization/package.html deleted file mode 100644 index eec05cba..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/package.html +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - -
-Frameworks and mechanisms for visualizing JUNG graphs using Swing/AWT.
          - - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/picking/AbstractPickedState.java b/gui/jung-src/edu/uci/ics/jung/visualization/picking/AbstractPickedState.java deleted file mode 100644 index 267ff863..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/picking/AbstractPickedState.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - * Created on Apr 2, 2005 - */ -package edu.uci.ics.jung.visualization.picking; - -import java.awt.event.ItemEvent; -import java.awt.event.ItemListener; - -import javax.swing.event.EventListenerList; - -/** - * An abstract class to support ItemEvents for PickedState - * - * @author Tom Nelson - */ -public abstract class AbstractPickedState implements PickedState { - - protected EventListenerList listenerList = new EventListenerList(); - - public void addItemListener(ItemListener l) { - listenerList.add(ItemListener.class, l); - - } - - public void removeItemListener(ItemListener l) { - listenerList.remove(ItemListener.class, l); - } - - protected void fireItemStateChanged(ItemEvent e) { - Object[] listeners = listenerList.getListenerList(); - for ( int i = listeners.length-2; i>=0; i-=2 ) { - if ( listeners[i]==ItemListener.class ) { - ((ItemListener)listeners[i+1]).itemStateChanged(e); - } - } - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/picking/ClassicPickSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/picking/ClassicPickSupport.java deleted file mode 100644 index b1a24754..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/picking/ClassicPickSupport.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Created on Apr 11, 2005 - * - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.visualization.picking; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; - - -/** - * PickSupport implementation that emulates the picking behavior - * of versions of VisualizationViewer prior to version 1.6. - * (VisualizationViewer still has this behavior by default, but - * the picking behavior can now be changed.) - * - * @see ShapePickSupport - * - * @author Tom Nelson - * @author Joshua O'Madadhain - */ -public class ClassicPickSupport extends RadiusPickSupport implements GraphElementAccessor { - - public ClassicPickSupport() - { - super(); - } - - /** - * @return null ClassicPickSupport does not do edges - */ - public E getEdge(double x, double y) { - return null; - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/picking/ClosestShapePickSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/picking/ClosestShapePickSupport.java deleted file mode 100644 index cafea395..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/picking/ClosestShapePickSupport.java +++ /dev/null @@ -1,140 +0,0 @@ -/** - * Copyright (c) 2008, the JUNG Project and the Regents of the University - * of California - * All rights reserved. 
- * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Apr 24, 2008 - * - */ -package edu.uci.ics.jung.visualization.picking; - -import java.awt.Shape; -import java.awt.geom.Point2D; -import java.util.Collection; -import java.util.ConcurrentModificationException; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationServer; - -/** - * A GraphElementAccessor that finds the closest element to - * the pick point, and returns it if it is within the element's shape. - * This is best suited to elements with convex shapes that do not overlap. - * It differs from ShapePickSupport in that it only checks - * the closest element to see whether it contains the pick point. - * Possible unexpected odd behaviors: - *
 - * <ul>
 - * <li>If the elements overlap, this mechanism may pick another element than the one that's
 - * "on top" (rendered last) if the pick point is closer to the center of an obscured vertex.
 - * <li>If element shapes are not convex, then this mechanism may return null
 - * even if the pick point is inside some element's shape, if the pick point is closer
 - * to the center of another element.
 - * </ul>
          - * Users who want to avoid either of these should use ShapePickSupport - * instead, which is slower but more flexible. If neither of the above conditions - * (overlapping elements or non-convex shapes) is true, then ShapePickSupport - * and this class should have the same behavior. - */ -public class ClosestShapePickSupport implements GraphElementAccessor { - - protected VisualizationServer vv; - protected float pickSize; - - /** - * Creates a ShapePickSupport for the vv - * VisualizationServer, with the specified pick footprint. - * The VisualizationServer is used to fetch the current - * Layout. - * @param vv source of the current Layout. - * @param pickSize the size of the pick footprint for line edges - */ - public ClosestShapePickSupport(VisualizationServer vv, float pickSize) - { - this.vv = vv; - this.pickSize = pickSize; - } - - /** - * Create a ShapePickSupport with the vv - * VisualizationServer and default pick footprint. - * The footprint defaults to 2. - */ - public ClosestShapePickSupport(VisualizationServer vv) - { - this.vv = vv; - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.GraphElementAccessor#getEdge(edu.uci.ics.jung.algorithms.layout.Layout, double, double) - */ - public E getEdge(Layout layout, double x, double y) - { - return null; - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.GraphElementAccessor#getVertex(edu.uci.ics.jung.algorithms.layout.Layout, double, double) - */ - public V getVertex(Layout layout, double x, double y) - { - // first, find the closest vertex to (x,y) - double minDistance = Double.MAX_VALUE; - V closest = null; - while(true) - { - try - { - for(V v : layout.getGraph().getVertices()) - { - Point2D p = layout.transform(v); - double dx = p.getX() - x; - double dy = p.getY() - y; - double dist = dx * dx + dy * dy; - if (dist < minDistance) - { - minDistance = dist; - closest = v; - } - } - break; - } - catch(ConcurrentModificationException cme) {} - } - - // now check to see whether (x,y) is in the shape for this vertex. - - // get the vertex shape - Shape shape = vv.getRenderContext().getVertexShapeTransformer().transform(closest); - // get the vertex location - Point2D p = layout.transform(closest); - // transform the vertex location to screen coords - p = vv.getRenderContext().getMultiLayerTransformer().transform(Layer.LAYOUT, p); - - double ox = x - p.getX(); - double oy = y - p.getY(); - - if (shape.contains(ox, oy)) - return closest; - else - return null; - } - - /** - * @see edu.uci.ics.jung.algorithms.layout.GraphElementAccessor#getVertices(edu.uci.ics.jung.algorithms.layout.Layout, java.awt.Shape) - */ - public Collection getVertices(Layout layout, Shape rectangle) - { - // FIXME: RadiusPickSupport and ShapePickSupport are not using the same mechanism! - // talk to Tom and make sure I understand which should be used. - // in particular, there are some transformations that the latter uses; the latter is also - // doing a couple of kinds of filtering. (well, only one--just predicate-based.) - // looks to me like the VV could (should) be doing this filtering. (maybe.) 
- // - return null; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/picking/LayoutLensShapePickSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/picking/LayoutLensShapePickSupport.java deleted file mode 100644 index e4e8c270..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/picking/LayoutLensShapePickSupport.java +++ /dev/null @@ -1,233 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 11, 2005 - * - */ -package edu.uci.ics.jung.visualization.picking; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.GeneralPath; -import java.awt.geom.PathIterator; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.Collection; -import java.util.ConcurrentModificationException; -import java.util.HashSet; -import java.util.Set; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.Pair; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationServer; - -/** - * ShapePickSupport provides access to Vertices and EdgeType based on - * their actual shapes. - * - * @author Tom Nelson - * - */ -public class LayoutLensShapePickSupport extends ShapePickSupport - implements GraphElementAccessor { - - /** - * Create an instance. - * The HasGraphLayout is used as the source of the current - * Graph Layout. The HasShapes - * is used to access the VertexShapes and the EdgeShapes - * @param hasGraphLayout source of the current layout. - * @param hasShapeFunctions source of Vertex and Edge shapes. - * @param pickSize how large to make the pick footprint for line edges - */ - public LayoutLensShapePickSupport(VisualizationServer vv, float pickSize) { - super(vv,pickSize); - } - - /** - * Create an instance. - * The pickSize footprint defaults to 2. - */ - public LayoutLensShapePickSupport(VisualizationServer vv) { - this(vv,2); - } - - /** - * Iterates over Vertices, checking to see if x,y is contained in the - * Vertex's Shape. If (x,y) is contained in more than one vertex, use - * the vertex whose center is closest to the pick point. 
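ClosestShapePickSupport, deleted above, takes the shortcut the other way round: it finds the vertex with the nearest center first and only then checks that the click actually falls inside that vertex's shape. A standalone sketch of that heuristic, with hypothetical shapes standing in for the Layout/VisualizationServer plumbing:

    import java.awt.Shape;
    import java.awt.geom.Ellipse2D;
    import java.awt.geom.Rectangle2D;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ClosestCenterPickSketch {
        // Returns the key whose shape center is nearest to (x,y),
        // but only if (x,y) actually lies inside that shape.
        static String pick(Map<String, Shape> shapes, double x, double y) {
            String closest = null;
            double minDistance = Double.MAX_VALUE;
            for (Map.Entry<String, Shape> e : shapes.entrySet()) {
                Rectangle2D b = e.getValue().getBounds2D();
                double dx = b.getCenterX() - x;
                double dy = b.getCenterY() - y;
                double dist = dx * dx + dy * dy;   // squared distance is enough for ranking
                if (dist < minDistance) {
                    minDistance = dist;
                    closest = e.getKey();
                }
            }
            // only the single nearest candidate is tested for containment,
            // which is what makes this fast but occasionally surprising
            return (closest != null && shapes.get(closest).contains(x, y)) ? closest : null;
        }

        public static void main(String[] args) {
            Map<String, Shape> shapes = new LinkedHashMap<String, Shape>();
            shapes.put("v1", new Ellipse2D.Double(0, 0, 20, 20));    // center (10, 10)
            shapes.put("v2", new Ellipse2D.Double(30, 0, 20, 20));   // center (40, 10)
            System.out.println(pick(shapes, 12, 10));   // v1
            System.out.println(pick(shapes, 25, 10));   // null: the click lies between the two shapes
        }
    }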
- * @see edu.uci.ics.jung.visualization.picking.PickSupport#getVertex(double, double) - */ - public V getVertex(Layout layout, double x, double y) { - - V closest = null; - double minDistance = Double.MAX_VALUE; - - while(true) { - try { - for(V v : getFilteredVertices(layout)) { - - Shape shape = vv.getRenderContext().getVertexShapeTransformer().transform(v); - // get the vertex location - Point2D p = layout.transform(v); - if(p == null) continue; - // transform the vertex location to screen coords - p = vv.getRenderContext().getMultiLayerTransformer().transform(p); - AffineTransform xform = - AffineTransform.getTranslateInstance(p.getX(), p.getY()); - shape = xform.createTransformedShape(shape); - - // see if this vertex center is closest to the pick point - // among any other containing vertices - if(shape.contains(x, y)) { - - if(style == Style.LOWEST) { - // return the first match - return v; - } else if(style == Style.HIGHEST) { - // will return the last match - closest = v; - } else { - Rectangle2D bounds = shape.getBounds2D(); - double dx = bounds.getCenterX() - x; - double dy = bounds.getCenterY() - y; - double dist = dx * dx + dy * dy; - if (dist < minDistance) { - minDistance = dist; - closest = v; - } - } - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return closest; - } - - /** - * returns the vertices that are contained in the passed shape. - * The shape is in screen coordinates, and the graph vertices - * are transformed to screen coordinates before they are tested - * for inclusion - */ - public Collection getVertices(Layout layout, Shape rectangle) { - Set pickedVertices = new HashSet(); - - while(true) { - try { - for(V v : getFilteredVertices(layout)) { - Point2D p = layout.transform(v); - if(p == null) continue; - - p = vv.getRenderContext().getMultiLayerTransformer().transform(p); - if(rectangle.contains(p)) { - pickedVertices.add(v); - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return pickedVertices; - } - /** - * return an edge whose shape intersects the 'pickArea' footprint of the passed - * x,y, coordinates. 
- */ - public E getEdge(Layout layout, double x, double y) { - - Point2D ip = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.VIEW, new Point2D.Double(x,y)); - x = ip.getX(); - y = ip.getY(); - - // as a Line has no area, we can't always use edgeshape.contains(point) so we - // make a small rectangular pickArea around the point and check if the - // edgeshape.intersects(pickArea) - Rectangle2D pickArea = - new Rectangle2D.Float((float)x-pickSize/2,(float)y-pickSize/2,pickSize,pickSize); - E closest = null; - double minDistance = Double.MAX_VALUE; - while(true) { - try { - for(E e : getFilteredEdges(layout)) { - - Pair pair = layout.getGraph().getEndpoints(e); - V v1 = pair.getFirst(); - V v2 = pair.getSecond(); - boolean isLoop = v1.equals(v2); - Point2D p1 = vv.getRenderContext().getMultiLayerTransformer().transform(Layer.LAYOUT, layout.transform(v1)); - Point2D p2 = vv.getRenderContext().getMultiLayerTransformer().transform(Layer.LAYOUT, layout.transform(v2)); - if(p1 == null || p2 == null) continue; - float x1 = (float) p1.getX(); - float y1 = (float) p1.getY(); - float x2 = (float) p2.getX(); - float y2 = (float) p2.getY(); - - // translate the edge to the starting vertex - AffineTransform xform = AffineTransform.getTranslateInstance(x1, y1); - - Shape edgeShape = - vv.getRenderContext().getEdgeShapeTransformer().transform(Context.,E>getInstance(vv.getGraphLayout().getGraph(),e)); - if(isLoop) { - // make the loops proportional to the size of the vertex - Shape s2 = vv.getRenderContext().getVertexShapeTransformer().transform(v2); - Rectangle2D s2Bounds = s2.getBounds2D(); - xform.scale(s2Bounds.getWidth(),s2Bounds.getHeight()); - // move the loop so that the nadir is centered in the vertex - xform.translate(0, -edgeShape.getBounds2D().getHeight()/2); - } else { - float dx = x2 - x1; - float dy = y2 - y1; - // rotate the edge to the angle between the vertices - double theta = Math.atan2(dy,dx); - xform.rotate(theta); - // stretch the edge to span the distance between the vertices - float dist = (float) Math.sqrt(dx*dx + dy*dy); - xform.scale(dist, 1.0f); - } - - // transform the edge to its location and dimensions - edgeShape = xform.createTransformedShape(edgeShape); - - // because of the transform, the edgeShape is now a GeneralPath - // see if this edge is the closest of any that intersect - if(edgeShape.intersects(pickArea)) { - float cx=0; - float cy=0; - float[] f = new float[6]; - PathIterator pi = new GeneralPath(edgeShape).getPathIterator(null); - if(pi.isDone()==false) { - pi.next(); - pi.currentSegment(f); - cx = f[0]; - cy = f[1]; - if(pi.isDone()==false) { - pi.currentSegment(f); - cx = f[0]; - cy = f[1]; - } - } - float dx = (float) (cx - x); - float dy = (float) (cy - y); - float dist = dx * dx + dy * dy; - if (dist < minDistance) { - minDistance = dist; - closest = e; - } - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return closest; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/picking/MultiPickedState.java b/gui/jung-src/edu/uci/ics/jung/visualization/picking/MultiPickedState.java deleted file mode 100644 index 7d7e89e2..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/picking/MultiPickedState.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. 
- * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - * Created on Mar 28, 2005 - */ -package edu.uci.ics.jung.visualization.picking; - -import java.awt.event.ItemEvent; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Set; - -/** - * Maintains the state of what has been 'picked' in the graph. - * The Sets are constructed so that their iterators - * will traverse them in the order in which they are picked. - * - * @author Tom Nelson - * @author Joshua O'Madadhain - * - */ -public class MultiPickedState extends AbstractPickedState implements PickedState { - /** - * the 'picked' vertices - */ - protected Set picked = new LinkedHashSet(); - - /** - * @see PickedState#pick(ArchetypeVertex, boolean) - */ - public boolean pick(T v, boolean state) { - boolean prior_state = this.picked.contains(v); - if (state) { - picked.add(v); - if(prior_state == false) { - fireItemStateChanged(new ItemEvent(this, ItemEvent.ITEM_STATE_CHANGED, - v, ItemEvent.SELECTED)); - } - - } else { - picked.remove(v); - if(prior_state == true) { - fireItemStateChanged(new ItemEvent(this, ItemEvent.ITEM_STATE_CHANGED, - v, ItemEvent.DESELECTED)); - } - - } - return prior_state; - } - - /** - * @see edu.uci.ics.jung.visualization.picking.PickedState#clearPickedVertices() - */ - public void clear() { - Collection unpicks = new ArrayList(picked); - for(T v : unpicks) { - pick(v, false); - } - picked.clear(); - - } - - /** - * @see edu.uci.ics.jung.visualization.picking.PickedState#getPickedEdges() - */ - public Set getPicked() { - return Collections.unmodifiableSet(picked); - } - - /** - * @see edu.uci.ics.jung.visualization.picking.PickedState#isPicked(ArchetypeEdge) - */ - public boolean isPicked(T e) { - return picked.contains(e); - } - - /** - * for the ItemSelectable interface contract - */ - @SuppressWarnings("unchecked") - public T[] getSelectedObjects() { - List list = new ArrayList(picked); - return (T[])list.toArray(); - } - - public void restrict(Collection vs) { - ArrayList unpicks = new ArrayList(picked); - unpicks.removeAll(vs); - for(T v : unpicks) { - pick(v, false); - } - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/picking/PickedInfo.java b/gui/jung-src/edu/uci/ics/jung/visualization/picking/PickedInfo.java deleted file mode 100644 index 93c64397..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/picking/PickedInfo.java +++ /dev/null @@ -1,23 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.visualization.picking; - - - -/** - * An interface for classes that return information regarding whether a - * given graph element (vertex or edge) has been selected. 
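Because MultiPickedState stores its elements in a LinkedHashSet, getPicked() iterates in the order the elements were picked, and every state change is reported through the ItemListener machinery inherited from AbstractPickedState. A small usage sketch (vertex names are hypothetical):

    import java.awt.event.ItemEvent;
    import java.awt.event.ItemListener;

    import edu.uci.ics.jung.visualization.picking.MultiPickedState;
    import edu.uci.ics.jung.visualization.picking.PickedState;

    public class PickedStateSketch {
        public static void main(String[] args) {
            PickedState<String> picked = new MultiPickedState<String>();

            // observe selection changes, much as a VisualizationViewer would
            picked.addItemListener(new ItemListener() {
                public void itemStateChanged(ItemEvent e) {
                    String what = (e.getStateChange() == ItemEvent.SELECTED) ? "picked" : "unpicked";
                    System.out.println(what + ": " + e.getItem());
                }
            });

            picked.pick("B", true);
            picked.pick("A", true);
            picked.pick("B", true);                    // already picked: no event is fired
            System.out.println(picked.getPicked());    // [B, A], insertion order preserved
            System.out.println(picked.isPicked("A"));  // true
            picked.clear();                            // fires DESELECTED for each element
        }
    }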
- * - * @author danyelf - */ -public interface PickedInfo { - - public boolean isPicked(T t); -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/picking/PickedState.java b/gui/jung-src/edu/uci/ics/jung/visualization/picking/PickedState.java deleted file mode 100644 index df36cde7..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/picking/PickedState.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - * Created on Apr 2, 2005 - */ -package edu.uci.ics.jung.visualization.picking; - -import java.awt.ItemSelectable; -import java.util.Collection; -import java.util.Set; - -/** - * An interface for classes that keep track of the "picked" state - * of edges or vertices. - * - * @author Tom Nelson - * @author Joshua O'Madadhain - */ -public interface PickedState extends PickedInfo, ItemSelectable { - /** - * Marks v as "picked" if b == true, - * and unmarks v as picked if b == false. - * @return the "picked" state of v prior to this call - */ - boolean pick(T v, boolean b); - - /** - * Clears the "picked" state from all elements. - */ - void clear(); - - /** - * Returns all "picked" elements. - */ - Set getPicked(); - - /** - * Returns true if v is currently "picked". - */ - boolean isPicked(T v); - - /** - * Clears all "picked" elements not in the given set - * @param vs - */ - void restrict(Collection vs); - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/picking/RadiusPickSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/picking/RadiusPickSupport.java deleted file mode 100644 index 0c31040f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/picking/RadiusPickSupport.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -/* - * Created on Mar 19, 2005 - * - */ -package edu.uci.ics.jung.visualization.picking; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.algorithms.layout.RadiusGraphElementAccessor; - - - -/** - * Simple implementation of PickSupport that returns the vertex or edge - * that is closest to the specified location. This implementation - * provides the same picking options that were available in - * previous versions of AbstractLayout. - * - * @author Tom Nelson - * @author Joshua O'Madadhain - */ -public class RadiusPickSupport - extends RadiusGraphElementAccessor implements GraphElementAccessor { - - public RadiusPickSupport() { - this(Math.sqrt(Double.MAX_VALUE - 1000)); - } - - /** - * the layout will always be provided by the VisualizationViewer - * this is supporting picking for - * @param maxDistance - */ - public RadiusPickSupport(double maxDistance) { - super(maxDistance); - } - - /** - * Gets the vertex nearest to the location of the (x,y) location selected, - * within a distance of maxDistance. Iterates through all - * visible vertices and checks their distance from the click. Override this - * method to provide a more efficient implementation. 
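The nearest-within-maxDistance scan that RadiusPickSupport delegates to RadiusGraphElementAccessor can be written down without any JUNG types; the coordinates below are hypothetical.

    import java.awt.geom.Point2D;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class RadiusPickSketch {
        // Nearest key within maxDistance of (x,y), or null if nothing is close enough.
        static String nearest(Map<String, Point2D> locations, double x, double y, double maxDistance) {
            String best = null;
            double bestDist = maxDistance * maxDistance;   // compare squared distances
            for (Map.Entry<String, Point2D> e : locations.entrySet()) {
                double dist = e.getValue().distanceSq(x, y);
                if (dist <= bestDist) {
                    bestDist = dist;
                    best = e.getKey();
                }
            }
            return best;
        }

        public static void main(String[] args) {
            Map<String, Point2D> locations = new LinkedHashMap<String, Point2D>();
            locations.put("v1", new Point2D.Double(10, 10));
            locations.put("v2", new Point2D.Double(50, 10));
            System.out.println(nearest(locations, 14, 10, 8));   // v1
            System.out.println(nearest(locations, 30, 10, 8));   // null: both vertices are farther than 8 away
        }
    }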
- */ - public V getVertex(Layout layout, double x, double y) { - return getVertex(layout, x, y, this.maxDistance); - } - - /** - * Gets the vertex nearest to the location of the (x,y) location selected, - * within a distance of maxDistance. Iterates through all - * visible vertices and checks their distance from the click. Override this - * method to provide a more efficient implementation. - * @param x - * @param y - * @param maxDistance temporarily overrides member maxDistance - */ - public V getVertex(Layout layout, double x, double y, double maxDistance) { - return super.getVertex(layout, x, y, maxDistance); - } - - /** - * Gets the edge nearest to the location of the (x,y) location selected. - * Calls the longer form of the call. - */ - public E getEdge(Layout layout, double x, double y) { - return getEdge(layout, x, y, this.maxDistance); - } - - /** - * Gets the edge nearest to the location of the (x,y) location selected, - * within a distance of maxDistance, Iterates through all - * visible edges and checks their distance from the click. Override this - * method to provide a more efficient implementation. - * - * @param x - * @param y - * @param maxDistance temporarily overrides member maxDistance - * @return Edge closest to the click. - */ - public E getEdge(Layout layout, double x, double y, double maxDistance) { - return super.getEdge(layout, x, y, maxDistance); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/picking/ShapePickSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/picking/ShapePickSupport.java deleted file mode 100644 index a3dbd025..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/picking/ShapePickSupport.java +++ /dev/null @@ -1,477 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 11, 2005 - * - */ -package edu.uci.ics.jung.visualization.picking; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.GeneralPath; -import java.awt.geom.PathIterator; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.Collection; -import java.util.ConcurrentModificationException; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.Set; - -import org.apache.commons.collections15.Predicate; -import org.apache.commons.collections15.functors.TruePredicate; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.Pair; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationServer; - -/** - * A GraphElementAccessor that returns elements whose Shape - * contains the specified pick point or region. - * - * @author Tom Nelson - * - */ -public class ShapePickSupport implements GraphElementAccessor { - - /** - * The available picking heuristics: - *
 - * <ul>
 - * <li>Style.CENTERED: returns the element whose
 - * center is closest to the pick point.
 - * <li>Style.LOWEST: returns the first such element
 - * encountered. (If the element collection has a consistent
 - * ordering, this will also be the element "on the bottom",
 - * that is, the one which is rendered first.)
 - * <li>Style.HIGHEST: returns the last such element
 - * encountered. (If the element collection has a consistent
 - * ordering, this will also be the element "on the top",
 - * that is, the one which is rendered last.)
 - * </ul>
          - * - */ - public static enum Style { LOWEST, CENTERED, HIGHEST }; - - /** - * - * - */ - protected float pickSize; - - /** - * The VisualizationServer in which the - * this instance is being used for picking. Used to - * retrieve properties such as the layout, renderer, - * vertex and edge shapes, and coordinate transformations. - */ - protected VisualizationServer vv; - - /** - * The current picking heuristic for this instance. Defaults - * to CENTERED. - */ - protected Style style = Style.CENTERED; - - /** - * Creates a ShapePickSupport for the vv - * VisualizationServer, with the specified pick footprint and - * the default pick style. - * The VisualizationServer is used to access - * properties of the current visualization (layout, renderer, - * coordinate transformations, vertex/edge shapes, etc.). - * @param vv source of the current Layout. - * @param pickSize the size of the pick footprint for line edges - */ - public ShapePickSupport(VisualizationServer vv, float pickSize) { - this.vv = vv; - this.pickSize = pickSize; - } - - /** - * Create a ShapePickSupport for the specified - * VisualizationServer with a default pick footprint. - * of size 2. - */ - public ShapePickSupport(VisualizationServer vv) { - this.vv = vv; - this.pickSize = 2; - } - - /** - * Returns the style of picking used by this instance. - * This specifies which of the elements, among those - * whose shapes contain the pick point, is returned. - * The available styles are: - *
 - * <ul>
 - * <li>Style.CENTERED: returns the element whose
 - * center is closest to the pick point.
 - * <li>Style.LOWEST: returns the first such element
 - * encountered. (If the element collection has a consistent
 - * ordering, this will also be the element "on the bottom",
 - * that is, the one which is rendered first.)
 - * <li>Style.HIGHEST: returns the last such element
 - * encountered. (If the element collection has a consistent
 - * ordering, this will also be the element "on the top",
 - * that is, the one which is rendered last.)
 - * </ul>
          - * - * @return the style of picking used by this instance - */ - public Style getStyle() { - return style; - } - - /** - * Specifies the style of picking to be used by this instance. - * This specifies which of the elements, among those - * whose shapes contain the pick point, will be returned. - * The available styles are: - *
 - * <ul>
 - * <li>Style.CENTERED: returns the element whose
 - * center is closest to the pick point.
 - * <li>Style.LOWEST: returns the first such element
 - * encountered. (If the element collection has a consistent
 - * ordering, this will also be the element "on the bottom",
 - * that is, the one which is rendered first.)
 - * <li>Style.HIGHEST: returns the last such element
 - * encountered. (If the element collection has a consistent
 - * ordering, this will also be the element "on the top",
 - * that is, the one which is rendered last.)
 - * </ul>
          - * @param style the style to set - */ - public void setStyle(Style style) { - this.style = style; - } - - /** - * Iterates over Vertices, checking to see if x,y is contained in the - * Vertex's Shape. If (x,y) is contained in more than one vertex, use - * the vertex whose center is closest to the pick point. - * @see edu.uci.ics.jung.visualization.picking.PickSupport#getVertex(double, double) - */ - public V getVertex(Layout layout, double x, double y) { - - V closest = null; - double minDistance = Double.MAX_VALUE; - Point2D ip = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.VIEW, - new Point2D.Double(x,y)); - x = ip.getX(); - y = ip.getY(); - - while(true) { - try { - for(V v : getFilteredVertices(layout)) { - - Shape shape = vv.getRenderContext().getVertexShapeTransformer().transform(v); - // get the vertex location - Point2D p = layout.transform(v); - if(p == null) continue; - // transform the vertex location to screen coords - p = vv.getRenderContext().getMultiLayerTransformer().transform(Layer.LAYOUT, p); - - double ox = x - p.getX(); - double oy = y - p.getY(); - - if(shape.contains(ox, oy)) { - - if(style == Style.LOWEST) { - // return the first match - return v; - } else if(style == Style.HIGHEST) { - // will return the last match - closest = v; - } else { - - // return the vertex closest to the - // center of a vertex shape - Rectangle2D bounds = shape.getBounds2D(); - double dx = bounds.getCenterX() - ox; - double dy = bounds.getCenterY() - oy; - double dist = dx * dx + dy * dy; - if (dist < minDistance) { - minDistance = dist; - closest = v; - } - } - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return closest; - } - - /** - * Returns the vertices whose layout coordinates are contained in - * Shape. - * The shape is in screen coordinates, and the graph vertices - * are transformed to screen coordinates before they are tested - * for inclusion. - * @return the Collection of vertices whose layout - * coordinates are contained in shape. - */ - public Collection getVertices(Layout layout, Shape shape) { - Set pickedVertices = new HashSet(); - - // remove the view transform from the rectangle - shape = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.VIEW, shape); - - while(true) { - try { - for(V v : getFilteredVertices(layout)) { - Point2D p = layout.transform(v); - if(p == null) continue; - - p = vv.getRenderContext().getMultiLayerTransformer().transform(Layer.LAYOUT, p); - if(shape.contains(p)) { - pickedVertices.add(v); - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return pickedVertices; - } - - /** - * Returns an edge whose shape intersects the 'pickArea' footprint of the passed - * x,y, coordinates. 
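Because a line has no area, getEdge() above builds a small square 'pickArea' around the click and keeps the closest edge whose shape intersects it. Stripped of the JUNG coordinate transforms, and ranking hits by their bounding-box center rather than by the first path segment as the original does, the idea is roughly:

    import java.awt.Shape;
    import java.awt.geom.Line2D;
    import java.awt.geom.Rectangle2D;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class EdgePickAreaSketch {
        // Key of the edge shape intersecting a pickSize x pickSize square around (x,y); nearest hit wins.
        static String pickEdge(Map<String, Shape> edgeShapes, double x, double y, float pickSize) {
            Rectangle2D pickArea =
                new Rectangle2D.Double(x - pickSize / 2.0, y - pickSize / 2.0, pickSize, pickSize);
            String closest = null;
            double minDistance = Double.MAX_VALUE;
            for (Map.Entry<String, Shape> e : edgeShapes.entrySet()) {
                if (!e.getValue().intersects(pickArea)) continue;
                // rank intersecting edges by how close their bounding-box center is to the click
                Rectangle2D b = e.getValue().getBounds2D();
                double dx = b.getCenterX() - x;
                double dy = b.getCenterY() - y;
                double dist = dx * dx + dy * dy;
                if (dist < minDistance) {
                    minDistance = dist;
                    closest = e.getKey();
                }
            }
            return closest;
        }

        public static void main(String[] args) {
            Map<String, Shape> edges = new LinkedHashMap<String, Shape>();
            edges.put("e1", new Line2D.Double(0, 0, 100, 0));
            edges.put("e2", new Line2D.Double(0, 20, 100, 20));
            System.out.println(pickEdge(edges, 50, 1, 4));    // e1: the click is within 2px of that line
            System.out.println(pickEdge(edges, 50, 10, 4));   // null: neither line crosses the 4x4 square
        }
    }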
- */ - public E getEdge(Layout layout, double x, double y) { - - Point2D ip = vv.getRenderContext().getMultiLayerTransformer().inverseTransform(Layer.VIEW, new Point2D.Double(x,y)); - x = ip.getX(); - y = ip.getY(); - - // as a Line has no area, we can't always use edgeshape.contains(point) so we - // make a small rectangular pickArea around the point and check if the - // edgeshape.intersects(pickArea) - Rectangle2D pickArea = - new Rectangle2D.Float((float)x-pickSize/2,(float)y-pickSize/2,pickSize,pickSize); - E closest = null; - double minDistance = Double.MAX_VALUE; - while(true) { - try { - for(E e : getFilteredEdges(layout)) { - - Shape edgeShape = getTransformedEdgeShape(layout, e); - if (edgeShape == null) - continue; - - // because of the transform, the edgeShape is now a GeneralPath - // see if this edge is the closest of any that intersect - if(edgeShape.intersects(pickArea)) { - float cx=0; - float cy=0; - float[] f = new float[6]; - PathIterator pi = new GeneralPath(edgeShape).getPathIterator(null); - if(pi.isDone()==false) { - pi.next(); - pi.currentSegment(f); - cx = f[0]; - cy = f[1]; - if(pi.isDone()==false) { - pi.currentSegment(f); - cx = f[0]; - cy = f[1]; - } - } - float dx = (float) (cx - x); - float dy = (float) (cy - y); - float dist = dx * dx + dy * dy; - if (dist < minDistance) { - minDistance = dist; - closest = e; - } - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return closest; - } - - /** - * Retrieves the shape template for e and - * transforms it according to the positions of its endpoints - * in layout. - * @param layout the Layout which specifies - * e's endpoints' positions - * @param e the edge whose shape is to be returned - * @return - */ - private Shape getTransformedEdgeShape(Layout layout, E e) { - Pair pair = layout.getGraph().getEndpoints(e); - V v1 = pair.getFirst(); - V v2 = pair.getSecond(); - boolean isLoop = v1.equals(v2); - Point2D p1 = vv.getRenderContext().getMultiLayerTransformer().transform(Layer.LAYOUT, layout.transform(v1)); - Point2D p2 = vv.getRenderContext().getMultiLayerTransformer().transform(Layer.LAYOUT, layout.transform(v2)); - if(p1 == null || p2 == null) - return null; - float x1 = (float) p1.getX(); - float y1 = (float) p1.getY(); - float x2 = (float) p2.getX(); - float y2 = (float) p2.getY(); - - // translate the edge to the starting vertex - AffineTransform xform = AffineTransform.getTranslateInstance(x1, y1); - - Shape edgeShape = - vv.getRenderContext().getEdgeShapeTransformer().transform(Context.,E>getInstance(vv.getGraphLayout().getGraph(),e)); - if(isLoop) { - // make the loops proportional to the size of the vertex - Shape s2 = vv.getRenderContext().getVertexShapeTransformer().transform(v2); - Rectangle2D s2Bounds = s2.getBounds2D(); - xform.scale(s2Bounds.getWidth(),s2Bounds.getHeight()); - // move the loop so that the nadir is centered in the vertex - xform.translate(0, -edgeShape.getBounds2D().getHeight()/2); - } else { - float dx = x2 - x1; - float dy = y2 - y1; - // rotate the edge to the angle between the vertices - double theta = Math.atan2(dy,dx); - xform.rotate(theta); - // stretch the edge to span the distance between the vertices - float dist = (float) Math.sqrt(dx*dx + dy*dy); - xform.scale(dist, 1.0f); - } - - // transform the edge to its location and dimensions - edgeShape = xform.createTransformedShape(edgeShape); - return edgeShape; - } - - /** - * - * @param layout - * @return - */ - protected Collection getFilteredVertices(Layout layout) { - if(verticesAreFiltered()) { - 
Collection unfiltered = layout.getGraph().getVertices(); - Collection filtered = new LinkedHashSet(); - for(V v : unfiltered) { - if(isVertexRendered(Context.,V>getInstance(layout.getGraph(),v))) { - filtered.add(v); - } - } - return filtered; - } else { - return layout.getGraph().getVertices(); - } - } - - /** - * - * @param layout - * @return - */ - protected Collection getFilteredEdges(Layout layout) { - if(edgesAreFiltered()) { - Collection unfiltered = layout.getGraph().getEdges(); - Collection filtered = new LinkedHashSet(); - for(E e : unfiltered) { - if(isEdgeRendered(Context.,E>getInstance(layout.getGraph(),e))) { - filtered.add(e); - } - } - return filtered; - } else { - return layout.getGraph().getEdges(); - } - } - - /** - * Quick test to allow optimization of getFilteredVertices(). - * @return true if there is an active vertex filtering - * mechanism for this visualization, false otherwise - */ - protected boolean verticesAreFiltered() { - Predicate,V>> vertexIncludePredicate = - vv.getRenderContext().getVertexIncludePredicate(); - return vertexIncludePredicate != null && - vertexIncludePredicate instanceof TruePredicate == false; - } - - /** - * Quick test to allow optimization of getFilteredEdges(). - * @return true if there is an active edge filtering - * mechanism for this visualization, false otherwise - */ - protected boolean edgesAreFiltered() { - Predicate,E>> edgeIncludePredicate = - vv.getRenderContext().getEdgeIncludePredicate(); - return edgeIncludePredicate != null && - edgeIncludePredicate instanceof TruePredicate == false; - } - - /** - * Returns true if this vertex in this graph is included - * in the collections of elements to be rendered, and false otherwise. - * @param context the vertex and graph to be queried - * @return true if this vertex is - * included in the collections of elements to be rendered, false - * otherwise. - */ - protected boolean isVertexRendered(Context,V> context) { - Predicate,V>> vertexIncludePredicate = - vv.getRenderContext().getVertexIncludePredicate(); - return vertexIncludePredicate == null || vertexIncludePredicate.evaluate(context); - } - - /** - * Returns true if this edge and its endpoints - * in this graph are all included in the collections of - * elements to be rendered, and false otherwise. - * @param context the edge and graph to be queried - * @return true if this edge and its endpoints are all - * included in the collections of elements to be rendered, false - * otherwise. - */ - protected boolean isEdgeRendered(Context,E> context) { - Predicate,V>> vertexIncludePredicate = - vv.getRenderContext().getVertexIncludePredicate(); - Predicate,E>> edgeIncludePredicate = - vv.getRenderContext().getEdgeIncludePredicate(); - Graph g = context.graph; - E e = context.element; - boolean edgeTest = edgeIncludePredicate == null || edgeIncludePredicate.evaluate(context); - Pair endpoints = g.getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - boolean endpointsTest = vertexIncludePredicate == null || - (vertexIncludePredicate.evaluate(Context.,V>getInstance(g,v1)) && - vertexIncludePredicate.evaluate(Context.,V>getInstance(g,v2))); - return edgeTest && endpointsTest; - } - - /** - * Returns the size of the edge picking area. - * The picking area is square; the size is specified as the length of one - * side, in view coordinates. - * @return the size of the edge picking area - */ - public float getPickSize() { - return pickSize; - } - - /** - * Sets the size of the edge picking area. 
- * @param the length of one side of the (square) picking area, in view coordinates - */ - public void setPickSize(float pickSize) { - this.pickSize = pickSize; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/picking/ViewLensShapePickSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/picking/ViewLensShapePickSupport.java deleted file mode 100644 index 106361ff..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/picking/ViewLensShapePickSupport.java +++ /dev/null @@ -1,262 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - * Created on Mar 11, 2005 - * - */ -package edu.uci.ics.jung.visualization.picking; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.GeneralPath; -import java.awt.geom.PathIterator; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.Collection; -import java.util.ConcurrentModificationException; -import java.util.HashSet; -import java.util.Set; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.Pair; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationServer; -import edu.uci.ics.jung.visualization.transform.MutableTransformerDecorator; - -/** - * ShapePickSupport provides access to Vertices and EdgeType based on - * their actual shapes. - * - * @author Tom Nelson - * - */ -public class ViewLensShapePickSupport extends ShapePickSupport - implements GraphElementAccessor { - - /** - * Create an instance. - * The HasGraphLayout is used as the source of the current - * Graph Layout. The HasShapes - * is used to access the VertexShapes and the EdgeShapes - * @param hasGraphLayout source of the current layout. - * @param hasShapeFunctions source of Vertex and Edge shapes. - * @param pickSize how large to make the pick footprint for line edges - */ - public ViewLensShapePickSupport(VisualizationServer vv, float pickSize) { - super(vv, pickSize); - } - - /** - * Create an instance. - * The pickSize footprint defaults to 2. - */ - public ViewLensShapePickSupport(VisualizationServer vv) { - this(vv, 2); - } - - /** - * Iterates over Vertices, checking to see if x,y is contained in the - * Vertex's Shape. If (x,y) is contained in more than one vertex, use - * the vertex whose center is closest to the pick point. 
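An application chooses between these strategies by installing one of them on the viewer; the pick footprint and Style of a ShapePickSupport can then be tuned with the setters shown above. A minimal configuration sketch, assuming an already constructed VisualizationViewer:

    import edu.uci.ics.jung.visualization.VisualizationViewer;
    import edu.uci.ics.jung.visualization.picking.ShapePickSupport;

    public class PickSupportConfigSketch {
        // Installs a ShapePickSupport with a 4-pixel edge footprint that prefers the top-most element.
        static void configure(VisualizationViewer<String, String> vv) {
            ShapePickSupport<String, String> pickSupport =
                new ShapePickSupport<String, String>(vv, 4);
            pickSupport.setStyle(ShapePickSupport.Style.HIGHEST);
            vv.setPickSupport(pickSupport);
        }
    }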
- * @see edu.uci.ics.jung.visualization.picking.PickSupport#getVertex(double, double) - */ - public V getVertex(Layout layout, double x, double y) { - - V closest = null; - double minDistance = Double.MAX_VALUE; - Point2D ip = ((MutableTransformerDecorator)vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW)).getDelegate().inverseTransform(new Point2D.Double(x,y)); - x = ip.getX(); - y = ip.getY(); - - while(true) { - try { - for(V v : getFilteredVertices(layout)) { - // get the shape - Shape shape = vv.getRenderContext().getVertexShapeTransformer().transform(v); - // transform the vertex location to screen coords - Point2D p = layout.transform(v); - if(p == null) continue; - AffineTransform xform = - AffineTransform.getTranslateInstance(p.getX(), p.getY()); - shape = xform.createTransformedShape(shape); - - // use the LAYOUT transform to move the shape center without - // modifying the actual shape - Point2D lp = vv.getRenderContext().getMultiLayerTransformer().transform(Layer.LAYOUT, p); - AffineTransform xlate = AffineTransform.getTranslateInstance( - lp.getX()-p.getX(),lp.getY()-p.getY()); - shape = xlate.createTransformedShape(shape); - // now use the VIEW transform to modify the actual shape - - shape = vv.getRenderContext().getMultiLayerTransformer().transform(Layer.VIEW, shape); - //vv.getRenderContext().getMultiLayerTransformer().transform(shape); - - // see if this vertex center is closest to the pick point - // among any other containing vertices - if(shape.contains(x, y)) { - - if(style == Style.LOWEST) { - // return the first match - return v; - } else if(style == Style.HIGHEST) { - // will return the last match - closest = v; - } else { - Rectangle2D bounds = shape.getBounds2D(); - double dx = bounds.getCenterX() - x; - double dy = bounds.getCenterY() - y; - double dist = dx * dx + dy * dy; - if (dist < minDistance) { - minDistance = dist; - closest = v; - } - } - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return closest; - } - - /** - * returns the vertices that are contained in the passed shape. - * The shape is in screen coordinates, and the graph vertices - * are transformed to screen coordinates before they are tested - * for inclusion - */ - public Collection getVertices(Layout layout, Shape rectangle) { - Set pickedVertices = new HashSet(); - -// remove the view transform from the rectangle - rectangle = ((MutableTransformerDecorator)vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW)).getDelegate().inverseTransform(rectangle); - - while(true) { - try { - for(V v : getFilteredVertices(layout)) { - Point2D p = layout.transform(v); - if(p == null) continue; - // get the shape - Shape shape = vv.getRenderContext().getVertexShapeTransformer().transform(v); - - AffineTransform xform = - AffineTransform.getTranslateInstance(p.getX(), p.getY()); - shape = xform.createTransformedShape(shape); - - shape = vv.getRenderContext().getMultiLayerTransformer().transform(shape); - Rectangle2D bounds = shape.getBounds2D(); - p.setLocation(bounds.getCenterX(),bounds.getCenterY()); - - if(rectangle.contains(p)) { - pickedVertices.add(v); - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return pickedVertices; - } - /** - * return an edge whose shape intersects the 'pickArea' footprint of the passed - * x,y, coordinates. 
- */ - public E getEdge(Layout layout, double x, double y) { - - Point2D ip = ((MutableTransformerDecorator)vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW)).getDelegate().inverseTransform(new Point2D.Double(x,y)); - x = ip.getX(); - y = ip.getY(); - - // as a Line has no area, we can't always use edgeshape.contains(point) so we - // make a small rectangular pickArea around the point and check if the - // edgeshape.intersects(pickArea) - Rectangle2D pickArea = - new Rectangle2D.Float((float)x-pickSize/2,(float)y-pickSize/2,pickSize,pickSize); - E closest = null; - double minDistance = Double.MAX_VALUE; - while(true) { - try { - for(E e : getFilteredEdges(layout)) { - - Pair pair = layout.getGraph().getEndpoints(e); - V v1 = pair.getFirst(); - V v2 = pair.getSecond(); - boolean isLoop = v1.equals(v2); - Point2D p1 = layout.transform(v1); - //vv.getRenderContext().getBasicTransformer().transform(layout.transform(v1)); - Point2D p2 = layout.transform(v2); - //vv.getRenderContext().getBasicTransformer().transform(layout.transform(v2)); - if(p1 == null || p2 == null) continue; - float x1 = (float) p1.getX(); - float y1 = (float) p1.getY(); - float x2 = (float) p2.getX(); - float y2 = (float) p2.getY(); - - // translate the edge to the starting vertex - AffineTransform xform = AffineTransform.getTranslateInstance(x1, y1); - - Shape edgeShape = - vv.getRenderContext().getEdgeShapeTransformer().transform(Context.,E>getInstance(vv.getGraphLayout().getGraph(),e)); - if(isLoop) { - // make the loops proportional to the size of the vertex - Shape s2 = vv.getRenderContext().getVertexShapeTransformer().transform(v2); - Rectangle2D s2Bounds = s2.getBounds2D(); - xform.scale(s2Bounds.getWidth(),s2Bounds.getHeight()); - // move the loop so that the nadir is centered in the vertex - xform.translate(0, -edgeShape.getBounds2D().getHeight()/2); - } else { - float dx = x2 - x1; - float dy = y2 - y1; - // rotate the edge to the angle between the vertices - double theta = Math.atan2(dy,dx); - xform.rotate(theta); - // stretch the edge to span the distance between the vertices - float dist = (float) Math.sqrt(dx*dx + dy*dy); - xform.scale(dist, 1.0f); - } - - // transform the edge to its location and dimensions - edgeShape = xform.createTransformedShape(edgeShape); - - edgeShape = vv.getRenderContext().getMultiLayerTransformer().transform(edgeShape); - - // because of the transform, the edgeShape is now a GeneralPath - // see if this edge is the closest of any that intersect - if(edgeShape.intersects(pickArea)) { - float cx=0; - float cy=0; - float[] f = new float[6]; - PathIterator pi = new GeneralPath(edgeShape).getPathIterator(null); - if(pi.isDone()==false) { - pi.next(); - pi.currentSegment(f); - cx = f[0]; - cy = f[1]; - if(pi.isDone()==false) { - pi.currentSegment(f); - cx = f[0]; - cy = f[1]; - } - } - float dx = (float) (cx - x); - float dy = (float) (cy - y); - float dist = dx * dx + dy * dy; - if (dist < minDistance) { - minDistance = dist; - closest = e; - } - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return closest; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/picking/package.html b/gui/jung-src/edu/uci/ics/jung/visualization/picking/package.html deleted file mode 100644 index 0511ca4b..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/picking/package.html +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - -

          Visualization mechanisms for supporting the selection of graph elements. - - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicEdgeArrowRenderingSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicEdgeArrowRenderingSupport.java deleted file mode 100644 index 931f2e8e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicEdgeArrowRenderingSupport.java +++ /dev/null @@ -1,223 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 23, 2005 - */ -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.GeneralPath; -import java.awt.geom.Line2D; -import java.awt.geom.PathIterator; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.RenderContext; - -public class BasicEdgeArrowRenderingSupport implements EdgeArrowRenderingSupport { - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.renderers.EdgeArrowRenderingSupport#getArrowTransform(edu.uci.ics.jung.visualization.RenderContext, java.awt.geom.GeneralPath, java.awt.Shape) - */ - public AffineTransform getArrowTransform(RenderContext rc, Shape edgeShape, Shape vertexShape) { - GeneralPath path = new GeneralPath(edgeShape); - float[] seg = new float[6]; - Point2D p1=null; - Point2D p2=null; - AffineTransform at = new AffineTransform(); - // when the PathIterator is done, switch to the line-subdivide - // method to get the arrowhead closer. - for(PathIterator i=path.getPathIterator(null,1); !i.isDone(); i.next()) { - int ret = i.currentSegment(seg); - if(ret == PathIterator.SEG_MOVETO) { - p2 = new Point2D.Float(seg[0],seg[1]); - } else if(ret == PathIterator.SEG_LINETO) { - p1 = p2; - p2 = new Point2D.Float(seg[0],seg[1]); - if(vertexShape.contains(p2)) { - at = getArrowTransform(rc, new Line2D.Float(p1,p2),vertexShape); - break; - } - } - } - return at; - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.renderers.EdgeArrowRenderingSupport#getReverseArrowTransform(edu.uci.ics.jung.visualization.RenderContext, java.awt.geom.GeneralPath, java.awt.Shape) - */ - public AffineTransform getReverseArrowTransform(RenderContext rc, Shape edgeShape, Shape vertexShape) { - return getReverseArrowTransform(rc, edgeShape, vertexShape, true); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.renderers.EdgeArrowRenderingSupport#getReverseArrowTransform(edu.uci.ics.jung.visualization.RenderContext, java.awt.geom.GeneralPath, java.awt.Shape, boolean) - */ - public AffineTransform getReverseArrowTransform(RenderContext rc, Shape edgeShape, Shape vertexShape, - boolean passedGo) { - GeneralPath path = new GeneralPath(edgeShape); - float[] seg = new float[6]; - Point2D p1=null; - Point2D p2=null; - - AffineTransform at = new AffineTransform(); - for(PathIterator i=path.getPathIterator(null,1); !i.isDone(); i.next()) { - int ret = i.currentSegment(seg); - if(ret == PathIterator.SEG_MOVETO) { - p2 = new Point2D.Float(seg[0],seg[1]); - } else if(ret == PathIterator.SEG_LINETO) { - p1 = p2; - p2 = new Point2D.Float(seg[0],seg[1]); - if(passedGo == false && vertexShape.contains(p2)) { - passedGo = true; - } else if(passedGo==true && - vertexShape.contains(p2)==false) { - at = getReverseArrowTransform(rc, new 
Line2D.Float(p1,p2),vertexShape); - break; - } - } - } - return at; - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.renderers.EdgeArrowRenderingSupport#getArrowTransform(edu.uci.ics.jung.visualization.RenderContext, java.awt.geom.Line2D, java.awt.Shape) - */ - public AffineTransform getArrowTransform(RenderContext rc, Line2D edgeShape, Shape vertexShape) { - float dx = (float) (edgeShape.getX1()-edgeShape.getX2()); - float dy = (float) (edgeShape.getY1()-edgeShape.getY2()); - // iterate over the line until the edge shape will place the - // arrowhead closer than 'arrowGap' to the vertex shape boundary - while((dx*dx+dy*dy) > rc.getArrowPlacementTolerance()) { - try { - edgeShape = getLastOutsideSegment(edgeShape, vertexShape); - } catch(IllegalArgumentException e) { - System.err.println(e.toString()); - return null; - } - dx = (float) (edgeShape.getX1()-edgeShape.getX2()); - dy = (float) (edgeShape.getY1()-edgeShape.getY2()); - } - double atheta = Math.atan2(dx,dy)+Math.PI/2; - AffineTransform at = - AffineTransform.getTranslateInstance(edgeShape.getX1(), edgeShape.getY1()); - at.rotate(-atheta); - return at; - } - - /** - * This is used for the reverse-arrow of a non-directed edge - * get a transform to place the arrow shape on the passed edge at the - * point where it intersects the passed shape - * @param edgeShape - * @param vertexShape - * @return - */ - protected AffineTransform getReverseArrowTransform(RenderContext rc, Line2D edgeShape, Shape vertexShape) { - float dx = (float) (edgeShape.getX1()-edgeShape.getX2()); - float dy = (float) (edgeShape.getY1()-edgeShape.getY2()); - // iterate over the line until the edge shape will place the - // arrowhead closer than 'arrowGap' to the vertex shape boundary - while((dx*dx+dy*dy) > rc.getArrowPlacementTolerance()) { - try { - edgeShape = getFirstOutsideSegment(edgeShape, vertexShape); - } catch(IllegalArgumentException e) { - System.err.println(e.toString()); - return null; - } - dx = (float) (edgeShape.getX1()-edgeShape.getX2()); - dy = (float) (edgeShape.getY1()-edgeShape.getY2()); - } - // calculate the angle for the arrowhead - double atheta = Math.atan2(dx,dy)-Math.PI/2; - AffineTransform at = AffineTransform.getTranslateInstance(edgeShape.getX1(),edgeShape.getY1()); - at.rotate(-atheta); - return at; - } - - /** - * Passed Line's point2 must be inside the passed shape or - * an IllegalArgumentException is thrown - * @param line line to subdivide - * @param shape shape to compare with line - * @return a line that intersects the shape boundary - * @throws IllegalArgumentException if the passed line's point1 is not inside the shape - */ - protected Line2D getLastOutsideSegment(Line2D line, Shape shape) { - if(shape.contains(line.getP2())==false) { - String errorString = - "line end point: "+line.getP2()+" is not contained in shape: "+shape.getBounds2D(); - throw new IllegalArgumentException(errorString); - //return null; - } - Line2D left = new Line2D.Double(); - Line2D right = new Line2D.Double(); - // subdivide the line until its left segment intersects - // the shape boundary - do { - subdivide(line, left, right); - line = right; - } while(shape.contains(line.getP1())==false); - // now that right is completely inside shape, - // return left, which must be partially outside - return left; - } - - /** - * Passed Line's point1 must be inside the passed shape or - * an IllegalArgumentException is thrown - * @param line line to subdivide - * @param shape shape to compare with line - * @return a line that intersects the 
shape boundary - * @throws IllegalArgumentException if the passed line's point1 is not inside the shape - */ - protected Line2D getFirstOutsideSegment(Line2D line, Shape shape) { - - if(shape.contains(line.getP1())==false) { - String errorString = - "line start point: "+line.getP1()+" is not contained in shape: "+shape.getBounds2D(); - throw new IllegalArgumentException(errorString); - } - Line2D left = new Line2D.Float(); - Line2D right = new Line2D.Float(); - // subdivide the line until its right side intersects the - // shape boundary - do { - subdivide(line, left, right); - line = left; - } while(shape.contains(line.getP2())==false); - // now that left is completely inside shape, - // return right, which must be partially outside - return right; - } - - /** - * divide a Line2D into 2 new Line2Ds that are returned - * in the passed left and right instances, if non-null - * @param src the line to divide - * @param left the left side, or null - * @param right the right side, or null - */ - protected void subdivide(Line2D src, - Line2D left, - Line2D right) { - double x1 = src.getX1(); - double y1 = src.getY1(); - double x2 = src.getX2(); - double y2 = src.getY2(); - - double mx = x1 + (x2-x1)/2.0; - double my = y1 + (y2-y1)/2.0; - if (left != null) { - left.setLine(x1, y1, mx, my); - } - if (right != null) { - right.setLine(mx, my, x2, y2); - } - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicEdgeLabelRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicEdgeLabelRenderer.java deleted file mode 100644 index f2999623..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicEdgeLabelRenderer.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
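getLastOutsideSegment() above works by repeatedly bisecting the edge's final segment so that the arrowhead can be placed right where the edge crosses the vertex boundary. A simplified variant that keeps the half straddling the boundary directly (instead of the original's nested loops), with a hypothetical circular vertex:

    import java.awt.Shape;
    import java.awt.geom.Ellipse2D;
    import java.awt.geom.Line2D;

    public class ArrowPlacementSketch {
        // Bisects 'line' (whose P2 must be inside 'shape') until the piece straddling
        // the boundary is very short (squared length <= tol).
        static Line2D lastOutsideSegment(Line2D line, Shape shape, double tol) {
            if (!shape.contains(line.getP2()))
                throw new IllegalArgumentException("line end point must be inside the shape");
            while (line.getP1().distanceSq(line.getP2()) > tol) {
                double mx = (line.getX1() + line.getX2()) / 2.0;
                double my = (line.getY1() + line.getY2()) / 2.0;
                Line2D left  = new Line2D.Double(line.getX1(), line.getY1(), mx, my);
                Line2D right = new Line2D.Double(mx, my, line.getX2(), line.getY2());
                // keep whichever half still straddles the shape boundary
                line = shape.contains(right.getP1()) ? left : right;
            }
            return line;   // a short segment crossing the shape boundary
        }

        public static void main(String[] args) {
            Shape vertex = new Ellipse2D.Double(90, -10, 20, 20);   // circle centered at (100, 0), radius 10
            Line2D edge = new Line2D.Double(0, 0, 100, 0);          // edge ending at the vertex center
            Line2D tip = lastOutsideSegment(edge, vertex, 1.0);
            System.out.println(tip.getP1() + " -> " + tip.getP2()); // both points near x = 90, the circle boundary
        }
    }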
- * - * Created on Aug 23, 2005 - */ -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Component; -import java.awt.Dimension; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Ellipse2D; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.Pair; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; - -public class BasicEdgeLabelRenderer implements Renderer.EdgeLabel { - - public Component prepareRenderer(RenderContext rc, EdgeLabelRenderer graphLabelRenderer, Object value, - boolean isSelected, E edge) { - return rc.getEdgeLabelRenderer().getEdgeLabelRendererComponent(rc.getScreenDevice(), value, - rc.getEdgeFontTransformer().transform(edge), isSelected, edge); - } - - public void labelEdge(RenderContext rc, Layout layout, E e, String label) { - if(label == null || label.length() == 0) return; - - Graph graph = layout.getGraph(); - // don't draw edge if either incident vertex is not drawn - Pair endpoints = graph.getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - if (!rc.getEdgeIncludePredicate().evaluate(Context.,E>getInstance(graph,e))) - return; - - if (!rc.getVertexIncludePredicate().evaluate(Context.,V>getInstance(graph,v1)) || - !rc.getVertexIncludePredicate().evaluate(Context.,V>getInstance(graph,v2))) - return; - - Point2D p1 = layout.transform(v1); - Point2D p2 = layout.transform(v2); - p1 = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, p1); - p2 = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, p2); - float x1 = (float) p1.getX(); - float y1 = (float) p1.getY(); - float x2 = (float) p2.getX(); - float y2 = (float) p2.getY(); - - GraphicsDecorator g = rc.getGraphicsContext(); - float distX = x2 - x1; - float distY = y2 - y1; - double totalLength = Math.sqrt(distX * distX + distY * distY); - - double closeness = rc.getEdgeLabelClosenessTransformer().transform(Context.,E>getInstance(graph, e)).doubleValue(); - - int posX = (int) (x1 + (closeness) * distX); - int posY = (int) (y1 + (closeness) * distY); - - int xDisplacement = (int) (rc.getLabelOffset() * (distY / totalLength)); - int yDisplacement = (int) (rc.getLabelOffset() * (-distX / totalLength)); - - Component component = prepareRenderer(rc, rc.getEdgeLabelRenderer(), label, - rc.getPickedEdgeState().isPicked(e), e); - - Dimension d = component.getPreferredSize(); - - Shape edgeShape = rc.getEdgeShapeTransformer().transform(Context.,E>getInstance(graph, e)); - - double parallelOffset = 1; - - parallelOffset += rc.getParallelEdgeIndexFunction().getIndex(graph, e); - - parallelOffset *= d.height; - if(edgeShape instanceof Ellipse2D) { - parallelOffset += edgeShape.getBounds().getHeight(); - parallelOffset = -parallelOffset; - } - - - AffineTransform old = g.getTransform(); - AffineTransform xform = new AffineTransform(old); - xform.translate(posX+xDisplacement, posY+yDisplacement); - double dx = x2 - x1; - double dy = y2 - y1; - if(rc.getEdgeLabelRenderer().isRotateEdgeLabels()) { - double theta = Math.atan2(dy, dx); - if(dx < 0) { - theta += Math.PI; - } - xform.rotate(theta); - } - if(dx < 0) { - parallelOffset = -parallelOffset; - } - - xform.translate(-d.width/2, -(d.height/2-parallelOffset)); - g.setTransform(xform); - g.draw(component, rc.getRendererPane(), 0, 
0, d.width, d.height, true); - - g.setTransform(old); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicEdgeRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicEdgeRenderer.java deleted file mode 100644 index d719ebd5..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicEdgeRenderer.java +++ /dev/null @@ -1,270 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 23, 2005 - */ -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Dimension; -import java.awt.Paint; -import java.awt.Rectangle; -import java.awt.Shape; -import java.awt.Stroke; -import java.awt.geom.AffineTransform; -import java.awt.geom.GeneralPath; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; - -import javax.swing.JComponent; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.EdgeIndexFunction; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.decorators.EdgeShape; -import edu.uci.ics.jung.visualization.decorators.EdgeShape.IndexedRendering; -import edu.uci.ics.jung.visualization.transform.LensTransformer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; - -public class BasicEdgeRenderer implements Renderer.Edge { - - protected EdgeArrowRenderingSupport edgeArrowRenderingSupport = - new BasicEdgeArrowRenderingSupport(); - - public void paintEdge(RenderContext rc, Layout layout, E e) { - GraphicsDecorator g2d = rc.getGraphicsContext(); - Graph graph = layout.getGraph(); - if (!rc.getEdgeIncludePredicate().evaluate(Context.,E>getInstance(graph,e))) - return; - - // don't draw edge if either incident vertex is not drawn - Pair endpoints = graph.getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - if (!rc.getVertexIncludePredicate().evaluate(Context.,V>getInstance(graph,v1)) || - !rc.getVertexIncludePredicate().evaluate(Context.,V>getInstance(graph,v2))) - return; - - Stroke new_stroke = rc.getEdgeStrokeTransformer().transform(e); - Stroke old_stroke = g2d.getStroke(); - if (new_stroke != null) - g2d.setStroke(new_stroke); - - drawSimpleEdge(rc, layout, e); - - // restore paint and stroke - if (new_stroke != null) - g2d.setStroke(old_stroke); - - } - - /** - * Draws the edge e, whose endpoints are at (x1,y1) - * and (x2,y2), on the graphics context g. - * The Shape provided by the EdgeShapeFunction instance - * is scaled in the x-direction so that its width is equal to the distance between - * (x1,y1) and (x2,y2). 
- */ - @SuppressWarnings("unchecked") - protected void drawSimpleEdge(RenderContext rc, Layout layout, E e) { - - GraphicsDecorator g = rc.getGraphicsContext(); - Graph graph = layout.getGraph(); - Pair endpoints = graph.getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - - Point2D p1 = layout.transform(v1); - Point2D p2 = layout.transform(v2); - p1 = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, p1); - p2 = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, p2); - float x1 = (float) p1.getX(); - float y1 = (float) p1.getY(); - float x2 = (float) p2.getX(); - float y2 = (float) p2.getY(); - - boolean isLoop = v1.equals(v2); - Shape s2 = rc.getVertexShapeTransformer().transform(v2); - Shape edgeShape = rc.getEdgeShapeTransformer().transform(Context.,E>getInstance(graph, e)); - - boolean edgeHit = true; - boolean arrowHit = true; - Rectangle deviceRectangle = null; - JComponent vv = rc.getScreenDevice(); - if(vv != null) { - Dimension d = vv.getSize(); - deviceRectangle = new Rectangle(0,0,d.width,d.height); - } - - AffineTransform xform = AffineTransform.getTranslateInstance(x1, y1); - - if(isLoop) { - // this is a self-loop. scale it is larger than the vertex - // it decorates and translate it so that its nadir is - // at the center of the vertex. - Rectangle2D s2Bounds = s2.getBounds2D(); - xform.scale(s2Bounds.getWidth(),s2Bounds.getHeight()); - xform.translate(0, -edgeShape.getBounds2D().getWidth()/2); - } else if(rc.getEdgeShapeTransformer() instanceof EdgeShape.Orthogonal) { - float dx = x2-x1; - float dy = y2-y1; - int index = 0; - if(rc.getEdgeShapeTransformer() instanceof IndexedRendering) { - EdgeIndexFunction peif = - ((IndexedRendering)rc.getEdgeShapeTransformer()).getEdgeIndexFunction(); - index = peif.getIndex(graph, e); - index *= 20; - } - GeneralPath gp = new GeneralPath(); - gp.moveTo(0,0);// the xform will do the translation to x1,y1 - if(x1 > x2) { - if(y1 > y2) { - gp.lineTo(0, index); - gp.lineTo(dx-index, index); - gp.lineTo(dx-index, dy); - gp.lineTo(dx, dy); - } else { - gp.lineTo(0, -index); - gp.lineTo(dx-index, -index); - gp.lineTo(dx-index, dy); - gp.lineTo(dx, dy); - } - - } else { - if(y1 > y2) { - gp.lineTo(0, index); - gp.lineTo(dx+index, index); - gp.lineTo(dx+index, dy); - gp.lineTo(dx, dy); - - } else { - gp.lineTo(0, -index); - gp.lineTo(dx+index, -index); - gp.lineTo(dx+index, dy); - gp.lineTo(dx, dy); - - } - - } - - edgeShape = gp; - - } else { - // this is a normal edge. 
Rotate it to the angle between - // vertex endpoints, then scale it to the distance between - // the vertices - float dx = x2-x1; - float dy = y2-y1; - float thetaRadians = (float) Math.atan2(dy, dx); - xform.rotate(thetaRadians); - float dist = (float) Math.sqrt(dx*dx + dy*dy); - xform.scale(dist, 1.0); - } - - edgeShape = xform.createTransformedShape(edgeShape); - - MutableTransformer vt = rc.getMultiLayerTransformer().getTransformer(Layer.VIEW); - if(vt instanceof LensTransformer) { - vt = ((LensTransformer)vt).getDelegate(); - } - edgeHit = vt.transform(edgeShape).intersects(deviceRectangle); - - if(edgeHit == true) { - - Paint oldPaint = g.getPaint(); - - // get Paints for filling and drawing - // (filling is done first so that drawing and label use same Paint) - Paint fill_paint = rc.getEdgeFillPaintTransformer().transform(e); - if (fill_paint != null) - { - g.setPaint(fill_paint); - g.fill(edgeShape); - } - Paint draw_paint = rc.getEdgeDrawPaintTransformer().transform(e); - if (draw_paint != null) - { - g.setPaint(draw_paint); - g.draw(edgeShape); - } - - float scalex = (float)g.getTransform().getScaleX(); - float scaley = (float)g.getTransform().getScaleY(); - // see if arrows are too small to bother drawing - if(scalex < .3 || scaley < .3) return; - - if (rc.getEdgeArrowPredicate().evaluate(Context.,E>getInstance(graph, e))) { - - Stroke new_stroke = rc.getEdgeArrowStrokeTransformer().transform(e); - Stroke old_stroke = g.getStroke(); - if (new_stroke != null) - g.setStroke(new_stroke); - - - Shape destVertexShape = - rc.getVertexShapeTransformer().transform(graph.getEndpoints(e).getSecond()); - - AffineTransform xf = AffineTransform.getTranslateInstance(x2, y2); - destVertexShape = xf.createTransformedShape(destVertexShape); - - arrowHit = rc.getMultiLayerTransformer().getTransformer(Layer.VIEW).transform(destVertexShape).intersects(deviceRectangle); - if(arrowHit) { - - AffineTransform at = - edgeArrowRenderingSupport.getArrowTransform(rc, edgeShape, destVertexShape); - if(at == null) return; - Shape arrow = rc.getEdgeArrowTransformer().transform(Context.,E>getInstance(graph, e)); - arrow = at.createTransformedShape(arrow); - g.setPaint(rc.getArrowFillPaintTransformer().transform(e)); - g.fill(arrow); - g.setPaint(rc.getArrowDrawPaintTransformer().transform(e)); - g.draw(arrow); - } - if (graph.getEdgeType(e) == EdgeType.UNDIRECTED) { - Shape vertexShape = - rc.getVertexShapeTransformer().transform(graph.getEndpoints(e).getFirst()); - xf = AffineTransform.getTranslateInstance(x1, y1); - vertexShape = xf.createTransformedShape(vertexShape); - - arrowHit = rc.getMultiLayerTransformer().getTransformer(Layer.VIEW).transform(vertexShape).intersects(deviceRectangle); - - if(arrowHit) { - AffineTransform at = edgeArrowRenderingSupport.getReverseArrowTransform(rc, edgeShape, vertexShape, !isLoop); - if(at == null) return; - Shape arrow = rc.getEdgeArrowTransformer().transform(Context.,E>getInstance(graph, e)); - arrow = at.createTransformedShape(arrow); - g.setPaint(rc.getArrowFillPaintTransformer().transform(e)); - g.fill(arrow); - g.setPaint(rc.getArrowDrawPaintTransformer().transform(e)); - g.draw(arrow); - } - } - // restore paint and stroke - if (new_stroke != null) - g.setStroke(old_stroke); - - } - - // restore old paint - g.setPaint(oldPaint); - } - } - - public EdgeArrowRenderingSupport getEdgeArrowRenderingSupport() { - return edgeArrowRenderingSupport; - } - - public void setEdgeArrowRenderingSupport( - EdgeArrowRenderingSupport edgeArrowRenderingSupport) { - 
this.edgeArrowRenderingSupport = edgeArrowRenderingSupport; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicRenderer.java deleted file mode 100644 index ca599c24..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicRenderer.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.visualization.renderers; - -import java.util.ConcurrentModificationException; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.RenderContext; - -/** - * The default implementation of the Renderer used by the - * VisualizationViewer. Default Vertex and Edge Renderers - * are supplied, or the user may set custom values. The - * Vertex and Edge renderers are used in the renderVertex - * and renderEdge methods, which are called in the render - * loop of the VisualizationViewer. - * - * @author Tom Nelson - */ -public class BasicRenderer implements Renderer { - - Renderer.Vertex vertexRenderer = new BasicVertexRenderer(); - Renderer.VertexLabel vertexLabelRenderer = new BasicVertexLabelRenderer(); - Renderer.Edge edgeRenderer = new BasicEdgeRenderer(); - Renderer.EdgeLabel edgeLabelRenderer = new BasicEdgeLabelRenderer(); - - public void render(RenderContext renderContext, Layout layout) { - - // paint all the edges - try { - for(E e : layout.getGraph().getEdges()) { - - renderEdge( - renderContext, - layout, - e); - renderEdgeLabel( - renderContext, - layout, - e); - } - } catch(ConcurrentModificationException cme) { - renderContext.getScreenDevice().repaint(); - } - - // paint all the vertices - try { - for(V v : layout.getGraph().getVertices()) { - - renderVertex( - renderContext, - layout, - v); - renderVertexLabel( - renderContext, - layout, - v); - } - } catch(ConcurrentModificationException cme) { - renderContext.getScreenDevice().repaint(); - } - } - - public void renderVertex(RenderContext rc, Layout layout, V v) { - vertexRenderer.paintVertex(rc, layout, v); - } - - public void renderVertexLabel(RenderContext rc, Layout layout, V v) { - vertexLabelRenderer.labelVertex(rc, layout, v, rc.getVertexLabelTransformer().transform(v)); - } - - public void renderEdge(RenderContext rc, Layout layout, E e) { - edgeRenderer.paintEdge(rc, layout, e); - } - - public void renderEdgeLabel(RenderContext rc, Layout layout, E e) { - edgeLabelRenderer.labelEdge(rc, layout, e, rc.getEdgeLabelTransformer().transform(e)); - } - - public void setVertexRenderer(Renderer.Vertex r) { - this.vertexRenderer = r; - } - - public void setEdgeRenderer(Renderer.Edge r) { - this.edgeRenderer = r; - } - - /** - * @return the edgeLabelRenderer - */ - public Renderer.EdgeLabel getEdgeLabelRenderer() { - return edgeLabelRenderer; - } - - /** - * @param edgeLabelRenderer the edgeLabelRenderer to set - */ - public void setEdgeLabelRenderer(Renderer.EdgeLabel edgeLabelRenderer) { - this.edgeLabelRenderer = edgeLabelRenderer; - } - - /** - * @return the vertexLabelRenderer - */ - public Renderer.VertexLabel getVertexLabelRenderer() { - return vertexLabelRenderer; - } - - /** - * @param vertexLabelRenderer the vertexLabelRenderer to set - */ - public void setVertexLabelRenderer( - Renderer.VertexLabel 
vertexLabelRenderer) { - this.vertexLabelRenderer = vertexLabelRenderer; - } - - /** - * @return the edgeRenderer - */ - public Renderer.Edge getEdgeRenderer() { - return edgeRenderer; - } - - /** - * @return the vertexRenderer - */ - public Renderer.Vertex getVertexRenderer() { - return vertexRenderer; - } - - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicVertexLabelRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicVertexLabelRenderer.java deleted file mode 100644 index 36b8f331..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicVertexLabelRenderer.java +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 23, 2005 - */ -package edu.uci.ics.jung.visualization.renderers; - - -import java.awt.Component; -import java.awt.Dimension; -import java.awt.Point; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.transform.BidirectionalTransformer; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; -import edu.uci.ics.jung.visualization.transform.shape.ShapeTransformer; -import edu.uci.ics.jung.visualization.transform.shape.TransformingGraphics; - -public class BasicVertexLabelRenderer implements Renderer.VertexLabel { - - protected Position position = Position.SE; - private Positioner positioner = new OutsidePositioner(); - - public BasicVertexLabelRenderer() { - super(); - } - - public BasicVertexLabelRenderer(Position position) { - this.position = position; - } - - /** - * @return the position - */ - public Position getPosition() { - return position; - } - - /** - * @param position the position to set - */ - public void setPosition(Position position) { - this.position = position; - } - - public Component prepareRenderer(RenderContext rc, VertexLabelRenderer graphLabelRenderer, Object value, - boolean isSelected, V vertex) { - return rc.getVertexLabelRenderer().getVertexLabelRendererComponent(rc.getScreenDevice(), value, - rc.getVertexFontTransformer().transform(vertex), isSelected, vertex); - } - - /** - * Labels the specified vertex with the specified label. - * Uses the font specified by this instance's - * VertexFontFunction. (If the font is unspecified, the existing - * font for the graphics context is used.) If vertex label centering - * is active, the label is centered on the position of the vertex; otherwise - * the label is offset slightly. 
- */ - public void labelVertex(RenderContext rc, Layout layout, V v, String label) { - Graph graph = layout.getGraph(); - if (rc.getVertexIncludePredicate().evaluate(Context.,V>getInstance(graph,v)) == false) { - return; - } - Point2D pt = layout.transform(v); - pt = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, pt); - - float x = (float) pt.getX(); - float y = (float) pt.getY(); - - Component component = prepareRenderer(rc, rc.getVertexLabelRenderer(), label, - rc.getPickedVertexState().isPicked(v), v); - GraphicsDecorator g = rc.getGraphicsContext(); - Dimension d = component.getPreferredSize(); - AffineTransform xform = AffineTransform.getTranslateInstance(x, y); - - Shape shape = rc.getVertexShapeTransformer().transform(v); - shape = xform.createTransformedShape(shape); - if(rc.getGraphicsContext() instanceof TransformingGraphics) { - BidirectionalTransformer transformer = ((TransformingGraphics)rc.getGraphicsContext()).getTransformer(); - if(transformer instanceof ShapeTransformer) { - ShapeTransformer shapeTransformer = (ShapeTransformer)transformer; - shape = shapeTransformer.transform(shape); - } - } - Rectangle2D bounds = shape.getBounds2D(); - - Point p; - if(position == Position.AUTO) { - Dimension vvd = rc.getScreenDevice().getSize(); - if(vvd.width == 0 || vvd.height == 0) { - vvd = rc.getScreenDevice().getPreferredSize(); - } - p = getAnchorPoint(bounds, d, positioner.getPosition(v, x, y, vvd)); - } else { - p = getAnchorPoint(bounds, d, position); - } - g.draw(component, rc.getRendererPane(), p.x, p.y, d.width, d.height, true); - } - - protected Point getAnchorPoint(Rectangle2D vertexBounds, Dimension labelSize, Position position) { - double x; - double y; - int offset = 5; - switch(position) { - - case N: - x = vertexBounds.getCenterX()-labelSize.width/2; - y = vertexBounds.getMinY()-offset - labelSize.height; - return new Point((int)x,(int)y); - - case NE: - x = vertexBounds.getMaxX()+offset; - y = vertexBounds.getMinY()-offset-labelSize.height; - return new Point((int)x,(int)y); - - case E: - x = vertexBounds.getMaxX()+offset; - y = vertexBounds.getCenterY()-labelSize.height/2; - return new Point((int)x,(int)y); - - case SE: - x = vertexBounds.getMaxX()+offset; - y = vertexBounds.getMaxY()+offset; - return new Point((int)x,(int)y); - - case S: - x = vertexBounds.getCenterX()-labelSize.width/2; - y = vertexBounds.getMaxY()+offset; - return new Point((int)x,(int)y); - - case SW: - x = vertexBounds.getMinX()-offset-labelSize.width; - y = vertexBounds.getMaxY()+offset; - return new Point((int)x,(int)y); - - case W: - x = vertexBounds.getMinX()-offset-labelSize.width; - y = vertexBounds.getCenterY()-labelSize.height/2; - return new Point((int)x,(int)y); - - case NW: - x = vertexBounds.getMinX()-offset-labelSize.width; - y = vertexBounds.getMinY()-offset-labelSize.height; - return new Point((int)x,(int)y); - - case CNTR: - x = vertexBounds.getCenterX()-labelSize.width/2; - y = vertexBounds.getCenterY()-labelSize.height/2; - return new Point((int)x,(int)y); - - default: - return new Point(); - } - - } - public static class InsidePositioner implements Positioner { - public Position getPosition(V v, float x, float y, Dimension d) { - int cx = d.width/2; - int cy = d.height/2; - if(x > cx && y > cy) return Position.NW; - if(x > cx && y < cy) return Position.SW; - if(x < cx && y > cy) return Position.NE; - return Position.SE; - } - } - public static class OutsidePositioner implements Positioner { - public Position getPosition(V v, float x, float y, Dimension d) { - int cx = 
d.width/2; - int cy = d.height/2; - if(x > cx && y > cy) return Position.SE; - if(x > cx && y < cy) return Position.NE; - if(x < cx && y > cy) return Position.SW; - return Position.NW; - } - } - /** - * @return the positioner - */ - public Positioner getPositioner() { - return positioner; - } - - /** - * @param positioner the positioner to set - */ - public void setPositioner(Positioner positioner) { - this.positioner = positioner; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicVertexRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicVertexRenderer.java deleted file mode 100644 index b97a4a0c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/BasicVertexRenderer.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 23, 2005 - */ -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Dimension; -import java.awt.Paint; -import java.awt.Rectangle; -import java.awt.Shape; -import java.awt.Stroke; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; - -import javax.swing.Icon; -import javax.swing.JComponent; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; -import edu.uci.ics.jung.visualization.transform.MutableTransformerDecorator; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; - -public class BasicVertexRenderer implements Renderer.Vertex { - - public void paintVertex(RenderContext rc, Layout layout, V v) { - Graph graph = layout.getGraph(); - if (rc.getVertexIncludePredicate().evaluate(Context.,V>getInstance(graph,v))) { - paintIconForVertex(rc, v, layout); - } - } - - /** - * Paint v's icon on g at (x,y). 
- */ - protected void paintIconForVertex(RenderContext rc, V v, Layout layout) { - GraphicsDecorator g = rc.getGraphicsContext(); - boolean vertexHit = true; - // get the shape to be rendered - Shape shape = rc.getVertexShapeTransformer().transform(v); - - Point2D p = layout.transform(v); - p = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, p); - float x = (float)p.getX(); - float y = (float)p.getY(); - // create a transform that translates to the location of - // the vertex to be rendered - AffineTransform xform = AffineTransform.getTranslateInstance(x,y); - // transform the vertex shape with xtransform - shape = xform.createTransformedShape(shape); - - vertexHit = vertexHit(rc, shape); - //rc.getViewTransformer().transform(shape).intersects(deviceRectangle); - - if (vertexHit) { - if(rc.getVertexIconTransformer() != null) { - Icon icon = rc.getVertexIconTransformer().transform(v); - if(icon != null) { - - g.draw(icon, rc.getScreenDevice(), shape, (int)x, (int)y); - - } else { - paintShapeForVertex(rc, v, shape); - } - } else { - paintShapeForVertex(rc, v, shape); - } - } - } - - protected boolean vertexHit(RenderContext rc, Shape s) { - JComponent vv = rc.getScreenDevice(); - Rectangle deviceRectangle = null; - if(vv != null) { - Dimension d = vv.getSize(); - deviceRectangle = new Rectangle( - 0,0, - d.width,d.height); - } - MutableTransformer vt = rc.getMultiLayerTransformer().getTransformer(Layer.VIEW); - if(vt instanceof MutableTransformerDecorator) { - vt = ((MutableTransformerDecorator)vt).getDelegate(); - } - return vt.transform(s).intersects(deviceRectangle); - } - - protected void paintShapeForVertex(RenderContext rc, V v, Shape shape) { - GraphicsDecorator g = rc.getGraphicsContext(); - Paint oldPaint = g.getPaint(); - Paint fillPaint = rc.getVertexFillPaintTransformer().transform(v); - if(fillPaint != null) { - g.setPaint(fillPaint); - g.fill(shape); - g.setPaint(oldPaint); - } - Paint drawPaint = rc.getVertexDrawPaintTransformer().transform(v); - if(drawPaint != null) { - g.setPaint(drawPaint); - Stroke oldStroke = g.getStroke(); - Stroke stroke = rc.getVertexStrokeTransformer().transform(v); - if(stroke != null) { - g.setStroke(stroke); - } - g.draw(shape); - g.setPaint(oldPaint); - g.setStroke(oldStroke); - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/CenterEdgeArrowRenderingSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/CenterEdgeArrowRenderingSupport.java deleted file mode 100644 index 50262b73..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/CenterEdgeArrowRenderingSupport.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 23, 2005 - */ -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.GeneralPath; -import java.awt.geom.Line2D; -import java.awt.geom.PathIterator; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.RenderContext; - -public class CenterEdgeArrowRenderingSupport implements EdgeArrowRenderingSupport { - - /** - * Returns a transform to position the arrowhead on this edge shape at the - * point where it intersects the passed vertex shape. 
- */ - public AffineTransform getArrowTransform(RenderContext rc, Shape edgeShape, Shape vertexShape) { - GeneralPath path = new GeneralPath(edgeShape); - float[] seg = new float[6]; - Point2D p1=null; - Point2D p2=null; - AffineTransform at = new AffineTransform(); - // count the segments. - int middleSegment = 0; - int current = 0; - for(PathIterator i=path.getPathIterator(null,1); !i.isDone(); i.next()) { - current++; - } - middleSegment = current/2; - // find the middle segment - current = 0; - for(PathIterator i=path.getPathIterator(null,1); !i.isDone(); i.next()) { - current++; - int ret = i.currentSegment(seg); - if(ret == PathIterator.SEG_MOVETO) { - p2 = new Point2D.Float(seg[0],seg[1]); - } else if(ret == PathIterator.SEG_LINETO) { - p1 = p2; - p2 = new Point2D.Float(seg[0],seg[1]); - } - if(current > middleSegment) { // done - at = getArrowTransform(rc, new Line2D.Float(p1,p2),vertexShape); - break; - } - - } - return at; - } - - /** - * Returns a transform to position the arrowhead on this edge shape at the - * point where it intersects the passed vertex shape. - */ - public AffineTransform getReverseArrowTransform(RenderContext rc, Shape edgeShape, Shape vertexShape) { - return getReverseArrowTransform(rc, edgeShape, vertexShape, true); - } - - /** - *

Returns a transform to position the arrowhead on this edge shape at the - * point where it intersects the passed vertex shape. - * - * The Loop edge is a special case because its starting point is not inside - * the vertex. The passedGo flag handles this case.
          - * - * @param path - * @param vertexShape - * @param passedGo - used only for Loop edges - */ - public AffineTransform getReverseArrowTransform(RenderContext rc, Shape edgeShape, Shape vertexShape, - boolean passedGo) { - GeneralPath path = new GeneralPath(edgeShape); - float[] seg = new float[6]; - Point2D p1=null; - Point2D p2=null; - AffineTransform at = new AffineTransform(); - // count the segments. - int middleSegment = 0; - int current = 0; - for(PathIterator i=path.getPathIterator(null,1); !i.isDone(); i.next()) { - current++; - } - middleSegment = current/2; - // find the middle segment - current = 0; - for(PathIterator i=path.getPathIterator(null,1); !i.isDone(); i.next()) { - current++; - int ret = i.currentSegment(seg); - if(ret == PathIterator.SEG_MOVETO) { - p2 = new Point2D.Float(seg[0],seg[1]); - } else if(ret == PathIterator.SEG_LINETO) { - p1 = p2; - p2 = new Point2D.Float(seg[0],seg[1]); - } - if(current > middleSegment) { // done - at = getReverseArrowTransform(rc, new Line2D.Float(p1,p2),vertexShape); - break; - } - } - return at; - } - - /** - * This is used for the arrow of a directed and for one of the - * arrows for non-directed edges - * Get a transform to place the arrow shape on the passed edge at the - * point where it intersects the passed shape - * @param edgeShape - * @param vertexShape - * @return - */ - public AffineTransform getArrowTransform(RenderContext rc, Line2D edgeShape, Shape vertexShape) { - - // find the midpoint of the edgeShape line, and use it to make the transform - Line2D left = new Line2D.Float(); - Line2D right = new Line2D.Float(); - this.subdivide(edgeShape, left, right); - edgeShape = right; - float dx = (float) (edgeShape.getX1()-edgeShape.getX2()); - float dy = (float) (edgeShape.getY1()-edgeShape.getY2()); - double atheta = Math.atan2(dx,dy)+Math.PI/2; - AffineTransform at = - AffineTransform.getTranslateInstance(edgeShape.getX1(), edgeShape.getY1()); - at.rotate(-atheta); - return at; - } - - /** - * This is used for the reverse-arrow of a non-directed edge - * get a transform to place the arrow shape on the passed edge at the - * point where it intersects the passed shape - * @param edgeShape - * @param vertexShape - * @return - */ - protected AffineTransform getReverseArrowTransform(RenderContext rc, Line2D edgeShape, Shape vertexShape) { - // find the midpoint of the edgeShape line, and use it to make the transform - Line2D left = new Line2D.Float(); - Line2D right = new Line2D.Float(); - this.subdivide(edgeShape, left, right); - edgeShape = right; - float dx = (float) (edgeShape.getX1()-edgeShape.getX2()); - float dy = (float) (edgeShape.getY1()-edgeShape.getY2()); - // calculate the angle for the arrowhead - double atheta = Math.atan2(dx,dy)-Math.PI/2; - AffineTransform at = AffineTransform.getTranslateInstance(edgeShape.getX1(),edgeShape.getY1()); - at.rotate(-atheta); - return at; - } - - /** - * divide a Line2D into 2 new Line2Ds that are returned - * in the passed left and right instances, if non-null - * @param src the line to divide - * @param left the left side, or null - * @param right the right side, or null - */ - protected void subdivide(Line2D src, - Line2D left, - Line2D right) { - double x1 = src.getX1(); - double y1 = src.getY1(); - double x2 = src.getX2(); - double y2 = src.getY2(); - - double mx = x1 + (x2-x1)/2.0; - double my = y1 + (y2-y1)/2.0; - if (left != null) { - left.setLine(x1, y1, mx, my); - } - if (right != null) { - right.setLine(mx, my, x2, y2); - } - } - -} diff --git 
a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/Checkmark.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/Checkmark.java deleted file mode 100644 index a6e1495e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/Checkmark.java +++ /dev/null @@ -1,63 +0,0 @@ -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.BasicStroke; -import java.awt.Color; -import java.awt.Component; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.RenderingHints; -import java.awt.Shape; -import java.awt.Stroke; -import java.awt.geom.AffineTransform; -import java.awt.geom.GeneralPath; -import java.util.Collections; - -import javax.swing.Icon; - -/** - * a simple Icon that draws a checkmark in the lower-right quadrant of its - * area. Used to draw a checkmark on Picked Vertices. - * @author Tom Nelson - */ -public class Checkmark implements Icon { - - GeneralPath path = new GeneralPath(); - AffineTransform highlight = AffineTransform.getTranslateInstance(-1,-1); - AffineTransform lowlight = AffineTransform.getTranslateInstance(1,1); - AffineTransform shadow = AffineTransform.getTranslateInstance(2,2); - Color color; - public Checkmark() { - this(Color.green); - } - public Checkmark(Color color) { - this.color = color; - path.moveTo(10,17); - path.lineTo(13,20); - path.lineTo(20,13); - } - public void paintIcon(Component c, Graphics g, int x, int y) { - Shape shape = AffineTransform.getTranslateInstance(x, y).createTransformedShape(path); - Graphics2D g2d = (Graphics2D)g; - g2d.addRenderingHints(Collections.singletonMap(RenderingHints.KEY_ANTIALIASING, - RenderingHints.VALUE_ANTIALIAS_ON)); - Stroke stroke = g2d.getStroke(); - g2d.setStroke(new BasicStroke(4)); - g2d.setColor(Color.darkGray); - g2d.draw(shadow.createTransformedShape(shape)); - g2d.setColor(Color.black); - g2d.draw(lowlight.createTransformedShape(shape)); - g2d.setColor(Color.white); - g2d.draw(highlight.createTransformedShape(shape)); - g2d.setColor(color); - g2d.draw(shape); - g2d.setStroke(stroke); - } - - public int getIconWidth() { - return 20; - } - - public int getIconHeight() { - return 20; - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/DefaultEdgeLabelRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/DefaultEdgeLabelRenderer.java deleted file mode 100644 index 304d3ce8..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/DefaultEdgeLabelRenderer.java +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Apr 14, 2005 - */ - -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Color; -import java.awt.Component; -import java.awt.Font; -import java.awt.Rectangle; -import java.io.Serializable; - -import javax.swing.JComponent; -import javax.swing.JLabel; -import javax.swing.border.Border; -import javax.swing.border.EmptyBorder; - -/** - * DefaultEdgeLabelRenderer is similar to the cell renderers - * used by the JTable and JTree jfc classes. 
- * - * @author Tom Nelson - * - * - */ -@SuppressWarnings("serial") -public class DefaultEdgeLabelRenderer extends JLabel implements - EdgeLabelRenderer, Serializable { - - protected static Border noFocusBorder = new EmptyBorder(0,0,0,0); - - protected Color pickedEdgeLabelColor = Color.black; - protected boolean rotateEdgeLabels; - - public DefaultEdgeLabelRenderer(Color pickedEdgeLabelColor) { - this(pickedEdgeLabelColor, true); - } - - /** - * Creates a default table cell renderer. - */ - public DefaultEdgeLabelRenderer(Color pickedEdgeLabelColor, boolean rotateEdgeLabels) { - super(); - this.pickedEdgeLabelColor = pickedEdgeLabelColor; - this.rotateEdgeLabels = rotateEdgeLabels; - setOpaque(true); - setBorder(noFocusBorder); - } - - /** - * @return Returns the rotateEdgeLabels. - */ - public boolean isRotateEdgeLabels() { - return rotateEdgeLabels; - } - /** - * @param rotateEdgeLabels The rotateEdgeLabels to set. - */ - public void setRotateEdgeLabels(boolean rotateEdgeLabels) { - this.rotateEdgeLabels = rotateEdgeLabels; - } - /** - * Overrides JComponent.setForeground to assign - * the unselected-foreground color to the specified color. - * - * @param c set the foreground color to this value - */ - @Override - public void setForeground(Color c) { - super.setForeground(c); - } - - /** - * Overrides JComponent.setBackground to assign - * the unselected-background color to the specified color. - * - * @param c set the background color to this value - */ - @Override - public void setBackground(Color c) { - super.setBackground(c); - } - - /** - * Notification from the UIManager that the look and feel - * [L&F] has changed. - * Replaces the current UI object with the latest version from the - * UIManager. - * - * @see JComponent#updateUI - */ - @Override - public void updateUI() { - super.updateUI(); - setForeground(null); - setBackground(null); - } - - /** - * - * Returns the default label renderer for an Edge - * - * @param vv the VisualizationViewer to render on - * @param value the value to assign to the label for - * Edge - * @param edge the Edge - * @return the default label renderer - */ - public Component getEdgeLabelRendererComponent(JComponent vv, Object value, - Font font, boolean isSelected, E edge) { - - super.setForeground(vv.getForeground()); - if(isSelected) setForeground(pickedEdgeLabelColor); - super.setBackground(vv.getBackground()); - - if(font != null) { - setFont(font); - } else { - setFont(vv.getFont()); - } - setIcon(null); - setBorder(noFocusBorder); - setValue(value); - return this; - } - - /* - * The following methods are overridden as a performance measure to - * to prune code-paths are often called in the case of renders - * but which we know are unnecessary. Great care should be taken - * when writing your own renderer to weigh the benefits and - * drawbacks of overriding methods like these. - */ - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public boolean isOpaque() { - Color back = getBackground(); - Component p = getParent(); - if (p != null) { - p = p.getParent(); - } - boolean colorMatch = (back != null) && (p != null) && - back.equals(p.getBackground()) && - p.isOpaque(); - return !colorMatch && super.isOpaque(); - } - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void validate() {} - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. 
- */ - @Override - public void revalidate() {} - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void repaint(long tm, int x, int y, int width, int height) {} - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void repaint(Rectangle r) { } - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - protected void firePropertyChange(String propertyName, Object oldValue, Object newValue) { - // Strings get interned... - if (propertyName=="text") { - super.firePropertyChange(propertyName, oldValue, newValue); - } - } - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void firePropertyChange(String propertyName, boolean oldValue, boolean newValue) { } - - /** - * Sets the String object for the cell being rendered to - * value. - * - * @param value the string value for this cell; if value is - * null it sets the text value to an empty string - * @see JLabel#setText - * - */ - protected void setValue(Object value) { - setText((value == null) ? "" : value.toString()); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/DefaultVertexLabelRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/DefaultVertexLabelRenderer.java deleted file mode 100644 index 7948f33f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/DefaultVertexLabelRenderer.java +++ /dev/null @@ -1,204 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Apr 14, 2005 - */ - -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Color; -import java.awt.Component; -import java.awt.Font; -import java.awt.Rectangle; -import java.io.Serializable; - -import javax.swing.JComponent; -import javax.swing.JLabel; -import javax.swing.border.Border; -import javax.swing.border.EmptyBorder; - -/** - * DefaultVertexLabelRenderer is similar to the cell renderers - * used by the JTable and JTree jfc classes. - * - * @author Tom Nelson - * - * - */ -@SuppressWarnings("serial") -public class DefaultVertexLabelRenderer extends JLabel implements - VertexLabelRenderer, Serializable { - - protected static Border noFocusBorder = new EmptyBorder(0,0,0,0); - - protected Color pickedVertexLabelColor = Color.black; - - /** - * Creates a default table cell renderer. - */ - public DefaultVertexLabelRenderer(Color pickedVertexLabelColor) { - this.pickedVertexLabelColor = pickedVertexLabelColor; - setOpaque(true); - setBorder(noFocusBorder); - } - - /** - * Overrides JComponent.setForeground to assign - * the unselected-foreground color to the specified color. - * - * @param c set the foreground color to this value - */ - @Override - public void setForeground(Color c) { - super.setForeground(c); - } - - /** - * Overrides JComponent.setBackground to assign - * the unselected-background color to the specified color. - * - * @param c set the background color to this value - */ - @Override - public void setBackground(Color c) { - super.setBackground(c); - } - - /** - * Notification from the UIManager that the look and feel - * [L&F] has changed. 
- * Replaces the current UI object with the latest version from the - * UIManager. - * - * @see JComponent#updateUI - */ - @Override - public void updateUI() { - super.updateUI(); - setForeground(null); - setBackground(null); - } - - /** - * - * Returns the default label renderer for a Vertex - * - * @param vv the VisualizationViewer to render on - * @param value the value to assign to the label for - * Vertex - * @param vertex the Vertex - * @return the default label renderer - */ - public Component getVertexLabelRendererComponent(JComponent vv, Object value, - Font font, boolean isSelected, V vertex) { - - super.setForeground(vv.getForeground()); - if(isSelected) setForeground(pickedVertexLabelColor); - super.setBackground(vv.getBackground()); - if(font != null) { - setFont(font); - } else { - setFont(vv.getFont()); - } - setIcon(null); - setBorder(noFocusBorder); - setValue(value); - return this; - } - - /* - * The following methods are overridden as a performance measure to - * to prune code-paths are often called in the case of renders - * but which we know are unnecessary. Great care should be taken - * when writing your own renderer to weigh the benefits and - * drawbacks of overriding methods like these. - */ - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public boolean isOpaque() { - Color back = getBackground(); - Component p = getParent(); - if (p != null) { - p = p.getParent(); - } - boolean colorMatch = (back != null) && (p != null) && - back.equals(p.getBackground()) && - p.isOpaque(); - return !colorMatch && super.isOpaque(); - } - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void validate() {} - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void revalidate() {} - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void repaint(long tm, int x, int y, int width, int height) {} - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void repaint(Rectangle r) { } - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - protected void firePropertyChange(String propertyName, Object oldValue, Object newValue) { - // Strings get interned... - if (propertyName=="text") { - super.firePropertyChange(propertyName, oldValue, newValue); - } - } - - /** - * Overridden for performance reasons. - * See the Implementation Note - * for more information. - */ - @Override - public void firePropertyChange(String propertyName, boolean oldValue, boolean newValue) { } - - /** - * Sets the String object for the cell being rendered to - * value. - * - * @param value the string value for this cell; if value is - * null it sets the text value to an empty string - * @see JLabel#setText - * - */ - protected void setValue(Object value) { - setText((value == null) ? 
"" : value.toString()); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/EdgeArrowRenderingSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/EdgeArrowRenderingSupport.java deleted file mode 100644 index 1fe426de..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/EdgeArrowRenderingSupport.java +++ /dev/null @@ -1,52 +0,0 @@ -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Line2D; - -import edu.uci.ics.jung.visualization.RenderContext; - -public interface EdgeArrowRenderingSupport { - - /** - * Returns a transform to position the arrowhead on this edge shape at the - * point where it intersects the passed vertex shape. - */ - AffineTransform getArrowTransform(RenderContext rc, - Shape edgeShape, Shape vertexShape); - - /** - * Returns a transform to position the arrowhead on this edge shape at the - * point where it intersects the passed vertex shape. - */ - AffineTransform getReverseArrowTransform( - RenderContext rc, Shape edgeShape, Shape vertexShape); - - /** - *

Returns a transform to position the arrowhead on this edge shape at the - * point where it intersects the passed vertex shape. - * - * The Loop edge is a special case because its starting point is not inside - * the vertex. The passedGo flag handles this case.
          - * - * @param edgeShape - * @param vertexShape - * @param passedGo - used only for Loop edges - */ - AffineTransform getReverseArrowTransform( - RenderContext rc, Shape edgeShape, Shape vertexShape, - boolean passedGo); - - /** - * This is used for the arrow of a directed and for one of the - * arrows for non-directed edges - * Get a transform to place the arrow shape on the passed edge at the - * point where it intersects the passed shape - * @param edgeShape - * @param vertexShape - * @return - */ - AffineTransform getArrowTransform(RenderContext rc, - Line2D edgeShape, Shape vertexShape); - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/EdgeLabelRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/EdgeLabelRenderer.java deleted file mode 100644 index afbf54b1..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/EdgeLabelRenderer.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Apr 14, 2005 - */ - -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Component; -import java.awt.Font; - -import javax.swing.JComponent; - -/** - * @author Tom Nelson - * - * - */ -public interface EdgeLabelRenderer { - /** - * Returns the component used for drawing the label. This method is - * used to configure the renderer appropriately before drawing. - * - * @param vv the JComponent that is asking the - * renderer to draw; can be null - * @param value the value of the cell to be rendered. It is - * up to the specific renderer to interpret - * and draw the value. For example, if - * value - * is the string "true", it could be rendered as a - * string or it could be rendered as a check - * box that is checked. null is a - * valid value - * @param vertex the vertex for the label being drawn. - */ - Component getEdgeLabelRendererComponent(JComponent vv, Object value, - Font font, boolean isSelected, T edge); - - boolean isRotateEdgeLabels(); - - void setRotateEdgeLabels(boolean state); -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/GradientVertexRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/GradientVertexRenderer.java deleted file mode 100644 index ad4d54b9..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/GradientVertexRenderer.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Aug 23, 2005 - */ -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Color; -import java.awt.Dimension; -import java.awt.GradientPaint; -import java.awt.Paint; -import java.awt.Rectangle; -import java.awt.Shape; -import java.awt.Stroke; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; - -import javax.swing.JComponent; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.picking.PickedState; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; - -/** - * A renderer that will fill vertex shapes with a GradientPaint - * @author Tom Nelson - * - * @param - * @param - */ -public class GradientVertexRenderer implements Renderer.Vertex { - - Color colorOne; - Color colorTwo; - Color pickedColorOne; - Color pickedColorTwo; - PickedState pickedState; - boolean cyclic; - - - public GradientVertexRenderer(Color colorOne, Color colorTwo, boolean cyclic) { - this.colorOne = colorOne; - this.colorTwo = colorTwo; - this.cyclic = cyclic; - } - - - public GradientVertexRenderer(Color colorOne, Color colorTwo, Color pickedColorOne, Color pickedColorTwo, PickedState pickedState, boolean cyclic) { - this.colorOne = colorOne; - this.colorTwo = colorTwo; - this.pickedColorOne = pickedColorOne; - this.pickedColorTwo = pickedColorTwo; - this.pickedState = pickedState; - this.cyclic = cyclic; - } - - - public void paintVertex(RenderContext rc, Layout layout, V v) { - Graph graph = layout.getGraph(); - if (rc.getVertexIncludePredicate().evaluate(Context.,V>getInstance(graph,v))) { - boolean vertexHit = true; - // get the shape to be rendered - Shape shape = rc.getVertexShapeTransformer().transform(v); - - Point2D p = layout.transform(v); - p = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, p); - - float x = (float)p.getX(); - float y = (float)p.getY(); - - // create a transform that translates to the location of - // the vertex to be rendered - AffineTransform xform = AffineTransform.getTranslateInstance(x,y); - // transform the vertex shape with xtransform - shape = xform.createTransformedShape(shape); - - vertexHit = vertexHit(rc, shape); - //rc.getViewTransformer().transform(shape).intersects(deviceRectangle); - - if (vertexHit) { - paintShapeForVertex(rc, v, shape); - } - } - } - - protected boolean vertexHit(RenderContext rc, Shape s) { - JComponent vv = rc.getScreenDevice(); - Rectangle deviceRectangle = null; - if(vv != null) { - Dimension d = vv.getSize(); - deviceRectangle = new Rectangle( - 0,0, - d.width,d.height); - } - return rc.getMultiLayerTransformer().getTransformer(Layer.VIEW).transform(s).intersects(deviceRectangle); - } - - protected void paintShapeForVertex(RenderContext rc, V v, Shape shape) { - GraphicsDecorator g = rc.getGraphicsContext(); - Paint oldPaint = g.getPaint(); - Rectangle r = shape.getBounds(); - float y2 = (float)r.getMaxY(); - if(cyclic) { - y2 = (float)(r.getMinY()+r.getHeight()/2); - } - - Paint fillPaint = null; - if(pickedState != null && pickedState.isPicked(v)) { - fillPaint = new GradientPaint((float)r.getMinX(), (float)r.getMinY(), pickedColorOne, - (float)r.getMinX(), y2, pickedColorTwo, cyclic); - } else { - fillPaint = new GradientPaint((float)r.getMinX(), (float)r.getMinY(), colorOne, - (float)r.getMinX(), y2, colorTwo, cyclic); - } - if(fillPaint != null) { - 
g.setPaint(fillPaint); - g.fill(shape); - g.setPaint(oldPaint); - } - Paint drawPaint = rc.getVertexDrawPaintTransformer().transform(v); - if(drawPaint != null) { - g.setPaint(drawPaint); - } - Stroke oldStroke = g.getStroke(); - Stroke stroke = rc.getVertexStrokeTransformer().transform(v); - if(stroke != null) { - g.setStroke(stroke); - } - g.draw(shape); - g.setPaint(oldPaint); - g.setStroke(oldStroke); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/Renderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/Renderer.java deleted file mode 100644 index d01c79d3..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/Renderer.java +++ /dev/null @@ -1,88 +0,0 @@ -/* -* Copyright (c) 2003, the JUNG Project and the Regents of the University -* of California -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* http://jung.sourceforge.net/license.txt for a description. -*/ -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Dimension; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.RenderContext; - -/** - * The interface for drawing vertices, edges, and their labels. - * Implementations of this class can set specific renderers for - * each element, allowing custom control of each. - */ -public interface Renderer { - - void render(RenderContext rc, Layout layout); - void renderVertex(RenderContext rc, Layout layout, V v); - void renderVertexLabel(RenderContext rc, Layout layout, V v); - void renderEdge(RenderContext rc, Layout layout, E e); - void renderEdgeLabel(RenderContext rc, Layout layout, E e); - void setVertexRenderer(Renderer.Vertex r); - void setEdgeRenderer(Renderer.Edge r); - void setVertexLabelRenderer(Renderer.VertexLabel r); - void setEdgeLabelRenderer(Renderer.EdgeLabel r); - Renderer.VertexLabel getVertexLabelRenderer(); - Renderer.Vertex getVertexRenderer(); - Renderer.Edge getEdgeRenderer(); - Renderer.EdgeLabel getEdgeLabelRenderer(); - - interface Vertex { - void paintVertex(RenderContext rc, Layout layout, V v); - class NOOP implements Vertex { - public void paintVertex(RenderContext rc, Layout layout, Object v) {} - }; - } - - interface Edge { - void paintEdge(RenderContext rc, Layout layout, E e); - EdgeArrowRenderingSupport getEdgeArrowRenderingSupport(); - void setEdgeArrowRenderingSupport(EdgeArrowRenderingSupport edgeArrowRenderingSupport); - class NOOP implements Edge { - public void paintEdge(RenderContext rc, Layout layout, Object e) {} - public EdgeArrowRenderingSupport getEdgeArrowRenderingSupport(){return null;} - public void setEdgeArrowRenderingSupport(EdgeArrowRenderingSupport edgeArrowRenderingSupport){} - } - } - - interface VertexLabel { - void labelVertex(RenderContext rc, Layout layout, V v, String label); - Position getPosition(); - void setPosition(Position position); - void setPositioner(Positioner positioner); - Positioner getPositioner(); - class NOOP implements VertexLabel { - public void labelVertex(RenderContext rc, Layout layout, Object v, String label) {} - public Position getPosition() { return Position.CNTR; } - public void setPosition(Position position) {} - public Positioner getPositioner() { - return new Positioner() { - public Position getPosition(Object v, float x, float y, Dimension d) { - return Position.CNTR; - }}; - } - public void setPositioner(Positioner positioner) { - } - } - enum Position { N, NE, E, SE, S, SW, W, NW, CNTR, AUTO } - interface Positioner { 
- Position getPosition(V vertex, float x, float y, Dimension d); - } - - } - - interface EdgeLabel { - void labelEdge(RenderContext rc, Layout layout, E e, String label); - class NOOP implements EdgeLabel { - public void labelEdge(RenderContext rc, Layout layout, Object e, String label) {} - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/ReshapingEdgeRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/ReshapingEdgeRenderer.java deleted file mode 100644 index e885ae73..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/ReshapingEdgeRenderer.java +++ /dev/null @@ -1,416 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 23, 2005 - */ -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Dimension; -import java.awt.Paint; -import java.awt.Rectangle; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.GeneralPath; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.awt.geom.RectangularShape; - -import javax.swing.JComponent; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.graph.util.EdgeType; -import edu.uci.ics.jung.graph.util.Pair; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.transform.LensTransformer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; -import edu.uci.ics.jung.visualization.transform.shape.TransformingGraphics; - -/** - * uses a flatness argument to break edges into - * smaller segments. This produces a more detailed - * transformation of the edge shape - * - * @author Tom Nelson - tomnelson@dev.java.net - * - * @param - * @param - */ -public class ReshapingEdgeRenderer extends BasicEdgeRenderer - implements Renderer.Edge { - - /** - * Draws the edge e, whose endpoints are at (x1,y1) - * and (x2,y2), on the graphics context g. - * The Shape provided by the EdgeShapeFunction instance - * is scaled in the x-direction so that its width is equal to the distance between - * (x1,y1) and (x2,y2). 
- */ - protected void drawSimpleEdge(RenderContext rc, Layout layout, E e) { - - TransformingGraphics g = (TransformingGraphics)rc.getGraphicsContext(); - Graph graph = layout.getGraph(); - Pair endpoints = graph.getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - Point2D p1 = layout.transform(v1); - Point2D p2 = layout.transform(v2); - p1 = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, p1); - p2 = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, p2); - float x1 = (float) p1.getX(); - float y1 = (float) p1.getY(); - float x2 = (float) p2.getX(); - float y2 = (float) p2.getY(); - - float flatness = 0; - MutableTransformer transformer = rc.getMultiLayerTransformer().getTransformer(Layer.VIEW); - if(transformer instanceof LensTransformer) { - LensTransformer ht = (LensTransformer)transformer; - RectangularShape lensShape = ht.getLensShape(); - if(lensShape.contains(x1,y1) || lensShape.contains(x2,y2)) { - flatness = .05f; - } - } - - boolean isLoop = v1.equals(v2); - Shape s2 = rc.getVertexShapeTransformer().transform(v2); - Shape edgeShape = rc.getEdgeShapeTransformer().transform(Context.,E>getInstance(graph, e)); - - boolean edgeHit = true; - boolean arrowHit = true; - Rectangle deviceRectangle = null; - JComponent vv = rc.getScreenDevice(); - if(vv != null) { - Dimension d = vv.getSize(); - deviceRectangle = new Rectangle(0,0,d.width,d.height); - } - - AffineTransform xform = AffineTransform.getTranslateInstance(x1, y1); - - if(isLoop) { - // this is a self-loop. scale it is larger than the vertex - // it decorates and translate it so that its nadir is - // at the center of the vertex. - Rectangle2D s2Bounds = s2.getBounds2D(); - xform.scale(s2Bounds.getWidth(),s2Bounds.getHeight()); - xform.translate(0, -edgeShape.getBounds2D().getWidth()/2); - } else { - // this is a normal edge. 
Rotate it to the angle between - // vertex endpoints, then scale it to the distance between - // the vertices - float dx = x2-x1; - float dy = y2-y1; - float thetaRadians = (float) Math.atan2(dy, dx); - xform.rotate(thetaRadians); - float dist = (float) Math.sqrt(dx*dx + dy*dy); - xform.scale(dist, 1.0); - } - - edgeShape = xform.createTransformedShape(edgeShape); - - MutableTransformer vt = rc.getMultiLayerTransformer().getTransformer(Layer.VIEW); - if(vt instanceof LensTransformer) { - vt = ((LensTransformer)vt).getDelegate(); - } - edgeHit = vt.transform(edgeShape).intersects(deviceRectangle); - - if(edgeHit == true) { - - Paint oldPaint = g.getPaint(); - - // get Paints for filling and drawing - // (filling is done first so that drawing and label use same Paint) - Paint fill_paint = rc.getEdgeFillPaintTransformer().transform(e); - if (fill_paint != null) - { - g.setPaint(fill_paint); - g.fill(edgeShape, flatness); - } - Paint draw_paint = rc.getEdgeDrawPaintTransformer().transform(e); - if (draw_paint != null) - { - g.setPaint(draw_paint); - g.draw(edgeShape, flatness); - } - - float scalex = (float)g.getTransform().getScaleX(); - float scaley = (float)g.getTransform().getScaleY(); - // see if arrows are too small to bother drawing - if(scalex < .3 || scaley < .3) return; - - if (rc.getEdgeArrowPredicate().evaluate(Context.,E>getInstance(graph, e))) { - - Shape destVertexShape = - rc.getVertexShapeTransformer().transform(graph.getEndpoints(e).getSecond()); - - AffineTransform xf = AffineTransform.getTranslateInstance(x2, y2); - destVertexShape = xf.createTransformedShape(destVertexShape); - - arrowHit = rc.getMultiLayerTransformer().getTransformer(Layer.VIEW).transform(destVertexShape).intersects(deviceRectangle); - if(arrowHit) { - - AffineTransform at = - edgeArrowRenderingSupport.getArrowTransform(rc, new GeneralPath(edgeShape), destVertexShape); - if(at == null) return; - Shape arrow = rc.getEdgeArrowTransformer().transform(Context.,E>getInstance(graph, e)); - arrow = at.createTransformedShape(arrow); - g.setPaint(rc.getArrowFillPaintTransformer().transform(e)); - g.fill(arrow); - g.setPaint(rc.getArrowDrawPaintTransformer().transform(e)); - g.draw(arrow); - } - if (graph.getEdgeType(e) == EdgeType.UNDIRECTED) { - Shape vertexShape = - rc.getVertexShapeTransformer().transform(graph.getEndpoints(e).getFirst()); - xf = AffineTransform.getTranslateInstance(x1, y1); - vertexShape = xf.createTransformedShape(vertexShape); - - arrowHit = rc.getMultiLayerTransformer().getTransformer(Layer.VIEW).transform(vertexShape).intersects(deviceRectangle); - - if(arrowHit) { - AffineTransform at = edgeArrowRenderingSupport.getReverseArrowTransform(rc, new GeneralPath(edgeShape), vertexShape, !isLoop); - if(at == null) return; - Shape arrow = rc.getEdgeArrowTransformer().transform(Context.,E>getInstance(graph, e)); - arrow = at.createTransformedShape(arrow); - g.setPaint(rc.getArrowFillPaintTransformer().transform(e)); - g.fill(arrow); - g.setPaint(rc.getArrowDrawPaintTransformer().transform(e)); - g.draw(arrow); - } - } - } - // use existing paint for text if no draw paint specified - if (draw_paint == null) - g.setPaint(oldPaint); -// String label = edgeStringer.getLabel(e); -// if (label != null) { -// labelEdge(g, graph, e, label, x1, x2, y1, y2); -// } - - - // restore old paint - g.setPaint(oldPaint); - } - } - - /** - * Returns a transform to position the arrowhead on this edge shape at the - * point where it intersects the passed vertex shape. 
- */ -// public AffineTransform getArrowTransform(RenderContext rc, GeneralPath edgeShape, Shape vertexShape) { -// float[] seg = new float[6]; -// Point2D p1=null; -// Point2D p2=null; -// AffineTransform at = new AffineTransform(); -// // when the PathIterator is done, switch to the line-subdivide -// // method to get the arrowhead closer. -// for(PathIterator i=edgeShape.getPathIterator(null,1); !i.isDone(); i.next()) { -// int ret = i.currentSegment(seg); -// if(ret == PathIterator.SEG_MOVETO) { -// p2 = new Point2D.Float(seg[0],seg[1]); -// } else if(ret == PathIterator.SEG_LINETO) { -// p1 = p2; -// p2 = new Point2D.Float(seg[0],seg[1]); -// if(vertexShape.contains(p2)) { -// at = getArrowTransform(rc, new Line2D.Float(p1,p2),vertexShape); -// break; -// } -// } -// } -// return at; -// } - - /** - * Returns a transform to position the arrowhead on this edge shape at the - * point where it intersects the passed vertex shape. - */ -// public AffineTransform getReverseArrowTransform(RenderContext rc, GeneralPath edgeShape, Shape vertexShape) { -// return getReverseArrowTransform(rc, edgeShape, vertexShape, true); -// } - - /** - *

Returns a transform to position the arrowhead on this edge shape at the - * point where it intersects the passed vertex shape. - * - * The Loop edge is a special case because its starting point is not inside - * the vertex. The passedGo flag handles this case.
          - * - * @param edgeShape - * @param vertexShape - * @param passedGo - used only for Loop edges - */ -// public AffineTransform getReverseArrowTransform(RenderContext rc, GeneralPath edgeShape, Shape vertexShape, -// boolean passedGo) { -// float[] seg = new float[6]; -// Point2D p1=null; -// Point2D p2=null; -// -// AffineTransform at = new AffineTransform(); -// for(PathIterator i=edgeShape.getPathIterator(null,1); !i.isDone(); i.next()) { -// int ret = i.currentSegment(seg); -// if(ret == PathIterator.SEG_MOVETO) { -// p2 = new Point2D.Float(seg[0],seg[1]); -// } else if(ret == PathIterator.SEG_LINETO) { -// p1 = p2; -// p2 = new Point2D.Float(seg[0],seg[1]); -// if(passedGo == false && vertexShape.contains(p2)) { -// passedGo = true; -// } else if(passedGo==true && -// vertexShape.contains(p2)==false) { -// at = getReverseArrowTransform(rc, new Line2D.Float(p1,p2),vertexShape); -// break; -// } -// } -// } -// return at; -// } - - /** - * This is used for the arrow of a directed and for one of the - * arrows for non-directed edges - * Get a transform to place the arrow shape on the passed edge at the - * point where it intersects the passed shape - * @param edgeShape - * @param vertexShape - * @return - */ -// public AffineTransform getArrowTransform(RenderContext rc, Line2D edgeShape, Shape vertexShape) { -// float dx = (float) (edgeShape.getX1()-edgeShape.getX2()); -// float dy = (float) (edgeShape.getY1()-edgeShape.getY2()); -// // iterate over the line until the edge shape will place the -// // arrowhead closer than 'arrowGap' to the vertex shape boundary -// while((dx*dx+dy*dy) > rc.getArrowPlacementTolerance()) { -// try { -// edgeShape = getLastOutsideSegment(edgeShape, vertexShape); -// } catch(IllegalArgumentException e) { -// System.err.println(e.toString()); -// return null; -// } -// dx = (float) (edgeShape.getX1()-edgeShape.getX2()); -// dy = (float) (edgeShape.getY1()-edgeShape.getY2()); -// } -// double atheta = Math.atan2(dx,dy)+Math.PI/2; -// AffineTransform at = -// AffineTransform.getTranslateInstance(edgeShape.getX1(), edgeShape.getY1()); -// at.rotate(-atheta); -// return at; -// } - - /** - * This is used for the reverse-arrow of a non-directed edge - * get a transform to place the arrow shape on the passed edge at the - * point where it intersects the passed shape - * @param edgeShape - * @param vertexShape - * @return - */ -// protected AffineTransform getReverseArrowTransform(RenderContext rc, Line2D edgeShape, Shape vertexShape) { -// float dx = (float) (edgeShape.getX1()-edgeShape.getX2()); -// float dy = (float) (edgeShape.getY1()-edgeShape.getY2()); -// // iterate over the line until the edge shape will place the -// // arrowhead closer than 'arrowGap' to the vertex shape boundary -// while((dx*dx+dy*dy) > rc.getArrowPlacementTolerance()) { -// try { -// edgeShape = getFirstOutsideSegment(edgeShape, vertexShape); -// } catch(IllegalArgumentException e) { -// System.err.println(e.toString()); -// return null; -// } -// dx = (float) (edgeShape.getX1()-edgeShape.getX2()); -// dy = (float) (edgeShape.getY1()-edgeShape.getY2()); -// } -// // calculate the angle for the arrowhead -// double atheta = Math.atan2(dx,dy)-Math.PI/2; -// AffineTransform at = AffineTransform.getTranslateInstance(edgeShape.getX1(),edgeShape.getY1()); -// at.rotate(-atheta); -// return at; -// } - - /** - * Passed Line's point2 must be inside the passed shape or - * an IllegalArgumentException is thrown - * @param line line to subdivide - * @param shape shape to compare with line 
- * @return a line that intersects the shape boundary - * @throws IllegalArgumentException if the passed line's point1 is not inside the shape - */ -// protected Line2D getLastOutsideSegment(Line2D line, Shape shape) { -// if(shape.contains(line.getP2())==false) { -// String errorString = -// "line end point: "+line.getP2()+" is not contained in shape: "+shape.getBounds2D(); -// throw new IllegalArgumentException(errorString); -// //return null; -// } -// Line2D left = new Line2D.Double(); -// Line2D right = new Line2D.Double(); -// // subdivide the line until its left segment intersects -// // the shape boundary -// do { -// subdivide(line, left, right); -// line = right; -// } while(shape.contains(line.getP1())==false); -// // now that right is completely inside shape, -// // return left, which must be partially outside -// return left; -// } - - /** - * Passed Line's point1 must be inside the passed shape or - * an IllegalArgumentException is thrown - * @param line line to subdivide - * @param shape shape to compare with line - * @return a line that intersects the shape boundary - * @throws IllegalArgumentException if the passed line's point1 is not inside the shape - */ -// protected Line2D getFirstOutsideSegment(Line2D line, Shape shape) { -// -// if(shape.contains(line.getP1())==false) { -// String errorString = -// "line start point: "+line.getP1()+" is not contained in shape: "+shape.getBounds2D(); -// throw new IllegalArgumentException(errorString); -// } -// Line2D left = new Line2D.Float(); -// Line2D right = new Line2D.Float(); -// // subdivide the line until its right side intersects the -// // shape boundary -// do { -// subdivide(line, left, right); -// line = left; -// } while(shape.contains(line.getP2())==false); -// // now that left is completely inside shape, -// // return right, which must be partially outside -// return right; -// } - - /** - * divide a Line2D into 2 new Line2Ds that are returned - * in the passed left and right instances, if non-null - * @param src the line to divide - * @param left the left side, or null - * @param right the right side, or null - */ -// protected void subdivide(Line2D src, -// Line2D left, -// Line2D right) { -// double x1 = src.getX1(); -// double y1 = src.getY1(); -// double x2 = src.getX2(); -// double y2 = src.getY2(); -// -// double mx = x1 + (x2-x1)/2.0; -// double my = y1 + (y2-y1)/2.0; -// if (left != null) { -// left.setLine(x1, y1, mx, my); -// } -// if (right != null) { -// right.setLine(mx, my, x2, y2); -// } -// } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/VertexLabelAsShapeRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/VertexLabelAsShapeRenderer.java deleted file mode 100644 index a2d31be0..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/VertexLabelAsShapeRenderer.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Aug 23, 2005 - */ -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Component; -import java.awt.Dimension; -import java.awt.Rectangle; -import java.awt.Shape; -import java.awt.geom.Point2D; -import java.util.HashMap; -import java.util.Map; - -import org.apache.commons.collections15.Transformer; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; - -/** - * Renders Vertex Labels, but can also supply Shapes for vertices. - * This has the effect of making the vertex label the actual vertex - * shape. The user will probably want to center the vertex label - * on the vertex location. - * - * @author Tom Nelson - * - * @param - * @param - */ -public class VertexLabelAsShapeRenderer - implements Renderer.VertexLabel, Transformer { - - protected Map shapes = new HashMap(); - protected RenderContext rc; - - public VertexLabelAsShapeRenderer(RenderContext rc) { - this.rc = rc; - } - - public Component prepareRenderer(RenderContext rc, VertexLabelRenderer graphLabelRenderer, Object value, - boolean isSelected, V vertex) { - return rc.getVertexLabelRenderer().getVertexLabelRendererComponent(rc.getScreenDevice(), value, - rc.getVertexFontTransformer().transform(vertex), isSelected, vertex); - } - - /** - * Labels the specified vertex with the specified label. - * Uses the font specified by this instance's - * VertexFontFunction. (If the font is unspecified, the existing - * font for the graphics context is used.) If vertex label centering - * is active, the label is centered on the position of the vertex; otherwise - * the label is offset slightly. 
- */ - public void labelVertex(RenderContext rc, Layout layout, V v, String label) { - Graph graph = layout.getGraph(); - if (rc.getVertexIncludePredicate().evaluate(Context.,V>getInstance(graph,v)) == false) { - return; - } - GraphicsDecorator g = rc.getGraphicsContext(); - Component component = prepareRenderer(rc, rc.getVertexLabelRenderer(), label, - rc.getPickedVertexState().isPicked(v), v); - Dimension d = component.getPreferredSize(); - - int h_offset = -d.width / 2; - int v_offset = -d.height / 2; - - Point2D p = layout.transform(v); - p = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, p); - - int x = (int)p.getX(); - int y = (int)p.getY(); - - g.draw(component, rc.getRendererPane(), x+h_offset, y+v_offset, d.width, d.height, true); - - Dimension size = component.getPreferredSize(); - Rectangle bounds = new Rectangle(-size.width/2 -2, -size.height/2 -2, size.width+4, size.height); - shapes.put(v, bounds); - } - - public Shape transform(V v) { - Component component = prepareRenderer(rc, rc.getVertexLabelRenderer(), rc.getVertexLabelTransformer().transform(v), - rc.getPickedVertexState().isPicked(v), v); - Dimension size = component.getPreferredSize(); - Rectangle bounds = new Rectangle(-size.width/2 -2, -size.height/2 -2, size.width+4, size.height); - return bounds; -// Shape shape = shapes.get(v); -// if(shape == null) { -// return new Rectangle(-20,-20,40,40); -// } -// else return shape; - } - - public Renderer.VertexLabel.Position getPosition() { - return Renderer.VertexLabel.Position.CNTR; - } - - public Renderer.VertexLabel.Positioner getPositioner() { - return new Positioner() { - public Renderer.VertexLabel.Position getPosition(V v, float x, float y, Dimension d) { - return Renderer.VertexLabel.Position.CNTR; - }}; - } - - public void setPosition(Renderer.VertexLabel.Position position) { - // noop - } - - public void setPositioner(Renderer.VertexLabel.Positioner positioner) { - //noop - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/VertexLabelRenderer.java b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/VertexLabelRenderer.java deleted file mode 100644 index 007c1695..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/VertexLabelRenderer.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Apr 14, 2005 - */ - -package edu.uci.ics.jung.visualization.renderers; - -import java.awt.Component; -import java.awt.Font; - -import javax.swing.JComponent; - -/** - * @author Tom Nelson - * - * - */ -public interface VertexLabelRenderer { - /** - * Returns the component used for drawing the label. This method is - * used to configure the renderer appropriately before drawing. - * - * @param vv the JComponent that is asking the - * renderer to draw; can be null - * @param value the value of the cell to be rendered. It is - * up to the specific renderer to interpret - * and draw the value. For example, if - * value - * is the string "true", it could be rendered as a - * string or it could be rendered as a check - * box that is checked. null is a - * valid value - * @param vertex the vertex for the label being drawn. 
- */ - Component getVertexLabelRendererComponent(JComponent vv, Object value, - Font font, boolean isSelected, T vertex); -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/package.html b/gui/jung-src/edu/uci/ics/jung/visualization/renderers/package.html deleted file mode 100644 index 21f699a2..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/renderers/package.html +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - -

          Visualization mechanisms relating to rendering. - - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/subLayout/GraphCollapser.java b/gui/jung-src/edu/uci/ics/jung/visualization/subLayout/GraphCollapser.java deleted file mode 100644 index c78369b3..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/subLayout/GraphCollapser.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 23, 2005 - */ -package edu.uci.ics.jung.visualization.subLayout; - -import java.util.Collection; -import java.util.logging.Logger; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; -import java.util.logging.Level; - -public class GraphCollapser { - - private static final Logger logger = Logger.getLogger(GraphCollapser.class.getClass().getName()); - private Graph originalGraph; - - public GraphCollapser(Graph originalGraph) { - this.originalGraph = originalGraph; - } - - Graph createGraph() throws InstantiationException, IllegalAccessException { - return (Graph)originalGraph.getClass().newInstance(); - } - - @SuppressWarnings("unchecked") - public Graph collapse(Graph inGraph, Graph clusterGraph) { - - if(clusterGraph.getVertexCount() < 2) return inGraph; - - Graph graph = inGraph; - try { - graph = createGraph(); - } catch(Exception ex) { - logger.log(Level.FINE, "Could not create graph", ex); - } - Collection cluster = clusterGraph.getVertices(); - - // add all vertices in the delegate, unless the vertex is in the - // cluster. - for(Object v : inGraph.getVertices()) { - if(cluster.contains(v) == false) { - graph.addVertex(v); - } - } - // add the clusterGraph as a vertex - graph.addVertex(clusterGraph); - - //add all edges from the inGraph, unless both endpoints of - // the edge are in the cluster - for(Object e : (Collection)inGraph.getEdges()) { - Pair endpoints = inGraph.getEndpoints(e); - // don't add edges whose endpoints are both in the cluster - if(cluster.containsAll(endpoints) == false) { - - if(cluster.contains(endpoints.getFirst())) { - graph.addEdge(e, clusterGraph, endpoints.getSecond(), inGraph.getEdgeType(e)); - - } else if(cluster.contains(endpoints.getSecond())) { - graph.addEdge(e, endpoints.getFirst(), clusterGraph, inGraph.getEdgeType(e)); - - } else { - graph.addEdge(e,endpoints.getFirst(), endpoints.getSecond(), inGraph.getEdgeType(e)); - } - } - } - return graph; - } - - @SuppressWarnings("unchecked") - public Graph expand(Graph inGraph, Graph clusterGraph) { - Graph graph = inGraph; - try { - graph = createGraph(); - } catch(Exception ex) { - logger.log(Level.FINE, "Could not create graph", ex); - } - Collection cluster = clusterGraph.getVertices(); - logger.log(Level.FINE, "cluster to expand is {0}", cluster); - - // put all clusterGraph vertices and edges into the new Graph - for(Object v : cluster) { - graph.addVertex(v); - for(Object edge : clusterGraph.getIncidentEdges(v)) { - Pair endpoints = clusterGraph.getEndpoints(edge); - graph.addEdge(edge, endpoints.getFirst(), endpoints.getSecond(), clusterGraph.getEdgeType(edge)); - } - } - // add all the vertices from the current graph except for - // the cluster we are expanding - for(Object v : inGraph.getVertices()) { - if(v.equals(clusterGraph) == false) { - graph.addVertex(v); - } - } - - // now that all 
vertices have been added, add the edges, - // ensuring that no edge contains a vertex that has not - // already been added - for(Object v : inGraph.getVertices()) { - if(v.equals(clusterGraph) == false) { - for(Object edge : inGraph.getIncidentEdges(v)) { - Pair endpoints = inGraph.getEndpoints(edge); - Object v1 = endpoints.getFirst(); - Object v2 = endpoints.getSecond(); - if(cluster.containsAll(endpoints) == false) { - if(clusterGraph.equals(v1)) { - // i need a new v1 - Object originalV1 = originalGraph.getEndpoints(edge).getFirst(); - Object newV1 = findVertex(graph, originalV1); - assert newV1 != null : "newV1 for "+originalV1+" was not found!"; - graph.addEdge(edge, newV1, v2, inGraph.getEdgeType(edge)); - } else if(clusterGraph.equals(v2)) { - // i need a new v2 - Object originalV2 = originalGraph.getEndpoints(edge).getSecond(); - Object newV2 = findVertex(graph, originalV2); - assert newV2 != null : "newV2 for "+originalV2+" was not found!"; - graph.addEdge(edge, v1, newV2, inGraph.getEdgeType(edge)); - } else { - graph.addEdge(edge, v1, v2, inGraph.getEdgeType(edge)); - } - } - } - } - } - return graph; - } - Object findVertex(Graph inGraph, Object vertex) { - Collection vertices = inGraph.getVertices(); - if(vertices.contains(vertex)) { - return vertex; - } - for(Object v : vertices) { - if(v instanceof Graph) { - Graph g = (Graph)v; - if(contains(g, vertex)) { - return v; - } - } - } - return null; - } - - private boolean contains(Graph inGraph, Object vertex) { - boolean contained = false; - if(inGraph.getVertices().contains(vertex)) return true; - for(Object v : inGraph.getVertices()) { - if(v instanceof Graph) { - contained |= contains((Graph)v, vertex); - } - } - return contained; - } - - @SuppressWarnings("unchecked") - public Graph getClusterGraph(Graph inGraph, Collection picked) { - Graph clusterGraph; - try { - clusterGraph = createGraph(); - } catch(Exception ex) { - logger.log(Level.FINE, "Could not create graph", ex); - return null; - } - for(Object v : picked) { - clusterGraph.addVertex(v); - Collection edges = inGraph.getIncidentEdges(v); - for(Object edge : edges) { - Pair endpoints = inGraph.getEndpoints(edge); - Object v1 = endpoints.getFirst(); - Object v2 = endpoints.getSecond(); - if(picked.containsAll(endpoints)) { - clusterGraph.addEdge(edge, v1, v2, inGraph.getEdgeType(edge)); - } - } - } - return clusterGraph; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/subLayout/TreeCollapser.java b/gui/jung-src/edu/uci/ics/jung/visualization/subLayout/TreeCollapser.java deleted file mode 100644 index 5213f387..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/subLayout/TreeCollapser.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Aug 23, 2005 - */ -package edu.uci.ics.jung.visualization.subLayout; - -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.Forest; -import edu.uci.ics.jung.graph.util.TreeUtils; - -public class TreeCollapser { - - @SuppressWarnings("unchecked") - public void collapse(Layout layout, Forest tree, Object subRoot) throws InstantiationException, IllegalAccessException { - - // get a sub tree from subRoot - Forest subTree = TreeUtils.getSubTree(tree, subRoot); - Object parent = null; - Object edge = null; - if(tree.getPredecessorCount(subRoot) > 0) { - parent = tree.getPredecessors(subRoot).iterator().next(); - edge = tree.getInEdges(subRoot).iterator().next(); - } - tree.removeVertex(subRoot); - if(parent != null) { - tree.addEdge(edge, parent, subTree); - } else { - tree.addVertex(subTree); - } - - layout.setLocation(subTree, (Point2D)layout.transform(subRoot)); - } - - @SuppressWarnings("unchecked") - public void expand(Forest tree, Forest subTree) { - - Object parent = null; - Object edge = null; - if(tree.getPredecessorCount(subTree) > 0) { - parent = tree.getPredecessors(subTree).iterator().next(); - edge = tree.getInEdges(subTree).iterator().next(); - } -// Object edge = tree.getInEdges(subTree).iterator().next(); -// Object parent = tree.getPredecessors(subTree).iterator().next(); - tree.removeVertex(subTree); - TreeUtils.addSubTree(tree, subTree, parent, edge); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/subLayout/package.html b/gui/jung-src/edu/uci/ics/jung/visualization/subLayout/package.html deleted file mode 100644 index ba7fb591..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/subLayout/package.html +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - -

          Visualization mechanisms relating to grouping or hiding specified element sets. - - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/AbstractLensSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/AbstractLensSupport.java deleted file mode 100644 index 3a681e7b..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/AbstractLensSupport.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 21, 2005 - */ - -package edu.uci.ics.jung.visualization.transform; - -import java.awt.Color; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.Paint; -import java.awt.geom.RectangularShape; - -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.VisualizationServer.Paintable; -import edu.uci.ics.jung.visualization.control.ModalGraphMouse; -/** - * A class to make it easy to add an - * examining lens to a jung graph application. See HyperbolicTransformerDemo, - * ViewLensSupport and LayoutLensSupport - * for examples of how to use it. - * - * @author Tom Nelson - * - * - */ -public abstract class AbstractLensSupport implements LensSupport { - - protected VisualizationViewer vv; - protected VisualizationViewer.GraphMouse graphMouse; - protected LensTransformer lensTransformer; - protected ModalGraphMouse lensGraphMouse; - protected Lens lens; - protected LensControls lensControls; - protected String defaultToolTipText; - - protected static final String instructions = - "

<html><center>Mouse-Drag the Lens center to move it<p>"+ - "Mouse-Drag the Lens edge to resize it<p>"+ - "Ctrl+MouseWheel to change magnification</center></html>
          "; - - /** - * create the base class, setting common members and creating - * a custom GraphMouse - * @param vv the VisualizationViewer to work on - */ - public AbstractLensSupport(VisualizationViewer vv, ModalGraphMouse lensGraphMouse) { - this.vv = vv; - this.graphMouse = vv.getGraphMouse(); - this.defaultToolTipText = vv.getToolTipText(); - this.lensGraphMouse = lensGraphMouse; - } - - public void activate(boolean state) { - if(state) activate(); - else deactivate(); - } - - public LensTransformer getLensTransformer() { - return lensTransformer; - } - - /** - * @return Returns the hyperbolicGraphMouse. - */ - public ModalGraphMouse getGraphMouse() { - return lensGraphMouse; - } - - /** - * the background for the hyperbolic projection - * @author Tom Nelson - * - * - */ - public static class Lens implements Paintable { - LensTransformer lensTransformer; - RectangularShape lensShape; - Paint paint = Color.decode("0xdddddd"); - - public Lens(LensTransformer lensTransformer) { - this.lensTransformer = lensTransformer; - this.lensShape = lensTransformer.getLensShape(); - } - - /** - * @return the paint - */ - public Paint getPaint() { - return paint; - } - - /** - * @param paint the paint to set - */ - public void setPaint(Paint paint) { - this.paint = paint; - } - - /** - * @return Returns the hyperbolicTransformer. - */ - - public void paint(Graphics g) { - - Graphics2D g2d = (Graphics2D)g; - g2d.setPaint(paint); - g2d.fill(lensShape); - } - - public boolean useTransform() { - return true; - } - } - - /** - * the background for the hyperbolic projection - * @author Tom Nelson - * - * - */ - public static class LensControls implements Paintable { - LensTransformer lensTransformer; - RectangularShape lensShape; - Paint paint = Color.gray; - - public LensControls(LensTransformer lensTransformer) { - this.lensTransformer = lensTransformer; - this.lensShape = lensTransformer.getLensShape(); - } - - /** - * @return the paint - */ - public Paint getPaint() { - return paint; - } - - /** - * @param paint the paint to set - */ - public void setPaint(Paint paint) { - this.paint = paint; - } - - /** - * @return Returns the hyperbolicTransformer. - */ - - public void paint(Graphics g) { - - Graphics2D g2d = (Graphics2D)g; - g2d.setPaint(paint); - g2d.draw(lensShape); - int centerX = (int)Math.round(lensShape.getCenterX()); - int centerY = (int)Math.round(lensShape.getCenterY()); - g.drawOval(centerX-10, centerY-10, 20, 20); - } - - public boolean useTransform() { - return true; - } - } - - /** - * @return the lens - */ - public Lens getLens() { - return lens; - } - - /** - * @param lens the lens to set - */ - public void setLens(Lens lens) { - this.lens = lens; - } - - /** - * @return the lensControls - */ - public LensControls getLensControls() { - return lensControls; - } - - /** - * @param lensControls the lensControls to set - */ - public void setLensControls(LensControls lensControls) { - this.lensControls = lensControls; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/AffineTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/AffineTransformer.java deleted file mode 100644 index 47e49511..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/AffineTransformer.java +++ /dev/null @@ -1,271 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. 
- * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Apr 16, 2005 - */ - -package edu.uci.ics.jung.visualization.transform; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.GeneralPath; -import java.awt.geom.NoninvertibleTransformException; -import java.awt.geom.PathIterator; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.transform.shape.ShapeTransformer; - -/** - * - * Provides methods to map points from one coordinate system to - * another, by delegating to a wrapped AffineTransform (uniform) - * and its inverse. - * - * @author Tom Nelson - */ -public class AffineTransformer implements BidirectionalTransformer, ShapeTransformer { - - protected AffineTransform inverse; - - /** - * the AffineTransform to use. Initialize to identity - * - */ - protected AffineTransform transform = new AffineTransform(); - - /** - * create an instance that does not transform points - * - */ - public AffineTransformer() { - // nothing left to do - } - /** - * Create an instance with the supplied transform - */ - public AffineTransformer(AffineTransform transform) { - if(transform != null) - this.transform = transform; - } - - /** - * @return Returns the transform. - */ - public AffineTransform getTransform() { - return transform; - } - /** - * @param transform The transform to set. - */ - public void setTransform(AffineTransform transform) { - this.transform = transform; - } - - /** - * applies the inverse transform to the supplied point - * @param p - * @return - */ - public Point2D inverseTransform(Point2D p) { - - return getInverse().transform(p, null); - } - - public AffineTransform getInverse() { - if(inverse == null) { - try { - inverse = transform.createInverse(); - } catch (NoninvertibleTransformException e) { - e.printStackTrace(); - } - } - return inverse; - } - - /** - * getter for scalex - */ - public double getScaleX() { - return transform.getScaleX(); - } - - /** - * getter for scaley - */ - public double getScaleY() { - return transform.getScaleY(); - } - - public double getScale() { - return Math.sqrt(transform.getDeterminant()); - } - - /** - * getter for shear in x axis - */ - public double getShearX() { - return transform.getShearX(); - } - - /** - * getter for shear in y axis - */ - public double getShearY() { - return transform.getShearY(); - } - - /** - * get the translate x value - */ - public double getTranslateX() { - return transform.getTranslateX(); - } - - /** - * get the translate y value - */ - public double getTranslateY() { - return transform.getTranslateY(); - } - - - - - - /** - * applies the transform to the supplied point - */ - public Point2D transform(Point2D p) { - if(p == null) return null; - return transform.transform(p, null); - } - - /** - * transform the supplied shape from graph coordinates to - * screen coordinates - * @return the GeneralPath of the transformed shape - */ - public Shape transform(Shape shape) { - GeneralPath newPath = new GeneralPath(); - float[] coords = new float[6]; - for(PathIterator iterator=shape.getPathIterator(null); - iterator.isDone() == false; - iterator.next()) { - int type = iterator.currentSegment(coords); - switch(type) { - case PathIterator.SEG_MOVETO: - Point2D p = transform(new Point2D.Float(coords[0], coords[1])); - newPath.moveTo((float)p.getX(), (float)p.getY()); - break; - - case PathIterator.SEG_LINETO: - p = transform(new Point2D.Float(coords[0], 
coords[1])); - newPath.lineTo((float)p.getX(), (float) p.getY()); - break; - - case PathIterator.SEG_QUADTO: - p = transform(new Point2D.Float(coords[0], coords[1])); - Point2D q = transform(new Point2D.Float(coords[2], coords[3])); - newPath.quadTo((float)p.getX(), (float)p.getY(), (float)q.getX(), (float)q.getY()); - break; - - case PathIterator.SEG_CUBICTO: - p = transform(new Point2D.Float(coords[0], coords[1])); - q = transform(new Point2D.Float(coords[2], coords[3])); - Point2D r = transform(new Point2D.Float(coords[4], coords[5])); - newPath.curveTo((float)p.getX(), (float)p.getY(), - (float)q.getX(), (float)q.getY(), - (float)r.getX(), (float)r.getY()); - break; - - case PathIterator.SEG_CLOSE: - newPath.closePath(); - break; - - } - } - return newPath; - } - - /** - * transform the supplied shape from graph coordinates to - * screen coordinates - * @return the GeneralPath of the transformed shape - */ - public Shape inverseTransform(Shape shape) { - GeneralPath newPath = new GeneralPath(); - float[] coords = new float[6]; - for(PathIterator iterator=shape.getPathIterator(null); - iterator.isDone() == false; - iterator.next()) { - int type = iterator.currentSegment(coords); - switch(type) { - case PathIterator.SEG_MOVETO: - Point2D p = inverseTransform(new Point2D.Float(coords[0], coords[1])); - newPath.moveTo((float)p.getX(), (float)p.getY()); - break; - - case PathIterator.SEG_LINETO: - p = inverseTransform(new Point2D.Float(coords[0], coords[1])); - newPath.lineTo((float)p.getX(), (float) p.getY()); - break; - - case PathIterator.SEG_QUADTO: - p = inverseTransform(new Point2D.Float(coords[0], coords[1])); - Point2D q = inverseTransform(new Point2D.Float(coords[2], coords[3])); - newPath.quadTo((float)p.getX(), (float)p.getY(), (float)q.getX(), (float)q.getY()); - break; - - case PathIterator.SEG_CUBICTO: - p = inverseTransform(new Point2D.Float(coords[0], coords[1])); - q = inverseTransform(new Point2D.Float(coords[2], coords[3])); - Point2D r = inverseTransform(new Point2D.Float(coords[4], coords[5])); - newPath.curveTo((float)p.getX(), (float)p.getY(), - (float)q.getX(), (float)q.getY(), - (float)r.getX(), (float)r.getY()); - break; - - case PathIterator.SEG_CLOSE: - newPath.closePath(); - break; - - } - } - return newPath; - } - - public double getRotation() { - double[] unitVector = new double[]{0,0,1,0}; - double[] result = new double[4]; - - transform.transform(unitVector, 0, result, 0, 2); - - double dy = Math.abs(result[3] - result[1]); - double length = Point2D.distance(result[0], result[1], result[2], result[3]); - double rotation = Math.asin(dy / length); - - if (result[3] - result[1] > 0) { - if (result[2] - result[0] < 0) { - rotation = Math.PI - rotation; - } - } else { - if (result[2] - result[0] > 0) { - rotation = 2 * Math.PI - rotation; - } else { - rotation = rotation + Math.PI; - } - } - - return rotation; - } - - @Override - public String toString() { - return "Transformer using "+transform; - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/BidirectionalTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/BidirectionalTransformer.java deleted file mode 100644 index 1cb9ff79..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/BidirectionalTransformer.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. 
- * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Apr 16, 2005 - */ - -package edu.uci.ics.jung.visualization.transform; - -import java.awt.Shape; -import java.awt.geom.Point2D; - -/** - * Provides methods to map points from one coordinate system to - * another: graph to screen and screen to graph. - * - * @author Tom Nelson - */ -public interface BidirectionalTransformer { - - /** - * convert the supplied graph coordinate to the - * screen coordinate - * @param p graph point to convert - * @return screen point - */ - Point2D transform(Point2D p); - - /** - * convert the supplied screen coordinate to the - * graph coordinate. - * @param p screen point to convert - * @return the graph point - */ - Point2D inverseTransform(Point2D p); - - /** - * - * @param shape - * @return - */ - Shape transform(Shape shape); - - /** - * - * @param shape - * @return - */ - Shape inverseTransform(Shape shape); - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/HyperbolicTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/HyperbolicTransformer.java deleted file mode 100644 index 176758b7..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/HyperbolicTransformer.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - */ -package edu.uci.ics.jung.visualization.transform; - -import java.awt.Component; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.algorithms.layout.PolarPoint; - -/** - * HyperbolicTransformer wraps a MutableAffineTransformer and modifies - * the transform and inverseTransform methods so that they create a - * fisheye projection of the graph points, with points near the - * center spread out and points near the edges collapsed onto the - * circumference of an ellipse. - * - * HyperBolicTransformer is not an affine transform, but it uses an - * affine transform to cause translation, scaling, rotation, and shearing - * while applying a non-affine hyperbolic filter in its transform and - * inverseTransform methods. 
- * - * @author Tom Nelson - * - * - */ -public class HyperbolicTransformer extends LensTransformer implements MutableTransformer { - - - /** - * create an instance, setting values from the passed component - * and registering to listen for size changes on the component - * @param component - */ - public HyperbolicTransformer(Component component) { - this(component, new MutableAffineTransformer()); - } - /** - * create an instance with a possibly shared transform - * @param component - * @param delegate - */ - public HyperbolicTransformer(Component component, MutableTransformer delegate) { - super(component, delegate); - } - - /** - * override base class transform to project the fisheye effect - */ - public Point2D transform(Point2D graphPoint) { - if(graphPoint == null) return null; - Point2D viewCenter = getViewCenter(); - double viewRadius = getViewRadius(); - double ratio = getRatio(); - // transform the point from the graph to the view - Point2D viewPoint = delegate.transform(graphPoint); - // calculate point from center - double dx = viewPoint.getX() - viewCenter.getX(); - double dy = viewPoint.getY() - viewCenter.getY(); - // factor out ellipse - dx *= ratio; - Point2D pointFromCenter = new Point2D.Double(dx, dy); - - PolarPoint polar = PolarPoint.cartesianToPolar(pointFromCenter); - double theta = polar.getTheta(); - double radius = polar.getRadius(); - if(radius > viewRadius) return viewPoint; - - double mag = Math.tan(Math.PI/2*magnification); - radius *= mag; - - radius = Math.min(radius, viewRadius); - radius /= viewRadius; - radius *= Math.PI/2; - radius = Math.abs(Math.atan(radius)); - radius *= viewRadius; - Point2D projectedPoint = PolarPoint.polarToCartesian(theta, radius); - projectedPoint.setLocation(projectedPoint.getX()/ratio, projectedPoint.getY()); - Point2D translatedBack = new Point2D.Double(projectedPoint.getX()+viewCenter.getX(), - projectedPoint.getY()+viewCenter.getY()); - return translatedBack; - } - - /** - * override base class to un-project the fisheye effect - */ - public Point2D inverseTransform(Point2D viewPoint) { - - Point2D viewCenter = getViewCenter(); - double viewRadius = getViewRadius(); - double ratio = getRatio(); - double dx = viewPoint.getX() - viewCenter.getX(); - double dy = viewPoint.getY() - viewCenter.getY(); - // factor out ellipse - dx *= ratio; - - Point2D pointFromCenter = new Point2D.Double(dx, dy); - - PolarPoint polar = PolarPoint.cartesianToPolar(pointFromCenter); - - double radius = polar.getRadius(); - if(radius > viewRadius) return delegate.inverseTransform(viewPoint); - - radius /= viewRadius; - radius = Math.abs(Math.tan(radius)); - radius /= Math.PI/2; - radius *= viewRadius; - double mag = Math.tan(Math.PI/2*magnification); - radius /= mag; - polar.setRadius(radius); - Point2D projectedPoint = PolarPoint.polarToCartesian(polar); - projectedPoint.setLocation(projectedPoint.getX()/ratio, projectedPoint.getY()); - Point2D translatedBack = new Point2D.Double(projectedPoint.getX()+viewCenter.getX(), - projectedPoint.getY()+viewCenter.getY()); - return delegate.inverseTransform(translatedBack); - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/LayoutLensSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/LayoutLensSupport.java deleted file mode 100644 index 58ca76b4..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/LayoutLensSupport.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the 
University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 21, 2005 - */ - -package edu.uci.ics.jung.visualization.transform; - -import java.awt.Dimension; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.control.ModalGraphMouse; -import edu.uci.ics.jung.visualization.control.ModalLensGraphMouse; -import edu.uci.ics.jung.visualization.picking.LayoutLensShapePickSupport; -/** - * A class to make it easy to add an - * examining lens to a jung graph application. See HyperbolicTransformerDemo - * for an example of how to use it. - * - * @author Tom Nelson - * - * - */ -public class LayoutLensSupport extends AbstractLensSupport - implements LensSupport { - - protected GraphElementAccessor pickSupport; - - public LayoutLensSupport(VisualizationViewer vv) { - this(vv, new HyperbolicTransformer(vv, vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.LAYOUT)), - new ModalLensGraphMouse()); - } - /** - * create the base class, setting common members and creating - * a custom GraphMouse - * @param vv the VisualizationViewer to work on - */ - public LayoutLensSupport(VisualizationViewer vv, LensTransformer lensTransformer, - ModalGraphMouse lensGraphMouse) { - super(vv, lensGraphMouse); - this.lensTransformer = lensTransformer; - this.pickSupport = vv.getPickSupport(); - - Dimension d = vv.getSize(); - if(d.width <= 0 || d.height <= 0) { - d = vv.getPreferredSize(); - } - lensTransformer.setViewRadius(d.width/5); - } - - public void activate() { - if(lens == null) { - lens = new Lens(lensTransformer); - } - if(lensControls == null) { - lensControls = new LensControls(lensTransformer); - } - vv.getRenderContext().setPickSupport(new LayoutLensShapePickSupport(vv)); - vv.getRenderContext().getMultiLayerTransformer().setTransformer(Layer.LAYOUT, lensTransformer); - vv.prependPreRenderPaintable(lens); - vv.addPostRenderPaintable(lensControls); - vv.setGraphMouse(lensGraphMouse); - vv.setToolTipText(instructions); - vv.repaint(); - } - - public void deactivate() { - if(lensTransformer != null) { - vv.removePreRenderPaintable(lens); - vv.removePostRenderPaintable(lensControls); - vv.getRenderContext().getMultiLayerTransformer().setTransformer(Layer.LAYOUT, lensTransformer.getDelegate()); - } - vv.getRenderContext().setPickSupport(pickSupport); - vv.setToolTipText(defaultToolTipText); - vv.setGraphMouse(graphMouse); - vv.repaint(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/LensSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/LensSupport.java deleted file mode 100644 index a9d6ca6e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/LensSupport.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Aug 5, 2005 - */ - -package edu.uci.ics.jung.visualization.transform; - -import edu.uci.ics.jung.visualization.control.ModalGraphMouse; - -/** - * basic API for implementing lens projection support - * - * @author Tom Nelson - * - */ -public interface LensSupport { - - void activate(); - void deactivate(); - void activate(boolean state); - LensTransformer getLensTransformer(); - - ModalGraphMouse getGraphMouse(); -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/LensTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/LensTransformer.java deleted file mode 100644 index 87184285..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/LensTransformer.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - */ -package edu.uci.ics.jung.visualization.transform; - -import java.awt.Component; -import java.awt.Dimension; -import java.awt.Shape; -import java.awt.event.ComponentAdapter; -import java.awt.event.ComponentEvent; -import java.awt.geom.AffineTransform; -import java.awt.geom.Ellipse2D; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.awt.geom.RectangularShape; - -/** - * LensTransformer wraps a MutableAffineTransformer and modifies - * the transform and inverseTransform methods so that they create a - * projection of the graph points within an elliptical lens. - * - * LensTransformer uses an - * affine transform to cause translation, scaling, rotation, and shearing - * while applying a possibly non-affine filter in its transform and - * inverseTransform methods. - * - * @author Tom Nelson - * - * - */ -public abstract class LensTransformer extends MutableTransformerDecorator implements MutableTransformer { - - /** - * the area affected by the transform - */ - protected RectangularShape lensShape = new Ellipse2D.Float(); - - protected float magnification = 0.7f; - - /** - * create an instance with a possibly shared transform - * @param component - * @param delegate - */ - public LensTransformer(Component component, MutableTransformer delegate) { - super(delegate); - setComponent(component); - component.addComponentListener(new ComponentListenerImpl()); - } - - /** - * set values from the passed component. - * declared private so it can't be overridden - * @param component - */ - private void setComponent(Component component) { - Dimension d = component.getSize(); - if(d.width <= 0 || d.height <= 0) { - d = component.getPreferredSize(); - } - float ewidth = d.width/1.5f; - float eheight = d.height/1.5f; - lensShape.setFrame(d.width/2-ewidth/2, d.height/2-eheight/2, ewidth, eheight); - } - - /** - * @return Returns the magnification. - */ - public float getMagnification() { - return magnification; - } - /** - * @param magnification The magnification to set. - */ - public void setMagnification(float magnification) { - this.magnification = magnification; - } - /** - * @return Returns the viewCenter. - */ - public Point2D getViewCenter() { - return new Point2D.Double(lensShape.getCenterX(), lensShape.getCenterY()); - } - /** - * @param viewCenter The viewCenter to set. 
- */ - public void setViewCenter(Point2D viewCenter) { - double width = lensShape.getWidth(); - double height = lensShape.getHeight(); - lensShape.setFrame(viewCenter.getX()-width/2, - viewCenter.getY()-height/2, - width, height); - } - - /** - * @return Returns the viewRadius. - */ - public double getViewRadius() { - return lensShape.getHeight()/2; - } - /** - * @param viewRadius The viewRadius to set. - */ - public void setViewRadius(double viewRadius) { - double x = lensShape.getCenterX(); - double y = lensShape.getCenterY(); - double viewRatio = getRatio(); - lensShape.setFrame(x-viewRadius/viewRatio, - y-viewRadius, - 2*viewRadius/viewRatio, - 2*viewRadius); - } - - /** - * @return Returns the ratio. - */ - public double getRatio() { - return lensShape.getHeight()/lensShape.getWidth(); - } - - public void setLensShape(RectangularShape ellipse) { - this.lensShape = ellipse; - } - public RectangularShape getLensShape() { - return lensShape; - } - public void setToIdentity() { - this.delegate.setToIdentity(); - } - - /** - * react to size changes on a component - */ - protected class ComponentListenerImpl extends ComponentAdapter { - public void componentResized(ComponentEvent e) { - setComponent(e.getComponent()); - } - } - - /** - * override base class transform to project the fisheye effect - */ - public abstract Point2D transform(Point2D graphPoint); - - /** - * override base class to un-project the fisheye effect - */ - public abstract Point2D inverseTransform(Point2D viewPoint); - - public double getDistanceFromCenter(Point2D p) { - - double dx = lensShape.getCenterX()-p.getX(); - double dy = lensShape.getCenterY()-p.getY(); - dx *= getRatio(); - return Math.sqrt(dx*dx + dy*dy); - } - - /** - * return the supplied shape, translated to the coordinates - * that result from calling transform on its center - */ - public Shape transform(Shape shape) { - Rectangle2D bounds = shape.getBounds2D(); - Point2D center = new Point2D.Double(bounds.getCenterX(),bounds.getCenterY()); - Point2D newCenter = transform(center); - double dx = newCenter.getX()-center.getX(); - double dy = newCenter.getY()-center.getY(); - AffineTransform at = AffineTransform.getTranslateInstance(dx,dy); - return at.createTransformedShape(shape); - } - - /** - * return the supplied shape, translated to the coordinates - * that result from calling inverseTransform on its center - */ - public Shape inverseTransform(Shape shape) { - Rectangle2D bounds = shape.getBounds2D(); - Point2D center = new Point2D.Double(bounds.getCenterX(),bounds.getCenterY()); - Point2D newCenter = inverseTransform(center); - double dx = newCenter.getX()-center.getX(); - double dy = newCenter.getY()-center.getY(); - AffineTransform at = AffineTransform.getTranslateInstance(dx,dy); - return at.createTransformedShape(shape); - } - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/MagnifyTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/MagnifyTransformer.java deleted file mode 100644 index 771514b2..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/MagnifyTransformer.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - */ -package edu.uci.ics.jung.visualization.transform; - -import java.awt.Component; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.algorithms.layout.PolarPoint; - -/** - * MagnifyTransformer wraps a MutableAffineTransformer and modifies - * the transform and inverseTransform methods so that they create an - * enlarging projection of the graph points. - * - * MagnifyTransformer uses an - * affine transform to cause translation, scaling, rotation, and shearing - * while applying a separate magnification filter in its transform and - * inverseTransform methods. - * - * @author Tom Nelson - * - * - */ -public class MagnifyTransformer extends LensTransformer implements MutableTransformer { - - - /** - * create an instance, setting values from the passed component - * and registering to listen for size changes on the component - * @param component - */ - public MagnifyTransformer(Component component) { - this(component, new MutableAffineTransformer()); - } - /** - * create an instance with a possibly shared transform - * @param component - * @param delegate - */ - public MagnifyTransformer(Component component, MutableTransformer delegate) { - super(component, delegate); - this.magnification = 3.f; - } - - /** - * override base class transform to project the fisheye effect - */ - public Point2D transform(Point2D graphPoint) { - if(graphPoint == null) return null; - Point2D viewCenter = getViewCenter(); - double viewRadius = getViewRadius(); - double ratio = getRatio(); - // transform the point from the graph to the view - Point2D viewPoint = delegate.transform(graphPoint); - // calculate point from center - double dx = viewPoint.getX() - viewCenter.getX(); - double dy = viewPoint.getY() - viewCenter.getY(); - // factor out ellipse - dx *= ratio; - Point2D pointFromCenter = new Point2D.Double(dx, dy); - - PolarPoint polar = PolarPoint.cartesianToPolar(pointFromCenter); - double theta = polar.getTheta(); - double radius = polar.getRadius(); - if(radius > viewRadius) return viewPoint; - - double mag = magnification; - radius *= mag; - - radius = Math.min(radius, viewRadius); - Point2D projectedPoint = PolarPoint.polarToCartesian(theta, radius); - projectedPoint.setLocation(projectedPoint.getX()/ratio, projectedPoint.getY()); - Point2D translatedBack = new Point2D.Double(projectedPoint.getX()+viewCenter.getX(), - projectedPoint.getY()+viewCenter.getY()); - return translatedBack; - } - - /** - * override base class to un-project the fisheye effect - */ - public Point2D inverseTransform(Point2D viewPoint) { - - Point2D viewCenter = getViewCenter(); - double viewRadius = getViewRadius(); - double ratio = getRatio(); - double dx = viewPoint.getX() - viewCenter.getX(); - double dy = viewPoint.getY() - viewCenter.getY(); - // factor out ellipse - dx *= ratio; - - Point2D pointFromCenter = new Point2D.Double(dx, dy); - - PolarPoint polar = PolarPoint.cartesianToPolar(pointFromCenter); - - double radius = polar.getRadius(); - if(radius > viewRadius) return delegate.inverseTransform(viewPoint); - - double mag = magnification; - radius /= mag; - polar.setRadius(radius); - Point2D projectedPoint = PolarPoint.polarToCartesian(polar); - projectedPoint.setLocation(projectedPoint.getX()/ratio, projectedPoint.getY()); - Point2D translatedBack = new Point2D.Double(projectedPoint.getX()+viewCenter.getX(), - projectedPoint.getY()+viewCenter.getY()); - return delegate.inverseTransform(translatedBack); - } - - /** - * magnifies the point, without considering the Lens - * @param graphPoint - * @return - 
*/ - public Point2D magnify(Point2D graphPoint) { - if(graphPoint == null) return null; - Point2D viewCenter = getViewCenter(); - double ratio = getRatio(); - // transform the point from the graph to the view - Point2D viewPoint = graphPoint; - // calculate point from center - double dx = viewPoint.getX() - viewCenter.getX(); - double dy = viewPoint.getY() - viewCenter.getY(); - // factor out ellipse - dx *= ratio; - Point2D pointFromCenter = new Point2D.Double(dx, dy); - - PolarPoint polar = PolarPoint.cartesianToPolar(pointFromCenter); - double theta = polar.getTheta(); - double radius = polar.getRadius(); - - double mag = magnification; - radius *= mag; - -// radius = Math.min(radius, viewRadius); - Point2D projectedPoint = PolarPoint.polarToCartesian(theta, radius); - projectedPoint.setLocation(projectedPoint.getX()/ratio, projectedPoint.getY()); - Point2D translatedBack = new Point2D.Double(projectedPoint.getX()+viewCenter.getX(), - projectedPoint.getY()+viewCenter.getY()); - return translatedBack; - } - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/MutableAffineTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/MutableAffineTransformer.java deleted file mode 100644 index 74174c7f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/MutableAffineTransformer.java +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Apr 16, 2005 - */ - -package edu.uci.ics.jung.visualization.transform; - -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; - -import javax.swing.event.ChangeListener; -import javax.swing.event.EventListenerList; - -import edu.uci.ics.jung.visualization.transform.shape.ShapeTransformer; -import edu.uci.ics.jung.visualization.util.ChangeEventSupport; -import edu.uci.ics.jung.visualization.util.DefaultChangeEventSupport; - -/** - * - * Provides methods to mutate the AffineTransform used by AffineTransformer - * base class to map points from one coordinate system to - * another. 
- * - * - * @author Tom Nelson - * - * - */ -public class MutableAffineTransformer extends AffineTransformer -implements MutableTransformer, ShapeTransformer, ChangeEventSupport { - - protected ChangeEventSupport changeSupport = - new DefaultChangeEventSupport(this); - - /** - * create an instance that does not transform points - * - */ - public MutableAffineTransformer() { - // nothing left to do - } - /** - * Create an instance with the supplied transform - */ - public MutableAffineTransformer(AffineTransform transform) { - super(transform); - } - - public String toString() { - return "MutableAffineTransformer using "+transform; - } - /** - * setter for the scale - * fires a PropertyChangeEvent with the AffineTransforms representing - * the previous and new values for scale and offset - * @param scalex - * @param scaley - */ - public void scale(double scalex, double scaley, Point2D from) { - AffineTransform xf = AffineTransform.getTranslateInstance(from.getX(),from.getY()); - xf.scale(scalex, scaley); - xf.translate(-from.getX(), -from.getY()); - inverse = null; - transform.preConcatenate(xf); - fireStateChanged(); - } - - /** - * setter for the scale - * fires a PropertyChangeEvent with the AffineTransforms representing - * the previous and new values for scale and offset - * @param scalex - * @param scaley - */ - public void setScale(double scalex, double scaley, Point2D from) { - transform.setToIdentity(); - scale(scalex, scaley, from); - } - - /** - * shears the transform by passed parameters - * @param shx x value to shear - * @param shy y value to shear - */ - public void shear(double shx, double shy, Point2D from) { - inverse = null; - AffineTransform at = - AffineTransform.getTranslateInstance(from.getX(), from.getY()); - at.shear(shx, shy); - at.translate(-from.getX(), -from.getY()); - transform.preConcatenate(at); - fireStateChanged(); - } - - /** - * replace the Transform's translate x and y values - * with the passed values, leaving the scale values - * unchanged - * @param tx the x value - * @param ty the y value - */ - public void setTranslate(double tx, double ty) { - float scalex = (float) transform.getScaleX(); - float scaley = (float) transform.getScaleY(); - float shearx = (float) transform.getShearX(); - float sheary = (float) transform.getShearY(); - inverse = null; - transform.setTransform(scalex, - sheary, - shearx, - scaley, - tx, ty); - fireStateChanged(); - } - - /** - * Apply the passed values to the current Transform - * @param offsetx the x-value - * @param offsety the y-value - */ - public void translate(double offsetx, double offsety) { - inverse = null; - transform.translate(offsetx, offsety); - fireStateChanged(); - } - - /** - * preconcatenates the rotation at the supplied point with the current transform - */ - public void rotate(double theta, Point2D from) { - AffineTransform rotate = - AffineTransform.getRotateInstance(theta, from.getX(), from.getY()); - inverse = null; - transform.preConcatenate(rotate); - - fireStateChanged(); - } - - /** - * rotates the current transform at the supplied points - */ - public void rotate(double radians, double x, double y) { - inverse = null; - transform.rotate(radians, x, y); - fireStateChanged(); - } - - public void concatenate(AffineTransform xform) { - inverse = null; - transform.concatenate(xform); - fireStateChanged(); - - } - public void preConcatenate(AffineTransform xform) { - inverse = null; - transform.preConcatenate(xform); - fireStateChanged(); - } - - - /** - * Adds a ChangeListener. 
- * @param l the listener to be added - */ - public void addChangeListener(ChangeListener l) { - changeSupport.addChangeListener(l); - } - - /** - * Removes a ChangeListener. - * @param l the listener to be removed - */ - public void removeChangeListener(ChangeListener l) { - changeSupport.removeChangeListener(l); - } - - /** - * Returns an array of all the ChangeListeners added - * with addChangeListener(). - * - * @return all of the ChangeListeners added or an empty - * array if no listeners have been added - */ - public ChangeListener[] getChangeListeners() { - return changeSupport.getChangeListeners(); - } - - /** - * Notifies all listeners that have registered interest for - * notification on this event type. The event instance - * is lazily created. - * @see EventListenerList - */ - public void fireStateChanged() { - changeSupport.fireStateChanged(); - } - - public void setToIdentity() { - inverse = null; - transform.setToIdentity(); - fireStateChanged(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/MutableTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/MutableTransformer.java deleted file mode 100644 index 954f3d3c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/MutableTransformer.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 3, 2005 - */ - -package edu.uci.ics.jung.visualization.transform; - -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.visualization.transform.shape.ShapeTransformer; -import edu.uci.ics.jung.visualization.util.ChangeEventSupport; - -/** - * Provides an API for the mutation of a transformer - * and for adding listeners for changes on the transformer - * - * @author Tom Nelson - * - * - */ -public interface MutableTransformer extends ShapeTransformer, ChangeEventSupport { - - void translate(double dx, double dy); - - void setTranslate(double dx, double dy); - - void scale(double sx, double sy, Point2D point); - - void setScale(double sx, double sy, Point2D point); - - void rotate(double radians, Point2D point); - - void rotate(double radians, double x, double y); - - void shear(double shx, double shy, Point2D from); - - void concatenate(AffineTransform transform); - - void preConcatenate(AffineTransform transform); - - double getScaleX(); - - double getScaleY(); - - double getScale(); - - double getTranslateX(); - - double getTranslateY(); - - double getShearX(); - - double getShearY(); - - AffineTransform getTransform(); - - void setToIdentity(); - - double getRotation(); - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/MutableTransformerDecorator.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/MutableTransformerDecorator.java deleted file mode 100644 index ad85dc9c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/MutableTransformerDecorator.java +++ /dev/null @@ -1,241 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - */ -package edu.uci.ics.jung.visualization.transform; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Point2D; - -import javax.swing.event.ChangeListener; - -/** - * a complete decorator that wraps a MutableTransformer. Subclasses - * use this to allow them to only declare methods they need to change. - * - * @author Tom Nelson - * - */ -public abstract class MutableTransformerDecorator implements MutableTransformer { - - protected MutableTransformer delegate; - - public MutableTransformerDecorator(MutableTransformer delegate) { - if(delegate == null) { - delegate = new MutableAffineTransformer(); - } - this.delegate = delegate; - } - - /** - * @return Returns the delegate. - */ - public MutableTransformer getDelegate() { - return delegate; - } - - /** - * @param delegate The delegate to set. - */ - public void setDelegate(MutableTransformer delegate) { - this.delegate = delegate; - } - - - - /* (non-Javadoc) - * @see edu.uci.ics.jung.utils.ChangeEventSupport#addChangeListener(javax.swing.event.ChangeListener) - */ - public void addChangeListener(ChangeListener l) { - delegate.addChangeListener(l); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#concatenate(java.awt.geom.AffineTransform) - */ - public void concatenate(AffineTransform transform) { - delegate.concatenate(transform); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.utils.ChangeEventSupport#fireStateChanged() - */ - public void fireStateChanged() { - delegate.fireStateChanged(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.utils.ChangeEventSupport#getChangeListeners() - */ - public ChangeListener[] getChangeListeners() { - return delegate.getChangeListeners(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#getScale() - */ - public double getScale() { - return delegate.getScale(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#getScaleX() - */ - public double getScaleX() { - return delegate.getScaleX(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#getScaleY() - */ - public double getScaleY() { - return delegate.getScaleY(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#getShearX() - */ - public double getShearX() { - return delegate.getShearX(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#getShearY() - */ - public double getShearY() { - return delegate.getShearY(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#getTransform() - */ - public AffineTransform getTransform() { - return delegate.getTransform(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#getTranslateX() - */ - public double getTranslateX() { - return delegate.getTranslateX(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#getTranslateY() - */ - public double getTranslateY() { - return delegate.getTranslateY(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.Transformer#inverseTransform(java.awt.geom.Point2D) - */ - public Point2D inverseTransform(Point2D p) { - return delegate.inverseTransform(p); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.shape.ShapeTransformer#inverseTransform(java.awt.Shape) - */ - public Shape 
inverseTransform(Shape shape) { - return delegate.inverseTransform(shape); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.Transformer#isTransformed() - */ -// public boolean isTransformed() { -// return delegate.isTransformed(); -// } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#preConcatenate(java.awt.geom.AffineTransform) - */ - public void preConcatenate(AffineTransform transform) { - delegate.preConcatenate(transform); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.utils.ChangeEventSupport#removeChangeListener(javax.swing.event.ChangeListener) - */ - public void removeChangeListener(ChangeListener l) { - delegate.removeChangeListener(l); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#rotate(double, java.awt.geom.Point2D) - */ - public void rotate(double radians, Point2D point) { - delegate.rotate(radians, point); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#scale(double, double, java.awt.geom.Point2D) - */ - public void scale(double sx, double sy, Point2D point) { - delegate.scale(sx, sy, point); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#setScale(double, double, java.awt.geom.Point2D) - */ - public void setScale(double sx, double sy, Point2D point) { - delegate.setScale(sx, sy, point); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#setToIdentity() - */ - public void setToIdentity() { - delegate.setToIdentity(); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#setTranslate(double, double) - */ - public void setTranslate(double dx, double dy) { - delegate.setTranslate(dx, dy); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#shear(double, double, java.awt.geom.Point2D) - */ - public void shear(double shx, double shy, Point2D from) { - delegate.shear(shx, shy, from); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.Transformer#transform(java.awt.geom.Point2D) - */ - public Point2D transform(Point2D p) { - return delegate.transform(p); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.shape.ShapeTransformer#transform(java.awt.Shape) - */ - public Shape transform(Shape shape) { - return delegate.transform(shape); - } - - /* (non-Javadoc) - * @see edu.uci.ics.jung.visualization.transform.MutableTransformer#translate(double, double) - */ - public void translate(double dx, double dy) { - delegate.translate(dx, dy); - } - - public double getRotation() { - return delegate.getRotation(); - } - - public void rotate(double radians, double x, double y) { - delegate.rotate(radians, x, y); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/package.html b/gui/jung-src/edu/uci/ics/jung/visualization/transform/package.html deleted file mode 100644 index 415ffed4..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/package.html +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - -

          Visualization mechanisms related to transformations, including lens effects. - - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/Graphics2DWrapper.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/Graphics2DWrapper.java deleted file mode 100644 index e925a876..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/Graphics2DWrapper.java +++ /dev/null @@ -1,674 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 11, 2005 - */ - -package edu.uci.ics.jung.visualization.transform.shape; - -import java.awt.Color; -import java.awt.Composite; -import java.awt.Font; -import java.awt.FontMetrics; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.GraphicsConfiguration; -import java.awt.Image; -import java.awt.Paint; -import java.awt.Polygon; -import java.awt.Rectangle; -import java.awt.RenderingHints; -import java.awt.Shape; -import java.awt.Stroke; -import java.awt.RenderingHints.Key; -import java.awt.font.FontRenderContext; -import java.awt.font.GlyphVector; -import java.awt.geom.AffineTransform; -import java.awt.image.BufferedImage; -import java.awt.image.BufferedImageOp; -import java.awt.image.ImageObserver; -import java.awt.image.RenderedImage; -import java.awt.image.renderable.RenderableImage; -import java.text.AttributedCharacterIterator; -import java.util.Map; - - -/** - * a complete wrapping of Graphics2D, useful as a base class. - * Contains no additional methods, other than direct calls - * to the delegate. - * - * @see GraphicsDecorator as an example subclass that - * adds additional methods. 
- * - * @author Tom Nelson - * - * - */ -public class Graphics2DWrapper { - - protected Graphics2D delegate; - - public Graphics2DWrapper() { - this(null); - } - public Graphics2DWrapper(Graphics2D delegate) { - this.delegate = delegate; - } - - public void setDelegate(Graphics2D delegate) { - this.delegate = delegate; - } - - public Graphics2D getDelegate() { - return delegate; - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#addRenderingHints(java.util.Map) - */ - public void addRenderingHints(Map hints) { - delegate.addRenderingHints(hints); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#clearRect(int, int, int, int) - */ - public void clearRect(int x, int y, int width, int height) { - delegate.clearRect(x, y, width, height); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#clip(java.awt.Shape) - */ - public void clip(Shape s) { - delegate.clip(s); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#clipRect(int, int, int, int) - */ - public void clipRect(int x, int y, int width, int height) { - delegate.clipRect(x, y, width, height); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#copyArea(int, int, int, int, int, int) - */ - public void copyArea(int x, int y, int width, int height, int dx, int dy) { - delegate.copyArea(x, y, width, height, dx, dy); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#create() - */ - public Graphics create() { - return delegate.create(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#create(int, int, int, int) - */ - public Graphics create(int x, int y, int width, int height) { - return delegate.create(x, y, width, height); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#dispose() - */ - public void dispose() { - delegate.dispose(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#draw(java.awt.Shape) - */ - public void draw(Shape s) { - delegate.draw(s); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#draw3DRect(int, int, int, int, boolean) - */ - public void draw3DRect(int x, int y, int width, int height, boolean raised) { - delegate.draw3DRect(x, y, width, height, raised); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawArc(int, int, int, int, int, int) - */ - public void drawArc(int x, int y, int width, int height, int startAngle, int arcAngle) { - delegate.drawArc(x, y, width, height, startAngle, arcAngle); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawBytes(byte[], int, int, int, int) - */ - public void drawBytes(byte[] data, int offset, int length, int x, int y) { - delegate.drawBytes(data, offset, length, x, y); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawChars(char[], int, int, int, int) - */ - public void drawChars(char[] data, int offset, int length, int x, int y) { - delegate.drawChars(data, offset, length, x, y); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#drawGlyphVector(java.awt.font.GlyphVector, float, float) - */ - public void drawGlyphVector(GlyphVector g, float x, float y) { - delegate.drawGlyphVector(g, x, y); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#drawImage(java.awt.image.BufferedImage, java.awt.image.BufferedImageOp, int, int) - */ - public void drawImage(BufferedImage img, BufferedImageOp op, int x, int y) { - delegate.drawImage(img, op, x, y); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#drawImage(java.awt.Image, java.awt.geom.AffineTransform, java.awt.image.ImageObserver) - */ - public boolean drawImage(Image img, AffineTransform xform, ImageObserver obs) { - return delegate.drawImage(img, xform, obs); - } - - /* (non-Javadoc) 
- * @see java.awt.Graphics#drawImage(java.awt.Image, int, int, java.awt.Color, java.awt.image.ImageObserver) - */ - public boolean drawImage(Image img, int x, int y, Color bgcolor, ImageObserver observer) { - return delegate.drawImage(img, x, y, bgcolor, observer); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawImage(java.awt.Image, int, int, java.awt.image.ImageObserver) - */ - public boolean drawImage(Image img, int x, int y, ImageObserver observer) { - return delegate.drawImage(img, x, y, observer); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawImage(java.awt.Image, int, int, int, int, java.awt.Color, java.awt.image.ImageObserver) - */ - public boolean drawImage(Image img, int x, int y, int width, int height, Color bgcolor, ImageObserver observer) { - return delegate.drawImage(img, x, y, width, height, bgcolor, observer); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawImage(java.awt.Image, int, int, int, int, java.awt.image.ImageObserver) - */ - public boolean drawImage(Image img, int x, int y, int width, int height, ImageObserver observer) { - return delegate.drawImage(img, x, y, width, height, observer); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawImage(java.awt.Image, int, int, int, int, int, int, int, int, java.awt.Color, java.awt.image.ImageObserver) - */ - public boolean drawImage(Image img, int dx1, int dy1, int dx2, int dy2, int sx1, int sy1, int sx2, int sy2, Color bgcolor, ImageObserver observer) { - return delegate.drawImage(img, dx1, dy1, dx2, dy2, sx1, sy1, sx2, sy2, bgcolor, observer); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawImage(java.awt.Image, int, int, int, int, int, int, int, int, java.awt.image.ImageObserver) - */ - public boolean drawImage(Image img, int dx1, int dy1, int dx2, int dy2, int sx1, int sy1, int sx2, int sy2, ImageObserver observer) { - return delegate.drawImage(img, dx1, dy1, dx2, dy2, sx1, sy1, sx2, sy2, observer); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawLine(int, int, int, int) - */ - public void drawLine(int x1, int y1, int x2, int y2) { - delegate.drawLine(x1, y1, x2, y2); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawOval(int, int, int, int) - */ - public void drawOval(int x, int y, int width, int height) { - delegate.drawOval(x, y, width, height); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawPolygon(int[], int[], int) - */ - public void drawPolygon(int[] xPoints, int[] yPoints, int nPoints) { - delegate.drawPolygon(xPoints, yPoints, nPoints); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawPolygon(java.awt.Polygon) - */ - public void drawPolygon(Polygon p) { - delegate.drawPolygon(p); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawPolyline(int[], int[], int) - */ - public void drawPolyline(int[] xPoints, int[] yPoints, int nPoints) { - delegate.drawPolyline(xPoints, yPoints, nPoints); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawRect(int, int, int, int) - */ - public void drawRect(int x, int y, int width, int height) { - delegate.drawRect(x, y, width, height); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#drawRenderableImage(java.awt.image.renderable.RenderableImage, java.awt.geom.AffineTransform) - */ - public void drawRenderableImage(RenderableImage img, AffineTransform xform) { - delegate.drawRenderableImage(img, xform); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#drawRenderedImage(java.awt.image.RenderedImage, java.awt.geom.AffineTransform) - */ - public void drawRenderedImage(RenderedImage img, 
AffineTransform xform) { - delegate.drawRenderedImage(img, xform); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#drawRoundRect(int, int, int, int, int, int) - */ - public void drawRoundRect(int x, int y, int width, int height, int arcWidth, int arcHeight) { - delegate.drawRoundRect(x, y, width, height, arcWidth, arcHeight); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#drawString(java.text.AttributedCharacterIterator, float, float) - */ - public void drawString(AttributedCharacterIterator iterator, float x, float y) { - delegate.drawString(iterator, x, y); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#drawString(java.text.AttributedCharacterIterator, int, int) - */ - public void drawString(AttributedCharacterIterator iterator, int x, int y) { - delegate.drawString(iterator, x, y); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#drawString(java.lang.String, float, float) - */ - public void drawString(String s, float x, float y) { - delegate.drawString(s, x, y); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#drawString(java.lang.String, int, int) - */ - public void drawString(String str, int x, int y) { - delegate.drawString(str, x, y); - } - - /* (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ - public boolean equals(Object obj) { - return delegate.equals(obj); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#fill(java.awt.Shape) - */ - public void fill(Shape s) { - delegate.fill(s); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#fill3DRect(int, int, int, int, boolean) - */ - public void fill3DRect(int x, int y, int width, int height, boolean raised) { - delegate.fill3DRect(x, y, width, height, raised); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#fillArc(int, int, int, int, int, int) - */ - public void fillArc(int x, int y, int width, int height, int startAngle, int arcAngle) { - delegate.fillArc(x, y, width, height, startAngle, arcAngle); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#fillOval(int, int, int, int) - */ - public void fillOval(int x, int y, int width, int height) { - delegate.fillOval(x, y, width, height); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#fillPolygon(int[], int[], int) - */ - public void fillPolygon(int[] xPoints, int[] yPoints, int nPoints) { - delegate.fillPolygon(xPoints, yPoints, nPoints); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#fillPolygon(java.awt.Polygon) - */ - public void fillPolygon(Polygon p) { - delegate.fillPolygon(p); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#fillRect(int, int, int, int) - */ - public void fillRect(int x, int y, int width, int height) { - delegate.fillRect(x, y, width, height); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#fillRoundRect(int, int, int, int, int, int) - */ - public void fillRoundRect(int x, int y, int width, int height, int arcWidth, int arcHeight) { - delegate.fillRoundRect(x, y, width, height, arcWidth, arcHeight); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#finalize() - */ - public void finalize() { - delegate.finalize(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#getBackground() - */ - public Color getBackground() { - return delegate.getBackground(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#getClip() - */ - public Shape getClip() { - return delegate.getClip(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#getClipBounds() - */ - public Rectangle getClipBounds() { - return delegate.getClipBounds(); - } - - /* (non-Javadoc) - * @see 
java.awt.Graphics#getClipBounds(java.awt.Rectangle) - */ - public Rectangle getClipBounds(Rectangle r) { - return delegate.getClipBounds(r); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#getClipRect() - */ - @SuppressWarnings("deprecation") - public Rectangle getClipRect() { - return delegate.getClipRect(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#getColor() - */ - public Color getColor() { - return delegate.getColor(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#getComposite() - */ - public Composite getComposite() { - return delegate.getComposite(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#getDeviceConfiguration() - */ - public GraphicsConfiguration getDeviceConfiguration() { - return delegate.getDeviceConfiguration(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#getFont() - */ - public Font getFont() { - return delegate.getFont(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#getFontMetrics() - */ - public FontMetrics getFontMetrics() { - return delegate.getFontMetrics(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#getFontMetrics(java.awt.Font) - */ - public FontMetrics getFontMetrics(Font f) { - return delegate.getFontMetrics(f); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#getFontRenderContext() - */ - public FontRenderContext getFontRenderContext() { - return delegate.getFontRenderContext(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#getPaint() - */ - public Paint getPaint() { - return delegate.getPaint(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#getRenderingHint(java.awt.RenderingHints.Key) - */ - public Object getRenderingHint(Key hintKey) { - return delegate.getRenderingHint(hintKey); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#getRenderingHints() - */ - public RenderingHints getRenderingHints() { - return delegate.getRenderingHints(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#getStroke() - */ - public Stroke getStroke() { - return delegate.getStroke(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#getTransform() - */ - public AffineTransform getTransform() { - return delegate.getTransform(); - } - - /* (non-Javadoc) - * @see java.lang.Object#hashCode() - */ - public int hashCode() { - return delegate.hashCode(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#hit(java.awt.Rectangle, java.awt.Shape, boolean) - */ - public boolean hit(Rectangle rect, Shape s, boolean onStroke) { - return delegate.hit(rect, s, onStroke); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#hitClip(int, int, int, int) - */ - public boolean hitClip(int x, int y, int width, int height) { - return delegate.hitClip(x, y, width, height); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#rotate(double, double, double) - */ - public void rotate(double theta, double x, double y) { - delegate.rotate(theta, x, y); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#rotate(double) - */ - public void rotate(double theta) { - delegate.rotate(theta); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#scale(double, double) - */ - public void scale(double sx, double sy) { - delegate.scale(sx, sy); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#setBackground(java.awt.Color) - */ - public void setBackground(Color color) { - delegate.setBackground(color); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#setClip(int, int, int, int) - */ - public void setClip(int x, int y, int width, int height) { - delegate.setClip(x, y, width, height); - } - - /* (non-Javadoc) - * @see 
java.awt.Graphics#setClip(java.awt.Shape) - */ - public void setClip(Shape clip) { - delegate.setClip(clip); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#setColor(java.awt.Color) - */ - public void setColor(Color c) { - delegate.setColor(c); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#setComposite(java.awt.Composite) - */ - public void setComposite(Composite comp) { - delegate.setComposite(comp); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#setFont(java.awt.Font) - */ - public void setFont(Font font) { - delegate.setFont(font); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#setPaint(java.awt.Paint) - */ - public void setPaint(Paint paint) { - delegate.setPaint(paint); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#setPaintMode() - */ - public void setPaintMode() { - delegate.setPaintMode(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#setRenderingHint(java.awt.RenderingHints.Key, java.lang.Object) - */ - public void setRenderingHint(Key hintKey, Object hintValue) { - delegate.setRenderingHint(hintKey, hintValue); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#setRenderingHints(java.util.Map) - */ - public void setRenderingHints(Map hints) { - delegate.setRenderingHints(hints); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#setStroke(java.awt.Stroke) - */ - public void setStroke(Stroke s) { - delegate.setStroke(s); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#setTransform(java.awt.geom.AffineTransform) - */ - public void setTransform(AffineTransform Tx) { - delegate.setTransform(Tx); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#setXORMode(java.awt.Color) - */ - public void setXORMode(Color c1) { - delegate.setXORMode(c1); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#shear(double, double) - */ - public void shear(double shx, double shy) { - delegate.shear(shx, shy); - } - - /* (non-Javadoc) - * @see java.awt.Graphics#toString() - */ - public String toString() { - return delegate.toString(); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#transform(java.awt.geom.AffineTransform) - */ - public void transform(AffineTransform Tx) { - delegate.transform(Tx); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#translate(double, double) - */ - public void translate(double tx, double ty) { - delegate.translate(tx, ty); - } - - /* (non-Javadoc) - * @see java.awt.Graphics2D#translate(int, int) - */ - public void translate(int x, int y) { - delegate.translate(x, y); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/GraphicsDecorator.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/GraphicsDecorator.java deleted file mode 100644 index 6179d3ed..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/GraphicsDecorator.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Jul 11, 2005 - */ - -package edu.uci.ics.jung.visualization.transform.shape; - -import java.awt.Component; -import java.awt.Graphics2D; -import java.awt.Shape; - -import javax.swing.CellRendererPane; -import javax.swing.Icon; - - -/** - * an extendion of Graphics2DWrapper that adds enhanced - * methods for drawing icons and components - * - * @see TransformingGraphics as an example subclass - * - * @author Tom Nelson - * - * - */ -public class GraphicsDecorator extends Graphics2DWrapper { - - public GraphicsDecorator() { - this(null); - } - public GraphicsDecorator(Graphics2D delegate) { - super(delegate); - } - - public void draw(Icon icon, Component c, Shape clip, int x, int y) { - int w = icon.getIconWidth(); - int h = icon.getIconHeight(); - icon.paintIcon(c, delegate, x-w/2, y-h/2); - } - - public void draw(Component c, CellRendererPane rendererPane, - int x, int y, int w, int h, boolean shouldValidate) { - rendererPane.paintComponent(delegate, c, c.getParent(), x, y, w, h, shouldValidate); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/HyperbolicShapeTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/HyperbolicShapeTransformer.java deleted file mode 100644 index add3ac9c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/HyperbolicShapeTransformer.java +++ /dev/null @@ -1,223 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - */ -package edu.uci.ics.jung.visualization.transform.shape; - -import java.awt.Component; -import java.awt.Shape; -import java.awt.geom.GeneralPath; -import java.awt.geom.PathIterator; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.algorithms.layout.PolarPoint; -import edu.uci.ics.jung.visualization.transform.HyperbolicTransformer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - - -/** - * HyperbolicShapeTransformer extends HyperbolicTransformer and - * adds implementations for methods in ShapeFlatnessTransformer. - * It modifies the shapes (Vertex, Edge, and Arrowheads) so that - * they are distorted by the hyperbolic transformation - * - * @author Tom Nelson - * - * - */ -public class HyperbolicShapeTransformer extends HyperbolicTransformer - implements ShapeFlatnessTransformer { - - /** - * Create an instance, setting values from the passed component - * and registering to listen for size changes on the component. - */ - public HyperbolicShapeTransformer(Component component) { - this(component, null); - } - - /** - * Create an instance, setting values from the passed component - * and registering to listen for size changes on the component, - * with a possibly shared transform delegate. - */ - public HyperbolicShapeTransformer(Component component, MutableTransformer delegate) { - super(component, delegate); - } - - /** - * Transform the supplied shape with the overridden transform - * method so that the shape is distorted by the hyperbolic - * transform. 
- * @param shape a shape to transform - * @return a GeneralPath for the transformed shape - */ - public Shape transform(Shape shape) { - return transform(shape, 0); - } - public Shape transform(Shape shape, float flatness) { - GeneralPath newPath = new GeneralPath(); - float[] coords = new float[6]; - PathIterator iterator = null; - if(flatness == 0) { - iterator = shape.getPathIterator(null); - } else { - iterator = shape.getPathIterator(null, flatness); - } - for( ; - iterator.isDone() == false; - iterator.next()) { - int type = iterator.currentSegment(coords); - switch(type) { - case PathIterator.SEG_MOVETO: - Point2D p = _transform(new Point2D.Float(coords[0], coords[1])); - newPath.moveTo((float)p.getX(), (float)p.getY()); - break; - - case PathIterator.SEG_LINETO: - p = _transform(new Point2D.Float(coords[0], coords[1])); - newPath.lineTo((float)p.getX(), (float) p.getY()); - break; - - case PathIterator.SEG_QUADTO: - p = _transform(new Point2D.Float(coords[0], coords[1])); - Point2D q = _transform(new Point2D.Float(coords[2], coords[3])); - newPath.quadTo((float)p.getX(), (float)p.getY(), (float)q.getX(), (float)q.getY()); - break; - - case PathIterator.SEG_CUBICTO: - p = _transform(new Point2D.Float(coords[0], coords[1])); - q = _transform(new Point2D.Float(coords[2], coords[3])); - Point2D r = _transform(new Point2D.Float(coords[4], coords[5])); - newPath.curveTo((float)p.getX(), (float)p.getY(), - (float)q.getX(), (float)q.getY(), - (float)r.getX(), (float)r.getY()); - break; - - case PathIterator.SEG_CLOSE: - newPath.closePath(); - break; - - } - } - return newPath; - } - - public Shape inverseTransform(Shape shape) { - GeneralPath newPath = new GeneralPath(); - float[] coords = new float[6]; - for(PathIterator iterator=shape.getPathIterator(null); - iterator.isDone() == false; - iterator.next()) { - int type = iterator.currentSegment(coords); - switch(type) { - case PathIterator.SEG_MOVETO: - Point2D p = _inverseTransform(new Point2D.Float(coords[0], coords[1])); - newPath.moveTo((float)p.getX(), (float)p.getY()); - break; - - case PathIterator.SEG_LINETO: - p = _inverseTransform(new Point2D.Float(coords[0], coords[1])); - newPath.lineTo((float)p.getX(), (float) p.getY()); - break; - - case PathIterator.SEG_QUADTO: - p = _inverseTransform(new Point2D.Float(coords[0], coords[1])); - Point2D q = _inverseTransform(new Point2D.Float(coords[2], coords[3])); - newPath.quadTo((float)p.getX(), (float)p.getY(), (float)q.getX(), (float)q.getY()); - break; - - case PathIterator.SEG_CUBICTO: - p = _inverseTransform(new Point2D.Float(coords[0], coords[1])); - q = _inverseTransform(new Point2D.Float(coords[2], coords[3])); - Point2D r = _inverseTransform(new Point2D.Float(coords[4], coords[5])); - newPath.curveTo((float)p.getX(), (float)p.getY(), - (float)q.getX(), (float)q.getY(), - (float)r.getX(), (float)r.getY()); - break; - - case PathIterator.SEG_CLOSE: - newPath.closePath(); - break; - - } - } - return newPath; - } - /** - * override base class transform to project the fisheye effect - */ - private Point2D _transform(Point2D graphPoint) { - if(graphPoint == null) return null; - Point2D viewCenter = getViewCenter(); - double viewRadius = getViewRadius(); - double ratio = getRatio(); - // transform the point from the graph to the view - Point2D viewPoint = graphPoint;//delegate.transform(graphPoint); - // calculate point from center - double dx = viewPoint.getX() - viewCenter.getX(); - double dy = viewPoint.getY() - viewCenter.getY(); - // factor out ellipse - dx *= ratio; - Point2D 
pointFromCenter = new Point2D.Double(dx, dy); - - PolarPoint polar = PolarPoint.cartesianToPolar(pointFromCenter); - double theta = polar.getTheta(); - double radius = polar.getRadius(); - if(radius > viewRadius) return viewPoint; - - double mag = Math.tan(Math.PI/2*magnification); - radius *= mag; - - radius = Math.min(radius, viewRadius); - radius /= viewRadius; - radius *= Math.PI/2; - radius = Math.abs(Math.atan(radius)); - radius *= viewRadius; - Point2D projectedPoint = PolarPoint.polarToCartesian(theta, radius); - projectedPoint.setLocation(projectedPoint.getX()/ratio, projectedPoint.getY()); - Point2D translatedBack = new Point2D.Double(projectedPoint.getX()+viewCenter.getX(), - projectedPoint.getY()+viewCenter.getY()); - return translatedBack; - } - - /** - * override base class to un-project the fisheye effect - */ - private Point2D _inverseTransform(Point2D viewPoint) { - - viewPoint = delegate.inverseTransform(viewPoint); - Point2D viewCenter = getViewCenter(); - double viewRadius = getViewRadius(); - double ratio = getRatio(); - double dx = viewPoint.getX() - viewCenter.getX(); - double dy = viewPoint.getY() - viewCenter.getY(); - // factor out ellipse - dx *= ratio; - - Point2D pointFromCenter = new Point2D.Double(dx, dy); - - PolarPoint polar = PolarPoint.cartesianToPolar(pointFromCenter); - - double radius = polar.getRadius(); - if(radius > viewRadius) return viewPoint;//elegate.inverseTransform(viewPoint); - - radius /= viewRadius; - radius = Math.abs(Math.tan(radius)); - radius /= Math.PI/2; - radius *= viewRadius; - double mag = Math.tan(Math.PI/2*magnification); - radius /= mag; - polar.setRadius(radius); - Point2D projectedPoint = PolarPoint.polarToCartesian(polar); - projectedPoint.setLocation(projectedPoint.getX()/ratio, projectedPoint.getY()); - Point2D translatedBack = new Point2D.Double(projectedPoint.getX()+viewCenter.getX(), - projectedPoint.getY()+viewCenter.getY()); - return translatedBack; - //delegate.inverseTransform(translatedBack); - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/Intersector.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/Intersector.java deleted file mode 100644 index 96767698..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/Intersector.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - */ -package edu.uci.ics.jung.visualization.transform.shape; - -import java.awt.Rectangle; -import java.awt.geom.Line2D; -import java.awt.geom.Point2D; -import java.util.HashSet; -import java.util.Set; - -public class Intersector { - - protected Rectangle rectangle; - Line2D line; - Set points = new HashSet(); - /** - * @param rectangle - */ - public Intersector(Rectangle rectangle) { - this.rectangle = rectangle; - } - /** - * @param rectangle - * @param line - */ - public Intersector(Rectangle rectangle, Line2D line) { - this.rectangle = rectangle; - intersectLine(line); - } - - public void intersectLine(Line2D line) { - this.line = line; - points.clear(); - float rx0 = (float) rectangle.getMinX(); - float ry0 = (float) rectangle.getMinY(); - float rx1 = (float) rectangle.getMaxX(); - float ry1 = (float) rectangle.getMaxY(); - - float x1 = (float) line.getX1(); - float y1 = (float) line.getY1(); - float x2 = (float) line.getX2(); - float y2 = (float) line.getY2(); - - float dy = y2 - y1; - float dx = x2 - x1; - - if(dx != 0) { - float m = dy/dx; - float b = y1 - m*x1; - - // base of rect where y == ry0 - float x = (ry0 - b) / m; - - if(rx0 <= x && x <= rx1) { - points.add(new Point2D.Float(x, ry0)); - } - - // top where y == ry1 - x = (ry1 - b) / m; - if(rx0 <= x && x <= rx1) { - points.add(new Point2D.Float(x, ry1)); - } - - // left side, where x == rx0 - float y = m * rx0 + b; - if(ry0 <= y && y <= ry1) { - points.add(new Point2D.Float(rx0, y)); - } - - - // right side, where x == rx1 - y = m * rx1 + b; - if(ry0 <= y && y <= ry1) { - points.add(new Point2D.Float(rx1, y)); - } - - } else { - - // base, where y == ry0 - float x = x1; - if(rx0 <= x && x <= rx1) { - points.add(new Point2D.Float(x, ry0)); - } - - // top, where y == ry1 - x = x2; - if(rx0 <= x && x <= rx1) { - points.add(new Point2D.Float(x, ry1)); - } - } - } - public Line2D getLine() { - return line; - } - public Set getPoints() { - return points; - } - public Rectangle getRectangle() { - return rectangle; - } - - public String toString() { - return "Rectangle: "+rectangle+", points:"+points; - } - - public static void main(String[] args) { - Rectangle rectangle = new Rectangle(0,0,10,10); - Line2D line = new Line2D.Float(4,4,5,5); - System.err.println(""+new Intersector(rectangle, line)); - System.err.println(""+new Intersector(rectangle, new Line2D.Float(9,11,11,9))); - System.err.println(""+new Intersector(rectangle, new Line2D.Float(1,1,3,2))); - System.err.println(""+new Intersector(rectangle, new Line2D.Float(4,6,6,4))); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/MagnifyIconGraphics.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/MagnifyIconGraphics.java deleted file mode 100644 index 807165f1..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/MagnifyIconGraphics.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Jul 11, 2005 - */ - -package edu.uci.ics.jung.visualization.transform.shape; - -import java.awt.Component; -import java.awt.Graphics2D; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Area; -import java.awt.geom.Rectangle2D; - -import javax.swing.Icon; - -import edu.uci.ics.jung.visualization.transform.BidirectionalTransformer; - - -/** - * Subclassed to apply a magnification transform to an icon. - * - * @author Tom Nelson - * - * - */ -public class MagnifyIconGraphics extends TransformingFlatnessGraphics { - - public MagnifyIconGraphics(BidirectionalTransformer transformer) { - this(transformer, null); - } - - public MagnifyIconGraphics(BidirectionalTransformer transformer, Graphics2D delegate) { - super(transformer, delegate); - } - - public void draw(Icon icon, Component c, Shape clip, int x, int y) { - - if(transformer instanceof MagnifyShapeTransformer) { - MagnifyShapeTransformer mst = (MagnifyShapeTransformer)transformer; - int w = icon.getIconWidth(); - int h = icon.getIconHeight(); - Rectangle2D r = new Rectangle2D.Double(x-w/2,y-h/2,w,h); - Shape lens = mst.getLensShape(); - if(lens.intersects(r)) { - // magnify the whole icon - Rectangle2D s = mst.magnify(r).getBounds2D(); - if(lens.intersects(s)) { - clip = mst.transform(clip); - double sx = s.getWidth()/r.getWidth(); - double sy = s.getHeight()/r.getHeight(); - - AffineTransform old = delegate.getTransform(); - AffineTransform xform = new AffineTransform(old); - xform.translate(s.getMinX(), s.getMinY()); - xform.scale(sx, sy); - xform.translate(-s.getMinX(), -s.getMinY()); - Shape oldClip = delegate.getClip(); - delegate.clip(clip); - delegate.setTransform(xform); - icon.paintIcon(c, delegate, (int)s.getMinX(), (int)s.getMinY()); - delegate.setTransform(old); - delegate.setClip(oldClip); - } else { - // clip out the lens so the small icon doesn't get drawn - // inside of it - Shape oldClip = delegate.getClip(); - Area viewBounds = new Area(oldClip); - viewBounds.subtract(new Area(lens)); - delegate.setClip(viewBounds); - icon.paintIcon(c, delegate, (int)r.getMinX(),(int)r.getMinY()); - delegate.setClip(oldClip); - } - - } else { - icon.paintIcon(c, delegate, (int)r.getMinX(),(int)r.getMinY()); - } - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/MagnifyImageLensSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/MagnifyImageLensSupport.java deleted file mode 100644 index 806b2082..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/MagnifyImageLensSupport.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - * Created on Jul 21, 2005 - */ - -package edu.uci.ics.jung.visualization.transform.shape; - -import java.awt.Dimension; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.control.ModalGraphMouse; -import edu.uci.ics.jung.visualization.control.ModalLensGraphMouse; -import edu.uci.ics.jung.visualization.picking.ViewLensShapePickSupport; -import edu.uci.ics.jung.visualization.renderers.BasicRenderer; -import edu.uci.ics.jung.visualization.renderers.Renderer; -import edu.uci.ics.jung.visualization.renderers.ReshapingEdgeRenderer; -import edu.uci.ics.jung.visualization.transform.AbstractLensSupport; -import edu.uci.ics.jung.visualization.transform.LensTransformer; -/** - * Changes various visualization settings to activate or deactivate an - * examining lens for a jung graph application. - * - * @author Tom Nelson - */ -public class MagnifyImageLensSupport extends AbstractLensSupport { - - protected RenderContext renderContext; - protected GraphicsDecorator lensGraphicsDecorator; - protected GraphicsDecorator savedGraphicsDecorator; - protected Renderer renderer; - protected Renderer transformingRenderer; - protected GraphElementAccessor pickSupport; - protected Renderer.Edge savedEdgeRenderer; - protected Renderer.Edge reshapingEdgeRenderer; - - static final String instructions = - "
<html><center>Mouse-Drag the Lens center to move it<p>"+ - "Mouse-Drag the Lens edge to resize it<p>"+ - "Ctrl+MouseWheel to change magnification</center></html>
          "; - - public MagnifyImageLensSupport(VisualizationViewer vv) { - this(vv, new MagnifyShapeTransformer(vv), - new ModalLensGraphMouse()); - } - /** - * create the base class, setting common members and creating - * a custom GraphMouse - * @param vv the VisualizationViewer to work on - */ - public MagnifyImageLensSupport(VisualizationViewer vv, LensTransformer lensTransformer, - ModalGraphMouse lensGraphMouse) { - super(vv, lensGraphMouse); - this.renderContext = vv.getRenderContext(); - this.pickSupport = renderContext.getPickSupport(); - this.renderer = vv.getRenderer(); - this.transformingRenderer = new BasicRenderer(); - this.savedGraphicsDecorator = renderContext.getGraphicsContext(); - this.lensTransformer = lensTransformer; - this.savedEdgeRenderer = vv.getRenderer().getEdgeRenderer(); - this.reshapingEdgeRenderer = new ReshapingEdgeRenderer(); - this.reshapingEdgeRenderer.setEdgeArrowRenderingSupport(savedEdgeRenderer.getEdgeArrowRenderingSupport()); - - Dimension d = vv.getSize(); - if(d.width == 0 || d.height == 0) { - d = vv.getPreferredSize(); - } - lensTransformer.setViewRadius(d.width/5); - this.lensGraphicsDecorator = new MagnifyIconGraphics(lensTransformer); - } - - public void activate() { - lensTransformer.setDelegate(vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW)); - if(lens == null) { - lens = new Lens(lensTransformer); - } - if(lensControls == null) { - lensControls = new LensControls(lensTransformer); - } - renderContext.setPickSupport(new ViewLensShapePickSupport(vv)); - lensTransformer.setDelegate(vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW)); - vv.getRenderContext().getMultiLayerTransformer().setTransformer(Layer.VIEW, lensTransformer); - this.renderContext.setGraphicsContext(lensGraphicsDecorator); - vv.getRenderer().setEdgeRenderer(reshapingEdgeRenderer); - vv.addPreRenderPaintable(lens); - vv.addPostRenderPaintable(lensControls); - vv.setGraphMouse(lensGraphMouse); - vv.setToolTipText(instructions); - vv.repaint(); - } - - public void deactivate() { - renderContext.setPickSupport(pickSupport); - vv.getRenderContext().getMultiLayerTransformer().setTransformer(Layer.VIEW, lensTransformer.getDelegate()); - vv.removePreRenderPaintable(lens); - vv.removePostRenderPaintable(lensControls); - this.renderContext.setGraphicsContext(savedGraphicsDecorator); - vv.getRenderer().setEdgeRenderer(savedEdgeRenderer); - vv.setToolTipText(defaultToolTipText); - vv.setGraphMouse(graphMouse); - vv.repaint(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/MagnifyShapeTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/MagnifyShapeTransformer.java deleted file mode 100644 index 01b8516f..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/MagnifyShapeTransformer.java +++ /dev/null @@ -1,272 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
- * - */ -package edu.uci.ics.jung.visualization.transform.shape; - -import java.awt.Component; -import java.awt.Shape; -import java.awt.geom.GeneralPath; -import java.awt.geom.PathIterator; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.algorithms.layout.PolarPoint; -import edu.uci.ics.jung.visualization.transform.MagnifyTransformer; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; - -/** - * MagnifyShapeTransformer extends MagnifyTransformer and - * adds implementations for methods in ShapeTransformer. - * It modifies the shapes (Vertex, Edge, and Arrowheads) so that - * they are enlarged by the magnify transformation - * - * @author Tom Nelson - * - * - */ -public class MagnifyShapeTransformer extends MagnifyTransformer - implements ShapeFlatnessTransformer { - - /** - * Create an instance, setting values from the passed component - * and registering to listen for size changes on the component. - */ - public MagnifyShapeTransformer(Component component) { - this(component, null); - } - - /** - * Create an instance, setting values from the passed component - * and registering to listen for size changes on the component, - * with a possibly shared transform delegate. - */ - public MagnifyShapeTransformer(Component component, MutableTransformer delegate) { - super(component, delegate); - } - - /** - * Transform the supplied shape with the overridden transform - * method so that the shape is distorted by the magnify - * transform. - * @param shape a shape to transform - * @return a GeneralPath for the transformed shape - */ - public Shape transform(Shape shape) { - return transform(shape, 0); - } - public Shape transform(Shape shape, float flatness) { - GeneralPath newPath = new GeneralPath(); - float[] coords = new float[6]; - PathIterator iterator = null; - if(flatness == 0) { - iterator = shape.getPathIterator(null); - } else { - iterator = shape.getPathIterator(null, flatness); - } - for( ; - iterator.isDone() == false; - iterator.next()) { - int type = iterator.currentSegment(coords); - switch(type) { - case PathIterator.SEG_MOVETO: - Point2D p = _transform(new Point2D.Float(coords[0], coords[1])); - newPath.moveTo((float)p.getX(), (float)p.getY()); - break; - - case PathIterator.SEG_LINETO: - p = _transform(new Point2D.Float(coords[0], coords[1])); - newPath.lineTo((float)p.getX(), (float) p.getY()); - break; - - case PathIterator.SEG_QUADTO: - p = _transform(new Point2D.Float(coords[0], coords[1])); - Point2D q = _transform(new Point2D.Float(coords[2], coords[3])); - newPath.quadTo((float)p.getX(), (float)p.getY(), (float)q.getX(), (float)q.getY()); - break; - - case PathIterator.SEG_CUBICTO: - p = _transform(new Point2D.Float(coords[0], coords[1])); - q = _transform(new Point2D.Float(coords[2], coords[3])); - Point2D r = _transform(new Point2D.Float(coords[4], coords[5])); - newPath.curveTo((float)p.getX(), (float)p.getY(), - (float)q.getX(), (float)q.getY(), - (float)r.getX(), (float)r.getY()); - break; - - case PathIterator.SEG_CLOSE: - newPath.closePath(); - break; - - } - } - return newPath; - } - - public Shape inverseTransform(Shape shape) { - GeneralPath newPath = new GeneralPath(); - float[] coords = new float[6]; - for(PathIterator iterator=shape.getPathIterator(null); - iterator.isDone() == false; - iterator.next()) { - int type = iterator.currentSegment(coords); - switch(type) { - case PathIterator.SEG_MOVETO: - Point2D p = _inverseTransform(new Point2D.Float(coords[0], coords[1])); - newPath.moveTo((float)p.getX(), (float)p.getY()); - break; 
- - case PathIterator.SEG_LINETO: - p = _inverseTransform(new Point2D.Float(coords[0], coords[1])); - newPath.lineTo((float)p.getX(), (float) p.getY()); - break; - - case PathIterator.SEG_QUADTO: - p = _inverseTransform(new Point2D.Float(coords[0], coords[1])); - Point2D q = _inverseTransform(new Point2D.Float(coords[2], coords[3])); - newPath.quadTo((float)p.getX(), (float)p.getY(), (float)q.getX(), (float)q.getY()); - break; - - case PathIterator.SEG_CUBICTO: - p = _inverseTransform(new Point2D.Float(coords[0], coords[1])); - q = _inverseTransform(new Point2D.Float(coords[2], coords[3])); - Point2D r = _inverseTransform(new Point2D.Float(coords[4], coords[5])); - newPath.curveTo((float)p.getX(), (float)p.getY(), - (float)q.getX(), (float)q.getY(), - (float)r.getX(), (float)r.getY()); - break; - - case PathIterator.SEG_CLOSE: - newPath.closePath(); - break; - - } - } - return newPath; - } - /** - * - */ - private Point2D _transform(Point2D graphPoint) { - if(graphPoint == null) return null; - Point2D viewCenter = getViewCenter(); - double viewRadius = getViewRadius(); - double ratio = getRatio(); - // transform the point from the graph to the view - Point2D viewPoint = graphPoint; -// delegate.transform(graphPoint); - // calculate point from center - double dx = viewPoint.getX() - viewCenter.getX(); - double dy = viewPoint.getY() - viewCenter.getY(); - // factor out ellipse - dx *= ratio; - Point2D pointFromCenter = new Point2D.Double(dx, dy); - - PolarPoint polar = PolarPoint.cartesianToPolar(pointFromCenter); - double theta = polar.getTheta(); - double radius = polar.getRadius(); - if(radius > viewRadius) return viewPoint; - - double mag = magnification; - radius *= mag; - - radius = Math.min(radius, viewRadius); - Point2D projectedPoint = PolarPoint.polarToCartesian(theta, radius); - projectedPoint.setLocation(projectedPoint.getX()/ratio, projectedPoint.getY()); - Point2D translatedBack = new Point2D.Double(projectedPoint.getX()+viewCenter.getX(), - projectedPoint.getY()+viewCenter.getY()); - return translatedBack; - } - - /** - * override base class to un-project the fisheye effect - */ - private Point2D _inverseTransform(Point2D viewPoint) { - - viewPoint = delegate.inverseTransform(viewPoint); - Point2D viewCenter = getViewCenter(); - double viewRadius = getViewRadius(); - double ratio = getRatio(); - double dx = viewPoint.getX() - viewCenter.getX(); - double dy = viewPoint.getY() - viewCenter.getY(); - // factor out ellipse - dx *= ratio; - - Point2D pointFromCenter = new Point2D.Double(dx, dy); - - PolarPoint polar = PolarPoint.cartesianToPolar(pointFromCenter); - - double radius = polar.getRadius(); - if(radius > viewRadius) return viewPoint; - //delegate.inverseTransform(viewPoint); - - double mag = magnification; - radius /= mag; - polar.setRadius(radius); - Point2D projectedPoint = PolarPoint.polarToCartesian(polar); - projectedPoint.setLocation(projectedPoint.getX()/ratio, projectedPoint.getY()); - Point2D translatedBack = new Point2D.Double(projectedPoint.getX()+viewCenter.getX(), - projectedPoint.getY()+viewCenter.getY()); -// return delegate.inverseTransform(translatedBack); - return translatedBack; - } - /** - * magnify the shape, without considering the Lens - * @param shape - * @return - */ - public Shape magnify(Shape shape) { - return magnify(shape, 0); - } - public Shape magnify(Shape shape, float flatness) { - GeneralPath newPath = new GeneralPath(); - float[] coords = new float[6]; - PathIterator iterator = null; - if(flatness == 0) { - iterator = 
shape.getPathIterator(null); - } else { - iterator = shape.getPathIterator(null, flatness); - } - for( ; - iterator.isDone() == false; - iterator.next()) { - int type = iterator.currentSegment(coords); - switch(type) { - case PathIterator.SEG_MOVETO: - Point2D p = magnify(new Point2D.Float(coords[0], coords[1])); - newPath.moveTo((float)p.getX(), (float)p.getY()); - break; - - case PathIterator.SEG_LINETO: - p = magnify(new Point2D.Float(coords[0], coords[1])); - newPath.lineTo((float)p.getX(), (float) p.getY()); - break; - - case PathIterator.SEG_QUADTO: - p = magnify(new Point2D.Float(coords[0], coords[1])); - Point2D q = magnify(new Point2D.Float(coords[2], coords[3])); - newPath.quadTo((float)p.getX(), (float)p.getY(), (float)q.getX(), (float)q.getY()); - break; - - case PathIterator.SEG_CUBICTO: - p = magnify(new Point2D.Float(coords[0], coords[1])); - q = magnify(new Point2D.Float(coords[2], coords[3])); - Point2D r = magnify(new Point2D.Float(coords[4], coords[5])); - newPath.curveTo((float)p.getX(), (float)p.getY(), - (float)q.getX(), (float)q.getY(), - (float)r.getX(), (float)r.getY()); - break; - - case PathIterator.SEG_CLOSE: - newPath.closePath(); - break; - - } - } - return newPath; - } - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/ShapeFlatnessTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/ShapeFlatnessTransformer.java deleted file mode 100644 index 3be15f17..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/ShapeFlatnessTransformer.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Apr 16, 2005 - */ - -package edu.uci.ics.jung.visualization.transform.shape; - -import java.awt.Shape; - -/** - * Provides methods to map points from one coordinate system to - * another: graph to screen and screen to graph. - * The flatness parameter is used to break a curved shape into - * smaller segments in order to perform a more detailed - * transformation. - * - * @author Tom Nelson - */ -public interface ShapeFlatnessTransformer extends ShapeTransformer { - - /** - * map a shape from graph coordinate system to the - * screen coordinate system - * @param shape - * @param flatness used to break the supplied shape into segments - * @return a GeneralPath (Shape) representing the screen points of the shape - */ - Shape transform(Shape shape, float flatness); - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/ShapeTransformer.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/ShapeTransformer.java deleted file mode 100644 index fa2282dc..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/ShapeTransformer.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
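The core of the lens is the point mapping in _transform/_inverseTransform: translate the point relative to the lens centre, convert to polar coordinates, multiply (or, for the inverse, divide) the radius by the magnification, clamp it to the lens radius, and convert back. Below is a simplified, standalone sketch assuming a circular lens; the ellipse ratio handling and the delegate transform of the original are omitted.

```java
import java.awt.geom.Point2D;

public class MagnifyPointSketch {

    /** Forward magnify mapping, in the spirit of _transform above (circular lens, ratio = 1). */
    static Point2D magnify(Point2D p, Point2D center, double viewRadius, double magnification) {
        double dx = p.getX() - center.getX();
        double dy = p.getY() - center.getY();
        double radius = Math.hypot(dx, dy);
        if (radius > viewRadius) return p;                       // outside the lens: unchanged
        double theta = Math.atan2(dy, dx);
        radius = Math.min(radius * magnification, viewRadius);   // scale, then clamp to the lens
        return new Point2D.Double(center.getX() + radius * Math.cos(theta),
                                  center.getY() + radius * Math.sin(theta));
    }

    /** Inverse mapping: divide the radius instead of multiplying. */
    static Point2D unmagnify(Point2D p, Point2D center, double viewRadius, double magnification) {
        double dx = p.getX() - center.getX();
        double dy = p.getY() - center.getY();
        double radius = Math.hypot(dx, dy);
        if (radius > viewRadius) return p;
        double theta = Math.atan2(dy, dx);
        radius = radius / magnification;
        return new Point2D.Double(center.getX() + radius * Math.cos(theta),
                                  center.getY() + radius * Math.sin(theta));
    }

    public static void main(String[] args) {
        Point2D center = new Point2D.Double(0, 0);
        Point2D p = new Point2D.Double(30, 40);                  // radius 50 from the centre
        Point2D m = magnify(p, center, 200, 2.0);                // radius becomes 100
        System.out.println(m + " -> " + unmagnify(m, center, 200, 2.0));
    }
}
```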
- * - * Created on Apr 16, 2005 - */ - -package edu.uci.ics.jung.visualization.transform.shape; - -import java.awt.Shape; - -import edu.uci.ics.jung.visualization.transform.BidirectionalTransformer; - -/** - * Provides methods to map points from one coordinate system to - * another: graph to screen and screen to graph. - * - * @author Tom Nelson - */ -public interface ShapeTransformer extends BidirectionalTransformer { - - /** - * map a shape from graph coordinate system to the - * screen coordinate system - * @param shape - * @return a GeneralPath (Shape) representing the screen points of the shape - */ - Shape transform(Shape shape); - - Shape inverseTransform(Shape shape); -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/TransformingFlatnessGraphics.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/TransformingFlatnessGraphics.java deleted file mode 100644 index 7eceb78c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/TransformingFlatnessGraphics.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 11, 2005 - */ - -package edu.uci.ics.jung.visualization.transform.shape; - -import java.awt.Graphics2D; -import java.awt.Shape; - -import edu.uci.ics.jung.visualization.transform.BidirectionalTransformer; -import edu.uci.ics.jung.visualization.transform.HyperbolicTransformer; - - -/** - * subclassed to pass certain operations thru the transformer - * before the base class method is applied - * This is useful when you want to apply non-affine transformations - * to the Graphics2D used to draw elements of the graph. - * - * @author Tom Nelson - * - * - */ -public class TransformingFlatnessGraphics extends TransformingGraphics { - - float flatness = 0; - - public TransformingFlatnessGraphics(BidirectionalTransformer transformer) { - this(transformer, null); - } - - public TransformingFlatnessGraphics(BidirectionalTransformer transformer, Graphics2D delegate) { - super(transformer, delegate); - } - - public void draw(Shape s, float flatness) { - Shape shape = null; - if(transformer instanceof ShapeFlatnessTransformer) { - shape = ((ShapeFlatnessTransformer)transformer).transform(s, flatness); - } else { - shape = ((ShapeTransformer)transformer).transform(s); - } - delegate.draw(shape); - - } - - public void fill(Shape s, float flatness) { - Shape shape = null; - if(transformer instanceof HyperbolicTransformer) { - shape = ((HyperbolicShapeTransformer)transformer).transform(s, flatness); - } else { - shape = ((ShapeTransformer)transformer).transform(s); - } - delegate.fill(shape); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/TransformingGraphics.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/TransformingGraphics.java deleted file mode 100644 index d4652502..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/TransformingGraphics.java +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
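ShapeFlatnessTransformer exists because a non-affine transform cannot map Bezier control points faithfully; passing a flatness value to Shape.getPathIterator(null, flatness) pre-flattens curves into short line segments, which TransformingFlatnessGraphics takes advantage of whenever the installed transformer supports it. A small demonstration of the effect (the flatness value is arbitrary):

```java
import java.awt.geom.Ellipse2D;
import java.awt.geom.PathIterator;

public class FlatnessSketch {

    static void countSegments(PathIterator it, String label) {
        int curves = 0, lines = 0;
        float[] c = new float[6];
        for (; !it.isDone(); it.next()) {
            int type = it.currentSegment(c);
            if (type == PathIterator.SEG_QUADTO || type == PathIterator.SEG_CUBICTO) curves++;
            if (type == PathIterator.SEG_LINETO) lines++;
        }
        System.out.println(label + ": " + curves + " curve segments, " + lines + " line segments");
    }

    public static void main(String[] args) {
        Ellipse2D circle = new Ellipse2D.Double(0, 0, 100, 100);
        countSegments(circle.getPathIterator(null), "exact");          // cubic curves
        countSegments(circle.getPathIterator(null, 1.0), "flattened"); // line segments only
    }
}
```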
- * - * Created on Jul 11, 2005 - */ - -package edu.uci.ics.jung.visualization.transform.shape; - -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.Image; -import java.awt.Rectangle; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Rectangle2D; -import java.awt.image.ImageObserver; - -import edu.uci.ics.jung.visualization.transform.BidirectionalTransformer; - - -/** - * subclassed to pass certain operations thru the transformer - * before the base class method is applied - * This is useful when you want to apply non-affine transformations - * to the Graphics2D used to draw elements of the graph. - * - * @author Tom Nelson - * - * - */ -public class TransformingGraphics extends GraphicsDecorator { - - /** - * the transformer to apply - */ - protected BidirectionalTransformer transformer; - - public TransformingGraphics(BidirectionalTransformer transformer) { - this(transformer, null); - } - - public TransformingGraphics(BidirectionalTransformer transformer, Graphics2D delegate) { - super(delegate); - this.transformer = transformer; - } - - /** - * @return Returns the transformer. - */ - public BidirectionalTransformer getTransformer() { - return transformer; - } - - /** - * @param transformer The transformer to set. - */ - public void setTransformer(BidirectionalTransformer transformer) { - this.transformer = transformer; - } - - /** - * transform the shape before letting the delegate draw it - */ - public void draw(Shape s) { - Shape shape = ((ShapeTransformer)transformer).transform(s); - delegate.draw(shape); - } - - public void draw(Shape s, float flatness) { - Shape shape = null; - if(transformer instanceof ShapeFlatnessTransformer) { - shape = ((ShapeFlatnessTransformer)transformer).transform(s, flatness); - } else { - shape = ((ShapeTransformer)transformer).transform(s); - } - delegate.draw(shape); - - } - - /** - * transform the shape before letting the delegate fill it - */ - public void fill(Shape s) { - Shape shape = ((ShapeTransformer)transformer).transform(s); - delegate.fill(shape); - } - - public void fill(Shape s, float flatness) { - Shape shape = null; - if(transformer instanceof ShapeFlatnessTransformer) { - shape = ((ShapeFlatnessTransformer)transformer).transform(s, flatness); - } else { - shape = ((ShapeTransformer)transformer).transform(s); - } - delegate.fill(shape); - } - - public boolean drawImage(Image img, int x, int y, ImageObserver observer) { - Image image = null; - if(transformer instanceof ShapeFlatnessTransformer) { - Rectangle2D r = new Rectangle2D.Double(x,y,img.getWidth(observer),img.getHeight(observer)); - Rectangle2D s = ((ShapeTransformer)transformer).transform(r).getBounds2D(); - image = img.getScaledInstance((int)s.getWidth(), (int)s.getHeight(), Image.SCALE_SMOOTH); - x = (int) s.getMinX(); - y = (int) s.getMinY(); - } else { - image = img; - } - return delegate.drawImage(image, x, y, observer); - } - - public boolean drawImage(Image img, AffineTransform at, ImageObserver observer) { - Image image = null; - int x = (int)at.getTranslateX(); - int y = (int)at.getTranslateY(); - if(transformer instanceof ShapeFlatnessTransformer) { - Rectangle2D r = new Rectangle2D.Double(x,y,img.getWidth(observer),img.getHeight(observer)); - Rectangle2D s = ((ShapeTransformer)transformer).transform(r).getBounds2D(); - image = img.getScaledInstance((int)s.getWidth(), (int)s.getHeight(), Image.SCALE_SMOOTH); - x = (int) s.getMinX(); - y = (int) s.getMinY(); - at.setToTranslation(s.getMinX(), s.getMinY()); - } 
else { - image = img; - } - return delegate.drawImage(image, at, observer); - } - - /** - * transform the shape before letting the delegate apply 'hit' - * with it - */ - public boolean hit(Rectangle rect, Shape s, boolean onStroke) { - Shape shape = ((ShapeTransformer)transformer).transform(s); - return delegate.hit(rect, shape, onStroke); - } - - public Graphics create() { - return delegate.create(); - } - - public void dispose() { - delegate.dispose(); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/ViewLensSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/ViewLensSupport.java deleted file mode 100644 index fa99b594..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/ViewLensSupport.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - */ -package edu.uci.ics.jung.visualization.transform.shape; - -import java.awt.Dimension; - -import edu.uci.ics.jung.algorithms.layout.GraphElementAccessor; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.VisualizationViewer; -import edu.uci.ics.jung.visualization.control.ModalGraphMouse; -import edu.uci.ics.jung.visualization.picking.ViewLensShapePickSupport; -import edu.uci.ics.jung.visualization.renderers.Renderer; -import edu.uci.ics.jung.visualization.renderers.ReshapingEdgeRenderer; -import edu.uci.ics.jung.visualization.transform.AbstractLensSupport; -import edu.uci.ics.jung.visualization.transform.LensSupport; -import edu.uci.ics.jung.visualization.transform.LensTransformer; - -/** - * Uses a LensTransformer to use in the view - * transform. This one will distort Vertex shapes. 
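TransformingGraphics is a Graphics2D decorator: draw and fill first push the shape through the installed ShapeTransformer and only then hand the result to the wrapped Graphics2D, while drawImage approximates by transforming the image's bounding box and rescaling the image to the mapped bounds. A stripped-down sketch of the same idea, with a plain AffineTransform standing in for the possibly non-affine transformer:

```java
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Shape;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import java.awt.image.ImageObserver;

/** Minimal decorator in the spirit of TransformingGraphics; an affine transform
 *  stands in for the possibly non-affine ShapeTransformer. */
public class TransformingGraphicsSketch {
    private final Graphics2D delegate;
    private final AffineTransform transformer;

    public TransformingGraphicsSketch(Graphics2D delegate, AffineTransform transformer) {
        this.delegate = delegate;
        this.transformer = transformer;
    }

    /** Shapes are transformed first, then handed to the wrapped Graphics2D. */
    public void draw(Shape s) {
        delegate.draw(transformer.createTransformedShape(s));
    }

    public void fill(Shape s) {
        delegate.fill(transformer.createTransformedShape(s));
    }

    /** Images cannot be warped point by point here, so only the bounding box is
     *  transformed and the image is rescaled to the mapped bounds. */
    public boolean drawImage(Image img, int x, int y, ImageObserver observer) {
        Rectangle2D bounds = new Rectangle2D.Double(x, y, img.getWidth(observer), img.getHeight(observer));
        Rectangle2D mapped = transformer.createTransformedShape(bounds).getBounds2D();
        Image scaled = img.getScaledInstance((int) mapped.getWidth(), (int) mapped.getHeight(),
                                             Image.SCALE_SMOOTH);
        return delegate.drawImage(scaled, (int) mapped.getMinX(), (int) mapped.getMinY(), observer);
    }
}
```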
- * - * @author Tom Nelson - * - * - */ -public class ViewLensSupport extends AbstractLensSupport - implements LensSupport { - - protected RenderContext renderContext; - protected GraphicsDecorator lensGraphicsDecorator; - protected GraphicsDecorator savedGraphicsDecorator; - protected GraphElementAccessor pickSupport; - protected Renderer.Edge savedEdgeRenderer; - protected Renderer.Edge reshapingEdgeRenderer; - - public ViewLensSupport(VisualizationViewer vv, - LensTransformer lensTransformer, - ModalGraphMouse lensGraphMouse) { - super(vv, lensGraphMouse); - this.renderContext = vv.getRenderContext(); - this.pickSupport = renderContext.getPickSupport(); - this.savedGraphicsDecorator = renderContext.getGraphicsContext(); - this.lensTransformer = lensTransformer; - Dimension d = vv.getSize(); - lensTransformer.setViewRadius(d.width/5); - this.lensGraphicsDecorator = new TransformingFlatnessGraphics(lensTransformer); - this.savedEdgeRenderer = vv.getRenderer().getEdgeRenderer(); - this.reshapingEdgeRenderer = new ReshapingEdgeRenderer(); - this.reshapingEdgeRenderer.setEdgeArrowRenderingSupport(savedEdgeRenderer.getEdgeArrowRenderingSupport()); - - } - public void activate() { - lensTransformer.setDelegate(vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW)); - if(lens == null) { - lens = new Lens(lensTransformer); - } - if(lensControls == null) { - lensControls = new LensControls(lensTransformer); - } - renderContext.setPickSupport(new ViewLensShapePickSupport(vv)); - lensTransformer.setDelegate(vv.getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW)); - vv.getRenderContext().getMultiLayerTransformer().setTransformer(Layer.VIEW, lensTransformer); - this.renderContext.setGraphicsContext(lensGraphicsDecorator); - vv.getRenderer().setEdgeRenderer(reshapingEdgeRenderer); - vv.prependPreRenderPaintable(lens); - vv.addPostRenderPaintable(lensControls); - vv.setGraphMouse(lensGraphMouse); - vv.setToolTipText(instructions); - vv.repaint(); - } - - public void deactivate() { -// savedViewTransformer.setTransform(lensTransformer.getDelegate().getTransform()); -// vv.setViewTransformer(savedViewTransformer); - renderContext.setPickSupport(pickSupport); - vv.getRenderContext().getMultiLayerTransformer().setTransformer(Layer.VIEW, lensTransformer.getDelegate()); - vv.removePreRenderPaintable(lens); - vv.removePostRenderPaintable(lensControls); - this.renderContext.setGraphicsContext(savedGraphicsDecorator); - vv.setRenderContext(renderContext); - vv.setToolTipText(defaultToolTipText); - vv.setGraphMouse(graphMouse); - vv.getRenderer().setEdgeRenderer(savedEdgeRenderer); - vv.repaint(); - } -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/package.html b/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/package.html deleted file mode 100644 index 94807ee5..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/transform/shape/package.html +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - -
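ViewLensSupport.activate() swaps the VIEW-layer transformer, pick support, graphics decorator and edge renderer in around the lens; deactivate() restores the saved originals. Wiring it against a viewer typically looks like the sketch below; the generic parameters and ModalLensGraphMouse are assumptions about the surrounding JUNG 2.x API rather than something this diff shows.

```java
import edu.uci.ics.jung.visualization.VisualizationViewer;
import edu.uci.ics.jung.visualization.control.ModalLensGraphMouse;
import edu.uci.ics.jung.visualization.transform.LensSupport;
import edu.uci.ics.jung.visualization.transform.shape.MagnifyShapeTransformer;
import edu.uci.ics.jung.visualization.transform.shape.ViewLensSupport;

public class LensWiringSketch {

    /** Attaches a magnifying view lens to an existing viewer and switches it on. */
    static LensSupport addMagnifier(VisualizationViewer<String, Integer> vv) {
        LensSupport lens = new ViewLensSupport<String, Integer>(
                vv,
                new MagnifyShapeTransformer(vv),   // distorts the shapes in view space
                new ModalLensGraphMouse());        // assumed mouse plugin, as in the JUNG demos
        lens.activate();                           // swaps transformer, pick support, renderers
        return lens;                               // call deactivate() later to restore the originals
    }
}
```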
          Visualization mechanisms related to transformation of graph element shapes. - - - diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/util/Animator.java b/gui/jung-src/edu/uci/ics/jung/visualization/util/Animator.java deleted file mode 100644 index d39f5b12..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/util/Animator.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - */ -package edu.uci.ics.jung.visualization.util; - -import edu.uci.ics.jung.algorithms.util.IterativeContext; - -/** - * - * - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public class Animator implements Runnable { - - protected IterativeContext process; - protected boolean stop; - protected Thread thread; - - /** - * how long the relaxer thread pauses between iteration loops. - */ - protected long sleepTime = 10L; - - - public Animator(IterativeContext process) { - this(process, 10L); - } - - public Animator(IterativeContext process, long sleepTime) { - this.process = process; - this.sleepTime = sleepTime; - } - - /** - * @return the relaxerThreadSleepTime - */ - public long getSleepTime() { - return sleepTime; - } - - /** - * @param relaxerThreadSleepTime the relaxerThreadSleepTime to set - */ - public void setSleepTime(long sleepTime) { - this.sleepTime = sleepTime; - } - - public void start() { - // in case its running - stop(); - stop = false; - thread = new Thread(this); - thread.setPriority(Thread.MIN_PRIORITY); - thread.start(); - } - - public synchronized void stop() { - stop = true; - } - - public void run() { - while (!process.done() && !stop) { - - process.step(); - - if (stop) - return; - - try { - Thread.sleep(sleepTime); - } catch (InterruptedException ie) { - } - } - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/util/ArrowFactory.java b/gui/jung-src/edu/uci/ics/jung/visualization/util/ArrowFactory.java deleted file mode 100644 index 7049dddc..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/util/ArrowFactory.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Created on Oct 19, 2004 - * - * Copyright (c) 2004, the JUNG Project and the Regents of the University - * of California - * All rights reserved. - * - * This software is open-source under the BSD license; see either - * "license.txt" or - * http://jung.sourceforge.net/license.txt for a description. - */ -package edu.uci.ics.jung.visualization.util; - -import java.awt.geom.GeneralPath; - -/** - * A utility class for creating arrowhead shapes. - * - * @author Joshua O'Madadhain - */ -public class ArrowFactory -{ - /** - * Returns an arrowhead in the shape of a simple isosceles triangle - * with the specified base and height measurements. It is placed - * with the vertical axis along the negative x-axis, with its base - * centered on (0,0). - */ - public static GeneralPath getWedgeArrow(float base, float height) - { - GeneralPath arrow = new GeneralPath(); - arrow.moveTo(0,0); - arrow.lineTo( - height, base/2.0f); - arrow.lineTo( - height, -base/2.0f); - arrow.lineTo( 0, 0 ); - return arrow; - } - - /** - * Returns an arrowhead in the shape of an isosceles triangle - * with an isoceles-triangle notch taken out of the base, - * with the specified base and height measurements. 
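Animator above is a small relaxer thread: it calls step() on an IterativeContext every sleepTime milliseconds until the context reports done() or stop() is requested. A toy IterativeContext, invented for the example, makes the loop concrete:

```java
import edu.uci.ics.jung.algorithms.util.IterativeContext;
import edu.uci.ics.jung.visualization.util.Animator;

public class AnimatorSketch {
    public static void main(String[] args) {
        // Hypothetical iterative process: finished after 100 steps.
        IterativeContext counter = new IterativeContext() {
            int steps = 0;
            public void step() { steps++; }
            public boolean done() { return steps >= 100; }
        };
        Animator animator = new Animator(counter, 5L); // 5 ms pause between steps
        animator.start();                              // runs on a MIN_PRIORITY background thread
        // animator.stop() would end the loop early.
    }
}
```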
It is placed - * with the vertical axis along the negative x-axis, with its base - * centered on (0,0). - */ - public static GeneralPath getNotchedArrow(float base, float height, float notch_height) - { - GeneralPath arrow = new GeneralPath(); - arrow.moveTo(0,0); - arrow.lineTo(-height, base/2.0f); - arrow.lineTo(-(height - notch_height), 0); - arrow.lineTo(-height, -base/2.0f); - arrow.lineTo(0,0); - return arrow; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/util/Caching.java b/gui/jung-src/edu/uci/ics/jung/visualization/util/Caching.java deleted file mode 100644 index 5b4b977e..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/util/Caching.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * - */ -package edu.uci.ics.jung.visualization.util; - -/** - * Interface to provide external controls to an - * implementing class that manages a cache. - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public interface Caching { - - /** - * ititialize resources for a cache - * - */ - void init(); - - /** - * clear cache - * - */ - void clear(); - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/util/ChangeEventSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/util/ChangeEventSupport.java deleted file mode 100644 index eb2a798d..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/util/ChangeEventSupport.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Aug 18, 2005 - */ - -package edu.uci.ics.jung.visualization.util; - -import javax.swing.event.ChangeListener; - -/** - * the implementing class provides support for ChangeEvents. - * - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public interface ChangeEventSupport { - - void addChangeListener(ChangeListener l); - - /** - * Removes a ChangeListener. - * @param l the listener to be removed - */ - void removeChangeListener(ChangeListener l); - - /** - * Returns an array of all the ChangeListeners added - * with addChangeListener(). - * - * @return all of the ChangeListeners added or an empty - * array if no listeners have been added - */ - ChangeListener[] getChangeListeners(); - - void fireStateChanged(); - -} \ No newline at end of file diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/util/DefaultChangeEventSupport.java b/gui/jung-src/edu/uci/ics/jung/visualization/util/DefaultChangeEventSupport.java deleted file mode 100644 index 59d0d369..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/util/DefaultChangeEventSupport.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. 
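ChangeEventSupport is meant to be implemented by delegation: a model holds a DefaultChangeEventSupport (defined next) and forwards the listener methods to it, so views can register for repaints when the model changes. A hypothetical model class illustrating that pattern:

```java
import javax.swing.event.ChangeListener;
import edu.uci.ics.jung.visualization.util.ChangeEventSupport;
import edu.uci.ics.jung.visualization.util.DefaultChangeEventSupport;

/** Hypothetical model that gains ChangeEvent plumbing by delegation. */
public class LensModelSketch implements ChangeEventSupport {
    private final ChangeEventSupport changeSupport = new DefaultChangeEventSupport(this);
    private double magnification = 2.0;

    public void setMagnification(double m) {
        this.magnification = m;
        fireStateChanged();                    // registered views get notified and can repaint
    }

    public double getMagnification() { return magnification; }

    public void addChangeListener(ChangeListener l) { changeSupport.addChangeListener(l); }
    public void removeChangeListener(ChangeListener l) { changeSupport.removeChangeListener(l); }
    public ChangeListener[] getChangeListeners() { return changeSupport.getChangeListeners(); }
    public void fireStateChanged() { changeSupport.fireStateChanged(); }
}
```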
- * - * Created on Aug 18, 2005 - */ - -package edu.uci.ics.jung.visualization.util; - -import javax.swing.event.ChangeEvent; -import javax.swing.event.ChangeListener; -import javax.swing.event.EventListenerList; - -/** - * Basic implementation of ChangeEventSupport, using - * standard jdk classes - * - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public class DefaultChangeEventSupport implements ChangeEventSupport { - - Object eventSource; - /** - * holds the registered listeners - */ - protected EventListenerList listenerList = new EventListenerList(); - - /** - * Only one ChangeEvent is needed - * instance since the - * event's only state is the source property. The source of events - * generated is always "this". - */ - protected transient ChangeEvent changeEvent; - - public DefaultChangeEventSupport(Object eventSource) { - this.eventSource = eventSource; - } - - public void addChangeListener(ChangeListener l) { - listenerList.add(ChangeListener.class, l); - } - - public void removeChangeListener(ChangeListener l) { - listenerList.remove(ChangeListener.class, l); - } - - public ChangeListener[] getChangeListeners() { - return listenerList.getListeners(ChangeListener.class); - } - - /** - * Notifies all listeners that have registered interest for - * notification on this event type. The event instance - * is lazily created. - * The primary listeners will be views that need to be repainted - * because of changes in this model instance - * @see EventListenerList - */ - public void fireStateChanged() { - // Guaranteed to return a non-null array - Object[] listeners = listenerList.getListenerList(); - // Process the listeners last to first, notifying - // those that are interested in this event - for (int i = listeners.length-2; i>=0; i-=2) { - if (listeners[i]==ChangeListener.class) { - // Lazily create the event: - if (changeEvent == null) - changeEvent = new ChangeEvent(eventSource); - ((ChangeListener)listeners[i+1]).stateChanged(changeEvent); - } - } - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/util/GeneralPathAsString.java b/gui/jung-src/edu/uci/ics/jung/visualization/util/GeneralPathAsString.java deleted file mode 100644 index 78dfe428..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/util/GeneralPathAsString.java +++ /dev/null @@ -1,51 +0,0 @@ -package edu.uci.ics.jung.visualization.util; - -import java.awt.geom.GeneralPath; -import java.awt.geom.PathIterator; -import java.awt.geom.Point2D; - -public class GeneralPathAsString { - - public static String toString(GeneralPath newPath) { - StringBuilder sb = new StringBuilder(); - float[] coords = new float[6]; - for(PathIterator iterator=newPath.getPathIterator(null); - iterator.isDone() == false; - iterator.next()) { - int type = iterator.currentSegment(coords); - switch(type) { - case PathIterator.SEG_MOVETO: - Point2D p = new Point2D.Float(coords[0], coords[1]); - sb.append("moveTo "+p+"--"); - break; - - case PathIterator.SEG_LINETO: - p = new Point2D.Float(coords[0], coords[1]); - sb.append("lineTo "+p+"--"); - break; - - case PathIterator.SEG_QUADTO: - p = new Point2D.Float(coords[0], coords[1]); - Point2D q = new Point2D.Float(coords[2], coords[3]); - sb.append("quadTo "+p+" controlled by "+q); - break; - - case PathIterator.SEG_CUBICTO: - p = new Point2D.Float(coords[0], coords[1]); - q = new Point2D.Float(coords[2], coords[3]); - Point2D r = new Point2D.Float(coords[4], coords[5]); - sb.append("cubeTo "+p+" controlled by "+q+","+r); - - break; - - case PathIterator.SEG_CLOSE: - 
newPath.closePath(); - sb.append("close"); - break; - - } - } - return sb.toString(); - } - -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/util/LabelWrapper.java b/gui/jung-src/edu/uci/ics/jung/visualization/util/LabelWrapper.java deleted file mode 100644 index e570d91c..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/util/LabelWrapper.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright (c) 2005, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - */ -package edu.uci.ics.jung.visualization.util; - -import org.apache.commons.collections15.Transformer; - -/** - * A utility to wrap long lines, creating html strings - * with line breaks at a settable max line length - * - * @author Tom Nelson - tomnelson@dev.java.net - * - */ -public class LabelWrapper implements Transformer { - - int lineLength; - public static final String breaker = "
<p>
          "; - - /** - * Create an instance with default line break length = 10 - * - */ - public LabelWrapper() { - this(10); - } - - /** - * Create an instance with passed line break length - * @param lineLength the max length for lines - */ - public LabelWrapper(int lineLength) { - this.lineLength = lineLength; - } - - /** - * call 'wrap' to transform the passed String - */ - public String transform(String str) { - if(str != null) { - return wrap(str); - } else { - return null; - } - } - - /** - * line-wrap the passed String as an html string with - * break Strings inserted. - * - * @param str - * @return - */ - private String wrap(String str) { - StringBuilder buf = new StringBuilder(str); - int len = lineLength; - while(len < buf.length()) { - int idx = buf.lastIndexOf(" ", len); - if(idx != -1) { - buf.replace(idx, idx+1, breaker); - len = idx + breaker.length() +lineLength; - } else { - buf.insert(len, breaker); - len += breaker.length() + lineLength; - } - } - buf.insert(0, ""); - return buf.toString(); - } - - public static void main(String[] args) { - String[] lines = { - "This is a line with many short words that I will break into shorter lines.", - "thisisalinewithnobreakssowhoknowswhereitwillwrap", - "short line" - }; - LabelWrapper w = new LabelWrapper(10); - for(int i=0; i implements EdgeIndexFunction { - - protected Map edge_index = new HashMap(); - protected Predicate predicate; - - private PredicatedParallelEdgeIndexFunction() { - } - - public static PredicatedParallelEdgeIndexFunction getInstance() { - return new PredicatedParallelEdgeIndexFunction(); - } - /** - * Returns the index for the specified edge. - * Calculates the indices for e and for all edges parallel - * to e. - */ - public int getIndex(Graph graph, E e) { - - if(predicate.evaluate(e)) { - return 0; - } - Integer index = edge_index.get(e); - if(index == null) { - Pair endpoints = graph.getEndpoints(e); - V u = endpoints.getFirst(); - V v = endpoints.getSecond(); - if(u.equals(v)) { - index = getIndex(graph, e, v); - } else { - index = getIndex(graph, e, u, v); - } - } - return index.intValue(); - } - - protected int getIndex(Graph graph, E e, V v, V u) { - Collection commonEdgeSet = new HashSet(graph.getIncidentEdges(u)); - commonEdgeSet.retainAll(graph.getIncidentEdges(v)); - for(Iterator iterator=commonEdgeSet.iterator(); iterator.hasNext(); ) { - E edge = iterator.next(); - Pair ep = graph.getEndpoints(edge); - V first = ep.getFirst(); - V second = ep.getSecond(); - // remove loops - if(first.equals(second) == true) { - iterator.remove(); - } - // remove edges in opposite direction - if(first.equals(v) == false) { - iterator.remove(); - } - } - int count=0; - for(E other : commonEdgeSet) { - if(e.equals(other) == false) { - edge_index.put(other, count); - count++; - } - } - edge_index.put(e, count); - return count; - } - - protected int getIndex(Graph graph, E e, V v) { - Collection commonEdgeSet = new HashSet(); - for(E another : graph.getIncidentEdges(v)) { - V u = graph.getOpposite(v, another); - if(u.equals(v)) { - commonEdgeSet.add(another); - } - } - int count=0; - for(E other : commonEdgeSet) { - if(e.equals(other) == false) { - edge_index.put(other, count); - count++; - } - } - edge_index.put(e, count); - return count; - } - - /** - * @return the predicate - */ - public Predicate getPredicate() { - return predicate; - } - - /** - * @param predicate the predicate to set - */ - public void setPredicate(Predicate predicate) { - this.predicate = predicate; - } - - /** - * Resets the indices for this 
edge and its parallel edges. - * Should be invoked when an edge parallel to e - * has been added or removed. - * @param e - */ - public void reset(Graph graph, E e) { - Pair endpoints = graph.getEndpoints(e); - getIndex(graph, e, endpoints.getFirst()); - getIndex(graph, e, endpoints.getFirst(), endpoints.getSecond()); - } - - /** - * Clears all edge indices for all edges in all graphs. - * Does not recalculate the indices. - */ - public void reset() - { - edge_index.clear(); - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/util/VertexShapeFactory.java b/gui/jung-src/edu/uci/ics/jung/visualization/util/VertexShapeFactory.java deleted file mode 100644 index 9dd90b43..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/util/VertexShapeFactory.java +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright (c) 2003, the JUNG Project and the Regents of the University of - * California All rights reserved. - * - * This software is open-source under the BSD license; see either "license.txt" - * or http://jung.sourceforge.net/license.txt for a description. - * - * Created on Jul 20, 2004 - */ -package edu.uci.ics.jung.visualization.util; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Ellipse2D; -import java.awt.geom.GeneralPath; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.awt.geom.RoundRectangle2D; - -import org.apache.commons.collections15.Transformer; -import org.apache.commons.collections15.functors.ConstantTransformer; - -/** - * A utility class for generating Shapes for drawing vertices. - * The available shapes include rectangles, rounded rectangles, ellipses, - * regular polygons, and regular stars. The dimensions of the requested - * shapes are defined by the specified vertex size function (specified by - * a Transformer) and vertex aspect ratio function - * (specified by a Transformer) implementations: the width - * of the bounding box of the shape is given by the vertex size, and the - * height is given by the size multiplied by the vertex's aspect ratio. - * - * @author Joshua O'Madadhain - */ -public class VertexShapeFactory -{ - protected Transformer vsf; - protected Transformer varf; - - /** - * Creates a VertexShapeFactory with the specified - * vertex size and aspect ratio functions. - */ - public VertexShapeFactory(Transformer vsf, Transformer varf) - { - this.vsf = vsf; - this.varf = varf; - } - - /** - * Creates a VertexShapeFactory with a constant size of - * 10 and a constant aspect ratio of 1. - */ - @SuppressWarnings("unchecked") - public VertexShapeFactory() - { - this(new ConstantTransformer(10), - new ConstantTransformer(1.0f)); - } - - private static final Rectangle2D theRectangle = new Rectangle2D.Float(); - /** - * Returns a Rectangle2D whose width and - * height are defined by this instance's size and - * aspect ratio functions for this vertex. - */ - public Rectangle2D getRectangle(V v) - { - float width = vsf.transform(v); - float height = width * varf.transform(v); - float h_offset = -(width / 2); - float v_offset = -(height / 2); - theRectangle.setFrame(h_offset, v_offset, width, height); - return theRectangle; - } - - private static final Ellipse2D theEllipse = new Ellipse2D.Float(); - /** - * Returns a Ellipse2D whose width and - * height are defined by this instance's size and - * aspect ratio functions for this vertex. 
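PredicatedParallelEdgeIndexFunction assigns each edge an index among the edges parallel to it (loops and opposite-direction edges are filtered out), except that edges matching the predicate are pinned to index 0; edge shape transformers consult the index to offset parallel edges so they do not overlap. A usage sketch follows; the predicate is invented and setParallelEdgeIndexFunction is assumed from the JUNG 2.x RenderContext API.

```java
import org.apache.commons.collections15.Predicate;
import edu.uci.ics.jung.visualization.VisualizationViewer;
import edu.uci.ics.jung.visualization.util.PredicatedParallelEdgeIndexFunction;

public class ParallelEdgeIndexSketch {

    /** Gives parallel edges distinct indices, except predicate-matched edges which stay at 0. */
    static void install(VisualizationViewer<String, String> vv) {
        PredicatedParallelEdgeIndexFunction<String, String> peif =
                PredicatedParallelEdgeIndexFunction.getInstance();
        peif.setPredicate(new Predicate<String>() {
            public boolean evaluate(String edge) {
                return edge.startsWith("loop");   // hypothetical predicate
            }
        });
        // Assumed JUNG 2.x setter: the render context hands this index function to the
        // edge shape transformer, which curves parallel edges apart by index.
        vv.getRenderContext().setParallelEdgeIndexFunction(peif);
    }
}
```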
- */ - public Ellipse2D getEllipse(V v) - { - theEllipse.setFrame(getRectangle(v)); - return theEllipse; - } - - private static final RoundRectangle2D theRoundRectangle = - new RoundRectangle2D.Float(); - /** - * Returns a RoundRectangle2D whose width and - * height are defined by this instance's size and - * aspect ratio functions for this vertex. The arc size is - * set to be half the minimum of the height and width of the frame. - */ - public RoundRectangle2D getRoundRectangle(V v) - { - Rectangle2D frame = getRectangle(v); - float arc_size = (float)Math.min(frame.getHeight(), frame.getWidth()) / 2; - theRoundRectangle.setRoundRect(frame.getX(), frame.getY(), - frame.getWidth(), frame.getHeight(), arc_size, arc_size); - return theRoundRectangle; - } - - private static final GeneralPath thePolygon = new GeneralPath(); - /** - * Returns a regular num_sides-sided - * Polygon whose bounding - * box's width and height are defined by this instance's size and - * aspect ratio functions for this vertex. - * @param num_sides the number of sides of the polygon; must be >= 3. - */ - public Shape getRegularPolygon(V v, int num_sides) - { - if (num_sides < 3) - throw new IllegalArgumentException("Number of sides must be >= 3"); - Rectangle2D frame = getRectangle(v); - float width = (float)frame.getWidth(); - float height = (float)frame.getHeight(); - - // generate coordinates - double angle = 0; - thePolygon.reset(); - thePolygon.moveTo(0,0); - thePolygon.lineTo(width, 0); - double theta = (2 * Math.PI) / num_sides; - for (int i = 2; i < num_sides; i++) - { - angle -= theta; - float delta_x = (float) (width * Math.cos(angle)); - float delta_y = (float) (width * Math.sin(angle)); - Point2D prev = thePolygon.getCurrentPoint(); - thePolygon.lineTo((float)prev.getX() + delta_x, (float)prev.getY() + delta_y); - } - thePolygon.closePath(); - - // scale polygon to be right size, translate to center at (0,0) - Rectangle2D r = thePolygon.getBounds2D(); - double scale_x = width / r.getWidth(); - double scale_y = height / r.getHeight(); - float translationX = (float) (r.getMinX() + r.getWidth()/2); - float translationY = (float) (r.getMinY() + r.getHeight()/2); - - AffineTransform at = AffineTransform.getScaleInstance(scale_x, scale_y); - at.translate(-translationX, -translationY); - - Shape shape = at.createTransformedShape(thePolygon); - return shape; - } - - /** - * Returns a regular Polygon of num_points - * points whose bounding - * box's width and height are defined by this instance's size and - * aspect ratio functions for this vertex. - * @param num_points the number of points of the polygon; must be >= 5. 
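getRegularPolygon above builds the shape by walking the exterior angle num_sides times, then scales and translates the result so its bounding box matches the vertex size and aspect-ratio functions and is centred on (0,0). A usage sketch follows; the generic parameters are assumptions, since the type parameters were stripped from this copy of the source.

```java
import java.awt.Shape;
import org.apache.commons.collections15.Transformer;
import edu.uci.ics.jung.visualization.util.VertexShapeFactory;

public class VertexShapeSketch {
    public static void main(String[] args) {
        // Size/aspect functions: every vertex 30 px wide, aspect ratio 1 (square bounding box).
        Transformer<String, Integer> size = new Transformer<String, Integer>() {
            public Integer transform(String v) { return 30; }
        };
        Transformer<String, Float> aspect = new Transformer<String, Float>() {
            public Float transform(String v) { return 1.0f; }
        };
        VertexShapeFactory<String> factory = new VertexShapeFactory<String>(size, aspect);

        Shape pentagon = factory.getRegularPolygon("v1", 5);  // 5-sided polygon centred on (0,0)
        Shape star = factory.getRegularStar("v2", 5);         // 5-pointed star
        System.out.println(pentagon.getBounds2D());
        System.out.println(star.getBounds2D());
    }
}
```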
- */ - public Shape getRegularStar(V v, int num_points) - { - if (num_points < 5) - throw new IllegalArgumentException("Number of sides must be >= 5"); - Rectangle2D frame = getRectangle(v); - float width = (float) frame.getWidth(); - float height = (float) frame.getHeight(); - - // generate coordinates - double theta = (2 * Math.PI) / num_points; - double angle = -theta/2; - thePolygon.reset(); - thePolygon.moveTo(0,0); - float delta_x = width * (float)Math.cos(angle); - float delta_y = width * (float)Math.sin(angle); - Point2D prev = thePolygon.getCurrentPoint(); - thePolygon.lineTo((float)prev.getX() + delta_x, (float)prev.getY() + delta_y); - for (int i = 1; i < num_points; i++) - { - angle += theta; - delta_x = width * (float)Math.cos(angle); - delta_y = width * (float)Math.sin(angle); - prev = thePolygon.getCurrentPoint(); - thePolygon.lineTo((float)prev.getX() + delta_x, (float)prev.getY() + delta_y); - angle -= theta*2; - delta_x = width * (float)Math.cos(angle); - delta_y = width * (float)Math.sin(angle); - prev = thePolygon.getCurrentPoint(); - thePolygon.lineTo((float)prev.getX() + delta_x, (float)prev.getY() + delta_y); - } - thePolygon.closePath(); - - // scale polygon to be right size, translate to center at (0,0) - Rectangle2D r = thePolygon.getBounds2D(); - double scale_x = width / r.getWidth(); - double scale_y = height / r.getHeight(); - - float translationX = (float) (r.getMinX() + r.getWidth()/2); - float translationY = (float) (r.getMinY() + r.getHeight()/2); - - AffineTransform at = AffineTransform.getScaleInstance(scale_x, scale_y); - at.translate(-translationX, -translationY); - - Shape shape = at.createTransformedShape(thePolygon); - return shape; - } -} diff --git a/gui/jung-src/edu/uci/ics/jung/visualization/util/package.html b/gui/jung-src/edu/uci/ics/jung/visualization/util/package.html deleted file mode 100644 index 3fea1179..00000000 --- a/gui/jung-src/edu/uci/ics/jung/visualization/util/package.html +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - -
          Utilities for graph visualization. - - - diff --git a/gui/lib/collections-generic-4.01.jar b/gui/lib/collections-generic-4.01.jar deleted file mode 100644 index 92d009c4..00000000 Binary files a/gui/lib/collections-generic-4.01.jar and /dev/null differ diff --git a/gui/lib/colt-1.2.0.jar b/gui/lib/colt-1.2.0.jar deleted file mode 100644 index a7192f68..00000000 Binary files a/gui/lib/colt-1.2.0.jar and /dev/null differ diff --git a/gui/lib/iText-5.0.6.jar b/gui/lib/iText-5.0.6.jar deleted file mode 100644 index d716b4ea..00000000 Binary files a/gui/lib/iText-5.0.6.jar and /dev/null differ diff --git a/gui/lib/jackson-annotations-2.1.0.jar b/gui/lib/jackson-annotations-2.1.0.jar deleted file mode 100644 index e4ca5597..00000000 Binary files a/gui/lib/jackson-annotations-2.1.0.jar and /dev/null differ diff --git a/gui/lib/jackson-core-2.1.0.jar b/gui/lib/jackson-core-2.1.0.jar deleted file mode 100644 index 6899eda2..00000000 Binary files a/gui/lib/jackson-core-2.1.0.jar and /dev/null differ diff --git a/gui/lib/jackson-databind-2.1.0.jar b/gui/lib/jackson-databind-2.1.0.jar deleted file mode 100644 index 5eceb007..00000000 Binary files a/gui/lib/jackson-databind-2.1.0.jar and /dev/null differ diff --git a/gui/lib/javacc.jar b/gui/lib/javacc.jar deleted file mode 100644 index 2550727e..00000000 Binary files a/gui/lib/javacc.jar and /dev/null differ diff --git a/gui/lindenb-src/org/lindenb/awt/Cap.java b/gui/lindenb-src/org/lindenb/awt/Cap.java deleted file mode 100644 index 225c718b..00000000 --- a/gui/lindenb-src/org/lindenb/awt/Cap.java +++ /dev/null @@ -1,27 +0,0 @@ -package org.lindenb.awt; -import java.awt.BasicStroke; - -/** - * SVG definition for line cap - */ -public enum Cap { -BUTT() { public int stroke() { return BasicStroke.CAP_BUTT;}}, -ROUND { public int stroke() { return BasicStroke.CAP_ROUND;}}, -SQUARE { public int stroke() { return BasicStroke.CAP_SQUARE;}}; -/** return the value as a java.awt.BasicStroke.CAP_* */ -public abstract int stroke(); -/** return the value as a SVG string style */ -public String svg() - { - return name().toLowerCase(); - } - -public static Cap parseSVG(String style) - { - for(Cap j: values()) - { - if(j.svg().equals(style)) return j; - } - throw new IllegalArgumentException("Bad cap "+style); - } -} diff --git a/gui/lindenb-src/org/lindenb/awt/ColorUtils.java b/gui/lindenb-src/org/lindenb/awt/ColorUtils.java deleted file mode 100644 index 6ba78460..00000000 --- a/gui/lindenb-src/org/lindenb/awt/ColorUtils.java +++ /dev/null @@ -1,272 +0,0 @@ -package org.lindenb.awt; - -import java.awt.Color; -import java.util.HashMap; - -/** - * @author lindenb - * - * ColorUtils - */ -public class ColorUtils - { - static final private HashMap text2color; - - static { - text2color= new HashMap(); - text2color.put("aliceblue", new Color(240, 248, 255)); - text2color.put("antiquewhite", new Color(250, 235, 215)); - text2color.put("aquamarine", new Color(127, 255, 212)); - text2color.put("aqua", new Color( 0, 255, 255)); - text2color.put("azure", new Color(240, 255, 255)); - text2color.put("beige", new Color(245, 245, 220)); - text2color.put("bisque", new Color(255, 228, 196)); - text2color.put("black", Color.BLACK); - text2color.put("blanchedalmond", new Color(255, 235, 205)); - text2color.put("blue", Color.BLUE); - text2color.put("blueviolet", new Color(138, 43, 226)); - text2color.put("brown", new Color(165, 42, 42)); - text2color.put("burlywood", new Color(222, 184, 135)); - text2color.put("cadetblue", new Color( 95, 158, 160)); - 
text2color.put("chartreuse", new Color(127, 255, 0)); - text2color.put("chocolate", new Color(210, 105, 30)); - text2color.put("coral", new Color(255, 127, 80)); - text2color.put("cornflowerblue", new Color(100, 149, 237)); - text2color.put("cornsilk", new Color(255, 248, 220)); - text2color.put("crimson", new Color(220, 20, 60)); - text2color.put("cyan", Color.CYAN); - text2color.put("darkblue", new Color( 0, 0, 139)); - text2color.put("darkcyan", new Color( 0, 139, 139)); - text2color.put("darkgoldenrod", new Color(184, 134, 11)); - text2color.put("darkgray", new Color(169, 169, 169)); - text2color.put("darkgreen", new Color( 0, 100, 0)); - text2color.put("darkgrey", new Color(169, 169, 169)); - text2color.put("darkkhaki", new Color(189, 183, 107)); - text2color.put("darkmagenta", new Color(139, 0, 139)); - text2color.put("darkolivegreen", new Color( 85, 107, 47)); - text2color.put("darkorange", new Color(255, 140, 0)); - text2color.put("darkorchid", new Color(153, 50, 204)); - text2color.put("darkred", new Color(139, 0, 0)); - text2color.put("darksalmon", new Color(233, 150, 122)); - text2color.put("darkseagreen", new Color(143, 188, 143)); - text2color.put("darkslateblue", new Color( 72, 61, 139)); - text2color.put("darkslategray", new Color( 47, 79, 79)); - text2color.put("darkslategrey", new Color( 47, 79, 79)); - text2color.put("darkturquoise", new Color( 0, 206, 209)); - text2color.put("darkviolet", new Color(148, 0, 211)); - text2color.put("deeppink", new Color(255, 20, 147)); - text2color.put("deepskyblue", new Color( 0, 191, 255)); - text2color.put("dimgray", new Color(105, 105, 105)); - text2color.put("dimgrey", new Color(105, 105, 105)); - text2color.put("dodgerblue", new Color( 30, 144, 255)); - text2color.put("firebrick", new Color(178, 34, 34)); - text2color.put("floralwhite", new Color(255, 250, 240)); - text2color.put("forestgreen", new Color( 34, 139, 34)); - text2color.put("fuchsia", new Color(255, 0, 255)); - text2color.put("gainsboro", new Color(220, 220, 220)); - text2color.put("ghostwhite", new Color(248, 248, 255)); - text2color.put("goldenrod", new Color(218, 165, 32)); - text2color.put("gold", new Color(255, 215, 0)); - text2color.put("gray", new Color(128, 128, 128)); - text2color.put("green", Color.GREEN); - text2color.put("greenyellow", new Color(173, 255, 47)); - text2color.put("grey", new Color(128, 128, 128)); - text2color.put("honeydew", new Color(240, 255, 240)); - text2color.put("hotpink", new Color(255, 105, 180)); - text2color.put("indianred", new Color(205, 92, 92)); - text2color.put("indigo", new Color( 75, 0, 130)); - text2color.put("ivory", new Color(255, 255, 240)); - text2color.put("khaki", new Color(240, 230, 140)); - text2color.put("lavenderblush", new Color(255, 240, 245)); - text2color.put("lavender", new Color(230, 230, 250)); - text2color.put("lawngreen", new Color(124, 252, 0)); - text2color.put("lemonchiffon", new Color(255, 250, 205)); - text2color.put("lightblue", new Color(173, 216, 230)); - text2color.put("lightcoral", new Color(240, 128, 128)); - text2color.put("lightcyan", new Color(224, 255, 255)); - text2color.put("lightgoldenrodyellow", new Color(250, 250, 210)); - text2color.put("lightgray", new Color(211, 211, 211)); - text2color.put("lightgreen", new Color(144, 238, 144)); - text2color.put("lightgrey", new Color(211, 211, 211)); - text2color.put("lightpink", new Color(255, 182, 193)); - text2color.put("lightsalmon", new Color(255, 160, 122)); - text2color.put("lightseagreen", new Color( 32, 178, 170)); - 
text2color.put("lightskyblue", new Color(135, 206, 250)); - text2color.put("lightslategray", new Color(119, 136, 153)); - text2color.put("lightslategrey", new Color(119, 136, 153)); - text2color.put("lightsteelblue", new Color(176, 196, 222)); - text2color.put("lightyellow", new Color(255, 255, 224)); - text2color.put("limegreen", new Color( 50, 205, 50)); - text2color.put("lime", new Color( 0, 255, 0)); - text2color.put("linen", new Color(250, 240, 230)); - text2color.put("magenta", new Color(255, 0, 255)); - text2color.put("maroon", new Color(128, 0, 0)); - text2color.put("mediumaquamarine", new Color(102, 205, 170)); - text2color.put("mediumblue", new Color( 0, 0, 205)); - text2color.put("mediumorchid", new Color(186, 85, 211)); - text2color.put("mediumpurple", new Color(147, 112, 219)); - text2color.put("mediumseagreen", new Color( 60, 179, 113)); - text2color.put("mediumslateblue", new Color(123, 104, 238)); - text2color.put("mediumspringgreen", new Color( 0, 250, 154)); - text2color.put("mediumturquoise", new Color( 72, 209, 204)); - text2color.put("mediumvioletred", new Color(199, 21, 133)); - text2color.put("midnightblue", new Color( 25, 25, 112)); - text2color.put("mintcream", new Color(245, 255, 250)); - text2color.put("mistyrose", new Color(255, 228, 225)); - text2color.put("moccasin", new Color(255, 228, 181)); - text2color.put("navajowhite", new Color(255, 222, 173)); - text2color.put("navy", new Color( 0, 0, 128)); - text2color.put("none", null); - text2color.put("oldlace", new Color(253, 245, 230)); - text2color.put("olivedrab", new Color(107, 142, 35)); - text2color.put("olive", new Color(128, 128, 0)); - text2color.put("orange", Color.ORANGE); - text2color.put("orangered", new Color(255, 69, 0)); - text2color.put("orchid", new Color(218, 112, 214)); - text2color.put("palegoldenrod", new Color(238, 232, 170)); - text2color.put("palegreen", new Color(152, 251, 152)); - text2color.put("paleturquoise", new Color(175, 238, 238)); - text2color.put("palevioletred", new Color(219, 112, 147)); - text2color.put("papayawhip", new Color(255, 239, 213)); - text2color.put("peachpuff", new Color(255, 218, 185)); - text2color.put("peru", new Color(205, 133, 63)); - text2color.put("pink", Color.PINK); - text2color.put("plum", new Color(221, 160, 221)); - text2color.put("powderblue", new Color(176, 224, 230)); - text2color.put("purple", new Color(128, 0, 128)); - text2color.put("red", Color.RED); - text2color.put("rosybrown", new Color(188, 143, 143)); - text2color.put("royalblue", new Color( 65, 105, 225)); - text2color.put("saddlebrown", new Color(139, 69, 19)); - text2color.put("salmon", new Color(250, 128, 114)); - text2color.put("sandybrown", new Color(244, 164, 96)); - text2color.put("seagreen", new Color( 46, 139, 87)); - text2color.put("seashell", new Color(255, 245, 238)); - text2color.put("sienna", new Color(160, 82, 45)); - text2color.put("silver", new Color(192, 192, 192)); - text2color.put("skyblue", new Color(135, 206, 235)); - text2color.put("slateblue", new Color(106, 90, 205)); - text2color.put("slategray", new Color(112, 128, 144)); - text2color.put("slategrey", new Color(112, 128, 144)); - text2color.put("snow", new Color(255, 250, 250)); - text2color.put("springgreen", new Color( 0, 255, 127)); - text2color.put("steelblue", new Color( 70, 130, 180)); - text2color.put("tan", new Color(210, 180, 140)); - text2color.put("teal", new Color( 0, 128, 128)); - text2color.put("thistle", new Color(216, 191, 216)); - text2color.put("tomato", new Color(255, 99, 71)); - 
text2color.put("turquoise", new Color( 64, 224, 208)); - text2color.put("violet", new Color(238, 130, 238)); - text2color.put("wheat", new Color(245, 222, 179)); - text2color.put("white", Color.WHITE); - text2color.put("whitesmoke", new Color(245, 245, 245)); - text2color.put("yellowgreen", new Color(154, 205, 50)); - text2color.put("yellow", Color.YELLOW); - } - - /** - * Inverse RGB of a color - * @param c - * @return - */ - public static Color negative(Color c) - { - return new Color( - 255-c.getRed(), - 255-c.getGreen(), - 255-c.getBlue() - ); - } - - /** return a gray color */ - public static Color gray(float f) - { - return new Color(f,f,f); - } - - /** - * Choose a gradient color between twio colors - * @param first the first Color - * @param second the second Color - * @param ratio the fraction of the first/second colors - * @return the gradient color - */ - public static Color between(Color first,Color second,double ratio) - { - if(ratio<0 || ratio>1.0) throw new IllegalArgumentException("0<=ratio<=1 but ratio="+ratio); - return new Color( - (int)(first.getRed()+ (second.getRed()-first.getRed())*ratio), - (int)(first.getGreen()+ (second.getGreen()-first.getGreen())*ratio), - (int)(first.getBlue()+ (second.getBlue()-first.getBlue())*ratio), - (int)(first.getAlpha()+ (second.getAlpha()-first.getAlpha())*ratio) - ); - } - - /** - * convert this color as "rgb(red,green,blue)" - * @param c the color - * @return null if c is null or the rgb string - */ - public static String toRGB(Color c) - { - if(c==null) return null; - return "rgb("+c.getRed()+","+c.getGreen()+","+c.getBlue()+")"; - } - - /** - * parse Color as a string. - * String can be a nominal SVG value e.g. "blue","red"... or - * a RGB definition such as "rgb(100,200,300)" or "#ab12cc" - * @param c the color as a string - * @return the Color or null if it is "none" or if it cannot convert the string - */ - public static Color parseColor(String c) - { - if(c==null) return null; - c=c.trim().toLowerCase(); - if(c.equals("none")) return null; - else if(c.startsWith("#")) - { - return new Color( Integer.valueOf(c.substring(1),16).intValue()); - } - else if(c.startsWith("rgb(") && c.endsWith(")")) - { - int index=c.indexOf(','); - if(index==-1) - { - return null; - } - try - { - int r= java.lang.Integer.parseInt(c.substring(4,index)); - c=c.substring(index+1); - index=c.indexOf(','); - if(index==-1) - { - return null; - } - int g= java.lang.Integer.parseInt(c.substring(0,index)); - c=c.substring(index+1); - index=c.indexOf(')'); - if(index==-1) - { - return null; - } - int b= java.lang.Integer.parseInt(c.substring(0,index)); - if( c.substring(index+1).trim().length()!=0) - { - return null; - } - return new Color(r,g,b); - } - catch(Exception err) - { - return null; - } - } - Color color= (Color)text2color.get(c); - if(color==null) throw new IllegalArgumentException("Illegal Color:"+c); - return color; - } - - } diff --git a/gui/lindenb-src/org/lindenb/awt/Dimension2D.java b/gui/lindenb-src/org/lindenb/awt/Dimension2D.java deleted file mode 100644 index 5a3e9505..00000000 --- a/gui/lindenb-src/org/lindenb/awt/Dimension2D.java +++ /dev/null @@ -1,135 +0,0 @@ -package org.lindenb.awt; - -import java.awt.Shape; -import java.awt.geom.Rectangle2D; -import java.awt.geom.RectangularShape; - -/** simple implementation of java.awt.geom.Dimension2D */ -public abstract class Dimension2D - extends java.awt.geom.Dimension2D - { - public static class Double extends Dimension2D - { - public double width=0.0; - public double height=0.0; - - /** 
initialize using the width and height of the RectangularShape */ - public Double(RectangularShape shape) - { - this(shape.getWidth(),shape.getHeight()); - } - - public Double(java.awt.geom.Dimension2D cp) - { - this(cp.getWidth(),cp.getHeight()); - } - - public Double(double width,double height) - { - this.width=width; - this.height=height; - } - public Double() - { - this(0,0); - } - @Override - public double getWidth() { - return this.width; - } - @Override - public double getHeight() { - return this.height; - } - @Override - public void setSize(double width, double height) { - this.width=width; - this.height=height; - } - @Override - public Object clone() { - return new Dimension2D.Double(this); - } - } - - public static class Float extends Dimension2D - { - public float width=0.0f; - public float height=0.0f; - public Float(float width,float height) - { - this.width=width; - this.height=height; - } - - /** initialize using the width and height of the RectangularShape */ - public Float(RectangularShape shape) - { - this((float)shape.getWidth(),(float)shape.getHeight()); - } - - public Float(java.awt.geom.Dimension2D cp) - { - this((float)cp.getWidth(),(float)cp.getHeight()); - } - - public Float() - { - this(0f,0f); - } - @Override - public double getWidth() { - return this.width; - } - @Override - public double getHeight() { - return this.height; - } - @Override - public void setSize(double width, double height) { - this.width=(float)width; - this.height=(float)height; - } - - @Override - public Object clone() { - return new Dimension2D.Float(this); - } - } - /** set the maximum size of this and another shape. Useful when searching for - * the maximum size of an area containing some shapes - * @param other - */ - public void max(Shape shape) - { - Rectangle2D r= shape.getBounds2D(); - max(r.getWidth(),r.getHeight()); - } - - /** set the maximum size of this and another shape. 
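ColorUtils.parseColor above accepts three spellings (a named SVG colour, "#rrggbb", or "rgb(r,g,b)"), and between() interpolates each RGBA channel linearly between two colours. A short usage sketch:

```java
import java.awt.Color;
import org.lindenb.awt.ColorUtils;

public class ColorUtilsSketch {
    public static void main(String[] args) {
        // Three accepted spellings: named SVG colour, hex, and rgb(...) form.
        Color named = ColorUtils.parseColor("steelblue");      // (70, 130, 180)
        Color hex   = ColorUtils.parseColor("#ff8800");
        Color rgb   = ColorUtils.parseColor("rgb(10,20,30)");

        // Linear interpolation between two colours, channel by channel.
        Color quarter = ColorUtils.between(Color.RED, Color.BLUE, 0.25);

        System.out.println(named + " " + hex + " " + rgb + " " + quarter);
        System.out.println(ColorUtils.toRGB(quarter));         // "rgb(r,g,b)" string
    }
}
```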
Useful when searching for - * the maximum size of an area containing some shapes - * @param other - */ - public void max(double width,double height) - { - setSize( - Math.max(this.getWidth(),width), - Math.max(this.getHeight(),height) - ); - } - - - @Override - public int hashCode() { - return new java.lang.Double(getWidth()).hashCode()+ - new java.lang.Double(getHeight()).hashCode(); - } - - @Override - public boolean equals(Object obj) { - if(obj==this) return true; - if(obj==null || !(obj instanceof java.awt.geom.Dimension2D)) return false; - java.awt.geom.Dimension2D cp=java.awt.geom.Dimension2D.class.cast(obj); - return getWidth()==cp.getWidth() && getHeight()==cp.getHeight(); - } - } diff --git a/gui/lindenb-src/org/lindenb/awt/Join.java b/gui/lindenb-src/org/lindenb/awt/Join.java deleted file mode 100644 index 37bfadb7..00000000 --- a/gui/lindenb-src/org/lindenb/awt/Join.java +++ /dev/null @@ -1,27 +0,0 @@ -package org.lindenb.awt; -import java.awt.BasicStroke; -/** - * SVG definition for line join - */ -public enum Join { -BEVEL(){ public int stroke() { return BasicStroke.JOIN_BEVEL;}}, -ROUND(){ public int stroke() { return BasicStroke.JOIN_ROUND;}}, -MITER(){ public int stroke() { return BasicStroke.JOIN_MITER;}}; -/** return the value as a java.awt.BasicStroke.JOIN_* */ -public abstract int stroke(); -/** return the value as a SVG string style */ -public String svg() - { - return name().toLowerCase(); - } - -public static Join parseSVG(String style) - { - for(Join j: values()) - { - if(j.svg().equals(style)) return j; - } - throw new IllegalArgumentException("Bad join "+style); - } - -} diff --git a/gui/lindenb-src/org/lindenb/io/IOUtils.java b/gui/lindenb-src/org/lindenb/io/IOUtils.java deleted file mode 100644 index ac5b876e..00000000 --- a/gui/lindenb-src/org/lindenb/io/IOUtils.java +++ /dev/null @@ -1,226 +0,0 @@ -package org.lindenb.io; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.FileReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.io.Reader; -import java.io.StreamTokenizer; -import java.io.StringWriter; -import java.io.Writer; -import java.net.URL; -import java.util.zip.GZIPInputStream; - -public class IOUtils - { - /** @return a representation of a StreamTokenizer */ - public static String toString(StreamTokenizer st) - { - if(st==null) return "null"; - switch(st.ttype) - { - case StreamTokenizer.TT_EOF: return ""; - case StreamTokenizer.TT_EOL: return ""; - case StreamTokenizer.TT_NUMBER: return String.valueOf(st.nval)+"(number)"; - case StreamTokenizer.TT_WORD: return st.sval+"(word)"; - default: return "'"+((char)st.ttype)+"'(char)"; - } - } - - public static String getReaderContent(Reader r) throws IOException - { - StringWriter w= new StringWriter(); - copyTo(r,w); - return w.toString(); - } - - - public static String getFileContent(File file) throws IOException - { - FileReader r=new FileReader(file); - String s=getReaderContent(r); - r.close(); - return s; - } - - public static String getURLContent(URL url) throws IOException - { - InputStreamReader r=new InputStreamReader(url.openStream()); - String s=getReaderContent(r); - r.close(); - return s; - } - - public static void copyToDir(File file, File dir) throws IOException - { - File dest=new File(dir,file.getName()); - if(file.equals(dest)) throw new IOException("copyToDir src==dest file"); - copyTo(file,dest); - } - - public 
static void copyTo(File src, File dest) throws IOException - { - if(src.equals(dest)) throw new IOException("copyTo src==dest file"); - FileOutputStream fout=new FileOutputStream(dest); - InputStream in=new FileInputStream(src); - IOUtils.copyTo(in, fout); - fout.flush(); - fout.close(); - in.close(); - } - - - public static void copyTo(InputStream in, OutputStream out) throws IOException - { - byte buffer[]=new byte[2048]; - int n=0; - while((n=in.read(buffer))!=-1) - { - out.write(buffer, 0, n); - } - out.flush(); - } - - public static void copyTo(Reader in, Writer out) throws IOException - { - char buffer[]=new char[2048]; - int n=0; - while((n=in.read(buffer))!=-1) - { - out.write(buffer, 0, n); - } - out.flush(); - } - - /** - * answers a BufferedReader to the given uri. - * if uri starts with a URL schema (http,ftp, etc...) then we can URL.openStream, else this is a file - * if uri ends with *.gz, a GZIPInputStream is added to decode the gzipped-stream - * @param uri the uri - * @return an input stream to the uri - * @throws IOException - */ - public static BufferedReader openReader(String uri) throws IOException - { - if( uri.startsWith("http://") || - uri.startsWith("https://") || - uri.startsWith("file://") || - uri.startsWith("ftp://") - ) - { - InputStream in= openInputStream(uri); - return new BufferedReader(new InputStreamReader(in)); - } - else - { - return openFile( new File(uri)); - } - } - - /** - * answers an input stream to the given uri. - * if uri starts with a URL schema (http,ftp, etc...) then we can URL.openStream, else this is a file - * if uri ends with *.gz, a GZIPInputStream is added to decode the gzipped-stream - * @param uri the uri - * @return an input stream to the uri - * @throws IOException - */ - public static InputStream openInputStream(String uri) throws IOException - { - InputStream in=null; - if( uri.startsWith("http://") || - uri.startsWith("https://") || - uri.startsWith("file://") || - uri.startsWith("ftp://") - ) - { - URL url= new URL(uri); - in = url.openStream(); - } - else - { - in=new FileInputStream(uri); - } - if(uri.toLowerCase().endsWith(".gz")) - { - return new GZIPInputStream(in); - } - return in; - } - - - /** open a file and return a BufferedReader, gunzip the file if it ends with *.gz*/ - public static BufferedReader openFile(File file) throws IOException - { - if( file.getName().endsWith(".gz")) - { - return new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(file)))); - } - return new BufferedReader(new FileReader(file)); - } - - /** flush a stream without throwing an exception */ - public static void safeFlush(OutputStream out) - { - if(out==null) return; - try { out.flush(); } catch(IOException err) {} - } - /** flush a writer without throwing an exception */ - public static void safeFlush(Writer out) - { - if(out==null) return; - try { out.flush(); } catch(IOException err) {} - } - - /** close a stream without throwing an exception */ - public static void safeClose(OutputStream out) - { - if(out==null) return; - try { out.close(); } catch(IOException err) {} - } - /** close a writer without throwing an exception */ - public static void safeClose(Writer out) - { - if(out==null) return; - try { out.close(); } catch(IOException err) {} - } - /** close a stream without throwing an exception */ - public static void safeClose(InputStream in) - { - if(in==null) return; - try { in.close(); } catch(IOException err) {} - } - /** close a writer without throwing an exception */ - public static void 
safeClose(Reader in) - { - if(in==null) return; - try { in.close(); } catch(IOException err) {} - } - - /** creates a new directory in the default tmp directory*/ - public static File createTempDir() throws IOException - { - return createTempDir(null); - } - - /** creates a new directory in the given directory*/ - public static File createTempDir(File parentDir) throws IOException - { - File dir= File.createTempFile("_tmp_dir_", ".dir",parentDir); - if(!(dir.delete()))//it is a FILE, delete it and make it a directory - { - throw new IOException("Could not delete file: " + dir.getAbsolutePath()); - } - if(!(dir.mkdir())) - { - throw new IOException("Could not create temp directory: " + dir.getAbsolutePath()); - } - return dir; - } - - } diff --git a/gui/lindenb-src/org/lindenb/lang/InvalidXMLException.java b/gui/lindenb-src/org/lindenb/lang/InvalidXMLException.java deleted file mode 100644 index 203fdd70..00000000 --- a/gui/lindenb-src/org/lindenb/lang/InvalidXMLException.java +++ /dev/null @@ -1,45 +0,0 @@ -package org.lindenb.lang; - -import org.lindenb.xml.XMLUtilities; -import org.w3c.dom.Node; - -/** - * Exception throwed when the schema DOM is invalid - * @author pierre - * - */ -public class InvalidXMLException extends Exception - { - private static final long serialVersionUID = 1L; - - public InvalidXMLException() - { - } - public InvalidXMLException(Node node) - { - this(node,"Illegal node"); - } - - public InvalidXMLException(Node node,String msg) - { - super( - (node==null?"Error":XMLUtilities.node2path(node)) - +(msg==null?"":" : "+msg) - ); - } - - public InvalidXMLException(String msg) - { - super(msg); - } - - public InvalidXMLException(Throwable err) - { - super( err); - } - - public InvalidXMLException(String msg, Throwable err) { - super(msg, err); - } - - } diff --git a/gui/lindenb-src/org/lindenb/svg/SVGRenderer.java b/gui/lindenb-src/org/lindenb/svg/SVGRenderer.java deleted file mode 100644 index a6a338d6..00000000 --- a/gui/lindenb-src/org/lindenb/svg/SVGRenderer.java +++ /dev/null @@ -1,482 +0,0 @@ -package org.lindenb.svg; - -import java.awt.AlphaComposite; -import java.awt.BasicStroke; -import java.awt.Composite; -import java.awt.Font; -import java.awt.Graphics2D; -import java.awt.Paint; -import java.awt.Shape; -import java.awt.Stroke; -import java.awt.font.FontRenderContext; -import java.awt.font.TextLayout; -import java.awt.geom.AffineTransform; -import java.awt.geom.Area; -import java.awt.geom.Ellipse2D; -import java.awt.geom.Line2D; -import java.awt.geom.Rectangle2D; - -import org.lindenb.awt.Cap; -import org.lindenb.awt.ColorUtils; -import org.lindenb.awt.Dimension2D; -import org.lindenb.awt.Join; -import org.lindenb.lang.InvalidXMLException; -import org.lindenb.sw.vocabulary.SVG; -import org.lindenb.util.StringUtils; -import org.lindenb.xml.XMLUtilities; -import org.w3c.dom.Attr; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.w3c.dom.NamedNodeMap; -import org.w3c.dom.Node; - -/** - * A Basic Renderer for Scalable Vector Graphics SVG - * @author lindenb - * - */ -public class SVGRenderer - extends SVGUtils - { - - public SVGRenderer() - { - } - - - /** - * Record the Graphic context for a given node - * @author lindenb - * - */ - private static class Shuttle - { - /** current graphics2D */ - Graphics2D g; - /** the svg:svg root */ - Element svgRoot; - /** current node */ - Element node; - /** current clip */ - Shape clip; - /** current shape */ - Shape shape; - float fontSize=12; - String fontFamily=null; - Paint fill=null; - Paint 
stroke=null; - float strokeWidth; - Cap linecap=Cap.BUTT; - Join linejoin=Join.MITER; - AffineTransform transform=new AffineTransform(); - float opacity=1f; - - public Shuttle(Graphics2D g,Element root) - { - this.g=g; - - Font font= g.getFont(); - this.fontSize= font.getSize(); - this.fontFamily= font.getFamily(); - this.svgRoot=root; - this.node=root; - this.transform=new AffineTransform(g.getTransform()); - this.clip= g.getClip(); - } - - Shuttle(Shuttle cp,Element e) - { - this.node=e; - this.g = cp.g; - this.svgRoot = cp.svgRoot; - this.shape=cp.shape; - this.clip=cp.clip; - this.fontSize=cp.fontSize; - this.fontFamily=cp.fontFamily; - this.stroke=cp.stroke; - this.fill=cp.fill; - this.strokeWidth=cp.strokeWidth; - this.transform=new AffineTransform(cp.transform); - this.opacity=cp.opacity; - if(e.hasAttributes()) - { - NamedNodeMap atts=e.getAttributes(); - for(int i=0;i< atts.getLength();++i) - { - - Attr att= Attr.class.cast(atts.item(i)); - if(att.getNamespaceURI()!=null) continue; - String s=att.getName(); - String value= att.getValue(); - if(s.equals("style")) - { - for(String styles:value.split("[;]+")) - { - int j=styles.indexOf(':'); - if(j!=-1) - { - applyStyle(styles.substring(0,j).trim(),styles.substring(j+1).trim()); - } - } - - } - else - { - applyStyle(s,att.getValue()); - } - } - } - } - private void applyStyle(String key,String value) - { - if(key.equals("fill")) - { - if(value!=null && value.equals("none")) return; - this.fill= ColorUtils.parseColor(value); - } - else if(key.equals("stroke")) - { - if(value!=null && value.equals("none")) return; - this.stroke= ColorUtils.parseColor(value); - } - else if(key.equals("stroke-width")) - { - this.strokeWidth= Float.parseFloat(value); - } - else if(key.equals("stroke-linecap")) - { - this.linecap= Cap.parseSVG(value); - } - else if(key.equals("stroke-linejoin")) - { - this.linejoin=Join.parseSVG(value); - } - else if(key.equals("transform")) - { - AffineTransform tr=svgToaffineTransform(value); - this.transform.concatenate(tr); - } - else if(key.equals("font-size")) - { - this.fontSize= (float)castUnit(value); - } - else if(key.equals("font-family")) - { - this.fontFamily= value; - } - else if(key.equals("opacity")) - { - this.opacity *= Float.parseFloat(value); - } - - - } - - } - - public void paint( - Graphics2D g, - Node dom - ) throws InvalidXMLException - { - paint(g,dom,null); - } - - - public void paint( - Graphics2D g, - Node dom, - Rectangle2D viewRect - ) throws InvalidXMLException - { - - if(g==null) throw new NullPointerException("g is null"); - if(dom==null) throw new NullPointerException("dom is null"); - Element root=null; - if(dom.getNodeType()==Node.DOCUMENT_NODE) - { - root= Document.class.cast(dom).getDocumentElement(); - } - else if(dom.getNodeType()==Node.ELEMENT_NODE) - { - root = Element.class.cast(dom); - } - - if(root==null) throw new InvalidXMLException(dom,"no root"); - if(!XMLUtilities.isA(root, SVG.NS, "svg")) throw new InvalidXMLException(root,"not a SVG root"); - - Dimension2D srcSize= getSize(root); - Rectangle2D viewBox=null; - Attr viewBoxAttr = root.getAttributeNode("viewBox"); - if(viewBoxAttr!=null) - { - String tokens[]= viewBoxAttr.getValue().trim().split("[ \t\n]+"); - if(tokens.length!=4) throw new InvalidXMLException(viewBoxAttr,"invalid "); - viewBox = new Rectangle2D.Double( - Double.parseDouble(tokens[0]), - Double.parseDouble(tokens[1]), - Double.parseDouble(tokens[2]), - Double.parseDouble(tokens[3]) - ); - srcSize= new 
Dimension2D.Double(viewBox.getWidth(),viewBox.getHeight()); - } - - - AffineTransform originalTr=null; - if(viewRect!=null) - { - if(srcSize.getWidth()>0 && srcSize.getHeight()>0) - { - originalTr= g.getTransform(); - - double ratio= Math.min( - viewRect.getWidth()/srcSize.getWidth(), - viewRect.getHeight()/srcSize.getHeight() - ); - - g.translate( - (viewRect.getWidth() -srcSize.getWidth()*ratio)/2.0, - (viewRect.getHeight()-srcSize.getHeight()*ratio)/2.0 - ); - g.scale(ratio,ratio); - } - } - Shape oldclip= g.getClip(); - Shuttle shuttle=new Shuttle(g,root); - if(viewBox!=null) - { - AffineTransform tr= AffineTransform.getTranslateInstance( - -viewBox.getX(), - -viewBox.getY() - ); - shuttle.transform.concatenate(tr); - Area area= new Area(new Rectangle2D.Double( - 0,0, - viewBox.getWidth(), - viewBox.getHeight() - )); - if (shuttle.clip != null) - area.intersect(new Area(shuttle.clip)); - shuttle.clip= area; - } - - paint(shuttle); - - - if(originalTr!=null) - { - g.setTransform(originalTr); - } - g.setClip(oldclip); - } - - private void paint(Shuttle shuttle) throws InvalidXMLException - { - Element e= shuttle.node; - String shapeName= e.getLocalName(); - - if(!SVG.NS.equals(e.getNamespaceURI())) - { - - for(Node c=e.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()!=Node.ELEMENT_NODE) continue; - Shuttle cp= new Shuttle(shuttle,Element.class.cast(c)); - paint(cp); - } - } - else if(shapeName==null) - { - LOG.warning("shapeName is null"); - } - else if(shapeName.equals("g")) - { - - for(Node c=e.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()!=Node.ELEMENT_NODE) continue; - Shuttle cp= new Shuttle(shuttle,Element.class.cast(c)); - paint(cp); - } - } - else if(shapeName.equals("path")) - { - Attr d= e.getAttributeNode("d"); - if(d!=null) - { - shuttle.shape = SVGUtils.pathToShape(d.getValue()); - drawShape(shuttle); - } - } - else if(shapeName.equals("polyline")) - { - Attr points= e.getAttributeNode("points"); - if(points!=null) - { - shuttle.shape = SVGUtils.polylineToShape(points.getValue()); - drawShape(shuttle); - } - } - else if(shapeName.equals("polygon")) - { - Attr points= e.getAttributeNode("points"); - if(points!=null) - { - shuttle.shape = SVGUtils.polygonToShape(points.getValue()); - drawShape(shuttle); - } - } - else if(shapeName.equals("rect")) - { - - Attr x= e.getAttributeNode("x"); - Attr y= e.getAttributeNode("y"); - Attr w= e.getAttributeNode("width"); - Attr h= e.getAttributeNode("height"); - if(x!=null && y!=null && w!=null && h!=null) - { - shuttle.shape =new Rectangle2D.Double( - Double.parseDouble(x.getValue()), - Double.parseDouble(y.getValue()), - Double.parseDouble(w.getValue()), - Double.parseDouble(h.getValue()) - ); - drawShape(shuttle); - } - } - else if(shapeName.equals("line")) - { - Attr x1= e.getAttributeNode("x1"); - Attr y1= e.getAttributeNode("y1"); - Attr x2= e.getAttributeNode("x2"); - Attr y2= e.getAttributeNode("y2"); - if(x1!=null && y1!=null && x2!=null && y2!=null) - { - shuttle.shape =new Line2D.Double( - Double.parseDouble(x1.getValue()), - Double.parseDouble(y1.getValue()), - Double.parseDouble(x2.getValue()), - Double.parseDouble(y2.getValue()) - ); - drawShape(shuttle); - } - } - else if(shapeName.equals("circle")) - { - Attr cx= e.getAttributeNode("cx"); - Attr cy= e.getAttributeNode("cy"); - Attr r= e.getAttributeNode("r"); - if(cx!=null && cy!=null && r!=null) - { - double radius=Double.parseDouble(r.getValue()); - shuttle.shape =new Ellipse2D.Double( - 
Double.parseDouble(cx.getValue())-radius, - Double.parseDouble(cy.getValue())-radius, - radius*2, - radius*2 - ); - drawShape(shuttle); - } - } - else if(shapeName.equals("ellipse")) - { - Attr cx= e.getAttributeNode("cx"); - Attr cy= e.getAttributeNode("cy"); - Attr rx= e.getAttributeNode("rx"); - Attr ry= e.getAttributeNode("ry"); - if(cx!=null && cy!=null && rx!=null && ry!=null) - { - double radiusx=Double.parseDouble(rx.getValue()); - double radiusy=Double.parseDouble(ry.getValue()); - shuttle.shape =new Ellipse2D.Double( - Double.parseDouble(cx.getValue())-radiusx, - Double.parseDouble(cy.getValue())-radiusy, - radiusx*2, - radiusy*2 - ); - drawShape(shuttle); - } - } - else if(StringUtils.isIn(shapeName,"title","defs","desc","metadata")) - { - //ignore - } - else if(shapeName.equals("text")) - { - Attr x= e.getAttributeNode("x"); - Attr y= e.getAttributeNode("y"); - if(x!=null && y!=null) - { - - Font f= new Font(shuttle.fontFamily,Font.PLAIN,(int)shuttle.fontSize); - - FontRenderContext frc = shuttle.g.getFontRenderContext(); - TextLayout tl = new TextLayout(e.getTextContent(), f, frc); - shuttle.shape= tl.getOutline(null); - shuttle.shape = AffineTransform.getTranslateInstance( - Double.parseDouble(x.getValue()), - Double.parseDouble(y.getValue())).createTransformedShape(shuttle.shape) - ; - - drawShape(shuttle); - } - } - else if(shapeName.equals("svg")) - { - - for(Node c=e.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()!=Node.ELEMENT_NODE) continue; - Shuttle cp = new Shuttle(shuttle,Element.class.cast(c)); - paint(cp); - } - } - else - { - LOG.warning("cannot display <"+e.getLocalName()+">"); - } - } - - - private void drawShape(Shuttle shuttle) - { - Graphics2D g= shuttle.g; - Shape oldclip=shuttle.g.getClip(); - g.setClip(shuttle.clip); - Composite oldcomposite=g.getComposite(); - if(shuttle.opacity!=1f) - { - g.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER,shuttle.opacity)); - } - - AffineTransform oldtr=g.getTransform(); - g.setTransform(shuttle.transform); - - Stroke oldStroke= g.getStroke(); - Stroke newStroke= new BasicStroke( - shuttle.strokeWidth, - shuttle.linecap.stroke(), - shuttle.linejoin.stroke() - ); - g.setStroke(newStroke); - - if(shuttle.fill!=null) - { - g.setPaint(shuttle.fill); - g.fill(shuttle.shape); - } - if(shuttle.stroke!=null) - { - g.setPaint(shuttle.stroke); - g.draw(shuttle.shape); - } - g.setClip(oldclip); - g.setStroke(oldStroke); - g.setTransform(oldtr); - g.setComposite(oldcomposite); - } - - - - -} diff --git a/gui/lindenb-src/org/lindenb/svg/SVGUtils.java b/gui/lindenb-src/org/lindenb/svg/SVGUtils.java deleted file mode 100644 index 9c76246a..00000000 --- a/gui/lindenb-src/org/lindenb/svg/SVGUtils.java +++ /dev/null @@ -1,342 +0,0 @@ -package org.lindenb.svg; - -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.GeneralPath; -import java.awt.geom.PathIterator; - -import java.io.PrintWriter; -import java.io.StringReader; -import java.io.StringWriter; -import java.io.Writer; -import java.util.Scanner; -import java.util.logging.Logger; -import org.lindenb.awt.Dimension2D; -import org.lindenb.lang.InvalidXMLException; -import org.lindenb.svg.path.SVGPathParser; -import org.lindenb.sw.vocabulary.SVG; -import org.lindenb.util.StringUtils; -import org.lindenb.xml.XMLUtilities; -import org.w3c.dom.Attr; -import org.w3c.dom.Element; - - -/** - * Utilities for Scalable Vector Graphics - * @author lindenb - * - */ -public class SVGUtils extends SVG -{ - protected static final 
Logger LOG=Logger.getLogger("org.lindenb"); - - public static double castUnit(String s) - { - s=s.trim(); - if(s.endsWith("px") || s.endsWith("pt") || s.endsWith("cm")) - { - s=s.substring(0,s.length()-2).trim(); - } - if(s.endsWith("in")) - { - s=s.substring(0,s.length()-2).trim(); - return 75.0*Double.parseDouble(s); - } - return Double.parseDouble(s); - } - - /** return the dimension of a SVG document */ - public static Dimension2D getSize(Element svgRoot)throws InvalidXMLException - { - if(!XMLUtilities.isA(svgRoot, NS, "svg")) throw new InvalidXMLException(svgRoot,"not a svg:svg element"); - try - { - Dimension2D.Double srcSize=new Dimension2D.Double(0,0); - Attr width= svgRoot.getAttributeNode("width"); - Attr height= svgRoot.getAttributeNode("height"); - - if(width==null) throw new InvalidXMLException(svgRoot,"@width missing"); - srcSize.width= castUnit(width.getValue()); - - if(height==null) throw new InvalidXMLException(svgRoot,"@height missing"); - srcSize.height= castUnit(height.getValue()); - return srcSize; - } - catch(NumberFormatException err) - { - throw new InvalidXMLException(err); - } - } - - static public AffineTransform svgToaffineTransform(String transform) - { - if(StringUtils.isBlank(transform)) return null; - String s=transform.trim(); - - if(s.startsWith("matrix(")) - { - int i=s.indexOf(")"); - if(i==-1) throw new IllegalArgumentException(s); - if(!StringUtils.isBlank(s.substring(i+1))) throw new IllegalArgumentException(s); - String tokens[]=s.substring(7, i).split("[,]"); - if(tokens.length!=6) throw new IllegalArgumentException(s); - return new AffineTransform(new double[]{ - Double.parseDouble(tokens[0]), - Double.parseDouble(tokens[1]), - Double.parseDouble(tokens[2]), - Double.parseDouble(tokens[3]), - Double.parseDouble(tokens[4]), - Double.parseDouble(tokens[5]) - }); - } - AffineTransform tr= new AffineTransform(); - while(s.length()!=0) - { - - - if(s.startsWith("scale(")) - { - int i=s.indexOf(")"); - if(i==-1) throw new IllegalArgumentException(s); - - String s2= s.substring(6,i).trim(); - s= s.substring(i+1).trim(); - i= s2.indexOf(','); - if(i==-1) - { - double scale= Double.parseDouble(s2.trim()); - - AffineTransform tr2= AffineTransform.getScaleInstance( - scale,scale - ); - tr2.concatenate(tr); - tr=tr2; - } - else - { - double scalex= Double.parseDouble(s2.substring(0,i).trim()); - double scaley= Double.parseDouble(s2.substring(i+1).trim()); - - AffineTransform tr2= AffineTransform.getScaleInstance( - scalex,scaley - ); - tr2.concatenate(tr); - tr=tr2; - } - } - else if(s.startsWith("translate(")) - { - int i=s.indexOf(")"); - if(i==-1) throw new IllegalArgumentException(s); - String s2= s.substring(10,i).trim(); - s= s.substring(i+1).trim(); - i= s2.indexOf(','); - if(i==-1) - { - double translate= Double.parseDouble(s2.trim()); - - AffineTransform tr2= AffineTransform.getTranslateInstance( - translate,0 - ); - tr2.concatenate(tr); - tr=tr2; - } - else - { - double translatex= Double.parseDouble(s2.substring(0,i).trim()); - double translatey= Double.parseDouble(s2.substring(i+1).trim()); - - AffineTransform tr2= AffineTransform.getTranslateInstance( - translatex,translatey - ); - tr2.concatenate(tr); - tr=tr2; - } - } - else if(s.startsWith("rotate(")) - { - int i=s.indexOf(")"); - if(i==-1) throw new IllegalArgumentException(s); - String s2= s.substring(7,i).trim(); - s= s.substring(i+1).trim(); - i= s2.indexOf(','); - if(i==-1) - { - double angle= Double.parseDouble(s2.trim()); - - AffineTransform tr2= 
AffineTransform.getRotateInstance((angle/180.0)*Math.PI); - tr2.concatenate(tr); - tr=tr2; - } - else - { - double angle= Double.parseDouble(s2.substring(0,i).trim()); - s2=s2.substring(i+1); - i= s2.indexOf(','); - if(i==-1) throw new IllegalArgumentException("bad rotation "+s); - - double cx= Double.parseDouble(s2.substring(0,i).trim()); - double cy= Double.parseDouble(s2.substring(i+1).trim()); - - AffineTransform tr2= AffineTransform.getRotateInstance( - angle,cx,cy - ); - tr2.concatenate(tr); - tr=tr2; - } - } - else if(s.startsWith("skewX(")) - { - int i=s.indexOf(")"); - if(i==-1) throw new IllegalArgumentException(s); - String s2= s.substring(6,i).trim(); - s= s.substring(i+1).trim(); - - double shx= Double.parseDouble(s2.trim()); - - AffineTransform tr2= AffineTransform.getShearInstance(shx, 1f); - tr2.concatenate(tr); - tr=tr2; - } - else if(s.startsWith("skewY(")) - { - int i=s.indexOf(")"); - if(i==-1) throw new IllegalArgumentException(s); - String s2= s.substring(6,i).trim(); - s= s.substring(i+1).trim(); - - double shy= Double.parseDouble(s2.trim()); - - AffineTransform tr2= AffineTransform.getShearInstance(1f,shy); - tr2.concatenate(tr); - tr=tr2; - } - - } - return tr; - } - - /** - * transform a shape into a SVG path as String - * @param shape the shape - * @return the SVG points for <path> - */ - static public String shapeToPath(Shape shape) - { - StringWriter out= new StringWriter(); - shapeToPath(out,shape); - return out.toString(); - } - - - - - /** - * transform a shape into a SVG path - * @param shape - * @return - */ - static public void shapeToPath(Writer out,Shape shape) - { - PrintWriter path= new PrintWriter(out); - - double tab[] = new double[6]; - PathIterator pathiterator = shape.getPathIterator(null); - - while(!pathiterator.isDone()) - { - int currSegmentType= pathiterator.currentSegment(tab); - switch(currSegmentType) { - case PathIterator.SEG_MOVETO: { - path.print( "M " + (tab[0]) + " " + (tab[1]) + " "); - break; - } - case PathIterator.SEG_LINETO: { - path.print( "L " + (tab[0]) + " " + (tab[1]) + " "); - break; - } - case PathIterator.SEG_CLOSE: { - path.print( "Z "); - break; - } - case PathIterator.SEG_QUADTO: { - path.print( "Q " + (tab[0]) + " " + (tab[1])); - path.print( " " + (tab[2]) + " " + (tab[3])); - path.print( " "); - break; - } - case PathIterator.SEG_CUBICTO: { - path.print( "C " + (tab[0]) + " " + (tab[1])); - path.print( " " + (tab[2]) + " " + (tab[3])); - path.print( " " + (tab[4]) + " " + (tab[5])); - path.print( " "); - break; - } - default: - { - LOG.info("Cannot handled "+currSegmentType); - break; - } - } - pathiterator.next(); - } - path.flush(); - } - - - public static GeneralPath polygonToShape(String lineString ) - { - GeneralPath p = polylineToShape(lineString); - p.closePath(); - return p; - } - - public static GeneralPath polylineToShape(String lineString ) - { - GeneralPath p = new GeneralPath(GeneralPath.WIND_EVEN_ODD); - Scanner scanner= new Scanner(new StringReader(lineString)); - scanner.useDelimiter("[ \n,\t]+"); - - boolean found=false; - Double prev=null; - while(scanner.hasNext()) - { - String s=scanner.next(); - if(s.length()==0) continue; - double v= Double.parseDouble(s); - if(prev==null) - { - prev=v; - } - else - { - if(!found) - { - p.moveTo(prev, v); - found=true; - } - else - { - p.lineTo(prev, v); - } - prev=null; - } - } - if(prev!=null) throw new IllegalArgumentException("bad polyline "+lineString); - return p; - } - - /** - * @param pathString the path string - * @return - */ - public static Shape 
pathToShape(String pathString ) - { - return SVGPathParser.parse(pathString); - } - -} - - - - diff --git a/gui/lindenb-src/org/lindenb/svg/path/SVGPathParser.jj b/gui/lindenb-src/org/lindenb/svg/path/SVGPathParser.jj deleted file mode 100644 index 4c6dc127..00000000 --- a/gui/lindenb-src/org/lindenb/svg/path/SVGPathParser.jj +++ /dev/null @@ -1,512 +0,0 @@ - -options { -STATIC=false; -DEBUG_PARSER=false; -DEBUG_TOKEN_MANAGER=false; -LOOKAHEAD=2; -FORCE_LA_CHECK=true; -} - - -PARSER_BEGIN(SVGPathParser) -package org.lindenb.svg.path; -import java.awt.Shape; -import java.awt.geom.GeneralPath; -import java.awt.geom.Arc2D; -import java.awt.geom.Point2D; - -/** - * - * http://www.w3.org/TR/SVG11/paths.html#PathDataBNF - */ -public class SVGPathParser - { - private Point2D.Double start=null; - private Point2D.Double last=null; - - public SVGPathParser(String s) - { - this(new java.io.StringReader(s)); - } - - public static Shape parse(String s) - { - try - { - return new SVGPathParser(s).path(); - } - catch(ParseException err) - { - throw new IllegalArgumentException(err); - } - } - - public static void main(String args[]) - throws Exception - { - for(String s:args) - { - parse(s); - } - } - } -PARSER_END(SVGPathParser) - -TOKEN: - { - - | - | - | <#DIGIT: ["0"-"9"] > - | )+ > - | | ) > - | <#EXPONENT: ("E"|"e") ()? > - | )? - ( - ()* ()* ()? - | () - ) - > - | - | - | - | - | - | - | - | - | - | - | - } - - -SKIP : - { - " " - | "\t" - | "\n" - | "\r" - | "," /* comma */ - } - -public Shape path():{GeneralPath shape=new GeneralPath();} - { - ( moveTo(shape) drawtoCommands(shape) )+ - - { - return shape; - } - } - - -/** -Start a new sub-path at the given (x,y) coordinate. -M (uppercase) indicates that absolute coordinates will follow; m (lowercase) indicates that relative coordinates will follow. If a relative moveto (m) appears as the first element of the path, then it is treated as a pair of absolute coordinates. If a moveto is followed by multiple pairs of coordinates, the subsequent pairs are treated as implicit lineto commands. -*/ -private void moveTo(GeneralPath shape):{boolean relative;Token t;Point2D.Double p;} - { - ( - t={ relative=t.image.equals("m");} - - p=coords() - { - if(last==null || !relative) - { - last=p; - } - else - { - last.x+=p.getX(); - last.y+=p.getY(); - } - shape.moveTo(last.getX(),last.getY()); - this.start=this.last; - } - (lineToSequence(shape,relative))? - ) - } - -void drawtoCommands(GeneralPath shape):{} - { - (drawtoCommand(shape))* - } - -void drawtoCommand(GeneralPath shape):{} - { - ( - closePath(shape) - | lineTo(shape) - | lineH(shape) - | lineV(shape) - | cubicBezier(shape) - | smoothCubicBezier(shape) - | quadraticCurve(shape) - | smootQuadraticCurve(shape) - | ellipticArc(shape) - ) - } - -/** - Draw a line from the current point to the given (x,y) coordinate which becomes the new current point. L (uppercase) indicates that absolute coordinates will follow; l (lowercase) indicates that relative coordinates will follow. A number of coordinates pairs may be specified to draw a polyline. At the end of the command, the new current point is set to the final set of coordinates provided. 
- */ -private void lineTo(GeneralPath shape):{boolean relative;Token t;Point2D.Double p;} - { - t={ relative=t.image.equals("l");} - lineToSequence(shape,relative) - } - -private void lineToSequence(GeneralPath shape,boolean relative):{Point2D.Double p;} - { - (p=coords() - { - if(!relative) - { - this.last=p; - } - else - { - this.last.x+=p.getX(); - this.last.y+=p.getY(); - } - shape.lineTo(this.last.getX(),this.last.getY()); - })+ - } - -/** -Draws a horizontal line from the current point (cpx, cpy) to (x, cpy). H (uppercase) indicates that absolute coordinates will follow; h (lowercase) indicates that relative coordinates will follow. Multiple x values can be provided (although usually this doesn't make sense). At the end of the command, the new current point becomes (x, cpy) for the final value of x.*/ - -private void lineH(GeneralPath shape):{boolean relative;Token t;} - { - t={ relative=t.image.equals("h");} - ( lineHSequence(shape,relative) ) - } - -private void lineHSequence(GeneralPath shape,boolean relative):{double v;} - { - ( v=coordinate() - { - if(!relative) - { - this.last.x=v; - } - else - { - this.last.x+=v; - } - shape.lineTo(this.last.getX(),this.last.getY()); - })+ - } - -/** -Draws a vertical line from the current point (cpx, cpy) to (cpx, y). V (uppercase) indicates that absolute coordinates will follow; v (lowercase) indicates that relative coordinates will follow. Multiple y values can be provided (although usually this doesn't make sense). At the end of the command, the new current point becomes (cpx, y) for the final value of y.*/ -private void lineV(GeneralPath shape):{boolean relative;Token t;double v;} - { - t={ relative=t.image.equals("v");} - (lineVSequence(shape,relative)) - } - -private void lineVSequence(GeneralPath shape,boolean relative):{double v;} - { - ( v=coordinate() - { - if(!relative) - { - this.last.y=v; - } - else - { - this.last.y+=v; - } - shape.lineTo(this.last.getX(),this.last.getY()); - })+ - } - - -/** -Close the current subpath by drawing a straight line from the current point to current subpath's initial point. -*/ -private void closePath(GeneralPath shape):{} - { - - { - shape.closePath(); - this.last=this.start; - } - } -/** Draws a cubic Bézier curve from the current point to (x,y) using (x1,y1) as the control point at the beginning of the curve and (x2,y2) as the control point at the end of the curve. C (uppercase) indicates that absolute coordinates will follow; c (lowercase) indicates that relative coordinates will follow. Multiple sets of coordinates may be specified to draw a polybézier. At the end of the command, the new current point becomes the final (x,y) coordinate pair used in the polybézier */ -private void cubicBezier(GeneralPath shape): - { - Token t; - boolean relative; - Point2D.Double p1,p2,p3; - } - { - t={ relative=t.image.equals("c");} - (( - p1=coords() - p2=coords() - p3=coords() - ){ - if(relative) - { - p1.x+=last.x; - p1.y+=last.y; - p2.x+=last.x; - p2.y+=last.y; - p3.x+=last.x; - p3.y+=last.y; - } - shape.curveTo(p1.getX(),p1.getY(),p2.getX(),p2.getY(),p3.getX(),p3.getY()); - this.last=p3; - })+ - } - -/** - *Draws a cubic Bézier curve from the current point to (x,y). The first control point is assumed to be the reflection of the second control point on the previous command relative to the current point. (If there is no previous command or if the previous command was not an C, c, S or s, assume the first control point is coincident with the current point.) 
(x2,y2) is the second control point (i.e., the control point at the end of the curve). S (uppercase) indicates that absolute coordinates will follow; s (lowercase) indicates that relative coordinates will follow. Multiple sets of coordinates may be specified to draw a polybézier. At the end of the command, the new current point becomes the final (x,y) coordinate pair used in the polybézier. - */ -private void smoothCubicBezier(GeneralPath shape): - { - Token t; - boolean relative; - Point2D.Double p1,p2; - } - { - t={ relative=t.image.equals("s");} - (( - p1=coords() - p2=coords() - ){ - if(relative) - { - p1.x+=last.x; - p1.y+=last.y; - p2.x+=last.x; - p2.y+=last.y; - } - //WRONG TODO - shape.curveTo(last.getX(),last.getY(),p1.getX(),p1.getY(),p2.getX(),p2.getY()); - this.last=p2; - })+ - } -/** -Draws a quadratic Bézier curve from the current point to (x,y) using (x1,y1) as the control point. Q (uppercase) indicates that absolute coordinates will follow; q (lowercase) indicates that relative coordinates will follow. Multiple sets of coordinates may be specified to draw a polybézier. At the end of the command, the new current point becomes the final (x,y) coordinate pair used in the polybézier. -*/ -private void quadraticCurve(GeneralPath shape): - { - Token t; - boolean relative; - Point2D.Double p1,p2; - } - { - t={ relative=t.image.equals("q");} - (( - p1=coords() - p2=coords() - ){ - if(relative) - { - p1.x+=last.x; - p1.y+=last.y; - p2.x+=last.x; - p2.y+=last.y; - } - //WRONG TODO - shape.quadTo(p1.getX(),p1.getY(),p2.getX(),p2.getY()); - this.last=p2; - })+ - } - - -private void smootQuadraticCurve(GeneralPath shape): - { - Token t; - boolean relative; - Point2D.Double p1; - } - { - t={ relative=t.image.equals("q");} - (( - p1=coords() - ){ - if(relative) - { - p1.x+=last.x; - p1.y+=last.y; - } - //WRONG TODO - shape.quadTo(last.getX(),last.getY(),p1.getX(),p1.getY()); - this.last=p1; - })+ - } - -/** Draws an elliptical arc from the current point to (x, y). The size and orientation of the ellipse are defined by two radii (rx, ry) and an x-axis-rotation, which indicates how the ellipse as a whole is rotated relative to the current coordinate system. The center (cx, cy) of the ellipse is calculated automatically to satisfy the constraints imposed by the other parameters. 
large-arc-flag and sweep-flag contribute to the automatic calculations and help determine how the arc is drawn.*/ -private void ellipticArc(GeneralPath shape): - { - Token t; - boolean relative; - Point2D.Double p1; - double rx,ry; - double theta; - int larg_arc_flag; - int sweep_flag; - double x,y; - } - { - t={ relative=t.image.equals("a");} - (( - rx=coordinate() - ry=coordinate() - theta=number() - larg_arc_flag=integer() - sweep_flag=integer() - x=coordinate() - y=coordinate() - ){ - /* this function was copied from - ZZ Coder - http://stackoverflow.com/questions/1805101/svg-elliptical-arcs-with-java/1805151#1805151 - */ - // Ensure radii are valid - if (rx == 0 || ry == 0) { - shape.lineTo(x, y); - this.last.x=x; - this.last.y=y; - return; - } - // Get the current (x, y) coordinates of the shape - Point2D p2d = shape.getCurrentPoint(); - double x0 = p2d.getX(); - double y0 = p2d.getY(); - // Compute the half distance between the current and the final point - double dx2 = (x0 - x) / 2.0; - double dy2 = (y0 - y) / 2.0; - // Convert theta from degrees to radians - theta = Math.toRadians(theta % 360); - - // - // Step 1 : Compute (x1, y1) - // - double x1 = (Math.cos(theta) * (double) dx2 + Math.sin(theta) - * (double) dy2); - double y1 = (-Math.sin(theta) * (double) dx2 + Math.cos(theta) - * (double) dy2); - - last.x=x1; - last.y=y1; - - // Ensure radii are large enough - rx = Math.abs(rx); - ry = Math.abs(ry); - double Prx = rx * rx; - double Pry = ry * ry; - double Px1 = x1 * x1; - double Py1 = y1 * y1; - double d = Px1 / Prx + Py1 / Pry; - if (d > 1) { - rx = Math.abs((Math.sqrt(d) * (double) rx)); - ry = Math.abs((Math.sqrt(d) * (double) ry)); - Prx = rx * rx; - Pry = ry * ry; - } - - // - // Step 2 : Compute (cx1, cy1) - // - double sign = (larg_arc_flag == sweep_flag) ? -1d : 1d; - double coef = (sign * Math - .sqrt(((Prx * Pry) - (Prx * Py1) - (Pry * Px1)) - / ((Prx * Py1) + (Pry * Px1)))); - double cx1 = coef * ((rx * y1) / ry); - double cy1 = coef * -((ry * x1) / rx); - - // - // Step 3 : Compute (cx, cy) from (cx1, cy1) - // - double sx2 = (x0 + x) / 2.0f; - double sy2 = (y0 + y) / 2.0f; - double cx = sx2 - + (Math.cos(theta) * (double) cx1 - Math.sin(theta) - * (double) cy1); - double cy = sy2 - + (double) (Math.sin(theta) * (double) cx1 + Math.cos(theta) - * (double) cy1); - - // - // Step 4 : Compute the angleStart (theta1) and the angleExtent (dtheta) - // - double ux = (x1 - cx1) / rx; - double uy = (y1 - cy1) / ry; - double vx = (-x1 - cx1) / rx; - double vy = (-y1 - cy1) / ry; - double p, n; - // Compute the angle start - n = Math.sqrt((ux * ux) + (uy * uy)); - p = ux; // (1 * ux) + (0 * uy) - sign = (uy < 0) ? -1d : 1d; - double angleStart = Math.toDegrees(sign * Math.acos(p / n)); - // Compute the angle extent - n = Math.sqrt((ux * ux + uy * uy) * (vx * vx + vy * vy)); - p = ux * vx + uy * vy; - sign = (ux * vy - uy * vx < 0) ? 
-1d : 1d; - double angleExtent = Math.toDegrees(sign * Math.acos(p / n)); - if (sweep_flag!=1 && angleExtent > 0) { - angleExtent -= 360f; - } else if (sweep_flag==1 && angleExtent < 0) { - angleExtent += 360f; - } - angleExtent %= 360f; - angleStart %= 360f; - - Arc2D.Double arc = new Arc2D.Double(); - arc.x = cx - rx; - arc.y = cy - ry; - arc.width = rx * 2.0f; - arc.height = ry * 2.0f; - arc.start = -angleStart; - arc.extent = -angleExtent; - shape.append(arc, true); - - })+ - } - -private Point2D.Double coords():{double x;double y;} - { - (x=coordinate() y=coordinate()) - { - return new Point2D.Double(x,y); - } - } - -private double coordinate():{double n;} - { - (n=number()) - { - return n; - } - } - -private double number():{double f;} - { - (f=integer()|f=floating()) - { - return f; - } - } - -private double floating():{Token t;} - { - t= { return Double.parseDouble(t.image);} - } - -private int integer():{int n;int sig=1;} - { - ({sig=1;} | {sig=-1;})? n=positiveInteger() { return n*sig;} - } - -private int positiveInteger():{Token t;} - { - t= { return Integer.parseInt(t.image);} - } - - diff --git a/gui/lindenb-src/org/lindenb/svg/transform/SVGTransformParser.jj b/gui/lindenb-src/org/lindenb/svg/transform/SVGTransformParser.jj deleted file mode 100644 index f734fcc0..00000000 --- a/gui/lindenb-src/org/lindenb/svg/transform/SVGTransformParser.jj +++ /dev/null @@ -1,223 +0,0 @@ - -options { -STATIC=false; -IGNORE_CASE=true; -DEBUG_PARSER=false; -DEBUG_TOKEN_MANAGER=false; -LOOKAHEAD=2; -} - - -PARSER_BEGIN(SVGTransformParser) -package org.lindenb.svg.transform; -import java.awt.geom.AffineTransform; -/** - * SVGTransformParser - */ -public class SVGTransformParser - { - public SVGTransformParser(String s) - { - this(new java.io.StringReader(s)); - } - - /** - * Parses a SVG tranformation - * and returns a java.awt.geom.AffineTransform - * @throws IllegalArgumentException if there is an error - */ - public static AffineTransform parse(String s) - { - try - { - return new SVGTransformParser(s).transform(); - } - catch(ParseException err) - { - throw new IllegalArgumentException("Cannot parse transform \""+s+"\" :"+err); - } - } - - /** test */ - public static void main(String args[]) - throws Exception - { - for(String s:args) - { - AffineTransform tr=parse(s); - double f[]=new double[6]; - tr.getMatrix(f); - System.out.println("("+ - f[0]+","+f[1]+","+ - f[2]+","+f[3]+","+ - f[4]+","+f[5]+")" - ); - } - } - } -PARSER_END(SVGTransformParser) - -TOKEN: - { - - | - | - | <#DIGIT: ["0"-"9"] > - | )+ > - | | ) > - | <#EXPONENT: ("E"|"e") ()? > - | )? - ( - ()* ()* ()? - | () - ) - > - | - | - | - | - | - | - | - | - | - } - -SKIP : - { - " " - | "\t" - | "\n" - | "\r" - } - -public AffineTransform transform(): - { - AffineTransform newtr; - AffineTransform tr=new AffineTransform(); - } - { - (( newtr = tr_item() - { - tr.concatenate(newtr); - } )+ ) - { - return tr; - } - } - -public AffineTransform tr_item():{AffineTransform tr;} - { - ( - tr=matrix() - | tr=translate() - | tr=rotate() - | tr=scale() - ) - { - return tr; - } - } - -private AffineTransform matrix():{double i1,i2,i3,i4,i5,i6;} - { - - - i1= number() - - i2= number() - - i3= number() - - i4= number() - - i5= number() - - i6= number() - - { - return new AffineTransform(i1,i2,i3,i4,i5,i6); - } - } - -private AffineTransform translate():{double i1,i2;} - { - - - i1= number() - ()? 
- i2= number() - - { - return AffineTransform.getTranslateInstance(i1,i2); - } - } - -private AffineTransform rotate():{ - double i1; - Double i2=null; - Double i3=null; - } - { - - - i1= number() - ( - ()? - i2= number() - ()? - i3= number() - )? - - { - if(i2!=null) - { - return AffineTransform.getRotateInstance(i1,i2,i3); - } - return AffineTransform.getRotateInstance(i1); - } - } - - -private AffineTransform scale():{ - double i1; - Double i2=null; - } - { - - - i1= number() - ( - ()? - i2= number() - )? - - { - return AffineTransform.getScaleInstance(i1,i2==null?i1:i2.doubleValue()); - } - } - -private double number():{double f;} - { - (f=integer()|f=floating()) - { - return f; - } - } - -private double floating():{Token t;} - { - t= { return Double.parseDouble(t.image);} - } - -private int integer():{int n;int sig=1;} - { - ({sig=1;} | {sig=-1;})? n=positiveInteger() { return n*sig;} - } - -private int positiveInteger():{Token t;} - { - t= { return Integer.parseInt(t.image);} - } - - diff --git a/gui/lindenb-src/org/lindenb/sw/vocabulary/Namespace.java b/gui/lindenb-src/org/lindenb/sw/vocabulary/Namespace.java deleted file mode 100644 index 0d9ddaa6..00000000 --- a/gui/lindenb-src/org/lindenb/sw/vocabulary/Namespace.java +++ /dev/null @@ -1,5 +0,0 @@ -package org.lindenb.sw.vocabulary; - -public class Namespace { - -} diff --git a/gui/lindenb-src/org/lindenb/sw/vocabulary/SVG.java b/gui/lindenb-src/org/lindenb/sw/vocabulary/SVG.java deleted file mode 100644 index 42c93ecb..00000000 --- a/gui/lindenb-src/org/lindenb/sw/vocabulary/SVG.java +++ /dev/null @@ -1,6 +0,0 @@ -package org.lindenb.sw.vocabulary; - -public class SVG extends Namespace { -public static final String NS="http://www.w3.org/2000/svg"; -public static final String DOCTYPE=""; -} diff --git a/gui/lindenb-src/org/lindenb/util/Compilation.java b/gui/lindenb-src/org/lindenb/util/Compilation.java deleted file mode 100644 index 34feba5c..00000000 --- a/gui/lindenb-src/org/lindenb/util/Compilation.java +++ /dev/null @@ -1,20 +0,0 @@ -/** - * - */ -package org.lindenb.util; - -/** - * @author pierre - * - */ -public class Compilation { -private Compilation() - { - - } -static public String getName() { return "?";} -static public String getDate() { return "__DATE__";} -static public String getUser() { return "__USER__";} -static public String getPath() { return "__PWD__";} -static public String getLabel() { return "Compiled by "+getUser()+" on "+getDate()+" in "+getPath();} -} diff --git a/gui/lindenb-src/org/lindenb/util/StringUtils.java b/gui/lindenb-src/org/lindenb/util/StringUtils.java deleted file mode 100644 index d5e9d6c4..00000000 --- a/gui/lindenb-src/org/lindenb/util/StringUtils.java +++ /dev/null @@ -1,200 +0,0 @@ -package org.lindenb.util; - -import java.util.Arrays; -import java.util.Collection; - -/** - * Utilities for Strings or CharSequence - * @author lindenb - * - */ -public class StringUtils - { - protected StringUtils() - { - } - - @Override - protected final Object clone() throws CloneNotSupportedException { - throw new CloneNotSupportedException(); - } - - /** returns the substring of the first argument string that precedes the first occurrence of the second argument string in the first argument string - * or the empty string if the first argument string does not contain the second argument string. 
*/ - public static String substringBefore(String s,String delim) - { - int i=s.indexOf(delim); - return(i==-1?null:s.substring(0,i)); - } - - /** returns the substring of the first argument string that precedes the first occurrence of the second argument char in the first argument string - * or null if the first argument string does not contain the second argument string. */ - public static String substringBefore(String s,char delim) - { - int i=s.indexOf(delim); - return(i==-1?null:s.substring(0,i)); - } - - - /** returns the substring of the first argument string that precedes the first occurrence of the second argument char in the first argument string - * or the empty string if the first argument string does not contain the second argument string. */ - public static String substringAfter(String s,String delim) - { - int i=s.indexOf(delim); - return(i==-1?null:s.substring(i+delim.length())); - } - - /** returns the substring of the first argument string that precedes the first occurrence of the second argument char in the first argument string - * or the empty string if the first argument string does not contain the second argument string. */ - public static String substringAfter(String s,char delim) - { - int i=s.indexOf(delim); - return(i==-1?null:s.substring(i+1)); - } - - /** returns wether the sequence is empty of null */ - public static boolean isEmpty(CharSequence s) - { - return s==null || s.length()==0; - } - - /** return wether the sequence is null, empty of contains only white characters */ - public static boolean isBlank(CharSequence s) - { - if(isEmpty(s)) return true; - for(int i=0;i< s.length();++i) - { - if(!Character.isWhitespace(s.charAt(i))) return false; - } - return true; - } - /** return wether the sequence is null, empty of contains only white characters */ - public static boolean isBlank(char array[],int start,int length) - { - if(array==null || length==0) return true; - for(int i=0;i< length ;++i) - { - if(!Character.isWhitespace(array[start+i])) return false; - } - return true; - } - - /** remove simple or double quote from a String */ - public static String unquote(String s) - { - if(s==null) return null; - if(s.length()>1 && ((s.startsWith("\'") && s.endsWith("\'")) || (s.startsWith("\"") && s.endsWith("\"")))) - { - return s.substring(1, s.length()-1); - } - return s; - } - - /** anwsers wether the first string is in the choice */ - public static boolean isIn(String search,String...choice) - { - for(String s:choice) if(s.equals(search)) return true; - return false; - } - - /** anwsers wether the first string is in the choice */ - public static boolean isInIgnoreCase(String search,String...choice) - { - for(String s:choice) if(s.equalsIgnoreCase(search)) return true; - return false; - } - - /** anwsers wether the first string starts with any of the other strings */ - public static boolean startsWith(String search,String...starts) - { - for(String s:starts) if(search.startsWith(s)) return true; - return false; - } - /** anwsers wether the first string ends with any of the other strings */ - public static boolean endsWith(String search,String...ends) - { - for(String s:ends) if(search.endsWith(s)) return true; - return false; - } - - public static String ljust(String s,int width,char fillchar) - { - if(s==null || s.length()>=width) return s; - - StringBuilder b=new StringBuilder(width); - b.append(s); - while(b.length()=width) return s; - StringBuilder b=new StringBuilder(width); - while(b.length()<(width-s.length())) - { - b.append(fillchar); - } - 
b.append(s); - return b.toString(); - } - - public static String rjust(String s,int width) - { - return rjust(s, width,' '); - } - - public static String swapcase(String s) - { - StringBuilder b=new StringBuilder(s.length()); - for(int i=0;i< s.length();++i) - { - char c=s.charAt(i); - if(Character.isLetter(c)) - { - if(Character.isUpperCase(c)) - { - c=Character.toLowerCase(c); - } - else - { - c=Character.toUpperCase(c); - } - } - b.append(c); - } - return b.toString(); - } - - public static String join(Collection c,String sep) - { - StringBuilder b=new StringBuilder(); - boolean first=true; - for(Object o:c) - { - if(!first) b.append(sep); - first=false; - b.append(String.valueOf(o)); - } - return b.toString(); - } - public static String join(Collection c) - { - return join(c," "); - } - public static String join(Object c[],String sep) - { - return join(Arrays.asList(c),sep); - } - public static String join(Object c[]) - { - return join(Arrays.asList(c)); - } - } diff --git a/gui/lindenb-src/org/lindenb/xml/XMLUtilities.java b/gui/lindenb-src/org/lindenb/xml/XMLUtilities.java deleted file mode 100644 index 60074b8a..00000000 --- a/gui/lindenb-src/org/lindenb/xml/XMLUtilities.java +++ /dev/null @@ -1,662 +0,0 @@ -package org.lindenb.xml; - -import java.io.IOException; -import java.io.Writer; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - - -import org.lindenb.util.StringUtils; -import org.w3c.dom.Attr; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.w3c.dom.Node; - -/** Static methods for XML */ -public class XMLUtilities - { - public static final String VERSION= "1.0"; - /** xml header encoding UTF-8 */ - public static final String DECLARATION_UTF8= ""; - private XMLUtilities() {} - - -/** escape the XML of a given string */ -public static String escape(CharSequence s) - { - if(s==null) throw new NullPointerException("XML.escape(null)"); - int needed=-1; - for(int i=0;i< s.length();++i) - { - switch(s.charAt(i)) - { - case '\'': - case '\"': - case '&': - case '<': - case '>': needed=i; break; - - default: break; - } - if(needed!=-1) break; - } - if(needed==-1) return s.toString(); - StringBuilder buffer=new StringBuilder(s.subSequence(0,needed)); - for(int i=needed;i< s.length();++i) - { - switch(s.charAt(i)) - { - case '\'': buffer.append("'"); break; - case '\"': buffer.append("""); break; - case '&': buffer.append("&"); break; - case '<': buffer.append("<"); break; - case '>': buffer.append(">"); break; - default: buffer.append(s.charAt(i));break; - } - } - return buffer.toString(); - } - -/** write a CharSequence to a java.io.Writer */ -public static Writer escape(Writer out,CharSequence s) throws IOException - { - for(int i=0;i< s.length();++i) - { - switch(s.charAt(i)) - { - case '\'': out.write("'"); break; - case '\"': out.write("""); break; - case '&': out.write("&"); break; - case '<': out.write("<"); break; - case '>': out.write(">"); break; - default: out.write(s.charAt(i));break; - } - } - return out; - } - -/** Lousy function removing the tags in a string, it does NOT unescapes the entities */ -public static String removeTags(CharSequence seq) - { - boolean inXML=false; - StringBuilder b= new StringBuilder(seq.length()); - for(int i=0;i< seq.length();++i) - { - if(inXML) - { - if(seq.charAt(i)=='>') - { - inXML=false; - } - } - else - { - if(seq.charAt(i)=='<') - { - inXML=true; - } - else - { - b.append(seq.charAt(i)); - } - } - } - return b.toString(); - } - - -public static String unescape(CharSequence seq) - { 
- return removeTags(seq) - .replaceAll(">", ">") - .replaceAll("<", "<") - .replaceAll("'", "\'") - .replaceAll(""", "\"") - .replaceAll("&", "&"); - } - -/** - * return wether the given node matches ns and localName - * @param node - * @param ns namespace use null as a wildcard for all namespaceuri - * @param localName use null as a wildcard for all localNames - * @return is node matching - */ -public static boolean isA(Node node,String ns,String localName) - { - if(node.getNodeType()!=Node.ELEMENT_NODE) return false; - if(ns!=null && !ns.equals(node.getNamespaceURI()))return false; - if(localName!=null && !localName.equals(node.getLocalName())) return false; - return true; - } - - -/** - * count number of element under root matching ns and localName - * @param root - * @param ns namespace use null as a wildcard for all namespaceuri - * @param localName use null as a wildcard for all localNames - * @return number Found - */ -public static int count(Node root,String ns,String localName) - { - int n=0; - for(Node c=root.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(!isA(c,ns,localName)) continue; - ++n; - } - return n; - } - -/** - * count number of element under root - * @param root - * @return number of element Found - */ -public static int count(Node root) - { - if(root==null) throw new NullPointerException("count(null)"); - int n=0; - for(Node c=root.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()!=Element.ELEMENT_NODE) continue; - ++n; - } - return n; - } - -/** - * count any kind of node under root - * @param root - * @return number of element Found - */ -public static int countAllNodes(Node root) - { - int n=0; - for(Node c=root.getFirstChild();c!=null;c=c.getNextSibling()) - { - ++n; - } - return n; - } - -/** - * return the first Element under root matching ns and localName - * @param root - * @param ns namespace use null as a wildcard for all namespaceuri - * @param localName use null as a wildcard for all localNames - * @return element Found or null - */ -public static Element firstChild(Node root,String ns,String localName) - { - for(Node c=root.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(!isA(c,ns,localName)) continue; - return Element.class.cast(c); - } - return null; - } - -/** - * return the first Element under root matching the given tagName - * @param root - * @param taglName element name - * @return element Found or null - */ -public static Element firstChild(Node root,String tagName) - { - for(Node c=root.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(!(c.getNodeType()==Node.ELEMENT_NODE && c.getNodeName().equals(tagName))) continue; - return Element.class.cast(c); - } - return null; - } - - - -/** - * return the first Element under root - */ -public static Element firstChild(Node root) - { - for(Node c=root.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()!=Node.ELEMENT_NODE) continue; - return Element.class.cast(c); - } - return null; - } - - -/** - * return one or no Element under root - */ -public static Element oneOrZero(Node root) - { - Element found=null; - for(Node c=root.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()!=Node.ELEMENT_NODE) continue; - if(found!=null) throw new IllegalArgumentException( - "found two elements under "+node2path(root)); - found= Element.class.cast(c); - } - return found; - } - -/** - * return one or no Element under root - */ -public static Element oneOrZero(Node root,String tagName) - { - Element found=null; - for(Node 
c=root.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()!=Node.ELEMENT_NODE) continue; - if(!tagName.equals(c.getNodeName())) continue; - if(found!=null) throw new IllegalArgumentException( - "found two elements under "+node2path(root)); - found= Element.class.cast(c); - } - return found; - } - -/** - * return one or no Element under root - * @param ns namespace can be used as a wildcard if null - * @param localName can be used as a wildcard if null - */ -public static Element oneOrZero(Node root,String ns,String local) - { - Element found=null; - for(Node c=root.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()!=Node.ELEMENT_NODE) continue; - if(ns!=null && !ns.equals(c.getNamespaceURI())) continue; - if(local!=null && !local.equals(c.getLocalName())) continue; - if(found!=null) throw new IllegalArgumentException( - "found two elements under "+node2path(root)); - found= Element.class.cast(c); - } - return found; - } - -/** - * return one and only one Element under root - */ -public static Element one(Node root) - { - Element found=null; - for(Node c=root.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()!=Node.ELEMENT_NODE) continue; - if(found!=null) throw new IllegalArgumentException( - "found two elements under "+node2path(root)); - found= Element.class.cast(c); - } - if(found==null) throw new IllegalArgumentException( - "found no element under "+node2path(root)); - return found; - } - -/** - * return one and only one Element under root - */ -public static Element one(Node root,String tagName) - { - Element found=null; - for(Node c=root.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()!=Node.ELEMENT_NODE) continue; - if(!tagName.equals(c.getNodeName())) continue; - if(found!=null) throw new IllegalArgumentException( - "found two elements under "+node2path(root)); - found= Element.class.cast(c); - } - if(found==null) throw new IllegalArgumentException( - "found no element under "+node2path(root)); - return found; - } - -/** - * return one and only one Element under root - * @param ns namespace can be used as a wildcard if null - * @param localName can be used as a wildcard if null - */ -public static Element one(Node root,String ns,String local) - { - Element found=null; - for(Node c=root.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()!=Node.ELEMENT_NODE) continue; - if(ns!=null && !ns.equals(c.getNamespaceURI())) continue; - if(local!=null && !local.equals(c.getLocalName())) continue; - if(found!=null) throw new IllegalArgumentException( - "found two elements under "+node2path(root)); - found= Element.class.cast(c); - } - if(found==null) throw new IllegalArgumentException( - "found no element under "+node2path(root)); - return found; - } - - - -/** return Collection over child elements */ -public static List elements(Node parent) - { - List v= new ArrayList(); - for(Node c=parent.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()==Node.ELEMENT_NODE) - { - v.add( Element.class.cast(c)); - } - } - return v; - } - -/** - * return a collection of all Element under parent matching ns and localName - * @param parent - * @param ns namespace use null as a wildcard for all namespaceuri - * @param localName use null as a wildcard for all localNames - * @return element Found or null - */ -public static List elements(Node parent,String namespace,String localName) - { - List v= new ArrayList(); - for(Node c=parent.getFirstChild();c!=null;c=c.getNextSibling()) - { - 
if(isA(c,namespace,localName)) - { - v.add( Element.class.cast(c)); - } - } - return v; - } - -/** - * return a collection of all Element under parent matching the given tag name - * @param parent - * @param tagName the element tagName - * @return element Found or null - */ -public static List elements(Node parent,String tagName) - { - List v= new ArrayList(); - for(Node c=parent.getFirstChild();c!=null;c=c.getNextSibling()) - { - if(c.getNodeType()==Node.ELEMENT_NODE && - tagName.equals(c.getNodeName())) - { - v.add( Element.class.cast(c)); - } - } - return v; - } - - -/** - * return a XPATH-like description of the node - */ -public static String node2path(Node n) - { - StringBuilder b=new StringBuilder(); - while(n!=null) - { - switch(n.getNodeType()) - { - case Node.ATTRIBUTE_NODE: - { - b.insert(0,"/@"+n.getNodeName()); - n= Attr.class.cast(n).getOwnerElement(); - continue; - } - case Node.TEXT_NODE: - { - b.insert(0,"text()"); - break; - } - case Node.COMMENT_NODE: - { - b.insert(0,"comment()"); - break; - } - case Node.ELEMENT_NODE: - { - int L=0; - Node curr= n; - while(curr!=null) - { - if(curr.getNodeType()==Node.ELEMENT_NODE && - curr.getNodeName().equals(n.getNodeName())) - { - L++; - } - curr=curr.getPreviousSibling(); - } - - b.insert(0,"/"+n.getNodeName()+"["+(L)+"]"); - break; - } - } - n=n.getParentNode(); - } - return b.toString(); - } - -/** return the level of a node */ -public static int getLevel(Node n) - { - if(n==null) return -1; - int L=0; - while(n.getParentNode()!=null) - { - ++L; - n= n.getParentNode(); - } - return L; - } - -/** remove all children under n - * @returns the number of node removed */ -public static int removeChildren(Node n) - { - int L=0; - while(n.hasChildNodes()) - { - ++L; - n.removeChild( n.getFirstChild()); - } - return L; - } - - -/** returns wether is document is Data Oriented - * @throws IllegalArgumentException if this document is not DataOriented*/ -public static void validateAsDataOrientedDocument(Node node) - throws IllegalArgumentException - { - if(node==null) throw new NullPointerException("node is null"); - if(node.getNodeType()==Node.DOCUMENT_NODE) - { - Element root= Document.class.cast(node).getDocumentElement(); - if(root==null) return; - validateAsDataOrientedDocument(root); - } - else if(node.getNodeType()==Node.ELEMENT_NODE) - { - boolean containsTag=false; - boolean blank=true; - for(Node n1=node.getFirstChild();n1!=null;n1=n1.getNextSibling()) - { - switch(n1.getNodeType()) - { - case Node.ELEMENT_NODE: - { - containsTag=true; - validateAsDataOrientedDocument(Element.class.cast(n1)); - break; - } - case Node.TEXT_NODE: - case Node.CDATA_SECTION_NODE: - { - if(blank) - { - blank=StringUtils.isBlank(n1.getTextContent()); - } - break; - } - } - } - if(!blank && containsTag) - { - throw new IllegalArgumentException("not a Data Oriented Document: see "+node2path(node)); - } - } - - } - -private static abstract class AbstractIter -implements Iterator - { - protected boolean _first=true; - protected boolean _hasNextCalled=false; - protected Element _next=null; - protected boolean _eofMet=false; - protected abstract boolean accept(Element e); - protected abstract Node firstChild(); - - public boolean hasNext() - { - if(_hasNextCalled) - { - return _next!=null; - } - _hasNextCalled=true; - if(_eofMet) - { - return false; - } - Node c; - if(_first) - { - c= firstChild(); - _first=false; - } - else - { - c=_next.getNextSibling(); - } - while(c!=null) - { - if(c.getNodeType()==Node.ELEMENT_NODE && - accept(Element.class.cast(c))) - { 
- break; - } - c=c.getNextSibling(); - } - if(c==null) - { - _eofMet=true; - _next=null; - return false; - } - else - { - _next=Element.class.cast(c); - return true; - } - } - - public Element next() - { - if(!_hasNextCalled) hasNext(); - if(_next==null) throw new IllegalStateException(); - _hasNextCalled=false; - return _next; - } - - public void remove() - { - throw new UnsupportedOperationException(); - } - } - -private static class ForEach1 - implements Iterable - { - private Node root; - private String namespaceuri; - private String localName; - private String tagName; - - class Iter extends AbstractIter - { - @Override - protected Node firstChild() - { - return ForEach1.this.root.getFirstChild(); - } - - @Override - protected boolean accept(Element c) - { - if(ForEach1.this.namespaceuri!=null || ForEach1.this.localName!=null) - { - if(ForEach1.this.namespaceuri!=null && !ForEach1.this.namespaceuri.equals(c.getNamespaceURI())) return false; - if(ForEach1.this.localName!=null && !ForEach1.this.localName.equals(c.getLocalName())) return false; - return true; - } - else if(ForEach1.this.tagName!=null) - { - return ForEach1.this.tagName.equals(c.getNodeName()); - } - else - { - return true; - } - - } - } - - ForEach1(Node root,String namespaceuri,String localName,String tagName) - { - this.root=root; - this.namespaceuri=namespaceuri; - this.localName=localName; - this.tagName=tagName; - } - - public Iterator iterator() - { - return new Iter(); - } - } - - -/** - * - * @param parent - * @param ns namespace can be used as a wildcard if null - * @param localName can be used as a wildcard if null - * @return - */ -public static Iterable forEach(Node parent,String namespaceuri,String localName) - { - return new ForEach1(parent,namespaceuri,localName,null); - } -public static Iterable forEach(Node parent,String name) - { - return new ForEach1(parent,null,null,name); - } -public static Iterable forEach(Node parent) - { - return new ForEach1(parent,null,null,null); - } -} diff --git a/gui/resources/icons/quanto_icon.svg b/gui/resources/icons/quanto_icon.svg deleted file mode 100644 index e6d87780..00000000 --- a/gui/resources/icons/quanto_icon.svg +++ /dev/null @@ -1,354 +0,0 @@ - - - -image/svg+xml - - - - - - - - - - - - - - - - - - - - - - - - - Q - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/gui/resources/icons/quanto_icon_128.png b/gui/resources/icons/quanto_icon_128.png deleted file mode 100644 index 9b62b2ef..00000000 Binary files a/gui/resources/icons/quanto_icon_128.png and /dev/null differ diff --git a/gui/resources/icons/quanto_icon_16.png b/gui/resources/icons/quanto_icon_16.png deleted file mode 100644 index a0acc390..00000000 Binary files a/gui/resources/icons/quanto_icon_16.png and /dev/null differ diff --git a/gui/resources/icons/quanto_icon_24.png b/gui/resources/icons/quanto_icon_24.png deleted file mode 100644 index 192e3b8b..00000000 Binary files a/gui/resources/icons/quanto_icon_24.png and /dev/null differ diff --git a/gui/resources/icons/quanto_icon_32.png b/gui/resources/icons/quanto_icon_32.png deleted file mode 100644 index 16ec0d82..00000000 Binary files a/gui/resources/icons/quanto_icon_32.png and /dev/null differ diff --git a/gui/resources/icons/quanto_icon_48.png b/gui/resources/icons/quanto_icon_48.png deleted file mode 100644 index 2f5ab44b..00000000 Binary files a/gui/resources/icons/quanto_icon_48.png and /dev/null differ diff --git a/gui/resources/icons/quanto_icon_512.png b/gui/resources/icons/quanto_icon_512.png deleted 
file mode 100644 index 43a5c872..00000000 Binary files a/gui/resources/icons/quanto_icon_512.png and /dev/null differ diff --git a/gui/resources/icons/quanto_icon_64.png b/gui/resources/icons/quanto_icon_64.png deleted file mode 100644 index cf2153ed..00000000 Binary files a/gui/resources/icons/quanto_icon_64.png and /dev/null differ diff --git a/gui/resources/icons/quanto_icon_small.svg b/gui/resources/icons/quanto_icon_small.svg deleted file mode 100644 index ccb47151..00000000 --- a/gui/resources/icons/quanto_icon_small.svg +++ /dev/null @@ -1,267 +0,0 @@ - - - -image/svg+xml - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/gui/resources/theories/black_white/black-white-theory.qth b/gui/resources/theories/black_white/black-white-theory.qth deleted file mode 100644 index 6aa88dc8..00000000 --- a/gui/resources/theories/black_white/black-white-theory.qth +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "Black/White", - "coreName": "ghz_w", - "vertexTypePath": "", - "vertexTypes": { - "GHZ": { - "mnemonic": "g", - "visualization": { - "node": "white.svg" - } - }, - "W": { - "mnemonic": "w", - "visualization": { - "node": "black.svg" - } - } - } -} diff --git a/gui/resources/theories/black_white/black.svg b/gui/resources/theories/black_white/black.svg deleted file mode 100644 index 37d9e079..00000000 --- a/gui/resources/theories/black_white/black.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - diff --git a/gui/resources/theories/black_white/white.svg b/gui/resources/theories/black_white/white.svg deleted file mode 100644 index 70efdd36..00000000 --- a/gui/resources/theories/black_white/white.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - diff --git a/gui/resources/theories/red_green/green.svg b/gui/resources/theories/red_green/green.svg deleted file mode 100644 index a5d4ea92..00000000 --- a/gui/resources/theories/red_green/green.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - \ No newline at end of file diff --git a/gui/resources/theories/red_green/hadamard.svg b/gui/resources/theories/red_green/hadamard.svg deleted file mode 100644 index 15eda292..00000000 --- a/gui/resources/theories/red_green/hadamard.svg +++ /dev/null @@ -1,17 +0,0 @@ - - - - - H - diff --git a/gui/resources/theories/red_green/red-green-theory.qth b/gui/resources/theories/red_green/red-green-theory.qth deleted file mode 100644 index bc00400c..00000000 --- a/gui/resources/theories/red_green/red-green-theory.qth +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "Red/Green", - "coreName": "red_green", - "vertexTypePath": "type", - "vertexTypes": { - "X": { - "labelPath": "angle.pretty", - "labelDataType": "MathExpression", - "mnemonic": "r", - "visualization": { - "node": "red.svg", - "label": { - "fill": "#ff9696" - } - } - }, - "Z": { - "labelPath": "angle.pretty", - "labelDataType": "MathExpression", - "mnemonic": "g", - "visualization": { - "node": "green.svg", - "label": { - "fill": "#96ff96" - } - } - }, - "hadamard": { - "mnemonic": "h", - "visualization": { - "node": "hadamard.svg" - } - } - } -} diff --git a/gui/resources/theories/red_green/red.svg b/gui/resources/theories/red_green/red.svg deleted file mode 100644 index 39047fa7..00000000 --- a/gui/resources/theories/red_green/red.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - diff --git a/gui/resources/theories/string/string-theory.qth b/gui/resources/theories/string/string-theory.qth deleted file mode 100644 index 537a089f..00000000 --- 
a/gui/resources/theories/string/string-theory.qth +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - diff --git a/gui/resources/theories/string/white.svg b/gui/resources/theories/string/white.svg deleted file mode 100644 index 70efdd36..00000000 --- a/gui/resources/theories/string/white.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - diff --git a/gui/resources/toolbarButtonGraphics/development/Applet16.gif b/gui/resources/toolbarButtonGraphics/development/Applet16.gif deleted file mode 100644 index b34de8a8..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/Applet16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/Applet24.gif b/gui/resources/toolbarButtonGraphics/development/Applet24.gif deleted file mode 100644 index bce83f2c..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/Applet24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/Application16.gif b/gui/resources/toolbarButtonGraphics/development/Application16.gif deleted file mode 100644 index 50ab3ace..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/Application16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/Application24.gif b/gui/resources/toolbarButtonGraphics/development/Application24.gif deleted file mode 100644 index f2dd5c69..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/Application24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/ApplicationDeploy16.gif b/gui/resources/toolbarButtonGraphics/development/ApplicationDeploy16.gif deleted file mode 100644 index 02be5a71..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/ApplicationDeploy16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/ApplicationDeploy24.gif b/gui/resources/toolbarButtonGraphics/development/ApplicationDeploy24.gif deleted file mode 100644 index cafe142c..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/ApplicationDeploy24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/Bean16.gif b/gui/resources/toolbarButtonGraphics/development/Bean16.gif deleted file mode 100644 index c773b994..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/Bean16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/Bean24.gif b/gui/resources/toolbarButtonGraphics/development/Bean24.gif deleted file mode 100644 index 63619c62..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/Bean24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/BeanAdd16.gif b/gui/resources/toolbarButtonGraphics/development/BeanAdd16.gif deleted file mode 100644 index 09597d7b..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/BeanAdd16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/BeanAdd24.gif b/gui/resources/toolbarButtonGraphics/development/BeanAdd24.gif deleted file mode 100644 index 70f04a9d..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/BeanAdd24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/EnterpriseJavaBean16.gif b/gui/resources/toolbarButtonGraphics/development/EnterpriseJavaBean16.gif deleted file mode 100644 index c68c353a..00000000 Binary files 
a/gui/resources/toolbarButtonGraphics/development/EnterpriseJavaBean16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/EnterpriseJavaBean24.gif b/gui/resources/toolbarButtonGraphics/development/EnterpriseJavaBean24.gif deleted file mode 100644 index d3c9f355..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/EnterpriseJavaBean24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/EnterpriseJavaBeanJar16.gif b/gui/resources/toolbarButtonGraphics/development/EnterpriseJavaBeanJar16.gif deleted file mode 100644 index 4be30080..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/EnterpriseJavaBeanJar16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/EnterpriseJavaBeanJar24.gif b/gui/resources/toolbarButtonGraphics/development/EnterpriseJavaBeanJar24.gif deleted file mode 100644 index 98c9be88..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/EnterpriseJavaBeanJar24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/Host16.gif b/gui/resources/toolbarButtonGraphics/development/Host16.gif deleted file mode 100644 index 3bacd177..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/Host16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/Host24.gif b/gui/resources/toolbarButtonGraphics/development/Host24.gif deleted file mode 100644 index 85043120..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/Host24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/J2EEApplication16.gif b/gui/resources/toolbarButtonGraphics/development/J2EEApplication16.gif deleted file mode 100644 index 137c3fa9..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/J2EEApplication16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/J2EEApplication24.gif b/gui/resources/toolbarButtonGraphics/development/J2EEApplication24.gif deleted file mode 100644 index dd928013..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/J2EEApplication24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/J2EEApplicationClient16.gif b/gui/resources/toolbarButtonGraphics/development/J2EEApplicationClient16.gif deleted file mode 100644 index 3bb63349..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/J2EEApplicationClient16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/J2EEApplicationClient24.gif b/gui/resources/toolbarButtonGraphics/development/J2EEApplicationClient24.gif deleted file mode 100644 index a07d3747..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/J2EEApplicationClient24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/J2EEApplicationClientAdd16.gif b/gui/resources/toolbarButtonGraphics/development/J2EEApplicationClientAdd16.gif deleted file mode 100644 index 0f0dbdd0..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/J2EEApplicationClientAdd16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/J2EEApplicationClientAdd24.gif b/gui/resources/toolbarButtonGraphics/development/J2EEApplicationClientAdd24.gif deleted file mode 100644 index 5a8302fd..00000000 Binary files 
a/gui/resources/toolbarButtonGraphics/development/J2EEApplicationClientAdd24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/J2EEServer16.gif b/gui/resources/toolbarButtonGraphics/development/J2EEServer16.gif deleted file mode 100644 index 3660631d..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/J2EEServer16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/J2EEServer24.gif b/gui/resources/toolbarButtonGraphics/development/J2EEServer24.gif deleted file mode 100644 index d26862d5..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/J2EEServer24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/Jar16.gif b/gui/resources/toolbarButtonGraphics/development/Jar16.gif deleted file mode 100644 index ee443878..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/Jar16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/Jar24.gif b/gui/resources/toolbarButtonGraphics/development/Jar24.gif deleted file mode 100644 index e7fd0f8b..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/Jar24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/JarAdd16.gif b/gui/resources/toolbarButtonGraphics/development/JarAdd16.gif deleted file mode 100644 index bebef0fe..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/JarAdd16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/JarAdd24.gif b/gui/resources/toolbarButtonGraphics/development/JarAdd24.gif deleted file mode 100644 index d6b746b0..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/JarAdd24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/Server16.gif b/gui/resources/toolbarButtonGraphics/development/Server16.gif deleted file mode 100644 index 4e76682e..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/Server16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/Server24.gif b/gui/resources/toolbarButtonGraphics/development/Server24.gif deleted file mode 100644 index 8a509245..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/Server24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/War16.gif b/gui/resources/toolbarButtonGraphics/development/War16.gif deleted file mode 100644 index 544ac783..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/War16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/War24.gif b/gui/resources/toolbarButtonGraphics/development/War24.gif deleted file mode 100644 index df61e136..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/War24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/WarAdd16.gif b/gui/resources/toolbarButtonGraphics/development/WarAdd16.gif deleted file mode 100644 index 633adcc7..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/WarAdd16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/WarAdd24.gif b/gui/resources/toolbarButtonGraphics/development/WarAdd24.gif deleted file mode 100644 index 1206afb5..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/WarAdd24.gif and /dev/null differ diff --git 
a/gui/resources/toolbarButtonGraphics/development/WebComponent16.gif b/gui/resources/toolbarButtonGraphics/development/WebComponent16.gif deleted file mode 100644 index 7065df9f..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/WebComponent16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/WebComponent24.gif b/gui/resources/toolbarButtonGraphics/development/WebComponent24.gif deleted file mode 100644 index 4d63a54c..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/WebComponent24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/WebComponentAdd16.gif b/gui/resources/toolbarButtonGraphics/development/WebComponentAdd16.gif deleted file mode 100644 index 4a265c71..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/WebComponentAdd16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/development/WebComponentAdd24.gif b/gui/resources/toolbarButtonGraphics/development/WebComponentAdd24.gif deleted file mode 100644 index 6c418e42..00000000 Binary files a/gui/resources/toolbarButtonGraphics/development/WebComponentAdd24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/About16.gif b/gui/resources/toolbarButtonGraphics/general/About16.gif deleted file mode 100644 index 04da95eb..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/About16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/About24.gif b/gui/resources/toolbarButtonGraphics/general/About24.gif deleted file mode 100644 index 9e116895..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/About24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Add16.gif b/gui/resources/toolbarButtonGraphics/general/Add16.gif deleted file mode 100644 index 0fc47e19..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Add16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Add24.gif b/gui/resources/toolbarButtonGraphics/general/Add24.gif deleted file mode 100644 index fecc7a83..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Add24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignBottom16.gif b/gui/resources/toolbarButtonGraphics/general/AlignBottom16.gif deleted file mode 100644 index 761bd0b9..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignBottom16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignBottom24.gif b/gui/resources/toolbarButtonGraphics/general/AlignBottom24.gif deleted file mode 100644 index 62e921d4..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignBottom24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignCenter16.gif b/gui/resources/toolbarButtonGraphics/general/AlignCenter16.gif deleted file mode 100644 index 1cf1427b..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignCenter16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignCenter24.gif b/gui/resources/toolbarButtonGraphics/general/AlignCenter24.gif deleted file mode 100644 index f19d8ed4..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignCenter24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignJustifyHorizontal16.gif 
b/gui/resources/toolbarButtonGraphics/general/AlignJustifyHorizontal16.gif deleted file mode 100644 index bec011af..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignJustifyHorizontal16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignJustifyHorizontal24.gif b/gui/resources/toolbarButtonGraphics/general/AlignJustifyHorizontal24.gif deleted file mode 100644 index 32a1070d..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignJustifyHorizontal24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignJustifyVertical16.gif b/gui/resources/toolbarButtonGraphics/general/AlignJustifyVertical16.gif deleted file mode 100644 index 5b44b231..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignJustifyVertical16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignJustifyVertical24.gif b/gui/resources/toolbarButtonGraphics/general/AlignJustifyVertical24.gif deleted file mode 100644 index f5d4a00b..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignJustifyVertical24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignLeft16.gif b/gui/resources/toolbarButtonGraphics/general/AlignLeft16.gif deleted file mode 100644 index 70793fb8..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignLeft16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignLeft24.gif b/gui/resources/toolbarButtonGraphics/general/AlignLeft24.gif deleted file mode 100644 index 4db364c0..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignLeft24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignRight16.gif b/gui/resources/toolbarButtonGraphics/general/AlignRight16.gif deleted file mode 100644 index 22c8651e..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignRight16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignRight24.gif b/gui/resources/toolbarButtonGraphics/general/AlignRight24.gif deleted file mode 100644 index 9c82ad46..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignRight24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignTop16.gif b/gui/resources/toolbarButtonGraphics/general/AlignTop16.gif deleted file mode 100644 index dbc816bc..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignTop16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/AlignTop24.gif b/gui/resources/toolbarButtonGraphics/general/AlignTop24.gif deleted file mode 100644 index 8fd07a2e..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/AlignTop24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Bookmarks16.gif b/gui/resources/toolbarButtonGraphics/general/Bookmarks16.gif deleted file mode 100644 index 137b3ca6..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Bookmarks16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Bookmarks24.gif b/gui/resources/toolbarButtonGraphics/general/Bookmarks24.gif deleted file mode 100644 index 3b180449..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Bookmarks24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/ComposeMail16.gif 
b/gui/resources/toolbarButtonGraphics/general/ComposeMail16.gif deleted file mode 100644 index c7f574b4..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/ComposeMail16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/ComposeMail24.gif b/gui/resources/toolbarButtonGraphics/general/ComposeMail24.gif deleted file mode 100644 index 3b5ade70..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/ComposeMail24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/ContextualHelp16.gif b/gui/resources/toolbarButtonGraphics/general/ContextualHelp16.gif deleted file mode 100644 index 77cee5d6..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/ContextualHelp16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/ContextualHelp24.gif b/gui/resources/toolbarButtonGraphics/general/ContextualHelp24.gif deleted file mode 100644 index 4391708b..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/ContextualHelp24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Copy16.gif b/gui/resources/toolbarButtonGraphics/general/Copy16.gif deleted file mode 100644 index fa986813..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Copy16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Copy24.gif b/gui/resources/toolbarButtonGraphics/general/Copy24.gif deleted file mode 100644 index c665d071..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Copy24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Cut16.gif b/gui/resources/toolbarButtonGraphics/general/Cut16.gif deleted file mode 100644 index 14b73a85..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Cut16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Cut24.gif b/gui/resources/toolbarButtonGraphics/general/Cut24.gif deleted file mode 100644 index 5c37d3af..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Cut24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Delete16.gif b/gui/resources/toolbarButtonGraphics/general/Delete16.gif deleted file mode 100644 index d9d1a338..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Delete16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Delete24.gif b/gui/resources/toolbarButtonGraphics/general/Delete24.gif deleted file mode 100644 index 96d799a0..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Delete24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Edit16.gif b/gui/resources/toolbarButtonGraphics/general/Edit16.gif deleted file mode 100644 index f56a27e1..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Edit16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Edit24.gif b/gui/resources/toolbarButtonGraphics/general/Edit24.gif deleted file mode 100644 index a5af7d7c..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Edit24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Export16.gif b/gui/resources/toolbarButtonGraphics/general/Export16.gif deleted file mode 100644 index d15a80ac..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Export16.gif and /dev/null differ diff --git 
a/gui/resources/toolbarButtonGraphics/general/Export24.gif b/gui/resources/toolbarButtonGraphics/general/Export24.gif deleted file mode 100644 index ee523416..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Export24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Find16.gif b/gui/resources/toolbarButtonGraphics/general/Find16.gif deleted file mode 100644 index abafbe28..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Find16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Find24.gif b/gui/resources/toolbarButtonGraphics/general/Find24.gif deleted file mode 100644 index c60430cb..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Find24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/FindAgain16.gif b/gui/resources/toolbarButtonGraphics/general/FindAgain16.gif deleted file mode 100644 index 913292ad..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/FindAgain16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/FindAgain24.gif b/gui/resources/toolbarButtonGraphics/general/FindAgain24.gif deleted file mode 100644 index 667da7f6..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/FindAgain24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Help16.gif b/gui/resources/toolbarButtonGraphics/general/Help16.gif deleted file mode 100644 index dc5c2d31..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Help16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Help24.gif b/gui/resources/toolbarButtonGraphics/general/Help24.gif deleted file mode 100644 index a2848d88..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Help24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/History16.gif b/gui/resources/toolbarButtonGraphics/general/History16.gif deleted file mode 100644 index bc278f9a..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/History16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/History24.gif b/gui/resources/toolbarButtonGraphics/general/History24.gif deleted file mode 100644 index 0bd250fb..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/History24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Import16.gif b/gui/resources/toolbarButtonGraphics/general/Import16.gif deleted file mode 100644 index b1605639..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Import16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Import24.gif b/gui/resources/toolbarButtonGraphics/general/Import24.gif deleted file mode 100644 index 6a34d20b..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Import24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Information16.gif b/gui/resources/toolbarButtonGraphics/general/Information16.gif deleted file mode 100644 index 5748e325..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Information16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Information24.gif b/gui/resources/toolbarButtonGraphics/general/Information24.gif deleted file mode 100644 index 16cb3def..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Information24.gif and /dev/null differ 
diff --git a/gui/resources/toolbarButtonGraphics/general/New16.gif b/gui/resources/toolbarButtonGraphics/general/New16.gif deleted file mode 100644 index 3513dfdd..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/New16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/New24.gif b/gui/resources/toolbarButtonGraphics/general/New24.gif deleted file mode 100644 index 1cc488d4..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/New24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Open16.gif b/gui/resources/toolbarButtonGraphics/general/Open16.gif deleted file mode 100644 index fabd5676..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Open16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Open24.gif b/gui/resources/toolbarButtonGraphics/general/Open24.gif deleted file mode 100644 index 2086bc29..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Open24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/PageSetup16.gif b/gui/resources/toolbarButtonGraphics/general/PageSetup16.gif deleted file mode 100644 index e5fde10b..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/PageSetup16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/PageSetup24.gif b/gui/resources/toolbarButtonGraphics/general/PageSetup24.gif deleted file mode 100644 index 25fad07f..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/PageSetup24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Paste16.gif b/gui/resources/toolbarButtonGraphics/general/Paste16.gif deleted file mode 100644 index f118c7ea..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Paste16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Paste24.gif b/gui/resources/toolbarButtonGraphics/general/Paste24.gif deleted file mode 100644 index 26cc4c58..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Paste24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Preferences16.gif b/gui/resources/toolbarButtonGraphics/general/Preferences16.gif deleted file mode 100644 index 32b77eee..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Preferences16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Preferences24.gif b/gui/resources/toolbarButtonGraphics/general/Preferences24.gif deleted file mode 100644 index 2e727b2c..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Preferences24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Print16.gif b/gui/resources/toolbarButtonGraphics/general/Print16.gif deleted file mode 100644 index 7eb82995..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Print16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Print24.gif b/gui/resources/toolbarButtonGraphics/general/Print24.gif deleted file mode 100644 index e6b4fb17..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Print24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/PrintPreview16.gif b/gui/resources/toolbarButtonGraphics/general/PrintPreview16.gif deleted file mode 100644 index c9bd34d9..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/PrintPreview16.gif and /dev/null 
differ diff --git a/gui/resources/toolbarButtonGraphics/general/PrintPreview24.gif b/gui/resources/toolbarButtonGraphics/general/PrintPreview24.gif deleted file mode 100644 index 6755ceaa..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/PrintPreview24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Properties16.gif b/gui/resources/toolbarButtonGraphics/general/Properties16.gif deleted file mode 100644 index c0c3d432..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Properties16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Properties24.gif b/gui/resources/toolbarButtonGraphics/general/Properties24.gif deleted file mode 100644 index 6871254a..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Properties24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Redo16.gif b/gui/resources/toolbarButtonGraphics/general/Redo16.gif deleted file mode 100644 index 8fdd814c..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Redo16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Redo24.gif b/gui/resources/toolbarButtonGraphics/general/Redo24.gif deleted file mode 100644 index 22f40b38..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Redo24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Refresh16.gif b/gui/resources/toolbarButtonGraphics/general/Refresh16.gif deleted file mode 100644 index cf7cea37..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Refresh16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Refresh24.gif b/gui/resources/toolbarButtonGraphics/general/Refresh24.gif deleted file mode 100644 index 577c4627..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Refresh24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Remove16.gif b/gui/resources/toolbarButtonGraphics/general/Remove16.gif deleted file mode 100644 index c1b4ca11..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Remove16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Remove24.gif b/gui/resources/toolbarButtonGraphics/general/Remove24.gif deleted file mode 100644 index fa40604c..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Remove24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Replace16.gif b/gui/resources/toolbarButtonGraphics/general/Replace16.gif deleted file mode 100644 index 69bc4326..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Replace16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Replace24.gif b/gui/resources/toolbarButtonGraphics/general/Replace24.gif deleted file mode 100644 index 04be12d0..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Replace24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Save16.gif b/gui/resources/toolbarButtonGraphics/general/Save16.gif deleted file mode 100644 index 954f1acc..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Save16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Save24.gif b/gui/resources/toolbarButtonGraphics/general/Save24.gif deleted file mode 100644 index bfa98a8d..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Save24.gif 
and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/SaveAll16.gif b/gui/resources/toolbarButtonGraphics/general/SaveAll16.gif deleted file mode 100644 index 2d31c8b2..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/SaveAll16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/SaveAll24.gif b/gui/resources/toolbarButtonGraphics/general/SaveAll24.gif deleted file mode 100644 index 9e4e1684..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/SaveAll24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/SaveAs16.gif b/gui/resources/toolbarButtonGraphics/general/SaveAs16.gif deleted file mode 100644 index 8d3929c8..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/SaveAs16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/SaveAs24.gif b/gui/resources/toolbarButtonGraphics/general/SaveAs24.gif deleted file mode 100644 index 97eb6fa3..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/SaveAs24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Search16.gif b/gui/resources/toolbarButtonGraphics/general/Search16.gif deleted file mode 100644 index ebd3dcc6..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Search16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Search24.gif b/gui/resources/toolbarButtonGraphics/general/Search24.gif deleted file mode 100644 index 24fc7c16..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Search24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Select16.gif b/gui/resources/toolbarButtonGraphics/general/Select16.gif deleted file mode 100644 index c2e16b44..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Select16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Select24.gif b/gui/resources/toolbarButtonGraphics/general/Select24.gif deleted file mode 100644 index 0d9c3463..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Select24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/SendMail16.gif b/gui/resources/toolbarButtonGraphics/general/SendMail16.gif deleted file mode 100644 index 8e1774bc..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/SendMail16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/SendMail24.gif b/gui/resources/toolbarButtonGraphics/general/SendMail24.gif deleted file mode 100644 index 6bfeaa53..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/SendMail24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Stop16.gif b/gui/resources/toolbarButtonGraphics/general/Stop16.gif deleted file mode 100644 index 064202bf..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Stop16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Stop24.gif b/gui/resources/toolbarButtonGraphics/general/Stop24.gif deleted file mode 100644 index 90515c8a..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Stop24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/TipOfTheDay16.gif b/gui/resources/toolbarButtonGraphics/general/TipOfTheDay16.gif deleted file mode 100644 index db00671b..00000000 Binary files 
a/gui/resources/toolbarButtonGraphics/general/TipOfTheDay16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/TipOfTheDay24.gif b/gui/resources/toolbarButtonGraphics/general/TipOfTheDay24.gif deleted file mode 100644 index 9376ede8..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/TipOfTheDay24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Undo16.gif b/gui/resources/toolbarButtonGraphics/general/Undo16.gif deleted file mode 100644 index 5731d2d3..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Undo16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Undo24.gif b/gui/resources/toolbarButtonGraphics/general/Undo24.gif deleted file mode 100644 index 1d545a77..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Undo24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Zoom16.gif b/gui/resources/toolbarButtonGraphics/general/Zoom16.gif deleted file mode 100644 index 9e488969..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Zoom16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/Zoom24.gif b/gui/resources/toolbarButtonGraphics/general/Zoom24.gif deleted file mode 100644 index 86ae8631..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/Zoom24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/ZoomIn16.gif b/gui/resources/toolbarButtonGraphics/general/ZoomIn16.gif deleted file mode 100644 index 2329426e..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/ZoomIn16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/ZoomIn24.gif b/gui/resources/toolbarButtonGraphics/general/ZoomIn24.gif deleted file mode 100644 index dbd44778..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/ZoomIn24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/ZoomOut16.gif b/gui/resources/toolbarButtonGraphics/general/ZoomOut16.gif deleted file mode 100644 index f9f75658..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/ZoomOut16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/general/ZoomOut24.gif b/gui/resources/toolbarButtonGraphics/general/ZoomOut24.gif deleted file mode 100644 index 259bf9cf..00000000 Binary files a/gui/resources/toolbarButtonGraphics/general/ZoomOut24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/FastForward16.gif b/gui/resources/toolbarButtonGraphics/media/FastForward16.gif deleted file mode 100644 index 679d8944..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/FastForward16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/FastForward24.gif b/gui/resources/toolbarButtonGraphics/media/FastForward24.gif deleted file mode 100644 index 814664c6..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/FastForward24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/Movie16.gif b/gui/resources/toolbarButtonGraphics/media/Movie16.gif deleted file mode 100644 index c3df5eca..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/Movie16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/Movie24.gif b/gui/resources/toolbarButtonGraphics/media/Movie24.gif deleted file mode 100644 index 15cc67c2..00000000 Binary files 
a/gui/resources/toolbarButtonGraphics/media/Movie24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/Pause16.gif b/gui/resources/toolbarButtonGraphics/media/Pause16.gif deleted file mode 100644 index 678bc853..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/Pause16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/Pause24.gif b/gui/resources/toolbarButtonGraphics/media/Pause24.gif deleted file mode 100644 index 7afeafda..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/Pause24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/Play16.gif b/gui/resources/toolbarButtonGraphics/media/Play16.gif deleted file mode 100644 index a8bfcf55..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/Play16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/Play24.gif b/gui/resources/toolbarButtonGraphics/media/Play24.gif deleted file mode 100644 index 572467c5..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/Play24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/Rewind16.gif b/gui/resources/toolbarButtonGraphics/media/Rewind16.gif deleted file mode 100644 index 3f1b1e6c..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/Rewind16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/Rewind24.gif b/gui/resources/toolbarButtonGraphics/media/Rewind24.gif deleted file mode 100644 index 4e77dde1..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/Rewind24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/StepBack16.gif b/gui/resources/toolbarButtonGraphics/media/StepBack16.gif deleted file mode 100644 index 418d7620..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/StepBack16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/StepBack24.gif b/gui/resources/toolbarButtonGraphics/media/StepBack24.gif deleted file mode 100644 index 124bac7d..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/StepBack24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/StepForward16.gif b/gui/resources/toolbarButtonGraphics/media/StepForward16.gif deleted file mode 100644 index 63303945..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/StepForward16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/StepForward24.gif b/gui/resources/toolbarButtonGraphics/media/StepForward24.gif deleted file mode 100644 index 31a246fd..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/StepForward24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/Stop16.gif b/gui/resources/toolbarButtonGraphics/media/Stop16.gif deleted file mode 100644 index a4a147a5..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/Stop16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/Stop24.gif b/gui/resources/toolbarButtonGraphics/media/Stop24.gif deleted file mode 100644 index 62b7743d..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/Stop24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/media/Volume16.gif b/gui/resources/toolbarButtonGraphics/media/Volume16.gif deleted file mode 100644 index 0e9020b7..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/Volume16.gif and /dev/null differ diff 
--git a/gui/resources/toolbarButtonGraphics/media/Volume24.gif b/gui/resources/toolbarButtonGraphics/media/Volume24.gif deleted file mode 100644 index 8f1aea85..00000000 Binary files a/gui/resources/toolbarButtonGraphics/media/Volume24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/navigation/Back16.gif b/gui/resources/toolbarButtonGraphics/navigation/Back16.gif deleted file mode 100644 index f48362d7..00000000 Binary files a/gui/resources/toolbarButtonGraphics/navigation/Back16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/navigation/Back24.gif b/gui/resources/toolbarButtonGraphics/navigation/Back24.gif deleted file mode 100644 index 787518c8..00000000 Binary files a/gui/resources/toolbarButtonGraphics/navigation/Back24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/navigation/Down16.gif b/gui/resources/toolbarButtonGraphics/navigation/Down16.gif deleted file mode 100644 index 39849181..00000000 Binary files a/gui/resources/toolbarButtonGraphics/navigation/Down16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/navigation/Down24.gif b/gui/resources/toolbarButtonGraphics/navigation/Down24.gif deleted file mode 100644 index 2c47af8c..00000000 Binary files a/gui/resources/toolbarButtonGraphics/navigation/Down24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/navigation/Forward16.gif b/gui/resources/toolbarButtonGraphics/navigation/Forward16.gif deleted file mode 100644 index d25a3f95..00000000 Binary files a/gui/resources/toolbarButtonGraphics/navigation/Forward16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/navigation/Forward24.gif b/gui/resources/toolbarButtonGraphics/navigation/Forward24.gif deleted file mode 100644 index 1936fd4b..00000000 Binary files a/gui/resources/toolbarButtonGraphics/navigation/Forward24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/navigation/Home16.gif b/gui/resources/toolbarButtonGraphics/navigation/Home16.gif deleted file mode 100644 index 3a78ec3c..00000000 Binary files a/gui/resources/toolbarButtonGraphics/navigation/Home16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/navigation/Home24.gif b/gui/resources/toolbarButtonGraphics/navigation/Home24.gif deleted file mode 100644 index a25dee25..00000000 Binary files a/gui/resources/toolbarButtonGraphics/navigation/Home24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/navigation/Up16.gif b/gui/resources/toolbarButtonGraphics/navigation/Up16.gif deleted file mode 100644 index cebe60d9..00000000 Binary files a/gui/resources/toolbarButtonGraphics/navigation/Up16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/navigation/Up24.gif b/gui/resources/toolbarButtonGraphics/navigation/Up24.gif deleted file mode 100644 index 3db88736..00000000 Binary files a/gui/resources/toolbarButtonGraphics/navigation/Up24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/BangVertex32.png b/gui/resources/toolbarButtonGraphics/quanto/BangVertex32.png deleted file mode 100644 index 23fdbc96..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/BangVertex32.png and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/BangVertex64.gif b/gui/resources/toolbarButtonGraphics/quanto/BangVertex64.gif deleted file mode 100644 index 950faadc..00000000 Binary files 
a/gui/resources/toolbarButtonGraphics/quanto/BangVertex64.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/Compute16.gif b/gui/resources/toolbarButtonGraphics/quanto/Compute16.gif deleted file mode 100644 index 00e0b035..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/Compute16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/Compute24.gif b/gui/resources/toolbarButtonGraphics/quanto/Compute24.gif deleted file mode 100644 index 75ceed62..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/Compute24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/ComputeAdd16.gif b/gui/resources/toolbarButtonGraphics/quanto/ComputeAdd16.gif deleted file mode 100644 index 1a4f5951..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/ComputeAdd16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/ComputeAdd24.gif b/gui/resources/toolbarButtonGraphics/quanto/ComputeAdd24.gif deleted file mode 100644 index 38018888..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/ComputeAdd24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/ComputeRemove16.gif b/gui/resources/toolbarButtonGraphics/quanto/ComputeRemove16.gif deleted file mode 100644 index b464d2c3..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/ComputeRemove16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/ComputeRemove24.gif b/gui/resources/toolbarButtonGraphics/quanto/ComputeRemove24.gif deleted file mode 100644 index 63906584..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/ComputeRemove24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/DirectedLink16.gif b/gui/resources/toolbarButtonGraphics/quanto/DirectedLink16.gif deleted file mode 100644 index 6e712200..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/DirectedLink16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/DuplicateBangBox32.png b/gui/resources/toolbarButtonGraphics/quanto/DuplicateBangBox32.png deleted file mode 100644 index 6b440aaa..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/DuplicateBangBox32.png and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/EdgePointIcon20.png b/gui/resources/toolbarButtonGraphics/quanto/EdgePointIcon20.png deleted file mode 100644 index 9f83ad46..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/EdgePointIcon20.png and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/KillBangBox32.png b/gui/resources/toolbarButtonGraphics/quanto/KillBangBox32.png deleted file mode 100644 index beb1f4d1..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/KillBangBox32.png and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/Link16.gif b/gui/resources/toolbarButtonGraphics/quanto/Link16.gif deleted file mode 100644 index 6c7dcf20..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/Link16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/Lock12.gif b/gui/resources/toolbarButtonGraphics/quanto/Lock12.gif deleted file mode 100644 index 116e059a..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/Lock12.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/Lock24.gif 
b/gui/resources/toolbarButtonGraphics/quanto/Lock24.gif deleted file mode 100644 index c9124e70..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/Lock24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/UnbangVertex32.png b/gui/resources/toolbarButtonGraphics/quanto/UnbangVertex32.png deleted file mode 100644 index 8697b6c7..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/UnbangVertex32.png and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/Unlock12.gif b/gui/resources/toolbarButtonGraphics/quanto/Unlock12.gif deleted file mode 100644 index 5396782f..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/Unlock12.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/quanto/Unlock24.gif b/gui/resources/toolbarButtonGraphics/quanto/Unlock24.gif deleted file mode 100644 index d6371728..00000000 Binary files a/gui/resources/toolbarButtonGraphics/quanto/Unlock24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/table/ColumnDelete16.gif b/gui/resources/toolbarButtonGraphics/table/ColumnDelete16.gif deleted file mode 100644 index a874db62..00000000 Binary files a/gui/resources/toolbarButtonGraphics/table/ColumnDelete16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/table/ColumnDelete24.gif b/gui/resources/toolbarButtonGraphics/table/ColumnDelete24.gif deleted file mode 100644 index cdd09d3d..00000000 Binary files a/gui/resources/toolbarButtonGraphics/table/ColumnDelete24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/table/ColumnInsertAfter16.gif b/gui/resources/toolbarButtonGraphics/table/ColumnInsertAfter16.gif deleted file mode 100644 index b0248df7..00000000 Binary files a/gui/resources/toolbarButtonGraphics/table/ColumnInsertAfter16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/table/ColumnInsertAfter24.gif b/gui/resources/toolbarButtonGraphics/table/ColumnInsertAfter24.gif deleted file mode 100644 index f9eba5f4..00000000 Binary files a/gui/resources/toolbarButtonGraphics/table/ColumnInsertAfter24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/table/ColumnInsertBefore16.gif b/gui/resources/toolbarButtonGraphics/table/ColumnInsertBefore16.gif deleted file mode 100644 index 9a699bfb..00000000 Binary files a/gui/resources/toolbarButtonGraphics/table/ColumnInsertBefore16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/table/ColumnInsertBefore24.gif b/gui/resources/toolbarButtonGraphics/table/ColumnInsertBefore24.gif deleted file mode 100644 index 8c02608e..00000000 Binary files a/gui/resources/toolbarButtonGraphics/table/ColumnInsertBefore24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/table/RowDelete16.gif b/gui/resources/toolbarButtonGraphics/table/RowDelete16.gif deleted file mode 100644 index 1856848e..00000000 Binary files a/gui/resources/toolbarButtonGraphics/table/RowDelete16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/table/RowDelete24.gif b/gui/resources/toolbarButtonGraphics/table/RowDelete24.gif deleted file mode 100644 index 8cf24389..00000000 Binary files a/gui/resources/toolbarButtonGraphics/table/RowDelete24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/table/RowInsertAfter16.gif b/gui/resources/toolbarButtonGraphics/table/RowInsertAfter16.gif deleted file mode 100644 index a75371b7..00000000 Binary files 
a/gui/resources/toolbarButtonGraphics/table/RowInsertAfter16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/table/RowInsertAfter24.gif b/gui/resources/toolbarButtonGraphics/table/RowInsertAfter24.gif deleted file mode 100644 index 0e32f21a..00000000 Binary files a/gui/resources/toolbarButtonGraphics/table/RowInsertAfter24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/table/RowInsertBefore16.gif b/gui/resources/toolbarButtonGraphics/table/RowInsertBefore16.gif deleted file mode 100644 index f4056f66..00000000 Binary files a/gui/resources/toolbarButtonGraphics/table/RowInsertBefore16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/table/RowInsertBefore24.gif b/gui/resources/toolbarButtonGraphics/table/RowInsertBefore24.gif deleted file mode 100644 index 18f2455a..00000000 Binary files a/gui/resources/toolbarButtonGraphics/table/RowInsertBefore24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/AlignCenter16.gif b/gui/resources/toolbarButtonGraphics/text/AlignCenter16.gif deleted file mode 100644 index 34c09f34..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/AlignCenter16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/AlignCenter24.gif b/gui/resources/toolbarButtonGraphics/text/AlignCenter24.gif deleted file mode 100644 index d4baf4ef..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/AlignCenter24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/AlignJustify16.gif b/gui/resources/toolbarButtonGraphics/text/AlignJustify16.gif deleted file mode 100644 index 07fdb5af..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/AlignJustify16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/AlignJustify24.gif b/gui/resources/toolbarButtonGraphics/text/AlignJustify24.gif deleted file mode 100644 index 30843ca2..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/AlignJustify24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/AlignLeft16.gif b/gui/resources/toolbarButtonGraphics/text/AlignLeft16.gif deleted file mode 100644 index 235e780f..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/AlignLeft16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/AlignLeft24.gif b/gui/resources/toolbarButtonGraphics/text/AlignLeft24.gif deleted file mode 100644 index 85631ca5..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/AlignLeft24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/AlignRight16.gif b/gui/resources/toolbarButtonGraphics/text/AlignRight16.gif deleted file mode 100644 index 9c06d30c..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/AlignRight16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/AlignRight24.gif b/gui/resources/toolbarButtonGraphics/text/AlignRight24.gif deleted file mode 100644 index 25a77d35..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/AlignRight24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/Bold16.gif b/gui/resources/toolbarButtonGraphics/text/Bold16.gif deleted file mode 100644 index e9494e17..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/Bold16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/Bold24.gif b/gui/resources/toolbarButtonGraphics/text/Bold24.gif 
deleted file mode 100644 index ef0aa1a1..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/Bold24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/Italic16.gif b/gui/resources/toolbarButtonGraphics/text/Italic16.gif deleted file mode 100644 index 301cfc79..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/Italic16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/Italic24.gif b/gui/resources/toolbarButtonGraphics/text/Italic24.gif deleted file mode 100644 index 069c6b9b..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/Italic24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/Normal16.gif b/gui/resources/toolbarButtonGraphics/text/Normal16.gif deleted file mode 100644 index 6c914d82..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/Normal16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/Normal24.gif b/gui/resources/toolbarButtonGraphics/text/Normal24.gif deleted file mode 100644 index 7764baa0..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/Normal24.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/Underline16.gif b/gui/resources/toolbarButtonGraphics/text/Underline16.gif deleted file mode 100644 index 97ac7c07..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/Underline16.gif and /dev/null differ diff --git a/gui/resources/toolbarButtonGraphics/text/Underline24.gif b/gui/resources/toolbarButtonGraphics/text/Underline24.gif deleted file mode 100644 index 93e20be6..00000000 Binary files a/gui/resources/toolbarButtonGraphics/text/Underline24.gif and /dev/null differ diff --git a/gui/src/apple/dts/samplecode/osxadapter/OSXAdapter.java b/gui/src/apple/dts/samplecode/osxadapter/OSXAdapter.java deleted file mode 100644 index ca030b75..00000000 --- a/gui/src/apple/dts/samplecode/osxadapter/OSXAdapter.java +++ /dev/null @@ -1,207 +0,0 @@ -/* - -File: OSXAdapter.java - -Abstract: Hooks existing preferences/about/quit functionality from an - existing Java app into handlers for the Mac OS X application menu. - Uses a Proxy object to dynamically implement the - com.apple.eawt.ApplicationListener interface and register it with the - com.apple.eawt.Application object. This allows the complete project - to be both built and run on any platform without any stubs or - placeholders. Useful for developers looking to implement Mac OS X - features while supporting multiple platforms with minimal impact. - -Version: 2.0 - -Disclaimer: IMPORTANT: This Apple software is supplied to you by -Apple Inc. ("Apple") in consideration of your agreement to the -following terms, and your use, installation, modification or -redistribution of this Apple software constitutes acceptance of these -terms. If you do not agree with these terms, please do not use, -install, modify or redistribute this Apple software. - -In consideration of your agreement to abide by the following terms, and -subject to these terms, Apple grants you a personal, non-exclusive -license, under Apple's copyrights in this original Apple software (the -"Apple Software"), to use, reproduce, modify and redistribute the Apple -Software, with or without modifications, in source and/or binary forms; -provided that if you redistribute the Apple Software in its entirety and -without modifications, you must retain this notice and the following -text and disclaimers in all such redistributions of the Apple Software. 
-Neither the name, trademarks, service marks or logos of Apple Inc. -may be used to endorse or promote products derived from the Apple -Software without specific prior written permission from Apple. Except -as expressly stated in this notice, no other rights or licenses, express -or implied, are granted by Apple herein, including but not limited to -any patent rights that may be infringed by your derivative works or by -other works in which the Apple Software may be incorporated. - -The Apple Software is provided by Apple on an "AS IS" basis. APPLE -MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION -THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS -FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND -OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS. - -IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL -OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION, -MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED -AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE), -STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. - -Copyright © 2003-2007 Apple, Inc., All Rights Reserved - -*/ - -package apple.dts.samplecode.osxadapter; - -import java.lang.reflect.*; - - -public class OSXAdapter implements InvocationHandler { - - protected Object targetObject; - protected Method targetMethod; - protected String proxySignature; - - static Object macOSXApplication; - - // Pass this method an Object and Method equipped to perform application shutdown logic - // The method passed should return a boolean stating whether or not the quit should occur - public static void setQuitHandler(Object target, Method quitHandler) { - setHandler(new OSXAdapter("handleQuit", target, quitHandler)); - } - - // Pass this method an Object and Method equipped to display application info - // They will be called when the About menu item is selected from the application menu - public static void setAboutHandler(Object target, Method aboutHandler) { - boolean enableAboutMenu = (target != null && aboutHandler != null); - if (enableAboutMenu) { - setHandler(new OSXAdapter("handleAbout", target, aboutHandler)); - } - // If we're setting a handler, enable the About menu item by calling - // com.apple.eawt.Application reflectively - try { - Method enableAboutMethod = macOSXApplication.getClass().getDeclaredMethod("setEnabledAboutMenu", new Class[] { boolean.class }); - enableAboutMethod.invoke(macOSXApplication, new Object[] { Boolean.valueOf(enableAboutMenu) }); - } catch (Exception ex) { - System.err.println("OSXAdapter could not access the About Menu"); - ex.printStackTrace(); - } - } - - // Pass this method an Object and a Method equipped to display application options - // They will be called when the Preferences menu item is selected from the application menu - public static void setPreferencesHandler(Object target, Method prefsHandler) { - boolean enablePrefsMenu = (target != null && prefsHandler != null); - if (enablePrefsMenu) { - setHandler(new OSXAdapter("handlePreferences", target, prefsHandler)); - } - // If we're setting a handler, enable the Preferences menu item by calling - // com.apple.eawt.Application reflectively - try { - Method enablePrefsMethod = 
macOSXApplication.getClass().getDeclaredMethod("setEnabledPreferencesMenu", new Class[] { boolean.class }); - enablePrefsMethod.invoke(macOSXApplication, new Object[] { Boolean.valueOf(enablePrefsMenu) }); - } catch (Exception ex) { - System.err.println("OSXAdapter could not access the About Menu"); - ex.printStackTrace(); - } - } - - // Pass this method an Object and a Method equipped to handle document events from the Finder - // Documents are registered with the Finder via the CFBundleDocumentTypes dictionary in the - // application bundle's Info.plist - public static void setFileHandler(Object target, Method fileHandler) { - setHandler(new OSXAdapter("handleOpenFile", target, fileHandler) { - // Override OSXAdapter.callTarget to send information on the - // file to be opened - public boolean callTarget(Object appleEvent) { - if (appleEvent != null) { - try { - Method getFilenameMethod = appleEvent.getClass().getDeclaredMethod("getFilename", (Class[])null); - String filename = (String) getFilenameMethod.invoke(appleEvent, (Object[])null); - this.targetMethod.invoke(this.targetObject, new Object[] { filename }); - } catch (Exception ex) { - - } - } - return true; - } - }); - } - - - // setHandler creates a Proxy object from the passed OSXAdapter and adds it as an ApplicationListener - @SuppressWarnings("unchecked") - public static void setHandler(OSXAdapter adapter) { - try { - Class applicationClass = Class.forName("com.apple.eawt.Application"); - if (macOSXApplication == null) { - macOSXApplication = applicationClass.getConstructor((Class[])null).newInstance((Object[])null); - } - Class applicationListenerClass = Class.forName("com.apple.eawt.ApplicationListener"); - Method addListenerMethod = applicationClass.getDeclaredMethod("addApplicationListener", new Class[] { applicationListenerClass }); - // Create a proxy object around this handler that can be reflectively added as an Apple ApplicationListener - Object osxAdapterProxy = Proxy.newProxyInstance(OSXAdapter.class.getClassLoader(), new Class[] { applicationListenerClass }, adapter); - addListenerMethod.invoke(macOSXApplication, new Object[] { osxAdapterProxy }); - } catch (ClassNotFoundException cnfe) { - System.err.println("This version of Mac OS X does not support the Apple EAWT. 
ApplicationEvent handling has been disabled (" + cnfe + ")"); - } catch (Exception ex) { // Likely a NoSuchMethodException or an IllegalAccessException loading/invoking eawt.Application methods - System.err.println("Mac OS X Adapter could not talk to EAWT:"); - ex.printStackTrace(); - } - } - - // Each OSXAdapter has the name of the EAWT method it intends to listen for (handleAbout, for example), - // the Object that will ultimately perform the task, and the Method to be called on that Object - protected OSXAdapter(String proxySignature, Object target, Method handler) { - this.proxySignature = proxySignature; - this.targetObject = target; - this.targetMethod = handler; - } - - // Override this method to perform any operations on the event - // that comes with the various callbacks - // See setFileHandler above for an example - public boolean callTarget(Object appleEvent) throws InvocationTargetException, IllegalAccessException { - Object result = targetMethod.invoke(targetObject, (Object[])null); - if (result == null) { - return true; - } - return Boolean.valueOf(result.toString()).booleanValue(); - } - - // InvocationHandler implementation - // This is the entry point for our proxy object; it is called every time an ApplicationListener method is invoked - public Object invoke (Object proxy, Method method, Object[] args) throws Throwable { - if (isCorrectMethod(method, args)) { - boolean handled = callTarget(args[0]); - setApplicationEventHandled(args[0], handled); - } - // All of the ApplicationListener methods are void; return null regardless of what happens - return null; - } - - // Compare the method that was called to the intended method when the OSXAdapter instance was created - // (e.g. handleAbout, handleQuit, handleOpenFile, etc.) - protected boolean isCorrectMethod(Method method, Object[] args) { - return (targetMethod != null && proxySignature.equals(method.getName()) && args.length == 1); - } - - // It is important to mark the ApplicationEvent as handled and cancel the default behavior - // This method checks for a boolean result from the proxy method and sets the event accordingly - protected void setApplicationEventHandled(Object event, boolean handled) { - if (event != null) { - try { - Method setHandledMethod = event.getClass().getDeclaredMethod("setHandled", new Class[] { boolean.class }); - // If the target method returns a boolean, use that as a hint - setHandledMethod.invoke(event, new Object[] { Boolean.valueOf(handled) }); - } catch (Exception ex) { - System.err.println("OSXAdapter was unable to handle an ApplicationEvent: " + event); - ex.printStackTrace(); - } - } - } -} \ No newline at end of file diff --git a/gui/src/com/sun/jaf/ui/ActionList.java b/gui/src/com/sun/jaf/ui/ActionList.java deleted file mode 100644 index 9ad953b5..00000000 --- a/gui/src/com/sun/jaf/ui/ActionList.java +++ /dev/null @@ -1,142 +0,0 @@ -/** - * @(#)ActionList.java 1.5 02/10/08 - * - * Copyright 2002 Sun Microsystems, Inc. All Rights Reserved. - * - * Redistribution and use in source and binary forms, with or - * without modification, are permitted provided that the following - * conditions are met: - * - * - Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * - Redistribution in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials - * provided with the distribution. 
- * - * Neither the name of Sun Microsystems, Inc. or the names of - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * This software is provided "AS IS," without a warranty of any - * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND - * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY - * EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY - * DAMAGES OR LIABILITIES SUFFERED BY LICENSEE AS A RESULT OF OR - * RELATING TO USE, MODIFICATION OR DISTRIBUTION OF THIS SOFTWARE OR - * ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE - * FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, - * SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER - * CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF - * THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS - * BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - * - * You acknowledge that this software is not designed, licensed or - * intended for use in the design, construction, operation or - * maintenance of any nuclear facility. - * - */ - -package com.sun.jaf.ui; - -import java.util.*; - -/** - * Represents a list of action ids, ActionList elements or null. The null - * element may represent a separation of elements. - * - * This should be regarded as a "friend" class since it is created by - * the ActionManager for the UIFactory only. This class is not meant - * for general public consumption. - */ -class ActionList { - - private String id; - private String actionref; - private HashMap groupMap; - private List list; - - public ActionList(String id, String actionref) { - this.id = id; - this.actionref = actionref; - this.list = new ArrayList(); - } - - /** - * Retuns the action-list id that this class represents. - */ - public String getID() { - return id; - } - - /** - * Returns the ActionRef an action id that should be used for the - * action which represents this list item. - */ - public String getActionRef() { - return actionref; - } - - /** - * Returns the group id for the current action id - * @param id action id - * @return the group id for action or null if it doens't exist. - */ - public String getGroup(String id) { - if (groupMap == null) { - return null; - } - return groupMap.get(id); - } - - /** - * Maps the group with the action id for this ActionList - * @param id action id - * @param group the group id - */ - public void setGroup(String id, String group) { - if (groupMap == null) { - groupMap = new HashMap(); - } - groupMap.put(id, group); - } - - // - // List implementation - // - - public int size() { - return list.size(); - } - - public boolean add(Object o) { - return list.add(o); - } - - public Iterator iterator() { - return list.iterator(); - } - - /* - public boolean isEmpty() { - return list.isEmpty(); - } - - public boolean contains(Object o) { - return list.contains(o); - } - - - public Object[] toArray() { - return list.toArray(); - } - - public Object[] toArray(Object[] a) { - return list.toArray(a); - } - */ - -} - diff --git a/gui/src/com/sun/jaf/ui/ActionManager.java b/gui/src/com/sun/jaf/ui/ActionManager.java deleted file mode 100644 index 9042b452..00000000 --- a/gui/src/com/sun/jaf/ui/ActionManager.java +++ /dev/null @@ -1,1109 +0,0 @@ -/** - * %W% %E% - * Copyright 2003 Sun Microsystems, Inc. All Rights Reserved. 
- * - * Redistribution and use in source and binary forms, with or - * without modification, are permitted provided that the following - * conditions are met: - * - * - Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * - Redistribution in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials - * provided with the distribution. - * - * Neither the name of Sun Microsystems, Inc. or the names of - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * This software is provided "AS IS," without a warranty of any - * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND - * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY - * EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY - * DAMAGES OR LIABILITIES SUFFERED BY LICENSEE AS A RESULT OF OR - * RELATING TO USE, MODIFICATION OR DISTRIBUTION OF THIS SOFTWARE OR - * ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE - * FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, - * SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER - * CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF - * THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS - * BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - * - * You acknowledge that this software is not designed, licensed or - * intended for use in the design, construction, operation or - * maintenance of any nuclear facility. - */ - -// NB: some edits made: search for EDIT, also all generics usage - -package com.sun.jaf.ui; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.awt.event.InputEvent; -import java.awt.event.ItemEvent; -import java.awt.event.ItemListener; - -import java.beans.EventHandler; -import java.beans.Statement; - -import java.io.IOException; -import java.io.InputStream; -import java.io.PrintStream; - -import java.util.*; - -import java.net.URL; - -import javax.swing.AbstractAction; -import javax.swing.Action; -import javax.swing.Icon; -import javax.swing.ImageIcon; -import javax.swing.KeyStroke; - -import javax.xml.parsers.SAXParserFactory; -import javax.xml.parsers.SAXParser; -import javax.xml.parsers.ParserConfigurationException; - -import org.xml.sax.Attributes; -import org.xml.sax.SAXException; -import org.xml.sax.SAXParseException; - -import org.xml.sax.helpers.DefaultHandler; - -/** - * The ActionManager manages sets of javax.swing.Action. - * The Actions are speficied in an XML configuration file and will be lazily created - * when referenced. The schema for the XML document contains three major - * elements. - *
- *
- *   • action       Represents the properties of an Action.
- *   • action-list  Represents lists and trees of actions which can be used to construct user interface components like toolbars, menus and popups.
- *   • action-set   The document root which contains a set of action-lists and actions.
- *
- * All of these elements have a unique id tag which is used by the ActionManager to reference the element. Refer to action-set.dtd for details on the elements and attributes.
- *
- * The order of an action in an action-list reflects the order of the Action based component in the container. A tree is represented as an action-list that contains one or more action-lists.
- *
- * Once the Actions have been created you need to register callback methods that perform the logic of the associated Action. A typical use case of the ActionManager is:
- *
- *   ActionManager manager = ActionManager.getInstance();
- *
- *   // Load an action-set XML document
- *   manager.loadActions(new URL("my-actions.xml"));
- *
- *   // Register a callback for a particular Action
- *   manager.registerCallback("new-action", actionHandler, "handleNewCommand");
- *
- *   // Change the state of the action:
- *   manager.setEnabled("new-action", newState);
- *
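As a rough, self-contained sketch of the basic flow described above (the resource name editor-actions.xml, the action id "open-command", and the ActionWiring and OpenHandler classes are illustrative assumptions, not names defined anywhere in this repository), a client of this API might have wired a plain action up like this:

    import com.sun.jaf.ui.ActionManager;

    import java.io.IOException;
    import java.net.URL;

    public class ActionWiring {

        // Hypothetical handler: for a plain (non-toggle) action the registered
        // method is invoked with no arguments.
        public static class OpenHandler {
            public void open() {
                System.out.println("open-command fired");
            }
        }

        public static void main(String[] args) throws IOException {
            ActionManager manager = ActionManager.getInstance();

            // Load the action definitions from an XML action-set document;
            // the sketch assumes this resource exists on the classpath.
            URL actions = ActionWiring.class.getResource("editor-actions.xml");
            manager.loadActions(actions);

            // Invoke OpenHandler.open() whenever the "open-command" action fires.
            manager.registerCallback("open-command", new OpenHandler(), "open");

            // Keep every component built from this action enabled and in sync.
            manager.setEnabled("open-command", true);
        }
    }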
- * The ActionManager also supports Actions that can have a selected state associated with them. These Actions are typically represented by a JCheckBox or similar widget. For such actions the registered method is invoked with an additional parameter indicating the selected state of the widget. For example, for the callback handler:
- *
- *   public class Handler {
- *       public void stateChanged(boolean newState);
- *   }
- *
- * The registration method would look similar:
- *
- *   manager.registerCallback("select-action", new Handler(), "stateChanged");
- *
- * The stateChanged method would be invoked as the selected state of the widget changed. Additionally, if you need to change the selected state of the Action, use the ActionManager method setSelected.
- *
- * The UIFactory uses the managed Actions in the ActionManager to create user interface components. For example, to create a JMenu based on an action-list id:
- *
- *   JMenu file = UIFactory.getInstance().createMenu("file-menu");
- *
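A similar hedged sketch for the toggle-action and UIFactory usage just described; "show-grid-command", "file-menu", ToggleWiring and GridHandler are again illustrative names only, and the sketch assumes the relevant action-set document has already been loaded:

    import com.sun.jaf.ui.ActionManager;
    import com.sun.jaf.ui.UIFactory;

    import javax.swing.JMenu;

    public class ToggleWiring {

        // Hypothetical handler: for a "toggle" type action the registered
        // method is called with the new selected state as a boolean argument.
        public static class GridHandler {
            public void stateChanged(boolean selected) {
                System.out.println("grid visible: " + selected);
            }
        }

        public static void main(String[] args) {
            ActionManager manager = ActionManager.getInstance();

            // State changes of the toggle action are forwarded to GridHandler.stateChanged(boolean).
            manager.registerCallback("show-grid-command", new GridHandler(), "stateChanged");

            // Drive the selected state programmatically; attached components follow it.
            manager.setSelected("show-grid-command", true);

            // Build a menu from an action-list id using the companion UIFactory.
            JMenu fileMenu = UIFactory.getInstance().createMenu("file-menu");
            System.out.println("menu items: " + fileMenu.getItemCount());
        }
    }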
          - * - * @see UIFactory - * @see action-set.dtd - * @author Mark Davidson - */ -public class ActionManager { - - // Elements in the action-set.dtd - private final static String ACTION_SET_ELEMENT="action-set"; - private final static String ACTION_ELEMENT="action"; - private final static String ACTION_LIST_ELEMENT="action-list"; - private final static String EMPTY_ELEMENT="empty"; - private final static String GROUP_ELEMENT="group"; - - private final static String ACCEL_ATTRIBUTE = "accel"; - private final static String DESC_ATTRIBUTE = "desc"; - private final static String ICON_ATTRIBUTE = "icon"; - private final static String ID_ATTRIBUTE = "id"; - private final static String ACTIONREF_ATTRIBUTE = "actionref"; - private final static String IDREF_ATTRIBUTE = "idref"; - private final static String MNEMONIC_ATTRIBUTE = "mnemonic"; - private final static String NAME_ATTRIBUTE = "name"; - private final static String SMICON_ATTRIBUTE = "smicon"; - private final static String TYPE_ATTRIBUTE = "type"; - - // Indexes into the ActionAttributes array. - private final static int ACCEL_INDEX = 0; - private final static int DESC_INDEX = 1; - private final static int ICON_INDEX = 2; - private final static int ID_INDEX = 3; - private final static int MNEMONIC_INDEX = 4; - private final static int NAME_INDEX = 5; - private final static int SMICON_INDEX = 6; - private final static int TYPE_INDEX = 7; - - private static SAXParserFactory parserfactory; - private ActionHandler handler; - - /** - * A simple class that holds the parsed XML Attributes. - */ - class ActionAttributes { - - private String[] array; - - public ActionAttributes(Attributes attrs) { - // Populate the array with the objects that map to the - // attributes - array = new String[8]; - array[0] = attrs.getValue(ACCEL_ATTRIBUTE); - array[1] = attrs.getValue(DESC_ATTRIBUTE); - array[2] = attrs.getValue(ICON_ATTRIBUTE); - array[3] = attrs.getValue(ID_ATTRIBUTE); - array[4] = attrs.getValue(MNEMONIC_ATTRIBUTE); - array[5] = attrs.getValue(NAME_ATTRIBUTE); - array[6] = attrs.getValue(SMICON_ATTRIBUTE); - array[7] = attrs.getValue(TYPE_ATTRIBUTE); - } - - /** - * Retrieves the Attribute value. - * @param index one of ActionManager.._INDEX - */ - public String getValue(int index) { - return array[index]; - } - - public void setValue(int index, String value) { - if (index < array.length && value != null) { - array[index] = value; - } - } - - public void setAttributes(Attributes attrs) { - setValue(0, attrs.getValue(ACCEL_ATTRIBUTE)); - setValue(1, attrs.getValue(DESC_ATTRIBUTE)); - setValue(2, attrs.getValue(ICON_ATTRIBUTE)); - // Don't set the ID_ATTRIBUTE since it should be immutable - // setValue(3, attrs.getValue(ID_ATTRIBUTE)); - setValue(4, attrs.getValue(MNEMONIC_ATTRIBUTE)); - setValue(5, attrs.getValue(NAME_ATTRIBUTE)); - setValue(6, attrs.getValue(SMICON_ATTRIBUTE)); - setValue(7, attrs.getValue(TYPE_ATTRIBUTE)); - } - } - - - // Internal data structures which manage the actions. - - // The attrMap is an association between keys (value of ID_ATTRIBUTE) - // and the associated Action properties in an ActionAttributes class. - private Map attributeMap; - - // key: value of ID_ATTRIBUTE, value instanceof AbstractAction - private Map actionMap; - - // A mapping between the action-set id and a list of action ids. - private Map> actionSetMap; - - // A mapping between the action-list id and an ActionList - private Map actionListMap; - - /** - * Shared instance of the singleton ActionManager. 
- */ - private static ActionManager INSTANCE; - - // To enable debugging: - // Pass -Ddebug=true to the vm on start up. - // or - // set System.setProperty("debug", "true"); before constructing this Object - - private static boolean DEBUG = false; - - /** - * Creates the action manager - */ - public ActionManager() { - // XXX - System.getProperty is not allowed by the applet security model. - // DEBUG = Boolean.valueOf(System.getProperty("debug")).booleanValue(); - } - - /** - * Return the instance of the ActionManger. If this has not been explicity - * set then it will be created. - * - * @return the ActionManager instance. - * @see #setInstance - */ - public static ActionManager getInstance() { - if (INSTANCE == null) { - INSTANCE = new ActionManager(); - } - return INSTANCE; - } - - /** - * Sets the ActionManager instance. - */ - public static void setInstance(ActionManager manager) { - INSTANCE = manager; - } - - // The SAX Attributes are stored in a lightweight data structure. - // The corresponding Action will be constructed lazily the first time that - // they are requested from the public method getAction. - - /** - * Adds the values represented in the SAX Attrributes structure - * to a lightweight internal data strucure. - * - * @param attrs the Attributes for an action - * @param actionset the parent action-set id for the action - */ - private void addAttributes(Attributes attrs, String actionset) { - if (attributeMap == null) { - attributeMap = new HashMap(); - } - attributeMap.put(attrs.getValue(ID_ATTRIBUTE), - new ActionAttributes(attrs)); - - // Add this action id to the actionset - if (actionset != null && !actionset.equals("")) { - List list = getActionSet(actionset); - if (list == null) { - list = new ArrayList(); - } - list.add(attrs.getValue(ID_ATTRIBUTE)); - addActionSet(actionset, list); - } - } - - /** - * Return the attributes for an action id. - * @param action id - */ - private ActionAttributes getAttributes(String key) { - if (attributeMap == null) { - return null; - } - // EDIT (AHM): remove cast - return attributeMap.get(key); - } - - /** - * Unloads the actions that are assocated with the action-set from the - * action manager. - * - * Note: This method removes the actions and references from - * the action manager. References to the - * action may still be held in component that were created from - * actions. - * - * @param actionset the action-set id - */ - public void unloadActions(String actionset) { - if (DEBUG) { - System.out.println("unloadActions("+actionset+")"); - } - List list = getActionSet(actionset.toString()); - if (list == null) { - return; - } - - // Remove all Actions and ActionAttributes - String key = null; - Iterator iter = list.iterator(); - while (iter.hasNext()) { - key = iter.next(); - if (attributeMap != null) { - attributeMap.remove(key); - } - if (actionMap != null) { - actionMap.remove(key); - } - } - if (actionSetMap != null) { - actionSetMap.remove(actionset); - } - } - - - /** - * Returns the ids for all the managed actions. - *

- * An action id is a unique identifier which can - * be used to retrieve the corresponding Action from the ActionManager. - * This identifier can also - * be used to set the properties of the action through the action - * manager like setting the state of the enabled or selected flags. - * - * @return a set which represents all the action ids - */ - public Set getActionIDs() { - if (attributeMap == null) { - return null; - } - return attributeMap.keySet(); - } - - /** - * Retrieve the ids for all the managed action-sets. - *

          - * An action set is an association between an action-set id and the - * action ids that it contains. For example, the actions-core.xml - * action-set document has the action-set id: "core-actions" that - * contains the actions: new-command, open-command, save-command, etc... - * - * @return a set which represents all the action-set ids - */ - public Set getActionSetIDs() { - if (actionSetMap == null) { - actionSetMap = new HashMap>(); - } - return actionSetMap.keySet(); - } - - /** - * Return a List of action ids for an action-set id. - * @param id the action-set id - * @return a List of action ids in the set - */ - private List getActionSet(String id) { - if (actionSetMap == null) { - actionSetMap = new HashMap>(); - } - return actionSetMap.get(id); - } - - private void addActionSet(String key, List set) { - if (actionSetMap == null) { - actionSetMap = new HashMap>(); - } - actionSetMap.put(key, set); - } - - /** - * Retrieve the ids for all the managed action-lists. - *

          - * An action-list is an ordered collection of actions, - * action-lists and empty elements which could represent - * containers of actions. These action-list ids can be - * used in factory classes to construct ui action containers - * like menus, toolbars and popups. - * - * @return a set which represents all the action-list ids - */ - public Set getActionListIDs() { - if (actionListMap == null) { - actionListMap = new HashMap(); - } - return actionListMap.keySet(); - } - - /** - * Return an ActionList for an action-list id. - * @param id the action-list id - * @return an ActionList List of action ids in the set - */ - ActionList getActionList(String id) { - if (actionListMap == null) { - return null; - } - return actionListMap.get(id); - } - - private void addActionList(String id, ActionList list) { - if (actionListMap == null) { - actionListMap = new HashMap(); - } - actionListMap.put(id, list); - } - - /** - * Adds an action to the ActionManager - * @param id value of the action id - */ - private void addAction(String id, Action action) { - if (actionMap == null) { - actionMap = new HashMap(); - } - actionMap.put(id, action); - } - - - /** - * Retrieves the action corresponding to an action id. - * - * @param id value of the action id - * @return an Action or null if id - */ - public Action getAction(String id) { - if (actionMap == null) { - actionMap = new HashMap(); - } - Action action = actionMap.get(id); - if (action == null) { - action = createAction(getAttributes(id)); - } - - return action; - } - - /** - * Convenience method for returning the DelegateAction - * - * @param id value of the action id - * @return the DelegateAction referenced by the named id or null - */ - private DelegateAction getDelegateAction(String id) { - Action a = getAction(id); - if (a instanceof DelegateAction) { - return (DelegateAction)a; - } - return null; - } - - /** - * Convenience method for returning the StateChangeAction - * - * @param id value of the action id - * @return the StateChangeAction referenced by the named id or null - */ - private StateChangeAction getStateChangeAction(String id) { - Action a = getAction(id); - if (a instanceof StateChangeAction) { - return (StateChangeAction)a; - } - return null; - } - - /** - * Enables or disables the state of the Action corresponding to the - * action id. This method should be used - * by application developers to ensure that all components created from an - * action remain in synch with respect to their enabled state. - * - * @param id value of the action id - * @param enabled true if the action is to be enabled; otherwise false - */ - public void setEnabled(String id, boolean enabled) { - Action action = getAction(id); - if (action != null) { - action.setEnabled(enabled); - } - } - - - /** - * Returns the enabled state of the Action. When enabled, - * any component associated with this object is active and - * able to fire this object's actionPerformed method. - * - * @param id value of the action id - * @return true if this Action is enabled; false if the - * action doesn't exist or disabled. - */ - public boolean isEnabled(String id) { - Action action = getAction(id); - if (action != null) { - return action.isEnabled(); - } - return false; - } - - /** - * Sets the selected state of a toggle action. If the id doesn't - * correspond to a toggle action then it will fail silently. - * - * @param id the value of the action id - * @param selected true if the action is to be selected; otherwise false. 
- */ - public void setSelected(String id, boolean selected) { - StateChangeAction action = getStateChangeAction(id); - if (action != null) { - action.setSelected(selected); - } - } - - /** - * Gets the selected state of a toggle action. If the id doesn't - * correspond to a toggle action then it will fail silently. - * - * @param id the value of the action id - * @return true if the action is selected; false if the action - * doesn't exist or is disabled. - */ - public boolean isSelected(String id) { - StateChangeAction action = getStateChangeAction(id); - if (action != null) { - return action.isSelected(); - } - return false; - } - - /** - * Adds the set of actions and action-lists - * from an action-set document into the ActionManager. - * A call to this method usually takes the form: - * ActionManager.getInstance().loadActions(getClass().getResource("myActions.xml")); - * - * @param url URL pointing to an actionSet document - * @throws IOException If there is an error in parsing - */ - public void loadActions(URL url) throws IOException { - if (DEBUG) { - System.out.println("loadActions("+url+")"); - } - InputStream stream = url.openStream(); - try { - loadActions(stream); - } - finally { - try { - stream.close(); - } catch (IOException ie) { - } - } - } - - /** - * Adds the set of actions and action-lists - * from an action-set document into the ActionManager. - * - * @param stream InputStream containing an actionSet document - * @throws IOException If there is an error in parsing - */ - public void loadActions(InputStream stream) throws IOException { - if (DEBUG) { - System.out.println("loadActions("+stream+")"); - } - - if (parserfactory == null) { - parserfactory = SAXParserFactory.newInstance(); - parserfactory.setValidating(true); - } - - if (handler == null) { - handler = new ActionHandler(); - } - - try { - SAXParser parser = parserfactory.newSAXParser(); - // Append a '/' as otherwise sax will look in this directory - // vs the lib directory. - parser.parse(stream, handler, getClass().getResource("resources").toString() + "/"); - // parser.parse(stream, handler, "/"); - } catch (SAXException ex) { - printException("SAXException: " + ex.getMessage(), ex); - throw new IOException("Error parsing: " + ex.getMessage()); - } catch (IOException ex2) { - printException("IOException: " + ex2.getMessage(), ex2); - throw ex2; - } catch (ParserConfigurationException ex3) { - printException("ParserConfigurationException: " + ex3.getMessage(), ex3); - throw new IOException("Error parsing: " + ex3.getMessage()); - } - } - - // Helper method to print exceptions. - // TODO: should probabaly use the logger API. - private void printException(String message, Exception ex) { - System.out.println(message); - if (DEBUG) { - ex.printStackTrace(); - } - } - - - /** - * Returns the Icon associated with the name of the resource. - * TODO: This should be exposed as a public utility method. 
- */ - Icon getIcon(String imagePath) { - if (imagePath != null && !imagePath.equals("")) { - URL url = this.getClass().getResource(imagePath); - if (url != null) { - return new ImageIcon(url); - } - } - return null; - } - - - /** - * A diagnostic which prints the Attributes of an action - * on the printStream - */ - static void printAction(PrintStream stream, Action action) { - stream.println("Attributes for " + action.getValue(Action.ACTION_COMMAND_KEY)); - - if (action instanceof AbstractAction) { - Object[] keys = ((AbstractAction)action).getKeys(); - - for (int i = 0; i < keys.length; i++) { - stream.println("\tkey: " + keys[i] + "\tvalue: " + - action.getValue((String)keys[i])); - } - } - } - - /** - * A diagnostic which prints the all the contained info from the ActionManager - * on the printStream - */ - static void printActionManager(PrintStream stream, ActionManager manager) { - // Print out the action sets. - Set keys = manager.getActionSetIDs(); - int numItems = keys.size(); - stream.println("Num action-sets: " + numItems); - Iterator iter = keys.iterator(); - while (iter.hasNext()) { - stream.println("\t" + iter.next()); - } - - // Print out the action lists. - keys = manager.getActionListIDs(); - if (keys != null) { - numItems = keys.size(); - stream.println("\nNum action-lists: " + numItems); - iter = keys.iterator(); - while (iter.hasNext()) { - stream.println("\t" + iter.next()); - } - } - - // Key dump - keys = manager.getActionIDs(); - if (keys != null) { - numItems = keys.size(); - stream.println("\nNum actions: " + numItems); - iter = keys.iterator(); - while (iter.hasNext()) { - printAction(stream, manager.getAction(iter.next())); - } - } - - } - - - // Implementation methods which create the action from the Attributes - - private Action createAction(ActionAttributes attr) { - Action action = null; - if (attr != null) { - String type = attr.getValue(TYPE_INDEX); - if ("toggle".equals(type)) { - // Multi state action. - action = new StateChangeAction(); - } else { - // Single state action by default. - action = new DelegateAction(); - } - configureAction(action, attr); - - addAction(attr.getValue(ID_INDEX), action); - } - return action; - } - - private boolean controlConvertedToMeta = false; - - public void setControlConvertedToMeta(boolean controlConvertedToMeta) { - this.controlConvertedToMeta = controlConvertedToMeta; - } - - public boolean isControlConvertedToMeta() { - return controlConvertedToMeta; - } - - /** - * Configures an action from the attributes. - * @param action the action to configure - * @param attr attributes to use in the configuration. 
- */ - private void configureAction(Action action, ActionAttributes attr) { - - action.putValue(Action.NAME, attr.getValue(NAME_INDEX)); - // EDIT (AHM): use SMICON_INDEX instead of ICON_INDEX - action.putValue(Action.SMALL_ICON, getIcon(attr.getValue(SMICON_INDEX))); - action.putValue(Action.ACTION_COMMAND_KEY, attr.getValue(ID_INDEX)); - action.putValue(Action.SHORT_DESCRIPTION, attr.getValue(DESC_INDEX)); - action.putValue(Action.LONG_DESCRIPTION, attr.getValue(DESC_INDEX)); - - String mnemonic = attr.getValue(MNEMONIC_INDEX); - if (mnemonic != null && !mnemonic.equals("")) { - action.putValue(Action.MNEMONIC_KEY, new Integer(mnemonic.charAt(0))); - } - String accel = attr.getValue(ACCEL_INDEX); - if (accel != null && !accel.equals("")) { - if (controlConvertedToMeta) - { - accel = accel.replaceFirst("control ", "meta "); - } - action.putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(accel)); - } - } - - /** - * Registers a callback method when the Action corresponding to any of - * the action ids is invoked. When a Component that was constructed from the - * Action identified by the action id invokes actionPerformed then the method - * named will be invoked on the handler Object. - *

          - * The method should take a String as an argument, which will be the - * id of the activated command. - *

          - * If the Action represented by the action id is a StateChangeAction, then - * the method passed should also take an int as an argument. The value of - * getStateChange() on the ItemEvent object will be passed as the parameter. - * - * @param id value of the action id - * @param handler the object which will be perform the action - * @param method the name of the method on the handler which will be called. - */ - // EDIT (AHM): added this method - public void registerGenericCallback(Collection ids, Object handler, String method) { - for (String id : ids) { - StateChangeAction sa = getStateChangeAction(id); - if (sa != null) { - // Create a handler for toogle type actions. - sa.addItemListener(new CommandInvocationHandler(handler, method, id)); - } - else { - DelegateAction a = getDelegateAction(id); - if (a != null) { - // Create a new ActionListener using the dynamic proxy api. - a.addActionListener(new CommandInvocationHandler(handler, method, id)); - } - } - } - } - - - /** - * The callback for generic methods - * - * TODO: should reimplement this class as something that can be persistable. - */ - // EDIT (AHM): added this class - private class CommandInvocationHandler implements ActionListener, ItemListener { - - private Object target; - private String methodName; - private String commandId; - - public CommandInvocationHandler(Object target, String methodName, String commandId) { - this.target = target; - this.methodName = methodName; - this.commandId = commandId; - } - - public void itemStateChanged(ItemEvent evt) { - Boolean value = Boolean.TRUE; - if (evt.getStateChange() == ItemEvent.DESELECTED) { - value = Boolean.FALSE; - } - - // Build the Statement representing the method to be invoked on the target - Statement statement = new Statement(target, methodName, new Object[] { commandId, value }); - try { - statement.execute(); - } catch (Exception ex) { - ex.printStackTrace(); - } - } - - public void actionPerformed(ActionEvent e) { - // Build the Statement representing the method to be invoked on the target - Statement statement = new Statement(target, methodName, new Object[] { commandId }); - try { - statement.execute(); - } catch (Exception ex) { - ex.printStackTrace(); - } - } - } - - - /** - * Registers a callback method when the Action corresponding to - * the action id is invoked. When a Component that was constructed from the - * Action identified by the action id invokes actionPerformed then the method - * named will be invoked on the handler Object. - *

          - * If the Action represented by the action id is a StateChangeAction, then - * the method passed should take an int as an argument. The value of - * getStateChange() on the ItemEvent object will be passed as the parameter. - * - * @param id value of the action id - * @param handler the object which will be perform the action - * @param method the name of the method on the handler which will be called. - */ - public void registerCallback(String id, Object handler, String method) { - StateChangeAction sa = getStateChangeAction(id); - if (sa != null) { - // Create a handler for toogle type actions. - sa.addItemListener(new BooleanInvocationHandler(handler, method)); - return; - } - - DelegateAction a = getDelegateAction(id); - if (a != null) { - // Create a new ActionListener using the dynamic proxy api. - a.addActionListener(EventHandler.create(ActionListener.class, - handler, method)); - } - } - - - /** - * The callback for the StateChangeAction that invokes a method with a - * boolean argument on a target. - * - * TODO: should reimplement this class as something that can be persistable. - */ - private class BooleanInvocationHandler implements ItemListener { - - private Object target; - private String methodName; - - public BooleanInvocationHandler(Object target, String methodName) { - this.target = target; - this.methodName = methodName; - } - - public void itemStateChanged(ItemEvent evt) { - Boolean value = Boolean.TRUE; - if (evt.getStateChange() == ItemEvent.DESELECTED) { - value = Boolean.FALSE; - } - - // Build the Statement representing the method to be invoked on the target - Statement statement = new Statement(target, methodName, new Object[] { value }); - try { - statement.execute(); - } catch (Exception ex) { - ex.printStackTrace(); - } - } - } - - - /** - * Determines if the Action corresponding to the action id is a state changed - * action (toggle, group type action). - * - * @param id value of the action id - * @return true if the action id represents a multi state action; false otherwise - */ - public boolean isStateChangeAction(String id) { - return (getStateChangeAction(id) != null); - } - - /** - * Implemenation of the SAX event handler which acts on elements - * and attributes defined in the action-set.dtd. - * - * This class creates the lightweight data structures which encapsulate - * the parsed xml data that can be used to contruct Actions - * and UI elements from Actions. - */ - class ActionHandler extends DefaultHandler { - - private Stack actionListStack; // keep track of nested action-lists. - private Stack actionSetStack; // keep track of nested action-sets. - - private String actionset; // current action-set id - private ActionList actionlist; // current action-list id - private String action; // current action id - private String group; // current group id - - @Override - public void startDocument() { - actionListStack = new Stack(); - actionSetStack = new Stack(); - - actionset = null; - actionlist = null; - action = null; - group = null; - } - - // Overloaded DefaultHandler methods. 
- - - @Override - public void startElement(String nameSpace, String localName, - String name, Attributes attributes) { - if (DEBUG) { - System.out.println("startElement("+nameSpace+"," - +localName+","+name+",...)"); - } - - if (ACTION_SET_ELEMENT.equals(name)) { - String newSet = attributes.getValue(ID_ATTRIBUTE); - if (actionset != null) { - actionSetStack.push(actionset); - } - actionset = newSet; - } - else if (ACTION_LIST_ELEMENT.equals(name)) { - String id = attributes.getValue(ID_ATTRIBUTE); - action = attributes.getValue(IDREF_ATTRIBUTE); - if (action == null) { - action = id; - } - ActionAttributes actionAtts = getAttributes(action); - if (actionAtts == null) { - addAttributes(attributes, actionset); - } else { - // See if some attributes are redefined - actionAtts.setAttributes(attributes); - } - - ActionList newList = new ActionList(id, action); - if (actionlist != null) { - actionlist.add(newList); - actionListStack.push(actionlist); - } - addActionList(id, newList); - - actionlist = newList; - } - else if (ACTION_ELEMENT.equals(name)) { - action = attributes.getValue(IDREF_ATTRIBUTE); - if (action == null) { - action = attributes.getValue(ID_ATTRIBUTE); - } - ActionAttributes actionAtts = getAttributes(action); - if (actionAtts == null) { - addAttributes(attributes, actionset); - } else { - // See if some attributes are redefined - actionAtts.setAttributes(attributes); - } - - - // If this action is within an action-list element then add - // it to the list. - if (actionlist != null) { - actionlist.add(action); - - // If this action is within a group element then associate - // it with the current action id. - if (group != null) { - actionlist.setGroup(action, group); - } - } - } - else if (GROUP_ELEMENT.equals(name)) { - group = attributes.getValue(ID_ATTRIBUTE); - } - else if (EMPTY_ELEMENT.equals(name)) { - if (actionlist != null) { - actionlist.add(null); - } - } - } - - @Override - public void endElement(String nameSpace, String localName, String name) { - if (DEBUG) { - System.out.println("endElement("+nameSpace+"," - +localName+","+name+")"); - } - - if (ACTION_SET_ELEMENT.equals(name)) { - try { - actionset = (String)actionSetStack.pop(); - } catch (EmptyStackException ex) { - actionset = null; - } - } - else if (ACTION_LIST_ELEMENT.equals(name)) { - try { - actionlist = (ActionList)actionListStack.pop(); - } catch (EmptyStackException ex) { - actionlist = null; - } - } - else if (GROUP_ELEMENT.equals(name)) { - group = null; - } - } - - @Override - public void endDocument() { - actionListStack = new Stack(); - actionSetStack = new Stack(); - - actionset = null; - actionlist = null; - action = null; - group = null; - } - - // - // Overloaded ErrorHandler methods for Validating parser. 
- // - - @Override - public void error(SAXParseException ex) throws SAXException { - System.out.println("**** validation error"); - reportException(ex); - } - - @Override - public void warning(SAXParseException ex) throws SAXException { - System.out.println("**** validation warning"); - reportException(ex); - } - - @Override - public void fatalError(SAXParseException ex) throws SAXException { - System.out.println("**** validation fatalError"); - reportException(ex); - } - - private void reportException(SAXParseException ex) { - System.out.println(ex.getLineNumber() + ":" + ex.getColumnNumber() + " " + ex.getMessage()); - System.out.println("Public ID: " + ex.getPublicId() + "\t" + - "System ID: " + ex.getSystemId()); - if (DEBUG) { - ex.printStackTrace(); - } - } - - } // end class ActionHandler -} diff --git a/gui/src/com/sun/jaf/ui/DelegateAction.java b/gui/src/com/sun/jaf/ui/DelegateAction.java deleted file mode 100644 index 1cce573e..00000000 --- a/gui/src/com/sun/jaf/ui/DelegateAction.java +++ /dev/null @@ -1,77 +0,0 @@ -/** - * @(#)DelegateAction.java 1.6 03/04/16 - * - * Copyright 2003 Sun Microsystems, Inc. All Rights Reserved. - * - * Redistribution and use in source and binary forms, with or - * without modification, are permitted provided that the following - * conditions are met: - * - * - Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * - Redistribution in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials - * provided with the distribution. - * - * Neither the name of Sun Microsystems, Inc. or the names of - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * This software is provided "AS IS," without a warranty of any - * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND - * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY - * EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY - * DAMAGES OR LIABILITIES SUFFERED BY LICENSEE AS A RESULT OF OR - * RELATING TO USE, MODIFICATION OR DISTRIBUTION OF THIS SOFTWARE OR - * ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE - * FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, - * SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER - * CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF - * THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS - * BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - * - * You acknowledge that this software is not designed, licensed or - * intended for use in the design, construction, operation or - * maintenance of any nuclear facility. - * - */ -package com.sun.jaf.ui; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; - -import javax.swing.AbstractAction; - -/** - * Delegates the action performed to another ActionListener. - * - * @version 1.6 04/16/03 - * @author Mark Davidson - */ -class DelegateAction extends AbstractAction { - - // single instance unicast listener. Can add multicast - // support by using an EventListenerList. 
- private ActionListener listener; - - public void addActionListener(ActionListener listener) { - this.listener = listener; - } - - public void removeActionListener(ActionListener listener) { - this.listener = null; - } - - public ActionListener[] getActionListeners() { - return new ActionListener[] { listener }; - } - - public void actionPerformed(ActionEvent evt) { - if (listener != null) { - listener.actionPerformed(evt); - } - } -} diff --git a/gui/src/com/sun/jaf/ui/StateChangeAction.java b/gui/src/com/sun/jaf/ui/StateChangeAction.java deleted file mode 100644 index 431ef8ed..00000000 --- a/gui/src/com/sun/jaf/ui/StateChangeAction.java +++ /dev/null @@ -1,117 +0,0 @@ -/** - * @(#)StateChangeAction.java 1.6 03/04/16 - * - * Copyright 2003 Sun Microsystems, Inc. All Rights Reserved. - * - * Redistribution and use in source and binary forms, with or - * without modification, are permitted provided that the following - * conditions are met: - * - * - Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * - Redistribution in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials - * provided with the distribution. - * - * Neither the name of Sun Microsystems, Inc. or the names of - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * This software is provided "AS IS," without a warranty of any - * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND - * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY - * EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY - * DAMAGES OR LIABILITIES SUFFERED BY LICENSEE AS A RESULT OF OR - * RELATING TO USE, MODIFICATION OR DISTRIBUTION OF THIS SOFTWARE OR - * ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE - * FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, - * SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER - * CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF - * THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS - * BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - * - * You acknowledge that this software is not designed, licensed or - * intended for use in the design, construction, operation or - * maintenance of any nuclear facility. - * - */ - -package com.sun.jaf.ui; - -import java.awt.event.ItemEvent; -import java.awt.event.ItemListener; - -/** - * Extends DelegateActions by adding support for ItemEvents. This class - * will forward the ItemEvent to the registered itemlisteners - * - * @author Mark Davidson - */ -class StateChangeAction extends DelegateAction - implements ItemListener { - - protected boolean selected = false; - - // ItemListener delegate. itemSelected Events are forwarded - // to this listener for handling. - private ItemListener listener; - - /** - * @return true if the action is in the selected state - */ - public boolean isSelected() { - return selected; - } - - /** - * Changes the state of the action - * @param newValue true to set the action as selected of the action. 
- */ - public synchronized void setSelected(boolean newValue) { - boolean oldValue = this.selected; - if (oldValue != newValue) { - this.selected = newValue; - firePropertyChange("selected", Boolean.valueOf(oldValue), - Boolean.valueOf(newValue)); - } - } - - /** - * Sets the ItemListener to delegate this action to. - */ - public void addItemListener(ItemListener listener) { - this.listener = listener; - } - - public ItemListener[] getItemListeners() { - return new ItemListener[] { listener }; - } - - public void removeItemListener(ItemListener listener) { - this.listener = null; - } - - public void itemStateChanged(ItemEvent evt) { - // Update all objects that share this item - boolean newValue; - boolean oldValue = this.selected; - - if (evt.getStateChange() == ItemEvent.SELECTED) { - newValue = true; - } else { - newValue = false; - } - - if (oldValue != newValue) { - setSelected(newValue); - - // Forward the event to the delgate for handling. - if (listener != null) { - listener.itemStateChanged(evt); - } - } - } -} diff --git a/gui/src/com/sun/jaf/ui/ToggleActionPropertyChangeListener.java b/gui/src/com/sun/jaf/ui/ToggleActionPropertyChangeListener.java deleted file mode 100644 index 9b610b5d..00000000 --- a/gui/src/com/sun/jaf/ui/ToggleActionPropertyChangeListener.java +++ /dev/null @@ -1,74 +0,0 @@ -/** - * @(#)ToggleActionPropertyChangeListener.java 1.5 02/10/07 - * - * Copyright 2002 Sun Microsystems, Inc. All Rights Reserved. - * - * Redistribution and use in source and binary forms, with or - * without modification, are permitted provided that the following - * conditions are met: - * - * - Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * - Redistribution in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials - * provided with the distribution. - * - * Neither the name of Sun Microsystems, Inc. or the names of - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * This software is provided "AS IS," without a warranty of any - * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND - * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY - * EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY - * DAMAGES OR LIABILITIES SUFFERED BY LICENSEE AS A RESULT OF OR - * RELATING TO USE, MODIFICATION OR DISTRIBUTION OF THIS SOFTWARE OR - * ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE - * FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, - * SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER - * CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF - * THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS - * BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - * - * You acknowledge that this software is not designed, licensed or - * intended for use in the design, construction, operation or - * maintenance of any nuclear facility. 
- * - */ - -package com.sun.jaf.ui; - -import java.beans.PropertyChangeListener; -import java.beans.PropertyChangeEvent; - -import javax.swing.AbstractButton; - -/** - * Added to the Toggle type buttons and menu items so that various components - * which have been created from a single StateChangeAction can be in synch - */ -class ToggleActionPropertyChangeListener implements PropertyChangeListener { - - // XXX - Should be a WeakRef since it's unreachable! - // this is a potential memory leak but we don't really have to - // worry about it since the most of the time the buttons will be - // loaded for the lifetime of the application. Should make it - // weak referenced for a general purpose toolkit. - private AbstractButton button; - - public ToggleActionPropertyChangeListener(AbstractButton button) { - this.button = button; - } - - public void propertyChange(PropertyChangeEvent evt) { - String propertyName = evt.getPropertyName(); - - if (propertyName.equals("selected")) { - Boolean selected = (Boolean)evt.getNewValue(); - button.setSelected(selected.booleanValue()); - } - } -} diff --git a/gui/src/com/sun/jaf/ui/UIFactory.java b/gui/src/com/sun/jaf/ui/UIFactory.java deleted file mode 100644 index 7b5f5f54..00000000 --- a/gui/src/com/sun/jaf/ui/UIFactory.java +++ /dev/null @@ -1,485 +0,0 @@ -/** - * @(#)UIFactory.java 1.13 03/04/16 - * - * Copyright 2003 Sun Microsystems, Inc. All Rights Reserved. - * - * Redistribution and use in source and binary forms, with or - * without modification, are permitted provided that the following - * conditions are met: - * - * - Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * - Redistribution in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials - * provided with the distribution. - * - * Neither the name of Sun Microsystems, Inc. or the names of - * contributors may be used to endorse or promote products derived - * from this software without specific prior written permission. - * - * This software is provided "AS IS," without a warranty of any - * kind. ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND - * WARRANTIES, INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY - * EXCLUDED. SUN AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY - * DAMAGES OR LIABILITIES SUFFERED BY LICENSEE AS A RESULT OF OR - * RELATING TO USE, MODIFICATION OR DISTRIBUTION OF THIS SOFTWARE OR - * ITS DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE - * FOR ANY LOST REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, - * SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER - * CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, ARISING OUT OF - * THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF SUN HAS - * BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - * - * You acknowledge that this software is not designed, licensed or - * intended for use in the design, construction, operation or - * maintenance of any nuclear facility. - * - */ -package com.sun.jaf.ui; - -import java.util.HashMap; -import java.util.Iterator; - -import javax.swing.*; - -// NB: modified from original source: most removing enforcement of -// the singleton pattern - -/** - * Creates user interface elements based on action-lists managed - * in an ActionManagaer. 
- * @see ActionManager - */ -public class UIFactory { - - private static UIFactory INSTANCE; - - private ActionManager manager; - private HashMap groupMap; - - public UIFactory() { - } - - public UIFactory(ActionManager manager) { - this.manager = manager; - } - - /** - * Return the instance of the UIFactory if this will be used - * as a singleton. The instance will be created if it hasn't - * previously been set. - * - * @return the UIFactory instance. - * @see #setInstance - */ - public static UIFactory getInstance() { - if (INSTANCE == null) { - INSTANCE = new UIFactory(); - } - return INSTANCE; - } - - /** - * Gets the ActionManager instance. If the ActionManager has not been explicitly - * set then the default ActionManager instance will be used. - * - * @return the ActionManager used by the UIFactory. - * @see #setActionManager - */ - public ActionManager getActionManager() { - if (manager == null) { - manager = ActionManager.getInstance(); - } - return manager; - } - - /** - * Sets the ActionManager instance that will be used by this UIFactory - */ - public void setActionManager(ActionManager manager) { - this.manager = manager; - } - - - /** - * Constructs a toolbar from an action-list id. By convention, - * the identifier of the main toolbar should be "main-toolbar" - * - * @param id action-list id which should be used to construct the toolbar. - * @return the toolbar or null - */ - public JToolBar createToolBar(String id) { - ActionList list = getActionManager().getActionList(id); - if (list == null) { - return null; - } - - JToolBar toolbar = new JToolBar(); - Iterator iter = list.iterator(); - while(iter.hasNext()) { - Object element = iter.next(); - - if (element == null) { - toolbar.addSeparator(); - } else if (element instanceof String) { - toolbar.add(createButton((String)element, - list.getGroup((String)element), - toolbar)); - } - } - return toolbar; - } - - /** - * Constructs a popup menu from an action-list id. - * - * @param id action-list id which should be used to construct the popup. - * @return the popup or null - */ - public JPopupMenu createPopup(String id) { - ActionList list = getActionManager().getActionList(id); - if (list == null) { - return null; - } - - JPopupMenu popup = new JPopupMenu(); - Iterator iter = list.iterator(); - while(iter.hasNext()) { - Object element = iter.next(); - - if (element == null) { - popup.addSeparator(); - } else if (element instanceof String) { - popup.add(createMenuItem((String)element, - list.getGroup((String)element), - popup)); - } - } - return popup; - } - - /** - * Constructs a menu tree from an action-list id. The - * elements of the action-list will represent menus. By convention, the - * top level menu bar action-list id should be "main-menu" - * - * @param id the action-list id which represents the root item - * @return a menu bar which represents the xml tree or null if the - * tree cannot be found. 
- */ - public JMenuBar createMenuBar(String id) { - ActionList list = getActionManager().getActionList(id); - if (list == null) { - return null; - } - - JMenuBar menubar = new JMenuBar(); - JMenu menu = null; - - Iterator iter = list.iterator(); - while(iter.hasNext()) { - Object element = iter.next(); - - if (element == null) { - if (menu != null) { - menu.addSeparator(); - } - } else if (element instanceof String) { - if (menu != null) { - menu.add(createMenuItem((String)element, - list.getGroup((String)element), - menu)); - } - } else if (element instanceof ActionList) { - menu = createMenu((ActionList)element); - if (menu != null) { - menubar.add(menu); - } - } - } - return menubar; - } - - - /** - * Creates and returns a menu from an action-list id. The menu - * constructed will have the attributes from the action id the ActionList. - * - * @param id the action-list id that may represents a menu. - * @return the constructed JMenu or null if an action-list cannot be found. - */ - public JMenu createMenu(String id ) { - ActionList list = getActionManager().getActionList(id); - if (list == null) { - return null; - } - return createMenu(list); - } - - /** - * Creates and returns a menu from an ActionList class. The menu - * constructed will have the attributes from the action id the ActionList. - * - * @param list an ActionList that may represents a menu. - * @return the constructed JMenu or null if the ActionList doesn't have an - * action id - */ - private JMenu createMenu(ActionList list) { - Action action = getAction(list.getActionRef()); - if (action == null) { - return null; - } - JMenu menu = new JMenu(action); - - Iterator iter = list.iterator(); - while(iter.hasNext()) { - Object element = iter.next(); - if (element == null) { - menu.addSeparator(); - } else if (element instanceof String) { - menu.add(createMenuItem((String)element, - list.getGroup((String)element), - menu)); - } else if (element instanceof ActionList) { - JMenu newMenu = createMenu((ActionList)element); - if (newMenu != null) { - menu.add(newMenu); - } - } - } - return menu; - } - - - /** - * Convenience method to get the action from an ActionManager. - */ - private Action getAction(String id) { - Action action = getActionManager().getAction(id); - if (action == null) { - throw new RuntimeException("ERROR: No Action for " + id); - } - return action; - } - - /** - * Returns the button group corresponding to the groupid - * - * @param groupid the value of the groupid attribute for the action element - */ - private ButtonGroup getGroup(String groupid, JComponent container) { - if (groupMap == null) { - groupMap = new HashMap(); - } - Integer hashCode = new Integer(groupid.hashCode() ^ container.hashCode()); - - ButtonGroup group = groupMap.get(hashCode); - if (group == null) { - group = new ButtonGroup(); - groupMap.put(hashCode, group); - } - return group; - } - - /** - * Creates a menu item based on the attributes of the action element. - * Will return a JMenuItem, JRadioButtonMenuItem or a JCheckBoxMenuItem - * depending on the context of the type and groupid attibute. - * - * @return a JMenuItem or subclass depending on type. - */ - private JMenuItem createMenuItem(String id, String groupid, - JComponent container) { - // swich on type dictated by the type of action ie., - // StateChangeAction vs. 
DelegateAction - Action action = getAction(id); - JMenuItem menuItem; - if (action instanceof StateChangeAction) { - // If this action has a groupid attribute then it's a - // GroupAction - if (groupid != null) { - menuItem = createRadioButtonMenuItem(getGroup(groupid, container), - (StateChangeAction)action); - } else { - menuItem = createCheckBoxMenuItem((StateChangeAction)action); - } - } else { - menuItem = createMenuItem(action); - } - return menuItem; - } - - - /** - * Creates a button based on the attributes of the action element. - * Will return a JButton or a JToggleButton. - */ - private AbstractButton createButton(String id, String groupid, - JComponent container) { - - // swich on type dictated by the type of action ie., - // StateChangeAction vs. DelegateAction - Action action = getAction(id); - if (action == null) { - // XXX - - System.out.println("ERROR: Action doesn't exist for " + id); - return null; - } - - AbstractButton button; - if (action instanceof StateChangeAction) { - // If this action has a groupid attribute then it's a - // GroupAction - if (groupid == null) { - button = createToggleButton((StateChangeAction)action); - } else { - button = createToggleButton((StateChangeAction)action, - getGroup(groupid, container)); - } - } else { - button = createButton(action); - } - return button; - } - - /** - * Adds and configures the button based on the action. - */ - private JButton createButton(Action action) { - JButton button = new JButton(action); - - configureButton(button, action); - return button; - } - - /** - * Adds and configures a toggle button. - * @param a an abstraction of a toggle action. - */ - private JToggleButton createToggleButton(StateChangeAction a) { - return createToggleButton(a, null); - } - - /** - * Adds and configures a toggle button. - * @param a an abstraction of a toggle action. - * @param group the group to add the toggle button or null - */ - private JToggleButton createToggleButton(StateChangeAction a, ButtonGroup group) { - JToggleButton button = new JToggleButton(a); - button.addItemListener(a); - button.setSelected(a.isSelected()); - if (group != null) { - group.add(button); - } - configureToggleButton(button, a); - return button; - } - - /** - * This method will be called after toggle buttons are created. - * Override for custom configuration but the overriden method should be called - * first. - * - * @param button the button to be configured - * @param action the action used to construct the menu item. - */ - protected void configureToggleButton(JToggleButton button, Action action) { - configureButton(button, action); - - // The PropertyChangeListener that gets added - // to the Action doesn't know how to handle the "selected" property change - // in the meantime, the corect thing to do is to add another PropertyChangeListener - // to the StateChangeAction until this is fixed. - action.addPropertyChangeListener(new ToggleActionPropertyChangeListener(button)); - } - - - /** - * This method will be called after buttons created from an action. - * Override for custom configuration. - * - * @param button the button to be configured - * @param action the action used to construct the menu item. - */ - protected void configureButton(AbstractButton button, Action action) { - if (action.getValue(Action.SHORT_DESCRIPTION) == null) { - button.setToolTipText((String)action.getValue(Action.NAME)); - } - - // Don't show the text under the toolbar buttons. 
- button.setText(""); - } - - - - /** - * This method will be called after toggle type menu items (like - * JRadioButtonMenuItem and JCheckBoxMenuItem) are created. - * Override for custom configuration but the overriden method should be called - * first. - * - * @param menuItem the menu item to be configured - * @param action the action used to construct the menu item. - */ - protected void configureToggleMenuItem(JMenuItem menuItem, Action action) { - configureMenuItem(menuItem, action); - - // The PropertyChangeListener that gets added - // to the Action doesn't know how to handle the "selected" property change - // in the meantime, the corect thing to do is to add another PropertyChangeListener - // to the StateChangeAction until this is fixed. - action.addPropertyChangeListener(new ToggleActionPropertyChangeListener(menuItem)); - } - - - /** - * This method will be called after menu items are created. - * Override for custom configuration. - * - * @param menuItem the menu item to be configured - * @param action the action used to construct the menu item. - */ - protected void configureMenuItem(JMenuItem menuItem, Action action) { - } - - - /** - * Helper method to configure each item consistantly - */ - private JMenuItem createMenuItem(Action action) { - JMenuItem mi = new JMenuItem(action); - configureMenuItem(mi, action); - return mi; - } - - /** - * Helper method to add a checkbox menu item. - */ - private JCheckBoxMenuItem createCheckBoxMenuItem(StateChangeAction a) { - JCheckBoxMenuItem mi = new JCheckBoxMenuItem(a); - mi.addItemListener(a); - mi.setSelected(a.isSelected()); - - configureToggleMenuItem(mi, a); - return mi; - } - - /** - * Helper method to add a radio button menu item. - */ - private JRadioButtonMenuItem createRadioButtonMenuItem(ButtonGroup group, - StateChangeAction a) { - JRadioButtonMenuItem mi = new JRadioButtonMenuItem(a); - mi.addItemListener(a); - mi.setSelected(a.isSelected()); - if (group != null) { - group.add(mi); - } - configureToggleMenuItem(mi, a); - return mi; - } -} diff --git a/gui/src/com/sun/jaf/ui/resources/action-set.dtd b/gui/src/com/sun/jaf/ui/resources/action-set.dtd deleted file mode 100644 index a47939e3..00000000 --- a/gui/src/com/sun/jaf/ui/resources/action-set.dtd +++ /dev/null @@ -1,120 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/gui/src/org/apache/commons/collections15/contrib/HashCodeComparator.java b/gui/src/org/apache/commons/collections15/contrib/HashCodeComparator.java deleted file mode 100644 index 56f4f42e..00000000 --- a/gui/src/org/apache/commons/collections15/contrib/HashCodeComparator.java +++ /dev/null @@ -1,17 +0,0 @@ -package org.apache.commons.collections15.contrib; - -import java.util.Comparator; - -/** - * Dummy comparator, when we need an order, but don't care what it is. 
- * @author aleks
- *
- * @param <T>
- */
-public class HashCodeComparator<T> implements Comparator<T> {
-    public int compare(T o1, T o2) {
-        if (o1.hashCode() < o2.hashCode()) return -1;
-        else if (o1.hashCode() > o2.hashCode()) return 1;
-        else return 0;
-    }
-}
diff --git a/gui/src/quanto/core/BadResponseException.java b/gui/src/quanto/core/BadResponseException.java
deleted file mode 100644
index 49545920..00000000
--- a/gui/src/quanto/core/BadResponseException.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package quanto.core;
-
-/**
- * The core returned an unexpected response
- */
-public class BadResponseException extends CoreException {
-
-    private static final long serialVersionUID = 1034523458932534589L;
-    private String response;
-
-    public BadResponseException(String msg, String response) {
-        super(msg);
-        this.response = response;
-    }
-
-    public BadResponseException(String response) {
-        this.response = response;
-    }
-
-    public String getResponse() {
-        return response;
-    }
-}
diff --git a/gui/src/quanto/core/Completer.java b/gui/src/quanto/core/Completer.java
deleted file mode 100644
index 44ac6ddb..00000000
--- a/gui/src/quanto/core/Completer.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package quanto.core;
-
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-/**
- * A class for storing and retrieving command completions.
- * @author alek
- *
- */
-public class Completer {
-    private SortedSet<String> lexicon;
-
-    public Completer() {
-        lexicon = new TreeSet<String>();
-    }
-
-    public void addWord(String word) {
-        lexicon.add(word);
-    }
-
-    public SortedSet<String> getCompletions(String prefix) {
-        return lexicon.subSet(prefix, prefix + Character.MAX_VALUE);
-    }
-
-    /**
-     * Find the greatest common prefix of a sorted set.
-     * @param compl
-     * @return
-     */
-    public static String greatestCommonPrefix(SortedSet<String> compl) {
-        StringBuilder buf = new StringBuilder(compl.last().length());
-        char[] s1 = compl.first().toCharArray();
-        char[] s2 = compl.last().toCharArray();
-        for (int i = 0; i < Math.min(s1.length, s2.length); i++) {
-            if (s1[i] == s2[i]) buf.append(s1[i]);
-            else break;
-        }
-        return buf.toString();
-    }
-
-    public SortedSet<String> getLexicon() {
-        return lexicon;
-    }
-
-    public void setLexicon(SortedSet<String> lexicon) {
-        this.lexicon = lexicon;
-    }
-}
diff --git a/gui/src/quanto/core/ConsoleInterface.java b/gui/src/quanto/core/ConsoleInterface.java
deleted file mode 100644
index abc6d42c..00000000
--- a/gui/src/quanto/core/ConsoleInterface.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */ - -package quanto.core; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.StringReader; -import java.util.logging.Level; -import java.util.logging.Logger; -import quanto.core.protocol.CoreTalker; - -/** - * Simulates a console interface to the core backend - * - * @author alemer - */ -public class ConsoleInterface { - private final static Logger logger = Logger - .getLogger("quanto.core"); - - public interface ResponseListener { - public void responseReceived(String response); - } - - private CoreTalker core; - private ResponseListener responseListener; - private Completer completer; - - public ConsoleInterface(CoreTalker core) { - this.core = core; - - completer = new Completer(); - - try { - logger.finest("Retrieving commands..."); - String[] commands = core.consoleCommandList(); - for (String cmd : commands) { - completer.addWord(cmd); - } - logger.finest("Commands retrieved successfully"); - } catch (CoreException ex) { - logger.log(Level.WARNING, "Failed to retreive commands for completion", ex); - } - - } - - public void setResponseListener(ResponseListener responseListener) { - this.responseListener = responseListener; - } - - public ResponseListener getResponseListener() { - return responseListener; - } - - public Completer getCompleter() { - return completer; - } - - /** - * Execute the command asynchronously, depending on the response listener to - * deal with the reply. - * - * Note: currently, this is a fake - it just calls inputCommandSync. - * - * @param input - * @throws quanto.gui.QuantoCore.CoreException - */ - public void inputCommandAsync(String input) throws CoreException, - ParseException { - inputCommandSync(input, true); - } - - public String inputCommandSync(String input, boolean notify) - throws CoreException, ParseException { - String ret; - ret = core.consoleCommand(input); - if (notify && (responseListener != null)) { - responseListener.responseReceived(ret); - } - return ret; - } -} diff --git a/gui/src/quanto/core/ConsoleOutput.java b/gui/src/quanto/core/ConsoleOutput.java deleted file mode 100644 index 97653b02..00000000 --- a/gui/src/quanto/core/ConsoleOutput.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package quanto.core; - -/** - * A console output. 
- * - * @author alex - */ -public interface ConsoleOutput { - /** - * Output a string to the console - * @param output The string to output - */ - void print(Object output); - /** - * Output a string to the console, followed by a newline - * - * @param output The string to output - */ - void println(Object output); - /** - * Output an error message to the console - * - * @param message The message to output - */ - void error(Object message); -} diff --git a/gui/src/quanto/core/Core.java b/gui/src/quanto/core/Core.java deleted file mode 100644 index 4d41d4e0..00000000 --- a/gui/src/quanto/core/Core.java +++ /dev/null @@ -1,542 +0,0 @@ -package quanto.core; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.LinkedList; -import java.util.List; -import java.util.logging.Logger; -import javax.swing.event.EventListenerList; -import org.apache.commons.collections15.CollectionUtils; -import org.apache.commons.collections15.Transformer; -import quanto.core.data.*; -import quanto.core.protocol.CoreTalker; - -/** - * Provides a nicer interface to the core - * - * @author alex - */ -public class Core { - - private final static Logger logger = Logger.getLogger("quanto.core"); - EventListenerList listenerList = new EventListenerList(); - private CoreTalker talker; - private Theory activeTheory; - private Ruleset ruleset; - private ObjectMapper jsonMapper = new ObjectMapper(); - - public Core(CoreTalker talker) throws CoreException { - this.talker = talker; - this.ruleset = new Ruleset(this); - } - - public Core(CoreTalker talker, Theory theory) throws CoreException { - this(talker); - talker.changeTheory(theory.getCoreName()); - this.activeTheory = theory; - } - - public void updateCoreTheory(Theory theory) throws CoreException { - fireTheoryAboutToChange(theory); - Theory oldTheory = activeTheory; - talker.changeTheory(theory.getCoreName()); - this.activeTheory = theory; - fireTheoryChanged(oldTheory); - } - - public void addCoreChangeListener(CoreChangeListener l) { - listenerList.add(CoreChangeListener.class, l); - } - - public void removeCoreChangeListener(CoreChangeListener l) { - listenerList.remove(CoreChangeListener.class, l); - } - - protected void fireTheoryAboutToChange(Theory newTheory) { - TheoryChangeEvent coreEvent = null; - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length - 2; i >= 0; i -= 2) { - if (listeners[i] == CoreChangeListener.class) { - // Lazily create the event: - if (coreEvent == null) { - coreEvent = new TheoryChangeEvent(this, activeTheory, newTheory); - } - ((CoreChangeListener) listeners[i + 1]).theoryAboutToChange(coreEvent); - } - } - } - - protected void fireTheoryChanged(Theory oldTheory) { - TheoryChangeEvent coreEvent = null; - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length - 2; i >= 0; i -= 2) { - if (listeners[i] == CoreChangeListener.class) { - // Lazily create the event: - if (coreEvent == null) { - coreEvent = new TheoryChangeEvent(this, oldTheory, activeTheory); - } - ((CoreChangeListener) listeners[i + 1]).theoryChanged(coreEvent); - } - } - } - - public Theory getActiveTheory() { - return activeTheory; - } - - public Ruleset getRuleset() { - return ruleset; - } - private Transformer namer = new Transformer() { - public String transform(CoreObject i) { - return i.getCoreName(); - } - 
}; - - private Collection names(Collection c) { - if (c == null) { - return null; - } - return CollectionUtils.collect(c, namer); - } - - public CoreTalker getTalker() { - return talker; - } - - private void assertCoreGraph(CoreGraph graph) { - if (graph.getCoreName() == null) { - throw new IllegalStateException("The graph does not have a name"); - } - } - - public CoreGraph createEmptyGraph() throws CoreException { - return new CoreGraph(activeTheory, talker.loadEmptyGraph()); - } - - public CoreGraph loadGraph(File location) throws CoreException, IOException { - CoreGraph g = new CoreGraph(activeTheory, talker.loadGraphFromFile(location.getAbsolutePath())); - updateGraph(g); - g.setFileName(location.getAbsolutePath()); - return g; - } - - public void saveGraph(CoreGraph graph, File location) throws CoreException, - IOException { - assertCoreGraph(graph); - talker.saveGraphToFile(graph.getCoreName(), location.getAbsolutePath()); - } - - public void updateGraph(CoreGraph graph) throws CoreException { - try { - String json = talker.exportGraphAsJson(graph.getCoreName()); - JsonNode node = jsonMapper.readValue(json, JsonNode.class); - graph.updateFromJson(node); - graph.fireStateChanged(); - } catch (IOException ex) { - throw new CoreCommunicationException("Failed to parse JSON from core", ex); - } catch (ParseException ex) { - throw new CoreCommunicationException("Failed to parse JSON from core", ex); - } - } - - public void renameGraph(CoreGraph graph, String suggestedNewName) - throws CoreException { - assertCoreGraph(graph); - graph.updateCoreName(talker.renameGraph(graph.getCoreName(), - suggestedNewName)); - } - - public void undo(CoreGraph graph) throws CoreException { - assertCoreGraph(graph); - talker.undo(graph.getCoreName()); - updateGraph(graph); - } - - public void redo(CoreGraph graph) throws CoreException { - assertCoreGraph(graph); - talker.redo(graph.getCoreName()); - updateGraph(graph); - } - - public void undoRewrite(CoreGraph graph) throws CoreException { - assertCoreGraph(graph); - talker.undoRewrite(graph.getCoreName()); - updateGraph(graph); - } - - public void redoRewrite(CoreGraph graph) throws CoreException { - assertCoreGraph(graph); - talker.redoRewrite(graph.getCoreName()); - updateGraph(graph); - } - - public void startUndoGroup(CoreGraph graph) throws CoreException { - assertCoreGraph(graph); - talker.startUndoGroup(graph.getCoreName()); - } - - public void endUndoGroup(CoreGraph graph) throws CoreException { - assertCoreGraph(graph); - talker.endUndoGroup(graph.getCoreName()); - } - - public Vertex addVertex(CoreGraph graph, String vertexType) - throws CoreException { - try { - assertCoreGraph(graph); - String json = talker.addVertex(graph.getCoreName(), vertexType); - JsonNode node = jsonMapper.readTree(json); - Vertex v = Vertex.fromJson(activeTheory, node); - graph.addVertex(v); - graph.fireStateChanged(); - return v; - } catch (IOException ex) { - throw new CoreCommunicationException("Could not parse JSON from core", ex); - } catch (ParseException ex) { - throw new CoreCommunicationException("Could not parse JSON from core", ex); - } - } - - public Vertex addBoundaryVertex(CoreGraph graph) throws CoreException { - return addVertex(graph, "edge-point"); - } - - public void setVertexAngle(CoreGraph graph, Vertex v, String angle) - throws CoreException { - assertCoreGraph(graph); - talker.setVertexData(graph.getCoreName(), v.getCoreName(), angle); - v.getData().setString(angle); - graph.fireStateChanged(); - } - - public void deleteVertices(CoreGraph graph, 
Collection vertices) - throws CoreException { - assertCoreGraph(graph); - talker.deleteVertices(graph.getCoreName(), names(vertices)); - for (Vertex v : vertices) { - graph.removeVertex(v); - } - graph.fireStateChanged(); - } - - public Edge addEdge(CoreGraph graph, boolean directed, Vertex source, Vertex target) - throws CoreException { - try { - assertCoreGraph(graph); - String json = talker.addEdge(graph.getCoreName(), - "unit", - directed, - source.getCoreName(), - target.getCoreName()); - JsonNode node = jsonMapper.readTree(json); - Edge.EdgeData ed = Edge.fromJson(activeTheory, node); - - if (!source.getCoreName().equals(ed.source)) { - throw new CoreException("Source name from core did not match what we sent"); - } - if (!target.getCoreName().equals(ed.target)) { - throw new CoreException("Target name from core did not match what we sent"); - } - graph.addEdge(ed.edge, source, target); - graph.fireStateChanged(); - return ed.edge; - } catch (IOException ex) { - throw new CoreCommunicationException("Could not parse JSON from core", ex); - } catch (ParseException ex) { - throw new CoreCommunicationException("Could not parse JSON from core", ex); - } - } - - public void deleteEdges(CoreGraph graph, Collection edges) - throws CoreException { - assertCoreGraph(graph); - talker.deleteEdges(graph.getCoreName(), names(edges)); - for (Edge e : edges) { - graph.removeEdge(e); - } - graph.fireStateChanged(); - } - - private Collection lookupVertices(CoreGraph graph, Collection vnames) { - List verts = new ArrayList(vnames.size()); - for (Vertex v : graph.getVertices()) { - if (vnames.contains(v.getCoreName())) - verts.add(v); - } - return verts; - } - - public BangBox addBangBox(CoreGraph graph, Collection vertices) - throws CoreException { - try { - assertCoreGraph(graph); - String bbdesc = talker.addBangBox(graph.getCoreName(), names(vertices)); - JsonNode bbjson = jsonMapper.readTree(bbdesc); - BangBox.BangBoxData bbdata = BangBox.fromJson(graph.getTheory(), bbjson); - graph.addBangBox(bbdata.bangBox, lookupVertices(graph, bbdata.contents)); - graph.fireStateChanged(); - return bbdata.bangBox; - } catch (IOException ex) { - throw new CoreCommunicationException("Could not parse JSON from core", ex); - } catch (ParseException ex) { - throw new CoreCommunicationException("Could not parse JSON from core", ex); - } - } - - public Collection bangVertices(CoreGraph graph, BangBox bangBox, Collection vertices) - throws CoreException { - assertCoreGraph(graph); - if (!graph.containsBangBox(bangBox)) { - throw new IllegalStateException("The graph does not contain that !-box"); - } - String[] vnames = talker.bangVertices(graph.getCoreName(), bangBox.getCoreName(), names(vertices)); - Collection vs = lookupVertices(graph, Arrays.asList(vnames)); - graph.addVerticesToBangBox(bangBox, vs); - graph.fireStateChanged(); - return vs; - } - - public void removeVerticesFromBangBoxes(CoreGraph graph, - Collection vertices) throws CoreException { - assertCoreGraph(graph); - talker.unbangVertices(graph.getCoreName(), names(vertices)); - updateGraph(graph); - } - - public void dropBangBoxes(CoreGraph graph, Collection bboxen) - throws CoreException { - assertCoreGraph(graph); - talker.dropBangBoxes(graph.getCoreName(), names(bboxen)); - for (BangBox bb : bboxen) { - graph.removeBangBox(bb); - } - graph.fireStateChanged(); - } - - public void killBangBoxes(CoreGraph graph, Collection bboxen) - throws CoreException { - assertCoreGraph(graph); - talker.killBangBoxes(graph.getCoreName(), names(bboxen)); - for (BangBox 
bb : bboxen) { - List verts = new ArrayList(graph.getBoxedVertices(bb)); - for (Vertex v : verts) { - graph.removeVertex(v); - } - graph.removeBangBox(bb); - } - graph.fireStateChanged(); - } - - public BangBox mergeBangBoxes(CoreGraph graph, Collection bboxen) - throws CoreException { - assertCoreGraph(graph); - BangBox newbb = new BangBox(talker.mergeBangBoxes(graph.getCoreName(), - names(bboxen))); - List contents = new LinkedList(); - for (BangBox bb : bboxen) { - for (Vertex v : graph.getBoxedVertices(bb)) { - contents.add(v); - } - graph.removeBangBox(bb); - } - graph.addBangBox(newbb, contents); - graph.fireStateChanged(); - return newbb; - } - - public BangBox duplicateBangBox(CoreGraph graph, BangBox bbox) - throws CoreException { - assertCoreGraph(graph); - String name = talker.duplicateBangBox(graph.getCoreName(), - bbox.getCoreName()); - updateGraph(graph); - for (BangBox bb : graph.getBangBoxes()) { - if (bb.getCoreName().equals(name)) { - return bb; - } - } - return null; - } - - public void replaceRuleset(File location) throws CoreException, IOException { - talker.replaceRulesetFromFile(location.getAbsolutePath()); - this.ruleset.reload(); - } - - public void renameBangBox(CoreGraph graph, String oldName, String newName) - throws CoreException { - assertCoreGraph(graph); - talker.renameBangBox(graph.getCoreName(), oldName, newName); - } - - public void loadRuleset(File location) throws CoreException, IOException { - talker.importRulesetFromFile(location.getAbsolutePath()); - this.ruleset.reload(); - } - - public void saveRuleset(File location) throws CoreException, IOException { - talker.exportRulesetToFile(location.getAbsolutePath()); - } - - /** - * Creates a rule from two graphs. - * - * Any existing rule with the same name will be replaced. 
- * - * @param ruleName - * @param lhs - * @param rhs - * @return - * @throws CoreException - */ - public Rule createRule(String ruleName, CoreGraph lhs, - CoreGraph rhs) throws CoreException { - assertCoreGraph(lhs); - assertCoreGraph(rhs); - talker.setRule(ruleName, lhs.getCoreName(), rhs.getCoreName()); - // FIXME: get actual rule active state from core - if (!this.ruleset.getRules().contains(ruleName)) { - this.ruleset.ruleAdded(ruleName, false); - } - return new Rule(ruleName, lhs, rhs); - } - - public Rule openRule(String ruleName) throws CoreException { - CoreGraph lhs = new CoreGraph(activeTheory, talker.openRuleLhs(ruleName)); - updateGraph(lhs); - CoreGraph rhs = new CoreGraph(activeTheory, talker.openRuleRhs(ruleName)); - updateGraph(rhs); - return new Rule(ruleName, lhs, rhs); - } - - public void saveRule(Rule rule) throws CoreException { - if (rule.getCoreName() == null) { - throw new IllegalArgumentException("Rule has no name"); - } - talker.setRule(rule.getCoreName(), rule.getLhs().getCoreName(), - rule.getRhs().getCoreName()); - } - - public void fastNormalise(CoreGraph graph) throws CoreException { - boolean didRewrites = false; - try { - int rwCount = talker.attachOneRewrite(graph.getCoreName()); - while (rwCount > 0) { - talker.applyAttachedRewrite(graph.getCoreName(), 0); - didRewrites = true; - rwCount = talker.attachOneRewrite(graph.getCoreName()); - } - } catch (CoreException e) { - if (!e.getMessage().contains("No more rewrites.")) { - throw e; - } - } - if (didRewrites) { - updateGraph(graph); - } - } - - public void cutSubgraph(CoreGraph graph, Collection vertices) - throws CoreException { - assertCoreGraph(graph); - Collection vnames = names(vertices); - talker.copySubgraphAndOverwrite(graph.getCoreName(), "__clip__", vnames); - talker.deleteVertices(graph.getCoreName(), vnames); - for (Vertex v : vertices) { - graph.removeVertex(v); - } - graph.fireStateChanged(); - } - - public void copySubgraph(CoreGraph graph, Collection vertices) - throws CoreException { - assertCoreGraph(graph); - talker.copySubgraphAndOverwrite(graph.getCoreName(), "__clip__", names(vertices)); - } - - public void paste(CoreGraph target) throws CoreException { - assertCoreGraph(target); - talker.insertGraph("__clip__", target.getCoreName()); - updateGraph(target); - } - - public int attachRewrites(CoreGraph graph, Collection vertices) - throws CoreException { - return talker.attachRewrites(graph.getCoreName(), names(vertices)); - } - - public boolean attachOneRewrite(CoreGraph graph, Collection vertices) - throws CoreException { - return talker.attachOneRewrite(graph.getCoreName(), names(vertices)) > 0; - } - - public List getAttachedRewrites(CoreGraph graph) - throws CoreException { - try { - String json = talker.listAttachedRewrites(graph.getCoreName()); - JsonNode rewritesNode = jsonMapper.readTree(json); - if (!rewritesNode.isArray()) { - throw new ParseException("Expected array"); - } - List rws = new ArrayList(rewritesNode.size()); - int i = 0; - for (JsonNode node : rewritesNode) { - rws.add(AttachedRewrite.fromJson(graph, i, node)); - ++i; - } - return rws; - } catch (IOException ex) { - throw new CoreCommunicationException("Could not parse JSON from core", ex); - } catch (ParseException ex) { - throw new CoreCommunicationException("Could not parse JSON from core", ex); - } - } - - public void applyAttachedRewrite(CoreGraph graph, int i) - throws CoreException { - try { - String json = talker.applyAttachedRewrite(graph.getCoreName(), i); - JsonNode node = jsonMapper.readValue(json, 
JsonNode.class); - graph.updateFromJson(node); - graph.fireStateChanged(); - } catch (IOException ex) { - throw new CoreCommunicationException("Failed to parse JSON from core", ex); - } catch (ParseException ex) { - throw new CoreCommunicationException("Failed to parse JSON from core", ex); - } - } - - /** - * Rename a vertex. - * - * Note that if a vertex with the new name already exists, that vertex (and - * not v) will be given a new name. - * - * @param graph The graph the vertex is in - * @param v The vertex to rename - * @param newName The new name for the vertex - * @return if a vertex called newName already existed, the new name for that - * vertex, otherwise null - * @throws CoreException - */ - public String renameVertex(CoreGraph graph, Vertex v, String newName) - throws CoreException { - String[] names = talker.renameVertex(graph.getCoreName(), v.getCoreName(), newName); - if (names.length > 1) { - for (Vertex vv : graph.getVertices()) { - if (vv.getCoreName().equals(names[0])) { - vv.updateCoreName(names[1]); - } - } - } - v.updateCoreName(names[0]); - graph.fireStateChanged(); - return (names.length > 1) ? names[1] : null; - } -} diff --git a/gui/src/quanto/core/CoreChangeListener.java b/gui/src/quanto/core/CoreChangeListener.java deleted file mode 100644 index fa92adcd..00000000 --- a/gui/src/quanto/core/CoreChangeListener.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package quanto.core; - -import java.util.EventListener; - -/** - * - * @author alemer - */ -public interface CoreChangeListener extends EventListener { - void theoryAboutToChange(TheoryChangeEvent evt); - void theoryChanged(TheoryChangeEvent evt); -} diff --git a/gui/src/quanto/core/CoreCommunicationException.java b/gui/src/quanto/core/CoreCommunicationException.java deleted file mode 100644 index 28d42e02..00000000 --- a/gui/src/quanto/core/CoreCommunicationException.java +++ /dev/null @@ -1,29 +0,0 @@ -package quanto.core; - -/** - * Indicates an issue in communicating with the backend. - * - * Generally, exceptions of this type are unrecoverable. It indicates that - * the core process terminated, or communication with the core was disrupted - * for another reason, or the core sent invalid data - */ -public class CoreCommunicationException extends CoreException { - - private static final long serialVersionUID = 1053659906558198953L; - - public CoreCommunicationException() { - super("Failed to communicate with the core process"); - } - - public CoreCommunicationException(String msg) { - super(msg); - } - - public CoreCommunicationException(Throwable cause) { - super("Failed to communicate with the core process", cause); - } - - public CoreCommunicationException(String msg, Throwable cause) { - super(msg, cause); - } -} diff --git a/gui/src/quanto/core/CoreException.java b/gui/src/quanto/core/CoreException.java deleted file mode 100644 index 9e50b1e0..00000000 --- a/gui/src/quanto/core/CoreException.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
- */ - -package quanto.core; - -/** - * - * @author alex - */ -public class CoreException extends Exception { - private static final long serialVersionUID = 1053659906558198953L; - public CoreException() { - super(); - } - public CoreException(String msg) { - super(msg); - } - public CoreException(Throwable cause) { - super(cause); - } - public CoreException(String msg, Throwable cause) { - super(msg, cause); - } -} diff --git a/gui/src/quanto/core/CoreExecutionException.java b/gui/src/quanto/core/CoreExecutionException.java deleted file mode 100644 index 68a0f2e0..00000000 --- a/gui/src/quanto/core/CoreExecutionException.java +++ /dev/null @@ -1,25 +0,0 @@ -package quanto.core; - -/** - * Could not communicate with the backend, because it failed to start - */ -public class CoreExecutionException extends CoreException { - - private static final long serialVersionUID = 1053659906558198953L; - - public CoreExecutionException() { - super("The core process could not be executed"); - } - - public CoreExecutionException(String msg) { - super(msg); - } - - public CoreExecutionException(Throwable cause) { - super("The core process could not be executed", cause); - } - - public CoreExecutionException(String msg, Throwable cause) { - super(msg, cause); - } -} diff --git a/gui/src/quanto/core/CoreTerminatedException.java b/gui/src/quanto/core/CoreTerminatedException.java deleted file mode 100644 index 394fd64b..00000000 --- a/gui/src/quanto/core/CoreTerminatedException.java +++ /dev/null @@ -1,25 +0,0 @@ -package quanto.core; - -/** - * Could not communicate with the backend, because it has terminated - */ -public class CoreTerminatedException extends CoreCommunicationException { - - private static final long serialVersionUID = -234829037423847923L; - - public CoreTerminatedException() { - super("The core process terminated unexpectedly"); - } - - public CoreTerminatedException(String msg) { - super(msg); - } - - public CoreTerminatedException(Throwable cause) { - super("The core process terminated unexpectedly", cause); - } - - public CoreTerminatedException(String msg, Throwable cause) { - super(msg, cause); - } -} diff --git a/gui/src/quanto/core/ParseException.java b/gui/src/quanto/core/ParseException.java deleted file mode 100644 index 6c54eb9c..00000000 --- a/gui/src/quanto/core/ParseException.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package quanto.core; - -/** - * - * @author alex - */ -public class ParseException extends Exception { - private static final long serialVersionUID = 2342374892173482937L; - public ParseException() { } - public ParseException(String message) { - super(message); - } - public ParseException(Throwable cause) { - super(cause); - } - public ParseException(String message, Throwable cause) { - super(message, cause); - } -} diff --git a/gui/src/quanto/core/Ruleset.java b/gui/src/quanto/core/Ruleset.java deleted file mode 100644 index 1387da16..00000000 --- a/gui/src/quanto/core/Ruleset.java +++ /dev/null @@ -1,385 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
- */ - -package quanto.core; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; - -import javax.swing.event.ChangeListener; - -import java.util.logging.Level; -import java.util.logging.Logger; -import javax.swing.event.ChangeEvent; -import javax.swing.event.EventListenerList; - -/** - * - * @author alex - */ -// FIXME: support more detailed change events -public class Ruleset { - - private final static Logger logger = - Logger.getLogger("quanto.gui.ruleset"); - - private EventListenerList listenerList = new EventListenerList(); - - private Core core; - // rule name -> active state - private Map rules; - // tag name -> rule names - // assumption: we care about what rules are in a tag, rather than - // what tags a rule has - private Map> tags; - - public Ruleset(Core core) { - this.core = core; - } - - private void loadRules() throws CoreException { - String[] allrules = core.getTalker().listRules(); - Set activeRules = new HashSet(Arrays.asList(core.getTalker().listActiveRules())); - rules = new TreeMap(); - for (String rule : allrules) { - rules.put(rule, Boolean.valueOf(activeRules.contains(rule))); - } - } - - private void ensureRulesLoaded() throws CoreException { - if (rules == null) { - loadRules(); - } - } - - private void loadTag(String tag) throws CoreException { - ensureTagListLoaded(); - String[] tagrules = core.getTalker().listRulesByTag(tag); - if (tagrules != null && tagrules.length > 0) { - tags.put(tag, new HashSet(Arrays.asList(tagrules))); - } - } - - private void ensureTagLoaded(String tag) throws CoreException { - if (tags == null || tags.get(tag) == null) { - loadTag(tag); - } - } - - private void loadTagList() throws CoreException { - tags = new TreeMap>(); - String[] allTags = core.getTalker().listTags(); - for (String tag : allTags) { - tags.put(tag, null); - } - } - - private void ensureTagListLoaded() throws CoreException { - if (tags == null) { - loadTagList(); - } - } - - public void reload() { - tags = null; - rules = null; - fireRulesetReplaced(); - } - - public ArrayList getRuleTags(String ruleName) throws CoreException { - /* Reverse lookup: obviously inefficient because of the data structure in use.*/ - ArrayList ruleTags = new ArrayList(); - ensureTagListLoaded(); - for (String key : tags.keySet()) { - ensureTagLoaded(key); - if (tags.get(key).contains(ruleName)) ruleTags.add(key); - } - return ruleTags; - } - - public void tagRule(String ruleName, String tag) throws CoreException { - core.getTalker().tagRule(ruleName, tag); - if (tags != null) { - boolean newTag = !tags.containsKey(tag); - if (newTag) { - Set set = new HashSet(); - set.add(ruleName); - tags.put(tag, set); - } else { - ensureTagLoaded(tag); - tags.get(tag).add(ruleName); - } - fireRulesTagged(tag, Collections.singleton(ruleName), newTag); - } else { - // lazy - fireRulesetReplaced(); - } - } - - public void untagRule(String ruleName, String tag) throws CoreException { - if (tags == null || tags.containsKey(tag)) { - core.getTalker().untagRule(ruleName, tag); - if (tags != null) { - ensureTagLoaded(tag); - tags.get(tag).remove(ruleName); - if (tags.get(tag).isEmpty()) { - tags.remove(tag); - fireRulesUntagged(tag, Collections.singleton(ruleName), true); - } else { - fireRulesUntagged(tag, Collections.singleton(ruleName), false); - } - } else { - // lazy - 
fireRulesetReplaced(); - } - } - } - - public Collection getTags() throws CoreException { - ensureTagListLoaded(); - return Collections.unmodifiableCollection(tags.keySet()); - } - - public Collection getRules() throws CoreException { - ensureRulesLoaded(); - return Collections.unmodifiableCollection(rules.keySet()); - } - - public Collection getRulesByTag(String tag) throws CoreException { - ensureTagLoaded(tag); - return Collections.unmodifiableCollection(tags.get(tag)); - } - - public boolean isRuleActive(String rule) throws CoreException { - ensureRulesLoaded(); - return rules.get(rule); - } - - public void activateRulesByTag(String tag) throws CoreException { - ensureTagLoaded(tag); - Collection changedRules = tags.get(tag); - core.getTalker().activateRulesByTag(tag); - Map updated = new HashMap(changedRules.size()); - for (String rule : changedRules) { - updated.put(rule, Boolean.TRUE); - } - rules.putAll(updated); - fireRulesActiveStateChanged(updated); - } - - public void deactivateRulesByTag(String tag) throws CoreException { - ensureTagLoaded(tag); - Collection changedRules = tags.get(tag); - core.getTalker().deactivateRulesByTag(tag); - Map updated = new HashMap(changedRules.size()); - for (String rule : changedRules) { - updated.put(rule, Boolean.FALSE); - } - rules.putAll(updated); - fireRulesActiveStateChanged(updated); - } - - public void deleteRulesByTag(String tag) throws CoreException { - ensureTagLoaded(tag); - Collection removedRules = tags.get(tag); - core.getTalker().deleteRulesByTag(tag); - tags.remove(tag); - rules.keySet().removeAll(removedRules); - fireRulesRemoved(removedRules); - } - - public void activateRule(String name) throws CoreException { - core.getTalker().activateRule(name); - rules.put(name, Boolean.TRUE); - fireRulesActiveStateChanged(Collections.singletonMap(name, Boolean.TRUE)); - } - - public void deactivateRule(String name) throws CoreException { - core.getTalker().deactivateRule(name); - rules.put(name, Boolean.FALSE); - fireRulesActiveStateChanged(Collections.singletonMap(name, Boolean.FALSE)); - } - - public void activateAllRules() throws CoreException { - try { - for (String name: rules.keySet()) { - core.getTalker().activateRule(name); - rules.put(name, Boolean.TRUE); - } - fireRulesActiveStateChanged(Collections.unmodifiableMap(rules)); - } catch (CoreException ex) { - reload(); - throw ex; - } - } - - public void deactivateAllRules() throws CoreException { - try { - for (String name: rules.keySet()) { - core.getTalker().deactivateRule(name); - rules.put(name, Boolean.FALSE); - } - fireRulesActiveStateChanged(Collections.unmodifiableMap(rules)); - } catch (CoreException ex) { - reload(); - throw ex; - } - } - - public void deleteRule(String rule) throws CoreException { - core.getTalker().deleteRule(rule); - rules.remove(rule); - Iterator> it = tags.values().iterator(); - while (it.hasNext()) { - Set tagRules = it.next(); - if (tagRules != null) - tagRules.remove(rule); - } - fireRulesRemoved(Collections.singleton(rule)); - } - - public void activateRules(Collection ruleNames) throws CoreException { - if (!rules.keySet().containsAll(ruleNames)) { - throw new IllegalArgumentException("ruleNames contains unknown rules"); - } - try { - Map changes = new HashMap(); - for (String name: ruleNames) { - core.getTalker().activateRule(name); - rules.put(name, Boolean.TRUE); - changes.put(name, Boolean.TRUE); - } - fireRulesActiveStateChanged(changes); - } catch (CoreException ex) { - reload(); - throw ex; - } - } - - public void 
deactivateRules(Collection ruleNames) throws CoreException { - if (!rules.keySet().containsAll(ruleNames)) { - throw new IllegalArgumentException("ruleNames contains unknown rules"); - } - try { - Map changes = new HashMap(); - for (String name: ruleNames) { - core.getTalker().deactivateRule(name); - rules.put(name, Boolean.FALSE); - changes.put(name, Boolean.FALSE); - } - fireRulesActiveStateChanged(changes); - } catch (CoreException ex) { - reload(); - throw ex; - } - } - - public void renameRule(String oldName, String newName) throws CoreException { - if (!rules.containsKey(oldName)) { - throw new IllegalArgumentException("Unknown rule \"" + oldName + "\""); - } - core.getTalker().renameRule(oldName, newName); - Boolean active = rules.get(oldName); - rules.remove(oldName); - rules.put(newName, active); - Iterator> it = tags.values().iterator(); - while (it.hasNext()) { - Set tagRules = it.next(); - if (tagRules != null) { - tagRules.remove(oldName); - tagRules.add(newName); - } - } - fireRulesRenamed(Collections.singletonMap(oldName, newName)); - } - - public void addRulesetChangeListener(RulesetChangeListener l) { - listenerList.add(RulesetChangeListener.class, l); - } - - public void removeRulesetChangeListener(RulesetChangeListener l) { - listenerList.remove(RulesetChangeListener.class, l); - } - - protected void fireRulesAdded(Collection rules) { - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length-2; i>=0; i-=2) { - if (listeners[i]==RulesetChangeListener.class) { - ((RulesetChangeListener)listeners[i+1]).rulesAdded(this, rules); - } - } - } - - protected void fireRulesRemoved(Collection rules) { - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length-2; i>=0; i-=2) { - if (listeners[i]==RulesetChangeListener.class) { - ((RulesetChangeListener)listeners[i+1]).rulesRemoved(this, rules); - } - } - } - - protected void fireRulesRenamed(Map renaming) { - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length-2; i>=0; i-=2) { - if (listeners[i]==RulesetChangeListener.class) { - ((RulesetChangeListener)listeners[i+1]).rulesRenamed(this, renaming); - } - } - } - - protected void fireRulesActiveStateChanged(Map newState) { - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length-2; i>=0; i-=2) { - if (listeners[i]==RulesetChangeListener.class) { - ((RulesetChangeListener)listeners[i+1]).rulesActiveStateChanged(this, newState); - } - } - } - - protected void fireRulesTagged(String tag, Collection rules, boolean created) { - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length-2; i>=0; i-=2) { - if (listeners[i]==RulesetChangeListener.class) { - ((RulesetChangeListener)listeners[i+1]).rulesTagged(this, tag, rules, created); - } - } - } - - protected void fireRulesUntagged(String tag, Collection rules, boolean removed) { - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length-2; i>=0; i-=2) { - if (listeners[i]==RulesetChangeListener.class) { - ((RulesetChangeListener)listeners[i+1]).rulesUntagged(this, tag, rules, removed); - } - } - } - - protected void fireRulesetReplaced() { - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length-2; i>=0; i-=2) { - if (listeners[i]==RulesetChangeListener.class) { - ((RulesetChangeListener)listeners[i+1]).rulesetReplaced(this); - } - } - } - - void ruleAdded(String name, boolean active) { - rules.put(name, active); - 
fireRulesAdded(Collections.singleton(name)); - } - - public Core getCore() { - return this.core; - } -} diff --git a/gui/src/quanto/core/RulesetChangeListener.java b/gui/src/quanto/core/RulesetChangeListener.java deleted file mode 100644 index ff165d36..00000000 --- a/gui/src/quanto/core/RulesetChangeListener.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package quanto.core; - -import java.util.Collection; -import java.util.EventListener; -import java.util.Map; - -/** - * - * @author alemer - */ -public interface RulesetChangeListener extends EventListener { - void rulesetReplaced(Ruleset source); - void rulesAdded(Ruleset source, Collection ruleNames); - void rulesRemoved(Ruleset source, Collection ruleNames); - void rulesRenamed(Ruleset source, Map renaming); - void rulesActiveStateChanged(Ruleset source, Map newState); - /** - * Rules were tagged - * - * @param source the ruleset - * @param tag the tag that was applied - * @param ruleNames the affected rules - * @param newTag whether this is a newly-created tag - */ - void rulesTagged(Ruleset source, String tag, Collection ruleNames, boolean newTag); - /** - * Rules were untagged (but not deleted) - * - * @param source the ruleset - * @param tag the tag that was removed - * @param ruleNames the affected rules - * @param tagRemoved whether the last rule was removed (and hence the tag discarded) - */ - void rulesUntagged(Ruleset source, String tag, Collection ruleNames, boolean tagRemoved); -} diff --git a/gui/src/quanto/core/Theory.java b/gui/src/quanto/core/Theory.java deleted file mode 100644 index a52d2e70..00000000 --- a/gui/src/quanto/core/Theory.java +++ /dev/null @@ -1,413 +0,0 @@ -package quanto.core; - -import com.fasterxml.jackson.core.JsonEncoding; -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import java.awt.Color; -import java.io.File; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLEncoder; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.Map; -import javax.xml.transform.Transformer; -import javax.xml.transform.TransformerException; -import javax.xml.transform.TransformerFactory; -import javax.xml.transform.dom.DOMSource; -import javax.xml.transform.stream.StreamResult; -import org.lindenb.awt.ColorUtils; -import org.lindenb.lang.InvalidXMLException; -import org.w3c.dom.Document; -import org.xml.sax.SAXException; -import quanto.core.data.GraphElementDataType; -import quanto.core.data.SVGDocument; -import quanto.core.data.SvgVertexVisualizationData; -import quanto.core.data.VertexType; - -/** - * A theory description. - * - * Describes the vertices (including how to visualise them) of a theory, and - * links it to a core theory that actually implements it. - * - * NB: Theory objects are partly immutable in order to prevent data being - * changed without the Core being aware of it. 
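// A minimal listener sketch for the Ruleset / RulesetChangeListener pair above; the
// registration call and callback signatures come from that code, while ruleset and
// rebuildRuleList() are hypothetical stand-ins for a GUI component's own state:
//
//     ruleset.addRulesetChangeListener(new RulesetChangeListener() {
//         public void rulesetReplaced(Ruleset source) { rebuildRuleList(); }
//         public void rulesAdded(Ruleset source, Collection ruleNames) { rebuildRuleList(); }
//         public void rulesRemoved(Ruleset source, Collection ruleNames) { rebuildRuleList(); }
//         public void rulesRenamed(Ruleset source, Map renaming) { rebuildRuleList(); }
//         public void rulesActiveStateChanged(Ruleset source, Map newState) { rebuildRuleList(); }
//         public void rulesTagged(Ruleset source, String tag, Collection ruleNames, boolean newTag) { rebuildRuleList(); }
//         public void rulesUntagged(Ruleset source, String tag, Collection ruleNames, boolean tagRemoved) { rebuildRuleList(); }
//     });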
- * - * @author Alex Merry - */ -public class Theory { - - private String name; - private String coreName; - private String vertexTypePath; - private Map vertexTypes; - private Map mnemonics = new HashMap(); - - public Theory( - String coreName, - String name, - String vertexTypePath, - Collection types) { - - if (types.isEmpty()) { - throw new IllegalArgumentException("No vertex types given"); - } - if (vertexTypePath == null && types.size() != 1) { - throw new IllegalArgumentException("No vertex type path, but more than one vertex type given"); - } - - this.coreName = coreName; - this.name = name; - this.vertexTypePath = vertexTypePath; - Map vTypeMap = new HashMap(); - for (VertexType vt : types) { - vTypeMap.put(vt.getTypeName(), vt); - if (vt.getMnemonic() != null) { - this.mnemonics.put(vt.getMnemonic(), vt); - } - } - this.vertexTypes = Collections.unmodifiableMap(vTypeMap); - } - - public Theory( - String coreName, - String name, - Collection types) { - this(coreName, name, null, types); - } - - /** - * Get the vertex type from JSON vertex data. - * - * This can be used to get the vertex type data given a vertex type name as - * returned by the core. - * - * @param typeName the (core) name for a vertex type - * @return a vertex type if one exists with typeName, otherwise null - */ - public VertexType getVertexType(JsonNode data) { - if (vertexTypePath != null) { - JsonNode typeNode = data; - if (vertexTypePath.length() > 0) { - for (String fieldName : vertexTypePath.split("\\.")) { - typeNode = typeNode.path(fieldName); - } - } - if (!typeNode.isTextual()) { - throw new IllegalArgumentException("Data did not have a type selector at '" + vertexTypePath + "'"); - } - String typeName = typeNode.asText(); - return getVertexType(typeName); - } else { - return vertexTypes.values().iterator().next(); - } - } - - /** - * Get the vertex type with a given name. - * - * This can be used to get the vertex type data given a vertex type name as - * returned by the core. - * - * @param typeName the (core) name for a vertex type - * @return a vertex type if one exists with typeName, otherwise null - */ - public VertexType getVertexType(String typeName) { - VertexType type = vertexTypes.get(typeName); - if (type == null) { - throw new IllegalArgumentException("Unknown vertex type '" + typeName + "'"); - } - return type; - } - - /** - * Gets the vertex type associated with a mnemonic, if there is one. - * - * @param mnemonic the key typed by the user - * @return a vertex type, or null - */ - public VertexType getVertexTypeByMnemonic(char mnemonic) { - return mnemonics.get(mnemonic); - } - - /** - * The available vertex types. - * - * This should match the core theory's list of vertices. - * - * @return an unmodifiable collection of vertex type descriptions - */ - public Collection getVertexTypes() { - return vertexTypes.values(); - } - - /** - * The core theory that is used by this theory. - * - * @return a core theory name - */ - public String getCoreName() { - return this.coreName; - } - - /** - * The name of the theory. - * - * This is a user-presentable string. - * - * @return a name that can be presented to the user - */ - public String getName() { - return name; - } - - /** - * A string representation of the theory. 
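// A minimal construction sketch for the Theory class above; the type names, mnemonics
// and theory names are illustrative assumptions (real code would also set a label data
// type and visualisation data on each VertexType before handing it to the Theory):
//
//     VertexType x = new VertexType("X");
//     x.setMnemonic('x');
//     VertexType z = new VertexType("Z");
//     z.setMnemonic('z');
//     Theory theory = new Theory("red_green", "Red/green theory", "type",
//                                java.util.Arrays.asList(x, z));
//     VertexType byName = theory.getVertexType("X");           // IllegalArgumentException if unknown
//     VertexType byKey  = theory.getVertexTypeByMnemonic('z'); // null if no type uses that mnemonic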
- * - * @return the same as getFriendlyName() - */ - @Override - public String toString() { - return name; - } - - private static VertexType vertexTypeFromJson(URL contextUrl, String name, JsonNode node) throws ParseException { - if (!node.isObject()) - throw new ParseException("Vertex description for \"" + name + "\" was not an object"); - VertexType vt = new VertexType(name); - - JsonNode labelTypeNode = node.get("labelDataType"); - if (labelTypeNode != null && !labelTypeNode.isNull()) { - if (!labelTypeNode.isTextual()) { - throw new ParseException("'labelDataType' was not a string"); - } - String labelDataType = labelTypeNode.asText(); - if (labelDataType.equals("MathExpression")) { - vt.setDataType(new GraphElementDataType.MathsData()); - } else if (labelDataType.equals("String")) { - vt.setDataType(new GraphElementDataType.StringData()); - } else if (!labelDataType.equals("Null")) { - throw new ParseException("Unknown label data type \"" + labelDataType + "\""); - } - } - - JsonNode mnemonicNode = node.get("mnemonic"); - if (mnemonicNode != null && !mnemonicNode.isNull()) { - if (!mnemonicNode.isTextual()) { - throw new ParseException("'mnemonic' was not a string"); - } - String mnemonicStr = mnemonicNode.asText(); - if (mnemonicStr.length() != 1) { - throw new ParseException("'mnemonic' was not exactly one character long"); - } - vt.setMnemonic(mnemonicStr.charAt(0)); - } - - JsonNode visNode = node.get("visualization"); - if (visNode == null || !visNode.isObject()) { - throw new ParseException("'visualization' did not exist or was not an object"); - } - JsonNode svgNode = visNode.get("node"); - if (svgNode == null || !svgNode.isTextual()) { - throw new ParseException("'visualization.node' did not exist or was not a string"); - } - SVGDocument svgdoc; - try { - URL svgURL = new URL(contextUrl, svgNode.asText()); - svgdoc = new SVGDocument(svgURL); - } catch (MalformedURLException e) { - throw new ParseException("Malformed URL for SVG file", e); - } catch (InvalidXMLException e) { - throw new ParseException("Malformed SVG file", e); - } catch (SAXException e) { - throw new ParseException("Malformed SVG file", e); - } catch (IOException e) { - throw new ParseException("Could not open SVG file \"" + svgNode.asText() + "\"", e); - } - JsonNode labelNode = visNode.get("label"); - Color labelFill = null; - if (labelNode != null && !labelNode.isNull()) { - if (!labelNode.isObject()) { - throw new ParseException("'visualization.label' was not an object"); - } - JsonNode labelFillNode = labelNode.get("fill"); - if (labelFillNode != null && !labelFillNode.isNull()) { - if (!labelFillNode.isTextual()) { - throw new ParseException("'visualization.label.fill' was not a string"); - } - labelFill = ColorUtils.parseColor(labelFillNode.asText()); - if (labelFill == null) { - throw new ParseException("'visualization.label.fill' was not a valid colour"); - } - } - } - vt.setVisualizationData(new SvgVertexVisualizationData(svgdoc, labelFill)); - - return vt; - } - - private static Theory fromJson(URL contextUrl, JsonNode node) throws ParseException { - JsonNode nameNode = node.get("name"); - if (nameNode == null || !nameNode.isTextual()) { - throw new ParseException("Theory had no 'name' entry"); - } - - JsonNode coreNameNode = node.get("coreName"); - if (coreNameNode == null || !coreNameNode.isTextual()) { - throw new ParseException("Theory had no 'coreName' entry"); - } - - JsonNode vertexTypesNode = node.get("vertexTypes"); - if (vertexTypesNode == null || !vertexTypesNode.isObject()) { - throw new 
ParseException("Theory had no 'vertexTypes' entry"); - } - - LinkedList vtList = new LinkedList(); - Iterator> it = vertexTypesNode.fields(); - while (it.hasNext()) { - Map.Entry entry = it.next(); - vtList.add(vertexTypeFromJson(contextUrl, entry.getKey(), entry.getValue())); - } - if (vtList.isEmpty()) { - throw new ParseException("'vertexTypes' was empty"); - } - - JsonNode vertexTypePathNode = node.get("vertexTypePath"); - if (vertexTypePathNode != null && !vertexTypePathNode.isNull()) { - if (!vertexTypePathNode.isTextual()) { - throw new ParseException("'vertexTypePath' was not a string"); - } - return new Theory(coreNameNode.asText(), - nameNode.asText(), - vertexTypePathNode.asText(), - vtList); - } else { - if (vtList.size() != 1) { - throw new IllegalArgumentException("No vertex type path, but more than one vertex type given"); - } - return new Theory(coreNameNode.asText(), - nameNode.asText(), - vtList); - } - } - private static final JsonFactory jf = new JsonFactory(); - - public static Theory fromFile(File theoryFile) throws IOException, ParseException { - try { - ObjectMapper jsonMapper = new ObjectMapper(jf); - JsonNode node = jsonMapper.readTree(theoryFile); - return fromJson(theoryFile.toURI().toURL(), node); - } catch (MalformedURLException ex) { - throw new IllegalArgumentException("theoryFile cannot be converted to a URL", ex); - } - } - - public static Theory fromUrl(URL theoryFile) throws IOException, ParseException { - ObjectMapper jsonMapper = new ObjectMapper(jf); - JsonNode node = jsonMapper.readTree(theoryFile); - return fromJson(theoryFile, node); - } - - private void writeXMLResource(Document resource, File dest) throws IOException { - try { - // Use a Transformer for output - TransformerFactory tFactory = - TransformerFactory.newInstance(); - Transformer transformer = tFactory.newTransformer(); - - DOMSource source = new DOMSource(resource); - StreamResult result = new StreamResult(dest); - transformer.transform(source, result); - } catch (TransformerException ex) { - throw new IOException("Could not write out the file"); - } - } - - public void write(File theoryFile, File resourceDirectory) throws IOException { - if (!resourceDirectory.isDirectory()) { - throw new IOException("\"" + resourceDirectory.toString() + "\" is not a directory"); - } - boolean resDirIsTheoryParent = resourceDirectory.equals(theoryFile.getParentFile()); - - JsonGenerator jg = jf.createGenerator(theoryFile, JsonEncoding.UTF8); - try { - jg.writeStartObject(); - jg.writeObjectField("name", name); - jg.writeObjectField("coreName", coreName); - if (vertexTypePath != null) { - jg.writeObjectField("vertexTypePath", vertexTypePath); - } - jg.writeFieldName("vertexTypes"); - jg.writeStartObject(); - for (VertexType vt : vertexTypes.values()) { - jg.writeFieldName(vt.getTypeName()); - jg.writeStartObject(); - GraphElementDataType dt = vt.getDataType(); - if (dt != null) { - jg.writeObjectField("labelDataType", dt.getTypeName()); - } - if (vt.getMnemonic() != null) { - jg.writeObjectField("mnemonic", vt.getMnemonic().toString()); - } - jg.writeFieldName("visualization"); - jg.writeStartObject(); - SvgVertexVisualizationData svgVisData = (SvgVertexVisualizationData) vt.getVisualizationData(); - SVGDocument svgDoc = svgVisData.getSvgDocument(); - String svgFileName = encodeString(vt.getTypeName()) + ".svg"; - File svgFile = new File(resourceDirectory, svgFileName); - writeXMLResource(svgDoc.getDocument(), svgFile); - if (resDirIsTheoryParent) { - jg.writeObjectField("node", svgFileName); - } else 
{ - jg.writeObjectField("node", svgFile.toURI()); - } - if (svgVisData.getLabelColour() != null) { - jg.writeFieldName("label"); - jg.writeStartObject(); - Color fill = svgVisData.getLabelColour(); - jg.writeObjectField("fill", colorToString(fill)); - jg.writeEndObject(); // label - } - jg.writeEndObject(); // visualization - jg.writeEndObject(); - } - jg.writeEndObject(); // vertexTypes - jg.writeEndObject(); // root - } finally { - jg.close(); - } - } - - private String encodeString(String str) { - try { - return URLEncoder.encode(str, "US-ASCII"); - } catch (UnsupportedEncodingException ex) { - throw new Error(ex); - } - } - - private String toTwoDigitHexString(int i) { - String str = Integer.toHexString(i); - if (str.length() == 1) { - return "0" + str; - } - return str; - } - - private String colorToString(Color color) { - StringBuilder str = new StringBuilder(7); - str.append('#'); - str.append(toTwoDigitHexString(color.getRed())); - str.append(toTwoDigitHexString(color.getGreen())); - str.append(toTwoDigitHexString(color.getBlue())); - return str.toString(); - } -} diff --git a/gui/src/quanto/core/TheoryChangeEvent.java b/gui/src/quanto/core/TheoryChangeEvent.java deleted file mode 100644 index c82ba826..00000000 --- a/gui/src/quanto/core/TheoryChangeEvent.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package quanto.core; - -import java.util.EventObject; - -/** - * - * @author alemer - */ -public class TheoryChangeEvent extends EventObject { - private final Theory oldTheory; - private final Theory newTheory; - - public TheoryChangeEvent(Object source, Theory oldTheory, Theory newTheory) { - super(source); - this.oldTheory = oldTheory; - this.newTheory = newTheory; - } - - public Theory getOldTheory() { - return oldTheory; - } - - public Theory getNewTheory() { - return newTheory; - } -} diff --git a/gui/src/quanto/core/data/AttachedRewrite.java b/gui/src/quanto/core/data/AttachedRewrite.java deleted file mode 100644 index c80f3cef..00000000 --- a/gui/src/quanto/core/data/AttachedRewrite.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
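// For reference, a sketch of the theory file layout handled by Theory.fromJson and
// Theory.write above; every concrete value (names, mnemonic, SVG path, fill colour)
// is an illustrative assumption:
//
//     {
//       "name": "Red/green theory",
//       "coreName": "red_green",
//       "vertexTypePath": "type",
//       "vertexTypes": {
//         "X": {
//           "labelDataType": "MathExpression",
//           "mnemonic": "x",
//           "visualization": {
//             "node": "x-node.svg",
//             "label": { "fill": "#ff0000" }
//           }
//         }
//       }
//     }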
- */ - -package quanto.core.data; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import quanto.core.ParseException; - -/** - * - * @author alemer - */ -public class AttachedRewrite { - private CoreGraph graph; - private int index; - private Rule rule; - private CoreGraph newGraph; - - public AttachedRewrite(CoreGraph graph, int index, Rule rule, CoreGraph newGraph) { - this.graph = graph; - this.index = index; - this.rule = rule; - this.newGraph = newGraph; - } - - public CoreGraph getGraph() { - return graph; - } - - public int getIndex() { - return index; - } - - public String getRuleName() { - return rule.getCoreName(); - } - - public CoreGraph getLhs() { - return rule.getLhs(); - } - - public CoreGraph getRhs() { - return rule.getRhs(); - } - - public CoreGraph getNewGraph() { - return newGraph; - } - - public Collection getRemovedVertices() { - Set newVNames = getNewGraph().getVertexMap().keySet(); - Map vmap = getGraph().getVertexMap(); - Set hlVNames = new HashSet(vmap.keySet()); - hlVNames.removeAll(newVNames); - List newVerts = new ArrayList(hlVNames.size()); - for (String vname : hlVNames) { - Vertex v = vmap.get(vname); - if (v != null) - newVerts.add(v); - } - return newVerts; - } - - public static AttachedRewrite fromJson(CoreGraph graph, int index, JsonNode node) throws ParseException { - if (!node.isObject()) - throw new ParseException("Expected object"); - - JsonNode ruleNode = node.get("rule"); - if (ruleNode == null || ruleNode.isNull()) - throw new ParseException("No rhs given for rule"); - Rule rule = Rule.fromJson(graph.getTheory(), ruleNode); - - JsonNode newGraphNode = node.get("rewritten_graph"); - if (newGraphNode == null || newGraphNode.isNull()) - throw new ParseException("No rhs given for rule"); - CoreGraph newGraph = CoreGraph.fromJson(graph.getTheory(), null, newGraphNode); - - return new AttachedRewrite(graph, index, rule, newGraph); - } -} diff --git a/gui/src/quanto/core/data/BangBox.java b/gui/src/quanto/core/data/BangBox.java deleted file mode 100644 index cfa1c74d..00000000 --- a/gui/src/quanto/core/data/BangBox.java +++ /dev/null @@ -1,95 +0,0 @@ -package quanto.core.data; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import quanto.core.ParseException; -import quanto.core.Theory; - - -/** - * A bang box - * - * @author alemer - */ -public class BangBox extends GraphElement { - public BangBox(String name) { - super(name); - } - - public BangBox() { - this(null); - } - - public static class BangBoxData - { - public BangBox bangBox; - public String parent; - public Collection contents; - } - - public static BangBoxData fromJson(Theory theory, JsonNode node) throws ParseException { - if (!node.isObject()) - throw new ParseException("Expected object"); - - JsonNode nameNode = node.get("name"); - if (nameNode == null || !nameNode.isTextual()) - throw new ParseException("Standalone BangBox had no name"); - - BangBox BangBox = new BangBox(nameNode.textValue()); - return BangBox.updateFromJson(theory, node); - } - - BangBoxData updateFromJson(Theory theory, JsonNode node) throws ParseException { - if (!node.isObject()) - throw new ParseException("Expected object"); - - JsonNode nameNode = node.get("name"); - if (nameNode != null && nameNode.isTextual()) - 
updateCoreName(nameNode.asText()); - - JsonNode dataNode = node.get("data"); - // FIXME: BangBox data - - JsonNode annotationNode = node.get("annotation"); - if (annotationNode != null && annotationNode.isObject()) { - ObjectMapper mapper = new ObjectMapper(); - setUserData(mapper.>convertValue( - annotationNode, - mapper.getTypeFactory().constructMapType( - HashMap.class, String.class, String.class))); - } - - BangBoxData bbd = new BangBoxData(); - bbd.bangBox = this; - - JsonNode parentNode = node.get("parent"); - if (parentNode != null && !parentNode.isNull()) { - if (!parentNode.isTextual()) - throw new ParseException("BangBox parent was not a string"); - bbd.parent = parentNode.asText(); - } - - JsonNode contentsNode = node.get("contents"); - if (contentsNode != null && !contentsNode.isNull()) { - if (!contentsNode.isArray()) - throw new ParseException("BangBox contents was not an array"); - - ObjectMapper mapper = new ObjectMapper(); - bbd.contents = mapper.convertValue(contentsNode, - mapper.getTypeFactory().constructCollectionType( - Collection.class, String.class)); - } else { - bbd.contents = Collections.emptySet(); - } - - return bbd; - } - - static BangBoxData fromJson(Theory theory, String name, JsonNode desc) throws ParseException { - BangBox BangBox = new BangBox(name); - return BangBox.updateFromJson(theory, desc); - } -} diff --git a/gui/src/quanto/core/data/CoreGraph.java b/gui/src/quanto/core/data/CoreGraph.java deleted file mode 100644 index e6876dac..00000000 --- a/gui/src/quanto/core/data/CoreGraph.java +++ /dev/null @@ -1,288 +0,0 @@ -package quanto.core.data; - - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.MissingNode; -import edu.uci.ics.jung.contrib.graph.DirectedSparseBangBoxMultigraph; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.swing.event.ChangeEvent; -import javax.swing.event.ChangeListener; - -import edu.uci.ics.jung.visualization.util.ChangeEventSupport; -import java.util.Iterator; -import quanto.core.ParseException; -import quanto.core.Theory; - -public class CoreGraph extends DirectedSparseBangBoxMultigraph -implements CoreObject, ChangeEventSupport { - - private static final long serialVersionUID = -1519901566511300787L; - private Theory theory; - private String name; - private final Set changeListeners; - private Map userData = new HashMap(); - - private String fileName = null; // defined if this graph is backed by a file - private boolean saved = true; // true if this graph has been modified since last saved - - public CoreGraph(Theory theory, String name) { - this.theory = theory; - this.name = name; - this.changeListeners = Collections.synchronizedSet( - new HashSet()); - } - - /** - * Use this constructor for unnamed graphs. The idea is you - * should do null checks before sending the name to the core. 
- */ - public CoreGraph(Theory theory) { - this(theory, null); - } - - public Theory getTheory() { - return theory; - } - - public Map getVertexMap() { - Map verts = - new HashMap(); - for (Vertex v : getVertices()) { - verts.put(v.getCoreName(), v); - } - return verts; - } - - public Map getEdgeMap() { - Map edges = - new HashMap(); - for (Edge e : getEdges()) { - edges.put(e.getCoreName(), e); - } - return edges; - } - - public Map getBangBoxMap() { - Map bbs = - new HashMap(); - for (BangBox bb : getBangBoxes()) { - bbs.put(bb.getCoreName(), bb); - } - return bbs; - } - - public String getCoreName() { - return name; - } - - public void updateCoreName(String name) { - this.name = name; - } - - public String getFileName() { - return fileName; - } - - public void setFileName(String fileName) { - this.fileName = fileName; - } - - public boolean isSaved() { - return saved; - } - - public void setSaved(boolean saved) { - this.saved = saved; - } - - public Map getUserData() { - return Collections.unmodifiableMap(userData); - } - - public void setUserData(Map map) { - userData = new HashMap(map); - } - - public String getUserDataEntry(String k) { - return userData.get(k); - } - - public void setUserDataEntry(String k, String v) { - userData.put(k, v); - } - - public void addChangeListener(ChangeListener l) { - changeListeners.add(l); - } - - public void fireStateChanged() { - this.saved = false; // we have changed the graph so it needs to be saved - // note that if this needs to be TRUE it will be set elsewhere - synchronized (changeListeners) { - ChangeEvent evt = new ChangeEvent(this); - for (ChangeListener l : changeListeners) { - l.stateChanged(evt); - } - } - } - - public ChangeListener[] getChangeListeners() { - return changeListeners.toArray(new ChangeListener[changeListeners.size()]); - } - - public void removeChangeListener(ChangeListener l) { - changeListeners.remove(l); - } - - public void updateGraph(CoreGraph new_graph) { - synchronized (this) { - for (Vertex v: new_graph.getVertices()) { - addVertex(v); - } - } - } - - private void verticesFromJson(JsonNode node, boolean isWv, Map oldVMap, Map newVMap) throws ParseException { - if (node == null || node.isNull()) - return; - if (node.isArray()) { - for (JsonNode entry : node) { - if (!entry.isTextual()) - throw new ParseException("vertex list contained something that was not a string"); - String vname = entry.asText(); - Vertex v = oldVMap.get(vname); - if (v != null) { - v.updateFromJson(theory, isWv, MissingNode.getInstance()); - oldVMap.remove(vname); - } else { - v = Vertex.fromJson(theory, vname, isWv, MissingNode.getInstance()); - addVertex(v); - } - newVMap.put(vname, v); - } - } else if (node.isObject()) { - Iterator> it = node.fields(); - while (it.hasNext()) { - Map.Entry entry = it.next(); - String vname = entry.getKey(); - Vertex v = oldVMap.get(vname); - if (v != null) { - v.updateFromJson(theory, isWv, entry.getValue()); - oldVMap.remove(vname); - } else { - v = Vertex.fromJson(theory, vname, isWv, entry.getValue()); - addVertex(v); - } - newVMap.put(vname, v); - } - } else { - throw new ParseException("Vertex list was neither an object nor an array"); - } - } - - private void edgesFromJson(JsonNode node, boolean isDirected, Map oldEMap, Map newVMap) throws ParseException { - if (node == null || node.isNull()) - return; - if (!node.isObject()) - throw new ParseException("Edge list was neither an object nor an array"); - Iterator> it = node.fields(); - while (it.hasNext()) { - Map.Entry entry = it.next(); - Edge e = 
oldEMap.get(entry.getKey()); - Edge.EdgeData ed; - if (e != null) { - ed = e.updateFromJson(theory, isDirected, entry.getValue()); - if (getSource(e) != newVMap.get(ed.source) || getDest(e) != newVMap.get(ed.target)) { - removeEdge(e); - addEdge(ed.edge, newVMap.get(ed.source), newVMap.get(ed.target)); - } - oldEMap.remove(entry.getKey()); - } else { - ed = Edge.fromJson(theory, entry.getKey(), isDirected, entry.getValue()); - if (!newVMap.containsKey(ed.source)) - throw new ParseException("Source of edge " + entry.getKey() + " does not exist"); - if (!newVMap.containsKey(ed.target)) - throw new ParseException("Target of edge " + entry.getKey() + " does not exist"); - addEdge(ed.edge, newVMap.get(ed.source), newVMap.get(ed.target)); - } - } - } - - private void bangBoxesFromJson(JsonNode node, Map oldBBMap, Map newVMap) throws ParseException { - if (node == null || node.isNull()) - return; - if (!node.isObject()) - throw new ParseException("Bang box list was neither an object nor an array"); - Iterator> it = node.fields(); - while (it.hasNext()) { - Map.Entry entry = it.next(); - BangBox bb = oldBBMap.get(entry.getKey()); - BangBox.BangBoxData bbd; - if (bb != null) { - bbd = bb.updateFromJson(theory, entry.getValue()); - oldBBMap.remove(entry.getKey()); - } else { - bbd = BangBox.fromJson(theory, entry.getKey(), entry.getValue()); - addBangBox(bbd.bangBox, Collections.emptySet()); - } - ArrayList contents = new ArrayList(bbd.contents.size()); - for (String item : bbd.contents) { - contents.add(newVMap.get(item)); - } - setBoxedVertices(bbd.bangBox, contents); - // FIXME: parents - } - } - - public void updateFromJson(JsonNode node) throws ParseException { - if (!node.isObject()) - throw new ParseException("Expected object"); - - Map oldVMap = getVertexMap(); - Map newVMap = getVertexMap(); - Map oldEMap = getEdgeMap(); - Map oldBBMap = getBangBoxMap(); - - verticesFromJson(node.get("wire_vertices"), true, oldVMap, newVMap); - verticesFromJson(node.get("node_vertices"), false, oldVMap, newVMap); - edgesFromJson(node.get("dir_edges"), true, oldEMap, newVMap); - edgesFromJson(node.get("undir_edges"), false, oldEMap, newVMap); - bangBoxesFromJson(node.get("bang_boxes"), oldBBMap, newVMap); - - for (BangBox b : oldBBMap.values()) { - removeBangBox(b); - } - for (Edge e : oldEMap.values()) { - removeEdge(e); - } - for (Vertex v : oldVMap.values()) { - removeVertex(v); - } - - JsonNode dataNode = node.get("data"); - // FIXME: Graph data - - JsonNode annotationNode = node.get("annotation"); - if (annotationNode != null && annotationNode.isObject()) { - ObjectMapper mapper = new ObjectMapper(); - setUserData(mapper.>convertValue( - annotationNode, - mapper.getTypeFactory().constructMapType( - HashMap.class, String.class, String.class))); - } - } - - public static CoreGraph fromJson(Theory theory, String name, JsonNode node) throws ParseException { - CoreGraph graph = new CoreGraph(theory, name); - graph.updateFromJson(node); - return graph; - } -} diff --git a/gui/src/quanto/core/data/CoreObject.java b/gui/src/quanto/core/data/CoreObject.java deleted file mode 100644 index e16ca674..00000000 --- a/gui/src/quanto/core/data/CoreObject.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
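// For reference, a sketch of the graph JSON consumed by CoreGraph.updateFromJson above,
// assuming a theory whose vertexTypePath is "type"; all names and data values are
// illustrative. Wire vertices may be given as a plain array, and directedness is implied
// by the section ("dir_edges" vs "undir_edges") rather than a per-edge flag:
//
//     {
//       "wire_vertices": ["w0"],
//       "node_vertices": { "v0": { "data": { "type": "X", "label": "\\alpha" } } },
//       "dir_edges": { "e0": { "src": "w0", "tgt": "v0" } },
//       "undir_edges": {},
//       "bang_boxes": { "bb0": { "contents": ["v0"] } },
//       "annotation": {}
//     }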
- */ - -package quanto.core.data; - -import java.util.Comparator; -import java.util.Map; - -/** - * - * @author alex - */ -public interface CoreObject { - public String getCoreName(); - public void updateCoreName(String name); - public Map getUserData(); - public void setUserData(Map map); - public String getUserDataEntry(String k); - public void setUserDataEntry(String k, String v); - - /** - * Comparator for instances of HasName - */ - public static class NameComparator implements Comparator { - public int compare(CoreObject o1, CoreObject o2) { - if (o1 == null) { - return (o2 == null) ? 0 : -1; - } - return o1.getCoreName().compareTo(o2.getCoreName()); - } - } -} diff --git a/gui/src/quanto/core/data/Edge.java b/gui/src/quanto/core/data/Edge.java deleted file mode 100644 index bf31fcfa..00000000 --- a/gui/src/quanto/core/data/Edge.java +++ /dev/null @@ -1,106 +0,0 @@ -package quanto.core.data; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import java.util.HashMap; -import quanto.core.ParseException; -import quanto.core.Theory; - -/** - * An edge - * - * @author alemer - */ -public class Edge extends GraphElement { - private boolean directed; - - public Edge(String name, boolean directed) { - super(name); - this.directed = directed; - } - - public void setDirected(boolean directed) { - this.directed = directed; - } - - public boolean isDirected() { - return directed; - } - - public static class EdgeData - { - public Edge edge; - public String source; - public String target; - } - - public EdgeData updateFromJson(Theory theory, JsonNode node) throws ParseException { - if (!node.isObject()) - throw new ParseException("Expected object"); - - JsonNode nameNode = node.get("name"); - if (nameNode != null && nameNode.isTextual()) - updateCoreName(nameNode.asText()); - - JsonNode dirNode = node.get("is_directed"); - if (dirNode == null || !dirNode.isBoolean()) - throw new ParseException("Standalone edge had no 'is_directed' property"); - - return updateFromJson(theory, dirNode.asBoolean(), node); - } - - public static EdgeData fromJson(Theory theory, JsonNode node) throws ParseException { - if (!node.isObject()) - throw new ParseException("Expected object"); - - JsonNode nameNode = node.get("name"); - if (nameNode == null || !nameNode.isTextual()) - throw new ParseException("Standalone edge had no name"); - - JsonNode dirNode = node.get("is_directed"); - if (dirNode == null || !dirNode.isBoolean()) - throw new ParseException("Standalone edge had no 'is_directed' property"); - - Edge edge = new Edge(nameNode.textValue(), dirNode.asBoolean()); - return edge.updateFromJson(theory, dirNode.asBoolean(), node); - } - - EdgeData updateFromJson(Theory theory, boolean isDirected, JsonNode node) throws ParseException { - if (!node.isObject()) - throw new ParseException("Expected object"); - - directed = isDirected; - - JsonNode dataNode = node.get("data"); - // FIXME: edge data - - JsonNode annotationNode = node.get("annotation"); - if (annotationNode != null && annotationNode.isObject()) { - ObjectMapper mapper = new ObjectMapper(); - setUserData(mapper.>convertValue( - annotationNode, - mapper.getTypeFactory().constructMapType( - HashMap.class, String.class, String.class))); - } - - EdgeData ed = new EdgeData(); - ed.edge = this; - - JsonNode srcNode = node.get("src"); - if (srcNode == null || !srcNode.isTextual()) - throw new ParseException("Edge had no 'src' property"); - ed.source = srcNode.asText(); - - JsonNode tgtNode = node.get("tgt"); - 
if (tgtNode == null || !tgtNode.isTextual()) - throw new ParseException("Edge had no 'tgt' property"); - ed.target = tgtNode.asText(); - - return ed; - } - - static EdgeData fromJson(Theory theory, String name, boolean isDirected, JsonNode desc) throws ParseException { - Edge edge = new Edge(name, isDirected); - return edge.updateFromJson(theory, isDirected, desc); - } -} diff --git a/gui/src/quanto/core/data/GraphElement.java b/gui/src/quanto/core/data/GraphElement.java deleted file mode 100644 index 096f3f4a..00000000 --- a/gui/src/quanto/core/data/GraphElement.java +++ /dev/null @@ -1,62 +0,0 @@ -package quanto.core.data; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -public class GraphElement implements CoreObject, Comparable { - protected GraphElementData data; - protected String coreName; - private Map userData = new HashMap(); - - public GraphElement(String name) { - this.coreName = name; - } - - public String getCoreName() { - return coreName; - } - - public void updateCoreName(String name) { - this.coreName = name; - } - - public GraphElementData getData() { - return data; - } - - public void setData(GraphElementData data) { - this.data = data; - } - - public int compareTo(GraphElement o) { - if (coreName == null) { - if (o.coreName == null) - return 0; - else - return -o.coreName.compareTo(coreName); - } - return coreName.compareTo(o.coreName); - } - - public Map getUserData() { - return Collections.unmodifiableMap(userData); - } - - public void setUserData(Map map) { - userData = new HashMap(map); - } - - public String getUserDataEntry(String k) { - return userData.get(k); - } - - public void setUserDataEntry(String k, String v) { - userData.put(k, v); - } - - @Override - public String toString() { - return coreName == null ? "" : coreName; - } -} diff --git a/gui/src/quanto/core/data/GraphElementData.java b/gui/src/quanto/core/data/GraphElementData.java deleted file mode 100644 index 9093d5e4..00000000 --- a/gui/src/quanto/core/data/GraphElementData.java +++ /dev/null @@ -1,26 +0,0 @@ -package quanto.core.data; - -public class GraphElementData { - private String value; - - public GraphElementData(String value) { - this.value = value; - } - - public void setString(String value) { - this.value = value; - } - - public String getEditableString() { - return value == null ? "" : value; - } - - public String getDisplayString() { - return getEditableString(); - } - - @Override - public String toString() { - return getEditableString(); - } -} diff --git a/gui/src/quanto/core/data/GraphElementDataType.java b/gui/src/quanto/core/data/GraphElementDataType.java deleted file mode 100644 index 9eca32a4..00000000 --- a/gui/src/quanto/core/data/GraphElementDataType.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
- */ -package quanto.core.data; - -import com.fasterxml.jackson.databind.JsonNode; -import quanto.core.ParseException; - -/** - * - * @author alemer - */ -public abstract class GraphElementDataType { - protected String findLabel(JsonNode node) throws ParseException { - node = node.path("label"); - if (!node.isTextual()) - throw new ParseException("Expected string value"); - return node.asText(); - } - public abstract String getTypeName(); - - public abstract GraphElementData parseData(JsonNode node) throws ParseException; - - public static class StringData extends GraphElementDataType { - @Override - public GraphElementData parseData(JsonNode node) throws ParseException { - return new GraphElementData(findLabel(node)); - } - - @Override - public String getTypeName() { - return "String"; - } - } - - public static class MathsData extends GraphElementDataType { - @Override - public GraphElementData parseData(JsonNode node) throws ParseException { - return new GraphElementMathsData(findLabel(node)); - } - - @Override - public String getTypeName() { - return "MathExpression"; - } - } -} diff --git a/gui/src/quanto/core/data/GraphElementMathsData.java b/gui/src/quanto/core/data/GraphElementMathsData.java deleted file mode 100644 index 6848e8e5..00000000 --- a/gui/src/quanto/core/data/GraphElementMathsData.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package quanto.core.data; - -/** - * - * @author alemer - */ -public class GraphElementMathsData extends GraphElementData { - - public GraphElementMathsData(String value) { - super(value); - } - - @Override - public String getDisplayString() { - return TexConstants.translate(getEditableString()); - } -} diff --git a/gui/src/quanto/core/data/Rule.java b/gui/src/quanto/core/data/Rule.java deleted file mode 100644 index 6e07e5b0..00000000 --- a/gui/src/quanto/core/data/Rule.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
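// A small sketch of how a vertex label travels from JSON to display text via the data
// types above (the JSON literal is an illustrative assumption):
//
//     JsonNode data = new com.fasterxml.jackson.databind.ObjectMapper()
//             .readTree("{\"label\": \"\\\\alpha + \\\\beta\"}");
//     GraphElementData d = new GraphElementDataType.MathsData().parseData(data);
//     d.getEditableString();   // "\alpha + \beta", as stored
//     d.getDisplayString();    // "α + β", after TexConstants.translate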
- */ - -package quanto.core.data; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import quanto.core.ParseException; -import quanto.core.Theory; - -/** - * - * @author alemer - */ -public class Rule implements CoreObject { - private String name; - private CoreGraph lhs; - private CoreGraph rhs; - private Map userData = new HashMap(); - - public Rule(String name, CoreGraph lhs, CoreGraph rhs) { - this.name = name; - this.lhs = lhs; - this.rhs = rhs; - } - - public Rule() { - } - - public String getCoreName() { - return name; - } - - public void updateCoreName(String name) { - this.name = name; - } - - public CoreGraph getLhs() { - return lhs; - } - - public CoreGraph getRhs() { - return rhs; - } - - public Map getUserData() { - return Collections.unmodifiableMap(userData); - } - - public void setUserData(Map map) { - userData = new HashMap(map); - } - - public String getUserDataEntry(String k) { - return userData.get(k); - } - - public void setUserDataEntry(String k, String v) { - userData.put(k, v); - } - - public void updateFromJson(Theory theory, JsonNode node) throws ParseException { - if (!node.isObject()) - throw new ParseException("Expected object"); - - JsonNode nameNode = node.get("name"); - if (nameNode != null && nameNode.isTextual()) - updateCoreName(nameNode.asText()); - - JsonNode lhsNode = node.get("lhs"); - if (lhsNode == null || lhsNode.isNull()) - throw new ParseException("No lhs given for rule"); - if (lhs != null) { - lhs.updateFromJson(lhsNode); - } else { - lhs = CoreGraph.fromJson(theory, null, lhsNode); - } - - JsonNode rhsNode = node.get("rhs"); - if (rhsNode == null || rhsNode.isNull()) - throw new ParseException("No rhs given for rule"); - if (rhs != null) { - rhs.updateFromJson(rhsNode); - } else { - rhs = CoreGraph.fromJson(theory, null, rhsNode); - } - - JsonNode annotationNode = node.get("annotation"); - if (annotationNode != null && annotationNode.isObject()) { - ObjectMapper mapper = new ObjectMapper(); - setUserData(mapper.>convertValue( - annotationNode, - mapper.getTypeFactory().constructMapType( - HashMap.class, String.class, String.class))); - } - } - - public static Rule fromJson(Theory theory, JsonNode node) throws ParseException { - if (!node.isObject()) - throw new ParseException("Expected object"); - - JsonNode nameNode = node.get("name"); - if (nameNode == null || !nameNode.isTextual()) - throw new ParseException("Standalone rule had no name"); - - Rule rule = new Rule(); - rule.updateFromJson(theory, node); - return rule; - } - - static Rule fromJson(Theory theory, String name, JsonNode desc) throws ParseException { - Rule rule = new Rule(); - rule.name = name; - rule.updateFromJson(theory, desc); - return rule; - } -} diff --git a/gui/src/quanto/core/data/SVGDocument.java b/gui/src/quanto/core/data/SVGDocument.java deleted file mode 100644 index 671a4ddd..00000000 --- a/gui/src/quanto/core/data/SVGDocument.java +++ /dev/null @@ -1,288 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
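// For reference, a sketch of the rule JSON parsed by Rule.fromJson above; the rule name
// is an illustrative assumption and the graph bodies (elided here) follow the CoreGraph
// JSON layout sketched earlier:
//
//     {
//       "name": "spider",
//       "lhs": { "node_vertices": { ... }, "dir_edges": { ... } },
//       "rhs": { "node_vertices": { ... } },
//       "annotation": {}
//     }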
- */ -package quanto.core.data; - -import java.awt.Graphics2D; -import java.awt.RenderingHints; -import java.awt.Shape; -import java.awt.geom.AffineTransform; -import java.awt.geom.Ellipse2D; -import java.awt.geom.Line2D; -import java.awt.geom.Rectangle2D; -import java.awt.image.BufferedImage; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.net.URL; -import javax.swing.Icon; -import javax.swing.ImageIcon; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import org.lindenb.awt.Dimension2D; -import org.lindenb.lang.InvalidXMLException; -import org.lindenb.svg.SVGRenderer; -import org.lindenb.svg.SVGUtils; -import org.w3c.dom.Attr; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.w3c.dom.NamedNodeMap; -import org.w3c.dom.Node; -import org.xml.sax.SAXException; - -/** - * - * @author alex - */ -public class SVGDocument { - - private Document document; - private SVGRenderer renderer = new SVGRenderer(); - - private void testParse() throws InvalidXMLException { - BufferedImage img = new BufferedImage(24, 24, BufferedImage.TYPE_INT_ARGB); - renderer.paint(img.createGraphics(), document, - new Rectangle2D.Double(0, 0, img.getWidth(), img.getHeight())); - } - - public SVGDocument(Document document) throws InvalidXMLException { - this.document = document; - testParse(); - } - - private static DocumentBuilder createDocumentBuilder() { - try { - DocumentBuilderFactory domFactory = DocumentBuilderFactory.newInstance(); - domFactory.setCoalescing(true); - domFactory.setExpandEntityReferences(true); - domFactory.setIgnoringComments(true); - domFactory.setNamespaceAware(true); - domFactory.setValidating(false); - // we turn off external DTD loading, since that slows things right down - domFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false); - domFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false); - return domFactory.newDocumentBuilder(); - } catch (ParserConfigurationException ex) { - throw new Error(ex); - } - - } - - public SVGDocument(File file) throws SAXException, IOException, InvalidXMLException { - DocumentBuilder domBuilder = createDocumentBuilder(); - document = domBuilder.parse(file); - testParse(); - } - - public SVGDocument(URL url) throws SAXException, IOException, InvalidXMLException { - DocumentBuilder domBuilder = createDocumentBuilder(); - InputStream is = url.openStream(); - try { - document = domBuilder.parse(is, url.toExternalForm()); - } finally { - is.close(); - } - testParse(); - } - - public SVGDocument(String uri) throws SAXException, IOException, InvalidXMLException { - DocumentBuilder domBuilder = createDocumentBuilder(); - document = domBuilder.parse(uri); - testParse(); - } - - public Icon createIcon() { - Dimension2D size = getSize(); - return createIcon((int)Math.ceil(size.getWidth()), - (int)Math.ceil(size.getHeight())); - } - - public Icon createIcon(int width, int height) { - try { - BufferedImage img = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); - Graphics2D g = img.createGraphics(); - g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); - renderer.paint(g, document, - new Rectangle2D.Double(0, 0, width, height)); - return new ImageIcon(img); - } catch (InvalidXMLException err) { - // this should already have been caught at constructor time - throw new 
IllegalStateException(err); - } - } - - public Dimension2D getSize() { - try { - Element svgRoot = document.getDocumentElement(); - Attr viewBoxAttr = svgRoot.getAttributeNode("viewBox"); - if (viewBoxAttr != null) { - String tokens[] = viewBoxAttr.getValue().trim().split("[ \t\n]+"); - Rectangle2D viewBox = new Rectangle2D.Double( - Double.parseDouble(tokens[0]), - Double.parseDouble(tokens[1]), - Double.parseDouble(tokens[2]), - Double.parseDouble(tokens[3])); - return new Dimension2D.Double(viewBox.getWidth(), viewBox.getHeight()); - } - return SVGUtils.getSize(svgRoot); - } catch (InvalidXMLException ex) { - throw new IllegalStateException(ex); - } - } - - public Rectangle2D getBounds() { - try { - Element svgRoot = document.getDocumentElement(); - Attr viewBoxAttr = svgRoot.getAttributeNode("viewBox"); - if (viewBoxAttr != null) { - String tokens[] = viewBoxAttr.getValue().trim().split("[ \t\n]+"); - return new Rectangle2D.Double( - Double.parseDouble(tokens[0]), - Double.parseDouble(tokens[1]), - Double.parseDouble(tokens[2]), - Double.parseDouble(tokens[3])); - } else { - Dimension2D dim = SVGUtils.getSize(svgRoot); - return new Rectangle2D.Double( - 0, - 0, - dim.getWidth(), - dim.getHeight()); - } - } catch (InvalidXMLException ex) { - throw new IllegalStateException(ex); - } - } - - /** - * Get the shape of an element - * - * Not all elements are supported. In particular, this will return null - * if the element is a text element. - * - * @param elementID the XML ID of the element - * @return the element shape, or null if there was no such element or it - * did not have a shape - */ - public Shape getElementShape(String elementID) { - Element e = document.getElementById(elementID); - if (e == null) { - return null; - } - - String shapeName = e.getLocalName(); - Shape shape = null; - if (shapeName.equals("path")) { - Attr d = e.getAttributeNode("d"); - if (d != null) { - shape = SVGUtils.pathToShape(d.getValue()); - } - } else if (shapeName.equals("polyline")) { - Attr points = e.getAttributeNode("points"); - if (points != null) { - shape = SVGUtils.polylineToShape(points.getValue()); - } - } else if (shapeName.equals("polygon")) { - Attr points = e.getAttributeNode("points"); - if (points != null) { - shape = SVGUtils.polygonToShape(points.getValue()); - } - } else if (shapeName.equals("rect")) { - - Attr x = e.getAttributeNode("x"); - Attr y = e.getAttributeNode("y"); - Attr w = e.getAttributeNode("width"); - Attr h = e.getAttributeNode("height"); - if (x != null && y != null && w != null && h != null) { - shape = new Rectangle2D.Double( - Double.parseDouble(x.getValue()), - Double.parseDouble(y.getValue()), - Double.parseDouble(w.getValue()), - Double.parseDouble(h.getValue())); - } - } else if (shapeName.equals("line")) { - Attr x1 = e.getAttributeNode("x1"); - Attr y1 = e.getAttributeNode("y1"); - Attr x2 = e.getAttributeNode("x2"); - Attr y2 = e.getAttributeNode("y2"); - if (x1 != null && y1 != null && x2 != null && y2 != null) { - shape = new Line2D.Double( - Double.parseDouble(x1.getValue()), - Double.parseDouble(y1.getValue()), - Double.parseDouble(x2.getValue()), - Double.parseDouble(y2.getValue())); - } - } else if (shapeName.equals("circle")) { - Attr cx = e.getAttributeNode("cx"); - Attr cy = e.getAttributeNode("cy"); - Attr r = e.getAttributeNode("r"); - if (cx != null && cy != null && r != null) { - double radius = Double.parseDouble(r.getValue()); - shape = new Ellipse2D.Double( - Double.parseDouble(cx.getValue()) - radius, - Double.parseDouble(cy.getValue()) - 
radius, - radius * 2, - radius * 2); - } - } else if (shapeName.equals("ellipse")) { - Attr cx = e.getAttributeNode("cx"); - Attr cy = e.getAttributeNode("cy"); - Attr rx = e.getAttributeNode("rx"); - Attr ry = e.getAttributeNode("ry"); - if (cx != null && cy != null && rx != null && ry != null) { - double radiusx = Double.parseDouble(rx.getValue()); - double radiusy = Double.parseDouble(ry.getValue()); - shape = new Ellipse2D.Double( - Double.parseDouble(cx.getValue()) - radiusx, - Double.parseDouble(cy.getValue()) - radiusy, - radiusx * 2, - radiusy * 2); - } - } - if (shape == null) { - return null; - } - - AffineTransform transform = new AffineTransform(); - applyAllTransforms(e, transform); - return transform.createTransformedShape(shape); - } - - private void applyAllTransforms(Element e, AffineTransform a) { - Node parent = e.getParentNode(); - if (parent == null || parent.getNodeType() != Node.ELEMENT_NODE) - return; - applyAllTransforms((Element)e.getParentNode(), a); - if (e.hasAttributes()) { - NamedNodeMap atts = e.getAttributes(); - for (int i = 0; i < atts.getLength(); ++i) { - - Attr att = Attr.class.cast(atts.item(i)); - if (att.getNamespaceURI() != null) { - continue; - } - String s = att.getName(); - String value = att.getValue(); - if (s.equals("style")) { - for (String styles : value.split("[;]+")) { - int j = styles.indexOf(':'); - if (j != -1) { - if(styles.substring(0, j).trim().equals("transform")) - a.concatenate(SVGUtils.svgToaffineTransform(styles.substring(j + 1).trim())); - } - } - - } else { - if(s.equals("transform")) - a.concatenate(SVGUtils.svgToaffineTransform(att.getValue())); - } - } - } - } - - public Document getDocument() { - return document; - } -} diff --git a/gui/src/quanto/core/data/SvgVertexVisualizationData.java b/gui/src/quanto/core/data/SvgVertexVisualizationData.java deleted file mode 100644 index 22c74565..00000000 --- a/gui/src/quanto/core/data/SvgVertexVisualizationData.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
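// A minimal usage sketch for SVGDocument above; the file name is an assumption, and
// "boundary" is the element id that SvgVertexVisualizationData below looks up:
//
//     SVGDocument doc = new SVGDocument(new java.io.File("x-node.svg"));
//     javax.swing.Icon icon = doc.createIcon(24, 24);            // rasterised preview
//     java.awt.Shape outline = doc.getElementShape("boundary");  // null if no such element or no shape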
- */ -package quanto.core.data; - -import java.awt.Color; -import java.awt.Shape; -import java.awt.geom.Rectangle2D; -import javax.swing.Icon; - -/** - * - * @author alemer - */ -public class SvgVertexVisualizationData implements VertexVisualizationData { - - Icon cachedIcon; - Color labelColor = null; - Rectangle2D lastBoundsForGetShape; - Shape shape = null; - SVGDocument svgDoc; - - public SvgVertexVisualizationData(SVGDocument doc, Color labelColor) { - this.svgDoc = doc; - this.labelColor = labelColor; - shape = doc.getElementShape("boundary"); - if (shape == null) { - shape = doc.getBounds(); - } - cachedIcon = doc.createIcon(); - } - - public SVGDocument getSvgDocument() { - return svgDoc; - } - - public Shape getShape() { - return shape; - } - - public Color getFillColour() { - return Color.yellow; - } - - public Color getLabelColour() { - return labelColor; - } - - public Icon getIcon() { - return cachedIcon; - } - -} diff --git a/gui/src/quanto/core/data/TexConstants.java b/gui/src/quanto/core/data/TexConstants.java deleted file mode 100644 index ee1e9602..00000000 --- a/gui/src/quanto/core/data/TexConstants.java +++ /dev/null @@ -1,72 +0,0 @@ -package quanto.core.data; - -import java.util.HashMap; -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class TexConstants { - - static Map constants = null; - static Pattern latex = Pattern.compile("\\\\([A-Za-z0-9]*)"); - - private static void initialize() { - Map c = new HashMap(50); - - // greek - c.put("alpha", "\u03b1"); - c.put("beta", "\u03b2"); - c.put("gamma", "\u03b3"); - c.put("delta", "\u03b4"); - c.put("epsilon", "\u03b5"); - c.put("zeta", "\u03b6"); - c.put("eta", "\u03b7"); - c.put("theta", "\u03b8"); - c.put("iota", "\u03b9"); - c.put("kappa", "\u03ba"); - c.put("lambda", "\u03bb"); - c.put("mu", "\u03bc"); - c.put("nu", "\u03bd"); - c.put("xi", "\u03be"); - c.put("pi", "\u03c0"); - c.put("rho", "\u03c1"); - c.put("sigma", "\u03c3"); - c.put("tau", "\u03c4"); - c.put("upsilon", "\u03c5"); - c.put("phi", "\u03c6"); - c.put("chi", "\u03c7"); - c.put("psi", "\u03c8"); - c.put("omega", "\u03c9"); - c.put("Gamma", "\u0393"); - c.put("Delta", "\u0394"); - c.put("Theta", "\u0398"); - c.put("Lambda", "\u039b"); - c.put("Xi", "\u039e"); - c.put("Pi", "\u03a0"); - c.put("Sigma", "\u03a3"); - c.put("Upsilon", "\u03a5"); - c.put("Phi", "\u03a6"); - c.put("Psi", "\u03a8"); - c.put("Omega", "\u03a9"); - - constants = c; - } - - public static String translate(String input) { - if (constants == null) { - initialize(); - } - - Matcher m = latex.matcher(input); - StringBuffer buf = new StringBuffer(); - while (m.find()) { - String ucode = constants.get(m.group(1)); - if (ucode != null) { - m.appendReplacement(buf, ucode); - } - } - m.appendTail(buf); - - return buf.toString(); - } -} diff --git a/gui/src/quanto/core/data/Vertex.java b/gui/src/quanto/core/data/Vertex.java deleted file mode 100644 index 02d039fc..00000000 --- a/gui/src/quanto/core/data/Vertex.java +++ /dev/null @@ -1,133 +0,0 @@ -package quanto.core.data; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import java.awt.geom.Point2D; -import java.util.HashMap; -import quanto.core.ParseException; -import quanto.core.Theory; - - -public class Vertex extends GraphElement { - - // null == boundary - protected VertexType vertexType; - private Point2D pos; - - public static Vertex createVertex(String name, VertexType vertexType) { - if (vertexType == null) { - throw new 
IllegalArgumentException("vertexType cannot be null"); - } - return new Vertex(name, vertexType); - } - - public static Vertex createBoundaryVertex(String name) { - return new Vertex(name); - } - - protected Vertex(String name, VertexType vertexType) { - super(name); - this.vertexType = vertexType; - pos=null; - } - - protected Vertex(String name) { - super(name); - pos=null; - } - - /** - * The vertex type name. - * - * @return the vertex type, as specified by the core, - * or null if it is a boundary vertex - */ - public VertexType getVertexType() { - return vertexType; - } - - - public void setPosition(Point2D pos) { - this.pos=pos; - } - - public Point2D getPosition(){ - return pos; - } - - public String getLabel() { - if (data == null) - return ""; - else - return data.getDisplayString(); - } - - public boolean isBoundaryVertex() { - return this.vertexType == null; - } - - public void updateFromJson(Theory theory, JsonNode node) throws ParseException { - if (!node.isObject()) - throw new ParseException("Expected object"); - - JsonNode nameNode = node.get("name"); - if (nameNode != null && nameNode.isTextual()) - updateCoreName(nameNode.asText()); - - JsonNode isWvNode = node.get("is_wire_vertex"); - if (isWvNode == null || !isWvNode.isBoolean()) - throw new ParseException("Standalone vertex did not have is_wire_vertex"); - - updateFromJson(theory, isWvNode.asBoolean(), node); - } - - public static Vertex fromJson(Theory theory, JsonNode node) throws ParseException { - if (!node.isObject()) - throw new ParseException("Expected object"); - - JsonNode nameNode = node.get("name"); - if (nameNode == null || !nameNode.isTextual()) - throw new ParseException("Standalone vertex had no name"); - - Vertex vertex = new Vertex(nameNode.textValue()); - vertex.updateFromJson(theory, node); - - return vertex; - } - - void updateFromJson(Theory theory, boolean isWireVertex, JsonNode node) throws ParseException { - if (isWireVertex) { - vertexType = null; - data = null; - - if (!node.isObject()) - return; - } else { - if (!node.isObject()) - throw new ParseException("Expected object"); - - JsonNode dataNode = node.get("data"); - vertexType = theory.getVertexType(dataNode); - GraphElementDataType dataType = vertexType.getDataType(); - if (dataType == null) - data = null; - else - data = dataType.parseData(dataNode); - } - - JsonNode annotationNode = node.get("annotation"); - if (annotationNode != null && annotationNode.isObject()) { - ObjectMapper mapper = new ObjectMapper(); - setUserData(mapper.>convertValue( - annotationNode, - mapper.getTypeFactory().constructMapType( - HashMap.class, String.class, String.class))); - } - } - - static Vertex fromJson(Theory theory, String name, boolean isWireVertex, JsonNode desc) throws ParseException { - Vertex vertex = new Vertex(name); - vertex.updateFromJson(theory, isWireVertex, desc); - return vertex; - } -} \ No newline at end of file diff --git a/gui/src/quanto/core/data/VertexType.java b/gui/src/quanto/core/data/VertexType.java deleted file mode 100644 index 5e3c194b..00000000 --- a/gui/src/quanto/core/data/VertexType.java +++ /dev/null @@ -1,63 +0,0 @@ -package quanto.core.data; - -public class VertexType { - private String typeName; - private GraphElementDataType dataType; - private VertexVisualizationData visualizationData; - private Character mnemonic; - - public VertexType(String typeName) { - this.typeName = typeName; - } - - public String getTypeName() { - return typeName; - } - - public void setTypeName(String typeName) { - this.typeName = 
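// Illustrative sketch (not from the removed sources): how Vertex.updateFromJson above
// reads "name", "is_wire_vertex" and "annotation" with Jackson. The diff line reading
// "mapper.>convertValue(" appears to have lost its generic type witness in rendering,
// presumably mapper.<HashMap<String, String>>convertValue. The JSON document and the
// "quanto_gui:position" key below are assumptions for illustration only.
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.HashMap;
import java.util.Map;

class VertexJsonSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode node = mapper.readTree(
            "{\"name\":\"v0\",\"is_wire_vertex\":false,"
            + "\"annotation\":{\"quanto_gui:position\":\"1.0,2.0\"}}");

        String name = node.get("name").asText();
        boolean wire = node.get("is_wire_vertex").asBoolean();

        Map<String, String> userData = new HashMap<>();
        JsonNode ann = node.get("annotation");
        if (ann != null && ann.isObject()) {
            userData = mapper.convertValue(ann,
                mapper.getTypeFactory().constructMapType(
                    HashMap.class, String.class, String.class));
        }
        System.out.println(name + " wire=" + wire + " " + userData);
    }
}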
typeName; - } - - public VertexVisualizationData getVisualizationData() { - return visualizationData; - } - - public void setVisualizationData(VertexVisualizationData visualizationData) { - this.visualizationData = visualizationData; - } - - /** - * The mnemonic for adding the vertex - * @return a character, or null if there is no mnemonic - */ - public Character getMnemonic() { - return mnemonic; - } - - public void setMnemonic(Character mnemonic) { - this.mnemonic = mnemonic; - } - - /** - * Equivalent to (getDataType() != null) - */ - public boolean hasData() { - return dataType != null; - } - - /** - * The type of data found at dataPath - */ - public GraphElementDataType getDataType() { - return dataType; - } - - public void setDataType(GraphElementDataType dataType) { - this.dataType = dataType; - } - - @Override - public String toString() { - return typeName; - } -} diff --git a/gui/src/quanto/core/data/VertexVisualizationData.java b/gui/src/quanto/core/data/VertexVisualizationData.java deleted file mode 100644 index 710888d0..00000000 --- a/gui/src/quanto/core/data/VertexVisualizationData.java +++ /dev/null @@ -1,11 +0,0 @@ -package quanto.core.data; - -import java.awt.Color; -import java.awt.Shape; -import javax.swing.Icon; - -public interface VertexVisualizationData { - Shape getShape(); - Color getLabelColour(); - Icon getIcon(); -} diff --git a/gui/src/quanto/core/protocol/CommandArgumentsException.java b/gui/src/quanto/core/protocol/CommandArgumentsException.java deleted file mode 100644 index 28c4e7f2..00000000 --- a/gui/src/quanto/core/protocol/CommandArgumentsException.java +++ /dev/null @@ -1,13 +0,0 @@ -package quanto.core.protocol; - -/** - * The command was given the wrong arguments - */ -public class CommandArgumentsException extends CommandException { - - private static final long serialVersionUID = 1232814923748927383L; - - public CommandArgumentsException(String message) { - super("BADARGS", message); - } -} diff --git a/gui/src/quanto/core/protocol/CommandException.java b/gui/src/quanto/core/protocol/CommandException.java deleted file mode 100644 index 85f4c341..00000000 --- a/gui/src/quanto/core/protocol/CommandException.java +++ /dev/null @@ -1,21 +0,0 @@ -package quanto.core.protocol; - -import quanto.core.CoreException; - -/** - * A command failed - */ -public class CommandException extends CoreException { - - private static final long serialVersionUID = 1232814923748927383L; - private String code; - - public CommandException(String code, String message) { - super(message); - this.code = code; - } - - public String getCode() { - return code; - } -} diff --git a/gui/src/quanto/core/protocol/CoreProcess.java b/gui/src/quanto/core/protocol/CoreProcess.java deleted file mode 100644 index caeacf1d..00000000 --- a/gui/src/quanto/core/protocol/CoreProcess.java +++ /dev/null @@ -1,96 +0,0 @@ -package quanto.core.protocol; - -import java.io.IOException; -import java.util.logging.Level; -import java.util.logging.Logger; -import quanto.core.CoreException; -import quanto.core.CoreExecutionException; - -/** - * Manages an instance of the core process. 
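// Illustrative sketch (not from the removed sources): a minimal implementation of the
// VertexVisualizationData interface deleted below, to show what a vertex theme supplies.
// It compiles only against the quanto.core.data classes removed in this diff; the circle
// size and colour are arbitrary assumptions.
import java.awt.Color;
import java.awt.Shape;
import java.awt.geom.Ellipse2D;
import javax.swing.Icon;
import quanto.core.data.VertexVisualizationData;

class CircleVertexVisualizationSketch implements VertexVisualizationData {
    public Shape getShape() {
        // a small circle centred on the origin
        return new Ellipse2D.Double(-7, -7, 14, 14);
    }

    public Color getLabelColour() {
        return Color.black;
    }

    public Icon getIcon() {
        // a real theme caches an icon here, as SvgVertexVisualizationData does
        return null;
    }
}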
- * - * @author alemer - */ -public class CoreProcess { - - private final static Logger logger = Logger.getLogger("quanto.core.protocol"); - - public static String quantoCoreExecutable = "quanto-core"; - - private Process backend; - private CoreTalker talker = new CoreTalker(); - - public CoreTalker getTalker() { - return talker; - } - - public void startCore() throws CoreException { - startCore(quantoCoreExecutable); - } - - public void startCore(String executable) throws CoreException { - try { - ProcessBuilder pb = new ProcessBuilder(executable, "--protocol"); - - pb.redirectErrorStream(false); - logger.log(Level.FINEST, "Starting {0}...", executable); - backend = pb.start(); - logger.log(Level.FINEST, "{0} started successfully", executable); - - new StreamRedirector(backend.getErrorStream(), System.err).start(); - } catch (IOException e) { - logger.log(Level.SEVERE, - "Could not execute \"" + executable + "\": " - + e.getMessage(), - e); - throw new CoreExecutionException(String.format( - "Could not execute \"%1$\": %2$", executable, - e.getMessage()), e); - } - try { - talker.connect(backend.getInputStream(), backend.getOutputStream()); - } catch (IOException e) { - logger.log(Level.SEVERE, - "The core failed to initiate the protocol correctly", - e); - throw new CoreExecutionException( - "The core failed to initiate the protocol correctly", - e); - } - - } - - private static class ProcessCleanupThread extends Thread { - - private Process process; - - public ProcessCleanupThread(Process process) { - super("Process cleanup thread"); - this.process = process; - } - - @Override - public void run() { - try { - logger.log(Level.FINER, "Waiting for 5 seconds for the core to exit"); - sleep(5000); - } catch (InterruptedException ex) { - logger.log(Level.FINER, "Thread interupted"); - } - logger.log(Level.FINER, "Forcibly terminating the core process"); - process.destroy(); - } - } - - /** - * Quits the core process, and releases associated resources - */ - public void killCore() { - if (backend != null) { - logger.log(Level.FINEST, "Shutting down the core process"); - talker.disconnect(); - new ProcessCleanupThread(backend).start(); - backend = null; - } - } -} diff --git a/gui/src/quanto/core/protocol/CoreTalker.java b/gui/src/quanto/core/protocol/CoreTalker.java deleted file mode 100644 index d995d198..00000000 --- a/gui/src/quanto/core/protocol/CoreTalker.java +++ /dev/null @@ -1,1262 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package quanto.core.protocol; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.Collection; -import java.util.Collections; -import java.util.logging.Level; -import java.util.logging.Logger; -import quanto.core.CoreCommunicationException; -import quanto.core.CoreException; -import quanto.core.CoreTerminatedException; -import static quanto.core.protocol.Utils.*; - -/** - * Manages communication with the core. 
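// Illustrative sketch (not from the removed sources): the shape of the CoreProcess
// start/stop logic above, using only JDK classes. Note in passing that the deleted
// error path formats with "%1$"/"%2$" (no conversion character), which would itself
// throw if reached; "%1$s"/"%2$s" was presumably intended. The executable name below
// is a placeholder assumption.
import java.io.IOException;
import java.io.InputStream;

class ProcessLifecycleSketch {
    public static void main(String[] args) throws IOException, InterruptedException {
        ProcessBuilder pb = new ProcessBuilder("quanto-core", "--protocol");
        pb.redirectErrorStream(false);
        final Process core = pb.start();

        // forward the core's stderr, as the removed StreamRedirector does
        Thread stderrPump = new Thread(new Runnable() {
            public void run() {
                try {
                    InputStream err = core.getErrorStream();
                    int ch;
                    while ((ch = err.read()) != -1) {
                        System.err.write(ch);
                    }
                } catch (IOException ignored) {
                }
            }
        });
        stderrPump.start();

        // ... talk to core.getInputStream()/getOutputStream() here ...

        // shutdown: close stdin, give the core a grace period, then destroy it,
        // mirroring ProcessCleanupThread's five-second sleep before destroy()
        core.getOutputStream().close();
        Thread.sleep(5000);
        core.destroy();
    }
}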
- * - * @author alemer - */ -public class CoreTalker { - - private final static Logger logger = Logger.getLogger("quanto.core.protocol"); - - private RequestWriter writer; - private ResponseReader reader; - private int nextRequestId = 1; - - public CoreTalker() { - } - - public void connect(InputStream input, OutputStream output) throws IOException, ProtocolException { - reader = new ResponseReader(input); - writer = new RequestWriter(output); - - reader.waitForReady(); - logger.log(Level.FINE, - "The core is running version {0} of the protocol", - reader.getVersion()); - // FIXME: we should check that the core is running the version we expect - } - - public void disconnect() { - try { - reader.close(); - writer.close(); - } catch (IOException ex) { - logger.log(Level.WARNING, "Failed to close communication channels to the core", ex); - } - writer = null; - reader = null; - } - - protected CoreCommunicationException writeFailure(IOException e) { - logger.log(Level.SEVERE, - "Failed to write to core process; last received message was \"{0}\"", - reader.getLastMessage()); - return new CoreCommunicationException(e); - } - - protected CoreCommunicationException readFailure(IOException e) { - if (reader.isClosed()) { - logger.log(Level.SEVERE, "Core process disconnected; last received message was \"{0}\"", - reader.getLastMessage()); - return new CoreTerminatedException(e); - } else { - return new CoreCommunicationException(e); - } - } - - private String generateRequestId() { - return Integer.toString(nextRequestId++); - } - - private CommandException errorResponseToException(String code, String message) { - if (code.equals("BADARGS")) - return new CommandArgumentsException(message); - else - return new CommandException(code, message); - } - - private Response getResponse(Response.MessageType expectedType) throws CoreException { - try { - Response resp = reader.parseNextResponse(); - if (resp.isError()) { - throw errorResponseToException(resp.getErrorCode(), resp.getErrorMessage()); - } else if (resp.getMessageType() == Response.MessageType.UnknownRequest) { - throw new UnknownCommandException(resp.getRequestCode()); - } else if (resp.getMessageType() == Response.MessageType.UnknownResponse) { - throw new ProtocolException("Got an unknown response message type"); - } else if (resp.getMessageType() != expectedType) { - throw new ProtocolException("Expected a " + expectedType.toString() + " response, but got a " + resp.getMessageType().toString() + " response"); - } - return resp; - } catch (IOException ex) { - throw readFailure(ex); - } - } - - private void getOkResponse() throws CoreException { - getResponse(Response.MessageType.Ok); - } - - private String getNameResponse() throws CoreException { - return getResponse(Response.MessageType.Name).getStringData(); - } - - private String[] getNameListResponse() throws CoreException { - return getResponse(Response.MessageType.NameList).getStringListData(); - } - - private byte[] getRawDataResponse() throws CoreException { - return getResponse(Response.MessageType.RawData).getByteData(); - } - - private String getJsonResponse() throws CoreException { - return getResponse(Response.MessageType.Json).getStringData(); - } - - private int getCountResponse() throws CoreException { - return getResponse(Response.MessageType.Count).getIntData(); - } - - /** - * Execute an arbitrary console command. 
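// Illustrative sketch (not from the removed sources): the error-dispatch idea behind
// CoreTalker.errorResponseToException and getResponse below - an error response becomes
// a specific exception keyed on its code, and any non-error response must match the
// type the caller expected. The class and enum names here are simplified stand-ins.
class ResponseDispatchSketch {
    enum Kind { OK, NAME, JSON, COUNT, ERROR }

    static class CoreError extends Exception {
        final String code;
        CoreError(String code, String message) {
            super(message);
            this.code = code;
        }
    }

    static class BadArgs extends CoreError {
        BadArgs(String message) { super("BADARGS", message); }
    }

    static CoreError toException(String code, String message) {
        // BADARGS gets its own type, everything else a generic command error
        return "BADARGS".equals(code) ? new BadArgs(message) : new CoreError(code, message);
    }

    static void expect(Kind got, Kind expected, String code, String msg) throws CoreError {
        if (got == Kind.ERROR) {
            throw toException(code, msg);
        }
        if (got != expected) {
            throw new CoreError("PROTOCOL",
                "Expected a " + expected + " response, but got a " + got + " response");
        }
    }

    public static void main(String[] args) {
        try {
            expect(Kind.ERROR, Kind.OK, "BADARGS", "graph does not exist");
        } catch (CoreError e) {
            System.out.println(e.code + ": " + e.getMessage());
        }
    }
}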
- * - * @param command the command to execute, as typed by the user - * @return the result of the command - * @throws CoreException there was a communication error with the core - */ - public String consoleCommand(String command) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("CC", generateRequestId()); - writer.addDataChunkArg(command); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - Response resp = getResponse(Response.MessageType.Console); - return resp.getStringData(); - } - - public String[] consoleCommandList() throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("CL", generateRequestId()); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameListResponse(); - } - - public void changeTheory(String theory) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("TS", generateRequestId()); - writer.addStringArg(theory); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - getOkResponse(); - } - - public String loadEmptyGraph() throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GOE", generateRequestId()); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameResponse(); - } - - public String loadGraphFromFile(String fileName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GOF", generateRequestId()); - writer.addStringArg(fileName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameResponse(); - } - - public void copySubgraphAndOverwrite(String from, String to, Collection vertexNames) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GOS", generateRequestId()); - writer.addStringArg(from); - writer.addStringArg(to); - writer.addStringListArg(vertexNames); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void saveGraphToFile(String graph, String filename) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GS", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(filename); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public String renameGraph(String from, String to) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GR", generateRequestId()); - writer.addStringArg(from); - writer.addStringArg(to); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameResponse(); - } - - public String exportGraphAsJson(String graph) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GE", generateRequestId()); - writer.addStringArg(graph); - 
writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getJsonResponse(); - } - - public String graphUserData(String graph, String dataName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GVGU", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(dataName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return utf8ToString(getRawDataResponse()); - } - - public String vertexUserData(String graph, String vertex, String dataName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GVVU", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(vertex); - writer.addStringArg(dataName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return utf8ToString(getRawDataResponse()); - } - - public String edgeUserData(String graph, String edge, String dataName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GVEU", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(edge); - writer.addStringArg(dataName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return utf8ToString(getRawDataResponse()); - } - - public String bangBoxUserData(String graph, String bangBox, String dataName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GVBU", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(bangBox); - writer.addStringArg(dataName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return utf8ToString(getRawDataResponse()); - } - - public void undo(String graph) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMU", generateRequestId()); - writer.addStringArg(graph); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void redo(String graph) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMR", generateRequestId()); - writer.addStringArg(graph); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void undoRewrite(String graph) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMUR", generateRequestId()); - writer.addStringArg(graph); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void redoRewrite(String graph) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMRR", generateRequestId()); - writer.addStringArg(graph); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void startUndoGroup(String graph) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); 
- } - - try { - writer.addHeader("GMSU", generateRequestId()); - writer.addStringArg(graph); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void endUndoGroup(String graph) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMFU", generateRequestId()); - writer.addStringArg(graph); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void insertGraph(String source, String target) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMI", generateRequestId()); - writer.addStringArg(target); - writer.addStringArg(source); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void setGraphUserData(String graph, String dataName, String data) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMGU", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(dataName); - writer.addDataChunkArg(data); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void deleteGraphUserData(String graph, String dataName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMDGU", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(dataName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public String addVertex(String graph, String vertexType) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMVA", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(vertexType); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getJsonResponse(); - } - - public String[] renameVertex(String graph, String from, String to) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMVR", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(from); - writer.addStringArg(to); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - return getNameListResponse(); - } - - public void deleteVertices(String graph, Collection vertices) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMVD", generateRequestId()); - writer.addStringArg(graph); - writer.addStringListArg(vertices); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void setVertexData(String graph, String vertex, String data) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMVS", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(vertex); - writer.addTaggedDataChunkArg('N', data); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - 
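// Illustrative sketch (not from the removed sources): how a client typically drove the
// graph commands above. It compiles only against the quanto.core and
// quanto.core.protocol classes deleted in this diff; the theory name "red_green" and
// vertex type "Z" are assumptions.
import quanto.core.CoreException;
import quanto.core.protocol.CoreProcess;
import quanto.core.protocol.CoreTalker;

class GraphCommandsSketch {
    public static void main(String[] args) throws CoreException {
        CoreProcess process = new CoreProcess();
        process.startCore();                    // launches quanto-core --protocol
        CoreTalker talker = process.getTalker();

        talker.changeTheory("red_green");
        String graph = talker.loadEmptyGraph(); // returns the core's name for the graph
        String vertexJson = talker.addVertex(graph, "Z");
        System.out.println(vertexJson);

        talker.undo(graph);                     // undo the vertex addition
        process.killCore();                     // disconnect, then destroy after 5s
    }
}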
getOkResponse(); - } - - public void setVertexUserData(String graph, String vertex, String dataName, String data) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMVU", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(vertex); - writer.addStringArg(dataName); - writer.addDataChunkArg(data); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void deleteVertexUserData(String graph, String vertex, String dataName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMDVU", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(vertex); - writer.addStringArg(dataName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public String addEdge(String graph, String edgeType, boolean directed, String sourceVertex, String targetVertex) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMEA", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(edgeType); - writer.addStringArg(directed ? "d" : "u"); - writer.addStringArg(sourceVertex); - writer.addStringArg(targetVertex); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getJsonResponse(); - } - - public void deleteEdges(String graph, Collection edges) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMED", generateRequestId()); - writer.addStringArg(graph); - writer.addStringListArg(edges); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void setEdgeUserData(String graph, String edge, String dataName, String data) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMEU", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(edge); - writer.addStringArg(dataName); - writer.addDataChunkArg(data); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void deleteEdgeUserData(String graph, String edge, String dataName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMDEU", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(edge); - writer.addStringArg(dataName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public String addBangBox(String graph, Collection vertices) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMBA", generateRequestId()); - writer.addStringArg(graph); - writer.addStringListArg(vertices != null ? 
vertices : Collections.emptyList()); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getJsonResponse(); - } - - public void renameBangBox(String graph, String from, String to) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMBR", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(from); - writer.addStringArg(to); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void dropBangBoxes(String graph, Collection bangBoxes) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMBD", generateRequestId()); - writer.addStringArg(graph); - writer.addStringListArg(bangBoxes); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void killBangBoxes(String graph, Collection bangBoxes) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMBK", generateRequestId()); - writer.addStringArg(graph); - writer.addStringListArg(bangBoxes); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public String duplicateBangBox(String graph, String bangBox) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMBC", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(bangBox); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameResponse(); - } - - public String mergeBangBoxes(String graph, Collection bangBoxes) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMBM", generateRequestId()); - writer.addStringArg(graph); - writer.addStringListArg(bangBoxes); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameResponse(); - } - - public String[] bangVertices(String graph, String bangBox, Collection vertices) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMBB", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(bangBox); - writer.addStringListArg(vertices); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameListResponse(); - } - - public void unbangVertices(String graph, Collection vertices) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMBL", generateRequestId()); - writer.addStringArg(graph); - writer.addStringListArg(vertices); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void setBangBoxUserData(String graph, String bangBox, String dataName, String data) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMBU", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(bangBox); - writer.addStringArg(dataName); - 
writer.addDataChunkArg(data); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void deleteBangBoxUserData(String graph, String bangBox, String dataName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("GMDBU", generateRequestId()); - writer.addStringArg(graph); - writer.addStringArg(bangBox); - writer.addStringArg(dataName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void importRulesetFromFile(String fileName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RSO", generateRequestId()); - writer.addStringArg(fileName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void replaceRulesetFromFile(String fileName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RSP", generateRequestId()); - writer.addStringArg(fileName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void exportRulesetToFile(String fileName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RSS", generateRequestId()); - writer.addStringArg(fileName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public String[] listRules() throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RRL", generateRequestId()); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameListResponse(); - } - - public String[] listActiveRules() throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RRA", generateRequestId()); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameListResponse(); - } - - public String openRuleLhs(String rule) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RRP", generateRequestId()); - writer.addStringArg(rule); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameResponse(); - } - - public String openRuleRhs(String rule) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RRQ", generateRequestId()); - writer.addStringArg(rule); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameResponse(); - } - - public void setRule(String ruleName, String lhsGraph, String rhsGraph) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RRU", generateRequestId()); - writer.addStringArg(ruleName); - writer.addStringArg(lhsGraph); - writer.addStringArg(rhsGraph); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } 
- - public void renameRule(String oldName, String newName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RRR", generateRequestId()); - writer.addStringArg(oldName); - writer.addStringArg(newName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void deleteRule(String ruleName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RRD", generateRequestId()); - writer.addStringArg(ruleName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void activateRule(String ruleName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RRY", generateRequestId()); - writer.addStringArg(ruleName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void deactivateRule(String ruleName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RRN", generateRequestId()); - writer.addStringArg(ruleName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public String ruleUserData(String ruleName, String dataName) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RUD", generateRequestId()); - writer.addStringArg(ruleName); - writer.addStringArg(dataName); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return utf8ToString(getRawDataResponse()); - } - - public void setRuleUserData(String ruleName, String dataName, String data) throws CoreException { - - try { - writer.addHeader("SRUD", generateRequestId()); - writer.addStringArg(ruleName); - writer.addStringArg(dataName); - writer.addDataChunkArg(data); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public String[] listTags() throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RTL", generateRequestId()); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameListResponse(); - } - - public String[] listRulesByTag(String tag) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RTR", generateRequestId()); - writer.addStringArg(tag); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getNameListResponse(); - } - - public void tagRule(String ruleName, String tag) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RTT", generateRequestId()); - writer.addStringArg(ruleName); - writer.addStringArg(tag); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void untagRule(String ruleName, String tag) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - 
} - - try { - writer.addHeader("RTU", generateRequestId()); - writer.addStringArg(ruleName); - writer.addStringArg(tag); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void deleteRulesByTag(String tag) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RTD", generateRequestId()); - writer.addStringArg(tag); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void activateRulesByTag(String tag) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RTY", generateRequestId()); - writer.addStringArg(tag); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public void deactivateRulesByTag(String tag) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("RTN", generateRequestId()); - writer.addStringArg(tag); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - getOkResponse(); - } - - public int attachRewrites(String graph, Collection vertices) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("WA", generateRequestId()); - writer.addStringArg(graph); - writer.addStringListArg(vertices); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getCountResponse(); - } - - public int attachOneRewrite(String graph) throws CoreException { - return attachOneRewrite(graph, Collections.emptyList()); - } - - public int attachOneRewrite(String graph, Collection vertices) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("WO", generateRequestId()); - writer.addStringArg(graph); - writer.addStringListArg(vertices); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getCountResponse(); - } - - public String applyAttachedRewrite(String graph, int offset) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("WW", generateRequestId()); - writer.addStringArg(graph); - writer.addIntArg(offset); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getJsonResponse(); - } - - public String listAttachedRewrites(String graph) throws CoreException { - if (writer == null) { - throw new IllegalStateException("Not connected to the core"); - } - - try { - writer.addHeader("WL", generateRequestId()); - writer.addStringArg(graph); - writer.closeMessage(); - } catch (IOException ex) { - throw writeFailure(ex); - } - - return getJsonResponse(); - } - } diff --git a/gui/src/quanto/core/protocol/LoggingInputStream.java b/gui/src/quanto/core/protocol/LoggingInputStream.java deleted file mode 100644 index b4a93e74..00000000 --- a/gui/src/quanto/core/protocol/LoggingInputStream.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
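// Illustrative sketch (not from the removed sources): the rewrite workflow exposed by
// the CoreTalker methods above - attach candidate rewrites to a selection, inspect
// them, apply one. It compiles only against the deleted classes; the graph name and
// vertex names are assumptions.
import java.util.Arrays;
import quanto.core.CoreException;
import quanto.core.protocol.CoreTalker;

class RewriteWorkflowSketch {
    static void rewriteOnce(CoreTalker talker, String graph) throws CoreException {
        int attached = talker.attachRewrites(graph, Arrays.asList("v0", "v1"));
        if (attached > 0) {
            System.out.println(talker.listAttachedRewrites(graph)); // JSON description
            String newGraphJson = talker.applyAttachedRewrite(graph, 0);
            System.out.println(newGraphJson);
        }
    }
}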
- */ -package quanto.core.protocol; - -import java.io.CharArrayWriter; -import java.io.FilterInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.logging.Level; -import java.util.logging.Logger; - -/** - * - * @author alemer - */ -class LoggingInputStream extends FilterInputStream -{ - @SuppressWarnings("NonConstantLogger") - private final Logger logger; - private final CharArrayWriter logStream = new CharArrayWriter(256); - - public LoggingInputStream(InputStream internal, String logArea) { - super(internal); - logger = Logger.getLogger(logArea); - } - - public void writeLog(Level level) { - if (logger.isLoggable(level)) { - logger.log(level, "{0}", - logStream.toString().replace('\u001b', '\u00a4')); - } - logStream.reset(); - } - - public void writeLog(Level level, String message) { - if (logger.isLoggable(level)) { - logger.log(level, "{0}: \"{1}\"", - new Object[] { - message, - logStream.toString().replace('\u001b', '\u00a4') - }); - } - logStream.reset(); - } - - @Override - public int read() throws IOException { - int ch = in.read(); - if (ch != -1) - logStream.append((char)ch); - return ch; - } - - @Override - public int read(byte[] b) throws IOException { - int count = in.read(b); - for (int i = 0; i < count; ++i) { - logStream.append((char)b[i]); - } - return count; - } - - @Override - public int read(byte[] b, int off, int len) throws IOException { - int count = in.read(b, off, len); - for (int i = off; i < (off + count); ++i) { - logStream.append((char)b[i]); - } - return count; - } -} diff --git a/gui/src/quanto/core/protocol/LoggingOutputStream.java b/gui/src/quanto/core/protocol/LoggingOutputStream.java deleted file mode 100644 index 80dbf418..00000000 --- a/gui/src/quanto/core/protocol/LoggingOutputStream.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
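// Illustrative sketch (not from the removed sources): the idea behind the
// LoggingInputStream above - a FilterInputStream that tees every byte it reads into a
// character buffer so the protocol traffic can later be dumped to a java.util.logging
// Logger, exactly the mechanism the removed class uses.
import java.io.ByteArrayInputStream;
import java.io.CharArrayWriter;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.logging.Level;
import java.util.logging.Logger;

class TeeingInputStreamSketch extends FilterInputStream {
    private final Logger logger = Logger.getLogger("sketch.protocol.stream");
    private final CharArrayWriter buffered = new CharArrayWriter(256);

    TeeingInputStreamSketch(InputStream in) { super(in); }

    @Override
    public int read() throws IOException {
        int ch = in.read();
        if (ch != -1) {
            buffered.append((char) ch); // remember what went past
        }
        return ch;
    }

    void dump(Level level, String message) {
        logger.log(level, "{0}: \"{1}\"", new Object[] { message, buffered.toString() });
        buffered.reset();               // start a fresh window, as writeLog does
    }

    public static void main(String[] args) throws IOException {
        TeeingInputStreamSketch s =
            new TeeingInputStreamSketch(new ByteArrayInputStream("OK".getBytes("UTF-8")));
        while (s.read() != -1) { /* consume */ }
        s.dump(Level.INFO, "Received message");
    }
}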
- */ -package quanto.core.protocol; - -import java.io.CharArrayWriter; -import java.io.FilterOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.util.logging.Level; -import java.util.logging.Logger; - -/** - * - * @author alemer - */ -class LoggingOutputStream extends FilterOutputStream -{ - @SuppressWarnings("NonConstantLogger") - private final Logger logger; - private final CharArrayWriter logStream = new CharArrayWriter(256); - - public LoggingOutputStream(OutputStream internal, String logArea) { - super(internal); - logger = Logger.getLogger(logArea); - } - - public void writeLog(Level level) { - if (logger.isLoggable(level)) { - logger.log(level, "{0}", - logStream.toString().replace('\u001b', '\u00a4')); - } - logStream.reset(); - } - - public void writeLog(Level level, String message) { - if (logger.isLoggable(level)) { - logger.log(level, "{0}: {1}", - new Object[] { - message, - logStream.toString().replace('\u001b', '\u00a4') - }); - } - logStream.reset(); - } - - @Override - public void write(int b) throws IOException { - logStream.append((char)b); - out.write(b); - } - - @Override - public void write(byte[] b) throws IOException { - for (int i = 0; i < b.length; ++i) { - logStream.append((char)b[i]); - } - out.write(b); - } - - @Override - public void write(byte[] b, int off, int len) throws IOException { - for (int i = 0; i < b.length; ++i) { - logStream.append((char)b[i]); - } - out.write(b, off, len); - } -} diff --git a/gui/src/quanto/core/protocol/ProtocolException.java b/gui/src/quanto/core/protocol/ProtocolException.java deleted file mode 100644 index 89d6236c..00000000 --- a/gui/src/quanto/core/protocol/ProtocolException.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package quanto.core.protocol; - -import quanto.core.CoreCommunicationException; - -/** - * - * @author alex - */ -public class ProtocolException extends CoreCommunicationException { - - public ProtocolException() { - super("Invalid data was received from the core"); - } - - public ProtocolException(String string) { - super(string); - } - - public ProtocolException(Throwable thrwbl) { - super("Invalid data was received from the core", thrwbl); - } - - public ProtocolException(String string, Throwable thrwbl) { - super(string, thrwbl); - } - -} diff --git a/gui/src/quanto/core/protocol/RequestBuilder.java b/gui/src/quanto/core/protocol/RequestBuilder.java deleted file mode 100644 index 2a09adda..00000000 --- a/gui/src/quanto/core/protocol/RequestBuilder.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
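// Editorial observation with a sketch (not from the removed sources): the
// LoggingOutputStream.write(byte[], int, int) above appears to log the whole backing
// array rather than the off/len slice actually written, so the trace can contain stale
// bytes. A corrected override would mirror only the slice passed to the wrapped stream:
import java.io.CharArrayWriter;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;

class SlicedLoggingOutputStreamSketch extends FilterOutputStream {
    private final CharArrayWriter logStream = new CharArrayWriter(256);

    SlicedLoggingOutputStreamSketch(OutputStream out) { super(out); }

    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        for (int i = off; i < off + len; ++i) {
            logStream.append((char) b[i]); // mirror exactly the slice being written
        }
        out.write(b, off, len);
    }

    String logged() { return logStream.toString(); }
}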
- */ -package quanto.core.protocol; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.LinkedList; -import static quanto.core.protocol.Utils.*; - -/** - * - * @author alemer - */ -class RequestBuilder -{ - public static final byte ESC = '\u001b'; - private LinkedList components = new LinkedList(); - private int messageLength = 0; - private boolean complete = false; - - private void addHeader(byte[] code, byte[] requestId) - { - byte[] comp = new byte[6 + code.length + requestId.length]; - comp[0] = ESC; - comp[1] = '<'; - int offset = 2; - for (int i = 0; i < code.length; ++i, ++offset) { - comp[offset] = code[i]; - } - comp[offset] = ESC; ++offset; - comp[offset] = ':'; ++offset; - for (int i = 0; i < requestId.length; ++i, ++offset) { - comp[offset] = requestId[i]; - } - comp[offset] = ESC; ++offset; - comp[offset] = '|'; ++offset; - addComponent(comp); - } - - private void addComponent(byte[] comp) - { - assert !complete; - components.add(comp); - messageLength += comp.length; - } - - private byte[] convertInt(int i) - { - return stringToAscii(Integer.toString(i)); - } - - // only ASCII!!! - public void addEscapedChar(char ch) - { - assert ch < 128; - assert messageLength > 0; - addComponent(new byte[] { ESC, (byte)ch }); - } - - public void addDelim() - { - assert messageLength > 0; - addEscapedChar(';'); - } - - public void closeMessage() - { - assert messageLength > 0; - addEscapedChar('>'); - complete = true; - } - - public void addDataChunk(byte[] data) - { - assert messageLength > 0; - addEscapedChar('['); - addComponent(convertInt(data.length)); - addEscapedChar('|'); - addComponent(data); - addEscapedChar(']'); - } - - public void addDataChunk(String data) - { - addDataChunk(stringToUtf8(data)); - } - - public void addString(String data) - { - addComponent(stringToUtf8(data)); - } - - public void addStringList(String[] items) throws IOException - { - addStringList(Arrays.asList(items)); - } - - public void addStringList(Collection items) throws IOException - { - addComponent(convertInt(items.size())); - addEscapedChar(':'); - boolean first = true; - for (String item : items) { - if (!first) { - addEscapedChar(','); - } - addString(item); - first = false; - } - } -} diff --git a/gui/src/quanto/core/protocol/RequestWriter.java b/gui/src/quanto/core/protocol/RequestWriter.java deleted file mode 100644 index 001357a0..00000000 --- a/gui/src/quanto/core/protocol/RequestWriter.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
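// Illustrative sketch (not from the removed sources): what the ESC-framed request
// format written by RequestBuilder above (and RequestWriter below) looks like on the
// wire, as far as can be read from the deleted code: ESC '<' code ESC ':' requestId
// ESC '|' args ESC '>', with ESC ';' between arguments. The command code "GS", request
// id and argument values below are assumptions; ESC is printed as '¤' as the logging
// classes do.
import java.io.ByteArrayOutputStream;
import java.io.IOException;

class WireFramingSketch {
    private static final byte ESC = '\u001b';
    private final ByteArrayOutputStream out = new ByteArrayOutputStream();

    private void esc(char ch) { out.write(ESC); out.write(ch); }
    private void ascii(String s) throws IOException { out.write(s.getBytes("US-ASCII")); }
    private void utf8(String s) throws IOException { out.write(s.getBytes("UTF-8")); }

    byte[] saveGraph(String requestId, String graph, String fileName) throws IOException {
        esc('<'); ascii("GS"); esc(':'); utf8(requestId); esc('|'); // header: code + id
        utf8(graph);                                                // first string argument
        esc(';');                                                   // argument delimiter
        utf8(fileName);                                             // second string argument
        esc('>');                                                   // close the message
        return out.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        byte[] msg = new WireFramingSketch().saveGraph("1", "g0", "graphs/g0.graph");
        System.out.println(new String(msg, "UTF-8").replace('\u001b', '\u00a4'));
        // prints something like: ¤<GS¤:1¤|g0¤;graphs/g0.graph¤>
    }
}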
- */ -package quanto.core.protocol; - -import java.io.BufferedOutputStream; -import java.util.Collection; -import java.io.IOException; -import java.io.OutputStream; -import java.util.Arrays; -import java.util.logging.Level; -import static quanto.core.protocol.Utils.*; - -/** - * - * @author alemer - */ -class RequestWriter -{ - private LoggingOutputStream output; - private boolean inMessage = false; - private boolean argNeedsClosing = false; - public static final byte ESC = '\u001b'; - - public RequestWriter(OutputStream output) { - this.output = new LoggingOutputStream( - new BufferedOutputStream(output), - "quanto.core.protocol.stream"); - } - - public void close() throws IOException { - output.close(); - } - - private byte[] convertInt(int i) - { - return stringToAscii(Integer.toString(i)); - } - - public void addHeader(String code, String requestId) throws IOException - { - assert !inMessage; - inMessage = true; - addEscapedChar('<'); - output.write(stringToAscii(code)); - addEscapedChar(':'); - output.write(stringToUtf8(requestId)); - addEscapedChar('|'); - } - - private void closeArg() throws IOException - { - if (argNeedsClosing) { - argNeedsClosing = false; - addEscapedChar(';'); - } - } - - // only ASCII!!! - private void addEscapedChar(char ch) throws IOException - { - assert ch < 128; - assert inMessage; - output.write(ESC); - output.write(ch); - } - - public void addEmptyArg() throws IOException - { - assert inMessage; - argNeedsClosing = true; - } - - public void closeMessage() throws IOException - { - assert inMessage; - argNeedsClosing = false; - addEscapedChar('>'); - inMessage = false; - output.writeLog(Level.FINEST, "Sending message to core"); - output.flush(); - } - - private void addDataChunk(byte[] data) throws IOException - { - assert inMessage; - closeArg(); - addEscapedChar('['); - output.write(convertInt(data.length)); - addEscapedChar('|'); - output.write(data); - addEscapedChar(']'); - } - - public void addDataChunkArg(byte[] data) throws IOException - { - addDataChunk(data); - argNeedsClosing = true; - } - - public void addDataChunkArg(String data) throws IOException - { - addDataChunk(stringToUtf8(data)); - argNeedsClosing = true; - } - - public void addTaggedDataChunkArg(char tag, byte[] data) throws IOException - { - closeArg(); - addEscapedChar(tag); - addDataChunkArg(data); - } - - public void addTaggedDataChunkArg(char tag, String data) throws IOException - { - closeArg(); - addEscapedChar(tag); - addDataChunkArg(data); - } - - public void addStringArg(String data) throws IOException - { - assert inMessage; - closeArg(); - output.write(stringToUtf8(data)); - argNeedsClosing = true; - } - - public void addStringListArg(String[] items) throws IOException - { - addStringListArg(Arrays.asList(items)); - } - - public void addStringListArg(Collection items) throws IOException - { - assert inMessage; - closeArg(); - output.write(convertInt(items.size())); - addEscapedChar(':'); - boolean first = true; - for (String item : items) { - if (!first) { - addEscapedChar(','); - } - output.write(stringToUtf8(item)); - first = false; - } - argNeedsClosing = true; - } - - public void addIntArg(int value) throws IOException - { - assert inMessage; - closeArg(); - output.write(stringToAscii(Integer.toString(value))); - argNeedsClosing = true; - } - - public void addIntListArg(int[] values) throws IOException - { - assert inMessage; - closeArg(); - output.write(convertInt(values.length)); - boolean first = true; - for (int i: values) { - if(!first) { - addEscapedChar(','); - 
} - output.write(stringToUtf8(Integer.toString(i))); - first = false; - } - argNeedsClosing = true; - } -} diff --git a/gui/src/quanto/core/protocol/Response.java b/gui/src/quanto/core/protocol/Response.java deleted file mode 100644 index b81e5610..00000000 --- a/gui/src/quanto/core/protocol/Response.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package quanto.core.protocol; - -import static quanto.core.protocol.Utils.*; - -/** - * - * @author alex - */ -class Response { - public enum MessageType { - Error, - Ok, - Console, - ConsoleHelp, - RawData, - Pretty, - Xml, - Json, - Count, - Name, - NameList, - UserData, - StructuredData, - UnknownRequest, - UnknownResponse - } - private String requestId; - private MessageType messageType; - private String stringData; - private String stringData2; - private String[] stringListData; - private byte[] byteData; - private int intData; - - public Response(MessageType type, String requestId) { - this.messageType = type; - this.requestId = requestId; - } - - public boolean isError() { - return messageType == MessageType.Error; - } - - public String getRequestId() { - return requestId; - } - - public MessageType getMessageType() { - return messageType; - } - - void setMessageType(MessageType messageType) { - this.messageType = messageType; - } - - public String getStringData() { - assert messageType != MessageType.Error; - assert stringData != null || byteData != null; - - if (stringData == null && byteData != null) { - stringData = utf8ToString(byteData); - } - return stringData; - } - - void setStringData(String stringData) { - this.stringData = stringData; - } - - public String[] getStringListData() { - assert messageType != MessageType.Error; - assert stringListData != null; - - return stringListData; - } - - void setStringListData(String[] stringListData) { - this.stringListData = stringListData; - } - - public byte[] getByteData() { - assert messageType != MessageType.Error; - assert stringData != null || byteData != null; - - if (byteData == null && stringData != null) { - byteData = stringToUtf8(stringData); - } - return byteData; - } - - void setByteData(byte[] byteData) { - this.byteData = byteData; - } - - public int getIntData() { - assert messageType == MessageType.Count; - return intData; - } - - void setIntData(int intData) { - this.intData = intData; - } - - public String getErrorCode() { - assert messageType == MessageType.Error; - return stringData; - } - - void setErrorCode(String errorCode) { - this.stringData = errorCode; - } - - public String getErrorMessage() { - assert messageType == MessageType.Error; - return stringData2; - } - - void setErrorMessage(String errorMessage) { - this.stringData2 = errorMessage; - } - - public String getCommandArgs() { - assert messageType == MessageType.ConsoleHelp; - return stringData; - } - - void setCommandArgs(String commandArgs) { - this.stringData = commandArgs; - } - - public String getCommandHelp() { - assert messageType == MessageType.ConsoleHelp; - return stringData2; - } - - void setCommandHelp(String commandHelp) { - this.stringData2 = commandHelp; - } - - public String getRequestCode() { - assert messageType == MessageType.UnknownRequest; - return stringData; - } - - void setRequestCode(String code) { - this.stringData = code; - } - - public String getResponseCode() { - switch (messageType) { - case Console: return "C"; - case ConsoleHelp: return "H"; - case Count: return "I"; - case Error: 
return "Q"; - case Name: return "N"; - case NameList: return "M"; - case Ok: return "O"; - case Pretty: return "P"; - case RawData: return "R"; - case StructuredData: return "S"; - case UserData: return "U"; - case Xml: return "X"; - case Json: return "J"; - case UnknownRequest: return "Z"; - case UnknownResponse: return stringData; - } - throw new Error("Implement your damn function!"); - } - - void setResponseCode(String code) { - assert messageType == MessageType.UnknownResponse; - this.stringData = code; - } -} diff --git a/gui/src/quanto/core/protocol/ResponseReader.java b/gui/src/quanto/core/protocol/ResponseReader.java deleted file mode 100644 index 265e55db..00000000 --- a/gui/src/quanto/core/protocol/ResponseReader.java +++ /dev/null @@ -1,460 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package quanto.core.protocol; - -import java.util.logging.Logger; -import java.io.BufferedInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.Arrays; -import java.util.logging.Level; -import static quanto.core.protocol.Utils.*; - -/** - * - * @author alex - */ -class ResponseReader { - private static final char ESC = '\u001b'; - private final static Logger logger = Logger.getLogger("quanto.core.protocol"); - - private LoggingInputStream input; - private String version; - private StringBuilder lastMessage = new StringBuilder(); - private String lastInvalidOutput; - - public String getLastInvalidOutput() { - return lastInvalidOutput; - } - - public String getLastMessage() { - return lastMessage.toString(); - } - - public ResponseReader(InputStream input) { - this.input = new LoggingInputStream( - new BufferedInputStream(input), "quanto.core.protocol.stream"); - } - - public boolean isClosed() { - return input == null; - } - - public void close() throws IOException { - if (!isClosed()) { - input.close(); - input = null; - } - } - - /** - * Read a character from the stream. - * - * Guarantees that it is not -1 (will throw IOException in that case). - * @return a character - * @throws IOException - */ - private int read() throws IOException { - if (isClosed()) { - throw new IllegalStateException("Input stream is closed"); - } - int gotCh = input.read(); - if (gotCh == -1) { - input = null; - throw new IOException("End of stream reached"); - } - lastMessage.append(gotCh); - return gotCh; - } - - /** - * Read into a buffer. - * - * Guarantees that length is not -1 (will throw IOException in that case). - * @return read count - */ - private int read(byte[] b) throws IOException { - if (isClosed()) { - throw new IllegalStateException("Input stream is closed"); - } - int count = input.read(b); - if (count == -1) { - input = null; - throw new IOException("End of stream reached"); - } - lastMessage.append(b); - return count; - } - - /** - * Read into a buffer. - * - * Guarantees that length is not -1 (will throw IOException in that case). 
- * @return read count - */ - private int read(byte[] b, int off, int len) throws IOException { - if (isClosed()) { - throw new IllegalStateException("Input stream is closed"); - } - int count = input.read(b, off, len); - if (count == -1) { - input = null; - throw new IOException("End of stream reached"); - } - lastMessage.append(b); - return count; - } - - private void eatEsc() throws ProtocolException, IOException { - int gotCh = read(); - if (gotCh != ESC) { - if (Character.isISOControl(gotCh)) - throw new ProtocolException("Expected ESC from core, got \\u" + String.format("%1$04x", gotCh)); - else - throw new ProtocolException("Expected ESC from core, got " + (char)gotCh); - } - } - - private void eatChar(char ch) throws ProtocolException, IOException { - int gotCh = read(); - if (gotCh != ch) { - if (Character.isISOControl(gotCh)) - throw new ProtocolException("Expected " + ch + ", got \\u" + Integer.toHexString(gotCh)); - else - throw new ProtocolException("Expected " + ch + ", got " + (char)gotCh); - } - } - - private void eatEscChar(char ch) throws ProtocolException, IOException { - eatEsc(); - eatChar(ch); - } - - private byte[] readDataBlock() throws ProtocolException, IOException { - eatEscChar('['); - int length = readIntToEscape(); - eatEscChar('|'); - - byte[] buffer = new byte[length]; - int pos = 0; - while (pos < buffer.length) { - pos += read(buffer, pos, buffer.length - pos); - } - - eatEscChar(']'); - return buffer; - } - - // I'm almost tempted to use a List - Java makes this - // painful to do efficiently - private byte[] readToEscape() throws ProtocolException, IOException { - if (isClosed()) { - throw new IllegalStateException("Input stream is closed"); - } - byte[] result = null; - byte[] buffer = new byte[50]; - int escPos = -1; - while (escPos == -1) { - input.mark(buffer.length + 1); - int count = read(buffer); - for (int i = 0; i < count; ++i) { - if (buffer[i] == ESC) { - byte next; - if (i + 1 < count) - next = buffer[i + 1]; - else - next = (byte)read(); - if (next == ESC) { - // escaped ESC - // shorten the array by one - for (int j = i + 1; j < count - 1; ++j) { - buffer[j] = buffer[j + 1]; - } - --count; - } else { - escPos = i; - count = i; - break; - } - } - } - if (result == null) { - result = Arrays.copyOf(buffer, count); - } else { - byte[] newResult = Arrays.copyOf(result, result.length + count); - for (int i = 0; i < count; ++i) { - newResult[result.length + i] = buffer[i]; - } - result = newResult; - } - } - input.reset(); - read(buffer, 0, escPos); - return result; - } - - private int readIntToEscape() throws ProtocolException, IOException { - try { - return Integer.parseInt(asciiToString(readToEscape())); - } catch (NumberFormatException ex) { - throw new ProtocolException("Expecting a decimal integer"); - } - } - - private String readAsciiStringToEscape() throws ProtocolException, IOException { - return asciiToString(readToEscape()); - } - - private String readStringToEscape() throws ProtocolException, IOException { - return utf8ToString(readToEscape()); - } - - private String[] readStringList() throws ProtocolException, IOException { - int length = readIntToEscape(); - if (length < 0) - throw new ProtocolException("Array length cannot be negative"); - eatEscChar(':'); - String[] result = new String[length]; - for (int i = 0; i < result.length; ++i) { - result[i] = readStringToEscape(); - if (i + 1 < result.length) - eatEscChar(','); - } - return result; - } - - private void skipToBodyEnd() throws IOException, ProtocolException { - if 
(isClosed()) { - throw new IllegalStateException("Input stream is closed"); - } - boolean esc = false; - input.mark(2); - int ch = read(); - while (true) { - if (esc) { - if (ch == '[') { - // data chunk - input.reset(); - readDataBlock(); - input.mark(2); - } else if (ch == '>') { - input.reset(); - break; - } - esc = false; - input.mark(2); - } else if (ch == ESC) { - esc = true; - } else { - input.mark(2); - } - ch = read(); - } - } - - private void readAvailableInvalidData() throws IOException { - if (isClosed()) { - return; - } - byte[] b = new byte[1024]; - int count = 0; - int avail = input.available(); - while (avail > 0) { - if (avail > b.length) - avail = b.length; - count = read(b, 0, avail); - if (count != -1 && logger.isLoggable(Level.INFO)) { - String strVal = new String(b, 0, count); - logger.log(Level.INFO, "Discarding data: \"{0}\"", - strVal.replace('\u001b', '\u00a4')); - lastMessage.append(strVal); - } - avail = input.available(); - } - lastInvalidOutput = lastMessage.toString(); - } - - private void eatMessageOpening() throws IOException, ProtocolException { - eatEscChar('<'); - lastMessage.setLength(0); - lastMessage.append(ESC); - lastMessage.append('<'); - } - - public void waitForReady() throws IOException, ProtocolException { - if (version != null) - return; - - try { - eatMessageOpening(); - eatChar('V'); - eatEscChar('|'); - version = readStringToEscape(); - eatEscChar('>'); - input.writeLog(Level.FINEST, "Received version message"); - } catch (IOException ex) { - input.writeLog(Level.SEVERE, "Received partial version message"); - throw ex; - } catch (ProtocolException ex) { - input.writeLog(Level.SEVERE, "Received invalid version message"); - throw ex; - } - } - - public String getVersion() throws IOException, ProtocolException { - waitForReady(); - return version; - } - - private void eatDelim() throws IOException, ProtocolException { - eatEscChar(';'); - } - - private Response parseErrorResponseBody(String requestId) throws ProtocolException, IOException { - Response resp = new Response(Response.MessageType.Error, requestId); - resp.setErrorCode(readAsciiStringToEscape()); - eatDelim(); - resp.setErrorMessage(readStringToEscape()); - return resp; - } - - private Response parseOkResponseBody(String requestId) throws ProtocolException, IOException { - return new Response(Response.MessageType.Ok, requestId); - } - - private Response parseConsoleResponseBody(String requestId) throws ProtocolException, IOException { - Response resp = new Response(Response.MessageType.Console, requestId); - resp.setStringData(utf8ToString(readDataBlock())); - return resp; - } - - private Response parseConsoleHelpResponseBody(String requestId) throws ProtocolException, IOException { - Response resp = new Response(Response.MessageType.ConsoleHelp, requestId); - resp.setCommandArgs(readStringToEscape()); - eatDelim(); - resp.setCommandHelp(readStringToEscape()); - return resp; - } - - private Response parseDataResponseBody(String requestId) throws ProtocolException, IOException { - Response resp = new Response(Response.MessageType.RawData, requestId); - resp.setByteData(readDataBlock()); - return resp; - } - - private Response parsePrettyResponseBody(String requestId) throws ProtocolException, IOException { - Response resp = new Response(Response.MessageType.Pretty, requestId); - resp.setStringData(utf8ToString(readDataBlock())); - return resp; - } - - private Response parseXmlResponseBody(String requestId) throws ProtocolException, IOException { - Response resp = new 
Response(Response.MessageType.Xml, requestId); - resp.setStringData(utf8ToString(readDataBlock())); - return resp; - } - - private Response parseJsonResponseBody(String requestId) throws ProtocolException, IOException { - Response resp = new Response(Response.MessageType.Json, requestId); - resp.setStringData(utf8ToString(readDataBlock())); - return resp; - } - - private Response parseCountResponseBody(String requestId) throws ProtocolException, IOException { - Response resp = new Response(Response.MessageType.Count, requestId); - resp.setIntData(readIntToEscape()); - return resp; - } - - private Response parseNameResponseBody(String requestId) throws ProtocolException, IOException { - Response resp = new Response(Response.MessageType.Name, requestId); - resp.setStringData(readStringToEscape()); - return resp; - } - - private Response parseNameListResponseBody(String requestId) throws ProtocolException, IOException { - Response resp = new Response(Response.MessageType.NameList, requestId); - resp.setStringListData(readStringList()); - return resp; - } - - private Response parseUserDataResponseBody(String requestId) throws ProtocolException, IOException { - Response resp = new Response(Response.MessageType.UserData, requestId); - resp.setByteData(readDataBlock()); - return resp; - } - - private Response parseStructuredDataResponseBody(String requestId) throws ProtocolException, IOException { - // ??? - skipToBodyEnd(); - throw new UnsupportedOperationException(); - } - - private Response parseUnknownRequestResponseBody(String requestId) throws ProtocolException, IOException { - Response resp = new Response(Response.MessageType.UnknownRequest, requestId); - resp.setRequestCode(readAsciiStringToEscape()); - return resp; - } - - private Response parseUnknownResponseBody(String code, String requestId) throws ProtocolException, IOException { - skipToBodyEnd(); - Response resp = new Response(Response.MessageType.UnknownResponse, requestId); - resp.setResponseCode(code); - return resp; - } - - public Response parseNextResponse() throws IOException, ProtocolException { - waitForReady(); - try { - eatMessageOpening(); - String code = readAsciiStringToEscape(); - eatEscChar(':'); - String requestId = readStringToEscape(); - eatEscChar('|'); - Response resp; - if (code.equals("Q")) - resp = parseErrorResponseBody(requestId); - else if (code.equals("O")) - resp = parseOkResponseBody(requestId); - else if (code.equals("C")) - resp = parseConsoleResponseBody(requestId); - else if (code.equals("H")) - resp = parseConsoleHelpResponseBody(requestId); - else if (code.equals("R")) - resp = parseDataResponseBody(requestId); - else if (code.equals("P")) - resp = parsePrettyResponseBody(requestId); - else if (code.equals("X")) - resp = parseXmlResponseBody(requestId); - else if (code.equals("J")) - resp = parseJsonResponseBody(requestId); - else if (code.equals("I")) - resp = parseCountResponseBody(requestId); - else if (code.equals("N")) - resp = parseNameResponseBody(requestId); - else if (code.equals("M")) - resp = parseNameListResponseBody(requestId); - else if (code.equals("U")) - resp = parseUserDataResponseBody(requestId); - else if (code.equals("S")) - resp = parseStructuredDataResponseBody(requestId); - else if (code.equals("Z")) - resp = parseUnknownRequestResponseBody(requestId); - else - resp = parseUnknownResponseBody(code, requestId); - eatEscChar('>'); - input.writeLog(Level.FINEST, "Received message"); - return resp; - } catch (IOException ex) { - input.writeLog(Level.SEVERE, "Received partial 
message"); - readAvailableInvalidData(); - throw ex; - } catch (ProtocolException ex) { - input.writeLog(Level.SEVERE, "Received invalid message"); - readAvailableInvalidData(); - throw ex; - } - } -} diff --git a/gui/src/quanto/core/protocol/StreamRedirector.java b/gui/src/quanto/core/protocol/StreamRedirector.java deleted file mode 100644 index 8da22db6..00000000 --- a/gui/src/quanto/core/protocol/StreamRedirector.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package quanto.core.protocol; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -import java.util.logging.Level; -import java.util.logging.Logger; - -/** - * - * @author alemer - */ -class StreamRedirector extends Thread -{ - private final static Logger logger = Logger.getLogger("quanto.core.protocol.streamredirector"); - private InputStream from; - private OutputStream to; - - public StreamRedirector(InputStream from, OutputStream to) { - super("IO stream redirector"); - this.from = from; - this.to = to; - } - - @Override - public void run() { - try { - byte[] buffer = new byte[200]; - int count = from.read(buffer); - while (count != -1) { - to.write(buffer, 0, count); - to.flush(); - count = from.read(buffer); - } - } catch (IOException ex) { - logger.log(Level.WARNING, "Failed to redirect stderr", ex); - } - } -} diff --git a/gui/src/quanto/core/protocol/UnknownCommandException.java b/gui/src/quanto/core/protocol/UnknownCommandException.java deleted file mode 100644 index 6bf298bd..00000000 --- a/gui/src/quanto/core/protocol/UnknownCommandException.java +++ /dev/null @@ -1,22 +0,0 @@ -package quanto.core.protocol; - -import quanto.core.protocol.CommandException; - -/** - * The command was not recognised by the core - */ -public class UnknownCommandException extends CommandException { - - private static final long serialVersionUID = 1232814923748927383L; - - private String command; - - public UnknownCommandException(String command) { - super("BADCOMMAND", "Unknown command \"" + command + "\""); - this.command = command; - } - - public String getCommand() { - return command; - } -} diff --git a/gui/src/quanto/core/protocol/Utils.java b/gui/src/quanto/core/protocol/Utils.java deleted file mode 100644 index 7462d0ae..00000000 --- a/gui/src/quanto/core/protocol/Utils.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. 
- */ -package quanto.core.protocol; - -import java.io.UnsupportedEncodingException; -/** - * - * @author alex - */ -final class Utils { - - static byte[] stringToUtf8(String str) { - try { - return str.getBytes("UTF-8"); - } catch (UnsupportedEncodingException ex) { - throw new Error("The Java environment does not support the required UTF-8 encoding."); - } - } - - static String utf8ToString(byte[] bytes) { - try { - return new String(bytes, "UTF-8"); - } catch (UnsupportedEncodingException ex) { - throw new Error("The Java environment does not support the required UTF-8 encoding."); - } - } - - static byte[] stringToAscii(String str) - { - try { - return str.getBytes("US-ASCII"); - } catch (UnsupportedEncodingException ex) { - throw new Error("The Java environment does not support the required US-ASCII encoding."); - } - } - - static String asciiToString(byte[] bytes) { - try { - return new String(bytes, "US-ASCII"); - } catch (UnsupportedEncodingException ex) { - throw new Error("The Java environment does not support the required US-ASCII encoding."); - } - } -} diff --git a/gui/src/quanto/core/protocol/userdata/CopyOfGraphUserDataSerializer.java b/gui/src/quanto/core/protocol/userdata/CopyOfGraphUserDataSerializer.java deleted file mode 100644 index 44097176..00000000 --- a/gui/src/quanto/core/protocol/userdata/CopyOfGraphUserDataSerializer.java +++ /dev/null @@ -1,20 +0,0 @@ -package quanto.core.protocol.userdata; - -import quanto.core.protocol.CoreTalker; -import quanto.core.protocol.userdata.dataserialization.StringDataSerializer; - -/** - * User: benjaminfrot - * Date: 7/21/12 - */ -public class CopyOfGraphUserDataSerializer extends GraphUserDataSerializer { - - private static String suffix = "copy_of"; - private static StringDataSerializer serializer = new StringDataSerializer(); - - public CopyOfGraphUserDataSerializer(CoreTalker talker) { - super(talker, serializer, suffix); - // copy_of is special - prefix = ""; - } -} diff --git a/gui/src/quanto/core/protocol/userdata/GraphUserDataSerializer.java b/gui/src/quanto/core/protocol/userdata/GraphUserDataSerializer.java deleted file mode 100644 index ed3573d8..00000000 --- a/gui/src/quanto/core/protocol/userdata/GraphUserDataSerializer.java +++ /dev/null @@ -1,185 +0,0 @@ -package quanto.core.protocol.userdata; - -import java.util.logging.Level; -import java.util.logging.Logger; -import quanto.core.CoreException; -import quanto.core.data.CoreGraph; -import quanto.core.protocol.CommandException; -import quanto.core.protocol.CoreTalker; -import quanto.core.protocol.userdata.dataserialization.DataSerializer; - -/** - * User: benjaminfrot - * Date: 7/21/12 - */ -public class GraphUserDataSerializer extends QuantoAppUserDataSerializer { - - private DataSerializer serializer; - private String suffix; - private final static Logger logger = Logger.getLogger("quanto.protocol.userdata"); - - public GraphUserDataSerializer(CoreTalker talker, DataSerializer serializer, String suffix) { - super(talker); - this.serializer = serializer; - this.suffix = suffix; - } - - //Graph User Data - public T getGraphUserData(CoreGraph graph) throws CoreException { - try { - return serializer.fromString(talker.graphUserData(graph.getCoreName(), prefix + suffix)); - } catch (CommandException e) { - if (e.getCode().equals("NOSUCHGRAPHUSERDATA")) { - logger.log(Level.FINER, - "No such data {0}{1} on graph {2}", - new Object[]{prefix, suffix, graph.getCoreName()}); - } else { - logger.log(Level.WARNING, "Could not get data " + - prefix + suffix + " on graph " + 
graph.getCoreName(), e); - } - return null; - } - } - public void setGraphUserData(CoreGraph graph, T data) throws CoreException { - String dataString = serializer.toString(data); - if (dataString == null) { - return; - } - try { - talker.setGraphUserData(graph.getCoreName(), prefix + suffix, dataString); - } catch (CommandException e) { - logger.log(Level.WARNING, "Could not set data " + - prefix + suffix + " on graph" + graph.getCoreName(), e); - } - } - - public void deleteGraphUserData(CoreGraph graph) throws CoreException { - try { - talker.deleteGraphUserData(graph.getCoreName(), prefix + suffix); - } catch (CommandException e) { - logger.log(Level.WARNING, "Could delete set data " + - prefix + suffix + " on graph" + graph.getCoreName(), e); - } - } - - public T getVertexUserData(CoreGraph graph, String vertexName) throws CoreException { - try { - return serializer.fromString(talker.vertexUserData(graph.getCoreName(), vertexName, prefix + suffix)); - } catch (CommandException e) { - if (e.getCode().equals("NOSUCHVERTEXUSERDATA")) { - logger.log(Level.FINER, - "No such data {0}{1} on vertex {2} of graph {3}", - new Object[]{prefix, suffix, vertexName, graph.getCoreName()}); - } else { - logger.log(Level.WARNING, "Could not get data " + - prefix + suffix + " on vertex " + vertexName + - " of graph " + graph.getCoreName(), e); - } - return null; - } - } - - public void setVertexUserData(CoreGraph graph, String vertexName, T data) throws CoreException { - String dataString = serializer.toString(data); - if (dataString == null) { - return; - } - try { - talker.setVertexUserData(graph.getCoreName(), vertexName, prefix + suffix, dataString); - } catch (CommandException e) { - logger.log(Level.WARNING, "Could not set vertex user data " + - prefix + suffix + " on vertex " + vertexName + - " of graph " + graph.getCoreName(), e); - } - } - public void deleteVertexUserData(CoreGraph graph, String vertexName) throws CoreException { - try { - talker.deleteVertexUserData(graph.getCoreName(), vertexName, prefix + suffix); - } catch (CommandException e) { - logger.log(Level.WARNING, "Cound not delete user data " + - prefix + suffix + " on vertex " + vertexName + - " of graph " + graph.getCoreName(), e); - } - } - - public T getEdgeUserData(CoreGraph graph, String edgeName) throws CoreException { - try { - return serializer.fromString(talker.edgeUserData(graph.getCoreName(), edgeName, prefix + suffix)); - } catch (CommandException e) { - if (e.getCode().equals("NOSUCHEDGEUSERDATA")) { - logger.log(Level.FINER, - "No such data {0}{1} on edge {2} of graph {3}", - new Object[]{prefix, suffix, edgeName, graph.getCoreName()}); - } else { - logger.log(Level.WARNING, "Could not get data " + - prefix + suffix + " on edge " + edgeName + - " of graph " + graph.getCoreName(), e); - } - return null; - } - } - - public void setEdgeUserData(CoreGraph graph, String edgeName, T data) throws CoreException { - String dataString = serializer.toString(data); - if (dataString == null) { - return; - } - try { - talker.setEdgeUserData(graph.getCoreName(), edgeName, prefix + suffix, dataString); - } catch (CommandException e) { - logger.log(Level.WARNING, "Could not set user data " + - prefix + suffix + " on edge " + edgeName + - " of graph " + graph.getCoreName(), e); - } - } - public void deleteEdgeUserData(CoreGraph graph, String edgeName) throws CoreException { - try { - talker.deleteEdgeUserData(graph.getCoreName(), edgeName, prefix + suffix); - } catch (CommandException e) { - logger.log(Level.WARNING, "Cound not 
delete user data " + - prefix + suffix + " on edge " + edgeName + - " of graph " + graph.getCoreName(), e); - } - } - - public T getBangBoxUserData(CoreGraph graph, String bbName) throws CoreException { - try { - return serializer.fromString(talker.bangBoxUserData(graph.getCoreName(), bbName, prefix + suffix)); - } catch (CommandException e) { - if (e.getCode().equals("NOSUCHBANGBOXUSERDATA")) { - logger.log(Level.FINER, - "No such data {0}{1} on !-box {2} of graph {3}", - new Object[]{prefix, suffix, bbName, graph.getCoreName()}); - } else { - logger.log(Level.WARNING, "Could not get data " + - prefix + suffix + " on !-box " + bbName + - " of graph " + graph.getCoreName(), e); - } - return null; - } - } - - public void setBangBoxUserData(CoreGraph graph, String bbName, T data) throws CoreException { - String dataString = serializer.toString(data); - if (dataString == null) { - return; - } - try { - talker.setBangBoxUserData(graph.getCoreName(), bbName, prefix + suffix, dataString); - } catch (CommandException e) { - logger.log(Level.WARNING, "Could not set user data " + - prefix + suffix + " on !-box " + bbName + - " of graph " + graph.getCoreName(), e); - } - } - - public void deleteBangBoxUserData(CoreGraph graph, String bbName) throws CoreException { - try { - talker.deleteBangBoxUserData(graph.getCoreName(), bbName, prefix + suffix); - } catch (CommandException e) { - logger.log(Level.WARNING, "Cound not delete user data " + - prefix + suffix + " on !-box " + bbName + - " of graph " + graph.getCoreName(), e); - } - } -} diff --git a/gui/src/quanto/core/protocol/userdata/PositionGraphUserDataSerializer.java b/gui/src/quanto/core/protocol/userdata/PositionGraphUserDataSerializer.java deleted file mode 100644 index 0f1158cb..00000000 --- a/gui/src/quanto/core/protocol/userdata/PositionGraphUserDataSerializer.java +++ /dev/null @@ -1,19 +0,0 @@ -package quanto.core.protocol.userdata; - -import java.awt.geom.Point2D; -import quanto.core.protocol.CoreTalker; -import quanto.core.protocol.userdata.dataserialization.Point2DDataSerializer; - -/** - * User: benjaminfrot - * Date: 7/21/12 - */ -public class PositionGraphUserDataSerializer extends GraphUserDataSerializer { - - private static String suffix = "position"; - private static Point2DDataSerializer serializer = new Point2DDataSerializer(); - - public PositionGraphUserDataSerializer(CoreTalker talker) { - super(talker, serializer, suffix); - } -} diff --git a/gui/src/quanto/core/protocol/userdata/QuantoAppUserDataSerializer.java b/gui/src/quanto/core/protocol/userdata/QuantoAppUserDataSerializer.java deleted file mode 100644 index 3b718a55..00000000 --- a/gui/src/quanto/core/protocol/userdata/QuantoAppUserDataSerializer.java +++ /dev/null @@ -1,15 +0,0 @@ -package quanto.core.protocol.userdata; - -import quanto.core.protocol.CoreTalker; - -/** - * User: benjaminfrot - * Date: 7/21/12 - */ -public class QuantoAppUserDataSerializer { - protected String prefix = "quanto-gui:"; - protected CoreTalker talker = null; - public QuantoAppUserDataSerializer(CoreTalker talker) { - this.talker = talker; - } -} diff --git a/gui/src/quanto/core/protocol/userdata/RulePriorityRuleUserDataSerializer.java b/gui/src/quanto/core/protocol/userdata/RulePriorityRuleUserDataSerializer.java deleted file mode 100644 index 3132e4f0..00000000 --- a/gui/src/quanto/core/protocol/userdata/RulePriorityRuleUserDataSerializer.java +++ /dev/null @@ -1,18 +0,0 @@ -package quanto.core.protocol.userdata; - -import quanto.core.protocol.CoreTalker; -import 
quanto.core.protocol.userdata.dataserialization.IntegerDataSerializer; - -/** - * User: benjaminfrot - * Date: 7/21/12 - */ -public class RulePriorityRuleUserDataSerializer extends RuleUserDataSerializer { - - static String suffix = "priority"; - static IntegerDataSerializer serializer = new IntegerDataSerializer(); - - public RulePriorityRuleUserDataSerializer(CoreTalker talker) { - super(talker, serializer, suffix); - } -} diff --git a/gui/src/quanto/core/protocol/userdata/RuleUserDataSerializer.java b/gui/src/quanto/core/protocol/userdata/RuleUserDataSerializer.java deleted file mode 100644 index d781cff4..00000000 --- a/gui/src/quanto/core/protocol/userdata/RuleUserDataSerializer.java +++ /dev/null @@ -1,47 +0,0 @@ -package quanto.core.protocol.userdata; - -import java.util.logging.Level; -import java.util.logging.Logger; -import quanto.core.CoreException; -import quanto.core.protocol.CoreTalker; -import quanto.core.protocol.userdata.dataserialization.DataSerializer; - -/** - * User: benjaminfrot - * Date: 7/21/12 - */ -public class RuleUserDataSerializer extends QuantoAppUserDataSerializer { - - private String suffix; - private DataSerializer serializer; - private final static Logger logger = Logger.getLogger("quanto.protocol.userdata"); - - public RuleUserDataSerializer(CoreTalker talker, DataSerializer serializer, String suffix) { - super(talker); - this.suffix = suffix; - this.serializer = serializer; - } - - public void setRuleUserData(String ruleName, T data) { - String dataString = serializer.toString(data); - if (dataString == null) { - return; - } - try { - talker.setRuleUserData(ruleName, prefix + suffix,dataString); - } catch (CoreException e) { - logger.log(Level.WARNING, "Could not set user data " + prefix + suffix + " on " + - "rule " + ruleName, e); - } - } - - public T getRuleUserData(String ruleName) { - try { - return serializer.fromString(talker.ruleUserData(ruleName, prefix + suffix)); - } catch (CoreException e) { - logger.log(Level.WARNING, "Could not get user data " + prefix + suffix + " on " + - " rule " + ruleName, e); - return null; - } - } -} diff --git a/gui/src/quanto/core/protocol/userdata/dataserialization/DataSerializer.java b/gui/src/quanto/core/protocol/userdata/dataserialization/DataSerializer.java deleted file mode 100644 index fad2412e..00000000 --- a/gui/src/quanto/core/protocol/userdata/dataserialization/DataSerializer.java +++ /dev/null @@ -1,14 +0,0 @@ -package quanto.core.protocol.userdata.dataserialization; - -/** - * User: benjaminfrot - * Date: 7/21/12 - */ - -/* - Convert data to and from String -*/ -public interface DataSerializer { - public D fromString(String data); - public String toString(D data); -} diff --git a/gui/src/quanto/core/protocol/userdata/dataserialization/IntegerDataSerializer.java b/gui/src/quanto/core/protocol/userdata/dataserialization/IntegerDataSerializer.java deleted file mode 100644 index 8a880940..00000000 --- a/gui/src/quanto/core/protocol/userdata/dataserialization/IntegerDataSerializer.java +++ /dev/null @@ -1,15 +0,0 @@ -package quanto.core.protocol.userdata.dataserialization; - -/** - * User: benjaminfrot - * Date: 7/21/12 - */ -public class IntegerDataSerializer implements DataSerializer { - public Integer fromString(String data) { - return Integer.parseInt(data); - } - - public String toString(Integer data) { - return Integer.toString(data); - } -} diff --git a/gui/src/quanto/core/protocol/userdata/dataserialization/Point2DDataSerializer.java 
b/gui/src/quanto/core/protocol/userdata/dataserialization/Point2DDataSerializer.java deleted file mode 100644 index 003d1e38..00000000 --- a/gui/src/quanto/core/protocol/userdata/dataserialization/Point2DDataSerializer.java +++ /dev/null @@ -1,36 +0,0 @@ -package quanto.core.protocol.userdata.dataserialization; - -import java.awt.geom.Point2D; -import java.util.StringTokenizer; -import java.util.logging.Level; -import java.util.logging.Logger; - -/** - * User: benjaminfrot - * Date: 7/21/12 - */ -public class Point2DDataSerializer implements DataSerializer { - - private final static Logger logger = Logger.getLogger("quanto.protocol.userdata.dataserialization"); - - public Point2D fromString(String data) { - - StringTokenizer tk = new java.util.StringTokenizer(data, ":"); - int X = Integer.parseInt(tk.nextToken()); - int Y = Integer.parseInt(tk.nextToken()); - Point2D p = new Point2D.Double((double) X, (double) Y); - - return p; - } - - public String toString(Point2D data) { - try { - int X = (int) data.getX(); - int Y = (int) data.getY(); - return X + ":" + Y; - } catch (NullPointerException e) { - logger.log(Level.WARNING, "Could not serialize user data of type Point2D"); - return null; - } - } -} diff --git a/gui/src/quanto/core/protocol/userdata/dataserialization/StringDataSerializer.java b/gui/src/quanto/core/protocol/userdata/dataserialization/StringDataSerializer.java deleted file mode 100644 index 069fdc9e..00000000 --- a/gui/src/quanto/core/protocol/userdata/dataserialization/StringDataSerializer.java +++ /dev/null @@ -1,18 +0,0 @@ -package quanto.core.protocol.userdata.dataserialization; - -/** - * User: benjaminfrot - * Date: 7/21/12 - */ - -//Identity - -public class StringDataSerializer implements DataSerializer { - public String fromString(String data) { - return data; - } - - public String toString(String data) { - return data; - } -} diff --git a/gui/src/quanto/gui/CommandManager.java b/gui/src/quanto/gui/CommandManager.java deleted file mode 100644 index 072fe9a1..00000000 --- a/gui/src/quanto/gui/CommandManager.java +++ /dev/null @@ -1,133 +0,0 @@ -package quanto.gui; - -import com.sun.jaf.ui.ActionManager; -import java.util.Collections; -import java.util.LinkedList; - -/** - * Allows commands to be directed to the appropriate place - * - * These are the document-centric commands that will be passed - * on to the document view. - * - * FIXME: this should do more (eg: methods to enable/disable actions - * and trigger commands). It should possibly do the multiplexing - * instead of referring to ViewPort. - * - * @author Alex Merry - */ -public class CommandManager { - - /** - * Action name entries - * - * Having these all in one place makes sure we don't spread typos around - * the code. Hence all other code should refer to - * CommandManager.Command.Foo, not the string "foo-command". - * - * This class also deals with registering the callbacks properly with - * ActionManager. 
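
The user-data serializers above store GUI state as per-graph strings keyed by a prefix plus a suffix: QuantoAppUserDataSerializer supplies the "quanto-gui:" prefix, PositionGraphUserDataSerializer the "position" suffix, and Point2DDataSerializer encodes a point as the integer string "X:Y". A minimal round-trip sketch under those conventions; this is a hypothetical class, not from the deleted sources, and it uses String.split where the original used StringTokenizer:

import java.awt.geom.Point2D;

final class PositionUserDataSketch {
    // prefix from QuantoAppUserDataSerializer + suffix from PositionGraphUserDataSerializer
    static final String KEY = "quanto-gui:" + "position";

    static String toUserData(Point2D p) {
        return (int) p.getX() + ":" + (int) p.getY();   // "X:Y", as in Point2DDataSerializer
    }

    static Point2D fromUserData(String data) {
        String[] parts = data.split(":", 2);
        return new Point2D.Double(Integer.parseInt(parts[0]),
                                  Integer.parseInt(parts[1]));
    }

    public static void main(String[] args) {
        String stored = toUserData(new Point2D.Double(40, 25));          // "40:25"
        System.out.println(KEY + " -> " + stored + " -> " + fromUserData(stored));
    }
}
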
- */ - public enum Command { - - Save("save-command"), - SaveAs("save-as-command"), - Undo("undo-command"), - Redo("redo-command"), - UndoRewrite("undo-rewrite-command"), - RedoRewrite("redo-rewrite-command"), - Cut("cut-command"), - Copy("copy-command"), - Paste("paste-command"), - SelectAll("select-all-command"), - DeselectAll("deselect-all-command"), - //UseRule("use-rule-command"), - - Refresh("refresh-graph-command"), - Relayout("relayout-graph-command"), - Abort("abort-command"), - ExportToPdf("export-to-pdf-command"), - SelectMode("select-mode-command"), - DirectedEdgeMode("directed-edge-mode-command"), - UndirectedEdgeMode("undirected-edge-mode-command"), - LatexToClipboard("latex-to-clipboard-command"), - AddBoundaryVertex("add-boundary-vertex-command"), - ShowRewrites("show-rewrites-command"), - Normalise("normalise-command"), - FastNormalise("fast-normalise-command"), - BangVertices("bang-vertices-command"), - UnbangVertices("unbang-vertices-command"), - DropBangBox("drop-bang-box-command"), - KillBangBox("kill-bang-box-command"), - DuplicateBangBox("duplicate-bang-box-command"), - DumpHilbertTermAsText("hilbert-as-text-command"), - DumpHilbertTermAsMathematica("hilbert-as-mathematica-command"); - - /** - * Create a new command action - * @param actionName The action name (as in resources/actions.xml) - */ - private Command(String actionName) { - this.actionName = actionName; - } - private final String actionName; - - @Override - public String toString() { - return actionName; - } - - public String actionName() { - return actionName; - } - - public boolean matches(String command) { - return actionName.equals(command); - } - } - - public class Delegate { - - private Delegate() { - } - - public void executeCommand(String command) { - if (viewPort != null) { - viewPort.executeCommand(command); - } - } - - public void executeCommand(String command, boolean state) { - if (viewPort != null && state) { - viewPort.executeCommand(command); - } - } - } - private Delegate delegate = new Delegate(); - private ActionManager actionManager; - private ViewPort viewPort; - - public CommandManager(ActionManager actionManager) { - this.actionManager = actionManager; - LinkedList actions = new LinkedList(); - for (Command act : Command.values()) { - actions.add(act.toString()); - } - actionManager.registerGenericCallback(actions, delegate, "executeCommand"); - } - - public void registerCommand(String commandName) { - actionManager.registerGenericCallback( - Collections.singleton(commandName), - delegate, - "executeCommand"); - } - - public void setViewPort(ViewPort viewPort) { - this.viewPort = viewPort; - } - - public ViewPort getViewPort() { - return viewPort; - } -} diff --git a/gui/src/quanto/gui/ConsoleView.java b/gui/src/quanto/gui/ConsoleView.java deleted file mode 100644 index 888f1f32..00000000 --- a/gui/src/quanto/gui/ConsoleView.java +++ /dev/null @@ -1,276 +0,0 @@ -package quanto.gui; - -import quanto.core.ConsoleInterface; -import quanto.core.Completer; -import java.awt.BorderLayout; -import java.awt.Dimension; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.awt.event.FocusEvent; -import java.awt.event.FocusListener; -import java.awt.event.KeyAdapter; -import java.awt.event.KeyEvent; -import java.util.SortedSet; -import java.util.Stack; - -import javax.swing.*; -import javax.swing.event.CaretEvent; -import javax.swing.event.CaretListener; -import javax.swing.text.JTextComponent; -import quanto.core.CoreException; -import 
quanto.core.ParseException; -import quanto.core.protocol.CoreTalker; - -public class ConsoleView extends InteractiveView { - - private static final long serialVersionUID = -5833674157230451213L; - private final ConsoleInterface coreConsole; - private JTextField input; - private JTextArea output; - private Stack history; - private int hpointer; - private JTextComponent lastFocusOwner = null; - - private class CommandEntryListener - extends KeyAdapter - implements ActionListener { - - public void actionPerformed(final ActionEvent e) { - submitCommand(input); - input.requestFocusInWindow(); - } - - @Override - public void keyReleased(final KeyEvent e) { - if (e.isConsumed()) { - return; - } - - JTextField tf = (JTextField) e.getSource(); - switch (e.getKeyCode()) { - case KeyEvent.VK_UP: - if (hpointer > 0) { - tf.setText(history.get(--hpointer)); - } - e.consume(); - break; - case KeyEvent.VK_DOWN: - if (hpointer < history.size() - 1) { - tf.setText(history.get(++hpointer)); - } - e.consume(); - break; - case KeyEvent.VK_TAB: - showCompletions(tf); - e.consume(); - break; - case KeyEvent.VK_ENTER: - submitCommand(tf); - e.consume(); - break; - } - } - - private void showCompletions(JTextField tf) { - if (coreConsole == null) { - return; - } - - SortedSet compl = coreConsole.getCompleter().getCompletions(tf.getText()); - if (compl.size() == 1) { - tf.setText(compl.first()); - } else if (compl.size() > 1) { - tf.setText(Completer.greatestCommonPrefix(compl)); - println(); - for (String c : compl) { - println(c); - } - prompt(); - } - } - - private void submitCommand(JTextField tf) { - execCommand(tf.getText()); - tf.setText(""); - } - } - private FocusListener focusListener = new FocusListener() { - - public void focusGained(FocusEvent e) { - lastFocusOwner = (JTextComponent) e.getComponent(); - updateSelectionCommands(); - } - - public void focusLost(FocusEvent e) { - if (!e.isTemporary()) { - lastFocusOwner = null; - updateSelectionCommands(); - } - } - }; - private CaretListener caretListener = new CaretListener() { - - public void caretUpdate(CaretEvent e) { - if (isAttached()) { - updateSelectionCommands(); - } - } - }; - - private void print(String text) { - output.append(text); - output.setCaretPosition(output.getDocument().getLength() - 1); - } - - private void println(String text) { - print(text + "\n"); - } - - private void println() { - print("\n"); - } - - private void prompt() { - print("quanto:> "); - } - - private void output(String text) { - println(text.trim()); - prompt(); - } - - private void execCommand(String text) { - if (coreConsole == null) { - return; - } - - try { - println(text); - coreConsole.inputCommandAsync(text); - if (history.isEmpty() || !history.peek().equals(text)) { - history.push(text); - } - hpointer = history.size(); - } catch (CoreException ex) { - output("Error: " + ex.getMessage()); - } catch (ParseException ex) { - output("Failed to parse command: " + ex.getMessage()); - } - } - - public ConsoleView(CoreTalker core) { - super("console"); - - JPanel console = new JPanel(new BorderLayout()); - output = new JTextArea(); - output.setEditable(false); - output.addFocusListener(focusListener); - output.addCaretListener(caretListener); - - history = new Stack(); - input = new JTextField(); - input.setFocusTraversalKeysEnabled(false); - input.addFocusListener(focusListener); - input.addCaretListener(caretListener); - CommandEntryListener listener = new CommandEntryListener(); - input.addKeyListener(listener); - - JButton execButton = new JButton("Exec"); - 
execButton.addActionListener(listener); - - JPanel commandPane = new JPanel(new BorderLayout()); - commandPane.add(input, BorderLayout.CENTER); - commandPane.add(execButton, BorderLayout.LINE_END); - - JScrollPane scroll = new JScrollPane(output); - scroll.setPreferredSize(new Dimension(800, 600)); - console.add(scroll, BorderLayout.CENTER); - console.add(commandPane, BorderLayout.PAGE_END); - setMainComponent(console); - - this.coreConsole = new ConsoleInterface(core); - coreConsole.setResponseListener(new ConsoleInterface.ResponseListener() { - - public void responseReceived(String response) { - output(response); - } - }); - - prompt(); - } - - private void updateSelectionCommands() { - if (isAttached()) { - ViewPort vp = getViewPort(); - if (lastFocusOwner == null) { - vp.setCommandEnabled(CommandManager.Command.Cut, false); - vp.setCommandEnabled(CommandManager.Command.Copy, false); - vp.setCommandEnabled(CommandManager.Command.Paste, false); - vp.setCommandEnabled(CommandManager.Command.SelectAll, false); - vp.setCommandEnabled(CommandManager.Command.DeselectAll, false); - } else { - boolean hasSelection = (lastFocusOwner.getSelectionEnd() - lastFocusOwner.getSelectionStart()) != 0; - vp.setCommandEnabled(CommandManager.Command.Cut, - lastFocusOwner.isEditable() - && hasSelection); - vp.setCommandEnabled(CommandManager.Command.Copy, - hasSelection); - vp.setCommandEnabled(CommandManager.Command.Paste, - lastFocusOwner.isEditable()); - vp.setCommandEnabled(CommandManager.Command.SelectAll, - true); - vp.setCommandEnabled(CommandManager.Command.DeselectAll, - true); - } - } - } - - @Override - public void grabFocus() { - input.grabFocus(); - } - - @Override - public void attached(ViewPort vp) { - // refuse to allow us to be closed - vp.preventViewClosure(); - input.requestFocusInWindow(); - super.attached(vp); - } - - @Override - public void detached(ViewPort vp) { - vp.setCommandEnabled(CommandManager.Command.Cut, false); - vp.setCommandEnabled(CommandManager.Command.Copy, false); - vp.setCommandEnabled(CommandManager.Command.Paste, false); - vp.setCommandEnabled(CommandManager.Command.SelectAll, false); - vp.setCommandEnabled(CommandManager.Command.DeselectAll, false); - super.detached(vp); - } - - @Override - public void commandTriggered(String command) { - if (CommandManager.Command.Cut.matches(command)) { - if (lastFocusOwner != null) { - lastFocusOwner.cut(); - } - } else if (CommandManager.Command.Copy.matches(command)) { - if (lastFocusOwner != null) { - lastFocusOwner.copy(); - } - } else if (CommandManager.Command.Paste.matches(command)) { - if (lastFocusOwner != null) { - lastFocusOwner.paste(); - } - } else if (CommandManager.Command.SelectAll.matches(command)) { - if (lastFocusOwner != null) { - lastFocusOwner.selectAll(); - } - } else if (CommandManager.Command.DeselectAll.matches(command)) { - if (lastFocusOwner != null) { - lastFocusOwner.select(0, 0); - } - } - super.commandTriggered(command); - } -} diff --git a/gui/src/quanto/gui/DetailedErrorDialog.java b/gui/src/quanto/gui/DetailedErrorDialog.java deleted file mode 100644 index d2aa73ab..00000000 --- a/gui/src/quanto/gui/DetailedErrorDialog.java +++ /dev/null @@ -1,505 +0,0 @@ -package quanto.gui; - -import java.awt.BorderLayout; -import java.awt.Color; -import java.awt.Component; -import java.awt.Dimension; -import java.awt.Font; -import java.awt.Graphics; -import java.awt.Insets; -import java.awt.Rectangle; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.awt.event.ItemEvent; 
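
The deleted ConsoleView keeps submitted commands on a stack and walks it with an index (hpointer): VK_UP steps back, VK_DOWN steps forward, and each submitted command resets the pointer to the end while skipping consecutive duplicates. A stripped-down sketch of just that history behaviour; this is a hypothetical class, not the deleted ConsoleView:

import java.util.ArrayList;
import java.util.List;

final class CommandHistorySketch {
    private final List<String> history = new ArrayList<>();
    private int pointer = 0;                         // mirrors ConsoleView's hpointer

    void submit(String command) {
        if (history.isEmpty() || !history.get(history.size() - 1).equals(command)) {
            history.add(command);                    // skip consecutive duplicates
        }
        pointer = history.size();
    }

    String up() {                                    // VK_UP: previous command, if any
        return pointer > 0 ? history.get(--pointer) : null;
    }

    String down() {                                  // VK_DOWN: next command, if any
        return pointer < history.size() - 1 ? history.get(++pointer) : null;
    }
}
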
-import java.awt.event.ItemListener; -import java.net.URL; -import javax.swing.AbstractAction; -import javax.swing.AbstractButton; -import javax.swing.BorderFactory; -import javax.swing.BoxLayout; -import javax.swing.ImageIcon; -import javax.swing.JButton; -import javax.swing.JComponent; -import javax.swing.JOptionPane; -import javax.swing.JPanel; -import javax.swing.JRadioButton; -import javax.swing.JTextArea; -import javax.swing.border.Border; -import javax.swing.border.TitledBorder; -import quanto.core.CoreException; - -/** - * - * @author alemer - */ -public class DetailedErrorDialog { - - public static void showCoreErrorDialog(Component parent, String message, CoreException ex) { - showDetailedErrorDialog(parent, "Core Error", message, ex); - } - - public static void showDetailedErrorDialog( - Component parent, - String title, - String message, - String details) { - showDetailedMessageDialog(parent, title, message, details, JOptionPane.ERROR_MESSAGE); - } - - public static void showDetailedErrorDialog( - Component parent, - String title, - String message, - Throwable ex) { - showDetailedMessageDialog(parent, title, message, ex.getLocalizedMessage(), JOptionPane.ERROR_MESSAGE); - } - - public static void showDetailedMessageDialog( - Component parent, - String title, - String message, - String details, - int messageType) { - JPanel panel = new JPanel(); - panel.setBorder(new TitledBorder("Details")); - panel.setLayout(new BoxLayout(panel, BoxLayout.PAGE_AXIS)); - JTextArea detailArea = new JTextArea(details); - detailArea.setEditable(false); - panel.add(detailArea); - - Object[] contents = new Object[]{ - message, - panel - }; - - JOptionPane.showMessageDialog(parent, contents, title, messageType); - } -} - -/** - * * Copyright (c) 2004 Memorial Sloan-Kettering Cancer Center - * * - * * Code written by: Gary Bader - * * Authors: Gary Bader, Ethan Cerami, Chris Sander - * * - * * This library is free software; you can redistribute it and/or modify it - * * under the terms of the GNU Lesser General Public License as published - * * by the Free Software Foundation; either version 2.1 of the License, or - * * any later version. - * * - * * This library is distributed in the hope that it will be useful, but - * * WITHOUT ANY WARRANTY, WITHOUT EVEN THE IMPLIED WARRANTY OF - * * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. The software and - * * documentation provided hereunder is on an "as is" basis, and - * * Memorial Sloan-Kettering Cancer Center - * * has no obligations to provide maintenance, support, - * * updates, enhancements or modifications. In no event shall the - * * Memorial Sloan-Kettering Cancer Center - * * be liable to any party for direct, indirect, special, - * * incidental or consequential damages, including lost profits, arising - * * out of the use of this software and its documentation, even if - * * Memorial Sloan-Kettering Cancer Center - * * has been advised of the possibility of such damage. See - * * the GNU Lesser General Public License for more details. - * * - * * You should have received a copy of the GNU Lesser General Public License - * * along with this library; if not, write to the Free Software Foundation, - * * Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. 
- * - * User: Vuk Pavlovic - * Date: Nov 29, 2006 - * Time: 5:34:46 PM - * Description: The user-triggered collapsable panel containing the component (trigger) in the titled border - */ -/** - * The user-triggered collapsable panel containing the component (trigger) in the titled border - */ -class CollapsiblePanel extends JPanel { - - public interface CollapseListener extends java.util.EventListener { - - public void collapsed(); - - public void expanded(); - } - //Border - CollapsableTitledBorder border; // includes upper left component and line type - Border collapsedBorderLine = BorderFactory.createEmptyBorder(2, 2, 2, 2); // no border - Border expandedBorderLine = null; // because this is null, default is used, etched lowered border on MAC - //Title - AbstractButton titleComponent; // displayed in the titled border - //Expand/Collapse button - final static int COLLAPSED = 0, EXPANDED = 1; // image States - ImageIcon[] iconArrow = createExpandAndCollapseIcon(); - JButton arrow = createArrowButton(); - //Content Pane - JPanel panel; - //Container State - boolean collapsed; // stores curent state of the collapsable panel - - /** - * Constructor for an option button controlled collapsable panel. - * This is useful when a group of options each have unique sub contents. The radio buttons should be created, - * grouped, and then used to construct their own collapsable panels. This way choosing a different option in - * the same option group will collapse all unselected options. Expanded panels draw a border around the - * contents and through the radio button in the fashion of a titled border. - * - * @param component Radio button that expands and collapses the panel based on if it is selected or not - */ - public CollapsiblePanel(JRadioButton component) { - component.addItemListener(new CollapsiblePanel.ExpandAndCollapseAction()); - titleComponent = component; - collapsed = !component.isSelected(); - commonConstructor(); - } - - /** - * Constructor for a label/button controlled collapsable panel. Displays a clickable title that resembles a - * native titled border except for an arrow on the right side indicating an expandable panel. The actual border - * only appears when the panel is expanded. - * - * @param text Title of the collapsable panel in string format, used to create a button with text and an arrow icon - */ - public CollapsiblePanel(String text) { - arrow.setText(text); - titleComponent = arrow; - collapsed = true; - commonConstructor(); - } - - /** - * Sets layout, creates the content panel and adds it and the title component to the container, - * all constructors have this procedure in common. - */ - private void commonConstructor() { - setLayout(new BorderLayout()); - - panel = new JPanel(); - panel.setLayout(new BorderLayout()); - - add(titleComponent, BorderLayout.CENTER); - add(panel, BorderLayout.CENTER); - - setCollapsed(collapsed); - - placeTitleComponent(); - } - - public void addCollapseListener(CollapseListener collapseListener) { - listenerList.add(CollapseListener.class, collapseListener); - } - - public void removeCollapseListener(CollapseListener collapseListener) { - listenerList.remove(CollapseListener.class, collapseListener); - } - - /** - * Sets the bounds of the border title component so that it is properly positioned. 
- */ - private void placeTitleComponent() { - Insets insets = this.getInsets(); - Rectangle containerRectangle = this.getBounds(); - Rectangle componentRectangle = border.getComponentRect(containerRectangle, insets); - titleComponent.setBounds(componentRectangle); - } - - public void setTitleComponentText(String text) { - if (titleComponent instanceof JButton) { - titleComponent.setText(text); - } - placeTitleComponent(); - } - - /** - * This class requires that all content be placed within a designated panel, this method returns that panel. - * - * @return panel The content panel - */ - public JPanel getContentPane() { - return panel; - } - - /** - * Collapses or expands the panel. This is done by adding or removing the content pane, - * alternating between a frame and empty border, and changing the title arrow. - * Also, the current state is stored in the collapsed boolean. - * - * @param collapse When set to true, the panel is collapsed, else it is expanded - */ - public void setCollapsed(boolean collapse) { - collapsed = collapse; - if (collapse) { - //collapse the panel, remove content and set border to empty border - remove(panel); - arrow.setIcon(iconArrow[COLLAPSED]); - border = new CollapsableTitledBorder(collapsedBorderLine, titleComponent); - - fireCollapsed(); - } else { - //expand the panel, add content and set border to titled border - add(panel, BorderLayout.CENTER); - arrow.setIcon(iconArrow[EXPANDED]); - border = new CollapsableTitledBorder(expandedBorderLine, titleComponent); - - fireExpanded(); - } - setBorder(border); - updateUI(); - } - - protected void fireCollapsed() { - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length - 2; i >= 0; i -= 2) { - if (listeners[i] == CollapseListener.class) { - ((CollapseListener) listeners[i + 1]).collapsed(); - } - } - } - - protected void fireExpanded() { - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length - 2; i >= 0; i -= 2) { - if (listeners[i] == CollapseListener.class) { - ((CollapseListener) listeners[i + 1]).expanded(); - } - } - } - - /** - * Returns the current state of the panel, collapsed (true) or expanded (false). - * - * @return collapsed Returns true if the panel is collapsed and false if it is expanded - */ - public boolean isCollapsed() { - return collapsed; - } - - /** - * Returns an ImageIcon array with arrow images used for the different states of the panel. - * - * @return iconArrow An ImageIcon array holding the collapse and expanded versions of the right hand side arrow - */ - private ImageIcon[] createExpandAndCollapseIcon() { - ImageIcon[] iconArrow = new ImageIcon[2]; - URL iconURL; - - iconURL = getClass().getResource("/images/arrow_collapsed.gif"); - if (iconURL != null) { - iconArrow[COLLAPSED] = new ImageIcon(iconURL); - } - iconURL = getClass().getResource("/images/arrow_expanded.gif"); - if (iconURL != null) { - iconArrow[EXPANDED] = new ImageIcon(iconURL); - } - return iconArrow; - } - - /** - * Returns a button with an arrow icon and a collapse/expand action listener. 
- * - * @return button Button which is used in the titled border component - */ - private JButton createArrowButton() { - JButton button = new JButton("arrow", iconArrow[COLLAPSED]); - button.setBorder(BorderFactory.createEmptyBorder(0, 1, 5, 1)); - button.setVerticalTextPosition(AbstractButton.CENTER); - button.setHorizontalTextPosition(AbstractButton.LEFT); - button.setMargin(new Insets(0, 0, 3, 0)); - - //We want to use the same font as those in the titled border font - Font font = BorderFactory.createTitledBorder("Sample").getTitleFont(); - Color color = BorderFactory.createTitledBorder("Sample").getTitleColor(); - button.setFont(font); - button.setForeground(color); - button.setFocusable(false); - button.setContentAreaFilled(false); - - button.addActionListener(new CollapsiblePanel.ExpandAndCollapseAction()); - - return button; - } - - /** - * Handles expanding and collapsing of extra content on the user's click of the titledBorder component. - */ - private class ExpandAndCollapseAction extends AbstractAction implements ActionListener, ItemListener { - - public void actionPerformed(ActionEvent e) { - setCollapsed(!isCollapsed()); - } - - public void itemStateChanged(ItemEvent e) { - setCollapsed(!isCollapsed()); - } - } - - /** - * Special titled border that includes a component in the title area - */ - private class CollapsableTitledBorder extends TitledBorder { - - JComponent component; - //Border border; - - public CollapsableTitledBorder(JComponent component) { - this(null, component, LEFT, TOP); - } - - public CollapsableTitledBorder(Border border) { - this(border, null, LEFT, TOP); - } - - public CollapsableTitledBorder(Border border, JComponent component) { - this(border, component, LEFT, TOP); - } - - public CollapsableTitledBorder(Border border, JComponent component, int titleJustification, int titlePosition) { - //TitledBorder needs border, title, justification, position, font, and color - super(border, null, titleJustification, titlePosition, null, null); - this.component = component; - if (border == null) { - this.border = super.getBorder(); - } - } - - @Override - public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) { - Rectangle borderR = new Rectangle(x + EDGE_SPACING, y + EDGE_SPACING, width - (EDGE_SPACING * 2), height - (EDGE_SPACING * 2)); - Insets borderInsets; - if (border != null) { - borderInsets = border.getBorderInsets(c); - } else { - borderInsets = new Insets(0, 0, 0, 0); - } - - Rectangle rect = new Rectangle(x, y, width, height); - Insets insets = getBorderInsets(c); - Rectangle compR = getComponentRect(rect, insets); - int diff; - switch (titlePosition) { - case ABOVE_TOP: - diff = compR.height + TEXT_SPACING; - borderR.y += diff; - borderR.height -= diff; - break; - case TOP: - case DEFAULT_POSITION: - diff = insets.top / 2 - borderInsets.top - EDGE_SPACING; - borderR.y += diff; - borderR.height -= diff; - break; - case BELOW_TOP: - case ABOVE_BOTTOM: - break; - case BOTTOM: - diff = insets.bottom / 2 - borderInsets.bottom - EDGE_SPACING; - borderR.height -= diff; - break; - case BELOW_BOTTOM: - diff = compR.height + TEXT_SPACING; - borderR.height -= diff; - break; - } - border.paintBorder(c, g, borderR.x, borderR.y, borderR.width, borderR.height); - Color col = g.getColor(); - g.setColor(c.getBackground()); - g.fillRect(compR.x, compR.y, compR.width, compR.height); - g.setColor(col); - } - - @Override - public Insets getBorderInsets(Component c, Insets insets) { - Insets borderInsets; - if (border != null) { - 
borderInsets = border.getBorderInsets(c); - } else { - borderInsets = new Insets(0, 0, 0, 0); - } - insets.top = EDGE_SPACING + TEXT_SPACING + borderInsets.top; - insets.right = EDGE_SPACING + TEXT_SPACING + borderInsets.right; - insets.bottom = EDGE_SPACING + TEXT_SPACING + borderInsets.bottom; - insets.left = EDGE_SPACING + TEXT_SPACING + borderInsets.left; - - if (c == null || component == null) { - return insets; - } - - int compHeight = component.getPreferredSize().height; - - switch (titlePosition) { - case ABOVE_TOP: - insets.top += compHeight + TEXT_SPACING; - break; - case TOP: - case DEFAULT_POSITION: - insets.top += Math.max(compHeight, borderInsets.top) - borderInsets.top; - break; - case BELOW_TOP: - insets.top += compHeight + TEXT_SPACING; - break; - case ABOVE_BOTTOM: - insets.bottom += compHeight + TEXT_SPACING; - break; - case BOTTOM: - insets.bottom += Math.max(compHeight, borderInsets.bottom) - borderInsets.bottom; - break; - case BELOW_BOTTOM: - insets.bottom += compHeight + TEXT_SPACING; - break; - } - return insets; - } - - public JComponent getTitleComponent() { - return component; - } - - public void setTitleComponent(JComponent component) { - this.component = component; - } - - public Rectangle getComponentRect(Rectangle rect, Insets borderInsets) { - Dimension compD = component.getPreferredSize(); - Rectangle compR = new Rectangle(0, 0, compD.width, compD.height); - switch (titlePosition) { - case ABOVE_TOP: - compR.y = EDGE_SPACING; - break; - case TOP: - case DEFAULT_POSITION: - if (titleComponent instanceof JButton) { - compR.y = EDGE_SPACING + (borderInsets.top - EDGE_SPACING - TEXT_SPACING - compD.height) / 2; - } else if (titleComponent instanceof JRadioButton) { - compR.y = (borderInsets.top - EDGE_SPACING - TEXT_SPACING - compD.height) / 2; - } - break; - case BELOW_TOP: - compR.y = borderInsets.top - compD.height - TEXT_SPACING; - break; - case ABOVE_BOTTOM: - compR.y = rect.height - borderInsets.bottom + TEXT_SPACING; - break; - case BOTTOM: - compR.y = rect.height - borderInsets.bottom + TEXT_SPACING + (borderInsets.bottom - EDGE_SPACING - TEXT_SPACING - compD.height) / 2; - break; - case BELOW_BOTTOM: - compR.y = rect.height - compD.height - EDGE_SPACING; - break; - } - switch (titleJustification) { - case LEFT: - case DEFAULT_JUSTIFICATION: - //compR.x = TEXT_INSET_H + borderInsets.left; - compR.x = TEXT_INSET_H + borderInsets.left - EDGE_SPACING; - break; - case RIGHT: - compR.x = rect.width - borderInsets.right - TEXT_INSET_H - compR.width; - break; - case CENTER: - compR.x = (rect.width - compR.width) / 2; - break; - } - return compR; - } - } -} \ No newline at end of file diff --git a/gui/src/quanto/gui/DuplicateTheoryException.java b/gui/src/quanto/gui/DuplicateTheoryException.java deleted file mode 100644 index 7dc626b6..00000000 --- a/gui/src/quanto/gui/DuplicateTheoryException.java +++ /dev/null @@ -1,19 +0,0 @@ -package quanto.gui; - -/** - * - * @author alex - */ -public class DuplicateTheoryException extends Exception { - - private String theoryName; - - public DuplicateTheoryException(String theoryName) { - super("There is already a theory called \"" + theoryName + "\""); - this.theoryName = theoryName; - } - - public String getTheoryName() { - return theoryName; - } -} diff --git a/gui/src/quanto/gui/GraphVisualizationViewer.java b/gui/src/quanto/gui/GraphVisualizationViewer.java deleted file mode 100644 index 6192849a..00000000 --- a/gui/src/quanto/gui/GraphVisualizationViewer.java +++ /dev/null @@ -1,270 +0,0 @@ -package quanto.gui; - 
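
The deleted CollapsiblePanel notifies registered CollapseListeners whenever setCollapsed toggles the content pane in or out. A usage fragment, illustrative only, assuming the class were still on the classpath (and in the same package, since the class is package-private):

CollapsiblePanel details = new CollapsiblePanel("Details");
details.getContentPane().add(new javax.swing.JTextArea("stack trace goes here"));
details.addCollapseListener(new CollapsiblePanel.CollapseListener() {
    public void collapsed() { System.out.println("details collapsed"); }
    public void expanded()  { System.out.println("details expanded");  }
});
details.setCollapsed(false);   // expand programmatically; fires expanded()
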
-import javax.swing.event.ChangeEvent; - -import quanto.core.data.BangBox; -import quanto.core.data.Vertex; -import quanto.core.data.Edge; -import quanto.core.data.CoreGraph; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.algorithms.layout.util.Relaxer; -import edu.uci.ics.jung.graph.util.BalancedEdgeIndexFunction; -import edu.uci.ics.jung.contrib.visualization.decorators.MixedShapeTransformer; -import edu.uci.ics.jung.contrib.visualization.BangBoxGraphVisualizationViewer; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.renderers.Renderer.VertexLabel; -import edu.uci.ics.jung.visualization.transform.MutableTransformer; -import java.awt.Color; -import java.awt.Dimension; -import java.awt.Shape; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.Collection; - -import javax.swing.event.ChangeListener; -import org.apache.commons.collections15.Predicate; - -import quanto.gui.graphhelpers.QBangBoxLabeler; -import quanto.gui.graphhelpers.BackdropPaintable; -import quanto.gui.graphhelpers.QVertexAngleLabeler; -import quanto.gui.graphhelpers.QVertexColorTransformer; -import quanto.gui.graphhelpers.QVertexIconTransformer; -import quanto.gui.graphhelpers.QVertexLabelTransformer; -import quanto.gui.graphhelpers.QVertexRenderer; -import quanto.gui.graphhelpers.QVertexShapeTransformer; -import quanto.gui.graphhelpers.BangBoxRenderer; - -/** - * Displays a graph. - * - * Listen to state changes to know when the size changed. - * - * @author alex - */ -public class GraphVisualizationViewer - extends BangBoxGraphVisualizationViewer { - - private static final long serialVersionUID = -1723894723956293847L; - private CoreGraph graph; - private BackdropPaintable boundsPaint; - private boolean boundsPaintingEnabled = false; - - public GraphVisualizationViewer(CoreGraph graph) { - this(QuantoApp.useExperimentalLayout ? 
new JavaQuantoDotLayout(graph) : new QuantoDotLayout(graph)); - } - - public GraphVisualizationViewer(Layout layout) { - super(layout); - if (!(layout.getGraph() instanceof CoreGraph)) { - throw new IllegalArgumentException("Only QuantoGraphs are supported"); - } - setCoreGraph((CoreGraph) layout.getGraph()); - layout.initialize(); - setBackground(Color.white); - - setupRendering(); - - setPreferredSize(calculateGraphSize()); - - graph.addChangeListener(new ChangeListener() { - - public void stateChanged(ChangeEvent e) { - modifyLayout(); - fireStateChanged(); - } - }); - } - - private void setupRendering() { - getRenderContext().setParallelEdgeIndexFunction( - BalancedEdgeIndexFunction.getInstance()); - - getRenderContext().setEdgeShapeTransformer( - new MixedShapeTransformer()); - - getRenderContext().setEdgeArrowPredicate( - new Predicate, Edge>>() { - - public boolean evaluate(Context, Edge> object) { - return object.element.isDirected(); - } - }); - - getRenderContext().setVertexLabelTransformer(new QVertexLabelTransformer()); - getRenderContext().setVertexLabelRenderer(new QVertexAngleLabeler()); - getRenderContext().setVertexFillPaintTransformer(new QVertexColorTransformer()); - getRenderContext().setVertexShapeTransformer(new QVertexShapeTransformer()); - getRenderContext().setVertexIconTransformer(new QVertexIconTransformer()); - getRenderer().setVertexRenderer(new QVertexRenderer()); - getRenderer().getVertexLabelRenderer().setPosition( - VertexLabel.Position.S); - - getRenderContext().setBangBoxLabelRenderer(new QBangBoxLabeler()); - getRenderer().setBangBoxRenderer(new BangBoxRenderer()); - // For debugging: show a grid behind the graph - //addPreRenderPaintable(new GridPaintable(new GridPaintable.BoundsCalculator() { - // public Rectangle2D getBounds() { return getGraphBounds(); } - //})); - } - - private Dimension calculateGraphSize() { - Dimension size = getGraphLayout().getSize(); - Rectangle2D rect = new Rectangle2D.Double(0, 0, size.getWidth(), size.getHeight()); - Shape bound = getRenderContext().getMultiLayerTransformer().transform(rect); - rect = bound.getBounds2D(); - size.setSize(rect.getWidth(), rect.getHeight()); - return size; - } - - /** - * Compute a bounding box and scale such that the largest dimension fits within the - * view port. 
- */ - public void zoomToFit(Dimension size) { - MutableTransformer mt = getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW); - Rectangle2D gb = getGraphBounds(); - double centerX = size.getWidth() / 2.0; - double centerY = size.getHeight() / 2.0; - mt.translate( - centerX - gb.getCenterX(), - centerY - gb.getCenterY()); - float scale = Math.min( - (float) (size.getWidth() / gb.getWidth()), - (float) (size.getHeight() / gb.getHeight())); - if (scale < 1) { - mt.scale(scale, scale, new Point2D.Double(centerX, centerY)); - } - setPreferredSize(size); - } - - public void unzoom() { - MutableTransformer mt = getRenderContext().getMultiLayerTransformer().getTransformer(Layer.VIEW); - mt.setToIdentity(); - setPreferredSize(calculateGraphSize()); - } - - private ChangeListener graphChangeListener = new ChangeListener() { - public void stateChanged(ChangeEvent e) { - pickedVertexState.restrict(graph.getVertices()); - pickedEdgeState.restrict(graph.getEdges()); - } - }; - - public CoreGraph getGraph() { - return graph; - } - - public void setCoreGraph(CoreGraph g) { - if (this.graph != null) - this.graph.removeChangeListener(graphChangeListener); - this.graph = g; - if (this.graph != null) - this.graph.addChangeListener(graphChangeListener); - } - - /** - * Draw a bounding box around the graph. - */ - public void setBoundingBoxEnabled(boolean enabled) { - if (enabled != boundsPaintingEnabled) { - if (enabled) { - if (boundsPaint == null) { - boundsPaint = new BackdropPaintable(getGraphLayout()); - boundsPaint.setBackgroundColor(new Color(0.99f, 0.99f, 0.99f)); - } - setBackground(new Color(0.97f, 0.97f, 0.97f)); - prependPreRenderPaintable(boundsPaint); - } else { - setBackground(Color.white); - removePreRenderPaintable(boundsPaint); - } - } - } - - public boolean isBoundingBoxEnabled() { - return boundsPaintingEnabled; - } - - /** - * Compute the bounding box of the graph under its current layout. - * @return - */ - public Rectangle2D getGraphBounds() { - Rectangle2D bounds = null; - synchronized (getGraph()) { - bounds = getSubgraphBounds(getGraphLayout(), getGraph().getVertices()); - } - return bounds; - } - - // FIXME: this isn't really the right place - public static Rectangle2D getSubgraphBounds( - Layout layout, - Collection subgraph) { - Rectangle2D bounds = null; - for (Vertex v : subgraph) { - Point2D p = layout.transform(v); - if (bounds == null) { - bounds = new Rectangle2D.Double(p.getX(), p.getY(), 0, 0); - } else { - bounds.add(p); - } - } - if (bounds != null) { - bounds.setRect(bounds.getX() - 20, - bounds.getY() - 20, - bounds.getWidth() + 40, - bounds.getHeight() + 40); - } - - if (bounds == null) { - return new Rectangle2D.Double(0.0d, 0.0d, 20.0d, 20.0d); - } else { - return bounds; - } - } - - /** - * Compute the bounding box of the subgraph under its current layout. 
- * @return - */ - public Rectangle2D getSubgraphBounds(Collection subgraph) { - synchronized (getGraph()) { - return getSubgraphBounds(getGraphLayout(), subgraph); - } - } - - /*@Override - public Dimension getPreferredSize() { - return layout.getSize(); - }*/ - public void shift(Rectangle2D rect, Vertex v, Point2D shift) { - - getGraphLayout().setLocation(v, new Point2D.Double( - rect.getCenterX() + shift.getX(), rect.getCenterY() + shift.getY())); - } - - public void modifyLayout() { - getGraphLayout().reset(); - update(); - } - - public void update() { - // FIXME: there are threading issues here - // We revalidate and repaint while the relaxer is still changing the - // layout - Relaxer relaxer = getModel().getRelaxer(); - if (relaxer != null) { - relaxer.relax(); - } - setPreferredSize(calculateGraphSize()); - Collection c = getGraph().getVertices(); - for (Vertex v : getGraph().getVertices()) { - v.setPosition(getGraphLayout().transform(v)); - } - revalidate(); - repaint(); - } -} diff --git a/gui/src/quanto/gui/InteractiveGraphView.java b/gui/src/quanto/gui/InteractiveGraphView.java deleted file mode 100644 index 4ce0f6fa..00000000 --- a/gui/src/quanto/gui/InteractiveGraphView.java +++ /dev/null @@ -1,1641 +0,0 @@ -package quanto.gui; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.RenderContext; -import quanto.core.data.BangBox; -import quanto.core.data.Vertex; -import quanto.core.data.Edge; -import quanto.core.data.CoreGraph; -import quanto.core.data.VertexType; - -import com.itextpdf.text.DocumentException; -import java.awt.*; -import java.awt.datatransfer.Clipboard; -import java.awt.datatransfer.StringSelection; -import java.awt.event.*; -import java.awt.geom.Ellipse2D; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.io.File; -import java.io.FileOutputStream; -import java.io.IOException; -import java.lang.reflect.InvocationTargetException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; - -import javax.swing.*; -import javax.swing.event.ChangeEvent; -import javax.swing.event.ChangeListener; - -import org.apache.commons.collections15.Transformer; - -import quanto.core.CoreException; -import edu.uci.ics.jung.algorithms.layout.util.Relaxer; -import edu.uci.ics.jung.algorithms.layout.SmoothLayoutDecorator; -import edu.uci.ics.jung.contrib.visualization.control.AddEdgeGraphMousePlugin; -import edu.uci.ics.jung.contrib.visualization.control.ViewScrollingGraphMousePlugin; -import edu.uci.ics.jung.contrib.visualization.ViewZoomScrollPane; -import edu.uci.ics.jung.contrib.visualization.control.ConstrainedPickingBangBoxGraphMousePlugin; -import edu.uci.ics.jung.contrib.visualization.renderers.BangBoxLabelRenderer; -import edu.uci.ics.jung.visualization.Layer; -import edu.uci.ics.jung.visualization.VisualizationServer; -import edu.uci.ics.jung.visualization.control.*; -import edu.uci.ics.jung.contrib.visualization.ShapeBangBoxPickSupport; -import edu.uci.ics.jung.visualization.renderers.VertexLabelRenderer; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; -import java.awt.geom.AffineTransform; -import java.io.OutputStream; -import java.util.HashSet; -import java.util.logging.Level; -import java.util.logging.Logger; - -import java.util.prefs.Preferences; -import javax.swing.filechooser.FileFilter; -import 
javax.swing.filechooser.FileNameExtensionFilter; -import quanto.core.data.AttachedRewrite; -import quanto.core.Core; -import quanto.core.protocol.userdata.CopyOfGraphUserDataSerializer; -import quanto.core.protocol.userdata.PositionGraphUserDataSerializer; -import quanto.gui.graphhelpers.ConstrainedMutableAffineTransformer; -import quanto.gui.graphhelpers.Labeler; -import quanto.gui.graphhelpers.QVertexRenderer; - -public class InteractiveGraphView - extends InteractiveView - implements AddEdgeGraphMousePlugin.Adder, - KeyListener { - - private static final long serialVersionUID = 7196565776978339937L; - private static final Logger logger = Logger.getLogger("quanto.gui.InteractiveGraphView"); - private static Preferences prefsNode; - - public Map actionMap = new HashMap(); - private GraphVisualizationViewer viewer; - private Core core; - private RWMouse graphMouse; - private volatile Job rewriter = null; - private List rewriteCache = null; - private boolean saveEnabled = true; - private boolean saveAsEnabled = true; - private boolean directedEdges = false; - private SmoothLayoutDecorator smoothLayout; - private Map verticesCache; - private QuantoForceLayout forceLayout; - private QuantoDotLayout initLayout; - - private JFileChooser graphSaveFileChooser; - private JFileChooser pdfSaveFileChooser; - - public boolean viewHasParent() { - return this.getParent() != null; - } - - public static void setPreferencesNode(Preferences prefsNode) { - InteractiveGraphView.prefsNode = prefsNode; - } - - public static Preferences getPreferencesNode() { - return prefsNode; - } - - private class QVertexLabeler implements VertexLabelRenderer { - - Map components; - JLabel dummyLabel = new JLabel(); - JLabel realLabel = new JLabel(); - - public QVertexLabeler() { - components = new HashMap(); - realLabel.setOpaque(true); - realLabel.setBackground(Color.white); - } - - public Component getVertexLabelRendererComponent(JComponent vv, - Object value, Font font, boolean isSelected, T vertex) { - if (vertex instanceof Vertex) { - final Vertex qVertex = (Vertex) vertex; - if (!qVertex.isBoundaryVertex() && !qVertex.getVertexType().hasData()) { - return dummyLabel; - } - - Point2D screen = viewer.getRenderContext(). - getMultiLayerTransformer().transform( - viewer.getGraphLayout().transform(qVertex)); - - Labeler labeler; - if (qVertex.isBoundaryVertex()) { - String label = qVertex.getCoreName(); - labeler = components.get(qVertex); - if (labeler == null) { - labeler = new Labeler(label); - components.put(qVertex, labeler); - viewer.add(labeler); - Color colour = new Color(0, 0, 0, 0); - labeler.setColor(colour); - labeler.addLabelChangeListener(new Labeler.LabelChangeListener() { - - public boolean aboutToChangeLabel(Labeler.LabelChangeEvent evt) { - if (qVertex == null) { - return true; // why not? 
- } - try { - String newN = evt.getNewText(); - String oldN = qVertex.getCoreName(); - String displacedName = core.renameVertex(getGraph(), qVertex, newN); - if (verticesCache != null) { - if (displacedName != null) { - Point2D oldP = verticesCache.get(newN); - verticesCache.put(displacedName, oldP); - verticesCache.remove(newN); - } - Point2D oldP = verticesCache.get(oldN); - verticesCache.put(newN, oldP); - verticesCache.remove(oldN); - } - return true; - } catch (CoreException err) { - errorDialog(err.getMessage()); - return false; - } - } - }); - } else { - labeler.setText(label); - } - } else { - // lazily create the labeler - labeler = components.get(qVertex); - if (labeler == null) { - labeler = new Labeler(qVertex.getData()); - components.put(qVertex, labeler); - viewer.add(labeler); - Color colour = qVertex.getVertexType().getVisualizationData().getLabelColour(); - if (colour != null) { - labeler.setColor(colour); - } - labeler.addLabelChangeListener(new Labeler.LabelChangeListener() { - - public boolean aboutToChangeLabel(Labeler.LabelChangeEvent evt) { - if (qVertex == null) { - return true; // sure, I guess? - } - try { - core.setVertexAngle(getGraph(), qVertex, evt.getNewText()); - return true; - } catch (CoreException err) { - coreErrorMessage("The label could not be updated", err); - return false; - } - } - }); - } else { - labeler.update(); - } - } - - Rectangle rect = new Rectangle(labeler.getPreferredSize()); - Point loc = new Point((int) (screen.getX() - rect.getCenterX()), - (int) screen.getY() + 10); - rect.setLocation(loc); - - if (!labeler.getBounds().equals(rect)) { - labeler.setBounds(rect); - } - - return dummyLabel; - } else if (value != null) { - realLabel.setText(value.toString()); - return realLabel; - } else { - return dummyLabel; - } - } - - /** - * Removes orphaned labels. - */ - public void cleanup() { - final Map oldComponents = components; - components = new HashMap(); - for (Labeler l : oldComponents.values()) { - viewer.remove(l); - } - } - } - - private class QBangBoxLabeler implements BangBoxLabelRenderer { - - Map components; - JLabel dummyLabel = new JLabel(); - JLabel realLabel = new JLabel(); - - public QBangBoxLabeler() { - components = new HashMap(); - realLabel.setOpaque(true); - realLabel.setBackground(Color.white); - } - - public Component getBangBoxLabelRendererComponent(JComponent vv, - Object value, Font font, boolean isSelected, T bb) { - if (bb instanceof BangBox) { - final BangBox qBb = (BangBox) bb; - - //FIXME: This method is called a lot, it would probably be nicer - //to store a map: BB -> Shape, so we compute the position of the - //label directly from the shape of the BB and avoid all that min/max - //thing. - if (!getGraph().containsBangBox(qBb)) { - return dummyLabel; - } - Collection bangedV = getGraph().getBoxedVertices(qBb); - if (bangedV.isEmpty()) { - return dummyLabel; - } - Point2D screen = new Point2D.Double(0, 0); - SortedSet Xs = new TreeSet(); - SortedSet Ys = new TreeSet(); - - for (Vertex v : bangedV) { - Point2D p = viewer.getRenderContext(). 
- getMultiLayerTransformer().transform( - viewer.getGraphLayout().transform(v)); - Xs.add(p.getX()); - Ys.add(p.getY()); - } - screen.setLocation((Xs.last() - Xs.first()) / 2 + Xs.first(), Ys.first()); - String label = qBb.getCoreName(); - Labeler labeler = components.get(qBb); - if (labeler == null) { - labeler = new Labeler(label); - components.put(qBb, labeler); - viewer.add(labeler); - Color colour = new Color(0, 0, 0, 0); - labeler.setColor(colour); - labeler.addLabelChangeListener(new Labeler.LabelChangeListener() { - - public boolean aboutToChangeLabel(Labeler.LabelChangeEvent evt) { - if (qBb == null) { - return true; // sure? - } - try { - String newN = evt.getNewText(); - String oldN = qBb.getCoreName(); - core.renameBangBox(getGraph(), oldN, newN); - qBb.updateCoreName(newN); - return true; - } catch (CoreException err) { - errorDialog(err.getMessage()); - return false; - } - } - }); - } - - labeler.setText(label); - - Rectangle rect = new Rectangle(labeler.getPreferredSize()); - Point loc = new Point((int) (screen.getX() - rect.getCenterX()), - (int) screen.getY() - 30); - rect.setLocation(loc); - - if (!labeler.getBounds().equals(rect)) { - labeler.setBounds(rect); - } - - return dummyLabel; - } else if (value != null) { - realLabel.setText(value.toString()); - return realLabel; - } else { - return dummyLabel; - } - } - - /** - * Removes orphaned labels. - */ - public void cleanup() { - final Map oldComponents = components; - components = new HashMap(); - for (Labeler l : oldComponents.values()) { - viewer.remove(l); - } - } - } - - /** - * A graph mouse for doing most interactive graph operations. - * - */ - private class RWMouse extends PluggableGraphMouse { - - private GraphMousePlugin pickingMouse, edgeMouse; - private boolean pickingMouseActive, edgeMouseActive; - - public RWMouse() { - add(new ScalingGraphMousePlugin(new ViewScalingControl(), QuantoApp.COMMAND_MASK)); - add(new ViewTranslatingGraphMousePlugin(InputEvent.BUTTON1_MASK | QuantoApp.COMMAND_MASK)); - ViewScrollingGraphMousePlugin scrollerPlugin = new ViewScrollingGraphMousePlugin(); - scrollerPlugin.setShift(10.0); - add(scrollerPlugin); - add(new AddEdgeGraphMousePlugin( - viewer, - InteractiveGraphView.this, - InputEvent.BUTTON1_MASK | InputEvent.ALT_MASK)); - pickingMouse = new ConstrainedPickingBangBoxGraphMousePlugin(20.0, 20.0) { - // don't change the cursor - - @Override - public void mouseEntered(MouseEvent e) { - } - - @Override - public void mouseExited(MouseEvent e) { - } - - @Override - public void mouseReleased(MouseEvent e) { - super.mouseReleased(e); - setVerticesPositionData(); - } - }; - edgeMouse = new AddEdgeGraphMousePlugin( - viewer, - InteractiveGraphView.this, - InputEvent.BUTTON1_MASK); - setPickingMouse(); - } - - public void clearMouse() { - edgeMouseActive = false; - remove(edgeMouse); - pickingMouseActive = false; - remove(pickingMouse); - } - - final public void setPickingMouse() { - clearMouse(); - pickingMouseActive = true; - add(pickingMouse); - InteractiveGraphView.this.repaint(); - if (isAttached()) { - getViewPort().setCommandStateSelected(CommandManager.Command.SelectMode, true); - } - } - - public void setEdgeMouse() { - clearMouse(); - edgeMouseActive = true; - add(edgeMouse); - InteractiveGraphView.this.repaint(); - if (isAttached()) { - if (directedEdges) { - getViewPort().setCommandStateSelected(CommandManager.Command.DirectedEdgeMode, true); - } else { - getViewPort().setCommandStateSelected(CommandManager.Command.UndirectedEdgeMode, true); - } - } - } - - public 
boolean isPickingMouse() { - return pickingMouseActive; - } - - public boolean isEdgeMouse() { - return edgeMouseActive; - } - } - - public InteractiveGraphView(Core core, CoreGraph g) throws CoreException { - this(core, g, new Dimension(800, 600)); - } - - public InteractiveGraphView(Core core, CoreGraph g, Dimension size) throws CoreException { - super(g.getCoreName()); - setPreferredSize(size); - initLayout = new QuantoDotLayout(g); - initLayout.initialize(); - forceLayout = new QuantoForceLayout(g, initLayout, 20.0); - smoothLayout = new SmoothLayoutDecorator(forceLayout); - viewer = new GraphVisualizationViewer(smoothLayout); - - /* This is probably not the place to do it: - * get vertices user data from graph, and set - * position.*/ - Map vmap = g.getVertexMap(); - for (String key : vmap.keySet()) { - PositionGraphUserDataSerializer pds = new PositionGraphUserDataSerializer(core.getTalker()); - Point2D p = (Point2D) pds.getVertexUserData(g, key); - if (p != null) { - viewer.getGraphLayout().setLocation(vmap.get(key), p); - viewer.getGraphLayout().lock(vmap.get(key), true); - } - } - setMainComponent(new ViewZoomScrollPane(viewer)); - - this.core = core; - Relaxer r = viewer.getModel().getRelaxer(); - if (r != null) { - r.setSleepTime(10); - } - - graphMouse = new RWMouse(); - viewer.setGraphMouse(graphMouse); - - viewer.getRenderContext().getMultiLayerTransformer().setTransformer(Layer.VIEW, new ConstrainedMutableAffineTransformer()); - viewer.getRenderContext().getMultiLayerTransformer().setTransformer(Layer.LAYOUT, new ConstrainedMutableAffineTransformer()); - - viewer.addPreRenderPaintable(new VisualizationServer.Paintable() { - - public void paint(Graphics g) { - Color old = g.getColor(); - g.setColor(Color.red); - if ((graphMouse.isEdgeMouse()) && (directedEdges)) { - g.drawString("DIRECTED EDGE MODE", 5, 15); - } else if (graphMouse.isEdgeMouse()) { - g.drawString("UNDIRECTED EDGE MODE", 5, 15); - } - g.setColor(old); - } - - public boolean useTransform() { - return false; - } - }); - - viewer.addMouseListener(new MouseAdapter() { - - @Override - public void mousePressed(MouseEvent e) { - InteractiveGraphView.this.grabFocus(); - super.mousePressed(e); - } - }); - - addKeyListener(this); - viewer.addKeyListener(this); - - viewer.getRenderContext().setVertexDrawPaintTransformer( - new Transformer() { - - public Paint transform(Vertex v) { - if (isVertexLocked(v)) { - return Color.gray; - } else { - return Color.black; - } - } - }); - viewer.getRenderer().setVertexRenderer(new QVertexRenderer() { - - @Override - public void paintVertex(RenderContext rc, Layout layout, Vertex v) { - if (rc.getPickedVertexState().isPicked(v)) { - Rectangle bounds = rc.getVertexShapeTransformer().transform(v).getBounds(); - Point2D p = layout.transform(v); - p = rc.getMultiLayerTransformer().transform(Layer.LAYOUT, p); - float x = (float) p.getX(); - float y = (float) p.getY(); - // create a transform that translates to the location of - // the vertex to be rendered - AffineTransform xform = AffineTransform.getTranslateInstance(x, y); - // transform the vertex shape with xtransform - bounds = xform.createTransformedShape(bounds).getBounds(); - bounds.translate(-1, -1); - - GraphicsDecorator g = rc.getGraphicsContext(); - bounds.grow(3, 3); - g.setColor(new Color(200, 200, 255)); - g.fillRoundRect(bounds.x, bounds.y, bounds.width, bounds.height, 4, 4); - g.setColor(Color.BLUE); - g.drawRoundRect(bounds.x, bounds.y, bounds.width, bounds.height, 4, 4); - } - super.paintVertex(rc, layout, v); - } - }); 
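
The constructor above restores per-vertex positions from the graph's quanto-gui user data and pins them in the layout; setVerticesPositionData(), further down in this class, writes the current layout positions back through the same serializer. The condensed sketch below restates that round trip. It is illustrative only (the real write-back is more careful about which write creates an undo point), generic type parameters are assumed because the diff text strips them, and it takes the deleted quanto.core classes to behave exactly as shown in the surrounding code.

    // Condensed, illustrative restatement of the position round trip (not part of the deleted file).
    import java.awt.geom.Point2D;
    import java.util.Map;
    import edu.uci.ics.jung.algorithms.layout.Layout;
    import quanto.core.Core;
    import quanto.core.CoreException;
    import quanto.core.data.CoreGraph;
    import quanto.core.data.Edge;
    import quanto.core.data.Vertex;
    import quanto.core.protocol.userdata.PositionGraphUserDataSerializer;

    class VertexPositionPersistenceSketch {

        // On load: read each vertex's saved position and pin it in the layout.
        static void restorePositions(Core core, CoreGraph g, Layout<Vertex, Edge> layout)
                throws CoreException {
            PositionGraphUserDataSerializer pds =
                    new PositionGraphUserDataSerializer(core.getTalker());
            Map<String, Vertex> vmap = g.getVertexMap();
            for (Map.Entry<String, Vertex> entry : vmap.entrySet()) {
                Point2D p = (Point2D) pds.getVertexUserData(g, entry.getKey());
                if (p != null) {
                    layout.setLocation(entry.getValue(), p); // place the vertex where it was saved
                    layout.lock(entry.getValue(), true);     // keep the relaxer from moving it
                }
            }
        }

        // On edit: write the current layout positions back as user data, grouped into one undo step.
        static void storePositions(Core core, CoreGraph g, Layout<Vertex, Edge> layout)
                throws CoreException {
            PositionGraphUserDataSerializer pds =
                    new PositionGraphUserDataSerializer(core.getTalker());
            core.startUndoGroup(g);
            for (Vertex v : g.getVertices()) {
                Point2D p = layout.transform(v); // current position under the layout
                pds.setVertexUserData(g, v.getCoreName(),
                        new Point2D.Double(p.getX(), p.getY()));
            }
            core.endUndoGroup(g);
        }
    }
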
- - viewer.getRenderContext().setVertexLabelRenderer(new QVertexLabeler()); - viewer.getRenderContext().setBangBoxLabelRenderer(new QBangBoxLabeler()); - // increase the picksize - viewer.setPickSupport(new ShapeBangBoxPickSupport(viewer, 4)); - viewer.setBoundingBoxEnabled(false); - - buildActionMap(); - - g.addChangeListener(new ChangeListener() { - - public void stateChanged(ChangeEvent e) { - removeOldLabels(); - if (saveEnabled && isAttached()) { - getViewPort().setCommandEnabled(CommandManager.Command.Save, - !getGraph().isSaved()); - firePropertyChange("saved", !getGraph().isSaved(), getGraph().isSaved()); - } - } - }); - } - - public boolean isVertexLocked(Vertex v) { - return viewer.getGraphLayout().isLocked(v); - } - - public void lockVertices(Collection verts) { - for (Vertex v : verts) { - viewer.getGraphLayout().lock(v, true); - } - } - - public void unlockVertices(Collection verts) { - for (Vertex v : verts) { - viewer.getGraphLayout().lock(v, false); - } - } - - public boolean isSaveEnabled() { - return saveEnabled; - } - - public void setSaveEnabled(boolean saveEnabled) { - if (this.saveEnabled != saveEnabled) { - this.saveEnabled = saveEnabled; - if (isAttached()) { - getViewPort().setCommandEnabled( - CommandManager.Command.Save, - saveEnabled && !isSaved()); - } - if (saveEnabled) { - actionMap.put(CommandManager.Command.Save.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - saveGraph(); - } - }); - } else { - actionMap.remove(CommandManager.Command.Save.toString()); - } - } - } - - public boolean isSaveAsEnabled() { - return saveAsEnabled; - } - - public void setSaveAsEnabled(boolean saveAsEnabled) { - if (this.saveAsEnabled != saveAsEnabled) { - this.saveAsEnabled = saveAsEnabled; - if (isAttached()) { - getViewPort().setCommandEnabled( - CommandManager.Command.SaveAs, - saveAsEnabled); - } - if (saveAsEnabled) { - actionMap.put(CommandManager.Command.SaveAs.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - saveGraphAs(); - } - }); - } else { - actionMap.remove(CommandManager.Command.SaveAs.toString()); - } - } - } - - public GraphVisualizationViewer getVisualization() { - return viewer; - } - - public void addChangeListener(ChangeListener listener) { - viewer.addChangeListener(listener); - } - - public CoreGraph getGraph() { - return viewer.getGraph(); - } - - /** - * Compute a bounding box and scale such that the largest - * dimension fits within the view port. 
- */ - public void zoomToFit() { - viewer.zoomToFit(getSize()); - } - - public static String titleOfGraph(String name) { - return "graph (" + name + ")"; - } - - public void addEdge(Vertex s, Vertex t) { - try { - core.addEdge(getGraph(), directedEdges, s, t); - } catch (CoreException e) { - coreErrorDialog("Could not add a directed edge", e); - } - } - - public void addBoundaryVertex() { - try { - core.addBoundaryVertex(getGraph()); - setVerticesPositionData(); - } catch (CoreException e) { - coreErrorDialog("Could not add a boundary vertex", e); - } - } - - public void addVertex(String type) { - try { - core.addVertex(getGraph(), type); - setVerticesPositionData(); - } catch (CoreException e) { - coreErrorDialog("Could not add a vertex", e); - } - } - - public void showRewrites() { - try { - Set picked = viewer.getPickedVertexState().getPicked(); - if (picked.isEmpty()) { - core.attachRewrites(getGraph(), getGraph().getVertices()); - } else { - core.attachRewrites(getGraph(), picked); - } - JFrame rewrites = new RewriteViewer(InteractiveGraphView.this); - rewrites.setVisible(true); - } catch (CoreException e) { - coreErrorDialog("Could not obtain the rewrites", e); - } - } - - public void removeOldLabels() { - ((QVertexLabeler) viewer.getRenderContext().getVertexLabelRenderer()).cleanup(); - ((QBangBoxLabeler) viewer.getRenderContext().getBangBoxLabelRenderer()).cleanup(); - } - - @Override - public void cleanUp() { - removeOldLabels(); - ((QVertexLabeler) viewer.getRenderContext().getVertexLabelRenderer()).cleanup(); - ((QBangBoxLabeler) viewer.getRenderContext().getBangBoxLabelRenderer()).cleanup(); - if (saveEnabled && isAttached()) { - getViewPort().setCommandEnabled(CommandManager.Command.Save, - !getGraph().isSaved()); - } - } - - public void cacheVertexPositions() { - verticesCache = new HashMap(); - for (Vertex v : getGraph().getVertices()) { - int X = (int) smoothLayout.getDelegate().transform(v).getX(); - int Y = (int) smoothLayout.getDelegate().transform(v).getY(); - Point2D p = new Point2D.Double(X, Y); - verticesCache.put(v.getCoreName(), p); - } - } - - public void setVertexPositionData(Vertex v) { - try { - core.startUndoGroup(getGraph()); - PositionGraphUserDataSerializer pds = new PositionGraphUserDataSerializer(core.getTalker()); - int X = (int) smoothLayout.getDelegate().transform(v).getX(); - int Y = (int) smoothLayout.getDelegate().transform(v).getY(); - Point2D new_p = new Point2D.Double(X, Y); - pds.setVertexUserData(getGraph(), v.getCoreName(), new_p); - core.endUndoGroup(getGraph()); - } catch (CoreException e) { - errorDialog(e.getMessage()); - } - } - - public void setVerticesPositionData() { - CoreGraph graph = getGraph(); - PositionGraphUserDataSerializer pds = new PositionGraphUserDataSerializer(core.getTalker()); - try { - //New vertices are added but not pushed on the undo stack - core.startUndoGroup(graph); - for (Vertex v : graph.getVertices()) { - int X = (int) smoothLayout.getDelegate().transform(v).getX(); - int Y = (int) smoothLayout.getDelegate().transform(v).getY(); - Point2D old_p = pds.getVertexUserData(graph, v.getCoreName()); - Point2D new_p = new Point2D.Double(X, Y); - if (old_p == null) { - pds.setVertexUserData(graph, v.getCoreName(), new_p); - } - } - core.endUndoGroup(graph); - - ArrayList vertices = new ArrayList(); - for (Vertex v : graph.getVertices()) { - int X = (int) smoothLayout.getDelegate().transform(v).getX(); - int Y = (int) smoothLayout.getDelegate().transform(v).getY(); - Point2D old_p = (Point2D) pds.getVertexUserData(graph, 
v.getCoreName()); - Point2D new_p = new Point2D.Double(X, Y); - if (old_p.distance(new_p) > 1.5) { - vertices.add(v); - } - } - if (vertices.size() > 0) { - //The first one creates an undo point - Vertex v = vertices.get(0); - int X = (int) smoothLayout.getDelegate().transform(v).getX(); - int Y = (int) smoothLayout.getDelegate().transform(v).getY(); - Point2D new_p = new Point2D.Double(X, Y); - pds.setVertexUserData(graph, v.getCoreName(), new_p); - vertices.remove(v); - } - if (vertices.size() <= 0) { - return; - } - //The others do not - core.startUndoGroup(graph); - for (Vertex v : vertices) { - int X = (int) smoothLayout.getDelegate().transform(v).getX(); - int Y = (int) smoothLayout.getDelegate().transform(v).getY(); - Point2D new_p = new Point2D.Double(X, Y); - pds.setVertexUserData(graph, v.getCoreName(), new_p); - } - core.endUndoGroup(graph); - } catch (CoreException e) { - errorDialog(e.getMessage()); - } - } - - public void updateGraph(Rectangle2D rewriteRect) throws CoreException { - core.updateGraph(getGraph()); - relayoutGraph(rewriteRect); - } - - public void relayoutGraph(Rectangle2D rewriteRect) throws CoreException { - int count = 0; - for (Vertex v : getGraph().getVertices()) { - if (verticesCache.get(v.getCoreName()) != null) { - PositionGraphUserDataSerializer pds = new PositionGraphUserDataSerializer(core.getTalker()); - Point2D p = (Point2D) pds.getVertexUserData(getGraph(), v.getCoreName()); - if (p == null) p = verticesCache.get(v.getCoreName()); - - viewer.getGraphLayout().setLocation(v, p); - viewer.getGraphLayout().lock(v, true); - } else { - if (rewriteRect != null) { - PositionGraphUserDataSerializer pds = new PositionGraphUserDataSerializer(core.getTalker()); - Point2D p = (Point2D) pds.getVertexUserData(getGraph(), v.getCoreName()); - if (p != null) { - viewer.getGraphLayout().setLocation(v, p); - viewer.getGraphLayout().lock(v, true); - } else { - if (rewriteRect.getCenterX() <= 10 || rewriteRect.getCenterX() <= 10) - viewer.getGraphLayout().setLocation(v, new Point2D.Double(20 * (1 + count), 20 * (1 + count))); - else - viewer.shift(rewriteRect, v, new Point2D.Double(20 * (1 + count), 20 * (1 + count))); - - setVertexPositionData(v); - count++; - } - } else { - // ... 
log here - } - } - } - forceLayout.startModify(); - viewer.modifyLayout(); - forceLayout.endModify(); - removeOldLabels(); - viewer.update(); - //locking and unlocking used internally to notify the layout which vertices have user data - unlockVertices(getGraph().getVertices()); - } - - public void outputToTextView(String text) { - TextView tview = new TextView(getTitle() + "-output", text); - getViewManager().addView(tview); - - if (isAttached()) { - getViewPort().openView(tview); - } - } - private SubgraphHighlighter highlighter = null; - - public void clearHighlight() { - if (highlighter != null) { - viewer.removePostRenderPaintable(highlighter); - } - highlighter = null; - viewer.repaint(); - } - - public void highlightSubgraph(Collection vs) { - clearHighlight(); - highlighter = new SubgraphHighlighter(vs); - viewer.addPostRenderPaintable(highlighter); - viewer.update(); - } - - public void highlightRewrite(AttachedRewrite rw) { - highlightSubgraph(rw.getRemovedVertices()); - } - - public void startRewriting() { - abortRewriting(); - rewriter = new RewriterJob(); - rewriter.addJobListener(new JobListener() { - - public void jobEnded(JobEndEvent event) { - if (rewriter != null) { - rewriter = null; - } - if (isAttached()) { - setupNormaliseAction(getViewPort()); - } - } - }); - rewriter.start(); - showJobIndicator("Rewriting...", rewriter); - if (isAttached()) { - setupNormaliseAction(getViewPort()); - } - } - - public void abortRewriting() { - if (rewriter != null) { - rewriter.abortJob(); - rewriter = null; - } - } - - private void setupNormaliseAction(ViewPort vp) { - if (rewriter == null) { - vp.setCommandEnabled(CommandManager.Command.Normalise, true); - } else { - vp.setCommandEnabled(CommandManager.Command.Normalise, false); - } - } - - private class RewriterJob extends Job { - - private boolean highlight = false; - - private boolean attachNextRewrite() { - try { - return core.attachOneRewrite( - getGraph(), - getGraph().getVertices()); - } catch (CoreException e) { - coreErrorDialog("Could not attach the next rewrite", e); - return false; - } - } - - private void invokeHighlightRewriteAndWait(AttachedRewrite rw) - throws InterruptedException { - highlight = true; - final AttachedRewrite fRw = rw; - invokeAndWait(new Runnable() { - - public void run() { - highlightRewrite(fRw); - } - }); - } - - private void invokeApplyRewriteAndWait(int index) - throws InterruptedException { - highlight = false; - final int fIndex = index; - invokeAndWait(new Runnable() { - - public void run() { - clearHighlight(); - applyRewrite(fIndex); - } - }); - } - - private void invokeClearHighlightLater() { - highlight = false; - SwingUtilities.invokeLater(new Runnable() { - - public void run() { - clearHighlight(); - } - }); - } - - private void invokeInfoDialogAndWait(String message) - throws InterruptedException { - final String fMessage = message; - invokeAndWait(new Runnable() { - - public void run() { - infoDialog(fMessage); - } - }); - } - - private void invokeAndWait(Runnable runnable) - throws InterruptedException { - try { - SwingUtilities.invokeAndWait(runnable); - } catch (InvocationTargetException ex) { - logger.log(Level.WARNING, - "invoke and wait failed", ex); - } - } - - @Override - public void run() { - try { - // FIXME: communicating with the core: is this - // really threadsafe? Probably not. 
- - int count = 0; - while (!Thread.interrupted() && attachNextRewrite()) { - List rws = getRewrites(); - invokeHighlightRewriteAndWait(rws.get(0)); - sleep(1500); - invokeApplyRewriteAndWait(0); - ++count; - } - - fireJobFinished(); - invokeInfoDialogAndWait("Applied " + count + " rewrites"); - } catch (InterruptedException e) { - if (highlight) { - invokeClearHighlightLater(); - } - } - } - } - - private class SubgraphHighlighter - implements VisualizationServer.Paintable { - - Collection verts; - - public SubgraphHighlighter(Collection vs) { - verts = vs; - } - - public void paint(Graphics g) { - Color oldColor = g.getColor(); - g.setColor(Color.blue); - Graphics2D g2 = (Graphics2D) g.create(); - float opac = 0.3f + 0.2f * (float) Math.sin( - System.currentTimeMillis() / 150.0); - g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, opac)); - - for (Vertex v : verts) { - Point2D pt = viewer.getGraphLayout().transform(v); - Ellipse2D ell = new Ellipse2D.Double( - pt.getX() - 15, pt.getY() - 15, 30, 30); - Shape draw = viewer.getRenderContext().getMultiLayerTransformer().transform(ell); - ((Graphics2D) g2).fill(draw); - } - - g2.dispose(); - g.setColor(oldColor); - repaint(10); - } - - public boolean useTransform() { - return false; - } - } - - /** - * Gets the attached rewrites as a list of Pair. Returns and empty - * list on console error. - * @return - */ - public List getRewrites() { - try { - rewriteCache = core.getAttachedRewrites(getGraph()); - return rewriteCache; - } catch (CoreException e) { - coreErrorDialog("Could not obtain the rewrites", e); - } - - return new ArrayList(); - } - - public void applyRewrite(int index) { - Rectangle2D rewriteRect = new Rectangle2D.Double(); - try { - AttachedRewrite rw = rewriteCache.get(index); - viewer.setCoreGraph(rw.getGraph()); - Collection removed = rw.getRemovedVertices(); - if (removed.size() > 0) { - rewriteRect = viewer.getSubgraphBounds(removed); - if (removed.size() == 1) { - smoothLayout.setOrigin(rewriteRect.getCenterX(), rewriteRect.getCenterY()); - } - } - cacheVertexPositions(); - core.applyAttachedRewrite(getGraph(), index); - relayoutGraph(rewriteRect); - smoothLayout.setOrigin(0, 0); - } catch (CoreException e) { - coreErrorDialog("Could not apply the rewrite", e); - } - } - - public Core getCore() { - return core; - } - - @Override - public void commandTriggered(String command) { - ActionListener listener = actionMap.get(command); - if (listener != null) { - listener.actionPerformed(new ActionEvent(this, -1, command)); - } else { - super.commandTriggered(command); - } - } - - public void saveGraphAs() { - if (graphSaveFileChooser == null) { - graphSaveFileChooser = new JFileChooser(); - graphSaveFileChooser.setDialogType(JFileChooser.SAVE_DIALOG); - FileFilter filter = new FileNameExtensionFilter("Quanto graph", - "graph", "qgr"); - graphSaveFileChooser.addChoosableFileFilter(filter); - graphSaveFileChooser.setFileFilter(filter); - if (prefsNode != null) { - String path = prefsNode.get("lastGraphDir", null); - if (path != null) { - graphSaveFileChooser.setCurrentDirectory(new File(path)); - } - } - } - String fileName = getGraph().getFileName(); - if (fileName != null && !fileName.isEmpty()) { - graphSaveFileChooser.setSelectedFile(new File(fileName)); - } - - int retVal = graphSaveFileChooser.showDialog(this, "Save Graph"); - if (retVal == JFileChooser.APPROVE_OPTION) { - File f = graphSaveFileChooser.getSelectedFile(); - if (f.exists()) { - int overwriteAnswer = JOptionPane.showConfirmDialog( - this, - "Are 
you sure you want to overwrite \"" + f.getName() + "\"?", - "Overwrite file?", - JOptionPane.YES_NO_OPTION); - if (overwriteAnswer != JOptionPane.YES_OPTION) { - return; - } - } - if (f.getParent() != null && prefsNode != null) { - prefsNode.put("lastGraphDir", f.getParent()); - } - try { - core.saveGraph(getGraph(), f); - core.renameGraph(getGraph(), f.getName()); - getGraph().setFileName(f.getAbsolutePath()); - getGraph().setSaved(true); - firePropertyChange("saved", !getGraph().isSaved(), getGraph().isSaved()); - setTitle(f.getName()); - } catch (CoreException e) { - coreErrorDialog("Could not save the graph", e); - } catch (IOException e) { - detailedErrorDialog("Save Graph", "Could not save the graph", e.getLocalizedMessage()); - } - } - } - - public void saveGraph() { - if (getGraph().getFileName() != null) { - try { - core.saveGraph(getGraph(), new File(getGraph().getFileName())); - getGraph().setSaved(true); - firePropertyChange("saved", !getGraph().isSaved(), getGraph().isSaved()); - } catch (CoreException e) { - coreErrorDialog("Could not save the graph", e); - } catch (IOException e) { - detailedErrorDialog("Save Graph", "Could not save the graph", e.getLocalizedMessage()); - } - } else { - saveGraphAs(); - } - } - - public void exportToPdf() { - try { - if (pdfSaveFileChooser == null) { - pdfSaveFileChooser = new JFileChooser(); - pdfSaveFileChooser.setDialogType(JFileChooser.SAVE_DIALOG); - FileFilter filter = new FileNameExtensionFilter("PDF Document", "pdf"); - pdfSaveFileChooser.addChoosableFileFilter(filter); - pdfSaveFileChooser.setFileFilter(filter); - String fileName = getGraph().getFileName(); - if (fileName != null && !fileName.isEmpty()) { - pdfSaveFileChooser.setCurrentDirectory(new File(fileName).getParentFile()); - } else if (prefsNode != null) { - String path = prefsNode.get("lastPdfDir", null); - if (path != null) { - pdfSaveFileChooser.setCurrentDirectory(new File(path)); - } - } - } - int retVal = graphSaveFileChooser.showDialog(this, "Export to PDF"); - if (retVal == JFileChooser.APPROVE_OPTION) { - File f = graphSaveFileChooser.getSelectedFile(); - if (f.exists()) { - int overwriteAnswer = JOptionPane.showConfirmDialog( - this, - "Are you sure you want to overwrite \"" + f.getName() + "\"?", - "Overwrite file?", - JOptionPane.YES_NO_OPTION); - if (overwriteAnswer != JOptionPane.YES_OPTION) { - return; - } - } - if (f.getParent() != null && prefsNode != null) { - prefsNode.put("lastPdfDir", f.getParent()); - } - OutputStream file = new FileOutputStream(f); - PdfGraphVisualizationServer server = new PdfGraphVisualizationServer(core.getActiveTheory(), getGraph()); - server.renderToPdf(file); - file.close(); - } - } catch (DocumentException ex) { - detailedErrorMessage("Could not generate the PDF", ex); - } catch (IOException ex) { - detailedErrorMessage("Could not save the PDF", ex); - } - } - - public static String getLastGraphDirectory() { - if (prefsNode != null) { - return prefsNode.get("lastGraphDir", null); - } - return null; - } - - /** - * Presents the user with an "Open Graph" dialog - * - * The directory will be set to the last directory that was used for - * opening or saving a graph. 
- * - * @param parent - * @return - */ - public static File chooseGraphFile(Component parent) { - JFileChooser chooser = new JFileChooser(); - chooser.setDialogType(JFileChooser.OPEN_DIALOG); - FileFilter filter = new FileNameExtensionFilter("Quanto graph", - "graph", "qgr"); - chooser.addChoosableFileFilter(filter); - chooser.setFileFilter(filter); - String path = getLastGraphDirectory(); - if (path != null) { - chooser.setCurrentDirectory(new File(path)); - } - int retVal = chooser.showDialog(parent, "Open Graph"); - if (retVal == JFileChooser.APPROVE_OPTION) { - File f = chooser.getSelectedFile(); - if (f.getParent() != null && prefsNode != null) { - prefsNode.put("lastGraphDir", f.getParent()); - } - return f; - } else { - return null; - } - } - - public static void registerKnownCommands(Core core, CommandManager commandManager) { - /* - * Add commands dynamically and add registered vertex types - */ - for (VertexType vertexType : core.getActiveTheory().getVertexTypes()) { - commandManager.registerCommand("add-" + vertexType.getTypeName() + "-vertex-command"); - } - } - - private void buildActionMap() { - actionMap.put(CommandManager.Command.Save.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - saveGraph(); - } - }); - actionMap.put(CommandManager.Command.SaveAs.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - saveGraphAs(); - } - }); - - actionMap.put(CommandManager.Command.Undo.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - cacheVertexPositions(); - Rectangle2D rect = viewer.getGraphBounds(); - core.undo(getGraph()); - relayoutGraph(rect); - } catch (CoreException ex) { - coreErrorDialog("Could not undo", ex); - } - } - }); - actionMap.put(CommandManager.Command.Redo.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - cacheVertexPositions(); - Rectangle2D rect = viewer.getGraphBounds(); - core.redo(getGraph()); - relayoutGraph(rect); - } catch (CoreException ex) { - coreErrorDialog("Could not redo", ex); - } - } - }); - actionMap.put(CommandManager.Command.UndoRewrite.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - cacheVertexPositions(); - Rectangle2D rect = viewer.getGraphBounds(); - core.undoRewrite(getGraph()); - relayoutGraph(rect); - } catch (CoreException ex) { - coreErrorDialog("Could not undo", ex); - } - } - }); - actionMap.put(CommandManager.Command.RedoRewrite.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - cacheVertexPositions(); - Rectangle2D rect = viewer.getGraphBounds(); - core.redoRewrite(getGraph()); - relayoutGraph(rect); - } catch (CoreException ex) { - coreErrorDialog("Could not redo", ex); - } - } - }); - actionMap.put(CommandManager.Command.Cut.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - Set picked = viewer.getPickedVertexState().getPicked(); - if (!picked.isEmpty()) { - core.cutSubgraph(getGraph(), picked); - } - } catch (CoreException ex) { - coreErrorDialog("Could not cut selection", ex); - } - } - }); - actionMap.put(CommandManager.Command.Copy.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - Set picked = viewer.getPickedVertexState().getPicked(); - if (!picked.isEmpty()) { - core.copySubgraph(getGraph(), picked); - } - } catch (CoreException ex) { - coreErrorDialog("Could not copy selection", ex); - } - } - }); 
- actionMap.put(CommandManager.Command.Paste.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - cacheVertexPositions(); - Rectangle2D rect = viewer.getGraphBounds(); - core.paste(getGraph()); - /* - * FIXME: maybe, this is not the right place? - * When we paste a graph we want to keep it's layout as well, so we get it's quanto-position uidata - * and translate everything so that it ends up at the right of the current graph. - * What we get is a graph, already merged and with fresh names. So in order to know which one - * were copied we check the "copy_of" user_data which is set automatically by the core when a - * subgraph get copied, and delete it afterwards. - * */ - CopyOfGraphUserDataSerializer cos = new CopyOfGraphUserDataSerializer(core.getTalker()); - PositionGraphUserDataSerializer pos = new PositionGraphUserDataSerializer(core.getTalker()); - - core.startUndoGroup(getGraph()); - for (Vertex v : getGraph().getVertices()) { - String copy_of_vertex = (String) cos.getVertexUserData(getGraph(), v.getCoreName()); - if ((copy_of_vertex != null) && (!copy_of_vertex.equals(""))) { - //Then translate its quanto-gui:position - Point2D position = (Point2D) pos.getVertexUserData(getGraph(), v.getCoreName()); - position.setLocation(position.getX() + rect.getCenterX() + 20, position.getY()); - pos.setVertexUserData(getGraph(), v.getCoreName(), position); - cos.deleteVertexUserData(getGraph(), v.getCoreName()); - } - } - /* For now we do nothing with Edge and !-Boxes user data but still need to remove their "copy_of" UD */ - for (Edge edge : getGraph().getEdges()) { - String copy_of = (String) cos.getEdgeUserData(getGraph(), edge.getCoreName()); - if ((copy_of != null) && (!copy_of.equals(""))) { - cos.deleteEdgeUserData(getGraph(), edge.getCoreName()); - } - } - for (BangBox bb : getGraph().getBangBoxes()) { - String copy_of = (String) cos.getBangBoxUserData(getGraph(), bb.getCoreName()); - if ((copy_of != null) && (!copy_of.equals(""))) { - cos.deleteBangBoxUserData(getGraph(), bb.getCoreName()); - } - } - core.endUndoGroup(getGraph()); - updateGraph(rect); - } catch (CoreException ex) { - coreErrorDialog("Could not paste selection", ex); - } - } - }); - actionMap.put(CommandManager.Command.SelectAll.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - synchronized (getGraph()) { - for (Vertex v : getGraph().getVertices()) { - viewer.getPickedVertexState().pick(v, true); - } - } - } - }); - actionMap.put(CommandManager.Command.DeselectAll.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - viewer.getPickedVertexState().clear(); - } - }); - actionMap.put(CommandManager.Command.Relayout.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - // re-layout - initLayout.reset(); - forceLayout.forgetPositions(); - viewer.update(); - setVerticesPositionData(); - } - }); - - actionMap.put(CommandManager.Command.ExportToPdf.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - exportToPdf(); - } - }); - actionMap.put(CommandManager.Command.SelectMode.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - graphMouse.setPickingMouse(); - } - }); - actionMap.put(CommandManager.Command.DirectedEdgeMode.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - directedEdges = true; - graphMouse.setEdgeMouse(); - } - }); - 
actionMap.put(CommandManager.Command.UndirectedEdgeMode.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - directedEdges = false; - graphMouse.setEdgeMouse(); - } - }); - actionMap.put(CommandManager.Command.LatexToClipboard.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - String tikz = TikzOutput.generate( - getGraph(), - viewer.getGraphLayout()); - Clipboard cb = Toolkit.getDefaultToolkit().getSystemClipboard(); - StringSelection data = new StringSelection(tikz); - cb.setContents(data, data); - } - }); - actionMap.put(CommandManager.Command.AddBoundaryVertex.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - addBoundaryVertex(); - } - }); - actionMap.put(CommandManager.Command.ShowRewrites.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - showRewrites(); - } - }); - actionMap.put(CommandManager.Command.Normalise.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - if (rewriter != null) { - abortRewriting(); - } - startRewriting(); - - } - }); - actionMap.put(CommandManager.Command.FastNormalise.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - cacheVertexPositions(); - Rectangle2D rect = viewer.getGraphBounds(); - core.fastNormalise(getGraph()); - relayoutGraph(rect); - } catch (CoreException ex) { - coreErrorDialog("Could not normalise graph", ex); - } - } - }); - actionMap.put(CommandManager.Command.BangVertices.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - core.addBangBox(getGraph(), viewer.getPickedVertexState().getPicked()); - } catch (CoreException ex) { - coreErrorDialog("Could not add !-box", ex); - } - } - }); - actionMap.put(CommandManager.Command.UnbangVertices.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - cacheVertexPositions(); - Rectangle2D rect = viewer.getGraphBounds(); - core.removeVerticesFromBangBoxes(getGraph(), viewer.getPickedVertexState().getPicked()); - relayoutGraph(rect); - } catch (CoreException ex) { - coreErrorDialog("Could not remove vertices from !-box", ex); - } - } - }); - actionMap.put(CommandManager.Command.DropBangBox.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - core.dropBangBoxes(getGraph(), viewer.getPickedBangBoxState().getPicked()); - } catch (CoreException ex) { - coreErrorDialog("Could not remove !-box", ex); - } - } - }); - actionMap.put(CommandManager.Command.KillBangBox.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - core.killBangBoxes(getGraph(), viewer.getPickedBangBoxState().getPicked()); - } catch (CoreException ex) { - coreErrorDialog("Could not kill !-box", ex); - } - } - }); - actionMap.put(CommandManager.Command.DuplicateBangBox.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - cacheVertexPositions(); - Rectangle2D rect = viewer.getGraphBounds(); - if (viewer.getPickedBangBoxState().getPicked().size() == 1) { - core.duplicateBangBox(getGraph(), (BangBox) viewer.getPickedBangBoxState().getPicked().toArray()[0]); - } - updateGraph(rect); - } catch (CoreException ex) { - coreErrorDialog("Could not duplicate !-box", ex); - } - } - }); - actionMap.put(CommandManager.Command.Refresh.toString(), new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - 
core.updateGraph(getGraph()); - } catch (CoreException ex) { - coreErrorDialog("Could not refresh graph", ex); - } - } - }); - - /* - * Add dynamically commands corresponding allowing to add registered vertices - */ - for (final VertexType vertexType : core.getActiveTheory().getVertexTypes()) { - actionMap.put("add-" + vertexType.getTypeName() + "-vertex-command", new ActionListener() { - - public void actionPerformed(ActionEvent e) { - addVertex(vertexType.getTypeName()); - } - }); - } - } - - @Override - public void attached(ViewPort vp) { - for (String actionName : actionMap.keySet()) { - vp.setCommandEnabled(actionName, true); - } - if (saveEnabled) { - vp.setCommandEnabled(CommandManager.Command.Save, - !getGraph().isSaved()); - } - if ((graphMouse.isEdgeMouse()) && (directedEdges)) { - vp.setCommandStateSelected(CommandManager.Command.DirectedEdgeMode, true); - } else if (graphMouse.isEdgeMouse()) { - vp.setCommandStateSelected(CommandManager.Command.UndirectedEdgeMode, true); - } else { - vp.setCommandStateSelected(CommandManager.Command.SelectMode, true); - } - setupNormaliseAction(vp); - super.attached(vp); - } - - @Override - public void detached(ViewPort vp) { - vp.setCommandStateSelected(CommandManager.Command.SelectMode, true); - - for (String actionName : actionMap.keySet()) { - vp.setCommandEnabled(actionName, false); - } - super.detached(vp); - } - - @Override - protected String getUnsavedClosingMessage() { - return "Graph '" + getGraph().getCoreName() + "' is unsaved. Close anyway?"; - } - - @Override - public boolean isSaved() { - return getGraph().isSaved(); - } - - public void keyPressed(KeyEvent e) { - // this listener only handles un-modified keys - if (e.getModifiers() != 0) { - return; - } - - int delete = (QuantoApp.isMac) ? KeyEvent.VK_BACK_SPACE : KeyEvent.VK_DELETE; - if (e.getKeyCode() == delete) { - try { - core.deleteEdges( - getGraph(), viewer.getPickedEdgeState().getPicked()); - core.deleteVertices( - getGraph(), viewer.getPickedVertexState().getPicked()); - - } catch (CoreException err) { - coreErrorMessage("Could not delete the vertex", err); - } finally { - // if null things are in the picked state, weird stuff - // could happen. 
- viewer.getPickedEdgeState().clear(); - viewer.getPickedVertexState().clear(); - } - } else { - switch (e.getKeyCode()) { - case KeyEvent.VK_B: - addBoundaryVertex(); - break; - case KeyEvent.VK_E: - if (graphMouse.isEdgeMouse()) { - graphMouse.setPickingMouse(); - } else { - graphMouse.setEdgeMouse(); - } - break; - case KeyEvent.VK_SPACE: - showRewrites(); - break; - //hotkey for force layout - case KeyEvent.VK_A: { - forceLayout.startModify(); - viewer.modifyLayout(); - forceLayout.endModify(); - setVerticesPositionData(); - } - break; - } - VertexType v = core.getActiveTheory().getVertexTypeByMnemonic(e.getKeyChar()); - if (v != null) { - addVertex(v.getTypeName()); - } - } - } - - public void keyReleased(KeyEvent e) { - } - - public void keyTyped(KeyEvent e) { - } -} diff --git a/gui/src/quanto/gui/InteractiveView.java b/gui/src/quanto/gui/InteractiveView.java deleted file mode 100644 index 6f399ebc..00000000 --- a/gui/src/quanto/gui/InteractiveView.java +++ /dev/null @@ -1,524 +0,0 @@ -package quanto.gui; - -import java.awt.BorderLayout; -import java.awt.Component; -import java.awt.Insets; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.util.LinkedList; -import java.util.List; -import javax.swing.BorderFactory; -import javax.swing.BoxLayout; -import javax.swing.Icon; -import javax.swing.ImageIcon; -import javax.swing.JButton; -import javax.swing.JLabel; -import javax.swing.JOptionPane; -import javax.swing.JPanel; -import javax.swing.UIManager; -import quanto.core.CoreException; - -/** - * An interactive view, which is pretty much self-sufficient. Meant to be - * held in a global table like emacs buffers. - * - * InteractiveViews are tied in very closely with ViewPort, but should be - * independent of QuantoFrame, and the aim is to make them independent of - * QuantoApp. - * - * A note about the design of the menu/command system: - * - * Each view type needs to register the actions it provides with each view - * port. ViewPort.createPredefinedActions() is where this is done - you - * should create a static createActions(ViewPort) method in your subclass - * of InteractiveView that is called from ViewPort.createPredefinedActions(). - * - * If you want to add the command to the menu and/or toolbar, this should be - * done in QuantoFrame.initMenuBar() (assuming you are using QuantoFrame to - * display the ViewPort). - * - * Your view needs to react to commands in commandTriggered(). See - * InteractiveGraphView for how to do this efficiently when you have a - * lot of commands. 
- */ -public abstract class InteractiveView extends JPanel { - - private String title; - private InteractiveViewManager viewManager = null; - private ViewPort viewPort = null; - private JPanel panelContainer; - private List activeJobs = new LinkedList(); - - public InteractiveView() { - this(""); - } - - public InteractiveView(String title) { - super(new BorderLayout()); - this.title = title; - panelContainer = new JPanel(); - panelContainer.setLayout(new BoxLayout(panelContainer, BoxLayout.PAGE_AXIS)); - add(panelContainer, BorderLayout.PAGE_END); - } - - protected void setMainComponent(Component c) { - add(c, BorderLayout.CENTER); - } - - public InteractiveViewManager getViewManager() { - return viewManager; - } - - void setViewManager(InteractiveViewManager viewManager) { - this.viewManager = viewManager; - } - - public ViewPort getViewPort() { - return viewPort; - } - - void setViewPort(ViewPort viewPort) { - if (this.viewPort != viewPort) { - if (this.viewPort != null) { - this.viewPort.setCommandEnabled(CommandManager.Command.Abort, false); - detached(this.viewPort); - } - this.viewPort = viewPort; - if (viewPort != null) { - viewPort.setCommandEnabled(CommandManager.Command.Abort, - !activeJobs.isEmpty()); - attached(viewPort); - } - } - } - - public void setTitle(String title) { - String oldTitle = this.title; - this.title = title; - firePropertyChange("title", oldTitle, title); - } - - public String getTitle() { - return title; - } - - /** - * Called when this view is attached to a view port. - * - * Used to activate and deactivate menu items for example. - */ - protected void attached(ViewPort vp) {} - - /** - * Called when this view detached from a view port. - * - * Used to activate and deactivate menu items for example. - */ - protected void detached(ViewPort vp) {} - - /** - * Determine if this view has a parent (i.e. is currently being displayed). - */ - public boolean isAttached() { - return viewPort != null; - } - - /** - * Called when view is killed to do clean-up. - */ - public void cleanUp() {} - - /** - * Return false if there are changes that need to be saved. - */ - public boolean isSaved() { return true; } - - /** - * Called when view is killed to do clean-up. - */ - public void commandTriggered(String command) { - if(CommandManager.Command.Abort.toString().equals(command)) { - if (!activeJobs.isEmpty()) { - Job[] jobs = activeJobs.toArray(new Job[activeJobs.size()]); - for (Job job : jobs) { - job.abortJob(); - } - } - } - } - - /** - * Checks whether the view can be closed. - * - * Checks isSaved(), and if that returns @c false, asks the - * user whether they want to close the view anyway, using the - * message provided by getUnsavedClosingMessage(). - * - * @return @c true if the view can be closed, @c false otherwise - */ - public boolean checkCanClose() { - if (!isSaved()) { - int dialogRet = JOptionPane.showConfirmDialog(this, - getUnsavedClosingMessage(), - "Unsaved changes", JOptionPane.YES_NO_OPTION); - return (dialogRet == JOptionPane.YES_OPTION); - } - return true; - } - - /** - * Provides a message asking whether to close the view, even though - * it has unsaved changes. - * - * @return "{view title} is unsaved. Close anyway?" - */ - protected String getUnsavedClosingMessage() { - return getTitle() + " is unsaved. 
Close anyway?"; - } - - public void refresh() {} - - protected static ImageIcon createImageIcon(String path) { - java.net.URL imgURL = InteractiveView.class.getResource(path); - if (imgURL != null) { - return new ImageIcon(imgURL); - } else { - System.err.println("Couldn't find file: " + path); - return null; - } - } - - private class MessagePanel extends JPanel { - - private JButton okButton; - - public MessagePanel(String message) { - this(message, null, null); - } - - public MessagePanel(String message, String details) { - this(message, details, null); - } - - public MessagePanel(String message, Icon icon) { - this(message, null, icon); - } - - public MessagePanel(String message, String details, Icon icon) { - super(new BorderLayout()); - - setBorder(BorderFactory.createEmptyBorder(3, 3, 3, 3)); - setBackground(UIManager.getColor("textHighlight")); - - JPanel topLine = new JPanel(new BorderLayout()); - topLine.setBackground(UIManager.getColor("textHighlight")); - - if (icon != null) { - topLine.add(new JLabel(icon), BorderLayout.LINE_START); - } - JLabel messageLabel = new JLabel(message); - topLine.add(messageLabel, BorderLayout.CENTER); - - okButton = new JButton("OK"); - okButton.setMargin(new Insets(0, 0, 0, 0)); - topLine.add(okButton, BorderLayout.LINE_END); - - if (message != null) { - add(topLine, BorderLayout.PAGE_START); - JLabel detailsLabel = new JLabel(details); - add(detailsLabel, BorderLayout.CENTER); - } else { - add(topLine, BorderLayout.CENTER); - } - } - - public void addActionListener(ActionListener l) { - okButton.addActionListener(l); - } - - public void removeActionListener(ActionListener l) { - okButton.removeActionListener(l); - } - } - - private class ErrorPanel extends MessagePanel { - public ErrorPanel(String message) { - super(message,UIManager.getIcon("OptionPane.errorIcon")); - } - - public ErrorPanel(String message, String details) { - super(message,details,UIManager.getIcon("OptionPane.errorIcon")); - setBackground(UIManager.getColor("textHighlight")); - } - } - - private class WarningPanel extends MessagePanel { - public WarningPanel(String message) { - super(message, UIManager.getIcon("OptionPane.warningIcon")); - } - - public WarningPanel(String message, String details) { - super(message,details,UIManager.getIcon("OptionPane.warningIcon")); - setBackground(UIManager.getColor("textHighlight")); - } - } - - // FIXME: timeout? 
- private class InfoPanel extends MessagePanel { - public InfoPanel(String message) { - super(message, UIManager.getIcon("OptionPane.informationIcon")); - } - - public InfoPanel(String message, String details) { - super(message,details,UIManager.getIcon("OptionPane.informationIcon")); - setBackground(UIManager.getColor("textHighlight")); - } - } - - private void pushMessagePanel(final MessagePanel p) { - panelContainer.add(p); - p.addActionListener(new ActionListener() { - public void actionPerformed(ActionEvent e) { - panelContainer.remove(p); - InteractiveView.this.validate(); - } - }); - this.validate(); - } - - private static class JobIndicatorPanel extends JPanel { - - private JLabel textLabel; - private JButton cancelButton = null; - - public JobIndicatorPanel(String description, final Job job) { - super(new BorderLayout()); - - setBorder(BorderFactory.createEmptyBorder(3, 3, 3, 3)); - setBackground(UIManager.getColor("textHighlight")); - - textLabel = new JLabel(description); - add(textLabel, BorderLayout.CENTER); - - cancelButton = new JButton(createImageIcon("/toolbarButtonGraphics/general/Stop16.gif")); - cancelButton.setToolTipText("Abort this operation"); - cancelButton.setMargin(new Insets(0, 0, 0, 0)); - cancelButton.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - job.abortJob(); - } - }); - add(cancelButton, BorderLayout.LINE_END); - } - } - - /** - * Registers a job, allowing it to be aborted by the "Abort all" - * action. - * - * Should not be called for a job if showJobIndicator() is called - * for that job. - * @param job The job to register - */ - protected void registerJob(final Job job) { - activeJobs.add(job); - if (getViewPort() != null) { - getViewPort().setCommandEnabled(CommandManager.Command.Abort, true); - } - job.addJobListener(new JobListener() { - - public void jobEnded(JobEndEvent event) { - activeJobs.remove(job); - if (activeJobs.isEmpty() && getViewPort() != null) { - getViewPort().setCommandEnabled(CommandManager.Command.Abort, false); - } - } - }); - } - - /** - * Shows an indicator at the bottom of the view with - * a button to cancel the job. - * - * @param jobDescription The text on the indicator - * @param job The job - */ - protected void showJobIndicator(String jobDescription, Job job) { - registerJob(job); - final JobIndicatorPanel indicator = new JobIndicatorPanel(jobDescription, job); - panelContainer.add(indicator); - job.addJobListener(new JobListener() { - - public void jobEnded(JobEndEvent event) { - panelContainer.remove(indicator); - InteractiveView.this.validate(); - } - }); - this.validate(); - } - - /** - * Display an informational message without getting in the way. - * - * @param message the message (should be short) - */ - protected void infoMessage(String message) { - final InfoPanel p = new InfoPanel(message); - pushMessagePanel(p); - } - - /** - * Display an informational message without getting in the way. - * - * @param message the message (should be short) - * @param details a more detailed message - */ - protected void detailedInfoMessage(String message, String details) { - final InfoPanel p = new InfoPanel(message, details); - pushMessagePanel(p); - } - - /** - * Display a warning message without getting in the way. - * - * @param message the message (should be short) - */ - protected void warningMessage(String message) { - final WarningPanel p = new WarningPanel(message); - pushMessagePanel(p); - } - - /** - * Display a warning message without getting in the way.
- * - * @param msg the message (should be short) - * @param details a more detailed message - */ - protected void detailedWarningMessage(String message, String details) { - final WarningPanel p = new WarningPanel(message, details); - pushMessagePanel(p); - } - - /** - * Display an error message without getting in the way. - * - * This is intended for situations where the problem was not directly - * caused by the user clicking something. - * - * @param msg the message (should be short) - */ - protected void errorMessage(String message) { - final ErrorPanel errorPanel = new ErrorPanel(message); - pushMessagePanel(errorPanel); - } - - /** - * Display an error message from the core without getting in the way. - * - * This is intended for situations where the problem was not directly - * caused by the user clicking something. - * - * @param msg a short message explaining what could not be done - * @param ex the exception thrown by the core - */ - protected void coreErrorMessage(String msg, CoreException ex) { - detailedErrorMessage(msg, ex); - } - - /** - * Display an error message, with extra detail, without getting in the way. - * - * This is intended for situations where the problem was not directly - * caused by the user clicking something. - * - * @param msg a short message explaining what could not be done - * @param details a more detailed message explaining why it could not be done - */ - protected void detailedErrorMessage(String msg, String details) { - final ErrorPanel errorPanel = new ErrorPanel(msg, details); - pushMessagePanel(errorPanel); - } - - /** - * Display an error message, with extra detail, without getting in the way. - * - * This is intended for situations where the problem was not directly - * caused by the user clicking something. - * - * @param msg a short message explaining what could not be done - * @param ex an exception detailing the error - */ - protected void detailedErrorMessage(String msg, Throwable ex) { - detailedErrorMessage(msg, ex.getLocalizedMessage()); - } - - /** - * Display a modal error message to the user. - * - * Consider whether errorMessage might be less annoying. - * - * @param msg the error message - */ - protected void errorDialog(String msg) { - errorDialog("Error", msg); - } - - /** - * Display a modal error message from the core. - * - * Consider whether coreErrorMessage might be less annoying. - * - * @param msg a short message explaining what could not be done - * @param ex the exception thrown by the core - */ - protected void coreErrorDialog(String msg, CoreException ex) { - DetailedErrorDialog.showCoreErrorDialog(this, msg, ex); - } - - /** - * Display a modal error message, with extra detail. - * - * Consider whether detailedErrorMessage might be less annoying. - * - * @param title a title for the dialog - * @param msg a short message explaining what could not be done - * @param details a more detailed message explaining why it could not be done - */ - protected void detailedErrorDialog(String title, String msg, String details) { - DetailedErrorDialog.showDetailedErrorDialog(this, title, msg, details); - } - - /** - * Display a modal error message, with extra detail. - * - * Consider whether detailedErrorMessage might be less annoying. 
- * - * @param title a title for the dialog - * @param msg a short message explaining what could not be done - * @param ex an exception detailing the error - */ - protected void detailedErrorDialog(String title, String msg, Throwable ex) { - DetailedErrorDialog.showDetailedErrorDialog(this, title, msg, ex.getLocalizedMessage()); - } - - /** - * Display a modal error message to the user. - * - * Consider whether errorMessage might be less annoying. - * - * @param title a title for the message - * @param msg the error message - */ - protected void errorDialog(String title, String msg) { - JOptionPane.showMessageDialog(this, msg, title, JOptionPane.ERROR_MESSAGE); - } - - protected void infoDialog(String msg) { - JOptionPane.showMessageDialog(this, msg); - } - - protected void infoDialog(String title, String msg) { - JOptionPane.showMessageDialog(this, msg, title, JOptionPane.INFORMATION_MESSAGE); - } -} diff --git a/gui/src/quanto/gui/InteractiveViewManager.java b/gui/src/quanto/gui/InteractiveViewManager.java deleted file mode 100644 index 03bbbeb9..00000000 --- a/gui/src/quanto/gui/InteractiveViewManager.java +++ /dev/null @@ -1,126 +0,0 @@ -package quanto.gui; - -import quanto.util.StringNamer; -import java.beans.PropertyChangeEvent; -import java.beans.PropertyChangeListener; -import java.util.Map; -import org.apache.commons.collections15.BidiMap; -import org.apache.commons.collections15.MapIterator; -import org.apache.commons.collections15.bidimap.DualTreeBidiMap; -import org.apache.commons.collections15.comparators.ComparableComparator; -import org.apache.commons.collections15.contrib.HashCodeComparator; - -/** - * - * @author alex - */ -public class InteractiveViewManager { - - // bidirectional map implemented as dual trees. note that get(null) or - // getKey(null) will raise exceptions in the Comparators. 
- private final BidiMap views = - new DualTreeBidiMap( - ComparableComparator.getInstance(), - new HashCodeComparator()); - private PropertyChangeListener viewRenameListener = new PropertyChangeListener() { - - public void propertyChange(PropertyChangeEvent evt) { - if (!"title".equals(evt.getPropertyName())) { - return; - } - InteractiveView view = (InteractiveView) evt.getSource(); - synchronized (views) { - String oldName = views.getKey(view); - views.remove(oldName); - String newName = StringNamer.getFreshName(views.keySet(), evt.getNewValue().toString()); - views.put(newName, view); - } - } - }; - - public InteractiveView getNextFreeView() { - InteractiveView foundView = null; - for (InteractiveView view : views.values()) { - if (!view.isAttached()) { - foundView = view; - break; - } - } - return foundView; - } - - public void addView(InteractiveView view) { - String name = StringNamer.getFreshName(views.keySet(), view.getTitle()); - synchronized (views) { - views.put(name, view); - view.setViewManager(this); - view.addPropertyChangeListener("title", viewRenameListener); - } - } - - public void removeView(InteractiveView view) { - synchronized (views) { - view.removePropertyChangeListener("title", viewRenameListener); - view.setViewManager(null); - views.removeValue(view); - } - } - - public String getViewName(InteractiveView v) { - return views.getKey(v); - } - - public InteractiveView getView(String name) { - return views.get(name); - } - - public Map getViews() { - return views; - } - - public void removeView(String name) { - InteractiveView view = views.get(name); - if (view == null) { - throw new IllegalArgumentException("No such view"); - } - removeView(view); - } - - public boolean closeAllViews() { - for (InteractiveView view : views.values()) { - if (!view.checkCanClose()) { - return false; - } - } - MapIterator it = views.mapIterator(); - while (it.hasNext()) { - it.next(); - InteractiveView view = it.getValue(); - it.remove(); - if (view.isAttached()) { - view.getViewPort().switchToConsole(); - } - view.cleanUp(); - } - return true; - } - - /** - * Call "repaint" on all views that might be visible - */ - public void repaintViews() { - synchronized (views) { - for (InteractiveView v : views.values()) { - v.repaint(); - } - } - } - - public void refreshAll() { - synchronized (views) { - for (InteractiveView v : views.values()) { - v.refresh(); - } - } - } -} diff --git a/gui/src/quanto/gui/JavaQuantoDotLayout.java b/gui/src/quanto/gui/JavaQuantoDotLayout.java deleted file mode 100644 index 7b4529cc..00000000 --- a/gui/src/quanto/gui/JavaQuantoDotLayout.java +++ /dev/null @@ -1,97 +0,0 @@ -package quanto.gui; - -import edu.uci.ics.jung.contrib.algorithms.layout.AKDotLayout; -import java.awt.Dimension; -import java.awt.Rectangle; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import quanto.core.data.CoreGraph; -import quanto.core.data.Edge; -import quanto.core.data.Vertex; - -public class JavaQuantoDotLayout - extends AKDotLayout { - - private static final double VERTEX_PADDING = 20; - private static final double EMPTY_BOX_SIZE = 40; - private Rectangle boundingRect = new Rectangle(0, 0, 0, 0); - - public JavaQuantoDotLayout(CoreGraph graph, Dimension size) { - super(graph); - } - - public JavaQuantoDotLayout(CoreGraph graph) { - this(graph, new Dimension(800, 600)); - } - - @Override - public void initialize() { - super.initialize(); - recalculateSize(); - } - - @Override - public Dimension getSize() { - return boundingRect.getSize(); - } - - @Override - 
public void setLocation(Vertex picked, Point2D p) { - if (p.getX() < 20) { - p.setLocation(20, p.getY()); - } - if (p.getY() < 20) { - p.setLocation(p.getX(), 20); - } - super.setLocation(picked, p); - boundingRect.add(new Rectangle2D.Double( - p.getX() - VERTEX_PADDING, - p.getY() - VERTEX_PADDING, - 2 * VERTEX_PADDING, - 2 * VERTEX_PADDING)); - } - - @Override - public void setLocation(Vertex picked, double x, double y) { - if (x < 20) { - x = 20; - } - if (y < 20) { - y = 20; - } - super.setLocation(picked, x, y); - boundingRect.add(new Rectangle2D.Double( - x - VERTEX_PADDING, - y - VERTEX_PADDING, - 2 * VERTEX_PADDING, - 2 * VERTEX_PADDING)); - } - - public void recalculateSize() { - double left = Double.MAX_VALUE; - double top = Double.MAX_VALUE; - double right = 0; - double bottom = 0; - for (Vertex v : getGraph().getVertices()) { - Point2D point = transform(v); - left = Math.min(left, point.getX()); - top = Math.min(top, point.getY()); - right = Math.max(right, point.getX()); - bottom = Math.max(bottom, point.getY()); - } - left -= VERTEX_PADDING; - top -= VERTEX_PADDING; - right += VERTEX_PADDING; - bottom += VERTEX_PADDING; - if (left < right && top < bottom) { - // get the same padding right and bottom as left and top - boundingRect.setRect( - 0, - 0, - right + left, - bottom + top); - } else { - boundingRect.setRect(0, 0, EMPTY_BOX_SIZE, EMPTY_BOX_SIZE); - } - } -} diff --git a/gui/src/quanto/gui/Job.java b/gui/src/quanto/gui/Job.java deleted file mode 100644 index cd54ef51..00000000 --- a/gui/src/quanto/gui/Job.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package quanto.gui; - -import javax.swing.SwingUtilities; -import javax.swing.event.EventListenerList; - -/** - * A separate thread that executes some job on the graph - * asynchronously. - * - * This mainly exists to allow the job to be displayed to the user - * and aborted. - * - * The job must call fireJobFinished() when it has come to a natural - * end. It may also call fireJobAborted() when it is interrupted, - * but should work fine even if it doesn't. - */ -public abstract class Job extends Thread -{ - private EventListenerList listenerList = new EventListenerList(); - private JobEndEvent jobEndEvent = null; - - /** - * Abort the job. The default implementation interrupts the - * thread and calls fireJobAborted(). - */ - public void abortJob() { - this.interrupt(); - fireJobAborted(); - } - - /** - * Add a job listener. - * - * All job listener methods execute in the context of the - * AWT event queue. - * @param l - */ - public void addJobListener(JobListener l) { - listenerList.add(JobListener.class, l); - } - - public void removeJobListener(JobListener l) { - listenerList.remove(JobListener.class, l); - } - - /** - * Notify listeners that the job has finished successfully, - * if no notification has already been sent. - */ - protected final void fireJobFinished() { - if (jobEndEvent == null) { - fireJobEnded(false); - } - } - - /** - * Notify listeners that the job has been aborted, if no - * notification has already been sent. 
- */ - protected final void fireJobAborted() { - if (jobEndEvent == null) { - fireJobEnded(true); - } - } - - private void fireJobEnded(final boolean aborted) { - SwingUtilities.invokeLater(new Runnable() { - - public void run() { - // Guaranteed to return a non-null array - Object[] listeners = listenerList.getListenerList(); - // Process the listeners last to first, notifying - // those that are interested in this event - for (int i = listeners.length - 2; i >= 0; i -= 2) { - if (listeners[i] == JobListener.class) { - // Lazily create the event: - if (jobEndEvent == null) { - jobEndEvent = new JobEndEvent(this, aborted); - } - ((JobListener) listeners[i + 1]).jobEnded(jobEndEvent); - } - } - } - }); - } - -} diff --git a/gui/src/quanto/gui/JobEndEvent.java b/gui/src/quanto/gui/JobEndEvent.java deleted file mode 100644 index f2572103..00000000 --- a/gui/src/quanto/gui/JobEndEvent.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package quanto.gui; - -import java.util.EventObject; - -/** - * - * @author alemer - */ -public class JobEndEvent extends EventObject -{ - private boolean aborted = false; - - public JobEndEvent(Object source) { - super(source); - } - - public JobEndEvent(Object source, boolean aborted) { - super(source); - this.aborted = aborted; - } - - public boolean jobWasAborted() { - return aborted; - } - -} diff --git a/gui/src/quanto/gui/JobListener.java b/gui/src/quanto/gui/JobListener.java deleted file mode 100644 index 480f98c0..00000000 --- a/gui/src/quanto/gui/JobListener.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ -package quanto.gui; - -import java.util.EventListener; - -/** - * - * @author alemer - */ -public interface JobListener extends EventListener -{ - - /** - * Notifies the listener that the job has terminated. - * - * Guaranteed to be sent exactly once in the life of a job. - * @param event - */ - void jobEnded(JobEndEvent event); - -} diff --git a/gui/src/quanto/gui/LeftTabbedPane.java b/gui/src/quanto/gui/LeftTabbedPane.java deleted file mode 100644 index d711b723..00000000 --- a/gui/src/quanto/gui/LeftTabbedPane.java +++ /dev/null @@ -1,37 +0,0 @@ -package quanto.gui; - -import javax.swing.BoxLayout; -import javax.swing.JPanel; -import javax.swing.JTabbedPane; - -import quanto.core.Core; - -/* - * Tabbed Pane containing the RulesBar and the Toolbox. 
- * Instantiated in QuantoFrame - */ -public class LeftTabbedPane extends JPanel { - - private Toolbox toolbox; - private JTabbedPane tabbedPane; - - public LeftTabbedPane(Core core, ViewPort viewPort) { - - this.setLayout(new BoxLayout(this, BoxLayout.PAGE_AXIS)); - - tabbedPane = new JTabbedPane(); - RulesBar sidebar = new RulesBar(core.getRuleset(), viewPort); - tabbedPane.addTab("Rules", null, sidebar, - "Display Ruleset"); - - toolbox = new Toolbox(core, viewPort); - tabbedPane.addTab("Toolbox", null, toolbox, "Display Toolbox"); - - this.add(tabbedPane); - } - - public void setToolbox(Toolbox toolbox) { - this.tabbedPane.remove(this.toolbox); - this.tabbedPane.addTab("Toolbox", null, toolbox, "Display Toolbox"); - } -} diff --git a/gui/src/quanto/gui/PdfGraphVisualizationServer.java b/gui/src/quanto/gui/PdfGraphVisualizationServer.java deleted file mode 100644 index b9d4faa2..00000000 --- a/gui/src/quanto/gui/PdfGraphVisualizationServer.java +++ /dev/null @@ -1,153 +0,0 @@ -package quanto.gui; - -import quanto.core.Theory; -import quanto.core.data.Vertex; -import quanto.core.data.Edge; -import quanto.core.data.CoreGraph; -import com.itextpdf.text.Document; -import com.itextpdf.text.DocumentException; -import com.itextpdf.text.pdf.PdfContentByte; -import com.itextpdf.text.pdf.PdfWriter; -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.util.BalancedEdgeIndexFunction; -import edu.uci.ics.jung.contrib.visualization.BangBoxGraphVisualizationViewer; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Context; -import edu.uci.ics.jung.visualization.renderers.Renderer.VertexLabel; -import edu.uci.ics.jung.visualization.transform.shape.GraphicsDecorator; -import java.awt.Dimension; -import java.awt.Graphics2D; -import java.awt.geom.Rectangle2D; -import java.io.OutputStream; -import javax.swing.JComponent; -import org.apache.commons.collections15.Predicate; -import quanto.core.data.BangBox; -import quanto.gui.graphhelpers.QVertexAngleLabeler; -import quanto.gui.graphhelpers.QVertexColorTransformer; -import quanto.gui.graphhelpers.QVertexIconTransformer; -import quanto.gui.graphhelpers.QVertexLabelTransformer; -import quanto.gui.graphhelpers.QVertexRenderer; -import quanto.gui.graphhelpers.QVertexShapeTransformer; -import quanto.gui.graphhelpers.BangBoxRenderer; - -/** - * - * @author alemer - */ -public class PdfGraphVisualizationServer extends BangBoxGraphVisualizationViewer<Vertex, Edge, BangBox> { - - private final CoreGraph graph; - private boolean arrowHeadsShown = false; - - public PdfGraphVisualizationServer(Theory theory, CoreGraph graph) { - this(theory, QuantoApp.useExperimentalLayout ?
new JavaQuantoDotLayout( - graph) : new QuantoDotLayout(graph)); - } - - public PdfGraphVisualizationServer(Theory theory, - Layout<Vertex, Edge> layout) { - super(layout); - - if (!(layout.getGraph() instanceof CoreGraph)) { - throw new IllegalArgumentException( - "Only QuantoGraphs are supported"); - } - this.graph = (CoreGraph) layout.getGraph(); - - layout.initialize(); - - setupRendering(); - } - - private void setupRendering() { - getRenderContext().setParallelEdgeIndexFunction( - BalancedEdgeIndexFunction.getInstance()); - - getRenderContext().setEdgeArrowPredicate( - new Predicate<Context<Graph<Vertex, Edge>, Edge>>() { - - public boolean evaluate( - Context<Graph<Vertex, Edge>, Edge> object) { - return object.element.isDirected(); - } - }); - - getRenderContext().setVertexLabelTransformer( - new QVertexLabelTransformer()); - getRenderContext().setVertexLabelRenderer(new QVertexAngleLabeler()); - getRenderContext().setVertexFillPaintTransformer( - new QVertexColorTransformer()); - getRenderContext().setVertexShapeTransformer( - new QVertexShapeTransformer()); - getRenderContext().setVertexIconTransformer(new QVertexIconTransformer()); - - getRenderer().setVertexRenderer(new QVertexRenderer()); - getRenderer().getVertexLabelRenderer().setPosition( - VertexLabel.Position.S); - getRenderer().setBangBoxRenderer(new BangBoxRenderer()); - // For debugging: show a grid behind the graph - // addPreRenderPaintable(new GridPaintable(new - // GridPaintable.BoundsCalculator() { - // public Rectangle2D getBounds() { return getGraphBounds(); } - // })); - } - - public boolean isArrowHeadsShown() { - return arrowHeadsShown; - } - - public void setArrowHeadsShown(boolean arrowHeadsShown) { - this.arrowHeadsShown = arrowHeadsShown; - } - - public void renderToPdf(OutputStream output) throws DocumentException { - Rectangle2D bounds = getGraphBounds(); - final int width = (int) (bounds.getMaxX()) + 20; - final int height = (int) (bounds.getMaxY()) + 20; - - Document doc = new Document(new com.itextpdf.text.Rectangle(width, - height)); - - PdfWriter writer = PdfWriter.getInstance(doc, output); - - doc.open(); - - PdfContentByte cb = writer.getDirectContent(); - Graphics2D g2 = cb.createGraphicsShapes(width, height); - - GraphicsDecorator pdfGr = new GraphicsDecorator(g2); - getRenderContext().setGraphicsContext(pdfGr); - - // create a virtual screen so Jung doesn't freak - JComponent virtual = new JComponent() { - - private static final long serialVersionUID = 1L; - - @Override - public Dimension getSize() { - // make sure nothing gets clipped - return new Dimension(width, height); - } - }; - - getRenderContext().setScreenDevice(virtual); - getRenderer().render(getRenderContext(), getGraphLayout()); - - g2.dispose(); - doc.close(); - } - - /** - * Compute the bounding box of the graph under its current layout.
- * - * @return - */ - public Rectangle2D getGraphBounds() { - Rectangle2D bounds = null; - synchronized (graph) { - bounds = GraphVisualizationViewer.getSubgraphBounds( - getGraphLayout(), graph.getVertices()); - } - return bounds; - } -} diff --git a/gui/src/quanto/gui/QuantoApp.java b/gui/src/quanto/gui/QuantoApp.java deleted file mode 100644 index 24369124..00000000 --- a/gui/src/quanto/gui/QuantoApp.java +++ /dev/null @@ -1,604 +0,0 @@ -package quanto.gui; - -import apple.dts.samplecode.osxadapter.OSXAdapter; -import java.awt.Component; -import java.awt.Dimension; -import java.awt.event.ItemEvent; -import java.awt.event.ItemListener; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.util.logging.ConsoleHandler; -import java.util.logging.Level; -import java.util.logging.Logger; -import java.util.prefs.Preferences; -import javax.swing.JDialog; -import javax.swing.JFileChooser; -import javax.swing.JOptionPane; -import javax.swing.UIManager; -import org.xml.sax.SAXException; -import quanto.core.*; -import quanto.core.data.CoreGraph; -import quanto.core.protocol.CoreProcess; -import quanto.core.protocol.CoreTalker; - -/** - * Singleton class - * @author aleks - * - */ -public class QuantoApp { - - private static final boolean LOG_PROTOCOL = false; - private static final boolean LOG_JUNG = false; - private static final boolean LOG_QUANTO = true; - private final static Logger logger = - Logger.getLogger("quanto.gui"); - // isMac is used for CTRL vs META shortcuts, etc - public static final boolean isMac = - (System.getProperty("os.name").toLowerCase().indexOf("mac os x") != -1); - public static final boolean isWin = - (System.getProperty("os.name").toLowerCase().indexOf("win") != -1); - public static final boolean isUnix = - (System.getProperty("os.name").toLowerCase().indexOf("nix") != -1 - || System.getProperty("os.name").toLowerCase().indexOf("nux") != -1); - public static final int COMMAND_MASK = - isMac ? 
java.awt.event.InputEvent.META_DOWN_MASK - : java.awt.event.InputEvent.CTRL_DOWN_MASK; - public static boolean useExperimentalLayout = false; - private final Preferences globalPrefs; - private final CoreProcess coreProcess; - private final Core core; - private JFileChooser[] fileChooser = {null, null, null}; - private InteractiveViewManager viewManager; - private TheoryManager theoryManager; - public static final String lastTheoryDir = "theory"; - public static final String lastTheoryFileName = lastTheoryDir + File.separatorChar + "stored.qth"; - - public static File getAppSettingsDirectory(boolean create) throws IOException { - File dir; - String userHome = System.getProperty("user.home"); - if (isWin) { - dir = new File(userHome + File.separatorChar + "Quantomatic"); - } else if (isUnix) { - dir = new File(userHome - + File.separatorChar + ".config" - + File.separatorChar + "Quantomatic"); - } else { - dir = new File(userHome - + File.separatorChar + ".quantomatic"); - } - if (create && !dir.exists()) { - if (!dir.mkdirs()) { - throw new IOException("Failed to create preferences directory " + dir.getAbsolutePath()); - } - } - if (dir.exists() && !dir.isDirectory()) { - throw new IOException(dir.getAbsolutePath() + " is not a directory!"); - } - return dir; - } - - public String getRootDirectory() { - String applicationDir = getClass().getProtectionDomain().getCodeSource().getLocation().getPath(); - if (applicationDir.endsWith(".jar")) { - applicationDir = new File(applicationDir).getParent(); - } else { - applicationDir += getClass().getName().replace('.', File.separatorChar); - applicationDir = new File(applicationDir).getParent(); - } - - if (applicationDir.endsWith("gui" + File.separator + "dist")) { - applicationDir = applicationDir.replaceFirst(File.separator + "gui" + File.separator + "dist", ""); - } else { - applicationDir = applicationDir.replaceFirst(File.separator + "gui" + File.separator + "bin" - + File.separator + "quanto" + File.separator + "gui", ""); - } - - /* - * If the user relocates the .jar file and appends the path to the core to $PATH - * we cannot really infer the location of the root dir (or can we?): - * No default files will be loaded - */ - - return applicationDir; - } - - private static class Pref { - - final T def; // default value - final String key; - String friendlyName; - - protected Pref(String key, T def) { - this.key = key; - this.def = def; - } - - protected Pref(String key, T def, String friendlyName) { - this.key = key; - this.def = def; - this.friendlyName = friendlyName; - } - - public String getFriendlyName() { - return friendlyName; - } - } - - public static class StringPref extends Pref { - - protected StringPref(String key, String def) { - super(key, def); - } - } - - public class BoolPref extends Pref implements ItemListener { - - protected BoolPref(String key, Boolean def) { - super(key, def); - } - - protected BoolPref(String key, Boolean def, String friendlyName) { - super(key, def, friendlyName); - } - - public void itemStateChanged(ItemEvent e) { - QuantoApp.this.setPreference(this, e.getStateChange() == ItemEvent.SELECTED); - } - } - // Preferences - public final BoolPref NEW_WINDOW_FOR_GRAPHS = - new BoolPref("new_window_for_graphs", false, "Open graphs in a new window"); - public static final StringPref[] LAST_OPEN_DIRS = {new StringPref("last_open_dir", null), - new StringPref("last_open_ruleset_dir", null), - new StringPref("last_open_theory_dir", null)}; - public static final int DIR_GRAPH = 0; - public static final int 
DIR_RULESET = 1; - public static final int DIR_THEORY = 2; - - /** - * main entry point for the GUI application - * @param args - */ - public static void main(String[] args) { - /* - * Setup logging - */ - if (LOG_PROTOCOL) { - // protocol stream - Logger protocolLogger = Logger.getLogger("quanto.core.protocol.stream"); - protocolLogger.setUseParentHandlers(false); - ConsoleHandler ch = new ConsoleHandler(); - ch.setLevel(Level.FINEST); - protocolLogger.addHandler(ch); - - // choose real log level here - protocolLogger.setLevel(Level.ALL); - } else { - // only log problems by default - // this is required for when LOG_QUANTO is true but LOG_PROTOCOL is false - Logger protocolLogger = Logger.getLogger("quanto.core.protocol.stream"); - protocolLogger.setLevel(Level.INFO); - } - if (LOG_QUANTO) { - // log everything to the console - Logger ql = Logger.getLogger("quanto"); - ql.setUseParentHandlers(false); - ConsoleHandler ch = new ConsoleHandler(); - ch.setLevel(Level.FINEST); - ql.addHandler(ch); - - // choose real log level here - ql.setLevel(Level.ALL); - } - if (LOG_JUNG) { - // log everything to the console - Logger ql = Logger.getLogger("edu.uci.ics.jung"); - ql.setUseParentHandlers(false); - ConsoleHandler ch = new ConsoleHandler(); - ch.setLevel(Level.FINEST); - ql.addHandler(ch); - - // choose real log level here - ql.setLevel(Level.ALL); - } - - /* - * Find external executables - */ - logger.log(Level.FINER, "Starting quantomatic"); - boolean mathematicaMode = false; - String coreSocket = null; - String coreOverride = null; - String dotOverride = null; - for (String arg : args) { - if (arg.equals("--app-mode")) { - String appName = "Quantomatic.app"; - - // determine the app name from the classpath if I can... - String classpath = System.getProperty("java.class.path"); - logger.log(Level.FINEST, - "Trying to determine app name using class path ({0})", - classpath); - for (String path : classpath.split(System.getProperty("path.separator"))) { - if (path.indexOf("QuantoGui.jar") != -1) { - String[] dirs = path.split(System.getProperty("file.separator")); - if (dirs.length >= 5) { - appName = dirs[dirs.length - 5]; - } - } - } - - logger.log(Level.FINER, "Invoked as OS X application ({0})", appName); - if (dotOverride != null) - dotOverride = appName + "/Contents/MacOS/dot_static"; - if (coreOverride != null) - coreOverride = appName + "/Contents/MacOS/quanto-core-app"; - } else if (arg.equals("--mathematica-mode")) { - mathematicaMode = true; - logger.log(Level.FINER, "Mathematica mode enabled"); - } else if (arg.startsWith("--core=")) { - coreOverride = arg.substring("--core=".length()); - } else if (arg.startsWith("--dot=")) { - dotOverride = arg.substring("--dot=".length()); - } else if (arg.startsWith("--core-socket=")) { - coreSocket = arg.substring("--core-socket=".length()); - } - } - if (coreOverride != null) { - CoreProcess.quantoCoreExecutable = coreOverride; - } - if (dotOverride != null) { - edu.uci.ics.jung.contrib.algorithms.layout.AbstractDotLayout.dotProgram = dotOverride; - } - logger.log(Level.FINE, "Using dot executable: {0}", - edu.uci.ics.jung.contrib.algorithms.layout.AbstractDotLayout.dotProgram); - logger.log(Level.FINE, "Using core executable: {0}", - CoreProcess.quantoCoreExecutable); - - /* - * Try to blend into the system we're running on - */ - try { - UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); - } catch (Exception e) { - logger.log(Level.WARNING, "Could not set look-and-feel", e); - } - - /* - * Mac magic - */ - if 
(QuantoApp.isMac && !mathematicaMode) { - //System.setProperty("apple.laf.useScreenMenuBar", "true"); - System.setProperty( - "com.apple.mrj.application.apple.menu.about.name", - "Quanto"); - } - - boolean no_dot = false; - try { - Process testDotProc = Runtime.getRuntime().exec(new String[] { - edu.uci.ics.jung.contrib.algorithms.layout.AbstractDotLayout.dotProgram, - "-V" - }); - int result = testDotProc.waitFor(); - if (result != 0) { - no_dot = true; - } - } catch (InterruptedException ex) { - logger.log(Level.WARNING, "Interrupted while waiting for dot", ex); - } catch (IOException ex) { - logger.log(Level.WARNING, "Error running dot", ex); - no_dot = true; - } - if (no_dot) { - String message; - if (dotOverride == null) - message = "Could not find the 'dot' executable; please make " + - "sure GraphViz is installed, and 'dot' is in your PATH."; - else - message = "Could not run '" + dotOverride + "'"; - JOptionPane.showMessageDialog(null, - message, - "'dot' not found", - JOptionPane.ERROR_MESSAGE); - } - - try { - QuantoApp app; - if (coreSocket != null) { - File socket = new File(coreSocket); - if (!socket.exists()) { - logger.log(Level.SEVERE, "Core socket '{0}' does not exist", coreSocket); - System.exit(1); - } - CoreTalker talker = new CoreTalker(); - talker.connect(new FileInputStream(socket), new FileOutputStream(socket)); - app = new QuantoApp(talker); - } else { - app = new QuantoApp(); - } - app.newGraph(true); - logger.log(Level.FINER, "Finished initialisation"); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Failed to connect to core: terminating", ex); - System.exit(1); - } catch (CoreException ex) { - logger.log(Level.SEVERE, "Failed to start core: terminating", ex); - JOptionPane.showMessageDialog(null, - ex.getMessage(), - "Could not start core", - JOptionPane.ERROR_MESSAGE); - System.exit(1); - } - } - - public boolean shutdown() { - theoryManager.saveState(); - logger.log(Level.FINER, "Shutting down"); - if (viewManager.closeAllViews()) { - if (coreProcess != null) { - coreProcess.killCore(); - } - logger.log(Level.FINER, "Exiting now"); - System.exit(0); - } - return false; - } - - public TheoryManager getTheoryManager() { - return theoryManager; - } - - private void demandTheoryOrQuit() { - File f = openFile(null, "Select theory file", QuantoApp.DIR_THEORY); - if (f == null) { - JOptionPane.showMessageDialog(null, "Cannot proceed without a theory", "Error", JOptionPane.ERROR_MESSAGE); - System.exit(1); - } - try { - Theory theory = theoryManager.loadTheory(f.toURI().toURL()); - core.updateCoreTheory(theory); - } catch (IOException ex) { - DetailedErrorDialog.showDetailedErrorDialog(null, - "Open theory", - "Could not open theory file; cannot proceed", - ex); - System.exit(1); - } catch (ParseException ex) { - DetailedErrorDialog.showDetailedErrorDialog(null, - "Open theory", - "Corrupted theory file; cannot proceed", - ex); - System.exit(1); - } catch (CoreException ex) { - DetailedErrorDialog.showCoreErrorDialog(null, - "Core refused to load theory; cannot proceed", - ex); - System.exit(1); - } catch (DuplicateTheoryException ex) { - logger.log(Level.SEVERE, - "Got a duplicate theory exception, but there were " - + "no existing theories", - ex); - System.exit(1); - } - } - - public QuantoApp() throws CoreException { - this(null); - } - - public QuantoApp(CoreTalker talker) throws CoreException { - globalPrefs = Preferences.userNodeForPackage(this.getClass()); - InteractiveGraphView.setPreferencesNode(globalPrefs.node("graphs")); - - if (talker == null) 
{ - coreProcess = new CoreProcess(); - coreProcess.startCore(); - talker = coreProcess.getTalker(); - } else { - coreProcess = null; - } - core = new Core(talker); - viewManager = new InteractiveViewManager(); - - File theoryDir = null; - try { - theoryDir = new File(getAppSettingsDirectory(true), "theories"); - } catch (IOException ex) { - logger.log(Level.SEVERE, ex.getLocalizedMessage(), ex); - } - theoryManager = new TheoryManager(theoryDir, core); - if (core.getActiveTheory() == null) { - demandTheoryOrQuit(); - } - - core.addCoreChangeListener(new CoreChangeListener() { - - public void theoryAboutToChange(TheoryChangeEvent evt) { - } - - public void theoryChanged(TheoryChangeEvent evt) { - viewManager.closeAllViews(); - } - }); - - if (isMac) { - try { - OSXAdapter.setQuitHandler(this, getClass().getDeclaredMethod("shutdown", (Class[]) null)); - } catch (Exception e) { - logger.log(Level.SEVERE, "Could not set quit handler", e); - } - } - } - - private void createFileChooser(int type) { - if (fileChooser[type] == null) { - fileChooser[type] = new JFileChooser(); - String lastDir = getPreference(QuantoApp.LAST_OPEN_DIRS[type]); - if (lastDir != null) { - fileChooser[type].setCurrentDirectory(new File(lastDir)); - } - } - } - - public File openFile(Component parent, String title, int type) { - createFileChooser(type); - int retVal = fileChooser[type].showDialog(parent, title); - fileChooser[type].setDialogType(JFileChooser.OPEN_DIALOG); - if (retVal == JFileChooser.APPROVE_OPTION) { - File f = fileChooser[type].getSelectedFile(); - if (f.getParent() != null) { - setPreference(QuantoApp.LAST_OPEN_DIRS[type], f.getParent()); - } - return f; - } - return null; - } - - public File openFile(Component parent) { - return openFile(parent, "Open", DIR_GRAPH); - } - - public File saveFile(Component parent, String title, int type) { - createFileChooser(type); - int retVal = fileChooser[type].showDialog(parent, title); - fileChooser[type].setDialogType(JFileChooser.SAVE_DIALOG); - if (retVal == JFileChooser.APPROVE_OPTION) { - File f = fileChooser[type].getSelectedFile(); - if (f.exists()) { - int overwriteAnswer = JOptionPane.showConfirmDialog( - parent, - "Are you sure you want to overwrite \"" + f.getName() + "\"?", - "Overwrite file?", - JOptionPane.YES_NO_OPTION); - if (overwriteAnswer != JOptionPane.YES_OPTION) { - return null; - } - } - if (f.getParent() != null) { - setPreference(QuantoApp.LAST_OPEN_DIRS[type], f.getParent()); - } - return f; - } - return null; - } - - public File saveFile(Component parent) { - return saveFile(parent, "Save", DIR_GRAPH); - } - - public InteractiveViewManager getViewManager() { - return viewManager; - } - - public Core getCore() { - return core; - } - - public void createNewFrame() { - try { - InteractiveView view = viewManager.getNextFreeView(); - if (view == null) { - view = createNewGraph(); - } - openNewFrame(view); - } catch (CoreException ex) { - logger.log(Level.SEVERE, "Could not create a new graph", ex); - DetailedErrorDialog.showCoreErrorDialog(null, "Could not create a new graph to display", ex); - } - } - - public void openNewFrame(InteractiveView view) - throws ViewUnavailableException { - QuantoFrame fr = new QuantoFrame(this); - try { - fr.getViewPort().attachView(view); - fr.pack(); - fr.setVisible(true); - } catch (ViewUnavailableException ex) { - logger.log(Level.WARNING, - "Tried to open an already-attached view in a new frame", ex); - fr.dispose(); - throw ex; - } - } - - public InteractiveGraphView createNewGraph() - throws 
CoreException { - CoreGraph newGraph = core.createEmptyGraph(); - InteractiveGraphView vis = - new InteractiveGraphView(core, newGraph, new Dimension(800, 600)); - viewManager.addView(vis); - return vis; - } - - public InteractiveGraphView openGraph(File file) - throws CoreException, - java.io.IOException { - CoreGraph loadedGraph = core.loadGraph(file); - InteractiveGraphView vis = - new InteractiveGraphView(core, loadedGraph, new Dimension(800, 600)); - vis.setTitle(file.getName()); - - viewManager.addView(vis); - core.renameGraph(loadedGraph, viewManager.getViewName(vis)); - - //vis.cleanUp(); - //vis.updateGraph(null); - vis.getGraph().setSaved(true); - return vis; - } - - /** - * Create a new graph, read the name, and send to a fresh - * InteractiveQuantoVisualizer. - * @param initial a boolean that tells whether this is the - * first call to newGraph(). - */ - public void newGraph(boolean initial) { - try { - CoreGraph newGraph = core.createEmptyGraph(); - InteractiveGraphView vis = - new InteractiveGraphView(core, newGraph, new Dimension(800, 600)); - viewManager.addView(vis); - - if (initial || getPreference(NEW_WINDOW_FOR_GRAPHS)) { // are we making a new window? - openNewFrame(vis); - } - } catch (CoreException e) { - logger.log(Level.SEVERE, "Failed to create a new graph", e); - DetailedErrorDialog.showCoreErrorDialog(null, "Could not create a new graph to display", e); - } - } - - public void newGraph() { - newGraph(false); - } - - /** - * Get a global preference. This method is overloaded because the preference API - * doesn't support generics. - */ - public boolean getPreference(QuantoApp.BoolPref pref) { - return globalPrefs.getBoolean(pref.key, pref.def); - } - - public String getPreference(QuantoApp.StringPref pref) { - return globalPrefs.get(pref.key, pref.def); - } - - /** - * Set a global preference. 
- */ - public void setPreference(QuantoApp.BoolPref pref, boolean value) { - globalPrefs.putBoolean(pref.key, value); - } - - public void setPreference(QuantoApp.StringPref pref, String value) { - globalPrefs.put(pref.key, value); - } -} \ No newline at end of file diff --git a/gui/src/quanto/gui/QuantoAutoLayout.java b/gui/src/quanto/gui/QuantoAutoLayout.java deleted file mode 100644 index 32e71048..00000000 --- a/gui/src/quanto/gui/QuantoAutoLayout.java +++ /dev/null @@ -1,105 +0,0 @@ -package quanto.gui; - -import edu.uci.ics.jung.algorithms.layout.AbstractLayout; -import java.awt.Dimension; -import java.awt.geom.Point2D; -import quanto.core.data.CoreGraph; -import quanto.core.data.Edge; -import quanto.core.data.Vertex; - -public class QuantoAutoLayout extends AbstractLayout { - - protected double vertexSpacing = 20.0; - private QuantoDotLayout dotLayout; - - protected QuantoAutoLayout(CoreGraph graph) { - super(graph, new Dimension(40, 40)); - dotLayout = new QuantoDotLayout(graph); - } - - protected double vertexWidth(Vertex v) { - return 14; - } - - protected double vertexHeight(Vertex v) { - return 14; - } - - @Override - public void setLocation(Vertex picked, Point2D p) { - if (p.getX() < 20) { - p.setLocation(20, p.getY()); - } - if (p.getY() < 20) { - p.setLocation(p.getX(), 20); - } - super.setLocation(picked, p); - if (p.getX() + vertexSpacing > size.width) { - size.width = (int) Math.ceil(p.getX() + vertexSpacing); - } - if (p.getY() + vertexSpacing > size.height) { - size.height = (int) Math.ceil(p.getY() + vertexSpacing); - } - } - - public void recalculateSize() { - double right = vertexSpacing; - double bottom = vertexSpacing; - for (Vertex v : getGraph().getVertices()) { - Point2D point = transform(v); - right = Math.max(right, point.getX() + vertexWidth(v) / 2.0); - bottom = Math.max(bottom, point.getY() + vertexHeight(v) / 2.0); - } - right += vertexSpacing; - bottom += vertexSpacing; - size.setSize(Math.ceil(right), Math.ceil(bottom)); - } - - protected boolean isWorkToDo() { - return getGraph().getVertexCount() > 0; - } - - private boolean check() { - for (Vertex v : graph.getVertices()) { - if (v.getPosition() != null) { - return true; - } - } - return false; - } - - public void initialize() { - if (!isWorkToDo()) { - return; - } - - recalculateSize(); - - if (check()) { - synchronized (getGraph()) { - int i = 1; - Point2D p = new Point2D.Double(0, 0); - for (Vertex v : graph.getVertices()) { - if (transform(v).equals(p)) { - setLocation(v, size.width + vertexSpacing, vertexSpacing * i); - i++; - } - } - } - } else { - dotLayout.initialize(); - for (Vertex v : graph.getVertices()) { - setLocation(v, dotLayout.transform(v)); - } - } - recalculateSize(); - } - - public void imposeLocation(Vertex v, Point2D p) { - setLocation(v, p); - } - - public void reset() { - dotLayout.reset(); - } -} diff --git a/gui/src/quanto/gui/QuantoDotLayout.java b/gui/src/quanto/gui/QuantoDotLayout.java deleted file mode 100644 index 0476a381..00000000 --- a/gui/src/quanto/gui/QuantoDotLayout.java +++ /dev/null @@ -1,25 +0,0 @@ -package quanto.gui; - -import edu.uci.ics.jung.contrib.algorithms.layout.AbstractDotBangBoxLayout; -import quanto.core.data.BangBox; -import quanto.core.data.CoreGraph; -import quanto.core.data.Edge; -import quanto.core.data.Vertex; - -/** - * - * @author alex - */ -public class QuantoDotLayout extends AbstractDotBangBoxLayout { - - public static final double PADDING = 23.0; - - public QuantoDotLayout(CoreGraph graph) { - super(graph, PADDING); - } - - @Override - 
protected String getVertexDotKey(Vertex vertex) { - return vertex.getCoreName(); - } -} diff --git a/gui/src/quanto/gui/QuantoForceLayout.java b/gui/src/quanto/gui/QuantoForceLayout.java deleted file mode 100644 index d66f3092..00000000 --- a/gui/src/quanto/gui/QuantoForceLayout.java +++ /dev/null @@ -1,187 +0,0 @@ -package quanto.gui; - -import edu.uci.ics.jung.algorithms.layout.AbstractLayout; -import edu.uci.ics.jung.algorithms.layout.util.Relaxer; -import edu.uci.ics.jung.algorithms.layout.util.VisRunner; -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.DirectedGraph; -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.HashMap; -import java.util.Map; -import org.apache.commons.collections15.Transformer; -import quanto.core.data.CoreGraph; -import quanto.core.data.Edge; -import quanto.core.data.Vertex; - -public class QuantoForceLayout extends AbstractLayout implements IterativeContext { - - protected Map vertexVelocities; - protected double vertexSpacing = 20.0; - private double damping = 0.65; - private double timestep = 0.01; - private int done; - private boolean modify; - CoreGraph gr; - - protected QuantoForceLayout(DirectedGraph graph, Transformer initializer, - double vertexSpacing) { - super(graph, new Dimension((int) Math.ceil(2 * vertexSpacing), (int) Math.ceil(2 * vertexSpacing))); - setInitializer(initializer); - vertexVelocities = new HashMap(); - modify = true; - } - - public QuantoForceLayout(CoreGraph graph, QuantoDotLayout quantoDotLayout) { - this(graph, quantoDotLayout, 20.0); - gr = graph; - } - - protected void beginLayout() { - } - - protected void endLayout() { - } - - @Override - public Dimension getSize() { - return size; - } - - public void forgetPositions() { - locations.clear(); - } - - @Override - public void setLocation(Vertex picked, Point2D p) { - if (p.getX() < 20) { - p.setLocation(20, p.getY()); - } - if (p.getY() < 20) { - p.setLocation(p.getX(), 20); - } - super.setLocation(picked, p); - if (p.getX() + vertexSpacing > size.width) { - size.width = (int) Math.ceil(p.getX() + vertexSpacing); - } - if (p.getY() + vertexSpacing > size.height) { - size.height = (int) Math.ceil(p.getY() + vertexSpacing); - } - } - - protected Point2D coulombRepulsion(Vertex v1, Vertex v2) { - Point2D p1 = locations.get(v1); - Point2D p2 = locations.get(v2); - double distSq = (p1.getX() - p2.getX()) * (p1.getX() - p2.getX()) - + (p1.getY() - p2.getY()) * (p1.getY() - p2.getY()); - return new Point2D.Double(200 * (p1.getX() - p2.getX()) / distSq, 200 * (p1.getY() - p2.getY()) / distSq); - } - - protected Point2D hookeAttraction(Vertex v1, Vertex v2) { - Point2D p1 = locations.get(v1); - Point2D p2 = locations.get(v2); - double atr = 0.06; - return new Point2D.Double(atr * (p1.getX() - p2.getX()), atr * (p1.getY() - p2.getY())); - } - - public void step() { - done++; - for (Vertex v : graph.getVertices()) { - if (!isLocked(v)) { - Point2D netForce = new Point2D.Double(0, 0); - vertexVelocities.put(v, new Point2D.Double(0, 0)); - for (Vertex u : graph.getVertices()) { - if (v != u) { - Point2D q = coulombRepulsion(u, v); - netForce.setLocation(netForce.getX() - q.getX(), netForce.getY() - q.getY()); - } - } - for (Vertex u : graph.getSuccessors(v)) { - Point2D q = hookeAttraction(u, v); - netForce.setLocation(netForce.getX() + q.getX(), netForce.getY() + q.getY()); - } - Point2D p = vertexVelocities.get(v); - vertexVelocities.put(v, new Point2D.Double((p.getX() + netForce.getX() * timestep) * damping, - (p.getY() 
+ netForce.getY() * timestep) * damping)); - p = vertexVelocities.get(v); - Point2D q = locations.get(v); - Point2D r = new Point2D.Double(q.getX() + p.getX() * timestep, q.getY() + p.getY() * timestep); - setLocation(v, r); - } - } - } - - public boolean done() { - return done > 10000; - } - - protected boolean isWorkToDo() { - return getGraph().getVertexCount() > 0; - } - - public void initialize() { - //Needed to be run on top of DotLayout to avoid an unnecessary forceLayout iteration - Relaxer relaxer = new VisRunner(this); - relaxer.prerelax(); - recalculateSize(); - } - - public void reset() { - recalculateSize(); - if (modify) { - int i = 1; - Point2D p = new Point2D.Double(0, 0); - for (Vertex v : graph.getVertices()) { - if (locations.get(v).equals(p)) { - setLocation(v, size.width + vertexSpacing, vertexSpacing * i); - i++; - } - } - recalculateSize(); - } else { - if (!isWorkToDo()) { - return; - } - beginLayout(); - for (Vertex v : graph.getVertices()) { - vertexVelocities.put(v, new Point2D.Double(0, 0)); - } - done = 0; - while (!done()) { - step(); - } - endLayout(); - recalculateSize(); - } - } - - //flag to distinguish between the cases when a vertex is added and other actions performed - public void startModify() { - modify = false; - } - - public void endModify() { - modify = true; - } - - protected double vertexWidth(Vertex v) { - return 14; - } - - protected double vertexHeight(Vertex v) { - return 14; - } - - public void recalculateSize() { - double right = vertexSpacing; - double bottom = vertexSpacing; - for (Vertex v : getGraph().getVertices()) { - Point2D point = transform(v); - right = Math.max(right, point.getX() + vertexWidth(v) / 2.0); - bottom = Math.max(bottom, point.getY() + vertexHeight(v) / 2.0); - } - right += vertexSpacing; - bottom += vertexSpacing; - size.setSize(Math.ceil(right), Math.ceil(bottom)); - } -} \ No newline at end of file diff --git a/gui/src/quanto/gui/QuantoFrame.java b/gui/src/quanto/gui/QuantoFrame.java deleted file mode 100644 index 659a6e5c..00000000 --- a/gui/src/quanto/gui/QuantoFrame.java +++ /dev/null @@ -1,490 +0,0 @@ -package quanto.gui; - -import com.sun.jaf.ui.ActionManager; -import com.sun.jaf.ui.UIFactory; -import java.awt.BorderLayout; -import java.awt.Color; -import java.awt.Component; -import java.awt.Image; -import java.awt.event.WindowEvent; -import java.io.File; -import java.io.IOException; -import java.net.URL; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; -import java.util.logging.Level; -import java.util.logging.Logger; -import javax.imageio.ImageIO; -import javax.swing.*; -import javax.swing.Action; -import javax.swing.JFrame; -import javax.swing.JMenu; -import javax.swing.JMenuBar; -import javax.swing.JMenuItem; -import javax.swing.JSplitPane; - -import javax.swing.plaf.basic.BasicSplitPaneUI; - -import quanto.core.CoreException; -import quanto.gui.QuantoApp.BoolPref; - -public class QuantoFrame extends JFrame implements ViewPortHost { - - private final static Logger logger = Logger.getLogger("quanto.gui"); - private static final long serialVersionUID = 3656684775223085393L; - private final ViewPort viewPort; - private LeftTabbedPane sidebar; - private JSplitPane splitPane; - private volatile static int frameCount = 0; - private QuantoApp app; - private ActionManager actionManager = new ActionManager(); - - /** - * Command actions that are dealt with directly by the frame - * - * These are "global" command (as opposed to toggle, for example) actions, - * such as Open or Quit. 
Each should be dealt with by a separate - * (non-static) method in this class. - */ - public enum CommandAction { - - NewWindow("new-win-command", "createNewFrame"), - NewGraph("new-graph-command", "createNewGraph"), - OpenGraph("open-command", "openGraph"), - LoadRuleset("load-ruleset-command", "importRuleset"), - SaveRuleset("save-ruleset-command", "exportRuleset"), - Close("close-command", "closeCurrentView"), - Quit("quit-command", "quit"), - RefreshAll("refresh-all-command", "refreshall"); - - /** - * Create a new command action - * @param actionName The action name (as in resources/actions.xml) - * @param methodName The name of the method (in the QuantoFrame class) - * to invoke when the action is triggered - */ - private CommandAction(String actionName, String methodName) { - this.actionName = actionName; - this.methodName = methodName; - } - private final String actionName; - private final String methodName; - - @Override - public String toString() { - return actionName; - } - - public String actionName() { - return actionName; - } - - public String methodName() { - return methodName; - } - } - - // This type has to be public in order to be registered as a - // handler with ActionManager. The constructor is private, however, - // to prevent abuse. - public class Delegate { - - private Delegate() { - } - - public void executeCommand(String command) { - viewPort.executeCommand(command); - } - - public void executeCommand(String command, boolean state) { - if (state) { - viewPort.executeCommand(command); - } - } - } - // This type has to be public in order to be registered as a - // handler with ActionManager. The constructor is private, however, - // to prevent abuse. - - public class BoolPrefDelegate { - - private final QuantoApp.BoolPref pref; - - private BoolPrefDelegate(BoolPref pref) { - this.pref = pref; - } - - public void setState(boolean state) { - app.setPreference(pref, state); - } - } - - private void addIconFromRes(List to, String resourceRef) { - try { - to.add(ImageIO.read(getClass().getResource(resourceRef))); - } catch (IOException ex) { - logger.log(Level.WARNING, "Cannot find " + resourceRef, ex); - } - } - - private void loadIcons() { - List icons = new ArrayList(6); - addIconFromRes(icons, "/icons/quanto_icon_16.png"); - addIconFromRes(icons, "/icons/quanto_icon_24.png"); - addIconFromRes(icons, "/icons/quanto_icon_32.png"); - addIconFromRes(icons, "/icons/quanto_icon_48.png"); - addIconFromRes(icons, "/icons/quanto_icon_64.png"); - addIconFromRes(icons, "/icons/quanto_icon_128.png"); - setIconImages(icons); - } - - public void quit() { - app.shutdown(); - } - - public QuantoFrame(QuantoApp app) { - super("Quantomatic"); - - loadIcons(); - - frameCount++; - this.app = app; - setBackground(Color.white); - getContentPane().setLayout(new BorderLayout()); - - actionManager.setControlConvertedToMeta(QuantoApp.isMac); - URL actionsXml = getClass().getResource("resources/actions.xml"); - if (actionsXml == null) { - throw new Error("Could not find resource \"resources/actions.xml\""); - } - try { - actionManager.loadActions(actionsXml); - } catch (IOException ex) { - throw new Error("Could not load resource \"resources/actions.xml\": " + ex.getMessage()); - } - Set menuIds = actionManager.getActionListIDs(); - for (String id : actionManager.getActionIDs()) { - if (!menuIds.contains(id)) { - actionManager.setEnabled(id, false); - } - } - - for (CommandAction action : CommandAction.values()) { - actionManager.registerCallback(action.actionName(), this, 
action.methodName()); - actionManager.setEnabled(action.actionName(), true); - } - actionManager.registerCallback("open-in-new-window-command", - new BoolPrefDelegate(app.NEW_WINDOW_FOR_GRAPHS), - "setState"); - actionManager.setEnabled("open-in-new-window-command", true); - actionManager.setSelected("open-in-new-window-command", - app.getPreference(app.NEW_WINDOW_FOR_GRAPHS)); - CommandManager commandManager = new CommandManager(actionManager); - InteractiveGraphView.registerKnownCommands(app.getCore(), commandManager); - - UIFactory factory = new UIFactory(actionManager); - setJMenuBar(factory.createMenuBar("main-menu")); - if (QuantoApp.isMac) { - removeQuitFromFileMenu(); - } - insertTheoryMenu(); - getContentPane().add(factory.createToolBar("main-toolbar"), BorderLayout.PAGE_START); - - viewPort = new ViewPort(app.getViewManager(), this, app.getCore()); - sidebar = new LeftTabbedPane(app.getCore(), viewPort); - commandManager.setViewPort(viewPort); - - //Add the scroll panes to a split pane. - splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT); - splitPane.setLeftComponent(sidebar); - splitPane.setRightComponent(viewPort); - splitPane.setDividerLocation(256); - splitPane.setDividerSize(10); - splitPane.setUI(new BasicSplitPaneUI()); - splitPane.setBackground(Color.DARK_GRAY); - splitPane.setOneTouchExpandable(true); - - getContentPane().add(splitPane, BorderLayout.CENTER); - this.pack(); - } - - /** - * Display an error message from the core without getting in the way. - * - * This is intended for situations where the problem was not directly - * caused by the user clicking something. - * - * @param msg a short message explaining what could not be done - * @param ex the exception thrown by the core - */ - public void coreErrorMessage(String message, CoreException ex) { - // FIXME: this should be non-modal - DetailedErrorDialog.showCoreErrorDialog(this, message, ex); - } - - /** - * Display an error message, with extra detail, without getting in the way. - * - * This is intended for situations where the problem was not directly - * caused by the user clicking something. - * - * @param title a title for the dialog - * @param msg a short message explaining what could not be done - * @param details a more detailed message explaining why it could not be done - */ - public void detailedErrorMessage(String title, String msg, String details) { - // FIXME: this should be non-modal - DetailedErrorDialog.showDetailedErrorDialog(this, title, msg, details); - } - - /** - * Display an error message, with extra detail, without getting in the way. - * - * This is intended for situations where the problem was not directly - * caused by the user clicking something. - * - * @param title a title for the dialog - * @param msg a short message explaining what could not be done - * @param ex an exception detailing the error - */ - public void detailedErrorMessage(String title, String msg, Throwable ex) { - // FIXME: this should be non-modal - DetailedErrorDialog.showDetailedErrorDialog(this, title, msg, ex.getLocalizedMessage()); - } - - /** - * Display an error message without getting in the way. - * - * This is intended for situations where the problem was not directly - * caused by the user clicking something. - * - * @param msg the message - */ - public void errorMessage(String title, String msg) { - // FIXME: this should be non-modal - JOptionPane.showMessageDialog(this, msg, title, JOptionPane.ERROR_MESSAGE); - } - - /** - * Display a modal error message from the core. 
- * - * Consider whether coreErrorMessage might be less annoying. - * - * @param parent the parent component, or null to use the frame - * @param msg a short message explaining what could not be done - * @param ex the exception thrown by the core - */ - public void coreErrorDialog(Component parent, String message, CoreException ex) { - DetailedErrorDialog.showCoreErrorDialog(parent == null ? this : parent, message, ex); - } - - /** - * Display a modal error message, with extra detail. - * - * Consider whether detailedErrorMessage might be less annoying. - * - * @param parent the parent component, or null to use the frame - * @param title a title for the dialog - * @param msg a short message explaining what could not be done - * @param details a more detailed message explaining why it could not be done - */ - public void detailedErrorDialog(Component parent, String title, String msg, String details) { - DetailedErrorDialog.showDetailedErrorDialog(parent == null ? this : parent, title, msg, details); - } - - /** - * Display a modal error message, with extra detail. - * - * Consider whether detailedErrorMessage might be less annoying. - * - * @param parent the parent component, or null to use the frame - * @param title a title for the dialog - * @param msg a short message explaining what could not be done - * @param ex an exception detailing the error - */ - public void detailedErrorDialog(Component parent, String title, String msg, Throwable ex) { - DetailedErrorDialog.showDetailedErrorDialog(parent == null ? this : parent, title, msg, ex.getLocalizedMessage()); - } - - /** - * Display a modal error message. - * - * Consider whether errorMessage might be less annoying. - * - * @param parent the parent component, or null to use the frame - * @param title a title for the dialog - * @param msg a short message explaining what could not be done - */ - public void errorDialog(Component parent, String title, String msg) { - JOptionPane.showMessageDialog(parent == null ? 
this : parent, msg, title, JOptionPane.ERROR_MESSAGE); - } - - public File openFile(String title, int type) { - return app.openFile(this, title, type); - } - - private void insertTheoryMenu() { - JMenuBar menuBar = getJMenuBar(); - Action fileMenuAction = actionManager.getAction("file-menu"); - Action LoadTheoryAction = actionManager.getAction("load-theory-command"); - for (int i = 0; i < menuBar.getMenuCount(); ++i) { - JMenu menu = menuBar.getMenu(i); - if (menu != null && menu.getAction() == fileMenuAction) { - for (int j = menu.getItemCount() - 1; j >= 0; --j) { - JMenuItem item = menu.getItem(j); - if (item != null && item.getAction() == LoadTheoryAction) { - menu.remove(j); - menu.add(new TheoryMenu(app.getTheoryManager(), this), j); - return; - } - } - return; - } - } - } - - private void removeQuitFromFileMenu() { - JMenuBar menuBar = getJMenuBar(); - Action fileMenuAction = actionManager.getAction("file-menu"); - Action quitCommandAction = actionManager.getAction(CommandAction.Quit.actionName()); - for (int i = 0; i < menuBar.getMenuCount(); ++i) { - JMenu menu = menuBar.getMenu(i); - if (menu != null && menu.getAction() == fileMenuAction) { - for (int j = menu.getItemCount() - 1; j >= 0; --j) { - JMenuItem item = menu.getItem(j); - if (item != null && item.getAction() == quitCommandAction) { - menu.remove(j); - return; - } - } - return; - } - } - } - - public void refreshAll() { - app.getViewManager().refreshAll(); - } - - public void openView(InteractiveView view) { - if (app.getPreference(app.NEW_WINDOW_FOR_GRAPHS)) { - app.openNewFrame(view); - } else { - viewPort.attachView(view); - } - } - - public void closeCurrentView() { - ViewPort.CloseResult result = viewPort.closeCurrentView(); - if (result == ViewPort.CloseResult.NoMoreViews) { - dispose(); - } - } - - public void createNewFrame() { - app.createNewFrame(); - } - - public void createNewGraph() { - try { - openView(app.createNewGraph()); - } catch (CoreException ex) { - coreErrorDialog(this, "Could not create new graph", ex); - } - } - - public void importRuleset() { - File f = app.openFile(this, "Import ruleset", QuantoApp.DIR_RULESET); - try { - if (f != null) { - app.getCore().loadRuleset(f); - } - } catch (CoreException ex) { - coreErrorDialog(this, "Error in core when opening \"" + f.getName() + "\"", ex); - } catch (java.io.IOException ex) { - detailedErrorDialog(this, "Import Ruleset", "Could not read \"" + f.getName() + "\"", ex); - } - } - - public void exportRuleset() { - File f = app.saveFile(this, "Export ruleset", QuantoApp.DIR_RULESET); - try { - if (f != null) { - app.getCore().saveRuleset(f); - } - } catch (CoreException ex) { - coreErrorDialog(this, "Error in core when writing to \"" + f.getName() + "\"", ex); - } catch (java.io.IOException ex) { - detailedErrorDialog(this, "Export Ruleset", "Could not write \"" + f.getName() + "\"", ex); - } - } - - /** - * Read a graph from a file and send it to a fresh InteractiveGraphView. 
- */ - public void openGraph() { - File f = InteractiveGraphView.chooseGraphFile(this); - try { - if (f != null) { - InteractiveView view = app.openGraph(f); - openView(view); - } - } catch (CoreException ex) { - coreErrorDialog(this, "Error in core when opening \"" + f.getName() + "\"", ex); - } catch (java.io.IOException ex) { - detailedErrorDialog(this, "Open Graph", "Could not read \"" + f.getName() + "\"", ex); - } - } - - @Override - protected void processWindowEvent(WindowEvent e) { - if (e.getID() == WindowEvent.WINDOW_CLOSING) { - if (frameCount == 1) { - app.shutdown(); - } else { - frameCount--; - viewPort.clearPort(); - dispose(); - } - } else { - super.processWindowEvent(e); - } - } - - public ViewPort getViewPort() { - return viewPort; - } - - public void setViewAllowedToClose(boolean allowed) { - actionManager.setEnabled(CommandAction.Close.actionName(), allowed); - } - - public boolean isViewAllowedToClose() { - return actionManager.isEnabled(CommandAction.Close.actionName()); - } - - public void setCommandEnabled(String command, boolean enabled) { - actionManager.setEnabled(command, enabled); - } - - public boolean isCommandEnabled(String command) { - return actionManager.isEnabled(command); - } - - public void setCommandStateSelected(String command, boolean selected) { - actionManager.setSelected(command, selected); - } - - public boolean isCommandStateSelected(String command) { - return actionManager.isSelected(command); - } - - public void attachedViewChanged(InteractiveView newView) { - if (newView == null) { - setTitle("Quantomatic"); - } else { - setTitle("Quantomatic: " + newView.getTitle()); - } - } -} diff --git a/gui/src/quanto/gui/RecentDirectoryChangeListener.java b/gui/src/quanto/gui/RecentDirectoryChangeListener.java deleted file mode 100644 index a7489467..00000000 --- a/gui/src/quanto/gui/RecentDirectoryChangeListener.java +++ /dev/null @@ -1,12 +0,0 @@ -package quanto.gui; - -import java.io.File; -import java.util.EventListener; - -/** - * - * @author alex - */ -public interface RecentDirectoryChangeListener extends EventListener { - void recentDirectoryChanged(Object source, File directory); -} diff --git a/gui/src/quanto/gui/RewriteViewer.java b/gui/src/quanto/gui/RewriteViewer.java deleted file mode 100644 index 5e83ac83..00000000 --- a/gui/src/quanto/gui/RewriteViewer.java +++ /dev/null @@ -1,128 +0,0 @@ -package quanto.gui; - -import quanto.core.data.CoreGraph; -import java.awt.BorderLayout; -import java.awt.Color; -import java.awt.Dimension; -import java.awt.FlowLayout; -import java.awt.GridLayout; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.awt.event.KeyAdapter; -import java.awt.event.KeyEvent; -import java.awt.event.KeyListener; -import java.awt.event.MouseAdapter; -import java.awt.event.MouseEvent; -import java.util.List; - -import javax.swing.JButton; -import javax.swing.JComponent; -import javax.swing.JFrame; -import javax.swing.JLabel; -import javax.swing.JPanel; -import javax.swing.JScrollPane; -import javax.swing.border.LineBorder; -import quanto.core.data.AttachedRewrite; - -public class RewriteViewer extends JFrame { - - private static final long serialVersionUID = 3627522980375030017L; - private final InteractiveGraphView vis; - protected List rewrites; - - public RewriteViewer(InteractiveGraphView vis) { - super("Rewrites for " + vis.getTitle()); - this.vis = vis; - rewrites = vis.getRewrites(); - - KeyListener esc = new KeyAdapter() { - - @Override - public void keyReleased(KeyEvent e) { - if 
(e.getKeyCode() == KeyEvent.VK_ESCAPE) { - RewriteViewer.this.dispose(); - } - } - }; - - JPanel panel = new JPanel(); - panel.setLayout(new GridLayout(rewrites.size(), 1)); - int index = 0; - JButton cancel = new JButton("Cancel"); - JComponent focusMe = cancel; - for (AttachedRewrite rw : rewrites) { - JPanel rwPanel = new JPanel(); - rwPanel.setLayout(new FlowLayout()); - JLabel ruleName = new JLabel(rw.getRuleName()); - rwPanel.add(ruleName); - GraphVisualizationViewer lhs = new GraphVisualizationViewer(rw.getLhs()); - GraphVisualizationViewer rhs = new GraphVisualizationViewer(rw.getRhs()); - lhs.zoomToFit(new Dimension(100, 100)); - rhs.zoomToFit(new Dimension(100, 100)); - lhs.setBorder(new LineBorder(Color.gray, 1)); - rhs.setBorder(new LineBorder(Color.gray, 1)); - JButton apply = new JButton("=>"); - rwPanel.setBackground(lhs.getBackground()); - rwPanel.add(lhs); - rwPanel.add(apply); - rwPanel.add(rhs); - if (index == 0) { - focusMe = apply; - } - rwPanel.setBorder(new LineBorder(Color.black, 1)); - panel.add(rwPanel); - - final int thisIndex = index; - - apply.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - RewriteViewer.this.vis.clearHighlight(); - RewriteViewer.this.vis.applyRewrite(thisIndex); - RewriteViewer.this.dispose(); - } - }); - - MouseAdapter hl = new MouseAdapter() { - - @Override - public void mouseEntered(MouseEvent e) { - AttachedRewrite rw = - RewriteViewer.this.rewrites.get(thisIndex); - RewriteViewer.this.vis.highlightRewrite(rw); - } - - @Override - public void mouseExited(MouseEvent e) { - RewriteViewer.this.vis.clearHighlight(); - } - }; - - rwPanel.addMouseListener(hl); - lhs.addMouseListener(hl); - rhs.addMouseListener(hl); - apply.addMouseListener(hl); - - index++; - } - - JScrollPane scroll = new JScrollPane(panel); - scroll.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS); - getContentPane().setLayout(new BorderLayout()); - getContentPane().add(scroll, BorderLayout.CENTER); - cancel.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - RewriteViewer.this.dispose(); - } - }); - - getContentPane().add(cancel, BorderLayout.SOUTH); - - pack(); - - focusMe.grabFocus(); - focusMe.addKeyListener(esc); - addKeyListener(esc); - } -} diff --git a/gui/src/quanto/gui/RulesBar.java b/gui/src/quanto/gui/RulesBar.java deleted file mode 100644 index 58e73ceb..00000000 --- a/gui/src/quanto/gui/RulesBar.java +++ /dev/null @@ -1,633 +0,0 @@ -package quanto.gui; - -import quanto.core.CoreException; -import quanto.core.Ruleset; -import quanto.core.RulesetChangeListener; -import quanto.core.data.CoreGraph; -import quanto.core.data.Rule; -import quanto.core.protocol.userdata.RulePriorityRuleUserDataSerializer; - -import javax.swing.*; -import java.awt.*; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.awt.event.MouseAdapter; -import java.awt.event.MouseEvent; -import java.util.*; -import java.util.List; -import java.util.logging.Level; -import java.util.logging.Logger; - -/** - * Panel displaying a very simple rules interface. - * - * This just lists the rules, allowing them to be filtered - * by tag, and enables/disables rules (individually, all at - * once, or by tag). 
- * - * @author alex - */ -public class RulesBar extends JPanel { - - private final static Logger logger = - Logger.getLogger("quanto.gui"); - - private static class RuleDescription { - - public RuleDescription(String rulename, boolean active) { - this.rulename = rulename; - this.active = active; - } - public String rulename; - public boolean active; - - @Override - public String toString() { - return rulename; - } - }; - private Ruleset ruleset; - private ViewPort viewPort; - private RulesetChangeListener listener = new RulesetChangeListener() { - - public void rulesAdded(Ruleset source, Collection ruleNames) { - try { - if (tagsCombo.getSelectedIndex() != 0) { - for (String rule : ruleset.getRulesByTag(tagsCombo.getSelectedItem().toString())) { - if (ruleNames.contains(rule)) { - rulesModel.addElement(new RuleDescription(rule, - ruleset.isRuleActive(rule))); - } - } - } else { - for (String rule : ruleNames) { - rulesModel.addElement(new RuleDescription(rule, - ruleset.isRuleActive(rule))); - } - } - } catch (CoreException ex) { - logger.log(Level.WARNING, "Core complained when responding to rule addition", ex); - ruleset.reload(); - } - } - - public void rulesRemoved(Ruleset source, Collection ruleNames) { - Object[] descs = rulesModel.toArray(); - for (int i = 0; i < descs.length; ++i) { - RuleDescription desc = (RuleDescription) descs[i]; - if (ruleNames.contains(desc.rulename)) { - rulesModel.remove(i); - } - } - } - - public void rulesRenamed(Ruleset source, Map renaming) { - Object[] descs = rulesModel.toArray(); - for (int i = 0; i < descs.length; ++i) { - RuleDescription desc = (RuleDescription) descs[i]; - if (renaming.containsKey(desc.rulename)) { - desc.rulename = renaming.get(desc.rulename); - // force an update - rulesModel.set(i, desc); - } - } - } - - public void rulesetReplaced(Ruleset source) { - loadTags(); - loadRules(tagsCombo.getSelectedItem().toString()); - } - - public void rulesActiveStateChanged(Ruleset source, Map newState) { - Object[] descs = rulesModel.toArray(); - for (int i = 0; i < descs.length; ++i) { - RuleDescription desc = (RuleDescription) descs[i]; - if (newState.containsKey(desc.rulename)) { - desc.active = newState.get(desc.rulename); - // force an update - rulesModel.set(i, desc); - } - } - } - - public void rulesTagged(Ruleset source, String tag, Collection ruleNames, boolean newTag) { - if (newTag) { - tagsCombo.addItem(tag); - } - if (tagsCombo.getSelectedIndex() == 0) { - return; - } - if (!tagsCombo.getSelectedItem().toString().equals(tag)) { - return; - } - try { - for (String rule : ruleNames) { - rulesModel.addElement(new RuleDescription(rule, - ruleset.isRuleActive(rule))); - } - } catch (CoreException ex) { - logger.log(Level.WARNING, "Core complained when responding to rule tagging", ex); - ruleset.reload(); - } - } - - public void rulesUntagged(Ruleset source, String tag, Collection ruleNames, boolean tagRemoved) { - if (tagRemoved) { - if (tagsCombo.getSelectedIndex() != 0 - && tagsCombo.getSelectedItem().toString().equals(tag)) { - tagsCombo.setSelectedIndex(0); - } - tagsCombo.removeItem(tag); - return; - } - if (tagsCombo.getSelectedIndex() == 0) { - return; - } - if (!tagsCombo.getSelectedItem().toString().equals(tag)) { - return; - } - Object[] descs = rulesModel.toArray(); - for (int i = 0; i < descs.length; ++i) { - RuleDescription desc = (RuleDescription) descs[i]; - if (ruleNames.contains(desc.rulename)) { - rulesModel.remove(i); - } - } - } - }; - private JList listView; - private DefaultListModel rulesModel; - private 
JButton enableButton; - private JButton disableButton; - private JButton deleteButton; - private JButton createRuleButton; - private JButton refreshButton; - private JComboBox tagsCombo; - private boolean suppressTagComboCallback = false; - - private void showModalError(String message, CoreException ex) { - logger.log(Level.SEVERE, message, ex); - DetailedErrorDialog.showCoreErrorDialog(this, message, ex); - } - - private void logError(String message, CoreException ex) { - logger.log(Level.SEVERE, message, ex); - // FIXME: show to user - } - - private void logWarning(String message, CoreException ex) { - logger.log(Level.WARNING, message, ex); - // FIXME: show to user - } - - private JPopupMenu createRuleContextualMenu() { - Object[] objDescs = listView.getSelectedValues(); - final RuleDescription[] descs = new RuleDescription[objDescs.length]; - for (int i = 0; i < objDescs.length; ++i) { - descs[i] = (RuleDescription)objDescs[i]; - } - JPopupMenu popupMenu = new JPopupMenu(); - JMenuItem menuItem; - - if (descs.length == 1) { - menuItem = new JMenuItem("Edit rule"); - popupMenu.add(menuItem); - menuItem.addActionListener(new ActionListener() { - public void actionPerformed(ActionEvent e) { - editRule(descs[0].rulename); - } - }); - - menuItem = new JMenuItem("Rename rule"); - popupMenu.add(menuItem); - menuItem.addActionListener(new ActionListener() { - public void actionPerformed(ActionEvent e) { - renameRule(descs[0].rulename); - } - }); - } - - menuItem = new JMenuItem((descs.length == 1) ? "Delete rule" : "Delete rules"); - popupMenu.add(menuItem); - menuItem.addActionListener(new ActionListener() { - public void actionPerformed(ActionEvent e) { - deleteSelectedRules(); - } - }); - - int enabledCount = 0; - int disabledCount = 0; - for (RuleDescription desc : descs) { - if (desc.active) - ++enabledCount; - else - ++disabledCount; - } - if (disabledCount > 0) { - menuItem = new JMenuItem((descs.length == 1) ? "Enable rule" : "Enable rules"); - popupMenu.add(menuItem); - menuItem.addActionListener(new ActionListener() { - public void actionPerformed(ActionEvent e) { - enableSelectedRules(); - } - }); - } - if (enabledCount > 0) { - menuItem = new JMenuItem((descs.length == 1) ? 
"Disable rule" : "Disable rules"); - popupMenu.add(menuItem); - menuItem.addActionListener(new ActionListener() { - public void actionPerformed(ActionEvent e) { - disableSelectedRules(); - } - }); - } - - if (descs.length == 1) { - menuItem = new JMenuItem("New Rule by Reverse"); - popupMenu.add(menuItem); - menuItem.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - String name = JOptionPane.showInputDialog(RulesBar.this, "Rule name:", - descs[0].rulename + "-rev"); - if (name == null) { - return; - } - - try { - Rule rule = RulesBar.this.ruleset.getCore().openRule(descs[0].rulename); - rule = RulesBar.this.ruleset.getCore().createRule(name, rule.getRhs(), rule.getLhs()); - SplitGraphView spg = new SplitGraphView(RulesBar.this.ruleset.getCore(), rule); - RulesBar.this.viewPort.getViewManager().addView(spg); - RulesBar.this.viewPort.attachView(spg); - } catch (CoreException ex) { - showModalError("Could not create a new rule.", ex); - } - } - }); - } - - JMenu subMenu = new JMenu("Add tag"); - try { - Collection allTags = ruleset.getTags(); - for (String tag : allTags) { - menuItem = new JMenuItem(tag); - menuItem.addActionListener(new ActionListener() { - public void actionPerformed(ActionEvent e) { - try { - for (RuleDescription desc : descs) { - ruleset.tagRule(desc.rulename, e.getActionCommand()); - } - } catch (CoreException ex) { - showModalError("Could not tag the rule.", ex); - } - } - }); - subMenu.add(menuItem); - } - } catch (CoreException ex) { - logError("Could not load tags from the core.", ex); - } - menuItem = new JMenuItem("New Tag..."); - menuItem.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - String tag = JOptionPane.showInputDialog(RulesBar.this, "Tag name:", ""); - if (tag == null) { - return; - } - - try { - for (RuleDescription desc : descs) { - ruleset.tagRule(desc.rulename, tag); - } - } catch (CoreException ex) { - showModalError("Could not tag the rule.", ex); - } - } - }); - subMenu.add(menuItem); - popupMenu.add(subMenu); - - try { - HashSet tags = new HashSet(); - for (RuleDescription desc : descs) { - tags.addAll(ruleset.getRuleTags(desc.rulename)); - } - if (!tags.isEmpty()) { - subMenu = new JMenu("Remove tag"); - for (String tag : tags) { - menuItem = new JMenuItem(tag); - menuItem.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - for (RuleDescription desc : descs) { - ruleset.untagRule(desc.rulename, e.getActionCommand()); - } - } catch (CoreException ex) { - showModalError("Could not load tags for the rule \"" - + listView.getSelectedValue().toString() - + "\" from the core.", ex); - } - } - }); - subMenu.add(menuItem); - } - popupMenu.add(subMenu); - } - } catch (CoreException ex) { - logError("Could not load tags for the rule \"" - + listView.getSelectedValue().toString() - + "\" from the core.", ex); - } - return popupMenu; - } - - private ImageIcon createImageIcon(String path, - String description) { - java.net.URL imgURL = getClass().getResource(path); - if (imgURL != null) { - return new ImageIcon(imgURL, description); - } else { - logger.log(Level.WARNING, "Could not load image icon \"{0}\"", path); - return null; - } - } - - private void enableSelectedRules() { - try { - Object[] descs = listView.getSelectedValues(); - List ruleNames = new LinkedList(); - for (Object d : descs) { - ruleNames.add(((RuleDescription) d).rulename); - } - ruleset.activateRules(ruleNames); - } catch (CoreException ex) { - 
showModalError("Could not enable the rules.", ex); - } - } - - private void disableSelectedRules() { - try { - Object[] descs = listView.getSelectedValues(); - List ruleNames = new LinkedList(); - for (Object d : descs) { - ruleNames.add(((RuleDescription) d).rulename); - } - ruleset.deactivateRules(ruleNames); - } catch (CoreException ex) { - showModalError("Could not disable the rules.", ex); - } - } - - private void createMenuButtons() { - enableButton = new JButton(createImageIcon("/toolbarButtonGraphics/quanto/ComputeAdd16.gif", "Enable")); - enableButton.setToolTipText("Enable selected rules"); - enableButton.addActionListener(new ActionListener() { - public void actionPerformed(ActionEvent e) { - enableSelectedRules(); - } - }); - - disableButton = new JButton(createImageIcon("/toolbarButtonGraphics/quanto/ComputeRemove16.gif", "Disable")); - disableButton.setToolTipText("Disable selected rules"); - disableButton.addActionListener(new ActionListener() { - public void actionPerformed(ActionEvent e) { - disableSelectedRules(); - } - }); - - deleteButton = new JButton(createImageIcon("/toolbarButtonGraphics/general/Delete16.gif", "Enable")); - deleteButton.setToolTipText("Delete selected rules"); - deleteButton.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - deleteSelectedRules(); - } - }); - - refreshButton = new JButton(createImageIcon("/toolbarButtonGraphics/general/Refresh16.gif", "Refresh")); - refreshButton.setToolTipText("Reload ruleset"); - refreshButton.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - RulesBar.this.ruleset.reload(); - } - }); - - createRuleButton = new JButton(createImageIcon("/toolbarButtonGraphics/general/New16.gif", "Create Rule")); - createRuleButton.setToolTipText("Create Rule"); - createRuleButton.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - String ruleName = JOptionPane.showInputDialog(RulesBar.this, "Rule name:", ""); - if (ruleName == null || ruleName.isEmpty()) { - return; - } - try { - CoreGraph lhs = RulesBar.this.ruleset.getCore().createEmptyGraph(); - CoreGraph rhs = RulesBar.this.ruleset.getCore().createEmptyGraph(); - Rule rule = RulesBar.this.ruleset.getCore().createRule(ruleName, lhs, rhs); - - SplitGraphView spg = new SplitGraphView(RulesBar.this.ruleset.getCore(), rule); - RulesBar.this.viewPort.getViewManager().addView(spg); - RulesBar.this.viewPort.attachView(spg); - - //By default the priority is set to 5 - RulePriorityRuleUserDataSerializer priorityRuleUserDataSerializer = - new RulePriorityRuleUserDataSerializer(RulesBar.this.ruleset.getCore().getTalker()); - priorityRuleUserDataSerializer.setRuleUserData(ruleName, 5); - } catch (CoreException ex) { - showModalError("Could not create a new rule.", ex); - } - } - }); - } - - public RulesBar(Ruleset ruleset, ViewPort viewPort) { - this.ruleset = ruleset; - this.viewPort = viewPort; - ruleset.addRulesetChangeListener(listener); - - final DefaultListCellRenderer cellRenderer = new DefaultListCellRenderer() { - - @Override - public Component getListCellRendererComponent( - JList list, - Object value, - int index, - boolean isSelected, - boolean cellHasFocus) { - super.getListCellRendererComponent(list, value.toString(), - index, isSelected, cellHasFocus); - setName(value.toString()); - if (!((RuleDescription) value).active) { - setForeground(Color.gray); - } - - return this; - } - }; - - rulesModel = new DefaultListModel(); - listView = new JList(rulesModel); 
- listView.setCellRenderer(cellRenderer); - listView.addMouseListener(new MouseAdapter() { - - @Override - public void mousePressed(MouseEvent e) { - if (e.isPopupTrigger()) { - int index = listView.locationToIndex(e.getPoint()); - if (index < 0) { - return; - } - if (Arrays.binarySearch(listView.getSelectedIndices(), index) < 0) { - listView.setSelectedIndex(index); - } - JPopupMenu contextualMenu = createRuleContextualMenu(); - contextualMenu.show(e.getComponent(), - e.getX(), e.getY()); - } - } - }); - JScrollPane listPane = new JScrollPane(listView); - tagsCombo = new JComboBox(); - createMenuButtons(); - - - - tagsCombo.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - if (!suppressTagComboCallback) { - JComboBox cb = (JComboBox) e.getSource(); - String tag = (String) cb.getSelectedItem(); - loadRules(tag); - } - } - }); - - JPanel buttonBox = new JPanel(); - buttonBox.setLayout(new BoxLayout(buttonBox, BoxLayout.LINE_AXIS)); - buttonBox.add(enableButton); - buttonBox.add(disableButton); - buttonBox.add(deleteButton); - buttonBox.add(createRuleButton); - buttonBox.add(refreshButton); - this.setLayout(new BoxLayout(this, BoxLayout.PAGE_AXIS)); - this.add(buttonBox); - this.add(tagsCombo); - /*Because of the BoxLayout, the Jcombobox takes too much space - width = width of the listPane, height = preferred height*/ - tagsCombo.setMaximumSize(new Dimension((int) listPane.getPreferredSize().getWidth(), (int) tagsCombo.getPreferredSize().getHeight())); - this.add(listPane); - - loadTags(); - loadRules("All Rules"); - } - - private void editRule(String rule) { - - try { - InteractiveViewManager vm = RulesBar.this.viewPort.getViewManager(); - for (Map.Entry e : vm.getViews().entrySet()) { - if (e.getValue() instanceof SplitGraphView) { - SplitGraphView sgv = (SplitGraphView)e.getValue(); - if (sgv.getRule().getCoreName().equals(rule)) { - RulesBar.this.viewPort.attachView(sgv); - return; - } - } - } - Rule ruleGraphs = RulesBar.this.ruleset.getCore().openRule(rule); - SplitGraphView spg = new SplitGraphView(RulesBar.this.ruleset.getCore(), ruleGraphs); - vm.addView(spg); - RulesBar.this.viewPort.attachView(spg); - } catch (CoreException ex) { - showModalError("Cannot open the rule \"" + rule + "\"", ex); - } - } - - private void renameRule(String rule) { - - try { - String newName = JOptionPane.showInputDialog(this, - "Enter a new name for the rule \"" + rule + "\"", rule); - if (newName != null && !newName.isEmpty()) { - ruleset.renameRule(rule, newName); - } - } catch (CoreException ex) { - showModalError("Cannot rename the rule \"" + rule + "\"", ex); - } - } - - private void deleteSelectedRules() { - int confirmation = JOptionPane.showConfirmDialog( - this, - "Delete selected rule(s)?", - "Delete Rules", - JOptionPane.YES_NO_OPTION); - if (confirmation != JOptionPane.YES_OPTION) { - return; - } - try { - Object[] descs = listView.getSelectedValues(); - for (Object d : descs) { - ruleset.deleteRule(((RuleDescription) d).rulename); - } - } catch (CoreException ex) { - showModalError("Cannot delete the selected rules", ex); - } - } - - private void loadTags() { - try { - try { - suppressTagComboCallback = true; - String oldSelection = null; - if (tagsCombo.getItemCount() > 0) { - oldSelection = tagsCombo.getSelectedItem().toString(); - tagsCombo.removeAllItems(); - } - tagsCombo.addItem("All Rules"); - for (String tag : ruleset.getTags()) { - tagsCombo.addItem(tag); - } - if (oldSelection != null) { - for (int i = 0; i < tagsCombo.getItemCount(); ++i) 
{ - if (oldSelection.equals(tagsCombo.getItemAt(i).toString())) { - tagsCombo.setSelectedIndex(i); - } - } - } - } finally { - suppressTagComboCallback = false; - } - } catch (CoreException ex) { - logError("Could not get tags from core", ex); - } - } - - private void loadRules(String tag) { - rulesModel.clear(); - /* If the tag exists, load the corresponding rules. - If not then load all the rules.*/ - try { - if (tagsCombo.getSelectedIndex() != 0) { - for (String rule : ruleset.getRulesByTag(tag)) { - rulesModel.addElement(new RuleDescription(rule, - ruleset.isRuleActive(rule))); - } - } else { - for (String rule : ruleset.getRules()) { - rulesModel.addElement(new RuleDescription(rule, - ruleset.isRuleActive(rule))); - } - } - } catch (CoreException ex) { - logError("Could not get the rules for tag \"" - + tag + "\" from core", ex); - } - } -} diff --git a/gui/src/quanto/gui/SplitGraphView.java b/gui/src/quanto/gui/SplitGraphView.java deleted file mode 100644 index bb9c688e..00000000 --- a/gui/src/quanto/gui/SplitGraphView.java +++ /dev/null @@ -1,305 +0,0 @@ -package quanto.gui; - -import java.awt.Color; -import java.awt.Dimension; -import java.awt.event.FocusAdapter; -import java.awt.event.FocusEvent; -import java.awt.event.FocusListener; - -import java.util.Collection; -import java.util.Map; -import java.util.logging.Level; -import java.util.logging.Logger; -import javax.swing.JOptionPane; -import javax.swing.JSplitPane; -import javax.swing.border.EmptyBorder; -import javax.swing.border.LineBorder; -import javax.swing.event.ChangeEvent; -import javax.swing.event.ChangeListener; -import quanto.core.Core; - -import quanto.core.CoreException; -import quanto.core.Ruleset; -import quanto.core.RulesetChangeListener; -import quanto.core.data.CoreGraph; -import quanto.core.data.Rule; - -@SuppressWarnings("serial") -public class SplitGraphView extends InteractiveView { - - private boolean leftFocused = true; - private InteractiveGraphView leftView; - private InteractiveGraphView rightView; - private JSplitPane splitPane; - private volatile boolean saved; - // this may become null, if the rule is deleted - private Rule rule; - // we keep our own copy of this, in case someone else changes the - // rule name in Rewrite - private Core core; - private RulesetChangeListener listener = new RulesetChangeListener() { - - public void rulesetReplaced(Ruleset source) { - try { - if (!core.getRuleset().getRules().contains(rule.getCoreName())) { - if (isAttached()) { - getViewPort().closeCurrentView(); - } else if (getViewManager() != null) { - getViewManager().removeView(SplitGraphView.this); - } - } - } catch (CoreException ex) { - Logger.getLogger(SplitGraphView.class.getName()).log(Level.SEVERE, "Failed to get rule list.", ex); - } - } - - public void rulesRemoved(Ruleset source, Collection ruleNames) { - if (ruleNames.contains(rule.getCoreName())) { - if (isAttached()) { - getViewPort().closeCurrentView(); - } else if (getViewManager() != null) { - getViewManager().removeView(SplitGraphView.this); - } - } - } - - public void rulesRenamed(Ruleset source, Map renaming) { - if (renaming.containsKey(rule.getCoreName())) { - rule.updateCoreName(renaming.get(rule.getCoreName())); - setTitle(rule.getCoreName()); - } - } - - public void rulesAdded(Ruleset source, Collection ruleNames) { - } - - public void rulesActiveStateChanged(Ruleset source, Map newState) { - } - - public void rulesTagged(Ruleset source, String tag, Collection ruleNames, boolean newTag) { - } - - public void rulesUntagged(Ruleset source, 
String tag, Collection ruleNames, boolean tagRemoved) { - } - }; - - public Rule getRule() { - return rule; - } - - public SplitGraphView(Core core, Rule rule) - throws CoreException { - this(core, rule, new Dimension(800, 600)); - } - - public SplitGraphView(Core core, Rule rule, Dimension dim) - throws CoreException { - super(rule.getCoreName()); - this.rule = rule; - this.core = core; - - core.getRuleset().addRulesetChangeListener(listener); - - leftView = new InteractiveGraphView(core, rule.getLhs()); - leftView.setSaveEnabled(false); - leftView.setSaveAsEnabled(false); - leftView.repaint(); - leftView.setVerticesPositionData(); - - rightView = new InteractiveGraphView(core, rule.getRhs()); - rightView.setSaveEnabled(false); - rightView.setSaveAsEnabled(false); - rightView.repaint(); - rightView.setVerticesPositionData(); - setupListeners(); - setupLayout(dim); - setSaved(true); - } - - private void setupListeners() { - FocusListener fl = new FocusAdapter() { - - @Override - public void focusGained(FocusEvent e) { - leftFocused = (e.getSource() == leftView); - updateFocus(); - } - }; - - ChangeListener cl = new ChangeListener() { - - public void stateChanged(ChangeEvent e) { - setSaved(false); - } - }; - - leftView.addFocusListener(fl); - rightView.addFocusListener(fl); - leftView.addChangeListener(cl); - rightView.addChangeListener(cl); - } - - private void setupLayout(Dimension dim) { - splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT); - splitPane.setLeftComponent(leftView); - splitPane.setRightComponent(rightView); - splitPane.setDividerLocation(((int) dim.getWidth() - 140) / 2); - - setMainComponent(splitPane); - } - - public boolean hasExpandingWorkspace() { - return false; - } - - private void updateFocus() { - InteractiveGraphView focusMe, unfocusMe; - if (leftFocused) { - focusMe = leftView; - unfocusMe = rightView; - } else { - focusMe = rightView; - unfocusMe = leftView; - } - - focusMe.setBorder(new LineBorder(Color.blue)); - unfocusMe.setBorder(new EmptyBorder(1, 1, 1, 1)); - if (isAttached()) { - unfocusMe.detached(getViewPort()); - focusMe.attached(getViewPort()); - } - } - - @Override - public void commandTriggered(String command) { - if (CommandManager.Command.Save.matches(command)) { - try { - if (rule != null) { - core.saveRule(rule); - setSaved(true); - } - } catch (CoreException err) { - coreErrorDialog("Could not save rule", err); - } - } else if (CommandManager.Command.SaveAs.matches(command)) { - try { - String newName = JOptionPane.showInputDialog(this, - "Rule name:", - rule == null ? "" : rule.getCoreName()); - if (newName == null || newName.isEmpty()) { - return; - } - - while (core.getRuleset().getRules().contains(newName)) { - int overwrite = JOptionPane.showConfirmDialog(this, - "A rule named \"" + newName - + "\" already exists. " - + "Do you want to overwrite it?", - "Overwrite rule", - JOptionPane.YES_NO_CANCEL_OPTION); - - if (overwrite == JOptionPane.YES_OPTION) { - break; // continue - } else if (overwrite != JOptionPane.NO_OPTION) { - return; // cancelled - give up - } - newName = JOptionPane.showInputDialog(this, - "Rule name:", - rule == null ? 
"" : rule.getCoreName()); - if (newName == null || newName.isEmpty()) { - return; - } - } - - rule = core.createRule(newName, rule.getLhs(), rule.getRhs()); - setTitle(newName); - setSaved(true); - } catch (CoreException err) { - coreErrorDialog("Could not save rule", err); - } - } else if ((CommandManager.Command.DirectedEdgeMode.matches(command)) - || (CommandManager.Command.UndirectedEdgeMode.matches(command)) - || (CommandManager.Command.SelectMode.matches(command))) { - leftView.commandTriggered(command); - rightView.commandTriggered(command); - } else { - if (leftFocused) { - leftView.commandTriggered(command); - } else { - rightView.commandTriggered(command); - } - } - super.commandTriggered(command); - } - - @Override - public void attached(ViewPort vp) { - //vp.setCommandEnabled(USE_RULE_ACTION, true); - vp.setCommandEnabled(CommandManager.Command.SaveAs, true); - vp.setCommandEnabled(CommandManager.Command.Save, - rule != null && !isSaved()); - updateFocus(); - super.attached(vp); - } - - @Override - public void detached(ViewPort vp) { - //vp.setCommandEnabled(USE_RULE_ACTION, false); - vp.setCommandEnabled(CommandManager.Command.SaveAs, false); - vp.setCommandEnabled(CommandManager.Command.Save, false); - if (leftFocused) { - leftView.detached(vp); - } else { - rightView.detached(vp); - } - super.detached(vp); - } - - @Override - public void cleanUp() { - leftView.cleanUp(); - rightView.cleanUp(); - core.getRuleset().removeRulesetChangeListener(listener); - super.cleanUp(); - } - - @Override - protected String getUnsavedClosingMessage() { - return "Rule not sent to theory. Close anyway?"; - } - - public boolean isLeftFocused() { - return leftFocused; - } - - public InteractiveGraphView getLeftView() { - return leftView; - } - - public InteractiveGraphView getRightView() { - return rightView; - } - - @Override - public boolean isSaved() { - return saved; - } - - public void setSaved(boolean saved) { - if (this.saved != saved) { - this.saved = saved; - if (rule != null && isAttached()) { - getViewPort().setCommandEnabled( - CommandManager.Command.Save, - !isSaved()); - } - firePropertyChange("saved", !saved, saved); - } - } - - @Override - public void refresh() { - leftView.refresh(); - rightView.refresh(); - } -} diff --git a/gui/src/quanto/gui/TextView.java b/gui/src/quanto/gui/TextView.java deleted file mode 100644 index 48dbd816..00000000 --- a/gui/src/quanto/gui/TextView.java +++ /dev/null @@ -1,25 +0,0 @@ -package quanto.gui; - -import java.util.Collection; -import javax.swing.JScrollPane; -import javax.swing.JTextArea; - -public class TextView extends InteractiveView { - - private static final long serialVersionUID = -9201774497137020314L; - private static int instanceCount = 0; - private JTextArea textArea; - - public TextView(String title, String text) { - setTitle(title); - textArea = new JTextArea(); - textArea.setText(text); - - setMainComponent(new JScrollPane(textArea)); - - instanceCount++; - } - - public static void registerKnownCommands(Collection commands) { - } -} diff --git a/gui/src/quanto/gui/TheoryManager.java b/gui/src/quanto/gui/TheoryManager.java deleted file mode 100644 index 5162ab2b..00000000 --- a/gui/src/quanto/gui/TheoryManager.java +++ /dev/null @@ -1,353 +0,0 @@ -package quanto.gui; - -import java.io.File; -import java.io.FileFilter; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.net.URL; -import java.net.URLEncoder; -import java.util.*; -import java.util.logging.Level; -import java.util.logging.Logger; 
-import java.util.prefs.Preferences; -import javax.swing.event.EventListenerList; -import org.xml.sax.SAXException; -import quanto.core.*; - -/** - * - * @author alex - */ -public class TheoryManager { - - private final static Logger logger = Logger.getLogger("quanto.gui"); - private static final FileFilter theoryDirFilter = new FileFilter() { - - public boolean accept(File pathname) { - return pathname.isDirectory() - && new File(pathname, theoryFilename).isFile(); - } - }; - // the first theory listed here is made active - private static final String[] defaultTheories = { - "/theories/red_green/red-green-theory.qth", - "/theories/black_white/black-white-theory.qth" - }; - private static final String theoryFilename = "theory.qth"; - private static final String rulesetFilename = "ruleset.qrs"; - private static final String lastTheoryKey = "last-active-theory"; - - public interface ChangeListener extends EventListener { - - void theoryAdded(Theory theory); - - void theoryRemoved(Theory theory); - } - private final File store; - private final Core core; - private Map theorys = new HashMap(); - EventListenerList listenerList = new EventListenerList(); - private final CoreChangeListener coreListener = new CoreChangeListener() { - - public void theoryChanged(TheoryChangeEvent evt) { - if (evt.getNewTheory() == null) { - return; - } - - try { - File rsetFile = getRulesetFile(evt.getNewTheory()); - if (rsetFile.isFile()) { - core.replaceRuleset(rsetFile); - } - } catch (CoreException ex) { - logger.log(Level.WARNING, "Could not load ruleset for " - + evt.getNewTheory().getName() + " theory", ex); - } catch (IOException ex) { - logger.log(Level.WARNING, "Could not load ruleset for " - + evt.getNewTheory().getName() + " theory", ex); - } - } - - public void theoryAboutToChange(TheoryChangeEvent evt) { - if (evt.getOldTheory() == null) { - return; - } - - try { - core.saveRuleset(getRulesetFile(evt.getOldTheory())); - } catch (CoreException ex) { - logger.log(Level.WARNING, "Could not save ruleset for " - + evt.getOldTheory().getName() + " theory", ex); - } catch (IOException ex) { - logger.log(Level.WARNING, "Could not save ruleset for " - + evt.getOldTheory().getName() + " theory", ex); - } - } - }; - - public TheoryManager(File localStore, Core core) { - this.store = localStore; - this.core = core; - core.addCoreChangeListener(coreListener); - loadFromStore(); - } - - public Core getCore() { - return core; - } - - public void addChangeListener(ChangeListener l) { - listenerList.add(ChangeListener.class, l); - } - - public void removeChangeListener(ChangeListener l) { - listenerList.remove(ChangeListener.class, l); - } - - protected void fireTheoryAdded(Theory theory) { - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length - 2; i >= 0; i -= 2) { - if (listeners[i] == ChangeListener.class) { - ((ChangeListener) listeners[i + 1]).theoryAdded(theory); - } - } - } - - protected void fireTheoryRemoved(Theory theory) { - Object[] listeners = listenerList.getListenerList(); - for (int i = listeners.length - 2; i >= 0; i -= 2) { - if (listeners[i] == ChangeListener.class) { - ((ChangeListener) listeners[i + 1]).theoryRemoved(theory); - } - } - } - - private File getTheoryDir(Theory theory) { - return new File(store, getId(theory)); - } - - private File getRulesetFile(Theory theory) { - return new File(getTheoryDir(theory), rulesetFilename); - } - - private File getTheoryFile(Theory theory) { - return new File(getTheoryDir(theory), theoryFilename); - } - - private void 
saveTheoryCopy(Theory theory, final File theoryDir) { - if (theoryDir.isDirectory()) { - for (File file : theoryDir.listFiles()) { - file.delete(); - } - } else if (!theoryDir.mkdir()) { - logger.log(Level.WARNING, - "Failed to create dir {0} to save theory", - theoryDir.getAbsolutePath()); - return; - } - - File theoryFile = new File(theoryDir, theoryFilename); - try { - theory.write(theoryFile, theoryDir); - } catch (IOException ex) { - logger.log(Level.WARNING, - "Failed to save local copy of theory", ex); - } - } - - /** - * Unload the theory - * - * @param theory - * @throws IllegalArgumentException theory is currently active - */ - public void unloadTheory(Theory theory) throws IllegalArgumentException { - if (theory == core.getActiveTheory()) { - throw new IllegalArgumentException("The " - + theory.getName() + " theory is currently active"); - } - String id = getId(theory); - File theoryDir = getTheoryDir(theory); - if (theoryDir.isDirectory()) { - for (File file : theoryDir.listFiles()) { - file.delete(); - } - theoryDir.delete(); - } - theorys.remove(id); - fireTheoryRemoved(theory); - } - - public Collection getTheories() { - return Collections.unmodifiableCollection(theorys.values()); - } - - /** - * - * @param url - * @return the loaded theory, or null if there was another theory with - * the same name - * @throws SAXException - * @throws IOException - */ - public Theory loadTheory(URL url) throws ParseException, IOException, DuplicateTheoryException { - Theory theory = Theory.fromUrl(url); - addTheory(theory); - saveTheoryCopy(theory, getTheoryDir(theory)); - return theory; - } - - public void saveState() { - Theory theory = core.getActiveTheory(); - if (theory == null) { - return; - } - - Preferences.userNodeForPackage(getClass()).put(lastTheoryKey, getId(theory)); - try { - core.saveRuleset(getRulesetFile(theory)); - } catch (CoreException ex) { - logger.log(Level.WARNING, "Could not save ruleset for " - + theory.getName() + " theory", ex); - } catch (IOException ex) { - logger.log(Level.WARNING, "Could not save ruleset for " - + theory.getName() + " theory", ex); - } - } - - private String getId(Theory theory) { - try { - return URLEncoder.encode(theory.getName(), "US-ASCII"); - } catch (UnsupportedEncodingException ex) { - throw new Error(ex); - } - } - - private void addTheory(Theory theory) throws DuplicateTheoryException { - String id = getId(theory); - if (theorys.containsKey(id)) { - throw new DuplicateTheoryException(theory.getName()); - } - theorys.put(id, theory); - fireTheoryAdded(theory); - } - - private void loadDefaults() { - for (String res : defaultTheories) { - try { - URL url = getClass().getResource(res); - if (url != null) { - Theory theory = loadTheory(url); - if (core.getActiveTheory() == null) { - core.updateCoreTheory(theory); - } - } else { - logger.log(Level.SEVERE, - "Could not find default theory {0}", res); - } - } catch (ParseException ex) { - logger.log(Level.SEVERE, - "Default theory " + res + " was not valid", - ex); - } catch (IOException ex) { - logger.log(Level.SEVERE, - "Could not open default theory " + res, - ex); - } catch (CoreException ex) { - logger.log(Level.SEVERE, - "Core did not recognise default theory " + res, - ex); - } catch (DuplicateTheoryException ex) { - logger.log(Level.SEVERE, - "Unexpected duplicate default theory names", - ex); - } - } - } - - private Theory loadSavedTheory(File theoryDir) { - try { - File theoryFile = new File(theoryDir, theoryFilename); - Theory theory = Theory.fromFile(theoryFile); - if 
(!theoryDir.getName().equals(getId(theory))) { - logger.log(Level.SEVERE, - "Theory encoding changed from under us: expected " - + "\"{0}\", got \"{1}\" for theory \"{2}\"", - new Object[]{ - getId(theory), - theoryDir.getName(), - theory.getName() - }); - // not much we can do if this fails - boolean moved = theoryDir.renameTo(new File(store, getId(theory))); - if (!moved) { - logger.log(Level.SEVERE, "Failed to rename theory directory"); - } - } - addTheory(theory); - return theory; - } catch (ParseException ex) { - logger.log(Level.SEVERE, - "Saved theory (" + theoryDir.getAbsolutePath() - + ") was not valid", - ex); - } catch (IOException ex) { - logger.log(Level.SEVERE, - "Could not open saved theory (" - + theoryDir.getAbsolutePath() + ")", - ex); - } catch (DuplicateTheoryException ex) { - logger.log(Level.SEVERE, - "Unexpected duplicate saved theory names", - ex); - } - return null; - } - - private void loadFromStore() { - if (!store.exists()) { - if (!store.mkdirs()) { - logger.log(Level.WARNING, "Could not create local theory store"); - } - loadDefaults(); - } else if (!store.isDirectory()) { - logger.log(Level.WARNING, - "Local theory store ({0}) is not a directory", - store.getAbsolutePath()); - } else { - String lastActiveId = Preferences.userNodeForPackage(getClass()).get(lastTheoryKey, null); - Theory activate = null; - - for (File dir : store.listFiles(theoryDirFilter)) { - Theory theory = loadSavedTheory(dir); - if (dir.getName().equals(lastActiveId)) { - activate = theory; - } - } - - if (activate == null && !theorys.isEmpty()) { - if (lastActiveId == null) { - logger.log(Level.FINE, - "Theories found, but no active theory recorded"); - } else { - logger.log(Level.FINE, - "Last active theory ({0}) was not found", - lastActiveId); - } - Iterator it = theorys.values().iterator(); - assert (it.hasNext()); - activate = it.next(); - } - - if (activate != null) { - try { - core.updateCoreTheory(activate); - } catch (CoreException ex) { - logger.log(Level.SEVERE, - "Saved theory (" + activate.getName() - + ") was not recognised by the core", ex); - } - } - } - } -} diff --git a/gui/src/quanto/gui/TheoryMenu.java b/gui/src/quanto/gui/TheoryMenu.java deleted file mode 100644 index 992fe193..00000000 --- a/gui/src/quanto/gui/TheoryMenu.java +++ /dev/null @@ -1,224 +0,0 @@ -package quanto.gui; - -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; -import java.util.logging.Logger; -import javax.swing.ButtonGroup; -import javax.swing.JMenu; -import javax.swing.JMenuItem; -import javax.swing.JRadioButtonMenuItem; -import org.xml.sax.SAXException; -import quanto.core.CoreChangeListener; -import quanto.core.CoreException; -import quanto.core.ParseException; -import quanto.core.Theory; -import quanto.core.TheoryChangeEvent; - -/** - * - * @author alex - */ -public class TheoryMenu extends JMenu { - - private final static Logger logger = Logger.getLogger("quanto.gui.TheoryMenu"); - private final TheoryManager manager; - private QuantoFrame parent; - private JMenu removeMenu; - private List shadowList; - private int activeIndex = -1; - private ButtonGroup activeTheoryGroup = new ButtonGroup(); - private final Comparator theoryAlphaComparator = - new Comparator() { - - public int compare(Theory o1, Theory o2) { - if (o1 == o2) { - return 0; - } - if (o1 == null) { - return -1; - } - if (o2 == null) { - 
return 1; - } - int result = o1.getName().compareToIgnoreCase( - o2.getName()); - if (result == 0) { - result = o1.getName().compareTo(o2.getName()); - } - return result; - } - }; - private final TheoryManager.ChangeListener theoryManagerListener = - new TheoryManager.ChangeListener() { - - public void theoryAdded(Theory theory) { - int pos = Collections.binarySearch(shadowList, theory, theoryAlphaComparator); - assert (pos < 0); - pos = -(pos + 1); - - TheoryRadioMenuItem setItem = new TheoryRadioMenuItem(theory); - setItem.addActionListener(setActiveTheoryListener); - activeTheoryGroup.add(setItem); - TheoryMenu.this.add(setItem, pos); - - TheoryMenuItem remItem = new TheoryMenuItem(theory); - remItem.addActionListener(unloadTheoryListener); - removeMenu.add(remItem, (activeIndex < pos) ? pos - 1 : pos); - - if (activeIndex >= pos) { - ++activeIndex; - } - shadowList.add(pos, theory); - } - - public void theoryRemoved(Theory theory) { - int pos = Collections.binarySearch(shadowList, theory, theoryAlphaComparator); - assert (pos >= 0); - assert (pos != activeIndex); - - activeTheoryGroup.remove((TheoryRadioMenuItem) TheoryMenu.this.getMenuComponent(pos)); - TheoryMenu.this.remove(pos); - removeMenu.remove((activeIndex < pos) ? pos - 1 : pos); - - shadowList.remove(pos); - if (activeIndex > pos) { - --activeIndex; - } - } - }; - private final CoreChangeListener coreListener = - new CoreChangeListener() { - - public void theoryAboutToChange(TheoryChangeEvent evt) { - } - - public void theoryChanged(TheoryChangeEvent evt) { - if (activeIndex >= 0) { - assert (shadowList.get(activeIndex) == evt.getOldTheory()); - TheoryMenuItem item = new TheoryMenuItem(shadowList.get(activeIndex)); - item.addActionListener(unloadTheoryListener); - removeMenu.add(item, activeIndex); - activeIndex = -1; - } - assert (activeIndex < 0); - if (evt.getNewTheory() != null) { - int pos = Collections.binarySearch(shadowList, evt.getNewTheory(), theoryAlphaComparator); - assert (pos >= 0); - activeIndex = pos; - removeMenu.remove(activeIndex); - } - } - }; - private final ActionListener setActiveTheoryListener = - new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - TheoryRadioMenuItem tmi = (TheoryRadioMenuItem) e.getSource(); - manager.getCore().updateCoreTheory(tmi.theory); - } catch (CoreException ex) { - parent.coreErrorDialog(null, "Could not change theory", ex); - } - } - }; - private final ActionListener unloadTheoryListener = - new ActionListener() { - - public void actionPerformed(ActionEvent e) { - TheoryMenuItem tmi = (TheoryMenuItem) e.getSource(); - manager.unloadTheory(tmi.theory); - } - }; - - private static class TheoryMenuItem extends JMenuItem { - - public Theory theory; - - public TheoryMenuItem(Theory theory) { - super(theory.getName()); - this.theory = theory; - } - } - - private static class TheoryRadioMenuItem extends JRadioButtonMenuItem { - - public Theory theory; - - public TheoryRadioMenuItem(Theory theory) { - super(theory.getName()); - this.theory = theory; - } - } - - public TheoryMenu(TheoryManager manager, QuantoFrame parent) { - super("Theories"); - this.manager = manager; - this.parent = parent; - - manager.addChangeListener(theoryManagerListener); - manager.getCore().addCoreChangeListener(coreListener); - removeMenu = new JMenu("Unload"); - - shadowList = new ArrayList(manager.getTheories()); - Collections.sort(shadowList, theoryAlphaComparator); - int i = 0; - for (Theory theory : shadowList) { - TheoryRadioMenuItem setItem = new 
TheoryRadioMenuItem(theory); - setItem.addActionListener(setActiveTheoryListener); - activeTheoryGroup.add(setItem); - this.add(setItem); - - if (theory == manager.getCore().getActiveTheory()) { - activeIndex = i; - setItem.setSelected(true); - } else { - TheoryMenuItem removeItem = new TheoryMenuItem(theory); - removeItem.addActionListener(unloadTheoryListener); - removeMenu.add(removeItem); - } - ++i; - } - - this.addSeparator(); - addLoadMenuItem(); - this.add(removeMenu); - } - - private void loadTheory() { - File f = parent.openFile("Select theory file", QuantoApp.DIR_THEORY); - if (f != null) { - try { - Theory theory = manager.loadTheory(f.toURI().toURL()); - manager.getCore().updateCoreTheory(theory); - } catch (CoreException ex) { - parent.coreErrorDialog(null, "Could not change theory", ex); - } catch (ParseException ex) { - parent.detailedErrorDialog(null, "Open Theory", "Could not parse theory", ex); - } catch (IOException ex) { - parent.detailedErrorDialog(null, "Open Theory", "Could not read theory", ex); - } catch (DuplicateTheoryException ex) { - // FIXME: maybe offer to replace it directly? - parent.errorDialog(null, "Open Theory", "There is already a theory named \"" - + ex.getTheoryName() - + "\"; please remove it first"); - } - } - } - - private void addLoadMenuItem() { - JMenuItem item = new JMenuItem("Import new..."); - item.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - loadTheory(); - } - }); - this.add(item); - } -} diff --git a/gui/src/quanto/gui/TikzOutput.java b/gui/src/quanto/gui/TikzOutput.java deleted file mode 100644 index ddc2be32..00000000 --- a/gui/src/quanto/gui/TikzOutput.java +++ /dev/null @@ -1,57 +0,0 @@ -package quanto.gui; - -import quanto.core.data.Vertex; -import quanto.core.data.Edge; -import quanto.core.data.CoreGraph; -import java.awt.geom.Point2D; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.graph.util.BalancedEdgeIndexFunction; -import edu.uci.ics.jung.graph.util.EdgeIndexFunction; - -public class TikzOutput { - - public static String generate(CoreGraph graph, Layout layout) { - StringBuilder tikz = new StringBuilder("\\begin{tikzpicture}[quanto]\n"); - synchronized (graph) { - Point2D p; - String col; - for (Vertex v : graph.getVertices()) { - p = layout.transform(v); - if (v.getVertexType() != null) - col = v.getVertexType().toString().toLowerCase(); - else - col = "wire"; - tikz.append("\\node [").append(col).append(" vertex] ").append("(").append(v.getCoreName()).append(") ").append("at (").append(Double.toString(Math.floor(p.getX()) / 40.0)).append(",").append(Double.toString(Math.floor(p.getY()) / -40.0)).append(") {};\n"); - } - - EdgeIndexFunction eif = - BalancedEdgeIndexFunction.getInstance(); - - int idx; - for (Edge e : graph.getEdges()) { - idx = eif.getIndex(graph, e) + 1; - tikz.append("\\draw ["); - if (e.isDirected()) { - tikz.append("-latex"); - } - if (idx != 0) { - tikz.append(",bend left=").append(idx * 20); - } - tikz.append("] (").append(graph.getSource(e).getCoreName()).append(") to ").append("(").append(graph.getDest(e).getCoreName()).append(");\n"); - } - - for (Vertex v : graph.getVertices()) { - if (v.getVertexType() != null) - col = v.getVertexType().toString().toLowerCase(); - else - col = "wire"; - if (!v.getLabel().equals("0")) { - tikz.append("\\node [").append(col).append(" angle] at (").append(v.getCoreName()).append(") {$").append(v.getLabel()).append("$};\n"); - } - } - } - tikz.append("\\end{tikzpicture}\n"); - return 
tikz.toString(); - } -} diff --git a/gui/src/quanto/gui/Toolbox.java b/gui/src/quanto/gui/Toolbox.java deleted file mode 100644 index 00c0c68a..00000000 --- a/gui/src/quanto/gui/Toolbox.java +++ /dev/null @@ -1,332 +0,0 @@ -package quanto.gui; - -import java.awt.GridLayout; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.awt.event.MouseEvent; -import java.awt.event.MouseListener; -import java.awt.geom.Rectangle2D; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.logging.Level; -import java.util.logging.Logger; - -import javax.swing.BorderFactory; -import javax.swing.BoxLayout; -import javax.swing.ImageIcon; -import javax.swing.JButton; -import javax.swing.JMenu; -import javax.swing.JMenuItem; -import javax.swing.JPanel; -import javax.swing.JPopupMenu; - - -import quanto.core.Core; -import quanto.core.CoreChangeListener; -import quanto.core.CoreException; -import quanto.core.TheoryChangeEvent; -import quanto.core.Theory; -import quanto.core.data.BangBox; -import quanto.core.data.CoreGraph; -import quanto.core.data.Vertex; -import quanto.core.data.VertexType; - -/* - * Toolbox : Allows to add vertices/gates - * bang/unbang/etc... vertices. - */ -public class Toolbox extends JPanel { - - private Core core; - private ViewPort viewPort; - private final static Logger logger = - Logger.getLogger("quanto.gui"); - - private void showModalError(String message, CoreException ex) { - logger.log(Level.SEVERE, message, ex); - DetailedErrorDialog.showCoreErrorDialog(this, message, ex); - } - - public Toolbox(Core core, ViewPort viewPort) { - /* - * The toolbox is divided in 2 distinct categories : add, - * bangbox stuff. - * They are all using a grid layout. - */ - - this.setLayout(new BoxLayout(this, BoxLayout.PAGE_AXIS)); - - this.core = core; - this.viewPort = viewPort; - - JPanel controlArea = new JPanel(); - controlArea.setLayout(new BoxLayout(controlArea, BoxLayout.Y_AXIS)); - - controlArea.add(new AddVertexArea()); - controlArea.add(createBangBoxArea()); - this.add(controlArea); - } - - private ImageIcon createImageIcon(String path, - String description) { - java.net.URL imgURL = getClass().getResource(path); - if (imgURL != null) { - return new ImageIcon(imgURL, description); - } else { - System.err.println("Couldn't find file: " + path); - return null; - } - } - - private ToolboxArea createBangBoxArea() { - final ToolboxArea bangBoxArea = new ToolboxArea("Bang Boxes", 3, 2); - JButton button = new JButton(createImageIcon("/toolbarButtonGraphics/quanto/BangVertex32.png", "Bang Vertices")); - bangBoxArea.add(button); - button.setToolTipText("Bang Vertices"); - button.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - viewPort.executeCommand("bang-vertices-command"); - } - }); - - button = new JButton(createImageIcon("/toolbarButtonGraphics/quanto/UnbangVertex32.png", "Unbang Vertices")); - bangBoxArea.add(button); - button.setToolTipText("Unbang Vertices"); - button.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - viewPort.executeCommand("unbang-vertices-command"); - } - }); - - //Unbang and Drop seem to do the exact same thing... 
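	// Editor's note (assumption, not verified against the core protocol): "unbang-vertices-command"
	// removes the selected vertices from their !-box, whereas "drop-bang-box-command" deletes the
	// !-box itself while keeping its contents in the graph, so the two buttons are not equivalent
	// even though they currently share the UnbangVertex32 icon.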
- button = new JButton(createImageIcon("/toolbarButtonGraphics/quanto/UnbangVertex32.png", "Drop Vertices")); - bangBoxArea.add(button); - button.setToolTipText("Drop Bang Box"); - button.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - viewPort.executeCommand("drop-bang-box-command"); - } - }); - button = new JButton(createImageIcon("/toolbarButtonGraphics/quanto/KillBangBox32.png", "Kill Bang Box")); - bangBoxArea.add(button); - button.setToolTipText("Kill Bang Box"); - button.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - viewPort.executeCommand("kill-bang-box-command"); - } - }); - - button = new JButton(createImageIcon("/toolbarButtonGraphics/quanto/DuplicateBangBox32.png", "Duplicate Bang Box")); - bangBoxArea.add(button); - button.setToolTipText("Duplicate Bang Box"); - button.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - viewPort.executeCommand("duplicate-bang-box-command"); - } - }); - - final JButton popbutton = new JButton("+"); - bangBoxArea.add(popbutton); - popbutton.setToolTipText("More..."); - popbutton.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - JPopupMenu popup = createPopupMenu(); - if (popup != null) { - popup.show(bangBoxArea, popbutton.getX() + popbutton.getWidth(), popbutton.getY()); - } - } - }); - - return bangBoxArea; - } - - private JMenu createSubPopupMenu(final BangBox bangBox, final InteractiveGraphView view) { - final CoreGraph graph = view.getGraph(); - JMenu menu = new JMenu(bangBox.getCoreName()); - JMenu subMenu = new JMenu("Bang Vertex..."); - if (graph.getVertices().size() > 0) { - for (final Vertex v : graph.getVertices()) { - if (graph.getBoxedVertices(bangBox).contains(v)) { - continue; - } - JMenuItem menuItem = new JMenuItem(v.getCoreName()); - menuItem.addMouseListener(new MouseListener() { - - public void mouseEntered(MouseEvent e) { - view.highlightSubgraph(Collections.singleton(v)); - } - - public void mouseExited(MouseEvent e) { - view.clearHighlight(); - } - - public void mouseClicked(MouseEvent arg0) {} - - public void mousePressed(MouseEvent arg0) { - view.clearHighlight(); - } - - public void mouseReleased(MouseEvent arg0) {} - }); - menuItem.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - HashSet vertices = new HashSet(); - vertices.add(v); - try { - core.bangVertices(graph, bangBox, vertices); - } catch (CoreException ex) { - showModalError("Bang Vertex", ex); - } - } - }); - subMenu.add(menuItem); - } - menu.add(subMenu); - } - if (graph.getBangBoxes().size() > 1) { - subMenu = new JMenu("Merge with..."); - for (final BangBox b : graph.getBangBoxes()) { - if (b.getCoreName().equals(bangBox.getCoreName())) { - continue; - } - JMenuItem menuItem = new JMenuItem(b.getCoreName()); - menuItem.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - core.mergeBangBoxes(graph, Arrays.asList(b, bangBox)); - } catch (CoreException ex) { - showModalError("Merge !-Box", ex); - } - } - }); - subMenu.add(menuItem); - } - menu.add(subMenu); - } - String label; - if (graph.getBoxedVertices(bangBox).isEmpty()) { - label = "Drop this (empty) !-Box"; - } else { - label = "Drop this !-Box"; - } - JMenuItem menuItem = new JMenuItem(label); - menuItem.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - try { - core.dropBangBoxes(graph, 
Collections.singleton(bangBox)); - } catch (CoreException ex) { - showModalError("Drop!-Box", ex); - } - } - }); - menu.add(menuItem); - return menu; - } - - private JPopupMenu createPopupMenu() { - InteractiveGraphView view; - if (viewPort.getAttachedView() instanceof InteractiveGraphView) { - view = (InteractiveGraphView) viewPort.getAttachedView(); - } else if (viewPort.getAttachedView() instanceof SplitGraphView) { - if (((SplitGraphView) viewPort.getAttachedView()).isLeftFocused()) { - view = (InteractiveGraphView) ((SplitGraphView) viewPort.getAttachedView()).getLeftView(); - } else { - view = (InteractiveGraphView) ((SplitGraphView) viewPort.getAttachedView()).getRightView(); - } - } else { - return null; - } - CoreGraph graph = view.getGraph(); - JPopupMenu menu = new JPopupMenu(); - if (graph.getBangBoxes().isEmpty()) { - JMenuItem menuItem = new JMenuItem("No !-Boxes"); - menu.add(menuItem); - menuItem.setEnabled(false); - return menu; - } - - //Create a submenu for each !-Box - for (BangBox b : graph.getBangBoxes()) { - JMenu menuItem = createSubPopupMenu(b, view); - menu.add(menuItem); - } - - return menu; - } - - class ToolboxArea extends JPanel { - - public ToolboxArea(String name, int rows, int columns) { - super(new GridLayout(rows, columns)); - setBorder(BorderFactory.createTitledBorder(name)); - } - - protected void setRows(int rows) { - ((GridLayout) getLayout()).setRows(rows); - } - } - - private static int rowsForTheory(Theory theory) { - return (int) Math.ceil(((float) theory.getVertexTypes().size() + 1) / 2); - } - - class AddVertexArea extends ToolboxArea { - - public AddVertexArea() { - super("Add", rowsForTheory(core.getActiveTheory()), 2); - loadButtons(); - core.addCoreChangeListener(new CoreChangeListener() { - - public void theoryChanged(TheoryChangeEvent evt) { - AddVertexArea.this.removeAll(); - setRows(rowsForTheory(core.getActiveTheory())); - loadButtons(); - AddVertexArea.this.validate(); - } - - public void theoryAboutToChange(TheoryChangeEvent evt) { - } - }); - } - - private void loadButtons() { - /* - * Then loop though all the types of vertices - */ - for (final VertexType vertexType : core.getActiveTheory().getVertexTypes()) { - JButton button = new JButton(vertexType.getVisualizationData().getIcon()); - this.add(button); - String toolTipText = "Add vertex of type " + vertexType.getTypeName(); - if (vertexType.getMnemonic() != null) { - toolTipText += " - '" + vertexType.getMnemonic() + "'"; - } - button.setToolTipText(toolTipText); - button.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - viewPort.executeCommand("add-" + vertexType.getTypeName() + "-vertex-command"); - } - }); - } - JButton button = new JButton(createImageIcon("/toolbarButtonGraphics/quanto/EdgePointIcon20.png", "Add Boundary Vertex")); - this.add(button); - button.setToolTipText("Add Boundary Vertex"); - button.addActionListener(new ActionListener() { - - public void actionPerformed(ActionEvent e) { - viewPort.executeCommand("add-boundary-vertex-command"); - } - }); - } - } -} diff --git a/gui/src/quanto/gui/ViewPort.java b/gui/src/quanto/gui/ViewPort.java deleted file mode 100644 index cf89f397..00000000 --- a/gui/src/quanto/gui/ViewPort.java +++ /dev/null @@ -1,275 +0,0 @@ -package quanto.gui; - -import java.awt.BorderLayout; -import java.awt.event.ActionListener; -import java.awt.event.MouseAdapter; -import java.awt.event.MouseEvent; -import java.beans.PropertyChangeEvent; -import java.beans.PropertyChangeListener; -import 
java.util.Map; - -import javax.swing.*; -import javax.swing.border.EtchedBorder; -import quanto.core.Core; - -public class ViewPort extends JPanel { - - private static final long serialVersionUID = -2789609872128334500L; - private ViewPortHost host; - private InteractiveView attachedView = null; - private JLabel pickView = null; - private final String arrowDown = "\u25be"; - private final InteractiveViewManager viewManager; - private boolean showInternalNames = false; - private ViewRenameListener viewRenameListener = new ViewRenameListener(); - private final ConsoleView console; - - private class ViewRenameListener implements PropertyChangeListener { - - public void propertyChange(PropertyChangeEvent evt) { - if (evt.getSource() == attachedView) { - refreshLabel(); - } - } - } - - /** - * The result of closing the current view - */ - public enum CloseResult { - - /** The view was closed, and another view has taken its place */ - Success, - /** The closing of the view was cancelled by the user */ - Cancelled, - /** The view was closed, but there are no more views to replace it with */ - NoMoreViews - } - - public ViewPort(InteractiveViewManager viewManager, - ViewPortHost host, Core core) { - this.viewManager = viewManager; - this.host = host; - this.console = new ConsoleView(core.getTalker()); - setLayout(new BorderLayout()); - pickView = makeViewMenu(); - add(pickView, BorderLayout.NORTH); - } - - public void openView(InteractiveView view) { - host.openView(view); - } - - public InteractiveViewManager getViewManager() { - return viewManager; - } - - public void setShowInternalNames(boolean showInternalNames) { - this.showInternalNames = showInternalNames; - } - - public boolean showInternalNames() { - return showInternalNames; - } - - /** - * Attaches a view to this viewport, detaching any existing view. 
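	 *
	 * A usage sketch (editor's illustration, assuming a ViewPort and its
	 * InteractiveViewManager are in scope; not part of the original source):
	 * <pre>
	 *   InteractiveView next = viewManager.getNextFreeView();
	 *   if (next != null && !next.isAttached()) {
	 *       viewPort.attachView(next);
	 *   }
	 * </pre>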
- * @param view The view to attach - * @throws ViewUnavailableException @p view is already attached to another viewport - */ - public void attachView(InteractiveView view) - throws ViewUnavailableException { - if (view == attachedView) { - return; - } - if (view.isAttached()) { - throw new ViewUnavailableException(); - } - - clearPort(); - - if (view.getParent() != null) { - throw new IllegalStateException("View '" + view.getTitle() + "' is already being displayed"); - } - - add(view, BorderLayout.CENTER); - attachedView = view; - host.setViewAllowedToClose(view != console); - view.setViewPort(this); - refreshLabel(); - view.addPropertyChangeListener("title", viewRenameListener); - view.addPropertyChangeListener("saved", viewRenameListener); - host.attachedViewChanged(attachedView); - validate(); - repaint(); - } - - private void switchToNextAvailableView() { - InteractiveView newView = viewManager.getNextFreeView(); - if (newView == null) { - newView = console; - } - attachView(newView); - } - - public void switchToConsole() { - attachView(console); - } - - public void clearPort() { - if (attachedView != null) { - attachedView.removePropertyChangeListener("title", viewRenameListener); - attachedView.removePropertyChangeListener("saved", viewRenameListener); - attachedView.setViewPort(null); - remove(attachedView); - attachedView = null; - host.setViewAllowedToClose(false); - setLabel(""); - } - } - - public CloseResult closeCurrentView() { - if (attachedView == null) { - throw new IllegalStateException("There is no currently attached view"); - } else { - if (!attachedView.checkCanClose()) { - return CloseResult.Cancelled; - } - InteractiveView oldView = attachedView; - clearPort(); - viewManager.removeView(oldView); - oldView.cleanUp(); - - switchToNextAvailableView(); - - return CloseResult.Success; - } - } - - public void setCommandEnabled(CommandManager.Command command, boolean enabled) { - host.setCommandEnabled(command.toString(), enabled); - } - - public void setCommandEnabled(String command, boolean enabled) { - host.setCommandEnabled(command, enabled); - } - - public boolean isCommandEnabled(CommandManager.Command command) { - return host.isCommandEnabled(command.toString()); - } - - public boolean isCommandEnabled(String command) { - return host.isCommandEnabled(command); - } - - public void setCommandStateSelected(CommandManager.Command command, boolean selected) { - host.setCommandStateSelected(command.toString(), selected); - } - - public void setCommandStateSelected(String command, boolean selected) { - host.setCommandStateSelected(command, selected); - } - - public boolean isCommandStateSelected(CommandManager.Command command) { - return host.isCommandStateSelected(command.toString()); - } - - public boolean isCommandStateSelected(String command) { - return host.isCommandStateSelected(command); - } - - public void executeCommand(String command) { - if (attachedView != null) { - attachedView.commandTriggered(command); - } - } - - private JLabel makeViewMenu() { - final JLabel picker = new JLabel(" (no views) " + arrowDown); - picker.setOpaque(true); - picker.setBorder(BorderFactory.createEtchedBorder(EtchedBorder.LOWERED)); - java.net.URL imgURL = getClass().getResource("/toolbarButtonGraphics/navigation/Down16.gif"); - if (imgURL != null) { - picker.setIcon(new ImageIcon(imgURL)); - } - final JPopupMenu viewMenu = new JPopupMenu(); - picker.addMouseListener(new MouseAdapter() { - - private JMenuItem createMenuItem(String name, final InteractiveView view) { - JMenuItem item = 
new JMenuItem(name); - item.setFont(item.getFont().deriveFont(12.0f)); - item.setEnabled(!view.isAttached()); - item.addActionListener(new ActionListener() { - - public void actionPerformed(java.awt.event.ActionEvent e) { - attachView(view); - } - }); - return item; - } - - @Override - public void mouseClicked(MouseEvent e) { - viewMenu.removeAll(); - - viewMenu.add(createMenuItem("console", console)); - - Map views = viewManager.getViews(); - if (!views.isEmpty()) { - viewMenu.addSeparator(); - } - - for (final Map.Entry ent : views.entrySet()) { - String title = ent.getKey(); - if (!ent.getValue().isSaved()) { - title += "*"; - } - viewMenu.add(createMenuItem(title, ent.getValue())); - } - int yoffset = picker.getHeight(); - if (picker.getBorder() != null) { - yoffset -= picker.getBorder().getBorderInsets(picker).top; - } - viewMenu.show(picker, 0, yoffset); - } - }); - return picker; - } - - public InteractiveView getAttachedView() { - return attachedView; - } - - public void preventViewClosure() { - host.setViewAllowedToClose(false); - } - - private void refreshLabel() { - if (attachedView == console) { - setLabel("console"); - } else if (attachedView != null) { - String name = viewManager.getViewName(attachedView); - if (!attachedView.isSaved()) { - name += "*"; - } - // if the view names and graph names are out of sync, show it - if (showInternalNames) { - if (attachedView instanceof InteractiveGraphView) { - name += " (" + ((InteractiveGraphView) attachedView).getGraph().getCoreName() + ")"; - } else if (attachedView instanceof SplitGraphView) { - name += String.format(" (%s -> %s)", - ((SplitGraphView) attachedView).getLeftView().getGraph().getCoreName(), - ((SplitGraphView) attachedView).getRightView().getGraph().getCoreName()); - } - } - setLabel(name); - } - } - - private void setLabel(String text) { - if (text == null) { - text = "[null]"; - } - pickView.setText(" " + text + " " + arrowDown); - } -} diff --git a/gui/src/quanto/gui/ViewPortHost.java b/gui/src/quanto/gui/ViewPortHost.java deleted file mode 100644 index 9095e679..00000000 --- a/gui/src/quanto/gui/ViewPortHost.java +++ /dev/null @@ -1,38 +0,0 @@ -package quanto.gui; - -/** - * The interface ViewPort expects its host to provide. - * - * @author alex - */ -public interface ViewPortHost { - - /** - * Opens the selected view. - * - * This may be in a new window, or in the same view port - * - * @param view The view to open - */ - void openView(InteractiveView view); - - /** - * ViewPort can use this to prevent the host from closing the - * current view. - * - * This may disable a menu item, for example. - * - * @param allowed Whether the current view can be closed. 
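	 *
	 * Editor's illustration (assumption; <code>closeAction</code> is hypothetical): a frame
	 * hosting the view port might map this straight onto its "Close" action, e.g.
	 * <pre>
	 *   public void setViewAllowedToClose(boolean allowed) {
	 *       closeAction.setEnabled(allowed);
	 *   }
	 * </pre>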
- */ - void setViewAllowedToClose(boolean allowed); - - void attachedViewChanged(InteractiveView newView); - - void setCommandEnabled(String command, boolean enabled); - - boolean isCommandEnabled(String command); - - void setCommandStateSelected(String command, boolean selected); - - boolean isCommandStateSelected(String command); -} diff --git a/gui/src/quanto/gui/ViewUnavailableException.java b/gui/src/quanto/gui/ViewUnavailableException.java deleted file mode 100644 index 56582adb..00000000 --- a/gui/src/quanto/gui/ViewUnavailableException.java +++ /dev/null @@ -1,10 +0,0 @@ -package quanto.gui; - -/** - * - * @author alemer - */ -public class ViewUnavailableException extends RuntimeException { - - private static final long serialVersionUID = 5716777233300586133L; -} diff --git a/gui/src/quanto/gui/graphhelpers/BackdropPaintable.java b/gui/src/quanto/gui/graphhelpers/BackdropPaintable.java deleted file mode 100644 index 4e220557..00000000 --- a/gui/src/quanto/gui/graphhelpers/BackdropPaintable.java +++ /dev/null @@ -1,50 +0,0 @@ -package quanto.gui.graphhelpers; - -import edu.uci.ics.jung.algorithms.layout.Layout; -import edu.uci.ics.jung.visualization.VisualizationServer; -import java.awt.Color; -import java.awt.Dimension; -import java.awt.Graphics; -import java.awt.Graphics2D; -import java.awt.geom.Rectangle2D; -import quanto.core.data.Edge; -import quanto.core.data.Vertex; - -/** - * - * @author alemer - */ -public class BackdropPaintable implements VisualizationServer.Paintable { - - private Color pageBackground = Color.WHITE; - private Layout layout; - - public BackdropPaintable(Layout layout) { - // FIXME: better to update the size when it changes? - this.layout = layout; - } - - public Color getBackgroundColor() { - return pageBackground; - } - - public void setBackgroundColor(Color color) { - pageBackground = color; - } - - public void paint(Graphics g) { - Graphics2D gr = (Graphics2D) g; - Color oldColor = g.getColor(); - Dimension size = layout.getSize(); - Rectangle2D bounds = new Rectangle2D.Double(0, 0, size.getWidth(), size.getHeight()); - g.setColor(pageBackground); - gr.fill(bounds); - g.setColor(Color.black); - gr.draw(bounds); - g.setColor(oldColor); - } - - public boolean useTransform() { - return true; - } -} diff --git a/gui/src/quanto/gui/graphhelpers/BangBoxRenderer.java b/gui/src/quanto/gui/graphhelpers/BangBoxRenderer.java deleted file mode 100644 index 5b840cf0..00000000 --- a/gui/src/quanto/gui/graphhelpers/BangBoxRenderer.java +++ /dev/null @@ -1,26 +0,0 @@ -package quanto.gui.graphhelpers; - -import edu.uci.ics.jung.contrib.visualization.BangBoxGraphRenderContext; -import edu.uci.ics.jung.contrib.visualization.renderers.BasicBangBoxRenderer; - -import java.awt.Shape; - -import quanto.core.data.Edge; -import quanto.core.data.Vertex; -import quanto.core.data.BangBox; - -public class BangBoxRenderer extends BasicBangBoxRenderer { - - Shape shape = null; - - @Override - protected void paintShapeForBangBox(BangBoxGraphRenderContext rc, BangBox b, Shape shape) { - - super.paintShapeForBangBox((BangBoxGraphRenderContext) rc, b, shape); - this.shape = shape; - } - - public Shape getShape() { - return this.shape; - } -} diff --git a/gui/src/quanto/gui/graphhelpers/ConstrainedMutableAffineTransformer.java b/gui/src/quanto/gui/graphhelpers/ConstrainedMutableAffineTransformer.java deleted file mode 100644 index b4b7a607..00000000 --- a/gui/src/quanto/gui/graphhelpers/ConstrainedMutableAffineTransformer.java +++ /dev/null @@ -1,28 +0,0 @@ -package 
quanto.gui.graphhelpers; - -import edu.uci.ics.jung.visualization.transform.MutableAffineTransformer; -import java.awt.geom.Point2D; - -/** - * Makes sure the origin never moves in a positive direction on either axis - * - * @author alemer - */ -public class ConstrainedMutableAffineTransformer extends MutableAffineTransformer { - - private void checkAndAdjust() { - Point2D p = transform(new Point2D.Double(0.0, 0.0)); - double dx = (p.getX() > 0) ? -p.getX() : 0.0; - double dy = (p.getY() > 0) ? -p.getY() : 0.0; - if (dx != 0.0 || dy != 0.0) { - inverse = null; - transform.translate(dx, dy); - } - } - - @Override - public void fireStateChanged() { - checkAndAdjust(); - super.fireStateChanged(); - } -} diff --git a/gui/src/quanto/gui/graphhelpers/Labeler.java b/gui/src/quanto/gui/graphhelpers/Labeler.java deleted file mode 100644 index 81869c00..00000000 --- a/gui/src/quanto/gui/graphhelpers/Labeler.java +++ /dev/null @@ -1,197 +0,0 @@ -package quanto.gui.graphhelpers; - -import java.awt.BorderLayout; -import java.awt.Color; -import java.awt.Point; -import java.awt.Rectangle; -import java.awt.event.*; -import java.util.EventListener; -import java.util.EventObject; - -import javax.swing.JComponent; -import javax.swing.JLabel; -import javax.swing.JPanel; -import javax.swing.JTextField; -import javax.swing.border.LineBorder; -import javax.swing.event.ChangeEvent; -import javax.swing.event.ChangeListener; - -import quanto.core.data.GraphElementData; - -@SuppressWarnings("serial") -public class Labeler extends JPanel implements MouseListener, KeyListener, FocusListener { - - public class LabelChangeEvent extends EventObject { - private String oldText; - private String newText; - - public LabelChangeEvent(Object source, String oldText, String newText) { - super(source); - this.oldText = oldText; - this.newText = newText; - } - - public String getOldText() { - return oldText; - } - - public String getNewText() { - return newText; - } - } - - public interface LabelChangeListener extends EventListener { - boolean aboutToChangeLabel(LabelChangeEvent evt); - } - - JLabel label; - JTextField textField; - JComponent active; - ChangeEvent evt; - Point idealLocation; - GraphElementData data; - - public Labeler(GraphElementData data) { - setLayout(new BorderLayout()); - this.data = data; - evt = new ChangeEvent(this); - label = new JLabel(); - label.setOpaque(false); - setColor(Color.yellow); - textField = new JTextField(); - label.setText(data.getDisplayString()); - - addMouseListener(this); - textField.addKeyListener(this); - textField.addFocusListener(this); - active = label; - add(active, BorderLayout.CENTER); - refresh(); - } - - public Labeler(String value) { - this(new GraphElementData(value)); - } - - @Override - public void setLocation(Point p) { - super.setLocation(p); - } - - public Point getIdealLocation() { - return idealLocation; - } - - public void setIdealLocation(Point p) { - idealLocation = p; - } - - public final void setColor(Color c) { - setBackground(c); - setBorder(new LineBorder(c, 1)); - } - - //@Override - public void mouseClicked(MouseEvent e) { - if (e.getClickCount() == 2) { - textField.setText(getText()); - remove(label); - add(textField, BorderLayout.CENTER); - textField.grabFocus(); - textField.selectAll(); - active = textField; - refresh(); - } - } - - //@Override - public void keyReleased(KeyEvent e) { - if (e.getKeyCode() == KeyEvent.VK_ENTER) { - updateLabel(); - } - refresh(); - } - - //@Override - public void focusLost(FocusEvent e) { - updateLabel(); - refresh(); - } - 
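	/*
	 * Editor's sketch (illustration only, not from the original file): a client can veto an
	 * in-place edit by returning false from aboutToChangeLabel, e.g. to reject empty labels:
	 *
	 *   labeler.addLabelChangeListener(new LabelChangeListener() {
	 *       public boolean aboutToChangeLabel(LabelChangeEvent evt) {
	 *           return evt.getNewText().length() > 0;
	 *       }
	 *   });
	 */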
- public boolean isBeingEdited() { - return active == textField; - } - - private void updateLabel() { - String oldText = getText(); - String newText = textField.getText(); - if (oldText.equals(newText)) - return; - if (fireAboutToChangeLabel(oldText, newText)) { - setText(newText); - } else { - textField.setText(oldText); - } - remove(textField); - active = label; - add(active, BorderLayout.CENTER); - } - - private void refresh() { - revalidate(); - // along with keeping the bounds, this forces a redraw - setBounds(new Rectangle(getPreferredSize())); - repaint(); - } - - public void update() { - label.setText(data.getDisplayString()); - refresh(); - } - - public String getText() { - return data.getEditableString(); - } - - public final void setText(String text) { - data.setString(text); - update(); - } - - public void addLabelChangeListener(LabelChangeListener l) { - listenerList.add(LabelChangeListener.class, l); - } - - public boolean fireAboutToChangeLabel(String oldValue, String newValue) { - LabelChangeListener[] listeners = - listenerList.getListeners(LabelChangeListener.class); - LabelChangeEvent evt = new LabelChangeEvent(this, oldValue, newValue); - for (LabelChangeListener l : listeners) { - if (!l.aboutToChangeLabel(evt)) - return false; - } - return true; - } - - // stubs - public void mouseEntered(MouseEvent e) { - } - - public void mouseExited(MouseEvent e) { - } - - public void mousePressed(MouseEvent e) { - } - - public void mouseReleased(MouseEvent e) { - } - - public void keyPressed(KeyEvent e) { - } - - public void keyTyped(KeyEvent e) { - } - - public void focusGained(FocusEvent e) { - } -} diff --git a/gui/src/quanto/gui/graphhelpers/QBangBoxLabeler.java b/gui/src/quanto/gui/graphhelpers/QBangBoxLabeler.java deleted file mode 100644 index 10387c7b..00000000 --- a/gui/src/quanto/gui/graphhelpers/QBangBoxLabeler.java +++ /dev/null @@ -1,24 +0,0 @@ -package quanto.gui.graphhelpers; - -import edu.uci.ics.jung.contrib.visualization.renderers.BangBoxLabelRenderer; -import java.awt.Component; -import java.awt.Font; -import javax.swing.JComponent; -import javax.swing.JLabel; - -/** - * - * @author alemer - */ -public class QBangBoxLabeler implements BangBoxLabelRenderer { - - private JLabel dummyLabel = new JLabel(); - - public QBangBoxLabeler() { - } - - public Component getBangBoxLabelRendererComponent(JComponent vv, - Object value, Font font, boolean isSelected, T edge) { - return dummyLabel; - } -} diff --git a/gui/src/quanto/gui/graphhelpers/QVertexAngleLabeler.java b/gui/src/quanto/gui/graphhelpers/QVertexAngleLabeler.java deleted file mode 100644 index c3850293..00000000 --- a/gui/src/quanto/gui/graphhelpers/QVertexAngleLabeler.java +++ /dev/null @@ -1,47 +0,0 @@ -package quanto.gui.graphhelpers; - -import edu.uci.ics.jung.visualization.renderers.VertexLabelRenderer; -import java.awt.Color; -import java.awt.Component; -import java.awt.Font; -import javax.swing.JComponent; -import javax.swing.JLabel; - -import quanto.core.data.Vertex; - -/** - * - * @author alemer - */ -public class QVertexAngleLabeler implements VertexLabelRenderer { - - private JLabel dummyLabel = new JLabel(); - private JLabel realLabel = new JLabel(); - - public QVertexAngleLabeler() { - realLabel.setOpaque(true); - } - - public Component getVertexLabelRendererComponent(JComponent vv, - Object value, Font font, boolean isSelected, T vertex) { - - if (value == null) { - return dummyLabel; - } else { - realLabel.setBackground(Color.white); - if (vertex instanceof Vertex) { - Vertex v = (Vertex) vertex; 
- // we render boundary labels differently - if (v.isBoundaryVertex()) { - return dummyLabel; - } - Color colour = v.getVertexType().getVisualizationData().getLabelColour(); - if (colour != null) { - realLabel.setBackground(colour); - } - } - realLabel.setText(value.toString()); - return realLabel; - } - } -} diff --git a/gui/src/quanto/gui/graphhelpers/QVertexColorTransformer.java b/gui/src/quanto/gui/graphhelpers/QVertexColorTransformer.java deleted file mode 100644 index ca6cde12..00000000 --- a/gui/src/quanto/gui/graphhelpers/QVertexColorTransformer.java +++ /dev/null @@ -1,25 +0,0 @@ -package quanto.gui.graphhelpers; - -import java.awt.Color; -import java.awt.Paint; -import org.apache.commons.collections15.Transformer; - -import quanto.core.data.Vertex; - -/** - * - * @author alemer - */ -public class QVertexColorTransformer implements Transformer { - - public QVertexColorTransformer() { - } - - public Paint transform(Vertex v) { - if (v.isBoundaryVertex()) { - return Color.lightGray; - } else { - return Color.blue; - } - } -} diff --git a/gui/src/quanto/gui/graphhelpers/QVertexIconTransformer.java b/gui/src/quanto/gui/graphhelpers/QVertexIconTransformer.java deleted file mode 100644 index 3ca7e21b..00000000 --- a/gui/src/quanto/gui/graphhelpers/QVertexIconTransformer.java +++ /dev/null @@ -1,20 +0,0 @@ -package quanto.gui.graphhelpers; - -import javax.swing.Icon; -import org.apache.commons.collections15.Transformer; -import quanto.core.data.Vertex; - -/** - * - * @author alemer - */ -public class QVertexIconTransformer implements Transformer { - - public Icon transform(Vertex input) { - if (input.isBoundaryVertex()) { - return null; - } else { - return input.getVertexType().getVisualizationData().getIcon(); - } - } -} diff --git a/gui/src/quanto/gui/graphhelpers/QVertexLabelTransformer.java b/gui/src/quanto/gui/graphhelpers/QVertexLabelTransformer.java deleted file mode 100644 index f7ad11b7..00000000 --- a/gui/src/quanto/gui/graphhelpers/QVertexLabelTransformer.java +++ /dev/null @@ -1,27 +0,0 @@ -package quanto.gui.graphhelpers; - -import org.apache.commons.collections15.Transformer; - -import quanto.core.data.Vertex; -import quanto.core.data.VertexType; -import quanto.core.data.TexConstants; - -/** - * - * @author alemer - */ -public class QVertexLabelTransformer implements Transformer { - - public QVertexLabelTransformer() { - } - - public String transform(Vertex v) { - if (v.isBoundaryVertex()) { - return v.getCoreName(); - } else if (v.getVertexType().hasData()) { - return v.getData().getDisplayString(); - } else { - return null; - } - } -} diff --git a/gui/src/quanto/gui/graphhelpers/QVertexRenderer.java b/gui/src/quanto/gui/graphhelpers/QVertexRenderer.java deleted file mode 100644 index 4eb28371..00000000 --- a/gui/src/quanto/gui/graphhelpers/QVertexRenderer.java +++ /dev/null @@ -1,21 +0,0 @@ -package quanto.gui.graphhelpers; - -import edu.uci.ics.jung.visualization.RenderContext; -import edu.uci.ics.jung.visualization.renderers.BasicVertexRenderer; -import java.awt.Shape; -import quanto.core.data.Edge; -import quanto.core.data.Vertex; - -/** - * - * @author alemer - */ -public class QVertexRenderer extends BasicVertexRenderer -{ - @Override - protected void paintShapeForVertex( - RenderContext rc, Vertex v, Shape shape) - { - super.paintShapeForVertex(rc, v, shape); - } -} diff --git a/gui/src/quanto/gui/graphhelpers/QVertexShapeTransformer.java b/gui/src/quanto/gui/graphhelpers/QVertexShapeTransformer.java deleted file mode 100644 index cb6b1559..00000000 --- 
a/gui/src/quanto/gui/graphhelpers/QVertexShapeTransformer.java +++ /dev/null @@ -1,29 +0,0 @@ -package quanto.gui.graphhelpers; - -import java.awt.Shape; -import java.awt.geom.Rectangle2D; -import javax.swing.JLabel; -import org.apache.commons.collections15.Transformer; - -import quanto.core.data.Vertex; - -/** - * - * @author alemer - */ -public class QVertexShapeTransformer implements Transformer { - - public QVertexShapeTransformer() { - } - - public Shape transform(Vertex v) { - if (v.isBoundaryVertex()) { - String text = v.getCoreName(); - double width = new JLabel(text).getPreferredSize().getWidth(); - width = Math.max(width, 14); - return new Rectangle2D.Double(-(width / 2), -7, width, 14); - } else { - return v.getVertexType().getVisualizationData().getShape(); - } - } -} diff --git a/gui/src/quanto/gui/resources/actions.xml b/gui/src/quanto/gui/resources/actions.xml deleted file mode 100644 index d62491ec..00000000 --- a/gui/src/quanto/gui/resources/actions.xml +++ /dev/null @@ -1,388 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/gui/src/quanto/util/FileUtils.java b/gui/src/quanto/util/FileUtils.java deleted file mode 100644 index 5ed265b3..00000000 --- a/gui/src/quanto/util/FileUtils.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * To change this template, choose Tools | Templates - * and open the template in the editor. - */ - -package quanto.util; - -import java.io.*; -import java.net.URL; -import java.util.logging.Level; -import java.util.logging.Logger; - -/** - * - * @author alex - */ -public final class FileUtils { - private final static Logger logger = Logger.getLogger("quanto.util.FileUtils"); - private FileUtils() {} - - public static String slurp(File file) throws IOException { - BufferedReader r = new BufferedReader(new FileReader(file)); - try - { - StringBuilder result = new StringBuilder(); - int c = 0; - while (c != -1) { - c = r.read(); - result.append((char)c); - } - return result.toString(); - } finally { - r.close(); - } - } - - public static void copy(File fromFile, File toFile) throws IOException { - if (!fromFile.isFile()) { - throw new IOException("Not a file: " + fromFile.getPath()); - } - copy(new FileInputStream(fromFile), toFile); - } - - public static void copy(URL fromURL, File toFile) throws IOException { - copy(fromURL.openStream(), toFile); - } - - public static void copy(InputStream from, File toFile) throws IOException { - - if (toFile.exists() && !toFile.isFile()) { - throw new IOException("Not a file: " + toFile.getPath()); - } - - OutputStream to = null; - try { - to = new FileOutputStream(toFile); - byte[] buffer = new byte[4096]; - int bytesRead; - - while ((bytesRead = from.read(buffer)) != -1) { - to.write(buffer, 0, bytesRead); // write - } - } finally { - try { - from.close(); - } catch (IOException e) { - logger.log(Level.FINE, "Failed to close input file", e); - } - if (to != null) { - try { - to.close(); - } catch (IOException e) { - logger.log(Level.FINE, "Failed to close output file", e); - } - } - } - } -} diff --git a/gui/src/quanto/util/StringNamer.java b/gui/src/quanto/util/StringNamer.java deleted file mode 100644 index eedb554b..00000000 --- a/gui/src/quanto/util/StringNamer.java +++ 
/dev/null @@ -1,32 +0,0 @@ -package quanto.util; - -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Simple unique namer for strings - */ -public class StringNamer { - - private static Pattern p = Pattern.compile("^(.+)-[0-9]+$"); - - public static String getFreshName(Set names, String tryName) { - if (!names.contains(tryName)) { - return tryName; - } - String newTry; - int idx = 1; - Matcher m = p.matcher(tryName); - if (m.matches()) { - tryName = m.group(1); - } // should be fine for small numbers of duplicate names. - while (true) { - newTry = tryName + "-" + Integer.toString(idx); - if (!names.contains(newTry)) { - return newTry; - } - idx++; - } - } -} diff --git a/oldcore/Makefile b/oldcore/Makefile deleted file mode 100644 index 90c01008..00000000 --- a/oldcore/Makefile +++ /dev/null @@ -1,80 +0,0 @@ -################################ -# variables you might want to set in your own profile setup... -################################ -### your local install of PolyML and isaplib -# POLYML_HOME=/home/ldixon/local/polyml-cvs-version -ISAPLIB_SOURCES=../../isaplib - -################################ -# general variables -################################ - -# polyml executable -POLYML=$(shell ../tools/findpoly.sh)/bin/poly -POLYFLAGS=-H 200 $(if $(FAST),--disable-tests) $(if $(V),--test-log-level=$(V)) - -################################ -# dynamic variables checked on call to make. -################################ -ML_SRC_FILES = $(shell find . | grep ".ML$$" | grep -v "test/protocol/" | grep -v "test/PROTOCOLTEST.ML$$" | grep -v "toplevel/") -COSY_ML_SRC_FILES = $(shell find ../cosy | grep ".ML$$") -ISAP_ML_SRC_FILES = $(ISAPLIB_SOURCES)/Makefile $(shell find $(ISAPLIB_SOURCES)/* | grep ".ML$$") - -################################ -# Fixed file locations -################################ -# quanto heap file -QUANTO_HEAP=heaps/quanto.heap -# binary file to produce -CORE_BIN=bin/quanto-core -ALL_BINS=$(CORE_BIN) bin/quanto-xml-to-json - -################################ -# Targets: -################################ -default: all - -$(QUANTO_HEAP): $(ML_SRC_FILES) $(ISAP_ML_SRC_FILES) $(COSY_ML_SRC_FILES) - @mkdir -p heaps - POLYFLAGS="$(POLYFLAGS)" ../tools/poly-build-heap -p -o $@ ROOT.ML - -heap-osx-dist: - echo 'use "build_heap.ML"' | ../scala/dist/osx-dist/poly - -heap-linux-dist: - echo 'use "build_heap.ML"' | ../scala/dist/linux-dist/poly - -heap-windows-dist: - echo 'use "build_heap.ML"' | ../scala/dist/windows-dist/poly.exe - -heap: $(QUANTO_HEAP) - -bin/%: toplevel/%.ML $(QUANTO_HEAP) $(shell find "toplevel/$%" | grep ".ML$$") - @mkdir -p bin - POLYFLAGS="$(POLYFLAGS)" ../tools/polyc -l $(QUANTO_HEAP) -o $@ $< - -all: $(ALL_BINS) - -protocol-tests: $(CORE_BIN) - $(POLYML) $(POLYFLAGS) -q --use "test/PROTOCOLTEST.ML" --core "$(CORE_BIN)" --log "test/protocol-tests.log" - -test check: protocol-tests - -# startup an ML shell using the quanto heap -ml-shell: $(QUANTO_HEAP) - @../tools/polyml-toplevel -l $< - -run: ml-shell - -# -localclean: - rm -f heaps/*.heap - rm -f bin/*.o - find . 
-type d -name .polysave | xargs rm -rf - rm -f $(ALL_BINS) - -clean: localclean - $(MAKE) -C $(ISAPLIB_SOURCES) clean - -veryclean: clean -vclean: veryclean diff --git a/oldcore/ROOT.ML b/oldcore/ROOT.ML deleted file mode 100644 index 84cbd431..00000000 --- a/oldcore/ROOT.ML +++ /dev/null @@ -1,229 +0,0 @@ -(* - * ROOT file to compile all libraries and quantomatic core - *) - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Compile isaplib code *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -val rootDir = OS.FileSys.getDir(); - -(* Include isaplib *) -OS.FileSys.chDir (rootDir ^ "/../../isaplib/"); -use "ROOT.ML"; -OS.FileSys.chDir rootDir; - -(*PolyML.Project.Log.level_ref := 4;*) -PolyML.Project.depend_on_files - [ - rootDir ^ "/../../isaplib/ROOT.ML", - rootDir ^ "/ROOT.ML", - rootDir ^ "/quanto-lib.ML" - ]; - - - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Local setup *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -(** Useful settings for debugging: **) - -(* PolyML.Compiler.debug := true; *) -(* PolyML.Project.Log.level_ref := 4; *) -(* Testing.Log.level_ref := 3; *) -(* print_depth 5 *) - -PolyML.Project.optional_make "local_config.ML"; - - -(* quanto-lib contains all of core graph/rule/match/rewrite - * functionality of quantomatic *) - -(*PolyML.Project.make "quanto-lib.ML";*) -use "quanto-lib.ML"; - - -(* tests from quanto-lib files *) - -Testing.make_test "expressions/test/linrat_angle_expr-test.ML"; -Testing.make_test "expressions/test/linrat_expr-test.ML"; -Testing.make_test "expressions/test/linrat_matcher-test.ML"; -Testing.make_test "io/test/linrat-json-test.ML"; -Testing.make_test "graph/test/test-graph-setup.ML"; -Testing.make_test "graph/test/test-nhd.ML"; -Testing.make_test "graph/test/test-ograph.ML"; -Testing.make_test "graph/test/test-bang-graph.ML"; -Testing.make_test "io/test/graph-json-test.ML"; -Testing.make_test "matching/test/test-matching-setup.ML"; -Testing.make_test "matching/test/test-sg-to-sg-matching.ML"; -Testing.make_test "matching/test/test-bg-to-sg-matching.ML"; -Testing.make_test "matching/test/test-bg-to-bg-matching.ML"; -Testing.make_test "matching/test/test-concrete-matcher.ML"; -Testing.make_test "matching/test/test-greedy-matcher.ML"; -Testing.make_test "matching/test/test-bang-graph-homeomorphism-search.ML"; -Testing.make_test "rewriting/test/rule-test.ML"; -Testing.make_test "theories/test/ruleset-test.ML"; -Testing.make_test "rewriting/test/rewriter-test.ML"; -Testing.make_test "rewriting/test/ruleset_rewriter-tests.ML"; - -(* - * Theories - *) -(* construction of everything in a graphical theory from just param *) -PolyML.Project.make "io/graph_component_io.ML"; -PolyML.Project.make "theories/graphical_theory.ML"; -PolyML.Project.make "io/graphical_theory_io.ML"; - -(* string vertex/edge graphs *) -PolyML.Project.make "theories/string_ve/data.ML"; -PolyML.Project.make "theories/string_ve/io.ML"; -PolyML.Project.make "theories/string_ve/theory.ML"; -PolyML.Project.make "theories/string_ve/test/test.ML"; - -(* red-green specific vertices, graphs and matching *) -(* graph-derived expressions for R-G graphs *) -PolyML.Project.make "theories/red_green/data.ML"; -PolyML.Project.make "theories/red_green/io.ML"; -PolyML.Project.make "theories/red_green/theory.ML"; -Testing.make_test "theories/red_green/test/test.ML"; -Testing.make_test "theories/red_green/rg_mathematica.ML"; - -(* ghz-w specific vertices, 
graphs, and matching *) -PolyML.Project.make "theories/ghz_w/data.ML"; -PolyML.Project.make "theories/ghz_w/io.ML"; -PolyML.Project.make "theories/ghz_w/theory.ML"; -Testing.make_test "theories/ghz_w/test/test.ML"; - -(* Graphs having vertices with strings as data, substring as matching *) -PolyML.Project.make "theories/substrings/data.ML"; -PolyML.Project.make "theories/substrings/io.ML"; -PolyML.Project.make "theories/substrings/theory.ML"; -Testing.make_test "theories/substrings/test/test.ML"; - -(* Graphs having strings as types, linrat as data and both substrings and linrat - * as matching *) -PolyML.Project.make "theories/substr_linrat/data.ML"; -PolyML.Project.make "theories/substr_linrat/io.ML"; -PolyML.Project.make "theories/substr_linrat/theory.ML"; -Testing.make_test "theories/substr_linrat/test/test.ML"; - -(* rgb specific vertices, graphs, and matching *) -PolyML.Project.make "theories/red_green_blue/data.ML"; -PolyML.Project.make "theories/red_green_blue/io.ML"; -PolyML.Project.make "theories/red_green_blue/theory.ML"; -Testing.make_test "theories/red_green_blue/test/test.ML"; - -(* petri specific vertices, graphs, and matching *) -PolyML.Project.make "theories/petri/data.ML"; -PolyML.Project.make "theories/petri/io.ML"; -PolyML.Project.make "theories/petri/theory.ML"; -Testing.make_test "theories/petri/test/test.ML"; - -(* Tactics as Graphs in Isabelle *) -PolyML.Project.make "theories/isaplanner_rtechn/data.ML"; -PolyML.Project.make "theories/isaplanner_rtechn/io.ML"; -PolyML.Project.make "theories/isaplanner_rtechn/theory.ML"; -Testing.make_test "theories/isaplanner_rtechn/test/test.ML"; - - -(* Pair of dots with rational expressions *) -PolyML.Project.make "theories/rational_pair/data.ML"; -PolyML.Project.make "theories/rational_pair/io.ML"; -PolyML.Project.make "theories/rational_pair/theory.ML"; -Testing.make_test "theories/rational_pair/test/test.ML"; - - -(* - * Descrimination nets - *) -PolyML.Project.make "dnets/DNetsLib.ML"; -PolyML.Project.make "dnets/Literal.ML"; -PolyML.Project.make "dnets/Contour.ML"; -PolyML.Project.make "dnets/ContourList.ML"; -PolyML.Project.make "dnets/TopDNet.ML"; - -Testing.make_test "dnets/test.ML"; - -(* Performance tests for dnets *) -(*Testing.make_test "dnets/perf.ML";*) - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Compile quantocosy *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - - -(* Include cosy *) -val rootDir = OS.FileSys.getDir(); -OS.FileSys.chDir (rootDir ^ "/../cosy/"); -use "ROOT.ML"; -OS.FileSys.chDir rootDir; -PolyML.Project.depend_on_files [rootDir ^ "/../cosy/ROOT.ML"]; - - - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Compile the controller *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - - -(* Overall controller for theories *) -(* PolyML.Project.make "interface/controller_state.ML"; (* control state for quanto *) -PolyML.Project.make "interface/controller.ML"; (* commands *) -PolyML.Project.make "interface/controller_registry.ML"; (* theory lists *) - -(* interface protocol/console *) -PolyML.Project.make "interface/control_interface.ML"; (* generic interface for run_in_textstreams *) - -PolyML.Project.make "interface/console_commands.ML"; (* console commands and help *) -PolyML.Project.make "interface/console_lexer.ML"; (* lexer for quanto console *) -PolyML.Project.make "interface/console.ML"; (* generic protocol using commands *) -PolyML.Project.make 
"interface/console_interface.ML"; (* generic protocol using commands *) -PolyML.Project.make "interface/protocol.ML"; (* protocol for tools *) *) - - - -(* new modular controller *) - -PolyML.Project.make "json_interface/controller_util.ML"; -PolyML.Project.make "json_interface/controller_module.ML"; -PolyML.Project.make "json_interface/modules/test.ML"; -PolyML.Project.make "json_interface/modules/rewrite.ML"; -PolyML.Project.make "json_interface/modules/simplify.ML"; -PolyML.Project.make "json_interface/controller.ML"; -PolyML.Project.make "json_interface/controller_registry.ML"; -PolyML.Project.make "json_interface/protocol.ML"; -PolyML.Project.make "json_interface/run.ML"; - - -(* some combinators and shorthand functions for simprocs *) -PolyML.Project.make "rewriting/simp_util.ML"; -PolyML.Project.make "theories/red_green/rg_simp_util.ML"; - - -(* - * Top-level testing - *) -Testing.make_test "test/old-rg-ruleset-rewriter-tests.ML"; -Testing.make_test "test/regression-tests.ML"; -(* Metrics tests depend on Substrings_Theory *) -(*Testing.make_test "metrics/test/test.ML";*) -Testing.make_test "example_code/ROOT.ML"; - - - -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) -(* Update heaps *) -(* -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- *) - -PolyML.Project.Log.log 2 "checking if heap is newer than edits..."; - -if PolyML.Project.heap_is_newer_than_edits "heaps/quanto_child.heap" -then PolyML.Project.Log.log 2 "NO" -else - (PolyML.Project.Log.log 2 "YES...building child heap..."; - PolyML.Project.ensure_at_latest_heap (); - PolyML.Project.save_project_child_state "heaps/quanto_child.heap"); - -PolyML.Project.Log.log 2 "done."; - diff --git a/oldcore/build_heap.ML b/oldcore/build_heap.ML deleted file mode 100644 index 603737ae..00000000 --- a/oldcore/build_heap.ML +++ /dev/null @@ -1,7 +0,0 @@ -val _ = PolyML.Compiler.printDepth := 0; -val _ = PolyML.exception_trace (fn () => PolyML.use "ROOT.ML") - handle _ => OS.Process.exit OS.Process.failure; -PolyML.Project.ensure_at_latest_heap (); -PolyML.fullGC (); -PolyML.SaveState.saveState "heaps/quanto.heap"; -val _ = OS.Process.exit OS.Process.success; diff --git a/oldcore/buildheap.bat b/oldcore/buildheap.bat deleted file mode 100644 index 3c7c8329..00000000 --- a/oldcore/buildheap.bat +++ /dev/null @@ -1 +0,0 @@ -..\scala\dist\windows-dist\poly.exe --use build_heap.ML diff --git a/oldcore/dev/algebra.ML b/oldcore/dev/algebra.ML deleted file mode 100644 index 1a925507..00000000 --- a/oldcore/dev/algebra.ML +++ /dev/null @@ -1,265 +0,0 @@ - - -fun list_of 0 x = [] - | list_of n x = x::(list_of (n-1) x) - -signature RING = -sig - type T - val zero : T - val one : T - val is_zero : T -> bool - val is_one : T -> bool - val + : (T * T) -> T - val * : (T * T) -> T - val ~ : T -> T - val compare : T * T -> General.order - val toString : T -> string - val pretty : T -> Pretty.T - val print : T -> unit -end - -signature FIELD = -sig - include RING - val invert : T -> T - val / : (T*T) -> T - - (* if not a complex field, let conj = id *) - val conj : T -> T -end - -signature POLY_RING = -sig - include RING; - structure URing : RING; - val mono : URing.T * int * URing.T -> T; - val const : URing.T -> T; - val reduce : T -> T; -end - -signature INDET_NAMER = -sig - val indet_name : int -> string -end - -structure IntRing : RING = -struct -open Int - -type T = int -val zero = 0 -val one = 1 -fun is_zero 0 = true | is_zero _ = false -fun is_one 1 = true | is_one _ = false -fun pretty i = 
Pretty.str (toString i) -val print = Pretty.writeln o pretty -end - - -structure RationalField : FIELD = -struct -open Rational -val op+ = uncurry add -val op* = uncurry mult -val one = (1,1) -val zero = (0,1) -val op~ = subtr zero -val conj = fn x => x -val compare = ord -val op/ = uncurry (Rational.div) -fun invert x = one / x -val toString = Pretty.string_of o pretty -end - -structure RealField : FIELD = -struct -open Real - -type T = real -val zero = 0.0 -val one = 1.0 -fun invert r = 1.0 / r -fun conj r = r -fun is_zero r = (compare (r,zero))=EQUAL -fun is_one r = (compare (r,one))=EQUAL -fun pretty i = Pretty.str (toString i) -val print = Pretty.writeln o pretty -end - - -functor PolyRingFun ( - structure URing : RING - and IndetNamer : INDET_NAMER - ) : POLY_RING = -struct - -structure URing = URing; -type T = (URing.T * URing.T list) list - -fun indet_name i = "X[" ^ (Int.toString i) ^ "]" - - -fun add_powers [] [] = [] - | add_powers [] x = add_powers x [] - | add_powers (p::ps) [] = p::add_powers ps [] - | add_powers (p::ps) (q::qs) = (URing.+(p,q))::add_powers ps qs -fun compare_powers (ps,qs) = List.collate (URing.compare) (ps, qs) -fun compare_monos ((k,ps),(l,qs)) = - case compare_powers (ps,qs) - of EQUAL => URing.compare(k,l) - | ord => ord - -fun reduce_powers' [] = [] - | reduce_powers' (p::ps) = if (URing.is_zero p) then reduce_powers' ps - else p::ps -fun reduce_powers l = rev(reduce_powers'(rev l)) - -fun reduce' ((k,ps')::(l,qs')::poly) = - if URing.is_zero k then reduce' ((l,qs')::poly) - else - let - val ps = reduce_powers ps' - val qs = reduce_powers qs' - in - if compare_powers (ps, qs) = EQUAL then reduce' ((URing.+(k,l),ps)::poly) - else (k,ps)::reduce' ((l,qs)::poly) - end - | reduce' [(k,ps)] = if URing.is_zero k then [(URing.zero,[])] else [(k,ps)] - | reduce' poly = poly -fun multiply_monos (k,ps) (l,qs) = (URing.*(k,l), add_powers ps qs) - -fun power_string [] _ = "" - | power_string (p::ps) x = - (if (URing.is_zero p) then "" - else - (IndetNamer.indet_name x)^ - (if URing.is_one p then "" else "^"^(URing.toString p))^ - (case ps of [] => "" | _ => "*")) - ^(power_string ps (x+1)) - -fun mono_toString (k,p) = if (URing.is_one k) then power_string p 0 - else (URing.toString k)^"*"^(power_string p 0) - -val minus_one = [(URing.~(URing.one),[])] - -fun mono (k,x,p) = - [if URing.is_zero p then (k,[]) - else (k, (list_of x URing.zero) @ [p])] - -fun sort_terms poly = sort compare_monos poly -val reduce = reduce' o sort_terms -fun const i = [(i,[]:URing.T list)] - -(* RING functions *) -val zero = [(URing.zero,[])] -val one = [(URing.one,[])] -fun (op+) (p1,p2) = reduce (p1 @ p2) -fun (op*) (p1,p2) = reduce (flat (map (fn x => map (multiply_monos x) p2) p1)) -fun (op~) poly = poly * minus_one -fun compare (p1, p2) = List.collate (compare_monos) (p1,p2) -fun is_zero poly = compare (poly, zero) = EQUAL -fun is_one poly = compare (poly, one) = EQUAL - - -fun toString' [] = "" - | toString' [m] = mono_toString m - | toString' (m::ms) = mono_toString m ^ " + " ^ (toString' ms) -fun toString p = "(" ^ (toString' p) ^ ")" -fun pretty poly = Pretty.str (toString poly) -val print = Pretty.writeln o pretty -end - - -functor ModuleFun (structure URing : RING) = -struct - -structure URing = URing -type T = URing.T list list - -fun dot_product v1 v2 = fold2 (fn x => fn y => fn sum => (URing.+(URing.*(x,y),sum))) v1 v2 URing.zero - -local - fun split_col [] h t = (rev h,rev t) - | split_col ([]::rows) _ _ = ([],[]) - | split_col ((e::cols)::rows) h t = split_col rows (e::h) 
(cols::t) - - fun mult' [] _ = [] - | mult' (r::rs) cs = let - val mrow = foldr (fn (c,row) => (dot_product r c)::row) [] cs - in (mrow)::(mult' rs cs) - end -in -fun transpose m = case (split_col m [] []) - of ([],[]) => [] - | (col,rest) => col::(transpose rest) - -fun mult m1 m2 = mult' m1 (transpose m2) -end (* local *) -fun map f m = List.map (fn row => List.map f row) m -fun scalar_mult k m = map (fn e => URing.*(k,e)) m -fun fold cmb_x base_x cmb_y base_y m = - Basics.fold cmb_y (List.map (fn row => Basics.fold cmb_x row base_x) m) base_y -fun merge_y m1 m2 = m1 @ m2 -fun merge_x m1 m2 = transpose ((transpose m1) @ (transpose m2)) -fun flatten m = fold merge_x [] merge_y [] m -fun sum_of_squares m = fold (fn x => fn s => URing.+(URing.*(x,x),s)) URing.zero (curry URing.+) URing.zero m -fun kronecker_product m1 m2 = flatten (map (fn a => scalar_mult a m2) m1) - -fun pretty m = Pretty.chunks (List.map (Pretty.list "[" "]") (map (URing.pretty) m)); -val print = Pretty.writeln o pretty - -end (* structure Matrix *) - - -functor MatrixFun (structure UField : FIELD) = -struct - -structure UField = UField -structure URing = UField -structure MF = ModuleFun(structure URing = URing) -open MF - -fun adjoint m = map (UField.conj) (transpose m) - -end - -(* -structure PolyRingInt = PolyRingFun(structure URing = IntRing - and IndetNamer = struct - fun indet_name i = case i - of 0=>"I"| 1=>"(pi)" - | 2=>"A"| 3=>"B" | 4=>"C" | 5 => "D" - | _=>"X["^(Int.toString i)^"]" - end) - -structure ExpRingInt = PolyRingFun(structure URing = PolyRingInt - and IndetNamer = struct - fun indet_name i = case i - of 0=>"e" - | _=>"X["^(Int.toString i)^"]" - end) - - -structure PRI = PolyRingInt; -structure ERI = ExpRingInt; - -val p1 = PRI.+(PRI.mono(2,0,2),PRI.mono(3,0,3)) -val p2 = PRI.+(PRI.mono(3,1,4),PRI.mono(5,1,2)) - -fun angle exp = - ERI.mono(PRI.one, 0, - PRI.*([(IntRing.one,[1])],exp)) - - -val e1 = ERI.mono(PRI.const 1,0,p1) -val e2 = ERI.mono(PRI.const 4,0,p2) - -structure IMatrix = ModuleFun(structure URing = IntRing) -structure PMatrix = ModuleFun(structure URing = PRI) -val ident = [[1,0],[0,1]] -val matr = [[2,3],[4,5]] -val matr2 = [[6,7],[8,9]] - -val pmat = [[p1,p2],[PRI.const 1,PRI.const 5]] -*) diff --git a/oldcore/dev/hilb2.ML b/oldcore/dev/hilb2.ML deleted file mode 100644 index b53a0990..00000000 --- a/oldcore/dev/hilb2.ML +++ /dev/null @@ -1,110 +0,0 @@ -(* construct hilbert term from a DAG *) - -signature GRAPH_ADAPTER = -sig - structure Graph : GRAPH - datatype term = - Tens of term list | - Comp of term list | - V of V.name | - Sigma of Permutation.T | - Id of int - (* a component holds a term and a list of inputs in order *) - type component = E.name list * term * E.name list - - val translate_vertex : Graph.T -> V.name -> component -end - - -functor TensorTermFun (structure GraphAdapter : GRAPH_ADAPTER) = -struct - -structure GraphAdapter = GraphAdapter -open GraphAdapter - -(* tensor a list of components, gathering inputs *) -fun tensor [] = ([],Id 0,[]) - | tensor [x] = x - | tensor clist = fold_rev (fn (i1,t,o1) => - fn (i1s, Tens ts, o1s) => - (i1@i1s, Tens (t::ts), o1@o1s)) - clist ([], Tens [], []) -val flat_compose = let - fun fc [] = [] - | fc ((Comp l1)::l2) = l1 @ fc l2 - | fc (t::ts) = t :: fc ts -in Comp o fc -end - -(* perform the composition t1 ; t2 *) -fun compose (i1, t1, o1) (_, Id _, _) = (i1, t1, o1) - | compose (i1, t1, o1) (i2, t2, o2) = let - val oset = E.NSet.of_list o1 - val new_ins = filter_out (E.NSet.contains oset) i2 - val full_t1 = case length new_ins - 
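(* [Editor's note: worked example, not part of the deleted file.]
   compose (i1,t1,o1) (i2,t2,o2) builds the sequential composite "t1 then t2".
   For example, composing the components
       ([a], V v1, [b, c])     -- v1 takes edge a and produces edges b and c
       ([c, b], V v2, [d])     -- v2 consumes c and b and produces d
   gives new_ins = [], since every input of v2 is an output of v1, so full_t1
   is just V v1; get_perm [b, c] [c, b] is the transposition, and the result is
       ([a], Comp [V v1, Sigma swap, V v2], [d]).
   If t2 had inputs that t1 does not produce, they would be threaded past t1
   by tensoring it with an identity of that width (the Id n case) and added to
   the inputs of the composite.  of_graph applies this rank by rank over the
   DAG obtained from the graph. *)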
of 0 => t1 - | n => (case t1 of Tens ts => Tens (ts@[Id n]) - | _ => Tens [t1, Id n]) - val perm = Permutation.get_perm E.name_ord (o1 @ new_ins) i2 - val sigma = if Permutation.is_id perm then [] else [Sigma perm] -in - (i1 @ new_ins, flat_compose (full_t1 :: sigma @ [t2]), o2) -end - - -fun of_graph graph = let - val dag = Graph.convert_to_dag graph - val ranks = Graph.get_dag_ranks dag - val tens_rank = tensor o (map (translate_vertex dag)) -in #2 (fold_rev compose (map tens_rank ranks) ([], Id 0, [])) -end - - -fun pretty_text (V n) = V.pretty_name n - | pretty_text (Id num) = Pretty.str ("id(" ^ (Int.toString num) ^ ")") - | pretty_text (Sigma perm) = Pretty.block[Pretty.str "sig", - Pretty.str_list - "[" "]" - (map Int.toString perm)] - | pretty_text (Comp lst) = Pretty.block - ([Pretty.str "("]@ - (Pretty.separate - " o" (map pretty_text (rev lst)))@ - [Pretty.str ")"]) - | pretty_text (Tens lst) = Pretty.block - ([Pretty.str "("]@ - (Pretty.separate - " x" (map pretty_text lst))@ - [Pretty.str ")"]) - -val print = Pretty.writeln o pretty_text -fun printc (_,t,_) = print t -end - - -structure RGGraphAdapter : GRAPH_ADAPTER = -struct -structure Graph = RGGraph -datatype term = - Tens of term list | - Comp of term list | - V of V.name | - Sigma of Permutation.T | - Id of int - -(* a component holds a term and a list of inputs in order *) -type component = E.name list * term * E.name list - -fun translate_vertex graph v = let - val (ie,oe) = ((apfst E.NSet.list_of) o - (apsnd E.NSet.list_of) o - snd) (Graph.get_vertex graph v) -in (ie,V v,oe) -end - -end - - -structure RGHilbTerm = TensorTermFun(structure GraphAdapter = RGGraphAdapter) - - diff --git a/oldcore/dev/maxima.ML b/oldcore/dev/maxima.ML deleted file mode 100644 index 040e0b1f..00000000 --- a/oldcore/dev/maxima.ML +++ /dev/null @@ -1,137 +0,0 @@ - -(* Call out to the computer algebra system Maxima *) - -signature MAXIMA_PROCESS = -sig -type T -exception Maxima of string -val flush : T -> T -val new : unit -> T -val kill : T -> unit -val eval : T -> string -> string -end - -structure MaximaProcess : MAXIMA_PROCESS = -struct - -exception Maxima of string -type T = (TextIO.instream, TextIO.outstream) Unix.proc -fun flush pr = let - val (istr,ostr) = Unix.streamsOf pr - val random_str = (Int.toString o round o Library.random) () - val rec chomp = - (fn () => - if String.isSubstring - ("FLUSH"^random_str) - (the (TextIO.inputLine istr)) then - TextIO.inputLine istr - else chomp()) -in - (TextIO.output (ostr, "print (\"FLUSH"^random_str^"\");\n");chomp();pr) -end - -fun new () = flush (Unix.execute ("/usr/local/bin/maxima",[])) -fun kill pr = Unix.kill(pr,9) - -fun read_grind [] str = raise Maxima str - | read_grind (c::cs) str = case c - of #"$" => [] - | _ => c::(read_grind cs str) -fun eval pr str = let - val (istr,ostr) = Unix.streamsOf pr - val _ = TextIO.output (ostr, "v:("^str^");\n"); - val _ = flush pr - val _ = TextIO.output (ostr, "grind(v);\n") - val _ = TextIO.inputLine istr - val ln = the (TextIO.inputLine istr) -in String.implode (read_grind (String.explode ln) ln) -end - - - -end - - -(* leave this here to debug, so we don't get a million procs *) -val _ = MaximaProcess.kill MaximaExpression.root_proc - - - -(* -structure MaximaExpression -================================================================================ -Evaluate maxima expressions with a single underlying process in a -thread-safe manner. 
- -Note MaximaExpression respects the signatures RING and FIELD -*) - -structure MaximaExpression = -struct - -type T = string - -val root_proc = MaximaProcess.new () -val root_proc_m = Mutex.mutex (); -fun eval str = (Mutex.lock root_proc_m; - (MaximaProcess.eval root_proc str) - before Mutex.unlock root_proc_m) -handle MaximaProcess.Maxima str => - (Mutex.unlock root_proc_m; "MAXIMA_ERROR:"^str) - - -val _ = eval ("match(pat,tgt) := (vs:listofvars(pat),"^ - " if vs=[] then (if pat=tgt then [] else NO_MATCH) "^ - " else"^ - " first(solve(map(lambda([v],first(solve(pat=tgt,v))),vs),vs))"^ - ")") - -fun match pat tgt = case eval ("match("^pat^","^tgt^")") - of "NO_MATCH" => NONE - | str => SOME str -fun do_subs subs expr = eval("ev("^expr^","^subs^")") - -fun fromString str = eval str -val fromInt = fromString o Int.toString -fun (op+) (s1,s2) = eval ("("^s1^")+("^s2^")") -fun (op*) (s1,s2) = eval ("("^s1^")*("^s2^")") -fun (op/) (s1,s2) = eval ("("^s1^")/("^s2^")") -fun (op~) s = eval ("-("^s^")") -fun invert s = "1" / s -fun conj s = eval ("conjugate("^s^")") - -val compare = String.compare -val zero = "0" -val one = "1" -fun is_zero exp = ((compare (exp, zero)) = EQUAL) -fun is_one exp = ((compare (exp, one)) = EQUAL) -val toString : T -> string = fn s => s -val pretty = Pretty.str o toString -val print = Pretty.writeln o pretty -end - -structure ME = MaximaExpression - -(* -use "alebra.ML" for this stuff -================================================== -*) -structure ME = MaximaExpression; -structure Matr = MatrixFun(structure UField = ME); - -val angle = Matr.map (ME.fromString) [["1"], - ["%e^(%i*A)"]] -val deltaZ = Matr.map (ME.fromString) - [["1","0"], - ["0","0"], - ["0","0"], - ["0","1"]] - -val ident = Matr.map (ME.fromString) - [["1","0"], - ["0","1"]] - -val phase = Matr.mult - (Matr.transpose deltaZ) - (Matr.kronecker_product ident angle) - diff --git a/oldcore/dnets/.gitignore b/oldcore/dnets/.gitignore deleted file mode 100644 index 4b7c4ed0..00000000 --- a/oldcore/dnets/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/*.txt diff --git a/oldcore/dnets/Contour.ML b/oldcore/dnets/Contour.ML deleted file mode 100644 index 4961b36f..00000000 --- a/oldcore/dnets/Contour.ML +++ /dev/null @@ -1,391 +0,0 @@ -signature CONTOUR = -sig - type T - structure LIB : TDNET_LIBRARY - structure G : BANG_GRAPH - structure L : LITERAL - - (* CONSTRUCTORS *) - val empty : T - val mk : G.T -> T -> T - val mk_first_contour : G.T -> V.name -> T - val target_function : G.T -> T - val add_literal : T -> L.T -> T - - (* GETTERS *) - val get_first_literal : T -> L.T - - val get_contour_mult_none : T -> T - val get_contour_mult_star : T -> T - val get_contour_mult_qm : T -> T - val get_contour_mult_star_or_qm : T -> T - - val contains_boundary : T -> bool - - (* COMPARISON FUNCTIONS *) - val eq : T * T -> bool - val equiv : T * T -> bool - val check_strong_compatibility : T * T -> bool - val check_weak_compatibility : T * T -> bool - - (* COMPARISON-EQUALITY FUNCTIONS *) - val subtract_eq_contour: T * T -> T - val intersect_eq_contours: T * T -> T - val complement_eq_contour: T * T -> T - val remove_eq_duplicate : T -> T - val is_eq_literal_contained : L.T * T -> bool - - (* COMPARISON-MATCHING FUNCTIONS *) - val is_matching_contour_contained : T * T -> bool - - (* PRINT FUNCTION *) - val printout : T -> string - -end - - - -functor Contour ( G : BANG_GRAPH ) : CONTOUR = -struct - - structure G = G - structure LIB = TDNet_Library - structure L = Literal(G) - - type T = L.T list; - - val empty = []; - - - - 
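(* [Editor's note: overview, not part of the deleted file.]
   A contour is a list of literals, one per node-vertex, and a contour list is
   roughly the sequence of breadth-first layers of a graph: the first contour
   holds a single starting vertex, and each later contour collects the
   vertices adjacent to the previous contour that have not been used yet.
   For a path v1 - v2 - v3 explored from v2 the contour list is
       [ [v2], [v1, v3] ].
   Three relations on contours are used below: eq (same node-vertices; used
   while building contours), equiv (same shape up to renaming; used so that
   equivalent contours share a node of the discrimination tree), and
   strong/weak compatibility (used when pruning the tree against a target
   graph). *)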
(* This function gets the first node in a contour *) - fun get_first_literal [] = raise ERROR "Contour is empty" - | get_first_literal (l::[]) = l - | get_first_literal (l::literals) = l - - (* This function returns a contout containing only the literal with multiplicity equal to None *) - fun get_contour_mult_none([]) = empty - | get_contour_mult_none(l::[]) = if L.is_kind_mult_none(l) then l::[] else [] - | get_contour_mult_none(l::ls) = if L.is_kind_mult_none(l) then l::get_contour_mult_none(ls) else get_contour_mult_none(ls) - - (* This function returns a contout containing only the literal with multiplicity equal to Star *) - fun get_contour_mult_star([]) = empty - | get_contour_mult_star(l::[]) = if L.is_kind_mult_star(l) then l::[] else [] - | get_contour_mult_star(l::ls) = if L.is_kind_mult_star(l) then l::get_contour_mult_star(ls) else get_contour_mult_star(ls) - - (* This function returns a contout containing only the literal with multiplicity equal to QM *) - fun get_contour_mult_qm([]) = empty - | get_contour_mult_qm(l::[]) = if L.is_kind_mult_qm(l) then l::[] else [] - | get_contour_mult_qm(l::ls) = if L.is_kind_mult_qm(l) then l::get_contour_mult_qm(ls) else get_contour_mult_qm(ls) - - (* This function returns a contout containing only the literal with multiplicity equal to Star or QM *) - fun get_contour_mult_star_or_qm(c) = flat (get_contour_mult_star(c) :: get_contour_mult_qm(c) :: []) - - (* This function checks if a boundary contains at least one boundary node-vertex *) - fun contains_boundary ([]) = false - | contains_boundary (l::[]) = L.is_boundary l - | contains_boundary (l::ls) = if L.is_boundary l then true else contains_boundary ls - - (* This function checks if a boundary contains at least two boundary node-vertex *) - fun contains_two_boundaries([]) = false - | contains_two_boundaries(l::[]) = false - | contains_two_boundaries(l::ls) = if L.is_boundary l then contains_boundary ls else contains_two_boundaries ls - - - (* This function finds if a literal l is contained in a contour c *) - fun is_eq_literal_contained(l,c) = LIB.is_contained L.eq l c - - (* This function removes a literal l from a contour c, given that l is contained in c *) - (* PRECONDITION: is_eq_literal_contained(l,c) = true *) - fun remove_eq_literal_in_contour(l,c) = LIB.rm_element L.eq l c - - (* This function subtracts c2 from c1, c1-c2, given that c2 is contained in c1 *) - (* PRECONDITION: c2 is contained in c1 *) - fun subtract_eq_contour(c1,c2) = LIB.sub_x_y L.eq c1 c2 - - (* This function finds the contour given by the intersection of two contours c1 and c2 *) - (* PRECONDITION: remove_eq_duplicate() has been executed *) - fun intersect_eq_contours(c1,c2) = LIB.intersect_x_y L.eq c2 c1 - - (* This function computes the complement of c1 wrt c2, c2 - (c1 intersection c2) *) - fun complement_eq_contour(c1,c2) = LIB.compl_x_y L.eq c2 c1 - - (* This function removes duplicates in a contour c *) - fun remove_eq_duplicate(c) = LIB.rm_duplicates L.eq c - - (* This function checks if two contours c1 and c2 are equal*) - fun is_eq_contour([],[]) = true - | is_eq_contour(c1::[],[]) = false - | is_eq_contour([],c2::[]) = false - | is_eq_contour(c1::c1s,[]) = false - | is_eq_contour([],c2::c2s) = false - | is_eq_contour(c1::[],c2::[]) = if L.eq(c1,c2) then true else false - | is_eq_contour(c1::c1s,c2::[]) = false - | is_eq_contour(c1::[],c2::c2s) = false - | is_eq_contour(c1::c1s,c2::c2s) = - if L.eq(c1,c2) then is_eq_contour (c1s,c2s) - else if is_eq_literal_contained(c1,c2s) then 
is_eq_contour(c1s,c2::remove_eq_literal_in_contour(c1,c2s)) else false - - (* This function checks for equality between c1 and c2, that is if c1 and c2 are equal contours *) - (* TRUE iff c1 and c2 contains equal literals in any order; they must be generated by the same node-vertices *) - (* EQUALITY is used when building contours in order to build a single literal for every node-vertex *) - fun eq (c1,c2) = - if (length c1 = length c2) then is_eq_contour(c1,c2) - else false - - - - (* This function finds if a literal l is contained in a contour c *) - fun is_equiv_literal_contained(l,c) = LIB.is_contained L.equiv l c - - (* This function removes a literal l from a contour c, given that l is contained in c *) - (* PRECONDITION: is_equiv_literal_contained(l,c) = true *) - fun remove_equiv_literal_in_contour(l,c) = LIB.rm_element L.equiv l c - - (* This function checks if two contours c1 and c2 are equivalent *) - fun is_equiv_contour([],[]) = true - | is_equiv_contour(c1::[],[]) = false - | is_equiv_contour([],c2::[]) = false - | is_equiv_contour(c1::c1s,[]) = false - | is_equiv_contour([],c2::c2s) = false - | is_equiv_contour(c1::[],c2::[]) = if L.equiv(c1,c2) then true else false - | is_equiv_contour(c1::c1s,c2::[]) = false - | is_equiv_contour(c1::[],c2::c2s) = false - | is_equiv_contour(c1::c1s,c2::c2s) = - if L.equiv(c1,c2) then is_equiv_contour (c1s,c2s) - else if is_equiv_literal_contained(c1,c2s) then is_equiv_contour(c1s,c2::remove_equiv_literal_in_contour(c1,c2s)) else false - - (* This function checks for equivalence between c1 and c2, that is if c1 and c2 are equivalent contours *) - (* TRUE iff c1 and c2 contains equivalent literals in any order; they can be generated by different node-vertices *) - (* EQUIVALENCE is used when building the tree in order identify contours which are equivalent and share a node in the tree *) - fun equiv (c1,c2) = - if (length c1 = length c2) then is_equiv_contour(c1,c2) - else false - - - - - - (* HELPER FUNCTIONS to build a literal *) - (* This functions checks if all the literals l in a contour are in a bang-box *) - fun all_bang(g, []) = false - | all_bang(g, l::[]) = G.is_bboxed g (L.get_name(l)) - | all_bang(g, l::ls) = - if G.is_bboxed g (L.get_name(l)) then all_bang(g, ls) - else false - - (* This functions checks if there is a literals l in a contour which is in a bang-box *) - fun exists_bang(g, []) = false - | exists_bang (g, l::[]) = G.is_bboxed g (L.get_name(l)) - | exists_bang (g, l::ls) = - if G.is_bboxed g (L.get_name(l)) then true - else exists_bang(g,ls) - - (* This function recomputes the multiplicity of a literal l in a graph g given the previous contour pc *) - fun set_kind_mult(g,pc,l) = - let - val adj = L.get_adj g l - val adj_in_contour = intersect_eq_contours(pc,adj) - in - if - G.is_bboxed g (L.get_name(l)) then L.mult_star - else if all_bang(g, adj_in_contour) then L.mult_qm - else L.mult_none - end - - (* This function recomputes the input arity evaluating the number of bang-boxes in the graph g having out-edges going to the literal l*) - fun set_input_arity(g,l) = - let - val pred = L.get_pred g l - - val bboxed = G.get_bboxed(g) - val bboxed = V.NSet.list_of(bboxed) - val g_list = replicate (length bboxed) g - val bboxed = map2 L.mk g_list bboxed - - val pred = complement_eq_contour(bboxed,pred) - in - length pred - end - - (* This function recomputes the input multiplicity of a literal l in a graph g given the previous contour pc *) - fun set_input_mult(g,pc,l) = - let - val prec = L.get_pred g l - in - if 
exists_bang(g,prec) then L.mult_star - else L.mult_none - end - - (* This function recomputes the output arity evaluating the number of bang-boxes in the graph g having in-edges coming from the literal l*) - fun set_output_arity(g,l) = - let - val succ = L.get_succ g l - - val bboxed = G.get_bboxed(g) - val bboxed = V.NSet.list_of(bboxed) - val g_list = replicate (length bboxed) g - val bboxed = map2 L.mk g_list bboxed - - val succ = complement_eq_contour(bboxed,succ) - in - length succ - end - - (* This function recomputes the input multiplicity of a literal l in a graph g given the previous contour pc *) - fun set_output_mult(g,pc,l) = - let - val succ = L.get_succ g l - in - if exists_bang(g,succ) then L.mult_star - else L.mult_none - end - - (* This function rebuilds the literals ls in a graph g recomputing arities and multiplicities according to the previous contour pc *) - fun rebuild_literals(_,_,[]) = [] - | rebuild_literals(g,pc,l::[]) = - L.build ( L.get_name(l), L.get_kind(l), set_kind_mult(g,pc,l), set_input_arity(g,l), set_input_mult(g,pc,l), set_output_arity(g,l), set_output_mult(g,pc,l) ) :: [] - | rebuild_literals(g,pc,l::ls) = - L.build ( L.get_name(l), L.get_kind(l), set_kind_mult(g,pc,l), set_input_arity(g,l), set_input_mult(g,pc,l), set_output_arity(g,l), set_output_mult(g,pc,l) ) :: rebuild_literals(g,pc,ls) - - (* This function adds a qm multiplicity to the boundary node-vertices in l::ls *) - fun add_qm_to_boundaries([]) = [] - | add_qm_to_boundaries(l::[]) = - if L.is_boundary(l) then L.build ( L.get_name(l), L.get_kind(l), L.mult_qm, L.get_input_arity(l), L.get_input_mult(l), L.get_output_arity(l), L.get_output_mult(l) ) :: [] - else l ::[] - | add_qm_to_boundaries(l::ls) = - if L.is_boundary(l) then L.build ( L.get_name(l), L.get_kind(l), L.mult_qm, L.get_input_arity(l), L.get_input_mult(l), L.get_output_arity(l), L.get_output_mult(l) ) :: add_qm_to_boundaries(ls) - else l :: add_qm_to_boundaries(ls) - - (* This function rebuilds the literals adding a qm multiplicity to a boundary node-vertex if the contour c contains more than one boundary node-vertex *) - fun rebuild_boundaries(c) = - if contains_two_boundaries(c) then add_qm_to_boundaries(c) else c - - (* This function builds the first contour given a graph g *) - fun target_function(g) = - let - val vnames = G.get_unbboxed(g) - val v = case V.NSet.get_local_bot vnames - of SOME v => v - | NONE => raise ERROR "No unboxed verts" - val lit = L.mk g v - val contour = rebuild_literals(g,[],lit::[]) - in - rebuild_boundaries(contour) - end - - (* This function build the first contour using the node-vertex v *) - fun mk_first_contour g v = - let - val lit = L.mk g v - val contour = rebuild_literals(g,[],lit::[]) - in - rebuild_boundaries(contour) - end - - (* This function builds a contour starting from another contour c *) - fun mk g c = - let - val g_list = replicate (length c) g - val gl_list = g_list ~~ c - - val contour = LIB.maps3 L.get_adj g c - val contour = remove_eq_duplicate(contour) - val contour = complement_eq_contour(c,contour) - val contour = rebuild_literals(g,c,contour) - in - rebuild_boundaries(contour) - end - - (* This function adds the literal l to the contour c *) - fun add_literal c l = l::c - - - - (* This function finds if a literal l is contained in a contour c *) - fun is_matching_literal_contained(l,c) = LIB.is_contained L.match l c - - (* This function removes a literal l from a contour c, given that l is contained in c *) - (* PRECONDITION: is_matching_literal_contained(l,c) = true *) - 
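(* [Editor's note: summary of the construction rules above, not part of the
   deleted file.]
   When a contour is rebuilt, each literal receives
     kind multiplicity   : Star if the vertex itself lies in a !-box,
                           QM if it has neighbours in the previous contour and
                           all of them are !-boxed, and None otherwise;
     input/output arity  : the number of predecessors/successors that are not
                           !-boxed;
     input/output mult   : Star if some predecessor/successor is !-boxed,
                           None otherwise.
   Finally, if a contour contains two or more boundary vertices, every
   boundary literal in it gets kind multiplicity QM (rebuild_boundaries). *)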
fun remove_matching_literal_in_contour(l,c) = LIB.rm_element L.match l c - - (* This function removes a matching literal from a contour considering its multiplicity *) - (* PRECONDITION: is_matching_literal_contained(l,c) = true *) - fun remove_matching_literal_in_contour_according_to_mult(l,c::[]) = - if(L.is_kind_mult_star(c)) then c::[] else [] - | remove_matching_literal_in_contour_according_to_mult(l,c::cs) = - if L.match(l,c) then - if(L.is_kind_mult_star(c)) then c::cs else cs - else c::remove_matching_literal_in_contour_according_to_mult(l,cs) - | remove_matching_literal_in_contour_according_to_mult(l,[]) = - raise ERROR "Empty contour" - - (* This function subtracts c2 from c1, c1-c2, given that c2 is contained in c1 *) - (* PRECONDITION: c2 is contained in c1 *) - fun subtract_matching_contour(c1,c2) = LIB.sub_x_y L.match c1 c2 - - (* This function checks if every literal in c1 is matched in c2 *) - fun is_matching_contour_contained([], []) = true - | is_matching_contour_contained(c1::[], []) = false - | is_matching_contour_contained([], c2::[]) = true - | is_matching_contour_contained([], c2::c2s) = true - | is_matching_contour_contained(c1::[],c2::[]) = if L.match(c1,c2) then true else false - | is_matching_contour_contained(c1::[],c2::c2s) = if is_matching_literal_contained(c1,c2::c2s) then true else false - | is_matching_contour_contained(c1::c1s,[]) = false - | is_matching_contour_contained(c1::c1s,c2::[]) = false - | is_matching_contour_contained(c1::c1s,c2::c2s) = - if (length c1s > length c2s) then false - else if is_matching_literal_contained(c1,c2::c2s) then is_matching_contour_contained(c1s,remove_matching_literal_in_contour(c1,c2::c2s)) - else false - - (* This function checks if if c1 is generated from c2 *) - fun is_contour_generated([],[]) = true - | is_contour_generated(c1::[],[]) = false - | is_contour_generated(c1::c1s,[]) = false - | is_contour_generated([],c2::[]) = true - | is_contour_generated([],c2::c2s) = true - | is_contour_generated(c1::[],c2::[]) = if L.match(c1,c2) then true else false - | is_contour_generated(c1::[],c2::c2s) = if is_matching_literal_contained(c1,c2::c2s) then true else false - | is_contour_generated(c1::c1s,c2::[]) = - if L.is_kind_mult_star(c2) - then if L.match(c1,c2) then is_contour_generated(c1s,c2::[]) else false - else false - | is_contour_generated(c1::c1s,c2::c2s) = - if is_matching_literal_contained(c1,c2::c2s) - then is_contour_generated(c1s,remove_matching_literal_in_contour_according_to_mult(c1,c2::c2s)) - else false - - (* This function checks if c1 is strongly compatible with c2, which may be different from checking if c2 is strongly compatible with c1 *) - (* STRONG COMPATIBILITY is a form of MATCHING, used when pruning the tree in order to identify contours that are strongly or weakly compatible *) - fun check_strong_compatibility (c1,c2) = - let - val c2_concrete = get_contour_mult_none(c2) - val c2_abstract = get_contour_mult_star_or_qm(c2) - in - if (is_matching_contour_contained(c2_concrete,c1)) - then - let - val c1_remaining = subtract_matching_contour(c1,c2_concrete) - in - if (is_contour_generated(c1_remaining,c2_abstract)) then true else false - end - else false - end - - (* This function checks if c1 is weakly compatible with c2, which may be different from checking if c2 is weakly compatible with c1 *) - (* WEAK COMPATIBILITY is a form of MATCHING, used when pruning the tree in order to identify contours that are strongly or weakly compatible *) - fun check_weak_compatibility (c1,c2) = - let - val 
c2_concrete = get_contour_mult_none(c2) - in - if (is_matching_contour_contained(c2_concrete,c1)) then true else false - end - - - - (* This function prints the contour c*) - fun printout([]) = "" - | printout(l::[]) = L.printout(l) - | printout(l::ls) = L.printout(l) ^ " " ^ printout(ls) ^ " " - -end diff --git a/oldcore/dnets/ContourList.ML b/oldcore/dnets/ContourList.ML deleted file mode 100644 index 84978405..00000000 --- a/oldcore/dnets/ContourList.ML +++ /dev/null @@ -1,108 +0,0 @@ -signature CONTOUR_LIST = -sig - type T - structure G : BANG_GRAPH - structure C : CONTOUR - - (* CONSTRUCTORS *) - val empty : T - val mk : G.T -> T - val mk_from : G.T -> V.name -> T - - (* COMPARISON FUNCTION *) - val equiv : T * T -> bool - - (* PRINT FUNCTION *) - val printout : T -> string -end - - -functor Contour_List (G : BANG_GRAPH) : CONTOUR_LIST = -struct - - structure G = G - structure LIB = TDNet_Library - structure C = Contour(G) - - type T = C.T list - - val empty = [] - - - - (* This function builds contour recursively given a graph g, a previous contour c and a list of remaining literals r. *) - (* The new contour is added to the contour list cl *) - fun build_contours(_,_,cl,[]) = rev cl - | build_contours(_,[],cl,_) = rev cl - | build_contours(g,c,cl,r) = - let - val new_contour = C.mk g c - val new_contour = C.intersect_eq_contours(new_contour,r) - val r = C.subtract_eq_contour(r,new_contour) - in - if null(new_contour) then build_contours(g,new_contour,cl,r) - else build_contours(g,new_contour,(new_contour::cl),r) - end - - (* This function builds a contour list given a graph g *) - fun mk g = - let - val first_contour = C.target_function(g) - val contour_list = first_contour::[] - - val remaining_list = G.get_vertices g - val remaining_list = V.NSet.list_of remaining_list - val g_list = replicate (length remaining_list) g - val remaining_list = map2 C.L.mk g_list remaining_list - val remaining_list = C.subtract_eq_contour(remaining_list,first_contour) - in - build_contours (g,first_contour,contour_list,remaining_list) - end - - (* This function builds a contour list given a graph g a starting node-vertex v*) - fun mk_from g v = - let - val first_contour = C.mk_first_contour g v - val contour_list = first_contour::[] - - val remaining_list = G.get_vertices g - val remaining_list = V.NSet.list_of remaining_list - val g_list = replicate (length remaining_list) g - val remaining_list = map2 C.L.mk g_list remaining_list - val remaining_list = C.subtract_eq_contour(remaining_list,first_contour) - in - build_contours (g,first_contour,contour_list,remaining_list) - end - - - (* This function removes a contour c from a contour list cl, given that c is contained in cl *) - (* PRECONDITION: c is contained in cl *) - fun remove_equiv_contour(c,cl) = LIB.rm_element C.equiv c cl - - (* This function checks if a contour c is contained in the contour list cl*) - fun is_equiv_contour_contained(c,cl) = LIB.is_contained C.equiv c cl - - (* This function checks if two contour lists cl1 and cl2 are equivalent *) - fun is_equiv_contour_list(c1::[],c2::[]) = if C.equiv(c1,c2) then true else false - | is_equiv_contour_list(c1::cl1,c2::[]) = if C.equiv(c1,c2) then true else false - | is_equiv_contour_list(c1::cl1,c2::cl2) = - if C.equiv(c1,c2) then is_equiv_contour_list (cl1,cl2) - else if is_equiv_contour_contained(c1,cl2) then is_equiv_contour_list(cl1,c2::remove_equiv_contour(c1,cl2)) else false - | is_equiv_contour_list _ = false (* different lengths *) - - (* This function checks for equivalence between 
cl1 and cl2, that is if cl1 and cl2 are equivalent contour lists *) - (* TRUE iff cl1 and cl2 contains equivalent contours in any order; they can be generated by different node-vertices *) - fun equiv (cl1,cl2) = - if (length cl1 = length cl2) then is_equiv_contour_list(cl1,cl2) - else false - - - - - (* This function prints the contour list cl*) - fun printout ([]) = "" - | printout (c::[]) = C.printout(c) - | printout (c::cs) = C.printout(c) ^ " " ^ printout(cs) ^ " " - -end - diff --git a/oldcore/dnets/DNetsLib.ML b/oldcore/dnets/DNetsLib.ML deleted file mode 100644 index 7ae475ff..00000000 --- a/oldcore/dnets/DNetsLib.ML +++ /dev/null @@ -1,53 +0,0 @@ -signature TDNET_LIBRARY = -sig - - (* GENERIC FUNCTIONS *) - val maps2 : ('a -> 'b -> 'b list) -> 'a list -> 'b list -> 'b list - val maps3 : ('a -> 'b -> 'c list) -> 'a -> 'b list -> 'c list - - (* LIST FUNCTIONS *) - val is_contained : ('a * 'a -> bool) -> 'a -> 'a list -> bool - val rm_duplicates : ('a * 'a -> bool) -> 'a list -> 'a list - val rm_element : ('a * 'a -> bool) -> 'a -> 'a list -> 'a list - val sub_x_y : ('a * 'a -> bool) -> 'a list -> 'a list -> 'a list - val intersect_x_y : ('a * 'a -> bool) -> 'a list -> 'a list -> 'a list - val compl_x_y : ('a * 'a -> bool) -> 'a list -> 'a list -> 'a list - -end - - -structure TDNet_Library : TDNET_LIBRARY = -struct - - fun maps2 _ [] [] = [] - | maps2 f (x::xs) (y::ys) = f x y @ maps2 f xs ys - | maps2 _ _ _ = raise ListPair.UnequalLengths; - - fun maps3 _ _ [] = [] - | maps3 f x (y::ys) = f x y @ maps3 f x ys - - fun is_contained _ x [] = false - | is_contained f x (y::[]) = if f(x,y) then true else false - | is_contained f x (y::ys) = if f(x,y) then true else is_contained f x ys - - fun rm_duplicates _ [] = [] - | rm_duplicates _ (x::[]) = x::[] - | rm_duplicates f (x::xs) = if is_contained f x xs then rm_duplicates f xs else x :: (rm_duplicates f xs) - - fun rm_element _ x [] = [] - | rm_element f x (y::[]) = if f(x,y) then [] else y::[] - | rm_element f x (y::ys) = if f(x,y) then ys else y::(rm_element f x ys) - - fun sub_x_y _ x [] = x - | sub_x_y f x (y::[]) = if is_contained f y x then rm_element f y x else x - | sub_x_y f x (y::ys) = if is_contained f y x then sub_x_y f (rm_element f y x) ys else sub_x_y f x ys - - fun intersect_x_y _ x [] = [] - | intersect_x_y _ [] y = [] - | intersect_x_y f x (y::[]) = if is_contained f y x then y::[] else [] - | intersect_x_y f x (y::ys) = if is_contained f y x then y::(intersect_x_y f (rm_element f y x) ys) else intersect_x_y f x ys - - fun compl_x_y f x y = sub_x_y f x (intersect_x_y f x y) - - -end \ No newline at end of file diff --git a/oldcore/dnets/Literal.ML b/oldcore/dnets/Literal.ML deleted file mode 100644 index 66cba0a2..00000000 --- a/oldcore/dnets/Literal.ML +++ /dev/null @@ -1,230 +0,0 @@ -signature LITERAL = -sig - type T - type multiplicity - structure G : BANG_GRAPH - - val boundary : G.vdata - - (* CONSTRUCTOR *) - val mk : G.T -> V.name -> T - val build : V.name * G.vdata * multiplicity * int * multiplicity * int * multiplicity -> T - - (* COMPARISON FUNCTIONS *) - val eq : T * T -> bool - val equiv : T * T -> bool - val match : T * T -> bool - - (* GETTERS *) - val get_adj : G.T -> T -> T list - val get_pred : G.T -> T -> T list - val get_succ : G.T -> T -> T list - - val get_name : T -> V.name - val get_kind : T -> G.vdata - val get_kind_mult : T -> multiplicity - val get_input_arity : T -> int - val get_input_mult : T -> multiplicity - val get_output_arity : T -> int - val get_output_mult : T -> multiplicity - - 
val is_boundary : T -> bool - - (* MULTIPLICITY FUNCTIONS*) - val mult_none : multiplicity - val mult_star : multiplicity - val mult_qm : multiplicity - val is_kind_mult_none : T -> bool - val is_kind_mult_star : T -> bool - val is_kind_mult_qm : T -> bool - val mult_eq : multiplicity * multiplicity -> bool - - (* PRINT FUNCTION *) - val printout : T -> string - -end - - - -functor Literal ( G : BANG_GRAPH ) : LITERAL = -struct - - structure G = G; - - datatype multiplicity = None | Star | QM - - type T = { name : V.name, - kind : G.vdata, - kind_mult : multiplicity, - input_arity : int, - input_mult : multiplicity, - output_arity : int, - output_mult : multiplicity } - - val boundary = G.WVert - - val mult_none = None - val mult_qm = QM - val mult_star = Star - - (* This function builds a literal by packaging its data in a literal type *) - fun build (n, k, k_m, i_a, i_m, o_a, o_m) = - {name = n, kind = k, kind_mult = k_m, input_arity = i_a, input_mult = i_m, output_arity = o_a, output_mult = o_m} : T - - (* This function builds a literal given a graph g and a node-vertex v *) - fun mk g v = - let - val k = G.get_vertex_data g v - val k_m = None - val i_a = Arity.get_in (G.get_arity g v) - val i_m = None - val o_a = Arity.get_out (G.get_arity g v) - val o_m = None - in - build (v, k, k_m, i_a, i_m, o_a, o_m) - end - - - (* HELPER FUNCTIONS to compare literal matching *) - (* This function checks if one literal between l1 and l2 has a boundary kind *) - fun one_is_a_bondary(l1,l2) = - if (G.vdata_eq(#kind l1, boundary) orelse G.vdata_eq(#kind l2, boundary)) then true else false - - (* This functions checks if the kinds of two literals l1 and l2 match *) - fun match_kind(l1,l2) = - G.vdata_eq(#kind l1, #kind l2) orelse one_is_a_bondary(l1,l2) - - (* This functions checks if the input arity of two literals l1 and l2 match *) - fun match_input(l1,l2) = - if (#input_arity l1 = #input_arity l2) then true - else if (#input_arity l1 > #input_arity l2) andalso (#input_mult l2 = Star) then true - else if (#input_arity l1 < #input_arity l2) andalso (#input_mult l1 = Star) then true - else if one_is_a_bondary(l1,l2) - then true - else false - - (* This functions checks if the output arity of two literals l1 and l2 match *) - fun match_output(l1,l2) = - if (#output_arity l1 = #output_arity l2) then true - else if (#output_arity l1 > #output_arity l2) andalso (#output_mult l2 = Star) then true - else if (#output_arity l1 < #output_arity l2) andalso (#output_mult l1 = Star) then true - else if one_is_a_bondary(l1,l2) - then true - else false - - (* This function checks for matching between c1 and c2, that is if c1 and c2 can match each other *) - (* TRUE iff l1 and l2 are literals that can be reduced to one another; they can be generated by different node-vertices; they can differ on the multiplicity *) - (* MATCHING is used when pruning the tree in order to identify contours that are strongly or weakly compatible *) - fun match(l1,l2) = match_kind(l1,l2) andalso match_input(l1,l2) andalso match_output(l1,l2) - - - - (* This functions checks if the kinds of two literals l1 and l2 are equivalent *) - fun equiv_kind(l1,l2) = - if (G.vdata_eq(#kind l1, #kind l2)) andalso - ( (#input_mult l1 = None andalso #input_mult l2 = None) orelse - (#input_mult l1 = Star andalso #input_mult l2 = Star) orelse - (#input_mult l1 = QM andalso #input_mult l2 = QM) ) - then true - else false - - (* This functions checks if the input arity of two literals l1 and l2 are equivalent *) - fun equiv_input(l1,l2) = - if 
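(* [Editor's note: summary, not part of the deleted file.]
   For literals, match is the permissive relation used during pruning: the
   kinds must be equal or one literal must be a boundary (wire) vertex, and
   each arity must either agree or the literal with the smaller arity must
   carry multiplicity Star (again, a boundary matches anything).  So a pattern
   literal with input arity 1 and input multiplicity Star matches a target
   literal of the same kind with input arity 3.  equiv is stricter: kinds,
   arities and multiplicities must agree on both sides, with no wildcard
   behaviour; eq simply compares the underlying vertex names. *)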
(#input_arity l1 = #input_arity l2) andalso - ( (#input_mult l1 = None andalso #input_mult l2 = None) orelse - (#input_mult l1 = Star andalso #input_mult l2 = Star) ) - then true - else false - - (* This functions checks if the output arity of two literals l1 and l2 are equivalent *) - fun equiv_output(l1,l2) = - if (#output_arity l1 = #output_arity l2) andalso - ( (#output_mult l1 = None andalso #output_mult l2 = None) orelse - (#output_mult l1 = Star andalso #output_mult l2 = Star) ) - then true - else false - - (* This function checks for equivalence between c1 and c2, that is if c1 and c2 are equivalent contours *) - (* TRUE iff l1 and l2 are literals that can be reduced to one another; they can be generated by different node-vertices *) - (* EQUIVALENCE is used when building the tree in order identify contours which are equivalent and share a node in the tree *) - fun equiv(l1,l2) = equiv_kind(l1,l2) andalso equiv_input(l1,l2) andalso equiv_output(l1,l2) - - - - (* This function checks for equality between l1 and l2, that is if l1 and l2 are the same literal *) - (* TRUE iff l1 and l2 are literals generated by the same node-vertex *) - (* EQUALITY is used when building contours in order to build a single literal for every node-vertex *) - fun eq(l1,l2) = V.name_eq(#name l1, #name l2) - - - - (* Getters for a literal *) - fun get_name(l) = #name l - - fun get_kind(l) = #kind l - - fun get_kind_mult(l) = #kind_mult l - - fun get_input_arity(l) = #input_arity l - - fun get_input_mult(l) = #input_mult l - - fun get_output_arity(l) = #output_arity l - - fun get_output_mult(l) = #output_mult l - - - (* This function returns the literals of all the adjacent nodes to the literal l in the graph g *) - fun get_adj g l = - let - val adj_vert = G.get_adj_vertices g (#name l) - val v_list = V.NSet.list_of adj_vert - - val g_list = replicate (length v_list) g - in - map2 mk g_list v_list - end - - (* This function returns the literals of all the predecessor nodes of the literal l in the graph g *) - fun get_pred g l = - let - val adj_vert = G.get_predecessor_vertices g (#name l) - val v_list = V.NSet.list_of adj_vert - - val g_list = replicate (length v_list) g - in - map2 mk g_list v_list - end - - (* This function returns the literals of all the successor nodes of the literal l in the graph g *) - fun get_succ g l = - let - val adj_vert = G.get_successor_vertices g (#name l) - val v_list = V.NSet.list_of adj_vert - - val g_list = replicate (length v_list) g - in - map2 mk g_list v_list - end - - (* This function checks if a literal is a boundary node-vertex *) - fun is_boundary(l) = G.vdata_eq(#kind l, boundary) - - - (* Functions to work with multiplicity *) - fun mult_eq (m1,m2) = - if ((m1=Star andalso m2=Star) orelse (m1=QM andalso m2=QM) orelse (m1=None andalso m2=None)) then true - else false - - fun is_kind_mult_none(l) = if mult_eq((#kind_mult l),None) then true else false - - fun is_kind_mult_star(l) = if mult_eq((#kind_mult l),Star) then true else false - - fun is_kind_mult_qm(l) = if mult_eq((#kind_mult l),QM) then true else false - - - - (* This function prints the literal l*) - fun printout(l) = V.string_of_name(#name l) - -end diff --git a/oldcore/dnets/TopDNet.ML b/oldcore/dnets/TopDNet.ML deleted file mode 100644 index 78353eb3..00000000 --- a/oldcore/dnets/TopDNet.ML +++ /dev/null @@ -1,416 +0,0 @@ -signature TOP_DNET = -sig - type T - type tree - structure G : BANG_GRAPH - structure CL : CONTOUR_LIST - sharing G.Sharing = CL.G.Sharing - - (* CONSTRUCTORS *) - val empty : T - val 
mk : G.T GraphName.NTab.T -> T - val add_cl_to_dnet : T -> CL.T -> GraphName.name -> T - val add_cl_list_to_dnet : T -> CL.T list -> GraphName.name list -> T - val add_graph : (GraphName.name * G.T) -> T -> T - - (* PRUNING FUNCTIONS *) - val extended_pruning : T -> G.T -> (V.name * GraphName.name list) list - val standard_pruning : T -> G.T -> (V.name * GraphName.name list) list - val extended_prune : V.name -> G.T -> T -> T - val standard_prune : V.name -> G.T -> T -> T - val graphs : T -> GraphName.name list - val get_match_candidates : T -> G.T -> GraphName.NSet.T - - (* FOLD FUNCTION *) - val fold : (('a * 'b) -> G.T GraphName.NTab.T -> G.T GraphName.NTab.T) -> 'a list -> 'b list -> G.T GraphName.NTab.T -> G.T GraphName.NTab.T - - (* GETTERS *) - val is_node : tree -> bool - val get_contour : tree -> CL.C.T - val get_children : tree -> tree list - val get_graph : tree -> GraphName.name - - (* COMPARISON FUNCTION *) - val is_eq_graphs : GraphName.name list * GraphName.name list -> bool - - (* PRINT FUNCTION*) - val printout : tree list -> string - -end - - -functor Top_DNet ( G: BANG_GRAPH ) : TOP_DNET = - -struct - - structure G = G - structure CL = Contour_List(G) - structure C = CL.C - - datatype tree = Node of {contour : C.T, children : tree list} - | Leaf of {graph : GraphName.name} - type T = tree - - val empty = Node{contour=C.empty, children=[]} - - (* This function checks if a tree node is a Node or a Leaf *) - fun is_node (Node{contour=contour, children = children}) = true - | is_node (Leaf{graph=graph}) = false - - (* Getters for a tree *) - fun get_contour(Node{contour=contour, children = children}) = contour - | get_contour _ = raise ERROR "Not a contour" - - fun get_children(Node{contour=contour, children=children}) = children - | get_children _ = raise ERROR "Not a contour" - - fun get_graph(Leaf{graph=graph}) = graph - | get_graph _ = raise ERROR "Not a graph" - - - - - (* This function checks is a contour c is contained in a list of Node or Leaf *) - fun is_equiv_contained(c,[]) = false - | is_equiv_contained(c,Node{contour=contour, children=children}::[]) = if C.equiv(contour,c) then true else false - | is_equiv_contained(c,Leaf{graph=graph}::[]) = false - | is_equiv_contained(c,Node{contour=contour, children=children}::cl) = if C.equiv(contour,c) then true else is_equiv_contained(c,cl) - | is_equiv_contained(c,Leaf{graph=graph}::cl) = is_equiv_contained(c,cl) - - (* This function checks if a contour c is equivalent with the contour stored in a Node *) - fun equiv(Node{contour=contour, children=children}, c) = - if C.equiv(contour,c) then true else false - | equiv _ = false - - (* This function retrieves a node x from a contour list cl, given that x is contained in cl*) - (* PRECONDITION: x is contained in cl *) - fun get_equiv_node(_,c::[]) = c - | get_equiv_node(x,c::cl) = - if is_node(c) - then if equiv(c,x) then c else get_equiv_node(x,cl) - else get_equiv_node(x,cl) - | get_equiv_node(_,[]) = raise ERROR "empty contour list" - - (* This function retrieves a list of literals not equiv to x from a contour list cl, given that x is contained in cl*) - (* PRECONDITION: x is contained in cl *) - fun get_equiv_remaining(x,c::[]) = [] - | get_equiv_remaining(x,c::cl) = - if is_node(c) - then if equiv(c,x) then cl else c::get_equiv_remaining(x,cl) - else c::get_equiv_remaining(x,cl) - | get_equiv_remaining(_,[]) = raise ERROR "empty contour list" - - - - (* This function add a contour list cl associated with the graph named gn to the tree *) - fun add_cl_to_dnet 
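(* [Editor's note: overview, not part of the deleted file.]
   The discrimination net is a tree whose internal nodes carry contours and
   whose leaves carry graph names.  add_cl_to_dnet inserts a pattern's contour
   list one contour at a time, reusing an existing child whenever it already
   holds an equivalent contour, and finally hangs a Leaf with the graph's name
   under the last node; patterns whose contour lists begin with equivalent
   contours therefore share a prefix of the tree.  mk does this for every
   graph in the given table, using CL.mk to compute its contour list. *)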
(Node{contour=contour, children=children}) (c::[]) gn = - if is_equiv_contained(c,children) - then - let - val n = get_equiv_node(c,children) - val r = get_equiv_remaining(c,children) - val new_leaf = Leaf{graph=gn} - val new_node = Node{contour=get_contour(n),children=new_leaf::get_children(n)} - in - Node{contour=contour, children=new_node::r} - end - else - let - val new_leaf = Leaf{graph=gn} - val new_node = Node{contour=c,children=new_leaf::[]} - in - Node{contour=contour, children=new_node::children} - end - | add_cl_to_dnet (Node{contour=contour, children=children}) (c::cs) gn = - if is_equiv_contained(c,children) - then - let - val n = get_equiv_node(c,children) - val r = get_equiv_remaining(c,children) - in - Node{contour=contour, children=(add_cl_to_dnet n cs gn)::r} - end - else - let - val n = Node{contour=c, children =[]} - in - Node{contour=contour, children = (add_cl_to_dnet n cs gn)::children} - end - | add_cl_to_dnet _ [] _ = raise ERROR "Empty contour list" - | add_cl_to_dnet _ _ _ = raise ERROR "Not a contour" - - (* This function add a list of contour lists cls together with the list of graph named gns to the tree *) - fun add_cl_list_to_dnet d (cl::[]) (gn::[]) = add_cl_to_dnet d cl gn - | add_cl_list_to_dnet d (cl::cll) (gn::gns) = add_cl_list_to_dnet (add_cl_to_dnet d cl gn) cll gns - | add_cl_list_to_dnet d _ _ = raise ERROR "Contour and graph list lengths differ" - - fun add_graph (n,g) dnet = add_cl_to_dnet dnet (CL.mk g) n - - (* This function builds a tree given a set of pattern graphs tab *) - fun mk(tab) = - let - val g_list = GraphName.NTab.values tab - val g_names = GraphName.NTab.keys tab - val cl_list = map CL.mk g_list - val e = empty - in - add_cl_list_to_dnet e cl_list g_names - end - - - - (* This function recursively prunes the tree t using weak compatibility*) - fun weak_prune_trees([],[]) = [] - | weak_prune_trees(t::[],[]) = t::[] - | weak_prune_trees([],c::[]) = [] - | weak_prune_trees(t::[],c::[]) = - if (is_node(t)) then - if (C.check_weak_compatibility(c,get_contour(t))) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),[])} :: [] - else [] - else t :: [] - | weak_prune_trees([],c::cs) = [] - | weak_prune_trees(t::ts,[]) = - if (is_node(t)) then weak_prune_trees(ts,[]) - else t :: weak_prune_trees(ts,[]) - | weak_prune_trees(t::[],c::cs) = - if (is_node(t)) then - if (C.check_weak_compatibility(c,get_contour(t))) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),cs)} :: [] - else [] - else t :: [] - | weak_prune_trees(t::ts,c::[]) = - if (is_node(t)) then - if (C.check_weak_compatibility(c,get_contour(t))) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),[])} :: weak_prune_trees(ts,c::[]) - else weak_prune_trees(ts,c::[]) - else t :: weak_prune_trees(ts,c::[]) - | weak_prune_trees(t::ts,c::cs) = - if (is_node(t)) then - if (C.check_weak_compatibility(c,get_contour(t))) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),cs)} :: weak_prune_trees(ts,c::cs) - else weak_prune_trees(ts,c::cs) - else t :: weak_prune_trees(ts,c::cs) - - - (* This function recursively prunes the tree t using strong compatibility*) - fun extended_strong_prune_trees([],[]) = [] - | extended_strong_prune_trees(t::[],[]) = t::[] - | extended_strong_prune_trees([],c::[]) = [] - | extended_strong_prune_trees(t::[],c::[]) = - if (is_node(t)) - then - let - val is_weak = C.contains_boundary(get_contour(t)) - val is_saved = if (is_weak) then 
C.check_weak_compatibility(c,get_contour(t)) else C.check_strong_compatibility(c,get_contour(t)) - in - if (is_saved) then - if (is_weak) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),[])} :: [] - else Node{contour=get_contour(t),children=extended_strong_prune_trees(get_children(t),[])} :: [] - else [] - end - else - t :: [] - | extended_strong_prune_trees([],c::cs) = [] - | extended_strong_prune_trees(t::ts,[]) = - if (is_node(t)) then extended_strong_prune_trees(ts,[]) - else t :: extended_strong_prune_trees(ts,[]) - | extended_strong_prune_trees(t::[],c::cs) = - if (is_node(t)) - then - let - val is_weak = C.contains_boundary(get_contour(t)) - val is_saved = if (is_weak) then C.check_weak_compatibility(c,get_contour(t)) else C.check_strong_compatibility(c,get_contour(t)) - in - if (is_saved) then - if (is_weak) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),cs)} :: [] - else Node{contour=get_contour(t),children=extended_strong_prune_trees(get_children(t),cs)} :: [] - else [] - end - else - t :: [] - | extended_strong_prune_trees(t::ts,c::[]) = - if (is_node(t)) - then - let - val is_weak = C.contains_boundary(get_contour(t)) - val is_saved = if (is_weak) then C.check_weak_compatibility(c,get_contour(t)) else C.check_strong_compatibility(c,get_contour(t)) - in - if (is_saved) then - if (is_weak) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),[])} :: extended_strong_prune_trees(ts,c::[]) - else Node{contour=get_contour(t),children=extended_strong_prune_trees(get_children(t),[])} :: extended_strong_prune_trees(ts,c::[]) - else extended_strong_prune_trees(ts,c::[]) - end - else - t :: extended_strong_prune_trees(ts,c::[]) - | extended_strong_prune_trees(t::ts,c::cs) = - if (is_node(t)) - then - let - val is_weak = C.contains_boundary(get_contour(t)) - val is_saved = if (is_weak) then C.check_weak_compatibility(c,get_contour(t)) else C.check_strong_compatibility(c,get_contour(t)) - in - if (is_saved) then - if (is_weak) then Node{contour=get_contour(t),children=weak_prune_trees(get_children(t),cs)} :: extended_strong_prune_trees(ts,c::cs) - else Node{contour=get_contour(t),children=extended_strong_prune_trees(get_children(t),cs)} :: extended_strong_prune_trees(ts,c::cs) - else extended_strong_prune_trees(ts,c::cs) - end - else - t :: extended_strong_prune_trees(ts,c::cs) - - - (* This function perform the first step of the extended pruning of the tree t at the root *) - fun extended_prune_tree(t,[]) = t - | extended_prune_tree(t,c::[]) = - if (is_node(t)) - then - let - val children = get_children(t) - in - Node{contour=get_contour(t),children = extended_strong_prune_trees(children,c::[])} - end - else t - | extended_prune_tree(t,c::cs) = - if (is_node(t)) - then - let - val children = get_children(t) - in - Node{contour = get_contour(t), children = extended_strong_prune_trees(children,c::cs)} - end - else t - - (* This function perform the first step of the standard pruning of the tree t at the root *) - fun standard_prune_tree(t,[]) = t - | standard_prune_tree(t,c::[]) = - if (is_node(t)) - then - let - val children = get_children(t) - in - Node{contour=get_contour(t),children = weak_prune_trees(children,c::[])} - end - else t - | standard_prune_tree(t,c::cs) = - if (is_node(t)) - then - let - val children = get_children(t) - in - Node{contour = get_contour(t), children = weak_prune_trees(children,c::cs)} - end - else t - - (* This function use the extended algorithm to prune a tree given a tree t, a graph 
g and a starting node-vertex v to build the contour list of g*) - fun extended_prune v g t = - let - val target_contour_list = CL.mk_from g v - in - extended_prune_tree(t,target_contour_list) - end - - (* This function use the standard algorithm to prune a tree given a tree t, a graph g and a starting node-vertex v to build the contour list of g*) - fun standard_prune v g t = - let - val target_contour_list = CL.mk_from g v - in - standard_prune_tree(t,target_contour_list) - end - - (* This function traverses the tree t to get to the leaves *) - fun explore_branches([]) = [] - | explore_branches(t::[]) = - if (is_node(t)) then explore_branches(get_children(t)) - else get_graph(t) :: [] - | explore_branches(t::ts) = - if (is_node(t)) then explore_branches(get_children(t)) @ explore_branches(ts) - else get_graph(t) :: explore_branches(ts) - - (* This function returns the list of the names of all the graphs encoded in the tree t *) - fun graphs t = - if (is_node(t)) then explore_branches(get_children(t)) - else get_graph(t)::[] - - (* This function returns the list of the names of all the graphs encoded in a list of discrimination trees t::ts *) - fun collect_graphs [] = [] - | collect_graphs (t::[]) = (graphs t) :: [] - | collect_graphs (t::ts) = (graphs t) :: (collect_graphs ts) - - (* This function receives a discrimination tree and executes the extended pruning algorithm using all possible contour list built from a target and a starting node-vertex n *) - fun do_extended_pruning tree target [] = [] - | do_extended_pruning tree target (n::[]) = (extended_prune n target tree) :: [] - | do_extended_pruning tree target (n::ns) = (extended_prune n target tree) :: (do_extended_pruning tree target ns) - - (* This function receives a discrimination tree and executes the standard pruning algorithm using all possible contour list built from a target and a starting node-vertex n *) - fun do_standard_pruning tree target [] = [] - | do_standard_pruning tree target (n::[]) = (standard_prune n target tree) :: [] - | do_standard_pruning tree target (n::ns) = (standard_prune n target tree) :: (do_standard_pruning tree target ns) - - (* This function recevives a discrimination tree and a target graph and starts the extended pruning algorithm *) - fun extended_pruning tree target = - let - val nodelist = G.get_unbboxed target - val nodelist = V.NSet.list_of nodelist - val treelist = do_extended_pruning tree target (rev nodelist) - val graphslist = collect_graphs (rev treelist) - in - nodelist ~~ graphslist - end - - (* This function recevives a discrimination tree and a target graph and starts the standard pruning algorithm *) - fun standard_pruning tree target = - let - val nodelist = G.get_unbboxed target - val nodelist = V.NSet.list_of nodelist - val treelist = do_standard_pruning tree target (rev nodelist) - val graphslist = collect_graphs (rev treelist) - in - nodelist ~~ graphslist - end - - fun get_match_candidates dnet target = - fold GraphName.NSet.add (flat (map snd (extended_pruning dnet target))) GraphName.NSet.empty - - (* This function checks if two graphs g1 and g2 are equal *) - (* TRUE iff the graphs have the same name *) - fun graphname_eq(g1,g2) = if (GraphName.string_of_name(g1) = GraphName.string_of_name(g2)) then true else false - - (* This function removes the graph g from the list of graphs gl, given that g is contained in gl*) - (* PRECONDITION: g is contained in gl*) - fun remove_eq_graph(g,gl::[]) = [] - | remove_eq_graph(g,gl::gls) = if graphname_eq(g,gl) then gls else 
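(* [Editor's note: usage overview, not part of the deleted file.]
   Lookup works as follows: for every unboxed vertex v of the target graph a
   contour list is built from v (CL.mk_from), the tree is pruned against it,
   and the graph names at the surviving leaves are the candidate patterns for
   a match rooted near v.  Standard pruning uses weak compatibility only;
   extended pruning uses strong compatibility, falling back to weak
   compatibility at (and below) any tree node whose contour contains a
   boundary vertex.  A typical call sequence, assuming a table of pattern
   graphs and a target graph in scope:

     val dnet  = TD.mk patterns                 -- patterns : G.T GraphName.NTab.T
     val cands = TD.get_match_candidates dnet target
                                                -- cands : GraphName.NSet.T

   get_match_candidates just unions the per-vertex candidate lists produced by
   extended_pruning. *)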
gl::remove_eq_graph(g,gls) - | remove_eq_graph(_,[]) = raise ERROR "Empty graph list" - - (* This function checks if a graph g is contained in the graph list gl*) - fun find_eq_graph(g,[]) = false - | find_eq_graph(g,gl::[]) = if graphname_eq(g,gl) then true else false - | find_eq_graph(g,gl::gls) = if graphname_eq(g,gl) then true else find_eq_graph(g,gls) - - (* This function checks if two graph lists g1s and g2s are equal *) - fun is_eq_graphs([],[]) = true - | is_eq_graphs([],g2::[]) = false - | is_eq_graphs(g1::[],[]) = false - | is_eq_graphs([],g2::g2s) = false - | is_eq_graphs(g1::g1s,[]) = false - | is_eq_graphs(g1::[],g2::[]) = if graphname_eq(g1,g2) then true else false - | is_eq_graphs(g1::g1s,g2::[]) = false - | is_eq_graphs(g1::[],g2::g2s) = false - | is_eq_graphs(g1::g1s,g2::g2s) = - if graphname_eq(g1,g2) then is_eq_graphs (g1s,g2s) - else if find_eq_graph(g1,g2s) then is_eq_graphs(g1s,g2::remove_eq_graph(g1,g2s)) else false - - - - (* Fold function *) - fun fold _ [] [] tab = tab - | fold f (gn::gns) (g::gs) tab = f (gn,g) (fold f gns gs tab) - | fold _ _ _ _ = raise ListPair.UnequalLengths; - - - - (* This function prints the tree t*) - fun printout ([]) = "" - | printout (t::[]) = - if (is_node(t)) then C.printout(get_contour(t)) ^ " " ^ printout(get_children(t)) - else GraphName.string_of_name(get_graph(t)) - | printout (t::ts) = - if (is_node(t)) then C.printout(get_contour(t)) ^ " " ^ printout(ts) ^ " " ^ printout(get_children(t)) - else GraphName.string_of_name(get_graph(t)) ^ " " ^ printout(ts) - -end diff --git a/oldcore/dnets/perf.ML b/oldcore/dnets/perf.ML deleted file mode 100644 index 00d7926f..00000000 --- a/oldcore/dnets/perf.ML +++ /dev/null @@ -1,420 +0,0 @@ -local - structure Tools = Test_Bang_Graph_Tools(Test_Bang_Graph); - open Tools; - - - structure LIB = TDNet_Library - structure TD = Top_DNet(G) - structure CL = TD.CL - structure C = CL.C - structure L = C.L - - fun one_of xs = - let val max = length xs in - nth xs (random_range 0 (max-1)) - end; - - (*val timestamp = Time.toString (Time.now()) - val out = TextIO.openOut ("output"^timestamp^".txt")*) - - val data_no_alg = TextIO.openAppend ("data_no_alg.txt") - val data_std_alg = TextIO.openAppend ("data_std_alg.txt") - val data_ext_alg = TextIO.openAppend ("data_ext_alg.txt") - - val k1 = (vexpr1 "0") - val k2 = (vexpr1 "1") - val k3 = (vexpr1 "a") - - val iterations = 100 - val pattern_params = {kinds = k1 :: k2 :: k3 :: [], - - num_graphs = 100, - num_nodes = 4, - num_edges = 4, - num_bbox = 3, - max_num_nodes_per_bbox = 1, - num_bound = 1 } - - val target_params = {kinds = k1 :: k2 :: k3 :: [], - - num_graphs = 1, - num_nodes = 30, - num_edges = 30, - num_bbox = 0, - max_num_nodes_per_bbox = 0, - num_bound = 0 } - - - - - fun printout out msg = - TextIO.outputSubstr(out, Substring.substring (msg,0,size msg)) - - fun savedata i no std ext = - let - val no_data = "("^i^","^no^")\n" - val std_data = "("^i^","^std^")\n" - val ext_data = "("^i^","^ext^")\n" - val _ = TextIO.outputSubstr(data_no_alg, Substring.substring(no_data,0,size no_data)) - val _ = TextIO.outputSubstr(data_std_alg, Substring.substring(std_data,0,size std_data)) - val _ = TextIO.outputSubstr(data_ext_alg, Substring.substring(ext_data,0,size ext_data)) - in - 1 - end - - - fun generate_random_nodes n = - if (n = 1) then V.mk ("v" ^ string_of_int(n)) :: [] - else V.mk ("v" ^ string_of_int(n)) :: generate_random_nodes (n-1) - - fun add_nodes g [] k = g - | add_nodes g (n::[]) k = g |> G.add_named_vertex n (one_of k) - | add_nodes g 
(n::ns) k = add_nodes (g |> G.add_named_vertex n (one_of k)) ns k - - fun generate_random_edges e = - if (e = 1) then E.mk ("e" ^ string_of_int(e)) :: [] - else E.mk ("e" ^ string_of_int(e)) :: generate_random_edges (e-1) - - fun add_edges g n [] = g - | add_edges g n (e::[]) = - let - val node1 = one_of n - val n_minus = LIB.rm_element (V.name_eq) node1 n - val node2 = one_of n_minus - in - g |> G.add_named_edge e (Directed,eunit1) node1 node2 - end - | add_edges g n (e::es) = - let - val node1 = one_of n - val n_minus = LIB.rm_element (V.name_eq) node1 n - val node2 = one_of n_minus - in - add_edges (g |> G.add_named_edge e (Directed,eunit1) node1 node2) n es - end - - - fun get_adj g n = V.NSet.list_of (G.get_adj_vertices g n) - - fun get_connected_nodes g [] num_nodes bb_nodes = bb_nodes - | get_connected_nodes g (n::[]) num_nodes bb_nodes = - if (num_nodes=1) then n::bb_nodes - else - let - val nodelist = LIB.maps3 get_adj g (n::bb_nodes) - val nodelist = LIB.rm_duplicates (V.name_eq) nodelist - val nodelist = LIB.sub_x_y (V.name_eq) nodelist bb_nodes - in - get_connected_nodes g nodelist (num_nodes-1) (n::bb_nodes) - end - | get_connected_nodes g (n::ns) num_nodes bb_nodes = - if (num_nodes=1) then (one_of (n::ns)) :: bb_nodes - else - let - val node = one_of(n::ns) - val nodelist = LIB.maps3 get_adj g (n::bb_nodes) - val nodelist = LIB.rm_duplicates V.name_eq nodelist - val nodelist = LIB.sub_x_y (V.name_eq) nodelist bb_nodes - in - get_connected_nodes g nodelist (num_nodes-1) (node::bb_nodes) - end - - fun add_bbox g bb [] = g - | add_bbox g bb (n::[]) = g |> G.add_to_bbox_anon bb (V.NSet.single n) - | add_bbox g bb (n::ns) = add_bbox (g |> G.add_to_bbox_anon bb (V.NSet.single n)) bb ns - - fun add_bboxes g n b npbb = - if (b=1) then - let - val num_nodes = random_range 1 npbb - val bb_nodes : V.name list = [] - val nodes = get_connected_nodes g n num_nodes bb_nodes - val (bb,g) = g |> G.add_bbox - in - add_bbox g bb nodes - end - else - let - val num_nodes = random_range 1 npbb - val bb_nodes : V.name list = [] - val nodes = get_connected_nodes g n num_nodes bb_nodes - val n = LIB.sub_x_y (V.name_eq) n nodes - val (bb,g) = g |> G.add_bbox - in - add_bboxes (add_bbox g bb nodes) n (b-1) npbb - end - - fun add_boundaries i g nodes = - let - val b = L.boundary - val bound = V.mk ("b" ^ string_of_int(i)) - val node = one_of nodes - val g = g |> G.add_named_vertex bound b - val edge = E.mk("b_e" ^ string_of_int(i)) - val dice = random_range 1 2 - val g = if (dice=1) then g |> G.add_named_edge edge (Directed,eunit1) node bound - else g |> G.add_named_edge edge (Directed,eunit1) bound node - in - if (i=1) then g else add_boundaries (i-1) g nodes - end - - - - fun instantiate_graph params = - let - val graph = G.empty - - val num_nodes = #num_nodes params - (*val _ = printout out ("Instantiating " ^ string_of_int(num_nodes) ^ " nodes..\n")*) - val nodes = generate_random_nodes num_nodes - - val kinds = #kinds params - (*val _ = printout out ("Adding the nodes to the graph selecting 1 out of "^ string_of_int(length kinds) ^" possible kinds..\n")*) - val graph = add_nodes graph nodes kinds - - val num_edges = #num_edges params - (*val _ = printout out ("Instantiating " ^ string_of_int(num_edges) ^ " edges..\n")*) - val edges = if (num_edges > 0) then generate_random_edges num_edges else [] - - (*val _ = printout out ("Wiring nodes through edges..\n")*) - val graph = if (num_edges > 0) then add_edges graph nodes edges else graph - - val num_bbox = #num_bbox params - val 
max_num_nodes_per_bbox = #max_num_nodes_per_bbox params - (*val _ = printout out ("Adding " ^ string_of_int(num_bbox) ^ " bbox each containing at most " ^ string_of_int(max_num_nodes_per_bbox) ^ "..\n")*) - val graph = if (num_bbox > 0) then add_bboxes graph nodes num_bbox max_num_nodes_per_bbox else graph - - val num_bound = #num_bound params - (*val _ = printout out ("Adding "^ string_of_int(num_bound) ^" boundaries..\n\n")*) - val graph = if (num_bound > 0) then add_boundaries num_bound graph nodes else graph - in - graph - end - - - fun iterate_instantiate_graph i params = - if (i=1) then (instantiate_graph params) :: [] - else (instantiate_graph params) :: (iterate_instantiate_graph (i-1) params) - - fun iterate_instantiate_graph_name i = - if (i=1) then (GraphName.mk ("g"^string_of_int(i))) :: [] - else (GraphName.mk ("g"^string_of_int(i))) :: (iterate_instantiate_graph_name (i-1)) - - - fun enumerate_graphs [] = [] - | enumerate_graphs (g::[]) = length (snd g) :: [] - | enumerate_graphs (g::gs) = length (snd g) :: (enumerate_graphs gs) - - fun sum [] = 0 - | sum (n::[]) = n - | sum (n::ns) = n + sum ns - - fun get_pretty [] = "" - | get_pretty (g::[]) = (Pretty.str_of(G.pretty g)) - | get_pretty (g::gs) = (Pretty.str_of(G.pretty g)) ^ " \n " ^ (get_pretty gs) - - - - - fun stats pattern_params target_params = - let - - (*val _ = printout out ("Generating " ^ string_of_int(#num_graphs pattern_params) ^ " graphs..\n\n")*) - val pattern_graphs = iterate_instantiate_graph (#num_graphs pattern_params) pattern_params - - (*val _ = printout out ("Generating " ^ string_of_int(#num_graphs pattern_params) ^ " graph names..\n\n")*) - val pattern_gnames = iterate_instantiate_graph_name (#num_graphs pattern_params) - - (*val pretty_graphs = get_pretty graphs*) - (*val _ = printout out pretty_graphs*) - - val tab = GraphName.NTab.empty - val tab = TD.fold GraphName.NTab.doadd pattern_gnames pattern_graphs tab - (*val _ = printout out ("\n\nBuilding the discrimination tree..\n\n")*) - val tree = TD.mk tab - (*val _ = printout out (TD.printout (tree::[]))*) - - (*val _ = printout out ("Generating a target graph..\n\n")*) - val target_graphs = iterate_instantiate_graph (#num_graphs target_params) target_params - val target_graph = nth target_graphs 0 - - (*val _ = printout out ("Pruning the tree using the target graph..\n\n")*) - val ext_graph_by_vertex = TD.extended_pruning tree target_graph - val std_graph_by_vertex = TD.standard_pruning tree target_graph - - (*val _ = printout out ("Evaluating performances..\n")*) - val ext_matchings = enumerate_graphs ext_graph_by_vertex - val std_matchings = enumerate_graphs std_graph_by_vertex - val ext_num_matchings = sum ext_matchings - val std_num_matchings = sum std_matchings - (*val _ = printout out ("Number of times to exec matching alg WITHOUT dnets: " ^ string_of_int((#num_graphs pattern_params)*(#num_nodes target_params)) ^ "\n") - val _ = printout out ("Number of times to exec matching alg WITH dnets: " ^ string_of_int(num_matchings) ^ "\n\n")*) - in - (ext_num_matchings,std_num_matchings) - end - - fun iterate_stats i pattern_params target_params = - if (i = 1) then (stats pattern_params target_params) :: [] - else (stats pattern_params target_params) :: (iterate_stats (i-1) pattern_params target_params) - - - - fun compute_num_svr [] m = 0.0 - | compute_num_svr (x::[]) m = (x-m) * (x-m) - | compute_num_svr (x::xs) m = ((x-m) * (x-m)) + (compute_num_svr xs m) - - fun toReal [] = [] - | toReal (x::[]) = Real.fromInt(x)::[] - | toReal (x::xs) = 
Real.fromInt(x)::(toReal xs) - - (*fun stat_test_ext_alg i pattern_params target_params = - let - val matchings = iterate_stats_ext_alg i pattern_params target_params - - val matchings_without_alg = (#num_graphs pattern_params)*(#num_nodes target_params) - - val num_matchings = sum matchings - val avg_matchings = Real.fromInt(num_matchings) / Real.fromInt(i) - - val num_svr = compute_num_svr (toReal matchings) avg_matchings - val svr_matchings = num_svr / Real.fromInt(i) - - val _ = printout out ("\n\n\n\n") - - val _ = printout out ("***************************************************************************\n") - - val _ = printout out ("I executed " ^ string_of_int(i) ^ " iterations with these parameters: \n\n") - - val _ = printout out ("PATTERNS: \n") - val _ = printout out ("Graphs: " ^ string_of_int(#num_graphs pattern_params)^ "\n") - val _ = printout out ("Nodes: " ^ string_of_int(#num_nodes pattern_params)^ "\n") - val _ = printout out ("Edges: " ^ string_of_int(#num_edges pattern_params)^ "\n") - val _ = printout out ("Kinds: " ^ string_of_int(length (#kinds pattern_params)) ^ "\n") - val _ = printout out ("BBoxes: " ^ string_of_int(#num_bbox pattern_params)^ "\n") - val _ = printout out ("Max nodes per bbox: " ^ string_of_int(#max_num_nodes_per_bbox pattern_params)^ "\n") - val _ = printout out ("Boundaries: " ^ string_of_int(#num_bound pattern_params)^ "\n") - - val _ = printout out ("\n") - - val _ = printout out ("TARGETS: \n") - val _ = printout out ("Graphs: " ^ string_of_int(#num_graphs target_params)^ "\n") - val _ = printout out ("Nodes: " ^ string_of_int(#num_nodes target_params)^ "\n") - val _ = printout out ("Edges: " ^ string_of_int(#num_edges target_params)^ "\n") - val _ = printout out ("Kinds: " ^ string_of_int(length (#kinds target_params)) ^ "\n") - val _ = printout out ("BBoxes: " ^ string_of_int(#num_bbox target_params)^ "\n") - val _ = printout out ("Max nodes per bbox: " ^ string_of_int(#max_num_nodes_per_bbox target_params)^ "\n") - val _ = printout out ("Boundaries: " ^ string_of_int(#num_bound target_params)^ "\n") - - val _ = printout out ("\n\n\n") - - val _ = printout out ("STAT RESULTS: \n") - val _ = printout out ("Normally I would need to run: " ^ string_of_int(matchings_without_alg)^ " iterations of the matching alg.\n") - val _ = printout out ("Now I need to run only: " ^ Real.toString(avg_matchings)^ "(avg) " ^ Real.toString(svr_matchings) ^ "(var) iterations of the matching alg.\n") - - val _ = printout out ("***************************************************************************\n") - in - 1 - end*) - - - fun stat_test i pattern_params target_params simul_number = - let - val timestamp = Time.toString (Time.now()) - val out = TextIO.openOut ("output"^timestamp^".txt") - - val matchings = iterate_stats i pattern_params target_params - - val ext_matchings = fst (split_list matchings) - val std_matchings = snd (split_list matchings) - - val matchings_without_alg = (#num_graphs pattern_params)*(#num_nodes target_params) - - val ext_num_matchings = sum ext_matchings - val ext_avg_matchings = Real.fromInt(ext_num_matchings) / Real.fromInt(i) - val ext_num_sample_var = compute_num_svr (toReal ext_matchings) ext_avg_matchings - val ext_sample_var_matchings = ext_num_sample_var / Real.fromInt(i) - - val std_num_matchings = sum std_matchings - val std_avg_matchings = Real.fromInt(std_num_matchings) / Real.fromInt(i) - val std_num_sample_var = compute_num_svr (toReal ext_matchings) std_avg_matchings - val std_sample_var_matchings = std_num_sample_var 
/ Real.fromInt(i) - - val _ = printout out ("\n\n\n\n") - - val _ = printout out ("************************************************************************************\n") - - val _ = printout out ("EXTENDED vs STANDARD vs NO-ALGORITHM!!\n\n") - - val _ = printout out ("I executed " ^ string_of_int(i) ^ " iterations with these parameters: \n\n") - - val _ = printout out ("PATTERNS: \n") - val _ = printout out ("Graphs: " ^ string_of_int(#num_graphs pattern_params)^ "\n") - val _ = printout out ("Nodes: " ^ string_of_int(#num_nodes pattern_params)^ "\n") - val _ = printout out ("Edges: " ^ string_of_int(#num_edges pattern_params)^ "\n") - val _ = printout out ("Kinds: " ^ string_of_int(length (#kinds pattern_params)) ^ "\n") - val _ = printout out ("BBoxes: " ^ string_of_int(#num_bbox pattern_params)^ "\n") - val _ = printout out ("Max nodes per bbox: " ^ string_of_int(#max_num_nodes_per_bbox pattern_params)^ "\n") - val _ = printout out ("Boundaries: " ^ string_of_int(#num_bound pattern_params)^ "\n") - - val _ = printout out ("\n") - - val _ = printout out ("TARGETS: \n") - val _ = printout out ("Graphs: " ^ string_of_int(#num_graphs target_params)^ "\n") - val _ = printout out ("Nodes: " ^ string_of_int(#num_nodes target_params)^ "\n") - val _ = printout out ("Edges: " ^ string_of_int(#num_edges target_params)^ "\n") - val _ = printout out ("Kinds: " ^ string_of_int(length (#kinds target_params)) ^ "\n") - val _ = printout out ("BBoxes: " ^ string_of_int(#num_bbox target_params)^ "\n") - val _ = printout out ("Max nodes per bbox: " ^ string_of_int(#max_num_nodes_per_bbox target_params)^ "\n") - val _ = printout out ("Boundaries: " ^ string_of_int(#num_bound target_params)^ "\n") - - val _ = printout out ("\n\n\n") - - val _ = printout out ("STAT RESULTS: \n") - val _ = printout out ("Normally I would need to run: " ^ string_of_int(matchings_without_alg)^ " iterations of the matching alg.\n") - val _ = printout out ("With the standard algorithm I would need to run: " ^ Real.toString(std_avg_matchings)^ "(avg) " ^ Real.toString(std_sample_var_matchings) ^ "(var) iterations of the matching alg.\n") - val _ = printout out ("Now with the extended algorithm I need to run only: " ^ Real.toString(ext_avg_matchings)^ "(avg) " ^ Real.toString(ext_sample_var_matchings) ^ "(var) iterations of the matching alg.\n") - - val _ = printout out ("************************************************************************************\n") - - val _ = TextIO.closeOut out - - val _ = savedata (string_of_int(simul_number)) (string_of_int(matchings_without_alg)) (Real.toString(std_avg_matchings)) (Real.toString(ext_avg_matchings)) - in - 1 - end - - - - - - fun simul i v = - let - val pattern_params = {kinds = k1 :: k2 :: k3 :: [], - num_graphs = 100, - num_nodes = 7, - num_edges = 7, - num_bbox = v, - max_num_nodes_per_bbox = 1, - num_bound = 0 } - in - stat_test iterations pattern_params target_params i - end - - - fun iterate_simul [] [] = 0 - | iterate_simul (i::[]) (v::[]) = (simul i v) + (iterate_simul [] []) - | iterate_simul (i::is) (v::vs) = (simul i v) + (iterate_simul is vs) - | iterate_simul _ _ = raise ERROR "unequal length lists" - - - - val indexes = 0 :: 1 :: 2 :: 3 :: 4 :: 5 :: 6 :: [] - val variables = 0 :: 1 :: 2 :: 3 :: 4 :: 5 :: 6 ::[] - val signal = iterate_simul indexes variables - - - val _ = TextIO.closeOut data_no_alg - val _ = TextIO.closeOut data_std_alg - val _ = TextIO.closeOut data_ext_alg -in val _ = (); end; - diff --git a/oldcore/dnets/test.ML b/oldcore/dnets/test.ML deleted file 
mode 100644 index 4089fbd0..00000000 --- a/oldcore/dnets/test.ML +++ /dev/null @@ -1,1225 +0,0 @@ -local - structure Tools = Test_Bang_Graph_Tools(Test_Bang_Graph); - open Tools; - - - structure TD = Top_DNet(G) - structure CL = TD.CL - structure C = CL.C - structure L = C.L - - - val b = L.boundary; - val k1 = (vexpr1 "0") - val k2 = (vexpr1 "1") - val k3 = (vexpr1 "a") - - - -(******************) -(* BUILDING TESTS *) -(******************) - - (* Building the graphs *) - val pattern1 = G.empty - - val p1_v1 = V.mk "p1_v1" - val p1_v2 = V.mk "p1_v2" - val p1_v3 = V.mk "p1_v3" - val p1_v4 = V.mk "p1_v4" - val p1_v5 = V.mk "p1_v5" - - val p1_e1 = E.mk "p1_e1" - val p1_e2 = E.mk "p1_e2" - val p1_e3 = E.mk "p1_e3" - val p1_e4 = E.mk "p1_e4" - - val pattern1 = pattern1 |> G.add_named_vertex p1_v1 k1 - |> G.add_named_vertex p1_v2 k2 - |> G.add_named_vertex p1_v3 k2 - |> G.add_named_vertex p1_v4 k1 - |> G.add_named_vertex p1_v5 k2 - |> G.add_named_edge p1_e1 (Directed,eunit1) p1_v1 p1_v2 - |> G.add_named_edge p1_e2 (Directed,eunit1) p1_v1 p1_v3 - |> G.add_named_edge p1_e3 (Directed,eunit1) p1_v2 p1_v3 - |> G.add_named_edge p1_e4 (Directed,eunit1) p1_v5 p1_v4 - - val (bb1,pattern1) = pattern1 |> G.add_bbox - val pattern1 = pattern1 |> G.add_to_bbox_anon bb1 (V.NSet.single p1_v5) - - - - val pattern2 = G.empty - - val p2_v1 = V.mk "p2_v1" - val p2_v2 = V.mk "p2_v2" - val p2_v3 = V.mk "p2_v3" - val p2_v4 = V.mk "p2_v4" - - val p2_e1 = E.mk "p2_e1" - val p2_e2 = E.mk "p2_e2" - val p2_e3 = E.mk "p2_e3" - val p2_e4 = E.mk "p2_e4" - - val pattern2 = pattern2 |> G.add_named_vertex p2_v1 k2 - |> G.add_named_vertex p2_v2 k2 - |> G.add_named_vertex p2_v3 k2 - |> G.add_named_vertex p2_v4 k1 - |> G.add_named_edge p2_e1 (Directed,eunit1) p2_v1 p2_v2 - |> G.add_named_edge p2_e2 (Directed,eunit1) p2_v1 p2_v3 - |> G.add_named_edge p2_e3 (Directed,eunit1) p2_v1 p2_v4 - |> G.add_named_edge p2_e4 (Directed,eunit1) p2_v4 p2_v3 - - val (bb2,pattern2) = pattern2 |> G.add_bbox - val (bb3,pattern2) = pattern2 |> G.add_bbox - val pattern2 = pattern2 |> G.add_to_bbox_anon bb2 (V.NSet.single p2_v1) - |> G.add_to_bbox_anon bb3 (V.NSet.single p2_v4) - - - val pattern3 = G.empty - - val p3_v1 = V.mk "p3_v1" - val p3_v2 = V.mk "p3_v2" - val p3_v3 = V.mk "p3_v3" - val p3_v4 = V.mk "p3_v4" - - val p3_e1 = E.mk "p3_e1" - val p3_e2 = E.mk "p3_e2" - val p3_e3 = E.mk "p3_e3" - val p3_e4 = E.mk "p3_e4" - - val pattern3 = pattern3 |> G.add_named_vertex p3_v1 k1 - |> G.add_named_vertex p3_v2 k1 - |> G.add_named_vertex p3_v3 k2 - |> G.add_named_vertex p3_v4 k2 - |> G.add_named_edge p3_e1 (Directed,eunit1) p3_v1 p3_v2 - |> G.add_named_edge p3_e2 (Directed,eunit1) p3_v1 p3_v3 - |> G.add_named_edge p3_e3 (Directed,eunit1) p3_v2 p3_v3 - |> G.add_named_edge p3_e4 (Directed,eunit1) p3_v3 p3_v4 - - - val pattern4 = G.empty - - val p4_v1 = V.mk "p4_v1" - val p4_v2 = V.mk "p4_v2" - val p4_v3 = V.mk "p4_v3" - val p4_v4 = V.mk "p4_v4" - - val p4_e1 = E.mk "l1" - val p4_e2 = E.mk "l2" - val p4_e3 = E.mk "l3" - val p4_e4 = E.mk "l4" - - val pattern4 = pattern4 |> G.add_named_vertex p4_v1 k1 - |> G.add_named_vertex p4_v2 k1 - |> G.add_named_vertex p4_v3 k2 - |> G.add_named_vertex p4_v4 b - |> G.add_named_edge p4_e1 (Directed,eunit1) p4_v1 p4_v2 - |> G.add_named_edge p4_e2 (Directed,eunit1) p4_v1 p4_v3 - |> G.add_named_edge p4_e3 (Directed,eunit1) p4_v2 p4_v3 - |> G.add_named_edge p4_e4 (Directed,eunit1) p4_v3 p4_v4 - - - - val pattern5 = G.empty - - val p5_v1 = V.mk "p5_v1" - val p5_v2 = V.mk "p5_v2" - val p5_v3 = V.mk "p5_v3" - val p5_v4 = V.mk 
"p5_v4" - - val p5_e1 = E.mk "p5_e1" - val p5_e2 = E.mk "p5_e2" - val p5_e3 = E.mk "p5_e3" - val p5_e4 = E.mk "p5_e4" - - val pattern5 = pattern5 |> G.add_named_vertex p5_v1 k2 - |> G.add_named_vertex p5_v2 b - |> G.add_named_vertex p5_v3 b - |> G.add_named_vertex p5_v4 k1 - |> G.add_named_edge p5_e1 (Directed,eunit1) p5_v1 p5_v2 - |> G.add_named_edge p5_e2 (Directed,eunit1) p5_v3 p5_v1 - |> G.add_named_edge p5_e3 (Directed,eunit1) p5_v4 p5_v1 - - - val pattern6 = G.empty - - val p6_v1 = V.mk "p6_v1" - val p6_v2 = V.mk "p6_v2" - val p6_v3 = V.mk "p6_v3" - val p6_v4 = V.mk "p6_v4" - - val p6_e1 = E.mk "p6_e1" - val p6_e2 = E.mk "p6_e2" - val p6_e3 = E.mk "p6_e3" - val p6_e4 = E.mk "p6_e4" - - val pattern6 = pattern6 |> G.add_named_vertex p6_v1 k2 - |> G.add_named_vertex p6_v2 b - |> G.add_named_vertex p6_v3 k1 - |> G.add_named_vertex p6_v4 k1 - |> G.add_named_edge p6_e1 (Directed,eunit1) p6_v1 p6_v2 - |> G.add_named_edge p6_e2 (Directed,eunit1) p6_v1 p6_v3 - |> G.add_named_edge p6_e3 (Directed,eunit1) p6_v3 p6_v4 - - - - - val pattern7 = G.empty - - val p7_v1 = V.mk "p7_v1" - val p7_v2 = V.mk "p7_v2" - val p7_v3 = V.mk "p7_v3" - val p7_v4 = V.mk "p7_v4" - - val p7_e1 = E.mk "p7_e1" - val p7_e2 = E.mk "p7_e2" - val p7_e3 = E.mk "p7_e3" - val p7_e4 = E.mk "p7_e4" - val p7_e5 = E.mk "p7_e5" - - val pattern7 = pattern7 |> G.add_named_vertex p7_v1 k1 - |> G.add_named_vertex p7_v2 k1 - |> G.add_named_vertex p7_v3 k1 - |> G.add_named_vertex p7_v4 k1 - |> G.add_named_edge p7_e1 (Directed,eunit1) p7_v1 p7_v2 - |> G.add_named_edge p7_e2 (Directed,eunit1) p7_v2 p7_v1 - |> G.add_named_edge p7_e3 (Directed,eunit1) p7_v1 p7_v3 - |> G.add_named_edge p7_e4 (Directed,eunit1) p7_v2 p7_v4 - |> G.add_named_edge p7_e5 (Directed,eunit1) p7_v3 p7_v4 - - - - (* Building the contour lists *) - val cl1 = CL.mk pattern1 - val c1_1 = nth cl1 0 - val c1_2 = nth cl1 1 - val lit1_1 = nth c1_1 0 - val lit1_2 = nth c1_2 0 - val lit1_3 = nth c1_2 1 - val lit1_4 = L.mk pattern1 p1_v4 - val lit1_5 = L.mk pattern1 p1_v5 - - - val cl2 = CL.mk pattern2 - val c2_1 = nth cl2 0 - val c2_2 = nth cl2 1 - val c2_3 = nth cl2 2 - val lit2_1 = nth c2_2 0 - val lit2_2 = nth c2_1 0 - val lit2_3 = nth c2_3 0 - val lit2_4 = nth c2_3 1 - - - val cl3 = CL.mk pattern3 - val c3_1 = nth cl3 0 - val c3_2 = nth cl3 1 - val c3_3 = nth cl3 2 - val lit3_1 = nth c3_1 0 - val lit3_2 = nth c3_2 0 - val lit3_3 = nth c3_2 1 - val lit3_4 = nth c3_3 0 - - - val cl4 = CL.mk pattern4 - val c4_1 = nth cl4 0 - val c4_2 = nth cl4 1 - val c4_3 = nth cl4 2 - val lit4_1 = nth c4_1 0 - val lit4_2 = nth c4_2 0 - val lit4_3 = nth c4_2 1 - val lit4_4 = nth c4_3 0 - - - val cl5 = CL.mk pattern5 - val c5_1 = nth cl5 0 - val c5_2 = nth cl5 1 - val lit5_1 = nth c5_1 0 - val lit5_2 = nth c5_2 0 - val lit5_3 = nth c5_2 1 - val lit5_4 = nth c5_2 2 - - - val cl6 = CL.mk pattern6 - val c6_1 = nth cl6 0 - val c6_2 = nth cl6 1 - val c6_3 = nth cl6 2 - val lit6_1 = nth c6_1 0 - val lit6_2 = nth c6_2 0 - val lit6_3 = nth c6_2 1 - val lit6_4 = nth c6_3 0 - - - val cl7 = CL.mk pattern7 - val c7_1 = nth cl7 0 - val c7_2 = nth cl7 1 - val c7_3 = nth cl7 2 - val lit7_1 = nth c7_1 0 - val lit7_2 = nth c7_2 0 - val lit7_3 = nth c7_2 1 - val lit7_4 = nth c7_3 0 - - - (* TESTING BASIC OPERATIONS *) - - (* Assertion functions *) - fun assert_subtraction(c1,c2,c3) = - if C.eq(C.subtract_eq_contour(c1,c2),c3) then true - else raise ERROR ("Subtraction is wrong: expected " ^ C.printout(c3) ^ " -- computed: " ^ C.printout(C.subtract_eq_contour(c1,c2))) - - fun assert_intersection(c1,c2,c3) = - if 
C.eq(C.intersect_eq_contours(c1,c2),c3) then true - else raise ERROR ("Intersection is wrong: expected " ^ C.printout(c3) ^ " -- computed: " ^ C.printout(C.intersect_eq_contours(c1,c2))) - - fun assert_complement(c1,c2,c3) = - if C.eq(C.complement_eq_contour(c1,c2),c3) then true - else raise ERROR ("Complement is wrong: expected " ^ C.printout(c3) ^ " -- computed: " ^ C.printout(C.complement_eq_contour(c1,c2))) - - fun assert_duplicate_removal(c1,c2) = - if C.eq(C.remove_eq_duplicate(c1),c2) then true - else raise ERROR ("Duplicate removal is wrong: expected " ^ C.printout(c2) ^ " -- computed: " ^ C.printout(C.remove_eq_duplicate(c1))) - - (* Tests *) - val lit_a = L.build((V.mk "a"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_none) - val lit_b = L.build((V.mk "b"), k2, L.mult_none, 1, L.mult_none, 0, L.mult_none) - val lit_c = L.build((V.mk "c"), k1, L.mult_star, 0, L.mult_none, 1, L.mult_none) - val lit_d = L.build((V.mk "d"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_star) - val lit_e = L.build((V.mk "e"), G.WVert, L.mult_none, 1, L.mult_none, 0, L.mult_none) - val lit_f = L.build((V.mk "f"), k2, L.mult_star, 1, L.mult_none, 0, L.mult_none) - - val cont_a = lit_b :: lit_a :: [] - val cont_b = lit_b :: lit_c :: [] - val cont_c = lit_c :: [] - val cont_d = lit_a :: lit_e :: lit_b :: lit_f :: [] - - val sub_d_a = lit_e :: lit_f :: [] - val sub_b_c = lit_b :: [] - val sub_a_a = [] - val _ = Testing.test "Checking subtraction.." assert_subtraction (cont_d,cont_a,sub_d_a) - val _ = Testing.test "Checking subtraction.." assert_subtraction (cont_b,cont_c,sub_b_c) - val _ = Testing.test "Checking subtraction.." assert_subtraction (cont_a,cont_a,sub_a_a) - - val inters_a_b = lit_b :: [] - val inters_a_c = [] - val inters_b_c = lit_c :: [] - val inters_a_d = lit_b :: lit_a :: [] - val _ = Testing.test "Checking intersection.." assert_intersection (cont_a,cont_b,inters_a_b) - val _ = Testing.test "Checking intersection.." assert_intersection (cont_a,cont_c,inters_a_c) - val _ = Testing.test "Checking intersection.." assert_intersection (cont_b,cont_c,inters_b_c) - val _ = Testing.test "Checking intersection.." assert_intersection (cont_a,cont_d,inters_a_d) - - val compl_a_b = lit_c :: [] - val compl_a_c = lit_c :: [] - val compl_b_c = [] - val compl_a_d = lit_e :: lit_f :: [] - val compl_a_d2 = lit_f :: lit_e :: [] - val _ = Testing.test "Checking complement.." assert_complement (cont_a,cont_b,compl_a_b) - val _ = Testing.test "Checking complement.." assert_complement (cont_a,cont_c,compl_a_c) - val _ = Testing.test "Checking complement.." assert_complement (cont_b,cont_c,compl_b_c) - val _ = Testing.test "Checking complement.." assert_complement (cont_a,cont_d,compl_a_d) - val _ = Testing.test "Checking complement.." assert_complement (cont_a,cont_d,compl_a_d2) - - val cont_aa = lit_b :: lit_a :: lit_a :: lit_a :: lit_b :: [] - val cont_bb = lit_c :: lit_b :: [] - val cont_cc = lit_c :: lit_c :: [] - val _ = Testing.test "Checking duplicate removal.." assert_duplicate_removal (cont_aa,cont_a) - val _ = Testing.test "Checking duplicate removal.." assert_duplicate_removal (cont_bb,cont_b) - val _ = Testing.test "Checking duplicate removal.." 
assert_duplicate_removal (cont_cc,cont_c) - - - - - (* TESTING LITERALS *) - - (* Assertion functions *) - fun assert_name_eq(a:V.name,b:V.name) = - if V.name_eq(a,b) then true - else raise ERROR ("Literal names do not match: " ^ V.string_of_name(a) ^ " -- " ^ V.string_of_name(b)) - - fun assert_kind_eq(a:G.vdata,b:G.vdata) = - if G.vdata_eq(a,b) then true - else raise ERROR ("Literal kinds do not match!") - - fun assert_mult_eq(a:L.multiplicity,b:L.multiplicity,s) = - if L.mult_eq(a,b) then true - else raise ERROR ("Literal " ^ s ^ "do not match!") - - fun assert_int_eq(a,b,s) = - if (a=b) then true - else raise ERROR ("Literal " ^ s ^ "do not match!") - - fun assert_lit_eq(l1,l2) = - if L.eq(l1,l2) then true - else raise ERROR ("Literals are not equal") - - fun assert_lit_equiv(l1,l2) = - if L.equiv(l1,l2) then true - else raise ERROR ("Literals are not equivalent") - - fun assert_lit_match(l1,l2) = - if L.match(l1,l2) then true - else raise ERROR ("Literals do not match") - - (* Tests *) - val name1 = L.get_name lit1_1 - val name2 = L.get_name lit1_2 - val _ = Testing.test "Checking literal name.." assert_name_eq (name1,p1_v1) - val _ = Testing.test "Checking literal name.." assert_name_eq (name2,p1_v2) - - val kind1 = L.get_kind lit1_1 - val kind2 = L.get_kind lit1_2 - val _ = Testing.test "Checking literal kind.." assert_kind_eq (kind1,k1) - val _ = Testing.test "Checking literal kind.." assert_kind_eq (kind2,k2) - - val k_m1 = L.get_kind_mult lit1_1 - val k_m2 = L.get_kind_mult lit1_2 - val _ = Testing.test "Checking literal kind-mult.." assert_mult_eq (k_m1,L.mult_none,"kind-mult") - val _ = Testing.test "Checking literal kind-mult.." assert_mult_eq (k_m2,L.mult_none,"kind-mult") - - val i_a1 = L.get_input_arity lit1_1 - val i_a2 = L.get_input_arity lit1_2 - val _ = Testing.test "Checking literal input-arity.." assert_int_eq (i_a1,0,"input-arity") - val _ = Testing.test "Checking literal input-arity.." assert_int_eq (i_a2,1,"input-arity") - - val i_m1 = L.get_input_mult lit1_1 - val i_m2 = L.get_input_mult lit1_2 - val _ = Testing.test "Checking literal input-mult.." assert_mult_eq (i_m1,L.mult_none,"input-mult") - val _ = Testing.test "Checking literal input-mult.." assert_mult_eq (i_m2,L.mult_none,"input-mult") - - val o_a1 = L.get_output_arity lit1_1 - val o_a2 = L.get_output_arity lit1_2 - val _ = Testing.test "Checking literal output-arity.." assert_int_eq (o_a1,2,"output-arity") - val _ = Testing.test "Checking literal output-arity.." assert_int_eq (o_a2,1,"output-arity") - - val o_m1 = L.get_output_mult lit1_1 - val o_m2 = L.get_output_mult lit1_2 - val _ = Testing.test "Checking literal output-mult.." assert_mult_eq (o_m1,L.mult_none,"output-mult") - val _ = Testing.test "Checking literal output-mult.." assert_mult_eq (o_m2,L.mult_none,"output-mult") - - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit2_1, L.build ( (V.mk "p2_v1"), k2, L.mult_star, 0, L.mult_none, 3, L.mult_star) ) - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit2_2, L.build ( (V.mk "p2_v2"), k2, L.mult_none, 1, L.mult_star, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit2_3, L.build ( (V.mk "p2_v3"), k2, L.mult_qm, 2, L.mult_star, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit2_4, L.build ( (V.mk "p2_v4"), k1, L.mult_star, 1, L.mult_star, 1, L.mult_none) ) - - val _ = Testing.test "Checking literal equivalence.." 
assert_lit_equiv (lit2_1, L.build ( (V.mk "p2_v1"), k2, L.mult_star, 0, L.mult_none, 2, L.mult_star) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit2_2, L.build ( (V.mk "p2_v2"), k2, L.mult_none, 0, L.mult_star, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit2_3, L.build ( (V.mk "p2_v3"), k2, L.mult_qm, 0, L.mult_star, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit2_4, L.build ( (V.mk "p2_v4"), k1, L.mult_star, 0, L.mult_star, 1, L.mult_none) ) - - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit4_1, L.build ( (V.mk "p4_v1"), k2, L.mult_none, 0, L.mult_none, 3, L.mult_none) ) - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit4_2, L.build ( (V.mk "p4_v2"), k2, L.mult_none, 1, L.mult_none, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit4_3, L.build ( (V.mk "p4_v3"), k1, L.mult_none, 2, L.mult_none, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equality.." assert_lit_eq (lit4_4, L.build ( (V.mk "p4_v4"), b, L.mult_none, 1, L.mult_none, 1, L.mult_none) ) - - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit4_1, L.build ( (V.mk "p4_v1"), k1, L.mult_none, 0, L.mult_none, 2, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit4_2, L.build ( (V.mk "p4_v2"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit4_3, L.build ( (V.mk "p4_v3"), k2, L.mult_none, 2, L.mult_none, 1, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit4_4, L.build ( (V.mk "p4_v4"), b, L.mult_none, 1, L.mult_none, 0, L.mult_none) ) - - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit5_1, L.build ( (V.mk "p5_v1"), k2, L.mult_none, 2, L.mult_none, 1, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit5_2, L.build ( (V.mk "p5_v2"), b, L.mult_star, 1, L.mult_none, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit5_3, L.build ( (V.mk "p5_v3"), b, L.mult_star, 0, L.mult_none, 1, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit5_4, L.build ( (V.mk "p5_v4"), k1, L.mult_none, 0, L.mult_none, 1, L.mult_none) ) - - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit6_1, L.build ( (V.mk "p6_v1"), k2, L.mult_none, 0, L.mult_none, 2, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit6_2, L.build ( (V.mk "p6_v2"), b, L.mult_none, 1, L.mult_none, 0, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit6_3, L.build ( (V.mk "p6_v3"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit6_4, L.build ( (V.mk "p6_v4"), k1, L.mult_none, 1, L.mult_none, 0, L.mult_none) ) - - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit7_1, L.build ( (V.mk "p7_v1"), k1, L.mult_none, 1, L.mult_none, 2, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit7_2, L.build ( (V.mk "p7_v2"), k1, L.mult_none, 1, L.mult_none, 2, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." 
assert_lit_equiv (lit7_3, L.build ( (V.mk "p7_v3"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_none) ) - val _ = Testing.test "Checking literal equivalence.." assert_lit_equiv (lit7_4, L.build ( (V.mk "p7_v4"), k1, L.mult_none, 2, L.mult_none, 0, L.mult_none) ) - - - (* TESTING CONTOURS *) - - (* Assertion functions *) - fun assert_contour_length(i,c) = - if (length c = i) then true - else raise ERROR ("Error in contour length " ^ CL.printout(c)) - - fun is_one(x,[]) = false - | is_one(x,c::[]) = if C.equiv(x,c) then true else false - | is_one(x,c::cs) = if C.equiv(x,c) then true else is_one(x,cs) - - fun assert_tgt_fn(c,cc) = - if (is_one(c,cc)) then true - else raise ERROR ("Error in target function") - - fun assert_contour_length(i,c) = - if (length c = i) then true - else raise ERROR ("Error in contour length") - - fun assert_is_eq_contained(l,c) = - if (C.is_eq_literal_contained(l,c)) then true - else raise ERROR ("Error in the elements of the contour") - - fun assert_is_eq_not_contained(l,c) = - if (C.is_eq_literal_contained(l,c)) then raise ERROR ("Error in the elements of the contour") - else true - - fun assert_contour_eq(c1,c2) = - if C.eq(c1,c2) then true - else raise ERROR ("Contours are not equal") - - fun assert_contour_equiv(c1,c2) = - if C.equiv(c1,c2) then true - else raise ERROR ("Contours are not equivalent") - - fun assert_matching_contour_containing(c1,c2) = - if C.is_matching_contour_contained(c1,c2) then true - else raise ERROR ("Error in contour list") - - fun assert_strong_compatibility(c1,c2) = - if C.check_strong_compatibility(c1,c2) then true - else raise ERROR ("Contours are not strongly compatible") - - fun assert_no_strong_compatibility(c1,c2) = - if C.check_strong_compatibility(c1,c2) then raise ERROR ("Contours are strongly compatible") - else true - - fun assert_weak_compatibility(c1,c2) = - if C.check_weak_compatibility(c1,c2) then true - else raise ERROR ("Contours are not weakly compatible") - - fun assert_no_weak_compatibility(c1,c2) = - if C.check_weak_compatibility(c1,c2) then raise ERROR ("Contours are weakly compatible") - else true - - - (* Tests *) - val e = C.empty - val trg1 = C.target_function pattern1 - val ok1 = C.add_literal e lit1_1 - val ok2 = C.add_literal e lit1_2 - val ok3 = C.add_literal e lit1_3 - val ok4 = C.add_literal e lit1_4 - val ok1list = ok1::ok2::ok3::ok4::[] - - val trg2 = C.target_function pattern2 - val ok2 = C.add_literal e lit2_2 - val ok3 = C.add_literal e lit2_3 - val ok2list = ok2::ok3::[] - - val _ = Testing.test "Checking target function.." assert_tgt_fn (trg1,ok1list) - val _ = Testing.test "Checking target function.." assert_tgt_fn (trg2,ok2list) - - - val c1 = C.mk pattern1 trg1 - val _ = Testing.test "Checking number of literals.." assert_contour_length (2,c1) - val _ = Testing.test "Checking element in contour.." assert_is_eq_contained (lit1_2,c1) - val _ = Testing.test "Checking element in contour.." assert_is_eq_contained (lit1_3,c1) - val _ = Testing.test "Checking element in contour.." assert_is_eq_not_contained (lit1_4,c1) - val _ = Testing.test "Checking element in contour.." assert_is_eq_not_contained (lit1_5,c1) - - val c2 = C.mk pattern1 (lit1_2::lit1_3::[]) - val _ = Testing.test "Checking number of literals.." 
assert_contour_length (1,c2) - - - val lit_1 = L.build((V.mk "a"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_none) - val lit_2 = L.build((V.mk "b"), k2, L.mult_none, 1, L.mult_none, 0, L.mult_none) - val target = lit_1 :: lit_2 :: lit_2 :: [] - - val lit_3 = L.build((V.mk "c"), k1, L.mult_star, 0, L.mult_none, 1, L.mult_none) - val pattern_1 = lit_1 :: lit_2 :: lit_3 :: lit_2 :: [] - val lit_4 = L.build((V.mk "d"), k1, L.mult_none, 1, L.mult_none, 1, L.mult_star) - val pattern_2 = lit_2 :: lit_4 :: lit_2 :: [] - val lit_5 = L.build((V.mk "e"), G.WVert, L.mult_none, 1, L.mult_none, 0, L.mult_none) - val lit_6 = L.build((V.mk "f"), k2, L.mult_star, 1, L.mult_none, 0, L.mult_none) - val pattern_3 = lit_5 :: lit_6 :: [] - - val pattern_2_concrete = C.get_contour_mult_none(pattern_2) - val pattern_2_abstract = C.get_contour_mult_star_or_qm(pattern_2) - val pattern_3_concrete = C.get_contour_mult_none(pattern_3) - val pattern_3_abstract = C.get_contour_mult_star_or_qm(pattern_3) - val _ = Testing.test "Checking abstract and concrete contour.." assert_contour_equiv(pattern_2_concrete, pattern_2) - val _ = Testing.test "Checking abstract and concrete contour.." assert_contour_equiv(pattern_2_abstract, C.empty) - val _ = Testing.test "Checking abstract and concrete contour.." assert_contour_equiv(pattern_3_concrete, (lit_5::[])) - val _ = Testing.test "Checking abstract and concrete contour.." assert_contour_equiv(pattern_3_abstract, (lit_6::[])) - - val _ = Testing.test "Checking containment.." assert_matching_contour_containing(pattern_2_concrete,target) - val _ = Testing.test "Checking containment.." assert_matching_contour_containing((lit_1 :: lit_2 :: lit_2 :: []),target) - - val _ = Testing.test "Checking strong compatibility.." assert_strong_compatibility(target,pattern_1) - val _ = Testing.test "Checking strong compatibility.." assert_strong_compatibility(target,pattern_2) - val _ = Testing.test "Checking strong compatibility.." assert_strong_compatibility(target,pattern_3) - - val lit_7 = L.build((V.mk "g"), k2, L.mult_qm, 1, L.mult_none, 0, L.mult_none) - val pattern_4 = lit_5 :: lit_7 :: [] - val lit_8 = L.build((V.mk "h"), k2, L.mult_none, 1, L.mult_star, 0, L.mult_none) - val pattern_5 = lit_8 :: [] - val _ = Testing.test "Checking strong compatibility.." assert_no_strong_compatibility(target,pattern_4) - val _ = Testing.test "Checking strong compatibility.." assert_no_strong_compatibility(target,pattern_5) - val _ = Testing.test "Checking weak compatibility.." assert_weak_compatibility(target,pattern_4) - val _ = Testing.test "Checking weak compatibility.." assert_weak_compatibility(target,pattern_5) - - val lit_9 = L.build((V.mk "j"), k2, L.mult_qm, 1, L.mult_none, 1, L.mult_none) - val pattern_6 = lit_1 :: lit_1 :: lit_9 :: lit_6 :: [] - val pattern_7 = lit_1 :: lit_2 :: lit_8 :: lit_5 :: [] - val lit_10 = L.build((V.mk "k"), k1, L.mult_star, 1, L.mult_none, 1, L.mult_none) - val lit_11 = L.build((V.mk "l"), k2, L.mult_none, 1, L.mult_none, 1, L.mult_none) - val lit_12 = L.build((V.mk "m"), k2, L.mult_none, 2, L.mult_none, 1, L.mult_none) - val pattern_8 = lit_10 :: lit_11 :: lit_12 :: [] - val lit_13 = L.build((V.mk "n"), k1, L.mult_qm, 1, L.mult_none, 0, L.mult_none) - val pattern_9 = lit_13 :: lit_11 :: [] - val _ = Testing.test "Checking strong compatibility.." assert_no_strong_compatibility(target,pattern_6) - val _ = Testing.test "Checking strong compatibility.." assert_no_strong_compatibility(target,pattern_7) - val _ = Testing.test "Checking strong compatibility.." 
assert_no_strong_compatibility(target,pattern_8) - val _ = Testing.test "Checking strong compatibility.." assert_no_strong_compatibility(target,pattern_9) - val _ = Testing.test "Checking weak compatibility.." assert_no_weak_compatibility(target,pattern_6) - val _ = Testing.test "Checking weak compatibility.." assert_no_weak_compatibility(target,pattern_7) - val _ = Testing.test "Checking weak compatibility.." assert_no_weak_compatibility(target,pattern_8) - val _ = Testing.test "Checking weak compatibility.." assert_no_weak_compatibility(target,pattern_9) - - - - (* TESTING CONTOUR LISTS *) - - (* Assertion functions *) - fun assert_contour_list_length(i,cl) = - if (length cl = i) then true - else raise ERROR ("Error in contour list length") - - fun assert_contour_list_equiv(cl1,cl2) = - if CL.equiv(cl1,cl2) then true - else raise ERROR ("Error in contour list") - - - (* Tests *) - - val hand_cl1 = (lit1_1::[]) :: (lit1_2::lit1_3::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (2,cl1) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl1,cl1) - - val hand_cl2_1 = (lit2_2::[]) :: (lit2_1::[]) :: (lit2_4::lit2_3::[]) :: [] - val hand_cl2_2 = (lit2_2::[]) :: (lit2_1::[]) :: (lit2_3::lit2_4::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (3,cl2) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl2_1,cl2) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl2_2,cl2) - - val hand_cl3 = (lit3_1::[]) :: (lit3_2::lit3_3::[]) :: (lit3_4::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (3,cl3) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl3,cl3) - - val hand_cl4 = (lit4_1::[]) :: (lit4_2::lit4_3::[]) :: (lit4_4::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (3,cl4) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl4,cl4) - - val hand_cl5 = (lit5_1::[]) :: (lit5_2::lit5_3::lit5_4::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (2,cl5) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl5,cl5) - - val hand_cl6 = (lit6_1::[]) :: (lit6_2::lit6_3::[]) :: (lit6_4::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (3,cl6) - val _ = Testing.test "Checking the contours.." assert_contour_list_equiv (hand_cl6,cl6) - - val hand_cl7 = (lit7_1::[]) :: (lit7_2::lit7_3::[]) :: (lit7_4::[]) :: [] - val _ = Testing.test "Checking number of contours in contour list.." assert_contour_list_length (3,cl7) - val _ = Testing.test "Checking the contours.." 
assert_contour_list_equiv (hand_cl7,cl7) - - - - (* TESTING DNET *) - - (* Assertion functions *) - fun assert_graph_name_eq(n1,n2) = - if GraphName.name_eq(n1,n2) then true - else raise ERROR ("Graph names do not match") - - fun assert_children_length(i,j) = - if i=j then true - else raise ERROR ("Children number do not match") - - - (* Tests *) - val gn1 = GraphName.mk "pattern1" - val gn2 = GraphName.mk "pattern2" - val gn3 = GraphName.mk "pattern3" - val gn4 = GraphName.mk "pattern4" - val gn5 = GraphName.mk "pattern5" - val gn6 = GraphName.mk "pattern6" - val gn7 = GraphName.mk "pattern7" - val gnl = gn1 :: gn2 :: gn3 :: gn4 :: gn5 :: gn6 :: gn7 :: [] - val gl = pattern1 :: pattern2 :: pattern3 :: pattern4 :: pattern5 :: pattern6 :: pattern7 :: [] - - val tab = GraphName.NTab.empty - val tab = TD.fold GraphName.NTab.doadd gnl gl tab - - val tree = TD.mk tab - - val level_1 = tree - val node_1 = tree - val contour_1_1 = TD.get_contour(level_1) - val children_1_1 = TD.get_children(level_1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_1_1,C.empty) - val _ = Testing.test "Checking the children.." assert_children_length((length children_1_1),5) - - val level_2 = children_1_1 - val node_2_1 = nth level_2 0 - val node_2_2 = nth level_2 1 - val node_2_3 = nth level_2 2 - val node_2_4 = nth level_2 3 - val node_2_5 = nth level_2 4 - val contour_2_1 = TD.get_contour(node_2_1) - val contour_2_2 = TD.get_contour(node_2_2) - val contour_2_3 = TD.get_contour(node_2_3) - val contour_2_4 = TD.get_contour(node_2_4) - val contour_2_5 = TD.get_contour(node_2_5) - val children_2_1 = TD.get_children(node_2_1) - val children_2_2 = TD.get_children(node_2_2) - val children_2_3 = TD.get_children(node_2_3) - val children_2_4 = TD.get_children(node_2_4) - val children_2_5 = TD.get_children(node_2_5) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_1,(lit1_1::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_1,(lit3_1::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_1,(lit4_1::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_2,(lit2_2::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_3,(lit5_1::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_4,(lit6_1::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_5,(lit7_1::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_2_1),2) - val _ = Testing.test "Checking the children.." assert_children_length((length children_2_2),1) - val _ = Testing.test "Checking the children.." assert_children_length((length children_2_3),1) - val _ = Testing.test "Checking the children.." assert_children_length((length children_2_4),1) - val _ = Testing.test "Checking the children.." 
assert_children_length((length children_2_5),1) - - val level_3 = children_2_1 :: children_2_2 :: children_2_3 :: children_2_4 :: children_2_5 :: [] - val node_3_1 = nth children_2_1 0 - val node_3_2 = nth children_2_1 1 - val node_3_3 = nth children_2_2 0 - val node_3_4 = nth children_2_3 0 - val node_3_5 = nth children_2_4 0 - val node_3_6 = nth children_2_5 0 - val contour_3_1 = TD.get_contour(node_3_1) - val contour_3_2 = TD.get_contour(node_3_2) - val contour_3_3 = TD.get_contour(node_3_3) - val contour_3_4 = TD.get_contour(node_3_4) - val contour_3_5 = TD.get_contour(node_3_5) - val contour_3_6 = TD.get_contour(node_3_6) - val children_3_1 = TD.get_children(node_3_1) - val children_3_2 = TD.get_children(node_3_2) - val children_3_3 = TD.get_children(node_3_3) - val children_3_4 = TD.get_children(node_3_4) - val children_3_5 = TD.get_children(node_3_5) - val children_3_6 = TD.get_children(node_3_6) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_1,(lit1_2::lit1_3::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_2,(lit3_2::lit3_3::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_3,(lit2_1::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_4,(lit5_2::lit5_3::lit5_4::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_5,(lit6_2::lit6_3::[])) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_6,(lit7_2::lit7_3::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_1),1) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_2),2) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_3),1) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_4),1) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_5),1) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_6),1) - - val level_4 = children_3_1 :: children_3_2 :: children_3_3 :: children_3_4 :: children_3_5 :: children_3_6 :: [] - val node_4_1 = nth children_3_1 0 - val node_4_2 = nth children_3_2 0 - val node_4_3 = nth children_3_2 1 - val node_4_4 = nth children_3_3 0 - val node_4_5 = nth children_3_4 0 - val node_4_6 = nth children_3_5 0 - val node_4_7 = nth children_3_6 0 - val graph_4_1 = TD.get_graph(node_4_1) - val contour_4_2 = TD.get_contour(node_4_2) - val children_4_2 = TD.get_children(node_4_2) - val contour_4_3 = TD.get_contour(node_4_3) - val children_4_3 = TD.get_children(node_4_3) - val contour_4_4 = TD.get_contour(node_4_4) - val children_4_4 = TD.get_children(node_4_4) - val graph_4_5 = TD.get_graph(node_4_5) - val contour_4_6 = TD.get_contour(node_4_6) - val children_4_6 = TD.get_children(node_4_6) - val contour_4_7 = TD.get_contour(node_4_7) - val children_4_7 = TD.get_children(node_4_7) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_4_1, gn1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_4_2,(lit3_4::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_4_2),1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_4_3,(lit4_4::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_4_3),1) - val _ = Testing.test "Checking the contour.." 
assert_contour_equiv(contour_4_4,(lit2_4::lit2_3::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_4_4),1) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_4_5, gn5) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_4_6,(lit6_4::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_4_6),1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_4_7,(lit7_4::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_4_7),1) - - val level_5 = children_4_2 :: children_4_3 :: children_4_4 :: children_4_6 :: children_4_7 :: [] - val node_5_1 = nth children_4_2 0 - val node_5_2 = nth children_4_3 0 - val node_5_3 = nth children_4_4 0 - val node_5_4 = nth children_4_6 0 - val node_5_5 = nth children_4_7 0 - val graph_5_1 = TD.get_graph(node_5_1) - val graph_5_2 = TD.get_graph(node_5_2) - val graph_5_3 = TD.get_graph(node_5_3) - val graph_5_4 = TD.get_graph(node_5_4) - val graph_5_5 = TD.get_graph(node_5_5) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_5_2, gn4) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_5_1, gn3) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_5_3, gn2) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_5_4, gn6) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_5_5, gn7) - - - - - - -(******************) -(* PRUNING TESTS *) -(******************) - - (* Building the graphs *) - val target1 = G.empty - val t1_v1 = V.mk "t1v1" - val target1 = target1 |> G.add_named_vertex t1_v1 k1 - val t1_lit1 = L.mk target1 t1_v1 - - - - val target2 = G.empty - - val t2_v1 = V.mk "t2v1" - val t2_v2 = V.mk "t2v2" - val t2_v3 = V.mk "t2v3" - - val t2_e1 = E.mk "t2e1" - val t2_e2 = E.mk "t2e2" - val t2_e3 = E.mk "t2e3" - - val target2 = target2 |> G.add_named_vertex t2_v1 k1 - |> G.add_named_vertex t2_v2 k2 - |> G.add_named_vertex t2_v3 k2 - |> G.add_named_edge t2_e1 (Directed,eunit1) t2_v1 t2_v2 - |> G.add_named_edge t2_e2 (Directed,eunit1) t2_v1 t2_v3 - |> G.add_named_edge t2_e3 (Directed,eunit1) t2_v2 t2_v3 - - val t2_cl = CL.mk target2 - val t2_c1_1 = nth t2_cl 0 - val t2_c1_2 = nth t2_cl 1 - val t2_lit1 = nth t2_c1_1 0 - val t2_lit2 = nth t2_c1_2 0 - val t2_lit3 = nth t2_c1_2 1 - - - - val target3 = G.empty - - val t3_v1 = V.mk "t3v1" - - val target3 = target3 |> G.add_named_vertex t3_v1 k2 - - val t3_cl = CL.mk target3 - val t3_c1_1 = nth t3_cl 0 - - - - val target4 = G.empty - - val t4_v1a = V.mk "t4v1a" - val t4_v1b = V.mk "t4v1b" - val t4_v2 = V.mk "t4v2" - val t4_v3 = V.mk "t4v3" - val t4_v4 = V.mk "t4v4" - - val t4_e1a = E.mk "t4e1a" - val t4_e2a = E.mk "t4e2a" - val t4_e3a = E.mk "t4e3a" - val t4_e1b = E.mk "t4e1b" - val t4_e2b = E.mk "t4e2b" - val t4_e3b = E.mk "t4e3b" - val t4_e4 = E.mk "t4e4" - - val target4 = target4 |> G.add_named_vertex t4_v1a k2 - |> G.add_named_vertex t4_v1b k2 - |> G.add_named_vertex t4_v2 k2 - |> G.add_named_vertex t4_v3 k2 - |> G.add_named_vertex t4_v4 k1 - |> G.add_named_edge t4_e1a (Directed,eunit1) t4_v1a t4_v2 - |> G.add_named_edge t4_e2a (Directed,eunit1) t4_v1a t4_v3 - |> G.add_named_edge t4_e3a (Directed,eunit1) t4_v1a t4_v4 - |> G.add_named_edge t4_e1b (Directed,eunit1) t4_v1b t4_v2 - |> G.add_named_edge t4_e2b (Directed,eunit1) t4_v1b t4_v3 - |> G.add_named_edge t4_e3b (Directed,eunit1) t4_v1b t4_v4 - |> G.add_named_edge t4_e4 
(Directed,eunit1) t4_v4 t4_v3 - - val t4_cl = CL.mk target4 - val t4_c1_1 = nth t4_cl 0 - val t4_c1_2 = nth t4_cl 1 - val t4_c1_3 = nth t4_cl 1 - val t4_lit1_1 = nth t4_c1_1 0 - val t4_lit2_1 = nth t4_c1_2 0 - val t4_lit2_2 = nth t4_c1_2 1 - val t4_lit3_1 = nth t4_c1_3 0 - val t4_lit3_2 = nth t4_c1_3 1 - - - - - val target5 = G.empty - - val t5_v1 = V.mk "t5v1" - val t5_v2 = V.mk "t5v2" - val t5_v3 = V.mk "t5v3" - val t5_v4 = V.mk "t5v4" - val t5_v5 = V.mk "t5v5" - - val t5_e1 = E.mk "t5e1" - val t5_e2 = E.mk "t5e2" - val t5_e3 = E.mk "t5e3" - val t5_e4 = E.mk "t5e4" - val t5_e5 = E.mk "t5e5" - - val target5 = target5 |> G.add_named_vertex t5_v1 k1 - |> G.add_named_vertex t5_v2 k1 - |> G.add_named_vertex t5_v3 k2 - |> G.add_named_vertex t5_v4 k3 - |> G.add_named_vertex t5_v5 k2 - |> G.add_named_edge t5_e1 (Directed,eunit1) t5_v1 t5_v2 - |> G.add_named_edge t5_e2 (Directed,eunit1) t5_v1 t5_v3 - |> G.add_named_edge t5_e3 (Directed,eunit1) t5_v2 t5_v3 - |> G.add_named_edge t5_e4 (Directed,eunit1) t5_v3 t5_v4 - |> G.add_named_edge t5_e5 (Directed,eunit1) t5_v4 t5_v5 - - val t5_cl = CL.mk target5 - val t5_c1_1 = nth t5_cl 0 - val t5_c1_2 = nth t5_cl 1 - val t5_c1_3 = nth t5_cl 1 - val t5_c1_4 = nth t5_cl 2 - val t5_lit1 = nth t5_c1_1 0 - val t5_lit2 = nth t5_c1_2 0 - val t5_lit3 = nth t5_c1_2 1 - val t5_lit4 = nth t5_c1_3 0 - val t5_lit5 = nth t5_c1_4 0 - - - - - val target6 = G.empty - - val t6_v1 = V.mk "t6v1" - val t6_v2 = V.mk "t6v2" - val t6_v3 = V.mk "t6v3" - val t6_v4 = V.mk "t6v4" - - val t6_e1 = E.mk "t6e1" - val t6_e2 = E.mk "t6e2" - val t6_e3 = E.mk "t6e3" - val t6_e4 = E.mk "t6e4" - val t6_e5 = E.mk "t6e5" - - val target6 = target6 |> G.add_named_vertex t6_v1 k1 - |> G.add_named_vertex t6_v2 k1 - |> G.add_named_vertex t6_v3 k2 - |> G.add_named_vertex t6_v4 k2 - |> G.add_named_edge t6_e1 (Directed,eunit1) t6_v1 t6_v2 - |> G.add_named_edge t6_e2 (Directed,eunit1) t6_v1 t6_v3 - |> G.add_named_edge t6_e3 (Directed,eunit1) t6_v2 t6_v3 - |> G.add_named_edge t6_e4 (Directed,eunit1) t6_v3 t6_v4 - - val t6_cl = CL.mk target6 - val t6_c1_1 = nth t6_cl 0 - val t6_c1_2 = nth t6_cl 1 - val t6_c1_3 = nth t6_cl 1 - val t6_lit1 = nth t6_c1_1 0 - val t6_lit2 = nth t6_c1_2 0 - val t6_lit3 = nth t6_c1_2 1 - val t6_lit4 = nth t6_c1_3 0 - - - - val target7 = G.empty - - val t7_v1 = V.mk "t7v1" - val t7_v2 = V.mk "t7v2" - val t7_v3 = V.mk "t7v3" - - val t7_e1 = E.mk "t7e1" - val t7_e2 = E.mk "t7e2" - val t7_e3 = E.mk "t7e3" - - val target7 = target7 |> G.add_named_vertex t7_v1 k2 - |> G.add_named_vertex t7_v2 k1 - |> G.add_named_vertex t7_v3 k1 - |> G.add_named_edge t7_e1 (Directed,eunit1) t7_v1 t7_v2 - |> G.add_named_edge t7_e2 (Directed,eunit1) t7_v3 t7_v1 - |> G.add_named_edge t7_e3 (Directed,eunit1) t7_v2 t7_v1 - - val t7_cl = CL.mk target7 - val t7_c1_1 = nth t7_cl 0 - val t7_c1_2 = nth t7_cl 1 - val t7_lit1 = nth t7_c1_1 0 - val t7_lit2 = nth t7_c1_2 0 - val t7_lit3 = nth t7_c1_2 1 - - - - val target8 = G.empty - - val t8_v1 = V.mk "t8v1" - val t8_v2 = V.mk "t8v2" - val t8_v3 = V.mk "t8v3" - val t8_v4 = V.mk "t8v4" - val t8_v5 = V.mk "t8v5" - - val t8_e1 = E.mk "t8e1" - val t8_e2 = E.mk "t8e2" - val t8_e3 = E.mk "t8e3" - val t8_e4 = E.mk "t8e4" - - val target8 = target8 |> G.add_named_vertex t8_v1 k2 - |> G.add_named_vertex t8_v2 k1 - |> G.add_named_vertex t8_v3 k1 - |> G.add_named_vertex t8_v4 k2 - |> G.add_named_vertex t8_v5 k1 - |> G.add_named_edge t8_e1 (Directed,eunit1) t8_v1 t8_v2 - |> G.add_named_edge t8_e2 (Directed,eunit1) t8_v1 t8_v3 - |> G.add_named_edge t8_e3 (Directed,eunit1) t8_v2 
t8_v4 - |> G.add_named_edge t8_e4 (Directed,eunit1) t8_v3 t8_v5 - - val t8_cl = CL.mk target8 - val t8_c1_1 = nth t8_cl 0 - val t8_c1_2 = nth t8_cl 1 - val t8_c1_3 = nth t8_cl 2 - val t8_lit1 = nth t8_c1_1 0 - val t8_lit2 = nth t8_c1_2 0 - val t8_lit3 = nth t8_c1_2 1 - val t8_lit4 = nth t8_c1_3 0 - val t8_lit5 = nth t8_c1_3 1 - - - - - val target9 = G.empty - - val t9_v1 = V.mk "t9v1" - val t9_v2 = V.mk "t9v2" - val t9_v3 = V.mk "t9v3" - val t9_v4 = V.mk "t9v4" - val t9_v5 = V.mk "t9v5" - - val t9_e1 = E.mk "t9e1" - val t9_e2 = E.mk "t9e2" - val t9_e3 = E.mk "t9e3" - val t9_e4 = E.mk "t9e4" - val t9_e5 = E.mk "t9e5" - val t9_e6 = E.mk "t9e6" - - val target9 = target9 |> G.add_named_vertex t9_v1 k1 - |> G.add_named_vertex t9_v2 k1 - |> G.add_named_vertex t9_v3 k1 - |> G.add_named_vertex t9_v4 k1 - |> G.add_named_vertex t9_v5 k1 - |> G.add_named_edge t9_e1 (Directed,eunit1) t9_v1 t9_v2 - |> G.add_named_edge t9_e2 (Directed,eunit1) t9_v1 t9_v3 - |> G.add_named_edge t9_e3 (Directed,eunit1) t9_v3 t9_v1 - |> G.add_named_edge t9_e4 (Directed,eunit1) t9_v2 t9_v4 - |> G.add_named_edge t9_e5 (Directed,eunit1) t9_v2 t9_v5 - |> G.add_named_edge t9_e6 (Directed,eunit1) t9_v5 t9_v4 - - val t9_cl = CL.mk target9 - val t9_c1_1 = nth t9_cl 0 - val t9_c1_2 = nth t9_cl 1 - val t9_c1_3 = nth t9_cl 2 - val t9_lit1 = nth t9_c1_1 0 - val t9_lit2 = nth t9_c1_2 0 - val t9_lit3 = nth t9_c1_2 1 - val t9_lit4 = nth t9_c1_3 0 - val t9_lit5 = nth t9_c1_3 1 - - - - - (* TESTING PRUNING *) - - fun printout([]) = " " - | printout(g::[]) = GraphName.string_of_name(g) - | printout(g::gs) = GraphName.string_of_name(g) ^ printout(gs) - - (* Assertion functions *) - fun assert_graphs_eq(g1,g2) = - if TD.is_eq_graphs(g1,g2) then true - else raise ERROR ("Graphs do not match " ^ printout(g2) ^ " " ^ printout(g1)) - - fun assert_string_eq(a,b) = - if a = b then true - else raise ERROR ("Graphs do n") - - - (* Tests *) - val gn1 = GraphName.mk "pattern1" - val gn2 = GraphName.mk "pattern2" - val gn3 = GraphName.mk "pattern3" - val gn4 = GraphName.mk "pattern4" - val gn5 = GraphName.mk "pattern5" - val gn6 = GraphName.mk "pattern6" - val gn7 = GraphName.mk "pattern7" - val gnl = gn1 :: gn2 :: gn3 :: gn4 :: gn5 :: gn6 :: gn7 :: [] - val gl = pattern1 :: pattern2 :: pattern3 :: pattern4 :: pattern5 :: pattern6 :: pattern7 :: [] - - val tab = GraphName.NTab.empty - val tab = TD.fold GraphName.NTab.doadd gnl gl tab - - val tree = TD.mk tab - - - val tree_graphs = TD.graphs tree - val _ = Testing.test "Checking the graphs in the tree.." assert_graphs_eq(gnl,tree_graphs) - - - val pruned_tree1 = TD.extended_prune t1_v1 target1 tree - val pruned_tree1_grpahs = TD.graphs pruned_tree1 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq([],pruned_tree1_grpahs) - - (*val root = pruned_tree1 - val contour_1 = TD.get_contour(root) - val children_1 = TD.get_children(root) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_1,C.empty) - val _ = Testing.test "Checking the children.." assert_children_length((length children_1),0)*) - - val pruned_tree2 = TD.extended_prune t2_v1 target2 tree - val pruned_tree2_graphs = TD.graphs pruned_tree2 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn1::[],pruned_tree2_graphs) - - (*val target_contour_list = CL.mk_from target2 t2_v1 - val _ = Testing.test "Checking the contours.." 
assert_contour_list_equiv (target_contour_list, (t2_lit1::[]) :: (t2_lit2::t2_lit3::[]) :: [] ) - - val level_1 = pruned_tree2 - val contour_1_1 = TD.get_contour(level_1) - val children_1_1 = TD.get_children(level_1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_1_1,C.empty) - val _ = Testing.test "Checking the children.." assert_children_length((length children_1_1),1) - - val level_2 = children_1_1 - val node_2_1 = nth level_2 0 - val contour_2_1 = TD.get_contour(node_2_1) - val children_2_1 = TD.get_children(node_2_1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_2_1,(lit1_1::[])) - val _ = Testing.test "Checking t he contour.." assert_contour_equiv(contour_2_1,(lit3_1::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_2_1),1) - - val level_3 = children_2_1 - val node_3_1 = nth children_2_1 0 - val contour_3_1 = TD.get_contour(node_3_1) - val children_3_1 = TD.get_children(node_3_1) - val _ = Testing.test "Checking the contour.." assert_contour_equiv(contour_3_1,(lit1_2::lit1_3::[])) - val _ = Testing.test "Checking the children.." assert_children_length((length children_3_1),1) - - val level_4 = children_3_1 - val node_4_1 = nth children_3_1 0 - val graph_4_1 = TD.get_graph(node_4_1) - val _ = Testing.test "Checking the graph.." assert_graph_name_eq(graph_4_1, gn1)*) - - val pruned_tree3 = TD.extended_prune t3_v1 target3 tree - val pruned_tree3_graphs = TD.graphs pruned_tree3 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn2::[],pruned_tree3_graphs) - - val pruned_tree4 = TD.extended_prune t4_v1a target4 tree - val pruned_tree4_graphs = TD.graphs pruned_tree4 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq([],pruned_tree4_graphs) - - val pruned_tree4 = TD.extended_prune t4_v2 target4 tree - val pruned_tree4_graphs = TD.graphs pruned_tree4 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn2::[],pruned_tree4_graphs) - - (*val t4_cl = CL.mk_from target4 t4_v2 - val t4_c1_1 = nth t4_cl 0 - val t4_c1_2 = nth t4_cl 1 - val t4_c1_3 = nth t4_cl 2 - val t4_lit1_1 = nth t4_c1_1 0 - val t4_lit2_1 = nth t4_c1_2 0 - val t4_lit2_2 = nth t4_c1_2 1 - val t4_lit3_1 = nth t4_c1_3 0 - val t4_lit3_2 = nth t4_c1_3 1 - - val _ = Testing.test "Checking the literals.." assert_lit_equiv(t4_lit1_1,L.build( V.mk "t4v2", k2, L.mult_none, 2, L.mult_none, 0, L.mult_none )) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(t4_lit2_1,L.build( V.mk "t4v1a", k2, L.mult_none, 0, L.mult_none, 3, L.mult_none )) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(t4_lit2_2,L.build( V.mk "t4v1b", k2, L.mult_none, 0, L.mult_none, 3, L.mult_none )) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(t4_lit3_1,L.build( V.mk "t4v3", k2, L.mult_none, 3, L.mult_none, 0, L.mult_none )) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(t4_lit3_2,L.build( V.mk "t4v4", k1, L.mult_none, 2, L.mult_none, 1, L.mult_none )) *) - - val pruned_tree5 = TD.extended_prune t5_v1 target5 tree - val pruned_tree5_graphs = TD.graphs pruned_tree5 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn4::[],pruned_tree5_graphs) - - val pruned_tree6 = TD.extended_prune t6_v1 target6 tree - val pruned_tree6_graphs = TD.graphs pruned_tree6 - val _ = Testing.test "Checking the graphs.." 
assert_graphs_eq(gn3 :: gn4:: [],pruned_tree6_graphs) - - val pruned_tree7 = TD.extended_prune t7_v1 target7 tree - val pruned_tree7_graphs = TD.graphs pruned_tree7 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn5::[],pruned_tree7_graphs) - - (*val target_contour_list = CL.mk_from target7 t7_v1 - val t7_c1 = nth target_contour_list 0 - val t7_c2 = nth target_contour_list 1 - val lit7_1 = nth t7_c1 0 - val lit7_2 = nth t7_c2 0 - val lit7_3 = nth t7_c2 1 - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit7_1,lit5_1) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit7_2,L.build( V.mk "t4v2", k1, L.mult_none, 1, L.mult_none, 1, L.mult_none )) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit7_3,L.build( V.mk "t4v2", k1, L.mult_none, 0, L.mult_none, 1, L.mult_none )) - val _ = Testing.test "Checking strong compatibility.." assert_strong_compatibility(t7_c1,c5_1) - val _ = Testing.test "Checking strong compatibility.." assert_weak_compatibility(t7_c2,c5_2)*) - - val pruned_tree8 = TD.extended_prune t8_v1 target8 tree - val pruned_tree8_graphs = TD.graphs pruned_tree8 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn6::[],pruned_tree8_graphs) - - (*val target_contour_list = CL.mk_from target8 t8_v1 - val t8_c1 = nth target_contour_list 0 - val t8_c2 = nth target_contour_list 1 - val t8_c3 = nth target_contour_list 2 - val lit8_1 = nth t8_c1 0 - val lit8_2 = nth t8_c2 0 - val lit8_3 = nth t8_c2 1 - val lit8_4 = nth t8_c3 0 - val lit8_5 = nth t8_c3 1 - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit8_1,lit6_1) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit8_3,lit6_3) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit8_5,lit6_4) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit8_2,L.build( V.mk "t4v2", k1, L.mult_none, 1, L.mult_none, 1, L.mult_none )) - val _ = Testing.test "Checking the literals.." assert_lit_equiv(lit8_4,L.build( V.mk "t4v2", k2, L.mult_none, 1, L.mult_none, 0, L.mult_none )) - val _ = Testing.test "Checking strong compatibility.." assert_strong_compatibility(t8_c1,c6_1) - val _ = Testing.test "Checking strong compatibility.." assert_weak_compatibility(t8_c2,c6_2) - val _ = Testing.test "Checking strong compatibility.." assert_weak_compatibility(t8_c3,c6_3)*) - - val pruned_tree9 = TD.extended_prune t9_v1 target9 tree - val pruned_tree9_graphs = TD.graphs pruned_tree9 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq([],pruned_tree9_graphs) - - val pruned_tree9 = TD.standard_prune t9_v1 target9 tree - val pruned_tree9_graphs = TD.graphs pruned_tree9 - val _ = Testing.test "Checking the graphs.." assert_graphs_eq(gn7::[],pruned_tree9_graphs) - -in - val _ = Testing.assert_no_failed_tests(); - val _ = "UNIT TESTS FOR DNETS PASSED!" -end - - -(* - val target_contour_list = CL.mk_from target3 t3_v1 - val t3_cl = nth target_contour_list 0 - val lit3_1 = nth t3_cl 0 - val _ = Testing.test "Checking the literals.." 
assert_lit_equiv(lit3_1,L.build( V.mk "t4v2", k2, L.mult_none, 0, L.mult_none, 0, L.mult_none )) - - val children = TD.get_children(tree) - val child = nth children 1 - - val cont = TD.get_contour(child) - val _ = Testing.test "X" assert_contour_equiv(cont, (L.build( V.mk "t4v2", k2, L.mult_none, 0, L.mult_star, 0, L.mult_none) :: [])) - - val is_weak = C.contains_boundary(cont) - val t = if is_weak then 1 else 0 - val _ = Testing.test "X" assert_int_eq(t,0,"s") - - val is_saved = C.check_strong_compatibility(t3_cl,cont) - val t = if is_saved then 1 else 0 - val _ = Testing.test "X" assert_int_eq(t,1,"s")*) diff --git a/oldcore/example_code/ROOT.ML b/oldcore/example_code/ROOT.ML deleted file mode 100644 index 67d8db47..00000000 --- a/oldcore/example_code/ROOT.ML +++ /dev/null @@ -1,6 +0,0 @@ -Testing.make_test "ml_hackery.ML"; -Testing.make_test "graph_io_and_basic_manipulation.ML"; -Testing.make_test "../layout/ograph_layout.ML"; -Testing.make_test "layout.ML"; - - diff --git a/oldcore/example_code/foo.svg b/oldcore/example_code/foo.svg deleted file mode 100644 index f349ea67..00000000 --- a/oldcore/example_code/foo.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/oldcore/example_code/graph_io_and_basic_manipulation.ML b/oldcore/example_code/graph_io_and_basic_manipulation.ML deleted file mode 100644 index d5199f59..00000000 --- a/oldcore/example_code/graph_io_and_basic_manipulation.ML +++ /dev/null @@ -1,62 +0,0 @@ -local -(* Select the Theory we are going to work in. See theories/*/theory.ML. Below uses theories/substrings/theory.ML, which provides string data on vertices, no variable data, and prefix-substring as matching. *) -structure Theory = Substrings_Theory -structure IO = Substrings_GraphicalTheoryIO -open Theory -in - -(* Create a new empty graph *) -val g0 = Graph.empty - -(* Write the graph to a file. *) -val _ = g0 - |> IO.OutputGraphJSON.output - |> File_Io.write_json "../theories/substrings/test/graphs/empty_graph.graph" - -(* Read the graph from the file *) -val g2 = "../theories/substrings/test/graphs/empty_graph.graph" - |> File_Io.read_json - |> IO.InputGraphJSON.input - -(* We add Boundary vertex to g2, and save it as the file g3. To see what you can do to work with graphs, see the signatures in: - - core/graph/ograph.sig.ML - core/graph/bang_graph.sig.ML - -We'll create a graph that has 3 vertices, connected in a line "vn1 -> vn2 -> vn3". The first vertex, vn1, is a boundary vertex, the other too are "blah.foo" and "blah.bar". "blah.bar" has an undirected self loop. -*) -(* Let g3 be the empty graph. *) -val g3 = Graph.empty -(* add vertices *) -val (vn1, g3) = g3 |> Graph.add_vertex (Graph.WVert) -val (vn2, g3) = g3 |> Graph.add_vertex (Graph.NVert ("blah.foo.")) -val (vn3, g3) = g3 |> Graph.add_vertex (Graph.NVert ("blah.bar.")) -(* add edges *) -val (en1, g3) = g3 |> Graph.add_edge - (Directed, Graph.default_edata) vn1 vn2 -val (en2, g3) = g3 |> Graph.add_edge - (Directed, Graph.default_edata) vn2 vn3 -val (en3, g3) = g3 |> Graph.add_edge - (Undirected, Graph.default_edata) vn3 vn3 - -(* Write the graph g3 to a file. *) -val _ = g3 - |> IO.OutputGraphJSON.output - |> File_Io.write_json "../theories/substrings/test/graphs/g3.graph" - -(* Read the graph from the file *) -val g4 = "../theories/substrings/test/graphs/g3.graph" - |> File_Io.read_json - |> IO.InputGraphJSON.input - -(* Make a simple rule that removes a self loop from "blah.bar.*" if it is connected to "blah.foo.*" by one edge and "blah.foo.*" is connected to one other vertex. 
*) -(** Make the graph g4, which is the same as g3 but with the self-loop removed. **) -val g4 = g4 |> Graph.delete_edge en3 -(** Make the (un-named, unstored) rule r1. **) -val r1 = Rule.mk (g3,g4) -(** Write the rule to disk **) -val _ = r1 - |> IO.OutputRuleJSON.output - |> File_Io.write_json "../theories/substrings/test/rules/r1.graph" - -end; (* *) diff --git a/oldcore/example_code/layout.ML b/oldcore/example_code/layout.ML deleted file mode 100644 index 235956d8..00000000 --- a/oldcore/example_code/layout.ML +++ /dev/null @@ -1,32 +0,0 @@ - -local - structure Theory = Substrings_Theory - open Theory -in - (* make a dummy graph that is a single vertex *) - val _ = File_Io.write_pretty "./foo.svg" - (OGraphPreLayout.svg_pretty_in_context - (OGraphPreLayout.svg_pretty_vertex (V.mk "x") 100 150)) - - (* create the layout/drawing for the graph g4 with an extra vertex added. *) - val (_, g) = g4 |> Graph.add_vertex (Graph.NVert ("nar.")) - - (* test adding next vlist *) - val input_vlist = V.NSet.list_of (Graph.get_inputs g) - val (seen_vs,next_vs) = - OGraphPreLayout.add_to_next_vlist g input_vlist (V.NSet.empty,V.NSet.empty) - val (seen_vs,next_vs) = - OGraphPreLayout.add_to_next_vlist g (V.NSet.list_of next_vs) (seen_vs,V.NSet.empty) - - (* test laying out the graph *) - (* val _ = Str_Theory.Graph.print g; *) - val layouts = OGraphPreLayout.layout_graph g - (* val _ = map OGraphPreLayout.print_layout layouts; *) - - (* one box for each disconnected component *) - val boxes = map OGraphPreLayout.box_of_layout layouts - - val positions = map (OGraphPreLayout.pos_maps_of_box 50 50) boxes - -end - diff --git a/oldcore/example_code/layout.html b/oldcore/example_code/layout.html deleted file mode 100644 index 5dc061a7..00000000 --- a/oldcore/example_code/layout.html +++ /dev/null @@ -1,108 +0,0 @@ - - - - - - - -

-          Do an AJAX thing
-
-          A simple HTML/JS to do an AJAX call.
-
-          server:
-          port:
-          path:
-          data:
-
-          Status:
-
-          Result:
-
-          Graph: