This is a simplified version, obtained by raising cutoffs and sharpening decays.
When we have variables such as $A: Type$, $a : A$ and $B: Type$, we test whether we correctly handle dependence on these variables.
This time the test is more refined: namely, we examine the equations themselves and their isle-normalization in detail.
import $cp.bin.`provingground-core-jvm-b17f79ea57.fat.jar`
import provingground._ , interface._, HoTT._, learning._
// Configure the Ammonite REPL's pretty-printer: extend the existing handlers
// with provingground's fansi-based handler so HoTT terms display nicely.
repl.pprinter() = {
val p = repl.pprinter()
p.copy(
additionalHandlers = p.additionalHandlers.orElse {
translation.FansiShow.fansiHandler
}
)
}
// Two type variables and a term variable of the first type.
val A = "A" :: Type
val B = "B" :: Type
val a = "a" :: A
// Initial term state: `a` as the only term, the types A and B as initial
// types, with A, B, a recorded both as variables and in the context.
val ts = TermState(FiniteDistribution.unif(a), FiniteDistribution.unif(A, B), vars = Vector(A, B, a), context = Context.Empty.addVariable(A).addVariable(B).addVariable(a))
val lp = LocalProver(ts)
// Sanity check: the recorded variables of the state.
ts.vars
import TermData._
// Task generating term data (final state and equations) from the prover.
val datT = termData(lp)
import monix.execution.Scheduler.Implicits.global
// Run the Monix task synchronously; acceptable in an exploratory worksheet.
val td = datT.runSyncUnsafe()
// ns: the resulting state; eqs: the generated equations.
val (ns, eqs) = td
// All expression variables occurring on either side of any equation.
val atoms = (eqs.map(_.rhs).flatMap(Expression.varVals(_)) union eqs.map(_.lhs).flatMap(Expression.varVals(_))).map(_.variable)
import TermRandomVars._, GeneratorVariables._
// The terms among the atoms (elements of the `Terms` random variable).
val elemTerms = atoms.collect{case Elem(t: Term, Terms) => t}
// Test: does any generated term still depend on the variable A?
elemTerms.exists(_.dependsOn(A))
None of the terms depends on the variables.
atoms.size
// Isle-normalize every equation and recompute the atoms and element terms.
val normEqs = eqs.map(eq => TermData.isleNormalize(eq))
val normAtoms = (normEqs.map(_.rhs).flatMap(Expression.varVals(_)) union normEqs.map(_.lhs).flatMap(Expression.varVals(_))).map(_.variable)
val normElemTerms = normAtoms.collect{case Elem(t: Term, Terms) => t}
// Normalization should not change the set of element terms.
elemTerms == normElemTerms
// A fresh initial state with no terms and only the universe as a type.
val ts0 = TermState(FiniteDistribution.empty, FiniteDistribution.unif(Type))
// Evaluator built from the raw equations ...
val ev = ExpressionEval.fromInitEqs(ts0, Equation.group(eqs), TermGenParams(), decayS = 0.75)
val termsEv = ev.finalTerms
// ... and from the normalized equations, for comparison.
val evN = ExpressionEval.fromInitEqs(ts0, Equation.group(normEqs), TermGenParams(), decayS = 0.75)
val termsN = evN.finalTerms
// Inspect the evaluator's data: initial map, convergence parameters, equations.
evN.init
evN.maxRatio
evN.decay
evN.equations
import ExpressionEval._
// A single iteration step of the equation solver from the initial map.
val newMap = nextMap(evN.init, evN.equations, 0.5)
val ss : Stream[Map[Expression, Double]] = evN.init #:: ss.map(m => nextMap(m, evN.equations, 0.5))
// Number of expressions tracked at each iteration.
val sizes = ss.map(_.keySet.size)
sizes(2)
sizes.take(20).toVector
// Whether consecutive iterates have the same support (key set).
val stable= ss.zip(ss.tail).map{case (m1, m2) => m1.keySet == m2.keySet}
stable.take(20).toVector
// Ratio between successive iterates, used as a convergence measure.
val ratios = ss.tail.zip(ss.tail.tail).map{case (m1, m2) => mapRatio(m1, m2)}
ratios.take(20).toVector
We get `NaN` values in the ratios computed by `mapRatio`.
val zeroes = ss.map(_.filter(_._2 == 0))
// Entries with value exactly zero — candidates for producing NaN ratios.
zeroes.take(20).toVector
// Compare two consecutive iterates directly.
val m1 = ss(1)
val m2 = ss(2)
mapRatio(m1, m2)
// Pair each key with its value in both maps: (key, value-in-m1, value-in-m2).
val triple = m1.map{case (k, v) => (k, v, m2(k))}
// A zero value on either side makes one of the ratios 0/0 = NaN.
triple.filter(t => t._2 == 0 || t._3 == 0)
val rt = triple.map(t => (t._2/t._3, t._3/ t._2))
triple.head._2
triple.head._3
triple.map(_._3)
triple.filterNot(_._3 < 2)
// The sets of values attained in each iterate.
m1.values.toSet
m2.values.toSet
// Evaluate the right-hand side of each normalized equation under m1.
val steps = normEqs.map(eq => recExp(m1, eq.rhs) -> eq)
steps.map(_._1)
// Look for an equation whose RHS evaluates to a negative value.
val neg = steps.find(_._1 < 0)
neg.get._2.rhs
// Trace the offending normalized equation back to its source equation(s)
// (the second `val source` deliberately shadows the first — REPL style).
val source = eqs.find(eq => TermData.isleNormalize(eq) == neg.get)
val source = eqs.filter(eq => TermData.isleNormalize(eq).lhs == neg.get._2.lhs)
normEqs.contains(neg.get._2)
// If isleNormalize were deterministic these two would agree.
normEqs == eqs.map(eq => TermData.isleNormalize(eq))
normEqs -- eqs.map(eq => TermData.isleNormalize(eq))
// Find an equation whose normalization differs between two invocations,
// i.e. a witness that isleNormalize is nondeterministic (the repeated
// call on the same `eq` is intentional).
val badEq = eqs.find(eq => TermData.isleNormalize(eq) != TermData.isleNormalize(eq)).get
// Normalize twice and compare the pieces to locate the nondeterminism.
val eq1 = TermData.isleNormalize(badEq)
val eq2 = TermData.isleNormalize(badEq)
eq1 == eq2
eq1.rhs ==eq2.rhs
eq1.rhs
eq1.lhs == eq2.lhs
eq1.lhs
eq2.lhs
import Expression._
// The random variable underlying the bad equation's left-hand side.
val exp = badEq.lhs.asInstanceOf[FinalVal[Term]].variable
import TermRandomVars._
// Normalize the variable twice; the two results should, but may not, agree.
val n1 = isleNormalizeVars(exp, Vector())
val n2 = isleNormalizeVars(exp, Vector())
n1 == n2
// Drill into the island structure component by component to find the
// mismatch (asInstanceOf is acceptable here: purely exploratory).
val i1 = n1.asInstanceOf[InIsle[Term, TermState, Term, Term]]
val i2 = n2.asInstanceOf[InIsle[Term, TermState, Term, Term]]
i1.boat == i2.boat
i1.isleVar == i2.isleVar
i1.isle == i2.isle
i1.isle
i2.isle
i1.isle.initMap == i2.isle.initMap
i1.isle.output == i2.isle.output
// The islandOutput component is where the difference shows up.
i1.isle.islandOutput == i2.isle.islandOutput
// NOTE(review): `ContantRandomVar` looks like a typo — confirm the intended
// name (`ConstantRandomVar`?) and that `rv` is in scope at this point.
ContantRandomVar(rv)
// NOTE(review): bare reference, presumably inspecting `isleSub` — confirm.
isleSub
These were defined as lambdas.