-rw-r--r--  analyze.py                          |   4
-rw-r--r--  reversing_game_mechanics/stuff.txt  | 105
-rw-r--r--  stats.py                            |  35
3 files changed, 135 insertions, 9 deletions
diff --git a/analyze.py b/analyze.py
index a0537f7..2dc8c88 100644
--- a/analyze.py
+++ b/analyze.py
@@ -11,9 +11,9 @@ s = Stats.load(files[0])
for f in files[1:]:
s.merge(f)
-#s.analyze_speed()
+s.analyze_speed()
print("\n" + "-"*40 + "\n")
-#s.analyze_visible_window(True)
+s.analyze_visible_window(False)
for i in ["split cell", "ejected mass", "virus"]:
s.analyze_deviations(i)
print("")
diff --git a/reversing_game_mechanics/stuff.txt b/reversing_game_mechanics/stuff.txt
new file mode 100644
index 0000000..29a5248
--- /dev/null
+++ b/reversing_game_mechanics/stuff.txt
@@ -0,0 +1,105 @@
+as of 2015-08-30
+
+CELL SIZE VS SPEED
+
+ size**0.45 * speed = 86.05616001328154
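+
+ A minimal Python sketch of reading this fit backwards (the constant is just the
+ measured value above, and the function name is my own shorthand):
+
+   def estimated_speed(cell_size):
+       # speed * size**0.45 ~= 86.056, so speed ~= 86.056 / size**0.45
+       return 86.05616001328154 / cell_size**0.45
+
+   # e.g. estimated_speed(100) ~= 10.8, in whatever units the speed samples use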
+
+
+
+
+SIZE VS VIEWPORT
+
+ with 1 cells, depending on sum(size)
+ median ratio = 1.7025611175785798
+ diag / size**0.33 = 608.971483054539
+
+ with 2 cells, depending on sum(size)
+ median ratio = 1.6963503649635037
+ diag / size**0.33 = 585.5509541758322
+
+ with 3 cells, depending on sum(size)
+ median ratio = 1.6898326898326899
+ diag / size**0.58 = 170.29929514108093
+
+ with 4 cells, depending on sum(size)
+ median ratio = 1.650784427658338
+ diag / size**0.0 = 3158.567553889486
+
+ with 1 cells, depending on sum(mass)
+ median ratio = 1.7025611175785798
+ diag / size**0.17 = 1270.6199859482824
+
+ with 2 cells, depending on sum(mass)
+ median ratio = 1.6972934472934473
+ diag / size**0.16 = 1407.4522241811242
+
+ with 3 cells, depending on sum(mass)
+ median ratio = 1.6975546975546976
+ diag / size**0.28 = 910.623966202271
+
+ with 4 cells, depending on sum(mass)
+ median ratio = 1.6625734116390818
+ diag / size**0.0 = 3141.1700855829763
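+
+ Reading the 1-cell fit as code (a sketch: I'm assuming "ratio" means width/height
+ of the visible window and "diag" its diagonal sqrt(width**2 + height**2)):
+
+   import math
+
+   def estimated_viewport(total_size, ratio=1.7025611175785798):
+       diag = 608.971483054539 * total_size**0.33   # 1-cell fit from above
+       # diag**2 = w**2 + h**2 with w = ratio*h  =>  h = diag / sqrt(ratio**2 + 1)
+       height = diag / math.sqrt(ratio**2 + 1)
+       width = ratio * height
+       return width, height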
+
+
+
+
+EJECT/SPLIT DIRECTIONS
+
+ split cell eject/split direction deviations: mean = 0.0009390500296252917, stddev=0.31212271930689983, ndata=621
+ 75% of the splits had a deviation smaller than 0.02 rad = 1.19 deg
+
+ ejected mass eject/split direction deviations: mean = -0.021378484138331356, stddev=0.730695490707546, ndata=1585
+ 75% of the splits had a deviation smaller than 0.39 rad = 22.16 deg
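+
+ Roughly how the "75% of the splits had a deviation smaller than ..." lines fall
+ out (a sketch, not the exact quantile() from stats.py; `deviations` is the list
+ of signed angle deviations in radians):
+
+   import math
+
+   def deviation_threshold(deviations, q=0.75):
+       abs_devs = sorted(abs(d) for d in deviations)
+       threshold = abs_devs[int(q * len(abs_devs))]   # q-quantile, no interpolation
+       return threshold, math.degrees(threshold)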
+
+
+
+
+EJECT/SPLIT DISTANCES
+
+ split cell eject/split distances: mean = 378.6264099585539, stddev =214.15995855502896, ndata=1226
+ split cell meann = 23.37846655791191, stddevn =17.23260859398865
+ 75% of the distances lie in the interval 370.30 plusminus 218.60
+ 80% of the distances lie in the interval 370.30 plusminus 262.32
+ max = 1205.46
+ 75% of the flight lengths lie in the interval 20.00 plusminus 9.00
+ 78% of the flight lengths lie in the interval 20.00 plusminus 10.80
+
+ ejected mass eject/split distances: mean = 473.3307839719213, stddev =159.4625848157587, ndata=1121
+ ejected mass meann = 42.015165031222125, stddevn =8.5656796143937
+ 75% of the distances lie in the interval 534.64 plusminus 2.10
+ 77% of the distances lie in the interval 534.64 plusminus 2.52
+ max = 637.28
+ 75% of the flight lengths lie in the interval 44.00 plusminus 1.00
+ 79% of the flight lengths lie in the interval 44.00 plusminus 1.20
+
+ virus eject/split distances: mean = 396.47928995805, stddev =219.79929069475193, ndata=9
+ virus meann = 42.666666666666664, stddevn =6.879922480183431
+ 75% of the distances lie in the interval 510.53 plusminus 363.80
+ 88% of the distances lie in the interval 510.53 plusminus 436.56
+ max = 580.08
+ 75% of the flight lengths lie in the interval 45.00 plusminus 10.00
+ 77% of the flight lengths lie in the interval 45.00 plusminus 12.00
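+
+ The "plusminus" intervals are the smallest [mid-delta, mid+delta] that still
+ covers 75% of the samples; a condensed sketch of what stats.py does in
+ find_smallest_q_confidence_area() / get_delta_confidence(), not a verbatim copy:
+
+   def smallest_q_interval(values, q=0.75):
+       def q_radius(center):
+           dists = sorted(abs(v - center) for v in values)
+           return dists[int(q * len(dists))]   # q-quantile of distances from center
+       mid = min(values, key=q_radius)         # candidate centers are the samples themselves
+       return mid, q_radius(mid)
+
+   def delta_confidence(values, mid, delta):
+       # fraction inside [mid-delta, mid+delta]; gives the "80% ... plusminus 1.2*delta" lines
+       return sum(1 for v in values if mid - delta <= v <= mid + delta) / len(values)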
+
+
+
+
+VIRUS SIZES
+
+ I've seen the following 7 virus sizes:
+ 100: 386018 times
+ 106: 124015 times
+ 113: 72084 times
+ 119: 41825 times
+ 125: 24954 times
+ 131: 373398 times
+ 136: 11550 times
+
+
+
+
+REMERGES
+
+ 75% of the remerge durations lie at 32.00 plusminus 30.00 frames
+ 75% of the remerges were started after 767.00 plusminus 20.00 frames
+
diff --git a/stats.py b/stats.py
index 42ee0ea..d65c6d3 100644
--- a/stats.py
+++ b/stats.py
@@ -26,6 +26,8 @@ def quantile(values, q):
return 0
def find_smallest_q_confidence_area(values, q):
+"""Calculates the (mid, delta) with the smallest delta, such that at least q * len(values)
+of the values lie within the interval [mid-delta, mid+delta]."""
try:
mid = min(values, key = lambda value : quantile(list(map(lambda x : abs(x-value), values)), q))
deviation = quantile(list(map(lambda x : abs(x-mid), values)),q)
@@ -34,6 +36,14 @@ def find_smallest_q_confidence_area(values, q):
except:
return 0,0
+def get_delta_confidence(values, mid, delta):
+"""Calculates which fraction of the values lie within [mid-delta, mid+delta]."""
+return len(list(filter(lambda v : (mid-delta <= v and v <= mid+delta), values))) / len(values)
+
def avg(values):
if not isinstance(values, dict):
return sum(values)/len(values)
@@ -359,11 +369,17 @@ class Stats:
for ncells in sorted(self.data.size_vs_visible_window.keys()):
if len(self.data.size_vs_visible_window[ncells]) > 0:
print("\nwith "+str(ncells)+" cells, depending on sum(size)")
- self.analyze_visible_window_helper(self.data.size_vs_visible_window[ncells], verbose)
+ try:
+ self.analyze_visible_window_helper(self.data.size_vs_visible_window[ncells], verbose)
+ except ZeroDivisionError:
+ print("\toops.")
for ncells in sorted(self.data.mass_vs_visible_window.keys()):
if len(self.data.mass_vs_visible_window[ncells]) > 0:
print("\nwith "+str(ncells)+" cells, depending on sum(mass)")
- self.analyze_visible_window_helper(self.data.mass_vs_visible_window[ncells], verbose)
+ try:
+ self.analyze_visible_window_helper(self.data.mass_vs_visible_window[ncells], verbose)
+ except ZeroDivisionError:
+ print("\toops.")
def analyze_deviations(self, celltype):
ds = self.data.eject_deviations[celltype]
@@ -412,8 +428,13 @@ class Stats:
#quant = quantile(list(map(lambda v : abs(v-midpoints[maxidx]), ds)), q/100)
#print("\t"+str(q)+"% of values lie have a distance of at most "+str(quant)+" from the maximum")
- print("\t75%% of the distances lie in the interval %.2f plusminus %.2f" % find_smallest_q_confidence_area(ds, 0.75))
- print("\t75%% of the flight lengths lie in the interval %.2f plusminus %.2f" % find_smallest_q_confidence_area(ns, 0.75))
+ mid, delta = find_smallest_q_confidence_area(ds, 0.75)
+ print("\t75%% of the distances lie in the interval %.2f plusminus %.2f" % (mid,delta))
+ print("\t%2d%% of the distances lie in the interval %.2f plusminus %.2f" % (100*get_delta_confidence(ds, mid, delta*1.2), mid, delta*1.2) )
+ print("\tmax = %.2f" % (max(ds)))
+ mid, delta = find_smallest_q_confidence_area(ns, 0.75)
+ print("\t75%% of the flight lengths lie in the interval %.2f plusminus %.2f" % (mid,delta))
+ print("\t%2d%% of the flight lengths lie in the interval %.2f plusminus %.2f" % (100*get_delta_confidence(ns,mid,delta*1.2),mid,delta*1.2))
print("")
def analyze_virus_sizes(self):
@@ -424,7 +445,7 @@ class Stats:
def analyze_remerge(self):
relevant = list(filter(lambda r : r.is_parent_child, self.data.remerging.values()))
durations = list(map(lambda r : r.end_time - r.begin_time, relevant))
- print(fit_gaussian(durations))
+ print("75%% of the remerge durations lie at %.2f plusminus %.2f frames" % find_smallest_q_confidence_area(durations,0.75))
waittimes = list(map(lambda r : r.begin_time - max(r.birth1, r.birth2), relevant))
- print(fit_gaussian(waittimes))
-
+ print("75%% of the remerges were started after %.2f plusminus %.2f frames" % find_smallest_q_confidence_area(waittimes,0.75))
+