app: stacked_cores: create a plot_53xx and refactor the surroundings

Note that the interpolation is very BAD; I need to figure out better
sampling.
This commit is contained in:
colin 2022-11-03 21:21:07 -07:00
parent 4023a67912
commit df828b6299
4 changed files with 163 additions and 94 deletions

View File

@ -1,93 +1,17 @@
#!/usr/bin/env python3
from math import sqrt
from natsort import natsorted
import plotly.express as px
from pandas import DataFrame
from stacked_cores_52xx import *
def plot(name: str, x_name: str, y_series: list):
    """Render a line chart of `y_series` against an inferred x sweep."""
    frame = DataFrame(data={x_name: sweep_1d(len(y_series)), "y": y_series})
    figure = px.line(frame, x=x_name, y="y", title=name)
    figure.show()
def eval_series(meas: 'ParameterizedMeas', points: list, y_idx: int = -1):
    """Sample `meas` at each point, keeping only the `y_idx`-th output value."""
    series = []
    for point in points:
        series.append(sample_all(meas, *point)[y_idx])
    return series
def sample_all(meas: 'ParameterizedMeas', a0: float, a1: float) -> tuple:
    """Interpolate the measured transfer values at the coordinate (a0, a1)."""
    transfers = [extract_transfer_from_meas_rows(rows) for rows in meas.runs()]
    neighbors = [(distance_to(tx, (a0, a1)), tx) for tx in transfers]
    return weighted_sum_of_neighbors_by_inv_distance(neighbors)
def extract_transfer_from_meas_rows(meas_rows: list) -> tuple:
    """Flatten three measurement rows into one (M0, M1, M2, M3) transfer point."""
    first, second, third = meas_rows[0], meas_rows[1], meas_rows[2]
    return (first.m[0], first.m[1], second.m[2], third.m[3])
def interpolate(meas: 'ParameterizedMeas', a0: float, a1: float) -> tuple:
    """
    this interpolates a point among four neighboring points in 2d.
    the implementation only supports 2d, but the technique is extendable to N dim.

    for each axis we take the nearest measured row at-or-below and at-or-above
    the target coordinate (`None` when the target lies outside the sampled range
    on that side), then blend the surviving neighbors by inverse distance.
    """
    rows = [r.m for r in meas.all_rows()]
    distances = [(distance_to(m, (a0, a1)), m) for m in rows]
    # min over (distance, row) pairs picks the closest row on each side
    a0_below = min((d for d in distances if d[1][0] <= a0), default=None)
    a0_above = min((d for d in distances if d[1][0] >= a0), default=None)
    a1_below = min((d for d in distances if d[1][1] <= a1), default=None)
    a1_above = min((d for d in distances if d[1][1] >= a1), default=None)
    neighbors = [a for a in [a0_below, a0_above, a1_below, a1_above] if a is not None]
    return weighted_sum_of_neighbors_by_inv_distance(neighbors)
def weighted_sum_of_neighbors_by_inv_distance(neighbors: list) -> tuple:
    """
    each neighbor is (distance, value).
    return a weighted sum of these neighbors, where lower-distance neighbors are more strongly weighted.

    weights are 1/distance, clamped at distance 1e-3 so an exact hit doesn't divide by zero.
    """
    weight_n = lambda n: 1/max(n[0], 1e-3) # non-normalized weight for neighbor
    W = sum(weight_n(n) for n in neighbors)
    weighted_n = lambda n: weighted(n[1], weight_n(n)/W) # normalized weighted contribution for neighbor
    return element_sum([weighted_n(n) for n in neighbors])
def weighted_sum_of_neighbors(neighbors: list) -> tuple:
    """
    each neighbor is (distance, value).
    return a weighted sum of these neighbors, where lower-distance neighbors are more strongly weighted.
    """
    total_distance = sum(dist for dist, _value in neighbors)
    raw_weights = [total_distance - dist for dist, _value in neighbors]
    norm = sum(raw_weights)
    contributions = [weighted(value, w/norm) for (_dist, value), w in zip(neighbors, raw_weights)]
    return element_sum(contributions)
def distance_to(p0: tuple, p1: tuple) -> float:
    """Return the Euclidean (L2) distance between points p0 and p1."""
    total = 0.0
    for x0, x1 in zip(p0, p1):
        delta = x0 - x1
        total += delta * delta
    return sqrt(total)
def element_sum(lists: list) -> list:
    """
    given a list of equal-length lists of numbers, sum them element-wise.
    e.g. element_sum([[1, 2], [3, 4], [5, 6]]) gives [1+3+5, 2+4+6].
    the input lists are not modified.
    """
    # copy the first row rather than accumulating into it, so the
    # caller's data is left untouched.
    totals = list(lists[0])
    for l in lists[1:]:
        for i, e in enumerate(l):
            totals[i] += e
    return totals
def weighted(l: list, scale: float) -> list:
    """Return a copy of `l` with every element multiplied by `scale`."""
    return [scale * element for element in l]
from stacked_cores_52xx_plotters import *

# NOTE(review): each doubled assignment below looks like the old and new side
# of a diff rendered together; at runtime the later binding wins.
or_gates = DB
or_gates = read_db(lambda name: name.startswith("52-"))
# sweep helpers: map x in [0, 1] onto M values in [-17000, 17000]
sweep_1d = lambda points=101: [unit_to_m(x/(points-1)) for x in range(points)]
sweep_a0 = lambda a1, points=101: [(unit_to_m(x/(points-1)), a1) for x in range(points)]
sweep_a1 = lambda a0, points=101: [(a0, unit_to_m(x/(points-1))) for x in range(points)]
sweep_a0 = lambda a1, points=101: [(unit_to_m(x/(points-1)), a1, None, None) for x in range(points)]
sweep_a1 = lambda a0, points=101: [(a0, unit_to_m(x/(points-1)), None, None) for x in range(points)]
# unit_to_m is referenced by the lambdas above; late binding makes that legal
unit_to_m = lambda u: -17000 + 34000 * u
# plot the a1 transfer curve of every 52xx gate, with the other input pinned at -17000
for name, meas in natsorted(or_gates.items()):
    trace = eval_series(meas, sweep_a1(-17000))
    trace = eval_series(meas, sweep_a1(-17000), extract_52xx_tx)
    plot(f"{name}", "a1", trace)

View File

@ -0,0 +1,19 @@
#!/usr/bin/env python3
from stacked_cores_52xx import *
from stacked_cores_52xx_plotters import *
def extract_53xx_tx(meas_rows: list) -> tuple:
    """
    extracts a flat list of input/output M mappings from a 53xx run
    """
    row0, row1 = meas_rows[0], meas_rows[1]
    return (row1.m[0], row0.m[1], row0.m[2], row1.m[3])
# buf sweep: drive M1 and M2 with opposite-sign values; M0 and M3 are left
# as None, i.e. dependent values for the interpolator to fill in
sweep_buf_inputs = lambda points=101: [(None, m, -m, None) for m in sweep_1d(points)]
buf_gates = read_db(lambda name: name.startswith("53-"))
for name, meas in natsorted(buf_gates.items()):
    print(name)
    # y_idx=0: read M0 back out as the dependent y value
    trace = eval_series(meas, sweep_buf_inputs(), extract_53xx_tx, y_idx=0)
    plot(f"{name}", "a0", trace)

View File

@ -8,6 +8,8 @@ from extract_meas import extract_parameterized_meas, indented
from stacked_cores_52xx_db import DB
## CONSTANTS/CONFIGURATION
# list of sims to extract details for
PREFIXES = { "52", "53" }
@ -18,6 +20,28 @@ def times_of_interest(sim_name: str) -> list:
if sim_name.startswith("53-"):
return [2e-9, 4e-9]
## USER-FACING FUNCTIONS
def read_db(name_filter=lambda name: True) -> dict:
    """Return the subset of the measurement DB whose sim names satisfy `name_filter`."""
    selected = {}
    for name, meas in DB.items():
        if name_filter(name):
            selected[name] = meas
    return selected
def update_db():
    """Recompute the measurement database and persist it to its .py module."""
    dump("stacked_cores_52xx_db.py", compute_db())
## IMPLEMENTATION DETAILS
def compute_db():
    """Extract a parameterized measurement for every sim stem under the out/ dir."""
    here, _ = os.path.split(__file__)
    toplevel_out = f"{here}/../../../../out/applications/stacked_cores"
    db = {}
    for stem in extract_stems(os.listdir(toplevel_out)):
        db[stem] = extract_parameterized_meas(os.path.join(toplevel_out, stem), times_of_interest(stem))
    return db
def extract_stems(dirlist: list) -> list:
stems = set()
TERM = "-drive-"
@ -32,15 +56,6 @@ def extract_stems(dirlist: list) -> list:
return stems
def compute_db():
    """Build the full measurement database from the simulation output directory."""
    here, _ = os.path.split(__file__)
    out_dir = f"{here}/../../../../out/applications/stacked_cores"
    stems = extract_stems(os.listdir(out_dir))
    result = {}
    for stem in stems:
        result[stem] = extract_parameterized_meas(os.path.join(out_dir, stem), times_of_interest(stem))
    return result
def dump(path: str, db: dict):
with open(path, "w") as f:
f.write("from extract_meas import MeasRow, ParameterizedMeas\n\n")
@ -49,8 +64,4 @@ def dump(path: str, db: dict):
f.write(indented(f"\n{k!r}: {v},"))
f.write("\n}")
def update_db():
    """Recompute the DB and write it back out to its module file."""
    fresh = compute_db()
    dump("stacked_cores_52xx_db.py", fresh)

if __name__ == '__main__': update_db()

View File

@ -0,0 +1,115 @@
from math import sqrt
import plotly.express as px
from pandas import DataFrame
def unit_to_m(u):
    """Map u in [0, 1] linearly onto M values in [-17000, 17000]."""
    return -17000 + 34000 * u

def sweep_1d(points=101):
    """Evenly sample `points` M values spanning the full [-17000, 17000] range."""
    return [unit_to_m(i/(points-1)) for i in range(points)]
def plot(name: str, x_name: str, y_series: list):
    """ plot y(x), where y values are specified by `y_series` and x is inferred """
    columns = {x_name: sweep_1d(len(y_series)), "y": y_series}
    figure = px.line(DataFrame(data=columns), x=x_name, y="y", title=name)
    figure.show()
def eval_series(meas: 'ParameterizedMeas', points: list, extract_tx, y_idx: int = -1) -> list:
    """
    extract a list of y-value floats from `meas`, one per entry in `points`.
    each point is a tuple of desired M values at which to sample the curve;
    e.g. points = [ (None, 1000.0, 2000.0, None) ] samples at M1=1000.0, M2=2000.0,
    treating M0 and M3 as dependent values.
    `y_idx` selects which M value is returned as the dependent y value,
    e.g. `y_idx=0` to read off M0.
    `extract_tx` maps one run (list[list[float]] of M values) to a measured point
    of the transfer function, e.g. [15000, -15000, 14000] for a 3-core OR gate.
    """
    ys = []
    for point in points:
        ys.append(sample_all(meas, point, extract_tx)[y_idx])
    return ys
def sample_all(meas: 'ParameterizedMeas', at: tuple, extract_tx) -> tuple:
    """
    compute the interpolated M values at the coordinate `at`,
    effectively filling in whichever items in `at` are left at `None`
    """
    transfer_points = [extract_tx(run) for run in meas.runs()]
    neighbors = [(distance_to(tx, at), tx) for tx in transfer_points]
    result = weighted_sum_of_neighbors_by_inv_distance(neighbors)
    # debug output: show how the interpolation behaves at each sample point
    print(at, result)
    return result
def extract_52xx_tx(meas_rows: list) -> tuple:
    """
    extracts a flat list of input/output M mappings from a 52xx run
    """
    r0, r1, r2 = meas_rows[0], meas_rows[1], meas_rows[2]
    return (r0.m[0], r0.m[1], r1.m[2], r2.m[3])
def interpolate(meas: 'ParameterizedMeas', a0: float, a1: float) -> tuple:
    """
    this interpolates a point among four neighboring points in 2d.
    the implementation only supports 2d, but the technique is extendable to N dim.

    for each axis we take the nearest measured row at-or-below and at-or-above
    the target coordinate (`None` when the target lies outside the sampled range
    on that side), then blend the surviving neighbors by inverse distance.
    """
    rows = [r.m for r in meas.all_rows()]
    distances = [(distance_to(m, (a0, a1)), m) for m in rows]
    # min over (distance, row) pairs picks the closest row on each side
    a0_below = min((d for d in distances if d[1][0] <= a0), default=None)
    a0_above = min((d for d in distances if d[1][0] >= a0), default=None)
    a1_below = min((d for d in distances if d[1][1] <= a1), default=None)
    a1_above = min((d for d in distances if d[1][1] >= a1), default=None)
    neighbors = [a for a in [a0_below, a0_above, a1_below, a1_above] if a is not None]
    return weighted_sum_of_neighbors_by_inv_distance(neighbors)
def weighted_sum_of_neighbors_by_inv_distance(neighbors: list) -> tuple:
    """
    each neighbor is (distance, value).
    return a weighted sum of these neighbors, where lower-distance neighbors are more strongly weighted.

    weights are 1/distance, clamped at distance 1e-3 so an exact hit doesn't divide by zero.
    """
    weight_n = lambda n: 1/max(n[0], 1e-3) # non-normalized weight for neighbor
    W = sum(weight_n(n) for n in neighbors)
    weighted_n = lambda n: weighted(n[1], weight_n(n)/W) # normalized weighted contribution for neighbor
    return element_sum([weighted_n(n) for n in neighbors])
def weighted_sum_of_neighbors(neighbors: list) -> tuple:
    """
    each neighbor is (distance, value).
    return a weighted sum of these neighbors, where lower-distance neighbors are more strongly weighted.
    """
    # a neighbor's weight is the total distance minus its own distance,
    # normalized so all weights sum to 1
    total_distance = sum(dist for dist, _value in neighbors)
    raw_weights = [total_distance - dist for dist, _value in neighbors]
    norm = sum(raw_weights)
    contributions = [weighted(value, w/norm) for (_dist, value), w in zip(neighbors, raw_weights)]
    return element_sum(contributions)
def distance_to(p0: tuple, p1: tuple) -> float:
    """
    return the L2-norm distance from p0 to p1.
    any coordinate that is `None` in either point is ignored.
    e.g. `distance_to((1, 2, 3), (None, 4, 5))` is the same as `distance_to((2, 3), (4, 5))`
    """
    total = 0.0
    for a, b in zip(p0, p1):
        if a is None or b is None:
            continue
        total += (a - b) * (a - b)
    return sqrt(total)
def element_sum(lists: list) -> list:
    """
    given a list[list[float]] where each inner list is of identical length,
    returns a list[float] by summing along each axis.
    e.g. element_sum([[1, 2], [3, 4], [5, 6]]) gives `[1+3+5, 2+4+6]`
    the input lists are not modified.
    """
    # copy the first row rather than accumulating into it, so the
    # caller's data is left untouched.
    elems = list(lists[0])
    for l in lists[1:]:
        for i, e in enumerate(l):
            elems[i] += e
    return elems
def weighted(l: list, scale: float) -> list:
    """
    given list[float], returns a new list[float] with each element multiplied by `scale`
    """
    return [scale * element for element in l]