Update graphing

Cal Wing 2024-10-18 09:35:36 +10:00
parent 9c8c01f866
commit 0029238a0c
2 changed files with 48 additions and 25 deletions


@@ -32,15 +32,16 @@ probe-info:
c2c: 5.6 # mm
gauge-diam: 0.8 # mm
gauge-c2c: 1.8 #mm
data-record:
type: "scope"
config: "eProbe-Scope.txt"
data: "eProbe-Scope.csv"
trigger: # Redundant?
type: "channel"
channel: 4
alignment-offset: 0 # 601 # us [TODO] Make this auto-magic
delay: 100 # us
data-records:
- type: "scope"
config: "eProbe-Scope.txt"
data: "eProbe-Scope.csv"
trigger: # Redundant?
type: "channel"
channel: 4
alignment-offset: 0 # 601 # us [TODO] Make this auto-magic
delay: 100 # us
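
Since data-records is now a list rather than a single mapping, anything reading this config has to search it for the record type it wants. A minimal sketch of that lookup, assuming the block above lives in a per-shot config file (the filename below is illustrative; the pattern mirrors the data_locs lookup main.py adopts further down):

import yaml  # PyYAML

# Illustrative filename - the shot config's real name is not shown in this diff
with open("shot-config.yaml", "r") as f:
    dataInfo = yaml.safe_load(f)

# Collect the record types, then index back into the list
data_locs = [dr["type"] for dr in dataInfo["probe-info"]["data-records"]]
if "scope" in data_locs:
    scope_record = dataInfo["probe-info"]["data-records"][data_locs.index("scope")]
    print(scope_record["config"], scope_record["data"])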

main.py

@@ -57,10 +57,14 @@ def load_data(data_path: str, data={}) -> dict:
x2_channels = x2_tdms_data.groups()[0].channels()
x2_channel_names = tuple(c.name for c in x2_channels)
data_locs = [dr["type"] for dr in dataInfo["probe-info"]["data-records"]]
# Scope info _if it exists_
if dataInfo["probe-info"]["data-record"]["type"] == "scope":
scope_data_path = data_path + dataInfo["probe-info"]["data-record"]["data"]
scope_config_path = data_path + dataInfo["probe-info"]["data-record"]["config"] # [TODO] Read this file
if "scope" in data_locs:
scope_data_info = dataInfo["probe-info"]["data-records"][data_locs.index("scope")]
scope_data_path = data_path + scope_data_info["data"]
scope_config_path = data_path + scope_data_info["config"] # [TODO] Read this file
# Generate Data Headers - This could be better
with open(scope_data_path, 'r') as dfile:
@@ -100,12 +104,14 @@ def load_data(data_path: str, data={}) -> dict:
},
"time": {
"x2": None,
"trigger_index": None
"probes": None, # This may be x2 but may not - ie a scope was used
"trigger_index": None,
},
"data": {
"x2": {} # Only pop channels with a voltage scale in ./tunnel-info.yaml
"x2": {}, # Only pop channels with a voltage scale in ./tunnel-info.yaml
"probes": {} # Save probe data
},
"shock-speed": {}
"shock-speed": {} # Note all in us
}
# === Process the data ===
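
Pulling the old and new lines of this hunk together, each shot entry now carries probe fields alongside the X2 ones. A sketch of the resulting skeleton as a plain dict (the shock-point key is inferred from the later hunks; this is a reading of the diff, not a copy of main.py):

def make_shot_entry() -> dict:
    # Skeleton of data[x2_shot] after this commit
    return {
        "time": {
            "x2": None,
            "probes": None,        # May be the x2 time, unless a scope was used
            "trigger_index": None,
        },
        "data": {
            "x2": {},              # Only channels with a voltage scale in ./tunnel-info.yaml
            "probes": {},          # Probe (scope) traces
        },
        "shock-point": {},         # Filled by the canny shock finder below (inferred)
        "shock-speed": {},         # Note: all times in us
    }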
@@ -139,13 +145,16 @@ def load_data(data_path: str, data={}) -> dict:
# Add the time data
data[x2_shot]["time"] = {
"x2": x2_time_us,
"trigger_index": x2_trigger_index
"trigger_index": x2_trigger_index,
"probes": x2_time_us, # Until otherwise overridden - probe time is x2 time
}
# Scope timing _if it exists_
if dataInfo["probe-info"]["data-record"]["type"] == "scope":
trigger_info = dataInfo["probe-info"]["data-record"]["trigger"] # Get the scope trigger info
if "scope" in data_locs:
scope_data_info = dataInfo["probe-info"]["data-records"][data_locs.index("scope")]
trigger_info = scope_data_info["trigger"] # Get the scope trigger info
# Calc the scope time & apply any manual offsets
scope_time = (scope_data[:, 0] - scope_data[0, 0]) * 1e6 # to us
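
The scope trace is re-based to start at zero and converted to microseconds before any trigger offsets come into play. A sketch of just that step, assuming column 0 of eProbe-Scope.csv is sample time in seconds with one header row (the real column layout, and exactly how alignment-offset and delay are applied, sit outside the lines shown here):

import numpy as np

# Assumption: column 0 is time in seconds, one header row precedes the data
scope_data = np.loadtxt("eProbe-Scope.csv", delimiter=",", skiprows=1)

# Re-base to t = 0 and convert seconds -> microseconds
scope_time = (scope_data[:, 0] - scope_data[0, 0]) * 1e6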
@@ -175,6 +184,11 @@ def load_data(data_path: str, data={}) -> dict:
data[x2_shot]["data"]["scope"][i] = ref
data[x2_shot]["data"]["scope"][header] = ref
# Save Probe Data
if "scope" in data_locs:
data[x2_shot]["data"]["probes"] = [data[x2_shot]["data"]["scope"][1], data[x2_shot]["data"]["scope"][2]] # The two gauge channels
data[x2_shot]["time"]["probes"] = data[x2_shot]["time"]["scope"]
# Find Shock Times
# X2 - Canning Edge
@@ -187,18 +201,18 @@ def load_data(data_path: str, data={}) -> dict:
data[x2_shot]["shock-point"][ref] = shock_point, first_value
for i, probe in enumerate(dataInfo["probe-info"]["locations"]):
probeCh1 = data[x2_shot]["data"]["scope"][1]
probeCh2 = data[x2_shot]["data"]["scope"][2]
probeCh1 = data[x2_shot]["data"]["probes"][0]
probeCh2 = data[x2_shot]["data"]["probes"][1]
# Get the canny-args
cArgs = dataInfo["canny-args"]
doCannyPlot = False
if i in range(len(cArgs)):
sigma = cArgs[i]["sigma"]
post_pres = cArgs[i]["post_pres"]
post_sup_thresh = cArgs[i]["post_pres"]
else:
sigma = cArgs[-1]["sigma"]
post_pres = cArgs[-1]["post_pres"]
post_sup_thresh = cArgs[-1]["post_pres"]
# If this _isn't_ the first probe then apply a time offset
if i > 0:
@@ -208,7 +222,11 @@ def load_data(data_path: str, data={}) -> dict:
time_offset = None
# Find G1 Shock Time
first_value, first_value_uncertainty, _, _ = canny_shock_finder(scope_time, probeCh1, sigma=sigma, post_suppression_threshold=post_pres, plot=doCannyPlot, start_time=time_offset, print_func=None)
first_value, first_value_uncertainty, _, _ = canny_shock_finder(scope_time, probeCh1, sigma=sigma, post_suppression_threshold=post_sup_thresh, plot=doCannyPlot, start_time=time_offset, print_func=None)
if first_value is None:
print(f"[ERROR] {x2_shot} - {probe}-g1 could not be detected using: Sigma = {sigma}, post_suppression_threshold = {post_sup_thresh}")
raise ValueError(f"{probe}-g1 not detected")
shock_point = np.where(scope_time >= first_value)[0][0] # [BUG] Seems to give n+1
data[x2_shot]["shock-point"][f"{probe}-g1"] = shock_point, first_value
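
The g1 and g2 branches now share the same shape: run canny_shock_finder, fail loudly if nothing is detected, then map the arrival time back to an index. A hypothetical helper, not part of main.py, that factors this out using only the call signature visible in this diff:

import numpy as np
from canny_shock_finder import canny_shock_finder  # assumed import path

def find_gauge_shock(time_us, trace, sigma, post_sup_thresh, start_time=None, label=""):
    # Locate one gauge's shock arrival, raising if detection fails
    first_value, first_value_uncertainty, _, _ = canny_shock_finder(
        time_us, trace,
        sigma=sigma, post_suppression_threshold=post_sup_thresh,
        plot=False, start_time=start_time, print_func=None,
    )
    if first_value is None:
        raise ValueError(f"{label} not detected (sigma={sigma}, post_suppression_threshold={post_sup_thresh})")
    shock_point = np.where(time_us >= first_value)[0][0]  # [BUG] Seems to give n+1
    return shock_point, first_value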
@@ -218,7 +236,11 @@ def load_data(data_path: str, data={}) -> dict:
time_offset = data[x2_shot]["shock-point"][f"{privPoint}-g2"][1] + CANNY_TIME_OFFSET
# Find G2 Shock Time
first_value, first_value_uncertainty, _, _ = canny_shock_finder(scope_time, probeCh2, sigma=sigma, post_suppression_threshold=post_pres, plot=doCannyPlot, start_time=time_offset, print_func=None)
first_value, first_value_uncertainty, _, _ = canny_shock_finder(scope_time, probeCh2, sigma=sigma, post_suppression_threshold=post_sup_thresh, plot=doCannyPlot, start_time=time_offset, print_func=None)
if first_value is None:
print(f"[ERROR] {x2_shot} - {probe}-g2 could not be detected using: Sigma = {sigma}, post_suppression_threshold = {post_sup_thresh}")
raise ValueError(f"{probe}-g2 not detected")
shock_point = np.where(scope_time >= first_value)[0][0] # [BUG] Seems to give n+1
data[x2_shot]["shock-point"][f"{probe}-g2"] = shock_point, first_value
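
Nothing in this diff fills the shock-speed dictionary yet, but with two gauge arrival times in microseconds and the gauge-c2c spacing from the YAML above, the natural calculation is distance over time difference. A worked sketch (an assumption about the downstream step, not code from main.py):

GAUGE_C2C_MM = 1.8  # probe-info.gauge-c2c from the YAML above

def shock_speed_m_per_s(t_g1_us: float, t_g2_us: float) -> float:
    # Speed between the g1 and g2 gauges from their arrival times (us)
    dt_s = (t_g2_us - t_g1_us) * 1e-6
    return (GAUGE_C2C_MM * 1e-3) / dt_s

# e.g. arrivals at 100.0 us and 100.9 us: 0.0018 m / 0.9e-6 s = 2000 m/s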