Refactor data loading

parent 5a24ab0bc1
commit 7d6c0514f6
@@ -28,6 +28,6 @@ probe-info:
     trigger:
         type: "channel"
         channel: 4
-        alignment-offset: 601000 # ns
+        alignment-offset: 601 # us [TODO] Make this auto-magic
         delay: 100 # us
 
@@ -29,7 +29,7 @@ probe-info:
     trigger: # Redundant?
         type: "channel"
         channel: 4
-        alignment-offset: 499500 # ns
+        alignment-offset: 601 # us [TODO] Make this auto-magic
         delay: 100 # us
 
 
@@ -30,7 +30,7 @@ probe-info:
     trigger: # Redundant?
         type: "channel"
         channel: 4
-        alignment-offset: 499500 # ns
+        alignment-offset: 601 # us [TODO] Make this auto-magic
         delay: 100 # us
 
 
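The trigger's alignment-offset is now recorded in microseconds, matching the microsecond scope timebase used in main.py, so it can be applied without a unit conversion. A minimal sketch of the consuming side, assuming the ./data/<shot>/_info.yaml layout used below; the time array is synthetic:

```python
import numpy as np
import yaml

# Shot folder from data_to_load in main.py; each folder carries its own _info.yaml
with open("./data/x2s5823/_info.yaml", "r") as f:
    info = yaml.safe_load(f)

trig = info["probe-info"]["data-record"]["trigger"]

# Synthetic stand-in for the scope's time column, already in microseconds
raw_time_us = np.linspace(0.0, 2000.0, 2001)

# Both fields are in us now, so they apply directly
scope_time_us = raw_time_us - trig["alignment-offset"] + trig["delay"]
```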
199 main.py
@@ -19,13 +19,14 @@ folders = ["./images"]
 for folder in folders:
     if not os.path.isdir(folder): os.mkdir(folder)
 
-# Load Data
+# Data Paths
 DATA_PATH = "./data"
 DATA_INFO = "_info.yaml"
-PCB_INFO_FILE = "./pcb-info.yaml"
+TUNNEL_INFO_FILE = "./tunnel-info.yaml"
+SAMPLES_TO_AVG = 500
 
-with open(PCB_INFO_FILE, 'r') as file:
-    PCB_INFO = yaml.safe_load(file)
+with open(TUNNEL_INFO_FILE, 'r') as file:
+    TUNNEL_INFO = yaml.safe_load(file)
 
 data_to_load = [
     "x2s5823",
@@ -33,32 +34,39 @@ data_to_load = [
     "x2s5827"
 ]
 
-data = {}
-for dp in data_to_load:
-    data_path = f"{DATA_PATH}/{dp}/"
-    data_info_path = data_path + DATA_INFO
-    if not os.path.exists(data_info_path):
-        print(f"[ERR] Could not find data info file: '{data_info_path}'")
-        print(f"[WARN] Not Loading Data '{dp}'")
-        continue
-
-    with open(data_info_path, 'r') as file:
-        # Load data info (Cal)
-        dataInfo = yaml.safe_load(file)
+# ==== Data Loading & Processing ====
+def load_data(data_to_load: list[str]) -> dict:
+    data = {}
+    for dp in data_to_load:
+        data_path = f"{DATA_PATH}/{dp}/"
+        data_info_path = data_path + DATA_INFO
+        if not os.path.exists(data_info_path):
+            print(f"[ERR] Could not find data info file: '{data_info_path}'")
+            print(f"[WARN] Not Loading Data '{dp}'")
+            continue
+
+        # Load Shot Data Info YAML File (Cal)
+        with open(data_info_path, 'r') as file:
+            dataInfo = yaml.safe_load(file)
 
+        # Grab the shot name
         x2_shot = dataInfo["shot-info"]["name"]
 
+        # Load Raw Data
+        # TDMS File (X2 DAQ Data)
         x2_tdms_data = TdmsFile.read(data_path + dataInfo["shot-info"]['tdms'], raw_timestamps=True)
         x2_channels = x2_tdms_data.groups()[0].channels()
+        x2_channel_names = tuple(c.name for c in x2_channels)
 
+        # Scope info _if it exists_
         if dataInfo["probe-info"]["data-record"]["type"] == "scope":
             scope_data_path = data_path + dataInfo["probe-info"]["data-record"]["data"]
-            scope_config_path = data_path + dataInfo["probe-info"]["data-record"]["config"]
+            scope_config_path = data_path + dataInfo["probe-info"]["data-record"]["config"] # [TODO] Read this file
 
-            # Generate Headers
+            # Generate Data Headers - This could be better
             with open(scope_data_path, 'r') as dfile:
                 scope_header = []
 
                 header_lines = []
                 for i, line in enumerate(dfile):
                     if i > 1: break
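npTDMS does the heavy lifting in the hunk above. A minimal sketch of the access pattern, with a hypothetical TDMS filename since the real one comes from dataInfo["shot-info"]["tdms"]:

```python
from nptdms import TdmsFile

# Hypothetical path; raw_timestamps=True preserves the DAQ's native timestamp type
tdms = TdmsFile.read("./data/x2s5823/shot.tdms", raw_timestamps=True)

group = tdms.groups()[0]                 # the code above assumes a single group
channels = group.channels()              # list of channel objects
names = tuple(c.name for c in channels)  # later used to look up channels by name

trace = channels[0][:]                   # slicing a channel yields its samples
```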
@@ -72,64 +80,100 @@ for dp in data_to_load:
                     outStr = f"{name} [{header_lines[1][i][0]}]"
                 else:
                     outStr = f"{name} [{header_lines[1][i]}]"
 
                 scope_header.append(outStr)
 
-            #scope_data = pd.read_csv(scope_data_path, names=scope_header, skiprows=2)
+            # Load the Scope CSV Data
             scope_data = np.loadtxt(scope_data_path, delimiter=',', skiprows=2)
 
 
+        # Build a data object (this could be cached - or partially cached if I was clever enough)
+        # Raw Data is always added - processing comes after
         data[x2_shot] = {
             "info": dataInfo,
-            "probe_headers": scope_header,
-            "probes": scope_data,
-            "x2": x2_channels,
-            "x2-tdms": x2_tdms_data
+            "shot_time": np.datetime64(f"{dataInfo["date"]}T{dataInfo["time"]}"),
+            "raw-data": {
+                "probe_headers": scope_header,
+                "probes": scope_data,
+                "x2": x2_channels,
+                "x2-channels": x2_channel_names,
+                "x2-tdms": x2_tdms_data
+            },
+            "time": {
+                "x2": None,
+                "trigger_index": None
+            },
+            "data": {
+                "x2": {} # Only pop channels with a voltage scale in ./tunnel-info.yaml
+            }
         }
 
+        # === Process the data ===
+        # Generate X2 time arrays
+        time_data = x2_channels[0]
+        second_fractions = np.array(time_data[:].second_fractions, dtype=int) # 2^-64 ths of a second
+        x2_time_seconds = (second_fractions - second_fractions[0]) * (2**(-64)) # 0 time data and convert to seconds
+        x2_time_us = x2_time_seconds * 1000 # Scale to ms
+
+        # --- Un Scale Data ---
+        for channel, vScale in TUNNEL_INFO["volt-scale"].items():
+            # Get the channel index from its name
+            chIndex = x2_channel_names.index(channel)
+
+            # Calculate the average noise offset
+            avg_noise = x2_channels[chIndex][0:SAMPLES_TO_AVG].mean()
+
+            # Save the channel data
+            data[x2_shot]["data"]["x2"][channel] = (x2_channels[chIndex][:] - avg_noise) * vScale
+
+        # Process Trigger Info
+        trigger_volts = data[x2_shot]["data"]["x2"]["trigbox"] # Use a mean to offset
+        x2_trigger_index = np.where(trigger_volts > 1)[0][0]
+        x2_trigger_time = x2_time_us[x2_trigger_index]
+
+        # Add the time data
+        data[x2_shot]["time"] = {
+            "x2": x2_time_us,
+            "trigger_index": x2_trigger_index
+        }
+
+
+        # Scope timing _if it exists_
+        if dataInfo["probe-info"]["data-record"]["type"] == "scope":
+            trigger_info = dataInfo["probe-info"]["data-record"]["trigger"] # Get the scope trigger info
+
+            scope_time = (scope_data[:, 0] - scope_data[0, 0]) * 1000 # to us
+            scope_time -= trigger_info["alignment-offset"] # manual offset delay
+            scope_time += trigger_info["delay"] # us delay from the actual trigger signal to the scope received trigger
+
+            # Trigger Alignment
+            scope_trigger_volts = (scope_data[:, 3] - scope_data[0:SAMPLES_TO_AVG, 3].mean()) # Use a mean here too
+            scope_trigger_index = np.where(scope_trigger_volts > 1)[0][0]
+            scope_trigger_time = scope_time[scope_trigger_index]
+
+            scope_alignment = x2_trigger_time - scope_trigger_time
+
+            scope_time += scope_alignment
+
+            data[x2_shot]["time"]["scope"] = scope_time
+            data[x2_shot]["time"]["scope-offset"] = scope_alignment
+
+            data[x2_shot]["data"]["scope"] = {}
+            for i, header in enumerate(scope_header):
+                if i == 0: continue # Don't record time
+                data[x2_shot]["data"]["scope"][header] = scope_data[i]
+
+
+    # Return the data & the successfully loaded data keys
+    return data, tuple(data.keys())
+
-loaded_data = list(data.keys())
+data, loaded_data = load_data(data_to_load)
 print("Loaded Data")
 
-def process_data(gData: dict):
-    #x2_time = (gData["x2"][0][:] - gData["x2"][0][0]).astype('timedelta64[ns]') # Convert x2 to timedelta64[ns]
-
-    time_data = data[loaded_data[0]]["x2"][0]
-    second_fractions = np.array(time_data[:].second_fractions, dtype=int)
-    seconds = (second_fractions - second_fractions[0]) * (2**(-64))
-    ns_seconds = seconds * 1E9
-    x2_time = ns_seconds
-
-    trigger_info = gData["info"]["probe-info"]["data-record"]["trigger"] # Get the scope trigger info
-
-    # Convert the scope times into timedelta64 & apply config offsets & delays
-    #scope_time = np.array([ pd.Timedelta(t, 's').to_numpy() for t in (gData["probes"][:, 0] - gData["probes"][0, 0])])
-    #scope_time -= np.timedelta64(trigger_info["alignment-offset"], 'ns')
-    #scope_time += np.timedelta64(trigger_info["delay"], 'us')
-
-    scope_time = (gData["probes"][:, 0] - gData["probes"][0, 0]) * 1E9 # to ns
-    scope_time -= trigger_info["alignment-offset"]
-    scope_time += trigger_info["delay"] * 1000 # us -> ns
-
-
-    start_timestamp = np.datetime64(f"{gData["info"]["date"]}T{gData["info"]["time"]}")
-
-    # start_time = 0
-    # x2_timesteps = np.array([0 for _ in x2_time])
-
-    # for i, dt in enumerate(x2_time):
-    #     dt = dt.astype("int")
-    #     if i == 0:
-    #         x2_timesteps[i] = start_time + dt # should be 0
-    #     else:
-    #         x2_timesteps[i] = x2_timesteps[i-1] + dt
-
-    # test = x2_time.cumsum()
-
-    return x2_time, scope_time
-
-
+#[TODO] Refactor
 def genGraph(gData: dict, showPlot: bool = True):
-    x2_time, scope_time = process_data(gData)
-
     graphData = {
         "title": f"Shock response Time\nFor {gData['info']['long_name']}",
         "xLabel": "Time (ns)",
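The heart of the new processing is the trigger alignment: find the first sample above 1 V on each record, then shift the scope timebase so the two trigger instants coincide. A self-contained sketch with synthetic data:

```python
import numpy as np

# Synthetic stand-ins: the same event recorded on two different clocks
x2_time_us = np.arange(0.0, 1000.0)           # DAQ timebase
scope_time_us = np.arange(0.0, 1000.0) - 250  # scope timebase, offset by -250 us

x2_trig = np.zeros_like(x2_time_us)
scope_trig = np.zeros_like(scope_time_us)
x2_trig[400:] = 5.0     # trigger rises at the 400th sample on both records
scope_trig[400:] = 5.0

# First sample above 1 V on each record, as in the diff
x2_trigger_time = x2_time_us[np.where(x2_trig > 1)[0][0]]
scope_trigger_time = scope_time_us[np.where(scope_trig > 1)[0][0]]

# Shift the scope record so both triggers land on the same instant
scope_alignment = x2_trigger_time - scope_trigger_time  # = 250 us here
scope_time_us += scope_alignment
```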
@@ -173,22 +217,19 @@ def genGraph(gData: dict, showPlot: bool = True):
 
     makeGraph(graphData, doProgramBlock=False, showPlot=showPlot, figSavePath="./images/{0}.png")
 
 
 
 #print("Graphing showPlot=showPlot, Data")
-genGraph(data[loaded_data[0]], showPlot=False)
-genGraph(data[loaded_data[1]], showPlot=False)
+#genGraph(data[loaded_data[0]], showPlot=False)
+#genGraph(data[loaded_data[1]], showPlot=False)
 
 
-# Try to process things
-gData = data[loaded_data[0]]
-x2_time, scope_time = process_data(gData)
-
-#time = (gData["x2"][0][:] - gData["x2"][0][0])
-
-x2_out = canny_shock_finder(x2_time, (gData["x2"][4][:] - gData["x2"][4][0]) * 0.0148)
-
-print(x2_out)
-
-# This forces matplotlib to hang untill I tell it to close all windows
+#x2_out = canny_shock_finder(x2_time, (gData["raw-data"]["x2"][16][:] - gData["raw-data"]["x2"][16][0]))
+
+#print(x2_out)
+
+# This forces matplotlib to hang until I tell it to close all windows
 pltKeyClose()
 
 print("Done")
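For reference, the second_fractions handling that moved into load_data relies on npTDMS raw timestamps, which store sub-second time as integer counts of 2^-64 seconds. The conversion reduces to this (synthetic counts in place of a real channel; 1 s = 1e6 us):

```python
import numpy as np

# Stand-in for time_data[:].second_fractions: integer counts of 2**-64 s
second_fractions = np.array([0, 2**44, 2**45, 3 * 2**44], dtype=np.int64)

# Zero on the first sample, then convert counts to seconds
seconds = (second_fractions - second_fractions[0]) * 2.0**-64

micro_seconds = seconds * 1e6  # about 0.95 us per 2**44 counts
```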
@@ -32,3 +32,6 @@ volt-scale:
     at4: 0.01435 #V/kPa
     at5: 0.01447 #V/kPa
     at6: 0.01442 #V/kPa
+
+    trigbox: 0.001 #V / mV
+    trigbox_delay: 0.001 #V / mV
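load_data matches each key under volt-scale to the TDMS channel of the same name, zeroes the trace on the mean of its first SAMPLES_TO_AVG samples, and multiplies by the factor. A synthetic sketch mirroring that committed loop:

```python
import numpy as np

SAMPLES_TO_AVG = 500
volt_scale = {"at4": 0.01435, "at5": 0.01447, "at6": 0.01442}  # from this file

# Synthetic stand-in for one raw channel trace: noise floor, then a step
raw = np.concatenate([np.full(SAMPLES_TO_AVG, 0.2), np.full(1000, 3.7)])

# Zero using the pre-trigger mean, then apply the per-channel factor
avg_noise = raw[0:SAMPLES_TO_AVG].mean()
scaled = (raw - avg_noise) * volt_scale["at4"]
```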