Create data caching var
This commit is contained in:
parent 7946e1fcf7
commit 19d980b086
87 main.py
@@ -2,7 +2,7 @@
 # Cal Wing, Sem 2 2023

 # Import System / Common Libs
-import os, time
+import os, time, pickle
 import numpy as np
 import pandas as pd
 from tqdm import tqdm
@@ -17,10 +17,45 @@ from makeGraph import makeGraph, pltKeyClose, UQ_COLOURS as UQC # Custom Graphin
 #def cos(angle): return np.cos(np.deg2rad(angle))

 # Make sure the relevant folders exist
-folders = ["./images"]
+folders = ["./images", "./tmp"]
 for folder in folders:
     if not os.path.isdir(folder): os.mkdir(folder)

+# This is a caching function: it checks if a cache file exists and loads it, or it calculates & saves the data.
+def cacheData(dataFilePath: str, calcFunction: callable, args: tuple = (), kargs: dict = {}):
+    data = None
+    dataFileExt = dataFilePath.rsplit(".")[-1]
+
+    # Check if file exists
+    if os.path.isfile(dataFilePath):
+        print(f"Found datafile \"{dataFilePath}\", loading data.")
+        # Check if file is a compressed numpy file
+        if dataFileExt == "npz":
+            data = np.load(dataFilePath)["arr_0"]
+        elif dataFileExt == "pkl":
+            # If it's not then just pickle it normally
+            with open(dataFilePath, 'rb') as handle:
+                data = pickle.load(handle)
+        else:
+            raise TypeError(f"Cannot determine file type of: {dataFilePath}")
+    else:
+        print(f"Could not find data file \"{dataFilePath}\", generating & saving data.")
+        # Calculate Value
+        data = calcFunction(*args, **kargs)
+
+        # Check if file is a compressed numpy file
+        if dataFileExt == "npz":
+            np.savez_compressed(dataFilePath, data)
+        elif dataFileExt == "pkl":
+            # If it's not then just pickle it normally
+            with open(dataFilePath, 'wb') as handle:
+                pickle.dump(data, handle)
+        else:
+            raise TypeError(f"Cannot determine file type of: {dataFilePath}")
+
+    if data is None: raise ValueError("Could not import or generate data requested")
+    return data
+
 # IMU Data Loading
 # I map it into a heading to add units / make things make more sense
 ## The gyroscopic body angular rates from the IMU are given:
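For context, a minimal usage sketch of the new cacheData helper, not part of the diff: the extension of the cache path selects the serializer, so a ".npz" path goes through numpy and a ".pkl" path through pickle. The expensiveCalc function and the "./tmp/squares.npz" path below are made-up examples.

# Illustrative sketch only - expensiveCalc and the cache path are assumptions, not code from this commit.
def expensiveCalc(n):
    # Hypothetical stand-in for a slow computation worth caching.
    return np.arange(n) ** 2

# First run computes and writes ./tmp/squares.npz; later runs load the cached array instead.
squares = cacheData("./tmp/squares.npz", expensiveCalc, args=(1_000_000,))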
@@ -28,9 +63,9 @@ for folder in folders:
 # - WBE_2 (in rad/s) - the pitch rate about the body-fixed y-axis
 # - WBE_3 (in rad/s) - the yaw rate about the body-fixed z-axis
 ## Specific forces:
-# - FSP_X (in m/s2) - the specific force in the body-fixed x-direction
-# - FSP_Y (in m/s2) - the specific force in the body-fixed y-direction
-# - FSP_Z (in m/s2) - the specific force in the body-fixed z-direction
+# - FSP_X (in m/s^2) - the specific force in the body-fixed x-direction
+# - FSP_Y (in m/s^2) - the specific force in the body-fixed y-direction
+# - FSP_Z (in m/s^2) - the specific force in the body-fixed z-direction
 IMU_TIME_HEADER = ["Time [s]"]
 IMU_WBE_HEADERS = ["WBE_1 [rad/s]", "WBE_2 [rad/s]", "WBE_3 [rad/s]"]
 IMU_FSP_HEADERS = ["FSP_X [m/s^2]", "FSP_Y [m/s^2]", "FSP_Z [m/s^2]"]
@@ -47,9 +82,11 @@ def importIMUData(mission, imu):

     return data

 # Load the Mission Data
 m1_IMUData = importIMUData(1, 0), importIMUData(1, 1) #(L, H) Data
 m2_IMUData = importIMUData(2, 0), importIMUData(2, 1)

 # NED Translation & Force Functions
 INIT_EULER_ANGLES = (0, 0, 0)
 def translate2NED(angles, euler_angles):
     phi, theta, psi = euler_angles
@@ -77,19 +114,37 @@ def getNEDForces(NEDPos):

     return forceMat

-if __name__ == '__main__':
-    dataPoint = m1_IMUData[0][IMU_WBE_HEADERS].iloc[0]
-    trans = translate2NED(dataPoint.values, INIT_EULER_ANGLES)
-    forces = getNEDForces((trans[0][0], trans[1][0], trans[2][0]))
+def calculateTranslatedData(missionWBEData):
+    print("Translating Motion & Calculating Resulting Forces")
+    translatedData = pd.DataFrame(columns=IMU_TIME_HEADER + IMU_WBE_HEADERS + ["Forces"])
+    for i in tqdm(range(len(missionWBEData))):
+        dataPoint = missionWBEData[IMU_WBE_HEADERS].iloc[i]
+        trans = translate2NED(dataPoint.values, INIT_EULER_ANGLES).flatten()
+        forces = getNEDForces(trans)
+        dataRow = {
+            IMU_TIME_HEADER[0]: missionWBEData[IMU_TIME_HEADER].iloc[i],
+            IMU_WBE_HEADERS[0]: trans[0],
+            IMU_WBE_HEADERS[1]: trans[1],
+            IMU_WBE_HEADERS[2]: trans[2],
+            "Forces": forces
+        }
+        translatedData.loc[i] = dataRow

-    print("Raw Data")
-    print(dataPoint)

-    print("\nTranslated Point")
-    print(trans)

+    return translatedData

-    print("\nForces")
-    print(forces)
+if __name__ == '__main__':
+    missionWBEData = m1_IMUData[0][IMU_TIME_HEADER + IMU_WBE_HEADERS]
+
+
+    #print("Raw Data")
+    #print(dataPoint)
+
+    #print("\nTranslated Point")
+    #print(trans)
+
+    #print("\nForces")
+    #print(forces)

     input("Damn")
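The translation loop in calculateTranslatedData is the kind of slow step the new helper could cache, which fits the commit title "Create data caching var". A hedged sketch of how the two pieces might be wired together inside the new __main__ block; the "./tmp/m1_translated.pkl" path and variable name are assumptions, not something the diff adds:

# Illustrative sketch only - the cache path is assumed, not part of the commit.
# First run executes the tqdm loop and pickles the resulting DataFrame; later runs just load it.
m1_Translated = cacheData("./tmp/m1_translated.pkl",
                          calculateTranslatedData,
                          args=(m1_IMUData[0][IMU_TIME_HEADER + IMU_WBE_HEADERS],))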