# This script takes processed experimental data resulting from tdms_processing and csv_processing
# (kinematics in the JCS with model offsets already removed, kinetics as forces/moments applied to the femur defined in the tibia coordinate system),
# along with the model that results from running the customization script, and creates a model that applies the experimental conditions
# by first converting the kinetics data into the image coordinate system, and then applying those kinetics as external forces to the
# femur. Kinematics load curves are also added, so that the kinematics can be easily applied to the cylindrical joint by setting
# the translation/rotation of the joint to follow the load curve.
# See the main function at the end of the file for tips on running for a single model vs. many models.

import pandas as pd
import numpy as np
from lxml import etree as et
import math
import csv
import os

def myElement(tag, parent=None, text=None, attrib=None, **extra):
    """Shortcut to create an xml element (root or child), add text and other attributes.

    tag    -- element tag name
    parent -- optional element the new node is appended to
    text   -- optional element text (only set when truthy)
    attrib -- optional dict of attributes; extra keyword args become attributes too
    Returns the new element.
    """
    # use None instead of a mutable default argument ({} would be shared across calls)
    e = et.Element(tag, attrib if attrib is not None else {}, **extra)
    if text:
        e.text = text
    if parent is not None:
        parent.append(e)
    return e

def read_csv(csv_file):
    """Read a comma-separated file into a dict of column name -> numpy array.

    The first row is treated as the header; columns with a blank header cell
    are skipped, and blank data cells become NaN. All values are parsed as floats.
    """
    with open(csv_file, newline='', encoding='utf8') as fh:
        rows = list(csv.reader(fh, delimiter=','))

    header, data = rows[0], rows[1:]

    columns = {}
    for idx, label in enumerate(header):
        if not label:  # unnamed column - skip it
            continue
        columns[label] = np.asarray(
            [float(r[idx]) if r[idx] else np.nan for r in data])

    return columns

def convert_to_image(modelproperties_xml, kinetics_csv):
    """Convert kinetics data from the experiment tibia CS to the image CS.

    Reads the tibia axis vectors from the ModelProperties xml, rotates the
    force/moment channels of kinetics_csv into the image coordinate system,
    saves the result next to the input (filename '_TibiaCS' -> '_ImageCS'),
    and returns (image_kinetics_dataframe, loading_times). loading_times runs
    from 0 to 1 proportionally to the applied load, or stays None if no
    applied-load / flexion-angle column is found.
    """
    # tibia axes from the model properties file; rows are the tibia basis
    # vectors expressed in image coordinates
    ModelProperties = et.parse(modelproperties_xml).getroot()
    landmarks = ModelProperties.find('Landmarks')
    tibia_axes = np.array([
        [float(v) for v in landmarks.find(axis).text.split(',')]
        for axis in ('Xt_axis', 'Yt_axis', 'Zt_axis')])
    tibia_in_image = tibia_axes.T  # rotation matrix from tibia to image CS

    # some exports are utf-7 encoded; fall back to utf-8 when that fails
    # (was a bare except: - narrowed so Ctrl-C etc. are not swallowed)
    try:
        df = pd.read_csv(kinetics_csv, encoding='utf7')
    except Exception:
        df = pd.read_csv(kinetics_csv, encoding='utf8')

    # note - the hard-coded column names below assume the data processing
    # scripts produced this csv; should be made less hard-coded at some point
    loading_times = None
    moment_units = None

    # crop trailing NaN rows (an empty row sometimes appears at the end of the csv);
    # the count guard avoids an IndexError if the whole column were NaN
    fx = df['External Femur_x Load [N]'].values
    initial_len = len(fx)
    count = 0
    while count < initial_len and math.isnan(fx[initial_len - 1 - count]):
        count += 1
    if count > 0:
        df = df.truncate(after=initial_len - count - 1)

    for col in df.columns:
        if 'Moment' in col:
            # units differ between OKS and KneeHub exports ([Nm] vs [Nmm]);
            # remember whichever this file uses so the moment columns can be found
            moment_units = col.split(' ')[-1]
        if 'Applied' in col or "Flexion Angle" in col:
            applied_load = df[col].values
            # guard against a stray trailing NaN that survived truncation
            if math.isnan(applied_load[-1]):
                applied_load = applied_load[:-1]
            # pseudo-time from 0 to 1, spaced proportionally to the applied load
            loading_times = applied_load / applied_load[-1]

    # gather the channels explicitly to guarantee fx,fy,fz / mx,my,mz ordering
    Forces = np.array([df['External Femur_x Load [N]'].values,
                       df['External Femur_y Load [N]'].values,
                       df['External Femur_z Load [N]'].values])
    Moments = np.array([df['External Femur_x Moment ' + moment_units].values,
                        df['External Femur_y Moment ' + moment_units].values,
                        df['External Femur_z Moment ' + moment_units].values])

    # rotate the forces and moments into the image coordinate system
    Forces_image = np.matmul(tibia_in_image, Forces)
    Moments_image = np.matmul(tibia_in_image, Moments)

    # save the converted data in a new csv beside the input
    df2 = pd.DataFrame()
    df2['External Femur_x Load [N]'] = Forces_image[0]
    df2['External Femur_y Load [N]'] = Forces_image[1]
    df2['External Femur_z Load [N]'] = Forces_image[2]
    df2['External Femur_x Moment ' + moment_units] = Moments_image[0]
    df2['External Femur_y Moment ' + moment_units] = Moments_image[1]
    df2['External Femur_z Moment ' + moment_units] = Moments_image[2]

    df2.to_csv(kinetics_csv.replace('_TibiaCS', '_ImageCS'))

    return df2, loading_times

def add_kinetics_load_curves(febio_spec_root, image_kinetics, loading_times, relative= False):
    """Append one load curve per kinetics channel to the LoadData section.

    image_kinetics -- dataframe of kinetics channels in the image CS
    loading_times  -- pseudo-time (0 to 1) for each data row
    relative       -- when True, loading starts at time 1 and the first sample
                      is subtracted; otherwise loading starts at time 2
    Returns the list of new load-curve ids, in column order of image_kinetics.
    """
    load_data = febio_spec_root.find('LoadData')
    # continue numbering after the last existing curve
    next_id = int(load_data[-1].attrib["id"])

    new_ids = []
    for channel in image_kinetics.columns:
        next_id += 1
        new_ids.append(next_id)
        values = image_kinetics[channel].values

        if '[Nm]' in channel:
            # moments given in Nm must be converted to Nmm (model length unit is mm)
            curve = myElement('loadcurve', load_data, name=channel.replace('Nm', 'Nmm'), id=str(next_id))
            values = 1000 * values
        else:
            curve = myElement('loadcurve', load_data, name=channel, id=str(next_id))

        # hold the load at zero through the prestrain phase
        myElement('point', curve, text='0,0')
        myElement('point', curve, text='1,0')

        if relative:
            # loading starts at time 1; relative loading subtracts the initial condition
            start = 1.0
            values = values - values[0]
        else:
            start = 2.0  # time 1 to 2 is used to reach the initial load value

        for idx, val in enumerate(values):
            if not math.isnan(val):
                myElement('point', curve, text='{0:.2f},{1:.3f}'.format(loading_times[idx] + start, val))

    return new_ids

def add_must_points(febio_spec_root, image_kinetics, loading_times, relative = False):
    """Create a load curve for dtmax at the experiment time points, and switch
    the Step control section to plot at must points.

    image_kinetics is currently unused but kept for interface compatibility.
    """
    load_data = febio_spec_root.find('LoadData')

    # new curve id follows the last existing one
    curve_id = int(load_data[-1].attrib["id"]) + 1

    curve = myElement("loadcurve", load_data, name="must_points", id=str(curve_id))
    # time 0 to 1 is the prestrain phase
    myElement('point', curve, text='0,0')
    myElement('point', curve, text='1,1')

    # experiment kinematics start at time 1 (relative) or time 2 (absolute,
    # where 1 to 2 ramps to the initial load); use the experiment's own time
    # steps. Intermediate 0.1 steps for 1-2 were removed since small changes
    # sometimes caused convergence issues.
    offset = 1.0 if relative else 2.0
    dt_text = '0.1' if relative else '1'
    for t in loading_times + offset:
        if not math.isnan(t):
            myElement('point', curve, text='{0:.2f},{1}'.format(t, dt_text))

    control = febio_spec_root.find("Step").find("Control")

    # output plot state at every must point
    control.find("plot_level").text = 'PLOT_MUST_POINTS'

    # tie dtmax to the new curve so step sizes follow the must points
    control.find("time_stepper").find("dtmax").attrib["lc"] = str(curve_id)

def add_kinematics_load_curves(febio_spec_root, kinematics_csv , loading_times, relative = False):
    """Add one load curve per kinematics channel to the LoadData section.

    Channels are mapped to the fixed order [ML, AP, SI, FE, AA, IE] by keyword
    matching on the csv column names. Angle channels (name contains 'deg') are
    written in radians and the curve name is renamed deg -> rad.
    Returns the list of new load-curve ids, in that fixed order.

    Note: assumes all six channels exist in the csv; a missing channel leaves
    a None entry and raises when its column is read.
    """
    # some exports are utf-7 encoded; fall back to utf-8 when that fails
    # (was a bare except: - narrowed so Ctrl-C etc. are not swallowed)
    try:
        df = pd.read_csv(kinematics_csv, encoding='utf7')
    except Exception:
        df = pd.read_csv(kinematics_csv, encoding='utf8')

    # map each csv column onto its fixed slot via the keywords in its name;
    # first matching rule wins (same precedence as the original elif chain)
    keyword_slots = [
        (('lateral', 'medial'), 0),
        (('anterior', 'posterior'), 1),
        (('superior', 'inferior'), 2),
        (('extension', 'flexion'), 3),
        (('adduction', 'abduction'), 4),
        (('internal', 'external'), 5),
    ]
    kinematics_names = [None] * 6
    for col in df.columns:
        cl = col.lower()
        for keywords, slot in keyword_slots:
            if any(k in cl for k in keywords):
                kinematics_names[slot] = col
                break

    LoadData = febio_spec_root.find('LoadData')
    # continue numbering after the last existing curve
    lc_count = int(LoadData[-1].attrib["id"])

    lc_nums = []
    for n in kinematics_names:
        lc_count += 1
        lc_nums.append(lc_count)

        kinematics_values = list(map(float, df[n].values))

        is_angle = 'deg' in n.lower()
        # angle curves are written in radians, so rename deg -> rad
        curve_name = n.replace('deg', 'rad') if is_angle else n
        loadcurve = myElement('loadcurve', LoadData, name=curve_name, id=str(lc_count))
        # hold at zero through the prestrain phase
        myElement('point', loadcurve, text='0,0')
        myElement('point', loadcurve, text='1,0')

        if relative:
            # motion starts at time 1 and is applied relative to the first sample
            time = 1.0
            kinematics_values = np.array(kinematics_values) - kinematics_values[0]
        else:
            time = 2.0  # time 1 to 2 reaches the initial load value first

        for a, v in enumerate(kinematics_values):
            if math.isnan(v):
                continue
            t = loading_times[a] + time
            out = np.radians(v) if is_angle else v  # only angles are converted
            myElement('point', loadcurve, text='{0:.2f},{1:.3f}'.format(t, out))
    return lc_nums

def change_existing_lc(kinematics_csv, febio_spec_root, loading_times, relative = False):
    """Extend the model's pre-existing load curves to cover the loading phase.

    For non-relative loading, the prestrain curve is held at its final value
    and the spring-tie Loading_Curve at 1.0 out to time 3, so both stay active
    while the experimental kinetics are applied.

    kinematics_csv and loading_times are currently unused (they supported a
    flexion-curve rewrite that has since been removed) but are kept for
    interface compatibility; reading the csv also validates the file exists.
    """
    # some exports are utf-7 encoded; fall back to utf-8 when that fails
    # (was a bare except: - narrowed so Ctrl-C etc. are not swallowed)
    try:
        df = pd.read_csv(kinematics_csv, encoding='utf7')
    except Exception:
        df = pd.read_csv(kinematics_csv, encoding='utf8')

    LoadData = febio_spec_root.find('LoadData')

    for loadcurve in LoadData:
        name = loadcurve.attrib["name"]
        if "prestrain" in name:
            if not relative:
                # hold the prestrain at the value of its last time point
                last_point = loadcurve[-1].text
                prstrn = last_point.split(',')[-1]
                myElement('point', loadcurve, text='3,' + prstrn)
        elif "Loading_Curve" in name:
            if not relative:
                myElement('point', loadcurve, text='3,1')


def constraint_load_curves(Step, lc_nums):
    """Point each cylindrical-joint constraint at its kinematics load curves.

    lc_nums holds six curve ids ordered [ML, AP, SI, FE, AA, IE]; each named
    constraint takes one translation and one rotation curve from that list,
    with the rotation scale set to 1 (so flexion is switched on).
    Constraints without a name attribute are skipped.
    """
    # map a keyword in the constraint name to (translation index, rotation index)
    axis_slots = {"Flexion": (0, 3), "Abduction": (1, 4), "Internal": (2, 5)}

    for constraint in Step.find('Constraints'):
        try:
            const_name = constraint.attrib["name"]
        except KeyError:  # a constraint without a name
            continue

        slots = None
        for keyword, pair in axis_slots.items():
            if keyword in const_name:
                slots = pair
                break
        if slots is None:
            continue

        # set the load curves and enable them with a scale of 1.0
        translation = constraint.find('translation')
        rotation = constraint.find('rotation')
        translation.text = "1.0"
        rotation.text = "1.0"
        translation.attrib["lc"] = str(lc_nums[slots[0]])
        rotation.attrib["lc"] = str(lc_nums[slots[1]])


def add_rigid_forces(boundary_secion, lc_nums):
    """Attach the six kinetics load curves as rigid-body forces/torques on
    material 1 (the femur), with a scale factor of 1.0 each.

    lc_nums -- the six load-curve ids in order [x, y, z, Rx, Ry, Rz]
    """
    rigid_body = myElement('rigid_body', boundary_secion, mat="1")
    for axis, curve_id in zip(('x', 'y', 'z', 'Rx', 'Ry', 'Rz'), lc_nums):
        myElement('force', rigid_body, bc=axis, lc=str(curve_id), text='1.0')


def write_file(xml_tree, new_filename):
    """Write the lxml tree to disk with pretty printing.

    lxml does not re-indent nodes that already carry whitespace text, so the
    file is parsed back with blank text stripped and written a second time to
    guarantee clean formatting.
    """
    xml_tree.write(new_filename, xml_declaration=True, pretty_print=True)

    # re-parse without blank text, then re-write for proper pretty printing
    reparsed = et.parse(new_filename, et.XMLParser(remove_blank_text=True))
    reparsed.write(new_filename, xml_declaration=True, pretty_print=True)

def make_model(modelproperties_xml, febio_file, kinetics_csv, kinematics_csv, name=None, relative = False):
    """Build a model that mimics the experiment.

    Converts the kinetics into the image CS, adds kinetics/kinematics load
    curves to the FEBio file, wires them into the femur forces and joint
    constraints, tunes the control settings, and writes the result (either
    over febio_file or as <name>.feb in the same directory).
    """
    image_kinetics, loading_times = convert_to_image(modelproperties_xml, kinetics_csv)

    febio_tree = et.parse(febio_file)
    root = febio_tree.getroot()

    # NOTE: the order of these calls matters - load-curve ids are assigned
    # sequentially as each function appends to LoadData
    kinetics_lcs = add_kinetics_load_curves(root, image_kinetics, loading_times, relative=relative)
    change_existing_lc(kinematics_csv, root, loading_times, relative=relative)
    kinematics_lcs = add_kinematics_load_curves(root, kinematics_csv, loading_times, relative=relative)
    # must points make febio output results at the desired time steps
    add_must_points(root, image_kinetics, loading_times, relative=relative)

    # control settings tuned for convergence: step size of 1 where possible
    # (must points control the rest), more reformations, looser residual
    step = root.find('Step')
    control = step.find('Control')
    control.find('time_steps').text = "2" if relative else "3"
    control.find('step_size').text = "1"
    control.find('max_refs').text = "50"
    control.find('min_residual').text = "0.01"

    # apply the kinetics as external forces on the femur
    add_rigid_forces(step.find('Boundary'), kinetics_lcs)

    # wire the kinematics curves into the constraints; only the flexion
    # rotation is switched on, others can be enabled manually later
    constraint_load_curves(step, kinematics_lcs)

    # overwrite the original file unless a new name was given
    if name is None:
        out_file = febio_file
    else:
        out_file = os.path.join(os.path.dirname(febio_file), name + '.feb')

    write_file(febio_tree, out_file)

def oks003_calibration():
    """Build the four oks003 calibration laxity models from hard-coded test paths."""
    modelprops_file = 'C:/oks/app/KneeHub/test/registration02/ModelProperties.xml'

    # (folder, trial prefix) for each laxity case, in run order
    cases = [
        ('AnteriorLaxity', 'Laxity_0deg_AP1'),
        ('PosteriorLaxity', 'Laxity_0deg_AP2'),
        ('VarusLaxity', 'Laxity_0deg_VV1'),
        ('ValgusLaxity', 'Laxity_0deg_VV2'),
    ]

    base = 'C:/oks/app/KneeHub/test/'
    for folder, trial in cases:
        febio_file = base + folder + '/FeBio_custom.feb'
        kinetics_csv = base + folder + '/' + trial + '_kinetics_in_TibiaCS.csv'
        kinematics_csv = base + folder + '/' + trial + '_kinematics_in_JCS.csv'
        make_model(modelprops_file, febio_file, kinetics_csv, kinematics_csv)

def du02_calibration():
    """Build the du02 recalibration anterior-laxity model from hard-coded paths.

    The posterior/varus/valgus cases were run previously and are easier to
    drive from an Exp_to_Mod xml (see read_from_xml).
    """
    base = "C:/oks/app/KneeHub/DU02/recalibration/InSituStrain/"
    modelprops_file = base + "ModelProperties.xml"

    # AnteriorLaxity
    febio_file = base + "AnteriorLaxity/FeBio_custom.feb"
    kinetics_csv = base + "AnteriorLaxity/Laxity_0_AP1_kinetics_in_TibiaCS.csv"
    kinematics_csv = base + "AnteriorLaxity/Laxity_0_AP1_kinematics_in_JCS.csv"

    make_model(modelprops_file, febio_file, kinetics_csv, kinematics_csv, relative=False)

def read_from_xml(xml_file):
    """Read the files from an xml file and create all necessary models.

    The xml lists shared inputs (febio template, model properties file,
    processed-data directory) under <general_files>, plus one entry per model
    under <Models> naming its kinetics/kinematics csvs. A failure in one
    model is reported and the remaining models still run.
    """
    file_info = et.parse(xml_file).getroot()

    gen_files = file_info.find("general_files")
    feb_file = gen_files.find("febio_file").text
    mod_props_file = gen_files.find("model_properties_file").text
    processed_data_folder = gen_files.find("processed_data_directory").text

    for mod in file_info.find("Models"):
        if mod.tag is et.Comment:  # skip xml comments between model entries
            continue
        model_name = mod.attrib["name"]
        try:
            kinetics_csv = os.path.join(processed_data_folder, mod.find("kinetics_csv").text)
            kinematics_csv = os.path.join(processed_data_folder, mod.find("kinematics_csv").text)
            make_model(mod_props_file, feb_file, kinetics_csv, kinematics_csv, name=model_name)
        except Exception as err:
            # keep going (a bad entry should not stop the batch), but say why it failed;
            # was a bare except: which also swallowed KeyboardInterrupt/SystemExit
            print("failed creating model " + model_name + ": " + str(err))


if __name__ == '__main__':

    # to run for a single model, change the file paths here and run:

    # modelprops_file = "C:\\Users\schwara2\Documents\Open_Knees\du02_calibration\InSituStrain\Spring_Ties2\ModelProperties.xml"
    # febio_file = "C:\\Users\schwara2\Documents\Open_Knees\du02_calibration\InSituStrain\Spring_Ties2\AnteriorLaxity\Test\FeBio_custom.feb"
    # kinematics_csv ="C:\\Users\schwara2\Documents\Open_Knees\du02_calibration\InSituStrain\Spring_Ties2\AnteriorLaxity\Test\Laxity_9deg_AP1_kinematics_in_JCS.csv"
    # kinetics_csv = "C:\\Users\schwara2\Documents\Open_Knees\du02_calibration\InSituStrain\Spring_Ties2\AnteriorLaxity\Test\Laxity_9deg_AP1_kinetics_in_TibiaCS.csv"
    
    # make_model(modelprops_file, febio_file, kinetics_csv, kinematics_csv, relative=False)

    # to run on a batch of models, point read_from_xml at an Exp_to_Mod xml file
    # example xml file: https://simtk.org/svn/openknee/app/KneeHub/oks003/calibration/CustomizedFullModels/ExperimentalLoading02/Exp_to_Mod.xml
    # exp_to_model_xml = "C:\\Users\schwara2\Documents\Open_Knees\du02_calibration\CustomizedFullModels\ExperimentalLoading03\Exp_to_Mod.xml"
    # read_from_xml(exp_to_model_xml)

    # these helper functions were used when preparing models for the calibration phase,
    # but driving everything from an Exp_to_Mod xml file is generally easier
    oks003_calibration()
    #du02_calibration()
