Commit f176f22a authored by Laura Ketzer

Worked on the Owen & Wu (2017) sample and started evolving it using Platypos (with beta & K off, eps=0.1)

parent a7877544
import numpy as np
import astropy.units as u
from astropy import constants as const
import scipy.optimize as optimize
from scipy.optimize import fsolve
def Lx_evo(t, track_dict):
"""
@@ -154,6 +156,23 @@ def L_xuv_all(Lx):
log_L_xuv = np.log10(Lx+Leuv)
return 10.**log_L_xuv#*u.erg/u.s
#####################################################################################################################
def undo_what_Lxuv_all_does(L_xuv):
""" If you have LXUV given, this takes the Sanz-Forcada et al. (2011) scaling relation and reverses it to estimate Lx."""
def Calculate_Lx(Lx):
return Lx + 10**(0.86*np.log10(Lx)+4.8) - L_xuv
if (L_xuv > 1.*10**29):
f_guess = L_xuv
elif (L_xuv <= 1.*10**29) and (L_xuv > 1.*10**26):
f_guess = 1.*10**25
elif (L_xuv <= 1.*10**26):
f_guess = 1.*10**22
#print(f_guess)
Lx = optimize.fsolve(Calculate_Lx, x0=f_guess)[0]
return Lx
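# A minimal roundtrip sketch (the input value is purely illustrative): feeding the recovered Lx
# back into L_xuv_all should return the original L_xuv, up to the fsolve tolerance.
# >>> Lx_est = undo_what_Lxuv_all_does(5.e28)
# >>> L_xuv_all(Lx_est)  # ~5e28 erg/s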
#####################################################################################################################
# def Lx_relation_Booth(t, R_star):
# """R_star in terms of solar units"""
@@ -163,4 +182,4 @@ def L_xuv_all(Lx):
#####################################################################################################################
def calculate_Lx_sat(L_star_bol):
""" Typical relation (from observations) to estimate the saturation X-ray luminosity given a bolometric luminosity."""
return 10**(-3)*(L_star_bol*const.L_sun.cgs).value
\ No newline at end of file
return 10**(-3)*(L_star_bol*const.L_sun.cgs).value
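# Usage sketch (L_star_bol in solar units, result in erg/s; the input value is illustrative):
# >>> calculate_Lx_sat(1.0)  # solar-luminosity star -> ~3.8e30 erg/s (= 1e-3 * L_sun in cgs)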
@@ -3,20 +3,11 @@ import math
import numpy as np
def create_planet_chunks(curr_path, folder_name, list_planets, chunk_size):
""" Function creates the directory & subdirectory structure for saving the results
for all the planets in "list_planets". In addition, it divides list_planets into smaller chunks.
This is needed if "list_planets" is very long, and it prevents the multiprocessing step in the evolve_ensamble function from crashing.
Return:
------
path_save: path specifying where to save the results
planets_chunks: chunked-up list of planets, which can be passed to "evolve_ensemble" to evolve
"""
""" chunk_size=9 seems to work fine """
# check if the directory for saving the results exists; if not, create it
path_save = curr_path+folder_name
if os.path.isdir(path_save):
# check if the directory for saving the results exists; if not, create it
print("Folder -> "+folder_name+" <- exists.")
print("Folder "+folder_name+" exists.")
pass
else:
os.mkdir(path_save)
@@ -40,7 +31,7 @@ def create_planet_chunks(curr_path, folder_name, list_planets, chunk_size):
# divide the dictionary into smaller chunks -> passing all planets at once to multiprocessing fills up the memory,
# which is why only a smaller number of planets is multiprocessed at a given time
number_of_splits = math.ceil(len(planets_dict)/9)
number_of_splits = math.ceil(len(planets_dict)/chunk_size)
planets_arr = [[key, value] for key, value in planets_dict.items()]
planets_chunks = np.array_split(planets_arr, number_of_splits)
# now we have a list of [folder, planet] pairs, which can be passed to the multiprocessing_func
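# A minimal usage sketch (folder name and chunk size are illustrative; "planets" is assumed to be
# the list of planet objects prepared elsewhere in this repo):
# >>> path_save, planets_chunks = create_planet_chunks(os.getcwd()+"/", "OwWu17_sample/", planets, chunk_size=9)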
......
import os
import numpy as np
import multiprocessing as mp
import multiprocessing
import time
import sys
sys.path.append('../platypos_package/')
sys.path.append('../plaml_package/')
# Planet Class
from Planet_class_LoFo14 import planet_LoFo14
from Planet_class_Ot20 import planet_Ot20
from Planet_class_LoFo14_PAPER import planet_LoFo14_PAPER
from Planet_class_Ot20_PAPER import planet_Ot20_PAPER
from Lx_evo_and_flux import undo_what_Lxuv_all_does
def evolve_one_planet(pl_folder_pair, t_final, init_step, eps, K_on, beta_on, evo_track_dict_list, path_for_saving):
"""Function to evolve one planet at a time, but through all evo tracks specified in evo_track_dict_list
(dictionary with info about evolutionary tracks).
So all calculations for this one planet belong together, but are independent of any other planet.
Each function call to "evolve_one_planet" represents an independent process."""
for track in evo_track_dict_list:
pl = pl_folder_pair[1] # get the planet object
folder = pl_folder_pair[0] # and the corresponding folder name where results are saved
def evolve_one_planet(pl_folder_pair, t_final, init_step, eps, K_on, beta_on, evo_track_dict_list_or_1, path_for_saving):
"""Evolves one planet, pl, at a time, but through all evo tracks in evo_track_dict_list.
So all calculations for one planet belong to each other, but the planets are independent of each other.
Each time this function is called represents an independent process. """
if evo_track_dict_list_or_1 == "1": # one planet evolves along a specific track (e.g. if host star mass is different for each planet)
pl = pl_folder_pair[1]
folder = pl_folder_pair[0]
# NOTE!! for OwWu17 planets the planet_instance.Lx_age is actually the XUV luminosity!!!
# get parameters to specify the track
a_0 = 0.5 # power-law index: after t_sat, Lxuv decays as (t/t_sat)**alpha with alpha = -1 - a_0
alpha = -1 - a_0
t_sat = 100.
# solve for Lx_curr, which is the Lx at time t_final, assuming a constant power-law slope after time t_sat
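# i.e. Lxuv(t) = Lxuv(t_sat) * (t/t_sat)**alpha for t > t_sat, with Lxuv(t_sat) taken from
# pl.Lx_age (which for the OwWu17 planets holds the XUV luminosity, see note above)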
L_xuv_track_end = 10**(alpha*(np.log10(t_final/t_sat))) * pl.Lx_age
# pass Lx to dictionary, not Lxuv, since Platypos automatically applies the conversion
Lx_track_end = undo_what_Lxuv_all_does(L_xuv_track_end)
track = {"t_start": pl.age, "t_sat": t_sat, "t_curr": t_final, "t_5Gyr": t_final,
"Lx_max": undo_what_Lxuv_all_does(pl.Lx_age), "Lx_curr": Lx_track_end,
"Lx_5Gyr": Lx_track_end, "dt_drop": 0., "Lx_drop_factor": 0.}
pl.set_name(t_final, init_step, eps, K_on, beta_on, evo_track_dict=track) # set planet name based on specified track
# generate a useful planet name for saving the results
pl_file_name = folder + "_track" + pl.planet_id.split("_track")[1] + ".txt"
#print(pl.planet_id)
#print(pl_file_name)
if os.path.isfile(path_for_saving+pl_file_name):
# if file exists, skip & don't evolve planet
# skip & don't evolve planet
# print("Folder "+pl_file_name+" exists.")
pass
else:
# evolve the planet
#print(track)
#print("here planet would evolve.")
pl.evolve_forward(t_final, init_step, eps, K_on, beta_on, evo_track_dict=track,
path_for_saving=path_for_saving, planet_folder_id=folder)
path_for_saving=path_for_saving, planet_folder_id=folder)
else: # evolve one planet along more than one track (e.g. to see the effect of different tracks on the planetary evolution)
for track in evo_track_dict_list:
pl = pl_folder_pair[1]
folder = pl_folder_pair[0]
pl.set_name(t_final, init_step, eps, K_on, beta_on, evo_track_dict=track) # set planet name based on specified track
# generate a useful planet name for saving the results
pl_file_name = folder + "_track" + pl.planet_id.split("_track")[1] + ".txt"
if os.path.isfile(path_for_saving+pl_file_name):
# skip & don't evolve planet
# print("Folder "+pl_file_name+" exists.")
pass
else:
pl.evolve_forward(t_final, init_step, eps, K_on, beta_on, evo_track_dict=track,
path_for_saving=path_for_saving, planet_folder_id=folder)
def evolve_ensamble(planets_chunks, t_final, initial_step_size, epsilon, K_on, beta_on, evo_track_dict_list, path_save):
"""Function which parallelizes the multiprocessing_func "evolve_one_planet".
@param planets_chunks: (array of lists) Array of lists of [folder, planet] pairs. Each chunk of planets
in the array will be run separately, one after the other. This avoids computer crashes.
@param t_final: (float) Time at which to end the simulation (e.g. 5 or 10 Gyrs)
@param initial_step_size: (float) Initial step size for integration (old code uses a fixed step size, newer version uses an adaptive one)
@param epsilon: (float) Evaporation efficiency parameter
@param K_on: (str) Tidal-effects correction
@param beta_on: (str) X-ray cross section correction
@param evo_track_dict_list: (list) List of track dictionaries along which to evolve each planet
def evolve_ensamble(planets_chunks, t_final, init_step, eps, K_on, beta_on, evo_track_dict_list_or_1, path_save):
"""Function which parallelizes the multiprocessing_func.
@param planets_chunks: (array of lists) Array of lists of [folder, planet] pairs.
Each list (i.e. chunk of planets) in the array will be run separately, one after the other.
@param t_final: (float) Time at which to end the simulation (e.g. 5 or 10 Gyrs)
@param init_step: (float) Initial step size for the integration
@param eps: (float) Evaporation efficiency parameter
@param K_on: (str) Tidal-effects correction
@param beta_on: (str) X-ray cross section correction
@param evo_track_dict_list_or_1: (list or str) List of track dictionaries along which to evolve each planet, or "1" to evolve each planet along its own single track
@param path_save: (str) Filepath to the folder in which to save the planet-folders
@result: Each planet will evolve along all the specified tracks.
Each track-planet evolution result will be saved as a .txt file in the planet-folder.
Each track-planet evolution result will be saved as a csv file in the planet-folder.
"""
print("start")
#if __name__ == '__main__':
@@ -55,8 +92,8 @@ def evolve_ensamble(planets_chunks, t_final, initial_step_size, epsilon, K_on, b
pl_folder_pair = planets_chunks[i][j] # get folder-planet pair
path_for_saving = path_save + planets_chunks[i][j][0] + "/" # get corresponding folder
p = multiprocessing.Process(
target=evolve_one_planet, args=(pl_folder_pair, t_final, initial_step_size, epsilon, K_on, beta_on,
evo_track_dict_list, path_for_saving))
target=evolve_one_planet, args=(pl_folder_pair, t_final, init_step, eps, K_on, beta_on,
evo_track_dict_list_or_1, path_for_saving))
processes.append(p)
p.start()
@@ -65,3 +102,4 @@ def evolve_ensamble(planets_chunks, t_final, initial_step_size, epsilon, K_on, b
t = (time.time() - starttime)/60
print('That took {} minutes'.format(t))
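# A minimal end-to-end sketch (all parameter values are illustrative; assumes "planets" is a prepared
# list of planet objects and create_planet_chunks is imported from the helper module above):
# >>> path_save, planets_chunks = create_planet_chunks(os.getcwd()+"/", "results/", planets, chunk_size=9)
# >>> evolve_ensamble(planets_chunks, t_final=5000., init_step=0.1, eps=0.1,
# ...                 K_on="no", beta_on="no", evo_track_dict_list_or_1="1", path_save=path_save)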
from astropy import constants as const
from astropy import units as u
import numpy as np
def get_period_from_a(M_star, a_pl):
P_sec = (4*np.pi**2/(const.G.cgs*M_star*const.M_sun.cgs)*(a_pl*const.au.cgs)**3)**(0.5)
return P_sec.value/(86400) # in days
def get_a_from_period(M_star, P):
ahoch3 = (P*86400*u.s)**2 * (const.G.cgs*M_star*const.M_sun.cgs)/ (4*np.pi**2)
return ((ahoch3**(1/3))/const.au.cgs).value # in AU
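# A quick consistency sketch (Earth-Sun case; M_star in solar masses, a in AU, P in days):
# >>> get_period_from_a(1.0, 1.0)      # ~365.25 days
# >>> get_a_from_period(1.0, 365.25)   # ~1.0 AU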