import yt
from yt.config import ytcfg
import ytree

import numpy as np
from bisect import bisect_left

from calculations import get_star_formation_times

####################################################################################################

yt.enable_parallelism()

nproc = ytcfg.get("yt", "internals", "topcomm_parallel_size")
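# With parallelism enabled, the script is meant to be launched under MPI, e.g.
# "mpirun -np 8 python <this_script>.py" (the script name here is only a
# placeholder); nproc is then the size of yt's top-level communicator.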

####################################################################################################

def neighbors(myList, myNumber):
    """Return the closest values in sorted myList on either side of myNumber."""
    pos = bisect_left(myList, myNumber)

    # Edge cases: myNumber lies before the first or after the last entry, so the
    # nearest value is returned twice to keep the (before, after) unpacking valid.
    if pos == 0:
        return myList[0], myList[0]

    if pos == len(myList):
        return myList[-1], myList[-1]

    before = myList[pos - 1]
    after = myList[pos]

    return before, after

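# Examples of the behavior above (with the tuple-returning edge cases):
#   neighbors([1.0, 3.0, 7.0], 4.0) -> (3.0, 7.0)
#   neighbors([1.0, 3.0, 7.0], 0.5) -> (1.0, 1.0)  # below the range: first value twice
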
####################################################################################################

zcat = dict(z=[], fn=[])
with open('/storage/home/hcoda1/0/jw254/data/SG64-2020/redshifts.dat', 'r') as fp:
    for line in fp:
        zcat['z'].append(float(line.split("=")[-1]))
        zcat['fn'].append(line.split(":")[0])
    zcat['z'] = np.array(zcat['z'])

def get_filename_from_redshift(z):
    i = np.argmin(np.abs(z - zcat['z']))
    return zcat['fn'][i]

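# Example (illustrative; actual filenames depend on redshifts.dat):
# get_filename_from_redshift(15.0) returns the catalog entry whose redshift is
# closest to z = 15. The parsing above assumes each catalog line looks roughly
# like "DD0123/output_0123: z = 15.000", i.e. filename before ":" and redshift
# after the last "=".
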
def get_yt_dataset(node):
    filename = get_filename_from_redshift(node["redshift"])
    node.ds = yt.load("/storage/home/hcoda1/0/jw254/data/SG64-2020/" + str(filename))

def get_yt_sphere(node):
    ds = node.ds

    center = node["position"].to("unitary").v
    radius = node["virial_radius"].to("unitary").to_value()
    node.sphere = ds.sphere(center, (radius, "unitary"))

def delete_attributes(node, attributes):
    for attr in attributes:
        if hasattr(node, attr):
            delattr(node, attr)

def delete_dataset(node):
    if hasattr(node, "ds"):
        node.ds.index.clear_all_data()

        # try:
        #     del node.ds.index.grid_dimensions
        #     del node.ds.index.grid_left_edge
        #     del node.ds.index.grid_right_edge
        #     del node.ds.index.grid_levels
        #     del node.ds.index.grid_particle_count
        #     del node.ds.index.grids
        # except:
        #     print("Could not delete some attribute(s)")

####################################################################################################

@yt.particle_filter(requires=["particle_type"], filtered_type="all")
def stars(pfilter, data):
    return data[(pfilter.filtered_type, "particle_type")] == 2

@yt.particle_filter(requires=["creation_time"], filtered_type="stars")
def young_stars(pfilter, data):
    age = data.ds.current_time - data[pfilter.filtered_type, "creation_time"]
    return np.logical_and(age.in_units("Myr") <= 3, age >= 0)

# Halo criteria:
# (i) Within the halo's progenitor line, the halo at some point in its evolution
#     must exceed a dark matter mass of 1E4 M⊙.
# (ii) At the time of a radiative event, the halo must have a molecular hydrogen
#      fraction fH2 ≥ 1E-4 within its densest grid cell and a baryon to dark matter
#      mass ratio above 1% of the cosmic mean (Ωbaryon / ΩDM).
# (iii) The halo must not be a subhalo.
def filter_halos(node):
    # criterion (i)
    if max(list(node["prog", "mass"])) < 1E4:
        return False

    # criterion (ii)
    # step 1: get star formation times
    node.ds.add_particle_filter("stars")

    # print(node.ds.all_data().quantities.total_mass()) # gives non-zero masses

    node.ds.add_particle_filter("young_stars")

    # print(node.ds.all_data().quantities.total_mass()) # gives non-zero masses

    star_formation_times = node.ds.all_data()[("young_stars", "creation_time")].in_units('Myr')

    # step 2: if a star formed around the node time, check whether the H2 fraction
    # is >= 1E-4 and the mass ratio is above 1% of the cosmic mean
    if len(star_formation_times) == 0: # every node stops here
        print("no stars at all for this node")
        return False

    if max(star_formation_times) < 0:
        print("no stars yet for this node", min(star_formation_times), max(star_formation_times))
        return False

    # bisect_left (inside neighbors) assumes a sorted sequence
    star_formation_times.sort()
    before, after = neighbors(star_formation_times, node["time"])
    time_before = node["time"] - before
    time_after = after - node["time"]

    # print(before, node["time"]) # all times are < 0

    # a star formed within 0.33 Myr of this node's time
    if min(time_before, time_after) < 0.33:
        h2_max = node.ds.all_data().max(("gas", "H2_fraction"))
        print(h2_max)
        if h2_max <= 1E-4:
            return False

        # @TODO - Check mass ratio vs cosmic mean (see the hedged sketch after this function)

    # criterion (iii)
    if node["upid"] != -1:
        return False

    return True

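# Hedged sketch (not wired into the pipeline): one possible way to finish the
# @TODO in filter_halos, comparing the halo's baryon-to-dark-matter mass ratio
# against 1% of the cosmic mean Ωbaryon / ΩDM. The Omega values below are
# placeholder assumptions, not read from the simulation parameters, and the
# total particle mass is treated as dark matter (star mass neglected). If used,
# it would be called inside the criterion (ii) branch where the @TODO sits.
def passes_mass_ratio_check(node, omega_b=0.049, omega_dm=0.27, threshold=0.01):
    # Assumes node.ds has already been set (e.g. by get_yt_dataset).
    get_yt_sphere(node)  # reuse the helper above to build node.sphere
    gas_mass, particle_mass = node.sphere.quantities.total_mass()
    ratio = float(gas_mass / particle_mass)  # baryon-to-DM ratio within the virial sphere
    return ratio >= threshold * (omega_b / omega_dm)
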
####################################################################################################

def calculate_significance(node):
    """Recursively accumulate mass * dt over a node and all of its ancestors;
    this "significance" is used below with the max_field_value selector to pick
    the main progenitor line."""
    if node.descendent is None:
        dt = 0. * node["time"]
    else:
        dt = node.descendent["time"] - node["time"]

    sig = node["mass"] * dt
    if node.ancestors is not None:
        for anc in node.ancestors:
            sig += calculate_significance(anc)

    node["significance"] = sig
    return sig

####################################################################################################

save_and_reload = False

####################################################################################################

arbor = ytree.load("/storage/home/hcoda1/0/jw254/data/SG64-2020/rockstar_halos-jhw/trees/tree_0_0_0.dat")
if save_and_reload:
    fn = arbor.save_arbor(filename="arbor_original")
    arbor = ytree.load(fn)
    print(fn)

arbor.add_analysis_field("significance", "Msun*Myr")

ap_significance = ytree.AnalysisPipeline()
ap_significance.add_operation(calculate_significance)

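# Note: with yt parallelism enabled, ytree.parallel_trees splits the list of
# trees across the available processes, so each rank runs the pipelines below
# only on its own subset of trees.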
trees = list(arbor[:])
for tree in ytree.parallel_trees(trees):
    for node in tree["forest"]:
        ap_significance.process_target(node)

############################################################

if save_and_reload:
    fn = arbor.save_arbor(filename="arbor_significance")
    arbor = ytree.load(fn)
    print(fn)

############################################################

print("before filtering:", arbor.size)

arbor.set_selector("max_field_value", "significance")

if "halo_DM_mass" not in arbor.field_list:
    arbor.add_analysis_field("halo_DM_mass", default=-1, units="Msun")

ap_filter = ytree.AnalysisPipeline()
ap_filter.add_operation(get_yt_dataset)
ap_filter.add_operation(filter_halos)
ap_filter.add_operation(delete_dataset, always_do=True)

trees = list(arbor[:])
print("ntrees:", len(trees))
for tree in ytree.parallel_trees(trees):
    for node in tree["forest"]:
        ap_filter.process_target(node)

    print("during filtering:", arbor.size)

############################################################

if save_and_reload:
    arbor.save_arbor(filename="arbor_analysis", trees=trees)

############################################################