# Exported: · 5 years ago · Jul 14, 2020, 09:12 PM
1# -*- coding: utf-8 -*-
2"""
3Created on Tue May 19 12:31:13 2020
4
5@author: norma
6
7eRASS Simulation
8
9"""
10import numpy as np
11import os
12import time
13import sqlite3
14
15import populations
16import auxil
17import curvefunc
18import ulxlc
19
def create_classification_dict(df=None):
    """Map 'system_id-dincl-inclination' keys to curve classifications.

    Parameters
    ----------
    df : pandas.DataFrame, optional
        Table with 'system_id', 'dincl', 'inclination' and
        'classification' columns.  Defaults to the module-level
        ``curve_classifications`` table loaded in ``__main__``.

    Returns
    -------
    dict
        {'<system_id>-<dincl>-<inclination>': classification} where the
        key format matches the one produced by ``create_keys``.
    """
    if df is None:
        df = curve_classifications
    keys = (df['system_id'].astype(str) +
            '-' + df['dincl'].astype(str) +
            '-' + df['inclination'].astype(str)).values
    classifications = df['classification'].values
    return dict(zip(keys, classifications))
28
29
def create_N_lim_dict(df=None):
    """Map 'system_id-dincl-inclination' keys to ULX flux thresholds.

    Parameters
    ----------
    df : pandas.DataFrame, optional
        Table with 'system_id', 'dincl', 'inclination' and 'N_lim'
        columns.  Defaults to the module-level ``curve_classifications``
        table loaded in ``__main__``.

    Returns
    -------
    dict
        {'<system_id>-<dincl>-<inclination>': N_lim} where the key
        format matches the one produced by ``create_keys``.
    """
    if df is None:
        df = curve_classifications
    keys = (df['system_id'].astype(str) +
            '-' + df['dincl'].astype(str) +
            '-' + df['inclination'].astype(str)).values
    N_lim = df['N_lim'].values
    return dict(zip(keys, N_lim))
38
39
def create_keys(selected_systems, selected_dincls, selected_inclinations):
    """Build '<system>-<dincl>-<inclination>' key strings element-wise.

    Parameters are equal-length array-likes supporting ``.astype``; each
    element is rendered as a string and joined with '-' so the result can
    be looked up in the dicts from ``create_classification_dict`` /
    ``create_N_lim_dict``.

    Returns
    -------
    numpy.ndarray of str
    """
    # np.char.add is the public, non-deprecated spelling of
    # np.core.defchararray.add.
    keys = np.char.add(selected_systems.astype(str), '-')
    keys = np.char.add(keys, selected_dincls.astype(str))
    keys = np.char.add(keys, '-')
    keys = np.char.add(keys, selected_inclinations.astype(str))
    return keys
46
47
def get_erass_curve(theta, inclination, dincl):
    """Load (or first simulate, then load) the lightcurve for one system.

    Curves are cached on disk under ``erass_curve_folder`` with a
    '<theta>-<inclination>-<dincl>.txt' filename; if the file is missing
    a fresh ulxlc simulation is run to produce it before loading.
    """
    key = f'{round(theta, 2)}-{int(inclination)}-{int(dincl)}'
    lc_path = erass_curve_folder + f'{key}.txt'

    if not os.path.isfile(lc_path):
        # No cached curve: simulate one with ulxlc first.
        xcm_path = erass_curve_folder + f'{key}.xcm'
        parameters = {'period': 50,
                      'phase': 0,
                      'theta': round(theta, 2),
                      'inclination': int(inclination),
                      'dincl': int(dincl),
                      'beta': 0.2,
                      'dopulse': 1,
                      'norm': 1}
        ulxlc.run_ulxlc(xcm_path, parameters, lc_path)

    return curvefunc.load_curve_file(lc_path)
76
77
def is_ulx_on_first_cycle(curve, curve_period, P_wind, N_lim, sampling_interval=30*6):
    """Sample the curve once at a random phase; True if seen as a ULX.

    Parameters
    ----------
    curve : mapping
        Simulated lightcurve with 'Time' and 'Flux' sequences.
    curve_period : float
        Period of the simulated curve in its own time units.
    P_wind : float
        Wind period of the system in days.
    N_lim : float
        Flux threshold above which the source counts as a ULX.
    sampling_interval : int, optional
        Unused; kept so the signature mirrors ``sample_curve``.

    Returns
    -------
    bool
        True if the randomly-sampled flux exceeds ``N_lim``.
    """
    time_arr = np.array(curve['Time'])
    flux_arr = np.array(curve['Flux'])

    # Time scaling constant: maps days onto curve time units.
    k = curve_period / P_wind

    # Random observation epoch in [0, P_wind) days.  (The original
    # called np.random.uniform(P_wind), i.e. low=P_wind, high=1.0 --
    # an undefined low > high usage; uniform(0, P_wind) is the
    # intended draw.)
    t_start = np.random.uniform(0, P_wind)
    t_start_curve = k * t_start

    # Nearest tabulated curve point to the sampling epoch.
    sample_index = np.argmin(np.abs(time_arr - t_start_curve))
    return bool(flux_arr[sample_index] > N_lim)
94
95
def sample_curve(curve, curve_period, P_wind, N_lim, sampling_interval=30*6):
    """Sample the curve at 8 eRASS epochs spaced ``sampling_interval`` days.

    A random start epoch is drawn, then 8 equally-spaced observation
    times are folded onto the (periodic) simulated curve and the nearest
    tabulated fluxes are returned.

    Parameters
    ----------
    curve : mapping
        Simulated lightcurve with 'Time' and 'Flux' sequences.
    curve_period : float
        Period of the simulated curve in its own time units.
    P_wind : float
        Wind period of the system in days.
    N_lim : float
        Unused here; kept for signature parity (thresholding is done by
        the caller).
    sampling_interval : int, optional
        Days between successive eRASS visits (default 6 months).

    Returns
    -------
    numpy.ndarray
        The 8 sampled fluxes.
    """
    time_arr = np.array(curve['Time'])
    flux_arr = np.array(curve['Flux'])

    # Time scaling constant: maps days onto curve time units.
    k = curve_period / P_wind

    # Random start epoch in [0, P_wind) days.  (The original called
    # np.random.uniform(P_wind), i.e. low=P_wind, high=1.0 -- an
    # undefined low > high usage; uniform(0, P_wind) is the intended
    # draw.)
    t_start = np.random.uniform(0, P_wind)
    t_sample = t_start + sampling_interval * np.arange(8)

    # Scale onto curve time and wrap onto one period of the curve.
    t_sample_curve = k * t_sample % curve_period

    # Nearest tabulated point for each sampling epoch.
    sample_indexes = [np.argmin(np.abs(time_arr - t)) for t in t_sample_curve]
    fluxes = flux_arr[sample_indexes]
    return fluxes
117
118
def get_first_transient_cycle(alive_arr):
    """Return the index of the first entry that differs from the first one.

    Raises
    ------
    ValueError
        If every entry equals the first (no transition present).
    """
    initial_state = alive_arr[0]
    for cycle, state in enumerate(alive_arr):
        if state != initial_state:
            return cycle
    raise ValueError('No transient cycle found, should not have got here')
124
125
def erass_classifier(is_ulx_erass):
    """Classify a per-cycle ULX on/off history.

    Returns 'dead' if the source is never on, 'alive' if it is on in
    every cycle, and 'transient' otherwise.
    """
    n_on = sum(is_ulx_erass)
    if n_on == 0:
        return 'dead'
    return 'alive' if n_on == len(is_ulx_erass) else 'transient'
134
def run(sample_size=500, bh_ratio=0.5, dincl_cut=46):
    """Run one eRASS observing simulation over a sampled ULX population.

    Parameters
    ----------
    sample_size : int
        Number of systems drawn from the population.
    bh_ratio : float
        Fraction of the sample drawn as black-hole systems.
    dincl_cut : int
        Exclusive upper bound for the sampled precession half-angle.

    Returns
    -------
    tuple
        (N_alive_persistent, N_dead_persistent, N_transient_erass,
        N_transient_erass_cumulative): persistent alive/dead counts,
        8-element per-cycle transient counts and their cumulative sum.

    Relies on module-level ``df_ulx`` and ``curve_classifications``
    being set up in ``__main__`` before being called.
    """
    # Treat sources over this wind period length as persistent sources.
    P_wind_persistent_level = 4 * 365

    # Lookup tables built from the module-level population/curve tables.
    key_class_dict = create_classification_dict()
    key_N_lim_dict = create_N_lim_dict()
    id_theta_dict = dict(df_ulx['theta_half_deg'])
    id_P_wind_dict = dict(df_ulx['P_wind_days'])

    # Draw a random sample of systems and viewing geometries.
    selected_systems = auxil.sample_by_bh_ratio(df_ulx, bh_ratio, sample_size)
    selected_dincls = np.random.randint(0, dincl_cut, size=sample_size)
    selected_inclinations = np.random.randint(0, 91, size=sample_size)
    selected_keys = create_keys(selected_systems, selected_dincls, selected_inclinations)
    selected_classifications = [key_class_dict.get(key) for key in selected_keys]
    selected_N_lims = [key_N_lim_dict.get(key) for key in selected_keys]
    selected_thetas = [id_theta_dict.get(sys_id) for sys_id in selected_systems]
    selected_P_winds = [id_P_wind_dict.get(sys_id) for sys_id in selected_systems]

    # Sample-level counts.  None --> no lightcurve --> alive.
    N_alive = (selected_classifications.count('alive')
               + selected_classifications.count(None))
    N_transient = selected_classifications.count('transient')
    N_dead = selected_classifications.count('dead')

    # Sample check: every system got exactly one classification.
    assert N_alive + N_transient + N_dead == sample_size

    # eRASS results.
    N_alive_persistent = 0
    N_dead_persistent = 0
    N_transient_erass = [0] * 8

    for i in range(sample_size):
        curve_classification = selected_classifications[i]

        if curve_classification is None or curve_classification == 'alive':
            N_alive_persistent += 1
        elif curve_classification == 'dead':
            N_dead_persistent += 1
        elif curve_classification == 'transient':
            # Get variables for the eRASS sampling calculation.
            dincl = selected_dincls[i]
            inclination = selected_inclinations[i]
            theta = selected_thetas[i]
            N_lim = selected_N_lims[i]
            P_wind = selected_P_winds[i]

            curve = get_erass_curve(theta, inclination, dincl)

            if P_wind > P_wind_persistent_level:
                # Long period systems are treated as persistent: one
                # random sample decides alive vs dead for all cycles.
                if is_ulx_on_first_cycle(curve, 50, P_wind, N_lim, sampling_interval=30*6):
                    N_alive_persistent += 1
                else:
                    N_dead_persistent += 1
            else:
                sampled_fluxes = sample_curve(curve, 50, P_wind, N_lim, sampling_interval=30*6)
                is_ulx_erass = list(np.where(sampled_fluxes > N_lim, True, False))

                erass_sample_classification = erass_classifier(is_ulx_erass)

                if erass_sample_classification == 'alive':
                    N_alive_persistent += 1
                elif erass_sample_classification == 'dead':
                    N_dead_persistent += 1
                else:
                    # 'transient': record the cycle where it first flipped.
                    first_transient_cycle = get_first_transient_cycle(is_ulx_erass)
                    N_transient_erass[first_transient_cycle] += 1

    N_transient_erass_cumulative = np.cumsum(N_transient_erass).tolist()

    # Consistency check: every sampled system was counted exactly once.
    assert (N_alive_persistent + N_dead_persistent
            + N_transient_erass_cumulative[-1]) == sample_size

    return (N_alive_persistent, N_dead_persistent,
            N_transient_erass, N_transient_erass_cumulative)
236
237
238
def sql_create_db():
    """Create ./erass.db (if needed) with the SAMPLE_RESULTS table.

    The table holds one row per simulation: the dincl cut, BH fraction,
    persistent alive/dead counts, 8 per-cycle transient counts and
    their 8 cumulative sums.
    """
    create_table_sql = """CREATE TABLE IF NOT EXISTS SAMPLE_RESULTS (
                              id integer PRIMARY KEY AUTOINCREMENT,
                              dincl integer,
                              bh_percent real,
                              N_alive_persisitent integer,
                              N_dead_persisitent integer,
                              N_transient_erass1 integer,
                              N_transient_erass2 integer,
                              N_transient_erass3 integer,
                              N_transient_erass4 integer,
                              N_transient_erass5 integer,
                              N_transient_erass6 integer,
                              N_transient_erass7 integer,
                              N_transient_erass8 integer,
                              N_t_cum1 integer,
                              N_t_cum2 integer,
                              N_t_cum3 integer,
                              N_t_cum4 integer,
                              N_t_cum5 integer,
                              N_t_cum6 integer,
                              N_t_cum7 integer,
                              N_t_cum8 integer
                          );"""

    conn = sqlite3.connect('erass.db')
    # The connection context manager commits on success.
    with conn:
        conn.execute(create_table_sql)
    conn.close()
269
270
def sql_save_result(unpacked_tuple):
    """Insert one simulation result row into SAMPLE_RESULTS.

    Parameters
    ----------
    unpacked_tuple : tuple
        20 values ordered as (dincl, bh_percent, N_alive_persisitent,
        N_dead_persisitent, 8 per-cycle transient counts, 8 cumulative
        transient counts), matching the column list below.

    NOTE(review): the database path is relative to the working
    directory set by the os.chdir in ``__main__``
    ('../data/interim/curves/'), so it should resolve to the same
    src/erass.db that ``sql_create_db`` creates -- confirm if the
    script entry point changes.
    """
    insert = """ INSERT INTO SAMPLE_RESULTS (
                     dincl,
                     bh_percent,
                     N_alive_persisitent,
                     N_dead_persisitent,
                     N_transient_erass1,
                     N_transient_erass2,
                     N_transient_erass3,
                     N_transient_erass4,
                     N_transient_erass5,
                     N_transient_erass6,
                     N_transient_erass7,
                     N_transient_erass8,
                     N_t_cum1,
                     N_t_cum2,
                     N_t_cum3,
                     N_t_cum4,
                     N_t_cum5,
                     N_t_cum6,
                     N_t_cum7,
                     N_t_cum8
                 )
                 VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"""

    conn = sqlite3.connect('../../../src/erass.db')
    try:
        # Parameterized insert; removed leftover debug print of the row.
        conn.execute(insert, unpacked_tuple)
        conn.commit()
    finally:
        # Close the connection even if the insert fails.
        conn.close()
303
304
if __name__ == "__main__":
    # Create the results database in the current directory (./erass.db)
    # BEFORE the chdir below; sql_save_result later reaches back to it
    # via a relative path.
    sql_create_db()

    # Module-level inputs used by run() and the dict builders.
    df_ulx = populations.ulx()
    curve_classifications = auxil.load_curve_classifications()


    # This script struggles pretty hard when making it change directory.
    os.chdir('../data/interim/curves/')
    # Lightcurve cache folder used by get_erass_curve().
    erass_curve_folder = 'eRASS_sims/'

    import itertools

    #dincls = [46, 40, 30, 20, 10]
    #bh_ratios = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]

    # Parameter grid: precession cut-offs x black-hole ratios, each
    # combination repeated 100 times.
    dincls = [40, 20]
    bh_ratios = [0.0, 0.2, 0.5, 0.7, 1.0]


    repeats = np.arange(100)

    results = {}
    for i, variables in enumerate(itertools.product(dincls, bh_ratios, repeats)):
        print(i)  # progress indicator
        dincl, bh, repeat = variables
        N_alive_persisitent, N_dead_persisitent, N_transient_erass, N_transient_erass_cumulative = run(sample_size=500, bh_ratio=bh, dincl_cut=dincl)
        results[i] =(dincl,
                     bh,
                     N_alive_persisitent,
                     N_dead_persisitent,
                     N_transient_erass,
                     N_transient_erass_cumulative)

        print(results[i])

        # Flatten the two 8-element lists so the row matches the 20
        # SAMPLE_RESULTS columns.
        unpacked_tuple = (dincl,
                          bh,
                          N_alive_persisitent,
                          N_dead_persisitent,
                          *N_transient_erass,
                          *N_transient_erass_cumulative)

        sql_save_result(unpacked_tuple)