Loading DataReductionGIANOB/DataInterface.py +254 −2 Original line number Diff line number Diff line import os import shutil import numpy as np import yaml Loading @@ -6,6 +7,25 @@ from numpy import array from os import environ from pandas import read_csv, DataFrame from pathlib import Path from petitRADTRANS import physical_constants as phys_const try: from tkinter import messagebox as _messagebox except Exception: _messagebox = None EXPECTED_YAML_KEYS = ( "name", "radius_jup", "mass_jup", "gravity_cm_s2", "t_eq_K", "stellar_radius_sun", "stellar_mass_sun", "stellar_teff_K", "p0_log10_bar", "hjd0_days", "period_days", "v_system_km_s", "kp_km_s", "ks_km_s", "ecc", "periastron_argument", "ra", "dec", "a_Rs_ratio", "projected_obliquity_deg", "inclination_deg", "v_sini_km_s", "Limit_Phase_T14", "Limit_Phase_T12", "Limit_Phase_T23", "t14_hours", "t12_hours", "t23_hours", ) def create_path_night(pathfolder, target, rad_mode, instrument): Loading Loading @@ -50,6 +70,234 @@ def get_target_list(path_targets_folder): return array(target_list) def _fetch_a_rs_from_archive(target_name): """ Try to fetch a/R_star (pl_ratdor) from the NASA Exoplanet Archive. Matching is flexible: dashes, underscores and spaces are stripped from both the user-supplied name and the archive's hostname/pl_name before comparison, so 'hatp11' matches 'HAT-P-11', 'wasp69' matches 'WASP-69', 'HD143105' matches 'HD 143105', etc. When several planets match, the one closest alphabetically to 'b' with a finite a/Rs is returned. Returns (matched_pl_name, a_Rs_value) or (None, None). """ import re import warnings try: from astroquery.ipac.nexsci.nasa_exoplanet_archive import ( NasaExoplanetArchive, ) except Exception as exc: print(f"[archive lookup] astroquery unavailable: {exc}") return None, None norm = re.sub(r"[-_ ]", "", (target_name or "").strip()).upper() if not norm: return None, None # If the user spelled out a planet letter (e.g. 'hatp11c'), honour it; # otherwise always target planet 'b'. 
letter_match = re.match(r"^(.+\d)([B-Z])$", norm) if letter_match: host_norm = letter_match.group(1) requested_letter = letter_match.group(2).lower() else: host_norm = norm requested_letter = "b" safe_host = host_norm.replace("'", "") safe_pl = (host_norm + requested_letter.upper()).replace("'", "") where = ( f"UPPER(REPLACE(REPLACE(hostname, '-', ''), ' ', '')) = '{safe_host}'" f" OR UPPER(REPLACE(REPLACE(pl_name, '-', ''), ' ', '')) = '{safe_pl}'" ) try: with warnings.catch_warnings(): warnings.simplefilter("ignore") result = NasaExoplanetArchive.query_criteria( table="pscomppars", select="pl_name,pl_ratdor", where=where, ) except Exception as exc: print(f"[archive lookup] query failed for '{target_name}': {exc}") return None, None for row in result: pl_name = str(row["pl_name"]) letter = pl_name.strip().split(" ")[-1].lower() if letter != requested_letter: continue try: fval = float(row["pl_ratdor"]) except (TypeError, ValueError): continue if fval > 0 and np.isfinite(fval): return pl_name, fval return None, None def _rebuild_yaml_from_partial(yaml_file, target_name): """ Return (yaml_dict, missing_primary, recomputed_keys, fetched_keys) given a partial yaml_file. Primary parameters absent from yaml_file are set to 0 (with a few sensible physical defaults). For a_Rs_ratio we first try the NASA Exoplanet Archive. Derived quantities (gravity, Keplerian semi-amplitudes, transit durations, phase limits) are recomputed whenever the required inputs are available. 
def _rebuild_yaml_from_partial(yaml_file, target_name):
    """
    Return (yaml_dict, missing_primary, recomputed_keys, fetched_keys) given
    a partial yaml_file.

    Primary parameters absent from yaml_file are set to 0 (with a few
    sensible physical defaults). For a_Rs_ratio we first try the NASA
    Exoplanet Archive. Derived quantities (gravity, Keplerian
    semi-amplitudes, transit durations, phase limits) are recomputed
    whenever the required inputs are available.
    """
    # Defaults for user-supplied ("primary") parameters. Most default to 0;
    # inclination defaults to 90 deg (edge-on), periastron argument to pi/2,
    # and p0 to log10(bar) = -1.
    primary_defaults = {
        "radius_jup": 0.0,
        "mass_jup": 0.0,
        "t_eq_K": 0.0,
        "stellar_radius_sun": 0.0,
        "stellar_mass_sun": 0.0,
        "stellar_teff_K": 0.0,
        "p0_log10_bar": -1.0,
        "hjd0_days": 0.0,
        "period_days": 0.0,
        "v_system_km_s": 0.0,
        "ecc": 0.0,
        "periastron_argument": float(np.pi / 2),
        "ra": 0.0,
        "dec": 0.0,
        "a_Rs_ratio": 0.0,
        "projected_obliquity_deg": 0.0,
        "inclination_deg": 90.0,
        "v_sini_km_s": 0.0,
    }
    missing_primary = [k for k in primary_defaults if k not in yaml_file]
    # Coerce every primary value to float, falling back to the default above.
    vals = {k: float(yaml_file.get(k, d)) for k, d in primary_defaults.items()}
    name = yaml_file.get("name", target_name)

    fetched_keys = {}
    # a_Rs_ratio gets special treatment: try the NASA Exoplanet Archive
    # before giving up and leaving it at 0.
    if "a_Rs_ratio" not in yaml_file:
        archive_name, archive_a_rs = _fetch_a_rs_from_archive(name)
        if archive_a_rs is not None:
            vals["a_Rs_ratio"] = archive_a_rs
            fetched_keys["a_Rs_ratio"] = (archive_name, archive_a_rs)
            # A fetched value is no longer "missing".
            missing_primary = [k for k in missing_primary if k != "a_Rs_ratio"]

    # Keys we may be able to recompute from the primaries. NOTE(review):
    # keys listed here are reported as "recomputed" when absent from the
    # yaml even if the inputs below turn out to be insufficient and the
    # value falls back to 0 — verify against the notification message.
    derived_keys = (
        "gravity_cm_s2",
        "kp_km_s",
        "ks_km_s",
        "t14_hours",
        "t12_hours",
        "t23_hours",
        "Limit_Phase_T14",
        "Limit_Phase_T12",
        "Limit_Phase_T23",
    )
    recomputed_keys = [k for k in derived_keys if k not in yaml_file]

    # Surface gravity g = G * M / R^2 in CGS (phys_const is CGS here).
    if vals["mass_jup"] > 0 and vals["radius_jup"] > 0:
        gravity_cm_s2 = float(
            phys_const.G * (vals["mass_jup"] * phys_const.m_jup)
            / np.power(vals["radius_jup"] * phys_const.r_jup_mean, 2)
        )
    else:
        gravity_cm_s2 = float(yaml_file.get("gravity_cm_s2", 0.0))

    # Convert CGS constants by /1e3 for the Keplerian semi-amplitude formula.
    Msun = phys_const.m_sun / 1e3
    MJ = phys_const.m_jup / 1e3
    G = phys_const.G / 1e3
    # Minimum planet mass M*sin(i), in Jupiter masses.
    mass_pl_jup_min = vals["mass_jup"] * np.sin(np.deg2rad(vals["inclination_deg"]))
    if vals["period_days"] > 0 and mass_pl_jup_min > 0 and vals["stellar_mass_sun"] > 0:
        # Stellar RV semi-amplitude K_s (km/s), circular-orbit form;
        # 86400 converts the period from days to seconds.
        ks_km_s = (
            ((2 * np.pi * G) ** (1 / 3) * mass_pl_jup_min * MJ)
            / ((86400 * vals["period_days"]) ** (1 / 3)
               * (vals["stellar_mass_sun"] * Msun + mass_pl_jup_min * MJ) ** (2 / 3))
        ) / 1e3
        # Planet semi-amplitude from momentum balance: Kp = (M*/Mp) * Ks.
        kp_km_s = (vals["stellar_mass_sun"] * Msun * ks_km_s) / (mass_pl_jup_min * MJ)
    else:
        ks_km_s = float(yaml_file.get("ks_km_s", 0.0))
        kp_km_s = float(yaml_file.get("kp_km_s", 0.0))

    # Transit durations / phase limits: start from any values already in the
    # yaml, then recompute below when all geometric inputs are present.
    t14_hours = float(yaml_file.get("t14_hours", 0.0))
    t12_hours = float(yaml_file.get("t12_hours", 0.0))
    t23_hours = float(yaml_file.get("t23_hours", 0.0))
    # Older yamls carried a single "Limit_Phase"; use it as the T14 fallback.
    legacy_limph = float(yaml_file.get("Limit_Phase", 0.0))
    limph_T14 = float(yaml_file.get("Limit_Phase_T14", legacy_limph))
    limph_T12 = float(yaml_file.get("Limit_Phase_T12", 0.0))
    limph_T23 = float(yaml_file.get("Limit_Phase_T23", 0.0))

    if (
        vals["period_days"] > 0
        and vals["a_Rs_ratio"] > 0
        and vals["stellar_radius_sun"] > 0
        and vals["radius_jup"] > 0
    ):
        try:
            # Third positional argument is the radius ratio Rp/Rs.
            phase_dict = compute_transit_durations(
                vals["period_days"],
                vals["a_Rs_ratio"],
                (vals["radius_jup"] * phys_const.r_jup_mean)
                / (vals["stellar_radius_sun"] * phys_const.r_sun),
                vals["inclination_deg"],
                ecc=vals["ecc"],
                omega=vals["periastron_argument"],
            )
            t14_hours = phase_dict["T14_hours"]
            t12_hours = phase_dict["T12_hours"]
            t23_hours = phase_dict["T23_hours"]
            limph_T14 = phase_dict["limph_T14"]
            limph_T12 = phase_dict["limph_T12"]
            limph_T23 = phase_dict["limph_T23"]
        except (ValueError, ZeroDivisionError) as exc:
            # Non-transiting geometry (or degenerate inputs) — keep fallbacks.
            print(f"[rebuild_yaml] Could not recompute transit durations for "
                  f"'{target_name}': {exc}")

    # Assemble the full yaml dict in the canonical key order.
    yaml_dict = {
        'name': name,
        'radius_jup': vals["radius_jup"],
        'mass_jup': vals["mass_jup"],
        'gravity_cm_s2': gravity_cm_s2,
        't_eq_K': vals["t_eq_K"],
        'stellar_radius_sun': vals["stellar_radius_sun"],
        'stellar_mass_sun': vals["stellar_mass_sun"],
        'stellar_teff_K': vals["stellar_teff_K"],
        'p0_log10_bar': vals["p0_log10_bar"],
        'hjd0_days': vals["hjd0_days"],
        'period_days': vals["period_days"],
        't14_hours': float(t14_hours),
        't12_hours': float(t12_hours),
        't23_hours': float(t23_hours),
        'Limit_Phase_T14': float(limph_T14),
        'Limit_Phase_T12': float(limph_T12),
        'Limit_Phase_T23': float(limph_T23),
        'kp_km_s': float(kp_km_s),
        'ks_km_s': float(ks_km_s),
        'v_system_km_s': vals["v_system_km_s"],
        'ecc': vals["ecc"],
        'periastron_argument': vals["periastron_argument"],
        'ra': vals["ra"],
        'dec': vals["dec"],
        'a_Rs_ratio': vals["a_Rs_ratio"],
        'projected_obliquity_deg': vals["projected_obliquity_deg"],
        'inclination_deg': vals["inclination_deg"],
        'v_sini_km_s': vals["v_sini_km_s"],
    }
    return yaml_dict, missing_primary, recomputed_keys, fetched_keys
def _repair_info_yaml(yaml_path, yaml_file, missing_keys, target_name):
    """Back up the incomplete yaml, write a rebuilt one, notify the user."""
    rebuilt, missing_primary, recomputed_keys, fetched_keys = \
        _rebuild_yaml_from_partial(yaml_file, target_name)

    # Preserve the user's original file before overwriting it.
    backup_path = Path(str(yaml_path) + ".bak")
    shutil.copy2(str(yaml_path), str(backup_path))
    with open(str(yaml_path), 'w') as handle:
        yaml.dump(rebuilt, handle, sort_keys=False)

    # Build the human-readable repair report, section by section.
    report = [
        f"info.yaml for target '{target_name}' was missing keys:",
        f" {', '.join(sorted(missing_keys))}",
        "",
        f"Backup saved to: {backup_path.name}",
    ]
    if fetched_keys:
        report += ["", "Fetched from NASA Exoplanet Archive:"]
        report += [
            f" {key} = {value} (matched '{archive_name}')"
            for key, (archive_name, value) in fetched_keys.items()
        ]
    if recomputed_keys:
        report += [
            "",
            "Recomputed from available inputs:",
            " " + ", ".join(sorted(set(recomputed_keys))),
        ]
    if missing_primary:
        report += [
            "",
            "Primary parameters missing, set to 0:",
            " " + ", ".join(missing_primary),
        ]
    report += ["", "Please verify all values in info.yaml before proceeding."]

    summary = "\n".join(report)
    print("[info.yaml repair]\n" + summary)
    # Pop a GUI warning when tkinter is present; never let the dialog
    # (e.g. no display) abort the repair itself.
    if _messagebox is not None:
        try:
            _messagebox.showwarning(f"info.yaml repaired: {target_name}", summary)
        except Exception:
            pass
    return rebuilt
""" if target != "None": with open(str(Path(path_targets_folder, target, "info.yaml")), 'r') as f: yaml_file = yaml.safe_load(f) # Use safe_load to avoid executing arbitrary code in the YAML yaml_path = Path(path_targets_folder, target, "info.yaml") with open(str(yaml_path), 'r') as f: yaml_file = yaml.safe_load(f) or {} # Use safe_load to avoid executing arbitrary code in the YAML missing_keys = [k for k in EXPECTED_YAML_KEYS if k not in yaml_file] if missing_keys: yaml_file = _repair_info_yaml(yaml_path, yaml_file, missing_keys, target) return ( yaml_file["name"], yaml_file["radius_jup"], Loading DataReductionGIANOB/Frame_Gofio.py +0 −1 Original line number Diff line number Diff line Loading @@ -2111,7 +2111,6 @@ class Frame_Gofio: target_a_Rs_ratio, target_projected_obliquity, target_inclination, target_v_sini, target_limph_T12, target_limph_T23, target_t14_hours, target_t12_hours, target_t23_hours ) = DataInterface.get_target_info(self.path_target, target_id) self.current_target_ self.radius_DB_up.delete(0, tk.END) self.radius_DB_up.insert(tk.END, target_radius) self.mass_DB_up.delete(0, tk.END) Loading Loading
DataReductionGIANOB/DataInterface.py +254 −2 Original line number Diff line number Diff line import os import shutil import numpy as np import yaml Loading @@ -6,6 +7,25 @@ from numpy import array from os import environ from pandas import read_csv, DataFrame from pathlib import Path from petitRADTRANS import physical_constants as phys_const try: from tkinter import messagebox as _messagebox except Exception: _messagebox = None EXPECTED_YAML_KEYS = ( "name", "radius_jup", "mass_jup", "gravity_cm_s2", "t_eq_K", "stellar_radius_sun", "stellar_mass_sun", "stellar_teff_K", "p0_log10_bar", "hjd0_days", "period_days", "v_system_km_s", "kp_km_s", "ks_km_s", "ecc", "periastron_argument", "ra", "dec", "a_Rs_ratio", "projected_obliquity_deg", "inclination_deg", "v_sini_km_s", "Limit_Phase_T14", "Limit_Phase_T12", "Limit_Phase_T23", "t14_hours", "t12_hours", "t23_hours", ) def create_path_night(pathfolder, target, rad_mode, instrument): Loading Loading @@ -50,6 +70,234 @@ def get_target_list(path_targets_folder): return array(target_list) def _fetch_a_rs_from_archive(target_name): """ Try to fetch a/R_star (pl_ratdor) from the NASA Exoplanet Archive. Matching is flexible: dashes, underscores and spaces are stripped from both the user-supplied name and the archive's hostname/pl_name before comparison, so 'hatp11' matches 'HAT-P-11', 'wasp69' matches 'WASP-69', 'HD143105' matches 'HD 143105', etc. When several planets match, the one closest alphabetically to 'b' with a finite a/Rs is returned. Returns (matched_pl_name, a_Rs_value) or (None, None). """ import re import warnings try: from astroquery.ipac.nexsci.nasa_exoplanet_archive import ( NasaExoplanetArchive, ) except Exception as exc: print(f"[archive lookup] astroquery unavailable: {exc}") return None, None norm = re.sub(r"[-_ ]", "", (target_name or "").strip()).upper() if not norm: return None, None # If the user spelled out a planet letter (e.g. 'hatp11c'), honour it; # otherwise always target planet 'b'. 
letter_match = re.match(r"^(.+\d)([B-Z])$", norm) if letter_match: host_norm = letter_match.group(1) requested_letter = letter_match.group(2).lower() else: host_norm = norm requested_letter = "b" safe_host = host_norm.replace("'", "") safe_pl = (host_norm + requested_letter.upper()).replace("'", "") where = ( f"UPPER(REPLACE(REPLACE(hostname, '-', ''), ' ', '')) = '{safe_host}'" f" OR UPPER(REPLACE(REPLACE(pl_name, '-', ''), ' ', '')) = '{safe_pl}'" ) try: with warnings.catch_warnings(): warnings.simplefilter("ignore") result = NasaExoplanetArchive.query_criteria( table="pscomppars", select="pl_name,pl_ratdor", where=where, ) except Exception as exc: print(f"[archive lookup] query failed for '{target_name}': {exc}") return None, None for row in result: pl_name = str(row["pl_name"]) letter = pl_name.strip().split(" ")[-1].lower() if letter != requested_letter: continue try: fval = float(row["pl_ratdor"]) except (TypeError, ValueError): continue if fval > 0 and np.isfinite(fval): return pl_name, fval return None, None def _rebuild_yaml_from_partial(yaml_file, target_name): """ Return (yaml_dict, missing_primary, recomputed_keys, fetched_keys) given a partial yaml_file. Primary parameters absent from yaml_file are set to 0 (with a few sensible physical defaults). For a_Rs_ratio we first try the NASA Exoplanet Archive. Derived quantities (gravity, Keplerian semi-amplitudes, transit durations, phase limits) are recomputed whenever the required inputs are available. 
""" primary_defaults = { "radius_jup": 0.0, "mass_jup": 0.0, "t_eq_K": 0.0, "stellar_radius_sun": 0.0, "stellar_mass_sun": 0.0, "stellar_teff_K": 0.0, "p0_log10_bar": -1.0, "hjd0_days": 0.0, "period_days": 0.0, "v_system_km_s": 0.0, "ecc": 0.0, "periastron_argument": float(np.pi / 2), "ra": 0.0, "dec": 0.0, "a_Rs_ratio": 0.0, "projected_obliquity_deg": 0.0, "inclination_deg": 90.0, "v_sini_km_s": 0.0, } missing_primary = [k for k in primary_defaults if k not in yaml_file] vals = {k: float(yaml_file.get(k, d)) for k, d in primary_defaults.items()} name = yaml_file.get("name", target_name) fetched_keys = {} if "a_Rs_ratio" not in yaml_file: archive_name, archive_a_rs = _fetch_a_rs_from_archive(name) if archive_a_rs is not None: vals["a_Rs_ratio"] = archive_a_rs fetched_keys["a_Rs_ratio"] = (archive_name, archive_a_rs) missing_primary = [k for k in missing_primary if k != "a_Rs_ratio"] derived_keys = ( "gravity_cm_s2", "kp_km_s", "ks_km_s", "t14_hours", "t12_hours", "t23_hours", "Limit_Phase_T14", "Limit_Phase_T12", "Limit_Phase_T23", ) recomputed_keys = [k for k in derived_keys if k not in yaml_file] if vals["mass_jup"] > 0 and vals["radius_jup"] > 0: gravity_cm_s2 = float( phys_const.G * (vals["mass_jup"] * phys_const.m_jup) / np.power(vals["radius_jup"] * phys_const.r_jup_mean, 2) ) else: gravity_cm_s2 = float(yaml_file.get("gravity_cm_s2", 0.0)) Msun = phys_const.m_sun / 1e3 MJ = phys_const.m_jup / 1e3 G = phys_const.G / 1e3 mass_pl_jup_min = vals["mass_jup"] * np.sin(np.deg2rad(vals["inclination_deg"])) if vals["period_days"] > 0 and mass_pl_jup_min > 0 and vals["stellar_mass_sun"] > 0: ks_km_s = ( ((2 * np.pi * G) ** (1 / 3) * mass_pl_jup_min * MJ) / ((86400 * vals["period_days"]) ** (1 / 3) * (vals["stellar_mass_sun"] * Msun + mass_pl_jup_min * MJ) ** (2 / 3)) ) / 1e3 kp_km_s = (vals["stellar_mass_sun"] * Msun * ks_km_s) / (mass_pl_jup_min * MJ) else: ks_km_s = float(yaml_file.get("ks_km_s", 0.0)) kp_km_s = float(yaml_file.get("kp_km_s", 0.0)) t14_hours = 
float(yaml_file.get("t14_hours", 0.0)) t12_hours = float(yaml_file.get("t12_hours", 0.0)) t23_hours = float(yaml_file.get("t23_hours", 0.0)) legacy_limph = float(yaml_file.get("Limit_Phase", 0.0)) limph_T14 = float(yaml_file.get("Limit_Phase_T14", legacy_limph)) limph_T12 = float(yaml_file.get("Limit_Phase_T12", 0.0)) limph_T23 = float(yaml_file.get("Limit_Phase_T23", 0.0)) if ( vals["period_days"] > 0 and vals["a_Rs_ratio"] > 0 and vals["stellar_radius_sun"] > 0 and vals["radius_jup"] > 0 ): try: phase_dict = compute_transit_durations( vals["period_days"], vals["a_Rs_ratio"], (vals["radius_jup"] * phys_const.r_jup_mean) / (vals["stellar_radius_sun"] * phys_const.r_sun), vals["inclination_deg"], ecc=vals["ecc"], omega=vals["periastron_argument"], ) t14_hours = phase_dict["T14_hours"] t12_hours = phase_dict["T12_hours"] t23_hours = phase_dict["T23_hours"] limph_T14 = phase_dict["limph_T14"] limph_T12 = phase_dict["limph_T12"] limph_T23 = phase_dict["limph_T23"] except (ValueError, ZeroDivisionError) as exc: print(f"[rebuild_yaml] Could not recompute transit durations for " f"'{target_name}': {exc}") yaml_dict = { 'name': name, 'radius_jup': vals["radius_jup"], 'mass_jup': vals["mass_jup"], 'gravity_cm_s2': gravity_cm_s2, 't_eq_K': vals["t_eq_K"], 'stellar_radius_sun': vals["stellar_radius_sun"], 'stellar_mass_sun': vals["stellar_mass_sun"], 'stellar_teff_K': vals["stellar_teff_K"], 'p0_log10_bar': vals["p0_log10_bar"], 'hjd0_days': vals["hjd0_days"], 'period_days': vals["period_days"], 't14_hours': float(t14_hours), 't12_hours': float(t12_hours), 't23_hours': float(t23_hours), 'Limit_Phase_T14': float(limph_T14), 'Limit_Phase_T12': float(limph_T12), 'Limit_Phase_T23': float(limph_T23), 'kp_km_s': float(kp_km_s), 'ks_km_s': float(ks_km_s), 'v_system_km_s': vals["v_system_km_s"], 'ecc': vals["ecc"], 'periastron_argument': vals["periastron_argument"], 'ra': vals["ra"], 'dec': vals["dec"], 'a_Rs_ratio': vals["a_Rs_ratio"], 'projected_obliquity_deg': 
vals["projected_obliquity_deg"], 'inclination_deg': vals["inclination_deg"], 'v_sini_km_s': vals["v_sini_km_s"], } return yaml_dict, missing_primary, recomputed_keys, fetched_keys def _repair_info_yaml(yaml_path, yaml_file, missing_keys, target_name): """Back up the incomplete yaml, write a rebuilt one, notify the user.""" yaml_dict, missing_primary, recomputed_keys, fetched_keys = ( _rebuild_yaml_from_partial(yaml_file, target_name) ) backup_path = Path(str(yaml_path) + ".bak") shutil.copy2(str(yaml_path), str(backup_path)) with open(str(yaml_path), 'w') as f: yaml.dump(yaml_dict, f, sort_keys=False) lines = [ f"info.yaml for target '{target_name}' was missing keys:", f" {', '.join(sorted(missing_keys))}", "", f"Backup saved to: {backup_path.name}", ] if fetched_keys: lines.append("") lines.append("Fetched from NASA Exoplanet Archive:") for key, (archive_name, value) in fetched_keys.items(): lines.append(f" {key} = {value} (matched '{archive_name}')") if recomputed_keys: lines.append("") lines.append("Recomputed from available inputs:") lines.append(" " + ", ".join(sorted(set(recomputed_keys)))) if missing_primary: lines.append("") lines.append("Primary parameters missing, set to 0:") lines.append(" " + ", ".join(missing_primary)) lines.append("") lines.append("Please verify all values in info.yaml before proceeding.") message = "\n".join(lines) print("[info.yaml repair]\n" + message) if _messagebox is not None: try: _messagebox.showwarning(f"info.yaml repaired: {target_name}", message) except Exception: pass return yaml_dict def get_target_info(path_targets_folder, target): """ Load target information from the info.yaml file. Loading @@ -73,8 +321,12 @@ def get_target_info(path_targets_folder, target): Returns a list of 25 None values if target is "None". 
""" if target != "None": with open(str(Path(path_targets_folder, target, "info.yaml")), 'r') as f: yaml_file = yaml.safe_load(f) # Use safe_load to avoid executing arbitrary code in the YAML yaml_path = Path(path_targets_folder, target, "info.yaml") with open(str(yaml_path), 'r') as f: yaml_file = yaml.safe_load(f) or {} # Use safe_load to avoid executing arbitrary code in the YAML missing_keys = [k for k in EXPECTED_YAML_KEYS if k not in yaml_file] if missing_keys: yaml_file = _repair_info_yaml(yaml_path, yaml_file, missing_keys, target) return ( yaml_file["name"], yaml_file["radius_jup"], Loading
DataReductionGIANOB/Frame_Gofio.py +0 −1 Original line number Diff line number Diff line Loading @@ -2111,7 +2111,6 @@ class Frame_Gofio: target_a_Rs_ratio, target_projected_obliquity, target_inclination, target_v_sini, target_limph_T12, target_limph_T23, target_t14_hours, target_t12_hours, target_t23_hours ) = DataInterface.get_target_info(self.path_target, target_id) self.current_target_ self.radius_DB_up.delete(0, tk.END) self.radius_DB_up.insert(tk.END, target_radius) self.mass_DB_up.delete(0, tk.END) Loading