-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
🪟🎟️ ↝ [SSG-100]: Consensus & Location — Merge pull request #63 from Signal-K/SSG-100
- Loading branch information
Showing
50 changed files
with
327 additions
and
7 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,70 @@ | ||
import os | ||
from astroquery.mast import Catalogs | ||
import lightkurve as lk | ||
import matplotlib.pyplot as plt | ||
import random | ||
|
||
def plot_sectors_with_temperature(tic_id, bin_time_minutes=15):
    """
    Create and save binned light curve plots for all available SPOC sectors
    of a given TIC ID, annotated with the star's effective temperature.

    Parameters:
    - tic_id: The TIC ID for which to generate plots.
    - bin_time_minutes: Time interval for binning the light curves, in minutes.

    Side effects:
    - Creates a folder named after the TIC ID in the current working
      directory and writes one PNG per downloaded sector
      ("Sector1.png", "Sector2.png", ...).
    """
    # Convert minutes to days (divide by 60 minutes/hour, then 24 hours/day).
    bin_time_days = bin_time_minutes / 24 / 60

    # Query the stellar effective temperature from the TIC catalog.
    star_info = Catalogs.query_object(f"TIC {tic_id}", catalog="TIC")
    if len(star_info) == 0:
        print(f"Star information not found for TIC {tic_id}.")
        return
    temperature = star_info[0]['Teff'] if 'Teff' in star_info.columns else 'Unknown'

    # Search for SPOC-pipeline light curves.
    search_result = lk.search_lightcurve(f"TIC {tic_id}", author="SPOC")
    if len(search_result) == 0:
        print(f"No light curves found for TIC {tic_id}.")
        return

    # Create an output folder named after the TIC ID.
    output_folder = str(tic_id)
    os.makedirs(output_folder, exist_ok=True)

    colors = ['red', 'blue', 'green', 'purple', 'orange', 'pink', 'cyan', 'magenta', 'yellow', 'brown']

    # Generate and save one plot per sector.
    image_counter = 1
    for lc_file in search_result:
        lc = lc_file.download()
        if lc is None:
            # download() can fail (network/cache issues); skip this sector
            # instead of crashing on the None return.
            print(f"Could not download a light curve for TIC {tic_id}; skipping.")
            continue
        lc = lc.remove_outliers(sigma=5)
        lc_binned = lc.bin(bin_time_days)

        # Fix: use the named colors starting at index 0 (the original started
        # at index 1, skipping 'red' and wrapping back to it at counter 10);
        # fall back to random hex colors once the list is exhausted.
        if image_counter <= len(colors):
            color = colors[image_counter - 1]
        else:
            color = "#" + ''.join(random.choices('0123456789ABCDEF', k=6))

        # Fix: pass the axes explicitly — without ax=, lightkurve draws on
        # its own axes and the 10x5 figure created here would stay empty.
        fig, ax = plt.subplots(figsize=(10, 5))
        lc_binned.plot(ax=ax, marker='o', linewidth=0, color=color, alpha=0.8, markersize=5, label='Binned')

        ax.set_title(f"TIC {tic_id} - Sector {lc.sector}\n"
                     f"Stellar Temperature: {temperature} K | Binning: {bin_time_minutes} min")
        ax.set_xlabel("Time [BTJD days]")
        ax.set_ylabel("Normalized Flux")
        ax.legend()

        output_file = os.path.join(output_folder, f"Sector{image_counter}.png")
        fig.savefig(output_file)
        plt.close(fig)
        print(f"Saved plot: {output_file}")
        image_counter += 1
|
||
# Target list: TIC IDs to render light-curve plots for.
tic_ids = [
    440801822, 345724317, 329981856, 284300833, 277039287, 269343479,
    263723967, 238597883, 210904767, 201175570, 169904935, 156115721,
    124709665, 106997505, 88863718, 65212867, 57299130, 50365310, 21720215
]

# Process every target, announcing each one before its plots are generated.
for target in tic_ids:
    print(f"Processing TIC {target}...")
    plot_sectors_with_temperature(target)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Oops, something went wrong.
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,101 @@ | ||
import os | ||
from supabase import create_client, Client | ||
from pathlib import Path | ||
|
||
# Initialize Supabase client | ||
def init_supabase_client():
    """Build a Supabase client pointed at the local development stack."""
    # Local Supabase instance; this is the well-known public demo anon key
    # shipped with `supabase start`, not a production secret.
    supabase_url = "http://127.0.0.1:54321"
    supabase_key = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0"
    return create_client(supabase_url, supabase_key)
|
||
def upload_file_to_supabase(supabase: Client, bucket_name: str, file_path: str, destination_path: str):
    """Upload one local file into a Supabase storage bucket.

    Returns True when the file was uploaded (or already exists remotely),
    False on any other failure.
    """
    with open(file_path, "rb") as file:
        try:
            supabase.storage.from_(bucket_name).upload(destination_path, file)
        except Exception as e:
            # A duplicate means the object is already in the bucket; treat
            # that as success so the database row still gets written.
            if "Duplicate" in str(e):
                print(f"File already exists: {file_path}. Proceeding with database insertion.")
                return True
            print(f"Failed to upload {file_path} -> {destination_path}: {e}")
            return False
        print(f"Uploaded {file_path} -> {destination_path}")
        return True
|
||
def check_anomaly_exists(supabase: Client, anomaly_id):
    """Return True when a row with this id already exists in 'anomalies'."""
    try:
        result = supabase.table('anomalies').select("*").eq("id", anomaly_id).execute()
    except Exception as e:
        # Treat lookup failures as "not found" so the caller falls back to insert.
        print(f"Error checking for anomaly {anomaly_id}: {e}")
        return False
    return len(result.data) > 0
|
||
def check_anomaly_needs_avatar_update(supabase: Client, anomaly_id):
    """Return True when the anomaly row exists but its avatar_url is still NULL."""
    try:
        result = supabase.table('anomalies').select("avatar_url").eq("id", anomaly_id).execute()
        rows = result.data
        if not rows:
            # No such row: nothing to update.
            return False
        return rows[0]["avatar_url"] is None
    except Exception as e:
        print(f"Error checking avatar_url for anomaly {anomaly_id}: {e}")
        return False
|
||
def insert_or_update_anomalies(supabase: Client, anomaly_id, content, anomaly_set: str, avatar_url: str):
    """Insert a new anomaly row, or backfill avatar_url on an existing one.

    NOTE(review): anomaly_set is currently unused — the set is hard-coded to
    "telescope-tess" in the insert payload below; confirm whether that is
    intentional.
    """
    if check_anomaly_exists(supabase, anomaly_id):
        # Row exists: only touch it when the avatar_url is still missing.
        if not check_anomaly_needs_avatar_update(supabase, anomaly_id):
            print(f"Anomaly {anomaly_id} already has an avatar_url. Skipping update.")
            return
        try:
            supabase.table('anomalies').update({"avatar_url": avatar_url}).eq("id", anomaly_id).execute()
            print(f"Updated anomaly {anomaly_id} with new avatar_url.")
        except Exception as e:
            print(f"Failed to update avatar_url for anomaly {anomaly_id}: {e}")
        return

    # No existing row: insert a fresh one.
    try:
        payload = {
            "id": anomaly_id,
            "content": content,
            "anomalytype": "planet",
            "anomalySet": "telescope-tess",  # hard-coded; anomaly_set is ignored
            "avatar_url": avatar_url,
        }
        supabase.table('anomalies').insert(payload).execute()
        print(f"Inserted anomaly with id {anomaly_id} into 'anomalies' table.")
    except Exception as e:
        print(f"Failed to insert anomaly {anomaly_id}: {e}")
|
||
def upload_directory_to_supabase(supabase: Client, bucket_name: str, local_directory: str):
    """Walk a local directory tree, mirror every file into the storage bucket,
    and register each uploaded file in the 'anomalies' table."""
    for root, dirs, files in os.walk(local_directory):
        for file_name in files:
            # Skip hidden files such as .DS_Store.
            if file_name.startswith('.'):
                continue

            file_path = os.path.join(root, file_name)
            relative_path = os.path.relpath(file_path, local_directory)
            # Bucket keys always use forward slashes, regardless of OS.
            destination_path = Path(relative_path).as_posix()

            # The containing folder's name doubles as the anomaly set.
            anomaly_set = Path(root).name

            # Numeric file stems are anomaly ids; anything else falls back
            # to the set name for both id and content.
            stem = Path(file_name).stem
            try:
                anomaly_id = int(stem)
                content = anomaly_id
            except ValueError:
                anomaly_id = anomaly_set
                content = anomaly_set

            # Only register the database row once the upload succeeded
            # (or the object already existed in the bucket).
            if upload_file_to_supabase(supabase, bucket_name, file_path, destination_path):
                avatar_url = f"{bucket_name}/{destination_path}"
                insert_or_update_anomalies(supabase, anomaly_id, content, anomaly_set, avatar_url)
|
||
def main():
    """Mirror the local 'anomalies' directory into the 'anomalies' bucket."""
    supabase = init_supabase_client()
    # Bucket and source directory share the same name by convention.
    upload_directory_to_supabase(supabase, "anomalies", "anomalies")


if __name__ == "__main__":
    main()