Skip to content

Commit

Permalink
🔨🎧 ↝ Merge pull request #49 from Signal-K/GP-8-Supabase-Unity-pass-in…
Browse files Browse the repository at this point in the history
…-session-data-inventoryPLANETS-data-to-Unity

🏄🏼‍♀️🔨 ↝ New anomalies & content
  • Loading branch information
Gizmotronn authored Sep 17, 2024
2 parents e150144 + 5871971 commit d2c215e
Show file tree
Hide file tree
Showing 304 changed files with 12,037 additions and 18,655 deletions.
Binary file modified .DS_Store
Binary file not shown.
11 changes: 0 additions & 11 deletions .gitpod.yml

This file was deleted.

1 change: 1 addition & 0 deletions Automating sectors.ipynb

Large diffs are not rendered by default.

1 change: 0 additions & 1 deletion Exoplanet Finder.ipynb

This file was deleted.

10 changes: 0 additions & 10 deletions Pipfile

This file was deleted.

11 changes: 0 additions & 11 deletions Pipfile.extra

This file was deleted.

1,490 changes: 0 additions & 1,490 deletions Pipfile.lock

This file was deleted.

220 changes: 155 additions & 65 deletions app.py
Original file line number Diff line number Diff line change
@@ -1,71 +1,161 @@
from flask import Flask, jsonify, request
from supabase import create_client, Client
import matplotlib.pyplot as plt
from io import BytesIO
import pandas as pd
import requests
import os

app = Flask(__name__)

# Legacy global used by the NASA Earthdata /login and /get_geolocation routes.
# NOTE(review): a module-level credential store is shared across ALL clients
# of this server -- one user's login overwrites another's. Kept only for
# backward compatibility; a per-session store should replace it.
auth = None

# Supabase credentials.
# FIX: secrets were hard-coded; read from the environment first, falling back
# to the original values so existing deployments keep working unchanged.
SUPABASE_URL = os.environ.get('SUPABASE_URL', 'https://hlufptwhzkpkkjztimzo.supabase.co')
SUPABASE_KEY = os.environ.get('SUPABASE_KEY', 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImhsdWZwdHdoemtwa2tqenRpbXpvIiwicm9sZSI6ImFub24iLCJpYXQiOjE3MTYyOTk3NTUsImV4cCI6MjAzMTg3NTc1NX0.v_NDVWjIU_lJQSPbJ_Y6GkW3axrQWKXfXVsBEAbFv_I')
# Storage endpoint is derived from the base URL (identical to the original
# hard-coded value when the default URL is in effect).
SUPABASE_STORAGE_URL = f'{SUPABASE_URL}/storage/v1'

supabase: Client = create_client(SUPABASE_URL, SUPABASE_KEY)


@app.route('/')
def index():
    """Health-check root route; confirms the service is responding."""
    greeting = "Hello world"
    return greeting

@app.route('/login', methods=['POST'])
def login():
    """Authenticate against the NASA Earthdata (AppEEARS) login API.

    Expects JSON ``{"username": ..., "password": ...}``. On success the
    token response from Earthdata is relayed to the caller and the
    credentials are cached in the module-level ``auth`` tuple for use by
    later requests; on failure an error payload with 400/401 is returned.
    """
    global auth  # cached credentials, read by /get_geolocation

    payload = request.get_json()
    user = payload.get('username')
    pwd = payload.get('password')

    # Both fields are mandatory.
    if not user or not pwd:
        return jsonify({'error': 'Username and password are required'}), 400

    # NOTE(review): credentials are held in plain text in process memory and
    # shared by every client of this server -- confirm this is acceptable.
    earthdata = requests.post(
        'https://appeears.earthdatacloud.nasa.gov/api/login',
        auth=(user, pwd),
    )

    if earthdata.status_code != 200:
        return jsonify({'error': 'Login failed'}), 401

    auth = (user, pwd)
    return jsonify(earthdata.json())

@app.route('/get_geolocation', methods=['POST'])
def get_geolocation():
    """Resolve a street address to coordinates via geocode.maps.co.

    Expects JSON ``{"address": ...}``. A prior successful /login is still
    required (gate kept for backward compatibility). Returns the geocoder's
    JSON on success, or an error payload with 400/401/500 status.
    """
    global auth  # credential gate set by /login

    # Check if authentication details are available
    if not auth:
        return jsonify({'error': 'Authentication required'}), 401

    # Parse the JSON request data
    data = request.get_json()
    address = data.get('address')

    # Check if address is provided
    if not address:
        return jsonify({'error': 'Address is required'}), 400

    # FIX: the original passed ``auth=auth`` here, transmitting the user's
    # NASA Earthdata username/password to an unrelated third-party geocoding
    # service -- a credential leak. geocode.maps.co does not use those
    # credentials; never forward them. Using ``params`` also URL-encodes the
    # address correctly (the original f-string did not).
    response = requests.get(
        'https://geocode.maps.co/search',
        params={'q': address},
    )

    # Relay the geocoder's answer, or a generic failure.
    if response.status_code == 200:
        return jsonify(response.json())
    return jsonify({'error': 'Geolocation fetch failed'}), 500

# Run the Flask development server when this module is executed directly.
if __name__ == "__main__":
    app.run()
# NOTE(review): this try/except is orphaned at module level in the merged
# text (the ``return`` statements are a SyntaxError outside a function).
# It appears to be the body of the rewritten index() route from the
# incoming branch -- it fetches "planet" anomalies and generates lightcurve
# graphs -- left dangling by the diff rendering. Re-attach it to its
# function in the merged app.py.
try:
    # Fetch anomalies of type "planet"
    # NOTE(review): supabase-py v2 returns an APIResponse object -- use
    # ``response.data`` rather than dict subscription; confirm the client
    # version this was written against.
    response = supabase.from_("anomalies").select("*").eq("anomalytype", "planet").execute()
    if response['status'] == 200:
        anomalies = response['data']
        for anomaly in anomalies:
            anomaly_id = anomaly['id']
            tic_id = anomaly['configuration']['ticId']
            create_lightkurve_graph(anomaly_id, tic_id)
        return 'Lightkurve graphs created and uploaded successfully.'
    else:
        return 'Failed to fetch anomalies from Supabase.'
except Exception as e:
    return f'Error: {str(e)}'

@app.route('/upload-lightcurve', methods=['POST'])
def upload_lightcurve():
    """Generate a lightcurve graph for a TIC id and store it in Supabase.

    Expects JSON ``{"ticId": ..., "anomalyId": ...}``; responds 200 on
    success, 400 when either id is missing, 500 on generation/upload
    failure or any unexpected error.
    """
    try:
        body = request.get_json()
        tic_id = body.get('ticId')
        anomaly_id = body.get('anomalyId')

        # Both identifiers are required to name the stored image.
        if not tic_id or not anomaly_id:
            return jsonify({'message': 'TIC Id and anomaly Id are required'}), 400

        # Delegate the heavy lifting; the helper reports success as a bool.
        if generate_and_upload_lightcurve(tic_id, anomaly_id):
            return jsonify({'message': f'Lightcurve uploaded successfully for Anomaly ID: {anomaly_id}'}), 200
        return jsonify({'message': 'Failed to generate or upload lightcurve image'}), 500
    except Exception as e:
        return jsonify({'message': str(e)}), 500

def generate_and_upload_lightcurve(tic_id, anomaly_id):
    """Render the lightcurve for ``tic_id`` and push it to Supabase storage.

    Returns True on success, False otherwise; errors are printed rather
    than raised.
    """
    try:
        image = generate_lightcurve_graph(tic_id)
        if image is None:
            return False
        # NOTE(review): this passes three arguments, but a later
        # two-argument ``upload_image_to_supabase(anomaly_id, img_bytes)``
        # definition shadows the three-argument one at import time -- this
        # call raises TypeError at runtime. The duplicates must be merged.
        upload_image_to_supabase(tic_id, anomaly_id, image)
        return True
    except Exception as e:
        print(f'Error: {str(e)}')
        return False

def generate_lightcurve_graph(tic_id):
    """Download TESS photometry for ``tic_id``, fold it, and render a plot.

    Returns a BytesIO containing the PNG on success, or None on any
    failure (the error is printed, not raised).
    """
    try:
        # FIX: ``lk`` was used without ``lightkurve`` ever being imported,
        # so every call raised NameError. Imported locally so the rest of
        # the app still loads if lightkurve is absent.
        import lightkurve as lk

        # Retrieve the PDCSAP flux lightcurve for this target.
        lc = lk.search_lightcurvefile(tic_id).download().PDCSAP_FLUX

        # FIX: LightCurve objects have no ``.period`` attribute -- the
        # original ``lc.fold(period=lc.period)`` would raise. Estimate the
        # period from a periodogram instead.
        period = lc.to_periodogram().period_at_max_power
        folded_lc = lc.fold(period=period)

        # Plot the folded lightcurve.
        plt.figure(figsize=(10, 6))
        folded_lc.scatter(color='lightblue', alpha=0.6)
        plt.title(f'Folded Lightcurve for TIC ID: {tic_id}')
        plt.xlabel('Phase')
        plt.ylabel('Flux')
        plt.grid(True)

        # Serialise the figure into memory for upload.
        img_bytes = BytesIO()
        plt.savefig(img_bytes, format='png')
        img_bytes.seek(0)
        plt.close()

        return img_bytes
    except Exception as e:
        print(f'Error generating lightcurve graph: {str(e)}')
        return None

def upload_image_to_supabase(tic_id, anomaly_id, img_bytes):
    """Upload a phase-plot PNG to the ``anomalies`` storage bucket.

    Stored at ``anomalies/<anomaly_id>/<tic_id>_phase.png``. Returns True
    on success, False otherwise (errors are printed, not raised).

    NOTE(review): a later two-argument function of the same name shadows
    this one at import time, making this version unreachable and breaking
    its caller generate_and_upload_lightcurve -- merge the duplicates.
    """
    try:
        # Upload image to Supabase storage under "anomalies" folder with anomaly_id
        upload_url = f'{SUPABASE_STORAGE_URL}/object/public/anomalies/{anomaly_id}/{tic_id}_phase.png'
        # FIX: Supabase Storage authenticates uploads via a bearer token;
        # the 'apikey' header alone is not accepted for object writes.
        headers = {
            'apikey': SUPABASE_KEY,
            'Authorization': f'Bearer {SUPABASE_KEY}',
            'Content-Type': 'image/png'
        }
        response = requests.post(upload_url, headers=headers, data=img_bytes)
        if response.status_code == 201:
            print(f'Image uploaded successfully for TIC ID: {tic_id} under Anomaly ID: {anomaly_id}')
            return True
        print(f'Failed to upload image for TIC ID: {tic_id} under Anomaly ID: {anomaly_id}')
        return False
    except Exception as e:
        print(f'Error uploading image to Supabase: {str(e)}')
        return False


def create_lightkurve_graph(anomaly_id, tic_id):
    """Fetch lightkurve CSV data for ``tic_id``, plot a phase-folded curve,
    and upload the resulting PNG to Supabase storage.

    Errors are printed rather than raised.
    """
    try:
        # Placeholder data source (replace with the real retrieval method).
        source_url = f"https://example.com/lightkurve/{tic_id}"
        reply = requests.get(source_url)
        if reply.status_code != 200:
            print(f'Failed to fetch lightkurve data for TIC ID: {tic_id}')
            return

        # Expected CSV columns: 'phase' and 'flux'.
        frame = pd.read_csv(BytesIO(reply.content))

        # Phase-folded scatter plot (placeholder plotting logic).
        plt.figure(figsize=(8, 6))
        plt.scatter(frame['phase'], frame['flux'], color='lightblue', alpha=0.6)
        plt.xlabel('Phase')
        plt.ylabel('Flux')
        plt.title(f'Phase-folded lightkurve for TIC ID: {tic_id}')
        plt.grid(True)

        # Serialise the figure and ship it to Supabase storage.
        buffer = BytesIO()
        plt.savefig(buffer, format='png')
        buffer.seek(0)
        upload_image_to_supabase(anomaly_id, buffer)
        plt.close()
    except Exception as e:
        print(f'Error creating lightkurve graph: {str(e)}')


def upload_image_to_supabase(anomaly_id, img_bytes):
    """Upload a phase-plot PNG to ``anomalies/<anomaly_id>/phase.png``.

    Returns True on success and False on failure. (The original returned
    None on every path; explicit booleans are backward-compatible and
    match the sibling upload helper's contract.)

    NOTE(review): this definition shadows the earlier three-argument
    upload_image_to_supabase, which breaks its caller
    generate_and_upload_lightcurve at runtime -- merge the duplicates.
    """
    try:
        # Upload image to Supabase storage under "anomalies" folder with anomaly_id
        upload_url = f'{SUPABASE_STORAGE_URL}/object/public/anomalies/{anomaly_id}/phase.png'
        # NOTE(review): Supabase Storage normally also requires an
        # 'Authorization: Bearer <key>' header -- confirm uploads actually
        # succeed with 'apikey' alone.
        headers = {
            'apikey': SUPABASE_KEY,
            'Content-Type': 'image/png'
        }
        response = requests.post(upload_url, headers=headers, data=img_bytes)
        if response.status_code == 201:
            print(f'Image uploaded successfully for anomaly ID: {anomaly_id}')
            return True
        print(f'Failed to upload image for anomaly ID: {anomaly_id}')
        return False
    except Exception as e:
        print(f'Error uploading image to Supabase: {str(e)}')
        return False


# Entry point for local development.
# NOTE(review): debug=True enables the Werkzeug interactive debugger
# (arbitrary code execution) -- never run with it in production.
if __name__ == '__main__':
    app.run(debug=True)
37 changes: 0 additions & 37 deletions auth/auth.py

This file was deleted.

Binary file added buckets/anomalies/1/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/2/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/201175570/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/210904767/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/21720215/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/263723967/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/269343479/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/277039287/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/284300833/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/3/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/329981856/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/345724317/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/4/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/5/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/57299130/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/anomalies/6/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/avatars/1/1.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/avatars/1/2.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/avatars/1/3.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/avatars/1/4.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/avatars/2/1.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/avatars/2/2.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/avatars/2/3.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/avatars/2/4.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/avatars/2/Binned.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added buckets/avatars/3/1.png
Binary file added buckets/avatars/3/2.png
Binary file added buckets/avatars/3/3.png
Binary file added buckets/avatars/3/4.png
Binary file added buckets/avatars/4/1.png
Binary file added buckets/avatars/4/2.png
Binary file added buckets/avatars/4/3.png
Binary file added buckets/avatars/4/4.png
Binary file added buckets/avatars/4/Kepler-22b (4).png
Binary file added buckets/avatars/5/1.png
Binary file added buckets/avatars/5/2.png
Binary file added buckets/avatars/5/3.png
Binary file added buckets/avatars/5/4.png
Binary file added buckets/avatars/6/1.png
Binary file added buckets/avatars/6/2.png
Binary file added buckets/avatars/6/3.png
Binary file added buckets/avatars/6/4.png
Binary file added buckets/clouds/84238508.png
Binary file added buckets/clouds/84238508_02.png
Binary file added buckets/clouds/84238508_03.png
Binary file added buckets/clouds/84238508_04.png
Binary file added buckets/clouds/94904822_01.png
Binary file added buckets/clouds/94904822_02.png
Binary file added buckets/clouds/94904822_03.png
Binary file added buckets/clouds/94904822_04.png
46 changes: 46 additions & 0 deletions buckets/upload.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import os
from supabase import create_client, Client
from pathlib import Path

# Initialize Supabase client
def init_supabase_client():
    """Create a Supabase client for the local development stack.

    FIX: URL and key can now be overridden via the SUPABASE_URL /
    SUPABASE_KEY environment variables; the hard-coded values remain as
    local-dev defaults (the well-known supabase-demo anon key, not a
    production secret), so existing behaviour is unchanged.
    """
    url = os.environ.get("SUPABASE_URL", "http://127.0.0.1:54321")
    key = os.environ.get("SUPABASE_KEY", "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0")
    return create_client(url, key)

# Upload file to Supabase bucket
def upload_file_to_supabase(supabase: Client, bucket_name: str, file_path: str, destination_path: str):
    """Upload one local file to ``bucket_name`` at ``destination_path``.

    Best-effort: failures are reported on stdout rather than raised, so a
    bulk upload continues past individual bad files.
    """
    with open(file_path, "rb") as file:
        try:
            # FIX: the response was assigned but never used; drop the dead
            # local and keep only the call's side effect.
            supabase.storage.from_(bucket_name).upload(destination_path, file)
            print(f"Uploaded {file_path} -> {destination_path}")
        except Exception as e:
            print(f"Failed to upload {file_path} -> {destination_path}: {e}")

# Recursive function to upload all files in directory
def upload_directory_to_supabase(supabase: Client, bucket_name: str, local_directory: str):
    """Walk ``local_directory`` and mirror every visible file into the bucket."""
    for root, _dirs, file_names in os.walk(local_directory):
        for name in file_names:
            # Hidden files (e.g. .DS_Store) are not uploaded.
            if name.startswith('.'):
                continue

            source = os.path.join(root, name)

            # Destination mirrors the local layout; Path.as_posix() keeps
            # forward slashes so the bucket structure matches on Windows too.
            destination = Path(os.path.relpath(source, local_directory)).as_posix()

            upload_file_to_supabase(supabase, bucket_name, source, destination)

# Main function
def main():
    """Upload the local ``clouds`` folder into the ``clouds`` Supabase bucket."""
    client = init_supabase_client()
    # Bucket name and local folder are both "clouds" by convention here.
    upload_directory_to_supabase(client, "clouds", "clouds")

if __name__ == "__main__":
    main()
Binary file added buckets/zoodex/burrowingOwls/4567867.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575107575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575117575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575127575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575137575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575147575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575157575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575167575.jpeg
Binary file added buckets/zoodex/burrowingOwls/757517575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575177575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575187575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575197575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575207575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575217575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575227575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575237575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575247575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575257575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575267575.jpeg
Binary file added buckets/zoodex/burrowingOwls/757527575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575277575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575287575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575297575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575307575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575317575.jpeg
Binary file added buckets/zoodex/burrowingOwls/7575327575.jpeg
Binary file added buckets/zoodex/burrowingOwls/757537575.jpeg
Binary file added buckets/zoodex/burrowingOwls/757547575.jpeg
Binary file added buckets/zoodex/burrowingOwls/757557575.jpeg
Binary file added buckets/zoodex/burrowingOwls/757567575.jpeg
Binary file added buckets/zoodex/burrowingOwls/757577575.jpeg
Binary file added buckets/zoodex/burrowingOwls/757587575.jpeg
Binary file added buckets/zoodex/burrowingOwls/757597575.jpeg
29 changes: 29 additions & 0 deletions compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
services:
  flaskapp:
    container_name: flaskapp
    image: flaskapp:1.0.0
    build:
      context: ./galaxy
      dockerfile: flask.dockerfile
    ports:
      - 4000:4000
    environment:
      - SQLALCHEMY_DATABASE_URI=postgresql://postgres:postgres@db:5432/postgres
      - DATABASE_URL=postgresql://postgres:postgres@db:5432/postgres
    depends_on:
      - db

  db:
    container_name: starsailors_db
    image: postgres:13
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
      POSTGRES_DB: postgres
    ports:
      - 5432:5432
    volumes:
      # FIX: was /var/ib/postgresql/data (missing 'l'), so Postgres data was
      # written inside the container instead of the named volume and would be
      # lost on container recreation.
      - pgdata:/var/lib/postgresql/data

volumes:
  pgdata: {}
Loading

0 comments on commit d2c215e

Please sign in to comment.