Merge pull request #512 from ddps-lab/azure-sps-for-PR
PR including all SPS collection features - to main branch
Showing 21 changed files with 1,408 additions and 117 deletions.
@@ -0,0 +1,45 @@
```yaml
name: deploy azure sps files to lambda
on:
  push:
    branches:
      - 'main'
    paths:
      - 'collector/spot-dataset/azure/lambda/current_collector/lambda_function_sps.py'
      - 'collector/spot-dataset/azure/lambda/current_collector/load_price.py'
      - 'collector/spot-dataset/azure/lambda/current_collector/load_sps.py'
      - 'const_config.py'
      - 'collector/spot-dataset/azure/lambda/current_collector/utils/**'
      - 'collector/spot-dataset/azure/lambda/current_collector/sps_module/**'

env:
  AWS_ACCESS_KEY_ID: ${{ secrets.SPOTRANK_ACCESS_KEY_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.SPOTRANK_SECRET_ACCESS_KEY }}
  AWS_DEFAULT_REGION: 'us-west-2'

jobs:
  deploy_source:
    name: deploy lambda from source
    runs-on: ubuntu-latest
    steps:
      - name: checkout source code
        uses: actions/checkout@v4
      - name: Zip Lambda function code
        run: |
          rm -f azure_sps_lambda.zip
          rm -f ./collector/spot-dataset/azure/lambda/current_collector/azure_sps_lambda.zip
          zip -j ./collector/spot-dataset/azure/lambda/current_collector/azure_sps_lambda.zip \
            ./collector/spot-dataset/azure/lambda/current_collector/lambda_function_sps.py \
            ./collector/spot-dataset/azure/lambda/current_collector/load_price.py \
            ./collector/spot-dataset/azure/lambda/current_collector/load_sps.py \
            ./const_config.py
          cd ./collector/spot-dataset/azure/lambda/current_collector/
          zip -r azure_sps_lambda.zip ./utils/*
          zip -r azure_sps_lambda.zip ./sps_module/*
          cd ../../../../../
          mv ./collector/spot-dataset/azure/lambda/current_collector/azure_sps_lambda.zip ./
      - name: Deploy to AWS Lambda
        run: |
          aws lambda update-function-code --function-name azure-sps-collector --zip-file fileb://azure_sps_lambda.zip
```
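Note on the packaging step: `zip -j` flattens the handler modules to the archive root, while the two `zip -r` calls keep `utils/` and `sps_module/` as importable package directories. A minimal local sanity check of that layout, assuming only the file names used in the workflow above (this script is a sketch and is not part of the workflow):

```python
# Sketch: verify the archive layout produced by the workflow's zip step.
# Handler modules should sit at the zip root (effect of `zip -j`), while
# utils/ and sps_module/ should keep their directory prefixes (`zip -r`).
import zipfile

with zipfile.ZipFile("azure_sps_lambda.zip") as zf:
    names = zf.namelist()

assert "lambda_function_sps.py" in names
assert "const_config.py" in names
assert any(n.startswith("utils/") for n in names)
assert any(n.startswith("sps_module/") for n in names)
print("archive layout looks correct")
```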
collector/spot-dataset/azure/lambda/current_collector/lambda_function_sps.py (83 additions, 0 deletions)
@@ -0,0 +1,83 @@
```python
import load_sps
import pandas as pd
from datetime import datetime
from sps_module import sps_shared_resources
from utils.merge_df import merge_price_eviction_sps_df
from utils.upload_data import update_latest_sps, save_raw_sps
from utils.pub_service import send_slack_message, logger, S3, AZURE_CONST

FIRST_TIME_ACTION = "First_Time"    # action for the first run
EVERY_10MIN_ACTION = "Every_10Min"  # action executed every 10 minutes
UTC_1500_TIME = "15:00"             # UTC 15:00 (KST 00:00)


def lambda_handler(event, _):
    action = event.get("action")
    event_time_utc = event.get("time")
    try:
        if not action or not event_time_utc:
            raise ValueError("Invalid event info: action or time is missing")

        event_time_utc = datetime.strptime(event_time_utc, "%Y-%m-%dT%H:%M:%SZ")
        desired_count = sps_shared_resources.time_desired_count_map.get(event_time_utc.strftime("%H:%M"), 1)

        logger.info(f"Lambda triggered: action: {action}, event_time: {event_time_utc}, desired_count: {desired_count}")

        if action == FIRST_TIME_ACTION:
            sps_res_df = load_sps.collect_spot_placement_score_first_time(desired_count=desired_count)

        elif action == EVERY_10MIN_ACTION:
            # Skip execution at UTC 15:00 (KST 00:00)
            if event_time_utc.strftime("%H:%M") == UTC_1500_TIME:
                logger.info("Skipping scheduled time (UTC 15:00, KST 00:00)")
                return handle_response(200, "Executed successfully. Scheduled time skipped.", action, event_time_utc)

            sps_res_df = load_sps.collect_spot_placement_score(desired_count=desired_count)

        else:
            raise ValueError(f"Invalid lambda action: {action}")

        if sps_res_df is None:
            raise ValueError("sps_res_df is None")

        if not handle_res_df(sps_res_df, event_time_utc):
            raise RuntimeError("Failed to handle_res_df")

        return handle_response(200, "Executed Successfully!", action, event_time_utc)

    except Exception as e:
        error_msg = f"Unexpected error: {e}"
        logger.error(error_msg)
        send_slack_message(f"AZURE SPS MODULE EXCEPTION!\n{error_msg}")
        return handle_response(500, "Execute Failed!", action, event_time_utc, str(e))


def handle_res_df(sps_res_df, event_time_utc):
    try:
        sps_res_df['time'] = event_time_utc.strftime("%Y-%m-%d %H:%M:%S")
        sps_res_df['AvailabilityZone'] = sps_res_df['AvailabilityZone'].where(pd.notna(sps_res_df['AvailabilityZone']), None)

        # price_if_df = S3.read_file(AZURE_CONST.S3_LATEST_PRICE_IF_GZIP_SAVE_PATH, 'pkl.gz')
        # if price_if_df is None: raise ValueError("price_if_df is None")
        price_if_df = pd.DataFrame(S3.read_file(AZURE_CONST.S3_LATEST_DATA_SAVE_PATH, 'json'))
        price_eviction_sps_df = merge_price_eviction_sps_df(price_if_df, sps_res_df)

        if update_latest_sps(price_eviction_sps_df) and save_raw_sps(price_eviction_sps_df, event_time_utc):
            logger.info("Successfully merged the price/if/sps df, updated the latest result, and saved the raw data!")
            return True
        # Fall through explicitly when update or save reports failure
        return False

    except Exception as e:
        logger.error(f"Error in handle_res_df function: {e}")
        return False


def handle_response(status_code, body, action, time, error_message=None):
    response = {
        "statusCode": status_code,
        "body": body,
        "action": action,
        "time": str(time)
    }
    if error_message:
        response["error_message"] = error_message

    logger.info(f"Response: {response}")
    return response
```
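For reference, the handler expects a scheduler event carrying an `action` and a UTC `time` string in `%Y-%m-%dT%H:%M:%SZ` format. A minimal sketch of such an event and a local smoke test, where the action names and time format come from the handler above and the concrete timestamp is only illustrative:

```python
# Sketch: event shape expected by lambda_handler (based on the handler above).
sample_event = {
    "action": "Every_10Min",         # or "First_Time"
    "time": "2025-01-01T15:10:00Z",  # EventBridge-style UTC timestamp
}

# Local smoke test; assumes the collector's modules and AWS credentials are
# available in the environment (not part of the deployed code):
# from lambda_function_sps import lambda_handler
# print(lambda_handler(sample_event, None))
```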