Skip to content

Commit

Permalink
Merge pull request #505 from ddps-lab/azure-sps-for-PR
Browse files Browse the repository at this point in the history
AZURE SPS 수집 기능 코드 Main Branch로 Merge
  • Loading branch information
krtaiyang authored Feb 6, 2025
2 parents 289a821 + cf52757 commit 7933dae
Show file tree
Hide file tree
Showing 16 changed files with 1,286 additions and 80 deletions.
35 changes: 23 additions & 12 deletions .github/workflows/azure-lambda-sync.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,14 @@ on:
branches:
- 'main'
paths:
- 'collector/spot-dataset/azure/lambda/current_collector/**'
- 'utility/slack_msg_sender.py'
- 'collector/spot-dataset/azure/lambda/current_collector/compare_data.py'
- 'collector/spot-dataset/azure/lambda/current_collector/lambda_function.py'
- 'collector/spot-dataset/azure/lambda/current_collector/load_if.py'
- 'collector/spot-dataset/azure/lambda/current_collector/load_price.py'
- 'collector/spot-dataset/azure/lambda/current_collector/merge_df.py'
- 'collector/spot-dataset/azure/lambda/current_collector/upload_data.py'
- 'const_config.py'

- 'collector/spot-dataset/azure/lambda/current_collector/utill/**'
env:
AWS_ACCESS_KEY_ID: ${{ secrets.SPOTRANK_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.SPOTRANK_SECRET_ACCESS_KEY }}
Expand All @@ -19,14 +23,21 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: checkout source code
uses: actions/checkout@v1
- name: Zip lambda_function code
uses: actions/checkout@v4
- name: Zip Lambda function code
run: |
zip -j ./collector/spot-dataset/azure/lambda/current_collector/azure_lambda.zip ./collector/spot-dataset/azure/lambda/current_collector/* ./const_config.py ./utility/slack_msg_sender.py
cd ./collector/spot-dataset/azure/lambda/current_collector/
zip -r azure_lambda.zip ./utill/*
cd ../../../../../
mv ./collector/spot-dataset/azure/lambda/current_collector/azure_lambda.zip ./
- name: Deploy to lambda
rm -f azure_lambda.zip
zip -j azure_lambda.zip \
collector/spot-dataset/azure/lambda/current_collector/compare_data.py \
collector/spot-dataset/azure/lambda/current_collector/lambda_function.py \
collector/spot-dataset/azure/lambda/current_collector/load_if.py \
collector/spot-dataset/azure/lambda/current_collector/load_price.py \
collector/spot-dataset/azure/lambda/current_collector/merge_df.py \
collector/spot-dataset/azure/lambda/current_collector/upload_data.py \
const_config.py
zip -r azure_lambda.zip collector/spot-dataset/azure/lambda/current_collector/utill
- name: Deploy to AWS Lambda
run: |
aws lambda update-function-code --function-name azure-collector --zip-file fileb://azure_lambda.zip
aws lambda update-function-code --function-name azure-collector --zip-file fileb://azure_lambda.zip
43 changes: 43 additions & 0 deletions .github/workflows/azure-sps-lambda-sync.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
name: deploy azure sps files to lambda
on:
push:
branches:
- 'main'
paths:
- 'collector/spot-dataset/azure/lambda/current_collector/load_price.py'
- 'collector/spot-dataset/azure/lambda/current_collector/upload_data.py'
- 'collector/spot-dataset/azure/lambda/current_collector/lambda_function_sps.py'
- 'collector/spot-dataset/azure/lambda/current_collector/load_sps.py'
- 'const_config.py'
- 'collector/spot-dataset/azure/lambda/current_collector/utill/**'
- 'collector/spot-dataset/azure/lambda/current_collector/sps_module/**'

env:
AWS_ACCESS_KEY_ID: ${{ secrets.SPOTRANK_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.SPOTRANK_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: 'us-west-2'

jobs:
deploy_source:
name: deploy lambda from source
runs-on: ubuntu-latest
steps:
- name: checkout source code
uses: actions/checkout@v4
- name: Zip Lambda function code
run: |
rm -f azure_sps_lambda.zip
zip -j azure_sps_lambda.zip \
collector/spot-dataset/azure/lambda/current_collector/load_price.py \
collector/spot-dataset/azure/lambda/current_collector/upload_data.py \
collector/spot-dataset/azure/lambda/current_collector/lambda_function_sps.py \
collector/spot-dataset/azure/lambda/current_collector/load_sps.py \
const_config.py
zip -r azure_sps_lambda.zip \
collector/spot-dataset/azure/lambda/current_collector/utill \
collector/spot-dataset/azure/lambda/current_collector/sps_module
- name: Deploy to AWS Lambda
run: |
aws lambda update-function-code --function-name azure-sps-collector --zip-file fileb://azure_sps_lambda.zip
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import os
import json
import boto3
import slack_msg_sender
import pandas as pd
from const_config import AzureCollector, Storage
from datetime import datetime, timezone
Expand All @@ -10,6 +9,7 @@
from load_price import collect_price_with_multithreading
from upload_data import upload_timestream, update_latest, save_raw, query_selector, upload_cloudwatch
from compare_data import compare
from utill import pub_service

STORAGE_CONST = Storage()
AZURE_CONST = AzureCollector()
Expand All @@ -32,15 +32,15 @@ def azure_collector(timestamp):
except Exception as e:
result_msg = """AZURE PRICE MODULE EXCEPTION!\n %s""" % (e)
data = {'text': result_msg}
slack_msg_sender.send_slack_message(result_msg)
pub_service.send_slack_message(result_msg)
is_price_fetch_success = False

try:
eviction_df = load_if()
except Exception as e:
result_msg = """AZURE IF MODULE EXCEPTION!\n %s""" % (e)
data = {'text': result_msg}
slack_msg_sender.send_slack_message(result_msg)
pub_service.send_slack_message(result_msg)
is_if_fetch_success = False

if is_price_fetch_success and is_if_fetch_success:
Expand All @@ -54,7 +54,7 @@ def azure_collector(timestamp):
else:
result_msg = """AZURE PRICE MODULE AND IF MODULE EXCEPTION!"""
data = {'text': result_msg}
slack_msg_sender.send_slack_message(result_msg)
pub_service.send_slack_message(result_msg)
return

try:
Expand All @@ -81,7 +81,7 @@ def azure_collector(timestamp):
except Exception as e:
result_msg = """AZURE UPLOAD MODULE EXCEPTION!\n %s""" % (e)
data = {'text': result_msg}
slack_msg_sender.send_slack_message(result_msg)
pub_service.send_slack_message(result_msg)
if_exception_flag = False

def lambda_handler(event, context):
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
import load_sps
from sps_module import sps_shared_resources
from datetime import datetime
from upload_data import update_latest_sps, save_raw_sps
from utill import pub_service

def lambda_handler(event, _):
    """Entry point for the Azure SPS (Spot Placement Score) collector Lambda.

    Dispatches on the ``action`` field of the EventBridge event:
    ``First_Time`` runs the initial full collection, ``Every_10Min`` runs the
    periodic collection (skipping the UTC 15:00 slot), anything else is
    rejected with a 400 response. Failures are reported to Slack.

    Args:
        event: EventBridge payload; ``action`` selects the collection mode and
            ``time`` is the event's UTC timestamp ("%Y-%m-%dT%H:%M:%SZ").
        _: Lambda context (unused).

    Returns:
        dict: response built by ``handle_response``.
    """
    # Bind defaults up front so the except handler below can always reference
    # them, even if extraction from `event` fails before they are assigned.
    action = "default"
    event_time_utc = "default"
    try:
        # Get the action parameter passed by the EventBridge rule.
        action = event.get("action", "default")
        # UTC time string passed by EventBridge.
        event_time_utc = event.get("time", "default")
        event_time_utc = datetime.strptime(event_time_utc, "%Y-%m-%dT%H:%M:%SZ")
        desired_count = sps_shared_resources.time_desired_count_map.get(event_time_utc.strftime("%H:%M"), 1)

        print(f"Lambda triggered: action={action}, event_time_utc={event_time_utc}, desired_count={desired_count}")

        # EventBridge sends the 00:00 invocation as First_Time; all others as Every_10Min.
        if action == "First_Time":
            print(f"Executing: collect_spot_placement_score_first_time (desired_count={desired_count})")
            sps_res_df = load_sps.collect_spot_placement_score_first_time(desired_count=desired_count)

        elif action == "Every_10Min":
            if event_time_utc.hour == 15 and event_time_utc.minute == 0:
                return handle_response(200, f"Action '{action}' executed successfully. The scheduled time (UTC 15:00, KST 00:00) has been skipped.")

            print(f"Executing: collect_spot_placement_score (desired_count={desired_count})")
            sps_res_df = load_sps.collect_spot_placement_score(desired_count=desired_count)

        else:
            return handle_response(400, f"Invalid action: '{action}'. Time: {event_time_utc}")

        # Explicit checks instead of `assert`: asserts are stripped under
        # `python -O`, which would silently skip result validation/persistence.
        if sps_res_df is None:
            raise RuntimeError("SPS collection returned no data")
        if not handle_res_df(sps_res_df, event_time_utc):
            raise RuntimeError("Failed to store SPS results (latest/raw upload)")
        return handle_response(200, f"Action '{action}' executed successfully")

    except Exception as e:
        error_msg = f"AZURE SPS MODULE EXCEPTION!\n Error: {e}"
        pub_service.send_slack_message(error_msg)
        return handle_response(400, f"Action '{action}' executed failed. Time: {event_time_utc}.", error_msg)


def handle_res_df(sps_res_df, event_time_utc):
    """Persist the collected SPS dataframe to both stores.

    Both writes are always attempted; truthy only when the "latest"
    update and the raw snapshot save both succeed.
    """
    latest_ok = update_latest_sps(sps_res_df, event_time_utc)
    raw_ok = save_raw_sps(sps_res_df, event_time_utc)
    return latest_ok and raw_ok

def handle_response(status_code, body, error_message=None):
    """Assemble a Lambda-style response payload, log it, and return it.

    Args:
        status_code: HTTP-like status code for the response.
        body: Human-readable result message.
        error_message: Optional error detail; the key is added only when
            the value is truthy.

    Returns:
        dict: the response payload that was printed.
    """
    payload = {"statusCode": status_code, "body": body}
    if error_message:
        payload["error_message"] = error_message
    print(f"Response: {payload}")
    return payload
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
import json
import boto3
import requests
import pandas as pd
import slack_msg_sender
from utill import pub_service
from utill.azure_auth import get_token


Expand Down Expand Up @@ -72,4 +70,4 @@ def load_if():
except Exception as e:
result_msg = """AZURE Exception when load_if\n %s""" % (e)
data = {'text': result_msg}
slack_msg_sender.send_slack_message(result_msg)
pub_service.send_slack_message(result_msg)
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import threading
from const_config import AzureCollector
from concurrent.futures import ThreadPoolExecutor
import slack_msg_sender
from utill import pub_service

AZURE_CONST = AzureCollector()

Expand Down Expand Up @@ -133,10 +133,10 @@ def collect_price_with_multithreading():

if response_dict:
for i in response_dict:
slack_msg_sender.send_slack_message(f"{i} respones occurred {response_dict[i]} times")
pub_service.send_slack_message(f"{i} respones occurred {response_dict[i]} times")

price_df = pd.DataFrame(price_list)
savings_df = preprocessing_price(price_df)
savings_df = savings_df.drop_duplicates(subset=['InstanceTier', 'InstanceType', 'Region'], keep='first')

return savings_df
return savings_df
Loading

0 comments on commit 7933dae

Please sign in to comment.