Gopi #34 (Open)
Binary file modified .DS_Store
Binary file not shown.
3 changes: 3 additions & 0 deletions .dvc/.gitignore
@@ -0,0 +1,3 @@
/cache
/config.local
/tmp
7 changes: 7 additions & 0 deletions .dvc/config
@@ -0,0 +1,7 @@
[core]
    remote = myremote
    autostage = true
['remote "myremote"']
    url = gs://stock_price_prediction_dataset/DVC
['remote "gcs_remote"']
    url = gs://stock_price_prediction_dataset/Data
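
For reference, this is the config DVC writes once the two Google Cloud Storage remotes are registered. A minimal sketch of the commands that would produce an equivalent config, assuming DVC is installed with GCS support and Google Cloud credentials are already available:

# Initialize DVC in the repository (creates .dvc/)
dvc init
# Register the default remote used by `dvc push` / `dvc pull`
dvc remote add -d myremote gs://stock_price_prediction_dataset/DVC
# Register a second remote pointing at the raw data prefix
dvc remote add gcs_remote gs://stock_price_prediction_dataset/Data
# Automatically `git add` the .dvc files that DVC creates or updates
dvc config core.autostage true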
4 changes: 4 additions & 0 deletions .dvcignore
@@ -0,0 +1,4 @@
# Add patterns of files dvc should ignore, which could improve
# the performance. Learn more at
# https://dvc.org/doc/user-guide/dvcignore

78 changes: 78 additions & 0 deletions .github/workflows/cloudbuild.yaml
@@ -0,0 +1,78 @@
steps:
  # Step 1: Clone the repository
  - name: 'gcr.io/cloud-builders/git'
    entrypoint: 'bash'
    args:
      - '-c'
      - |
        git clone https://github.com/IE7374-MachineLearningOperations/StockPricePrediction.git &&
        cd StockPricePrediction &&
        echo "Repository cloned successfully"

  # Step 2: Upload specific files to GCP Bucket
  - name: 'gcr.io/cloud-builders/gsutil'
    args:
      - '-m'
      - 'cp'
      - '-r'
      - 'StockPricePrediction/*.py'
      - 'StockPricePrediction/*.ipynb'
      - 'StockPricePrediction/*.pkl'
      - 'gs://stock_price_prediction_dataset/'

  # Step 3: Install dependencies
  - name: 'gcr.io/cloud-builders/pip'
    args:
      - 'install'
      - '-r'
      - 'StockPricePrediction/requirements.txt'

  # # Step 4: Train the model
  # - name: 'gcr.io/cloud-builders/python'
  #   args:
  #     - 'StockPricePrediction/train.py'

  # # Step 5: Validate the model
  # - name: 'gcr.io/cloud-builders/python'
  #   args:
  #     - 'StockPricePrediction/validate.py'

  # # Step 6: Conditional deployment if validation is successful
  # - name: 'gcr.io/cloud-builders/bash'
  #   id: 'Check Validation'
  #   args:
  #     - '-c'
  #     - |
  #       ACCURACY=$(python StockPricePrediction/validate.py --get_accuracy) &&
  #       if (( $(echo "$ACCURACY > 0.70" | bc -l) )); then
  #         echo "Model accuracy is sufficient, proceeding with deployment";
  #       else
  #         echo "Model accuracy is insufficient, stopping deployment";
  #         exit 1;
  #       fi

  # # Step 7: Save the trained model to GCP Bucket
  # - name: 'gcr.io/cloud-builders/gsutil'
  #   args:
  #     - 'cp'
  #     - 'StockPricePrediction/models/*.h5'
  #     - 'gs://stock_price_prediction_dataset/trained_models/'

  # # Step 8: Run Unit Tests
  # - name: 'gcr.io/cloud-builders/python'
  #   args:
  #     - '-m'
  #     - 'unittest'
  #     - 'discover'
  #     - '-s'
  #     - 'StockPricePrediction/tests'

# artifacts:
#   objects:
#     location: 'gs://stock_price_prediction_dataset/artifacts/'
#     paths:
#       - 'StockPricePrediction/*.py'
#       - 'StockPricePrediction/*.ipynb'
#       - 'StockPricePrediction/*.h5'

# timeout: '1200s'
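
Note that this file is a Cloud Build configuration rather than a GitHub Actions workflow, so placing it under .github/workflows/ will not make Actions execute it; it would need to be run through Cloud Build itself. A sketch of how it could be submitted or wired to a trigger, assuming the gcloud CLI is authenticated against the target project (the trigger name and branch pattern below are placeholders):

# One-off manual build from the repository root
gcloud builds submit --config=.github/workflows/cloudbuild.yaml .

# Or create a trigger so Cloud Build runs it on pushes to main
gcloud builds triggers create github \
  --name=stockprice-cloudbuild \
  --repo-owner=IE7374-MachineLearningOperations \
  --repo-name=StockPricePrediction \
  --branch-pattern='^main$' \
  --build-config=.github/workflows/cloudbuild.yaml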
84 changes: 84 additions & 0 deletions .github/workflows/model.yml
@@ -0,0 +1,84 @@
name: Model Training

on:
  push:
    branches:
      - main

jobs:
  train_model:
    runs-on: ubuntu-latest

    steps:
      # Step 1: Checkout repository
      - name: Checkout repository
        uses: actions/checkout@v2

      # Step 2: Set up Python
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.10.5'

      # Step 3: Install all dependencies
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      # Step 4: Run pytest suite
      - name: Run Tests
        run: |
          pytest StockPricePrediction/pipeline/airflow/tests --maxfail=1 --disable-warnings

      # Step 5: Authenticate with GCP (Main Branch Only)
      - name: Authenticate to GCP
        if: ${{ github.ref == 'refs/heads/main' }}
        env:
          GCP_PROJECT_ID: ${{ secrets.GCP_PROJECT_ID }}
          GCP_SERVICE_ACCOUNT_KEY: ${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}
        run: |
          echo "${GCP_SERVICE_ACCOUNT_KEY}" | base64 --decode > ${HOME}/gcp-key.json
          gcloud auth activate-service-account --key-file=${HOME}/gcp-key.json
          gcloud config set project ${GCP_PROJECT_ID}

      # # Step 6: Conditional Model Training
      # - name: Trigger Model Training
      #   env:
      #     GCP_BUCKET_NAME: ${{ secrets.GCP_BUCKET_NAME }}
      #   run: |
      #     if [ "${{ github.ref }}" == "refs/heads/main" ]; then
      #       # Production Training on Google Cloud AI Platform
      #       gcloud ai-platform jobs submit training model_training_$(date +%Y%m%d_%H%M%S) \
      #         --region us-central1 \
      #         --module-name trainer.task \
      #         --package-path ./trainer \
      #         --python-version 3.10 \
      #         --runtime-version 2.5 \
      #         --job-dir gs://${GCP_BUCKET_NAME}/models/training_$(date +%Y%m%d_%H%M%S) \
      #         -- \
      #         --additional_training_args
      #     else
      #       # Test training locally for non-main branches
      #       echo "Running model training locally for testing"
      #       python trainer/task.py --test_data ./data/test_data.csv
      #     fi

      # # Step 7: Save Training Logs as Artifacts
      # - name: Upload Training Logs
      #   if: always()
      #   uses: actions/upload-artifact@v3
      #   with:
      #     name: training-logs
      #     path: logs/

      # # Step 8: Send notification on failure
      # - name: Notify on failure
      #   if: failure()
      #   uses: actions/github-script@v6
      #   with:
      #     script: |
      #       github.rest.issues.createComment({
      #         issue_number: context.issue.number,
      #         body: 'Model training failed on `${{ github.ref }}` branch. Please check logs for details.'
      #       })
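
The authentication step above expects GCP_SERVICE_ACCOUNT_KEY to hold a base64-encoded service account JSON key stored as a repository secret, alongside GCP_PROJECT_ID (and GCP_BUCKET_NAME for the commented-out training step). A sketch of how those secrets could be populated with the GitHub CLI; the key file name and project ID are placeholders, and the commands assume `gh` is authenticated against this repository:

# Base64-encode the service account key (GNU coreutils; on macOS use `base64 -i`)
base64 -w0 gcp-key.json | gh secret set GCP_SERVICE_ACCOUNT_KEY
gh secret set GCP_PROJECT_ID --body "my-gcp-project-id"
gh secret set GCP_BUCKET_NAME --body "stock_price_prediction_dataset"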