modrec-workflow/.riahub/workflows/workflow.yaml
Liyu Xiao 6b2ea23be4
Some checks failed
RIA Hub Workflow Demo / ria-demo (push) Failing after 53s
fixed installation packages
2025-06-18 10:11:56 -04:00

133 lines
3.3 KiB
YAML

name: RIA Hub Workflow Demo

# Runs the full ModRec pipeline on every push / PR to main:
# data generation -> HDF5 dataset -> training -> ONNX export -> profiling -> ORT export.
on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

jobs:
  ria-demo:
    # GPU-capable self-hosted runner label (RTX 2080).
    runs-on: ubuntu-latest-2080
    env:
      # Credentials for the RIA Git instance, injected from repository secrets.
      RIAGIT_USERNAME: ${{ secrets.USERNAME }}
      RIAGIT_TOKEN: ${{ secrets.TOKEN }}
    steps:
      # Diagnostic only: report whether the runner actually exposes a GPU.
      - name: Print GPU information
        run: |
          if command -v nvidia-smi &> /dev/null; then
            echo "✅ NVIDIA GPU is available"
            nvidia-smi
          else
            echo "⚠️ No NVIDIA GPU found"
          fi

      - name: Checkout code
        uses: actions/checkout@v4
        with:
          # Recordings/model fixtures are stored in Git LFS.
          lfs: true

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          # Quoted: an unquoted 3.10 would parse as the float 3.1.
          python-version: "3.10"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      # Stage 1: synthesize raw signal recordings.
      - name: 1. Generate Recordings
        run: |
          mkdir -p data/recordings
          PYTHONPATH=. python scripts/dataset_building/data_gen.py --output-dir data/recordings
          echo "recordings produced successfully"

      - name: 📦 Zip and Upload Recordings
        run: |
          echo "📦 Zipping recordings..."
          zip -qr recordings.zip data/recordings
        shell: bash

      - name: ⬆️ Upload zipped recordings
        uses: actions/upload-artifact@v3
        with:
          name: recordings
          path: recordings.zip

      # Stage 2: pack recordings into an HDF5 training dataset.
      - name: 2. Build HDF5 Dataset
        run: |
          mkdir -p data/dataset
          PYTHONPATH=. python scripts/dataset_building/produce_dataset.py
          echo "datasets produced successfully"
        shell: bash

      - name: 📦 Zip Dataset
        run: zip -qr dataset.zip data/dataset

      - name: 📤 Upload Zipped Dataset
        uses: actions/upload-artifact@v3
        with:
          name: dataset
          path: dataset.zip

      # Stage 3: train the recognition model.
      - name: 3. Train Model
        env:
          # Disable NNPACK; it is unsupported/unstable on this runner.
          NO_NNPACK: 1
          PYTORCH_NO_NNPACK: 1
        run: |
          mkdir -p checkpoint_files
          PYTHONPATH=. python scripts/training/train.py
          echo "training model"

      - name: Upload Checkpoints
        uses: actions/upload-artifact@v3
        with:
          name: checkpoints
          path: checkpoint_files/*

      # Stage 4: export the trained checkpoint to ONNX.
      - name: 4. Convert to ONNX file
        env:
          NO_NNPACK: 1
          PYTORCH_NO_NNPACK: 1
        run: |
          mkdir -p onnx_files
          MKL_DISABLE_FAST_MM=1 PYTHONPATH=. python scripts/onnx/convert_to_onnx.py
          echo "building inference app"

      - name: Upload ONNX file
        uses: actions/upload-artifact@v3
        with:
          name: onnx-file
          path: onnx_files/inference_recognition_model.onnx

      - name: List checkpoint directory
        run: ls -lh onnx_files

      # Stage 5: run ONNX Runtime profiling over the exported model.
      - name: 5. Profile ONNX model
        run: |
          # Fixed path typo: was "scrips/onnx/profile_onnx.py".
          PYTHONPATH=. python scripts/onnx/profile_onnx.py

      - name: Upload JSON profiling data
        uses: actions/upload-artifact@v3
        with:
          name: profile-data
          path: '**/onnxruntime_profile_*.json'

      # Stage 6: convert the ONNX model to ORT format for on-device inference.
      - name: 6. Convert to ORT file
        run: |
          # Fixed invalid invocation: "python -m scripts/ort/convert_to_ort.py"
          # (-m requires a dotted module path, not a slash path). Invoke the
          # script directly, matching the other pipeline steps.
          PYTHONPATH=. python scripts/ort/convert_to_ort.py

      - name: Upload ORT file
        uses: actions/upload-artifact@v3
        with:
          name: ort-file
          path: ort_files/inference_recognition_model.ort