Documentation and formatting updates #1

Merged
Liyux merged 12 commits from michael-review into main 2025-07-08 10:50:41 -04:00
Showing only changes of commit 1267833806 - Show all commits

View File

@@ -1,4 +1,4 @@
name: RIA Hub Workflow Demo
name: Modulation Recognition Demo
on:
push:
@@ -11,9 +11,6 @@ on:
jobs:
ria-demo:
runs-on: ubuntu-latest-2080
env:
RIAGIT_USERNAME: ${{ secrets.USERNAME }}
RIAGIT_TOKEN: ${{ secrets.TOKEN }}
steps:
- name: Print GPU information
run: |
@@ -24,7 +21,7 @@ jobs:
echo "⚠️ No NVIDIA GPU found"
fi
- name: Checkout code
- name: Checkout project code
uses: actions/checkout@v4
with:
lfs: true
@@ -42,13 +39,10 @@ jobs:
utils \
-r requirements.txt
- name: 1. Generate Recordings
run: |
mkdir -p data/recordings
PYTHONPATH=. python scripts/dataset_building/data_gen.py --output-dir data/recordings
echo "recordings produced successfully"
PYTHONPATH=. python scripts/dataset_manager/data_gen.py --output-dir data/recordings
- name: ⬆️ Upload recordings
uses: actions/upload-artifact@v3
@@ -59,11 +53,10 @@ jobs:
- name: 2. Build HDF5 Dataset
run: |
mkdir -p data/dataset
PYTHONPATH=. python scripts/dataset_building/produce_dataset.py
echo "datasets produced successfully"
PYTHONPATH=. python scripts/dataset_manager/produce_dataset.py
shell: bash
- name: 📤 Upload Dataset
- name: ⬆️ Upload Dataset
uses: actions/upload-artifact@v3
with:
name: dataset
@@ -75,34 +68,30 @@ jobs:
PYTORCH_NO_NNPACK: 1
run: |
mkdir -p checkpoint_files
PYTHONPATH=. python scripts/training/train.py 2>/dev/null
echo "training model"
PYTHONPATH=. python scripts/model_builder/train.py 2>/dev/null
- name: 4. Plot Model
env:
NO_NNPACK: 1
PYTORCH_NO_NNPACK: 1
run: |
PYTHONPATH=. python scripts/training/plot_data.py 2>/dev/null
PYTHONPATH=. python scripts/model_builder/plot_data.py 2>/dev/null
- name: Upload Checkpoints
- name: ⬆️ Upload Checkpoints
uses: actions/upload-artifact@v3
with:
name: checkpoints
path: checkpoint_files/*
- name: 5. Convert to ONNX file
- name: 5. Export model to ONNX graph
env:
NO_NNPACK: 1
PYTORCH_NO_NNPACK: 1
run: |
mkdir -p onnx_files
MKL_DISABLE_FAST_MM=1 PYTHONPATH=. python scripts/onnx/convert_to_onnx.py 2>/dev/null
echo "building inference app"
MKL_DISABLE_FAST_MM=1 PYTHONPATH=. python scripts/application_packager/convert_to_onnx.py 2>/dev/null
- name: Upload ONNX file
- name: ⬆️ Upload ONNX file
uses: actions/upload-artifact@v3
with:
name: onnx-file
@@ -110,21 +99,20 @@ jobs:
- name: 6. Profile ONNX model
run: |
PYTHONPATH=. python scripts/onnx/profile_onnx.py
PYTHONPATH=. python scripts/application_packager/profile_onnx.py
- name: Upload JSON profiling data
- name: ⬆️ Upload JSON trace
uses: actions/upload-artifact@v3
with:
name: profile-data
path: '**/onnxruntime_profile_*.json'
- name: 7. Convert to ORT file
- name: 7. Convert ONNX graph to an ORT file
run: |
PYTHONPATH=. python scripts/ort/convert_to_ort.py
PYTHONPATH=. python scripts/application_packager/convert_to_ort.py
- name: Upload ORT file
- name: ⬆️ Upload ORT file
uses: actions/upload-artifact@v3
with:
name: ort-file
path: ort_files/inference_recognition_model.ort