File size: 2,102 Bytes
45b0ed8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
#!/bin/bash
#SBATCH --job-name=moge-lotus
#SBATCH --output=/home/ywan0794/MoGe/moge_lotus_%j.log
#SBATCH --error=/home/ywan0794/MoGe/moge_lotus_%j.log
#SBATCH --open-mode=append
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=4
#SBATCH --gres=gpu:H100:1
#SBATCH --time=0-08:00:00
#SBATCH --mem=40G
#SBATCH --nodelist=erinyes

# SLURM job: run the MoGe evaluation harness against the Lotus depth baseline.
# Requires: conda env "lotus" with torch + CUDA; repo checkouts under /home/ywan0794.

# Fail fast: abort on unhandled errors, unset variables, and mid-pipeline failures,
# so a broken step can't fall through to a bogus "Evaluation completed" message.
set -euo pipefail

export PYTHONUNBUFFERED=1

cd /home/ywan0794/MoGe || exit 1

# conda's activation scripts historically reference unset variables; relax -u
# around them, then restore it.
set +u
source /home/ywan0794/miniconda3/etc/profile.d/conda.sh
conda activate lotus
set -u

# Point the CUDA toolchain at the conda-provided toolkit, and make torch's
# bundled libs resolvable. ${VAR:-} guards keep -u happy when the vars start unset.
export CUDA_HOME="$CONDA_PREFIX"
export PATH="$CUDA_HOME/bin:$PATH"
export LD_LIBRARY_PATH="$CUDA_HOME/lib64:${LD_LIBRARY_PATH:-}"
# NOTE(review): python3.10 is hard-coded — update if the "lotus" env moves to
# another Python minor version.
export LD_LIBRARY_PATH="$CONDA_PREFIX/lib/python3.10/site-packages/torch/lib:$LD_LIBRARY_PATH"
export PYTHONPATH="${PYTHONPATH:-}:$(pwd)"

echo "============================================"
echo "Activated conda environment: ${CONDA_DEFAULT_ENV:-<none>}"
echo "CUDA_HOME: $CUDA_HOME"
echo "============================================"

echo "=== GPU Info ==="
nvidia-smi

# Sanity check: fail the job early if torch can't see the GPU.
python -c "import torch; print('CUDA:', torch.cuda.is_available(), torch.cuda.get_device_name(0) if torch.cuda.is_available() else '')"

TIMESTAMP=$(date +"%Y%m%d_%H%M%S")
readonly TIMESTAMP
readonly REPO=/home/ywan0794/EvalMDE/Lotus
readonly PRETRAINED=jingheya/lotus-depth-d-v2-0-disparity
readonly CONFIG=/home/ywan0794/MoGe/configs/eval/all_benchmarks.json
readonly OUT_DIR=eval_output
mkdir -p "$OUT_DIR"

echo "============================================"
echo "Starting MoGe Eval for Lotus at $(date)"
echo "Repo: $REPO"
echo "Checkpoint: $PRETRAINED"
echo "Config: $CONFIG"
echo "============================================"

# Lotus disparity v2 regression: --disparity flag tells the wrapper to emit
# `disparity_affine_invariant`. For depth ckpts (e.g. lotus-depth-d-v1-0), drop --disparity.
python moge/scripts/eval_baseline.py \
    --baseline baselines/lotus.py \
    --config "$CONFIG" \
    --output "${OUT_DIR}/lotus_${TIMESTAMP}.json" \
    --repo "$REPO" \
    --pretrained "$PRETRAINED" \
    --mode regression \
    --task_name depth \
    --disparity \
    --timestep 999

echo "============================================"
echo "Evaluation completed at $(date)"
echo "============================================"