# Makefile — developer utility targets: self-documenting help, workspace
# cleanup, git shortcuts, pytest runners, and SGE (qsub) job submission
# for training / evaluation on the GPU cluster.
# All targets in this file are commands, not files. Declare them phony so a
# stray file named e.g. "test" or "clean" can never shadow a target and make
# it silently report "up to date".
.PHONY: help clean clean-logs hook push pull test test-full train-debug eval-debug train eval sleep

# Self-documenting help: greps this Makefile for "target: ## description"
# lines and pretty-prints them (target name in cyan via \033[36m).
help: ## Show help
	@grep -E '^[.a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
# Remove build artefacts, Python bytecode caches, tool caches, and stray
# grid-job output files. The previous `find . | grep -E ... | xargs rm -rf`
# pipelines broke on paths containing whitespace, invoked `rm -rf` with no
# arguments when nothing matched, and used unescaped dots that over-matched;
# `find -name/-prune -exec/-delete` handles all of those safely.
clean: ## Clean autogenerated files
	rm -rf dist
	find . -type f -name "*.DS_Store" -ls -delete
	find . -type f -name "sleep.sh.e*" -ls -delete
	find . -type d -name "__pycache__" -prune -exec rm -rf {} +
	find . -type f \( -name "*.pyc" -o -name "*.pyo" \) -delete
	find . -type d -name ".pytest_cache" -prune -exec rm -rf {} +
	find . -type d -name ".ipynb_checkpoints" -prune -exec rm -rf {} +
	rm -f .coverage
# Delete the contents of the grid-job log directory and any notebook
# Lightning logs. NOTE(review): `**` has no recursive meaning under plain
# /bin/sh — it globs like `*`, which still matches everything directly
# inside these directories, so the directories themselves are kept.
clean-logs: ## Clean logs
	rm -rf LOGS/**
	rm -rf notebooks/lightning_logs/**
# Run every configured pre-commit hook over the whole repository
# (--all-files is the long form of -a).
hook: ## Run pre-commit hooks
	pre-commit run --all-files
# Stage everything, commit with a fixed message, and push to main.
# Fixed: the remote was misspelled "orgin", so the final push always failed.
# NOTE(review): `git commit` exits non-zero when there is nothing to commit,
# which aborts the push step — presumably acceptable for this workflow.
push: ## Fast Push changes to the main branch
	git add .
	git commit -m "make push all"
	git push origin main
# Update the current branch, then merge in the latest origin/main.
# The first `git pull` refreshes this branch's own upstream; the second
# explicitly pulls main into it.
pull: ## Merge changes from main branch to your current branch
	git pull
	git pull origin main
# Quick test cycle: -k "not slow" deselects any test whose id matches
# "slow" (pytest keyword expression).
test: ## Run not slow tests
	pytest -k "not slow"
# Full test suite, including the slow tests excluded by `make test`.
test-full: ## Run all tests
	pytest
# Run training directly in the current shell (no qsub) — for local debugging.
train-debug: ## Train the model
	python ./train.py
# Run evaluation directly in the current shell against a hard-coded
# checkpoint on the libri-clean split. NOTE(review): the checkpoint path is
# machine-specific — consider making it a variable if others use this target.
eval-debug: ## Evaluate the model
	python ./evaluate_mp.py /data/milsrg1/huggingface/cache/efb48/diffsep/checkpoint.pt --split libri-clean
# Submit the training script to the SGE grid: -cwd runs in this directory,
# -S selects bash, -l requests the cuda-low queue / GPU resources, -o sets
# the stdout log path under LOGS/. NOTE(review): the trailing comma after
# gpuclass="*" in the -l list looks like a typo — confirm qsub accepts it.
train: ## Train the model
	qsub -cwd -S /bin/bash -l qp=cuda-low,tests=0,mem_grab=0M,osrel="*",gpuclass="*", -o LOGS/diffsep_train_1000 scripts/train.sh
# Submit the evaluation script to the SGE grid (same resource request as
# `train`). Added the "## ..." annotation so this target shows up in
# `make help` like every other target.
eval: ## Evaluate the model on the grid
	qsub -cwd -S /bin/bash -l qp=cuda-low,tests=0,mem_grab=0M,osrel="*",gpuclass="*", -o LOGS/diffsep_eval_1000 scripts/eval_libri.sh
# Submit scripts/sleep.sh to the SGE grid, logging to LOGS/sleeplog.
# Added the "## ..." annotation so this target shows up in `make help`.
# NOTE(review): presumably used to hold a GPU slot for interactive work —
# confirm against scripts/sleep.sh.
sleep: ## Submit the sleep.sh helper job to the grid
	qsub -cwd -S /bin/bash -l qp=cuda-low,tests=0,mem_grab=0M,osrel="*",gpuclass="*", -o LOGS/sleeplog scripts/sleep.sh