Commit

Merge branch 'main' of https://github.com/ulissigroup/finetuna into main
jmusiel committed May 15, 2024
2 parents 8577c98 + 79ddb58 commit 40385b9
Showing 11 changed files with 5 additions and 10 deletions.

README.md (2 changes: 1 addition & 1 deletion)
@@ -28,6 +28,6 @@ You are all set! Now in your VASP input folder, run the calculation by: `finetun

### Usage

-If you have an ASE atoms object, see example [1](https://github.com/ulissigroup/finetuna/blob/main/examples/online_al_example.py) and [2](https://github.com/ulissigroup/finetuna/blob/main/examples/online_al_beef_example.py).
+If you have an ASE atoms object, see example [1](https://github.com/ulissigroup/finetuna/blob/main/examples/online_al/online_al_example.py) and [2](https://github.com/ulissigroup/finetuna/blob/main/examples/beef/online_al_beef_example.py).

If you have VASP input files (INCAR, KPOINTS, POTCAR, and POSCAR), see example [3](https://github.com/ulissigroup/finetuna/tree/main/finetuna/vasp_wrapper).
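
For context on what examples 1 and 2 above demonstrate, below is a minimal sketch of the online active-learning pattern that also appears in the hunks further down in this diff: a FinetunerCalc ML potential plus an OnlineLearner wrapping a VaspInteractive parent calculator, attached to the ASE atoms object like an ordinary calculator. The import paths, constructor arguments, and every file/checkpoint path here are assumptions or placeholders inferred from this diff, not verbatim FINETUNA API; the linked example scripts are the authoritative reference.

```python
from ase.io import Trajectory
from ase.optimize import BFGS
from vasp_interactive import VaspInteractive

# Assumed import locations, inferred from the package layout shown in this diff.
from finetuna.ml_potentials.finetuner_calc import FinetunerCalc
from finetuna.online_learner.online_learner import OnlineLearner

if __name__ == "__main__":
    # Starting structure taken from an existing trajectory (placeholder path).
    initial_structure = Trajectory("my_structure.traj")[0]

    # ML potential fine-tuned from a pretrained OCP checkpoint (placeholder path).
    ml_potential = FinetunerCalc(
        checkpoint_path="/path/to/gemnet_checkpoint.pt",
        mlp_params={},
    )

    # Parent DFT calculator; VASP settings omitted here, pass your INCAR-style
    # keywords as in the example scripts. The online learner decides per step
    # whether to trust the ML prediction or query DFT.
    with VaspInteractive() as parent_calc:
        learner = OnlineLearner(
            learner_params={"query_every_n_steps": 100},
            parent_dataset=[],
            ml_potential=ml_potential,
            parent_calc=parent_calc,
        )
        initial_structure.calc = learner

        # Relax with a standard ASE optimizer; DFT is called only on queries.
        dyn = BFGS(initial_structure, trajectory="online_al_relax.traj")
        dyn.run(fmax=0.03)
```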

examples/N2H_Ag111_dissociation/N2H_Ag111.py (1 change: 0 additions & 1 deletion)
@@ -7,7 +7,6 @@
from finetuna.utils import calculate_surface_k_points

if __name__ == "__main__":
-
traj = Trajectory("N2H_Ag111.traj") # change this path to your trajectory file

ml_potential = FinetunerCalc(

examples/beef/online_al_beef_example.py (1 change: 0 additions & 1 deletion)
@@ -71,7 +71,6 @@
vasp_inputs["ibrion"] = -1

with VaspInteractive(**vasp_inputs) as parent_calc:
-
learner = OnlineLearner(
learner_params={
"query_every_n_steps": 100,

examples/online_al/online_al_example.py (1 change: 0 additions & 1 deletion)
@@ -7,7 +7,6 @@
from finetuna.utils import calculate_surface_k_points

if __name__ == "__main__":
-
traj = Trajectory("random1447590.traj") # change this path to your trajectory file

ml_potential = FinetunerCalc(

examples/quantum_espresso/qe_gpu_online_al_example.py (1 change: 0 additions & 1 deletion)
@@ -15,7 +15,6 @@
import os

if __name__ == "__main__":
-
traj = Trajectory("ch3_cu_final.traj") # change this path to your trajectory file

ml_potential = FinetunerCalc(

finetuna/atomistic_methods.py (3 changes: 2 additions & 1 deletion)
@@ -23,7 +23,8 @@ def __init__(self, starting_images, intermediate_samples=3):
----------
starting_images: list. Initial and final images to be used for the NEB.
-intermediate_samples: int. Number of intermediate samples to be used in constructing the NEB"""
+intermediate_samples: int. Number of intermediate samples to be used in constructing the NEB
+"""

self.starting_images = copy.deepcopy(starting_images)
self.intermediate_samples = intermediate_samples

finetuna/finetuner_utils/utils.py (1 change: 1 addition & 0 deletions)
@@ -26,6 +26,7 @@ def close_db(self):
from ocpmodels.common.registry import registry
from ocpmodels.models.gemnet.gemnet import GemNetT

+
# imported in __init__.py
@registry.register_model("gemnet_t_uncertainty")
class GemNetTUncertainty(GemNetT):

finetuna/ml_potentials/finetuner_ensemble_calc.py (1 change: 0 additions & 1 deletion)
@@ -48,7 -48,6 @@ def __init__(
checkpoint_paths: "list[str]",
mlp_params: dict = {},
) -> None:
-
# self.model_classes = model_classes
# self.model_paths = model_paths
self.checkpoint_paths = checkpoint_paths

finetuna/mongo.py (1 change: 0 additions & 1 deletion)
@@ -200,7 +200,6 @@ def _make_results_dict(atoms):
# Results. This may duplicate information in the calculator,
# but we have no control on what the calculator does.
if calculator:
-
if not calculator.calculation_required(atoms, ["energy"]):
results_dict["energy"] = atoms.get_potential_energy(apply_constraint=False)


finetuna/ocp_models/gemnet_t/int_descriptor_gemnet_t.py (1 change: 0 additions & 1 deletion)
@@ -21,7 +21,6 @@ def __init__(
checkpoint_path,
cpu=True,
):
-
if cpu:
map_location = torch.device("cpu")
else:

finetuna/tests/cases/online_ft_gemnet_oc_CuNP_test.py (2 changes: 1 addition & 1 deletion)
@@ -31,6 +31,6 @@ def get_al_config(cls) -> dict:
},
}
al_config["ocp"] = {
"checkpoint_path": "/home/jovyan/shared-scratch/ocp_checkpoints/for_finetuna/public_checkpoints/scaling_attached/gemnet_oc_base_oc20_oc22_attscale.pt",
"checkpoint_path": "/home/jovyan/shared-scratch/ocp_checkpoints/for_finetuna/new_public_checkpoints/gnoc_finetune_all_s2ef.pt",
}
return al_config
