Commit 59c3199
Merge branch 'develop' into 'main'
Release 0.2
gmertes committed Oct 16, 2024
2 parents d64cf6e + c3adf98 · commit 59c3199
Showing 3 changed files with 6 additions and 1 deletion.
CHANGELOG.md (1 addition, 0 deletions)

@@ -9,6 +9,7 @@ Please add your functional changes to the appropriate section in the PR.
 Keep it human-readable, your future self will thank you!
 
 ## [Unreleased](https://github.com/ecmwf/anemoi-training/compare/0.1.0...HEAD)
+- Make pin_memory of the Dataloader configurable (#64)
 
 ### Added

src/anemoi/training/config/dataloader/native_grid.yaml (1 addition, 0 deletions)

@@ -1,4 +1,5 @@
 prefetch_factor: 2
+pin_memory: True
 
 num_workers:
   training: 8
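The new key defaults to True wherever it is read, so configs written before this release keep the previous hard-coded behaviour. Below is a minimal sketch (not part of the commit) of that fallback, using OmegaConf, the library behind the DictConfig seen in datamodule.py; the config contents are illustrative.

# Minimal sketch of how the new pin_memory key is read via DictConfig.get.
from omegaconf import OmegaConf

config = OmegaConf.create(
    {
        "dataloader": {
            "prefetch_factor": 2,
            "num_workers": {"training": 8},
        }
    }
)

# Older configs have no pin_memory key, so .get falls back to True
# and memory pinning stays enabled, exactly as before this change.
assert config.dataloader.get("pin_memory", True) is True

# Setting pin_memory: False in native_grid.yaml (or overriding it)
# disables pinning.
config.dataloader.pin_memory = False
assert config.dataloader.get("pin_memory", True) is False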
src/anemoi/training/data/datamodule.py (4 additions, 1 deletion)

@@ -96,6 +96,9 @@ def __init__(self, config: DictConfig) -> None:
         )
         self.config.dataloader.training.end = self.config.dataloader.validation.start - 1
 
+        if not self.config.dataloader.get("pin_memory", True):
+            LOGGER.info("Data loader memory pinning disabled.")
+
     def _check_resolution(self, resolution: str) -> None:
         assert (
             self.config.data.resolution.lower() == resolution.lower()
@@ -185,7 +188,7 @@ def _get_dataloader(self, ds: NativeGridDataset, stage: str) -> DataLoader:
             num_workers=self.config.dataloader.num_workers[stage],
             # use of pinned memory can speed up CPU-to-GPU data transfers
             # see https://pytorch.org/docs/stable/notes/cuda.html#cuda-memory-pinning
-            pin_memory=True,
+            pin_memory=self.config.dataloader.get("pin_memory", True),
             # worker initializer
             worker_init_fn=worker_init_func,
             # prefetch batches
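For background, pin_memory is a standard torch.utils.data.DataLoader option: pinned (page-locked) host memory can speed up CPU-to-GPU transfers, per the PyTorch notes linked in the comment above. A standalone sketch follows, with a toy dataset standing in for NativeGridDataset.

# Illustrative sketch, not the anemoi-training code path.
import torch
from torch.utils.data import DataLoader, TensorDataset

dataset = TensorDataset(torch.randn(32, 4))
loader = DataLoader(
    dataset,
    batch_size=8,
    # in anemoi-training this is now config.dataloader.get("pin_memory", True)
    pin_memory=True,
)

# Batches come back in page-locked host memory, which makes
# host-to-GPU copies faster and enables non_blocking transfers.
(batch,) = next(iter(loader))
print(batch.is_pinned())  # True with a CUDA device available; without one,
                          # PyTorch warns and skips pinning, so this prints False.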
