Skip to content

Commit

Permalink
[DOCS][TESTS] Replace placeholder `your_module` imports and notes with the actual `zeta` module paths; drop leftover `test_coverage` stubs
Browse files Browse the repository at this point in the history
  • Loading branch information
Kye committed Dec 27, 2023
1 parent 4e5e83a commit 7269d7d
Show file tree
Hide file tree
Showing 11 changed files with 10 additions and 26 deletions.
3 changes: 0 additions & 3 deletions docs/zeta/nn/modules/simple_feedback.md
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,3 @@ This particular sequence ensures that the neural network can learn a rich repres

---

**Notes**:

Remember to replace `"from zeta.nn.modules import SimpleFeedForward"` with the actual import statement depending on where the `SimpleFeedForward` function resides in your project structure. The above examples assume it's placed in a module named `your_module`.
2 changes: 1 addition & 1 deletion docs/zeta/structs/encoderdecoder.md
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ This method executes the forward pass of the module.
```python
# Imports
import torch
from _your_module_ import Encoder, Decoder, EncoderDecoder
from zeta.structs import Encoder, Decoder, EncoderDecoder

# Arguments
args = argparse.Namespace(
Expand Down
6 changes: 3 additions & 3 deletions docs/zeta/training/parallel_wrapper.md
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ This method redirects attribute access to the internal model to allow direct acc

```python
import torch.nn as nn
from zeta.training import ParallelWrapper # assuming the class is in your_module.py
from zeta.training import ParallelWrapper

# Define a model
model = nn.Linear(512, 512)
Expand All @@ -74,7 +74,7 @@ output = model(input)

```python
import torch.nn as nn
from zeta.training import ParallelWrapper # assuming the class is in your_module.py
from zeta.training import ParallelWrapper

# Define a model
model = nn.Linear(512, 512)
Expand All @@ -92,7 +92,7 @@ output = model(input)

```python
import torch.nn as nn
from zeta.training import ParallelWrapper # assuming the class is in your_module.py
from zeta.training import ParallelWrapper

# Define a model
model = nn.Linear(512, 512)
Expand Down
6 changes: 0 additions & 6 deletions tests/nn/modules/test_test_conv_lang.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,9 +90,3 @@ def test_invalid_activation_raises_error():
)


# 6. Test Coverage (requires pytest-cov)
def test_coverage():
pytest.main(["--cov=your_module", "test_your_module.py"])


# Add more tests as needed...
7 changes: 0 additions & 7 deletions tests/nn/modules/test_test_h3_layer.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,10 +54,3 @@ def test_invalid_dimension_raises_error():
with pytest.raises(ValueError):
H3Layer(0)


# 6. Test Coverage (requires pytest-cov)
def test_coverage():
pytest.main(["--cov=your_module", "test_your_module.py"])


# Add more tests as needed...
2 changes: 1 addition & 1 deletion tests/ops/test_mos.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from torch import nn
from zeta.ops.mos import (
MixtureOfSoftmaxes,
) # Replace 'your_module' with your actual module
)


# Create a fixture for initializing the model
Expand Down
2 changes: 1 addition & 1 deletion tests/rl/test_prioritizedreplybuffer.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import torch
from zeta.rl.priortized_replay_buffer import (
PrioritizedReplayBuffer,
) # Replace 'your_module' with the actual module where classes are defined
)


@pytest.fixture
Expand Down
2 changes: 1 addition & 1 deletion tests/rl/test_prioritizedsequencereplybuffer.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import torch
from zeta.rl.priortized_rps import (
PrioritizedSequenceReplayBuffer,
) # Replace 'your_module' with the actual module where classes are defined
)


@pytest.fixture
Expand Down
2 changes: 1 addition & 1 deletion tests/rl/test_sumtree.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import pytest
from zeta.rl.sumtree import (
SumTree,
) # Replace 'your_module' with the actual module where SumTree is defined
)


# Fixture for initializing SumTree instances with a given size
Expand Down
2 changes: 1 addition & 1 deletion tests/training/test_parallel_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import torch.nn as nn

from zeta.training.parallel_wrapper import (
ParallelWrapper, # assuming the class is in your_module.py
ParallelWrapper,
)


Expand Down
2 changes: 1 addition & 1 deletion zeta/nn/modules/test_dense_connect.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import torch.nn as nn
import unittest

from your_module import DenseBlock
from zeta.nn.modules.dense_connect import DenseBlock


class DenseBlockTestCase(unittest.TestCase):
Expand Down

0 comments on commit 7269d7d

Please sign in to comment.