# Torch Lure

## Installation

```bash
pip install torchlure
```

## Usage
```python
import torchlure as lure

# Optimizers
lure.SophiaG(lr=1e-3, weight_decay=0.2)

# Functions
lure.tanh_exp(x)
lure.TanhExp()
lure.quantile_loss(y_pred, y_target, quantile=0.5)
lure.QuantileLoss(quantile=0.5)
lure.RMSNorm(dim=256, eps=1e-6)

# Noise Scheduler
lure.LinearNoiseScheduler(beta=1e-4, beta_end=0.02, num_timesteps=1000)
lure.CosineNoiseScheduler(max_beta=0.999, s=0.008, num_timesteps=1000)

# ReLU KAN
lure.ReLUKAN(width=[11, 16, 16, 2], grid=5, k=3)
lure.create_relukan_network(
    input_dim=11,
    output_dim=2,
    hidden_dim=32,
    num_layers=3,
    grid=5,
    k=3,
)
```
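For reference, `tanh_exp` and `quantile_loss` follow the standard definitions from the literature: the TanhExp activation is `x * tanh(exp(x))`, and the quantile (pinball) loss is `max(q * e, (q - 1) * e)` with `e = y_target - y_pred`. The sketch below is a minimal plain-PyTorch illustration of those formulas; the helper names are made up for this example, and torchlure's exact implementations and signatures may differ.

```python
import torch


def tanh_exp_reference(x: torch.Tensor) -> torch.Tensor:
    # TanhExp activation: x * tanh(exp(x))
    return x * torch.tanh(torch.exp(x))


def quantile_loss_reference(y_pred: torch.Tensor, y_target: torch.Tensor, quantile: float = 0.5) -> torch.Tensor:
    # Pinball loss: max(q * e, (q - 1) * e), averaged over all elements
    err = y_target - y_pred
    return torch.maximum(quantile * err, (quantile - 1) * err).mean()


x = torch.randn(4, 8)
y_pred, y_target = torch.randn(4, 1), torch.randn(4, 1)
print(tanh_exp_reference(x).shape)                      # torch.Size([4, 8])
print(quantile_loss_reference(y_pred, y_target, 0.5))   # scalar loss tensor
```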
## Dataset

```python
import gymnasium as gym
import numpy as np
import torch
from torchlure.datasets import MinariEpisodeDataset, MinariTrajectoryDataset
from torchtyping import TensorType


def return_to_go(rewards: TensorType[..., "T"], gamma: float) -> TensorType[..., "T"]:
    # Undiscounted case: a reversed cumulative sum of the rewards.
    if gamma == 1.0:
        return rewards.flip(-1).cumsum(-1).flip(-1)
    seq_len = rewards.shape[-1]
    rtgs = torch.zeros_like(rewards)
    rtg = torch.zeros_like(rewards[..., 0])
    # Accumulate discounted returns from the last timestep backwards.
    for i in range(seq_len - 1, -1, -1):
        rtg = rewards[..., i] + gamma * rtg
        rtgs[..., i] = rtg
    return rtgs
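
# Sanity check of the discounting above, worked out by hand:
# with rewards [1, 1, 1] and gamma = 0.5 the returns-to-go are
# [1 + 0.5 * 1.5, 1 + 0.5 * 1, 1] = [1.75, 1.5, 1.0].
print(return_to_go(torch.tensor([1.0, 1.0, 1.0]), 0.5))
# tensor([1.7500, 1.5000, 1.0000])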

env = gym.make("Hopper-v4")
minari_dataset = MinariEpisodeDataset("Hopper-random-v0")
minari_dataset.create(env, n_episodes=100, exist_ok=True)
minari_dataset.info()
# Observation space: Box(-inf, inf, (11,), float64)
# Action space: Box(-1.0, 1.0, (3,), float32)
# Total episodes: 100
# Total steps: 2,182

# Wrap the episode dataset into fixed-length trajectories and attach extra
# per-episode fields (here: discounted returns-to-go).
traj_dataset = MinariTrajectoryDataset(
    minari_dataset,
    20,  # traj_len
    {
        "returns": lambda ep: return_to_go(torch.tensor(ep.rewards), 0.99),
    },
)

traj = traj_dataset[2]                  # single trajectory
traj = traj_dataset[[3, 8, 15]]         # list of indices
traj = traj_dataset[np.arange(16)]      # NumPy index array
traj = traj_dataset[torch.arange(16)]   # torch index tensor
traj = traj_dataset[-16:]               # slice

(
    traj["observations"].shape,
    traj["actions"].shape,
    traj["rewards"].shape,
    traj["terminated"].shape,
    traj["truncated"].shape,
    traj["timesteps"].shape,
)
# (torch.Size([16, 20, 4, 4, 16]),
#  torch.Size([16, 20]),
#  torch.Size([16, 20]),
#  torch.Size([16, 20]),
#  torch.Size([16, 20]),
#  torch.Size([16, 20]))
```
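Because each trajectory comes back as a dict of equally shaped tensors, it can also be fed through a standard `torch.utils.data.DataLoader`; the default collate function stacks each key along a new batch dimension. The snippet below is a minimal sketch, assuming `MinariTrajectoryDataset` behaves as a map-style dataset (integer `__getitem__` and `__len__`) as the indexing above suggests.

```python
from torch.utils.data import DataLoader

# Assumes traj_dataset is a map-style dataset returning dicts of tensors,
# so the default collate_fn stacks each key into a batch dimension.
loader = DataLoader(traj_dataset, batch_size=32, shuffle=True)

for batch in loader:
    obs = batch["observations"]  # (32, 20, ...) following the shapes above
    returns = batch["returns"]   # extra field computed by return_to_go
    # ... forward pass / loss computation goes here
    break
```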