mirror of
https://github.com/NousResearch/atropos.git
synced 2026-04-24 17:04:55 +00:00
Convert FOB submodule to regular folder
This commit is contained in:
parent
94f046ad40
commit
94825011a0
74 changed files with 4563 additions and 0 deletions
20
environments/optimizer/FOB/run_optimizer_benchmark.py
Normal file
20
environments/optimizer/FOB/run_optimizer_benchmark.py
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
"""Demo driver: submit a hand-written SGD optimizer to the FOB benchmark.

The candidate optimizer is supplied as a source-code string; the environment
writes it out, generates the experiment YAML, runs the benchmark, and reports
a scalar reward.
"""
from optimizer_benchmark_env import OptimizerBenchmarkEnv

# Source for the candidate optimizer, passed verbatim to the benchmark
# harness. It must expose `configure_optimizers(model, config)` following
# the pytorch_fob engine API (returns a Lightning OptimizerLRScheduler).
optimizer_code = '''
from lightning.pytorch.utilities.types import OptimizerLRScheduler
from torch.optim import SGD
from pytorch_fob.engine.parameter_groups import GroupedModel
from pytorch_fob.engine.configs import OptimizerConfig

def configure_optimizers(model: GroupedModel, config: OptimizerConfig) -> OptimizerLRScheduler:
    lr = config.learning_rate
    optimizer = SGD(model.grouped_parameters(lr=lr), lr=lr)
    return {"optimizer": optimizer}
'''

# Drive the full submit -> configure -> run -> score pipeline.
env = OptimizerBenchmarkEnv()
env.submit_optimizer(optimizer_code, 'my_sgd_optimizer')
env.generate_experiment_yaml()
env.run_benchmark()

reward = env.get_reward()
print('Final reward:', reward)
Loading…
Add table
Add a link
Reference in a new issue