Update PPO, add PER DQN
22  projects/codes/PER_DQN/config/CartPole-v1_PER_DQN_Test.yaml  Normal file
@@ -0,0 +1,22 @@
general_cfg:
  algo_name: PER_DQN
  device: cpu
  env_name: CartPole-v1
  mode: test
  load_checkpoint: true
  load_path: Train_CartPole-v1_PER_DQN_20221113-162804
  max_steps: 200
  save_fig: true
  seed: 0
  show_fig: false
  test_eps: 10
  train_eps: 200
algo_cfg:
  batch_size: 64
  buffer_size: 100000
  epsilon_decay: 500
  epsilon_end: 0.01
  epsilon_start: 0.95
  gamma: 0.95
  lr: 0.0001
  target_update: 4
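For context, a minimal sketch (not part of this commit) of how a config like this might be read and how the epsilon_start / epsilon_end / epsilon_decay fields typically drive an epsilon-greedy exploration schedule in DQN-style code. The exponential decay formula shown is an assumption about the convention, not something confirmed by this diff; the file path comes from the diff above.

import math
import yaml  # PyYAML

CFG_PATH = "projects/codes/PER_DQN/config/CartPole-v1_PER_DQN_Test.yaml"

with open(CFG_PATH, "r", encoding="utf-8") as f:
    cfg = yaml.safe_load(f)

general_cfg = cfg["general_cfg"]  # run-level settings: env, mode, seed, ...
algo_cfg = cfg["algo_cfg"]        # PER DQN hyperparameters

def epsilon_at(step: int) -> float:
    # Assumed exponential schedule:
    # eps = eps_end + (eps_start - eps_end) * exp(-step / eps_decay)
    start = algo_cfg["epsilon_start"]
    end = algo_cfg["epsilon_end"]
    decay = algo_cfg["epsilon_decay"]
    return end + (start - end) * math.exp(-step / decay)

print(general_cfg["env_name"], general_cfg["mode"])     # CartPole-v1 test
print(f"epsilon at step 0:    {epsilon_at(0):.3f}")     # close to epsilon_start
print(f"epsilon at step 2000: {epsilon_at(2000):.3f}")  # approaches epsilon_end

In test mode (mode: test, load_checkpoint: true) the exploration schedule is usually irrelevant; the fields above mainly matter for the training run referenced by load_path.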