Update PPO, add PER DQN

This commit is contained in:
johnjim0816
2022-11-14 21:35:28 +08:00
parent dc78698262
commit b8aec4c188
34 changed files with 1993 additions and 476 deletions


@@ -0,0 +1,22 @@
general_cfg:
  algo_name: PER_DQN
  device: cpu
  env_name: CartPole-v1
  mode: test
  load_checkpoint: true
  load_path: Train_CartPole-v1_PER_DQN_20221113-162804
  max_steps: 200
  save_fig: true
  seed: 0
  show_fig: false
  test_eps: 10
  train_eps: 200
algo_cfg:
  batch_size: 64
  buffer_size: 100000
  epsilon_decay: 500
  epsilon_end: 0.01
  epsilon_start: 0.95
  gamma: 0.95
  lr: 0.0001
  target_update: 4
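
The PER_DQN agent added in this commit presumably pairs this config with a prioritized experience replay buffer (Schaul et al., 2016), where transitions are sampled in proportion to their TD error rather than uniformly. The sketch below is not the repository's actual implementation; the class name PERBuffer and the alpha, beta, and eps defaults are assumptions, while capacity and batch_size correspond to buffer_size and batch_size in algo_cfg above.

import numpy as np

class PERBuffer:
    """Minimal proportional prioritized replay buffer (illustrative sketch)."""
    def __init__(self, capacity=100000, alpha=0.6, beta=0.4, eps=1e-5):
        self.capacity = capacity
        self.alpha, self.beta, self.eps = alpha, beta, eps
        self.buffer = []  # stores (state, action, reward, next_state, done) tuples
        self.priorities = np.zeros(capacity, dtype=np.float64)
        self.pos = 0      # next write index (ring buffer)

    def push(self, transition):
        # new transitions get the current max priority so they are sampled at least once
        max_prio = self.priorities.max() if self.buffer else 1.0
        if len(self.buffer) < self.capacity:
            self.buffer.append(transition)
        else:
            self.buffer[self.pos] = transition
        self.priorities[self.pos] = max_prio
        self.pos = (self.pos + 1) % self.capacity

    def sample(self, batch_size=64):
        prios = self.priorities[:len(self.buffer)]
        probs = prios ** self.alpha
        probs /= probs.sum()
        idxs = np.random.choice(len(self.buffer), batch_size, p=probs)
        # importance-sampling weights correct the bias from non-uniform sampling
        weights = (len(self.buffer) * probs[idxs]) ** (-self.beta)
        weights /= weights.max()
        batch = [self.buffer[i] for i in idxs]
        return batch, idxs, weights

    def update_priorities(self, idxs, td_errors):
        # priority is proportional to |TD error|; eps keeps every priority strictly positive
        self.priorities[idxs] = np.abs(td_errors) + self.eps

In a typical training loop, sample() returns the batch together with its indices and importance weights; after computing per-sample TD errors, update_priorities() is called with those indices so the buffer's sampling distribution tracks the latest errors.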