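# FederatedScope config for federated graph-level classification across
# multiple graph domains (multi-task), with per-client personalization
# in the FedBN style (see the personalization section below).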
use_gpu: True
device: 0
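# Early stopping: halt if the monitored metric shows no improvement for
# `patience` evaluation rounds; 'mean' judges improvement by the mean of
# the indicator across clients.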
early_stop:
  patience: 20
  improve_indicator_mode: mean
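# Federation setup: 'standalone' simulates all clients in one process for
# 400 rounds; clients keep separate model copies (share_local_model: False)
# and no server-side global evaluation is run (make_global_eval: False).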
federate:
  mode: 'standalone'
  make_global_eval: False
  total_round_num: 400
  share_local_model: False
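# Data: a mix of graph datasets from multiple domains, one per client. The
# PyG 'Constant' pre-transform gives every node a constant feature of 1.0
# (cat: False replaces rather than concatenates), so graphs without native
# node attributes still have model inputs.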
data:
  root: data/
  type: graph_multi_domain_mix
  pre_transform: ['Constant', {'value':1.0, 'cat':False}]
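# Mini-batching via PyTorch Geometric dataloaders.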
dataloader:
  type: pyg
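# Model: GIN backbone with 64 hidden units for graph-level prediction.
# (Assumption: out_channels: 0 leaves the output dimension to be resolved
# from each client's own label space, since the domains differ in their
# number of classes.)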
model:
  type: gin
  hidden: 64
  out_channels: 0
  task: graph
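# Personalization: keep the size-dependent pre/post layers (encoders and
# classifier) local to each client, plus the normalization layers ('norms')
# as in FedBN, so only the shared GNN body is aggregated by the server.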
personalization:
  # local_param: ['encoder_atom', 'encoder', 'clf']  # to handle size-different pre & post layers
  local_param: ['encoder_atom', 'encoder', 'clf', 'norms']  # pre, post + FedBN
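# Local training: 16 local SGD steps per round (lr 0.5, weight decay 5e-4).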
train:
  local_update_steps: 16
  optimizer:
    lr: 0.5
    weight_decay: 0.0005
    type: SGD
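# Standard cross-entropy loss for graph classification.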
criterion:
  type: CrossEntropyLoss
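# Trainer that iterates over graph mini-batches (graph-level tasks).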
trainer:
  type: graphminibatch_trainer
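# Evaluate every 5 rounds, reporting accuracy and the raw count of correct
# predictions.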
eval:
  freq: 5
  metrics: ['acc', 'correct']
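# Usage sketch, assuming FederatedScope's standard entry point and a
# hypothetical filename for this config (the path below is an assumption,
# not taken from this file):
#   python federatedscope/main.py --cfg fedbn_gnn_minibatch_on_multi_task.yaml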