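# FederatedScope configuration: standalone vertical FL (vFL) with two parties
# training a federated XGBoost (xgb_tree) model on the Adult dataset, with
# hyperparameter optimization driven by a Successive Halving (SHA) scheduler.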
use_gpu: False
device: 0
backend: torch
outdir: vFL_adult
federate:
  mode: standalone
  client_num: 2
  total_round_num: 30
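# Gradient-boosted tree model; lambda_ and gamma are the usual XGBoost
# regularization terms (L2 penalty on leaf weights and minimum split gain).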
model:
  type: xgb_tree
  lambda_: 0.1
  gamma: 0
  num_of_trees: 10
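# Both the generic optimizer learning rate (lr) and the tree-specific
# shrinkage (eta) are set to 0.5.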
train:
  optimizer:
    lr: 0.5
    # learning rate for xgb model
    eta: 0.5
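# Adult census-income data, standardized (zero mean / unit variance) but not
# min-max normalized.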
data:
  root: data/
  type: adult
  splits: [1.0, 0.0]
  args: [{normalization: False, standardization: True}]
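# Feature engineering is applied under the vertical-FL scenario.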
feat_engr:
  scenario: vfl
dataloader:
  type: raw
  batch_size: 50
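# Cross-entropy loss for the binary income label, trained with the trainer
# dedicated to vertical FL.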
criterion:
  type: CrossEntropyLoss
trainer:
  type: verticaltrainer
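# Vertical-FL settings: dims gives the cumulative feature boundaries per
# client (features 0-6 to the first party, 7-13 to the second); key_size is
# assumed to be the bit length of the homomorphic-encryption key pair.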
vertical:
  use: True
  key_size: 256
  dims: [7, 14]
  algo: 'xgb'
  data_size_for_debug: 1500
  feature_subsample_ratio: 1.0
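# Evaluate every 5 rounds and track the best round by test loss.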
eval:
  freq: 5
  best_res_update_round_wise_key: test_loss
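# Successive Halving over the search space in vfl_ss.yaml: 9 initial
# candidates, eliminated at rate 3 (9 -> 3 -> 1) across budgets of 3 and 9
# rounds, ranked by the server-side global test loss.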
hpo:
  scheduler: sha
  num_workers: 0
  init_cand_num: 9
  ss: 'federatedscope/autotune/baseline/vfl_ss.yaml'
  sha:
    budgets: [ 3, 9 ]
    elim_rate: 3
    iter: 1
  metric: 'server_global_eval.test_loss'
  working_folder: sha