first add
This commit is contained in:
10
examples/02_ml_training/conf/config.yaml
Normal file
10
examples/02_ml_training/conf/config.yaml
Normal file
@@ -0,0 +1,10 @@
|
||||
# Top-level Hydra config: composes one entry from each config group
# (model/, optimizer/, dataset/) and defines shared training hyperparameters.
defaults:
  - model: resnet
  - optimizer: adam
  - dataset: cifar10
  - _self_  # apply this file's own keys last so they win over group defaults

# Shared hyperparameters.
learning_rate: 0.001  # base LR; optimizer configs reference it via ${learning_rate}
batch_size: 32
epochs: 100
device: cuda
|
||||
5
examples/02_ml_training/conf/dataset/cifar10.yaml
Normal file
5
examples/02_ml_training/conf/dataset/cifar10.yaml
Normal file
@@ -0,0 +1,5 @@
|
||||
# Dataset group option: CIFAR-10 (selected via `dataset: cifar10` in defaults).
name: CIFAR-10
num_classes: 10
image_size: 32        # square images, pixels per side
train_samples: 50000
val_samples: 10000
|
||||
5
examples/02_ml_training/conf/dataset/imagenet.yaml
Normal file
5
examples/02_ml_training/conf/dataset/imagenet.yaml
Normal file
@@ -0,0 +1,5 @@
|
||||
# Dataset group option: ImageNet-1k (selected via `dataset: imagenet`).
name: ImageNet
num_classes: 1000
image_size: 224       # square images, pixels per side
train_samples: 1281167
val_samples: 50000
|
||||
5
examples/02_ml_training/conf/model/resnet.yaml
Normal file
5
examples/02_ml_training/conf/model/resnet.yaml
Normal file
@@ -0,0 +1,5 @@
|
||||
# Model group option: ResNet-50 (selected via `model: resnet` in defaults).
name: ResNet50
layers: 50
pretrained: true
num_classes: 1000
dropout: 0.5
|
||||
7
examples/02_ml_training/conf/model/transformer.yaml
Normal file
7
examples/02_ml_training/conf/model/transformer.yaml
Normal file
@@ -0,0 +1,7 @@
|
||||
# Model group option: Vision Transformer ViT-B/16 (selected via `model: transformer`).
name: ViT-B/16
layers: 12
pretrained: true
num_classes: 1000
hidden_dim: 768       # transformer embedding width
num_heads: 12         # attention heads per layer
patch_size: 16        # input is split into 16x16 patches
|
||||
4
examples/02_ml_training/conf/optimizer/adam.yaml
Normal file
4
examples/02_ml_training/conf/optimizer/adam.yaml
Normal file
@@ -0,0 +1,4 @@
|
||||
# Optimizer group option: Adam (selected via `optimizer: adam` in defaults).
name: Adam
lr: ${learning_rate}  # interpolated from the top-level config's learning_rate
betas: [0.9, 0.999]
weight_decay: 0.0001
|
||||
5
examples/02_ml_training/conf/optimizer/sgd.yaml
Normal file
5
examples/02_ml_training/conf/optimizer/sgd.yaml
Normal file
@@ -0,0 +1,5 @@
|
||||
# Optimizer group option: SGD with Nesterov momentum (selected via `optimizer: sgd`).
name: SGD
lr: ${learning_rate}  # interpolated from the top-level config's learning_rate
momentum: 0.9
weight_decay: 0.0005
nesterov: true
|
||||
32
examples/02_ml_training/train.py
Normal file
32
examples/02_ml_training/train.py
Normal file
@@ -0,0 +1,32 @@
|
||||
"""Machine-learning training configuration example (Hydra + Rich).

Composes the config from conf/config.yaml and pretty-prints the
resolved model / optimizer / dataset settings as a Rich tree.
"""

import hydra
from omegaconf import DictConfig
from rich.console import Console
from rich.tree import Tree


console = Console()


@hydra.main(version_base=None, config_path="conf", config_name="config")
def train(cfg: DictConfig) -> None:
    """Render the composed training configuration as a tree.

    Args:
        cfg: Hydra-composed configuration with ``model``, ``optimizer``
            and ``dataset`` sections.
    """
    console.print("\n[bold green]🤖 机器学习训练配置示例[/bold green]\n")

    root = Tree("🎯 Training Configuration")

    # Data-driven layout: one (branch label, rows) pair per config section.
    sections = (
        ("[yellow]Model", (
            ("Name", cfg.model.name),
            ("Layers", cfg.model.layers),
        )),
        ("[cyan]Optimizer", (
            ("Name", cfg.optimizer.name),
            ("Learning Rate", cfg.optimizer.lr),
        )),
        ("[magenta]Dataset", (
            ("Name", cfg.dataset.name),
            ("Classes", cfg.dataset.num_classes),
        )),
    )
    for label, rows in sections:
        branch = root.add(label)
        for field, value in rows:
            branch.add(f"{field}: {value}")

    console.print(root)
    console.print("\n[bold green]✅ 配置加载完成![/bold green]\n")


if __name__ == "__main__":
    train()
|
||||
Reference in New Issue
Block a user