-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathparams.py
62 lines (53 loc) · 1.28 KB
/
params.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
from pathlib import Path
from typing import List, Optional, Tuple
from pydantic import BaseModel
class TrainingParams(BaseModel):
    """Hyperparameters for the training loop and optimizer.

    Validated by pydantic from the ``train`` section of the config
    (see ``Params``). NOTE(review): field meanings are inferred from
    names — confirm against the training script that consumes this.
    """

    log_interval: int            # presumably steps between log outputs — confirm
    eval_interval: int           # presumably steps between evaluations — confirm
    seed: int                    # RNG seed
    epochs: int                  # total training epochs
    learning_rate: float         # optimizer learning rate
    betas: Tuple[float, float]   # optimizer beta coefficients (Adam-style pair)
    eps: float                   # optimizer epsilon term
    batch_size: int
    fp16_run: bool               # enable mixed-precision training
    lr_decay: float              # per-epoch LR decay factor (presumably) — confirm
    segment_size: int            # audio segment length used in training — confirm units
    c_mel: int                   # mel-loss weight (presumably)
    c_kl: float                  # KL-loss weight (presumably)
class DataParams(BaseModel):
    """Dataset locations and audio/text preprocessing settings.

    Validated by pydantic from the ``data`` section of the config
    (see ``Params``). NOTE(review): field meanings are inferred from
    names — verify against the data-loading code.
    """

    training_files: Path         # path to the training filelist/manifest
    validation_files: Path       # path to the validation filelist/manifest
    text_cleaners: List[str]     # names of text-cleaner functions to apply
    sampling_rate: int           # audio sampling rate in Hz (presumably)
    filter_length: int           # STFT/FFT size (presumably)
    hop_length: int              # STFT hop size
    win_length: int              # STFT window size
    n_mel_channels: int          # number of mel filterbank channels
    mel_fmin: float              # lower mel-filterbank frequency bound
    language: str
    min_text_len: int = 1        # shortest accepted text length
    max_text_len: int = 190      # longest accepted text length
    mel_fmax: Optional[float] = None  # upper mel bound; None presumably means Nyquist — confirm
class ModelParams(BaseModel):
    """Network-architecture hyperparameters.

    Validated by pydantic from the ``model`` section of the config
    (see ``Params``). Field names follow VITS-style synthesizer
    conventions — NOTE(review): confirm against the model definition.
    """

    inter_channels: int
    hidden_channels: int
    filter_channels: int
    n_heads: int                 # attention heads in the text encoder (presumably)
    n_layers: int                # encoder layer count (presumably)
    kernel_size: int
    p_dropout: float             # dropout probability
    resblock: str                # residual-block variant selector (presumably a tag like "1"/"2")
    resblock_kernel_sizes: List[int]
    resblock_dilation_sizes: List[List[int]]   # per-resblock dilation schedules
    upsample_rates: List[int]                  # decoder upsampling factors
    upsample_initial_channel: int
    upsample_kernel_sizes: List[int]
    n_layers_q: int              # posterior-encoder layer count (presumably)
    use_spectral_norm: bool
    use_sdp: bool                # stochastic duration predictor toggle (presumably)
    gin_channels: int            # global conditioning channels (presumably; 0 likely disables)
class Params(BaseModel):
    """Root configuration object aggregating the three config sections.

    Mirrors the top-level structure of the config file: ``train``,
    ``data``, and ``model`` sub-objects, each validated by its own
    pydantic model.
    """

    train: TrainingParams
    data: DataParams
    model: ModelParams