mirror of https://github.com/tracel-ai/burn.git
Replace opaque return types in optim (#1767)
* update ARCHITECTURE.md links to project architecture section in contributor book
* replace opaque return type in optim
parent e4d0cf3343
commit d3cd6c4928
@@ -5,7 +5,7 @@ use crate::{
 use super::{
     decay::{WeightDecay, WeightDecayConfig},
-    Optimizer, SimpleOptimizer,
+    SimpleOptimizer,
 };
 use crate::config::Config;
 use crate::optim::adaptor::OptimizerAdaptor;
@@ -79,7 +79,9 @@ impl AdaGradConfig {
     /// # Returns
     ///
     /// Returns an optimizer that can be used to optimize a module.
-    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(&self) -> impl Optimizer<M, B> {
+    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(
+        &self,
+    ) -> OptimizerAdaptor<AdaGrad<B::InnerBackend>, M, B> {
         let optim = AdaGrad {
             lr_decay: LrDecay {
                 lr_decay: self.lr_decay,
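Why the change matters, as a hedged illustration (not code from this commit): an opaque `impl Optimizer<M, B>` return type cannot be named at the call site, so it cannot appear in a struct field or type alias without boxing. Returning the concrete OptimizerAdaptor makes the type spellable. The Trainer struct and the burn import paths below are assumptions for the sketch:

    use burn::module::AutodiffModule;
    use burn::optim::{adaptor::OptimizerAdaptor, AdaGrad};
    use burn::tensor::backend::AutodiffBackend;

    // Hypothetical trainer struct: this field could not be written down
    // while init() returned the opaque `impl Optimizer<M, B>`.
    struct Trainer<M: AutodiffModule<B>, B: AutodiffBackend> {
        optim: OptimizerAdaptor<AdaGrad<B::InnerBackend>, M, B>,
    }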
@@ -5,7 +5,7 @@ use crate::{
 use super::{
     decay::{WeightDecay, WeightDecayConfig},
-    Optimizer, SimpleOptimizer,
+    SimpleOptimizer,
 };
 use crate::config::Config;
 use crate::optim::adaptor::OptimizerAdaptor;
@@ -85,7 +85,9 @@ impl AdamConfig {
     /// # Returns
     ///
     /// Returns an optimizer that can be used to optimize a module.
-    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(&self) -> impl Optimizer<M, B> {
+    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(
+        &self,
+    ) -> OptimizerAdaptor<Adam<B::InnerBackend>, M, B> {
         let optim = Adam {
             momentum: AdaptiveMomentum {
                 beta_1: self.beta_1,
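The concrete type can also be forwarded through a caller's own API, which the opaque type did not allow. A minimal sketch under the same caveats as above; build_adam is a hypothetical helper, not part of burn:

    use burn::module::AutodiffModule;
    use burn::optim::{adaptor::OptimizerAdaptor, Adam, AdamConfig};
    use burn::tensor::backend::AutodiffBackend;

    // Hypothetical wrapper: this signature was impossible to write while
    // AdamConfig::init returned `impl Optimizer<M, B>`.
    fn build_adam<M, B>(config: &AdamConfig) -> OptimizerAdaptor<Adam<B::InnerBackend>, M, B>
    where
        M: AutodiffModule<B>,
        B: AutodiffBackend,
    {
        config.init()
    }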
@@ -4,7 +4,7 @@ use crate::{
 };
 use std::marker::PhantomData;

-use super::{Optimizer, SimpleOptimizer};
+use super::SimpleOptimizer;
 use crate::config::Config;
 use crate::optim::adaptor::OptimizerAdaptor;
 use crate::tensor::{backend::AutodiffBackend, Tensor};
@@ -83,7 +83,9 @@ impl AdamWConfig {
     /// # Returns
     ///
     /// Returns an optimizer that can be used to optimize a module.
-    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(&self) -> impl Optimizer<M, B> {
+    pub fn init<B: AutodiffBackend, M: AutodiffModule<B>>(
+        &self,
+    ) -> OptimizerAdaptor<AdamW<B::InnerBackend>, M, B> {
         let optim = AdamW {
             momentum: AdaptiveMomentumW {
                 beta_1: self.beta_1,
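Callers that only need the trait lose nothing, since OptimizerAdaptor itself implements Optimizer<M, B>. A sketch under that assumption, also assuming the trait's step(&mut self, lr, module, grads) -> M shape from burn at the time; train_step is a hypothetical helper:

    use burn::module::AutodiffModule;
    use burn::optim::{GradientsParams, Optimizer};
    use burn::tensor::backend::AutodiffBackend;
    use burn::LearningRate;

    // Generic over the trait, so it accepts the concrete adaptors returned
    // by AdaGradConfig, AdamConfig, and AdamWConfig::init alike.
    fn train_step<M, B, O>(optim: &mut O, lr: LearningRate, module: M, grads: GradientsParams) -> M
    where
        B: AutodiffBackend,
        M: AutodiffModule<B>,
        O: Optimizer<M, B>,
    {
        optim.step(lr, module, grads)
    }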