https://github.com/Microsoft/CNTK
DataParallelDistributedLearner.h
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//

#pragma once

#include "CNTKLibrary.h"
#include "DistributedLearnerBase.h"

namespace CNTK
{
    ///
    /// Data-parallel distributed learner.
    ///
    class DataParallelDistributedLearner : public DistributedLearnerBase
    {
    public:
        DataParallelDistributedLearner(DistributedCommunicatorPtr communicator, LearnerPtr learner, size_t distributedAfterSamples, bool useAsyncBufferedParameterUpdate);

        // Called once per minibatch after gradient computation and before the model parameters are updated.
        bool Update(std::unordered_map<Parameter, NDArrayViewPtr>& gradientValues, MinibatchInfo& minibatchInfo) override;
    };
}
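
// Usage sketch (illustrative only, not part of the original header): in the C++ API this
// learner is normally obtained through the CreateDataParallelDistributedLearner factory
// declared in CNTKLibrary.h rather than constructed directly. The toy model, learning rate,
// and warm-start sample count below are hypothetical placeholders.
//
//     #include "CNTKLibrary.h"
//     using namespace CNTK;
//
//     // Build a trivial model so there are parameters to train.
//     auto input   = InputVariable({ 2 }, DataType::Float, L"features");
//     auto weights = Parameter({ 2, 2 }, DataType::Float, GlorotUniformInitializer());
//     auto model   = Times(weights, input);
//
//     // Local learner that the distributed learner wraps.
//     auto localLearner = SGDLearner(model->Parameters(), LearningRatePerSampleSchedule(0.01));
//
//     // Aggregate gradients across MPI workers on every minibatch; distributeAfterSamples = 0
//     // means distributed training starts immediately (no single-worker warm-start phase).
//     auto communicator = MPICommunicator();
//     auto distributedLearner = CreateDataParallelDistributedLearner(
//         communicator, localLearner, /*distributeAfterSamples=*/ 0,
//         /*useAsyncBufferedParameterUpdate=*/ false);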