https://github.com/Microsoft/CNTK
Revision 0fdf321b6f8f6c1c00d8c6458cd3ae55b73540e7 authored by Friedel van Megen on 29 November 2016, 10:17:05 UTC, committed by Friedel van Megen on 29 November 2016, 11:06:26 UTC
1 parent 20dada9
Raw File
Tip revision: 0fdf321b6f8f6c1c00d8c6458cd3ae55b73540e7 authored by Friedel van Megen on 29 November 2016, 10:17:05 UTC
eval changes
Tip revision: 0fdf321
DataParallelDistributedTrainer.h
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//

#pragma  once

#include "CNTKLibrary.h"
#include "DistributedTrainerBase.h"

namespace CNTK
{
    ///
    /// Distributed Trainer.
    ///
    ///
    /// Data-parallel distributed trainer. Hooks into the training loop via
    /// PreParameterUpdateCallback so that gradients can be exchanged across
    /// workers after gradient computation but before the model-parameter update.
    /// Common distributed-training machinery is inherited from DistributedTrainerBase.
    /// NOTE(review): the aggregation strategy itself lives in the .cpp implementation,
    /// not visible here — presumably all-reduce style gradient averaging; confirm there.
    ///
    class DataParallelDistributedTrainer : public DistributedTrainerBase
    {
    public:
        ///
        /// @param communicator                    Communicator used to exchange data between the distributed workers.
        /// @param useAsyncBufferedParameterUpdate Whether to buffer parameter updates asynchronously
        ///                                        (semantics defined in the implementation — verify before relying on it).
        /// @param distributedAfterSampleCount     Number of samples to process before distributed
        ///                                        training kicks in (warm-start threshold; forwarded to the base/implementation).
        ///
        DataParallelDistributedTrainer(DistributedCommunicatorPtr communicator, bool useAsyncBufferedParameterUpdate, size_t distributedAfterSampleCount);

        // Optional override that gets called per minibatch after finishing gradient computation but before updating model parameters
        bool PreParameterUpdateCallback(const Trainer& trainer, std::vector<std::pair<Parameter, NDArrayViewPtr>>& gradientValues, MinibatchInfo& info) override;
    };
}
back to top