# 01_OneHidden.cntk
# Parameters can be overwritten on the command line
# for example: cntk configFile=myConfigFile RootDir=../.. 
# For running from Visual Studio add
# currentDirectory=$(SolutionDir)/<path to corresponding data folder> 

command = trainNetwork:testNetwork

precision = "float"; traceLevel = 1 ; deviceId = "auto"

rootDir = ".." ; dataDir = "$rootDir$/DataSets/MNIST" ;
outputDir = "./Output" ;
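# Note: $name$ references (e.g. $rootDir$ above) are expanded by CNTK's config variable
# substitution, so these paths can likewise be overridden on the command line.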

modelPath = "$outputDir$/Models/01_OneHidden"
#stderr = "$outputDir$/01_OneHidden_bs_out"

# TRAINING CONFIG
trainNetwork = {
    action = "train"
    
    BrainScriptNetworkBuilder = {
        imageShape = 28:28:1                        # image dimensions, 1 channel only
        labelDim = 10                               # number of distinct labels
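        # scale factor that maps raw pixel values (0..255) into the range [0, 1)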
        featScale = 1/256

        # This model returns multiple nodes as a record, which
        # can be accessed using .x syntax.
        model(x) = {
            s1 = x * featScale
            h1 = DenseLayer {200, activation=ReLU} (s1) 
            z = LinearLayer {labelDim} (h1)
        }
        
        # inputs
        features = Input {imageShape}
        labels = Input {labelDim}
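        # the Input node names must match the stream names ("features", "labels") declared in the reader below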

        # apply model to features
        out = model (features)

        # loss and error computation
        ce   = CrossEntropyWithSoftmax (labels, out.z)
        errs = ClassificationError (labels, out.z)

        # declare special nodes
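        # (these lists tell CNTK which nodes are the inputs, training criterion, evaluation metric, and outputs)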
        featureNodes    = (features)
        labelNodes      = (labels)
        criterionNodes  = (ce)
        evaluationNodes = (errs)
        outputNodes     = (out.z)
        
        # Alternatively, you can use the Sequential keyword and write the model
        # as follows. We keep the format above because EvalClientTest needs
        # to access the internal nodes, which is not yet possible with Sequential.
        #
        # Scale{f} = x => Constant(f) .* x
        # model = Sequential (
        #     Scale {featScale} :
        #     DenseLayer {200} : ReLU :
        #     LinearLayer {labelDim}
        # )

        # # inputs
        # features = Input {imageShape}
        # labels = Input {labelDim}

        # # apply model to features
        # ol = model (features)

        # # loss and error computation
        # ce   = CrossEntropyWithSoftmax (labels, ol)
        # errs = ClassificationError (labels, ol)

        # # declare special nodes
        # featureNodes    = (features)
        # labelNodes      = (labels)
        # criterionNodes  = (ce)
        # evaluationNodes = (errs)
        # outputNodes     = (ol)
    }

    SGD = {
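        # number of samples per epoch (the full MNIST training set)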
        epochSize = 60000
        minibatchSize = 64
        maxEpochs = 10
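        # per-sample learning rate: 0.01 for the first 5 epochs, then 0.005 for the remaining epochs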
        learningRatesPerSample = 0.01*5:0.005
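        # momentum expressed as a time constant in samples; 0 disables momentum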
        momentumAsTimeConstant = 0
        
        numMBsToShowResult = 500
    }

    reader = {
        readerType = "CNTKTextFormatReader"
        # See ../README.md for details on getting the data (Train-28x28_cntk_text.txt).
        file = "$DataDir$/Train-28x28_cntk_text.txt"
        input = {
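            # each image is stored as 784 (= 28*28) dense pixel values; labels are 10-dimensional one-hot vectors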
            features = { dim = 784 ; format = "dense" }
            labels =   { dim = 10  ; format = "dense" }
        }
    }   
}

# TEST CONFIG
testNetwork = {
    action = "test"
    minibatchSize = 1024    # reduce this if you run out of memory

    reader = {
        readerType = "CNTKTextFormatReader"
        file = "$DataDir$/Test-28x28_cntk_text.txt"
        input = {
            features = { dim = 784 ; format = "dense" }
            labels =   { dim = 10  ; format = "dense" }
        }
    }
}