gdeep

package module
v0.0.0-...-e46c581 Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jul 5, 2021 License: Apache-2.0 Imports: 8 Imported by: 0

README

gdeep

deep learning library written in Go. Contributions are welcome!! Still under development..

Getting Started

git clone https://github.com/kuroko1t/gdeep
cd gdeep
go run example/mlpMnist.go

sample

distributed

  • go get github.com/kuroko1t/gmpi
  • install openmpi > 3.0

Run

  • cpu
go run example/mlpMnist.go
  • distributed
mpirun -np 2 -H host1:1,host2:1 go run example/mlpMnist_allreduce.go

Document

https://godoc.org/github.com/kuroko1t/gdeep

implemented networks

Only 2D inputs are supported so far.

layer
  • Dense
  • Dropout
activation
  • Relu
  • Sigmoid
loss
  • crossentropy
optimization
  • Sgd
  • Momentum

Sample

  • mlp sample
package main

import (
	"fmt"
	"github.com/kuroko1t/GoMNIST"
	"github.com/kuroko1t/gdeep"
	"github.com/kuroko1t/gmat"
)

// main trains a small MLP (784 -> 20 -> 20 -> 10) on MNIST using ReLU
// activations, dropout, softmax cross-entropy loss, and momentum SGD,
// printing the average loss after every mini-batch.
func main() {
	// Check the load error instead of discarding it: a missing ./data
	// directory would otherwise cause a confusing nil dereference below.
	train, test, err := GoMNIST.Load("./data")
	if err != nil {
		panic(err)
	}
	trainDataSize := len(train.ImagesFloatNorm)
	testDataSize := len(test.ImagesFloatNorm)
	// Report dataset sizes (this also keeps testDataSize and fmt in use;
	// the original sample declared/imported them without using them,
	// which is a compile error in Go).
	fmt.Printf("train: %d images, test: %d images\n", trainDataSize, testDataSize)

	batchSize := 128
	inputSize := 784 // 28x28 grayscale pixels, flattened
	hiddenSize := 20
	outputSize := 10 // one class per digit
	learningRate := 0.01
	epochNum := 1
	iterationNum := trainDataSize * epochNum / batchSize

	dropout1 := &gdeep.Dropout{}
	dropout2 := &gdeep.Dropout{}

	// Build the network layer by layer; SoftmaxWithLoss is the final
	// loss layer consumed by gdeep.Run.
	layer := []gdeep.LayerInterface{}
	gdeep.LayerAdd(&layer, &gdeep.Dense{}, []int{inputSize, hiddenSize})
	gdeep.LayerAdd(&layer, &gdeep.Relu{})
	gdeep.LayerAdd(&layer, dropout1, 0.2)
	gdeep.LayerAdd(&layer, &gdeep.Dense{}, []int{hiddenSize, hiddenSize})
	gdeep.LayerAdd(&layer, &gdeep.Relu{})
	gdeep.LayerAdd(&layer, dropout2, 0.2)
	gdeep.LayerAdd(&layer, &gdeep.Dense{}, []int{hiddenSize, outputSize})
	gdeep.LayerAdd(&layer, &gdeep.SoftmaxWithLoss{})
	momentum := &gdeep.Momentum{learningRate, 0.9}

	iter := 0
	for i := 0; i < iterationNum; i++ {
		// Wrap the batch cursor to the start of the training set when the
		// next slice would run past the end. The original sample tested the
		// outer loop index i here ((i+2)*batchSize > trainDataSize), so once
		// that condition became true it stayed true and iter was reset to 0
		// on every subsequent iteration, re-training on batch 0 forever.
		if (iter+1)*batchSize > trainDataSize {
			iter = 0
		}
		imageBatch := train.ImagesFloatNorm[iter*batchSize : (iter+1)*batchSize]
		labelBatch := train.LabelsOneHot[iter*batchSize : (iter+1)*batchSize]
		x := gmat.Make2DInitArray(imageBatch)
		t := gmat.Make2DInitArray(labelBatch)
		// Run performs one forward/backward pass and a momentum update.
		loss := gdeep.Run(layer, momentum, x, t)
		gdeep.AvePrint(loss, "loss")
		iter++
	}
	_ = i // not referenced after the loop
}

  • distributed

sample

  • Mnist Test Accuracy

accuracy

Done

  • cpu calc parallelization(dot)
  • learning param save and restore
  • mlp with gpu

Todo

  • CNN

License

gdeep is licensed under the Apache License, Version 2.0

Documentation

Overview

Copyright 2018 kurosawa. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =============================================================================

Copyright 2018 kurosawa. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =============================================================================

Copyright 2018 kurosawa. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =============================================================================

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func Accuracy

func Accuracy(x gmat.Tensor, t gmat.Tensor) float64

func Allreduce

func Allreduce(layer []LayerInterface)

func AvePrint

func AvePrint(x gmat.Tensor, name string)

func BackLayer

func BackLayer(layer []LayerInterface, dout gmat.Tensor) gmat.Tensor

func DensePrint

func DensePrint(x gmat.Tensor, name string)

func ForwardLayer

func ForwardLayer(layer []LayerInterface, x gmat.Tensor, t gmat.Tensor) gmat.Tensor

func ImageRead2GrayNorm

func ImageRead2GrayNorm(imgpath string) (t gmat.Tensor)

func LayerAdd

func LayerAdd(layer *[]LayerInterface, calc interface{}, shape ...interface{})

func Layerinit

func Layerinit() interface{}

func MomentumUpdateLayer

func MomentumUpdateLayer(layer []LayerInterface, m *Momentum)

func OneHot

func OneHot(x int, size int) (y []float64)

func Predict

func Predict(layers []LayerInterface, x gmat.Tensor) int

func Restore

func Restore(fileName string, p *[]LayerInterface)

func Run

func Run(layer []LayerInterface, update interface{}, x gmat.Tensor, t gmat.Tensor) (loss gmat.Tensor)

func SGDUpdateLayer

func SGDUpdateLayer(layer []LayerInterface, sgd *SGD)

func Saver

func Saver(p []LayerInterface, saveName string)

Types

type Dense

type Dense layer.Dense

func (*Dense) Backward

func (dense *Dense) Backward(dout gmat.Tensor) gmat.Tensor

func (*Dense) Forward

func (dense *Dense) Forward(x gmat.Tensor, t gmat.Tensor) gmat.Tensor

type Dropout

type Dropout layer.Dropout

func (*Dropout) Backward

func (drop *Dropout) Backward(dout gmat.Tensor) gmat.Tensor

func (*Dropout) Forward

func (drop *Dropout) Forward(x gmat.Tensor, t gmat.Tensor) gmat.Tensor

type LayerInterface

type LayerInterface interface {
	Forward(gmat.Tensor, gmat.Tensor) gmat.Tensor
	Backward(gmat.Tensor) gmat.Tensor
	// contains filtered or unexported methods
}

type Momentum

type Momentum layer.Momentum

type Relu

type Relu layer.Relu

func (*Relu) Backward

func (relu *Relu) Backward(dout gmat.Tensor) gmat.Tensor

func (*Relu) Forward

func (relu *Relu) Forward(x gmat.Tensor, t gmat.Tensor) gmat.Tensor

type SGD

type SGD layer.SGD

type Sigmoid

type Sigmoid layer.Sigmoid

func (*Sigmoid) Backward

func (sigmoid *Sigmoid) Backward(dout gmat.Tensor) gmat.Tensor

func (*Sigmoid) Forward

func (sigmoid *Sigmoid) Forward(x gmat.Tensor, t gmat.Tensor) gmat.Tensor

type SoftmaxWithLoss

type SoftmaxWithLoss layer.SoftmaxWithLoss

func (*SoftmaxWithLoss) Backward

func (softmaxWithLoss *SoftmaxWithLoss) Backward(dout gmat.Tensor) gmat.Tensor

func (*SoftmaxWithLoss) Forward

func (softmaxWithLoss *SoftmaxWithLoss) Forward(x gmat.Tensor, t gmat.Tensor) gmat.Tensor

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL