forked from aaaron7/tfiws_snippet
//
// LC.swift
// TFPlayground
//
// Created by aaron on 2018/9/4.
// Copyright © 2018 aaron. All rights reserved.
//

import Foundation
import TensorFlow
import Python

typealias FloatTensor = Tensor<Float>

// Class label for each sample: 0 = Green, 1 = Red.
enum Label: Int {
    case Green = 0
    case Red
}

// A 2-D point with a constant bias feature x0 = 1.
struct Position {
    let x0: Float = 1
    let x1: Float
    let x2: Float
}

// The classifier's only parameter: a 3x1 weight vector (bias, x1, x2).
struct ClassifierParameters: ParameterAggregate {
    var w = Tensor<Float>(randomNormal: [3, 1])
}

struct Model {
    var parameters = ClassifierParameters()
}
// Loads the training set from test.txt: CRLF-terminated lines of tab-separated
// columns "x1<TAB>x2<TAB>label". A constant 1.0 is prepended to every sample
// as the bias feature.
func loadTrainingSet() -> (trainingVec: FloatTensor, labelVec: FloatTensor) {
    let lines = try! String(contentsOf: URL(fileURLWithPath: "test.txt")).split(separator: "\r\n")
    let data = lines.map { $0.split(separator: "\t") }
    let rowCount = data.count
    let trainingScalars: [[Float]] = data.map { [1.0, Float($0[0])!, Float($0[1])!] }
    let labelScalars: [Float] = data.map { Float($0[2])! }
    let trainingVec = Tensor<Float>(shape: [Int32(rowCount), 3], scalars: trainingScalars.flatMap { $0 })
    let labelVec = Tensor<Float>(shape: [Int32(rowCount), 1], scalars: labelScalars)
    return (trainingVec, labelVec)
}
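
// A hypothetical sample of what the parser above assumes a test.txt row looks like
// (the actual data file is not part of this snippet; the values are illustrative only):
//
//     0.5\t9.0\t1
//     -1.2\t4.7\t0
//
// i.e. two tab-separated features followed by a 0/1 label, one sample per CRLF line.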
// Trains the logistic-regression weights with full-batch gradient ascent on the
// log-likelihood, which is equivalent to gradient descent on the log loss.
func train(trainingVec: FloatTensor, labelVec: FloatTensor, model: inout Model) {
    let learningRate: Float = 0.0005
    // Number of training samples, used to average the reported log loss.
    let batchSize = Float(labelVec.shape[0])
    for epoch in 0...3000 {
        let y = trainingVec • model.parameters.w
        let h = sigmoid(y)
        // Error term (y_true - h); X^T * e is the gradient of the log-likelihood w.r.t. w.
        let e = labelVec - h
        let dw = trainingVec.transposed() • e
        let grad = ClassifierParameters(w: dw)
        model.parameters.update(withGradients: grad) { (p, g) in
            p += g * learningRate
        }
        // Standard log loss: -(y*log(h) + (1-y)*log(1-h)), averaged over the batch.
        let p1 = -1 * labelVec * log(h)
        let p2 = (1 - labelVec) * log(1 - h)
        let traditionalLogLoss = (p1 - p2).sum() / batchSize
        print("epoch: \(epoch), LogLoss v2: \(traditionalLogLoss)")
    }
}
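
// A minimal evaluation sketch, not part of the original program flow: training-set
// accuracy, assuming labels are 0/1 and a 0.5 decision threshold. The name `accuracy`
// and the threshold are illustrative choices, not anything fixed by the snippet.
func accuracy(trainingVec: FloatTensor, labelVec: FloatTensor, model: Model) -> Float {
    let h = sigmoid(trainingVec • model.parameters.w)
    let predictions = h.scalars.map { $0 > 0.5 ? Float(1) : Float(0) }
    let labels = labelVec.scalars
    var correct = 0
    for (p, l) in zip(predictions, labels) where p == l {
        correct += 1
    }
    return Float(correct) / Float(labels.count)
}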
// Scatter-plots the two classes and draws the learned decision boundary
// using matplotlib via Python interop.
func plot(trainVec: FloatTensor, labelVec: FloatTensor, parameters: ClassifierParameters) {
    var coord1x: [Float] = []
    var coord1y: [Float] = []
    var coord2x: [Float] = []
    var coord2y: [Float] = []
    let rowCount = trainVec.shape[0]
    // Split the samples into the two label groups for plotting.
    for i in 0..<rowCount {
        if Int(labelVec[i][0].scalar!) == 1 {
            coord1x.append(trainVec[i][1].scalar!)
            coord1y.append(trainVec[i][2].scalar!)
        } else {
            coord2x.append(trainVec[i][1].scalar!)
            coord2y.append(trainVec[i][2].scalar!)
        }
    }

    // Sample 60 x-values starting at -3 in steps of 0.1 for the boundary line.
    var xpts = Array<Float>(repeating: 0, count: 60)
    for i in 0..<xpts.count {
        xpts[i] = -3 + Float(i) * 0.1
    }

    print(parameters.w)
    let wVec = parameters.w.scalars
    let w1 = wVec[0]
    let w2 = wVec[1]
    let w3 = wVec[2]
    // On the decision boundary w1 + w2*x + w3*y = 0, so y = (-w1 - w2*x) / w3.
    let ypts: [Float] = xpts.map { (x) -> Float in
        let a = -w1
        let b = w2 * x
        let c = w3
        return (a - b) / c
    }

    let matplot = Python.import("matplotlib.pyplot")
    let fig = matplot.figure()
    let ax = fig.add_subplot(111)
    ax.scatter(coord1x, coord1y, 50, "red", "s")
    ax.scatter(coord2x, coord2y, 50, "green")
    ax.plot(xpts, ypts)
    matplot.show()
}
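
// In a headless environment the plot can be written to disk instead of shown
// interactively, e.g. by replacing matplot.show() above with
//
//     matplot.savefig("decision_boundary.png")
//
// (the output filename here is only an example).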
typealias Probability = Float

func launch() {
    typealias Record = (Position, Label)
    let (trainingVec, labelVec) = loadTrainingSet()
    var m = Model()
    train(trainingVec: trainingVec, labelVec: labelVec, model: &m)
    print(m)
    plot(trainVec: trainingVec, labelVec: labelVec, parameters: m.parameters)
    print(m.parameters.w)
    // Spin forever to keep the program (and any plot window) alive.
    while true {
    }
}

// A minimal inference sketch, assuming the same feature encoding as training:
// the probability of .Red is sigmoid([x0, x1, x2] • w), thresholded at 0.5.
func predict(model: Model, pos: Position) -> (Label, Probability) {
    let x = Tensor<Float>(shape: [1, 3], scalars: [pos.x0, pos.x1, pos.x2])
    let pRed = sigmoid(x • model.parameters.w).scalars[0]
    return pRed > 0.5 ? (.Red, pRed) : (.Green, 1 - pRed)
}

launch()