
Commit 12e7d4b

Enhance neural network layer types with additional configurations and default values. Update layer attributes to include type definitions and validation constraints for better user input handling in the NeuralNetworkBuilder component. (#998)
1 parent f6f5e46 commit 12e7d4b

1 file changed

js/neuralnetwork_builder.js

Lines changed: 99 additions & 89 deletions
@@ -2,31 +2,89 @@ import * as opt from '../../lib/model/nns/optimizer.js'
 
 const layerTypes = {
 	abs: {},
-	clip: { min: 0, max: 1 },
-	conv: { kernel: 5, channel: 16 },
-	dropout: { drop_rate: 0.5 },
+	acos: {},
+	acosh: {},
+	asin: {},
+	asinh: {},
+	atan: {},
+	atanh: {},
+	bdaa: { alpha: { type: 'number', default: 1, multipleOf: 0.1 } },
+	bent_identity: {},
+	blu: { beta: { type: 'number', default: 0.1, multipleOf: 0.1 } },
+	brelu: { a: { type: 'number', default: 1, multipleOf: 0.1 } },
+	ceil: {},
+	celu: { a: { type: 'number', default: 1, multipleOf: 0.1 } },
+	clip: {
+		min: { type: 'number', default: 0, multipleOf: 0.1 },
+		max: { type: 'number', default: 1, multipleOf: 0.1 },
+	},
+	cloglog: {},
+	cloglogm: {},
+	conv: { kernel: { type: 'number', default: 5 }, channel: { type: 'number', default: 16 } },
+	cos: {},
+	cosh: {},
+	crelu: {},
+	dropout: {
+		drop_rate: { type: 'number', label: 'Drop rate', default: 0.5, multipleOf: 0.1, minimum: 0, maximum: 1 },
+	},
+	eelu: {
+		k: { type: 'number', default: 1, multipleOf: 0.1 },
+		alpha: { type: 'number', default: 1, multipleOf: 0.1 },
+		beta: { type: 'number', default: 1, multipleOf: 0.1 },
+	},
+	elish: {},
+	elliott: {},
+	elu: { a: { type: 'number', default: 1, multipleOf: 0.1 } },
+	erelu: {},
+	erf: {},
+	eswish: { beta: { type: 'number', default: 1, multipleOf: 0.1 } },
 	exp: {},
+	felu: { alpha: { type: 'number', default: 1, multipleOf: 0.1 } },
 	flatten: {},
-	full: { size: 10, a: 'sigmoid' },
+	floor: {},
+	frelu: { b: { type: 'number', default: 0, multipleOf: 0.1 } },
+	full: {
+		out_size: { type: 'number', label: 'Output size', default: 10, minimum: 1, maximum: 100 },
+		activation: {
+			type: 'string',
+			label: 'Activation',
+			default: 'sigmoid',
+			enum: [
+				'sigmoid',
+				'tanh',
+				'relu',
+				'leaky_relu',
+				'softsign',
+				'softplus',
+				'identity',
+				'polynomial',
+				'abs',
+				'gaussian',
+				'softmax',
+			],
+		},
+	},
+	function: { func: { type: 'string', default: '2*x' } },
 	gaussian: {},
-	leaky_relu: { a: 0.1 },
+	gelu: {},
+	leaky_relu: { a: { type: 'number', default: 0.1, multipleOf: 0.1, minimum: 0, maximum: 1 } },
 	identity: {},
 	log: {},
-	mean: { axis: 0 },
+	mean: { axis: { type: 'number', default: 0, minimum: 0, maximum: 10 } },
 	negative: {},
 	relu: {},
-	reshape: { size: [1, 1] },
+	reshape: { size: { type: 'array', default: [1, 1] } },
 	sigmoid: {},
 	softmax: {},
 	softplus: {},
 	softsign: {},
-	sparsity: { rho: 0.02 },
+	sparsity: { rho: { type: 'number', default: 0.02, multipleOf: 0.01 } },
 	square: {},
 	sqrt: {},
-	sum: { axis: 0 },
+	sum: { axis: { type: 'number', default: 0, minimum: 0, maximum: 10 } },
 	tanh: {},
-	transpose: { axis: [1, 0] },
-	variance: { axis: 0 },
+	transpose: { axis: { type: 'array', default: [1, 0] } },
+	variance: { axis: { type: 'number', default: 0, minimum: 0, maximum: 10 } },
 }
 
 const arrayAttrDefinition = {
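
Each attribute is now described by a small JSON-Schema-flavored descriptor (`type`, `default`, `minimum`, `maximum`, `multipleOf`, plus optional `label` and `enum`) instead of a bare default value. As a rough sketch of how such a descriptor can drive input validation — the `coerceAttr` helper below is illustrative only and not part of this commit:

	// Illustrative helper (not in the commit): clamp a user-supplied value
	// against one of the descriptors above, falling back to its default.
	const coerceAttr = (def, value) => {
		let v = typeof value === 'number' && !Number.isNaN(value) ? value : def.default
		if (def.minimum !== undefined) v = Math.max(def.minimum, v)
		if (def.maximum !== undefined) v = Math.min(def.maximum, v)
		return v
	}

	coerceAttr(layerTypes.dropout.drop_rate, 1.7) // => 1 (clamped to maximum)
	coerceAttr(layerTypes.dropout.drop_rate, NaN) // => 0.5 (the default)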
@@ -49,22 +107,23 @@ const nnModelDefinition = {
 	const layers = Vue.ref([
 		{
 			type: 'full',
-			size: 10,
-			a: 'sigmoid',
-			poly_pow: 2,
+			out_size: 10,
+			activation: 'sigmoid',
 		},
 	])
 
 	const changeType = function (idx) {
-		const layer = { type: layers.value[idx].type, ...layerTypes[layers.value[idx].type] }
+		const layer = { type: layers.value[idx].type }
+		for (const [k, v] of Object.entries(layerTypes[layers.value[idx].type])) {
+			layer[k] = v.default
+		}
 		layers.value.splice(idx, 1, layer)
 	}
 	const addLayer = function () {
 		layers.value.push({
 			type: 'full',
-			size: 10,
-			a: 'sigmoid',
-			poly_pow: 2,
+			out_size: 10,
+			activation: 'sigmoid',
 		})
 	}
 
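With this change, `changeType` builds a layer's initial attributes from the descriptors' `default` fields rather than spreading the raw schema objects into the layer. For instance, switching a layer to `clip` now yields `{ type: 'clip', min: 0, max: 1 }`. The same derivation as a standalone sketch (assumes `layerTypes` from above is in scope):

	const defaultsFor = type =>
		Object.fromEntries(Object.entries(layerTypes[type]).map(([k, v]) => [k, v.default]))

	defaultsFor('clip') // => { min: 0, max: 1 }
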
@@ -77,19 +136,7 @@ const nnModelDefinition = {
 	data: function () {
 		return {
 			layerTypeNames: Object.keys(layerTypes),
-			activations: [
-				'sigmoid',
-				'tanh',
-				'relu',
-				'leaky_relu',
-				'softsign',
-				'softplus',
-				'identity',
-				'polynomial',
-				'abs',
-				'gaussian',
-				'softmax',
-			],
+			layerTypes: layerTypes,
 		}
 	},
 	template: `
@@ -101,47 +148,24 @@ const nnModelDefinition = {
 			<select v-model="layer.type" v-on:change="changeType(i)">
 				<option v-for="type in layerTypeNames" :value="type">{{ type }}</option>
 			</select>
-			<template v-if="layer.type === 'clip'">
-				Min: <input v-model.number="layer.min" type="number" step="0.1">
-				Max: <input v-model.number="layer.max" type="number" step="0.1">
-			</template>
-			<template v-if="layer.type === 'conv'">
-				Kernel: <input v-model.number="layer.kernel" type="number">
-				Channel: <input v-model.number="layer.channel" type="number">
-			</template>
-			<template v-if="layer.type === 'dropout'">
-				Drop Rate: <input v-model.number="layer.drop_rate" type="number" min="0" max="1" step="0.1">
-			</template>
-			<template v-if="layer.type === 'full'">
-				Size: <input v-model.number="layer.size" type="number" min="1" max="100">
-				Activation: <select v-model="layer.a" v-on:change="$forceUpdate()">
-					<option v-for="a in activations" :value="a">{{ a }}</option>
-				</select>
-				<input v-if="layer.a === 'polynomial'" v-model.number="layer.poly_pow" type="number" min="1" max="10">
-			</template>
-			<template v-if="layer.type === 'leaky_relu'">
-				Alpha: <input v-model.number="layer.a" type="number" min="0" max="1" step="0.1">
-			</template>
-			<template v-if="layer.type === 'mean'">
-				Axis: <input v-model.number="layer.axis" type="number" min="0" max="10">
-			</template>
-			<template v-if="layer.type === 'polynomial'">
-				n: <input v-model.number="layer.n" type="number" min="0" max="10">
-			</template>
-			<template v-if="layer.type === 'reshape'">
-				Sizes: <array_attr v-model="layer.size" />
-			</template>
-			<template v-if="layer.type === 'sparsity'">
-				Rho: <input v-model.number="layer.rho" type="number" />
-			</template>
-			<template v-if="layer.type === 'sum'">
-				Axis: <input v-model.number="layer.axis" type="number" min="0" max="10">
-			</template>
-			<template v-if="layer.type === 'transpose'">
-				Axis: <array_attr v-model="layer.axis" />
-			</template>
-			<template v-if="layer.type === 'variance'">
-				Axis: <input v-model.number="layer.axis" type="number" min="0" max="10">
+			<template v-for="(aobj, attr) in layerTypes[layer.type]" :key="attr">
+				{{ aobj.label ?? attr }}
+				<template v-if="aobj.type === 'number'">
+					<input v-model.number="layer[attr]" type="number" :step="aobj.multipleOf" :min="aobj.minimum" :max="aobj.maximum">
+				</template>
+				<template v-if="aobj.type === 'string'">
+					<template v-if="aobj.enum">
+						<select v-model="layer[attr]">
+							<option v-for="a in aobj.enum" :value="a">{{ a }}</option>
+						</select>
+					</template>
+					<template v-if="!aobj.enum">
+						<input :value="layer[attr]" type="text">
+					</template>
+				</template>
+				<template v-if="aobj.type === 'array'">
+					<array_attr v-model="layer[attr]" />
+				</template>
 			</template>
 			<input type="button" value="x" v-on:click="layers.splice(i, 1)">
 		</div>
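
The twelve hand-written per-type `<template v-if="layer.type === ...">` blocks collapse into a single loop that dispatches on each descriptor's `type`. In plain JavaScript the dispatch amounts to something like the sketch below — the `render*` functions are hypothetical stand-ins for what Vue actually renders, not part of the commit:

	// Hypothetical mirror of the template's per-attribute dispatch:
	const renderNumber = (label, def) => console.log(label, '<input type="number">', def)
	const renderSelect = (label, options) => console.log(label, '<select>', options)
	const renderText = label => console.log(label, '<input type="text">')
	const renderArray = label => console.log(label, '<array_attr>')

	const layer = { type: 'dropout', drop_rate: 0.5 }
	for (const [attr, aobj] of Object.entries(layerTypes[layer.type])) {
		const label = aobj.label ?? attr
		if (aobj.type === 'number') renderNumber(label, aobj)
		else if (aobj.type === 'string' && aobj.enum) renderSelect(label, aobj.enum)
		else if (aobj.type === 'string') renderText(label)
		else if (aobj.type === 'array') renderArray(label)
	}
	// logs: Drop rate <input type="number"> { type: 'number', label: 'Drop rate', ... }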
@@ -160,26 +184,12 @@ export default class NeuralNetworkBuilder {
 	}
 
 	get layers() {
-		const l = this._vue ? this._vue.$refs.layerselm.layers : [{ type: 'full', size: 10, a: 'sigmoid' }]
-		const r = []
-		for (let i = 0; i < l.length; i++) {
-			if (l[i].type === 'full') {
-				r.push({ type: 'full', out_size: l[i].size })
-				r.push({ type: l[i].a, n: l[i].poly_pow })
-			} else {
-				r.push(l[i])
-			}
-		}
-		return r
+		const l = this._vue ? this._vue.$refs.layerselm.layers : [{ type: 'full', out_size: 10, a: 'sigmoid' }]
+		return l.map(v => ({ ...v }))
 	}
 
 	get invlayers() {
-		const l = this.layers
-		const r = []
-		for (let i = l.length - 1; i >= 0; i -= 2) {
-			r.push(l[i - 1], l[i])
-		}
-		return r
+		return this.layers.concat().reverse()
 	}
 
 	get optimizer() {
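
`get layers` no longer expands each `full` row into a fully-connected layer plus a separate activation layer — the activation now travels with the layer object itself — so it can return shallow copies directly, and `invlayers` becomes a plain reversal instead of re-pairing layers two at a time. A quick standalone check of the new behavior:

	const layers = [{ type: 'full', out_size: 10 }, { type: 'relu' }]
	const invlayers = layers.concat().reverse()
	// invlayers => [{ type: 'relu' }, { type: 'full', out_size: 10 }]
	// concat() copies the array first, so reverse() never mutates `layers`.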

Comments (0)