Commit 70323c9

Init: add examples
1 parent a8927f3 commit 70323c9

File tree

194 files changed: +20150 -0 lines changed

experiments/.gitignore

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
venv/

Lines changed: 394 additions & 0 deletions
@@ -0,0 +1,394 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "5fe41dbe-da97-4f29-a20b-05145fdecfe1",
"metadata": {},
"source": [
"FPGA ML inference with oneAPI backend"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a9b6e6fd-a294-44bc-8b80-6e5d5408f877",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/opt/intel/oneapi/compiler/2024.2/bin/icpx\n"
]
}
],
"source": [
"!which icpx"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d8597340-7981-4207-967a-30e9501b8925",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"import os; os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' \n",
"\n",
"import tensorflow as tf; tf.get_logger().setLevel('INFO')\n",
"\n",
"import hls4ml\n",
"import matplotlib.pyplot as plt\n",
"\n",
"from tensorflow.keras.models import Sequential\n",
"from tensorflow.keras.layers import Dense, GRU, Conv1D, Conv2D, Flatten, MaxPool1D, MaxPool2D, Activation, BatchNormalization, Dropout\n",
"from tensorflow.keras.losses import MSE\n",
"from tensorflow.keras.optimizers import Adam\n",
"import numpy as np"
]
},
{
"cell_type": "markdown",
"id": "9b8ee1f2-9540-4a6f-8984-172034a72d92",
"metadata": {},
"source": [
"Example of Conv1D layers followed by GRU (dma hostpipe)\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 30,
"id": "5e2f6fd6-f2fa-44a7-b749-a46a9405c4d4",
"metadata": {},
"outputs": [],
"source": [
"model = Sequential()\n",
"model.add(Conv1D(16, kernel_size=3, padding='same', input_shape=(32,3)))\n",
"model.add(Activation(activation='relu', name='relu1'))\n",
"model.add(GRU(16))\n",
"model.compile(loss='mse', optimizer=Adam())"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "471fe60b-2f09-47cb-b21d-410e6c327017",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Interpreting Sequential\n",
"Topology:\n",
"Layer name: conv1d_4_input, layer type: InputLayer, input shapes: [[None, 32, 3]], output shape: [None, 32, 3]\n",
"Layer name: conv1d_4, layer type: Conv1D, input shapes: [[None, 32, 3]], output shape: [None, 32, 16]\n",
"Layer name: relu1, layer type: Activation, input shapes: [[None, 32, 16]], output shape: [None, 32, 16]\n",
"Layer name: gru_4, layer type: GRU, input shapes: [[None, 32, 16]], output shape: [None, 16]\n",
"Creating HLS model\n"
]
}
],
"source": [
"hls_model = hls4ml.converters.convert_from_keras_model(\n",
" model=model,\n",
" output_dir=\"hostpipe\",\n",
" backend=\"oneAPI\",\n",
" part=\"Agilex7\")"
]
},
{
"cell_type": "code",
"execution_count": 33,
"id": "1e1d1b3f-2eaf-4cc3-bbff-ddd39de27d4f",
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Writing HLS project\n",
"Done\n"
]
}
],
"source": [
"# Write the project to disk and invoke oneAPI backend\n",
"hls_model.compile()"
]
},
{
"cell_type": "code",
"execution_count": 34,
"id": "9c9376b6-92ac-405b-a9da-60f960575011",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[ 0.34667969, -0.32226562, 0.49316406, -0.2265625 , -0.48046875,\n",
" -0.55273438, -0.04882812, -0.16699219, 0.21972656, -0.31347656,\n",
" 0.25390625, -0.15820312, 0.17578125, 0.2890625 , 0.30957031,\n",
" 0.06054688],\n",
" [ 0.34667969, -0.32226562, 0.49316406, -0.2265625 , -0.48046875,\n",
" -0.55273438, -0.04882812, -0.16699219, 0.21972656, -0.31347656,\n",
" 0.25390625, -0.15820312, 0.17578125, 0.2890625 , 0.30957031,\n",
" 0.06054688],\n",
" [ 0.34667969, -0.32226562, 0.49316406, -0.2265625 , -0.48046875,\n",
" -0.55273438, -0.04882812, -0.16699219, 0.21972656, -0.31347656,\n",
" 0.25390625, -0.15820312, 0.17578125, 0.2890625 , 0.30957031,\n",
" 0.06054688]])"
]
},
"execution_count": 34,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"X_test = np.ones((3, 32, 3))\n",
"hls_model.predict(X_test)"
]
},
{
"cell_type": "markdown",
"id": "2e5d80cc",
"metadata": {},
"source": [
"Get dense streaming"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "faf22038",
"metadata": {},
"outputs": [],
"source": [
"def get_dense():\n",
" model = Sequential()\n",
" model.add(Dense(4, input_shape=(8,), name='fc1'))\n",
" model.add(Dense(2, name='fc2'))\n",
" model.compile()\n",
" model.summary()\n",
" config = hls4ml.utils.config_from_keras_model(model, granularity='name', default_precision='ac_fixed<16, 6>')\n",
" hls_model = hls4ml.converters.convert_from_keras_model(model=model, output_dir=\"dense_streaming\", backend=\"oneAPI\", part=\"Agilex7\", io_type=\"io_stream\", hls_config=config)\n",
" return model, config, hls_model\n",
"mlp_cpu, config, mlp_hls = get_dense()\n",
"mlp_hls.compile()\n",
"mlp_hls.predict(np.ones(8))"
]
},
{
"cell_type": "markdown",
"id": "d151759e-e371-43c3-b1ec-c0e6d7c59c47",
"metadata": {
"jp-MarkdownHeadingCollapsed": true
},
"source": [
"MLP streaming"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2cff8d32-c2f4-41b0-8ee5-e4d5bc2b7ea2",
"metadata": {},
"outputs": [],
"source": [
"def get_mlp():\n",
" model = Sequential()\n",
" model.add(Dense(4, input_shape=(8,), name='fc1'))\n",
" model.add(Activation(activation='relu', name='relu1'))\n",
" model.add(Dense(2, name='fc2'))\n",
" model.add(Activation(activation='relu', name='relu2'))\n",
" model.compile()\n",
" model.summary()\n",
" config = hls4ml.utils.config_from_keras_model(model, granularity='name', default_precision='ac_fixed<16, 6>')\n",
" hls_model = hls4ml.converters.convert_from_keras_model(model=model, output_dir=\"mlp_streaming\", backend=\"oneAPI\", part=\"Agilex7\", io_type=\"io_stream\", hls_config=config)\n",
" return model, config, hls_model\n",
"mlp_cpu, config, mlp_hls = get_mlp()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "afa6da72-432f-4a28-916f-bfb098408ff3",
"metadata": {},
"outputs": [],
"source": [
"def get_larger_mlp():\n",
" model = Sequential()\n",
" model.add(Dense(64, input_shape=(16,), name='fc1', kernel_initializer='lecun_uniform'))\n",
" model.add(Activation(activation='relu', name='relu1'))\n",
" model.add(Dense(32, name='fc2', kernel_initializer='lecun_uniform'))\n",
" model.add(Activation(activation='relu', name='relu2'))\n",
" model.add(Dense(32, name='fc3', kernel_initializer='lecun_uniform'))\n",
" model.add(Activation(activation='relu', name='relu3'))\n",
" model.add(Dense(5, name='output', kernel_initializer='lecun_uniform'))\n",
" model.add(Activation(activation='softmax', name='softmax'))\n",
" model.compile(loss='mse', optimizer=Adam())\n",
" model.summary()\n",
"\n",
" config = hls4ml.utils.config_from_keras_model(model, granularity='name', default_precision='ac_fixed<16, 6>')\n",
" hls_model = hls4ml.converters.convert_from_keras_model(model=model, output_dir=\"model_mlp_out\", backend=\"oneAPI\", part=\"Agilex7\", hls_config=config)\n",
"\n",
" return model, config, hls_model\n",
"\n",
"mlp_cpu, config, mlp_hls = get_larger_mlp()\n",
"mlp_hls.compile()\n",
"mlp_hls.predict(np.ones((1, 16)))"
]
},
{
"cell_type": "markdown",
"id": "bfa41472-1431-499b-93fe-8781ccb889d3",
"metadata": {
"jp-MarkdownHeadingCollapsed": true
},
"source": [
"CNN - MNIST"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ee582768-f534-4137-af45-d9ed37fb337f",
"metadata": {},
"outputs": [],
"source": [
"def get_cnn():\n",
" model = Sequential()\n",
" model.add(Conv2D(5, (4, 4), input_shape=(5, 5, 3)))\n",
" model.compile()\n",
" \n",
" config = hls4ml.utils.config_from_keras_model(model, granularity='name', default_precision='ac_fixed<16, 6>')\n",
" hls_model = hls4ml.converters.convert_from_keras_model(model=model, output_dir=\"model_cnn_out\", io_type=\"io_stream\", backend=\"oneAPI\", part=\"Agilex7\", hls_config=config)\n",
"\n",
" return model, config, hls_model\n",
"\n",
"cnn_cpu, config, cnn_hls = get_cnn()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "73e2b580-5575-4ce5-b2d7-907ec3aaba59",
"metadata": {},
"outputs": [],
"source": [
"def get_cnn():\n",
" model = Sequential()\n",
" model.add(Conv2D(16, (3, 3), activation='relu', padding='same', input_shape=(28, 28, 1)))\n",
" model.add(MaxPool2D(pool_size=(2, 2)))\n",
" model.add(Conv2D(32, (3, 3), activation='relu', padding='same'))\n",
" model.add(MaxPool2D(pool_size=(2, 2)))\n",
" model.add(Flatten())\n",
" model.add(Dense(10, activation='softmax'))\n",
" model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n",
" model.summary()\n",
"\n",
" config = hls4ml.utils.config_from_keras_model(model, granularity='name', default_precision='ac_fixed<16, 6>')\n",
" hls_model = hls4ml.converters.convert_from_keras_model(model=model, output_dir=\"model_cnn_out\", backend=\"oneAPI\", part=\"Agilex7\", hls_config=config)\n",
"\n",
" return model, config, hls_model\n",
"\n",
"cnn_cpu, config, cnn_hls = get_cnn()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "359f4663-e493-4776-9eb3-07ca054ca982",
"metadata": {},
"outputs": [],
"source": [
"cnn_hls.compile()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ee82fbf2-6836-424a-b7a0-8f949eeede50",
"metadata": {},
"outputs": [],
"source": [
"cnn_cpu.predict(np.ones((1, 28, 28, 1)))"
]
},
{
"cell_type": "markdown",
"id": "5f774f2f-c50e-463d-835d-09768beaea85",
"metadata": {},
"source": [
"## Library and Layer Support\n",
"\n",
"### ML framework support:\n",
"\n",
"(Q)Keras\n",
"\n",
"PyTorch\n",
"\n",
"(Q)ONNX (in development)\n",
"\n",
"### Neural network architectures:\n",
"\n",
"Fully connected NN (multilayer perceptron, MLP)\n",
"\n",
"Convolutional NN\n",
"\n",
"Recurrent NN (LSTM, GRU)\n",
"\n",
"Graph NN (GarNet)\n",
"\n",
"### Layers:\n",
"\n",
"- Core Layers\n",
"\n",
"InputLayer, Dropout, Flatten, Dense, TernaryDense, BinaryDense, Transpose, Resize\n",
"\n",
"- Convolution\n",
"\n",
"Conv1D, Conv2D\n",
"\n",
"- Pooling\n",
"\n",
"MaxPooling1D, MaxPooling2D, AveragePooling1D, AveragePooling2D\n",
"\n",
"- Normalization\n",
"\n",
"BatchNormalization\n",
"\n",
"- Activation\n",
"\n",
"LeakyReLU, ThresholdedReLU, Sigmoid, ELU, PReLU, TanH, Binary TanH, Softmax, Softsign, SELU Activation"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.6"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
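
The notebook cells above stop at hls_model.compile() and hls_model.predict(), i.e. bit-accurate emulation on the host (the oneAPI backend drives an icpx-based project, which is why the first cell runs !which icpx). Below is a minimal sketch of how one of these converted models could then be taken through the full FPGA build with hls4ml, assuming the oneAPI backend and the Intel oneAPI/Quartus toolchain are installed; the model size, layer names, and output directory are illustrative only and are not part of this commit.

# Sketch: Keras -> hls4ml oneAPI conversion as in the notebook, then a full build.
# Everything here (layer sizes, names, the 'oneapi_prj' directory) is illustrative.
import numpy as np
import hls4ml
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation

model = Sequential([
    Dense(4, input_shape=(8,), name='fc1'),
    Activation('relu', name='relu1'),
    Dense(2, name='fc2'),
])
model.compile()

# Per-layer config; ac_fixed<16, 6> matches the default precision used in the notebook.
config = hls4ml.utils.config_from_keras_model(
    model, granularity='name', default_precision='ac_fixed<16, 6>')

hls_model = hls4ml.converters.convert_from_keras_model(
    model=model,
    hls_config=config,
    output_dir='oneapi_prj',
    backend='oneAPI',
    part='Agilex7',
    io_type='io_stream',
)

hls_model.compile()                        # host emulation library, as in the notebook cells
print(hls_model.predict(np.ones((1, 8))))  # quick numerical sanity check

# Full synthesis step; backend-specific build options vary between hls4ml
# releases, so the no-argument form is used here.
hls_model.build()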
