@@ -23,7 +23,7 @@
     typedef {bias_t.name} bias_t;
     typedef {weight_t.name} weight_t;
     template<class data_T, class res_T, class CONFIG_T>
-    using kernel = nnet::{dense_function}<data_T, res_T, CONFIG_T>;
+    using kernel = {dense_function}<data_T, res_T, CONFIG_T>;
     template<class x_T, class y_T>
     using product = nnet::product::{product_type}<x_T, y_T>;
 }};\n"""
@@ -53,7 +53,7 @@
     static const unsigned n_partitions = {n_partitions};
     static const unsigned n_pixels = out_width / n_partitions;
     template<class data_T, class CONFIG_T>
-    using fill_buffer = nnet::{fill_fn}<data_T, CONFIG_T>;
+    using fill_buffer = {fill_fn}<data_T, CONFIG_T>;
     typedef {accum_t.name} accum_t;
     typedef {bias_t.name} bias_t;
     typedef {weight_t.name} weight_t;
@@ -91,9 +91,10 @@ def format(self, node):
             params['scale_index_type'] = 'scale_index_regular'

         if node.model.config.get_config_value('IOType') == 'io_parallel':
-            params['fill_fn'] = f'fill_buffer_{node.index}'
+            namespace = params['namespace']
+            params['fill_fn'] = f'{namespace}::fill_buffer_{node.index}'
         else:
-            params['fill_fn'] = 'FillConv1DBuffer'
+            params['fill_fn'] = 'nnet::FillConv1DBuffer'

         is_pointwise_parallel_latency = (
             node.get_attr('filt_width') == 1
@@ -127,16 +128,18 @@ def format(self, node):
             node.get_input_variable().type.precision, node.get_weights('weight').type.precision
         )

+        namespace = params['namespace']
+
         if node.get_attr('strategy').lower() == 'latency':
-            mult_params['dense_function'] = 'DenseLatency'
+            mult_params['dense_function'] = 'nnet::DenseLatency'
         elif node.get_attr('strategy').lower() == 'resource':
             if int(mult_params['reuse_factor']) <= int(mult_params['n_in']):
-                mult_params['dense_function'] = 'DenseResource_rf_leq_nin'
+                mult_params['dense_function'] = 'nnet::DenseResource_rf_leq_nin'
             else:
-                mult_params['dense_function'] = 'DenseResource_rf_gt_nin_rem0'
+                mult_params['dense_function'] = 'nnet::DenseResource_rf_gt_nin_rem0'
             # The 3rd case is never used
         elif node.get_attr('strategy').lower() == 'resource_unrolled':
-            mult_params['dense_function'] = f'dense_resource_unrolled_{node.index}'
+            mult_params['dense_function'] = f'{namespace}::dense_resource_unrolled_{node.index}'

         mult_config = self.mult_template.format(**mult_params)

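As an aside, a minimal sketch (not hls4ml code; the function name `pick_dense_function`, the `myproject` namespace, and the index/reuse values are illustrative) of the mapping this hunk introduces: library kernels are now written with an explicit `nnet::` prefix, while the per-layer generated `resource_unrolled` kernel is qualified with the project namespace taken from the layer's parameters.

```python
# Sketch only: mirrors the strategy-to-dense_function mapping from the diff above.
def pick_dense_function(strategy, reuse_factor, n_in, namespace, index):
    strategy = strategy.lower()
    if strategy == 'latency':
        return 'nnet::DenseLatency'                   # library kernel
    if strategy == 'resource':
        if reuse_factor <= n_in:
            return 'nnet::DenseResource_rf_leq_nin'   # library kernel
        return 'nnet::DenseResource_rf_gt_nin_rem0'   # library kernel
    if strategy == 'resource_unrolled':
        # per-layer generated kernel, qualified with the project namespace
        return f'{namespace}::dense_resource_unrolled_{index}'
    raise ValueError(f'Unknown strategy: {strategy}')

print(pick_dense_function('resource_unrolled', 8, 16, 'myproject', 3))
# -> myproject::dense_resource_unrolled_3
```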
@@ -194,7 +197,7 @@ def __init__(self):
     static const unsigned n_partitions = {n_partitions};
     static const unsigned n_pixels = out_height * out_width / n_partitions;
     template<class data_T, class CONFIG_T>
-    using fill_buffer = nnet::{fill_fn}<data_T, CONFIG_T>;
+    using fill_buffer = {fill_fn}<data_T, CONFIG_T>;
     typedef {accum_t.name} accum_t;
     typedef {bias_t.name} bias_t;
     typedef {weight_t.name} weight_t;
@@ -238,9 +241,10 @@ def format(self, node):
             params['scale_index_width_type'] = 'scale_index_regular'

         if node.model.config.get_config_value('IOType') == 'io_parallel':
-            params['fill_fn'] = f'fill_buffer_{node.index}'
+            namespace = params['namespace']
+            params['fill_fn'] = f'{namespace}::fill_buffer_{node.index}'
         else:
-            params['fill_fn'] = 'FillConv2DBuffer'
+            params['fill_fn'] = 'nnet::FillConv2DBuffer'

         params['min_height'] = node.get_attr('min_height', node.get_attr('in_height'))
         params['min_width'] = node.get_attr('min_width', node.get_attr('in_width'))
@@ -256,16 +260,17 @@ def format(self, node):
             node.get_input_variable().type.precision, node.get_weights('weight').type.precision
         )

+        namespace = params['namespace']
         if node.get_attr('strategy').lower() == 'latency':
-            mult_params['dense_function'] = 'DenseLatency'
+            mult_params['dense_function'] = 'nnet::DenseLatency'
         elif node.get_attr('strategy').lower() == 'resource':
             if int(mult_params['reuse_factor']) <= int(mult_params['n_in']):
-                mult_params['dense_function'] = 'DenseResource_rf_leq_nin'
+                mult_params['dense_function'] = 'nnet::DenseResource_rf_leq_nin'
             else:
-                mult_params['dense_function'] = 'DenseResource_rf_gt_nin_rem0'
+                mult_params['dense_function'] = 'nnet::DenseResource_rf_gt_nin_rem0'
             # The 3rd case is never used
         elif node.get_attr('strategy').lower() == 'resource_unrolled':
-            mult_params['dense_function'] = f'dense_resource_unrolled_{node.index}'
+            mult_params['dense_function'] = f'{namespace}::dense_resource_unrolled_{node.index}'

         mult_config = self.mult_template.format(**mult_params)

@@ -341,9 +346,10 @@ def format(self, node):
         params['weight_t'] = node.get_weights('depthwise').type
         params['bias_t'] = node.get_weights('zero_bias').type
         if node.model.config.get_config_value('IOType') == 'io_parallel':
-            params['fill_fn'] = f'fill_buffer_{node.index}_dw'
+            namespace = params['namespace']
+            params['fill_fn'] = f'{namespace}::fill_buffer_{node.index}_dw'
         else:
-            params['fill_fn'] = 'FillConv1DBuffer'
+            params['fill_fn'] = 'nnet::FillConv1DBuffer'

         if node.get_attr('unscaled'):
             params['scale_index_type'] = 'scale_index_unscaled'
@@ -387,9 +393,10 @@ def format(self, node):
         params['min_width'] = params['in_width']
         params['instructions'] = '0'
         if node.model.config.get_config_value('IOType') == 'io_parallel':
-            params['fill_fn'] = f'fill_buffer_{node.index}_pw'
+            namespace = params['namespace']
+            params['fill_fn'] = f'{namespace}::fill_buffer_{node.index}_pw'
         else:
-            params['fill_fn'] = 'FillConv1DBuffer'
+            params['fill_fn'] = 'nnet::FillConv1DBuffer'

         if node.get_attr('unscaled'):
             params['scale_index_type'] = 'scale_index_unscaled'
@@ -474,9 +481,10 @@ def format(self, node):
         params['index'] = str(node.index) + '_depthwise'
         params['weight_t'] = node.get_weights('depthwise').type
         if node.model.config.get_config_value('IOType') == 'io_parallel':
-            params['fill_fn'] = f'fill_buffer_{node.index}_dw'
+            namespace = params['namespace']
+            params['fill_fn'] = f'{namespace}::fill_buffer_{node.index}_dw'
         else:
-            params['fill_fn'] = 'FillConv2DBuffer'
+            params['fill_fn'] = 'nnet::FillConv2DBuffer'

         if node.get_attr('unscaled_h'):
             params['scale_index_height_type'] = 'scale_index_unscaled'
@@ -528,9 +536,10 @@ def format(self, node):
         params['min_width'] = params['in_width']
         params['instructions'] = '0'
         if node.model.config.get_config_value('IOType') == 'io_parallel':
-            params['fill_fn'] = f'fill_buffer_{node.index}_pw'
+            namespace = params['namespace']
+            params['fill_fn'] = f'{namespace}::fill_buffer_{node.index}_pw'
         else:
-            params['fill_fn'] = 'FillConv2DBuffer'
+            params['fill_fn'] = 'nnet::FillConv2DBuffer'

         if node.get_attr('unscaled_h'):
             params['scale_index_height_type'] = 'scale_index_unscaled'
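For reference, a minimal sketch of how the namespaced `fill_fn` strings produced above land in the generated C++ once the templates are formatted. The `config_fragment` string, the `myproject` namespace, and the index `2` are hypothetical stand-ins, not the actual hls4ml template.

```python
# Sketch only: shows the {fill_fn} substitution into a config-template fragment.
config_fragment = (
    "    template<class data_T, class CONFIG_T>\n"
    "    using fill_buffer = {fill_fn}<data_T, CONFIG_T>;\n"
)

# io_parallel: per-layer generated helper, qualified with the project namespace
print(config_fragment.format(fill_fn='myproject::fill_buffer_2'))

# io_stream: generic library helper, now written with an explicit nnet:: prefix
print(config_fragment.format(fill_fn='nnet::FillConv2DBuffer'))
```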