Skip to content

Commit 2c54223

Browse files
author
Daniel
committed
minor compatibility update; added MetaInfo to weights in full translation test SEDGE
1 parent 310029a commit 2c54223

File tree

4 files changed

+24
-16
lines changed

4 files changed

+24
-16
lines changed

tests/full_translation_test_SEDGE/network.hpp

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -5,12 +5,12 @@
55

66
// const auto __attribute__(( section(".data") )) network=layers::Sequence(
77
const auto network=layers::Sequence(
8-
layers::Sedge<float,Complex<float>>(A0, B0_1_48, B0_bias, C0_1_96, C0_bias, Matrix<float, "E", 1>{1}, LeakyReLU<float>),
9-
layers::Sedge<float,Complex<float>>(A1, B1_1_48, B1_bias, C1_1_96, C1_bias, Matrix<float, "E", 1>{1}, LeakyReLU<float>),
10-
layers::Sedge<float,Complex<float>>(A2, B2_1_48, B2_bias, C2_1_96, C2_bias, Matrix<float, "E", 1>{1}, LeakyReLU<float>),
11-
layers::Sedge<float,Complex<float>>(A3, B3_1_48, B3_bias, C3_1_96, C3_bias, Matrix<float, "E", 1>{1}, LeakyReLU<float>),
12-
layers::Sedge<float,Complex<float>>(A4, B4_1_48, B4_bias, C4_1_96, C4_bias, Matrix<float, "E", 1>{1}, LeakyReLU<float>),
13-
layers::Sedge<float,Complex<float>>(A5, B5_1_48, B5_bias, C5_1_96, C5_bias, Matrix<float, "E", 1>{1}, LeakyReLU<float>),
8+
layers::Sedge<float,Complex<float>>(A0, B0_1_48, B0_bias, C0_1_96, C0_bias, SkipLayer0_1_112, LeakyReLU<float>),
9+
layers::Sedge<float,Complex<float>>(A1, B1_1_48, B1_bias, C1_1_96, C1_bias, SkipLayer1_1_112, LeakyReLU<float>),
10+
layers::Sedge<float,Complex<float>>(A2, B2_1_48, B2_bias, C2_1_96, C2_bias, SkipLayer2_1_112, LeakyReLU<float>),
11+
layers::Sedge<float,Complex<float>>(A3, B3_1_48, B3_bias, C3_1_96, C3_bias, SkipLayer3_1_112, LeakyReLU<float>),
12+
layers::Sedge<float,Complex<float>>(A4, B4_1_48, B4_bias, C4_1_96, C4_bias, SkipLayer4_1_112, LeakyReLU<float>),
13+
layers::Sedge<float,Complex<float>>(A5, B5_1_48, B5_bias, C5_1_96, C5_bias, SkipLayer5_1_112, LeakyReLU<float>),
1414
layers::SumReduction<"S">(),
1515
layers::Linear<float>(W_1_112, b, PassThrough<float>)
1616
);

tests/full_translation_test_SEDGE/test_SEDGE.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -114,6 +114,10 @@
114114
file.write(f'#define NUMBER_OF_LAYERS {number_of_ssm_layers}\n')
115115

116116
file.write('\n')
117+
file.write('std::string MetaInfo = R"(\n')
118+
file.write('This model was generated by the full translation test Script.\n')
119+
file.write(f'It contains {number_of_ssm_layers} SSM layers.\n')
120+
file.write(')";\n')
117121
step_scale = []
118122
for index, layer in enumerate(range(len(weights_dict))):
119123
if 'step_scale' in weights_dict[index].keys():
@@ -128,15 +132,15 @@
128132
file.write(f'constexpr unsigned int step_scale[] = {{{"".join([f"{x}," for x in step_scale_tmp])[:-1]}}};\n')
129133
file.write(f'constexpr unsigned int step_scale_index_offsets[] = {{{"".join([f"{x}," for x in index_ofsets])[:-1]}}};\n')
130134

131-
write_network_weights(weights_dict, step_scale, file)
135+
write_network_weights(weights_dict, step_scale, file, os.path.join(os.path.dirname(__file__),include_path, "include"))
132136

133137
with open("network.hpp", "w") as file:
134138
file.write('#pragma once\n')
135139
file.write(f'#include "include/NeuralNetwork.hpp"\n')
136140
file.write('#include "weights.inc"\n')
137141
file.write('#include "weights_unrolled.inc"\n')
138142
file.write('\n')
139-
write_network(weights_dict, file, B_KP, C_KP, S_KP, W_KP)
143+
write_network(weights_dict, file,os.path.join(os.path.dirname(__file__),include_path, "include"), B_KP, C_KP, S_KP, W_KP)
140144

141145
total_number_of_weights = 0
142146
for index,_ in enumerate(weights_dict):

tests/full_translation_test_SEDGE/test_SEdge.cpp

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -177,11 +177,16 @@ auto getStepScaleTImes() {
177177
return array;
178178
}
179179

180+
std::string getMetaInfo(){
181+
return MetaInfo;
182+
}
183+
180184
PYBIND11_MODULE(CppSEdge, m) {
181185
m.doc() = "Runs the Network";
182186

183187
m.def("run", &run_SEdge, "Runs the network, takes input as a numpy array");
184188
m.def("printModelInfo", &printModelInfo, "Prints the model information");
185189
m.def("getInputSize", &getInputSize, "Returns the input size of the network");
186190
m.def("getStepScaleTimes", &getStepScaleTImes, "Returns the step scale times of the network");
191+
m.def("getMetaInfo", &getMetaInfo, "Returns the meta information of the network");
187192
}

tests/full_translation_test_SEDGE/write_network.py

Lines changed: 7 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2,16 +2,12 @@
22
import sys
33
import numpy as np
44

5-
6-
include_path = '../../'
7-
8-
sys.path.append(os.path.join(os.path.dirname(__file__),include_path, "include"))
9-
from write_weights import write_weight
10-
115
complex_types = [np.complex64, np.complex128]
126

7+
def write_network_weights(weights_dict, step_scale, file, Cpp_NN_include_path):
8+
sys.path.append(Cpp_NN_include_path)
9+
from write_weights import write_weight
1310

14-
def write_network_weights(weights_dict, step_scale, file):
1511
for index, layer in enumerate(weights_dict):
1612
print(weights_dict[index].keys())
1713
if 'A' in weights_dict[index].keys():
@@ -66,7 +62,10 @@ def write_network_weights(weights_dict, step_scale, file):
6662

6763

6864

69-
def write_network(weights_dict, file, B_KP=(12*4), C_KP=(12*8), S_KP=(14*8), W_KP=(14*8)):
65+
def write_network(weights_dict, file, Cpp_NN_include_path, B_KP=(12*4), C_KP=(12*8), S_KP=(14*8), W_KP=(14*8)):
66+
sys.path.append(Cpp_NN_include_path)
67+
from write_weights import write_weight
68+
7069
file.write('// const auto __attribute__(( section(".data") )) network=layers::Sequence(\n')
7170
file.write('const auto network=layers::Sequence(\n')
7271
string = ""

0 commit comments

Comments
 (0)